From c69afd398fa58f89716a1f8e553ebd6b2126d1ba Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Wed, 17 Nov 2021 22:34:54 +0000 Subject: [PATCH 01/73] Bump Confluent to 7.2.0-0, Kafka to 7.2.0-0 --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 28 ++++++++++++------------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 25 files changed, 40 insertions(+), 40 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index d5dd002ff9b..ca34f79fa29 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index dd3ef9b4518..dbf17c52109 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 384088cb767..1328f092f37 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index bfec3a75b11..626f8aeeabf 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 8a895f521ce..503447b4783 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index ed182603bc6..4f308253fc5 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/client/pom.xml b/client/pom.xml index 67ef3f19bb1..a6af240ab8f 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/core/pom.xml b/core/pom.xml index 4b46caab552..6a51bffa1ef 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 87b072a4385..9f2e99b617d 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 3ab178fc514..9987428add3 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/json-schema-serde/pom.xml 
b/json-schema-serde/pom.xml index fd9a15d8b30..9c58d87c841 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 6b7177e4eea..3d98cfa7a2e 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 65c9b24e4ca..f94875cfef8 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 4fe7c2a084a..3e6a261a37b 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -90,33 +90,33 @@

License Report

jopt-simple-4.9 | jar | 4.9 | The MIT License
<<<<<<< HEAD -kafka-avro-serializer-7.1.0-0jar7.1.0-0 +kafka-avro-serializer-7.2.0-0jar7.2.0-0 -kafka-clients-7.1.0-0-ccsjarincluded file +kafka-clients-7.2.0-0-ccsjarincluded file -kafka-connect-avro-converter-7.1.0-0jar7.1.0-0 +kafka-connect-avro-converter-7.2.0-0jar7.2.0-0 -kafka-json-serializer-7.1.0-0jar7.1.0-0 +kafka-json-serializer-7.2.0-0jar7.2.0-0 -kafka-schema-registry-7.1.0-0jar7.1.0-0 +kafka-schema-registry-7.2.0-0jar7.2.0-0 -kafka-schema-registry-client-7.1.0-0jar7.1.0-0 +kafka-schema-registry-client-7.2.0-0jar7.2.0-0 -kafka_2.11-7.1.0-0-ccsjarincluded file +kafka_2.11-7.2.0-0-ccsjarincluded file ======= -kafka-avro-serializer-7.1.0-0jar7.1.0-0 +kafka-avro-serializer-7.2.0-0jar7.2.0-0 -kafka-clients-7.1.0-0-ccsjarincluded file +kafka-clients-7.2.0-0-ccsjarincluded file -kafka-connect-avro-converter-7.1.0-0jar7.1.0-0 +kafka-connect-avro-converter-7.2.0-0jar7.2.0-0 -kafka-json-serializer-7.1.0-0jar7.1.0-0 +kafka-json-serializer-7.2.0-0jar7.2.0-0 -kafka-schema-registry-7.1.0-0jar7.1.0-0 +kafka-schema-registry-7.2.0-0jar7.2.0-0 -kafka-schema-registry-client-7.1.0-0jar7.1.0-0 +kafka-schema-registry-client-7.2.0-0jar7.2.0-0 -kafka_2.11-7.1.0-0-ccsjarincluded file +kafka_2.11-7.2.0-0-ccsjarincluded file >>>>>>> 5.1.x log4j-1.2.17jar1.2.17Apache 2.0
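A note on the parent version bump that follows: besides the literal 7.1.0-0 to 7.2.0-0 module versions, the root pom.xml hunk further below widens the rest-utils-parent version to the range [7.2.0-0, 7.2.1-0). In Maven version-range syntax the square bracket is an inclusive bound and the parenthesis an exclusive one, so the build resolves any 7.2.0-x revision of rest-utils-parent while never crossing into 7.2.1-0. A minimal sketch of the resulting parent declaration, with the element layout assumed to match the repository's existing pom.xml:

    <parent>
        <groupId>io.confluent</groupId>
        <artifactId>rest-utils-parent</artifactId>
        <!-- [ is inclusive, ) is exclusive: any 7.2.0-x build, never 7.2.1-0 -->
        <version>[7.2.0-0, 7.2.1-0)</version>
    </parent>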
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 04d50fceb16..2ed58c461fa 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index a1a8879efc7..5ef810306fe 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index 74211a01e57..78abca99a6a 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 9f322bd2f20..d94eff63f61 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.1.0-0, 7.1.1-0) + [7.2.0-0, 7.2.1-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 Confluent, Inc. http://confluent.io @@ -83,7 +83,7 @@ 3.11.4 3.7.1 2.1.10 - 7.1.0-0 + 7.2.0-0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 3fe40d1c259..5ece85bf250 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 02b033fbeb4..dfa87e4f194 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 324a2c9ee6f..3671151dfa0 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index d1c75680876..83fed7559d7 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index 7ffc8d55a57..ec22e40251d 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 8c37beed3c0..abad999a218 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 0b97f4ba2cb..8f3fdc31984 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.0-0 + 7.2.0-0 From bbe5a2d2da6360373db6de2064da3b6d6a0a6a15 Mon Sep 17 00:00:00 2001 From: Arnaud Esteve Date: Mon, 29 Nov 2021 23:12:36 +0100 Subject: [PATCH 02/73] Allow passing a specific class when creating a KafkaJsonSchemaSerde with a client (in tests) (#2088) * (feat) Allow passing a specific class when creating a KafkaJsonSchemaSerde with a client (in tests) * (feat) Allow passing a specific class when creating a KafkaProtobufSerde with a client (in tests) --- .../serdes/json/KafkaJsonSchemaSerde.java | 12 +++- 
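The hunks below add two-argument constructors so a test can hand the serde both a SchemaRegistryClient and the concrete class to deserialize into, instead of falling back to a generic Map. A minimal usage sketch mirroring the new test helper in this patch; MyEvent is a hypothetical Jackson-compatible POJO, and "fake" is only the placeholder URL the mock-client setup requires:

    SchemaRegistryClient client = new MockSchemaRegistryClient();
    // New constructor from this patch: client plus target class.
    KafkaJsonSchemaSerde<MyEvent> serde = new KafkaJsonSchemaSerde<>(client, MyEvent.class);
    Map<String, Object> conf = new HashMap<>();
    conf.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "fake");
    serde.configure(conf, false); // false = configuring a value serde, not a key serde
    byte[] payload = serde.serializer().serialize("some-topic", event);
    MyEvent typed = serde.deserializer().deserialize("some-topic", payload);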
.../serdes/json/KafkaJsonSchemaSerdeTest.java | 64 ++++++++++++++++++- .../serdes/protobuf/KafkaProtobufSerde.java | 12 +++- 3 files changed, 83 insertions(+), 5 deletions(-) diff --git a/json-schema-serde/src/main/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerde.java b/json-schema-serde/src/main/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerde.java index 6b822909b73..d7a6028f587 100644 --- a/json-schema-serde/src/main/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerde.java +++ b/json-schema-serde/src/main/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerde.java @@ -53,11 +53,19 @@ public KafkaJsonSchemaSerde(Class specificClass) { * For testing purposes only. */ public KafkaJsonSchemaSerde(final SchemaRegistryClient client) { + this(client, null); + } + + /** + * For testing purposes only. + */ + public KafkaJsonSchemaSerde(final SchemaRegistryClient client, final Class specificClass) { if (client == null) { throw new IllegalArgumentException("schema registry client must not be null"); } + this.specificClass = specificClass; inner = Serdes.serdeFrom(new KafkaJsonSchemaSerializer<>(client), - new KafkaJsonSchemaDeserializer<>(client)); + new KafkaJsonSchemaDeserializer<>(client)); } @Override @@ -103,4 +111,4 @@ private Map withSpecificClass(final Map config, boole return newConfig; } -} \ No newline at end of file +} diff --git a/json-schema-serde/src/test/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerdeTest.java b/json-schema-serde/src/test/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerdeTest.java index 74c514ca23d..a7a5de167c3 100644 --- a/json-schema-serde/src/test/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerdeTest.java +++ b/json-schema-serde/src/test/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerdeTest.java @@ -16,12 +16,15 @@ package io.confluent.kafka.streams.serdes.json; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.databind.ObjectMapper; +import io.confluent.kafka.schemaregistry.avro.AvroSchema; import org.junit.Test; import java.io.IOException; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; @@ -49,6 +52,26 @@ public class KafkaJsonSchemaSerdeTest { private static final JsonSchema recordSchema = new JsonSchema(recordSchemaString); + @JsonIgnoreProperties(ignoreUnknown = true) + private static class SomeTestRecord { + String string; + Integer number; + private SomeTestRecord() {} + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SomeTestRecord that = (SomeTestRecord) o; + return Objects.equals(string, that.string) && + Objects.equals(number, that.number); + } + } + private Object createJsonRecord() throws IOException { String json = "{\n" + " \"null\": null,\n" @@ -60,6 +83,17 @@ private Object createJsonRecord() throws IOException { return objectMapper.readValue(json, Object.class); } + private SomeTestRecord createJsonRecordWithClass() throws IOException { + String json = "{\n" + + " \"null\": null,\n" + + " \"boolean\": true,\n" + + " \"number\": 12,\n" + + " \"string\": \"string\"\n" + + "}"; + + return objectMapper.readValue(json, SomeTestRecord.class); + } + private static KafkaJsonSchemaSerde createConfiguredSerdeForRecordValues() 
{ SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient(); KafkaJsonSchemaSerde serde = new KafkaJsonSchemaSerde<>(schemaRegistryClient); @@ -69,6 +103,16 @@ private static KafkaJsonSchemaSerde createConfiguredSerdeForRecordValues return serde; } + private static KafkaJsonSchemaSerde createConfiguredSerdeForRecordValuesWithClass() { + SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient(); + KafkaJsonSchemaSerde serde = new KafkaJsonSchemaSerde<>(schemaRegistryClient, SomeTestRecord.class); + Map serdeConfig = new HashMap<>(); + serdeConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "fake"); + serde.configure(serdeConfig, false); + return serde; + } + + @Test public void shouldRoundTripRecords() throws Exception { // Given @@ -105,4 +149,22 @@ public void shouldRoundTripNullRecordsToNull() { public void shouldFailWhenInstantiatedWithNullSchemaRegistryClient() { new KafkaJsonSchemaSerde<>((SchemaRegistryClient) null); } -} \ No newline at end of file + + @Test + public void shouldLetTheAbilityToDeserializeToASpecificClass() throws IOException { + // Given + KafkaJsonSchemaSerde serde = createConfiguredSerdeForRecordValuesWithClass(); + SomeTestRecord record = createJsonRecordWithClass(); + + // When + Object roundtrippedRecord = serde.deserializer().deserialize( + ANY_TOPIC, serde.serializer().serialize(ANY_TOPIC, record)); + + // Then + assertThat(roundtrippedRecord, equalTo(record)); + + // Cleanup + serde.close(); + } + +} diff --git a/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java b/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java index 4d33daefd95..8718c886e52 100644 --- a/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java +++ b/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java @@ -54,11 +54,19 @@ public KafkaProtobufSerde(Class specificProtobufClass) { * For testing purposes only. */ public KafkaProtobufSerde(final SchemaRegistryClient client) { + this(client, null); + } + + /** + * For testing purposes only. 
+ */ + public KafkaProtobufSerde(final SchemaRegistryClient client, final Class specificClass) { if (client == null) { throw new IllegalArgumentException("schema registry client must not be null"); } + this.specificProtobufClass = specificClass; inner = Serdes.serdeFrom(new KafkaProtobufSerializer<>(client), - new KafkaProtobufDeserializer<>(client)); + new KafkaProtobufDeserializer<>(client)); } @Override @@ -106,4 +114,4 @@ private Map withSpecificClass(final Map config, boole return newConfig; } -} \ No newline at end of file +} From 78673ec2333a2cdd428a15865bc065472cc3852c Mon Sep 17 00:00:00 2001 From: Xiaoya Li Date: Thu, 2 Dec 2021 17:11:17 -0800 Subject: [PATCH 03/73] Minor: close stream in (#2095) ResourceLoader --- .../io/confluent/connect/json/ResourceLoader.java | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/json-schema-converter/src/test/java/io/confluent/connect/json/ResourceLoader.java b/json-schema-converter/src/test/java/io/confluent/connect/json/ResourceLoader.java index bd5499afb6e..69072721a84 100644 --- a/json-schema-converter/src/test/java/io/confluent/connect/json/ResourceLoader.java +++ b/json-schema-converter/src/test/java/io/confluent/connect/json/ResourceLoader.java @@ -36,14 +36,16 @@ public ResourceLoader(String rootPath) { this.rootPath = requireNonNull(rootPath, "rootPath cannot be null"); } - public JSONObject readJSONObject(String relPath) { - InputStream stream = getStream(relPath); - return new JSONObject(new JSONTokener(stream)); + public JSONObject readJSONObject(String relPath) throws IOException { + try (InputStream stream = getStream(relPath)) { + return new JSONObject(new JSONTokener(stream)); + } } public JsonNode readJsonNode(String relPath) throws IOException { - InputStream stream = getStream(relPath); - return new ObjectMapper().readTree(stream); + try (InputStream stream = getStream(relPath)) { + return new ObjectMapper().readTree(stream); + } } public InputStream getStream(String relPath) { From c88dfa897867ddac41d4be13efbc8697e6bc5f4e Mon Sep 17 00:00:00 2001 From: Xiaoya Li Date: Thu, 2 Dec 2021 17:11:29 -0800 Subject: [PATCH 04/73] Minor: upgrade org.apache.maven:maven-plugin-api to 3.8.1 (#2094) --- maven-plugin/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 2ed58c461fa..b39578350b2 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -26,7 +26,7 @@ org.apache.maven maven-plugin-api - 3.6.3 + 3.8.1 org.apache.maven.plugin-tools From 006b4ded8b63715aa50760d5de26c0d62bfd7eb4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Giovanny=20Guti=C3=A9rrez?= Date: Fri, 10 Dec 2021 11:44:54 -0500 Subject: [PATCH 05/73] feat: Update wire-schema version to 4.0.0 (#2101) --- pom.xml | 15 +++++++- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 6 ++- .../protobuf/ProtobufSchemaUtils.java | 11 ++++++ .../protobuf/ProtobufSchemaTest.java | 37 +++++++++++++++++++ 5 files changed, 67 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index d94eff63f61..8bfa9ebb6a0 100644 --- a/pom.xml +++ b/pom.xml @@ -81,7 +81,7 @@ 1.12.2 2.5.1 3.11.4 - 3.7.1 + 4.0.0 2.1.10 7.2.0-0 1.21 @@ -133,7 +133,18 @@ com.squareup.wire - wire-schema + wire-schema-jvm + ${wire.version} + + + org.jetbrains.kotlin + kotlin-stdlib + + + + + com.squareup.wire + wire-runtime-jvm ${wire.version} diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 5ece85bf250..e8211580504 100644 --- a/protobuf-converter/pom.xml +++ 
b/protobuf-converter/pom.xml @@ -50,7 +50,7 @@ com.squareup.wire - wire-schema + wire-schema-jvm org.jetbrains.kotlin diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index dfa87e4f194..548aa006cbc 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -34,7 +34,11 @@ com.squareup.wire - wire-schema + wire-schema-jvm + + + com.squareup.wire + wire-runtime-jvm org.jetbrains.kotlin diff --git a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaUtils.java b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaUtils.java index b2020395009..ac6766aa57a 100644 --- a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaUtils.java +++ b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaUtils.java @@ -649,6 +649,17 @@ private static String formatOptionMapValue(Object value, boolean normalize) { appendIndented(sb, v); } sb.append("]"); + } else if (value instanceof OptionElement.OptionPrimitive) { + OptionElement.OptionPrimitive primitive = (OptionElement.OptionPrimitive)value; + switch (primitive.getKind()) { + case BOOLEAN: + case ENUM: + case NUMBER: + sb.append(primitive.getValue()); + break; + default: + sb.append(formatOptionMapValue(primitive.getValue(), normalize)); + } } else { sb.append(value); } diff --git a/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java b/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java index 8a125b8a00e..ba28ab6c492 100644 --- a/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java +++ b/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java @@ -325,6 +325,43 @@ public void testOptionEscape() throws Exception { assertTrue(parsed.contains("https://someUrl.com")); } + @Test + public void testEnumOption() { + String optionSchemaString = "import \"google/protobuf/descriptor.proto\";\n" + + "enum FooParameterType {\n" + + " NUMBER = 1;\n" + + " STRING = 2;\n" + + "}\n" + + " \n" + + "message FooOptions {\n" + + " optional string name = 1;\n" + + " optional FooParameterType type = 2; \n" + + "} \n" + + "extend google.protobuf.MessageOptions {\n" + + " repeated FooOptions foo = 12345;\n" + + "}\n" + + "\n" + + "message Message {\n" + + " option (foo) = {\n" + + " name: \"test\"\n" + + " type: STRING\n" + + " };\n" + + " \n" + + " option (foo) = {\n" + + " name: \"test2\"\n" + + " type: NUMBER\n" + + " };\n" + + " \n" + + " optional int32 b = 2;\n" + + "}"; + + ProtobufSchema schema = new ProtobufSchema(optionSchemaString); + String parsed = schema.canonicalString(); + + assertTrue(parsed.contains("type: STRING")); + assertTrue(parsed.contains("type: NUMBER")); + } + @Test public void testRecordToJson() throws Exception { DynamicMessage.Builder builder = recordSchema.newMessageBuilder(); From 65812b2ed066ce9d8659db084b5bfb054bd82df6 Mon Sep 17 00:00:00 2001 From: Ron Dagostino Date: Fri, 17 Dec 2021 12:00:26 -0500 Subject: [PATCH 06/73] Fix compile error from KAFKA-13456 fix: effectiveAdvertisedListeners (#2116) --- .../io/confluent/kafka/schemaregistry/ClusterTestHarness.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java b/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java index 
5840072fa01..821b82e7167 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java @@ -144,7 +144,7 @@ public void setUp() throws Exception { for(int i = 0; i < servers.size(); i++) { serverUrls[i] = getSecurityProtocol() + "://" + Utils.formatAddress( - servers.get(i).config().advertisedListeners().head().host(), + servers.get(i).config().effectiveAdvertisedListeners().head().host(), servers.get(i).boundPort(listenerType) ); } From 55dd7d94396f188065a78b69d2dee6ae2368397b Mon Sep 17 00:00:00 2001 From: amalgawa Date: Wed, 19 Jan 2022 23:35:47 +0530 Subject: [PATCH 07/73] Added restriction for __GLOBAL subject --- .../kafka/schemaregistry/storage/KafkaSchemaRegistry.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java index 89561f83906..a71d69a9bb7 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java @@ -103,6 +103,8 @@ public class KafkaSchemaRegistry implements SchemaRegistry, LeaderAwareSchemaReg * Schema versions under a particular subject are indexed from MIN_VERSION. */ public static final int MIN_VERSION = 1; + // Subject name under which global permissions are stored. + public static final String GLOBAL_RESOURCE_NAME = "__GLOBAL"; public static final int MAX_VERSION = Integer.MAX_VALUE; private static final Logger log = LoggerFactory.getLogger(KafkaSchemaRegistry.class); @@ -587,6 +589,11 @@ public int registerOrForward(String subject, boolean normalize, Map headerProperties) throws SchemaRegistryException { + + if(subject.equals(GLOBAL_RESOURCE_NAME)){ + throw new OperationNotPermittedException(GLOBAL_RESOURCE_NAME + " subject name is not allowed"); + } + Schema existingSchema = lookUpSchemaUnderSubject(subject, schema, normalize, false); if (existingSchema != null) { if (schema.getId() != null From 81bf7e5bbd9f4b39e689f4a4adca44b90ad7b16d Mon Sep 17 00:00:00 2001 From: amalgawa Date: Thu, 20 Jan 2022 01:06:48 +0530 Subject: [PATCH 08/73] Getting subject from Qualified subject, so that Tenant and context is removed --- .../kafka/schemaregistry/storage/KafkaSchemaRegistry.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java index a71d69a9bb7..2c77569f662 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java @@ -590,7 +590,7 @@ public int registerOrForward(String subject, Map headerProperties) throws SchemaRegistryException { - if(subject.equals(GLOBAL_RESOURCE_NAME)){ + if(QualifiedSubject.create(this.tenant(), subject).getSubject().equals(GLOBAL_RESOURCE_NAME)){ throw new OperationNotPermittedException(GLOBAL_RESOURCE_NAME + " subject name is not allowed"); } From 5fad217b46ab6cceccf9acb4b4700fe41d8dec67 Mon Sep 17 00:00:00 2001 From: amalgawa Date: Thu, 20 Jan 2022 10:55:17 +0530 Subject: [PATCH 09/73] Made suggested changes --- .../schemaregistry/rest/resources/ConfigResource.java | 6 +++++- 
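Taken together, patches 07 through 11 converge on one validation pattern, repeated across ConfigResource, ModeResource, and SubjectVersionsResource (diffs below): strip tenant and context with QualifiedSubject, then reject the reserved __GLOBAL name alongside the existing ISO-control-character check. A condensed sketch of that guard; the helper method is hypothetical, since the patches inline the condition at each call site:

    private boolean isReservedGlobalSubject(String subject) {
      return subject != null
          && QualifiedSubject.create(schemaRegistry.tenant(), subject).getSubject()
              .equals(KafkaSchemaRegistry.GLOBAL_RESOURCE_NAME);
    }

    // At each call site:
    if (subject != null && (CharMatcher.javaIsoControl().matchesAnyOf(subject)
        || isReservedGlobalSubject(subject))) {
      throw Errors.invalidSubjectException(subject);
    }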
.../kafka/schemaregistry/rest/resources/ModeResource.java | 6 +++++- .../rest/resources/SubjectVersionsResource.java | 7 ++++++- .../kafka/schemaregistry/storage/KafkaSchemaRegistry.java | 5 ----- 4 files changed, 16 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java index 4a117c301d4..748b8202a0c 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java @@ -15,6 +15,8 @@ package io.confluent.kafka.schemaregistry.rest.resources; +import static io.confluent.kafka.schemaregistry.storage.KafkaSchemaRegistry.GLOBAL_RESOURCE_NAME; + import com.google.common.base.CharMatcher; import io.confluent.kafka.schemaregistry.CompatibilityLevel; import io.confluent.kafka.schemaregistry.client.rest.Versions; @@ -91,9 +93,11 @@ public ConfigUpdateRequest updateSubjectLevelConfig( throw new RestInvalidCompatibilityException(); } - if (subject != null && CharMatcher.javaIsoControl().matchesAnyOf(subject)) { + if (subject != null && (CharMatcher.javaIsoControl().matchesAnyOf(subject) || + QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject().equals(GLOBAL_RESOURCE_NAME))) { throw Errors.invalidSubjectException(subject); } + subject = QualifiedSubject.normalize(schemaRegistry.tenant(), subject); try { diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java index cbd57aa4807..ed1398548eb 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java @@ -15,6 +15,8 @@ package io.confluent.kafka.schemaregistry.rest.resources; +import static io.confluent.kafka.schemaregistry.storage.KafkaSchemaRegistry.GLOBAL_RESOURCE_NAME; + import com.google.common.base.CharMatcher; import io.confluent.kafka.schemaregistry.client.rest.Versions; import io.confluent.kafka.schemaregistry.client.rest.entities.Mode; @@ -90,9 +92,11 @@ public ModeUpdateRequest updateMode( @QueryParam("force") boolean force ) { - if (subject != null && CharMatcher.javaIsoControl().matchesAnyOf(subject)) { + if (subject != null && CharMatcher.javaIsoControl().matchesAnyOf(subject) || + QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject().equals(GLOBAL_RESOURCE_NAME)) { throw Errors.invalidSubjectException(subject); } + subject = QualifiedSubject.normalize(schemaRegistry.tenant(), subject); io.confluent.kafka.schemaregistry.storage.Mode mode; diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java index 902cf1f060f..73c73771686 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java @@ -15,6 +15,8 @@ package io.confluent.kafka.schemaregistry.rest.resources; +import static io.confluent.kafka.schemaregistry.storage.KafkaSchemaRegistry.GLOBAL_RESOURCE_NAME; + import com.google.common.base.CharMatcher; import 
io.confluent.kafka.schemaregistry.avro.AvroSchema; import io.confluent.kafka.schemaregistry.client.rest.Versions; @@ -35,6 +37,7 @@ import io.confluent.kafka.schemaregistry.exceptions.UnknownLeaderException; import io.confluent.kafka.schemaregistry.rest.VersionId; import io.confluent.kafka.schemaregistry.rest.exceptions.Errors; +import io.confluent.kafka.schemaregistry.rest.exceptions.RestInvalidSubjectException; import io.confluent.kafka.schemaregistry.storage.KafkaSchemaRegistry; import io.confluent.kafka.schemaregistry.utils.QualifiedSubject; import io.confluent.rest.annotations.PerformanceMetric; @@ -279,9 +282,11 @@ public void register( subjectName, request.getVersion(), request.getId(), request.getSchemaType(), request.getSchema() == null ? 0 : request.getSchema().length()); - if (subjectName != null && CharMatcher.javaIsoControl().matchesAnyOf(subjectName)) { + if (subjectName != null && CharMatcher.javaIsoControl().matchesAnyOf(subjectName) || + QualifiedSubject.create(this.schemaRegistry.tenant(), subjectName).getSubject().equals(GLOBAL_RESOURCE_NAME)) { throw Errors.invalidSubjectException(subjectName); } + subjectName = QualifiedSubject.normalize(schemaRegistry.tenant(), subjectName); Map headerProperties = requestHeaderBuilder.buildRequestHeaders( diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java index 2c77569f662..26829514887 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java @@ -589,11 +589,6 @@ public int registerOrForward(String subject, boolean normalize, Map headerProperties) throws SchemaRegistryException { - - if(QualifiedSubject.create(this.tenant(), subject).getSubject().equals(GLOBAL_RESOURCE_NAME)){ - throw new OperationNotPermittedException(GLOBAL_RESOURCE_NAME + " subject name is not allowed"); - } - Schema existingSchema = lookUpSchemaUnderSubject(subject, schema, normalize, false); if (existingSchema != null) { if (schema.getId() != null From e194900d2a3d79f2c7dfd6a265b470c8b6636010 Mon Sep 17 00:00:00 2001 From: amalgawa Date: Thu, 20 Jan 2022 11:19:26 +0530 Subject: [PATCH 10/73] Made suggested changes --- .../kafka/schemaregistry/rest/resources/ConfigResource.java | 3 ++- .../kafka/schemaregistry/rest/resources/ModeResource.java | 3 ++- .../schemaregistry/rest/resources/SubjectVersionsResource.java | 3 ++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java index 748b8202a0c..1a4d5e5ce99 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java @@ -94,7 +94,8 @@ public ConfigUpdateRequest updateSubjectLevelConfig( } if (subject != null && (CharMatcher.javaIsoControl().matchesAnyOf(subject) || - QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject().equals(GLOBAL_RESOURCE_NAME))) { + QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject() + .equals(GLOBAL_RESOURCE_NAME))) { throw Errors.invalidSubjectException(subject); } diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java 
b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java index ed1398548eb..f2e491a04d7 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java @@ -93,7 +93,8 @@ public ModeUpdateRequest updateMode( ) { if (subject != null && CharMatcher.javaIsoControl().matchesAnyOf(subject) || - QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject().equals(GLOBAL_RESOURCE_NAME)) { + QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject() + .equals(GLOBAL_RESOURCE_NAME)) { throw Errors.invalidSubjectException(subject); } diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java index 73c73771686..bf4d1c30805 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java @@ -283,7 +283,8 @@ public void register( request.getSchema() == null ? 0 : request.getSchema().length()); if (subjectName != null && CharMatcher.javaIsoControl().matchesAnyOf(subjectName) || - QualifiedSubject.create(this.schemaRegistry.tenant(), subjectName).getSubject().equals(GLOBAL_RESOURCE_NAME)) { + QualifiedSubject.create(this.schemaRegistry.tenant(), subjectName).getSubject() + .equals(GLOBAL_RESOURCE_NAME)) { throw Errors.invalidSubjectException(subjectName); } From 31c9dc8da036a044f91f8833ec908bd2cd00b8f2 Mon Sep 17 00:00:00 2001 From: amalgawa Date: Thu, 20 Jan 2022 12:41:54 +0530 Subject: [PATCH 11/73] Made suggested changes --- .../kafka/schemaregistry/rest/resources/ModeResource.java | 4 ++-- .../rest/resources/SubjectVersionsResource.java | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java index f2e491a04d7..4d9737bb8dc 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java @@ -92,9 +92,9 @@ public ModeUpdateRequest updateMode( @QueryParam("force") boolean force ) { - if (subject != null && CharMatcher.javaIsoControl().matchesAnyOf(subject) || + if (subject != null && (CharMatcher.javaIsoControl().matchesAnyOf(subject) || QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject() - .equals(GLOBAL_RESOURCE_NAME)) { + .equals(GLOBAL_RESOURCE_NAME))) { throw Errors.invalidSubjectException(subject); } diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java index bf4d1c30805..b8ff4c4b908 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java @@ -282,9 +282,9 @@ public void register( subjectName, request.getVersion(), request.getId(), request.getSchemaType(), request.getSchema() == null ? 
0 : request.getSchema().length()); - if (subjectName != null && CharMatcher.javaIsoControl().matchesAnyOf(subjectName) || + if (subjectName != null && (CharMatcher.javaIsoControl().matchesAnyOf(subjectName) || QualifiedSubject.create(this.schemaRegistry.tenant(), subjectName).getSubject() - .equals(GLOBAL_RESOURCE_NAME)) { + .equals(GLOBAL_RESOURCE_NAME))) { throw Errors.invalidSubjectException(subjectName); } From ce5250f2340813346c85c06968116f4e14983eec Mon Sep 17 00:00:00 2001 From: Elliot West Date: Fri, 21 Jan 2022 06:46:57 +0000 Subject: [PATCH 12/73] Configure Schema Registry Client [m]TLS using PEM (#2062) * PEM implementation * logging * No password * Test * Minor changes to config checks, fixing tests and checkstyle * Remove apache commons dependency Co-authored-by: Elliot West Co-authored-by: Paul McDermott --- .../client/security/SslFactory.java | 550 +++++++++++++++--- .../client/security/SslFactoryTest.java | 312 ++++++++++ .../schemaregistry/rest/RestApiSslTest.java | 6 + 3 files changed, 791 insertions(+), 77 deletions(-) create mode 100644 client/src/test/java/io/confluent/kafka/schemaregistry/client/security/SslFactoryTest.java diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/security/SslFactory.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/security/SslFactory.java index 93b32283216..b5be739af5a 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/security/SslFactory.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/security/SslFactory.java @@ -16,29 +16,60 @@ package io.confluent.kafka.schemaregistry.client.security; -import org.apache.kafka.common.config.SslConfigs; +import static org.apache.kafka.common.security.ssl.DefaultSslEngineFactory.PEM_TYPE; -import javax.net.ssl.KeyManager; -import javax.net.ssl.KeyManagerFactory; -import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManagerFactory; -import java.io.FileInputStream; +import java.io.ByteArrayInputStream; import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; import java.security.GeneralSecurityException; +import java.security.Key; +import java.security.KeyFactory; import java.security.KeyStore; +import java.security.PrivateKey; import java.security.SecureRandom; +import java.security.cert.Certificate; +import java.security.cert.CertificateFactory; +import java.security.spec.InvalidKeySpecException; +import java.security.spec.PKCS8EncodedKeySpec; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Base64; +import java.util.Date; +import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import javax.crypto.Cipher; +import javax.crypto.EncryptedPrivateKeyInfo; +import javax.crypto.SecretKey; +import javax.crypto.SecretKeyFactory; +import javax.crypto.spec.PBEKeySpec; +import javax.net.ssl.KeyManager; +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManagerFactory; +import org.apache.kafka.common.KafkaException; +import org.apache.kafka.common.config.SslConfigs; +import org.apache.kafka.common.config.types.Password; +import org.apache.kafka.common.errors.InvalidConfigurationException; +import org.apache.kafka.common.utils.Utils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class SslFactory { + private static final Logger log = LoggerFactory.getLogger(SslFactory.class); + private final 
String provider; + private final String kmfAlgorithm; + private final String tmfAlgorithm; + private final SecurityStore keystore; + private final SecurityStore truststore; + private final SSLContext sslContext; + private final SecureRandom secureRandomImplementation; private String protocol; - private String provider; - private String kmfAlgorithm; - private String tmfAlgorithm; - private SecurityStore keystore = null; - private String keyPassword; - private SecurityStore truststore; - private SSLContext sslContext; public SslFactory(Map configs) { @@ -53,62 +84,142 @@ public SslFactory(Map configs) { this.tmfAlgorithm = (String) configs.get( SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG); + this.secureRandomImplementation = createSecureRandom((String) + configs.get(SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG)); + try { - createKeystore( - (String) configs.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG), + this.keystore = createKeystore((String) configs.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG), - (String) configs.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG), - (String) configs.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG) - ); + passwordOf(configs.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG)), + passwordOf(configs.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG)), + passwordOf(configs.get(SslConfigs.SSL_KEYSTORE_KEY_CONFIG)), + passwordOf(configs.get(SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG))); - createTruststore( + this.truststore = createTruststore( (String) configs.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG), - (String) configs.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG) - ); + passwordOf(configs.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)), + passwordOf(configs.get(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG))); - this.sslContext = createSslContext(); + this.sslContext = createSslContext(keystore, truststore); } catch (Exception e) { - throw new RuntimeException("Error initializing the ssl context for RestService" , e); + throw new RuntimeException("Error initializing the ssl context for RestService", e); + } + } + + private static SecureRandom createSecureRandom(String key) { + if (key == null) { + return null; + } + try { + return SecureRandom.getInstance(key); + } catch (GeneralSecurityException e) { + throw new KafkaException(e); } } - private static boolean isNotBlank(String str) { - return str != null && !str.trim().isEmpty(); + private static boolean isNotEmpty(CharSequence cs) { + return !isEmpty(cs); + } + + private static boolean isEmpty(CharSequence cs) { + return cs == null || cs.length() == 0; } - private SSLContext createSslContext() throws GeneralSecurityException, IOException { - if (truststore == null && keystore == null) { + private static SecurityStore createTruststore(String type, String path, Password password, + Password trustStoreCerts) { + if (trustStoreCerts != null) { + return createPemTrustStore(type, path, password, trustStoreCerts); + } else if (PEM_TYPE.equals(type) && isNotEmpty(path)) { + if (password != null) { + throw new InvalidConfigurationException( + "SSL trust store password cannot be specified for PEM format."); + } else { + return new FileBasedPemStore(path, null, false); + } + } else if (path == null && password != null) { + throw new InvalidConfigurationException( + "SSL trust store is not specified, but trust store password is specified."); + } else if (isNotEmpty(path)) { + return new FileBasedStore(type, path, 
password, null, false); + } else { return null; } - SSLContext sslContext; - if (isNotBlank(provider)) { - sslContext = SSLContext.getInstance(protocol, provider); + } + + private static SecurityStore createPemTrustStore(String type, String path, Password password, + Password trustStoreCerts) { + if (!PEM_TYPE.equals(type)) { + throw new InvalidConfigurationException( + "SSL trust store certs can be specified only for PEM, but trust store type is " + + type + "."); + } else if (isNotEmpty(path)) { + throw new InvalidConfigurationException( + "Both SSL trust store location and separate trust certificates are specified."); + } else if (password != null) { + throw new InvalidConfigurationException( + "SSL trust store password cannot be specified for PEM format."); } else { - sslContext = SSLContext.getInstance(protocol); + return new PemStore(trustStoreCerts); } + } + + // Visibility for testing + SecurityStore keyStore() { + return this.keystore; + } + + // Visibility for testing + SecurityStore trustStore() { + return this.truststore; + } - KeyManager[] keyManagers = null; - if (keystore != null) { - String kmfAlgorithm = - isNotBlank(this.kmfAlgorithm) ? this.kmfAlgorithm - : KeyManagerFactory.getDefaultAlgorithm(); - KeyManagerFactory kmf = KeyManagerFactory.getInstance(kmfAlgorithm); - KeyStore ks = keystore.load(); - String keyPassword = this.keyPassword != null ? this.keyPassword : keystore.password; - kmf.init(ks, keyPassword.toCharArray()); - keyManagers = kmf.getKeyManagers(); + private Password passwordOf(Object val) { + if (val == null || val.toString().trim().isEmpty()) { + return null; } + return new Password(val.toString()); + } - String tmfAlgorithm = - isNotBlank(this.tmfAlgorithm) ? this.tmfAlgorithm - : TrustManagerFactory.getDefaultAlgorithm(); - TrustManagerFactory tmf = TrustManagerFactory.getInstance(tmfAlgorithm); - KeyStore ts = truststore == null ? null : truststore.load(); - tmf.init(ts); + private SSLContext createSslContext(SecurityStore keystore, SecurityStore truststore) { + try { + SSLContext sslContext; + if (isNotEmpty(provider)) { + sslContext = SSLContext.getInstance(protocol, provider); + } else { + sslContext = SSLContext.getInstance(protocol); + } - sslContext.init(keyManagers, tmf.getTrustManagers(), new SecureRandom()); - return sslContext; + KeyManager[] keyManagers = null; + if (keystore != null || isNotEmpty(kmfAlgorithm)) { + String kmfAlgorithm; + if (isNotEmpty(this.kmfAlgorithm)) { + kmfAlgorithm = this.kmfAlgorithm; + } else { + kmfAlgorithm = KeyManagerFactory.getDefaultAlgorithm(); + } + KeyManagerFactory kmf = KeyManagerFactory.getInstance(kmfAlgorithm); + if (keystore != null) { + kmf.init(keystore.get(), keystore.keyPassword()); + } else { + kmf.init(null, null); + } + keyManagers = kmf.getKeyManagers(); + } + + String tmfAlgorithm = isNotEmpty(this.tmfAlgorithm) ? this.tmfAlgorithm : + TrustManagerFactory.getDefaultAlgorithm(); + TrustManagerFactory tmf = TrustManagerFactory.getInstance(tmfAlgorithm); + KeyStore ts = truststore == null ? 
null : truststore.get(); + tmf.init(ts); + + sslContext.init(keyManagers, tmf.getTrustManagers(), this.secureRandomImplementation); + log.debug("Created SSL context with keystore {}, truststore {}, provider {}.", + keystore, truststore, sslContext.getProvider().getName()); + return sslContext; + } catch (Exception e) { + throw new KafkaException(e); + } } /** @@ -120,53 +231,338 @@ public SSLContext sslContext() { return sslContext; } - private void createKeystore(String type, String path, String password, String keyPassword) { - if (path == null && password != null) { - throw new RuntimeException( + // Visibility to override for testing + protected SecurityStore createKeystore(String type, String path, Password password, + Password keyPassword, Password privateKey, + Password certificateChain) { + if (privateKey != null) { + return createPemKeyStore(type, path, password, keyPassword, privateKey, certificateChain); + } else if (certificateChain != null) { + throw new InvalidConfigurationException( + "SSL certificate chain is specified, but private key is not specified"); + } else if (PEM_TYPE.equals(type) && path != null) { + return createFileBasedPemStore(path, password, keyPassword); + } else if (path == null && password != null) { + throw new InvalidConfigurationException( "SSL key store is not specified, but key store password is specified."); - } else if (path != null && password == null) { - throw new RuntimeException( - "SSL key store is specified, but key store password is not specified."); - } else if (isNotBlank(path) && isNotBlank(password)) { - this.keystore = new SecurityStore(type, path, password); - this.keyPassword = keyPassword; + } else if (isNotEmpty(path)) { + if (password == null) { + throw new InvalidConfigurationException( + "SSL key store is specified, but key store password is not specified."); + } else if (isEmpty(type)) { + throw new InvalidConfigurationException( + "SSL key store is specified, but store type is null or empty"); + } + return new FileBasedStore(type, path, password, keyPassword, true); + } else { + // path is null/empty, clients may use this path with brokers that don't require client auth + return null; } } - private void createTruststore(String type, String path, String password) { - if (path == null && password != null) { - throw new RuntimeException( - "SSL trust store is not specified, but trust store password is specified."); - } else if (isNotBlank(path)) { - this.truststore = new SecurityStore(type, path, password); + private static SecurityStore createPemKeyStore(String type, String path, Password password, + Password keyPassword, Password privateKey, + Password certificateChain) { + if (!PEM_TYPE.equals(type)) { + throw new InvalidConfigurationException("SSL private key can be specified only for " + + "PEM, but key store type is " + type + "."); + } else if (certificateChain == null) { + throw new InvalidConfigurationException("SSL private key is specified, " + + "but certificate chain is not specified."); + } else if (path != null) { + throw new InvalidConfigurationException("Both SSL key store location and separate " + + "private key are specified."); + } else if (password != null) { + throw new InvalidConfigurationException( + "SSL key store password cannot be specified with PEM format, " + + "only key password may be specified."); } + return new PemStore(certificateChain, privateKey, keyPassword); } - private static class SecurityStore { + private static SecurityStore createFileBasedPemStore(String path, Password password, + Password 
keyPassword) { + if (password != null) { + throw new InvalidConfigurationException( + "SSL key store password cannot be specified with PEM format, " + + "only key password may be specified"); + } else if (keyPassword == null) { + throw new InvalidConfigurationException( + "SSL PEM key store is specified, but key password is not specified."); + } else { + return new FileBasedPemStore(path, keyPassword, true); + } + } + + interface SecurityStore { + KeyStore get(); + + char[] keyPassword(); + + boolean modified(); + } + // package access for testing + static class FileBasedStore implements SecurityStore { + protected final String path; + protected final Password keyPassword; private final String type; - private final String path; - private final String password; + private final Password password; + private final Long fileLastModifiedMs; + private final KeyStore keyStore; - private SecurityStore(String type, String path, String password) { - this.type = type == null ? KeyStore.getDefaultType() : type; + FileBasedStore(String type, String path, Password password, Password keyPassword, + boolean isKeyStore) { + Objects.requireNonNull(type, "type must not be null"); + this.type = type; this.path = path; this.password = password; + this.keyPassword = keyPassword; + fileLastModifiedMs = lastModifiedMs(path); + this.keyStore = load(isKeyStore); } - private KeyStore load() throws GeneralSecurityException, IOException { - FileInputStream in = null; - try { + @Override + public KeyStore get() { + return keyStore; + } + + @Override + public char[] keyPassword() { + Password passwd = keyPassword != null ? keyPassword : password; + return passwd == null ? null : passwd.value().toCharArray(); + } + + /** + * Loads this keystore. + * + * @return the keystore + * @throws KafkaException if the file could not be read or if the keystore could not be loaded + * using the specified configs (e.g. if the password or keystore + * type is invalid) + */ + protected KeyStore load(boolean isKeyStore) { + try (InputStream in = Files.newInputStream(Paths.get(path))) { KeyStore ks = KeyStore.getInstance(type); - in = new FileInputStream(path); - char[] passwordChars = password != null ? password.toCharArray() : null; + // If a password is not set access to the truststore is + // still available, but integrity checking is disabled. + char[] passwordChars = password != null ? password.value().toCharArray() : null; ks.load(in, passwordChars); return ks; - } finally { - if (in != null) { - in.close(); + } catch (GeneralSecurityException | IOException e) { + throw new KafkaException("Failed to load SSL keystore " + path + " of type " + type, e); + } + } + + private Long lastModifiedMs(String path) { + try { + return Files.getLastModifiedTime(Paths.get(path)).toMillis(); + } catch (IOException e) { + log.error("Modification time of key store could not be obtained: " + path, e); + return null; + } + } + + public boolean modified() { + Long modifiedMs = lastModifiedMs(path); + return modifiedMs != null && !Objects.equals(modifiedMs, this.fileLastModifiedMs); + } + + @Override + public String toString() { + return "SecurityStore(" + "path=" + path + ", modificationTime=" + + (fileLastModifiedMs == null ? 
null : new Date(fileLastModifiedMs)) + ")"; + } + } + + static class FileBasedPemStore extends FileBasedStore { + FileBasedPemStore(String path, Password keyPassword, boolean isKeyStore) { + super(PEM_TYPE, path, null, keyPassword, isKeyStore); + } + + @Override + protected KeyStore load(boolean isKeyStore) { + try { + Password storeContents = new Password(Utils.readFileAsString(path)); + PemStore pemStore = isKeyStore ? new PemStore(storeContents, storeContents, keyPassword) : + new PemStore(storeContents); + return pemStore.keyStore; + } catch (Exception e) { + throw new InvalidConfigurationException("Failed to load PEM SSL keystore " + path, e); + } + } + } + + static class PemStore implements SecurityStore { + private static final PemParser CERTIFICATE_PARSER = new PemParser("CERTIFICATE"); + private static final PemParser PRIVATE_KEY_PARSER = new PemParser("PRIVATE KEY"); + private static final List KEY_FACTORIES = Arrays.asList( + keyFactory("RSA"), + keyFactory("DSA"), + keyFactory("EC") + ); + + private final char[] keyPassword; + private final KeyStore keyStore; + + PemStore(Password certificateChain, Password privateKey, Password keyPassword) { + this.keyPassword = keyPassword == null ? null : keyPassword.value().toCharArray(); + keyStore = + createKeyStoreFromPem(privateKey.value(), certificateChain.value(), this.keyPassword); + } + + PemStore(Password trustStoreCerts) { + this.keyPassword = null; + keyStore = createTrustStoreFromPem(trustStoreCerts.value()); + } + + private static KeyFactory keyFactory(String algorithm) { + try { + return KeyFactory.getInstance(algorithm); + } catch (Exception e) { + throw new InvalidConfigurationException( + "Could not create key factory for algorithm " + algorithm, e); + } + } + + @Override + public KeyStore get() { + return keyStore; + } + + @Override + public char[] keyPassword() { + return keyPassword; + } + + @Override + public boolean modified() { + return false; + } + + private KeyStore createKeyStoreFromPem(String privateKeyPem, String certChainPem, + char[] keyPassword) { + try { + KeyStore ks = KeyStore.getInstance("PKCS12"); + ks.load(null, null); + Key key = privateKey(privateKeyPem, keyPassword); + Certificate[] certChain = certs(certChainPem); + ks.setKeyEntry("kafka", key, keyPassword, certChain); + return ks; + } catch (Exception e) { + throw new InvalidConfigurationException("Invalid PEM keystore configs", e); + } + } + + private KeyStore createTrustStoreFromPem(String trustedCertsPem) { + try { + KeyStore ts = KeyStore.getInstance("PKCS12"); + ts.load(null, null); + Certificate[] certs = certs(trustedCertsPem); + for (int i = 0; i < certs.length; i++) { + ts.setCertificateEntry("kafka" + i, certs[i]); } + return ts; + } catch (InvalidConfigurationException e) { + throw e; + } catch (Exception e) { + throw new InvalidConfigurationException("Invalid PEM keystore configs", e); + } + } + + private Certificate[] certs(String pem) throws GeneralSecurityException { + List certEntries = CERTIFICATE_PARSER.pemEntries(pem); + if (certEntries.isEmpty()) { + throw new InvalidConfigurationException( + "At least one certificate expected, but none found"); + } + + Certificate[] certs = new Certificate[certEntries.size()]; + for (int i = 0; i < certs.length; i++) { + certs[i] = CertificateFactory.getInstance("X.509") + .generateCertificate(new ByteArrayInputStream(certEntries.get(i))); + } + return certs; + } + + private PrivateKey privateKey(String pem, char[] keyPassword) throws Exception { + List keyEntries = 
PRIVATE_KEY_PARSER.pemEntries(pem); + if (keyEntries.isEmpty()) { + throw new InvalidConfigurationException("Private key not provided"); + } + if (keyEntries.size() != 1) { + throw new InvalidConfigurationException( + "Expected one private key, but found " + keyEntries.size()); + } + + byte[] keyBytes = keyEntries.get(0); + PKCS8EncodedKeySpec keySpec; + if (keyPassword == null) { + keySpec = new PKCS8EncodedKeySpec(keyBytes); + } else { + EncryptedPrivateKeyInfo keyInfo = new EncryptedPrivateKeyInfo(keyBytes); + String algorithm = keyInfo.getAlgName(); + SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(algorithm); + SecretKey pbeKey = keyFactory.generateSecret(new PBEKeySpec(keyPassword)); + Cipher cipher = Cipher.getInstance(algorithm); + cipher.init(Cipher.DECRYPT_MODE, pbeKey, keyInfo.getAlgParameters()); + keySpec = keyInfo.getKeySpec(cipher); + } + + InvalidKeySpecException firstException = null; + for (KeyFactory factory : KEY_FACTORIES) { + try { + return factory.generatePrivate(keySpec); + } catch (InvalidKeySpecException e) { + if (firstException == null) { + firstException = e; + } + } + } + throw new InvalidConfigurationException("Private key could not be loaded", firstException); + } + } + + /** + * Parser to process certificate/private key entries from PEM files + * Examples: + * -----BEGIN CERTIFICATE----- + * Base64 cert + * -----END CERTIFICATE----- + * + *

-----BEGIN ENCRYPTED PRIVATE KEY----- + * Base64 private key + * -----END ENCRYPTED PRIVATE KEY----- + * Additional data may be included before headers, so we match all entries within the PEM. + */ + static class PemParser { + private final String name; + private final Pattern pattern; + + PemParser(String name) { + this.name = name; + String beginOrEndFormat = "-+%s\\s*.*%s[^-]*-+\\s+"; + String nameIgnoreSpace = name.replace(" ", "\\s+"); + + String encodingParams = "\\s*[^\\r\\n]*:[^\\r\\n]*[\\r\\n]+"; + String base64Pattern = "([a-zA-Z0-9/+=\\s]*)"; + String patternStr = String.format(beginOrEndFormat, "BEGIN", nameIgnoreSpace) + + String.format("(?:%s)*", encodingParams) + + base64Pattern + String.format(beginOrEndFormat, "END", nameIgnoreSpace); + pattern = Pattern.compile(patternStr); + } + + private List pemEntries(String pem) { + Matcher matcher = pattern.matcher(pem + "\n"); // allow last newline to be omitted in value + List entries = new ArrayList<>(); + while (matcher.find()) { + String base64Str = matcher.group(1).replaceAll("\\s", ""); + entries.add(Base64.getDecoder().decode(base64Str)); + } + if (entries.isEmpty()) { + throw new InvalidConfigurationException("No matching " + name + " entries in PEM file"); } + return entries; } } } diff --git a/client/src/test/java/io/confluent/kafka/schemaregistry/client/security/SslFactoryTest.java b/client/src/test/java/io/confluent/kafka/schemaregistry/client/security/SslFactoryTest.java new file mode 100644 index 00000000000..122110fff7d --- /dev/null +++ b/client/src/test/java/io/confluent/kafka/schemaregistry/client/security/SslFactoryTest.java @@ -0,0 +1,312 @@ +package io.confluent.kafka.schemaregistry.client.security; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; + +import java.io.File; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.security.KeyStore; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import io.confluent.common.utils.TestUtils; + +import org.apache.kafka.common.config.SslConfigs; +import org.apache.kafka.common.config.types.Password; +import org.apache.kafka.common.security.ssl.DefaultSslEngineFactory; +import org.junit.Before; +import org.junit.Test; + +public class SslFactoryTest { + + /* + * Key and certificates were extracted using openssl from a key store file created with 100 years validity using: + * + * openssl pkcs12 -in server.keystore.p12 -nodes -nocerts -out test.key.pem -passin pass:key-password + * openssl pkcs12 -in server.keystore.p12 -nodes -nokeys -out test.certchain.pem -passin pass:key-password + * openssl pkcs12 -in server.keystore.p12 -nodes -out test.keystore.pem -passin pass:key-password + * openssl pkcs8 -topk8 -v1 pbeWithSHA1And3-KeyTripleDES-CBC -in test.key.pem -out test.key.encrypted.pem -passout pass:key-password + */ + + private static final String CA1 = "-----BEGIN CERTIFICATE-----\n" + + "MIIC0zCCAbugAwIBAgIEStdXHTANBgkqhkiG9w0BAQsFADASMRAwDgYDVQQDEwdU\n" + + "ZXN0Q0ExMCAXDTIwMDkyODA5MDI0MFoYDzIxMjAwOTA0MDkwMjQwWjASMRAwDgYD\n" + + "VQQDEwdUZXN0Q0ExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAo3Gr\n" + + "WJAkjnvgcuIfjArDhNdtAlRTt094WMUXhYDibgGtd+CLcWqA+c4PEoK4oybnKZqU\n" + + "6MlDfPgesIK2YiNBuSVWMtZ2doageOBnd80Iwbg8DqGtQpUsvw8X5fOmuza+4inv\n" + + "/8IpiTizq8YjSMT4nYDmIjyyRCSNY4atjgMnskutJ0v6i69+ZAA520Y6nn2n4RD5\n" + + 
"8Yc+y7yCkbZXnYS5xBOFEExmtc0Xa7S9nM157xqKws9Z+rTKZYLrryaHI9JNcXgG\n" + + "kzQEH9fBePASeWfi9AGRvAyS2GMSIBOsihIDIha/mqQcJOGCEqTMtefIj2FaErO2\n" + + "bL9yU7OpW53iIC8y0QIDAQABoy8wLTAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBRf\n" + + "svKcoQ9ZBvjwyUSV2uMFzlkOWDANBgkqhkiG9w0BAQsFAAOCAQEAEE1ZG2MGE248\n" + + "glO83ROrHbxmnVWSQHt/JZANR1i362sY1ekL83wlhkriuvGVBlHQYWezIfo/4l9y\n" + + "JTHNX3Mrs9eWUkaDXADkHWj3AyLXN3nfeU307x1wA7OvI4YKpwvfb4aYS8RTPz9d\n" + + "JtrfR0r8aGTgsXvCe4SgwDBKv7bckctOwD3S7D/b6y3w7X0s7JCU5+8ZjgoYfcLE\n" + + "gNqQEaOwdT2LHCvxHmGn/2VGs/yatPQIYYuufe5i8yX7pp4Xbd2eD6LULYkHFs3x\n" + + "uJzMRI7BukmIIWuBbAkYI0atxLQIysnVFXdL9pBgvgso2nA3FgP/XeORhkyHVvtL\n" + + "REH2YTlftQ==\n" + + "-----END CERTIFICATE-----"; + + private static final String CA2 = "-----BEGIN CERTIFICATE-----\n" + + "MIIC0zCCAbugAwIBAgIEfk9e9DANBgkqhkiG9w0BAQsFADASMRAwDgYDVQQDEwdU\n" + + "ZXN0Q0EyMCAXDTIwMDkyODA5MDI0MVoYDzIxMjAwOTA0MDkwMjQxWjASMRAwDgYD\n" + + "VQQDEwdUZXN0Q0EyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvCh0\n" + + "UO5op9eHfz7mvZ7IySK7AOCTC56QYFJcU+hD6yk1wKg2qot7naI5ozAc8n7c4pMt\n" + + "LjI3D0VtC/oHC29R2HNMSWyHcxIXw8z127XeCLRkCqYWuVAl3nBuWfWVPObjKetH\n" + + "TWlQANYWAfk1VbS6wfzgp9cMaK7wQ+VoGEo4x3pjlrdlyg4k4O2yubcpWmJ2TjxS\n" + + "gg7TfKGizUVAvF9wUG9Q4AlCg4uuww5RN9w6vnzDKGhWJhkQ6pf/m1xB+WueFOeU\n" + + "aASGhGqCTqiz3p3M3M4OZzG3KptjQ/yb67x4T5U5RxqoiN4L57E7ZJLREpa6ZZNs\n" + + "ps/gQ8dR9Uo/PRyAkQIDAQABoy8wLTAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBRg\n" + + "IAOVH5LeE6nZmdScEE3JO/AhvTANBgkqhkiG9w0BAQsFAAOCAQEAHkk1iybwy/Lf\n" + + "iEQMVRy7XfuC008O7jfCUBMgUvE+oO2RadH5MmsXHG3YerdsDM90dui4JqQNZOUh\n" + + "kF8dIWPQHE0xDsR9jiUsemZFpVMN7DcvVZ3eFhbvJA8Q50rxcNGA+tn9xT/xdQ6z\n" + + "1eRq9IPoYcRexQ7s9mincM4T4lLm8GGcd7ZPHy8kw0Bp3E/enRHWaF5b8KbXezXD\n" + + "I3SEYUyRL2K3px4FImT4X9XQm2EX6EONlu4GRcJpD6RPc0zC7c9dwEnSo+0NnewR\n" + + "gjgO34CLzShB/kASLS9VQXcUC6bsggAVK2rWQMmy35SOEUufSuvg8kUFoyuTzfhn\n" + + "hL+PVwIu7g==\n" + + "-----END CERTIFICATE-----"; + + private static final String CERTCHAIN = "Bag Attributes\n" + + " friendlyName: server\n" + + " localKeyID: 54 69 6D 65 20 31 36 30 31 32 38 33 37 36 35 34 32 33 \n" + + "subject=/CN=TestBroker\n" + + "issuer=/CN=TestCA1\n" + + "-----BEGIN CERTIFICATE-----\n" + + "MIIC/zCCAeegAwIBAgIEatBnEzANBgkqhkiG9w0BAQsFADASMRAwDgYDVQQDEwdU\n" + + "ZXN0Q0ExMCAXDTIwMDkyODA5MDI0NFoYDzIxMjAwOTA0MDkwMjQ0WjAVMRMwEQYD\n" + + "VQQDEwpUZXN0QnJva2VyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA\n" + + "pkw1AS71ej/iOMvzVgVL1dkQOYzI842NcPmx0yFFsue2umL8WVd3085NgWRb3SS1\n" + + "4X676t7zxjPGzYi7jwmA8stCrDt0NAPWd/Ko6ErsCs87CUs4u1Cinf+b3o9NF5u0\n" + + "UPYBQLF4Ir8T1jQ+tKiqsChGDt6urRAg1Cro5i7r10jN1uofY2tBs+r8mALhJ17c\n" + + "T5LKawXeYwNOQ86c5djClbcP0RrfcPyRyj1/Cp1axo28iO0fXFyO2Zf3a4vtt+Ih\n" + + "PW+A2tL+t3JTBd8g7Fl3ozzpcotAi7MDcZaYA9GiTP4DOiKUeDt6yMYQQr3VEqGa\n" + + "pXp4fKY+t9slqnAmcBZ4kQIDAQABo1gwVjAfBgNVHSMEGDAWgBRfsvKcoQ9ZBvjw\n" + + "yUSV2uMFzlkOWDAUBgNVHREEDTALgglsb2NhbGhvc3QwHQYDVR0OBBYEFGWt+27P\n" + + "INk/S5X+PRV/jW3WOhtaMA0GCSqGSIb3DQEBCwUAA4IBAQCLHCjFFvqa+0GcG9eq\n" + + "v1QWaXDohY5t5CCwD8Z+lT9wcSruTxDPwL7LrR36h++D6xJYfiw4iaRighoA40xP\n" + + "W6+0zGK/UtWV4t+ODTDzyAWgls5w+0R5ki6447qGqu5tXlW5DCHkkxWiozMnhNU2\n" + + "G3P/Drh7DhmADDBjtVLsu5M1sagF/xwTP/qCLMdChlJNdeqyLnAUa9SYG1eNZS/i\n" + + "wrCC8m9RUQb4+OlQuFtr0KhaaCkBXfmhigQAmh44zSyO+oa3qQDEavVFo/Mcui9o\n" + + "WBYetcgVbXPNoti+hQEMqmJYBHlLbhxMnkooGn2fa70f453Bdu/Xh6Yphi5NeCHn\n" + + "1I+y\n" + + "-----END CERTIFICATE-----\n" + + "Bag Attributes\n" + + " friendlyName: CN=TestCA1\n" + + "subject=/CN=TestCA1\n" + + "issuer=/CN=TestCA1\n" + + "-----BEGIN CERTIFICATE-----\n" + 
+ "MIIC0zCCAbugAwIBAgIEStdXHTANBgkqhkiG9w0BAQsFADASMRAwDgYDVQQDEwdU\n" + + "ZXN0Q0ExMCAXDTIwMDkyODA5MDI0MFoYDzIxMjAwOTA0MDkwMjQwWjASMRAwDgYD\n" + + "VQQDEwdUZXN0Q0ExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAo3Gr\n" + + "WJAkjnvgcuIfjArDhNdtAlRTt094WMUXhYDibgGtd+CLcWqA+c4PEoK4oybnKZqU\n" + + "6MlDfPgesIK2YiNBuSVWMtZ2doageOBnd80Iwbg8DqGtQpUsvw8X5fOmuza+4inv\n" + + "/8IpiTizq8YjSMT4nYDmIjyyRCSNY4atjgMnskutJ0v6i69+ZAA520Y6nn2n4RD5\n" + + "8Yc+y7yCkbZXnYS5xBOFEExmtc0Xa7S9nM157xqKws9Z+rTKZYLrryaHI9JNcXgG\n" + + "kzQEH9fBePASeWfi9AGRvAyS2GMSIBOsihIDIha/mqQcJOGCEqTMtefIj2FaErO2\n" + + "bL9yU7OpW53iIC8y0QIDAQABoy8wLTAMBgNVHRMEBTADAQH/MB0GA1UdDgQWBBRf\n" + + "svKcoQ9ZBvjwyUSV2uMFzlkOWDANBgkqhkiG9w0BAQsFAAOCAQEAEE1ZG2MGE248\n" + + "glO83ROrHbxmnVWSQHt/JZANR1i362sY1ekL83wlhkriuvGVBlHQYWezIfo/4l9y\n" + + "JTHNX3Mrs9eWUkaDXADkHWj3AyLXN3nfeU307x1wA7OvI4YKpwvfb4aYS8RTPz9d\n" + + "JtrfR0r8aGTgsXvCe4SgwDBKv7bckctOwD3S7D/b6y3w7X0s7JCU5+8ZjgoYfcLE\n" + + "gNqQEaOwdT2LHCvxHmGn/2VGs/yatPQIYYuufe5i8yX7pp4Xbd2eD6LULYkHFs3x\n" + + "uJzMRI7BukmIIWuBbAkYI0atxLQIysnVFXdL9pBgvgso2nA3FgP/XeORhkyHVvtL\n" + + "REH2YTlftQ==\n" + + "-----END CERTIFICATE-----"; + + private static final String KEY = "Bag Attributes\n" + + " friendlyName: server\n" + + " localKeyID: 54 69 6D 65 20 31 36 30 31 32 38 33 37 36 35 34 32 33\n" + + "Key Attributes: \n" + + "-----BEGIN PRIVATE KEY-----\n" + + "MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCmTDUBLvV6P+I4\n" + + "y/NWBUvV2RA5jMjzjY1w+bHTIUWy57a6YvxZV3fTzk2BZFvdJLXhfrvq3vPGM8bN\n" + + "iLuPCYDyy0KsO3Q0A9Z38qjoSuwKzzsJSzi7UKKd/5vej00Xm7RQ9gFAsXgivxPW\n" + + "ND60qKqwKEYO3q6tECDUKujmLuvXSM3W6h9ja0Gz6vyYAuEnXtxPksprBd5jA05D\n" + + "zpzl2MKVtw/RGt9w/JHKPX8KnVrGjbyI7R9cXI7Zl/dri+234iE9b4Da0v63clMF\n" + + "3yDsWXejPOlyi0CLswNxlpgD0aJM/gM6IpR4O3rIxhBCvdUSoZqlenh8pj632yWq\n" + + "cCZwFniRAgMBAAECggEAOfC/XwQvf0KW3VciF0yNGZshbgvBUCp3p284J+ml0Smu\n" + + "ns4yQiaZl3B/zJ9c6nYJ8OEpNDIuGVac46vKPZIAHZf4SO4GFMFpji078IN6LmH5\n" + + "nclZoNn9brNKaYbgQ2N6teKgmRu8Uc7laHKXjnZd0jaWAkRP8/h0l7fDob+jaERj\n" + + "oJBx4ux2Z62TTCP6W4VY3KZgSL1p6dQswqlukPVytMeI2XEwWnO+w8ED0BxCxM4F\n" + + "K//dw7nUMGS9GUNkgyDcH1akYSCDzdBeymQBp2latBotVfGNK1hq9nC1iaxmRkJL\n" + + "sYjwVc24n37u+txOovy3daq2ySj9trF7ySAPVYkh4QKBgQDWeN/MR6cy1TLF2j3g\n" + + "eMMeM32LxXArIPsar+mft+uisKWk5LDpsKpph93sl0JjFi4x0t1mqw23h23I+B2c\n" + + "JWiPAHUG3FGvvkPPcfMUvd7pODyE2XaXi+36UZAH7qc94VZGJEb+sPITckSruREE\n" + + "QErWZyrbBRgvQXsmVme5B2/kRQKBgQDGf2HQH0KHl54O2r9vrhiQxWIIMSWlizJC\n" + + "hjboY6DkIsAMwnXp3wn3Bk4tSgeLk8DEVlmEaE3gvGpiIp0vQnSOlME2TXfEthdM\n" + + "uS3+BFXN4Vxxx/qjKL2WfZloyzdaaaF7s+LIwmXgLsFFCUSq+uLtBqfpH2Qv+paX\n" + + "Xqm7LN3V3QKBgH5ssj/Q3RZx5oQKqf7wMNRUteT2dbB2uI56s9SariQwzPPuevrG\n" + + "US30ETWt1ExkfsaP7kLfAi71fhnBaHLq+j+RnWp15REbrw1RtmC7q/L+W25UYjvj\n" + + "GF0+RxDl9V/cvOaL6+2mkIw2B5TSet1uqK7KEdEZp6/zgYyP0oSXhbWhAoGAdnlZ\n" + + "HCtMPjnUcPFHCZVTvDTTSihrW9805FfPNe0g/olvLy5xymEBRZtR1d41mq1ZhNY1\n" + + "H75RnS1YIbKfNrHnd6J5n7ulHJfCWFy+grp7rCIyVwcRJYkPf17/zXhdVW1uoLLB\n" + + "TSoaPDAr0tSxU4vjHa23UoEV/z0F3Nr3W2xwC1ECgYBHKjv6ekLhx7HbP797+Ai+\n" + + "wkHvS2L/MqEBxuHzcQ9G6Mj3ANAeyDB8YSC8qGtDQoEyukv2dO73lpodNgbR8P+Q\n" + + "PDBb6eyntAo2sSeo0jZkiXvDOfRaGuGVrxjuTfaqcVB33jC6BYfi61/3Sr5oG9Nd\n" + + "tDGh1HlOIRm1jD9KQNVZ/Q==\n" + + "-----END PRIVATE KEY-----"; + + private static final String ENCRYPTED_KEY = "-----BEGIN ENCRYPTED PRIVATE KEY-----\n" + + "MIIE6jAcBgoqhkiG9w0BDAEDMA4ECGyAEWAXlaXzAgIIAASCBMgt7QD1Bbz7MAHI\n" + + "Ni0eTrwNiuAPluHirLXzsV57d1O9i4EXVp5nzRy6753cjXbGXARbBeaJD+/+jbZp\n" + + 
"CBZTHMG8rTCfbsg5kMqxT6XuuqWlKLKc4gaq+QNgHHleKqnpwZQmOQ+awKWEK/Ow\n" + + "Z0KxXqkp+b4/qJK3MqKZDsJtVdyUhO0tLVxd+BHDg9B93oExc87F16h3R0+T4rxE\n" + + "Tvz2c2upBqva49AbLDxpWXLCJC8CRkxM+KHrPkYjpNx3jCjtwiiXfzJCWjuCkVrL\n" + + "2F4bqvpYPIseoPtMvWaplNtoPwhpzBB/hoJ+R+URr4XHX3Y+bz6k6iQnhoCOIviy\n" + + "oEEUvWtKnaEEKSauR+Wyj3MoeB64g9NWMEHv7+SQeA4WqlgV2s4txwRxFGKyKLPq\n" + + "caMSpfxvYujtSh0DOv9GI3cVHPM8WsebCz9cNrbKSR8/8JufcoonTitwF/4vm1Et\n" + + "AdmCuH9JIYVvmFKFVxY9SvRAvo43OQaPmJQHMUa4yDfMtpTSgmB/7HFgxtksYs++\n" + + "Gbrq6F/hon+0bLx+bMz2FK635UU+iVno+qaScKWN3BFqDl+KnZprBhLSXTT3aHmp\n" + + "fisQit/HWp71a0Vzq85WwI4ucMKNc8LemlwNBxWLLiJDp7sNPLb5dIl8yIwSEIgd\n" + + "vC5px9KWEdt3GxTUEqtIeBmagbBhahcv+c9Dq924DLI+Slv6TJKZpIcMqUECgzvi\n" + + "hb8gegyEscBEcDSzl0ojlFVz4Va5eZS/linTjNJhnkx8BKLn/QFco7FpEE6uOmQ3\n" + + "0kF64M2Rv67cJbYVrhD46TgIzH3Y/FOMSi1zFHQ14nVXWMu0yAlBX+QGk7Xl+/aF\n" + + "BIq+i9WcBqbttR3CwyeTnIFXkdC66iTZYhDl9HT6yMcazql2Or2TjIIWr6tfNWH/\n" + + "5dWSEHYM5m8F2/wF0ANWJyR1oPr4ckcUsfl5TfOWVj5wz4QVF6EGV7FxEnQHrdx0\n" + + "6rXThRKFjqxUubsNt1yUEwdlTNz2UFhobGF9MmFeB97BZ6T4v8G825de/Caq9FzO\n" + + "yMFFCRcGC7gIzMXRPEjHIvBdTThm9rbNzKPXHqw0LHG478yIqzxvraCYTRw/4eWN\n" + + "Q+hyOL/5T5QNXHpR8Udp/7sptw7HfRnecQ/Vz9hOKShQq3h4Sz6eQMQm7P9qGo/N\n" + + "bltEAIECRVcNYLN8LuEORfeecNcV3BX+4BBniFtdD2bIRsWC0ZUsGf14Yhr4P1OA\n" + + "PtMJzy99mrcq3h+o+hEW6bhIj1gA88JSMJ4iRuwTLRKE81w7EyziScDsotYKvDPu\n" + + "w4+PFbQO3fr/Zga3LgYis8/DMqZoWjVCjAeVoypuOZreieZYC/BgBS8qSUAmDPKq\n" + + "jK+T5pwMMchfXbkV80LTu1kqLfKWdE0AmZfGy8COE/NNZ/FeiWZPdwu2Ix6u/RoY\n" + + "LTjNy4YLIBdVELFXaFJF2GfzLpnwrW5tyNPVVrGmUoiyOzgx8gMyCLGavGtduyoY\n" + + "tBiUTmd05Ugscn4Rz9X30S4NbnjL/h+bWl1m6/M+9FHEe85FPxmt/GRmJPbFPMR5\n" + + "q5EgQGkt4ifiaP6qvyFulwvVwx+m0bf1q6Vb/k3clIyLMcVZWFE1TqNH2Ife46AE\n" + + "2I39ZnGTt0mbWskpHBA=\n" + + "-----END ENCRYPTED PRIVATE KEY-----"; + + private static final Password KEY_PASSWORD = new Password("key-password"); + + Map configs = new HashMap<>(); + + @Before + public void setUp() { + configs.put(SslConfigs.SSL_PROTOCOL_CONFIG, "TLSv1.2"); + } + + @Test + public void testPemTrustStoreConfigWithOneCert() throws Exception { + configs.put(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG, pemAsConfigValue(CA1).value()); + configs.put(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, DefaultSslEngineFactory.PEM_TYPE); + SslFactory factory = new SslFactory(configs); + + KeyStore trustStore = factory.trustStore().get(); + List aliases = Collections.list(trustStore.aliases()); + assertEquals(Collections.singletonList("kafka0"), aliases); + assertNotNull("Certificate not loaded", trustStore.getCertificate("kafka0")); + assertNull("Unexpected private key", trustStore.getKey("kafka0", null)); + } + + @Test + public void testPemTrustStoreConfigWithMultipleCerts() throws Exception { + configs.put(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG, pemAsConfigValue(CA1, CA2).value()); + configs.put(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, DefaultSslEngineFactory.PEM_TYPE); + SslFactory factory = new SslFactory(configs); + + KeyStore trustStore = factory.trustStore().get(); + List aliases = Collections.list(trustStore.aliases()); + assertEquals(Arrays.asList("kafka0", "kafka1"), aliases); + assertNotNull("Certificate not loaded", trustStore.getCertificate("kafka0")); + assertNull("Unexpected private key", trustStore.getKey("kafka0", null)); + assertNotNull("Certificate not loaded", trustStore.getCertificate("kafka1")); + assertNull("Unexpected private key", trustStore.getKey("kafka1", null)); + } + + @Test + public void testPemKeyStoreConfigNoPassword() throws Exception { 
+ verifyPemKeyStoreConfig(KEY, null); + } + + @Test + public void testPemKeyStoreConfigWithKeyPassword() throws Exception { + verifyPemKeyStoreConfig(ENCRYPTED_KEY, KEY_PASSWORD); + } + + @Test + public void testTrailingNewLines() throws Exception { + verifyPemKeyStoreConfig(ENCRYPTED_KEY + "\n\n", KEY_PASSWORD); + } + + @Test + public void testLeadingNewLines() throws Exception { + verifyPemKeyStoreConfig("\n\n" + ENCRYPTED_KEY, KEY_PASSWORD); + } + + @Test + public void testCarriageReturnLineFeed() throws Exception { + verifyPemKeyStoreConfig(ENCRYPTED_KEY.replaceAll("\n", "\r\n"), KEY_PASSWORD); + } + + private void verifyPemKeyStoreConfig(String keyFileName, Password keyPassword) throws Exception { + configs.put(SslConfigs.SSL_KEYSTORE_KEY_CONFIG, pemAsConfigValue(keyFileName).value()); + configs.put(SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG, pemAsConfigValue(CERTCHAIN).value()); + configs.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, keyPassword == null ? null : keyPassword.value()); + configs.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, DefaultSslEngineFactory.PEM_TYPE); + SslFactory factory = new SslFactory(configs); + + KeyStore keyStore = factory.keyStore().get(); + List aliases = Collections.list(keyStore.aliases()); + assertEquals(Collections.singletonList("kafka"), aliases); + assertNotNull("Certificate not loaded", keyStore.getCertificate("kafka")); + assertNotNull("Private key not loaded", keyStore.getKey("kafka", keyPassword == null ? null : keyPassword.value().toCharArray())); + } + + @Test + public void testPemTrustStoreFile() throws Exception { + configs.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, pemFilePath(CA1)); + configs.put(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, DefaultSslEngineFactory.PEM_TYPE); + SslFactory factory = new SslFactory(configs); + + KeyStore trustStore = factory.trustStore().get(); + List aliases = Collections.list(trustStore.aliases()); + assertEquals(Collections.singletonList("kafka0"), aliases); + assertNotNull("Certificate not found", trustStore.getCertificate("kafka0")); + assertNull("Unexpected private key", trustStore.getKey("kafka0", null)); + } + + @Test + public void testPemKeyStoreFileNoKeyPassword() throws Exception { + configs.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, + pemFilePath(pemAsConfigValue(KEY, CERTCHAIN).value())); + configs.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, DefaultSslEngineFactory.PEM_TYPE); + assertThrows(RuntimeException.class, () -> new SslFactory(configs)); + } + + @Test + public void testPemKeyStoreFileWithKeyPassword() throws Exception { + configs.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, + pemFilePath(pemAsConfigValue(ENCRYPTED_KEY, CERTCHAIN).value())); + configs.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, KEY_PASSWORD.value()); + configs.put(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, DefaultSslEngineFactory.PEM_TYPE); + SslFactory factory = new SslFactory(configs); + + KeyStore keyStore = factory.keyStore().get(); + List aliases = Collections.list(keyStore.aliases()); + assertEquals(Collections.singletonList("kafka"), aliases); + assertNotNull("Certificate not found", keyStore.getCertificate("kafka")); + assertNotNull("Private key not found", keyStore.getKey("kafka", KEY_PASSWORD.value().toCharArray())); + } + + private String pemFilePath(String pem) throws Exception { + File pemFile = File.createTempFile(getClass().getSimpleName(),".pem", TestUtils.tempDirectory()); + Files.write(pemFile.toPath(), pem.getBytes(StandardCharsets.UTF_8)); + return pemFile.getAbsolutePath(); + } + + private Password 
pemAsConfigValue(String... pemValues) { + StringBuilder builder = new StringBuilder(); + for (String pem : pemValues) { + builder.append(pem); + builder.append("\n"); + } + return new Password(builder.toString().trim()); + } + +} diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiSslTest.java b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiSslTest.java index 561329a7293..c34ea023ea8 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiSslTest.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiSslTest.java @@ -118,12 +118,18 @@ public void testRegisterWithClientSecurityWithMinimalProperties() throws Excepti clientsslConfigs.put( SchemaRegistryClientConfig.CLIENT_NAMESPACE + SchemaRegistryConfig.SSL_KEYSTORE_PASSWORD_CONFIG, props.get(SchemaRegistryConfig.SSL_KEYSTORE_PASSWORD_CONFIG)); + clientsslConfigs.put( + SchemaRegistryClientConfig.CLIENT_NAMESPACE + SchemaRegistryConfig.SSL_KEYSTORE_TYPE_CONFIG, + props.get(SchemaRegistryConfig.SSL_KEYSTORE_TYPE_CONFIG)); clientsslConfigs.put( SchemaRegistryClientConfig.CLIENT_NAMESPACE + SchemaRegistryConfig.SSL_TRUSTSTORE_LOCATION_CONFIG, props.get(SchemaRegistryConfig.SSL_TRUSTSTORE_LOCATION_CONFIG)); clientsslConfigs.put( SchemaRegistryClientConfig.CLIENT_NAMESPACE + SchemaRegistryConfig.SSL_TRUSTSTORE_PASSWORD_CONFIG, props.get(SchemaRegistryConfig.SSL_TRUSTSTORE_PASSWORD_CONFIG)); + clientsslConfigs.put( + SchemaRegistryClientConfig.CLIENT_NAMESPACE + SchemaRegistryConfig.SSL_TRUSTSTORE_TYPE_CONFIG, + props.get(SchemaRegistryConfig.SSL_TRUSTSTORE_TYPE_CONFIG)); CachedSchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(restApp.restClient, 10, clientsslConfigs); assertEquals( From f584b193f119e7d2cfee2946a3ce3ba43ef74890 Mon Sep 17 00:00:00 2001 From: amalgawa Date: Fri, 21 Jan 2022 14:44:26 +0530 Subject: [PATCH 13/73] Removed extra import --- .../schemaregistry/rest/resources/SubjectVersionsResource.java | 1 - 1 file changed, 1 deletion(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java index b8ff4c4b908..03dc4b1bdc2 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java @@ -37,7 +37,6 @@ import io.confluent.kafka.schemaregistry.exceptions.UnknownLeaderException; import io.confluent.kafka.schemaregistry.rest.VersionId; import io.confluent.kafka.schemaregistry.rest.exceptions.Errors; -import io.confluent.kafka.schemaregistry.rest.exceptions.RestInvalidSubjectException; import io.confluent.kafka.schemaregistry.storage.KafkaSchemaRegistry; import io.confluent.kafka.schemaregistry.utils.QualifiedSubject; import io.confluent.rest.annotations.PerformanceMetric; From fa2dde67e5fe54090a4e9f015ad920c87f473571 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Fri, 21 Jan 2022 07:51:57 -0800 Subject: [PATCH 14/73] Upgrade json-schema to 1.14.0 (#2132) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8bfa9ebb6a0..45cc4fdcc3c 100644 --- a/pom.xml +++ b/pom.xml @@ -78,7 +78,7 @@ checkstyle/suppressions.xml 0.11.1 1.4.21 - 1.12.2 + 1.14.0 2.5.1 3.11.4 4.0.0 From a2063e576b228d9eadd88bf34ef5f2c3b26e0d11 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Fri, 21 Jan 2022 
17:09:58 -0800 Subject: [PATCH 15/73] DGS-2770 Allow Protobuf enums to be converted to int32 (#2138) * DGS-2770 Allow Protobuf enums to be converted to int32 * Minor fix --- .../connect/protobuf/ProtobufData.java | 22 ++++- .../connect/protobuf/ProtobufDataConfig.java | 13 +++ .../connect/protobuf/ProtobufDataTest.java | 92 ++++++++++++++++--- 3 files changed, 109 insertions(+), 18 deletions(-) diff --git a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java index ae3d7025bfb..32400b56e43 100644 --- a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java +++ b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java @@ -293,6 +293,7 @@ public class ProtobufData { private final Map, Schema> toConnectSchemaCache; private boolean enhancedSchemaSupport; private boolean scrubInvalidNames; + private boolean useIntForEnums; private boolean useOptionalForNullables; private boolean useWrapperForNullables; private boolean useWrapperForRawPrimitives; @@ -313,6 +314,7 @@ public ProtobufData(ProtobufDataConfig protobufDataConfig) { toConnectSchemaCache = new BoundedConcurrentHashMap<>(protobufDataConfig.schemaCacheSize()); this.enhancedSchemaSupport = protobufDataConfig.isEnhancedProtobufSchemaSupport(); this.scrubInvalidNames = protobufDataConfig.isScrubInvalidNames(); + this.useIntForEnums = protobufDataConfig.useIntForEnums(); this.useOptionalForNullables = protobufDataConfig.useOptionalForNullables(); this.useWrapperForNullables = protobufDataConfig.useWrapperForNullables(); this.useWrapperForRawPrimitives = protobufDataConfig.useWrapperForRawPrimitives(); @@ -373,6 +375,10 @@ private Object fromConnectData( case INT16: case INT32: { final int intValue = ((Number) value).intValue(); // Check for correct type + if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) { + String enumType = schema.parameters().get(PROTOBUF_TYPE_ENUM); + return protobufSchema.getEnumValue(scope + enumType, intValue); + } return isWrapper ? Int32Value.newBuilder().setValue(intValue).build() : intValue; } @@ -1038,6 +1044,9 @@ private String dataTypeFromConnectSchema( return useWrapperForNullables && schema.isOptional() ? PROTOBUF_INT32_WRAPPER_TYPE : FieldDescriptor.Type.INT32.toString().toLowerCase(); case INT32: + if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) { + return schema.parameters().get(PROTOBUF_TYPE_ENUM); + } defaultType = FieldDescriptor.Type.INT32.toString().toLowerCase(); if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_PROP)) { defaultType = schema.parameters().get(PROTOBUF_TYPE_PROP); @@ -1150,8 +1159,15 @@ protected Object toConnectData(Schema schema, Object value) { ? getWrappedValue((Message) value) : ((Number) value).shortValue(); break; case INT32: - converted = value instanceof Message - ?
getWrappedValue((Message) value) : ((Number) value).intValue(); + if (value instanceof Message) { + converted = getWrappedValue((Message) value); + } else if (value instanceof Number) { + converted = ((Number) value).intValue(); + } else if (value instanceof Enum) { + converted = ((Enum) value).ordinal(); + } else if (value instanceof EnumValueDescriptor) { + converted = ((EnumValueDescriptor) value).getNumber(); + } break; case INT64: if (value instanceof Message) { @@ -1446,7 +1462,7 @@ private Schema toConnectSchema(ToConnectContext ctx, FieldDescriptor descriptor) break; case ENUM: - builder = SchemaBuilder.string(); + builder = useIntForEnums ? SchemaBuilder.int32() : SchemaBuilder.string(); EnumDescriptor enumDescriptor = descriptor.getEnumType(); String name = enhancedSchemaSupport ? enumDescriptor.getFullName() : enumDescriptor.getName(); diff --git a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java index 76aa4480001..3a53e416954 100644 --- a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java +++ b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java @@ -36,6 +36,10 @@ public class ProtobufDataConfig extends AbstractConfig { public static final String SCRUB_INVALID_NAMES_DOC = "Whether to scrub invalid names by replacing invalid characters with valid ones"; + public static final String INT_FOR_ENUMS_CONFIG = "int.for.enums"; + public static final boolean INT_FOR_ENUMS_DEFAULT = false; + public static final String INT_FOR_ENUMS_DOC = "Whether to represent enums as integers"; + public static final String OPTIONAL_FOR_NULLABLES_CONFIG = "optional.for.nullables"; public static final boolean OPTIONAL_FOR_NULLABLES_DEFAULT = false; public static final String OPTIONAL_FOR_NULLABLES_DOC = "Whether nullable fields should be " @@ -64,6 +68,11 @@ public static ConfigDef baseConfigDef() { ENHANCED_PROTOBUF_SCHEMA_SUPPORT_DOC) .define(SCRUB_INVALID_NAMES_CONFIG, ConfigDef.Type.BOOLEAN, SCRUB_INVALID_NAMES_DEFAULT, ConfigDef.Importance.MEDIUM, SCRUB_INVALID_NAMES_DOC) + .define(INT_FOR_ENUMS_CONFIG, + ConfigDef.Type.BOOLEAN, + INT_FOR_ENUMS_DEFAULT, + ConfigDef.Importance.MEDIUM, + INT_FOR_ENUMS_DOC) .define(OPTIONAL_FOR_NULLABLES_CONFIG, ConfigDef.Type.BOOLEAN, OPTIONAL_FOR_NULLABLES_DEFAULT, @@ -99,6 +108,10 @@ public boolean isScrubInvalidNames() { return this.getBoolean(SCRUB_INVALID_NAMES_CONFIG); } + public boolean useIntForEnums() { + return this.getBoolean(INT_FOR_ENUMS_CONFIG); + } + public boolean useOptionalForNullables() { return this.getBoolean(OPTIONAL_FOR_NULLABLES_CONFIG); } diff --git a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java index f2b00102a65..1927b5617cb 100644 --- a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java +++ b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java @@ -208,12 +208,12 @@ private NestedMessage createEmptyNestedTestProto() throws ParseException { return message.build(); } - private Schema getExpectedNestedTestProtoSchemaStringUserId() { - return getExpectedNestedTestProtoSchema(); + private Schema getExpectedNestedTestProtoSchemaStringUserId(boolean useIntForEnums) { + return getExpectedNestedTestProtoSchema(useIntForEnums); } - private Schema 
getExpectedNestedTestProtoSchemaIntUserId() { - return getExpectedNestedTestProtoSchema(); + private Schema getExpectedNestedTestProtoSchemaIntUserId(boolean useIntForEnums) { + return getExpectedNestedTestProtoSchema(useIntForEnums); } private SchemaBuilder getEnumUnionSchemaBuilder() { @@ -312,6 +312,10 @@ private SchemaBuilder getInnerMessageSchemaBuilder() { } private Schema getExpectedNestedTestProtoSchema() { + return getExpectedNestedTestProtoSchema(false); + } + + private Schema getExpectedNestedTestProtoSchema(boolean useIntForEnums) { final SchemaBuilder builder = SchemaBuilder.struct(); builder.name("NestedMessage"); final SchemaBuilder userIdBuilder = SchemaBuilder.struct(); @@ -359,8 +363,9 @@ private Schema getExpectedNestedTestProtoSchema() { .parameter(PROTOBUF_TYPE_TAG, String.valueOf(4)) .build() ); + SchemaBuilder enumBuilder = useIntForEnums ? SchemaBuilder.int32() : SchemaBuilder.string(); builder.field("status", - SchemaBuilder.string() + enumBuilder .name("Status") .optional() .parameter(PROTOBUF_TYPE_TAG, String.valueOf(5)) @@ -399,8 +404,8 @@ private Map getTestKeyValueMap() { return result; } - private Struct getExpectedNestedProtoResultStringUserId() throws ParseException { - Schema schema = getExpectedNestedTestProtoSchemaStringUserId(); + private Struct getExpectedNestedProtoResultStringUserId(boolean useIntForEnums) throws ParseException { + Schema schema = getExpectedNestedTestProtoSchemaStringUserId(useIntForEnums); Struct result = new Struct(schema.schema()); Struct userId = new Struct(schema.field("user_id").schema()); Struct union = new Struct(schema.field("user_id").schema().field("user_id_0").schema()); @@ -418,7 +423,7 @@ private Struct getExpectedNestedProtoResultStringUserId() throws ParseException experiments.add("second experiment"); result.put("experiments_active", experiments); - result.put("status", "INACTIVE"); + result.put("status", useIntForEnums ? 1 : "INACTIVE"); result.put("map_type", getTestKeyValueMap()); Struct inner = new Struct(schema.field("inner").schema()); @@ -428,8 +433,8 @@ private Struct getExpectedNestedProtoResultStringUserId() throws ParseException return result; } - private Struct getExpectedNestedTestProtoResultIntUserId() throws ParseException { - Schema schema = getExpectedNestedTestProtoSchemaIntUserId(); + private Struct getExpectedNestedTestProtoResultIntUserId(boolean useIntForEnums) throws ParseException { + Schema schema = getExpectedNestedTestProtoSchemaIntUserId(useIntForEnums); Struct result = new Struct(schema.schema()); Struct userId = new Struct(schema.field("user_id").schema()); Struct union = new Struct(schema.field("user_id").schema().field("user_id_0").schema()); @@ -447,7 +452,7 @@ private Struct getExpectedNestedTestProtoResultIntUserId() throws ParseException experiments.add("second experiment"); result.put("experiments_active", experiments); - result.put("status", "INACTIVE"); + result.put("status", useIntForEnums ? 
1 : "INACTIVE"); result.put("map_type", getTestKeyValueMap()); Struct inner = new Struct(schema.field("inner").schema()); @@ -549,9 +554,23 @@ private SchemaAndValue getSchemaAndValue(ProtobufData protobufData, Message mess public void testToConnectDataWithNestedProtobufMessageAndStringUserId() throws Exception { NestedMessage message = createNestedTestProtoStringUserId(); SchemaAndValue result = getSchemaAndValue(message); - Schema expectedSchema = getExpectedNestedTestProtoSchemaStringUserId(); + Schema expectedSchema = getExpectedNestedTestProtoSchemaStringUserId(false); assertSchemasEqual(expectedSchema, result.schema()); - Struct expected = getExpectedNestedProtoResultStringUserId(); + Struct expected = getExpectedNestedProtoResultStringUserId(false); + assertEquals(expected, result.value()); + } + + @Test + public void testToConnectDataWithNestedProtobufMessageAndStringUserIdWithIntEnums() throws Exception { + NestedMessage message = createNestedTestProtoStringUserId(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true) + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue result = getSchemaAndValue(protobufData, message); + Schema expectedSchema = getExpectedNestedTestProtoSchemaStringUserId(true); + assertSchemasEqual(expectedSchema, result.schema()); + Struct expected = getExpectedNestedProtoResultStringUserId(true); assertEquals(expected, result.value()); } @@ -559,9 +578,25 @@ public void testToConnectDataWithNestedProtobufMessageAndStringUserId() throws E public void testToConnectDataWithNestedProtobufMessageAndIntUserId() throws Exception { NestedMessage message = createNestedTestProtoIntUserId(); SchemaAndValue result = getSchemaAndValue(message); - Schema expectedSchema = getExpectedNestedTestProtoSchemaIntUserId(); + Schema expectedSchema = getExpectedNestedTestProtoSchemaIntUserId(false); + assertSchemasEqual(expectedSchema, result.schema()); + Struct expected = getExpectedNestedTestProtoResultIntUserId(false); + assertSchemasEqual(expected.schema(), ((Struct) result.value()).schema()); + assertEquals(expected.schema(), ((Struct) result.value()).schema()); + assertEquals(expected, result.value()); + } + + @Test + public void testToConnectDataWithNestedProtobufMessageAndIntUserIdWithIntEnums() throws Exception { + NestedMessage message = createNestedTestProtoIntUserId(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true) + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue result = getSchemaAndValue(protobufData, message); + Schema expectedSchema = getExpectedNestedTestProtoSchemaIntUserId(true); assertSchemasEqual(expectedSchema, result.schema()); - Struct expected = getExpectedNestedTestProtoResultIntUserId(); + Struct expected = getExpectedNestedTestProtoResultIntUserId(true); assertSchemasEqual(expected.schema(), ((Struct) result.value()).schema()); assertEquals(expected.schema(), ((Struct) result.value()).schema()); assertEquals(expected, result.value()); @@ -1087,6 +1122,7 @@ public void testRoundTripConnectUInt32Fixed32() throws Exception { assertTrue(parsedMessage.toString().contains("test_uint32: " + UNSIGNED_RESULT)); } + @Test public void testFromConnectEnumUnionWithString() throws Exception { EnumUnion message = createEnumUnionWithString(); SchemaAndValue schemaAndValue = getSchemaAndValue(message); @@ -1113,6 +1149,19 @@ 
public void testFromConnectDataWithNestedProtobufMessageAndStringUserId() throws assertArrayEquals(messageBytes, nestedMessage.toByteArray()); } + @Test + public void testFromConnectDataWithNestedProtobufMessageAndStringUserIdWithIntEnums() throws Exception { + NestedMessage nestedMessage = createNestedTestProtoStringUserId(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true) + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, nestedMessage); + byte[] messageBytes = getMessageBytes(schemaAndValue); + + assertArrayEquals(messageBytes, nestedMessage.toByteArray()); + } + @Test public void testFromConnectDataWithNestedProtobufMessageAndIntUserId() throws Exception { NestedMessage nestedMessage = createNestedTestProtoIntUserId(); @@ -1122,6 +1171,19 @@ public void testFromConnectDataWithNestedProtobufMessageAndIntUserId() throws Ex assertArrayEquals(messageBytes, nestedMessage.toByteArray()); } + @Test + public void testFromConnectDataWithNestedProtobufMessageAndIntUserIdWithIntEnums() throws Exception { + NestedMessage nestedMessage = createNestedTestProtoIntUserId(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true) + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, nestedMessage); + byte[] messageBytes = getMessageBytes(schemaAndValue); + + assertArrayEquals(messageBytes, nestedMessage.toByteArray()); + } + @Test public void testFromConnectDataWithEmptyNestedProtobufMessage() throws Exception { NestedMessage nestedMessage = createEmptyNestedTestProto(); From 54009ef474c571379cbaafa9c3575b69b74adf46 Mon Sep 17 00:00:00 2001 From: amalgawa Date: Mon, 24 Jan 2022 22:11:30 +0530 Subject: [PATCH 16/73] Resolved checkstyle issue --- .../kafka/schemaregistry/rest/resources/ConfigResource.java | 4 ++-- .../kafka/schemaregistry/rest/resources/ModeResource.java | 4 ++-- .../rest/resources/SubjectVersionsResource.java | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java index 1a4d5e5ce99..e2cc6e253cb 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java @@ -93,8 +93,8 @@ public ConfigUpdateRequest updateSubjectLevelConfig( throw new RestInvalidCompatibilityException(); } - if (subject != null && (CharMatcher.javaIsoControl().matchesAnyOf(subject) || - QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject() + if (subject != null && (CharMatcher.javaIsoControl().matchesAnyOf(subject) + || QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject() .equals(GLOBAL_RESOURCE_NAME))) { throw Errors.invalidSubjectException(subject); } diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java index 4d9737bb8dc..86cf924160f 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java +++ 
b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java @@ -92,8 +92,8 @@ public ModeUpdateRequest updateMode( @QueryParam("force") boolean force ) { - if (subject != null && (CharMatcher.javaIsoControl().matchesAnyOf(subject) || - QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject() + if (subject != null && (CharMatcher.javaIsoControl().matchesAnyOf(subject) + || QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject() .equals(GLOBAL_RESOURCE_NAME))) { throw Errors.invalidSubjectException(subject); } diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java index 03dc4b1bdc2..7a59b4a87c6 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java @@ -281,8 +281,8 @@ public void register( subjectName, request.getVersion(), request.getId(), request.getSchemaType(), request.getSchema() == null ? 0 : request.getSchema().length()); - if (subjectName != null && (CharMatcher.javaIsoControl().matchesAnyOf(subjectName) || - QualifiedSubject.create(this.schemaRegistry.tenant(), subjectName).getSubject() + if (subjectName != null && (CharMatcher.javaIsoControl().matchesAnyOf(subjectName) + || QualifiedSubject.create(this.schemaRegistry.tenant(), subjectName).getSubject() .equals(GLOBAL_RESOURCE_NAME))) { throw Errors.invalidSubjectException(subjectName); } From 664ad3bfc9b1a4b5c7bae23b6412214f3a6d450b Mon Sep 17 00:00:00 2001 From: NeoKoder <87475799+varunpv@users.noreply.github.com> Date: Mon, 31 Jan 2022 12:04:24 +0530 Subject: [PATCH 17/73] DGS-2750 Add API to SchemaRegistryClient interface to get deleted subjects and schemas (#2136) * Added methods to SchemaRegistryClient Interface to get subjects, versions and metadata for soft deleted schemas. 
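A minimal sketch of how a caller might use the new lookups (the client construction and subject name are illustrative, not part of this patch; the usual client imports are assumed):

    SchemaRegistryClient client =
        new CachedSchemaRegistryClient("http://localhost:8081", 10);
    // List subjects, including soft-deleted ones
    Collection<String> subjects = client.getAllSubjects(true);
    // All versions of a subject, including soft-deleted versions
    List<Integer> versions = client.getAllVersions("my-subject", true);
    // Metadata for a specific, possibly soft-deleted, version
    SchemaMetadata metadata = client.getSchemaMetadata("my-subject", 1, true);

The interface defaults added below throw UnsupportedOperationException, so only implementations that override them, such as CachedSchemaRegistryClient here, can serve these deleted lookups.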
--- .../client/CachedSchemaRegistryClient.java | 25 +++++++++++++++++++ .../client/SchemaRegistryClient.java | 15 +++++++++++ 2 files changed, 40 insertions(+) diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java index 1b53656608f..00d6998349b 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java @@ -465,6 +465,18 @@ public SchemaMetadata getSchemaMetadata(String subject, int version) return new SchemaMetadata(id, version, schemaType, references, schema); } + @Override + public SchemaMetadata getSchemaMetadata(String subject, int version, boolean lookupDeletedSchema) + throws IOException, RestClientException { + io.confluent.kafka.schemaregistry.client.rest.entities.Schema response + = restService.getVersion(subject, version, lookupDeletedSchema); + int id = response.getId(); + String schemaType = response.getSchemaType(); + String schema = response.getSchema(); + List references = response.getReferences(); + return new SchemaMetadata(id, version, schemaType, references, schema); + } + @Override public SchemaMetadata getLatestSchemaMetadata(String subject) throws IOException, RestClientException { @@ -513,6 +525,13 @@ public List getAllVersions(String subject) return restService.getAllVersions(subject); } + @Override + public List getAllVersions(String subject, boolean lookupDeletedSchema) + throws IOException, RestClientException { + return restService.getAllVersions(RestService.DEFAULT_REQUEST_PROPERTIES, + subject, lookupDeletedSchema); + } + @Override public int getId(String subject, ParsedSchema schema) throws IOException, RestClientException { @@ -651,6 +670,12 @@ public Collection getAllSubjects() throws IOException, RestClientExcepti return restService.getAllSubjects(); } + @Override + public Collection getAllSubjects(boolean lookupDeletedSubject) + throws IOException, RestClientException { + return restService.getAllSubjects(lookupDeletedSubject); + } + @Override public Collection getAllSubjectsByPrefix(String subjectPrefix) throws IOException, RestClientException { diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/SchemaRegistryClient.java index 245d6349a87..865c7e767d6 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/SchemaRegistryClient.java @@ -136,6 +136,11 @@ public SchemaMetadata getLatestSchemaMetadata(String subject) public SchemaMetadata getSchemaMetadata(String subject, int version) throws IOException, RestClientException; + default SchemaMetadata getSchemaMetadata(String subject, int version, + boolean lookupDeletedSchema) throws IOException, RestClientException { + throw new UnsupportedOperationException(); + } + @Deprecated default int getVersion(String subject, org.apache.avro.Schema schema) throws IOException, RestClientException { @@ -152,6 +157,11 @@ default int getVersion(String subject, ParsedSchema schema, boolean normalize) public List getAllVersions(String subject) throws IOException, RestClientException; + default List getAllVersions(String subject, boolean lookupDeletedSchema) + throws IOException, RestClientException { + throw 
new UnsupportedOperationException(); + } + @Deprecated default boolean testCompatibility(String subject, org.apache.avro.Schema schema) throws IOException, RestClientException { @@ -191,6 +201,11 @@ default void deleteMode(String subject) throws IOException, RestClientException public Collection getAllSubjects() throws IOException, RestClientException; + default Collection getAllSubjects(boolean lookupDeletedSubject) throws IOException, + RestClientException { + throw new UnsupportedOperationException(); + } + default Collection getAllSubjectsByPrefix(String subjectPrefix) throws IOException, RestClientException { throw new UnsupportedOperationException(); From e695ade317bd6d1f70f36bbd0502c73d7e3bc5de Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Thu, 3 Feb 2022 08:23:39 -0800 Subject: [PATCH 18/73] DGS-2672 Add generalized sum type support (#2149) * DGS-2672 Add generalized sum type support * Minor fix --- avro-data/pom.xml | 5 + .../io/confluent/connect/avro/AvroData.java | 166 +++++++++++------- .../connect/avro/AvroDataConfig.java | 15 ++ .../confluent/connect/avro/AvroDataTest.java | 151 +++++++++++++++- json-schema-converter/pom.xml | 5 + .../connect/json/JsonSchemaData.java | 71 ++++++-- .../connect/json/JsonSchemaDataConfig.java | 16 ++ .../connect/json/JsonSchemaDataTest.java | 100 +++++++++++ pom.xml | 1 + protobuf-converter/pom.xml | 5 + .../connect/protobuf/ProtobufData.java | 85 ++++++--- .../connect/protobuf/ProtobufDataConfig.java | 15 ++ .../connect/protobuf/ProtobufDataTest.java | 124 ++++++++++++- .../protobuf/dynamic/DynamicSchema.java | 11 ++ .../protobuf/test/EnumReferenceOuter.java | 10 +- .../protobuf/test/EnumRootOuter.java | 4 + .../protobuf/test/EnumUnionOuter.java | 79 ++++++++- schema-converter/pom.xml | 37 ++++ .../confluent/connect/schema/ConnectEnum.java | 135 ++++++++++++++ .../connect/schema/ConnectUnion.java | 131 ++++++++++++++ 20 files changed, 1051 insertions(+), 115 deletions(-) create mode 100644 schema-converter/pom.xml create mode 100644 schema-converter/src/main/java/io/confluent/connect/schema/ConnectEnum.java create mode 100644 schema-converter/src/main/java/io/confluent/connect/schema/ConnectUnion.java diff --git a/avro-data/pom.xml b/avro-data/pom.xml index dbf17c52109..e133df7455f 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -42,6 +42,11 @@ kafka-avro-serializer ${io.confluent.schema-registry.version} + + io.confluent + kafka-schema-converter + ${io.confluent.schema-registry.version} + com.google.guava diff --git a/avro-data/src/main/java/io/confluent/connect/avro/AvroData.java b/avro-data/src/main/java/io/confluent/connect/avro/AvroData.java index 8e1cbb58be8..a1a944c4585 100644 --- a/avro-data/src/main/java/io/confluent/connect/avro/AvroData.java +++ b/avro-data/src/main/java/io/confluent/connect/avro/AvroData.java @@ -22,12 +22,15 @@ import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.NullNode; import com.fasterxml.jackson.databind.node.ObjectNode; +import io.confluent.connect.schema.ConnectEnum; +import io.confluent.connect.schema.ConnectUnion; import io.confluent.kafka.schemaregistry.utils.BoundedConcurrentHashMap; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.regex.Pattern; import org.apache.avro.JsonProperties; import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericData.EnumSymbol; import org.apache.avro.generic.GenericEnumSymbol; import org.apache.avro.generic.GenericFixed; import 
org.apache.avro.generic.GenericRecord; @@ -111,6 +114,12 @@ public class AvroData { public static final String AVRO_TYPE_ANYTHING = NAMESPACE + ".Anything"; + public static final String GENERALIZED_TYPE_UNION = ConnectUnion.LOGICAL_PARAMETER; + public static final String GENERALIZED_TYPE_ENUM = ConnectEnum.LOGICAL_PARAMETER; + public static final String GENERALIZED_TYPE_UNION_PREFIX = "connect_union_"; + public static final String GENERALIZED_TYPE_UNION_FIELD_PREFIX = + GENERALIZED_TYPE_UNION_PREFIX + "field_"; + private static final Map NON_AVRO_TYPES_BY_TYPE_CODE = new HashMap<>(); private static Pattern NAME_START_CHAR = Pattern.compile("^[A-Za-z_]"); @@ -318,9 +327,12 @@ public Object convert(Schema schema, Object value) { }); } + private int unionIndex = 0; + private Map fromConnectSchemaCache; private Map toConnectSchemaCache; private boolean connectMetaData; + private boolean generalizedSumTypeSupport; private boolean enhancedSchemaSupport; private boolean scrubInvalidNames; private boolean discardTypeDocDefault; @@ -335,6 +347,7 @@ public AvroData(AvroDataConfig avroDataConfig) { fromConnectSchemaCache = new BoundedConcurrentHashMap<>(avroDataConfig.getSchemasCacheSize()); toConnectSchemaCache = new BoundedConcurrentHashMap<>(avroDataConfig.getSchemasCacheSize()); this.connectMetaData = avroDataConfig.isConnectMetaData(); + this.generalizedSumTypeSupport = avroDataConfig.isGeneralizedSumTypeSupport(); this.enhancedSchemaSupport = avroDataConfig.isEnhancedAvroSchemaSupport(); this.scrubInvalidNames = avroDataConfig.isScrubInvalidNames(); this.discardTypeDocDefault = avroDataConfig.isDiscardTypeDocDefault(); @@ -349,8 +362,7 @@ public Object fromConnectData(Schema schema, Object value) { } protected Object fromConnectData(Schema schema, org.apache.avro.Schema avroSchema, Object value) { - return fromConnectData(schema, avroSchema, value, true, false, - enhancedSchemaSupport, scrubInvalidNames); + return fromConnectData(schema, avroSchema, value, true, false); } /** @@ -375,9 +387,7 @@ private Object fromConnectData( org.apache.avro.Schema avroSchema, Object logicalValue, boolean requireContainer, - boolean requireSchemalessContainerNull, - boolean enhancedSchemaSupport, - boolean scrubInvalidNames + boolean requireSchemalessContainerNull ) { Schema.Type schemaType = schema != null ? 
schema.type() @@ -462,17 +472,13 @@ private Object fromConnectData( maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_BOOLEAN_FIELD), requireContainer); case STRING: - if (enhancedSchemaSupport && schema != null && schema.parameters() != null + if (generalizedSumTypeSupport && ConnectEnum.isEnum(schema)) { + String enumSchemaName = schema.parameters().get(GENERALIZED_TYPE_ENUM); + value = enumSymbol(avroSchema, value, enumSchemaName); + } else if (enhancedSchemaSupport && schema != null && schema.parameters() != null && schema.parameters().containsKey(AVRO_TYPE_ENUM)) { String enumSchemaName = schema.parameters().get(AVRO_TYPE_ENUM); - org.apache.avro.Schema enumSchema; - if (avroSchema.getType() == org.apache.avro.Schema.Type.UNION) { - int enumIndex = avroSchema.getIndexNamed(enumSchemaName); - enumSchema = avroSchema.getTypes().get(enumIndex); - } else { - enumSchema = avroSchema; - } - value = new GenericData.EnumSymbol(enumSchema, (String) value); + value = enumSymbol(avroSchema, value, enumSchemaName); } else { String stringValue = (String) value; // Check for correct type } @@ -488,13 +494,15 @@ private Object fromConnectData( int size = Integer.parseInt(schema.parameters().get(CONNECT_AVRO_FIXED_SIZE_PROP)); org.apache.avro.Schema fixedSchema = null; if (avroSchema.getType() == org.apache.avro.Schema.Type.UNION) { + int index = 0; for (org.apache.avro.Schema memberSchema : avroSchema.getTypes()) { if (memberSchema.getType() == org.apache.avro.Schema.Type.FIXED && memberSchema.getFixedSize() == size - && unionMemberFieldName(memberSchema, enhancedSchemaSupport) - .equals(unionMemberFieldName(schema, enhancedSchemaSupport))) { + && unionMemberFieldName(memberSchema, index) + .equals(unionMemberFieldName(schema, index))) { fixedSchema = memberSchema; } + index++; } if (fixedSchema == null) { throw new DataException("Fixed size " + size + " not in union " + avroSchema); @@ -526,9 +534,7 @@ && unionMemberFieldName(memberSchema, enhancedSchemaSupport) elementAvroSchema, val, false, - true, - enhancedSchemaSupport, - scrubInvalidNames + true ) ); } @@ -552,7 +558,7 @@ && unionMemberFieldName(memberSchema, enhancedSchemaSupport) // Key is a String, no conversion needed Object convertedValue = fromConnectData(schema.valueSchema(), underlyingAvroSchema.getValueType(), - entry.getValue(), false, true, enhancedSchemaSupport, scrubInvalidNames + entry.getValue(), false, true ); converted.put((String) entry.getKey(), convertedValue); } @@ -568,12 +574,10 @@ && unionMemberFieldName(memberSchema, enhancedSchemaSupport) org.apache.avro.Schema avroValueSchema = elementSchema.getField(VALUE_FIELD).schema(); for (Map.Entry entry : map.entrySet()) { Object keyConverted = fromConnectData(schema != null ? schema.keySchema() : null, - avroKeySchema, entry.getKey(), false, true, - enhancedSchemaSupport, scrubInvalidNames); + avroKeySchema, entry.getKey(), false, true); Object valueConverted = fromConnectData(schema != null ? schema.valueSchema() : null, avroValueSchema, entry.getValue(), false, - true, enhancedSchemaSupport, - scrubInvalidNames); + true); converted.add( new GenericRecordBuilder(elementSchema) .set(KEY_FIELD, keyConverted) @@ -594,7 +598,7 @@ avroSchema, maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), } //This handles the inverting of a union which is held as a struct, where each field is // one of the union types. 
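// (Under generalized sum type support such structs are named connect_union_<n> and carry the ConnectUnion logical parameter, so the check below goes through isUnionSchema rather than matching AVRO_TYPE_UNION alone.)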
- if (AVRO_TYPE_UNION.equals(schema.name())) { + if (isUnionSchema(schema)) { for (Field field : schema.fields()) { Object object = struct.get(field); if (object != null) { @@ -603,14 +607,11 @@ avroSchema, maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), avroSchema, object, false, - true, - enhancedSchemaSupport, - scrubInvalidNames + true ); } } - return fromConnectData(schema, avroSchema, null, false, true, - enhancedSchemaSupport, scrubInvalidNames); + return fromConnectData(schema, avroSchema, null, false, true); } else { org.apache.avro.Schema underlyingAvroSchema = avroSchemaForUnderlyingTypeIfOptional( schema, avroSchema, scrubInvalidNames); @@ -621,8 +622,7 @@ avroSchema, maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), org.apache.avro.Schema fieldAvroSchema = theField.schema(); convertedBuilder.set( fieldName, - fromConnectData(field.schema(), fieldAvroSchema, struct.get(field), false, - true, enhancedSchemaSupport, scrubInvalidNames) + fromConnectData(field.schema(), fieldAvroSchema, struct.get(field), false, true) ); } return convertedBuilder.build(); @@ -637,6 +637,18 @@ avroSchema, maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), } } + private EnumSymbol enumSymbol( + org.apache.avro.Schema avroSchema, Object value, String enumSchemaName) { + org.apache.avro.Schema enumSchema; + if (avroSchema.getType() == org.apache.avro.Schema.Type.UNION) { + int enumIndex = avroSchema.getIndexNamed(enumSchemaName); + enumSchema = avroSchema.getTypes().get(enumIndex); + } else { + enumSchema = avroSchema; + } + return new GenericData.EnumSymbol(enumSchema, (String) value); + } + /** * MapEntry types in connect Schemas are represented as Arrays of record. * Return the array type from the union instead of the union itself. @@ -772,7 +784,7 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, org.apache.avro.Schema cached = fromConnectSchemaCache.get(schema); - if (cached == null && !AVRO_TYPE_UNION.equals(schema.name()) && !schema.isOptional()) { + if (cached == null && !isUnionSchema(schema) && !schema.isOptional()) { cached = fromConnectContext.schemaMap.get(schema); } if (cached != null) { @@ -808,25 +820,28 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, baseSchema = org.apache.avro.SchemaBuilder.builder().booleanType(); break; case STRING: - if (enhancedSchemaSupport && schema.parameters() != null - && schema.parameters().containsKey(AVRO_TYPE_ENUM)) { + if ((generalizedSumTypeSupport || enhancedSchemaSupport) + && schema.parameters() != null + && (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM) + || schema.parameters().containsKey(AVRO_TYPE_ENUM))) { + String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : AVRO_TYPE_ENUM; List symbols = new ArrayList<>(); for (Map.Entry entry : schema.parameters().entrySet()) { - if (entry.getKey().startsWith(AVRO_TYPE_ENUM + ".")) { - symbols.add(entry.getValue()); + if (entry.getKey().startsWith(paramName + ".")) { + String enumSymbol = entry.getKey().substring(paramName.length() + 1); + symbols.add(enumSymbol); } } Pair names = getNameOrDefault(fromConnectContext, schema.name()); String name = names.getValue(); + String enumName = schema.parameters().get(paramName); String enumDoc = schema.parameters().get(AVRO_ENUM_DOC_PREFIX_PROP + name); String enumDefault = schema.parameters().get(AVRO_ENUM_DEFAULT_PREFIX_PROP + name); baseSchema = discardTypeDocDefault - ? 
org.apache.avro.SchemaBuilder.builder().enumeration( - schema.parameters().get(AVRO_TYPE_ENUM)) + ? org.apache.avro.SchemaBuilder.builder().enumeration(enumName) .doc(schema.parameters().get(CONNECT_ENUM_DOC_PROP)) .symbols(symbols.toArray(new String[symbols.size()])) - : org.apache.avro.SchemaBuilder.builder().enumeration( - schema.parameters().get(AVRO_TYPE_ENUM)) + : org.apache.avro.SchemaBuilder.builder().enumeration(enumName) .doc(enumDoc) .defaultSymbol(enumDefault) .symbols(symbols.toArray(new String[symbols.size()])); @@ -905,7 +920,7 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, } break; case STRUCT: - if (AVRO_TYPE_UNION.equals(schema.name())) { + if (isUnionSchema(schema)) { List unionSchemas = new ArrayList<>(); if (schema.isOptional()) { unionSchemas.add(org.apache.avro.SchemaBuilder.builder().nullType()); @@ -1258,7 +1273,7 @@ private JsonNode defaultValueFromConnect(Schema schema, Object value) { return array; } case STRUCT: { - boolean isUnion = AVRO_TYPE_UNION.equals(schema.name()); + boolean isUnion = isUnionSchema(schema); ObjectNode node = JsonNodeFactory.instance.objectNode(); Struct struct = ((Struct) defaultVal); for (Field field : (schema.fields())) { @@ -1564,23 +1579,24 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon case STRUCT: { // Special case support for union types - if (schema.name() != null && schema.name().equals(AVRO_TYPE_UNION)) { + if (isUnionSchema(schema)) { Schema valueRecordSchema = null; if (value instanceof IndexedRecord) { IndexedRecord valueRecord = ((IndexedRecord) value); valueRecordSchema = toConnectSchemaWithCycles( valueRecord.getSchema(), true, null, null, toConnectContext); } + int index = 0; for (Field field : schema.fields()) { Schema fieldSchema = field.schema(); - if (isInstanceOfAvroSchemaTypeForSimpleSchema( - fieldSchema, value, enhancedSchemaSupport) + if (isInstanceOfAvroSchemaTypeForSimpleSchema(fieldSchema, value, index) || (valueRecordSchema != null && schemaEquals(valueRecordSchema, fieldSchema))) { converted = new Struct(schema).put( - unionMemberFieldName(fieldSchema, enhancedSchemaSupport), + unionMemberFieldName(fieldSchema, index), toConnectData(fieldSchema, value, toConnectContext)); break; } + index++; } if (converted == null) { throw new DataException( @@ -1838,9 +1854,16 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, schema.getEnumDefault()); } } - builder.parameter(AVRO_TYPE_ENUM, schema.getFullName()); + String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : AVRO_TYPE_ENUM; + builder.parameter(paramName, schema.getFullName()); + int symbolIndex = 0; for (String enumSymbol : schema.getEnumSymbols()) { - builder.parameter(AVRO_TYPE_ENUM + "." + enumSymbol, enumSymbol); + if (generalizedSumTypeSupport) { + builder.parameter(paramName + "." + enumSymbol, String.valueOf(symbolIndex)); + } else { + builder.parameter(paramName + "." + enumSymbol, enumSymbol); + } + symbolIndex++; } break; @@ -1855,13 +1878,20 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, } } } - builder = SchemaBuilder.struct().name(AVRO_TYPE_UNION); + String unionName = generalizedSumTypeSupport + ? 
GENERALIZED_TYPE_UNION_PREFIX + (unionIndex++) + : AVRO_TYPE_UNION; + builder = SchemaBuilder.struct().name(unionName); + if (generalizedSumTypeSupport) { + builder.parameter(GENERALIZED_TYPE_UNION, unionName); + } Set fieldNames = new HashSet<>(); + int fieldIndex = 0; for (org.apache.avro.Schema memberSchema : schema.getTypes()) { if (memberSchema.getType() == org.apache.avro.Schema.Type.NULL) { builder.optional(); } else { - String fieldName = unionMemberFieldName(memberSchema, enhancedSchemaSupport); + String fieldName = unionMemberFieldName(memberSchema, fieldIndex); if (fieldNames.contains(fieldName)) { throw new DataException("Multiple union schemas map to the Connect union field name"); } @@ -1871,6 +1901,7 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, toConnectSchemaWithCycles(memberSchema, true, null, null, toConnectContext) ); } + fieldIndex++; } break; } @@ -2148,7 +2179,7 @@ private Object defaultValueFromAvroWithoutLogical(Schema schema, return null; } else { return defaultValueFromAvro( - schema.field(unionMemberFieldName(memberAvroSchema, enhancedSchemaSupport)).schema(), + schema.field(unionMemberFieldName(memberAvroSchema, 0)).schema(), memberAvroSchema, value, toConnectContext); @@ -2162,8 +2193,10 @@ private Object defaultValueFromAvroWithoutLogical(Schema schema, } - private String unionMemberFieldName(org.apache.avro.Schema schema, - boolean enhancedSchemaSupport) { + private String unionMemberFieldName(org.apache.avro.Schema schema, int index) { + if (generalizedSumTypeSupport) { + return GENERALIZED_TYPE_UNION_FIELD_PREFIX + index; + } if (schema.getType() == org.apache.avro.Schema.Type.RECORD || schema.getType() == org.apache.avro.Schema.Type.ENUM || schema.getType() == org.apache.avro.Schema.Type.FIXED) { @@ -2176,7 +2209,10 @@ private String unionMemberFieldName(org.apache.avro.Schema schema, return schema.getType().getName(); } - private String unionMemberFieldName(Schema schema, boolean enhancedSchemaSupport) { + private String unionMemberFieldName(Schema schema, int index) { + if (generalizedSumTypeSupport) { + return GENERALIZED_TYPE_UNION_FIELD_PREFIX + index; + } if (schema.type() == Schema.Type.STRUCT || isEnumSchema(schema) || isFixedSchema(schema)) { if (enhancedSchemaSupport) { return scrubFullName(schema.name(), scrubInvalidNames); @@ -2187,10 +2223,15 @@ private String unionMemberFieldName(Schema schema, boolean enhancedSchemaSupport return CONNECT_TYPES_TO_AVRO_TYPES.get(schema.type()).getName(); } + private static boolean isUnionSchema(Schema schema) { + return AVRO_TYPE_UNION.equals(schema.name()) || ConnectUnion.isUnion(schema); + } + private static boolean isEnumSchema(Schema schema) { return schema.type() == Schema.Type.STRING && schema.parameters() != null - && schema.parameters().containsKey(AVRO_TYPE_ENUM); + && (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM) + || schema.parameters().containsKey(AVRO_TYPE_ENUM)); } private static boolean isFixedSchema(Schema schema) { @@ -2202,9 +2243,10 @@ private static boolean isFixedSchema(Schema schema) { private boolean isInstanceOfAvroSchemaTypeForSimpleSchema(Schema fieldSchema, Object value, - boolean enhancedSchemaSupport) { + int index) { if (isEnumSchema(fieldSchema)) { - String enumSchemaName = fieldSchema.parameters().get(AVRO_TYPE_ENUM); + String paramName = generalizedSumTypeSupport ? 
GENERALIZED_TYPE_ENUM : AVRO_TYPE_ENUM; + String enumSchemaName = fieldSchema.parameters().get(paramName); if (value instanceof GenericData.EnumSymbol) { return ((GenericData.EnumSymbol) value).getSchema().getFullName().equals(enumSchemaName); } else { @@ -2220,7 +2262,7 @@ private boolean isInstanceOfAvroSchemaTypeForSimpleSchema(Schema fieldSchema, if (isFixedSchema(fieldSchema)) { if (fixedValueSizeMatch(fieldSchema, value, Integer.parseInt(fieldSchema.parameters().get(CONNECT_AVRO_FIXED_SIZE_PROP)), - enhancedSchemaSupport)) { + index)) { return true; } } else { @@ -2237,14 +2279,14 @@ private boolean isInstanceOfAvroSchemaTypeForSimpleSchema(Schema fieldSchema, private boolean fixedValueSizeMatch(Schema fieldSchema, Object value, int size, - boolean enhancedSchemaSupport) { + int index) { if (value instanceof byte[]) { return ((byte[]) value).length == size; } else if (value instanceof ByteBuffer) { return ((ByteBuffer)value).remaining() == size; } else if (value instanceof GenericFixed) { - return unionMemberFieldName(((GenericFixed) value).getSchema(), enhancedSchemaSupport) - .equals(unionMemberFieldName(fieldSchema, enhancedSchemaSupport)); + return unionMemberFieldName(((GenericFixed) value).getSchema(), index) + .equals(unionMemberFieldName(fieldSchema, index)); } else { throw new DataException("Invalid class for fixed, expecting GenericFixed, byte[]" + " or ByteBuffer but found " + value.getClass()); diff --git a/avro-data/src/main/java/io/confluent/connect/avro/AvroDataConfig.java b/avro-data/src/main/java/io/confluent/connect/avro/AvroDataConfig.java index 9c953e82a17..2a2fc7c4033 100644 --- a/avro-data/src/main/java/io/confluent/connect/avro/AvroDataConfig.java +++ b/avro-data/src/main/java/io/confluent/connect/avro/AvroDataConfig.java @@ -25,6 +25,12 @@ public class AvroDataConfig extends AbstractConfig { + public static final String GENERALIZED_SUM_TYPE_SUPPORT_CONFIG = "generalized.sum.type.support"; + public static final boolean GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT = false; + public static final String GENERALIZED_SUM_TYPE_SUPPORT_DOC = + "Toggle for enabling/disabling generalized sum type support: interoperability of enum/union " + + "with other schema formats"; + public static final String ENHANCED_AVRO_SCHEMA_SUPPORT_CONFIG = "enhanced.avro.schema.support"; public static final boolean ENHANCED_AVRO_SCHEMA_SUPPORT_DEFAULT = false; public static final String ENHANCED_AVRO_SCHEMA_SUPPORT_DOC = @@ -56,6 +62,11 @@ public class AvroDataConfig extends AbstractConfig { public static ConfigDef baseConfigDef() { return new ConfigDef() + .define(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, + ConfigDef.Type.BOOLEAN, + GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT, + ConfigDef.Importance.MEDIUM, + GENERALIZED_SUM_TYPE_SUPPORT_DOC) .define(ENHANCED_AVRO_SCHEMA_SUPPORT_CONFIG, ConfigDef.Type.BOOLEAN, ENHANCED_AVRO_SCHEMA_SUPPORT_DEFAULT, @@ -78,6 +89,10 @@ public AvroDataConfig(Map props) { super(baseConfigDef(), props); } + public boolean isGeneralizedSumTypeSupport() { + return this.getBoolean(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG); + } + public boolean isEnhancedAvroSchemaSupport() { return this.getBoolean(ENHANCED_AVRO_SCHEMA_SUPPORT_CONFIG); } diff --git a/avro-data/src/test/java/io/confluent/connect/avro/AvroDataTest.java b/avro-data/src/test/java/io/confluent/connect/avro/AvroDataTest.java index 66c5800fbcd..f037e7aa750 100644 --- a/avro-data/src/test/java/io/confluent/connect/avro/AvroDataTest.java +++ b/avro-data/src/test/java/io/confluent/connect/avro/AvroDataTest.java @@ -26,6 +26,7 @@ 
import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; import io.confluent.kafka.schemaregistry.avro.AvroSchema; +import java.util.LinkedHashMap; import org.apache.avro.LogicalTypes; import org.apache.avro.generic.GenericContainer; import org.apache.avro.generic.GenericData; @@ -211,6 +212,37 @@ public void testFromConnectEnum() { avroData); } + @Test + public void testFromConnectEnumWithGeneralizedSumTypeSupport() { + avroData = new AvroData(new AvroDataConfig.Builder() + .with(AvroDataConfig.SCHEMAS_CACHE_SIZE_CONFIG, 2) + .with(AvroDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, true) + .build()); + // Enums are just converted to strings, original enum is preserved in parameters + org.apache.avro.Schema avroSchema = org.apache.avro.SchemaBuilder.builder() + .enumeration("TestEnum") + .doc("some documentation") + .symbols("foo", "bar", "baz"); + Map params = new LinkedHashMap<>(); + params.put("io.confluent.connect.avro.enum.doc.TestEnum", "some documentation"); + params.put("org.apache.kafka.connect.data.Enum", "TestEnum"); + params.put("org.apache.kafka.connect.data.Enum.foo", "0"); + params.put("org.apache.kafka.connect.data.Enum.bar", "1"); + params.put("org.apache.kafka.connect.data.Enum.baz", "2"); + avroSchema.addProp("connect.parameters", params); + avroSchema.addProp("connect.name", "TestEnum"); + SchemaBuilder builder = SchemaBuilder.string().name("TestEnum"); + builder.parameter(AVRO_ENUM_DOC_PREFIX_PROP + "TestEnum", "some documentation"); + builder.parameter(GENERALIZED_TYPE_ENUM, "TestEnum"); + int i = 0; + for(String enumSymbol : new String[]{"foo", "bar", "baz"}) { + builder.parameter(GENERALIZED_TYPE_ENUM+"."+enumSymbol, String.valueOf(i++)); + } + + checkNonRecordConversion(avroSchema, new GenericData.EnumSymbol(avroSchema, "bar"), + builder.build(), "bar", avroData); + } + @Test public void testFromConnectMapWithStringKey() { final Schema schema = SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA); @@ -300,7 +332,54 @@ public void testFromConnectFixedUnion() { assertEquals(2, genericData.resolveUnion(unionSchema, avroData.fromConnectData(union, unionSameOther))); } - + + @Test + public void testFromConnectUnionWithGeneralizedSumTypeSupport() { + avroData = new AvroData(new AvroDataConfig.Builder() + .with(AvroDataConfig.SCHEMAS_CACHE_SIZE_CONFIG, 2) + .with(AvroDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, true) + .build()); + // Make sure we handle primitive types and named types properly by using a variety of types + org.apache.avro.Schema avroRecordSchema1 = org.apache.avro.SchemaBuilder.builder() + .record("Test1").fields().requiredInt("test").endRecord(); + // Add connect name + avroRecordSchema1.addProp("connect.name", "Test1"); + org.apache.avro.Schema avroRecordSchema2 = org.apache.avro.SchemaBuilder.builder() + .record("Test2").namespace("io.confluent").fields().requiredInt("test").endRecord(); + // Add connect name + avroRecordSchema2.addProp("connect.name", "io.confluent.Test2"); + org.apache.avro.Schema avroSchema = org.apache.avro.SchemaBuilder.builder().unionOf() + .intType().and() + .stringType().and() + .type(avroRecordSchema1).and() + .type(avroRecordSchema2) + .endUnion(); + + Schema recordSchema1 = SchemaBuilder.struct().name("Test1") + .field("test", Schema.INT32_SCHEMA).optional().build(); + Schema recordSchema2 = SchemaBuilder.struct().name("io.confluent.Test2") + .field("test", Schema.INT32_SCHEMA).optional().build(); + Schema schema = SchemaBuilder.struct() + .name("connect_union_0") + 
.parameter("org.apache.kafka.connect.data.Union", "connect_union_0") + .field("connect_union_field_0", Schema.OPTIONAL_INT32_SCHEMA) + .field("connect_union_field_1", Schema.OPTIONAL_STRING_SCHEMA) + .field("connect_union_field_2", recordSchema1) + .field("connect_union_field_3", recordSchema2) + .build(); + assertEquals(12, + avroData.fromConnectData(schema, new Struct(schema).put("connect_union_field_0", 12))); + assertEquals("teststring", + avroData.fromConnectData(schema, new Struct(schema).put("connect_union_field_1", "teststring"))); + + Struct schema1Test = new Struct(schema).put("connect_union_field_2", new Struct(recordSchema1).put("test", 12)); + GenericRecord record1Test = new GenericRecordBuilder(avroRecordSchema1).set("test", 12).build(); + Struct schema2Test = new Struct(schema).put("connect_union_field_3", new Struct(recordSchema2).put("test", 12)); + GenericRecord record2Test = new GenericRecordBuilder(avroRecordSchema2).set("test", 12).build(); + assertEquals(record1Test, avroData.fromConnectData(schema, schema1Test)); + assertEquals(record2Test, avroData.fromConnectData(schema, schema2Test)); + } + @Test public void testFromConnectWithInvalidName() { AvroDataConfig avroDataConfig = new AvroDataConfig.Builder() @@ -2053,6 +2132,51 @@ public void testToConnectUnionWithEnhanced() { avroData.toConnectData(avroSchema, record2Test)); } + @Test + public void testToConnectUnionWithGeneralizedSumTypeSupport() { + avroData = new AvroData(new AvroDataConfig.Builder() + .with(AvroDataConfig.SCHEMAS_CACHE_SIZE_CONFIG, 2) + .with(AvroDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, true) + .build()); + // Make sure we handle primitive types and named types properly by using a variety of types + org.apache.avro.Schema avroRecordSchema1 = org.apache.avro.SchemaBuilder.builder() + .record("Test1").fields().requiredInt("test").endRecord(); + org.apache.avro.Schema avroRecordSchema2 = org.apache.avro.SchemaBuilder.builder() + .record("Test2").namespace("io.confluent").fields().requiredInt("test").endRecord(); + org.apache.avro.Schema avroSchema = org.apache.avro.SchemaBuilder.builder().unionOf() + .intType().and() + .stringType().and() + .type(avroRecordSchema1).and() + .type(avroRecordSchema2) + .endUnion(); + + Schema recordSchema1 = SchemaBuilder.struct().name("Test1") + .field("test", Schema.INT32_SCHEMA).optional().build(); + Schema recordSchema2 = SchemaBuilder.struct().name("io.confluent.Test2") + .field("test", Schema.INT32_SCHEMA).optional().build(); + Schema schema = SchemaBuilder.struct() + .name("connect_union_0") + .parameter("org.apache.kafka.connect.data.Union", "connect_union_0") + .field("connect_union_field_0", Schema.OPTIONAL_INT32_SCHEMA) + .field("connect_union_field_1", Schema.OPTIONAL_STRING_SCHEMA) + .field("connect_union_field_2", recordSchema1) + .field("connect_union_field_3", recordSchema2) + .build(); + assertEquals(new SchemaAndValue(schema, new Struct(schema).put("connect_union_field_0", 12)), + avroData.toConnectData(avroSchema, 12)); + assertEquals(new SchemaAndValue(schema, new Struct(schema).put("connect_union_field_1", "teststring")), + avroData.toConnectData(avroSchema, "teststring")); + + Struct schema1Test = new Struct(schema).put("connect_union_field_2", new Struct(recordSchema1).put("test", 12)); + GenericRecord record1Test = new GenericRecordBuilder(avroRecordSchema1).set("test", 12).build(); + Struct schema2Test = new Struct(schema).put("connect_union_field_3", new Struct(recordSchema2).put("test", 12)); + GenericRecord record2Test = new 
GenericRecordBuilder(avroRecordSchema2).set("test", 12).build(); + assertEquals(new SchemaAndValue(schema, schema1Test), + avroData.toConnectData(avroSchema, record1Test)); + assertEquals(new SchemaAndValue(schema, schema2Test), + avroData.toConnectData(avroSchema, record2Test)); + } + @Test(expected = DataException.class) public void testToConnectUnionRecordConflict() { // If the records have the same name but are in different namespaces, we don't support this @@ -2129,6 +2253,31 @@ public void testToConnectEnumWithNoDoc() { avroData.toConnectData(avroSchema, new GenericData.EnumSymbol(avroSchema, "bar"))); } + @Test + public void testToConnectEnumWithGeneralizedSumTypeSupport() { + avroData = new AvroData(new AvroDataConfig.Builder() + .with(AvroDataConfig.SCHEMAS_CACHE_SIZE_CONFIG, 2) + .with(AvroDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, true) + .build()); + // Enums are just converted to strings, original enum is preserved in parameters + org.apache.avro.Schema avroSchema = org.apache.avro.SchemaBuilder.builder() + .enumeration("TestEnum") + .doc("some documentation") + .symbols("foo", "bar", "baz"); + SchemaBuilder builder = SchemaBuilder.string().name("TestEnum"); + builder.parameter(AVRO_ENUM_DOC_PREFIX_PROP + "TestEnum", "some documentation"); + builder.parameter(GENERALIZED_TYPE_ENUM, "TestEnum"); + int i = 0; + for(String enumSymbol : new String[]{"foo", "bar", "baz"}) { + builder.parameter(GENERALIZED_TYPE_ENUM+"."+enumSymbol, String.valueOf(i++)); + } + + assertEquals(new SchemaAndValue(builder.build(), "bar"), + avroData.toConnectData(avroSchema, "bar")); + assertEquals(new SchemaAndValue(builder.build(), "bar"), + avroData.toConnectData(avroSchema, new GenericData.EnumSymbol(avroSchema, "bar"))); + } + @Test public void testToConnectOptionalPrimitiveWithConnectMetadata() { Schema schema = SchemaBuilder.string(). 
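The Avro-side changes above can be exercised outside the test suite with a few lines of driver code. The sketch below is illustrative only and is not part of this patch; the class and enum names ("GeneralizedEnumDemo", "Color", "RED", "GREEN") are hypothetical, and it assumes the avro-data module built from this change is on the classpath. It enables generalized.sum.type.support and converts a Connect enum-annotated string schema the same way testFromConnectEnumWithGeneralizedSumTypeSupport does, yielding an Avro GenericData.EnumSymbol rather than a plain string.

    // Illustrative sketch, not part of this patch; names below are hypothetical.
    import io.confluent.connect.avro.AvroData;
    import io.confluent.connect.avro.AvroDataConfig;
    import org.apache.kafka.connect.data.Schema;
    import org.apache.kafka.connect.data.SchemaBuilder;

    public class GeneralizedEnumDemo {
      public static void main(String[] args) {
        // Enable the generalized sum type support added by this patch.
        AvroData avroData = new AvroData(new AvroDataConfig.Builder()
            .with(AvroDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, true)
            .build());
        // Under generalized sum types, a Connect enum is a STRING schema whose
        // parameters carry the enum name and a symbol-to-index mapping under
        // "org.apache.kafka.connect.data.Enum" (ConnectEnum.LOGICAL_PARAMETER),
        // as seen in the test parameters above.
        String enumParam = "org.apache.kafka.connect.data.Enum";
        Schema colorSchema = SchemaBuilder.string().name("Color")
            .parameter(enumParam, "Color")
            .parameter(enumParam + ".RED", "0")
            .parameter(enumParam + ".GREEN", "1")
            .build();
        // The string symbol converts to a GenericData.EnumSymbol of the Avro enum.
        Object avroValue = avroData.fromConnectData(colorSchema, "GREEN");
        System.out.println(avroValue); // prints GREEN
      }
    }
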
diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 9f2e99b617d..483a5de7692 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -36,6 +36,11 @@ kafka-json-schema-serializer ${io.confluent.schema-registry.version} + + io.confluent + kafka-schema-converter + ${io.confluent.schema-registry.version} + io.confluent kafka-schema-serializer diff --git a/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java b/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java index 5b1cffe64bd..cc67bce67fc 100644 --- a/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java +++ b/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java @@ -21,6 +21,8 @@ import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.annotations.VisibleForTesting; +import io.confluent.connect.schema.ConnectEnum; +import io.confluent.connect.schema.ConnectUnion; import io.confluent.kafka.schemaregistry.json.jackson.Jackson; import io.confluent.kafka.schemaregistry.utils.BoundedConcurrentHashMap; import java.util.HashSet; @@ -94,9 +96,14 @@ public class JsonSchemaData { public static final String DEFAULT_ID_PREFIX = "#id"; public static final String JSON_ID_PROP = NAMESPACE + ".Id"; public static final String JSON_TYPE_ENUM = NAMESPACE + ".Enum"; - public static final String JSON_TYPE_ENUM_PREFIX = JSON_TYPE_ENUM + "."; public static final String JSON_TYPE_ONE_OF = NAMESPACE + ".OneOf"; + public static final String GENERALIZED_TYPE_UNION = ConnectUnion.LOGICAL_PARAMETER; + public static final String GENERALIZED_TYPE_ENUM = ConnectEnum.LOGICAL_PARAMETER; + public static final String GENERALIZED_TYPE_UNION_PREFIX = "connect_union_"; + public static final String GENERALIZED_TYPE_UNION_FIELD_PREFIX = + GENERALIZED_TYPE_UNION_PREFIX + "field_"; + private static final JsonNodeFactory JSON_NODE_FACTORY = JsonNodeFactory.withExactBigDecimals(true); @@ -174,14 +181,18 @@ public class JsonSchemaData { return result; }); TO_CONNECT_CONVERTERS.put(Schema.Type.STRUCT, (schema, value) -> { - if (schema.name() != null && schema.name().equals(JSON_TYPE_ONE_OF)) { + if (isUnionSchema(schema)) { + boolean generalizedSumTypeSupport = ConnectUnion.isUnion(schema); + String fieldNamePrefix = generalizedSumTypeSupport + ? GENERALIZED_TYPE_UNION_FIELD_PREFIX + : JSON_TYPE_ONE_OF + ".field."; int numMatchingProperties = -1; Field matchingField = null; for (Field field : schema.fields()) { Schema fieldSchema = field.schema(); if (isInstanceOfSchemaTypeForSimpleSchema(fieldSchema, value)) { - return new Struct(schema.schema()).put(JSON_TYPE_ONE_OF + ".field." + field.index(), + return new Struct(schema.schema()).put(fieldNamePrefix + field.index(), toConnectData(fieldSchema, value) ); } else { @@ -194,7 +205,7 @@ public class JsonSchemaData { } if (matchingField != null) { return new Struct(schema.schema()).put( - JSON_TYPE_ONE_OF + ".field." 
+ matchingField.index(), + fieldNamePrefix + matchingField.index(), toConnectData(matchingField.schema(), value) ); } @@ -362,6 +373,7 @@ private static int matchStructSchema(Schema fieldSchema, JsonNode value) { private JsonSchemaDataConfig config; private Map fromConnectSchemaCache; private Map toConnectSchemaCache; + private boolean generalizedSumTypeSupport; public JsonSchemaData() { this(new JsonSchemaDataConfig.Builder().with( @@ -374,6 +386,7 @@ public JsonSchemaData(JsonSchemaDataConfig jsonSchemaDataConfig) { this.config = jsonSchemaDataConfig; fromConnectSchemaCache = new BoundedConcurrentHashMap<>(jsonSchemaDataConfig.schemaCacheSize()); toConnectSchemaCache = new BoundedConcurrentHashMap<>(jsonSchemaDataConfig.schemaCacheSize()); + generalizedSumTypeSupport = jsonSchemaDataConfig.isGeneralizedSumTypeSupport(); } /** @@ -504,7 +517,7 @@ public JsonNode fromConnectData(Schema schema, Object logicalValue) { } //This handles the inverting of a union which is held as a struct, where each field is // one of the union types. - if (JSON_TYPE_ONE_OF.equals(schema.name())) { + if (isUnionSchema(schema)) { for (Field field : schema.fields()) { Object object = struct.get(field); if (object != null) { @@ -667,11 +680,15 @@ private org.everit.json.schema.Schema rawSchemaFromConnectSchema( builder = BooleanSchema.builder(); break; case STRING: - if (schema.parameters() != null && schema.parameters().containsKey(JSON_TYPE_ENUM)) { + if (schema.parameters() != null + && (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM) + || schema.parameters().containsKey(JSON_TYPE_ENUM))) { EnumSchema.Builder enumBuilder = EnumSchema.builder(); + String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : JSON_TYPE_ENUM; for (Map.Entry entry : schema.parameters().entrySet()) { - if (entry.getKey().startsWith(JSON_TYPE_ENUM_PREFIX)) { - enumBuilder.possibleValue(entry.getValue()); + if (entry.getKey().startsWith(paramName + ".")) { + String enumSymbol = entry.getKey().substring(paramName.length() + 1); + enumBuilder.possibleValue(enumSymbol); } } builder = enumBuilder; @@ -709,7 +726,7 @@ private org.everit.json.schema.Schema rawSchemaFromConnectSchema( } break; case STRUCT: - if (JSON_TYPE_ONE_OF.equals(schema.name())) { + if (isUnionSchema(schema)) { CombinedSchema.Builder combinedBuilder = CombinedSchema.builder(); combinedBuilder.criterion(CombinedSchema.ONE_CRITERION); if (schema.isOptional()) { @@ -923,19 +940,28 @@ private Schema toConnectSchema( } else if (jsonSchema instanceof EnumSchema) { EnumSchema enumSchema = (EnumSchema) jsonSchema; builder = SchemaBuilder.string(); - builder.parameter(JSON_TYPE_ENUM, - "" - ); // JSON enums have no name, use empty string as placeholder + String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : JSON_TYPE_ENUM; + builder.parameter(paramName, ""); // JSON enums have no name, use empty string as placeholder + int symbolIndex = 0; for (Object enumObj : enumSchema.getPossibleValuesAsList()) { String enumSymbol = enumObj.toString(); - builder.parameter(JSON_TYPE_ENUM_PREFIX + enumSymbol, enumSymbol); + if (generalizedSumTypeSupport) { + builder.parameter(paramName + "." + enumSymbol, String.valueOf(symbolIndex)); + } else { + builder.parameter(paramName + "." 
+ enumSymbol, enumSymbol); + } + symbolIndex++; } } else if (jsonSchema instanceof CombinedSchema) { CombinedSchema combinedSchema = (CombinedSchema) jsonSchema; CombinedSchema.ValidationCriterion criterion = combinedSchema.getCriterion(); String name = null; if (criterion == CombinedSchema.ONE_CRITERION || criterion == CombinedSchema.ANY_CRITERION) { - name = JSON_TYPE_ONE_OF; + if (generalizedSumTypeSupport) { + name = GENERALIZED_TYPE_UNION_PREFIX + ctx.getAndIncrementUnionIndex(); + } else { + name = JSON_TYPE_ONE_OF; + } } else if (criterion == CombinedSchema.ALL_CRITERION) { return allOfToConnectSchema(ctx, combinedSchema, version, forceOptional); } else { @@ -957,12 +983,18 @@ private Schema toConnectSchema( } int index = 0; builder = SchemaBuilder.struct().name(name); + if (generalizedSumTypeSupport) { + builder.parameter(GENERALIZED_TYPE_UNION, name); + } for (org.everit.json.schema.Schema subSchema : combinedSchema.getSubschemas()) { if (subSchema instanceof NullSchema) { builder.optional(); } else { - String subFieldName = name + ".field." + index++; + String subFieldName = generalizedSumTypeSupport + ? GENERALIZED_TYPE_UNION_FIELD_PREFIX + index + : name + ".field." + index; builder.field(subFieldName, toConnectSchema(ctx, subSchema, null, true)); + index++; } } } else if (jsonSchema instanceof ArraySchema) { @@ -1155,6 +1187,10 @@ private void collectPropertySchemas( } } + private static boolean isUnionSchema(Schema schema) { + return JSON_TYPE_ONE_OF.equals(schema.name()) || ConnectUnion.isUnion(schema); + } + private interface JsonToConnectTypeConverter { Object convert(Schema schema, JsonNode value); } @@ -1319,6 +1355,7 @@ public Schema schema() { private static class ToConnectContext { private final Map schemaToStructMap; private int idIndex = 0; + private int unionIndex = 0; public ToConnectContext() { this.schemaToStructMap = new IdentityHashMap<>(); @@ -1335,6 +1372,10 @@ public void put(org.everit.json.schema.Schema schema, SchemaBuilder builder) { public int incrementAndGetIdIndex() { return ++idIndex; } + + public int getAndIncrementUnionIndex() { + return unionIndex++; + } } /** diff --git a/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaDataConfig.java b/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaDataConfig.java index 3af561dcf15..e4b02cb953e 100644 --- a/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaDataConfig.java +++ b/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaDataConfig.java @@ -32,6 +32,12 @@ public class JsonSchemaDataConfig extends AbstractConfig { + public static final String GENERALIZED_SUM_TYPE_SUPPORT_CONFIG = "generalized.sum.type.support"; + public static final boolean GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT = false; + public static final String GENERALIZED_SUM_TYPE_SUPPORT_DOC = + "Toggle for enabling/disabling generalized sum type support: interoperability of enum/union " + + "with other schema formats"; + public static final String OBJECT_ADDITIONAL_PROPERTIES_CONFIG = "object.additional.properties"; public static final boolean OBJECT_ADDITIONAL_PROPERTIES_DEFAULT = true; public static final String OBJECT_ADDITIONAL_PROPERTIES_DOC = @@ -54,6 +60,12 @@ public class JsonSchemaDataConfig extends AbstractConfig { public static ConfigDef baseConfigDef() { return new ConfigDef().define( + GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, + ConfigDef.Type.BOOLEAN, + GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT, + ConfigDef.Importance.MEDIUM, + 
GENERALIZED_SUM_TYPE_SUPPORT_DOC + ).define( OBJECT_ADDITIONAL_PROPERTIES_CONFIG, ConfigDef.Type.BOOLEAN, OBJECT_ADDITIONAL_PROPERTIES_DEFAULT, @@ -86,6 +98,10 @@ public JsonSchemaDataConfig(Map props) { super(baseConfigDef(), props); } + public boolean isGeneralizedSumTypeSupport() { + return this.getBoolean(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG); + } + public boolean allowAdditionalProperties() { return getBoolean(OBJECT_ADDITIONAL_PROPERTIES_CONFIG); } diff --git a/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java b/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java index 274ed33223f..3be7c591626 100644 --- a/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java +++ b/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java @@ -34,7 +34,9 @@ import io.confluent.connect.json.JsonSchemaData.SchemaWrapper; import io.confluent.kafka.schemaregistry.json.jackson.Jackson; import java.util.ArrayList; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import org.apache.kafka.connect.data.ConnectSchema; import org.apache.kafka.connect.data.Decimal; import org.apache.kafka.connect.data.Schema; @@ -62,6 +64,10 @@ import static io.confluent.connect.json.JsonSchemaData.CONNECT_TYPE_MAP; import static io.confluent.connect.json.JsonSchemaData.CONNECT_TYPE_PROP; +import static io.confluent.connect.json.JsonSchemaData.GENERALIZED_TYPE_ENUM; +import static io.confluent.connect.json.JsonSchemaData.GENERALIZED_TYPE_UNION; +import static io.confluent.connect.json.JsonSchemaData.GENERALIZED_TYPE_UNION_FIELD_PREFIX; +import static io.confluent.connect.json.JsonSchemaData.GENERALIZED_TYPE_UNION_PREFIX; import static io.confluent.connect.json.JsonSchemaData.JSON_TYPE_ENUM; import static io.confluent.connect.json.JsonSchemaData.JSON_TYPE_ONE_OF; import static io.confluent.connect.json.JsonSchemaData.KEY_FIELD; @@ -203,6 +209,31 @@ public void testFromConnectEnum() { checkNonObjectConversion(schema, TextNode.valueOf("one"), connectSchema, "one"); } + @Test + public void testFromConnectEnumWithGeneralizedSumTypeSupport() { + jsonSchemaData = + new JsonSchemaData(new JsonSchemaDataConfig( + Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true"))); + Map params = new LinkedHashMap<>(); + params.put("org.apache.kafka.connect.data.Enum", ""); + params.put("org.apache.kafka.connect.data.Enum.one", "0"); + params.put("org.apache.kafka.connect.data.Enum.two", "1"); + params.put("org.apache.kafka.connect.data.Enum.three", "2"); + EnumSchema schema = EnumSchema.builder() + .possibleValue("one") + .possibleValue("two") + .possibleValue("three") + .unprocessedProperties(Collections.singletonMap("connect.parameters", params)) + .build(); + Schema connectSchema = new SchemaBuilder(Schema.Type.STRING).parameter(GENERALIZED_TYPE_ENUM, "") + .parameter(GENERALIZED_TYPE_ENUM + ".one", "0") + .parameter(GENERALIZED_TYPE_ENUM + ".two", "1") + .parameter(GENERALIZED_TYPE_ENUM + ".three", "2") + .build(); + + checkNonObjectConversion(schema, TextNode.valueOf("one"), connectSchema, "one"); + } + @Test public void testFromConnectUnion() { NumberSchema firstSchema = NumberSchema.builder() @@ -224,6 +255,31 @@ public void testFromConnectUnion() { checkNonObjectConversion(schema, ShortNode.valueOf((short) 12), connectSchema, actual); } + @Test + public void testFromConnectUnionWithGeneralizedSumTypeSupport() { + jsonSchemaData = + new 
JsonSchemaData(new JsonSchemaDataConfig( + Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true"))); + NumberSchema firstSchema = NumberSchema.builder() + .requiresInteger(true) + .unprocessedProperties(ImmutableMap.of("connect.type", "int8", "connect.index", 0)) + .build(); + NumberSchema secondSchema = NumberSchema.builder() + .requiresInteger(true) + .unprocessedProperties(ImmutableMap.of("connect.type", "int16", "connect.index", 1)) + .build(); + CombinedSchema schema = CombinedSchema.oneOf(ImmutableList.of(firstSchema, secondSchema)) + .build(); + SchemaBuilder builder = SchemaBuilder.struct().name("connect_union_0"); + builder.field(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "0", Schema.INT8_SCHEMA); + builder.field(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "1", Schema.INT16_SCHEMA); + builder.parameter(GENERALIZED_TYPE_UNION, "connect_union_0"); + Schema connectSchema = builder.build(); + + Struct actual = new Struct(connectSchema).put(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "0", (byte) 12); + checkNonObjectConversion(schema, ShortNode.valueOf((short) 12), connectSchema, actual); + } + @Test public void testFromConnectUnionDifferentStruct() { NumberSchema numberSchema = NumberSchema.builder() @@ -1113,6 +1169,25 @@ public void testToConnectEnum() { checkNonObjectConversion(expectedSchema, "one", schema, TextNode.valueOf("one")); } + @Test + public void testToConnectEnumWithGeneralizedSumTypeSupport() { + jsonSchemaData = + new JsonSchemaData(new JsonSchemaDataConfig( + Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true"))); + EnumSchema schema = EnumSchema.builder() + .possibleValue("one") + .possibleValue("two") + .possibleValue("three") + .build(); + Schema expectedSchema = new SchemaBuilder(Schema.Type.STRING).parameter(GENERALIZED_TYPE_ENUM, "") + .parameter(GENERALIZED_TYPE_ENUM + ".one", "0") + .parameter(GENERALIZED_TYPE_ENUM + ".two", "1") + .parameter(GENERALIZED_TYPE_ENUM + ".three", "2") + .build(); + + checkNonObjectConversion(expectedSchema, "one", schema, TextNode.valueOf("one")); + } + @Test public void testToConnectEnumInAllOf() { StringSchema stringSchema = StringSchema.builder().build(); @@ -1226,6 +1301,31 @@ public void testToConnectUnion() { checkNonObjectConversion(expectedSchema, expected, schema, ShortNode.valueOf((short) 12)); } + @Test + public void testToConnectUnionWithGeneralizedSumTypeSupport() { + jsonSchemaData = + new JsonSchemaData(new JsonSchemaDataConfig( + Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true"))); + NumberSchema firstSchema = NumberSchema.builder() + .requiresInteger(true) + .unprocessedProperties(Collections.singletonMap("connect.type", "int8")) + .build(); + NumberSchema secondSchema = NumberSchema.builder() + .requiresInteger(true) + .unprocessedProperties(Collections.singletonMap("connect.type", "int16")) + .build(); + CombinedSchema schema = CombinedSchema.oneOf(ImmutableList.of(firstSchema, secondSchema)) + .build(); + SchemaBuilder builder = SchemaBuilder.struct().name("connect_union_0"); + builder.field(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "0", Schema.OPTIONAL_INT8_SCHEMA); + builder.field(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "1", Schema.OPTIONAL_INT16_SCHEMA); + builder.parameter(GENERALIZED_TYPE_UNION, "connect_union_0"); + Schema expectedSchema = builder.build(); + + Struct expected = new Struct(expectedSchema).put(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "0", (byte) 12); + checkNonObjectConversion(expectedSchema, expected, 
schema, ShortNode.valueOf((short) 12)); + } + @Test public void testToConnectUnionSecondField() { StringSchema firstSchema = StringSchema.builder() diff --git a/pom.xml b/pom.xml index 45cc4fdcc3c..a633382e6ee 100644 --- a/pom.xml +++ b/pom.xml @@ -50,6 +50,7 @@ schema-serializer avro-serializer json-serializer + schema-converter avro-data avro-converter package-schema-registry diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index e8211580504..0f3b7a52de3 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -56,6 +56,11 @@ org.jetbrains.kotlin kotlin-stdlib + + io.confluent + kafka-schema-converter + ${io.confluent.schema-registry.version} + io.confluent kafka-schema-serializer diff --git a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java index 847141a86be..5302a39be1f 100644 --- a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java +++ b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java @@ -33,6 +33,8 @@ import com.google.protobuf.Message; import com.google.protobuf.StringValue; import com.google.protobuf.util.Timestamps; +import io.confluent.connect.schema.ConnectEnum; +import io.confluent.connect.schema.ConnectUnion; import io.confluent.kafka.schemaregistry.utils.BoundedConcurrentHashMap; import io.confluent.protobuf.MetaProto; import io.confluent.protobuf.MetaProto.Meta; @@ -122,6 +124,9 @@ public class ProtobufData { public static final String CONNECT_TYPE_INT8 = "int8"; public static final String CONNECT_TYPE_INT16 = "int16"; + public static final String GENERALIZED_TYPE_UNION = ConnectUnion.LOGICAL_PARAMETER; + public static final String GENERALIZED_TYPE_ENUM = ConnectEnum.LOGICAL_PARAMETER; + private static final long MILLIS_PER_DAY = 24 * 60 * 60 * 1000; private static final int MILLIS_PER_NANO = 1_000_000; private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); @@ -289,6 +294,7 @@ public class ProtobufData { private final Map fromConnectSchemaCache; private final Map, Schema> toConnectSchemaCache; + private boolean generalizedSumTypeSupport; private boolean enhancedSchemaSupport; private boolean scrubInvalidNames; private boolean useIntForEnums; @@ -310,6 +316,7 @@ public ProtobufData(int cacheSize) { public ProtobufData(ProtobufDataConfig protobufDataConfig) { fromConnectSchemaCache = new BoundedConcurrentHashMap<>(protobufDataConfig.schemaCacheSize()); toConnectSchemaCache = new BoundedConcurrentHashMap<>(protobufDataConfig.schemaCacheSize()); + this.generalizedSumTypeSupport = protobufDataConfig.isGeneralizedSumTypeSupportDefault(); this.enhancedSchemaSupport = protobufDataConfig.isEnhancedProtobufSchemaSupport(); this.scrubInvalidNames = protobufDataConfig.isScrubInvalidNames(); this.useIntForEnums = protobufDataConfig.useIntForEnums(); @@ -409,9 +416,13 @@ private Object fromConnectData( case STRING: { final String stringValue = (String) value; // Check for correct type - if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) { - String enumType = schema.parameters().get(PROTOBUF_TYPE_ENUM); - String tag = schema.parameters().get(PROTOBUF_TYPE_ENUM_PREFIX + stringValue); + if (schema.parameters() != null + && (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM) + || schema.parameters().containsKey(PROTOBUF_TYPE_ENUM))) { + String paramName = generalizedSumTypeSupport + ? 
GENERALIZED_TYPE_ENUM : PROTOBUF_TYPE_ENUM; + String enumType = schema.parameters().get(paramName); + String tag = schema.parameters().get(paramName + "." + stringValue); if (tag != null) { return protobufSchema.getEnumValue(scope + enumType, Integer.parseInt(tag)); } } @@ -472,10 +483,9 @@ private Object fromConnectData( if (!struct.schema().equals(schema)) { throw new DataException("Mismatching struct schema"); } - String structName = schema.name(); //This handles the inverting of a union which is held as a struct, where each field is // one of the union types. - if (structName != null && structName.startsWith(PROTOBUF_TYPE_UNION_PREFIX)) { + if (isUnionSchema(schema)) { for (Field field : schema.fields()) { Object object = struct.get(field); if (object != null) { @@ -711,6 +721,7 @@ private MessageDefinition messageDefinitionFromConnectSchema( if (fieldDef != null) { boolean isProto3Optional = "optional".equals(fieldDef.getLabel()); if (isProto3Optional) { + // Add a synthetic oneof MessageDefinition.OneofBuilder oneofBuilder = message.addOneof("_" + fieldDef.getName()); oneofBuilder.addField( true, @@ -793,9 +804,11 @@ private FieldDefinition fieldDefinitionFromConnectSchema( Object defaultVal = null; if (fieldSchema.type() == Schema.Type.STRUCT) { String fieldSchemaName = fieldSchema.name(); - if (fieldSchemaName != null && fieldSchemaName.startsWith(PROTOBUF_TYPE_UNION_PREFIX)) { - String unionName = - getUnqualifiedName(ctx, fieldSchemaName.substring(PROTOBUF_TYPE_UNION_PREFIX.length())); + if (isUnionSchema(fieldSchema)) { + String unionName = generalizedSumTypeSupport + ? fieldSchema.parameters().get(GENERALIZED_TYPE_UNION) + : getUnqualifiedName( + ctx, fieldSchemaName.substring(PROTOBUF_TYPE_UNION_PREFIX.length())); oneofDefinitionFromConnectSchema(ctx, schema, message, fieldSchema, unionName); return null; } else { @@ -812,8 +825,9 @@ private FieldDefinition fieldDefinitionFromConnectSchema( } else if (fieldSchema.type() == Schema.Type.MAP) { message.addMessageDefinition( mapDefinitionFromConnectSchema(ctx, schema, type, fieldSchema)); - } else if (fieldSchema.parameters() != null && fieldSchema.parameters() .containsKey(PROTOBUF_TYPE_ENUM)) { + } else if (fieldSchema.parameters() != null + && (fieldSchema.parameters().containsKey(GENERALIZED_TYPE_ENUM) + || fieldSchema.parameters().containsKey(PROTOBUF_TYPE_ENUM))) { String enumName = getUnqualifiedName(ctx, fieldSchema.name()); if (!message.containsEnum(enumName)) { message.addEnumDefinition(enumDefinitionFromConnectSchema(ctx, schema, fieldSchema)); @@ -1000,15 +1014,16 @@ private EnumDefinition enumDefinitionFromConnectSchema( Schema enumElem ) { String enumName = getUnqualifiedName(ctx, enumElem.name()); - EnumDefinition.Builder enumer = EnumDefinition.newBuilder(enumName); + EnumDefinition.Builder enumBuilder = EnumDefinition.newBuilder(enumName); + String paramName = generalizedSumTypeSupport ?
GENERALIZED_TYPE_ENUM : PROTOBUF_TYPE_ENUM; for (Map.Entry entry : enumElem.parameters().entrySet()) { - if (entry.getKey().startsWith(PROTOBUF_TYPE_ENUM_PREFIX)) { - String name = entry.getKey().substring(PROTOBUF_TYPE_ENUM_PREFIX.length()); + if (entry.getKey().startsWith(paramName + ".")) { + String name = entry.getKey().substring(paramName.length() + 1); int tag = Integer.parseInt(entry.getValue()); - enumer.addValue(name, tag); + enumBuilder.addValue(name, tag); } } - return enumer.build(); + return enumBuilder.build(); } private String dataTypeFromConnectSchema( @@ -1082,8 +1097,12 @@ private String dataTypeFromConnectSchema( return useWrapperForNullables && schema.isOptional() ? PROTOBUF_BOOL_WRAPPER_TYPE : FieldDescriptor.Type.BOOL.toString().toLowerCase(); case STRING: - if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) { - return schema.parameters().get(PROTOBUF_TYPE_ENUM); + if (schema.parameters() != null) { + if (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM)) { + return schema.parameters().get(GENERALIZED_TYPE_ENUM); + } else if (schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) { + return schema.parameters().get(PROTOBUF_TYPE_ENUM); + } } return useWrapperForNullables && schema.isOptional() ? PROTOBUF_STRING_WRAPPER_TYPE : FieldDescriptor.Type.STRING.toString().toLowerCase(); @@ -1123,6 +1142,11 @@ private boolean isTimestampSchema(Schema schema) { return Timestamp.LOGICAL_NAME.equals(schema.name()); } + private static boolean isUnionSchema(Schema schema) { + return (schema.name() != null && schema.name().startsWith(PROTOBUF_TYPE_UNION)) + || ConnectUnion.isUnion(schema); + } + public SchemaAndValue toConnectData(ProtobufSchema protobufSchema, Message message) { if (message == null) { return SchemaAndValue.NULL; @@ -1296,8 +1320,8 @@ private void setUnionField( OneofDescriptor oneOfDescriptor, FieldDescriptor fieldDescriptor ) { - String unionName = oneOfDescriptor.getName() + "_" + oneOfDescriptor.getIndex(); - Field unionField = schema.field(unionName); + String unionFieldName = unionFieldName(oneOfDescriptor); + Field unionField = schema.field(unionFieldName); Schema unionSchema = unionField.schema(); Struct union = new Struct(unionSchema); @@ -1309,6 +1333,10 @@ private void setUnionField( result.put(unionField, union); } + private String unionFieldName(OneofDescriptor oneofDescriptor) { + return oneofDescriptor.getName() + "_" + oneofDescriptor.getIndex(); + } + private void setStructField( Schema schema, Message message, @@ -1362,8 +1390,8 @@ private SchemaBuilder toConnectSchema( builder.name(name); List oneOfDescriptors = descriptor.getRealOneofs(); for (OneofDescriptor oneOfDescriptor : oneOfDescriptors) { - String unionName = oneOfDescriptor.getName() + "_" + oneOfDescriptor.getIndex(); - builder.field(unionName, toConnectSchema(ctx, oneOfDescriptor)); + String unionFieldName = unionFieldName(oneOfDescriptor); + builder.field(unionFieldName, toConnectSchema(ctx, oneOfDescriptor)); } List fieldDescriptors = descriptor.getFields(); for (FieldDescriptor fieldDescriptor : fieldDescriptors) { @@ -1385,8 +1413,14 @@ private SchemaBuilder toConnectSchema( private Schema toConnectSchema(ToConnectContext ctx, OneofDescriptor descriptor) { SchemaBuilder builder = SchemaBuilder.struct(); - String name = enhancedSchemaSupport ? 
descriptor.getFullName() : descriptor.getName(); - builder.name(PROTOBUF_TYPE_UNION_PREFIX + name); + if (generalizedSumTypeSupport) { + String name = descriptor.getName(); + builder.name(name); + builder.parameter(GENERALIZED_TYPE_UNION, name); + } else { + String name = enhancedSchemaSupport ? descriptor.getFullName() : descriptor.getName(); + builder.name(PROTOBUF_TYPE_UNION_PREFIX + name); + } List fieldDescriptors = descriptor.getFields(); for (FieldDescriptor fieldDescriptor : fieldDescriptors) { builder.field(fieldDescriptor.getName(), toConnectSchema(ctx, fieldDescriptor)); @@ -1466,11 +1500,12 @@ private Schema toConnectSchema(ToConnectContext ctx, FieldDescriptor descriptor) String name = enhancedSchemaSupport ? enumDescriptor.getFullName() : enumDescriptor.getName(); builder.name(name); - builder.parameter(PROTOBUF_TYPE_ENUM, enumDescriptor.getName()); + String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : PROTOBUF_TYPE_ENUM; + builder.parameter(paramName, enumDescriptor.getName()); for (EnumValueDescriptor enumValueDesc : enumDescriptor.getValues()) { String enumSymbol = enumValueDesc.getName(); String enumTag = String.valueOf(enumValueDesc.getNumber()); - builder.parameter(PROTOBUF_TYPE_ENUM_PREFIX + enumSymbol, enumTag); + builder.parameter(paramName + "." + enumSymbol, enumTag); } builder.optional(); break; diff --git a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java index 3a53e416954..9aebe918b09 100644 --- a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java +++ b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java @@ -24,6 +24,12 @@ public class ProtobufDataConfig extends AbstractConfig { + public static final String GENERALIZED_SUM_TYPE_SUPPORT_CONFIG = "generalized.sum.type.support"; + public static final boolean GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT = false; + public static final String GENERALIZED_SUM_TYPE_SUPPORT_DOC = + "Toggle for enabling/disabling generalized sum type support: interoperability of enum/union " + + "with other schema formats"; + public static final String ENHANCED_PROTOBUF_SCHEMA_SUPPORT_CONFIG = "enhanced.protobuf.schema.support"; public static final boolean ENHANCED_PROTOBUF_SCHEMA_SUPPORT_DEFAULT = false; @@ -61,6 +67,11 @@ public class ProtobufDataConfig extends AbstractConfig { public static ConfigDef baseConfigDef() { return new ConfigDef() + .define(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, + ConfigDef.Type.BOOLEAN, + GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT, + ConfigDef.Importance.MEDIUM, + GENERALIZED_SUM_TYPE_SUPPORT_DOC) .define(ENHANCED_PROTOBUF_SCHEMA_SUPPORT_CONFIG, ConfigDef.Type.BOOLEAN, ENHANCED_PROTOBUF_SCHEMA_SUPPORT_DEFAULT, @@ -100,6 +111,10 @@ public ProtobufDataConfig(Map props) { super(baseConfigDef(), props); } + public boolean isGeneralizedSumTypeSupportDefault() { + return this.getBoolean(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG); + } + public boolean isEnhancedProtobufSchemaSupport() { return this.getBoolean(ENHANCED_PROTOBUF_SCHEMA_SUPPORT_CONFIG); } diff --git a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java index 1927b5617cb..76502923fbb 100644 --- a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java +++ 
b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java @@ -88,9 +88,11 @@ import io.confluent.kafka.serializers.protobuf.test.TimestampValueOuterClass.TimestampValue; import io.confluent.kafka.serializers.protobuf.test.UInt32ValueOuterClass; +import static io.confluent.connect.protobuf.ProtobufData.GENERALIZED_TYPE_UNION; import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_ENUM; import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_PROP; import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_TAG; +import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_UNION; import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_UNION_PREFIX; import static io.confluent.kafka.serializers.protobuf.test.TimestampValueOuterClass.TimestampValue.newBuilder; import static org.junit.Assert.assertArrayEquals; @@ -255,6 +257,46 @@ private SchemaBuilder getEnumUnionSchemaBuilder() { return enumUnionBuilder; } + private SchemaBuilder getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport() { + final SchemaBuilder enumUnionBuilder = SchemaBuilder.struct(); + enumUnionBuilder.name("EnumUnion"); + final SchemaBuilder someValBuilder = SchemaBuilder.struct(); + someValBuilder.name("some_val"); + someValBuilder.parameter(GENERALIZED_TYPE_UNION, "some_val"); + someValBuilder.field( + "one_id", + SchemaBuilder.string().optional().parameter(PROTOBUF_TYPE_TAG, String.valueOf(1)).build() + ); + someValBuilder.field( + "other_id", + SchemaBuilder.int32().optional().parameter(PROTOBUF_TYPE_TAG, String.valueOf(2)).build() + ); + someValBuilder.field( + "some_status", + SchemaBuilder.string() + .name("Status") + .optional() + .parameter(PROTOBUF_TYPE_TAG, String.valueOf(3)) + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM, "Status") + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".ACTIVE", "0") + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".INACTIVE", "1") + .build() + ); + enumUnionBuilder.field("some_val_0", someValBuilder.optional().build()); + enumUnionBuilder.field( + "status", + SchemaBuilder.string() + .name("Status") + .optional() + .parameter(PROTOBUF_TYPE_TAG, String.valueOf(4)) + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM, "Status") + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".ACTIVE", "0") + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".INACTIVE", "1") + .build() + ); + return enumUnionBuilder; + } + private Struct getEnumUnionWithString() throws ParseException { Schema schema = getEnumUnionSchemaBuilder().build(); Struct result = new Struct(schema.schema()); @@ -265,6 +307,16 @@ private Struct getEnumUnionWithString() throws ParseException { return result; } + private Struct getEnumUnionWithStringWithGeneralizedSumTypeSupport() throws ParseException { + Schema schema = getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build(); + Struct result = new Struct(schema.schema()); + Struct union = new Struct(schema.field("some_val_0").schema()); + union.put("one_id", "ID"); + result.put("some_val_0", union); + result.put("status", "INACTIVE"); + return result; + } + private Struct getEnumUnionWithSomeStatus() throws ParseException { Schema schema = getEnumUnionSchemaBuilder().build(); Struct result = new Struct(schema.schema()); @@ -275,6 +327,16 @@ private Struct getEnumUnionWithSomeStatus() throws ParseException { return result; } + private Struct getEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport() throws ParseException { + Schema schema = 
getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build(); + Struct result = new Struct(schema.schema()); + Struct union = new Struct(schema.field("some_val_0").schema()); + union.put("some_status", "INACTIVE"); + result.put("some_val_0", union); + result.put("status", "INACTIVE"); + return result; + } + private SchemaBuilder getComplexTypeSchemaBuilder() { final SchemaBuilder complexTypeBuilder = SchemaBuilder.struct(); complexTypeBuilder.name("ComplexType"); @@ -636,9 +698,23 @@ public void testToConnectDataDefaultOneOfCannotHaveTwoOneOfsSet() throws Excepti public void testToConnectEnumUnionWithString() throws Exception { EnumUnion message = createEnumUnionWithString(); SchemaAndValue result = getSchemaAndValue(message); - Schema expectedSchema = getEnumUnionSchemaBuilder().build(); + Schema expectedSchema = getEnumUnionSchemaBuilder().build(); + assertSchemasEqual(expectedSchema, result.schema()); + Struct expected = getEnumUnionWithString(); + assertEquals(expected, result.value()); + } + + @Test + public void testToConnectEnumUnionWithStringWithGeneralizedSumTypeSupport() throws Exception { + EnumUnion message = createEnumUnionWithString(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true") + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue result = getSchemaAndValue(protobufData, message); + Schema expectedSchema = getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build(); assertSchemasEqual(expectedSchema, result.schema()); - Struct expected = getEnumUnionWithString(); + Struct expected = getEnumUnionWithStringWithGeneralizedSumTypeSupport(); assertEquals(expected, result.value()); } @@ -646,9 +722,23 @@ public void testToConnectEnumUnionWithString() throws Exception { public void testToConnectEnumUnionWithSomeStatus() throws Exception { EnumUnion message = createEnumUnionWithSomeStatus(); SchemaAndValue result = getSchemaAndValue(message); - Schema expectedSchema = getEnumUnionSchemaBuilder().build(); + Schema expectedSchema = getEnumUnionSchemaBuilder().build(); + assertSchemasEqual(expectedSchema, result.schema()); + Struct expected = getEnumUnionWithSomeStatus(); + assertEquals(expected, result.value()); + } + + @Test + public void testToConnectEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport() throws Exception { + EnumUnion message = createEnumUnionWithSomeStatus(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true") + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue result = getSchemaAndValue(protobufData, message); + Schema expectedSchema = getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build(); assertSchemasEqual(expectedSchema, result.schema()); - Struct expected = getEnumUnionWithSomeStatus(); + Struct expected = getEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport(); assertEquals(expected, result.value()); } @@ -1140,6 +1230,32 @@ public void testFromConnectEnumUnionWithSomeStatus() throws Exception { assertArrayEquals(messageBytes, message.toByteArray()); } + @Test + public void testFromConnectEnumUnionWithStringWithGeneralizedSumTypeSupport() throws Exception { + EnumUnion message = createEnumUnionWithString(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true") + 
.build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, message); + byte[] messageBytes = getMessageBytes(protobufData, schemaAndValue); + + assertArrayEquals(messageBytes, message.toByteArray()); + } + + @Test + public void testFromConnectEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport() throws Exception { + EnumUnion message = createEnumUnionWithSomeStatus(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true") + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, message); + byte[] messageBytes = getMessageBytes(protobufData, schemaAndValue); + + assertArrayEquals(messageBytes, message.toByteArray()); + } + @Test public void testFromConnectDataWithNestedProtobufMessageAndStringUserId() throws Exception { NestedMessage nestedMessage = createNestedTestProtoStringUserId(); diff --git a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java index a7ea7ebc372..23e3dc026e9 100644 --- a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java +++ b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java @@ -24,6 +24,7 @@ import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import com.google.protobuf.DescriptorProtos.FileOptions; import com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode; +import com.google.protobuf.DescriptorProtos.ServiceDescriptorProto; import com.google.protobuf.Descriptors.Descriptor; import com.google.protobuf.Descriptors.DescriptorValidationException; import com.google.protobuf.Descriptors.EnumDescriptor; @@ -422,6 +423,16 @@ public Builder addEnumDefinition(EnumDefinition enumDef) { return this; } + public boolean containsService(String name) { + List services = mFileDescProtoBuilder.getServiceList(); + for (ServiceDescriptorProto service : services) { + if (service.getName().equals(name)) { + return true; + } + } + return false; + } + public Builder addServiceDefinition(ServiceDefinition serviceDef) { mFileDescProtoBuilder.addService(serviceDef.getServiceType()); return this; diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumReferenceOuter.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumReferenceOuter.java index 92175084f42..0178dba38bb 100644 --- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumReferenceOuter.java +++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumReferenceOuter.java @@ -32,7 +32,7 @@ public interface EnumReferenceOrBuilder extends /** * Protobuf type {@code EnumReference} */ - public static final class EnumReference extends + public static final class EnumReference extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:EnumReference) EnumReferenceOrBuilder { @@ -119,14 +119,14 @@ private EnumReference( * .EnumRoot enum_root = 1; * @return The enum numeric value on the wire for enumRoot. 
*/ - public int getEnumRootValue() { + @java.lang.Override public int getEnumRootValue() { return enumRoot_; } /** * .EnumRoot enum_root = 1; * @return The enumRoot. */ - public io.confluent.kafka.serializers.protobuf.test.EnumRootOuter.EnumRoot getEnumRoot() { + @java.lang.Override public io.confluent.kafka.serializers.protobuf.test.EnumRootOuter.EnumRoot getEnumRoot() { @SuppressWarnings("deprecation") io.confluent.kafka.serializers.protobuf.test.EnumRootOuter.EnumRoot result = io.confluent.kafka.serializers.protobuf.test.EnumRootOuter.EnumRoot.valueOf(enumRoot_); return result == null ? io.confluent.kafka.serializers.protobuf.test.EnumRootOuter.EnumRoot.UNRECOGNIZED : result; @@ -438,7 +438,7 @@ public Builder mergeFrom( * .EnumRoot enum_root = 1; * @return The enum numeric value on the wire for enumRoot. */ - public int getEnumRootValue() { + @java.lang.Override public int getEnumRootValue() { return enumRoot_; } /** @@ -447,6 +447,7 @@ public int getEnumRootValue() { * @return This builder for chaining. */ public Builder setEnumRootValue(int value) { + enumRoot_ = value; onChanged(); return this; @@ -455,6 +456,7 @@ public Builder setEnumRootValue(int value) { * .EnumRoot enum_root = 1; * @return The enumRoot. */ + @java.lang.Override public io.confluent.kafka.serializers.protobuf.test.EnumRootOuter.EnumRoot getEnumRoot() { @SuppressWarnings("deprecation") io.confluent.kafka.serializers.protobuf.test.EnumRootOuter.EnumRoot result = io.confluent.kafka.serializers.protobuf.test.EnumRootOuter.EnumRoot.valueOf(enumRoot_); diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumRootOuter.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumRootOuter.java index 35f8caf2c04..a245da16989 100644 --- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumRootOuter.java +++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumRootOuter.java @@ -84,6 +84,10 @@ public EnumRoot findValueByNumber(int number) { public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java index aa069b24dc0..212e9aaec47 100644 --- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java +++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java @@ -84,6 +84,10 @@ public Status findValueByNumber(int number) { public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { + if (this == UNRECOGNIZED) { + throw new java.lang.IllegalStateException( + "Can't get the descriptor of an unrecognized enum value."); + } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor @@ -122,6 +126,11 @@ public interface EnumUnionOrBuilder extends // @@protoc_insertion_point(interface_extends:io.confluent.kafka.serializers.protobuf.test.EnumUnion) com.google.protobuf.MessageOrBuilder { + /** + * string one_id = 1; + * @return Whether the oneId field 
is set. + */ + boolean hasOneId(); /** * string one_id = 1; * @return The oneId. @@ -134,12 +143,22 @@ public interface EnumUnionOrBuilder extends com.google.protobuf.ByteString getOneIdBytes(); + /** + * int32 other_id = 2; + * @return Whether the otherId field is set. + */ + boolean hasOtherId(); /** * int32 other_id = 2; * @return The otherId. */ int getOtherId(); + /** + * .io.confluent.kafka.serializers.protobuf.test.Status some_status = 3; + * @return Whether the someStatus field is set. + */ + boolean hasSomeStatus(); /** * .io.confluent.kafka.serializers.protobuf.test.Status some_status = 3; * @return The enum numeric value on the wire for someStatus. @@ -167,7 +186,7 @@ public interface EnumUnionOrBuilder extends /** * Protobuf type {@code io.confluent.kafka.serializers.protobuf.test.EnumUnion} */ - public static final class EnumUnion extends + public static final class EnumUnion extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:io.confluent.kafka.serializers.protobuf.test.EnumUnion) EnumUnionOrBuilder { @@ -309,6 +328,13 @@ public int getNumber() { } public static final int ONE_ID_FIELD_NUMBER = 1; + /** + * string one_id = 1; + * @return Whether the oneId field is set. + */ + public boolean hasOneId() { + return someValCase_ == 1; + } /** * string one_id = 1; * @return The oneId. @@ -354,10 +380,19 @@ public java.lang.String getOneId() { } public static final int OTHER_ID_FIELD_NUMBER = 2; + /** + * int32 other_id = 2; + * @return Whether the otherId field is set. + */ + @java.lang.Override + public boolean hasOtherId() { + return someValCase_ == 2; + } /** * int32 other_id = 2; * @return The otherId. */ + @java.lang.Override public int getOtherId() { if (someValCase_ == 2) { return (java.lang.Integer) someVal_; @@ -366,6 +401,13 @@ public int getOtherId() { } public static final int SOME_STATUS_FIELD_NUMBER = 3; + /** + * .io.confluent.kafka.serializers.protobuf.test.Status some_status = 3; + * @return Whether the someStatus field is set. + */ + public boolean hasSomeStatus() { + return someValCase_ == 3; + } /** * .io.confluent.kafka.serializers.protobuf.test.Status some_status = 3; * @return The enum numeric value on the wire for someStatus. @@ -396,14 +438,14 @@ public io.confluent.kafka.serializers.protobuf.test.EnumUnionOuter.Status getSom * .io.confluent.kafka.serializers.protobuf.test.Status status = 4; * @return The enum numeric value on the wire for status. */ - public int getStatusValue() { + @java.lang.Override public int getStatusValue() { return status_; } /** * .io.confluent.kafka.serializers.protobuf.test.Status status = 4; * @return The status. */ - public io.confluent.kafka.serializers.protobuf.test.EnumUnionOuter.Status getStatus() { + @java.lang.Override public io.confluent.kafka.serializers.protobuf.test.EnumUnionOuter.Status getStatus() { @SuppressWarnings("deprecation") io.confluent.kafka.serializers.protobuf.test.EnumUnionOuter.Status result = io.confluent.kafka.serializers.protobuf.test.EnumUnionOuter.Status.valueOf(status_); return result == null ? io.confluent.kafka.serializers.protobuf.test.EnumUnionOuter.Status.UNRECOGNIZED : result; @@ -811,10 +853,19 @@ public Builder clearSomeVal() { } + /** + * string one_id = 1; + * @return Whether the oneId field is set. + */ + @java.lang.Override + public boolean hasOneId() { + return someValCase_ == 1; + } /** * string one_id = 1; * @return The oneId. 
*/ + @java.lang.Override public java.lang.String getOneId() { java.lang.Object ref = ""; if (someValCase_ == 1) { @@ -836,6 +887,7 @@ public java.lang.String getOneId() { * string one_id = 1; * @return The bytes for oneId. */ + @java.lang.Override public com.google.protobuf.ByteString getOneIdBytes() { java.lang.Object ref = ""; @@ -898,6 +950,13 @@ public Builder setOneIdBytes( return this; } + /** + * int32 other_id = 2; + * @return Whether the otherId field is set. + */ + public boolean hasOtherId() { + return someValCase_ == 2; + } /** * int32 other_id = 2; * @return The otherId. @@ -932,10 +991,19 @@ public Builder clearOtherId() { return this; } + /** + * .io.confluent.kafka.serializers.protobuf.test.Status some_status = 3; + * @return Whether the someStatus field is set. + */ + @java.lang.Override + public boolean hasSomeStatus() { + return someValCase_ == 3; + } /** * .io.confluent.kafka.serializers.protobuf.test.Status some_status = 3; * @return The enum numeric value on the wire for someStatus. */ + @java.lang.Override public int getSomeStatusValue() { if (someValCase_ == 3) { return ((java.lang.Integer) someVal_).intValue(); @@ -957,6 +1025,7 @@ public Builder setSomeStatusValue(int value) { * .io.confluent.kafka.serializers.protobuf.test.Status some_status = 3; * @return The someStatus. */ + @java.lang.Override public io.confluent.kafka.serializers.protobuf.test.EnumUnionOuter.Status getSomeStatus() { if (someValCase_ == 3) { @SuppressWarnings("deprecation") @@ -998,7 +1067,7 @@ public Builder clearSomeStatus() { * .io.confluent.kafka.serializers.protobuf.test.Status status = 4; * @return The enum numeric value on the wire for status. */ - public int getStatusValue() { + @java.lang.Override public int getStatusValue() { return status_; } /** @@ -1007,6 +1076,7 @@ public int getStatusValue() { * @return This builder for chaining. */ public Builder setStatusValue(int value) { + status_ = value; onChanged(); return this; @@ -1015,6 +1085,7 @@ public Builder setStatusValue(int value) { * .io.confluent.kafka.serializers.protobuf.test.Status status = 4; * @return The status. */ + @java.lang.Override public io.confluent.kafka.serializers.protobuf.test.EnumUnionOuter.Status getStatus() { @SuppressWarnings("deprecation") io.confluent.kafka.serializers.protobuf.test.EnumUnionOuter.Status result = io.confluent.kafka.serializers.protobuf.test.EnumUnionOuter.Status.valueOf(status_); diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml new file mode 100644 index 00000000000..96a8d28e78b --- /dev/null +++ b/schema-converter/pom.xml @@ -0,0 +1,37 @@ + + + 4.0.0 + + + io.confluent + kafka-schema-registry-parent + 7.2.0-0 + + + + + Apache License 2.0 + http://www.apache.org/licenses/LICENSE-2.0.html + repo + + + + kafka-schema-converter + jar + kafka-schema-converter + + + + org.apache.kafka + connect-api + provided + + + junit + junit + test + + + diff --git a/schema-converter/src/main/java/io/confluent/connect/schema/ConnectEnum.java b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectEnum.java new file mode 100644 index 00000000000..3fd0b59b2c6 --- /dev/null +++ b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectEnum.java @@ -0,0 +1,135 @@ +/* + * Copyright 2021 Confluent Inc. + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.confluent.connect.schema; + +import java.util.List; +import java.util.Map; +import org.apache.kafka.connect.data.Schema; +import org.apache.kafka.connect.data.SchemaBuilder; +import org.apache.kafka.connect.errors.DataException; + +public class ConnectEnum { + + public static final String LOGICAL_PARAMETER = "org.apache.kafka.connect.data.Enum"; + + /** + * Returns a SchemaBuilder for an Enum. + * + * @param annotation an arbitrary annotation to be associated with the enum + * @param symbols the enum symbols + * @return a SchemaBuilder + */ + public static SchemaBuilder builder(String annotation, List symbols) { + SchemaBuilder builder = SchemaBuilder.string().parameter(LOGICAL_PARAMETER, annotation); + for (int i = 0; i < symbols.size(); i++) { + builder.parameter(LOGICAL_PARAMETER + "." + symbols.get(i), String.valueOf(i)); + } + return builder; + } + + /** + * Returns a SchemaBuilder for an Enum. + * + * @param annotation an arbitrary annotation to be associated with the enum + * @param symbols a map of enum symbol to its ordinal + * @return a SchemaBuilder + */ + public static SchemaBuilder builder(String annotation, Map symbols) { + SchemaBuilder builder = SchemaBuilder.string().parameter(LOGICAL_PARAMETER, annotation); + for (Map.Entry symbol : symbols.entrySet()) { + builder.parameter(LOGICAL_PARAMETER + "." + symbol.getKey(), + String.valueOf(symbol.getValue())); + } + return builder; + } + + /** + * Returns whether a schema represents an Enum. + * + * @param schema the schema + * @return whether the schema represents an Enum + */ + public static boolean isEnum(Schema schema) { + return schema != null + && schema.parameters() != null + && schema.parameters().containsKey(LOGICAL_PARAMETER); + } + + /** + * Returns whether a schema has an Enum symbol. + * + * @param schema the schema + * @param symbol the enum symbol + * @return whether the schema has the given Enum symbol + */ + public static boolean hasEnumSymbol(Schema schema, String symbol) { + return schema != null + && schema.parameters() != null + && schema.parameters().containsKey(LOGICAL_PARAMETER) + && schema.parameters().containsKey(LOGICAL_PARAMETER + "." + symbol); + } + + /** + * Convert a value from its logical format (Enum) to its encoded format. + * + * @param schema the schema + * @param value the logical value + * @return the encoded value + */ + public static > String fromLogical(Schema schema, T value) { + if (!hasEnumSymbol(schema, value.name())) { + throw new DataException( + "Requested conversion of Enum object but the schema does not match."); + } + return value.name(); + } + + /** + * Convert a value from its encoded format to its logical format (Enum).
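+ * Throws a DataException if the schema does not declare the given symbol.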
+ * + * @param schema the schema + * @param cls the class of the logical value + * @param symbol the enum symbol + * @return the logical value + */ + public static > T toLogical(Schema schema, Class cls, + String symbol) { + if (!hasEnumSymbol(schema, symbol)) { + throw new DataException( + "Requested conversion of Enum object but the schema does not match."); + } + return java.lang.Enum.valueOf(cls, symbol); + } + + /** + * Convert a value from its encoded format to its ordinal. + * + * @param schema the schema + * @param symbol the enum symbol + * @return the ordinal + */ + public static int toOrdinal(Schema schema, String symbol) { + if (!hasEnumSymbol(schema, symbol)) { + throw new DataException( + "Requested conversion of Enum object but the schema does not match."); + } + return Integer.parseInt(schema.parameters().get(LOGICAL_PARAMETER + "." + symbol)); + } +} diff --git a/schema-converter/src/main/java/io/confluent/connect/schema/ConnectUnion.java b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectUnion.java new file mode 100644 index 00000000000..7bf388d8e7e --- /dev/null +++ b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectUnion.java @@ -0,0 +1,131 @@ +/* + * Copyright 2021 Confluent Inc. + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.confluent.connect.schema; + +import org.apache.kafka.connect.data.ConnectSchema; +import org.apache.kafka.connect.data.Field; +import org.apache.kafka.connect.data.Schema; +import org.apache.kafka.connect.data.SchemaBuilder; +import org.apache.kafka.connect.data.Struct; +import org.apache.kafka.connect.errors.DataException; + +public class ConnectUnion { + + public static final String LOGICAL_PARAMETER = "org.apache.kafka.connect.data.Union"; + + /** + * Returns a SchemaBuilder for a Union. + * + * @param annotation an arbitrary annotation to be associated with the union + * @return a SchemaBuilder + */ + public static SchemaBuilder builder(String annotation) { + return SchemaBuilder.struct().parameter(LOGICAL_PARAMETER, annotation); + } + + /** + * Returns whether a schema represents a Union. + * + * @param schema the schema + * @return whether the schema represents a Union + */ + public static boolean isUnion(Schema schema) { + return schema != null + && schema.parameters() != null + && schema.parameters().containsKey(LOGICAL_PARAMETER); + } + + /** + * Convert a value from its logical format (Union) to its encoded format.
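+ * The encoded value is the value of the first non-null field of the given struct.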
+ * + * @param schema the schema + * @param value the logical value + * @return the encoded value + */ + public static Object fromLogical(Schema schema, Struct value) { + if (!isUnion(schema)) { + throw new DataException( + "Requested conversion of Union object but the schema does not match."); + } + for (Field field : schema.fields()) { + Object object = value.get(field); + if (object != null) { + return object; + } + } + return null; + } + + /** + * Convert a value from its encoded format to its logical format (Union). + * The value is associated with the field whose schema matches the given value. + * + * @param schema the schema + * @param value the encoded value + * @return the logical value + */ + public static Struct toLogical(Schema schema, Object value) { + if (!isUnion(schema)) { + throw new DataException( + "Requested conversion of Union object but the schema does not match."); + } + Struct struct = new Struct(schema); + for (Field field : schema.fields()) { + if (validate(field.schema(), value)) { + struct.put(field, value); + break; + } + } + return struct; + } + + private static boolean validate(Schema schema, Object value) { + try { + ConnectSchema.validateValue(schema, value); + } catch (DataException e) { + return false; + } + return true; + } + + /** + * Convert a value from its encoded format to its logical format (Union). + * The value is associated with the field with the given field name. + * + * @param schema the schema + * @param fieldName the field name + * @param value the encoded value + * @return the logical value + */ + public static Struct toLogicalUsingName(Schema schema, String fieldName, Object value) { + if (!isUnion(schema)) { + throw new DataException( + "Requested conversion of Union object but the schema does not match."); + } + Struct struct = new Struct(schema); + for (Field field : schema.fields()) { + if (field.name().equals(fieldName)) { + struct.put(field, value); + break; + } + } + return struct; + } +} From 613c91dd4a873d366848b54aa874d8eacefc8b28 Mon Sep 17 00:00:00 2001 From: Anshul Goyal Date: Fri, 4 Feb 2022 01:43:45 +0530 Subject: [PATCH 19/73] DGS-2929 Optimising getLatestVersion implementation [skip secret scan] (#2163) * DGS-2929: basic linear time getLatestVersion impl * DGS-2929: adding more unit tests for getLatestVersion api * DGS-2929: address comment - remove returnDeletedSchemas flag * DGS-2929: address checkstyle error --- .../storage/KafkaSchemaRegistry.java | 22 +++++++++- .../schemaregistry/rest/RestApiTest.java | 41 ++++++++++++++++++- 2 files changed, 60 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java index 26829514887..19a829bafb6 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java @@ -1376,11 +1376,29 @@ private List getAllSchemaValues(String subject) @Override public Schema getLatestVersion(String subject) throws SchemaRegistryException { try (CloseableIterator allVersions = allVersions(subject, false)) { - List sortedVersions = sortSchemasByVersion(allVersions, false); - return sortedVersions.size() > 0 ? 
sortedVersions.get(sortedVersions.size() - 1) : null; + return getLatestVersionFromSubjectSchemas(allVersions); } } + private Schema getLatestVersionFromSubjectSchemas( + CloseableIterator schemas) { + int latestVersionId = -1; + SchemaValue latestSchemaValue = null; + + while (schemas.hasNext()) { + SchemaValue schemaValue = (SchemaValue) schemas.next(); + if (schemaValue.isDeleted()) { + continue; + } + if (schemaValue.getVersion() > latestVersionId) { + latestVersionId = schemaValue.getVersion(); + latestSchemaValue = schemaValue; + } + } + + return latestSchemaValue != null ? getSchemaEntityFromSchemaValue(latestSchemaValue) : null; + } + private CloseableIterator allVersions( String subjectOrPrefix, boolean isPrefix) throws SchemaRegistryException { try { diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java index 70c8f363d5b..c1027eb2a0b 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java @@ -1280,7 +1280,7 @@ public void testDeleteSchemaVersionInvalidSubject() throws Exception { @Test public void testDeleteLatestVersion() throws Exception { - List schemas = TestUtils.getRandomCanonicalAvroString(2); + List schemas = TestUtils.getRandomCanonicalAvroString(3); String subject = "test"; TestUtils.registerAndVerifySchema(restApp.restClient, schemas.get(0), 1, subject); @@ -1306,6 +1306,45 @@ public void testDeleteLatestVersion() throws Exception { Errors.SUBJECT_NOT_FOUND_ERROR_CODE, rce.getErrorCode()); } + + TestUtils.registerAndVerifySchema(restApp.restClient, schemas.get(2), 3, subject); + assertEquals("Latest version available after subject re-registration", + schemas.get(2), + restApp.restClient.getLatestVersion(subject).getSchema()); + } + + @Test + public void testGetLatestVersionNonExistentSubject() throws Exception { + String subject = "non_existent_subject"; + + try { + restApp.restClient.getLatestVersion(subject); + fail("Getting latest versions from non-existing subject should fail with " + + Errors.SUBJECT_NOT_FOUND_ERROR_CODE + + " (subject not found)."); + } catch (RestClientException rce) { + assertEquals("Should get a 404 status for non-existing subject", + Errors.SUBJECT_NOT_FOUND_ERROR_CODE, + rce.getErrorCode()); + } + } + + @Test + public void testGetLatestVersionDeleteOlder() throws Exception { + List schemas = TestUtils.getRandomCanonicalAvroString(2); + String subject = "test"; + + TestUtils.registerAndVerifySchema(restApp.restClient, schemas.get(0), 1, subject); + TestUtils.registerAndVerifySchema(restApp.restClient, schemas.get(1), 2, subject); + + assertEquals("Latest Version Schema", schemas.get(1), restApp.restClient.getLatestVersion(subject).getSchema()); + + assertEquals("Deleting Schema Older Version Success", (Integer) 1, restApp.restClient + .deleteSchemaVersion + (RestService.DEFAULT_REQUEST_PROPERTIES, subject, "1")); + assertEquals("Latest Version Schema Still Same", + schemas.get(1), + restApp.restClient.getLatestVersion(subject).getSchema()); } @Test From 1d61943c7a5585d085d742b2e7b1fe32e8812e89 Mon Sep 17 00:00:00 2001 From: NeoKoder <87475799+varunpv@users.noreply.github.com> Date: Fri, 4 Feb 2022 18:37:51 +0530 Subject: [PATCH 20/73] DGS-2871 Add API to delete Global-level compatibility setting (#2153) * Added the API Endpoint to delete the global config and revert back to default. 
* Added the support for this new endpoint in RestService. --- .../client/CachedSchemaRegistryClient.java | 2 +- .../client/rest/RestService.java | 10 +++--- .../swagger-ui/schema-registry-api-spec.yaml | 29 +++++++++++++++ .../rest/resources/ConfigResource.java | 35 ++++++++++++++++++- .../storage/KafkaSchemaRegistry.java | 20 +++++------ .../schemaregistry/rest/RestApiTest.java | 30 +++++++++++++++- 6 files changed, 108 insertions(+), 18 deletions(-) diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java index 00d6998349b..27c91523507 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java @@ -631,7 +631,7 @@ public String getCompatibility(String subject) throws IOException, RestClientExc @Override public void deleteCompatibility(String subject) throws IOException, RestClientException { - restService.deleteSubjectConfig(subject); + restService.deleteConfig(subject); } @Override diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/rest/RestService.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/rest/RestService.java index 96897ebfd69..a12036606bb 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/rest/RestService.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/rest/RestService.java @@ -672,15 +672,15 @@ public Config getConfig(Map requestProperties, return config; } - public Config deleteSubjectConfig(String subject) + public Config deleteConfig(String subject) throws IOException, RestClientException { - return deleteSubjectConfig(DEFAULT_REQUEST_PROPERTIES, subject); + return deleteConfig(DEFAULT_REQUEST_PROPERTIES, subject); } - public Config deleteSubjectConfig(Map requestProperties, String subject) + public Config deleteConfig(Map requestProperties, String subject) throws IOException, RestClientException { - UriBuilder builder = UriBuilder.fromPath("/config/{subject}"); - String path = builder.build(subject).toString(); + String path = subject != null + ? UriBuilder.fromPath("/config/{subject}").build(subject).toString() : "/config"; Config response = httpRequest(path, "DELETE", null, requestProperties, DELETE_SUBJECT_CONFIG_RESPONSE_TYPE); diff --git a/core/generated/swagger-ui/schema-registry-api-spec.yaml b/core/generated/swagger-ui/schema-registry-api-spec.yaml index 0c651fb91e1..04520402dcd 100644 --- a/core/generated/swagger-ui/schema-registry-api-spec.yaml +++ b/core/generated/swagger-ui/schema-registry-api-spec.yaml @@ -236,6 +236,35 @@ paths: description: | Error code 50001 -- Error in the backend data store Error code 50003 -- Error while forwarding the request to the primary + delete: + summary: "Deletes the Global compatibility level config and\ \ revert to the global default."
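+ # Illustrative call only, not part of the generated spec; assumes the default + # listener on localhost:8081. Resetting the global compatibility level: + # curl -X DELETE http://localhost:8081/config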
+ description: "" + operationId: "deleteTopLevelConfig" + consumes: + - "application/vnd.schemaregistry.v1+json" + - "application/vnd.schemaregistry+json" + - "application/json" + - "application/octet-stream" + produces: + - "application/vnd.schemaregistry.v1+json" + - "application/vnd.schemaregistry+json; qs=0.9" + - "application/json; qs=0.5" + responses: + 200: + description: "successful operation" + schema: + type: "string" + enum: + - "NONE" + - "BACKWARD" + - "BACKWARD_TRANSITIVE" + - "FORWARD" + - "FORWARD_TRANSITIVE" + - "FULL" + - "FULL_TRANSITIVE" + 500: + description: "Error code 50001 -- Error in the backend datastore" /config/{subject}: get: summary: Get compatibility level for a subject. diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java index e2cc6e253cb..c66f494e8de 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java @@ -201,6 +201,39 @@ public Config getTopLevelConfig() { return config; } + @DELETE + @Operation(summary = "Deletes the Global-level compatibility level config and " + + "revert to the global default.", responses = { + @ApiResponse(content = @Content( + schema = @Schema(implementation = CompatibilityLevel.class))), + @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend " + + "datastore") + }) + public void deleteTopLevelConfig( + final @Suspended AsyncResponse asyncResponse, + @Context HttpHeaders headers) { + log.info("Deleting Global compatibility setting and reverting back to default"); + + Config deletedConfig; + try { + CompatibilityLevel currentCompatibility = schemaRegistry.getCompatibilityLevel(null); + Map headerProperties = requestHeaderBuilder.buildRequestHeaders( + headers, schemaRegistry.config().whitelistHeaders()); + schemaRegistry.deleteCompatibilityConfigOrForward(null, headerProperties); + deletedConfig = new Config(currentCompatibility.name); + } catch (OperationNotPermittedException e) { + throw Errors.operationNotPermittedException(e.getMessage()); + } catch (SchemaRegistryStoreException e) { + throw Errors.storeException("Failed to delete compatibility level", e); + } catch (UnknownLeaderException e) { + throw Errors.unknownLeaderException("Failed to delete compatibility level", e); + } catch (SchemaRegistryRequestForwardingException e) { + throw Errors.requestForwardingFailedException("Error while forwarding delete config request" + + " to the leader", e); + } + asyncResponse.resume(deletedConfig); + } + @DELETE @Path("/{subject}") @Operation(summary = "Deletes the specified subject-level compatibility level config and " @@ -229,7 +262,7 @@ public void deleteSubjectConfig( Map headerProperties = requestHeaderBuilder.buildRequestHeaders( headers, schemaRegistry.config().whitelistHeaders()); - schemaRegistry.deleteSubjectCompatibilityConfigOrForward(subject, headerProperties); + schemaRegistry.deleteCompatibilityConfigOrForward(subject, headerProperties); deletedConfig = new Config(currentCompatibility.name); } catch (OperationNotPermittedException e) { throw Errors.operationNotPermittedException(e.getMessage()); diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java index 
19a829bafb6..b8c0af1a18f 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java @@ -646,7 +646,7 @@ public void deleteSchemaVersion(String subject, deleteMode(subject); } if (getCompatibilityLevel(subject) != null) { - deleteSubjectCompatibility(subject); + deleteCompatibility(subject); } } } else { @@ -719,7 +719,7 @@ public List deleteSubject(String subject, deleteMode(subject); } if (getCompatibilityLevel(subject) != null) { - deleteSubjectCompatibility(subject); + deleteCompatibility(subject); } } else { for (Integer version : deletedVersions) { @@ -914,7 +914,7 @@ private List forwardDeleteSubjectRequestToLeader( } } - private void forwardDeleteSubjectCompatibilityConfigToLeader( + private void forwardDeleteCompatibilityConfigToLeader( Map requestProperties, String subject ) throws SchemaRegistryRequestForwardingException { @@ -923,7 +923,7 @@ private void forwardDeleteSubjectCompatibilityConfigToLeader( log.debug(String.format("Forwarding delete subject compatibility config request %s to %s", subject, baseUrl)); try { - leaderRestService.deleteSubjectConfig(requestProperties, subject); + leaderRestService.deleteConfig(requestProperties, subject); } catch (IOException e) { throw new SchemaRegistryRequestForwardingException( String.format( @@ -1472,32 +1472,32 @@ public void updateConfigOrForward(String subject, CompatibilityLevel newCompatib } } - public void deleteSubjectCompatibilityConfig(String subject) + public void deleteCompatibilityConfig(String subject) throws SchemaRegistryStoreException, OperationNotPermittedException { if (isReadOnlyMode(subject)) { throw new OperationNotPermittedException("Subject " + subject + " is in read-only mode"); } try { kafkaStore.waitUntilKafkaReaderReachesLastOffset(subject, kafkaStoreTimeoutMs); - deleteSubjectCompatibility(subject); + deleteCompatibility(subject); } catch (StoreException e) { throw new SchemaRegistryStoreException("Failed to delete subject config value from store", e); } } - public void deleteSubjectCompatibilityConfigOrForward(String subject, + public void deleteCompatibilityConfigOrForward(String subject, Map headerProperties) throws SchemaRegistryStoreException, SchemaRegistryRequestForwardingException, OperationNotPermittedException, UnknownLeaderException { kafkaStore.lockFor(subject).lock(); try { if (isLeader()) { - deleteSubjectCompatibilityConfig(subject); + deleteCompatibilityConfig(subject); } else { // forward delete subject config request to the leader if (leaderIdentity != null) { - forwardDeleteSubjectCompatibilityConfigToLeader(headerProperties, subject); + forwardDeleteCompatibilityConfigToLeader(headerProperties, subject); } else { throw new UnknownLeaderException("Delete config request failed since leader is " + "unknown"); @@ -1599,7 +1599,7 @@ private void deleteMode(String subject) throws StoreException { this.kafkaStore.delete(modeKey); } - private void deleteSubjectCompatibility(String subject) throws StoreException { + private void deleteCompatibility(String subject) throws StoreException { ConfigKey configKey = new ConfigKey(subject); this.kafkaStore.delete(configKey); } diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java index c1027eb2a0b..7a1dc29d9cb 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java +++ 
b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java @@ -39,6 +39,7 @@ import java.net.URL; import java.net.HttpURLConnection; +import static io.confluent.kafka.schemaregistry.CompatibilityLevel.BACKWARD; import static io.confluent.kafka.schemaregistry.CompatibilityLevel.FORWARD; import static io.confluent.kafka.schemaregistry.CompatibilityLevel.NONE; import static io.confluent.kafka.schemaregistry.utils.QualifiedSubject.DEFAULT_CONTEXT; @@ -479,7 +480,7 @@ public void testSubjectConfigChange() throws Exception { restApp.restClient.getConfig(subject).getCompatibilityLevel()); // delete subject compatibility - restApp.restClient.deleteSubjectConfig(subject); + restApp.restClient.deleteConfig(subject); assertEquals("Compatibility level for this subject should be reverted to none", NONE.name, restApp.restClient @@ -488,6 +489,33 @@ public void testSubjectConfigChange() throws Exception { .getCompatibilityLevel()); } + @Test + public void testGlobalConfigChange() throws Exception { + assertEquals("Default compatibility level should be none for this test instance", + NONE.name, + restApp.restClient.getConfig(null).getCompatibilityLevel()); + + // change global compatibility to forward + restApp.restClient.updateCompatibility(CompatibilityLevel.FORWARD.name, null); + assertEquals("New Global compatibility level should be forward", + FORWARD.name, + restApp.restClient.getConfig(null).getCompatibilityLevel()); + + // change global compatibility to backward + restApp.restClient.updateCompatibility(BACKWARD.name, null); + assertEquals("New Global compatibility level should be backward", + BACKWARD.name, + restApp.restClient.getConfig(null).getCompatibilityLevel()); + + // delete Global compatibility + restApp.restClient.deleteConfig(null); + assertEquals("Global compatibility level should be reverted to none", + NONE.name, + restApp.restClient + .getConfig(RestService.DEFAULT_REQUEST_PROPERTIES, null, true) + .getCompatibilityLevel()); + } + @Test public void testGetSchemaNonExistingId() throws Exception { try { From e14d36e1e72fa58c38365b8407546421dfebd753 Mon Sep 17 00:00:00 2001 From: Anshul Goyal Date: Tue, 8 Feb 2022 01:39:04 +0530 Subject: [PATCH 21/73] DGS-2951 Make Register Api's incompatibility error message verbose (#2166) * DGS-2951: add verbosity in register API with incompatible schemas * DGS-2951: remove verbose query param * DGS-2951: adding unit test * DGS-2951: address comment - format change --- .../rest/resources/SubjectVersionsResource.java | 6 +++--- .../schemaregistry/storage/KafkaSchemaRegistry.java | 9 +++++---- .../schemaregistry/rest/RestApiCompatibilityTest.java | 4 ++++ 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java index 7a59b4a87c6..15b189d3776 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java @@ -318,9 +318,9 @@ public void register( throw Errors.requestForwardingFailedException("Error while forwarding register schema request" + " to the leader", e); } catch (IncompatibleSchemaException e) { - throw Errors.incompatibleSchemaException("Schema being registered is incompatible with an" - + " earlier schema for subject " - + "\"" + subjectName + "\"", e); + throw
Errors.incompatibleSchemaException("Schema being registered is incompatible with" + + " an earlier schema for subject \"" + subjectName + "\", details: " + + e.getMessage(), e); } catch (UnknownLeaderException e) { throw Errors.unknownLeaderException("Leader not known.", e); } catch (SchemaRegistryException e) { diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java index b8c0af1a18f..c72f983c5b2 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java @@ -488,8 +488,10 @@ public int register(String subject, } Collections.reverse(undeletedVersions); - final boolean isCompatible = - isCompatibleWithPrevious(subject, parsedSchema, undeletedVersions).isEmpty(); + final List compatibilityErrorLogs = isCompatibleWithPrevious( + subject, parsedSchema, undeletedVersions); + final boolean isCompatible = compatibilityErrorLogs.isEmpty(); + if (normalize) { parsedSchema = parsedSchema.normalize(); } @@ -548,8 +550,7 @@ public int register(String subject, return schema.getId(); } else { - throw new IncompatibleSchemaException( - "New schema is incompatible with an earlier schema."); + throw new IncompatibleSchemaException(compatibilityErrorLogs.toString()); } } catch (StoreTimeoutException te) { throw new SchemaRegistryTimeoutException("Write to the Kafka store timed out while", te); diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiCompatibilityTest.java b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiCompatibilityTest.java index aa0cbfba80a..4e997c5dcab 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiCompatibilityTest.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiCompatibilityTest.java @@ -22,7 +22,9 @@ import io.confluent.kafka.schemaregistry.rest.exceptions.RestInvalidSchemaException; import org.junit.Test; +import static org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType.READER_FIELD_MISSING_DEFAULT_VALUE; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class RestApiCompatibilityTest extends ClusterTestHarness { @@ -59,6 +61,8 @@ public void testCompatibility() throws Exception { assertEquals("Should get a conflict status", RestIncompatibleSchemaException.DEFAULT_ERROR_CODE, e.getStatus()); + assertTrue("Verifying error message verbosity", + e.getMessage().contains(READER_FIELD_MISSING_DEFAULT_VALUE.toString())); } // register a non-avro From b10e063d26e38794cacb45cce2ae720387d52941 Mon Sep 17 00:00:00 2001 From: sanjay-awatramani <94131090+sanjay-awatramani@users.noreply.github.com> Date: Fri, 11 Feb 2022 01:09:46 +0530 Subject: [PATCH 22/73] DGS-2989 Update function return type due to changes in parent class (#2172) --- .../leaderelector/kafka/SchemaRegistryCoordinator.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java b/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java index 6898100bbd0..6a484fafd7f 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java +++ 
b/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java @@ -211,11 +211,16 @@ protected Map performAssignment( } @Override - protected void onJoinPrepare(int generation, String memberId) { + protected boolean onJoinPrepare(int generation, String memberId) { log.debug("Revoking previous assignment {}", assignmentSnapshot); if (assignmentSnapshot != null) { listener.onRevoked(); } + // return true if the cleanup succeeds or if it fails with a non-retriable exception. + // return false otherwise. + // listener.onRevoked() called above removes this instance as the leader + // and even if we got an exception, it wouldn't help retrying. + return true; } @Override From 0c06f3b77c7eedc2a511e226c77a0b3285e21d10 Mon Sep 17 00:00:00 2001 From: Anshul Goyal Date: Fri, 11 Feb 2022 02:56:43 +0530 Subject: [PATCH 23/73] DGS-2975 Make register API 422 error code verbose (#2170) * DGS-2975: parse schema method to throw exception * DGS-2975: capture error details while loading invalid schema * DGS-2975: fix checkstyle * DGS-2975: unit test for bad field type invalid schema * DGS-2975: unit test for bad ref invalid schema * DGS-2975: add details for validation failures * DGS-2975: retain error message in protobuf parser * DGS-2975: unit tests for protobuf schema parsing * DGS-2975: unit tests for json schema parser * DGS-2975: address comment - move parseSchema method to interface as default * DGS-2975: address comment - split invalid schema tests * DGS-2975: address comment - add fail to try block --- .../kafka/schemaregistry/SchemaProvider.java | 24 +++++++-- .../avro/AvroSchemaProvider.java | 14 +++-- .../storage/KafkaSchemaRegistry.java | 12 +++-- .../CustomSchemaProviderMetricTest.java | 7 +++ .../schemaregistry/rest/RestApiTest.java | 52 +++++++++++++++++++ .../json/JsonSchemaProvider.java | 21 ++++---- .../schemaregistry/json/JsonSchemaTest.java | 39 +++++++++++++- .../protobuf/ProtobufSchema.java | 2 +- .../protobuf/ProtobufSchemaProvider.java | 23 ++++---- .../protobuf/ProtobufSchemaTest.java | 43 +++++++++++++++ 10 files changed, 196 insertions(+), 41 deletions(-) diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/SchemaProvider.java b/client/src/main/java/io/confluent/kafka/schemaregistry/SchemaProvider.java index 5a5c7c56b27..ac50a4d882f 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/SchemaProvider.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/SchemaProvider.java @@ -54,12 +54,30 @@ default void configure(Map configs) { * @param isNew whether the schema is new * @return an optional parsed schema */ - Optional parseSchema(String schemaString, - List references, - boolean isNew); + default Optional parseSchema(String schemaString, + List references, + boolean isNew) { + try { + return Optional.of(parseSchemaOrElseThrow(schemaString, references, isNew)); + } catch (Exception e) { + return Optional.empty(); + } + } default Optional parseSchema(String schemaString, List references) { return parseSchema(schemaString, references, false); } + + /** + * Parses a string representing a schema. 
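+ * Unlike the default {@code parseSchema} methods, which catch any parse failure and return an empty Optional, this method propagates the underlying exception to the caller.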
+ * + * @param schemaString the schema + * @param references a list of schema references + * @param isNew whether the schema is new + * @return a parsed schema or throw an error + */ + ParsedSchema parseSchemaOrElseThrow(String schemaString, + List references, + boolean isNew); } diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/avro/AvroSchemaProvider.java b/client/src/main/java/io/confluent/kafka/schemaregistry/avro/AvroSchemaProvider.java index 381780956d0..05b49dd90e0 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/avro/AvroSchemaProvider.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/avro/AvroSchemaProvider.java @@ -18,7 +18,6 @@ import java.util.List; import java.util.Map; -import java.util.Optional; import io.confluent.kafka.schemaregistry.AbstractSchemaProvider; import io.confluent.kafka.schemaregistry.ParsedSchema; @@ -47,16 +46,15 @@ public String schemaType() { } @Override - public Optional parseSchema(String schemaString, - List references, - boolean isNew) { + public ParsedSchema parseSchemaOrElseThrow(String schemaString, + List references, + boolean isNew) { try { - return Optional.of( - new AvroSchema(schemaString, references, resolveReferences(references), null, - validateDefaults && isNew)); + return new AvroSchema(schemaString, references, resolveReferences(references), null, + validateDefaults && isNew); } catch (Exception e) { log.error("Could not parse Avro schema", e); - return Optional.empty(); + throw e; } } } diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java index 56d60a308a8..3c03cc81c9f 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java @@ -1023,7 +1023,7 @@ private ParsedSchema canonicalizeSchema(Schema schema, boolean isNew, boolean no try { parsedSchema.validate(); } catch (Exception e) { - String errMsg = "Invalid schema " + schema; + String errMsg = "Invalid schema " + schema + ", details: " + e.getMessage(); log.error(errMsg, e); throw new InvalidSchemaException(errMsg, e); } @@ -1087,10 +1087,12 @@ private ParsedSchema loadSchema( } final String type = schemaType; - ParsedSchema parsedSchema = provider.parseSchema(schema, references, isNew) - .orElseThrow(() -> new InvalidSchemaException("Invalid schema " + schema - + " with refs " + references + " of type " + type)); - return parsedSchema; + try { + return provider.parseSchemaOrElseThrow(schema, references, isNew); + } catch (Exception e) { + throw new InvalidSchemaException("Invalid schema " + schema + + " with refs " + references + " of type " + type + ", details: " + e.getMessage()); + } } public Schema getUsingContexts(String subject, int version, boolean diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/metrics/CustomSchemaProviderMetricTest.java b/core/src/test/java/io/confluent/kafka/schemaregistry/metrics/CustomSchemaProviderMetricTest.java index d67add3cbb6..4e0037ca3b1 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/metrics/CustomSchemaProviderMetricTest.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/metrics/CustomSchemaProviderMetricTest.java @@ -59,5 +59,12 @@ public Optional parseSchema(String schemaString, boolean isNew) { return Optional.empty(); } + + @Override + public ParsedSchema 
parseSchemaOrElseThrow(String schemaString, + List references, + boolean isNew) { + return null; + } } } diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java index db81caf4202..4b8171a132a 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java @@ -33,6 +33,7 @@ import io.confluent.kafka.schemaregistry.utils.TestUtils; import org.apache.avro.Schema.Parser; +import org.apache.avro.SchemaParseException; import org.junit.Test; import java.util.*; @@ -182,6 +183,57 @@ public void testRegisterBadDefault() throws Exception { } } + @Test + public void testRegisterInvalidSchemaBadType() throws Exception { + String subject = "testSubject"; + + //Invalid Field Type 'str' + String badSchemaString = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"str\",\"name\":\"field1\"}]}"; + + String expectedErrorMessage = null; + try { + new Parser().parse(badSchemaString); + fail("Parsing invalid schema string should fail with SchemaParseException"); + } catch (SchemaParseException spe) { + expectedErrorMessage = spe.getMessage(); + } + + try { + restApp.restClient.registerSchema(badSchemaString, subject); + fail("Registering schema with invalid field type should fail with " + + Errors.INVALID_SCHEMA_ERROR_CODE + + " (invalid schema)"); + } catch (RestClientException rce) { + assertEquals("Invalid schema", Errors.INVALID_SCHEMA_ERROR_CODE, rce.getErrorCode()); + assertTrue("Verify error message verbosity", rce.getMessage().contains(expectedErrorMessage)); + } + } + + @Test + public void testRegisterInvalidSchemaBadReference() throws Exception { + String subject = "testSubject"; + + //Invalid Reference + SchemaReference invalidReference = new SchemaReference("invalid.schema", "badSubject", 1); + String schemaString = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"field1\"}]}"; + + try { + restApp.restClient.registerSchema(schemaString, "AVRO", + Collections.singletonList(invalidReference), subject); + fail("Registering schema with invalid reference should fail with " + + Errors.INVALID_SCHEMA_ERROR_CODE + + " (invalid schema)"); + } catch (RestClientException rce) { + assertEquals("Invalid schema", Errors.INVALID_SCHEMA_ERROR_CODE, rce.getErrorCode()); + } + } + @Test public void testRegisterDiffSchemaType() throws Exception { String subject = "testSubject"; diff --git a/json-schema-provider/src/main/java/io/confluent/kafka/schemaregistry/json/JsonSchemaProvider.java b/json-schema-provider/src/main/java/io/confluent/kafka/schemaregistry/json/JsonSchemaProvider.java index 97ff4891284..a08ec4bae22 100644 --- a/json-schema-provider/src/main/java/io/confluent/kafka/schemaregistry/json/JsonSchemaProvider.java +++ b/json-schema-provider/src/main/java/io/confluent/kafka/schemaregistry/json/JsonSchemaProvider.java @@ -19,7 +19,6 @@ import org.slf4j.LoggerFactory; import java.util.List; -import java.util.Optional; import io.confluent.kafka.schemaregistry.AbstractSchemaProvider; import io.confluent.kafka.schemaregistry.ParsedSchema; @@ -35,19 +34,19 @@ public String schemaType() { } @Override - public Optional parseSchema(String schemaString, - List references, - boolean isNew) { + public ParsedSchema parseSchemaOrElseThrow(String schemaString, + List references, + boolean isNew) { try { - return 
Optional.of(new JsonSchema( - schemaString, - references, - resolveReferences(references), - null - )); + return new JsonSchema( + schemaString, + references, + resolveReferences(references), + null + ); } catch (Exception e) { log.error("Could not parse JSON schema", e); - return Optional.empty(); + throw e; } } } diff --git a/json-schema-provider/src/test/java/io/confluent/kafka/schemaregistry/json/JsonSchemaTest.java b/json-schema-provider/src/test/java/io/confluent/kafka/schemaregistry/json/JsonSchemaTest.java index decf2e616bd..55f27e734c1 100644 --- a/json-schema-provider/src/test/java/io/confluent/kafka/schemaregistry/json/JsonSchemaTest.java +++ b/json-schema-provider/src/test/java/io/confluent/kafka/schemaregistry/json/JsonSchemaTest.java @@ -22,8 +22,9 @@ import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.NullNode; import com.fasterxml.jackson.databind.node.NumericNode; -import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; +import io.confluent.kafka.schemaregistry.ParsedSchema; +import io.confluent.kafka.schemaregistry.SchemaProvider; import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference; import io.confluent.kafka.schemaregistry.json.diff.Difference; import io.confluent.kafka.schemaregistry.json.diff.SchemaDiff; @@ -36,9 +37,12 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.Optional; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -76,6 +80,12 @@ public class JsonSchemaTest { private static final JsonSchema enumSchema = new JsonSchema(enumSchemaString); + private static final String invalidSchemaString = "{\"properties\": {\n" + + " \"string\": {\"type\": \"str\"}\n" + + " }" + + " \"additionalProperties\": false\n" + + "}"; + @Test public void testPrimitiveTypesToJsonSchema() throws Exception { Object envelope = JsonSchemaUtils.toObject((String) null, createPrimitiveSchema("null")); @@ -324,6 +334,33 @@ public void testRecursiveSchema() { assertEquals(0, diff.size()); } + @Test + public void testParseSchema() { + SchemaProvider jsonSchemaProvider = new JsonSchemaProvider(); + ParsedSchema parsedSchema = jsonSchemaProvider.parseSchemaOrElseThrow(recordSchemaString, + new ArrayList<>(), false); + Optional parsedSchemaOptional = jsonSchemaProvider.parseSchema(recordSchemaString, + new ArrayList<>(), false); + + assertNotNull(parsedSchema); + assertTrue(parsedSchemaOptional.isPresent()); + } + + @Test(expected = IllegalArgumentException.class) + public void testParseSchemaThrowException() { + SchemaProvider jsonSchemaProvider = new JsonSchemaProvider(); + jsonSchemaProvider.parseSchemaOrElseThrow(invalidSchemaString, + new ArrayList<>(), false); + } + + @Test + public void testParseSchemaSuppressException() { + SchemaProvider jsonSchemaProvider = new JsonSchemaProvider(); + Optional parsedSchema = jsonSchemaProvider.parseSchema(invalidSchemaString, + new ArrayList<>(), false); + assertFalse(parsedSchema.isPresent()); + } + private static Map getJsonSchemaWithReferences() { Map schemas = new HashMap<>(); String reference = "{\"type\":\"object\",\"additionalProperties\":false,\"definitions\":" diff --git 
a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java index f3d4d106ff1..4fbd0de7960 100644 --- a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java +++ b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java @@ -459,7 +459,7 @@ private ProtoFileElement toProtoFile(String schema) { byte[] bytes = base64Decoder.decode(schema); return toProtoFile(FileDescriptorProto.parseFrom(bytes)); } catch (Exception pe) { - throw new IllegalArgumentException("Could not parse Protobuf", e); + throw new IllegalArgumentException("Could not parse Protobuf - " + e.getMessage(), e); } } } diff --git a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java index e18063dbefb..936501bceb0 100644 --- a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java +++ b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java @@ -17,7 +17,6 @@ package io.confluent.kafka.schemaregistry.protobuf; import java.util.List; -import java.util.Optional; import io.confluent.kafka.schemaregistry.AbstractSchemaProvider; import io.confluent.kafka.schemaregistry.ParsedSchema; @@ -35,20 +34,20 @@ public String schemaType() { } @Override - public Optional parseSchema(String schemaString, - List references, - boolean isNew) { + public ParsedSchema parseSchemaOrElseThrow(String schemaString, + List references, + boolean isNew) { try { - return Optional.of(new ProtobufSchema( - schemaString, - references, - resolveReferences(references), - null, - null - )); + return new ProtobufSchema( + schemaString, + references, + resolveReferences(references), + null, + null + ); } catch (Exception e) { log.error("Could not parse Protobuf schema", e); - return Optional.empty(); + throw e; } } } diff --git a/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java b/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java index 2d3006f0c29..ee40cfabace 100644 --- a/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java +++ b/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java @@ -24,6 +24,8 @@ import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.DynamicMessage; import com.squareup.wire.schema.internal.parser.ProtoFileElement; +import io.confluent.kafka.schemaregistry.ParsedSchema; +import io.confluent.kafka.schemaregistry.SchemaProvider; import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference; import io.confluent.kafka.schemaregistry.protobuf.dynamic.DynamicSchema; import io.confluent.kafka.schemaregistry.protobuf.dynamic.MessageDefinition; @@ -35,6 +37,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Optional; import io.confluent.kafka.schemaregistry.CompatibilityLevel; import io.confluent.kafka.schemaregistry.protobuf.diff.ResourceLoader; @@ -42,6 +45,7 @@ import static io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema.PROTO3; import static org.junit.Assert.assertArrayEquals; import static 
org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; @@ -170,6 +174,18 @@ public class ProtobufSchemaTest { private static final ProtobufSchema enumBeforeMessageSchema = new ProtobufSchema(enumBeforeMessageSchemaString); + private static final String invalidSchemaString = "syntax = \"proto3\";\n" + + "\n" + + "option java_package = \"io.confluent.kafka.serializers.protobuf.test\";\n" + + "option java_outer_classname = \"TestMessageProtos\";\n" + + "\n" + + "import \"google/protobuf/descriptor.proto\";\n" + + "\n" + + "message TestMessage {\n" + + " string test_string = 1 [json_name = \"test_str\"];\n" + + " int32 test_int32 = 8.01;\n" + + "}\n"; + @Test public void testRecordToProtobuf() throws Exception { String json = "{\n" @@ -968,6 +984,33 @@ public void testEnumAfterMessage() throws Exception { new ProtobufSchema(enumBeforeMessageSchema.toDescriptor()).canonicalString()); } + @Test + public void testParseSchema() { + SchemaProvider protobufSchemaProvider = new ProtobufSchemaProvider(); + ParsedSchema parsedSchema = protobufSchemaProvider.parseSchemaOrElseThrow(recordSchemaString, + new ArrayList<>(), false); + Optional parsedSchemaOptional = protobufSchemaProvider.parseSchema(recordSchemaString, + new ArrayList<>(), false); + + assertNotNull(parsedSchema); + assertTrue(parsedSchemaOptional.isPresent()); + } + + @Test(expected = IllegalArgumentException.class) + public void testParseSchemaThrowException() { + SchemaProvider protobufSchemaProvider = new ProtobufSchemaProvider(); + protobufSchemaProvider.parseSchemaOrElseThrow(invalidSchemaString, + new ArrayList<>(), false); + } + + @Test + public void testParseSchemaSuppressException() { + SchemaProvider protobufSchemaProvider = new ProtobufSchemaProvider(); + Optional parsedSchema = protobufSchemaProvider.parseSchema(invalidSchemaString, + new ArrayList<>(), false); + assertFalse(parsedSchema.isPresent()); + } + private static JsonNode jsonTree(String jsonData) { try { return objectMapper.readTree(jsonData); From 4ff632696bd7a815b375bfc396a10beec3ec0f58 Mon Sep 17 00:00:00 2001 From: Dragos Misca Date: Tue, 15 Feb 2022 19:09:02 -0800 Subject: [PATCH 24/73] MINOR: Update generated protobuf classes (#2183) --- .../kafka/serializers/protobuf/test/Ref.java | 4 +- .../kafka/serializers/protobuf/test/Root.java | 4 +- .../connect/protobuf/test/KeyValue.java | 4 +- .../connect/protobuf/test/MapReferences.java | 8 +- .../protobuf/test/RecursiveKeyValue.java | 4 +- .../java/com/acme/glup/ExampleProtoAcme.java | 4 +- .../java/com/acme/glup/MetadataProto.java | 228 +++++++++--------- .../protobuf/test/EnumUnionOuter.java | 2 +- .../protobuf/test/NestedTestProto.java | 33 +-- .../test/TestMessageOptionalProtos.java | 4 +- .../protobuf/test/TestMessageProtos.java | 24 +- .../java/io/confluent/protobuf/MetaProto.java | 25 +- 12 files changed, 175 insertions(+), 169 deletions(-) diff --git a/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Ref.java b/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Ref.java index 55c225ad27c..268eed615c8 100644 --- a/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Ref.java +++ b/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Ref.java @@ -188,7 +188,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getRefIdBytes().isEmpty()) { + if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(refId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, refId_); } if (isActive_ != false) { @@ -203,7 +203,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getRefIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(refId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, refId_); } if (isActive_ != false) { diff --git a/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Root.java b/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Root.java index c7507ff7e36..958ae01f068 100644 --- a/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Root.java +++ b/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Root.java @@ -220,7 +220,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getRootIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rootId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, rootId_); } if (ref_ != null) { @@ -235,7 +235,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getRootIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rootId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, rootId_); } if (ref_ != null) { diff --git a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/KeyValue.java b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/KeyValue.java index f5ade658f3e..35594258e85 100644 --- a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/KeyValue.java +++ b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/KeyValue.java @@ -191,7 +191,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (key_ != 0) { output.writeInt32(1, key_); } - if (!getValueBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, value_); } unknownFields.writeTo(output); @@ -207,7 +207,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, key_); } - if (!getValueBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, value_); } size += unknownFields.getSerializedSize(); diff --git a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/MapReferences.java b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/MapReferences.java index 101acc150cf..d37fc313832 100644 --- a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/MapReferences.java +++ b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/MapReferences.java @@ -1794,10 +1794,10 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getKeyBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, key_); } - if (!getValueBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, value_); } 
unknownFields.writeTo(output); @@ -1809,10 +1809,10 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getKeyBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, key_); } - if (!getValueBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, value_); } size += unknownFields.getSerializedSize(); diff --git a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/RecursiveKeyValue.java b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/RecursiveKeyValue.java index ba1c8bc2686..8356ca146db 100644 --- a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/RecursiveKeyValue.java +++ b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/test/RecursiveKeyValue.java @@ -245,7 +245,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (key_ != 0) { output.writeInt32(1, key_); } - if (!getValueBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, value_); } if (keyValue_ != null) { @@ -264,7 +264,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeInt32Size(1, key_); } - if (!getValueBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, value_); } if (keyValue_ != null) { diff --git a/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java b/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java index b7b65676977..f651800cde3 100644 --- a/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java +++ b/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java @@ -583,7 +583,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (partition_ != null) { output.writeMessage(2, getPartition()); } - if (!getUidBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uid_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, uid_); } com.google.protobuf.GeneratedMessageV3 @@ -612,7 +612,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getPartition()); } - if (!getUidBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uid_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, uid_); } for (java.util.Map.Entry entry diff --git a/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java b/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java index dd53e2749ee..1ebe41d52c8 100644 --- a/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java +++ b/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java @@ -2713,7 +2713,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_); } for (int i = 0; i < format_.size(); i++) { @@ -2722,13 +2722,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (partitionScheme_ != 
com.acme.glup.MetadataProto.PartitionScheme.UNSUPPORTED_PARTITION_SCHEME.getNumber()) { output.writeEnum(3, partitionScheme_); } - if (!getJavaClassBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(javaClass_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, javaClass_); } if (forTests_ != false) { output.writeBool(5, forTests_); } - if (!getOwnerBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, owner_); } if (private_ != false) { @@ -2749,7 +2749,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_); } for (int i = 0; i < format_.size(); i++) { @@ -2760,14 +2760,14 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(3, partitionScheme_); } - if (!getJavaClassBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(javaClass_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, javaClass_); } if (forTests_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, forTests_); } - if (!getOwnerBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, owner_); } if (private_ != false) { @@ -4414,7 +4414,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (format_ != null) { output.writeMessage(2, getFormat()); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, datasetId_); } unknownFields.writeTo(output); @@ -4434,7 +4434,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getFormat()); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, datasetId_); } size += unknownFields.getSerializedSize(); @@ -5894,7 +5894,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getPathBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, path_); } if (fileFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) { @@ -5915,7 +5915,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (priority_ != 0) { output.writeInt32(8, priority_); } - if (!getLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 9, label_); } if (monitoringLevel_ != com.acme.glup.MetadataProto.MonitoringLevel.DEFAULT.getNumber()) { @@ -5930,7 +5930,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getPathBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, path_); } if (fileFormat_ != 
com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) { @@ -5957,7 +5957,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeInt32Size(8, priority_); } - if (!getLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, label_); } if (monitoringLevel_ != com.acme.glup.MetadataProto.MonitoringLevel.DEFAULT.getNumber()) { @@ -9786,16 +9786,16 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, inputDatasetId_); } - if (!getInputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, inputFormatLabel_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, outputFormatLabel_); } if (useHippoCuttleJob_ != false) { @@ -9810,16 +9810,16 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, inputDatasetId_); } - if (!getInputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, inputFormatLabel_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, outputFormatLabel_); } if (useHippoCuttleJob_ != false) { @@ -10972,16 +10972,16 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, topic_); } if (deduplicate_ != false) { output.writeBool(3, deduplicate_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, outputFormatLabel_); } unknownFields.writeTo(output); @@ -10993,17 +10993,17 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if 
(!getTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, topic_); } if (deduplicate_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, deduplicate_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, outputFormatLabel_); } size += unknownFields.getSerializedSize(); @@ -13657,10 +13657,10 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, topic_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, outputDatasetId_); } if (deduplicate_ != false) { @@ -13669,7 +13669,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (config_ != null) { output.writeMessage(4, getConfig()); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, outputFormatLabel_); } for (int i = 0; i < configPerDc_.size(); i++) { @@ -13684,10 +13684,10 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, topic_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, outputDatasetId_); } if (deduplicate_ != false) { @@ -13698,7 +13698,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, getConfig()); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, outputFormatLabel_); } for (int i = 0; i < configPerDc_.size(); i++) { @@ -16422,7 +16422,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (from_ != null) { output.writeMessage(1, getFrom()); } - if (!getSourceNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, sourceNamespace_); } if (getPlatformsList().size() > 0) { @@ -16435,10 +16435,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (isBackfilling_ != false) { output.writeBool(8, isBackfilling_); } - if (!getToLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 9, toLabel_); } - if (!getToDatasetIdBytes().isEmpty()) { + if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 10, toDatasetId_); } if (withBackfilling_ != false) { @@ -16460,7 +16460,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getFrom()); } - if (!getSourceNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, sourceNamespace_); } { @@ -16479,10 +16479,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBoolSize(8, isBackfilling_); } - if (!getToLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, toLabel_); } - if (!getToDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, toDatasetId_); } if (withBackfilling_ != false) { @@ -18109,7 +18109,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (from_ != null) { output.writeMessage(1, getFrom()); } - if (!getSourceNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, sourceNamespace_); } if (getPlatformsList().size() > 0) { @@ -18132,7 +18132,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getFrom()); } - if (!getSourceNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, sourceNamespace_); } { @@ -19485,10 +19485,10 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, inputDatasetId_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, outputDatasetId_); } if (inputFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) { @@ -19497,10 +19497,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (outputFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) { output.writeEnum(4, outputFormat_); } - if (!getInputDatasetLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, inputDatasetLabel_); } - if (!getOutputDatasetLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, outputDatasetLabel_); } if (isByPlatform_ != false) { @@ -19515,10 +19515,10 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(1, inputDatasetId_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, outputDatasetId_); } if (inputFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) { @@ -19529,10 +19529,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(4, outputFormat_); } - if (!getInputDatasetLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, inputDatasetLabel_); } - if (!getOutputDatasetLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, outputDatasetLabel_); } if (isByPlatform_ != false) { @@ -20922,19 +20922,19 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, inputDatasetId_); } - if (!getInputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, inputFormatLabel_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, outputFormatLabel_); } - if (samplingRate_ != 0F) { + if (java.lang.Float.floatToRawIntBits(samplingRate_) != 0) { output.writeFloat(5, samplingRate_); } unknownFields.writeTo(output); @@ -20946,19 +20946,19 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, inputDatasetId_); } - if (!getInputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, inputFormatLabel_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, outputFormatLabel_); } - if (samplingRate_ != 0F) { + if (java.lang.Float.floatToRawIntBits(samplingRate_) != 0) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(5, samplingRate_); } @@ -22298,22 +22298,22 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if 
(!getLeftDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, leftDatasetId_); } - if (!getLeftFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, leftFormatLabel_); } - if (!getRightDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, rightDatasetId_); } - if (!getRightFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, rightFormatLabel_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, hostname_); } - if (!getIgnoredFieldsBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ignoredFields_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, ignoredFields_); } unknownFields.writeTo(output); @@ -22325,22 +22325,22 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getLeftDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, leftDatasetId_); } - if (!getLeftFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, leftFormatLabel_); } - if (!getRightDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, rightDatasetId_); } - if (!getRightFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, rightFormatLabel_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, hostname_); } - if (!getIgnoredFieldsBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ignoredFields_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, ignoredFields_); } size += unknownFields.getSerializedSize(); @@ -24259,13 +24259,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) for (int i = 0; i < to_.size(); i++) { output.writeMessage(250, to_.get(i)); } - if (!getNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(namespace_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 251, namespace_); } - if (!getStartDateBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(startDate_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 253, startDate_); } - if (!getStopDateBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stopDate_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 254, stopDate_); } if (ignoreCn_ != false) { @@ -24324,13 +24324,13 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream 
.computeMessageSize(250, to_.get(i)); } - if (!getNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(namespace_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(251, namespace_); } - if (!getStartDateBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(startDate_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(253, startDate_); } - if (!getStopDateBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stopDate_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(254, stopDate_); } if (ignoreCn_ != false) { @@ -27092,10 +27092,10 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getOwnerBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, owner_); } - if (!getNameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_); } if (partitioning_ != com.acme.glup.MetadataProto.PartitionScheme.UNSUPPORTED_PARTITION_SCHEME.getNumber()) { @@ -27122,10 +27122,10 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getOwnerBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, owner_); } - if (!getNameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_); } if (partitioning_ != com.acme.glup.MetadataProto.PartitionScheme.UNSUPPORTED_PARTITION_SCHEME.getNumber()) { @@ -32944,7 +32944,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (pendingDeletion_ != false) { output.writeBool(5, pendingDeletion_); } - if (!getAddedAtBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(addedAt_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, addedAt_); } unknownFields.writeTo(output); @@ -32984,7 +32984,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, pendingDeletion_); } - if (!getAddedAtBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(addedAt_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, addedAt_); } size += unknownFields.getSerializedSize(); @@ -33962,7 +33962,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getNameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (skip_ != false) { @@ -33977,7 +33977,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getNameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (skip_ != false) { @@ -34670,7 +34670,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getNameBytes().isEmpty()) { + if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (useEnumFieldId_ != false) { @@ -34685,7 +34685,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getNameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (useEnumFieldId_ != false) { @@ -37356,10 +37356,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (consolidationEnabled_ != false) { output.writeBool(7, consolidationEnabled_); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 10, datasetId_); } - if (!getDatasetFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 11, datasetFormatLabel_); } for (int i = 0; i < controlMessage_.size(); i++) { @@ -37386,10 +37386,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBoolSize(7, consolidationEnabled_); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, datasetId_); } - if (!getDatasetFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(11, datasetFormatLabel_); } for (int i = 0; i < controlMessage_.size(); i++) { @@ -38773,10 +38773,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (dc_ != com.acme.glup.MetadataProto.DataCenter.UNSUPPORTED_DATACENTER.getNumber()) { output.writeEnum(2, dc_); } - if (!getLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, label_); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, datasetId_); } unknownFields.writeTo(output); @@ -38796,10 +38796,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, dc_); } - if (!getLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, label_); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, datasetId_); } size += unknownFields.getSerializedSize(); @@ -39849,13 +39849,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (ip4_ != 0) { output.writeFixed32(2, ip4_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, hostname_); } - if (!getContainerTaskBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerTask_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, containerTask_); } - if (!getContainerAppBytes().isEmpty()) { + if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerApp_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, containerApp_); } unknownFields.writeTo(output); @@ -39875,13 +39875,13 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeFixed32Size(2, ip4_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, hostname_); } - if (!getContainerTaskBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerTask_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, containerTask_); } - if (!getContainerAppBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerApp_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, containerApp_); } size += unknownFields.getSerializedSize(); @@ -41107,7 +41107,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getKafkaTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, kafkaTopic_); } if (datacenter_ != com.acme.glup.MetadataProto.DataCenter.UNSUPPORTED_DATACENTER.getNumber()) { @@ -41125,7 +41125,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getKafkaTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, kafkaTopic_); } if (datacenter_ != com.acme.glup.MetadataProto.DataCenter.UNSUPPORTED_DATACENTER.getNumber()) { @@ -42494,13 +42494,13 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getTypeBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, type_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, hostname_); } - if (!getKafkaTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, kafkaTopic_); } if (partition_ != 0) { @@ -42512,16 +42512,16 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!processUuid_.isEmpty()) { output.writeBytes(6, processUuid_); } - if (!getRegionBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 7, region_); } if (timestampSeconds_ != 0) { output.writeInt32(8, timestampSeconds_); } - if (!getClusterBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cluster_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 9, cluster_); } - if (!getEnvironmentBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 10, environment_); } com.google.protobuf.GeneratedMessageV3 @@ -42539,13 +42539,13 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if 
(!getTypeBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, type_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, hostname_); } - if (!getKafkaTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, kafkaTopic_); } if (partition_ != 0) { @@ -42560,17 +42560,17 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBytesSize(6, processUuid_); } - if (!getRegionBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, region_); } if (timestampSeconds_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(8, timestampSeconds_); } - if (!getClusterBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cluster_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, cluster_); } - if (!getEnvironmentBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, environment_); } for (java.util.Map.Entry entry diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java index 212e9aaec47..d712b61efa1 100644 --- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java +++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java @@ -236,8 +236,8 @@ private EnumUnion( break; } case 16: { - someValCase_ = 2; someVal_ = input.readInt32(); + someValCase_ = 2; break; } case 24: { diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java index acc1da0958b..c90ad498263 100644 --- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java +++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java @@ -223,8 +223,8 @@ private UserId( break; } case 16: { - userIdCase_ = 2; userId_ = input.readInt32(); + userIdCase_ = 2; break; } case 26: { @@ -1293,7 +1293,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_); } unknownFields.writeTo(output); @@ -1305,7 +1305,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_); } size += unknownFields.getSerializedSize(); @@ -1801,8 +1801,8 @@ private ComplexType( break; } case 16: { - someValCase_ = 2; someVal_ = input.readInt32(); + someValCase_ = 2; break; } case 24: { @@ -3229,7 
+3229,7 @@ public final boolean isInitialized() { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_); } if (getIdsList().size() > 0) { @@ -3248,7 +3248,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_); } { @@ -3946,7 +3946,7 @@ public int getMapTypeCount() { @java.lang.Override public boolean containsMapType( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } return internalGetMapType().getMap().containsKey(key); } /** @@ -3973,7 +3973,7 @@ public java.util.Map getMapTypeMap() { public java.lang.String getMapTypeOrDefault( java.lang.String key, java.lang.String defaultValue) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetMapType().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; @@ -3985,7 +3985,7 @@ public java.lang.String getMapTypeOrDefault( public java.lang.String getMapTypeOrThrow( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetMapType().getMap(); if (!map.containsKey(key)) { @@ -5133,7 +5133,7 @@ public int getMapTypeCount() { @java.lang.Override public boolean containsMapType( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } return internalGetMapType().getMap().containsKey(key); } /** @@ -5160,7 +5160,7 @@ public java.util.Map getMapTypeMap() { public java.lang.String getMapTypeOrDefault( java.lang.String key, java.lang.String defaultValue) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetMapType().getMap(); return map.containsKey(key) ? 
map.get(key) : defaultValue; @@ -5172,7 +5172,7 @@ public java.lang.String getMapTypeOrDefault( public java.lang.String getMapTypeOrThrow( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetMapType().getMap(); if (!map.containsKey(key)) { @@ -5192,7 +5192,7 @@ public Builder clearMapType() { public Builder removeMapType( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } internalGetMutableMapType().getMutableMap() .remove(key); return this; @@ -5211,8 +5211,11 @@ public Builder removeMapType( public Builder putMapType( java.lang.String key, java.lang.String value) { - if (key == null) { throw new java.lang.NullPointerException(); } - if (value == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } + if (value == null) { + throw new NullPointerException("map value"); +} + internalGetMutableMapType().getMutableMap() .put(key, value); return this; diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java index 1bd7557e8ed..2cfce29e32d 100644 --- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java +++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java @@ -238,7 +238,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, testString_); } if (((bitField0_ & 0x00000001) != 0)) { @@ -253,7 +253,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, testString_); } if (((bitField0_ & 0x00000001) != 0)) { diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java index ae7bffb5f76..2432b6a2d5e 100644 --- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java +++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java @@ -475,7 +475,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, testString_); } if (testBool_ != false) { @@ -484,10 +484,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!testBytes_.isEmpty()) { output.writeBytes(3, testBytes_); } - if (testDouble_ != 0D) { + if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) { output.writeDouble(4, 
testDouble_); } - if (testFloat_ != 0F) { + if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) { output.writeFloat(5, testFloat_); } if (testFixed32_ != 0) { @@ -529,7 +529,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, testString_); } if (testBool_ != false) { @@ -540,11 +540,11 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, testBytes_); } - if (testDouble_ != 0D) { + if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) { size += com.google.protobuf.CodedOutputStream .computeDoubleSize(4, testDouble_); } - if (testFloat_ != 0F) { + if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(5, testFloat_); } @@ -2091,7 +2091,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, testString_); } if (testBool_ != false) { @@ -2100,10 +2100,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!testBytes_.isEmpty()) { output.writeBytes(3, testBytes_); } - if (testDouble_ != 0D) { + if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) { output.writeDouble(4, testDouble_); } - if (testFloat_ != 0F) { + if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) { output.writeFloat(5, testFloat_); } if (testFixed32_ != 0) { @@ -2148,7 +2148,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, testString_); } if (testBool_ != false) { @@ -2159,11 +2159,11 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, testBytes_); } - if (testDouble_ != 0D) { + if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) { size += com.google.protobuf.CodedOutputStream .computeDoubleSize(4, testDouble_); } - if (testFloat_ != 0F) { + if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(5, testFloat_); } diff --git a/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java b/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java index 5fbc82985b0..01fff765271 100644 --- a/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java +++ b/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java @@ -250,7 +250,7 @@ public int getParamsCount() { @java.lang.Override public boolean containsParams( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } return internalGetParams().getMap().containsKey(key); } /** @@ -277,7 +277,7 @@ public java.util.Map getParamsMap() { public java.lang.String getParamsOrDefault( java.lang.String key, java.lang.String defaultValue) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = 
internalGetParams().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; @@ -289,7 +289,7 @@ public java.lang.String getParamsOrDefault( public java.lang.String getParamsOrThrow( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetParams().getMap(); if (!map.containsKey(key)) { @@ -312,7 +312,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getDocBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(doc_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, doc_); } com.google.protobuf.GeneratedMessageV3 @@ -330,7 +330,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getDocBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(doc_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, doc_); } for (java.util.Map.Entry entry @@ -760,7 +760,7 @@ public int getParamsCount() { @java.lang.Override public boolean containsParams( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } return internalGetParams().getMap().containsKey(key); } /** @@ -787,7 +787,7 @@ public java.util.Map getParamsMap() { public java.lang.String getParamsOrDefault( java.lang.String key, java.lang.String defaultValue) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetParams().getMap(); return map.containsKey(key) ? 
map.get(key) : defaultValue; @@ -799,7 +799,7 @@ public java.lang.String getParamsOrDefault( public java.lang.String getParamsOrThrow( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetParams().getMap(); if (!map.containsKey(key)) { @@ -819,7 +819,7 @@ public Builder clearParams() { public Builder removeParams( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } internalGetMutableParams().getMutableMap() .remove(key); return this; @@ -838,8 +838,11 @@ public Builder removeParams( public Builder putParams( java.lang.String key, java.lang.String value) { - if (key == null) { throw new java.lang.NullPointerException(); } - if (value == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } + if (value == null) { + throw new NullPointerException("map value"); +} + internalGetMutableParams().getMutableMap() .put(key, value); return this; From a0ddda64237370f939f5528c4ee2b3979eb9fff8 Mon Sep 17 00:00:00 2001 From: Xiaoya Li Date: Wed, 23 Feb 2022 10:00:41 -0800 Subject: [PATCH 25/73] DGS-3087: Accommodate upstream change in AbstractCoordinator (#2188) * rename method, add skipAssignment * add back old method --- .../kafka/SchemaRegistryCoordinator.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java b/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java index 6a484fafd7f..b4d0b02b86d 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java @@ -149,11 +149,20 @@ protected void onJoinComplete( listener.onAssigned(assignmentSnapshot, generation); } - @Override + // todo: to be removed protected Map performAssignment( String kafkaLeaderId, // Kafka group "leader" who does assignment, *not* the SR leader String protocol, List allMemberMetadata + ) { + return onLeaderElected(kafkaLeaderId, protocol, allMemberMetadata, false); + } + + protected Map onLeaderElected( + String kafkaLeaderId, // Kafka group "leader" who does assignment, *not* the SR leader + String protocol, + List allMemberMetadata, + boolean skipAssignment ) { log.debug("Performing assignment"); From d8747f1f0c29e856c3af0e37aa20798ec3f8c937 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Wed, 23 Feb 2022 12:04:22 -0800 Subject: [PATCH 26/73] MINOR add JSON Schema test for anyOf with object or array (#2189) --- .../connect/json/JsonSchemaDataTest.java | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java b/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java index 3be7c591626..111315c2422 100644 --- a/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java +++ b/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java @@ -1820,6 +1820,47 @@ public void testOptionalReferencedSchema() { assertTrue(connectSchema.field("complexNode").schema().isOptional()); } + @Test + public void 
testOptionalObjectOrArray() { + // From https://stackoverflow.com/questions/36413015/json-schema-which-allows-either-an-object-or-an-array-of-those-objects + String schema = "{\n" + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"assetMetadata\": {\n" + + " \"anyOf\": [\n" + + " { \"$ref\": \"#/definitions/assetMetadata\" },\n" + + " {\n" + + " \"type\": \"array\",\n" + + " \"items\": { \"$ref\": \"#/definitions/assetMetadata\" }\n" + + " },\n" + + " {\n" + + " \"type\": \"null\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " },\n" + + " \"definitions\": {\n" + + " \"assetMetadata\": {\n" + + " \"type\": \"object\",\n" + + " \"additionalProperties\": false,\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"type\": {\n" + + " \"type\": \"string\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + JsonSchema jsonSchema = new JsonSchema(schema); + JsonSchemaData jsonSchemaData = new JsonSchemaData(); + Schema connectSchema = jsonSchemaData.toConnectSchema(jsonSchema); + assertTrue(connectSchema.field("assetMetadata").schema().isOptional()); + } + @Test public void testToConnectRecursiveSchema() { JsonSchema jsonSchema = getRecursiveJsonSchema(); From 21fed5a79e3cb951b1f07cf0ac100e130931f006 Mon Sep 17 00:00:00 2001 From: pagrawal10 <98726675+pagrawal10@users.noreply.github.com> Date: Fri, 25 Feb 2022 14:53:16 +0530 Subject: [PATCH 27/73] Downloading specific version of schemas capability added (#2178) * Downloading specific version of schemas capability added * Made download goal backward compatible * Added versions' size check and unit tests * Added space before versionsToDownload --- .../maven/DownloadSchemaRegistryMojo.java | 111 +++++++++++------- .../maven/DownloadSchemaRegistryMojoTest.java | 12 +- 2 files changed, 77 insertions(+), 46 deletions(-) diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java index 4559f10d9e6..471ad183795 100644 --- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 Confluent Inc. + * Copyright 2022 Confluent Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -34,11 +34,9 @@ import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; -import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -51,33 +49,52 @@ public class DownloadSchemaRegistryMojo extends SchemaRegistryMojo { @Parameter(required = true) List subjectPatterns = new ArrayList<>(); + @Parameter(required = false) + List versions = new ArrayList<>(); + @Parameter(required = true) File outputDirectory; - Map downloadSchemas(Collection subjects) + Map downloadSchemas(List subjects, List versionsToDownload) throws MojoExecutionException { Map results = new LinkedHashMap<>(); - for (String subject : subjects) { + if (versionsToDownload.size() != subjects.size()) { + throw new MojoExecutionException("Number of versions specified should " + + "be same as number of subjects"); + } + for (int i = 0; i < subjects.size(); i++) { SchemaMetadata schemaMetadata; try { - getLog().info(String.format("Downloading latest metadata for %s.", subject)); - schemaMetadata = this.client().getLatestSchemaMetadata(subject); + getLog().info(String.format("Downloading metadata " + + "for %s for version %s", subjects.get(i), versionsToDownload.get(i))); + schemaMetadata = this.client().getLatestSchemaMetadata(subjects.get(i)); + if (!versionsToDownload.get(i).equalsIgnoreCase("latest")) { + Integer maxVersion = schemaMetadata.getVersion(); + if (maxVersion < Integer.parseInt(versionsToDownload.get(i))) { + throw new MojoExecutionException( + String.format("Max possible version " + + "for %s is %d", subjects.get(i), maxVersion)); + } else { + schemaMetadata = this.client().getSchemaMetadata(subjects.get(i), + Integer.parseInt(versionsToDownload.get(i))); + } + } Optional schema = this.client().parseSchema( schemaMetadata.getSchemaType(), schemaMetadata.getSchema(), schemaMetadata.getReferences()); if (schema.isPresent()) { - results.put(subject, schema.get()); + results.put(subjects.get(i), schema.get()); } else { throw new MojoExecutionException( - String.format("Error while parsing schema for %s", subject) + String.format("Error while parsing schema for %s", subjects.get(i)) ); } } catch (Exception ex) { throw new MojoExecutionException( - String.format("Exception thrown while downloading metadata for %s.", subject), + String.format("Exception thrown while downloading metadata for %s.", subjects.get(i)), ex ); } @@ -92,26 +109,7 @@ public void execute() throws MojoExecutionException, MojoFailureException { getLog().info("Plugin execution has been skipped"); return; } - - try { - getLog().debug( - String.format("Checking if '%s' exists and is not a directory.", this.outputDirectory)); - if (outputDirectory.exists() && !outputDirectory.isDirectory()) { - throw new IllegalStateException("outputDirectory must be a directory"); - } - getLog() - .debug(String.format("Checking if outputDirectory('%s') exists.", this.outputDirectory)); - if (!outputDirectory.isDirectory()) { - getLog().debug(String.format("Creating outputDirectory('%s').", this.outputDirectory)); - if (!outputDirectory.mkdirs()) { - throw new IllegalStateException( - "Could not create output directory " + this.outputDirectory); - } - } - } catch (Exception ex) { - throw new MojoExecutionException("Exception thrown while creating outputDirectory", ex); - } - + outputDirValidation(); List patterns = new ArrayList<>(); for (String subject : subjectPatterns) { @@ -126,7 +124,6 @@ public
void execute() throws MojoExecutionException, MojoFailureException { ); } } - Collection allSubjects; try { getLog().info("Getting all subjects on schema registry..."); @@ -134,31 +131,42 @@ public void execute() throws MojoExecutionException, MojoFailureException { } catch (Exception ex) { throw new MojoExecutionException("Exception thrown", ex); } - getLog().info(String.format("Schema Registry has %s subject(s).", allSubjects.size())); - Set subjectsToDownload = new LinkedHashSet<>(); + List subjectsToDownload = new ArrayList<>(); + List versionsToDownload = new ArrayList<>(); + if (!versions.isEmpty()) { + if (versions.size() != subjectPatterns.size()) { + throw new IllegalStateException("versions size should be same as subjectPatterns size"); + } + } for (String subject : allSubjects) { - for (Pattern pattern : patterns) { + for (int i = 0 ; i < patterns.size() ; i++) { getLog() - .debug(String.format("Checking '%s' against pattern '%s'", subject, pattern.pattern())); - Matcher matcher = pattern.matcher(subject); + .debug(String.format("Checking '%s' against pattern '%s'", + subject, patterns.get(i).pattern())); + Matcher matcher = patterns.get(i).matcher(subject); if (matcher.matches()) { - getLog().debug(String.format("'%s' matches pattern '%s' so downloading.", subject, - pattern.pattern())); + getLog().debug(String.format("'%s' matches " + + "pattern '%s' so downloading.", subject, + patterns.get(i).pattern())); + if (versions.isEmpty()) { + versionsToDownload.add("latest"); + } else { + versionsToDownload.add(versions.get(i)); + } subjectsToDownload.add(subject); break; } } } - - Map subjectToSchema = downloadSchemas(subjectsToDownload); + Map subjectToSchema = + downloadSchemas(subjectsToDownload, versionsToDownload); for (Map.Entry kvp : subjectToSchema.entrySet()) { String fileName = String.format("%s%s", kvp.getKey(), getExtension(kvp.getValue())); File outputFile = new File(this.outputDirectory, fileName); - getLog().info( String.format("Writing schema for Subject(%s) to %s.", kvp.getKey(), outputFile) ); @@ -177,6 +185,27 @@ public void execute() throws MojoExecutionException, MojoFailureException { } } + public void outputDirValidation() throws MojoExecutionException, MojoFailureException { + try { + getLog().debug( + String.format("Checking if '%s' exists and is not a directory.", this.outputDirectory)); + if (outputDirectory.exists() && !outputDirectory.isDirectory()) { + throw new IllegalStateException("outputDirectory must be a directory"); + } + getLog() + .debug(String.format("Checking if outputDirectory('%s') exists.", this.outputDirectory)); + if (!outputDirectory.isDirectory()) { + getLog().debug(String.format("Creating outputDirectory('%s').", this.outputDirectory)); + if (!outputDirectory.mkdirs()) { + throw new IllegalStateException( + "Could not create output directory " + this.outputDirectory); + } + } + } catch (Exception ex) { + throw new MojoExecutionException("Exception thrown while creating outputDirectory", ex); + } + } + private String getExtension(ParsedSchema parsedSchema) { if (this.schemaExtension != null) { return schemaExtension; diff --git a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java index 7952757c7b2..49e4a1aa704 100644 --- a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java +++ 
b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java @@ -22,11 +22,14 @@ import org.junit.Before; import org.junit.Test; +import javax.security.auth.Subject; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; +import java.util.regex.Matcher; public class DownloadSchemaRegistryMojoTest extends SchemaRegistryTest { DownloadSchemaRegistryMojo mojo; @@ -55,12 +58,11 @@ public void specificSubjects() throws IOException, RestClientException { File valueSchemaFile = new File(this.tempDirectory, valueSubject + ".avsc"); if (i % 10 == 0) { - String subjectPattern = String.format("^TestSubject%03d-(Key|Value)$", i); - files.add(keySchemaFile); - files.add(valueSchemaFile); - this.mojo.subjectPatterns.add(subjectPattern); + this.mojo.client().getSchemaMetadata(keySubject,1); + this.mojo.client().getLatestSchemaMetadata(keySubject); + this.mojo.client().getSchemaMetadata(valueSubject,1); + this.mojo.client().getLatestSchemaMetadata(valueSubject); } } } - } From cbf58db38c83dbdee3f5daaf7435695078498d9c Mon Sep 17 00:00:00 2001 From: jshahc <98798896+jshahc@users.noreply.github.com> Date: Fri, 25 Feb 2022 15:56:11 +0530 Subject: [PATCH 28/73] Adding new goal test-local-compatibility for schema registry maven plugin (#2177) * Adding new goal test-local-compatibility for schema registry maven plugin * New utils class and compatibility set to enum * Name change SchemaUtils to MojoUtils --- .../kafka/schemaregistry/maven/MojoUtils.java | 43 ++ .../maven/SchemaRegistryMojo.java | 11 +- .../maven/TestLocalCompatibilityMojo.java | 135 ++++++ .../maven/UploadSchemaRegistryMojo.java | 8 +- .../maven/TestLocalCompatibilityMojoTest.java | 394 ++++++++++++++++++ 5 files changed, 574 insertions(+), 17 deletions(-) create mode 100644 maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/MojoUtils.java create mode 100644 maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java create mode 100644 maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/MojoUtils.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/MojoUtils.java new file mode 100644 index 00000000000..c075de3efb0 --- /dev/null +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/MojoUtils.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Confluent Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
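Note on the download goal above (PATCH 27): each entry of the new versions parameter is paired by index with the subjectPatterns entry at the same position, and an empty versions list keeps the old latest-only behavior. A minimal, hypothetical Java sketch in the field-injection style of the mojo tests (the subject patterns and output path are invented here, and it assumes a configured registry client such as a normal plugin run provides):

    import java.io.File;
    import org.apache.maven.plugin.MojoExecutionException;
    import org.apache.maven.plugin.MojoFailureException;

    // Same package as the mojo, as in DownloadSchemaRegistryMojoTest.
    void downloadPinnedVersions() throws MojoExecutionException, MojoFailureException {
      DownloadSchemaRegistryMojo mojo = new DownloadSchemaRegistryMojo();
      mojo.subjectPatterns.add("^TestSubject001-(Key|Value)$"); // hypothetical pattern
      mojo.versions.add("1");                                   // pin matches of this pattern to version 1
      mojo.subjectPatterns.add("^TestSubject002-Value$");
      mojo.versions.add("latest");                              // "latest" keeps the old behavior
      mojo.outputDirectory = new File("target/schemas");        // hypothetical path
      mojo.execute(); // writes one schema file per matched subject
    }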
+ */ + +package io.confluent.kafka.schemaregistry.maven; + +import io.confluent.kafka.schemaregistry.SchemaProvider; +import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider; +import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider; +import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider; +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.util.Arrays; +import java.util.List; + +public class MojoUtils { + + public static String readFile(File file, Charset encoding) throws IOException { + byte[] encoded = Files.readAllBytes(file.toPath()); + return new String(encoded, encoding); + } + + public static List defaultSchemaProviders() { + return Arrays.asList( + new AvroSchemaProvider(), new JsonSchemaProvider(), new ProtobufSchemaProvider() + ); + } + +} diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java index 778011e79c9..a068501e8a9 100644 --- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java @@ -21,19 +21,15 @@ import org.apache.maven.plugins.annotations.Parameter; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import io.confluent.kafka.schemaregistry.SchemaProvider; -import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider; import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClientConfig; -import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider; -import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider; public abstract class SchemaRegistryMojo extends AbstractMojo { @@ -75,7 +71,7 @@ protected SchemaRegistryClient client() { } List providers = schemaProviders != null && !schemaProviders.isEmpty() ? schemaProviders() - : defaultSchemaProviders(); + : MojoUtils.defaultSchemaProviders(); this.client = new CachedSchemaRegistryClient( this.schemaRegistryUrls, 1000, @@ -97,9 +93,4 @@ private List schemaProviders() { }).collect(Collectors.toList()); } - private List defaultSchemaProviders() { - return Arrays.asList( - new AvroSchemaProvider(), new JsonSchemaProvider(), new ProtobufSchemaProvider() - ); - } } diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java new file mode 100644 index 00000000000..04695a34675 --- /dev/null +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java @@ -0,0 +1,135 @@ +/* + * Copyright 2022 Confluent Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.confluent.kafka.schemaregistry.maven; + +import io.confluent.kafka.schemaregistry.CompatibilityChecker; +import io.confluent.kafka.schemaregistry.CompatibilityLevel; +import io.confluent.kafka.schemaregistry.ParsedSchema; +import io.confluent.kafka.schemaregistry.SchemaProvider; +import io.confluent.kafka.schemaregistry.avro.AvroSchema; +import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import org.apache.maven.plugin.AbstractMojo; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; + +@Mojo(name = "test-local-compatibility", configurator = "custom-basic") +public class TestLocalCompatibilityMojo extends AbstractMojo { + + @Parameter(required = true) + File schemaPath; + + @Parameter(required = true) + ArrayList previousSchemaPaths; + + @Parameter(defaultValue = "BACKWARD") + CompatibilityLevel compatibilityLevel; + + @Parameter(defaultValue = AvroSchema.TYPE) + String schemaType; + + boolean success = false; + + protected Optional parseSchema( + String schemaType, + String schemaString, + List references, + Map providers) throws MojoExecutionException { + + SchemaProvider schemaProvider = providers.get(schemaType.toUpperCase()); + if (schemaProvider == null) { + throw new MojoExecutionException( + String.format("Invalid schema type %s", schemaType)); + } + + return schemaProvider.parseSchema(schemaString, references); + + } + + protected ParsedSchema loadSchema(File path, Map schemaProviders) throws MojoExecutionException { + + String schemaString; + try { + schemaString = MojoUtils.readFile(path, StandardCharsets.UTF_8); + } catch (IOException e) { + throw new MojoExecutionException( + String.format("File cannot be found at: %s", path)); + } + List references = new ArrayList<>(); + Optional schema = parseSchema(schemaType, schemaString, + references, schemaProviders); + + if (schema.isPresent()) { + return schema.get(); + } + + throw new MojoExecutionException(String.format("Unable to parse schema from %s " + + "with schema type as %s", path, schemaType)); + } + + public void execute() throws MojoExecutionException { + + List providers = MojoUtils.defaultSchemaProviders(); + Map schemaProviders = providers.stream() + .collect(Collectors.toMap(SchemaProvider::schemaType, p -> p)); + + getLog().debug(String.format("Loading Schema at %s", schemaPath)); + ParsedSchema schema = loadSchema(schemaPath, schemaProviders); + + getLog().debug("Loading Previous Schemas"); + ArrayList previousSchemas = new ArrayList<>(); + for (File previousSchemaPath : previousSchemaPaths) { + previousSchemas.add(loadSchema(previousSchemaPath, schemaProviders)); + } + + CompatibilityChecker checker = CompatibilityChecker.checker(compatibilityLevel); + + List errorMessages = checker.isCompatible(schema, previousSchemas); + + if (previousSchemas.size() > 1 + && (compatibilityLevel == CompatibilityLevel.BACKWARD + || compatibilityLevel == CompatibilityLevel.FORWARD + || compatibilityLevel == CompatibilityLevel.FULL)) { + + getLog().info(String.format("Checking only with latest Schema at %s", + 
previousSchemaPaths.get(previousSchemaPaths.size() - 1))); + } + + success = errorMessages.isEmpty(); + + if (success) { + getLog().info(String.format("Schema is %s compatible with previous schemas", + compatibilityLevel.name.toLowerCase())); + } else { + String errorLog = String.format("Schema is not %s compatible with previous schemas %n", + compatibilityLevel.name.toLowerCase()) + errorMessages.get(0); + getLog().error(errorLog); + } + + } + +} diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java index fa310c6aba5..a00efe15308 100644 --- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java @@ -27,9 +27,7 @@ import java.io.File; import java.io.IOException; -import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -109,7 +107,7 @@ private void processSubject(String key, boolean isReference) { } return; } - String schemaString = readFile(file, StandardCharsets.UTF_8); + String schemaString = MojoUtils.readFile(file, StandardCharsets.UTF_8); Optional schema = client().parseSchema( schemaType, schemaString, schemaReferences); if (schema.isPresent()) { @@ -179,9 +177,5 @@ private List getReferences(String subject, Map return result; } - private static String readFile(File file, Charset encoding) throws IOException { - byte[] encoded = Files.readAllBytes(file.toPath()); - return new String(encoded, encoding); - } } diff --git a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java new file mode 100644 index 00000000000..a2214c7fb72 --- /dev/null +++ b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java @@ -0,0 +1,394 @@ +/* + * Copyright 2022 Confluent Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.confluent.kafka.schemaregistry.maven; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import io.confluent.kafka.schemaregistry.CompatibilityLevel; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import org.apache.maven.plugin.MojoExecutionException; +import org.junit.Before; +import org.junit.Test; + +/* + * The tests for avro are taken from AvroCompatibilityTest + */ +public class TestLocalCompatibilityMojoTest extends SchemaRegistryTest{ + TestLocalCompatibilityMojo mojo; + + final String schema1 = "schema1"; + final String schema2 = "schema2"; + final String schema3 = "schema3"; + final String schema4 = "schema4"; + final String schema6 = "schema6"; + final String schema7 = "schema7"; + final String schema8 = "schema8"; + final String schema10 = "schema10"; + final String schema11 = "schema11"; + final String schema12 = "schema12"; + final String schema13 = "schema13"; + final String schema14 = "schema14"; + + String fileExtension; + + @Before + public void createMojoAndFiles() { + this.mojo = new TestLocalCompatibilityMojo(); + makeFiles(); + } + + private void makeFile(String schemaString, String name){ + + try (FileWriter writer = new FileWriter(this.tempDirectory+"/"+name)) { + writer.write(schemaString); + } catch (IOException e) { + e.printStackTrace(); + } + + } + + private void makeFiles(){ + + String schemaString1 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"}]}"; + makeFile(schemaString1, "schema1.avsc"); + + String schemaString2 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"}," + + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"}]}"; + makeFile(schemaString2, "schema2.avsc"); + + String schemaString3 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"}," + + " {\"type\":\"string\",\"name\":\"f2\"}]}"; + makeFile(schemaString3, "schema3.avsc"); + + String schemaString4 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1_new\", \"aliases\": [\"f1\"]}]}"; + makeFile(schemaString4, "schema4.avsc"); + + String schemaString6 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":[\"null\", \"string\"],\"name\":\"f1\"," + + " \"doc\":\"doc of f1\"}]}"; + makeFile(schemaString6, "schema6.avsc"); + + String schemaString7 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":[\"null\", \"string\", \"int\"],\"name\":\"f1\"," + + " \"doc\":\"doc of f1\"}]}"; + makeFile(schemaString7, "schema7.avsc"); + + String schemaString8 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"}," + + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"}," + + " {\"type\":\"string\",\"name\":\"f3\", \"default\": \"bar\"}]}"; + makeFile(schemaString8, "schema8.avsc"); + + String badDefaultNullString = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":[\"null\", \"string\"],\"name\":\"f1\", \"default\": \"null\"}," + + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"}," + + " {\"type\":\"string\",\"name\":\"f3\", \"default\": \"bar\"}]}"; +
makeFile(badDefaultNullString, "schema9.avsc"); + + String schemaString10 = "{\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"foo\": { \"type\": \"string\" },\n" + + " \"bar\": { \"type\": \"string\" }\n" + + " }\n" + + "}"; + makeFile(schemaString10, "schema10.json"); + + String schemaString11 = "{\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"foo\": { \"type\": \"string\" },\n" + + " \"bar\": { \"type\": \"string\" }\n" + + " },\n" + + " \"additionalProperties\": false\n" + + "}"; + makeFile(schemaString11, "schema11.json"); + + String schemaString12 = "{\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"foo\": { \"type\": \"string\" },\n" + + " \"bar\": { \"type\": \"string\" }\n" + + " },\n" + + " \"additionalProperties\": { \"type\": \"string\" }\n" + + "}"; + + makeFile(schemaString12, "schema12.json"); + + String schemaString13 = "{\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"foo\": { \"type\": \"string\" },\n" + + " \"bar\": { \"type\": \"string\" },\n" + + " \"zap\": { \"type\": \"string\" }\n" + + " },\n" + + " \"additionalProperties\": { \"type\": \"string\" }\n" + + "}"; + + makeFile(schemaString13, "schema13.json"); + + String schemaString14 = "{\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"foo\": { \"type\": \"string\" },\n" + + " \"bar\": { \"type\": \"string\" },\n" + + " \"zap\": { \n" + + " \"oneOf\": [ { \"type\": \"string\" }, { \"type\": \"integer\" } ] \n" + + " }\n" + + " },\n" + + " \"additionalProperties\": { \"type\": \"string\" }\n" + + "}"; + + makeFile(schemaString14, "schema14.json"); + + } + + + private void setMojo(String schema, List previousSchemas){ + + this.mojo.schemaPath = new File(this.tempDirectory + "/" + schema + fileExtension); + this.mojo.previousSchemaPaths = new ArrayList<>(); + + for (String path : previousSchemas) { + this.mojo.previousSchemaPaths.add(new File(this.tempDirectory + "/" + path + fileExtension)); + } + this.mojo.success = false; + + } + + private boolean isCompatible(String schema, List previousSchemas) + throws MojoExecutionException { + + setMojo(schema, previousSchemas); + this.mojo.execute(); + return this.mojo.success; + + } + + + + /* + * Backward compatibility: A new schema is backward compatible if it can be used to read the data + * written in the previous schema. 
+ */ + @Test + public void testBasicBackwardsCompatibility() throws MojoExecutionException { + + this.mojo.compatibilityLevel = CompatibilityLevel.BACKWARD; + fileExtension = ".avsc"; + this.mojo.schemaType = "avro"; + + assertTrue("adding a field with default is a backward compatible change", + isCompatible(schema2, Collections.singletonList(schema1))); + assertFalse("adding a field w/o default is not a backward compatible change", + isCompatible(schema3, Collections.singletonList(schema1))); + assertTrue("changing field name with alias is a backward compatible change", + isCompatible(schema4, Collections.singletonList(schema1))); + assertTrue("evolving a field type to a union is a backward compatible change", + isCompatible(schema6, Collections.singletonList(schema1))); + assertFalse("removing a type from a union is not a backward compatible change", + isCompatible(schema1, Collections.singletonList(schema6))); + assertTrue("adding a new type in union is a backward compatible change", + isCompatible(schema7, Collections.singletonList(schema6))); + assertFalse("removing a type from a union is not a backward compatible change", + isCompatible(schema6, Collections.singletonList(schema7))); + + // Only schema 2 is checked + assertTrue("removing a default is not a transitively compatible change", + isCompatible(schema3, Arrays.asList(schema1, schema2))); + + fileExtension = ".json"; + this.mojo.schemaType = "json"; + assertTrue("setting additional properties to true from false is a backward compatible change", + isCompatible(schema10, Collections.singletonList(schema11))); + + assertTrue("adding property of string type (same as additional properties type) is " + + "a backward compatible change", isCompatible(schema13, + Collections.singletonList(schema12))); + + assertTrue("adding property of string or int type (string is additional properties type) is " + + "a backward compatible change", isCompatible(schema14, + Collections.singletonList(schema12))); + + } + @Test + public void testBasicBackwardsTransitiveCompatibility() throws MojoExecutionException { + + this.mojo.compatibilityLevel = CompatibilityLevel.BACKWARD_TRANSITIVE; + fileExtension = ".avsc"; + this.mojo.schemaType = "avro"; + + // All compatible + assertTrue("iteratively adding fields with defaults is a compatible change", + isCompatible(schema8, Arrays.asList(schema1, schema2))); + +// 1 == 2, 2 == 3, 3 != 1 + assertTrue("adding a field with default is a backward compatible change", + isCompatible(schema2, Collections.singletonList(schema1))); + assertTrue("removing a default is a compatible change, but not transitively", + isCompatible(schema3, Collections.singletonList(schema2))); + assertFalse("removing a default is not a transitively compatible change", + isCompatible(schema3, Arrays.asList(schema2, schema1))); + } + + /* + * Forward compatibility: A new schema is forward compatible if the previous schema can read data written in this + * schema. 
+ */ + @Test + public void testBasicForwardsCompatibility() throws MojoExecutionException { + + this.mojo.compatibilityLevel = CompatibilityLevel.FORWARD; + + fileExtension = ".avsc"; + this.mojo.schemaType = "avro"; + + assertTrue("adding a field is a forward compatible change", + isCompatible(schema2, Collections.singletonList(schema1))); + assertTrue("adding a field is a forward compatible change", + isCompatible(schema3, Collections.singletonList(schema1))); + assertTrue("adding a field is a forward compatible change", + isCompatible(schema3, Collections.singletonList(schema2))); + assertTrue("adding a field is a forward compatible change", + isCompatible(schema2, Collections.singletonList(schema3))); + + // Only schema 2 is checked + assertTrue("removing a default is not a transitively compatible change", + isCompatible(schema1, Arrays.asList(schema3, schema2))); + + fileExtension = ".json"; + this.mojo.schemaType = "json"; + + assertTrue("setting additional properties to false from true is a forward compatible change", + isCompatible(schema11, Collections.singletonList(schema10))); + + assertTrue("removing property of string type (same as additional properties type)" + + " is a backward compatible change", isCompatible(schema13, + Collections.singletonList(schema12))); + + assertTrue("removing property of string or int type (string is additional properties type) is " + + "a backward compatible change", isCompatible(schema12, + Collections.singletonList(schema14))); + + } + + /* + * Forward transitive compatibility: A new schema is forward compatible if all previous schemas can read data written + * in this schema. + */ + @Test + public void testBasicForwardsTransitiveCompatibility() throws MojoExecutionException { + + this.mojo.compatibilityLevel = CompatibilityLevel.FORWARD_TRANSITIVE; + fileExtension = ".avsc"; + this.mojo.schemaType = "avro"; + + // All compatible + assertTrue("iteratively removing fields with defaults is a compatible change", + isCompatible(schema1, Arrays.asList(schema8, schema2))); + + // 1 == 2, 2 == 3, 3 != 1 + assertTrue("adding default to a field is a compatible change", + isCompatible(schema2, Collections.singletonList(schema3))); + assertTrue("removing a field with a default is a compatible change", + isCompatible(schema1, Collections.singletonList(schema2))); + assertFalse("removing a default is not a transitively compatible change", + isCompatible(schema1, Arrays.asList(schema2, schema3))); + } + + /* + * Full compatibility: A new schema is fully compatible if it’s both backward and forward compatible. + */ + @Test + public void testBasicFullCompatibility() throws MojoExecutionException { + + this.mojo.compatibilityLevel = CompatibilityLevel.FULL; + fileExtension = ".avsc"; + this.mojo.schemaType = "avro"; + + assertTrue("adding a field with default is a backward and a forward compatible change", + isCompatible(schema2, Collections.singletonList(schema1))); + + // Only schema 2 is checked! + assertTrue("transitively adding a field without a default is not a compatible change", + isCompatible(schema3, Arrays.asList(schema1, schema2))); + // Only schema 2 is checked! + assertTrue("transitively removing a field without a default is not a compatible change", + isCompatible(schema1, Arrays.asList(schema3, schema2))); + + } + + /* + * Full transitive compatibility: A new schema is fully compatible if it’s both transitively backward + * and transitively forward compatible with the entire schema history. 
+ */ + @Test + public void testBasicFullTransitiveCompatibility() throws MojoExecutionException { + + this.mojo.compatibilityLevel = CompatibilityLevel.FULL_TRANSITIVE; + fileExtension = ".avsc"; + this.mojo.schemaType = "avro"; + + // Simple check + assertTrue("iteratively adding fields with defaults is a compatible change", + isCompatible(schema8, Arrays.asList(schema1, schema2))); + assertTrue("iteratively removing fields with defaults is a compatible change", + isCompatible(schema1, Arrays.asList(schema8, schema2))); + + assertTrue("adding default to a field is a compatible change", + isCompatible(schema2, Collections.singletonList(schema3))); + assertTrue("removing a field with a default is a compatible change", + isCompatible(schema1, Collections.singletonList(schema2))); + + assertTrue("adding a field with default is a compatible change", + isCompatible(schema2, Collections.singletonList(schema1))); + assertTrue("removing a default from a field compatible change", + isCompatible(schema3, Collections.singletonList(schema2))); + + assertFalse("transitively adding a field without a default is not a compatible change", + isCompatible(schema3, Arrays.asList(schema2, schema1))); + assertFalse("transitively removing a field without a default is not a compatible change", + isCompatible(schema1, Arrays.asList(schema2, schema3))); + } +} \ No newline at end of file From 7b7db681ccb5903c0c34cac1e5137901ea8fe079 Mon Sep 17 00:00:00 2001 From: pagrawal10 <98726675+pagrawal10@users.noreply.github.com> Date: Tue, 1 Mar 2022 09:37:23 +0530 Subject: [PATCH 29/73] Added goal set-compatibility (#2176) * Downloading specific version of schemas capability added * Modified option parameter to delete and removed get functionality * Reverted changes in DownloadSchemaRegistryMojo * Made subject parameter optional * Added global delete-compatibility * Added test for global compatibility --- .../maven/DownloadSchemaRegistryMojo.java | 2 +- .../maven/SetCompatibilityMojo.java | 94 +++++++++++++++++++ .../maven/SetCompatibilityMojoTest.java | 56 +++++++++++ 3 files changed, 151 insertions(+), 1 deletion(-) create mode 100644 maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java create mode 100644 maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java index 471ad183795..1ac9c3d8e67 100644 --- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java @@ -221,4 +221,4 @@ private String getExtension(ParsedSchema parsedSchema) { return ".txt"; } } -} +} \ No newline at end of file diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java new file mode 100644 index 00000000000..cfe9241b043 --- /dev/null +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java @@ -0,0 +1,94 @@ +/* + * Copyright 2022 Confluent Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
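The test-local-compatibility goal exercised above is a thin wrapper around this repository's CompatibilityChecker API, which the mojo calls via CompatibilityChecker.checker(compatibilityLevel). A minimal standalone sketch of the same check done directly; the two Avro schema literals are hypothetical:

    import io.confluent.kafka.schemaregistry.CompatibilityChecker;
    import io.confluent.kafka.schemaregistry.CompatibilityLevel;
    import io.confluent.kafka.schemaregistry.ParsedSchema;
    import io.confluent.kafka.schemaregistry.avro.AvroSchema;
    import java.util.Collections;
    import java.util.List;

    public class CompatibilitySketch {
      public static void main(String[] args) {
        ParsedSchema previous = new AvroSchema(
            "{\"type\":\"record\",\"name\":\"myrecord\",\"fields\":"
            + "[{\"type\":\"string\",\"name\":\"f1\"}]}");
        ParsedSchema current = new AvroSchema(
            "{\"type\":\"record\",\"name\":\"myrecord\",\"fields\":"
            + "[{\"type\":\"string\",\"name\":\"f1\"},"
            + "{\"type\":\"string\",\"name\":\"f2\",\"default\":\"foo\"}]}");
        CompatibilityChecker checker = CompatibilityChecker.checker(CompatibilityLevel.BACKWARD);
        // An empty error list means the new schema is compatible with the old one.
        List<String> errors = checker.isCompatible(current, Collections.singletonList(previous));
        System.out.println(errors.isEmpty() ? "compatible" : errors.get(0));
      }
    }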
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.confluent.kafka.schemaregistry.maven; + +import io.confluent.kafka.schemaregistry.CompatibilityLevel; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; + +import java.io.IOException; + +@Mojo(name = "set-compatibility", configurator = "custom-basic") +public class SetCompatibilityMojo extends SchemaRegistryMojo { + + @Parameter(required = false, defaultValue = "false") + boolean delete; + + @Parameter() + String subject; + + @Parameter(defaultValue = "BACKWARD") + CompatibilityLevel compatibility; + + public void execute() throws MojoExecutionException { + if (delete) { + deleteConfig(subject); + } else { + updateConfig(subject, compatibility); + } + } + + public void updateConfig(String subject, CompatibilityLevel compatibility) { + + try { + String updatedCompatibility = + this.client().updateCompatibility(subject, compatibility.toString()); + if (subject == null) { + getLog().info("Global Compatibility set to " + + updatedCompatibility); + } else { + getLog().info("Compatibility of " + subject + + " set to " + updatedCompatibility); + } + } catch (RestClientException | IOException e) { + getLog().error(e.getMessage()); + e.printStackTrace(); + } + + } + + public void deleteConfig(String subject) { + if (getLog().isDebugEnabled()) { + getLog().info("Deleting compatibility"); + } + try { + this.client().deleteCompatibility(subject); + if (subject == null) { + getLog().info("Deleted global compatibility"); + } else { + getLog().info(String.format("Deleted compatibility of %s", subject)); + } + + } catch (IOException | RestClientException e) { + e.printStackTrace(); + } + } + + public String getConfig(String subject) { + if (getLog().isDebugEnabled()) { + getLog().info(String.format("Getting compatibility of %s", subject)); + } + try { + return String.format(this.client().getCompatibility(subject)); + } catch (IOException | RestClientException e) { + e.printStackTrace(); + } + return ""; + } +} \ No newline at end of file diff --git a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java new file mode 100644 index 00000000000..272de8894ac --- /dev/null +++ b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java @@ -0,0 +1,56 @@ +package io.confluent.kafka.schemaregistry.maven; + +import io.confluent.kafka.schemaregistry.CompatibilityLevel; +import io.confluent.kafka.schemaregistry.avro.AvroSchema; +import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; +import org.apache.avro.Schema; +import org.apache.maven.plugin.MojoExecutionException; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; + +import static org.junit.Assert.assertThrows; + +public 
class SetCompatibilityMojoTest extends SchemaRegistryTest{ + SetCompatibilityMojo mojo; + + @Before + public void createMojoAndFiles() { + this.mojo = new SetCompatibilityMojo(); + this.mojo.client(new MockSchemaRegistryClient()); + } + + @Test + public void specificSubjects() throws IOException, RestClientException, MojoExecutionException { + String keySubject = String.format("TestSubject-key"); + Schema keySchema = Schema.create(Schema.Type.STRING); + + this.mojo.client().register(keySubject, new AvroSchema(keySchema)); + // Compatibility not set till now and hence should throw error + assertThrows("Checking that compatibility hasn't been set", + RestClientException.class, () -> this.mojo.client().getCompatibility(keySubject)); + + // Setting compatibility & checking if it matches + + this.mojo.subject = keySubject; + this.mojo.compatibility = CompatibilityLevel.BACKWARD; + this.mojo.execute(); + + assert(this.mojo.getConfig(keySubject).equals("BACKWARD")); + + //Updating to a different compatibility + this.mojo.compatibility = CompatibilityLevel.FULL; + this.mojo.execute(); + + assert(this.mojo.getConfig(keySubject).equals("FULL")); + + //Checking for Global Compatibility + this.mojo.subject = null; + this.mojo.compatibility = CompatibilityLevel.BACKWARD_TRANSITIVE; + this.mojo.execute(); + assert(this.mojo.getConfig(null).equals("BACKWARD_TRANSITIVE")); + + } +} From 2c9f6dacd6648a81b88d8291318cabf420213897 Mon Sep 17 00:00:00 2001 From: Xiaoya Li Date: Tue, 1 Mar 2022 09:48:30 -0800 Subject: [PATCH 30/73] Minor: Fix ClusterTestHarness due to upstream changes (#2192) * Fix ClusterTestHarness due to upstream changes * should not hardcode SecurityProtocol --- .../kafka/schemaregistry/ClusterTestHarness.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java b/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java index 821b82e7167..87d44d0fb16 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java @@ -130,17 +130,13 @@ public void setUp() throws Exception { servers.add(server); } - brokerList = - TestUtils.getBrokerListStrFromServers( - JavaConverters.asScalaBuffer(servers), - getSecurityProtocol() - ); + ListenerName listenerType = ListenerName.forSecurityProtocol(getSecurityProtocol()); + brokerList = TestUtils.bootstrapServers(JavaConverters.asScalaBuffer(servers), listenerType); // Initialize the rest app ourselves so we can ensure we don't pass any info about the Kafka // zookeeper. The format for this config includes the security protocol scheme in the URLs so // we can't use the pre-generated server list. 
String[] serverUrls = new String[servers.size()]; - ListenerName listenerType = ListenerName.forSecurityProtocol(getSecurityProtocol()); for(int i = 0; i < servers.size(); i++) { serverUrls[i] = getSecurityProtocol() + "://" + Utils.formatAddress( From 27d48dbc489b8d58f168512f5f079b1b17583966 Mon Sep 17 00:00:00 2001 From: Xiaoya Li Date: Tue, 1 Mar 2022 09:51:22 -0800 Subject: [PATCH 31/73] remove old method (#2196) --- .../leaderelector/kafka/SchemaRegistryCoordinator.java | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java b/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java index b4d0b02b86d..5642f74985b 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java @@ -149,15 +149,7 @@ protected void onJoinComplete( listener.onAssigned(assignmentSnapshot, generation); } - // todo: to be removed - protected Map performAssignment( - String kafkaLeaderId, // Kafka group "leader" who does assignment, *not* the SR leader - String protocol, - List allMemberMetadata - ) { - return onLeaderElected(kafkaLeaderId, protocol, allMemberMetadata, false); - } - + @Override protected Map onLeaderElected( String kafkaLeaderId, // Kafka group "leader" who does assignment, *not* the SR leader String protocol, From 7a8779876f4ff77ff65057de0851ea46d2034eef Mon Sep 17 00:00:00 2001 From: Prathamesh Date: Thu, 3 Mar 2022 22:09:06 +0530 Subject: [PATCH 32/73] Incorporated endpoint verification disable for schema registry client (#2194) * Incorporated endpoint verification disable for schema registry client * Update CachedSchemaRegistryClient.java Co-authored-by: Robert Yokota --- .../client/CachedSchemaRegistryClient.java | 21 +++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java index 27c91523507..1719793ebd7 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java @@ -20,6 +20,7 @@ import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import io.confluent.kafka.schemaregistry.utils.QualifiedSubject; +import org.apache.kafka.common.config.SslConfigs; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -52,6 +53,8 @@ import io.confluent.kafka.schemaregistry.client.security.SslFactory; import io.confluent.kafka.schemaregistry.utils.BoundedConcurrentHashMap; +import javax.net.ssl.HostnameVerifier; + /** * Thread-safe Schema Registry Client with client side caching. @@ -200,8 +203,8 @@ public CachedSchemaRegistryClient( .build(); this.providers = providers != null && !providers.isEmpty() - ? providers.stream().collect(Collectors.toMap(p -> p.schemaType(), p -> p)) - : Collections.singletonMap(AvroSchema.TYPE, new AvroSchemaProvider()); + ? 
providers.stream().collect(Collectors.toMap(SchemaProvider::schemaType, p -> p)) + : Collections.singletonMap(AvroSchema.TYPE, new AvroSchemaProvider()); Map schemaProviderConfigs = new HashMap<>(); schemaProviderConfigs.put(SchemaProvider.SCHEMA_VERSION_FETCHER_CONFIG, this); for (SchemaProvider provider : this.providers.values()) { @@ -228,6 +231,7 @@ public CachedSchemaRegistryClient( SslFactory sslFactory = new SslFactory(sslConfigs); if (sslFactory.sslContext() != null) { restService.setSslSocketFactory(sslFactory.sslContext().getSocketFactory()); + restService.setHostnameVerifier(getHostnameVerifier(sslConfigs)); } } } @@ -252,6 +256,19 @@ public Map getSchemaProviders() { return providers; } + private HostnameVerifier getHostnameVerifier(Map config) { + String sslEndpointIdentificationAlgo = + (String) config.get(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG); + + if (sslEndpointIdentificationAlgo == null + || sslEndpointIdentificationAlgo.equals("none") + || sslEndpointIdentificationAlgo.isEmpty()) { + return (hostname, session) -> true; + } + + return null; + } + private int registerAndGetId(String subject, ParsedSchema schema, boolean normalize) throws IOException, RestClientException { return restService.registerSchema(schema.canonicalString(), schema.schemaType(), From 6f86e4407972a27491829b42e70429812d251e79 Mon Sep 17 00:00:00 2001 From: Luke Young <91491244+lyoung-confluent@users.noreply.github.com> Date: Mon, 21 Mar 2022 10:01:25 -0700 Subject: [PATCH 33/73] Introduce Pull Request Reviewers (#2206) * go/codeowners: Generate CODEOWNERS [ci skip] * Update CODEOWNERS Co-authored-by: Robert Yokota --- .github/CODEOWNERS | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000000..d29b415709a --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,4 @@ +# See go/codeowners - automatically generated for confluentinc/schema-registry: +* @confluentinc/data-governance +* @confluentinc/devx +* @confluentinc/security From 0d86c4bbd62de614b27eaf02f0d97e187e5972cc Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Thu, 24 Mar 2022 13:57:38 -0700 Subject: [PATCH 34/73] Update CODEOWNERS --- .github/CODEOWNERS | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index d29b415709a..f9105c8113f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,4 +1,2 @@ # See go/codeowners - automatically generated for confluentinc/schema-registry: * @confluentinc/data-governance -* @confluentinc/devx -* @confluentinc/security From 2704a1f7bd9024e0fd9a34bdfa650261ce9a5659 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Thu, 24 Mar 2022 14:42:52 -0700 Subject: [PATCH 35/73] DGS-3329 Expose ObjectMapper to allow customizations (#2221) * DGS-3329 Expose ObjectMapper to allow customizations * Minor renaming --- .../serializers/json/AbstractKafkaJsonSchemaDeserializer.java | 4 ++++ .../serializers/json/AbstractKafkaJsonSchemaSerializer.java | 4 ++++ .../io/confluent/kafka/serializers/KafkaJsonDeserializer.java | 4 ++++ .../io/confluent/kafka/serializers/KafkaJsonSerializer.java | 4 ++++ 4 files changed, 16 insertions(+) diff --git a/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaDeserializer.java b/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaDeserializer.java index 68f34066416..630c5049661 100644 --- 
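Returning to the endpoint-verification change in PATCH 32: when ssl.endpoint.identification.algorithm is empty or "none", getHostnameVerifier() returns a permissive verifier and hostname checking on the client's HTTPS connections is skipped. A hypothetical construction sketch; the "schema.registry." key prefix is an assumption about the namespace this client strips before handing SSL settings to SslFactory, and should be checked against your version:

    import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
    import java.util.HashMap;
    import java.util.Map;

    static CachedSchemaRegistryClient newClient() {
      Map<String, Object> configs = new HashMap<>();
      configs.put("schema.registry.ssl.truststore.location", "/etc/ssl/truststore.jks"); // hypothetical
      configs.put("schema.registry.ssl.truststore.password", "changeit");                // hypothetical
      // Empty (or "none") now selects the permissive verifier from getHostnameVerifier().
      configs.put("schema.registry.ssl.endpoint.identification.algorithm", "");
      return new CachedSchemaRegistryClient("https://schema-registry.internal:8081", 1000, configs);
    }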
a/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaDeserializer.java +++ b/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaDeserializer.java @@ -75,6 +75,10 @@ protected KafkaJsonSchemaDeserializerConfig deserializerConfig(Properties props) return new KafkaJsonSchemaDeserializerConfig(props); } + public ObjectMapper objectMapper() { + return objectMapper; + } + /** * Deserializes the payload without including schema information for primitive types, maps, and * arrays. Just the resulting deserialized object is returned. diff --git a/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaSerializer.java b/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaSerializer.java index 0ab75c86c27..a02a631abc2 100644 --- a/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaSerializer.java +++ b/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaSerializer.java @@ -80,6 +80,10 @@ protected KafkaJsonSchemaSerializerConfig serializerConfig(Map props) } } + public ObjectMapper objectMapper() { + return objectMapper; + } + protected byte[] serializeImpl( String subject, T object, diff --git a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java index dc55e888589..795f48b7ef4 100644 --- a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java +++ b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java @@ -65,6 +65,10 @@ private void configure(KafkaJsonDeserializerConfig config, boolean isKey) { } } + public ObjectMapper objectMapper() { + return objectMapper; + } + @Override public T deserialize(String ignored, byte[] bytes) { if (bytes == null || bytes.length == 0) { diff --git a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java index 2fe5e4634a7..1ec76f55f27 100644 --- a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java +++ b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java @@ -54,6 +54,10 @@ protected void configure(KafkaJsonSerializerConfig config) { SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, !writeDatesAsIso8601); } + public ObjectMapper objectMapper() { + return objectMapper; + } + @Override public byte[] serialize(String topic, T data) { if (data == null) { From 593b75c45d6e3c08e22f789c41fc18853d0a4ab0 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Tue, 19 Apr 2022 11:11:42 -0700 Subject: [PATCH 36/73] MINOR: add enum-related methods to ProtobufSchema (#2245) --- .../protobuf/ProtobufSchema.java | 19 +++++++++++++++++++ .../protobuf/ProtobufSchemaTest.java | 9 +++++++++ 2 files changed, 28 insertions(+) diff --git a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java index a2f1c0bfce7..247a9ec42b3 100644 --- a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java +++ b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java @@ 
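The objectMapper() accessors added in PATCH 35 let callers customize Jackson behavior that was previously locked inside the (de)serializers. A small sketch, assuming jackson-datatype-jsr310 is on the classpath (JavaTimeModule is an external module, not part of this patch):

    import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
    import io.confluent.kafka.serializers.KafkaJsonSerializer;
    import java.util.Collections;

    static KafkaJsonSerializer<Object> newSerializer() {
      KafkaJsonSerializer<Object> serializer = new KafkaJsonSerializer<>();
      serializer.configure(Collections.emptyMap(), false); // isKey = false
      // Customize the now-exposed mapper before use, e.g. add java.time support.
      serializer.objectMapper().registerModule(new JavaTimeModule());
      return serializer;
    }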
-36,6 +36,7 @@ import com.google.protobuf.DescriptorProtos.ServiceDescriptorProto; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.Descriptor; +import com.google.protobuf.Descriptors.EnumDescriptor; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.Descriptors.FileDescriptor; import com.google.protobuf.DurationProto; @@ -378,6 +379,20 @@ public ProtobufSchema(Descriptor descriptor, List references) { this.descriptor = descriptor; } + public ProtobufSchema(EnumDescriptor enumDescriptor) { + this(enumDescriptor, Collections.emptyList()); + } + + public ProtobufSchema(EnumDescriptor enumDescriptor, List references) { + Map dependencies = new HashMap<>(); + this.schemaObj = toProtoFile(enumDescriptor.getFile(), dependencies); + this.version = null; + this.name = enumDescriptor.getFullName(); + this.references = Collections.unmodifiableList(references); + this.dependencies = Collections.unmodifiableMap(dependencies); + this.descriptor = null; + } + private ProtobufSchema( ProtoFileElement schemaObj, Integer version, @@ -956,6 +971,10 @@ public DynamicMessage.Builder newMessageBuilder(String name) { return toDynamicSchema().newMessageBuilder(name); } + public EnumDescriptor getEnumDescriptor(String enumTypeName) { + return toDynamicSchema().getEnumDescriptor(enumTypeName); + } + public Descriptors.EnumValueDescriptor getEnumValue(String enumTypeName, int enumNumber) { return toDynamicSchema().getEnumValue(enumTypeName, enumNumber); } diff --git a/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java b/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java index 17eb2f17c79..8e585670154 100644 --- a/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java +++ b/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java @@ -21,6 +21,7 @@ import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.google.protobuf.ByteString; import com.google.protobuf.Descriptors.Descriptor; +import com.google.protobuf.Descriptors.EnumDescriptor; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.DynamicMessage; import com.squareup.wire.schema.internal.parser.ProtoFileElement; @@ -1051,6 +1052,14 @@ public void testParseSchemaSuppressException() { assertFalse(parsedSchema.isPresent()); } + @Test + public void testEnumMethods() { + EnumDescriptor enumDescriptor = enumSchema.getEnumDescriptor("TestEnum.Suit"); + ProtobufSchema enumSchema2 = new ProtobufSchema(enumDescriptor); + EnumDescriptor enumDescriptor2 = enumSchema2.getEnumDescriptor("TestEnum.Suit"); + assertEquals(enumDescriptor.getFullName(), enumDescriptor2.getFullName()); + } + private static JsonNode jsonTree(String jsonData) { try { return objectMapper.readTree(jsonData); From a6858ebe76f2bce2c2a40662a58d7c1c13dfbbeb Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Tue, 19 Apr 2022 20:41:41 -0700 Subject: [PATCH 37/73] MINOR: fixes for Mock SR client (#2246) --- .../schemaregistry/client/MockSchemaRegistryClient.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java index a40c83cbf3a..aba9741a4b5 100644 --- 
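The enum additions in PATCH 36 allow building a ProtobufSchema directly from an EnumDescriptor and looking enum descriptors up by full name, as the new test does. A minimal sketch with a hypothetical proto definition:

    import com.google.protobuf.Descriptors.EnumDescriptor;
    import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;

    public class EnumSketch {
      public static void main(String[] args) {
        // Hypothetical schema text; any message with a nested enum works the same way.
        ProtobufSchema schema = new ProtobufSchema(
            "syntax = \"proto3\";\n"
            + "message TestEnum {\n"
            + "  enum Suit { SPADES = 0; HEARTS = 1; DIAMONDS = 2; CLUBS = 3; }\n"
            + "}\n");
        EnumDescriptor suit = schema.getEnumDescriptor("TestEnum.Suit");
        System.out.println(suit.getFullName() + " has " + suit.getValues().size() + " values");
        // A schema can now also be constructed from the enum descriptor itself.
        ProtobufSchema enumOnly = new ProtobufSchema(suit);
        System.out.println(enumOnly.name());
      }
    }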
a/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java @@ -637,15 +637,19 @@ public Collection getAllSubjects() throws IOException, RestClientExcepti public Collection getAllSubjectsByPrefix(String subjectPrefix) throws IOException, RestClientException { Stream validSubjects = getAllSubjects().stream() - .filter(subject -> subject.startsWith(subjectPrefix)); + .filter(subject -> subjectPrefix == null || subject.startsWith(subjectPrefix)); return validSubjects.collect(Collectors.toCollection(LinkedHashSet::new)); } @Override public synchronized void reset() { schemaCache.clear(); + schemaIdCache.clear(); idCache.clear(); versionCache.clear(); + compatibilityCache.clear(); + modes.clear(); + ids.clear(); } private static String toQualifiedContext(String subject) { From de5aef475a000446c290c768b827bf973d7efb57 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Wed, 27 Apr 2022 09:55:44 -0700 Subject: [PATCH 38/73] MINOR: upgrade json-schema to 1.14.1 (#2260) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 873a1af336b..89527182aa6 100644 --- a/pom.xml +++ b/pom.xml @@ -79,7 +79,7 @@ checkstyle/suppressions.xml 0.11.1 1.4.21 - 1.14.0 + 1.14.1 2.5.1 3.11.4 3.0.0 From b1632679c082afebd0300a3bc7e00945378b5899 Mon Sep 17 00:00:00 2001 From: pagrawal10 <98726675+pagrawal10@users.noreply.github.com> Date: Mon, 2 May 2022 14:37:46 +0530 Subject: [PATCH 39/73] cherry-picked commits (#2238) --- .../maven/SetCompatibilityMojo.java | 61 ++++++++++++------- .../maven/SetCompatibilityMojoTest.java | 9 +-- 2 files changed, 42 insertions(+), 28 deletions(-) diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java index cfe9241b043..4eb2cdbcd0b 100644 --- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java @@ -23,64 +23,78 @@ import org.apache.maven.plugins.annotations.Parameter; import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; @Mojo(name = "set-compatibility", configurator = "custom-basic") public class SetCompatibilityMojo extends SchemaRegistryMojo { - @Parameter(required = false, defaultValue = "false") - boolean delete; - - @Parameter() - String subject; - - @Parameter(defaultValue = "BACKWARD") - CompatibilityLevel compatibility; + @Parameter(required = true) + Map compatibilityLevels = new HashMap<>(); public void execute() throws MojoExecutionException { - if (delete) { - deleteConfig(subject); - } else { - updateConfig(subject, compatibility); + for (Map.Entry entry : compatibilityLevels.entrySet()) { + if (entry.getValue().equalsIgnoreCase("null")) { + deleteConfig(entry.getKey()); + } else { + updateConfig(entry.getKey(), CompatibilityLevel.valueOf(entry.getValue())); + } } } - public void updateConfig(String subject, CompatibilityLevel compatibility) { + public void updateConfig(String subject, CompatibilityLevel compatibility) + throws MojoExecutionException { try { - String updatedCompatibility = - this.client().updateCompatibility(subject, compatibility.toString()); - if (subject == null) { + String updatedCompatibility; + + if (subject.equalsIgnoreCase("null") || 
subject.equals("__GLOBAL")) { + updatedCompatibility = this.client().updateCompatibility(null, compatibility.toString()); getLog().info("Global Compatibility set to " + updatedCompatibility); } else { + Collection allSubjects = this.client().getAllSubjects(); + if (!allSubjects.contains(subject)) { + throw new MojoExecutionException( + "Subject not found" + ); + } + updatedCompatibility = this.client().updateCompatibility(subject, compatibility.toString()); getLog().info("Compatibility of " + subject + " set to " + updatedCompatibility); } } catch (RestClientException | IOException e) { - getLog().error(e.getMessage()); e.printStackTrace(); + throw new MojoExecutionException( + "Exception thrown while updating config", + e + ); } } - public void deleteConfig(String subject) { + public void deleteConfig(String subject) throws MojoExecutionException { if (getLog().isDebugEnabled()) { getLog().info("Deleting compatibility"); } try { this.client().deleteCompatibility(subject); - if (subject == null) { + if (subject.equalsIgnoreCase("null") || subject.equals("__GLOBAL")) { getLog().info("Deleted global compatibility"); } else { getLog().info(String.format("Deleted compatibility of %s", subject)); } } catch (IOException | RestClientException e) { - e.printStackTrace(); + throw new MojoExecutionException( + "Exception thrown while updating config", + e + ); } } - public String getConfig(String subject) { + public String getConfig(String subject) throws MojoExecutionException { if (getLog().isDebugEnabled()) { getLog().info(String.format("Getting compatibility of %s", subject)); } @@ -88,7 +102,10 @@ public String getConfig(String subject) { return String.format(this.client().getCompatibility(subject)); } catch (IOException | RestClientException e) { e.printStackTrace(); + throw new MojoExecutionException( + "Exception thrown while getting config", + e + ); } - return ""; } } \ No newline at end of file diff --git a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java index 272de8894ac..2e2907a4ced 100644 --- a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java +++ b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java @@ -33,22 +33,19 @@ public void specificSubjects() throws IOException, RestClientException, MojoExec RestClientException.class, () -> this.mojo.client().getCompatibility(keySubject)); // Setting compatibility & checking if it matches - - this.mojo.subject = keySubject; - this.mojo.compatibility = CompatibilityLevel.BACKWARD; + this.mojo.compatibilityLevels.put(keySubject,"BACKWARD"); this.mojo.execute(); assert(this.mojo.getConfig(keySubject).equals("BACKWARD")); //Updating to a different compatibility - this.mojo.compatibility = CompatibilityLevel.FULL; + this.mojo.compatibilityLevels.replace(keySubject, "BACKWARD", "FULL"); this.mojo.execute(); assert(this.mojo.getConfig(keySubject).equals("FULL")); //Checking for Global Compatibility - this.mojo.subject = null; - this.mojo.compatibility = CompatibilityLevel.BACKWARD_TRANSITIVE; + this.mojo.compatibilityLevels.put("__GLOBAL", "BACKWARD_TRANSITIVE"); this.mojo.execute(); assert(this.mojo.getConfig(null).equals("BACKWARD_TRANSITIVE")); From a77c862a30c9635083b4bfa10a88e66de7a73c13 Mon Sep 17 00:00:00 2001 From: jshahc <98798896+jshahc@users.noreply.github.com> Date: Mon, 2 May 2022 16:09:52 +0530 
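After this change the set-compatibility goal is driven by a single compatibilityLevels map rather than one subject/compatibility pair per execution: the key "__GLOBAL" (or "null") targets the registry-wide default, and the value "null" deletes a subject's override. A sketch in the style of the unit test above, assuming the mojo has been wired to a registry the way the test harness wires it (the subject names are invented):

```java
package io.confluent.kafka.schemaregistry.maven;

import org.apache.maven.plugin.MojoExecutionException;

public class SetCompatibilityLevelsSketch {

  static void applyLevels(SetCompatibilityMojo mojo) throws MojoExecutionException {
    // One execution can now reconfigure several subjects at once.
    mojo.compatibilityLevels.put("orders-value", "FULL");
    // "__GLOBAL" (or "null") is the sentinel key for the global default.
    mojo.compatibilityLevels.put("__GLOBAL", "BACKWARD_TRANSITIVE");
    // A literal "null" value deletes that subject's override instead.
    mojo.compatibilityLevels.put("payments-value", "null");

    // Unknown subjects and registry errors now surface as a
    // MojoExecutionException instead of being logged and swallowed.
    mojo.execute();
  }
}
```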
Subject: [PATCH 40/73] Adding folder support for Test Local Compatibility maven plugin (#2197) (#2240) * Adding new goal test-local-compatibility for schema registry maven plugin * New utils class and compatibility set to enum * Name change SchemaUtils to MojoUtils * Adding a folder of schemas support added * Adding a folder of schemas support added * Throwing error on failure * Change in input type * schemaTypes change * minor changes --- .../maven/TestLocalCompatibilityMojo.java | 111 +++++++--- .../maven/TestLocalCompatibilityMojoTest.java | 199 +++++++++--------- 2 files changed, 178 insertions(+), 132 deletions(-) diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java index 04695a34675..5c7d6692b71 100644 --- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java @@ -27,9 +27,11 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; @@ -40,18 +42,16 @@ public class TestLocalCompatibilityMojo extends AbstractMojo { @Parameter(required = true) - File schemaPath; + Map schemas = new HashMap<>(); - @Parameter(required = true) - ArrayList previousSchemaPaths; - - @Parameter(defaultValue = "BACKWARD") - CompatibilityLevel compatibilityLevel; + @Parameter(required = false) + Map schemaTypes = new HashMap<>(); - @Parameter(defaultValue = AvroSchema.TYPE) - String schemaType; + @Parameter(required = true) + Map previousSchemaPaths = new HashMap<>(); - boolean success = false; + @Parameter(required = true) + Map compatibilityLevels = new HashMap<>(); protected Optional parseSchema( String schemaType, @@ -69,8 +69,8 @@ protected Optional parseSchema( } - protected ParsedSchema loadSchema(File path, Map schemaProviders) throws MojoExecutionException { + protected ParsedSchema loadSchema(File path, String schemaType, + Map schemaProviders) throws MojoExecutionException { String schemaString; try { @@ -91,43 +91,96 @@ protected ParsedSchema loadSchema(File path, Map getFiles(File previousSchemaPath) { - List providers = MojoUtils.defaultSchemaProviders(); - Map schemaProviders = providers.stream() - .collect(Collectors.toMap(SchemaProvider::schemaType, p -> p)); + ArrayList previousSchemaFiles = new ArrayList<>(); - getLog().debug(String.format("Loading Schema at %s", schemaPath)); - ParsedSchema schema = loadSchema(schemaPath, schemaProviders); + getLog().debug(String.format("Loading File %s", previousSchemaPath)); + // Add all files inside a directory, inside directories are skipped + if (previousSchemaPath.isDirectory()) { - getLog().debug("Loading Previous Schemas"); - ArrayList previousSchemas = new ArrayList<>(); - for (File previousSchemaPath : previousSchemaPaths) { - previousSchemas.add(loadSchema(previousSchemaPath, schemaProviders)); + File[] fileList = previousSchemaPath.listFiles(); + if (fileList == null) { + return previousSchemaFiles; + } + + for (File f : fileList) { + if (!f.isDirectory()) { + previousSchemaFiles.add(f); + } + } + + } else { + 
previousSchemaFiles.add(previousSchemaPath); } - CompatibilityChecker checker = CompatibilityChecker.checker(compatibilityLevel); + return previousSchemaFiles; + } - List errorMessages = checker.isCompatible(schema, previousSchemas); - if (previousSchemas.size() > 1 + protected void testSchema(String key, Map schemaProviders) + throws MojoExecutionException { + + File schemaPath = schemas.get(key); + + if (!previousSchemaPaths.containsKey(key)) { + throw new MojoExecutionException(String.format("Previous schemas not found for %s", key)); + } + + File previousSchemaPath = previousSchemaPaths.get(key); + String schemaType = schemaTypes.getOrDefault(key, AvroSchema.TYPE); + + if (!compatibilityLevels.containsKey(key)) { + throw new MojoExecutionException(String.format("Compatibility Level not found for %s", key)); + } + + CompatibilityLevel compatibilityLevel = compatibilityLevels.get(key); + + ArrayList previousSchemaFiles = getFiles(previousSchemaPath); + + if (previousSchemaFiles.size() > 1 && (compatibilityLevel == CompatibilityLevel.BACKWARD || compatibilityLevel == CompatibilityLevel.FORWARD || compatibilityLevel == CompatibilityLevel.FULL)) { - getLog().info(String.format("Checking only with latest Schema at %s", - previousSchemaPaths.get(previousSchemaPaths.size() - 1))); + throw new MojoExecutionException(String.format("Provide exactly one file for %s check " + + "for schema %s", compatibilityLevel.name.toLowerCase(), schemaPath)); + } - success = errorMessages.isEmpty(); + ParsedSchema schema = loadSchema(schemaPath, schemaType, schemaProviders); + ArrayList previousSchemas = new ArrayList<>(); + + for (File previousSchemaFile : previousSchemaFiles) { + previousSchemas.add(loadSchema(previousSchemaFile, schemaType, schemaProviders)); + } + + CompatibilityChecker checker = CompatibilityChecker.checker(compatibilityLevel); + List errorMessages = checker.isCompatible(schema, previousSchemas); + + boolean success = errorMessages.isEmpty(); if (success) { getLog().info(String.format("Schema is %s compatible with previous schemas", compatibilityLevel.name.toLowerCase())); } else { - String errorLog = String.format("Schema is not %s compatible with previous schemas %n", + String errorLog = String.format("Schema is not %s compatible with previous schemas. 
", compatibilityLevel.name.toLowerCase()) + errorMessages.get(0); - getLog().error(errorLog); + throw new MojoExecutionException(errorLog); + } + + } + + public void execute() throws MojoExecutionException { + + List providers = MojoUtils.defaultSchemaProviders(); + Map schemaProviders = providers.stream() + .collect(Collectors.toMap(SchemaProvider::schemaType, p -> p)); + + Set keys = schemas.keySet(); + + for (String key : keys) { + testSchema(key, schemaProviders); } } diff --git a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java index a2214c7fb72..09e72950425 100644 --- a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java +++ b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java @@ -16,17 +16,15 @@ package io.confluent.kafka.schemaregistry.maven; -import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import io.confluent.kafka.schemaregistry.CompatibilityLevel; import java.io.File; import java.io.FileWriter; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; -import java.util.List; +import java.util.HashMap; import org.apache.maven.plugin.MojoExecutionException; import org.junit.Before; import org.junit.Test; @@ -56,9 +54,18 @@ public class TestLocalCompatibilityMojoTest extends SchemaRegistryTest{ public void createMojoAndFiles() { this.mojo = new TestLocalCompatibilityMojo(); makeFiles(); + + for(int i=1;i<=9;i++) { + this.mojo.schemaTypes.put("schema"+i, "AVRO"); + } + + this.mojo.schemaTypes.put(schema10, "JSON"); + this.mojo.schemaTypes.put(schema13, "JSON"); + this.mojo.schemaTypes.put(schema14, "JSON"); + } - private void makeFile(String schemaString, String name){ + private void makeFile(String schemaString, String name) { try (FileWriter writer = new FileWriter(this.tempDirectory+"/"+name)) { writer.write(schemaString); @@ -66,10 +73,25 @@ private void makeFile(String schemaString, String name){ e.printStackTrace(); } + if (name.contains("1.avsc") || name.contains("2.avsc")) { + + try (FileWriter writer = new FileWriter(this.tempDirectory+"/schema12Folder/"+name)) { + writer.write(schemaString); + } catch (IOException e) { + e.printStackTrace(); + } + + } + } private void makeFiles(){ + File newFolder = new File(this.tempDirectory.toString() + "/schema12Folder"); + if( newFolder.mkdir()) { + System.out.println("New Folder avro created successfully."); + } + String schemaString1 = "{\"type\":\"record\"," + "\"name\":\"myrecord\"," + "\"fields\":" @@ -118,14 +140,6 @@ private void makeFiles(){ + " {\"type\":\"string\",\"name\":\"f3\", \"default\": \"bar\"}]}"; makeFile(schemaString8, "schema8.avsc"); - String badDefaultNullString = "{\"type\":\"record\"," - + "\"name\":\"myrecord\"," - + "\"fields\":" - + "[{\"type\":[\"null\", \"string\"],\"name\":\"f1\", \"default\": \"null\"}," - + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"}," - + " {\"type\":\"string\",\"name\":\"f3\", \"default\": \"bar\"}]}"; - makeFile(badDefaultNullString, "schema9.avsc"); - String schemaString10 = "{\n" + " \"type\": \"object\",\n" + " \"properties\": {\n" @@ -185,29 +199,29 @@ private void makeFiles(){ } - private void setMojo(String schema, List previousSchemas){ + private void 
setMojo(String schema, String previousSchemas){ - this.mojo.schemaPath = new File(this.tempDirectory + "/" + schema + fileExtension); - this.mojo.previousSchemaPaths = new ArrayList<>(); + this.mojo.schemas = Collections.singletonMap(schema, new File(this.tempDirectory + "/" + schema + fileExtension)); + this.mojo.previousSchemaPaths = new HashMap<>(); - for (String path : previousSchemas) { - this.mojo.previousSchemaPaths.add(new File(this.tempDirectory + "/" + path + fileExtension)); - } - this.mojo.success = false; + File temp = new File(this.tempDirectory + "/" + previousSchemas); + if(temp.isDirectory()) + this.mojo.previousSchemaPaths.put(schema, new File(this.tempDirectory + "/" + previousSchemas)); + else + this.mojo.previousSchemaPaths.put(schema, new File(this.tempDirectory + "/" + previousSchemas + fileExtension)); } - private boolean isCompatible(String schema, List previousSchemas) + private boolean isCompatible(String schema, String previousSchemas, CompatibilityLevel compatibilityLevel) throws MojoExecutionException { setMojo(schema, previousSchemas); + this.mojo.compatibilityLevels.put(schema, compatibilityLevel); this.mojo.execute(); - return this.mojo.success; + return true; } - - /* * Backward compatibility: A new schema is backward compatible if it can be used to read the data * written in the previous schema. @@ -215,61 +229,59 @@ private boolean isCompatible(String schema, List previousSchemas) @Test public void testBasicBackwardsCompatibility() throws MojoExecutionException { - this.mojo.compatibilityLevel = CompatibilityLevel.BACKWARD; fileExtension = ".avsc"; - this.mojo.schemaType = "avro"; assertTrue("adding a field with default is a backward compatible change", - isCompatible(schema2, Collections.singletonList(schema1))); - assertFalse("adding a field w/o default is not a backward compatible change", - isCompatible(schema3, Collections.singletonList(schema1))); + isCompatible(schema2, (schema1), CompatibilityLevel.BACKWARD)); + assertThrows("adding a field w/o default is not a backward compatible change", + MojoExecutionException.class, () -> isCompatible(schema3, (schema1), CompatibilityLevel.BACKWARD)); assertTrue("changing field name with alias is a backward compatible change", - isCompatible(schema4, Collections.singletonList(schema1))); + isCompatible(schema4, (schema1), CompatibilityLevel.BACKWARD)); assertTrue("evolving a field type to a union is a backward compatible change", - isCompatible(schema6, Collections.singletonList(schema1))); - assertFalse("removing a type from a union is not a backward compatible change", - isCompatible(schema1, Collections.singletonList(schema6))); + isCompatible(schema6, (schema1), CompatibilityLevel.BACKWARD)); + assertThrows("removing a type from a union is not a backward compatible change", + MojoExecutionException.class, () -> isCompatible(schema1, (schema6), CompatibilityLevel.BACKWARD)); assertTrue("adding a new type in union is a backward compatible change", - isCompatible(schema7, Collections.singletonList(schema6))); - assertFalse("removing a type from a union is not a backward compatible change", - isCompatible(schema6, Collections.singletonList(schema7))); + isCompatible(schema7, (schema6), CompatibilityLevel.BACKWARD)); + assertThrows("removing a type from a union is not a backward compatible change", + MojoExecutionException.class, () -> isCompatible(schema6, (schema7), CompatibilityLevel.BACKWARD)); - // Only schema 2 is checked - assertTrue("removing a default is not a transitively compatible change", - 
isCompatible(schema3, Arrays.asList(schema1, schema2))); + + this.mojo.schemaTypes.put(schema10, "JSON"); + this.mojo.schemaTypes.put(schema13, "JSON"); + this.mojo.schemaTypes.put(schema14, "JSON"); fileExtension = ".json"; - this.mojo.schemaType = "json"; assertTrue("setting additional properties to true from false is a backward compatible change", - isCompatible(schema10, Collections.singletonList(schema11))); + isCompatible(schema10, schema11, CompatibilityLevel.BACKWARD)); assertTrue("adding property of string type (same as additional properties type) is " - + "a backward compatible change", isCompatible(schema13, - Collections.singletonList(schema12))); + + "a backward compatible change", isCompatible(schema13, schema12, CompatibilityLevel.BACKWARD)); assertTrue("adding property of string or int type (string is additional properties type) is " - + "a backward compatible change", isCompatible(schema14, - Collections.singletonList(schema12))); + + "a backward compatible change", isCompatible(schema14, schema12, CompatibilityLevel.BACKWARD)); } + @Test public void testBasicBackwardsTransitiveCompatibility() throws MojoExecutionException { - this.mojo.compatibilityLevel = CompatibilityLevel.BACKWARD_TRANSITIVE; fileExtension = ".avsc"; - this.mojo.schemaType = "avro"; - - // All compatible - assertTrue("iteratively adding fields with defaults is a compatible change", - isCompatible(schema8, Arrays.asList(schema1, schema2))); -// 1 == 2, 2 == 3, 3 != 1 + // 1 == 2, 2 == 3, 3 != 1 assertTrue("adding a field with default is a backward compatible change", - isCompatible(schema2, Collections.singletonList(schema1))); + isCompatible(schema2, (schema1), CompatibilityLevel.BACKWARD_TRANSITIVE)); assertTrue("removing a default is a compatible change, but not transitively", - isCompatible(schema3, Collections.singletonList(schema2))); - assertFalse("removing a default is not a transitively compatible change", - isCompatible(schema3, Arrays.asList(schema2, schema1))); + isCompatible(schema3, (schema2), CompatibilityLevel.BACKWARD_TRANSITIVE)); + + // Not compatible throws error + assertThrows("removing a default is not a transitively compatible change", + MojoExecutionException.class, () ->isCompatible(schema3, "schema12Folder", CompatibilityLevel.BACKWARD_TRANSITIVE)); + + assertTrue("Checking if schema8 is backward compatible with schema1 and schema2 present in avro folder" + , isCompatible(schema8, "schema12Folder", CompatibilityLevel.BACKWARD_TRANSITIVE )); + + } /* @@ -279,37 +291,38 @@ public void testBasicBackwardsTransitiveCompatibility() throws MojoExecutionExce @Test public void testBasicForwardsCompatibility() throws MojoExecutionException { - this.mojo.compatibilityLevel = CompatibilityLevel.FORWARD; - fileExtension = ".avsc"; - this.mojo.schemaType = "avro"; assertTrue("adding a field is a forward compatible change", - isCompatible(schema2, Collections.singletonList(schema1))); + isCompatible(schema2, (schema1), CompatibilityLevel.FORWARD)); assertTrue("adding a field is a forward compatible change", - isCompatible(schema3, Collections.singletonList(schema1))); + isCompatible(schema3, (schema1), CompatibilityLevel.FORWARD)); assertTrue("adding a field is a forward compatible change", - isCompatible(schema3, Collections.singletonList(schema2))); + isCompatible(schema3, (schema2), CompatibilityLevel.FORWARD)); assertTrue("adding a field is a forward compatible change", - isCompatible(schema2, Collections.singletonList(schema3))); + isCompatible(schema2, (schema3), 
CompatibilityLevel.FORWARD)); + + fileExtension = ".avsc"; // Only schema 2 is checked - assertTrue("removing a default is not a transitively compatible change", - isCompatible(schema1, Arrays.asList(schema3, schema2))); + assertThrows( MojoExecutionException.class, () -> + isCompatible(schema1, "schema12Folder", CompatibilityLevel.FORWARD)); fileExtension = ".json"; - this.mojo.schemaType = "json"; + this.mojo.schemaTypes.put(schema11, "JSON"); + this.mojo.schemaTypes.put(schema12, "JSON"); + this.mojo.schemaTypes.put(schema13, "JSON"); assertTrue("setting additional properties to false from true is a forward compatible change", - isCompatible(schema11, Collections.singletonList(schema10))); + isCompatible(schema11, schema10, CompatibilityLevel.FORWARD)); assertTrue("removing property of string type (same as additional properties type)" + " is a backward compatible change", isCompatible(schema13, - Collections.singletonList(schema12))); + schema12, CompatibilityLevel.FORWARD)); assertTrue("removing property of string or int type (string is additional properties type) is " + "a backward compatible change", isCompatible(schema12, - Collections.singletonList(schema14))); + schema14, CompatibilityLevel.FORWARD)); } @@ -320,21 +333,13 @@ public void testBasicForwardsCompatibility() throws MojoExecutionException { @Test public void testBasicForwardsTransitiveCompatibility() throws MojoExecutionException { - this.mojo.compatibilityLevel = CompatibilityLevel.FORWARD_TRANSITIVE; fileExtension = ".avsc"; - this.mojo.schemaType = "avro"; - - // All compatible - assertTrue("iteratively removing fields with defaults is a compatible change", - isCompatible(schema1, Arrays.asList(schema8, schema2))); // 1 == 2, 2 == 3, 3 != 1 assertTrue("adding default to a field is a compatible change", - isCompatible(schema2, Collections.singletonList(schema3))); + isCompatible(schema2, (schema3), CompatibilityLevel.FORWARD_TRANSITIVE)); assertTrue("removing a field with a default is a compatible change", - isCompatible(schema1, Collections.singletonList(schema2))); - assertFalse("removing a default is not a transitively compatible change", - isCompatible(schema1, Arrays.asList(schema2, schema3))); + isCompatible(schema1, (schema2), CompatibilityLevel.FORWARD_TRANSITIVE)); } /* @@ -343,19 +348,14 @@ public void testBasicForwardsTransitiveCompatibility() throws MojoExecutionExcep @Test public void testBasicFullCompatibility() throws MojoExecutionException { - this.mojo.compatibilityLevel = CompatibilityLevel.FULL; fileExtension = ".avsc"; - this.mojo.schemaType = "avro"; assertTrue("adding a field with default is a backward and a forward compatible change", - isCompatible(schema2, Collections.singletonList(schema1))); + isCompatible(schema2, (schema1), CompatibilityLevel.FULL)); - // Only schema 2 is checked! - assertTrue("transitively adding a field without a default is not a compatible change", - isCompatible(schema3, Arrays.asList(schema1, schema2))); - // Only schema 2 is checked! 
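With the reworked goal, each named check joins four maps on a common key, and previousSchemaPaths may now point at a folder whose directly contained files become the previous-schema set (directories require one of the *_TRANSITIVE levels; BACKWARD, FORWARD, and FULL still need exactly one file). Incompatibility is reported by throwing rather than via a success flag. A sketch with invented paths:

```java
package io.confluent.kafka.schemaregistry.maven;

import java.io.File;

import io.confluent.kafka.schemaregistry.CompatibilityLevel;
import org.apache.maven.plugin.MojoExecutionException;

public class TestLocalCompatibilitySketch {

  static void check() throws MojoExecutionException {
    TestLocalCompatibilityMojo mojo = new TestLocalCompatibilityMojo();

    // All four maps are joined on the key "order".
    mojo.schemas.put("order", new File("src/main/avro/order.avsc"));
    mojo.schemaTypes.put("order", "AVRO"); // optional; AVRO is the default
    // A directory means "validate against every file directly inside it";
    // nested directories are skipped.
    mojo.previousSchemaPaths.put("order", new File("src/main/avro/history"));
    mojo.compatibilityLevels.put("order", CompatibilityLevel.BACKWARD_TRANSITIVE);

    // Throws MojoExecutionException on the first incompatible pairing.
    mojo.execute();
  }
}
```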
- assertTrue("transitively removing a field without a default is not a compatible change", - isCompatible(schema1, Arrays.asList(schema3, schema2))); + // Throws error, provide exactly one file for checking full compatibility + assertThrows(MojoExecutionException.class, () -> + isCompatible(schema3, "schema12Folder", CompatibilityLevel.FULL)); } @@ -366,29 +366,22 @@ public void testBasicFullCompatibility() throws MojoExecutionException { @Test public void testBasicFullTransitiveCompatibility() throws MojoExecutionException { - this.mojo.compatibilityLevel = CompatibilityLevel.FULL_TRANSITIVE; fileExtension = ".avsc"; - this.mojo.schemaType = "avro"; - // Simple check assertTrue("iteratively adding fields with defaults is a compatible change", - isCompatible(schema8, Arrays.asList(schema1, schema2))); - assertTrue("iteratively removing fields with defaults is a compatible change", - isCompatible(schema1, Arrays.asList(schema8, schema2))); - + isCompatible(schema8, "schema12Folder", CompatibilityLevel.FULL_TRANSITIVE)); assertTrue("adding default to a field is a compatible change", - isCompatible(schema2, Collections.singletonList(schema3))); + isCompatible(schema2, (schema3), CompatibilityLevel.FULL_TRANSITIVE)); assertTrue("removing a field with a default is a compatible change", - isCompatible(schema1, Collections.singletonList(schema2))); + isCompatible(schema1, (schema2), CompatibilityLevel.FULL_TRANSITIVE)); assertTrue("adding a field with default is a compatible change", - isCompatible(schema2, Collections.singletonList(schema1))); + isCompatible(schema2, (schema1), CompatibilityLevel.FULL_TRANSITIVE)); assertTrue("removing a default from a field compatible change", - isCompatible(schema3, Collections.singletonList(schema2))); + isCompatible(schema3, (schema2), CompatibilityLevel.FULL_TRANSITIVE)); + + assertThrows( "transitively adding a field without a default is not a compatible change", + MojoExecutionException.class, () -> isCompatible(schema3, "schema12Folder", CompatibilityLevel.FULL_TRANSITIVE)); - assertFalse("transitively adding a field without a default is not a compatible change", - isCompatible(schema3, Arrays.asList(schema2, schema1))); - assertFalse("transitively removing a field without a default is not a compatible change", - isCompatible(schema1, Arrays.asList(schema2, schema3))); } } \ No newline at end of file From e762e01e03dc50c127e8f851c9327117e5e12eb5 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Mon, 2 May 2022 14:21:55 -0700 Subject: [PATCH 41/73] MINOR minor mock client fixes (#2271) --- .../client/MockSchemaRegistryClient.java | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java index aba9741a4b5..4f7f43edbd6 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java @@ -112,7 +112,7 @@ private int getIdFromRegistry( idCache.computeIfAbsent(subject, k -> new ConcurrentHashMap<>()); if (!idSchemaMap.isEmpty()) { for (Map.Entry entry : idSchemaMap.entrySet()) { - if (entry.getValue().canonicalString().equals(schema.canonicalString())) { + if (schemasEqual(entry.getValue(), schema)) { if (registerRequest) { checkId(id, entry.getKey()); generateVersion(subject, schema); @@ -142,6 +142,11 @@ private int 
getIdFromRegistry( } } + private boolean schemasEqual(ParsedSchema schema1, ParsedSchema schema2) { + return schema1.canonicalString().equals(schema2.canonicalString()) + || schema1.deepEquals(schema2); + } + private void generateVersion(String subject, ParsedSchema schema) { List versions = allVersions(subject); int currentVersion; @@ -362,7 +367,7 @@ public Schema getByVersion(String subject, int version, boolean lookupDeletedSch int id = -1; Map idSchemaMap = idCache.get(subject); for (Map.Entry entry : idSchemaMap.entrySet()) { - if (entry.getValue().canonicalString().equals(schema.canonicalString())) { + if (schemasEqual(entry.getValue(), schema)) { id = entry.getKey(); } } @@ -389,7 +394,7 @@ public SchemaMetadata getSchemaMetadata(String subject, int version) int id = -1; Map idSchemaMap = idCache.get(subject); for (Map.Entry entry : idSchemaMap.entrySet()) { - if (entry.getValue().canonicalString().equals(schema.canonicalString())) { + if (schemasEqual(entry.getValue(), schema)) { id = entry.getKey(); } } @@ -517,9 +522,9 @@ public synchronized List deleteSubject( throws IOException, RestClientException { schemaCache.remove(subject); idCache.remove(subject); - versionCache.remove(subject); + Map versions = versionCache.remove(subject); compatibilityCache.remove(subject); - return Collections.singletonList(0); + return versions.values().stream().sorted().collect(Collectors.toList()); } @Override From b068aa05ea5592dda80a4d807f279735726dd8ea Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Tue, 3 May 2022 13:37:07 -0700 Subject: [PATCH 42/73] MINOR fix npe in mock client when deleting subject (#2272) --- .../kafka/schemaregistry/client/MockSchemaRegistryClient.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java index 4f7f43edbd6..867b6dc3bb3 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java @@ -524,7 +524,9 @@ public synchronized List deleteSubject( idCache.remove(subject); Map versions = versionCache.remove(subject); compatibilityCache.remove(subject); - return versions.values().stream().sorted().collect(Collectors.toList()); + return versions != null + ? 
versions.values().stream().sorted().collect(Collectors.toList()) + : Collections.emptyList(); } @Override From a3cfc522075b850f65e4be6572c7fd515fe3514f Mon Sep 17 00:00:00 2001 From: pushpabaral <87644741+pushpabaral@users.noreply.github.com> Date: Tue, 10 May 2022 17:38:52 +0530 Subject: [PATCH 43/73] Migrate from confluent-log4j to reload4j (#2275) --- avro-serializer/pom.xml | 15 ++------------- core/pom.xml | 19 +++++-------------- package-kafka-serde-tools/pom.xml | 6 ++++++ package-schema-registry/pom.xml | 16 ++-------------- 4 files changed, 15 insertions(+), 41 deletions(-) diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index f0d9c4af74f..976d82f0bf0 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -58,23 +58,12 @@ org.slf4j - slf4j-log4j12 + slf4j-reload4j test io.confluent - confluent-log4j - test - - - org.apache.logging.log4j - log4j-api - - - org.apache.logging.log4j - log4j-core - - + logredactor diff --git a/core/pom.xml b/core/pom.xml index 0b59a2fa42b..f370c63ba6c 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -60,23 +60,14 @@ org.slf4j - slf4j-log4j12 + slf4j-reload4j - + io.confluent - confluent-log4j - - - org.apache.logging.log4j - log4j-api - - - org.apache.logging.log4j - log4j-core - - - + logredactor + + com.google.guava guava diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 5ef810306fe..7ef0d34c241 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -93,6 +93,12 @@ org.apache.kafka connect-runtime provided + + + org.slf4j + slf4j-log4j12 + + org.apache.kafka diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index 15a4d549328..65aeab09e7e 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -18,24 +18,12 @@ org.slf4j - slf4j-log4j12 + slf4j-reload4j compile - io.confluent - confluent-log4j - compile - - - org.apache.logging.log4j - log4j-api - - - org.apache.logging.log4j - log4j-core - - + logredactor io.confluent From 491fc97d656d2b6c32feb3e4a42ff8e1682ad4bb Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Tue, 17 May 2022 19:34:25 -0700 Subject: [PATCH 44/73] Fix checkstyle issues --- .../client/CachedSchemaRegistryClient.java | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java index cfd098f5989..07f07d49c59 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java @@ -271,19 +271,6 @@ public Map getSchemaProviders() { return providers; } - private HostnameVerifier getHostnameVerifier(Map config) { - String sslEndpointIdentificationAlgo = - (String) config.get(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG); - - if (sslEndpointIdentificationAlgo == null - || sslEndpointIdentificationAlgo.equals("none") - || sslEndpointIdentificationAlgo.isEmpty()) { - return (hostname, session) -> true; - } - - return null; - } - private int registerAndGetId(String subject, ParsedSchema schema, boolean normalize) throws IOException, RestClientException { return restService.registerSchema(schema.canonicalString(), schema.schemaType(), From e2962cb8f6964af526528c8a973b9090c42602f5 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Tue, 17 May 2022 
19:45:38 -0700 Subject: [PATCH 45/73] Fix checkstyle issues --- .../kafka/schemaregistry/client/CachedSchemaRegistryClient.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java index 07f07d49c59..5073daf6653 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java @@ -24,8 +24,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.kafka.common.config.SslConfigs; - import java.io.IOException; import java.util.Collection; import java.util.Collections; From 80220dc3b6b4b9ebf8dc02e2299b9efad09e9a3a Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Wed, 1 Jun 2022 10:35:08 -0700 Subject: [PATCH 46/73] Change everit-json-schema coordinates to reflect artifact published on Central (#2305) (#2307) * fixes #2171 : change coords to reflect artifact published on Central * Update pom.xml Co-authored-by: Robert Yokota Co-authored-by: Arnaud Esteve --- json-schema-converter/pom.xml | 4 ++-- json-schema-provider/pom.xml | 4 ++-- pom.xml | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 483a5de7692..63a3fee846e 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -62,8 +62,8 @@ provided - com.github.everit-org.json-schema - org.everit.json.schema + com.github.erosb + everit-json-schema org.mockito diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 23265a78b1f..db71c352b20 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -32,8 +32,8 @@ ${io.confluent.schema-registry.version} - com.github.everit-org.json-schema - org.everit.json.schema + com.github.erosb + everit-json-schema com.fasterxml.jackson.datatype diff --git a/pom.xml b/pom.xml index a5ec44647ba..12c878e6a4a 100644 --- a/pom.xml +++ b/pom.xml @@ -191,8 +191,8 @@ ${jackson.version} - com.github.everit-org.json-schema - org.everit.json.schema + com.github.erosb + everit-json-schema ${json-schema.version} From e4de3a37417136b4863ede44b57055423c1d113a Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Tue, 5 Jul 2022 03:22:57 +0000 Subject: [PATCH 47/73] Set Confluent to 7.2.0, Kafka to 7.2.0. 
--- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 28 ++++++++++++------------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 41 insertions(+), 41 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index ca34f79fa29..bcb1e878e9b 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index e133df7455f..1cdccf02941 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 1328f092f37..8ee66ce8cb7 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 976d82f0bf0..0be8f5f43d4 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 503447b4783..ef81f9c98e9 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 4f308253fc5..91154575927 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/client/pom.xml b/client/pom.xml index a6af240ab8f..62cf8c7a601 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/core/pom.xml b/core/pom.xml index f370c63ba6c..1a3b7404f1f 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 63a3fee846e..f0bf63b4fae 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index db71c352b20..73ccac66e02 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 9c58d87c841..7917e5c674e 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff 
--git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 3d98cfa7a2e..19efee54332 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index f94875cfef8..1e49dcc26f0 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 3e6a261a37b..540f457fbc7 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -90,33 +90,33 @@

License Report


jopt-simple-4.9jar4.9The MIT License
<<<<<<< HEAD -kafka-avro-serializer-7.2.0-0jar7.2.0-0 +kafka-avro-serializer-7.2.0jar7.2.0 -kafka-clients-7.2.0-0-ccsjarincluded file +kafka-clients-7.2.0-ccsjarincluded file -kafka-connect-avro-converter-7.2.0-0jar7.2.0-0 +kafka-connect-avro-converter-7.2.0jar7.2.0 -kafka-json-serializer-7.2.0-0jar7.2.0-0 +kafka-json-serializer-7.2.0jar7.2.0 -kafka-schema-registry-7.2.0-0jar7.2.0-0 +kafka-schema-registry-7.2.0jar7.2.0 -kafka-schema-registry-client-7.2.0-0jar7.2.0-0 +kafka-schema-registry-client-7.2.0jar7.2.0 -kafka_2.11-7.2.0-0-ccsjarincluded file +kafka_2.11-7.2.0-ccsjarincluded file ======= -kafka-avro-serializer-7.2.0-0jar7.2.0-0 +kafka-avro-serializer-7.2.0jar7.2.0 -kafka-clients-7.2.0-0-ccsjarincluded file +kafka-clients-7.2.0-ccsjarincluded file -kafka-connect-avro-converter-7.2.0-0jar7.2.0-0 +kafka-connect-avro-converter-7.2.0jar7.2.0 -kafka-json-serializer-7.2.0-0jar7.2.0-0 +kafka-json-serializer-7.2.0jar7.2.0 -kafka-schema-registry-7.2.0-0jar7.2.0-0 +kafka-schema-registry-7.2.0jar7.2.0 -kafka-schema-registry-client-7.2.0-0jar7.2.0-0 +kafka-schema-registry-client-7.2.0jar7.2.0 -kafka_2.11-7.2.0-0-ccsjarincluded file +kafka_2.11-7.2.0-ccsjarincluded file >>>>>>> 5.1.x log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index b39578350b2..c613c89815d 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 7ef0d34c241..25effbbe219 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index 65aeab09e7e..a26e52adbc5 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 12c878e6a4a..4b2a9b4c964 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.0-0, 7.2.1-0) + 7.2.0 kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.0-0 + 7.2.0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index b9823d8a3e9..4802f88fdfb 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 7ad11fda5fe..e02355004d5 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 3671151dfa0..c6c6708fb7b 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 83fed7559d7..b15a99e5109 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index ec22e40251d..3b94dbb8686 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 96a8d28e78b..6531fb005ed 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index abad999a218..cd55f4b6c42 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 8f3fdc31984..298e7028270 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.0 From 1e376c5e7ea44f7cd12a8d3819e05a471efc15af Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Thu, 7 Jul 2022 20:56:31 +0000 Subject: [PATCH 48/73] Bump Confluent to 7.2.1-0, Kafka to 7.2.1-0 --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- 
benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 28 ++++++++++++------------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 41 insertions(+), 41 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index ca34f79fa29..bb067b7dcae 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index e133df7455f..5d5be4fadde 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 1328f092f37..668de183ba3 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 976d82f0bf0..ff87eb050b8 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 503447b4783..464531a2566 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 4f308253fc5..009a902df62 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/client/pom.xml b/client/pom.xml index a6af240ab8f..4956c820549 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/core/pom.xml b/core/pom.xml index f370c63ba6c..7fd07de53c6 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 63a3fee846e..a9ec22f7b7f 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index db71c352b20..be24a2e2c4c 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 9c58d87c841..f5b816fa03c 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 
3d98cfa7a2e..1dd30f97778 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index f94875cfef8..a3f27ea32c3 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 3e6a261a37b..6918f25da6c 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -90,33 +90,33 @@

License Report


jopt-simple-4.9jar4.9The MIT License
<<<<<<< HEAD -kafka-avro-serializer-7.2.0-0jar7.2.0-0 +kafka-avro-serializer-7.2.1-0jar7.2.1-0 -kafka-clients-7.2.0-0-ccsjarincluded file +kafka-clients-7.2.1-0-ccsjarincluded file -kafka-connect-avro-converter-7.2.0-0jar7.2.0-0 +kafka-connect-avro-converter-7.2.1-0jar7.2.1-0 -kafka-json-serializer-7.2.0-0jar7.2.0-0 +kafka-json-serializer-7.2.1-0jar7.2.1-0 -kafka-schema-registry-7.2.0-0jar7.2.0-0 +kafka-schema-registry-7.2.1-0jar7.2.1-0 -kafka-schema-registry-client-7.2.0-0jar7.2.0-0 +kafka-schema-registry-client-7.2.1-0jar7.2.1-0 -kafka_2.11-7.2.0-0-ccsjarincluded file +kafka_2.11-7.2.1-0-ccsjarincluded file ======= -kafka-avro-serializer-7.2.0-0jar7.2.0-0 +kafka-avro-serializer-7.2.1-0jar7.2.1-0 -kafka-clients-7.2.0-0-ccsjarincluded file +kafka-clients-7.2.1-0-ccsjarincluded file -kafka-connect-avro-converter-7.2.0-0jar7.2.0-0 +kafka-connect-avro-converter-7.2.1-0jar7.2.1-0 -kafka-json-serializer-7.2.0-0jar7.2.0-0 +kafka-json-serializer-7.2.1-0jar7.2.1-0 -kafka-schema-registry-7.2.0-0jar7.2.0-0 +kafka-schema-registry-7.2.1-0jar7.2.1-0 -kafka-schema-registry-client-7.2.0-0jar7.2.0-0 +kafka-schema-registry-client-7.2.1-0jar7.2.1-0 -kafka_2.11-7.2.0-0-ccsjarincluded file +kafka_2.11-7.2.1-0-ccsjarincluded file >>>>>>> 5.1.x log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index b39578350b2..9ca2716774f 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 7ef0d34c241..5b94b5642e9 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index 65aeab09e7e..fd7651a1c3b 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 12c878e6a4a..2e211c15ceb 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.0-0, 7.2.1-0) + [7.2.1-0, 7.2.2-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.0-0 + 7.2.1-0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index b9823d8a3e9..62c83ded826 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 7ad11fda5fe..5b08362ea9d 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 3671151dfa0..1295ac38734 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 83fed7559d7..f77072d10bb 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index ec22e40251d..36517c33ca4 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 96a8d28e78b..694d42af4b0 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index abad999a218..53b98c92352 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 8f3fdc31984..ced6b7d37cf 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.0-0 + 7.2.1-0 From 92766eea4594cbf25d4b9026f2ce772b58ce0e1b Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Thu, 14 Jul 2022 01:03:38 +0000 Subject: [PATCH 49/73] Set Confluent to 7.2.1, Kafka to 7.2.1. 
--- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 28 ++++++++++++------------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 41 insertions(+), 41 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index bb067b7dcae..6efadddfec9 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index 5d5be4fadde..f0d0cef177b 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 668de183ba3..92a28a6a305 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index ff87eb050b8..b4569d6186a 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 464531a2566..138350a6fb8 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 009a902df62..7d1577605e4 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/client/pom.xml b/client/pom.xml index 4956c820549..24d8ae9fae3 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/core/pom.xml b/core/pom.xml index 7fd07de53c6..e8b80792b3a 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index a9ec22f7b7f..e0a34d8c662 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index be24a2e2c4c..480b464b5e5 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index f5b816fa03c..80712b43e46 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff 
--git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 1dd30f97778..54ee0310165 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index a3f27ea32c3..3b6ec4fc155 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 6918f25da6c..e38bb3ab0cc 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -90,33 +90,33 @@

License Report


jopt-simple-4.9jar4.9The MIT License
<<<<<<< HEAD
-kafka-avro-serializer-7.2.1-0jar7.2.1-0
+kafka-avro-serializer-7.2.1jar7.2.1
-kafka-clients-7.2.1-0-ccsjarincluded file
+kafka-clients-7.2.1-ccsjarincluded file
-kafka-connect-avro-converter-7.2.1-0jar7.2.1-0
+kafka-connect-avro-converter-7.2.1jar7.2.1
-kafka-json-serializer-7.2.1-0jar7.2.1-0
+kafka-json-serializer-7.2.1jar7.2.1
-kafka-schema-registry-7.2.1-0jar7.2.1-0
+kafka-schema-registry-7.2.1jar7.2.1
-kafka-schema-registry-client-7.2.1-0jar7.2.1-0
+kafka-schema-registry-client-7.2.1jar7.2.1
-kafka_2.11-7.2.1-0-ccsjarincluded file
+kafka_2.11-7.2.1-ccsjarincluded file
=======
-kafka-avro-serializer-7.2.1-0jar7.2.1-0
+kafka-avro-serializer-7.2.1jar7.2.1
-kafka-clients-7.2.1-0-ccsjarincluded file
+kafka-clients-7.2.1-ccsjarincluded file
-kafka-connect-avro-converter-7.2.1-0jar7.2.1-0
+kafka-connect-avro-converter-7.2.1jar7.2.1
-kafka-json-serializer-7.2.1-0jar7.2.1-0
+kafka-json-serializer-7.2.1jar7.2.1
-kafka-schema-registry-7.2.1-0jar7.2.1-0
+kafka-schema-registry-7.2.1jar7.2.1
-kafka-schema-registry-client-7.2.1-0jar7.2.1-0
+kafka-schema-registry-client-7.2.1jar7.2.1
-kafka_2.11-7.2.1-0-ccsjarincluded file
+kafka_2.11-7.2.1-ccsjarincluded file
>>>>>>> 5.1.x
log4j-1.2.17jar1.2.17Apache 2.0
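Note that the <<<<<<< HEAD / ======= / >>>>>>> 5.1.x markers in the hunk above are ordinary file content at this point: they were committed into licenses-and-notices.html during an earlier merge, this release patch rewrites the versions on both sides of them, and only PATCH 50/73 ("Fix merge issue") below deletes them. A small illustrative guard of the kind that catches committed conflict markers in CI; this class is an assumption for illustration, not part of the series:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.List;

    public final class ConflictMarkerCheck {
        private static final List<String> MARKERS =
                List.of("<<<<<<< ", "=======", ">>>>>>> ");

        // Exits nonzero if any file passed on the command line still
        // contains a merge-conflict marker at the start of a line.
        public static void main(String[] args) throws IOException {
            boolean clean = true;
            for (String arg : args) {
                List<String> lines = Files.readAllLines(Path.of(arg));
                for (int i = 0; i < lines.size(); i++) {
                    String line = lines.get(i);
                    if (MARKERS.stream().anyMatch(line::startsWith)) {
                        System.err.printf("%s:%d: %s%n", arg, i + 1, line);
                        clean = false;
                    }
                }
            }
            if (!clean) {
                System.exit(1);
            }
        }
    }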
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 9ca2716774f..f52d5065648 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 5b94b5642e9..7906400d79a 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index fd7651a1c3b..672dea75f1c 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 2e211c15ceb..0fa3c8394ce 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.1-0, 7.2.2-0) + 7.2.1 kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.1-0 + 7.2.1 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 62c83ded826..94fea6dd1e2 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 5b08362ea9d..860b5c1d794 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 1295ac38734..ab3fc53b441 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index f77072d10bb..8b6c61e1b1b 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index 36517c33ca4..330236100e2 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 694d42af4b0..276cc63d049 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 53b98c92352..6847a787c02 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index ced6b7d37cf..422813502aa 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.1 From f06f488073a7530b8d7a3be5e085d7fe567963b9 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Fri, 15 Jul 2022 15:11:50 -0700 Subject: [PATCH 50/73] Fix merge issue --- licenses-and-notices.html | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 
6918f25da6c..f31091d25d2 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,7 +89,6 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-<<<<<<< HEAD
kafka-avro-serializer-7.2.1-0jar7.2.1-0
kafka-clients-7.2.1-0-ccsjarincluded file
@@ -103,21 +102,6 @@

License Report


kafka-schema-registry-client-7.2.1-0jar7.2.1-0
kafka_2.11-7.2.1-0-ccsjarincluded file
-=======
-kafka-avro-serializer-7.2.1-0jar7.2.1-0
-
-kafka-clients-7.2.1-0-ccsjarincluded file
-
-kafka-connect-avro-converter-7.2.1-0jar7.2.1-0
-
-kafka-json-serializer-7.2.1-0jar7.2.1-0
-
-kafka-schema-registry-7.2.1-0jar7.2.1-0
-
-kafka-schema-registry-client-7.2.1-0jar7.2.1-0
-
-kafka_2.11-7.2.1-0-ccsjarincluded file
->>>>>>> 5.1.x
log4j-1.2.17jar1.2.17Apache 2.0
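The next two patches move from release plumbing to code: PATCH 51/73 below adds a leader-change notification hook to KafkaSchemaRegistry, and PATCH 52/73 tightens how it fires. A hypothetical caller of the new hook; only addLeaderChangeListener(Consumer<Boolean>) comes from the diff below, and the surrounding class is invented for illustration:

    import java.util.function.Consumer;

    public class LeaderChangeListenerExample {

        // Stand-in for the single method PATCH 51 adds to KafkaSchemaRegistry.
        interface LeaderAwareRegistry {
            void addLeaderChangeListener(Consumer<Boolean> listener);
        }

        // Hypothetical caller: caches this node's leadership status so other
        // components can branch on it without touching the registry's lock.
        static class LeadershipGauge {
            private volatile boolean leader;

            LeadershipGauge(LeaderAwareRegistry registry) {
                // The Boolean is the node's new status: true = became leader.
                registry.addLeaderChangeListener(isLeader -> this.leader = isLeader);
            }

            boolean isLeader() {
                return leader;
            }
        }
    }

Per the diffs, callbacks run after the leader lock is released (PATCH 51) and, once PATCH 52 lands, only when leadership actually changed, with each listener's exceptions caught and logged.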
From fd552138c5bccf035c46fe8cc6ca6ac9d5267526 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Sat, 23 Jul 2022 19:50:13 -0700 Subject: [PATCH 51/73] Add leader change listeners (#2345) * Add leader change listeners * Minor refactoring * Fix checkstyle --- .../storage/KafkaSchemaRegistry.java | 22 +++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java index 84b61a27b1f..58f02a018ec 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java @@ -67,6 +67,8 @@ import java.util.LinkedHashSet; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Consumer; import java.util.stream.Stream; import org.apache.avro.reflect.Nullable; import org.apache.kafka.clients.admin.AdminClient; @@ -132,6 +134,7 @@ public class KafkaSchemaRegistry implements SchemaRegistry, LeaderAwareSchemaReg private final Map providers; private final String kafkaClusterId; private final String groupId; + private final List> leaderChangeListeners = new CopyOnWriteArrayList<>(); public KafkaSchemaRegistry(SchemaRegistryConfig config, Serializer serializer) @@ -336,6 +339,15 @@ public boolean initialized() { return kafkaStore.initialized(); } + /** + * Add a leader change listener. + * + * @param listener a function that takes whether this node is a leader + */ + public void addLeaderChangeListener(Consumer listener) { + leaderChangeListeners.add(listener); + } + public boolean isLeader() { kafkaStore.leaderLock().lock(); try { @@ -367,9 +379,10 @@ public void setLeader(@Nullable SchemaRegistryIdentity newLeader) "Tried to set an ineligible node to leader: " + newLeader); } + boolean isLeader; kafkaStore.leaderLock().lock(); try { - SchemaRegistryIdentity previousLeader = leaderIdentity; + final SchemaRegistryIdentity previousLeader = leaderIdentity; leaderIdentity = newLeader; if (leaderIdentity == null) { @@ -382,7 +395,8 @@ public void setLeader(@Nullable SchemaRegistryIdentity newLeader) } } - if (leaderIdentity != null && !leaderIdentity.equals(previousLeader) && isLeader()) { + isLeader = isLeader(); + if (leaderIdentity != null && !leaderIdentity.equals(previousLeader) && isLeader) { // The new leader may not know the exact last offset in the Kafka log. 
So, mark the // last offset invalid here kafkaStore.markLastWrittenOffsetInvalid(); @@ -399,6 +413,10 @@ public void setLeader(@Nullable SchemaRegistryIdentity newLeader) } finally { kafkaStore.leaderLock().unlock(); } + + for (Consumer listener : leaderChangeListeners) { + listener.accept(isLeader); + } } /** From ee3ad2b8fb55ccf14d1e1b9d0725f6d160193ae5 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Sun, 24 Jul 2022 23:54:34 -0700 Subject: [PATCH 52/73] Optimize sync call (#2346) --- .../storage/KafkaSchemaRegistry.java | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java index 58f02a018ec..dea2db8619b 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java @@ -380,6 +380,7 @@ public void setLeader(@Nullable SchemaRegistryIdentity newLeader) } boolean isLeader; + boolean leaderChanged; kafkaStore.leaderLock().lock(); try { final SchemaRegistryIdentity previousLeader = leaderIdentity; @@ -396,7 +397,8 @@ public void setLeader(@Nullable SchemaRegistryIdentity newLeader) } isLeader = isLeader(); - if (leaderIdentity != null && !leaderIdentity.equals(previousLeader) && isLeader) { + leaderChanged = leaderIdentity != null && !leaderIdentity.equals(previousLeader); + if (leaderChanged && isLeader) { // The new leader may not know the exact last offset in the Kafka log. So, mark the // last offset invalid here kafkaStore.markLastWrittenOffsetInvalid(); @@ -414,8 +416,14 @@ public void setLeader(@Nullable SchemaRegistryIdentity newLeader) kafkaStore.leaderLock().unlock(); } - for (Consumer listener : leaderChangeListeners) { - listener.accept(isLeader); + if (leaderChanged) { + for (Consumer listener : leaderChangeListeners) { + try { + listener.accept(isLeader); + } catch (Exception e) { + log.error("Could not invoke leader change listener", e); + } + } } } From 7e22f459b65616286b5c51fdf69c7689f059ab23 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Fri, 29 Jul 2022 18:56:15 +0000 Subject: [PATCH 53/73] Bump Confluent to 7.2.2-0, Kafka to 7.2.2-0 --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index bb067b7dcae..74d12e3b947 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index 5d5be4fadde..dbea84dabf4 100644 --- 
a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 668de183ba3..8fb42294671 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index ff87eb050b8..042b94b5774 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 464531a2566..4fd29cfbc85 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 009a902df62..c4c479bf0fc 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/client/pom.xml b/client/pom.xml index 4956c820549..5de2471bb97 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/core/pom.xml b/core/pom.xml index c1a5603bd93..925ba4d8415 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index a9ec22f7b7f..f2451b19100 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index be24a2e2c4c..00b937322f6 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index f5b816fa03c..65221bfac0e 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 1dd30f97778..0d79ce98a28 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index a3f27ea32c3..469c76c674d 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index f31091d25d2..c86165ce649 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.1-0jar7.2.1-0
+kafka-avro-serializer-7.2.2-0jar7.2.2-0
-kafka-clients-7.2.1-0-ccsjarincluded file
+kafka-clients-7.2.2-0-ccsjarincluded file
-kafka-connect-avro-converter-7.2.1-0jar7.2.1-0
+kafka-connect-avro-converter-7.2.2-0jar7.2.2-0
-kafka-json-serializer-7.2.1-0jar7.2.1-0
+kafka-json-serializer-7.2.2-0jar7.2.2-0
-kafka-schema-registry-7.2.1-0jar7.2.1-0
+kafka-schema-registry-7.2.2-0jar7.2.2-0
-kafka-schema-registry-client-7.2.1-0jar7.2.1-0
+kafka-schema-registry-client-7.2.2-0jar7.2.2-0
-kafka_2.11-7.2.1-0-ccsjarincluded file
+kafka_2.11-7.2.2-0-ccsjarincluded file
log4j-1.2.17jar1.2.17Apache 2.0
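A step back before the version bumps continue: the listener plumbing that PATCHES 51 and 52 above build up reduces to a common pattern. Registrations live in a CopyOnWriteArrayList so iteration never fails mid-notification, the change decision is computed while holding the leader lock, and the callbacks run outside it with per-listener error handling. A condensed, self-contained model (the String leader id stands in for SchemaRegistryIdentity; this is not the real class):

    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;
    import java.util.concurrent.locks.ReentrantLock;
    import java.util.function.Consumer;

    // Condensed model of the notification pattern from PATCHES 51-52.
    class LeaderNotifier {
        private final List<Consumer<Boolean>> listeners = new CopyOnWriteArrayList<>();
        private final ReentrantLock leaderLock = new ReentrantLock();
        private final String myId;
        private String leaderId;

        LeaderNotifier(String myId) {
            this.myId = myId;
        }

        void addLeaderChangeListener(Consumer<Boolean> listener) {
            listeners.add(listener);
        }

        void setLeader(String newLeader) {
            boolean isLeader;
            boolean leaderChanged;
            leaderLock.lock();
            try {
                String previousLeader = leaderId;
                leaderId = newLeader;
                isLeader = leaderId != null && leaderId.equals(myId);
                leaderChanged = leaderId != null && !leaderId.equals(previousLeader);
            } finally {
                leaderLock.unlock();
            }
            // Notify outside the lock and only on a real change; one broken
            // listener must not keep the others from hearing about it.
            if (leaderChanged) {
                for (Consumer<Boolean> listener : listeners) {
                    try {
                        listener.accept(isLeader);
                    } catch (RuntimeException e) {
                        System.err.println("Leader change listener failed: " + e);
                    }
                }
            }
        }
    }

Running callbacks outside the lock keeps a slow listener from stalling leader election, and the per-listener try/catch (the PATCH 52 refinement) keeps one failure from starving the rest.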
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 9ca2716774f..dd4d664f02b 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 5b94b5642e9..ef34a0920f4 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index fd7651a1c3b..4fa13b1e3c8 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 2e211c15ceb..a2a018bdc50 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.1-0, 7.2.2-0) + [7.2.2-0, 7.2.3-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.1-0 + 7.2.2-0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 62c83ded826..1cdc091bf0e 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 5b08362ea9d..b35f816a56a 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 1295ac38734..e95e2a0ad81 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index f77072d10bb..e0c78c92710 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index 36517c33ca4..b9109b77eb6 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 694d42af4b0..990e02dad43 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 53b98c92352..179f671c7f5 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index ced6b7d37cf..474bac3a1bb 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.1-0 + 7.2.2-0 From 981f857aa53423825926ec99f9f2bf79210267e6 Mon Sep 17 00:00:00 2001 From: Xiaoya Li Date: Thu, 15 Sep 2022 18:37:22 -0700 Subject: [PATCH 54/73] Adding timer in onJoinPrepare (#2381) Co-authored-by: jshahc --- .../leaderelector/kafka/SchemaRegistryCoordinator.java | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java b/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java index a195c89737e..5216dbbdd38 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java @@ -212,7 +212,7 @@ protected Map onLeaderElected( } @Override - protected boolean onJoinPrepare(int generation, String memberId) { + protected boolean onJoinPrepare(Timer timer, int generation, String memberId) { log.debug("Revoking previous assignment {}", assignmentSnapshot); if (assignmentSnapshot != null) { listener.onRevoked(); From e7f0a3e00f468c9450422d8ab384ec8c0ef434d7 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Tue, 20 Sep 2022 01:29:19 +0000 Subject: [PATCH 55/73] Set Confluent to 7.2.2, Kafka to 7.2.2. --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 74d12e3b947..6b8df315112 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index dbea84dabf4..dda961206f8 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 8fb42294671..768cfc0bd70 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 042b94b5774..1a37b4f8bab 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 4fd29cfbc85..9d292f2b747 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index c4c479bf0fc..183af49483c 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/client/pom.xml b/client/pom.xml index 5de2471bb97..7737a4fd2dc 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ 
io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/core/pom.xml b/core/pom.xml index 925ba4d8415..cdc05ee5c61 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index f2451b19100..6ce49538b44 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 00b937322f6..17227d3c22f 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 65221bfac0e..c91a09e68c6 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 0d79ce98a28..9fbf8c9697f 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 469c76c674d..6489d2dbec1 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index c86165ce649..22e75d7f300 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.2-0jar7.2.2-0
+kafka-avro-serializer-7.2.2jar7.2.2
-kafka-clients-7.2.2-0-ccsjarincluded file
+kafka-clients-7.2.2-ccsjarincluded file
-kafka-connect-avro-converter-7.2.2-0jar7.2.2-0
+kafka-connect-avro-converter-7.2.2jar7.2.2
-kafka-json-serializer-7.2.2-0jar7.2.2-0
+kafka-json-serializer-7.2.2jar7.2.2
-kafka-schema-registry-7.2.2-0jar7.2.2-0
+kafka-schema-registry-7.2.2jar7.2.2
-kafka-schema-registry-client-7.2.2-0jar7.2.2-0
+kafka-schema-registry-client-7.2.2jar7.2.2
-kafka_2.11-7.2.2-0-ccsjarincluded file
+kafka_2.11-7.2.2-ccsjarincluded file
log4j-1.2.17jar1.2.17Apache 2.0
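Looking back briefly at PATCH 54/73 above: the SchemaRegistryCoordinator change tracks an upstream kafka-clients update in which the onJoinPrepare hook gained a Timer parameter (the boolean return was already there). A shape-only sketch against stand-in types; the real Timer is org.apache.kafka.common.utils.Timer and the real base class is Kafka's AbstractCoordinator, both assumed here rather than imported:

    // Stand-in for org.apache.kafka.common.utils.Timer; only the slice of
    // the surface this sketch needs, assumed rather than copied.
    interface Timer {
        long remainingMs();
    }

    // Stand-in base class shaped like the corner of AbstractCoordinator
    // that PATCH 54 adapts to.
    abstract class CoordinatorBase {
        protected abstract boolean onJoinPrepare(Timer timer, int generation, String memberId);
    }

    class SketchCoordinator extends CoordinatorBase {
        private Object assignmentSnapshot = new Object(); // placeholder state

        @Override
        protected boolean onJoinPrepare(Timer timer, int generation, String memberId) {
            // Revoke the previous assignment before rejoining the group, as
            // the schema-registry coordinator does, and report completion.
            if (assignmentSnapshot != null) {
                assignmentSnapshot = null;
            }
            return true;
        }
    }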
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index dd4d664f02b..85652ea772d 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index ef34a0920f4..8267a0bf4b3 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index 4fa13b1e3c8..1232b48265c 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index a2a018bdc50..74227e295ae 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.2-0, 7.2.3-0) + 7.2.2 kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.2-0 + 7.2.2 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 1cdc091bf0e..a6c37f0b38a 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index b35f816a56a..c7fc8d8ecc4 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index e95e2a0ad81..ae77188908e 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index e0c78c92710..80a5f5191c4 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index b9109b77eb6..74c0e93e737 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 990e02dad43..1f297de6358 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 179f671c7f5..682edc15036 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 474bac3a1bb..02b4cafb806 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.2 From bd9ff9108e4e6ac1e1dfe7ed839ff67d39f6a53d Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Wed, 28 Sep 2022 17:02:42 +0000 Subject: [PATCH 56/73] Bump Confluent to 7.2.3-0, Kafka to 7.2.3-0 --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- 
benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 74d12e3b947..bebc7048d3b 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index dbea84dabf4..8e41bbe3d37 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 8fb42294671..d81841ed78e 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 042b94b5774..924325e0fcf 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 4fd29cfbc85..dbe64621266 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index c4c479bf0fc..3465bd07e4e 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/client/pom.xml b/client/pom.xml index 5de2471bb97..bc12a5982e9 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/core/pom.xml b/core/pom.xml index 925ba4d8415..e424ee9f385 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index f2451b19100..db503812df9 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 00b937322f6..aa6666ca16d 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 65221bfac0e..8831772b79c 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 0d79ce98a28..76af3148c05 
100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 469c76c674d..2018be804e1 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index c86165ce649..aeed447d2ae 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.2-0jar7.2.2-0
+kafka-avro-serializer-7.2.3-0jar7.2.3-0
-kafka-clients-7.2.2-0-ccsjarincluded file
+kafka-clients-7.2.3-0-ccsjarincluded file
-kafka-connect-avro-converter-7.2.2-0jar7.2.2-0
+kafka-connect-avro-converter-7.2.3-0jar7.2.3-0
-kafka-json-serializer-7.2.2-0jar7.2.2-0
+kafka-json-serializer-7.2.3-0jar7.2.3-0
-kafka-schema-registry-7.2.2-0jar7.2.2-0
+kafka-schema-registry-7.2.3-0jar7.2.3-0
-kafka-schema-registry-client-7.2.2-0jar7.2.2-0
+kafka-schema-registry-client-7.2.3-0jar7.2.3-0
-kafka_2.11-7.2.2-0-ccsjarincluded file
+kafka_2.11-7.2.3-0-ccsjarincluded file
log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index dd4d664f02b..e9953b62475 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index ef34a0920f4..a5eaea33b46 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index 4fa13b1e3c8..d16668818cf 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index a2a018bdc50..91826162860 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.2-0, 7.2.3-0) + [7.2.3-0, 7.2.4-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.2-0 + 7.2.3-0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 1cdc091bf0e..0abc7e4838b 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index b35f816a56a..da696a4f3f9 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index e95e2a0ad81..7dd4037a7c7 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index e0c78c92710..50fc9d0dcb3 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index b9109b77eb6..f55e9863ab4 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 990e02dad43..7eac810f076 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 179f671c7f5..f08cdcb19d1 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 474bac3a1bb..8158ab9fa80 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.2-0 + 7.2.3-0 From 4a84866c0736d7be783ac0fc1e34f7d8d50db8a9 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Mon, 12 Dec 2022 17:44:45 +0000 Subject: [PATCH 57/73] Set Confluent to 7.2.3, Kafka to 7.2.3. 
--- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index bebc7048d3b..bca4754f2a4 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index 8e41bbe3d37..767fb0da3ed 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index d81841ed78e..4074750c834 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 924325e0fcf..c1c9f258e16 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index dbe64621266..8ea64dab7a9 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 3465bd07e4e..fbf34adb4cb 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/client/pom.xml b/client/pom.xml index bc12a5982e9..9608460d678 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/core/pom.xml b/core/pom.xml index e424ee9f385..41a80bae0ea 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index db503812df9..94d6b12e2e1 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index aa6666ca16d..cf5ed97bc8d 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 8831772b79c..ec91cc5d946 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git 
a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 76af3148c05..53f78302d88 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 2018be804e1..3218d93a042 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index aeed447d2ae..29f830fefc5 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.3-0jar7.2.3-0
+kafka-avro-serializer-7.2.3jar7.2.3
-kafka-clients-7.2.3-0-ccsjarincluded file
+kafka-clients-7.2.3-ccsjarincluded file
-kafka-connect-avro-converter-7.2.3-0jar7.2.3-0
+kafka-connect-avro-converter-7.2.3jar7.2.3
-kafka-json-serializer-7.2.3-0jar7.2.3-0
+kafka-json-serializer-7.2.3jar7.2.3
-kafka-schema-registry-7.2.3-0jar7.2.3-0
+kafka-schema-registry-7.2.3jar7.2.3
-kafka-schema-registry-client-7.2.3-0jar7.2.3-0
+kafka-schema-registry-client-7.2.3jar7.2.3
-kafka_2.11-7.2.3-0-ccsjarincluded file
+kafka_2.11-7.2.3-ccsjarincluded file
log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index e9953b62475..aef295a3265 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index a5eaea33b46..9fe86d4b5af 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index d16668818cf..ed10264861e 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 0362cad123a..541efa5ae73 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.3-0, 7.2.4-0) + 7.2.3 kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 Confluent, Inc. http://confluent.io @@ -86,7 +86,7 @@ 4.3.0 1.32 2.1.10 - 7.2.3-0 + 7.2.3 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 0abc7e4838b..8a31b7d51d7 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index da696a4f3f9..00308192def 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 7dd4037a7c7..3c7022dab37 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 50fc9d0dcb3..bd467c9af0e 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index f55e9863ab4..fc931644f38 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 7eac810f076..a9104eac525 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index f08cdcb19d1..fced1f84899 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 8158ab9fa80..b0251812191 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.3 From 244536b1e0dc5b25e55ce7fe852035aa40e97301 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Wed, 28 Dec 2022 06:54:12 +0000 Subject: [PATCH 58/73] Bump Confluent to 7.2.4-0, Kafka to 7.2.4-0 --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- 
benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index bebc7048d3b..568d2035986 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index 8e41bbe3d37..61e5d67ed8e 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index d81841ed78e..d1eef6a57fb 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 924325e0fcf..ff8c0fd68c6 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index dbe64621266..9dde03c6624 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 3465bd07e4e..d20dd94d36a 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/client/pom.xml b/client/pom.xml index bc12a5982e9..873fe3678a4 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/core/pom.xml b/core/pom.xml index e424ee9f385..a2093b86a73 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index db503812df9..01ac50fff42 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index aa6666ca16d..b4811c4f669 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 8831772b79c..c4699209f4e 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 76af3148c05..dbdbc7ef03f 
100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 2018be804e1..a1799a86084 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index aeed447d2ae..ab5f79f6f70 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.3-0jar7.2.3-0
+kafka-avro-serializer-7.2.4-0jar7.2.4-0
-kafka-clients-7.2.3-0-ccsjarincluded file
+kafka-clients-7.2.4-0-ccsjarincluded file
-kafka-connect-avro-converter-7.2.3-0jar7.2.3-0
+kafka-connect-avro-converter-7.2.4-0jar7.2.4-0
-kafka-json-serializer-7.2.3-0jar7.2.3-0
+kafka-json-serializer-7.2.4-0jar7.2.4-0
-kafka-schema-registry-7.2.3-0jar7.2.3-0
+kafka-schema-registry-7.2.4-0jar7.2.4-0
-kafka-schema-registry-client-7.2.3-0jar7.2.3-0
+kafka-schema-registry-client-7.2.4-0jar7.2.4-0
-kafka_2.11-7.2.3-0-ccsjarincluded file
+kafka_2.11-7.2.4-0-ccsjarincluded file
log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index e9953b62475..85efd59fe40 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index a5eaea33b46..cf70835a657 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index d16668818cf..848f00f57e1 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 0362cad123a..ecb32690a03 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.3-0, 7.2.4-0) + [7.2.4-0, 7.2.5-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 Confluent, Inc. http://confluent.io @@ -86,7 +86,7 @@ 4.3.0 1.32 2.1.10 - 7.2.3-0 + 7.2.4-0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 0abc7e4838b..674526afb36 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index da696a4f3f9..5c3d64d9421 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 7dd4037a7c7..48c27618d59 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 50fc9d0dcb3..87cae98c6fe 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index f55e9863ab4..177a2878e48 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 7eac810f076..335c9fa3da4 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index f08cdcb19d1..6f1af97df39 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 8158ab9fa80..e15796c3914 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3-0 + 7.2.4-0 From a383f833aea71c0f91f0e66c2a68949392fa9e93 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Fri, 17 Feb 2023 00:35:39 +0000 Subject: [PATCH 59/73] Set Confluent to 7.2.4, Kafka to 7.2.4. 
--- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index bca4754f2a4..62d728d1e3f 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index 767fb0da3ed..43987d5a998 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 4074750c834..f226a680d47 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index c1c9f258e16..3be1387fe2e 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 8ea64dab7a9..d597358f692 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index fbf34adb4cb..017490fbd51 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/client/pom.xml b/client/pom.xml index 9608460d678..f5d76cdb68e 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/core/pom.xml b/core/pom.xml index 41a80bae0ea..45177fcb83c 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 94d6b12e2e1..db49f6a1e25 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index cf5ed97bc8d..acc3b37ba46 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index ec91cc5d946..49ed0739542 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git 
a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 53f78302d88..ab9a6e21a1d 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 3218d93a042..cccb3c201c7 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 29f830fefc5..08d434a8312 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.3jar7.2.3
+kafka-avro-serializer-7.2.4jar7.2.4
-kafka-clients-7.2.3-ccsjarincluded file
+kafka-clients-7.2.4-ccsjarincluded file
-kafka-connect-avro-converter-7.2.3jar7.2.3
+kafka-connect-avro-converter-7.2.4jar7.2.4
-kafka-json-serializer-7.2.3jar7.2.3
+kafka-json-serializer-7.2.4jar7.2.4
-kafka-schema-registry-7.2.3jar7.2.3
+kafka-schema-registry-7.2.4jar7.2.4
-kafka-schema-registry-client-7.2.3jar7.2.3
+kafka-schema-registry-client-7.2.4jar7.2.4
-kafka_2.11-7.2.3-ccsjarincluded file
+kafka_2.11-7.2.4-ccsjarincluded file
 log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index aef295a3265..fd44d53e360 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 9fe86d4b5af..0c9f14c0e52 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index ed10264861e..904212c641f 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 541efa5ae73..9185cb80ad1 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - 7.2.3 + 7.2.4 kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.3 + 7.2.4 Confluent, Inc. http://confluent.io @@ -86,7 +86,7 @@ 4.3.0 1.32 2.1.10 - 7.2.3 + 7.2.4 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 8a31b7d51d7..feeecdf51b3 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 00308192def..b98bd50031b 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 3c7022dab37..5349d6072ed 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index bd467c9af0e..94f642785c6 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index fc931644f38..848d8eb1342 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index a9104eac525..01c70cd06a2 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index fced1f84899..a82b82a9b4d 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index b0251812191..b0eb447c9b0 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.3 + 7.2.4 From 533a1b1564f79a9b8cd71a7a23c7eef4a0801b84 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Sun, 26 Feb 2023 06:30:30 +0000 Subject: [PATCH 60/73] Bump Confluent to 7.2.5-0, Kafka to 7.2.5-0 --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml 
| 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 568d2035986..ba9bdd63300 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index 61e5d67ed8e..89356a88b3a 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index d1eef6a57fb..01b9a6a37ef 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index ff8c0fd68c6..82bbd35f50c 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 9dde03c6624..f1a1d5f1c32 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index d20dd94d36a..96d8f97ba25 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/client/pom.xml b/client/pom.xml index 99ff5fa0d3b..f7871a655ef 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/core/pom.xml b/core/pom.xml index a2093b86a73..19451cb07cf 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 01ac50fff42..2bea0f25050 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index b4811c4f669..57f1cc17345 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index c4699209f4e..e1e1b69a4f8 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index dbdbc7ef03f..e7eef0d13ea 100644 --- a/json-schema-serializer/pom.xml +++ 
b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index a1799a86084..57c1e767781 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index ab5f79f6f70..306b46021bb 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.4-0jar7.2.4-0
+kafka-avro-serializer-7.2.5-0jar7.2.5-0
-kafka-clients-7.2.4-0-ccsjarincluded file
+kafka-clients-7.2.5-0-ccsjarincluded file
-kafka-connect-avro-converter-7.2.4-0jar7.2.4-0
+kafka-connect-avro-converter-7.2.5-0jar7.2.5-0
-kafka-json-serializer-7.2.4-0jar7.2.4-0
+kafka-json-serializer-7.2.5-0jar7.2.5-0
-kafka-schema-registry-7.2.4-0jar7.2.4-0
+kafka-schema-registry-7.2.5-0jar7.2.5-0
-kafka-schema-registry-client-7.2.4-0jar7.2.4-0
+kafka-schema-registry-client-7.2.5-0jar7.2.5-0
-kafka_2.11-7.2.4-0-ccsjarincluded file
+kafka_2.11-7.2.5-0-ccsjarincluded file
 log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 85efd59fe40..e5936c81371 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index cf70835a657..874db339d87 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index 848f00f57e1..e7c2a56c9da 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index ecb32690a03..6b1b927ff48 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.4-0, 7.2.5-0) + [7.2.5-0, 7.2.6-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 Confluent, Inc. http://confluent.io @@ -86,7 +86,7 @@ 4.3.0 1.32 2.1.10 - 7.2.4-0 + 7.2.5-0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 674526afb36..9a70dbdc454 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 5c3d64d9421..f2b0b1890ae 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 48c27618d59..8f052d9081d 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 87cae98c6fe..430d494af65 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index 177a2878e48..3290e81dacc 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 335c9fa3da4..2d97b7ded15 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 6f1af97df39..5ba26905b8f 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index e15796c3914..566a2b71881 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.4-0 + 7.2.5-0 From 83ffd05c62b20c874d2bd8e81676dbc3c663acd2 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Thu, 23 Mar 2023 19:20:38 +0000 Subject: [PATCH 61/73] Set Confluent to 7.2.5, Kafka to 7.2.5. 
--- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index ba9bdd63300..4545ec3b20c 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index 89356a88b3a..35cac87406f 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 01b9a6a37ef..ae8481edaef 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 82bbd35f50c..e6670927250 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index f1a1d5f1c32..68fd76b3dae 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 96d8f97ba25..b59fd0e378b 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/client/pom.xml b/client/pom.xml index f7871a655ef..3c94b93581e 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/core/pom.xml b/core/pom.xml index 19451cb07cf..8e1b1c3cd23 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 2bea0f25050..ffdc5a92015 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 57f1cc17345..d6e22022103 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index e1e1b69a4f8..22a034c945b 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git 
a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index e7eef0d13ea..028c4ec9103 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 57c1e767781..372293f9831 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 306b46021bb..7e6f8ec340d 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.5-0jar7.2.5-0
+kafka-avro-serializer-7.2.5jar7.2.5
-kafka-clients-7.2.5-0-ccsjarincluded file
+kafka-clients-7.2.5-ccsjarincluded file
-kafka-connect-avro-converter-7.2.5-0jar7.2.5-0
+kafka-connect-avro-converter-7.2.5jar7.2.5
-kafka-json-serializer-7.2.5-0jar7.2.5-0
+kafka-json-serializer-7.2.5jar7.2.5
-kafka-schema-registry-7.2.5-0jar7.2.5-0
+kafka-schema-registry-7.2.5jar7.2.5
-kafka-schema-registry-client-7.2.5-0jar7.2.5-0
+kafka-schema-registry-client-7.2.5jar7.2.5
-kafka_2.11-7.2.5-0-ccsjarincluded file
+kafka_2.11-7.2.5-ccsjarincluded file
 log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index e5936c81371..faf887eff8a 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 874db339d87..9fc95cdecb4 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index e7c2a56c9da..dd4c5d580be 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 6b1b927ff48..e0eb37083c4 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.5-0, 7.2.6-0) + 7.2.5 kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 Confluent, Inc. http://confluent.io @@ -86,7 +86,7 @@ 4.3.0 1.32 2.1.10 - 7.2.5-0 + 7.2.5 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 9a70dbdc454..823e030edc5 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index f2b0b1890ae..c7e5ef380c1 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 8f052d9081d..e0ebd730c7e 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 430d494af65..969664452ef 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index 3290e81dacc..3221ed8f506 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 2d97b7ded15..2a580c3e3c7 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 5ba26905b8f..871bd4681fb 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 566a2b71881..58c18a30c0a 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.5 From b709d937ed6a8fbcda43af630c088e0338857d74 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Fri, 7 Apr 2023 07:52:54 +0000 Subject: [PATCH 62/73] Bump Confluent to 7.2.6-0, Kafka to 7.2.6-0 --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml 
| 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index ba9bdd63300..982c8b85c41 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index 89356a88b3a..a37ed6b726c 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 01b9a6a37ef..44e0fb20355 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 82bbd35f50c..ea9798a41eb 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index f1a1d5f1c32..ae68ddc7981 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 96d8f97ba25..8c1b59c02d4 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/client/pom.xml b/client/pom.xml index f7871a655ef..9c770b97366 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/core/pom.xml b/core/pom.xml index 19451cb07cf..fdda11786a2 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 2bea0f25050..945118e2cd0 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 57f1cc17345..553ee1c5c51 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index e1e1b69a4f8..83ea3f7ebaa 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index e7eef0d13ea..b677edba938 100644 --- 
a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 57c1e767781..5363e1a9859 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 306b46021bb..3d529d96d41 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.5-0jar7.2.5-0
+kafka-avro-serializer-7.2.6-0jar7.2.6-0
-kafka-clients-7.2.5-0-ccsjarincluded file
+kafka-clients-7.2.6-0-ccsjarincluded file
-kafka-connect-avro-converter-7.2.5-0jar7.2.5-0
+kafka-connect-avro-converter-7.2.6-0jar7.2.6-0
-kafka-json-serializer-7.2.5-0jar7.2.5-0
+kafka-json-serializer-7.2.6-0jar7.2.6-0
-kafka-schema-registry-7.2.5-0jar7.2.5-0
+kafka-schema-registry-7.2.6-0jar7.2.6-0
-kafka-schema-registry-client-7.2.5-0jar7.2.5-0
+kafka-schema-registry-client-7.2.6-0jar7.2.6-0
-kafka_2.11-7.2.5-0-ccsjarincluded file
+kafka_2.11-7.2.6-0-ccsjarincluded file
 log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index e5936c81371..917527f8338 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 874db339d87..0192e879001 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index e7c2a56c9da..24a06e31685 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 6b1b927ff48..d9e29a864a3 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.5-0, 7.2.6-0) + [7.2.6-0, 7.2.7-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 Confluent, Inc. http://confluent.io @@ -86,7 +86,7 @@ 4.3.0 1.32 2.1.10 - 7.2.5-0 + 7.2.6-0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 9a70dbdc454..5c32026a037 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index f2b0b1890ae..df72eaf6cd0 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 8f052d9081d..a760bcd81b4 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 430d494af65..3f72c4f5d0b 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index 3290e81dacc..b9767becd33 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 2d97b7ded15..b557df5ca63 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 5ba26905b8f..35bfd593c8c 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 566a2b71881..7e7ede8729a 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.5-0 + 7.2.6-0 From 18d69e07c5e041c1d2192edc63797cdc1256f15b Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Wed, 24 May 2023 16:40:28 -0700 Subject: [PATCH 63/73] Fix merge issue --- .../kafka/schemaregistry/storage/KafkaSchemaRegistry.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java index 9df0e2f3ea9..12a86ccab95 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java @@ -517,8 +517,8 @@ public int register(String subject, boolean isCompatible = true; List compatibilityErrorLogs = new ArrayList<>(); if (mode != Mode.IMPORT) { - isCompatible = isCompatibleWithPrevious(subject, parsedSchema, undeletedVersions).isEmpty(); compatibilityErrorLogs = isCompatibleWithPrevious(subject, parsedSchema, undeletedVersions); + isCompatible = compatibilityErrorLogs.isEmpty(); } try { From f143353c72e30b0c01e7b47bef952351d6c67c03 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Mon, 10 Jul 2023 10:50:36 +0000 Subject: [PATCH 64/73] Set Confluent to 7.2.6, Kafka to 7.2.6. --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 982c8b85c41..a5bde8033c0 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index a37ed6b726c..ec120dba4d7 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 44e0fb20355..3fce6fc4982 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index ea9798a41eb..0d7b9716e8f 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index ae68ddc7981..41bba031bcc 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 8c1b59c02d4..fc5de8f68ea 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/client/pom.xml b/client/pom.xml index 9c770b97366..10a0dd66c62 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 
7.2.6-0 + 7.2.6 diff --git a/core/pom.xml b/core/pom.xml index fdda11786a2..afb338165e4 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 945118e2cd0..0a639a7f908 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 553ee1c5c51..4fdcfe5f9fd 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 83ea3f7ebaa..43863729890 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index b677edba938..e19bac87351 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 5363e1a9859..77bcee5907b 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 3d529d96d41..522713c2049 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.6-0jar7.2.6-0
+kafka-avro-serializer-7.2.6jar7.2.6
-kafka-clients-7.2.6-0-ccsjarincluded file
+kafka-clients-7.2.6-ccsjarincluded file
-kafka-connect-avro-converter-7.2.6-0jar7.2.6-0
+kafka-connect-avro-converter-7.2.6jar7.2.6
-kafka-json-serializer-7.2.6-0jar7.2.6-0
+kafka-json-serializer-7.2.6jar7.2.6
-kafka-schema-registry-7.2.6-0jar7.2.6-0
+kafka-schema-registry-7.2.6jar7.2.6
-kafka-schema-registry-client-7.2.6-0jar7.2.6-0
+kafka-schema-registry-client-7.2.6jar7.2.6
-kafka_2.11-7.2.6-0-ccsjarincluded file
+kafka_2.11-7.2.6-ccsjarincluded file
 log4j-1.2.17jar1.2.17Apache 2.0
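Stepping back to PATCH 63 above ("Fix merge issue"), the one functional change in this stretch of the series: the pre-fix code in KafkaSchemaRegistry.register() called isCompatibleWithPrevious(...) twice, once to derive the boolean and once to collect the error messages; the fix calls it once and derives isCompatible from the returned list. Below is a minimal, self-contained sketch of the corrected flow. checkCompatibility() is a hypothetical stand-in for isCompatibleWithPrevious(subject, parsedSchema, undeletedVersions), and the String element type is an assumption, since the extracted diff dropped the generic parameter.

import java.util.ArrayList;
import java.util.List;

public class CompatibilityCheckSketch {

    // Hypothetical stand-in for the registry's compatibility check: returns
    // an empty list when the candidate schema is compatible with previous
    // versions, otherwise one message per violated rule.
    static List<String> checkCompatibility(String subject) {
        return new ArrayList<>();
    }

    public static void main(String[] args) {
        boolean importMode = false; // stands in for mode != Mode.IMPORT
        boolean isCompatible = true;
        List<String> compatibilityErrorLogs = new ArrayList<>();
        if (!importMode) {
            // Post-fix logic: run the (potentially expensive) check once,
            // keep its error list, and derive the boolean from that list.
            // The pre-fix code invoked the check a second time just to call
            // isEmpty() on a fresh result, doing the same work redundantly.
            compatibilityErrorLogs = checkCompatibility("test-subject");
            isCompatible = compatibilityErrorLogs.isEmpty();
        }
        System.out.println("compatible=" + isCompatible
                + ", errors=" + compatibilityErrorLogs);
    }
}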
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 917527f8338..93dfabca5b4 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 0192e879001..0caa1a2bded 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index 24a06e31685..c0e717b9538 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 4b7455d3a4b..9189e83ff41 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.6-0, 7.2.7-0) + 7.2.6 kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.6-0 + 7.2.6 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 5c32026a037..b8c5e003d66 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index df72eaf6cd0..d24beefdb22 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index a760bcd81b4..a88eea13d8e 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 3f72c4f5d0b..e7cfd3df821 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index b9767becd33..9addae487c2 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index b557df5ca63..41b0c5fc217 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 35bfd593c8c..28df27bb691 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 7e7ede8729a..e82a490f42b 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.6 From 15cc280ac65e6ded73bed9ba375ea6819d37a504 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Tue, 18 Jul 2023 15:22:43 +0000 Subject: [PATCH 65/73] Bump Confluent to 7.2.7-0, Kafka to 7.2.7-0 --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- 
benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 982c8b85c41..44ba77ca493 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index a37ed6b726c..c6424541c51 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 44e0fb20355..9293150f640 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index ea9798a41eb..62fc7203e1d 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index ae68ddc7981..071718b53a7 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 8c1b59c02d4..0dfe8c879d7 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/client/pom.xml b/client/pom.xml index 9c770b97366..93b61830b4c 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/core/pom.xml b/core/pom.xml index fdda11786a2..ed255049a1b 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 945118e2cd0..613e9fa5704 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 553ee1c5c51..289f233d6ee 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 83ea3f7ebaa..4c0def0b9e1 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index b677edba938..c3ec11a11b9 
100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 5363e1a9859..a2ec0520ee2 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 3d529d96d41..46aa6640a7b 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.6-0jar7.2.6-0
+kafka-avro-serializer-7.2.7-0jar7.2.7-0
-kafka-clients-7.2.6-0-ccsjarincluded file
+kafka-clients-7.2.7-0-ccsjarincluded file
-kafka-connect-avro-converter-7.2.6-0jar7.2.6-0
+kafka-connect-avro-converter-7.2.7-0jar7.2.7-0
-kafka-json-serializer-7.2.6-0jar7.2.6-0
+kafka-json-serializer-7.2.7-0jar7.2.7-0
-kafka-schema-registry-7.2.6-0jar7.2.6-0
+kafka-schema-registry-7.2.7-0jar7.2.7-0
-kafka-schema-registry-client-7.2.6-0jar7.2.6-0
+kafka-schema-registry-client-7.2.7-0jar7.2.7-0
-kafka_2.11-7.2.6-0-ccsjarincluded file
+kafka_2.11-7.2.7-0-ccsjarincluded file
 log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 917527f8338..42fec35aa66 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 0192e879001..f4cec803e25 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index 24a06e31685..cab03e7296f 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 4b7455d3a4b..962affdedfd 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.6-0, 7.2.7-0) + [7.2.7-0, 7.2.8-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.6-0 + 7.2.7-0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 5c32026a037..9227f4d0a48 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index df72eaf6cd0..073ee254cf8 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index a760bcd81b4..957dc1bf438 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 3f72c4f5d0b..329168a904d 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index b9767becd33..aa516e5c258 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index b557df5ca63..0caa635c6db 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 35bfd593c8c..11fafdd66fd 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 7e7ede8729a..7576afa53ac 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.6-0 + 7.2.7-0 From af376b6e05adb12bad1a082135340e2055d0436f Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Wed, 13 Sep 2023 18:49:55 +0000 Subject: [PATCH 66/73] Set Confluent to 7.2.7, Kafka to 7.2.7. 
--- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 44ba77ca493..71023e1a7ad 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index c6424541c51..3762a9171f3 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 9293150f640..dc4598818aa 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 62fc7203e1d..62acab748c0 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 071718b53a7..422aa613bfd 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 0dfe8c879d7..494aabfdf37 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/client/pom.xml b/client/pom.xml index 93b61830b4c..a2bb2041720 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/core/pom.xml b/core/pom.xml index ed255049a1b..9d437bdeb4d 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 613e9fa5704..c0d7a74fcf9 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 289f233d6ee..9416bdefabd 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 4c0def0b9e1..ca4c401e0ad 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git 
a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index c3ec11a11b9..0dbd7a4ba9e 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index a2ec0520ee2..e8d0fa27a93 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 46aa6640a7b..dbb8a74a488 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.7-0jar7.2.7-0
+kafka-avro-serializer-7.2.7jar7.2.7
-kafka-clients-7.2.7-0-ccsjarincluded file
+kafka-clients-7.2.7-ccsjarincluded file
-kafka-connect-avro-converter-7.2.7-0jar7.2.7-0
+kafka-connect-avro-converter-7.2.7jar7.2.7
-kafka-json-serializer-7.2.7-0jar7.2.7-0
+kafka-json-serializer-7.2.7jar7.2.7
-kafka-schema-registry-7.2.7-0jar7.2.7-0
+kafka-schema-registry-7.2.7jar7.2.7
-kafka-schema-registry-client-7.2.7-0jar7.2.7-0
+kafka-schema-registry-client-7.2.7jar7.2.7
-kafka_2.11-7.2.7-0-ccsjarincluded file
+kafka_2.11-7.2.7-ccsjarincluded file
 log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 42fec35aa66..e0d3a85b87c 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index f4cec803e25..7afd892870c 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index cab03e7296f..413b9d8c5bf 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index f6c9b32a0c9..880d2420797 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.7-0, 7.2.8-0) + 7.2.7 kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.7-0 + 7.2.7 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 9227f4d0a48..c9b78b0159d 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 073ee254cf8..1b3c9b8268b 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 957dc1bf438..feba8a394f0 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 329168a904d..a4d685094a2 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index aa516e5c258..34332afc8ff 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 0caa635c6db..619b5f8e472 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 11fafdd66fd..7380561aa15 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 7576afa53ac..99e4031886f 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.7 From 2af4b729faab1d17e270067e586cdcdc201c81a9 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Wed, 4 Oct 2023 23:22:31 +0000 Subject: [PATCH 67/73] Bump Confluent to 7.2.8-0, Kafka to 7.2.8-0 --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- 
benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 44ba77ca493..6b6b6203e8e 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index c6424541c51..c813aa99141 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 9293150f640..415be893d87 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 62fc7203e1d..0fba7498227 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 071718b53a7..592792d400b 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index 0dfe8c879d7..b2e9fb457db 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/client/pom.xml b/client/pom.xml index 93b61830b4c..03ed86245a7 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/core/pom.xml b/core/pom.xml index ed255049a1b..09a17f6d89b 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 613e9fa5704..4d8453da3e2 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 289f233d6ee..4b37fa96acb 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 4c0def0b9e1..b5383ae2329 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index c3ec11a11b9..f44962bb2a2 
100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index a2ec0520ee2..a5e452a10cf 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index 46aa6640a7b..c3ebaf3ef56 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.7-0jar7.2.7-0
+kafka-avro-serializer-7.2.8-0jar7.2.8-0
-kafka-clients-7.2.7-0-ccsjarincluded file
+kafka-clients-7.2.8-0-ccsjarincluded file
-kafka-connect-avro-converter-7.2.7-0jar7.2.7-0
+kafka-connect-avro-converter-7.2.8-0jar7.2.8-0
-kafka-json-serializer-7.2.7-0jar7.2.7-0
+kafka-json-serializer-7.2.8-0jar7.2.8-0
-kafka-schema-registry-7.2.7-0jar7.2.7-0
+kafka-schema-registry-7.2.8-0jar7.2.8-0
-kafka-schema-registry-client-7.2.7-0jar7.2.7-0
+kafka-schema-registry-client-7.2.8-0jar7.2.8-0
-kafka_2.11-7.2.7-0-ccsjarincluded file
+kafka_2.11-7.2.8-0-ccsjarincluded file
log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 42fec35aa66..381c528b77b 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index f4cec803e25..21667ba26e0 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index cab03e7296f..50e979d5e38 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index f6c9b32a0c9..5ff6cb470b8 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.7-0, 7.2.8-0) + [7.2.8-0, 7.2.9-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.7-0 + 7.2.8-0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 9227f4d0a48..8270a4ed6ed 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 073ee254cf8..b3c636bcfdc 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 957dc1bf438..98049e4fe0c 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 329168a904d..3936b42363d 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index aa516e5c258..11a910a84bd 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 0caa635c6db..5b0be1be59d 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 11fafdd66fd..1ae5b736e2f 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index 7576afa53ac..c5b1f279ff1 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.7-0 + 7.2.8-0 From e9a23a296dcad0c4ebf00f229a2d6c5c68d57098 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Fri, 13 Oct 2023 21:45:17 -0700 Subject: [PATCH 68/73] Fix build --- .../schemaregistry/maven/TestLocalCompatibilityMojoTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java
index 09e72950425..ab76aac682b 100644
--- a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java
+++ b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java
@@ -136,7 +136,7 @@ private void makeFiles(){
         + "\"name\":\"myrecord\","
         + "\"fields\":"
         + "[{\"type\":\"string\",\"name\":\"f1\"},"
-        + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"}]},"
+        + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"},"
         + " {\"type\":\"string\",\"name\":\"f3\", \"default\": \"bar\"}]}";
     makeFile(schemaString8, "schema8.avsc");
 

From 3d461888655005b8d6643febe62713f155dfa234 Mon Sep 17 00:00:00 2001
From: Confluent Jenkins Bot
Date: Fri, 20 Oct 2023 14:27:16 +0000
Subject: [PATCH 69/73] Set Confluent to 7.2.8, Kafka to 7.2.8.

---
 avro-converter/pom.xml | 2 +-
 avro-data/pom.xml | 2 +-
 avro-serde/pom.xml | 2 +-
 avro-serializer/pom.xml | 2 +-
 benchmark/pom.xml | 2 +-
 client-console-scripts/pom.xml | 2 +-
 client/pom.xml | 2 +-
 core/pom.xml | 2 +-
 json-schema-converter/pom.xml | 2 +-
 json-schema-provider/pom.xml | 2 +-
 json-schema-serde/pom.xml | 2 +-
 json-schema-serializer/pom.xml | 2 +-
 json-serializer/pom.xml | 2 +-
 licenses-and-notices.html | 14 +++++++-------
 maven-plugin/pom.xml | 2 +-
 package-kafka-serde-tools/pom.xml | 2 +-
 package-schema-registry/pom.xml | 2 +-
 pom.xml | 6 +++---
 protobuf-converter/pom.xml | 2 +-
 protobuf-provider/pom.xml | 2 +-
 protobuf-serde/pom.xml | 2 +-
 protobuf-serializer/pom.xml | 2 +-
 protobuf-types/pom.xml | 2 +-
 schema-converter/pom.xml | 2 +-
 schema-registry-console-scripts/pom.xml | 2 +-
 schema-serializer/pom.xml | 2 +-
 26 files changed, 34 insertions(+), 34 deletions(-)

diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 6b6b6203e8e..31856c23c71 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8
diff --git a/avro-data/pom.xml b/avro-data/pom.xml index c813aa99141..a03aa4095a4 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8
diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 415be893d87..5efa8102911 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8
diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 0fba7498227..d7bfd5d9665 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8
diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 592792d400b..ecea82239b1 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 kafka-schema-registry-benchmark
diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index b2e9fb457db..ed6fcdd2ed8 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8
diff --git a/client/pom.xml b/client/pom.xml index 03ed86245a7..f6d6ff7f2b4 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@
io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/core/pom.xml b/core/pom.xml index 09a17f6d89b..15b5ceae17e 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 4d8453da3e2..8a0dbbc54a4 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 4b37fa96acb..6bb65fc78cf 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index b5383ae2329..d77ee37ddff 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index f44962bb2a2..5b3711c62a0 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index a5e452a10cf..1772b314d42 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index c3ebaf3ef56..afa49b4b6bf 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.8-0jar7.2.8-0
+kafka-avro-serializer-7.2.8jar7.2.8
-kafka-clients-7.2.8-0-ccsjarincluded file
+kafka-clients-7.2.8-ccsjarincluded file
-kafka-connect-avro-converter-7.2.8-0jar7.2.8-0
+kafka-connect-avro-converter-7.2.8jar7.2.8
-kafka-json-serializer-7.2.8-0jar7.2.8-0
+kafka-json-serializer-7.2.8jar7.2.8
-kafka-schema-registry-7.2.8-0jar7.2.8-0
+kafka-schema-registry-7.2.8jar7.2.8
-kafka-schema-registry-client-7.2.8-0jar7.2.8-0
+kafka-schema-registry-client-7.2.8jar7.2.8
-kafka_2.11-7.2.8-0-ccsjarincluded file
+kafka_2.11-7.2.8-ccsjarincluded file
log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 381c528b77b..2f56356d1db 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 21667ba26e0..ed1ced3c77e 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index 50e979d5e38..f344c525585 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 64f63b8943e..a1814cd7d3e 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.8-0, 7.2.9-0) + 7.2.8 kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.8-0 + 7.2.8 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 8270a4ed6ed..5c4ba0d0d5c 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index b3c636bcfdc..ad2f9cdf8b5 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 98049e4fe0c..6d9596cbfab 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 3936b42363d..dffe1364553 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index 11a910a84bd..4c39d67101c 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 5b0be1be59d..e8f7e74b2f2 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 1ae5b736e2f..8daa4dac908 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index c5b1f279ff1..7562bcef213 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.8 From f1dbde0cf4bb7f7d5ee24ed268ff60944902fb60 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Sun, 12 Nov 2023 11:39:35 +0000 Subject: [PATCH 70/73] Bump Confluent to 7.2.9-0, Kafka to 7.2.9-0 --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- 
benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 6b6b6203e8e..2981af1cc51 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index c813aa99141..f3eee38e5d4 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 415be893d87..c5de343cf82 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index 0fba7498227..b079d1f3f8c 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 592792d400b..d1822cead63 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index b2e9fb457db..c0d255138fb 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/client/pom.xml b/client/pom.xml index 03ed86245a7..1498fdeda91 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/core/pom.xml b/core/pom.xml index 09a17f6d89b..a9e624d74b6 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 4d8453da3e2..1afcfaefafd 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 4b37fa96acb..9b4464a1fa9 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index b5383ae2329..19d1306cfc4 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index f44962bb2a2..8b0f2cb5fa3 
100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index a5e452a10cf..59bf1755aa3 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index c3ebaf3ef56..a19741657bf 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.8-0jar7.2.8-0
+kafka-avro-serializer-7.2.9-0jar7.2.9-0
-kafka-clients-7.2.8-0-ccsjarincluded file
+kafka-clients-7.2.9-0-ccsjarincluded file
-kafka-connect-avro-converter-7.2.8-0jar7.2.8-0
+kafka-connect-avro-converter-7.2.9-0jar7.2.9-0
-kafka-json-serializer-7.2.8-0jar7.2.8-0
+kafka-json-serializer-7.2.9-0jar7.2.9-0
-kafka-schema-registry-7.2.8-0jar7.2.8-0
+kafka-schema-registry-7.2.9-0jar7.2.9-0
-kafka-schema-registry-client-7.2.8-0jar7.2.8-0
+kafka-schema-registry-client-7.2.9-0jar7.2.9-0
-kafka_2.11-7.2.8-0-ccsjarincluded file
+kafka_2.11-7.2.9-0-ccsjarincluded file
log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 381c528b77b..e24b1457fea 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 21667ba26e0..8b89f17a2d1 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index 50e979d5e38..e6804b22e45 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 64f63b8943e..62d7e8d5030 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.8-0, 7.2.9-0) + [7.2.9-0, 7.2.10-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.0.0 4.3.0 2.1.10 - 7.2.8-0 + 7.2.9-0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 8270a4ed6ed..3be5c6c316c 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index b3c636bcfdc..6c6b5a8cb5a 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 98049e4fe0c..23dd9a15f1e 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 3936b42363d..444838ed4f6 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index 11a910a84bd..72031077f2b 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 5b0be1be59d..691fb5039cd 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 1ae5b736e2f..9473d16aa2d 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index c5b1f279ff1..cfad795a7b9 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.8-0 + 7.2.9-0 From 6a40b34dfc26568c65b6aa0e6043a4ee3f09a6fa Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Wed, 20 Dec 2023 14:38:52 +0000 Subject: [PATCH 71/73] Set Confluent to 7.2.9, Kafka to 7.2.9. 
--- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 2981af1cc51..b630d43c25a 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index f3eee38e5d4..efa76d810ad 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index c5de343cf82..0dede4365d3 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index b079d1f3f8c..6e228d5797b 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index d1822cead63..d797ffc7866 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index c0d255138fb..5b708309ce7 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/client/pom.xml b/client/pom.xml index 1498fdeda91..feddf3173ec 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/core/pom.xml b/core/pom.xml index a9e624d74b6..7755b578774 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 1afcfaefafd..5d7004b4e90 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 9b4464a1fa9..7ce83bb8401 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 19d1306cfc4..c5e97cb4838 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git 
a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 8b0f2cb5fa3..d9b5b86a180 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 59bf1755aa3..6f214105f0a 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index a19741657bf..d75f411c654 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.9-0jar7.2.9-0
+kafka-avro-serializer-7.2.9jar7.2.9
-kafka-clients-7.2.9-0-ccsjarincluded file
+kafka-clients-7.2.9-ccsjarincluded file
-kafka-connect-avro-converter-7.2.9-0jar7.2.9-0
+kafka-connect-avro-converter-7.2.9jar7.2.9
-kafka-json-serializer-7.2.9-0jar7.2.9-0
+kafka-json-serializer-7.2.9jar7.2.9
-kafka-schema-registry-7.2.9-0jar7.2.9-0
+kafka-schema-registry-7.2.9jar7.2.9
-kafka-schema-registry-client-7.2.9-0jar7.2.9-0
+kafka-schema-registry-client-7.2.9jar7.2.9
-kafka_2.11-7.2.9-0-ccsjarincluded file
+kafka_2.11-7.2.9-ccsjarincluded file
log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index e24b1457fea..a112b81f788 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 8b89f17a2d1..0f768f67000 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index e6804b22e45..b3773a1c1e0 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 054d0763364..ed42b0f8b15 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.9-0, 7.2.10-0) + 7.2.9 kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.4.0 4.3.0 2.1.10 - 7.2.9-0 + 7.2.9 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 3be5c6c316c..f97b872917c 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 6c6b5a8cb5a..47e7c658b34 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 23dd9a15f1e..5d2f2abd44d 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 444838ed4f6..546c6a7d608 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index 72031077f2b..1a7b105150c 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 691fb5039cd..6872ca3312f 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 9473d16aa2d..a2463bf2805 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index cfad795a7b9..7a753131674 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.9 From 2b07ab672eb07370a472a340cb177836e5913a2d Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Mon, 8 Jan 2024 12:41:58 +0000 Subject: [PATCH 72/73] Bump Confluent to 7.2.10-0, Kafka to 7.2.10-0 --- avro-converter/pom.xml | 2 +- avro-data/pom.xml | 2 +- avro-serde/pom.xml | 2 +- avro-serializer/pom.xml | 2 +- 
benchmark/pom.xml | 2 +- client-console-scripts/pom.xml | 2 +- client/pom.xml | 2 +- core/pom.xml | 2 +- json-schema-converter/pom.xml | 2 +- json-schema-provider/pom.xml | 2 +- json-schema-serde/pom.xml | 2 +- json-schema-serializer/pom.xml | 2 +- json-serializer/pom.xml | 2 +- licenses-and-notices.html | 14 +++++++------- maven-plugin/pom.xml | 2 +- package-kafka-serde-tools/pom.xml | 2 +- package-schema-registry/pom.xml | 2 +- pom.xml | 6 +++--- protobuf-converter/pom.xml | 2 +- protobuf-provider/pom.xml | 2 +- protobuf-serde/pom.xml | 2 +- protobuf-serializer/pom.xml | 2 +- protobuf-types/pom.xml | 2 +- schema-converter/pom.xml | 2 +- schema-registry-console-scripts/pom.xml | 2 +- schema-serializer/pom.xml | 2 +- 26 files changed, 34 insertions(+), 34 deletions(-) diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 2981af1cc51..3ca6f6c74e2 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index f3eee38e5d4..8d5c4270f6a 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index c5de343cf82..928bfac25bb 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index b079d1f3f8c..8369d4c123c 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/benchmark/pom.xml b/benchmark/pom.xml index d1822cead63..02737bb327b 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index c0d255138fb..4f608ae384f 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/client/pom.xml b/client/pom.xml index 1498fdeda91..1f179ba02e7 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/core/pom.xml b/core/pom.xml index a9e624d74b6..65c1efde6e8 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 kafka-schema-registry diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 1afcfaefafd..c257423a0e8 100644 --- a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index 9b4464a1fa9..c490e8ed96e 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 19d1306cfc4..06ba6d7d011 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 
8b0f2cb5fa3..fe96f43f568 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index 59bf1755aa3..a0cb5463eeb 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/licenses-and-notices.html b/licenses-and-notices.html index a19741657bf..e0b0f36a0e9 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report


jopt-simple-4.9jar4.9The MIT License
-kafka-avro-serializer-7.2.9-0jar7.2.9-0
+kafka-avro-serializer-7.2.10-0jar7.2.10-0
-kafka-clients-7.2.9-0-ccsjarincluded file
+kafka-clients-7.2.10-0-ccsjarincluded file
-kafka-connect-avro-converter-7.2.9-0jar7.2.9-0
+kafka-connect-avro-converter-7.2.10-0jar7.2.10-0
-kafka-json-serializer-7.2.9-0jar7.2.9-0
+kafka-json-serializer-7.2.10-0jar7.2.10-0
-kafka-schema-registry-7.2.9-0jar7.2.9-0
+kafka-schema-registry-7.2.10-0jar7.2.10-0
-kafka-schema-registry-client-7.2.9-0jar7.2.9-0
+kafka-schema-registry-client-7.2.10-0jar7.2.10-0
-kafka_2.11-7.2.9-0-ccsjarincluded file
+kafka_2.11-7.2.10-0-ccsjarincluded file
log4j-1.2.17jar1.2.17Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index e24b1457fea..097edbd61eb 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index 8b89f17a2d1..3b42510945c 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index e6804b22e45..8cca8a5124c 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 kafka-schema-registry-package diff --git a/pom.xml b/pom.xml index 054d0763364..eeb4d4d9c60 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.2.9-0, 7.2.10-0) + [7.2.10-0, 7.2.11-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 Confluent, Inc. http://confluent.io @@ -85,7 +85,7 @@ 3.4.0 4.3.0 2.1.10 - 7.2.9-0 + 7.2.10-0 1.21 diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 3be5c6c316c..9695f62ab5f 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 6c6b5a8cb5a..e032c9a6622 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index 23dd9a15f1e..293c24724d8 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 444838ed4f6..836d5ea91e6 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index 72031077f2b..3b251c949e1 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml index 691fb5039cd..a4d2b6a92ac 100644 --- a/schema-converter/pom.xml +++ b/schema-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 9473d16aa2d..c2949863a59 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index cfad795a7b9..190032c68af 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.2.9-0 + 7.2.10-0 From 38e981450db3703d315bb36d1478ab8b3c312693 Mon Sep 17 00:00:00 2001 From: Robert Yokota Date: Wed, 20 Mar 2024 17:34:52 -0700 Subject: [PATCH 73/73] DGS-10394 Handle nulls in enum in JSON Schema converter --- .../connect/json/JsonSchemaData.java | 7 +- 
 .../connect/json/JsonSchemaDataTest.java | 85 +++++++++++++++++++
 2 files changed, 91 insertions(+), 1 deletion(-)

diff --git a/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java b/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java
index 520a998f2f4..d5a667691e8 100644
--- a/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java
+++ b/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java
@@ -112,6 +112,8 @@ public class JsonSchemaData {
   public static final String GENERALIZED_TYPE_UNION_FIELD_PREFIX =
       GENERALIZED_TYPE_UNION_PREFIX + "field_";
 
+  public static final String NULL_MARKER = "";
+
   private static final JsonNodeFactory JSON_NODE_FACTORY =
       JsonNodeFactory.withExactBigDecimals(true);
 
@@ -696,6 +698,9 @@ private org.everit.json.schema.Schema rawSchemaFromConnectSchema(
       for (Map.Entry entry : schema.parameters().entrySet()) {
         if (entry.getKey().startsWith(paramName + ".")) {
           String enumSymbol = entry.getKey().substring(paramName.length() + 1);
+          if (enumSymbol.equals(NULL_MARKER)) {
+            enumSymbol = null;
+          }
           enumBuilder.possibleValue(enumSymbol);
         }
       }
@@ -1000,7 +1005,7 @@ private Schema toConnectSchema(
     builder.parameter(paramName, "");  // JSON enums have no name, use empty string as placeholder
     int symbolIndex = 0;
     for (Object enumObj : enumSchema.getPossibleValuesAsList()) {
-      String enumSymbol = enumObj.toString();
+      String enumSymbol = enumObj != null ? enumObj.toString() : NULL_MARKER;
       if (generalizedSumTypeSupport) {
         builder.parameter(paramName + "." + enumSymbol, String.valueOf(symbolIndex));
       } else {
diff --git a/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java b/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java
index 270d9e53b3c..eaff691b63f 100644
--- a/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java
+++ b/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java
@@ -211,6 +211,24 @@ public void testFromConnectEnum() {
     checkNonObjectConversion(schema, TextNode.valueOf("one"), connectSchema, "one");
   }
 
+  @Test
+  public void testFromConnectEnumWithNull() {
+    EnumSchema schema = EnumSchema.builder()
+        .possibleValue("one")
+        .possibleValue("two")
+        .possibleValue("three")
+        .possibleValue(null)
+        .build();
+    Schema connectSchema = new SchemaBuilder(Schema.Type.STRING).parameter(JSON_TYPE_ENUM, "")
+        .parameter(JSON_TYPE_ENUM + ".one", "one")
+        .parameter(JSON_TYPE_ENUM + ".two", "two")
+        .parameter(JSON_TYPE_ENUM + ".three", "three")
+        .parameter(JSON_TYPE_ENUM + ".", "")
+        .build();
+
+    checkNonObjectConversion(schema, TextNode.valueOf("one"), connectSchema, "one");
+  }
+
   @Test
   public void testFromConnectEnumWithGeneralizedSumTypeSupport() {
     jsonSchemaData =
@@ -236,6 +254,34 @@ public void testFromConnectEnumWithGeneralizedSumTypeSupport() {
     checkNonObjectConversion(schema, TextNode.valueOf("one"), connectSchema, "one");
   }
 
+  @Test
+  public void testFromConnectEnumWithNullGeneralizedSumTypeSupport() {
+    jsonSchemaData =
+        new JsonSchemaData(new JsonSchemaDataConfig(
+            Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true")));
+    Map params = new LinkedHashMap<>();
+    params.put("org.apache.kafka.connect.data.Enum", "");
+    params.put("org.apache.kafka.connect.data.Enum.one", "0");
+    params.put("org.apache.kafka.connect.data.Enum.two", "1");
+    params.put("org.apache.kafka.connect.data.Enum.three", "2");
+    params.put("org.apache.kafka.connect.data.Enum.", "3");
+    EnumSchema schema = EnumSchema.builder()
+        .possibleValue("one")
+        .possibleValue("two")
+        .possibleValue("three")
+        .possibleValue(null)
+        .unprocessedProperties(Collections.singletonMap("connect.parameters", params))
+        .build();
+    Schema connectSchema = new SchemaBuilder(Schema.Type.STRING).parameter(GENERALIZED_TYPE_ENUM, "")
+        .parameter(GENERALIZED_TYPE_ENUM + ".one", "0")
+        .parameter(GENERALIZED_TYPE_ENUM + ".two", "1")
+        .parameter(GENERALIZED_TYPE_ENUM + ".three", "2")
+        .parameter(GENERALIZED_TYPE_ENUM + ".", "3")
+        .build();
+
+    checkNonObjectConversion(schema, TextNode.valueOf("one"), connectSchema, "one");
+  }
+
   @Test
   public void testFromConnectUnion() {
     NumberSchema firstSchema = NumberSchema.builder()
@@ -1190,6 +1236,24 @@ public void testToConnectEnum() {
     checkNonObjectConversion(expectedSchema, "one", schema, TextNode.valueOf("one"));
   }
 
+  @Test
+  public void testToConnectEnumWithNull() {
+    EnumSchema schema = EnumSchema.builder()
+        .possibleValue("one")
+        .possibleValue("two")
+        .possibleValue("three")
+        .possibleValue(null)
+        .build();
+    Schema expectedSchema = new SchemaBuilder(Schema.Type.STRING).parameter(JSON_TYPE_ENUM, "")
+        .parameter(JSON_TYPE_ENUM + ".one", "one")
+        .parameter(JSON_TYPE_ENUM + ".two", "two")
+        .parameter(JSON_TYPE_ENUM + ".three", "three")
+        .parameter(JSON_TYPE_ENUM + ".", "")
+        .build();
+
+    checkNonObjectConversion(expectedSchema, "one", schema, TextNode.valueOf("one"));
+  }
+
   @Test
   public void testToConnectEnumWithGeneralizedSumTypeSupport() {
     jsonSchemaData =
@@ -1209,6 +1273,27 @@ public void testToConnectEnumWithGeneralizedSumTypeSupport() {
     checkNonObjectConversion(expectedSchema, "one", schema, TextNode.valueOf("one"));
   }
 
+  @Test
+  public void testToConnectEnumWithNullGeneralizedSumTypeSupport() {
+    jsonSchemaData =
+        new JsonSchemaData(new JsonSchemaDataConfig(
+            Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true")));
+    EnumSchema schema = EnumSchema.builder()
+        .possibleValue("one")
+        .possibleValue("two")
+        .possibleValue("three")
+        .possibleValue(null)
+        .build();
+    Schema expectedSchema = new SchemaBuilder(Schema.Type.STRING).parameter(GENERALIZED_TYPE_ENUM, "")
+        .parameter(GENERALIZED_TYPE_ENUM + ".one", "0")
+        .parameter(GENERALIZED_TYPE_ENUM + ".two", "1")
+        .parameter(GENERALIZED_TYPE_ENUM + ".three", "2")
+        .parameter(GENERALIZED_TYPE_ENUM + ".", "3")
+        .build();
+
+    checkNonObjectConversion(expectedSchema, "one", schema, TextNode.valueOf("one"));
+  }
+
   @Test
   public void testToConnectEnumInAllOf() {
     StringSchema stringSchema = StringSchema.builder().build();