diff --git a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java
index dc55e888589..795f48b7ef4 100644
--- a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java
+++ b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java
@@ -65,6 +65,10 @@ private void configure(KafkaJsonDeserializerConfig config, boolean isKey) {
}
}
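+  /** Exposes the ObjectMapper used for deserialization so callers can customize it (for example, register extra Jackson modules). */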
+ public ObjectMapper objectMapper() {
+ return objectMapper;
+ }
+
@Override
public T deserialize(String ignored, byte[] bytes) {
if (bytes == null || bytes.length == 0) {
diff --git a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java
index 2fe5e4634a7..1ec76f55f27 100644
--- a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java
+++ b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java
@@ -54,6 +54,10 @@ protected void configure(KafkaJsonSerializerConfig config) {
SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, !writeDatesAsIso8601);
}
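+  /** Exposes the ObjectMapper used for serialization so callers can customize it before first use. */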
+ public ObjectMapper objectMapper() {
+ return objectMapper;
+ }
+
@Override
public byte[] serialize(String topic, T data) {
if (data == null) {
diff --git a/licenses-and-notices.html b/licenses-and-notices.html
index ab2a549f6aa..e0b0f36a0e9 100644
--- a/licenses-and-notices.html
+++ b/licenses-and-notices.html
@@ -89,19 +89,19 @@ License Report
jopt-simple-4.9 | jar | 4.9 | The MIT License
|
-kafka-avro-serializer-7.1.12-0 | jar | 7.1.12-0 | |
+kafka-avro-serializer-7.2.10-0 | jar | 7.2.10-0 | |
-kafka-clients-7.1.12-0-ccs | jar | | included file |
+kafka-clients-7.2.10-0-ccs | jar | | included file |
-kafka-connect-avro-converter-7.1.12-0 | jar | 7.1.12-0 | |
+kafka-connect-avro-converter-7.2.10-0 | jar | 7.2.10-0 | |
-kafka-json-serializer-7.1.12-0 | jar | 7.1.12-0 | |
+kafka-json-serializer-7.2.10-0 | jar | 7.2.10-0 | |
-kafka-schema-registry-7.1.12-0 | jar | 7.1.12-0 | |
+kafka-schema-registry-7.2.10-0 | jar | 7.2.10-0 | |
-kafka-schema-registry-client-7.1.12-0 | jar | 7.1.12-0 | |
+kafka-schema-registry-client-7.2.10-0 | jar | 7.2.10-0 | |
-kafka_2.11-7.1.12-0-ccs | jar | | included file |
+kafka_2.11-7.2.10-0-ccs | jar | | included file |
log4j-1.2.17 | jar | 1.2.17 | Apache 2.0
|
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml
index 1f6940a1f83..097edbd61eb 100644
--- a/maven-plugin/pom.xml
+++ b/maven-plugin/pom.xml
@@ -7,7 +7,7 @@
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-registry-parent</artifactId>
- <version>7.1.12-0</version>
+ <version>7.2.10-0</version>
@@ -26,7 +26,7 @@
<groupId>org.apache.maven</groupId>
<artifactId>maven-plugin-api</artifactId>
- <version>3.6.3</version>
+ <version>3.8.1</version>
<groupId>org.apache.maven.plugin-tools</groupId>
diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java
index feab9d1e5d6..6aab458dd68 100644
--- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java
+++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2018 Confluent Inc.
+ * Copyright 2022 Confluent Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -36,11 +36,9 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
-import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -55,36 +53,55 @@ public class DownloadSchemaRegistryMojo extends SchemaRegistryMojo {
@Parameter(required = true)
List<String> subjectPatterns = new ArrayList<>();
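+  // Optional versions, aligned one-to-one with subjectPatterns; "latest" (the default) downloads the newest version.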
+ @Parameter(required = false)
+  List<String> versions = new ArrayList<>();
+
@Parameter(required = true)
File outputDirectory;
@Parameter(required = false)
boolean encodeSubject = true;
- Map<String, ParsedSchema> downloadSchemas(Collection<String> subjects)
+ Map<String, ParsedSchema> downloadSchemas(List<String> subjects, List<String> versionsToDownload)
throws MojoExecutionException {
Map<String, ParsedSchema> results = new LinkedHashMap<>();
- for (String subject : subjects) {
+ if (versionsToDownload.size() != subjects.size()) {
+ throw new MojoExecutionException("Number of versions specified should "
+ + "be same as number of subjects");
+ }
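+    // subjects and versionsToDownload are parallel lists: index i pairs a subject with its requested version.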
+ for (int i = 0; i < subjects.size(); i++) {
SchemaMetadata schemaMetadata;
try {
- getLog().info(String.format("Downloading latest metadata for %s.", subject));
- schemaMetadata = this.client().getLatestSchemaMetadata(subject);
+ getLog().info(String.format("Downloading metadata "
+ + "for %s.for version %s", subjects.get(i), versionsToDownload.get(i)));
+ schemaMetadata = this.client().getLatestSchemaMetadata(subjects.get(i));
+ if (!versionsToDownload.get(i).equalsIgnoreCase("latest")) {
+ Integer maxVersion = schemaMetadata.getVersion();
+ if (maxVersion < Integer.parseInt(versionsToDownload.get(i))) {
+ throw new MojoExecutionException(
+ String.format("Max possible version "
+ + "for %s is %d", subjects.get(i), maxVersion));
+ } else {
+ schemaMetadata = this.client().getSchemaMetadata(subjects.get(i),
+ Integer.parseInt(versionsToDownload.get(i)));
+ }
+ }
Optional<ParsedSchema> schema =
this.client().parseSchema(
schemaMetadata.getSchemaType(),
schemaMetadata.getSchema(),
schemaMetadata.getReferences());
if (schema.isPresent()) {
- results.put(subject, schema.get());
+ results.put(subjects.get(i), schema.get());
} else {
throw new MojoExecutionException(
- String.format("Error while parsing schema for %s", subject)
+ String.format("Error while parsing schema for %s", subjects.get(i))
);
}
} catch (Exception ex) {
throw new MojoExecutionException(
- String.format("Exception thrown while downloading metadata for %s.", subject),
+ String.format("Exception thrown while downloading metadata for %s.", subjects.get(i)),
ex
);
}
@@ -99,26 +116,7 @@ public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("Plugin execution has been skipped");
return;
}
-
- try {
- getLog().debug(
- String.format("Checking if '%s' exists and is not a directory.", this.outputDirectory));
- if (outputDirectory.exists() && !outputDirectory.isDirectory()) {
- throw new IllegalStateException("outputDirectory must be a directory");
- }
- getLog()
- .debug(String.format("Checking if outputDirectory('%s') exists.", this.outputDirectory));
- if (!outputDirectory.isDirectory()) {
- getLog().debug(String.format("Creating outputDirectory('%s').", this.outputDirectory));
- if (!outputDirectory.mkdirs()) {
- throw new IllegalStateException(
- "Could not create output directory " + this.outputDirectory);
- }
- }
- } catch (Exception ex) {
- throw new MojoExecutionException("Exception thrown while creating outputDirectory", ex);
- }
-
+ outputDirValidation();
List<Pattern> patterns = new ArrayList<>();
for (String subject : subjectPatterns) {
@@ -133,7 +131,6 @@ public void execute() throws MojoExecutionException, MojoFailureException {
);
}
}
-
Collection<String> allSubjects;
try {
getLog().info("Getting all subjects on schema registry...");
@@ -141,33 +138,44 @@ public void execute() throws MojoExecutionException, MojoFailureException {
} catch (Exception ex) {
throw new MojoExecutionException("Exception thrown", ex);
}
-
getLog().info(String.format("Schema Registry has %s subject(s).", allSubjects.size()));
- Set<String> subjectsToDownload = new LinkedHashSet<>();
+ List<String> subjectsToDownload = new ArrayList<>();
+ List<String> versionsToDownload = new ArrayList<>();
+ if (!versions.isEmpty()) {
+ if (versions.size() != subjectPatterns.size()) {
+ throw new IllegalStateException("versions size should be same as subjectPatterns size");
+ }
+ }
for (String subject : allSubjects) {
- for (Pattern pattern : patterns) {
+      for (int i = 0; i < patterns.size(); i++) {
getLog()
- .debug(String.format("Checking '%s' against pattern '%s'", subject, pattern.pattern()));
- Matcher matcher = pattern.matcher(subject);
+ .debug(String.format("Checking '%s' against pattern '%s'",
+ subject, patterns.get(i).pattern()));
+ Matcher matcher = patterns.get(i).matcher(subject);
if (matcher.matches()) {
- getLog().debug(String.format("'%s' matches pattern '%s' so downloading.", subject,
- pattern.pattern()));
+ getLog().debug(String.format("'%s' matches "
+ + "pattern '%s' so downloading.", subject,
+ patterns.get(i).pattern()));
+ if (versions.isEmpty()) {
+ versionsToDownload.add("latest");
+ } else {
+ versionsToDownload.add(versions.get(i));
+ }
subjectsToDownload.add(subject);
break;
}
}
}
-
- Map<String, ParsedSchema> subjectToSchema = downloadSchemas(subjectsToDownload);
+ Map<String, ParsedSchema> subjectToSchema =
+     downloadSchemas(subjectsToDownload, versionsToDownload);
for (Map.Entry<String, ParsedSchema> kvp : subjectToSchema.entrySet()) {
String subject = kvp.getKey();
String encodedSubject = encodeSubject ? encode(subject) : subject;
String fileName = String.format("%s%s", encodedSubject, getExtension(kvp.getValue()));
File outputFile = new File(this.outputDirectory, fileName);
-
getLog().info(
String.format("Writing schema for Subject(%s) to %s.", subject, outputFile)
);
@@ -191,6 +199,27 @@ public void execute() throws MojoExecutionException, MojoFailureException {
}
}
+ public void outputDirValidation() throws MojoExecutionException, MojoFailureException {
+ try {
+ getLog().debug(
+ String.format("Checking if '%s' exists and is not a directory.", this.outputDirectory));
+ if (outputDirectory.exists() && !outputDirectory.isDirectory()) {
+ throw new IllegalStateException("outputDirectory must be a directory");
+ }
+ getLog()
+ .debug(String.format("Checking if outputDirectory('%s') exists.", this.outputDirectory));
+ if (!outputDirectory.isDirectory()) {
+ getLog().debug(String.format("Creating outputDirectory('%s').", this.outputDirectory));
+ if (!outputDirectory.mkdirs()) {
+ throw new IllegalStateException(
+ "Could not create output directory " + this.outputDirectory);
+ }
+ }
+ } catch (Exception ex) {
+ throw new MojoExecutionException("Exception thrown while creating outputDirectory", ex);
+ }
+ }
+
private String getExtension(ParsedSchema parsedSchema) {
if (this.schemaExtension != null) {
return schemaExtension;
diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/MojoUtils.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/MojoUtils.java
new file mode 100644
index 00000000000..c075de3efb0
--- /dev/null
+++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/MojoUtils.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2022 Confluent Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.confluent.kafka.schemaregistry.maven;
+
+import io.confluent.kafka.schemaregistry.SchemaProvider;
+import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
+import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider;
+import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.util.Arrays;
+import java.util.List;
+
+public class MojoUtils {
+
+ public static String readFile(File file, Charset encoding) throws IOException {
+ byte[] encoded = Files.readAllBytes(file.toPath());
+ return new String(encoded, encoding);
+ }
+
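+  /** Schema providers bundled by default: Avro, JSON Schema, and Protobuf. */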
+  public static List<SchemaProvider> defaultSchemaProviders() {
+ return Arrays.asList(
+ new AvroSchemaProvider(), new JsonSchemaProvider(), new ProtobufSchemaProvider()
+ );
+ }
+
+}
diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java
index 0878268cda3..013ab684c30 100644
--- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java
+++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java
@@ -23,19 +23,15 @@
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import io.confluent.kafka.schemaregistry.SchemaProvider;
-import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClientConfig;
-import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider;
-import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
public abstract class SchemaRegistryMojo extends AbstractMojo implements Closeable {
@@ -77,7 +73,7 @@ protected SchemaRegistryClient client() {
}
List<SchemaProvider> providers = schemaProviders != null && !schemaProviders.isEmpty()
? schemaProviders()
- : defaultSchemaProviders();
+ : MojoUtils.defaultSchemaProviders();
this.client = new CachedSchemaRegistryClient(
this.schemaRegistryUrls,
1000,
@@ -99,12 +95,6 @@ private List<SchemaProvider> schemaProviders() {
}).collect(Collectors.toList());
}
- private List<SchemaProvider> defaultSchemaProviders() {
- return Arrays.asList(
- new AvroSchemaProvider(), new JsonSchemaProvider(), new ProtobufSchemaProvider()
- );
- }
-
@Override
public void close() throws IOException {
if (client != null) {
diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java
new file mode 100644
index 00000000000..4eb2cdbcd0b
--- /dev/null
+++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2022 Confluent Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.confluent.kafka.schemaregistry.maven;
+
+import io.confluent.kafka.schemaregistry.CompatibilityLevel;
+import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugins.annotations.Mojo;
+import org.apache.maven.plugins.annotations.Parameter;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
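+// Illustrative POM configuration (subject names are hypothetical). The special key
+// "__GLOBAL" (or "null") targets the global level, and the value "null" deletes a setting:
+//   <compatibilityLevels>
+//     <my-subject-value>BACKWARD</my-subject-value>
+//     <__GLOBAL>FULL</__GLOBAL>
+//   </compatibilityLevels>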
+@Mojo(name = "set-compatibility", configurator = "custom-basic")
+public class SetCompatibilityMojo extends SchemaRegistryMojo {
+
+ @Parameter(required = true)
+  Map<String, String> compatibilityLevels = new HashMap<>();
+
+ public void execute() throws MojoExecutionException {
+    for (Map.Entry<String, String> entry : compatibilityLevels.entrySet()) {
+ if (entry.getValue().equalsIgnoreCase("null")) {
+ deleteConfig(entry.getKey());
+ } else {
+ updateConfig(entry.getKey(), CompatibilityLevel.valueOf(entry.getValue()));
+ }
+ }
+ }
+
+ public void updateConfig(String subject, CompatibilityLevel compatibility)
+ throws MojoExecutionException {
+
+ try {
+ String updatedCompatibility;
+
+ if (subject.equalsIgnoreCase("null") || subject.equals("__GLOBAL")) {
+ updatedCompatibility = this.client().updateCompatibility(null, compatibility.toString());
+ getLog().info("Global Compatibility set to "
+ + updatedCompatibility);
+ } else {
+        Collection<String> allSubjects = this.client().getAllSubjects();
+ if (!allSubjects.contains(subject)) {
+ throw new MojoExecutionException(
+ "Subject not found"
+ );
+ }
+ updatedCompatibility = this.client().updateCompatibility(subject, compatibility.toString());
+ getLog().info("Compatibility of " + subject
+ + " set to " + updatedCompatibility);
+ }
+ } catch (RestClientException | IOException e) {
+ e.printStackTrace();
+ throw new MojoExecutionException(
+ "Exception thrown while updating config",
+ e
+ );
+ }
+
+ }
+
+ public void deleteConfig(String subject) throws MojoExecutionException {
+ if (getLog().isDebugEnabled()) {
+ getLog().info("Deleting compatibility");
+ }
+ try {
+ this.client().deleteCompatibility(subject);
+ if (subject.equalsIgnoreCase("null") || subject.equals("__GLOBAL")) {
+ getLog().info("Deleted global compatibility");
+ } else {
+ getLog().info(String.format("Deleted compatibility of %s", subject));
+ }
+
+ } catch (IOException | RestClientException e) {
+ throw new MojoExecutionException(
+ "Exception thrown while updating config",
+ e
+ );
+ }
+ }
+
+ public String getConfig(String subject) throws MojoExecutionException {
+ if (getLog().isDebugEnabled()) {
+ getLog().info(String.format("Getting compatibility of %s", subject));
+ }
+ try {
+      return this.client().getCompatibility(subject);
+ } catch (IOException | RestClientException e) {
+ e.printStackTrace();
+ throw new MojoExecutionException(
+ "Exception thrown while getting config",
+ e
+ );
+ }
+ }
+}
\ No newline at end of file
diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java
new file mode 100644
index 00000000000..5c7d6692b71
--- /dev/null
+++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2022 Confluent Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.confluent.kafka.schemaregistry.maven;
+
+import io.confluent.kafka.schemaregistry.CompatibilityChecker;
+import io.confluent.kafka.schemaregistry.CompatibilityLevel;
+import io.confluent.kafka.schemaregistry.ParsedSchema;
+import io.confluent.kafka.schemaregistry.SchemaProvider;
+import io.confluent.kafka.schemaregistry.avro.AvroSchema;
+import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.apache.maven.plugin.AbstractMojo;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugins.annotations.Mojo;
+import org.apache.maven.plugins.annotations.Parameter;
+
+@Mojo(name = "test-local-compatibility", configurator = "custom-basic")
+public class TestLocalCompatibilityMojo extends AbstractMojo {
+
+ @Parameter(required = true)
+  Map<String, File> schemas = new HashMap<>();
+
+ @Parameter(required = false)
+  Map<String, String> schemaTypes = new HashMap<>();
+
+ @Parameter(required = true)
+  Map<String, File> previousSchemaPaths = new HashMap<>();
+
+ @Parameter(required = true)
+  Map<String, CompatibilityLevel> compatibilityLevels = new HashMap<>();
+
+  protected Optional<ParsedSchema> parseSchema(
+      String schemaType,
+      String schemaString,
+      List<SchemaReference> references,
+      Map<String, SchemaProvider> providers) throws MojoExecutionException {
+
+ SchemaProvider schemaProvider = providers.get(schemaType.toUpperCase());
+ if (schemaProvider == null) {
+ throw new MojoExecutionException(
+ String.format("Invalid schema type %s", schemaType));
+ }
+
+ return schemaProvider.parseSchema(schemaString, references);
+
+ }
+
+  protected ParsedSchema loadSchema(File path, String schemaType,
+      Map<String, SchemaProvider> schemaProviders) throws MojoExecutionException {
+
+ String schemaString;
+ try {
+ schemaString = MojoUtils.readFile(path, StandardCharsets.UTF_8);
+ } catch (IOException e) {
+ throw new MojoExecutionException(
+ String.format("File cannot be found at: %s", path));
+ }
+    List<SchemaReference> references = new ArrayList<>();
+    Optional<ParsedSchema> schema = parseSchema(schemaType, schemaString,
+        references, schemaProviders);
+
+ if (schema.isPresent()) {
+ return schema.get();
+ }
+
+ throw new MojoExecutionException(String.format("Unable to parse schema from %s "
+ + "with schema type as %s", path, schemaType));
+ }
+
+  protected ArrayList<File> getFiles(File previousSchemaPath) {
+
+    ArrayList<File> previousSchemaFiles = new ArrayList<>();
+
+ getLog().debug(String.format("Loading File %s", previousSchemaPath));
+    // Add every file directly inside the directory; nested directories are skipped
+ if (previousSchemaPath.isDirectory()) {
+
+ File[] fileList = previousSchemaPath.listFiles();
+ if (fileList == null) {
+ return previousSchemaFiles;
+ }
+
+ for (File f : fileList) {
+ if (!f.isDirectory()) {
+ previousSchemaFiles.add(f);
+ }
+ }
+
+ } else {
+ previousSchemaFiles.add(previousSchemaPath);
+ }
+
+ return previousSchemaFiles;
+ }
+
+
+  protected void testSchema(String key, Map<String, SchemaProvider> schemaProviders)
+ throws MojoExecutionException {
+
+ File schemaPath = schemas.get(key);
+
+ if (!previousSchemaPaths.containsKey(key)) {
+ throw new MojoExecutionException(String.format("Previous schemas not found for %s", key));
+ }
+
+ File previousSchemaPath = previousSchemaPaths.get(key);
+ String schemaType = schemaTypes.getOrDefault(key, AvroSchema.TYPE);
+
+ if (!compatibilityLevels.containsKey(key)) {
+ throw new MojoExecutionException(String.format("Compatibility Level not found for %s", key));
+ }
+
+ CompatibilityLevel compatibilityLevel = compatibilityLevels.get(key);
+
+    ArrayList<File> previousSchemaFiles = getFiles(previousSchemaPath);
+
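+    // BACKWARD, FORWARD and FULL compare against exactly one previous schema;
+    // only the *_TRANSITIVE levels accept a directory holding the full history.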
+ if (previousSchemaFiles.size() > 1
+ && (compatibilityLevel == CompatibilityLevel.BACKWARD
+ || compatibilityLevel == CompatibilityLevel.FORWARD
+ || compatibilityLevel == CompatibilityLevel.FULL)) {
+
+ throw new MojoExecutionException(String.format("Provide exactly one file for %s check "
+ + "for schema %s", compatibilityLevel.name.toLowerCase(), schemaPath));
+
+ }
+
+ ParsedSchema schema = loadSchema(schemaPath, schemaType, schemaProviders);
+    ArrayList<ParsedSchema> previousSchemas = new ArrayList<>();
+
+ for (File previousSchemaFile : previousSchemaFiles) {
+ previousSchemas.add(loadSchema(previousSchemaFile, schemaType, schemaProviders));
+ }
+
+ CompatibilityChecker checker = CompatibilityChecker.checker(compatibilityLevel);
+    List<String> errorMessages = checker.isCompatible(schema, previousSchemas);
+
+ boolean success = errorMessages.isEmpty();
+
+ if (success) {
+ getLog().info(String.format("Schema is %s compatible with previous schemas",
+ compatibilityLevel.name.toLowerCase()));
+ } else {
+ String errorLog = String.format("Schema is not %s compatible with previous schemas. ",
+ compatibilityLevel.name.toLowerCase()) + errorMessages.get(0);
+ throw new MojoExecutionException(errorLog);
+ }
+
+ }
+
+ public void execute() throws MojoExecutionException {
+
+    List<SchemaProvider> providers = MojoUtils.defaultSchemaProviders();
+    Map<String, SchemaProvider> schemaProviders = providers.stream()
+        .collect(Collectors.toMap(SchemaProvider::schemaType, p -> p));
+
+    Set<String> keys = schemas.keySet();
+
+ for (String key : keys) {
+ testSchema(key, schemaProviders);
+ }
+
+ }
+
+}
diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java
index ad69a94c1a1..e0576d6ed40 100644
--- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java
+++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java
@@ -26,9 +26,7 @@
import java.io.File;
import java.io.IOException;
-import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -108,7 +106,7 @@ private void processSubject(String key, boolean isReference) {
}
return;
}
- String schemaString = readFile(file, StandardCharsets.UTF_8);
+ String schemaString = MojoUtils.readFile(file, StandardCharsets.UTF_8);
Optional<ParsedSchema> schema = client().parseSchema(
schemaType, schemaString, schemaReferences);
if (schema.isPresent()) {
@@ -186,9 +184,5 @@ private List getReferences(String subject, Map
return result;
}
- private static String readFile(File file, Charset encoding) throws IOException {
- byte[] encoded = Files.readAllBytes(file.toPath());
- return new String(encoded, encoding);
- }
}
diff --git a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java
index 6b2445a9513..6e6f523a2b0 100644
--- a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java
+++ b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java
@@ -24,11 +24,14 @@
import org.junit.Before;
import org.junit.Test;
+import javax.security.auth.Subject;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.List;
+import java.util.regex.Matcher;
public class DownloadSchemaRegistryMojoTest extends SchemaRegistryTest {
DownloadSchemaRegistryMojo mojo;
diff --git a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java
new file mode 100644
index 00000000000..2e2907a4ced
--- /dev/null
+++ b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java
@@ -0,0 +1,53 @@
+package io.confluent.kafka.schemaregistry.maven;
+
+import io.confluent.kafka.schemaregistry.CompatibilityLevel;
+import io.confluent.kafka.schemaregistry.avro.AvroSchema;
+import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
+import org.apache.avro.Schema;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThrows;
+
+public class SetCompatibilityMojoTest extends SchemaRegistryTest {
+ SetCompatibilityMojo mojo;
+
+ @Before
+ public void createMojoAndFiles() {
+ this.mojo = new SetCompatibilityMojo();
+ this.mojo.client(new MockSchemaRegistryClient());
+ }
+
+ @Test
+ public void specificSubjects() throws IOException, RestClientException, MojoExecutionException {
+    String keySubject = "TestSubject-key";
+ Schema keySchema = Schema.create(Schema.Type.STRING);
+
+ this.mojo.client().register(keySubject, new AvroSchema(keySchema));
+    // Compatibility has not been set yet, so getCompatibility should throw
+ assertThrows("Checking that compatibility hasn't been set",
+ RestClientException.class, () -> this.mojo.client().getCompatibility(keySubject));
+
+ // Setting compatibility & checking if it matches
+    this.mojo.compatibilityLevels.put(keySubject, "BACKWARD");
+ this.mojo.execute();
+
+    assertEquals("BACKWARD", this.mojo.getConfig(keySubject));
+
+    // Updating to a different compatibility
+    this.mojo.compatibilityLevels.replace(keySubject, "BACKWARD", "FULL");
+    this.mojo.execute();
+
+    assertEquals("FULL", this.mojo.getConfig(keySubject));
+
+    // Checking global compatibility
+    this.mojo.compatibilityLevels.put("__GLOBAL", "BACKWARD_TRANSITIVE");
+    this.mojo.execute();
+    assertEquals("BACKWARD_TRANSITIVE", this.mojo.getConfig(null));
+
+ }
+}
diff --git a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java
new file mode 100644
index 00000000000..ab76aac682b
--- /dev/null
+++ b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java
@@ -0,0 +1,387 @@
+/*
+ * Copyright 2022 Confluent Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.confluent.kafka.schemaregistry.maven;
+
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
+
+import io.confluent.kafka.schemaregistry.CompatibilityLevel;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.junit.Before;
+import org.junit.Test;
+
+/*
+ * The tests for avro are taken from AvroCompatibilityTest
+ */
+public class TestLocalCompatibilityMojoTest extends SchemaRegistryTest {
+ TestLocalCompatibilityMojo mojo;
+
+ final String schema1 = "schema1";
+ final String schema2 = "schema2";
+ final String schema3 = "schema3";
+ final String schema4 = "schema4";
+ final String schema6 = "schema6";
+ final String schema7 = "schema7";
+ final String schema8 = "schema8";
+ final String schema10 = "schema10";
+ final String schema11 = "schema11";
+ final String schema12 = "schema12";
+ final String schema13 = "schema13";
+ final String schema14 = "schema14";
+
+ String fileExtension;
+
+ @Before
+ public void createMojoAndFiles() {
+ this.mojo = new TestLocalCompatibilityMojo();
+ makeFiles();
+
+ for(int i=1;i<=9;i++) {
+ this.mojo.schemaTypes.put("schema"+i, "AVRO");
+ }
+
+ this.mojo.schemaTypes.put(schema10, "JSON");
+ this.mojo.schemaTypes.put(schema13, "JSON");
+ this.mojo.schemaTypes.put(schema14, "JSON");
+
+ }
+
+ private void makeFile(String schemaString, String name) {
+
+ try (FileWriter writer = new FileWriter(this.tempDirectory+"/"+name)) {
+ writer.write(schemaString);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ if (name.contains("1.avsc") || name.contains("2.avsc")) {
+
+ try (FileWriter writer = new FileWriter(this.tempDirectory+"/schema12Folder/"+name)) {
+ writer.write(schemaString);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ }
+
+  private void makeFiles() {
+
+    File newFolder = new File(this.tempDirectory.toString() + "/schema12Folder");
+    if (newFolder.mkdir()) {
+      System.out.println("Folder schema12Folder created successfully.");
+    }
+
+ String schemaString1 = "{\"type\":\"record\","
+ + "\"name\":\"myrecord\","
+ + "\"fields\":"
+ + "[{\"type\":\"string\",\"name\":\"f1\"}]}";
+ makeFile(schemaString1, "schema1.avsc");
+
+ String schemaString2 = "{\"type\":\"record\","
+ + "\"name\":\"myrecord\","
+ + "\"fields\":"
+ + "[{\"type\":\"string\",\"name\":\"f1\"},"
+ + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"}]}";
+ makeFile(schemaString2, "schema2.avsc");
+
+ String schemaString3 = "{\"type\":\"record\","
+ + "\"name\":\"myrecord\","
+ + "\"fields\":"
+ + "[{\"type\":\"string\",\"name\":\"f1\"},"
+ + " {\"type\":\"string\",\"name\":\"f2\"}]}";
+ makeFile(schemaString3, "schema3.avsc");
+
+ String schemaString4 = "{\"type\":\"record\","
+ + "\"name\":\"myrecord\","
+ + "\"fields\":"
+ + "[{\"type\":\"string\",\"name\":\"f1_new\", \"aliases\": [\"f1\"]}]}";
+ makeFile(schemaString4, "schema4.avsc");
+
+ String schemaString6 = "{\"type\":\"record\","
+ + "\"name\":\"myrecord\","
+ + "\"fields\":"
+ + "[{\"type\":[\"null\", \"string\"],\"name\":\"f1\","
+ + " \"doc\":\"doc of f1\"}]}";
+ makeFile(schemaString6, "schema6.avsc");
+
+ String schemaString7 = "{\"type\":\"record\","
+ + "\"name\":\"myrecord\","
+ + "\"fields\":"
+ + "[{\"type\":[\"null\", \"string\", \"int\"],\"name\":\"f1\","
+ + " \"doc\":\"doc of f1\"}]}";
+ makeFile(schemaString7, "schema7.avsc");
+
+ String schemaString8 = "{\"type\":\"record\","
+ + "\"name\":\"myrecord\","
+ + "\"fields\":"
+ + "[{\"type\":\"string\",\"name\":\"f1\"},"
+ + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"},"
+ + " {\"type\":\"string\",\"name\":\"f3\", \"default\": \"bar\"}]}";
+ makeFile(schemaString8, "schema8.avsc");
+
+ String schemaString10 = "{\n"
+ + " \"type\": \"object\",\n"
+ + " \"properties\": {\n"
+ + " \"foo\": { \"type\": \"string\" },\n"
+ + " \"bar\": { \"type\": \"string\" }\n"
+ + " }\n"
+ + "}";
+ makeFile(schemaString10, "schema10.json");
+
+ String schemaString11 = "{\n"
+ + " \"type\": \"object\",\n"
+ + " \"properties\": {\n"
+ + " \"foo\": { \"type\": \"string\" },\n"
+ + " \"bar\": { \"type\": \"string\" }\n"
+ + " },\n"
+ + " \"additionalProperties\": false\n"
+ + "}";
+ makeFile(schemaString11, "schema11.json");
+
+ String schemaString12 = "{\n"
+ + " \"type\": \"object\",\n"
+ + " \"properties\": {\n"
+ + " \"foo\": { \"type\": \"string\" },\n"
+ + " \"bar\": { \"type\": \"string\" }\n"
+ + " },\n"
+ + " \"additionalProperties\": { \"type\": \"string\" }\n"
+ + "}";
+
+ makeFile(schemaString12, "schema12.json");
+
+ String schemaString13 = "{\n"
+ + " \"type\": \"object\",\n"
+ + " \"properties\": {\n"
+ + " \"foo\": { \"type\": \"string\" },\n"
+ + " \"bar\": { \"type\": \"string\" },\n"
+ + " \"zap\": { \"type\": \"string\" }\n"
+ + " },\n"
+ + " \"additionalProperties\": { \"type\": \"string\" }\n"
+ + "}";
+
+ makeFile(schemaString13, "schema13.json");
+
+ String schemaString14 = "{\n"
+ + " \"type\": \"object\",\n"
+ + " \"properties\": {\n"
+ + " \"foo\": { \"type\": \"string\" },\n"
+ + " \"bar\": { \"type\": \"string\" },\n"
+ + " \"zap\": { \n"
+ + " \"oneOf\": [ { \"type\": \"string\" }, { \"type\": \"integer\" } ] \n"
+ + " }\n"
+ + " },\n"
+ + " \"additionalProperties\": { \"type\": \"string\" }\n"
+ + "}";
+
+ makeFile(schemaString14, "schema14.json");
+
+ }
+
+
+  private void setMojo(String schema, String previousSchemas) {
+
+ this.mojo.schemas = Collections.singletonMap(schema, new File(this.tempDirectory + "/" + schema + fileExtension));
+ this.mojo.previousSchemaPaths = new HashMap<>();
+
+ File temp = new File(this.tempDirectory + "/" + previousSchemas);
+    if (temp.isDirectory()) {
+      this.mojo.previousSchemaPaths.put(schema, new File(this.tempDirectory + "/" + previousSchemas));
+    } else {
+      this.mojo.previousSchemaPaths.put(schema, new File(this.tempDirectory + "/" + previousSchemas + fileExtension));
+    }
+
+ }
+
+ private boolean isCompatible(String schema, String previousSchemas, CompatibilityLevel compatibilityLevel)
+ throws MojoExecutionException {
+
+ setMojo(schema, previousSchemas);
+ this.mojo.compatibilityLevels.put(schema, compatibilityLevel);
+ this.mojo.execute();
+ return true;
+
+ }
+
+ /*
+ * Backward compatibility: A new schema is backward compatible if it can be used to read the data
+ * written in the previous schema.
+ */
+ @Test
+ public void testBasicBackwardsCompatibility() throws MojoExecutionException {
+
+ fileExtension = ".avsc";
+
+ assertTrue("adding a field with default is a backward compatible change",
+ isCompatible(schema2, (schema1), CompatibilityLevel.BACKWARD));
+ assertThrows("adding a field w/o default is not a backward compatible change",
+ MojoExecutionException.class, () -> isCompatible(schema3, (schema1), CompatibilityLevel.BACKWARD));
+ assertTrue("changing field name with alias is a backward compatible change",
+ isCompatible(schema4, (schema1), CompatibilityLevel.BACKWARD));
+ assertTrue("evolving a field type to a union is a backward compatible change",
+ isCompatible(schema6, (schema1), CompatibilityLevel.BACKWARD));
+ assertThrows("removing a type from a union is not a backward compatible change",
+ MojoExecutionException.class, () -> isCompatible(schema1, (schema6), CompatibilityLevel.BACKWARD));
+ assertTrue("adding a new type in union is a backward compatible change",
+ isCompatible(schema7, (schema6), CompatibilityLevel.BACKWARD));
+ assertThrows("removing a type from a union is not a backward compatible change",
+ MojoExecutionException.class, () -> isCompatible(schema6, (schema7), CompatibilityLevel.BACKWARD));
+
+
+ this.mojo.schemaTypes.put(schema10, "JSON");
+ this.mojo.schemaTypes.put(schema13, "JSON");
+ this.mojo.schemaTypes.put(schema14, "JSON");
+
+ fileExtension = ".json";
+ assertTrue("setting additional properties to true from false is a backward compatible change",
+ isCompatible(schema10, schema11, CompatibilityLevel.BACKWARD));
+
+ assertTrue("adding property of string type (same as additional properties type) is "
+ + "a backward compatible change", isCompatible(schema13, schema12, CompatibilityLevel.BACKWARD));
+
+ assertTrue("adding property of string or int type (string is additional properties type) is "
+ + "a backward compatible change", isCompatible(schema14, schema12, CompatibilityLevel.BACKWARD));
+
+ }
+
+ @Test
+ public void testBasicBackwardsTransitiveCompatibility() throws MojoExecutionException {
+
+ fileExtension = ".avsc";
+
+ // 1 == 2, 2 == 3, 3 != 1
+ assertTrue("adding a field with default is a backward compatible change",
+ isCompatible(schema2, (schema1), CompatibilityLevel.BACKWARD_TRANSITIVE));
+ assertTrue("removing a default is a compatible change, but not transitively",
+ isCompatible(schema3, (schema2), CompatibilityLevel.BACKWARD_TRANSITIVE));
+
+ // Not compatible throws error
+ assertThrows("removing a default is not a transitively compatible change",
+ MojoExecutionException.class, () ->isCompatible(schema3, "schema12Folder", CompatibilityLevel.BACKWARD_TRANSITIVE));
+
+ assertTrue("Checking if schema8 is backward compatible with schema1 and schema2 present in avro folder"
+ , isCompatible(schema8, "schema12Folder", CompatibilityLevel.BACKWARD_TRANSITIVE ));
+
+
+ }
+
+ /*
+ * Forward compatibility: A new schema is forward compatible if the previous schema can read data written in this
+ * schema.
+ */
+ @Test
+ public void testBasicForwardsCompatibility() throws MojoExecutionException {
+
+ fileExtension = ".avsc";
+
+ assertTrue("adding a field is a forward compatible change",
+ isCompatible(schema2, (schema1), CompatibilityLevel.FORWARD));
+ assertTrue("adding a field is a forward compatible change",
+ isCompatible(schema3, (schema1), CompatibilityLevel.FORWARD));
+ assertTrue("adding a field is a forward compatible change",
+ isCompatible(schema3, (schema2), CompatibilityLevel.FORWARD));
+ assertTrue("adding a field is a forward compatible change",
+ isCompatible(schema2, (schema3), CompatibilityLevel.FORWARD));
+
+ fileExtension = ".avsc";
+
+    // FORWARD expects exactly one previous schema, so a folder with two files throws
+    assertThrows(MojoExecutionException.class, () ->
+        isCompatible(schema1, "schema12Folder", CompatibilityLevel.FORWARD));
+
+ fileExtension = ".json";
+ this.mojo.schemaTypes.put(schema11, "JSON");
+ this.mojo.schemaTypes.put(schema12, "JSON");
+ this.mojo.schemaTypes.put(schema13, "JSON");
+
+ assertTrue("setting additional properties to false from true is a forward compatible change",
+ isCompatible(schema11, schema10, CompatibilityLevel.FORWARD));
+
+ assertTrue("removing property of string type (same as additional properties type)"
+ + " is a backward compatible change", isCompatible(schema13,
+ schema12, CompatibilityLevel.FORWARD));
+
+ assertTrue("removing property of string or int type (string is additional properties type) is "
+ + "a backward compatible change", isCompatible(schema12,
+ schema14, CompatibilityLevel.FORWARD));
+
+ }
+
+ /*
+ * Forward transitive compatibility: A new schema is forward compatible if all previous schemas can read data written
+ * in this schema.
+ */
+ @Test
+ public void testBasicForwardsTransitiveCompatibility() throws MojoExecutionException {
+
+ fileExtension = ".avsc";
+
+ // 1 == 2, 2 == 3, 3 != 1
+ assertTrue("adding default to a field is a compatible change",
+ isCompatible(schema2, (schema3), CompatibilityLevel.FORWARD_TRANSITIVE));
+ assertTrue("removing a field with a default is a compatible change",
+ isCompatible(schema1, (schema2), CompatibilityLevel.FORWARD_TRANSITIVE));
+ }
+
+ /*
+ * Full compatibility: A new schema is fully compatible if it’s both backward and forward compatible.
+ */
+ @Test
+ public void testBasicFullCompatibility() throws MojoExecutionException {
+
+ fileExtension = ".avsc";
+
+ assertTrue("adding a field with default is a backward and a forward compatible change",
+ isCompatible(schema2, (schema1), CompatibilityLevel.FULL));
+
+ // Throws error, provide exactly one file for checking full compatibility
+ assertThrows(MojoExecutionException.class, () ->
+ isCompatible(schema3, "schema12Folder", CompatibilityLevel.FULL));
+
+ }
+
+ /*
+ * Full transitive compatibility: A new schema is fully compatible if it’s both transitively backward
+ * and transitively forward compatible with the entire schema history.
+ */
+ @Test
+ public void testBasicFullTransitiveCompatibility() throws MojoExecutionException {
+
+ fileExtension = ".avsc";
+
+ assertTrue("iteratively adding fields with defaults is a compatible change",
+ isCompatible(schema8, "schema12Folder", CompatibilityLevel.FULL_TRANSITIVE));
+ assertTrue("adding default to a field is a compatible change",
+ isCompatible(schema2, (schema3), CompatibilityLevel.FULL_TRANSITIVE));
+ assertTrue("removing a field with a default is a compatible change",
+ isCompatible(schema1, (schema2), CompatibilityLevel.FULL_TRANSITIVE));
+
+ assertTrue("adding a field with default is a compatible change",
+ isCompatible(schema2, (schema1), CompatibilityLevel.FULL_TRANSITIVE));
+ assertTrue("removing a default from a field compatible change",
+ isCompatible(schema3, (schema2), CompatibilityLevel.FULL_TRANSITIVE));
+
+ assertThrows( "transitively adding a field without a default is not a compatible change",
+ MojoExecutionException.class, () -> isCompatible(schema3, "schema12Folder", CompatibilityLevel.FULL_TRANSITIVE));
+
+ }
+}
\ No newline at end of file
diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml
index ea3047a22b7..3b42510945c 100644
--- a/package-kafka-serde-tools/pom.xml
+++ b/package-kafka-serde-tools/pom.xml
@@ -7,7 +7,7 @@
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-registry-parent</artifactId>
- <version>7.1.12-0</version>
+ <version>7.2.10-0</version>
@@ -93,6 +93,12 @@
<groupId>org.apache.kafka</groupId>
<artifactId>connect-runtime</artifactId>
<scope>provided</scope>
+ <exclusions>
+   <exclusion>
+     <groupId>org.slf4j</groupId>
+     <artifactId>slf4j-log4j12</artifactId>
+   </exclusion>
+ </exclusions>
<groupId>org.apache.kafka</groupId>
diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml
index e6355857364..8cca8a5124c 100644
--- a/package-schema-registry/pom.xml
+++ b/package-schema-registry/pom.xml
@@ -7,7 +7,7 @@
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-registry-parent</artifactId>
- <version>7.1.12-0</version>
+ <version>7.2.10-0</version>
<artifactId>kafka-schema-registry-package</artifactId>
@@ -20,16 +20,10 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-reload4j</artifactId>
<scope>compile</scope>
- <exclusions>
-   <exclusion>
-     <groupId>org.apache.logging.log4j</groupId>
-     <artifactId>log4j-api</artifactId>
-   </exclusion>
-   <exclusion>
-     <groupId>org.apache.logging.log4j</groupId>
-     <artifactId>log4j-core</artifactId>
-   </exclusion>
- </exclusions>
+ </dependency>
+ <dependency>
+   <groupId>io.confluent</groupId>
+   <artifactId>logredactor</artifactId>
<groupId>io.confluent</groupId>
diff --git a/pom.xml b/pom.xml
index 3d7beafb38d..2dfe552d513 100644
--- a/pom.xml
+++ b/pom.xml
@@ -7,13 +7,13 @@
<groupId>io.confluent</groupId>
<artifactId>rest-utils-parent</artifactId>
- <version>[7.1.12-0, 7.1.13-0)</version>
+ <version>[7.2.10-0, 7.2.11-0)</version>
<artifactId>kafka-schema-registry-parent</artifactId>
<packaging>pom</packaging>
<name>kafka-schema-registry-parent</name>
- <version>7.1.12-0</version>
+ <version>7.2.10-0</version>
<name>Confluent, Inc.</name>
<url>http://confluent.io</url>
@@ -50,6 +50,7 @@
<module>schema-serializer</module>
<module>avro-serializer</module>
<module>json-serializer</module>
+ <module>schema-converter</module>
<module>avro-data</module>
<module>avro-converter</module>
<module>package-schema-registry</module>
@@ -84,7 +85,7 @@
3.4.0
4.9.7
2.1.10
- <io.confluent.schema-registry.version>7.1.12-0</io.confluent.schema-registry.version>
+ <io.confluent.schema-registry.version>7.2.10-0</io.confluent.schema-registry.version>
1.26.0
1.77
@@ -191,8 +192,8 @@
<version>${jackson.version}</version>
- <groupId>com.github.everit-org.json-schema</groupId>
- <artifactId>org.everit.json.schema</artifactId>
+ <groupId>com.github.erosb</groupId>
+ <artifactId>everit-json-schema</artifactId>
<version>${json-schema.version}</version>
diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml
index 10c3b2b7650..9695f62ab5f 100644
--- a/protobuf-converter/pom.xml
+++ b/protobuf-converter/pom.xml
@@ -6,7 +6,7 @@
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-registry-parent</artifactId>
- <version>7.1.12-0</version>
+ <version>7.2.10-0</version>
@@ -60,6 +60,11 @@
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-stdlib</artifactId>
+ <dependency>
+   <groupId>io.confluent</groupId>
+   <artifactId>kafka-schema-converter</artifactId>
+   <version>${io.confluent.schema-registry.version}</version>
+ </dependency>
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-serializer</artifactId>
diff --git a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java
index 9cb3c377c29..012c93e9f9f 100644
--- a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java
+++ b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java
@@ -33,6 +33,8 @@
import com.google.protobuf.Message;
import com.google.protobuf.StringValue;
import com.google.protobuf.util.Timestamps;
+import io.confluent.connect.schema.ConnectEnum;
+import io.confluent.connect.schema.ConnectUnion;
import io.confluent.kafka.schemaregistry.utils.BoundedConcurrentHashMap;
import io.confluent.protobuf.MetaProto;
import io.confluent.protobuf.MetaProto.Meta;
@@ -122,6 +124,9 @@ public class ProtobufData {
public static final String CONNECT_TYPE_INT8 = "int8";
public static final String CONNECT_TYPE_INT16 = "int16";
+ public static final String GENERALIZED_TYPE_UNION = ConnectUnion.LOGICAL_PARAMETER;
+ public static final String GENERALIZED_TYPE_ENUM = ConnectEnum.LOGICAL_PARAMETER;
+
private static final long MILLIS_PER_DAY = 24 * 60 * 60 * 1000;
private static final int MILLIS_PER_NANO = 1_000_000;
private static final TimeZone UTC = TimeZone.getTimeZone("UTC");
@@ -286,8 +291,10 @@ public class ProtobufData {
private final Map<Schema, ProtobufSchema> fromConnectSchemaCache;
private final Map<Pair<String, ProtobufSchema>, Schema> toConnectSchemaCache;
+ private boolean generalizedSumTypeSupport;
private boolean enhancedSchemaSupport;
private boolean scrubInvalidNames;
+ private boolean useIntForEnums;
private boolean useOptionalForNullables;
private boolean supportOptionalForProto2;
private boolean useWrapperForNullables;
@@ -307,8 +314,10 @@ public ProtobufData(int cacheSize) {
public ProtobufData(ProtobufDataConfig protobufDataConfig) {
fromConnectSchemaCache = new BoundedConcurrentHashMap<>(protobufDataConfig.schemaCacheSize());
toConnectSchemaCache = new BoundedConcurrentHashMap<>(protobufDataConfig.schemaCacheSize());
+ this.generalizedSumTypeSupport = protobufDataConfig.isGeneralizedSumTypeSupportDefault();
this.enhancedSchemaSupport = protobufDataConfig.isEnhancedProtobufSchemaSupport();
this.scrubInvalidNames = protobufDataConfig.isScrubInvalidNames();
+ this.useIntForEnums = protobufDataConfig.useIntForEnums();
this.useOptionalForNullables = protobufDataConfig.useOptionalForNullables();
this.supportOptionalForProto2 = protobufDataConfig.supportOptionalForProto2();
this.useWrapperForNullables = protobufDataConfig.useWrapperForNullables();
@@ -370,6 +379,10 @@ private Object fromConnectData(
case INT16:
case INT32: {
final int intValue = ((Number) value).intValue(); // Check for correct type
+ if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) {
+ String enumType = schema.parameters().get(PROTOBUF_TYPE_ENUM);
+ return protobufSchema.getEnumValue(scope + enumType, intValue);
+ }
return isWrapper ? Int32Value.newBuilder().setValue(intValue).build() : intValue;
}
@@ -402,9 +415,13 @@ private Object fromConnectData(
case STRING: {
final String stringValue = (String) value; // Check for correct type
- if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) {
- String enumType = schema.parameters().get(PROTOBUF_TYPE_ENUM);
- String tag = schema.parameters().get(PROTOBUF_TYPE_ENUM_PREFIX + stringValue);
+ if (schema.parameters() != null
+ && (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM)
+ || schema.parameters().containsKey(PROTOBUF_TYPE_ENUM))) {
+ String paramName = generalizedSumTypeSupport
+ ? GENERALIZED_TYPE_ENUM : PROTOBUF_TYPE_ENUM;
+ String enumType = schema.parameters().get(paramName);
+ String tag = schema.parameters().get(paramName + "." + stringValue);
if (tag != null) {
return protobufSchema.getEnumValue(scope + enumType, Integer.parseInt(tag));
}
@@ -469,10 +486,9 @@ private Object fromConnectData(
if (!struct.schema().equals(schema)) {
throw new DataException("Mismatching struct schema");
}
- String structName = schema.name();
//This handles the inverting of a union which is held as a struct, where each field is
// one of the union types.
- if (structName != null && structName.startsWith(PROTOBUF_TYPE_UNION_PREFIX)) {
+ if (isUnionSchema(schema)) {
for (Field field : schema.fields()) {
Object object = struct.get(field);
if (object != null) {
@@ -709,6 +725,7 @@ private MessageDefinition messageDefinitionFromConnectSchema(
if (fieldDef != null) {
boolean isProto3Optional = "optional".equals(fieldDef.getLabel());
if (isProto3Optional) {
+          // Add a synthetic oneof
MessageDefinition.OneofBuilder oneofBuilder = message.addOneof("_" + fieldDef.getName());
oneofBuilder.addField(
true,
@@ -791,9 +808,11 @@ private FieldDefinition fieldDefinitionFromConnectSchema(
Object defaultVal = null;
if (fieldSchema.type() == Schema.Type.STRUCT) {
String fieldSchemaName = fieldSchema.name();
- if (fieldSchemaName != null && fieldSchemaName.startsWith(PROTOBUF_TYPE_UNION_PREFIX)) {
- String unionName =
- getUnqualifiedName(ctx, fieldSchemaName.substring(PROTOBUF_TYPE_UNION_PREFIX.length()));
+ if (isUnionSchema(fieldSchema)) {
+ String unionName = generalizedSumTypeSupport
+ ? fieldSchema.parameters().get(GENERALIZED_TYPE_UNION)
+ : getUnqualifiedName(
+ ctx, fieldSchemaName.substring(PROTOBUF_TYPE_UNION_PREFIX.length()));
oneofDefinitionFromConnectSchema(ctx, schema, message, fieldSchema, unionName);
return null;
} else {
@@ -810,8 +829,9 @@ private FieldDefinition fieldDefinitionFromConnectSchema(
} else if (fieldSchema.type() == Schema.Type.MAP) {
message.addMessageDefinition(
mapDefinitionFromConnectSchema(ctx, schema, type, fieldSchema));
- } else if (fieldSchema.parameters() != null && fieldSchema.parameters()
- .containsKey(PROTOBUF_TYPE_ENUM)) {
+ } else if (fieldSchema.parameters() != null
+ && (fieldSchema.parameters().containsKey(GENERALIZED_TYPE_ENUM)
+ || fieldSchema.parameters().containsKey(PROTOBUF_TYPE_ENUM))) {
String enumName = getUnqualifiedName(ctx, fieldSchema.name());
if (!message.containsEnum(enumName)) {
message.addEnumDefinition(enumDefinitionFromConnectSchema(ctx, schema, fieldSchema));
@@ -998,15 +1018,16 @@ private EnumDefinition enumDefinitionFromConnectSchema(
Schema enumElem
) {
String enumName = getUnqualifiedName(ctx, enumElem.name());
- EnumDefinition.Builder enumer = EnumDefinition.newBuilder(enumName);
+ EnumDefinition.Builder enumBuilder = EnumDefinition.newBuilder(enumName);
+ String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : PROTOBUF_TYPE_ENUM;
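+    // Each enum symbol is carried as a schema parameter named "<paramName>.<symbol>" whose value is its tag number.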
for (Map.Entry<String, String> entry : enumElem.parameters().entrySet()) {
- if (entry.getKey().startsWith(PROTOBUF_TYPE_ENUM_PREFIX)) {
- String name = entry.getKey().substring(PROTOBUF_TYPE_ENUM_PREFIX.length());
+ if (entry.getKey().startsWith(paramName + ".")) {
+ String name = entry.getKey().substring(paramName.length() + 1);
int tag = Integer.parseInt(entry.getValue());
- enumer.addValue(name, tag);
+ enumBuilder.addValue(name, tag);
}
}
- return enumer.build();
+ return enumBuilder.build();
}
private String dataTypeFromConnectSchema(
@@ -1041,6 +1062,9 @@ private String dataTypeFromConnectSchema(
return useWrapperForNullables && schema.isOptional()
? PROTOBUF_INT32_WRAPPER_TYPE : FieldDescriptor.Type.INT32.toString().toLowerCase();
case INT32:
+ if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) {
+ return schema.parameters().get(PROTOBUF_TYPE_ENUM);
+ }
defaultType = FieldDescriptor.Type.INT32.toString().toLowerCase();
if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_PROP)) {
defaultType = schema.parameters().get(PROTOBUF_TYPE_PROP);
@@ -1077,8 +1101,12 @@ private String dataTypeFromConnectSchema(
return useWrapperForNullables && schema.isOptional()
? PROTOBUF_BOOL_WRAPPER_TYPE : FieldDescriptor.Type.BOOL.toString().toLowerCase();
case STRING:
- if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) {
- return schema.parameters().get(PROTOBUF_TYPE_ENUM);
+ if (schema.parameters() != null) {
+ if (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM)) {
+ return schema.parameters().get(GENERALIZED_TYPE_ENUM);
+ } else if (schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) {
+ return schema.parameters().get(PROTOBUF_TYPE_ENUM);
+ }
}
return useWrapperForNullables && schema.isOptional()
? PROTOBUF_STRING_WRAPPER_TYPE : FieldDescriptor.Type.STRING.toString().toLowerCase();
@@ -1118,6 +1146,11 @@ private boolean isTimestampSchema(Schema schema) {
return Timestamp.LOGICAL_NAME.equals(schema.name());
}
+ private static boolean isUnionSchema(Schema schema) {
+ return (schema.name() != null && schema.name().startsWith(PROTOBUF_TYPE_UNION))
+ || ConnectUnion.isUnion(schema);
+ }
+
public SchemaAndValue toConnectData(ProtobufSchema protobufSchema, Message message) {
if (message == null) {
return SchemaAndValue.NULL;
@@ -1160,7 +1193,13 @@ protected Object toConnectData(Schema schema, Object value) {
if (value instanceof Message) {
value = getWrappedValue((Message) value);
}
- converted = ((Number) value).intValue();
+ if (value instanceof Number) {
+ converted = ((Number) value).intValue();
+ } else if (value instanceof Enum) {
+ converted = ((Enum) value).ordinal();
+ } else if (value instanceof EnumValueDescriptor) {
+ converted = ((EnumValueDescriptor) value).getNumber();
+ }
break;
case INT64:
if (value instanceof Message) {
@@ -1295,8 +1334,8 @@ private void setUnionField(
OneofDescriptor oneOfDescriptor,
FieldDescriptor fieldDescriptor
) {
- String unionName = oneOfDescriptor.getName() + "_" + oneOfDescriptor.getIndex();
- Field unionField = schema.field(unionName);
+ String unionFieldName = unionFieldName(oneOfDescriptor);
+ Field unionField = schema.field(unionFieldName);
Schema unionSchema = unionField.schema();
Struct union = new Struct(unionSchema);
@@ -1308,6 +1347,10 @@ private void setUnionField(
result.put(unionField, union);
}
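+  // Connect field name for a oneof: "<oneofName>_<oneofIndex>", e.g. a oneof "payload" at index 0 becomes "payload_0".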
+ private String unionFieldName(OneofDescriptor oneofDescriptor) {
+ return oneofDescriptor.getName() + "_" + oneofDescriptor.getIndex();
+ }
+
private void setStructField(
Schema schema,
Message message,
@@ -1366,8 +1409,8 @@ private SchemaBuilder toConnectSchema(
builder.name(name);
List<OneofDescriptor> oneOfDescriptors = descriptor.getRealOneofs();
for (OneofDescriptor oneOfDescriptor : oneOfDescriptors) {
- String unionName = oneOfDescriptor.getName() + "_" + oneOfDescriptor.getIndex();
- builder.field(unionName, toConnectSchema(ctx, oneOfDescriptor));
+ String unionFieldName = unionFieldName(oneOfDescriptor);
+ builder.field(unionFieldName, toConnectSchema(ctx, oneOfDescriptor));
}
List<FieldDescriptor> fieldDescriptors = descriptor.getFields();
for (FieldDescriptor fieldDescriptor : fieldDescriptors) {
@@ -1389,8 +1432,14 @@ private SchemaBuilder toConnectSchema(
private Schema toConnectSchema(ToConnectContext ctx, OneofDescriptor descriptor) {
SchemaBuilder builder = SchemaBuilder.struct();
- String name = enhancedSchemaSupport ? descriptor.getFullName() : descriptor.getName();
- builder.name(PROTOBUF_TYPE_UNION_PREFIX + name);
+ if (generalizedSumTypeSupport) {
+ String name = descriptor.getName();
+ builder.name(name);
+ builder.parameter(GENERALIZED_TYPE_UNION, name);
+ } else {
+ String name = enhancedSchemaSupport ? descriptor.getFullName() : descriptor.getName();
+ builder.name(PROTOBUF_TYPE_UNION_PREFIX + name);
+ }
List<FieldDescriptor> fieldDescriptors = descriptor.getFields();
for (FieldDescriptor fieldDescriptor : fieldDescriptors) {
builder.field(fieldDescriptor.getName(), toConnectSchema(ctx, fieldDescriptor));
@@ -1465,16 +1514,17 @@ private Schema toConnectSchema(ToConnectContext ctx, FieldDescriptor descriptor)
break;
case ENUM:
- builder = SchemaBuilder.string();
+ builder = useIntForEnums ? SchemaBuilder.int32() : SchemaBuilder.string();
EnumDescriptor enumDescriptor = descriptor.getEnumType();
String name = enhancedSchemaSupport
? enumDescriptor.getFullName() : enumDescriptor.getName();
builder.name(name);
- builder.parameter(PROTOBUF_TYPE_ENUM, enumDescriptor.getName());
+ String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : PROTOBUF_TYPE_ENUM;
+ builder.parameter(paramName, enumDescriptor.getName());
for (EnumValueDescriptor enumValueDesc : enumDescriptor.getValues()) {
String enumSymbol = enumValueDesc.getName();
String enumTag = String.valueOf(enumValueDesc.getNumber());
- builder.parameter(PROTOBUF_TYPE_ENUM_PREFIX + enumSymbol, enumTag);
+ builder.parameter(paramName + "." + enumSymbol, enumTag);
}
builder.optional();
break;
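
A minimal sketch, not part of the diff, of the two enum encodings the ENUM branch above produces for a proto enum `enum Status { ACTIVE = 0; INACTIVE = 1; }`. It assumes the ProtobufData constants are visible, as they are in ProtobufDataTest further down: with the default config the parameters keep the protobuf enum prefix, while generalized.sum.type.support=true switches to the generalized parameter name with symbols keyed as "<param>.<SYMBOL>"; int.for.enums=true additionally swaps string() for int32().

    import org.apache.kafka.connect.data.Schema;
    import org.apache.kafka.connect.data.SchemaBuilder;

    import static io.confluent.connect.protobuf.ProtobufData.GENERALIZED_TYPE_ENUM;
    import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_ENUM;
    import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_ENUM_PREFIX;

    class EnumSchemaSketch {
      // Default mode: parameters carry the protobuf enum prefix.
      static Schema legacyStatus() {
        return SchemaBuilder.string().name("Status").optional()
            .parameter(PROTOBUF_TYPE_ENUM, "Status")
            .parameter(PROTOBUF_TYPE_ENUM_PREFIX + "ACTIVE", "0")
            .parameter(PROTOBUF_TYPE_ENUM_PREFIX + "INACTIVE", "1")
            .build();
      }

      // generalized.sum.type.support=true: same layout under the generalized name.
      static Schema generalizedStatus() {
        return SchemaBuilder.string().name("Status").optional()
            .parameter(GENERALIZED_TYPE_ENUM, "Status")
            .parameter(GENERALIZED_TYPE_ENUM + ".ACTIVE", "0")
            .parameter(GENERALIZED_TYPE_ENUM + ".INACTIVE", "1")
            .build();
      }
    }
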
diff --git a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java
index fd3f76e7a8d..699463a68a3 100644
--- a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java
+++ b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java
@@ -24,6 +24,12 @@
public class ProtobufDataConfig extends AbstractConfig {
+ public static final String GENERALIZED_SUM_TYPE_SUPPORT_CONFIG = "generalized.sum.type.support";
+ public static final boolean GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT = false;
+ public static final String GENERALIZED_SUM_TYPE_SUPPORT_DOC =
+ "Toggle for enabling/disabling generalized sum type support: interoperability of enum/union "
+ + "with other schema formats";
+
public static final String ENHANCED_PROTOBUF_SCHEMA_SUPPORT_CONFIG =
"enhanced.protobuf.schema.support";
public static final boolean ENHANCED_PROTOBUF_SCHEMA_SUPPORT_DEFAULT = false;
@@ -36,6 +42,10 @@ public class ProtobufDataConfig extends AbstractConfig {
public static final String SCRUB_INVALID_NAMES_DOC =
"Whether to scrub invalid names by replacing invalid characters with valid ones";
+ public static final String INT_FOR_ENUMS_CONFIG = "int.for.enums";
+ public static final boolean INT_FOR_ENUMS_DEFAULT = false;
+ public static final String INT_FOR_ENUMS_DOC = "Whether to represent enums as integers";
+
public static final String OPTIONAL_FOR_NULLABLES_CONFIG = "optional.for.nullables";
public static final boolean OPTIONAL_FOR_NULLABLES_DEFAULT = false;
public static final String OPTIONAL_FOR_NULLABLES_DOC = "Whether nullable fields should be "
@@ -61,6 +71,11 @@ public class ProtobufDataConfig extends AbstractConfig {
public static ConfigDef baseConfigDef() {
return new ConfigDef()
+ .define(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG,
+ ConfigDef.Type.BOOLEAN,
+ GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT,
+ ConfigDef.Importance.MEDIUM,
+ GENERALIZED_SUM_TYPE_SUPPORT_DOC)
.define(ENHANCED_PROTOBUF_SCHEMA_SUPPORT_CONFIG,
ConfigDef.Type.BOOLEAN,
ENHANCED_PROTOBUF_SCHEMA_SUPPORT_DEFAULT,
@@ -68,6 +83,11 @@ public static ConfigDef baseConfigDef() {
ENHANCED_PROTOBUF_SCHEMA_SUPPORT_DOC)
.define(SCRUB_INVALID_NAMES_CONFIG, ConfigDef.Type.BOOLEAN, SCRUB_INVALID_NAMES_DEFAULT,
ConfigDef.Importance.MEDIUM, SCRUB_INVALID_NAMES_DOC)
+ .define(INT_FOR_ENUMS_CONFIG,
+ ConfigDef.Type.BOOLEAN,
+ INT_FOR_ENUMS_DEFAULT,
+ ConfigDef.Importance.MEDIUM,
+ INT_FOR_ENUMS_DOC)
.define(OPTIONAL_FOR_NULLABLES_CONFIG,
ConfigDef.Type.BOOLEAN,
OPTIONAL_FOR_NULLABLES_DEFAULT,
@@ -100,6 +120,10 @@ public ProtobufDataConfig(Map<?, ?> props) {
super(baseConfigDef(), props);
}
+ public boolean isGeneralizedSumTypeSupportDefault() {
+ return this.getBoolean(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG);
+ }
+
public boolean isEnhancedProtobufSchemaSupport() {
return this.getBoolean(ENHANCED_PROTOBUF_SCHEMA_SUPPORT_CONFIG);
}
@@ -108,6 +132,10 @@ public boolean isScrubInvalidNames() {
return this.getBoolean(SCRUB_INVALID_NAMES_CONFIG);
}
+ public boolean useIntForEnums() {
+ return this.getBoolean(INT_FOR_ENUMS_CONFIG);
+ }
+
public boolean useOptionalForNullables() {
return this.getBoolean(OPTIONAL_FOR_NULLABLES_CONFIG);
}
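
A short usage sketch of the two new options, mirroring the ProtobufDataConfig.Builder calls that appear in the tests later in this diff; both flags are plain booleans that default to false.

    import io.confluent.connect.protobuf.ProtobufData;
    import io.confluent.connect.protobuf.ProtobufDataConfig;

    class ProtobufDataConfigSketch {
      static ProtobufData build() {
        ProtobufDataConfig config = new ProtobufDataConfig.Builder()
            .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, true)
            .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true)
            .build();
        return new ProtobufData(config);
      }
    }
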
diff --git a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java
index eccf610febf..dcf982ef1e4 100644
--- a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java
+++ b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java
@@ -91,9 +91,11 @@
import io.confluent.kafka.serializers.protobuf.test.TimestampValueOuterClass.TimestampValue;
import io.confluent.kafka.serializers.protobuf.test.UInt32ValueOuterClass;
+import static io.confluent.connect.protobuf.ProtobufData.GENERALIZED_TYPE_UNION;
import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_ENUM;
import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_PROP;
import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_TAG;
+import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_UNION;
import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_UNION_PREFIX;
import static io.confluent.kafka.serializers.protobuf.test.TimestampValueOuterClass.TimestampValue.newBuilder;
import static org.junit.Assert.assertArrayEquals;
@@ -212,12 +214,12 @@ private NestedMessage createEmptyNestedTestProto() throws ParseException {
return message.build();
}
- private Schema getExpectedNestedTestProtoSchemaStringUserId() {
- return getExpectedNestedTestProtoSchema();
+ private Schema getExpectedNestedTestProtoSchemaStringUserId(boolean useIntForEnums) {
+ return getExpectedNestedTestProtoSchema(useIntForEnums);
}
- private Schema getExpectedNestedTestProtoSchemaIntUserId() {
- return getExpectedNestedTestProtoSchema();
+ private Schema getExpectedNestedTestProtoSchemaIntUserId(boolean useIntForEnums) {
+ return getExpectedNestedTestProtoSchema(useIntForEnums);
}
private SchemaBuilder getEnumUnionSchemaBuilder() {
@@ -259,6 +261,46 @@ private SchemaBuilder getEnumUnionSchemaBuilder() {
return enumUnionBuilder;
}
+ private SchemaBuilder getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport() {
+ final SchemaBuilder enumUnionBuilder = SchemaBuilder.struct();
+ enumUnionBuilder.name("EnumUnion");
+ final SchemaBuilder someValBuilder = SchemaBuilder.struct();
+ someValBuilder.name("some_val");
+ someValBuilder.parameter(GENERALIZED_TYPE_UNION, "some_val");
+ someValBuilder.field(
+ "one_id",
+ SchemaBuilder.string().optional().parameter(PROTOBUF_TYPE_TAG, String.valueOf(1)).build()
+ );
+ someValBuilder.field(
+ "other_id",
+ SchemaBuilder.int32().optional().parameter(PROTOBUF_TYPE_TAG, String.valueOf(2)).build()
+ );
+ someValBuilder.field(
+ "some_status",
+ SchemaBuilder.string()
+ .name("Status")
+ .optional()
+ .parameter(PROTOBUF_TYPE_TAG, String.valueOf(3))
+ .parameter(ProtobufData.GENERALIZED_TYPE_ENUM, "Status")
+ .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".ACTIVE", "0")
+ .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".INACTIVE", "1")
+ .build()
+ );
+ enumUnionBuilder.field("some_val_0", someValBuilder.optional().build());
+ enumUnionBuilder.field(
+ "status",
+ SchemaBuilder.string()
+ .name("Status")
+ .optional()
+ .parameter(PROTOBUF_TYPE_TAG, String.valueOf(4))
+ .parameter(ProtobufData.GENERALIZED_TYPE_ENUM, "Status")
+ .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".ACTIVE", "0")
+ .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".INACTIVE", "1")
+ .build()
+ );
+ return enumUnionBuilder;
+ }
+
private Struct getEnumUnionWithString() throws ParseException {
Schema schema = getEnumUnionSchemaBuilder().build();
Struct result = new Struct(schema.schema());
@@ -269,6 +311,16 @@ private Struct getEnumUnionWithString() throws ParseException {
return result;
}
+ private Struct getEnumUnionWithStringWithGeneralizedSumTypeSupport() throws ParseException {
+ Schema schema = getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build();
+ Struct result = new Struct(schema.schema());
+ Struct union = new Struct(schema.field("some_val_0").schema());
+ union.put("one_id", "ID");
+ result.put("some_val_0", union);
+ result.put("status", "INACTIVE");
+ return result;
+ }
+
private Struct getEnumUnionWithSomeStatus() throws ParseException {
Schema schema = getEnumUnionSchemaBuilder().build();
Struct result = new Struct(schema.schema());
@@ -279,6 +331,16 @@ private Struct getEnumUnionWithSomeStatus() throws ParseException {
return result;
}
+ private Struct getEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport() throws ParseException {
+ Schema schema = getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build();
+ Struct result = new Struct(schema.schema());
+ Struct union = new Struct(schema.field("some_val_0").schema());
+ union.put("some_status", "INACTIVE");
+ result.put("some_val_0", union);
+ result.put("status", "INACTIVE");
+ return result;
+ }
+
private SchemaBuilder getComplexTypeSchemaBuilder() {
final SchemaBuilder complexTypeBuilder = SchemaBuilder.struct();
complexTypeBuilder.name("ComplexType");
@@ -316,6 +378,10 @@ private SchemaBuilder getInnerMessageSchemaBuilder() {
}
private Schema getExpectedNestedTestProtoSchema() {
+ return getExpectedNestedTestProtoSchema(false);
+ }
+
+ private Schema getExpectedNestedTestProtoSchema(boolean useIntForEnums) {
final SchemaBuilder builder = SchemaBuilder.struct();
builder.name("NestedMessage");
final SchemaBuilder userIdBuilder = SchemaBuilder.struct();
@@ -363,8 +429,9 @@ private Schema getExpectedNestedTestProtoSchema() {
.parameter(PROTOBUF_TYPE_TAG, String.valueOf(4))
.build()
);
+ SchemaBuilder enumBuilder = useIntForEnums ? SchemaBuilder.int32() : SchemaBuilder.string();
builder.field("status",
- SchemaBuilder.string()
+ enumBuilder
.name("Status")
.optional()
.parameter(PROTOBUF_TYPE_TAG, String.valueOf(5))
@@ -403,8 +470,8 @@ private Map<String, String> getTestKeyValueMap() {
return result;
}
- private Struct getExpectedNestedProtoResultStringUserId() throws ParseException {
- Schema schema = getExpectedNestedTestProtoSchemaStringUserId();
+ private Struct getExpectedNestedProtoResultStringUserId(boolean useIntForEnums) throws ParseException {
+ Schema schema = getExpectedNestedTestProtoSchemaStringUserId(useIntForEnums);
Struct result = new Struct(schema.schema());
Struct userId = new Struct(schema.field("user_id").schema());
Struct union = new Struct(schema.field("user_id").schema().field("user_id_0").schema());
@@ -422,7 +489,7 @@ private Struct getExpectedNestedProtoResultStringUserId() throws ParseException
experiments.add("second experiment");
result.put("experiments_active", experiments);
- result.put("status", "INACTIVE");
+ result.put("status", useIntForEnums ? 1 : "INACTIVE");
result.put("map_type", getTestKeyValueMap());
Struct inner = new Struct(schema.field("inner").schema());
@@ -432,8 +499,8 @@ private Struct getExpectedNestedProtoResultStringUserId() throws ParseException
return result;
}
- private Struct getExpectedNestedTestProtoResultIntUserId() throws ParseException {
- Schema schema = getExpectedNestedTestProtoSchemaIntUserId();
+ private Struct getExpectedNestedTestProtoResultIntUserId(boolean useIntForEnums) throws ParseException {
+ Schema schema = getExpectedNestedTestProtoSchemaIntUserId(useIntForEnums);
Struct result = new Struct(schema.schema());
Struct userId = new Struct(schema.field("user_id").schema());
Struct union = new Struct(schema.field("user_id").schema().field("user_id_0").schema());
@@ -451,7 +518,7 @@ private Struct getExpectedNestedTestProtoResultIntUserId() throws ParseException
experiments.add("second experiment");
result.put("experiments_active", experiments);
- result.put("status", "INACTIVE");
+ result.put("status", useIntForEnums ? 1 : "INACTIVE");
result.put("map_type", getTestKeyValueMap());
Struct inner = new Struct(schema.field("inner").schema());
@@ -553,9 +620,23 @@ private SchemaAndValue getSchemaAndValue(ProtobufData protobufData, Message mess
public void testToConnectDataWithNestedProtobufMessageAndStringUserId() throws Exception {
NestedMessage message = createNestedTestProtoStringUserId();
SchemaAndValue result = getSchemaAndValue(message);
- Schema expectedSchema = getExpectedNestedTestProtoSchemaStringUserId();
+ Schema expectedSchema = getExpectedNestedTestProtoSchemaStringUserId(false);
+ assertSchemasEqual(expectedSchema, result.schema());
+ Struct expected = getExpectedNestedProtoResultStringUserId(false);
+ assertEquals(expected, result.value());
+ }
+
+ @Test
+ public void testToConnectDataWithNestedProtobufMessageAndStringUserIdWithIntEnums() throws Exception {
+ NestedMessage message = createNestedTestProtoStringUserId();
+ ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder()
+ .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true)
+ .build();
+ ProtobufData protobufData = new ProtobufData(protobufDataConfig);
+ SchemaAndValue result = getSchemaAndValue(protobufData, message);
+ Schema expectedSchema = getExpectedNestedTestProtoSchemaStringUserId(true);
assertSchemasEqual(expectedSchema, result.schema());
- Struct expected = getExpectedNestedProtoResultStringUserId();
+ Struct expected = getExpectedNestedProtoResultStringUserId(true);
assertEquals(expected, result.value());
}
@@ -563,9 +644,25 @@ public void testToConnectDataWithNestedProtobufMessageAndStringUserId() throws E
public void testToConnectDataWithNestedProtobufMessageAndIntUserId() throws Exception {
NestedMessage message = createNestedTestProtoIntUserId();
SchemaAndValue result = getSchemaAndValue(message);
- Schema expectedSchema = getExpectedNestedTestProtoSchemaIntUserId();
+ Schema expectedSchema = getExpectedNestedTestProtoSchemaIntUserId(false);
assertSchemasEqual(expectedSchema, result.schema());
- Struct expected = getExpectedNestedTestProtoResultIntUserId();
+ Struct expected = getExpectedNestedTestProtoResultIntUserId(false);
+ assertSchemasEqual(expected.schema(), ((Struct) result.value()).schema());
+ assertEquals(expected.schema(), ((Struct) result.value()).schema());
+ assertEquals(expected, result.value());
+ }
+
+ @Test
+ public void testToConnectDataWithNestedProtobufMessageAndIntUserIdWithIntEnums() throws Exception {
+ NestedMessage message = createNestedTestProtoIntUserId();
+ ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder()
+ .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true)
+ .build();
+ ProtobufData protobufData = new ProtobufData(protobufDataConfig);
+ SchemaAndValue result = getSchemaAndValue(protobufData, message);
+ Schema expectedSchema = getExpectedNestedTestProtoSchemaIntUserId(true);
+ assertSchemasEqual(expectedSchema, result.schema());
+ Struct expected = getExpectedNestedTestProtoResultIntUserId(true);
assertSchemasEqual(expected.schema(), ((Struct) result.value()).schema());
assertEquals(expected.schema(), ((Struct) result.value()).schema());
assertEquals(expected, result.value());
@@ -605,9 +702,23 @@ public void testToConnectDataDefaultOneOfCannotHaveTwoOneOfsSet() throws Excepti
public void testToConnectEnumUnionWithString() throws Exception {
EnumUnion message = createEnumUnionWithString();
SchemaAndValue result = getSchemaAndValue(message);
- Schema expectedSchema = getEnumUnionSchemaBuilder().build();
+ Schema expectedSchema = getEnumUnionSchemaBuilder().build();
+ assertSchemasEqual(expectedSchema, result.schema());
+ Struct expected = getEnumUnionWithString();
+ assertEquals(expected, result.value());
+ }
+
+ @Test
+ public void testToConnectEnumUnionWithStringWithGeneralizedSumTypeSupport() throws Exception {
+ EnumUnion message = createEnumUnionWithString();
+ ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder()
+ .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true")
+ .build();
+ ProtobufData protobufData = new ProtobufData(protobufDataConfig);
+ SchemaAndValue result = getSchemaAndValue(protobufData, message);
+ Schema expectedSchema = getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build();
assertSchemasEqual(expectedSchema, result.schema());
- Struct expected = getEnumUnionWithString();
+ Struct expected = getEnumUnionWithStringWithGeneralizedSumTypeSupport();
assertEquals(expected, result.value());
}
@@ -615,9 +726,23 @@ public void testToConnectEnumUnionWithString() throws Exception {
public void testToConnectEnumUnionWithSomeStatus() throws Exception {
EnumUnion message = createEnumUnionWithSomeStatus();
SchemaAndValue result = getSchemaAndValue(message);
- Schema expectedSchema = getEnumUnionSchemaBuilder().build();
+ Schema expectedSchema = getEnumUnionSchemaBuilder().build();
+ assertSchemasEqual(expectedSchema, result.schema());
+ Struct expected = getEnumUnionWithSomeStatus();
+ assertEquals(expected, result.value());
+ }
+
+ @Test
+ public void testToConnectEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport() throws Exception {
+ EnumUnion message = createEnumUnionWithSomeStatus();
+ ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder()
+ .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true")
+ .build();
+ ProtobufData protobufData = new ProtobufData(protobufDataConfig);
+ SchemaAndValue result = getSchemaAndValue(protobufData, message);
+ Schema expectedSchema = getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build();
assertSchemasEqual(expectedSchema, result.schema());
- Struct expected = getEnumUnionWithSomeStatus();
+ Struct expected = getEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport();
assertEquals(expected, result.value());
}
@@ -1091,6 +1216,7 @@ public void testRoundTripConnectUInt32Fixed32() throws Exception {
assertTrue(parsedMessage.toString().contains("test_uint32: " + UNSIGNED_RESULT));
}
+ @Test
public void testFromConnectEnumUnionWithString() throws Exception {
EnumUnion message = createEnumUnionWithString();
SchemaAndValue schemaAndValue = getSchemaAndValue(message);
@@ -1108,6 +1234,32 @@ public void testFromConnectEnumUnionWithSomeStatus() throws Exception {
assertArrayEquals(messageBytes, message.toByteArray());
}
+ @Test
+ public void testFromConnectEnumUnionWithStringWithGeneralizedSumTypeSupport() throws Exception {
+ EnumUnion message = createEnumUnionWithString();
+ ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder()
+ .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true")
+ .build();
+ ProtobufData protobufData = new ProtobufData(protobufDataConfig);
+ SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, message);
+ byte[] messageBytes = getMessageBytes(protobufData, schemaAndValue);
+
+ assertArrayEquals(messageBytes, message.toByteArray());
+ }
+
+ @Test
+ public void testFromConnectEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport() throws Exception {
+ EnumUnion message = createEnumUnionWithSomeStatus();
+ ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder()
+ .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true")
+ .build();
+ ProtobufData protobufData = new ProtobufData(protobufDataConfig);
+ SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, message);
+ byte[] messageBytes = getMessageBytes(protobufData, schemaAndValue);
+
+ assertArrayEquals(messageBytes, message.toByteArray());
+ }
+
@Test
public void testFromConnectDataWithNestedProtobufMessageAndStringUserId() throws Exception {
NestedMessage nestedMessage = createNestedTestProtoStringUserId();
@@ -1117,6 +1269,19 @@ public void testFromConnectDataWithNestedProtobufMessageAndStringUserId() throws
assertArrayEquals(messageBytes, nestedMessage.toByteArray());
}
+ @Test
+ public void testFromConnectDataWithNestedProtobufMessageAndStringUserIdWithIntEnums() throws Exception {
+ NestedMessage nestedMessage = createNestedTestProtoStringUserId();
+ ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder()
+ .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true)
+ .build();
+ ProtobufData protobufData = new ProtobufData(protobufDataConfig);
+ SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, nestedMessage);
+ byte[] messageBytes = getMessageBytes(schemaAndValue);
+
+ assertArrayEquals(messageBytes, nestedMessage.toByteArray());
+ }
+
@Test
public void testFromConnectDataWithNestedProtobufMessageAndIntUserId() throws Exception {
NestedMessage nestedMessage = createNestedTestProtoIntUserId();
@@ -1126,6 +1291,19 @@ public void testFromConnectDataWithNestedProtobufMessageAndIntUserId() throws Ex
assertArrayEquals(messageBytes, nestedMessage.toByteArray());
}
+ @Test
+ public void testFromConnectDataWithNestedProtobufMessageAndIntUserIdWithIntEnums() throws Exception {
+ NestedMessage nestedMessage = createNestedTestProtoIntUserId();
+ ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder()
+ .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true)
+ .build();
+ ProtobufData protobufData = new ProtobufData(protobufDataConfig);
+ SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, nestedMessage);
+ byte[] messageBytes = getMessageBytes(schemaAndValue);
+
+ assertArrayEquals(messageBytes, nestedMessage.toByteArray());
+ }
+
@Test
public void testFromConnectDataWithEmptyNestedProtobufMessage() throws Exception {
NestedMessage nestedMessage = createEmptyNestedTestProto();
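
What the int.for.enums tests above assert, condensed into a sketch; protobufSchema and message stand in for a schema/message pair whose status field is set to INACTIVE (tag 1).

    import com.google.protobuf.Message;
    import io.confluent.connect.protobuf.ProtobufData;
    import io.confluent.connect.protobuf.ProtobufDataConfig;
    import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
    import org.apache.kafka.connect.data.SchemaAndValue;
    import org.apache.kafka.connect.data.Struct;

    class IntEnumSketch {
      static Integer statusAsInt(ProtobufSchema protobufSchema, Message message) {
        ProtobufData protobufData = new ProtobufData(
            new ProtobufDataConfig.Builder()
                .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true)
                .build());
        SchemaAndValue connect = protobufData.toConnectData(protobufSchema, message);
        // The enum surfaces as its tag number (1) rather than the symbol
        // "INACTIVE"; the INT32 branch also accepts Enum and
        // EnumValueDescriptor inputs now.
        return ((Struct) connect.value()).getInt32("status");
      }
    }
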
diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml
index bc0c8a7e164..e032c9a6622 100644
--- a/protobuf-provider/pom.xml
+++ b/protobuf-provider/pom.xml
@@ -6,7 +6,7 @@
io.confluent
kafka-schema-registry-parent
- 7.1.12-0
+ 7.2.10-0
diff --git a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java
index c6719905f1f..376d5b0d8a8 100644
--- a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java
+++ b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java
@@ -36,6 +36,7 @@
import com.google.protobuf.DescriptorProtos.ServiceDescriptorProto;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Descriptors.Descriptor;
+import com.google.protobuf.Descriptors.EnumDescriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.Descriptors.FileDescriptor;
import com.google.protobuf.DurationProto;
@@ -378,6 +379,20 @@ public ProtobufSchema(Descriptor descriptor, List<SchemaReference> references) {
this.descriptor = descriptor;
}
+ public ProtobufSchema(EnumDescriptor enumDescriptor) {
+ this(enumDescriptor, Collections.emptyList());
+ }
+
+ public ProtobufSchema(EnumDescriptor enumDescriptor, List<SchemaReference> references) {
+ Map<String, ProtoFileElement> dependencies = new HashMap<>();
+ this.schemaObj = toProtoFile(enumDescriptor.getFile(), dependencies);
+ this.version = null;
+ this.name = enumDescriptor.getFullName();
+ this.references = Collections.unmodifiableList(references);
+ this.dependencies = Collections.unmodifiableMap(dependencies);
+ this.descriptor = null;
+ }
+
private ProtobufSchema(
ProtoFileElement schemaObj,
Integer version,
@@ -465,7 +480,7 @@ private ProtoFileElement toProtoFile(String schema) {
byte[] bytes = base64Decoder.decode(schema);
return toProtoFile(FileDescriptorProto.parseFrom(bytes));
} catch (Exception pe) {
- throw new IllegalArgumentException("Could not parse Protobuf", e);
+ throw new IllegalArgumentException("Could not parse Protobuf - " + e.getMessage(), e);
}
}
}
@@ -964,6 +979,10 @@ public DynamicMessage.Builder newMessageBuilder(String name) {
return toDynamicSchema().newMessageBuilder(name);
}
+ public EnumDescriptor getEnumDescriptor(String enumTypeName) {
+ return toDynamicSchema().getEnumDescriptor(enumTypeName);
+ }
+
public Descriptors.EnumValueDescriptor getEnumValue(String enumTypeName, int enumNumber) {
return toDynamicSchema().getEnumValue(enumTypeName, enumNumber);
}
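
The new enum entry points, condensed from testEnumMethods further below; enumSchema is assumed to be a ProtobufSchema whose file declares TestEnum.Suit.

    import com.google.protobuf.Descriptors.EnumDescriptor;
    import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;

    class EnumDescriptorSketch {
      static ProtobufSchema fromEnum(ProtobufSchema enumSchema) {
        EnumDescriptor suit = enumSchema.getEnumDescriptor("TestEnum.Suit");
        ProtobufSchema schemaFromEnum = new ProtobufSchema(suit);  // new constructor
        // Round trip: the rebuilt schema resolves the same enum by full name.
        assert suit.getFullName()
            .equals(schemaFromEnum.getEnumDescriptor("TestEnum.Suit").getFullName());
        return schemaFromEnum;
      }
    }
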
diff --git a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java
index e18063dbefb..936501bceb0 100644
--- a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java
+++ b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java
@@ -17,7 +17,6 @@
package io.confluent.kafka.schemaregistry.protobuf;
import java.util.List;
-import java.util.Optional;
import io.confluent.kafka.schemaregistry.AbstractSchemaProvider;
import io.confluent.kafka.schemaregistry.ParsedSchema;
@@ -35,20 +34,20 @@ public String schemaType() {
}
@Override
- public Optional<ParsedSchema> parseSchema(String schemaString,
- List<SchemaReference> references,
- boolean isNew) {
+ public ParsedSchema parseSchemaOrElseThrow(String schemaString,
+ List<SchemaReference> references,
+ boolean isNew) {
try {
- return Optional.of(new ProtobufSchema(
- schemaString,
- references,
- resolveReferences(references),
- null,
- null
- ));
+ return new ProtobufSchema(
+ schemaString,
+ references,
+ resolveReferences(references),
+ null,
+ null
+ );
} catch (Exception e) {
log.error("Could not parse Protobuf schema", e);
- return Optional.empty();
+ throw e;
}
}
}
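
Behavioral contract of this change, as the new provider tests below verify: parseSchemaOrElseThrow propagates the parse failure, while the Optional-returning parseSchema (now a default on SchemaProvider) still absorbs it. A sketch under that assumption:

    import java.util.Collections;
    import java.util.Optional;
    import io.confluent.kafka.schemaregistry.ParsedSchema;
    import io.confluent.kafka.schemaregistry.SchemaProvider;
    import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;

    class ParseSketch {
      static void parse(String schemaString) {
        SchemaProvider provider = new ProtobufSchemaProvider();
        // Invalid input: empty Optional, no exception.
        Optional<ParsedSchema> maybe =
            provider.parseSchema(schemaString, Collections.emptyList(), false);
        // Invalid input: IllegalArgumentException propagates from here.
        ParsedSchema parsed =
            provider.parseSchemaOrElseThrow(schemaString, Collections.emptyList(), false);
      }
    }
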
diff --git a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java
index f65e68150d5..2bc1fd48163 100644
--- a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java
+++ b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java
@@ -24,6 +24,7 @@
import com.google.protobuf.DescriptorProtos.FileDescriptorSet;
import com.google.protobuf.DescriptorProtos.FileOptions;
import com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode;
+import com.google.protobuf.DescriptorProtos.ServiceDescriptorProto;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.DescriptorValidationException;
import com.google.protobuf.Descriptors.EnumDescriptor;
@@ -426,6 +427,16 @@ public Builder addEnumDefinition(EnumDefinition enumDef) {
return this;
}
+ public boolean containsService(String name) {
+ List<ServiceDescriptorProto> services = mFileDescProtoBuilder.getServiceList();
+ for (ServiceDescriptorProto service : services) {
+ if (service.getName().equals(name)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
public Builder addServiceDefinition(ServiceDefinition serviceDef) {
mFileDescProtoBuilder.addService(serviceDef.getServiceType());
return this;
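
A guard this lookup enables, sketched with a hypothetical builder and service definition in scope; containsService is the only new call.

    import io.confluent.kafka.schemaregistry.protobuf.dynamic.DynamicSchema;
    import io.confluent.kafka.schemaregistry.protobuf.dynamic.ServiceDefinition;

    class ServiceGuardSketch {
      static void addOnce(DynamicSchema.Builder schemaBuilder,
                          ServiceDefinition serviceDef, String name) {
        if (!schemaBuilder.containsService(name)) {  // new lookup
          schemaBuilder.addServiceDefinition(serviceDef);
        }
      }
    }
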
diff --git a/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java b/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java
index f5ff2507965..2eb2c428003 100644
--- a/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java
+++ b/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java
@@ -21,9 +21,12 @@
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors.Descriptor;
+import com.google.protobuf.Descriptors.EnumDescriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.DynamicMessage;
import com.squareup.wire.schema.internal.parser.ProtoFileElement;
+import io.confluent.kafka.schemaregistry.ParsedSchema;
+import io.confluent.kafka.schemaregistry.SchemaProvider;
import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference;
import io.confluent.kafka.schemaregistry.protobuf.dynamic.DynamicSchema;
import io.confluent.kafka.schemaregistry.protobuf.dynamic.MessageDefinition;
@@ -35,6 +38,7 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
+import java.util.Optional;
import io.confluent.kafka.schemaregistry.CompatibilityLevel;
import io.confluent.kafka.schemaregistry.protobuf.diff.ResourceLoader;
@@ -42,6 +46,7 @@
import static io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema.PROTO3;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@@ -170,6 +175,18 @@ public class ProtobufSchemaTest {
private static final ProtobufSchema enumBeforeMessageSchema =
new ProtobufSchema(enumBeforeMessageSchemaString);
+ private static final String invalidSchemaString = "syntax = \"proto3\";\n"
+ + "\n"
+ + "option java_package = \"io.confluent.kafka.serializers.protobuf.test\";\n"
+ + "option java_outer_classname = \"TestMessageProtos\";\n"
+ + "\n"
+ + "import \"google/protobuf/descriptor.proto\";\n"
+ + "\n"
+ + "message TestMessage {\n"
+ + " string test_string = 1 [json_name = \"test_str\"];\n"
+ + " int32 test_int32 = 8.01;\n"
+ + "}\n";
+
@Test
public void testRecordToProtobuf() throws Exception {
String json = "{\n"
@@ -1228,6 +1245,41 @@ public void testEnumAfterMessage() throws Exception {
new ProtobufSchema(enumBeforeMessageSchema.toDescriptor()).canonicalString());
}
+ @Test
+ public void testParseSchema() {
+ SchemaProvider protobufSchemaProvider = new ProtobufSchemaProvider();
+ ParsedSchema parsedSchema = protobufSchemaProvider.parseSchemaOrElseThrow(recordSchemaString,
+ new ArrayList<>(), false);
+ Optional<ParsedSchema> parsedSchemaOptional = protobufSchemaProvider.parseSchema(recordSchemaString,
+ new ArrayList<>(), false);
+
+ assertNotNull(parsedSchema);
+ assertTrue(parsedSchemaOptional.isPresent());
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void testParseSchemaThrowException() {
+ SchemaProvider protobufSchemaProvider = new ProtobufSchemaProvider();
+ protobufSchemaProvider.parseSchemaOrElseThrow(invalidSchemaString,
+ new ArrayList<>(), false);
+ }
+
+ @Test
+ public void testParseSchemaSuppressException() {
+ SchemaProvider protobufSchemaProvider = new ProtobufSchemaProvider();
+ Optional<ParsedSchema> parsedSchema = protobufSchemaProvider.parseSchema(invalidSchemaString,
+ new ArrayList<>(), false);
+ assertFalse(parsedSchema.isPresent());
+ }
+
+ @Test
+ public void testEnumMethods() {
+ EnumDescriptor enumDescriptor = enumSchema.getEnumDescriptor("TestEnum.Suit");
+ ProtobufSchema enumSchema2 = new ProtobufSchema(enumDescriptor);
+ EnumDescriptor enumDescriptor2 = enumSchema2.getEnumDescriptor("TestEnum.Suit");
+ assertEquals(enumDescriptor.getFullName(), enumDescriptor2.getFullName());
+ }
+
private static JsonNode jsonTree(String jsonData) {
try {
return objectMapper.readTree(jsonData);
diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml
index d806fe4de71..293c24724d8 100644
--- a/protobuf-serde/pom.xml
+++ b/protobuf-serde/pom.xml
@@ -7,7 +7,7 @@
io.confluent
kafka-schema-registry-parent
- 7.1.12-0
+ 7.2.10-0
diff --git a/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java b/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java
index 4d33daefd95..8718c886e52 100644
--- a/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java
+++ b/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java
@@ -54,11 +54,19 @@ public KafkaProtobufSerde(Class<T> specificProtobufClass) {
* For testing purposes only.
*/
public KafkaProtobufSerde(final SchemaRegistryClient client) {
+ this(client, null);
+ }
+
+ /**
+ * For testing purposes only.
+ */
+ public KafkaProtobufSerde(final SchemaRegistryClient client, final Class<T> specificClass) {
if (client == null) {
throw new IllegalArgumentException("schema registry client must not be null");
}
+ this.specificProtobufClass = specificClass;
inner = Serdes.serdeFrom(new KafkaProtobufSerializer<>(client),
- new KafkaProtobufDeserializer<>(client));
+ new KafkaProtobufDeserializer<>(client));
}
@Override
@@ -106,4 +114,4 @@ private Map<String, Object> withSpecificClass(final Map<String, Object> config, boole
return newConfig;
}
-}
\ No newline at end of file
+}
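
Usage sketch for the new two-argument test constructor; MockSchemaRegistryClient is the stock test client, and the message class parameter is whatever specific type the test deserializes to.

    import com.google.protobuf.Message;
    import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
    import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
    import io.confluent.kafka.streams.serdes.protobuf.KafkaProtobufSerde;

    class SerdeSketch {
      static <T extends Message> KafkaProtobufSerde<T> testSerde(Class<T> specificClass) {
        SchemaRegistryClient client = new MockSchemaRegistryClient();  // test-only client
        // Previously only the one-arg constructor existed for tests; the serde
        // now records the specific type up front instead of via configure().
        return new KafkaProtobufSerde<>(client, specificClass);
      }
    }
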
diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml
index 336ab08438e..836d5ea91e6 100644
--- a/protobuf-serializer/pom.xml
+++ b/protobuf-serializer/pom.xml
@@ -6,7 +6,7 @@
io.confluent
kafka-schema-registry-parent
- 7.1.12-0
+ 7.2.10-0
diff --git a/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java b/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java
index b7b65676977..f651800cde3 100644
--- a/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java
+++ b/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java
@@ -583,7 +583,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (partition_ != null) {
output.writeMessage(2, getPartition());
}
- if (!getUidBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uid_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, uid_);
}
com.google.protobuf.GeneratedMessageV3
@@ -612,7 +612,7 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getPartition());
}
- if (!getUidBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uid_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, uid_);
}
for (java.util.Map.Entry entry
diff --git a/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java b/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java
index dd53e2749ee..1ebe41d52c8 100644
--- a/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java
+++ b/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java
@@ -2713,7 +2713,7 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_);
}
for (int i = 0; i < format_.size(); i++) {
@@ -2722,13 +2722,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (partitionScheme_ != com.acme.glup.MetadataProto.PartitionScheme.UNSUPPORTED_PARTITION_SCHEME.getNumber()) {
output.writeEnum(3, partitionScheme_);
}
- if (!getJavaClassBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(javaClass_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, javaClass_);
}
if (forTests_ != false) {
output.writeBool(5, forTests_);
}
- if (!getOwnerBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 6, owner_);
}
if (private_ != false) {
@@ -2749,7 +2749,7 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_);
}
for (int i = 0; i < format_.size(); i++) {
@@ -2760,14 +2760,14 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(3, partitionScheme_);
}
- if (!getJavaClassBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(javaClass_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, javaClass_);
}
if (forTests_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(5, forTests_);
}
- if (!getOwnerBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, owner_);
}
if (private_ != false) {
@@ -4414,7 +4414,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (format_ != null) {
output.writeMessage(2, getFormat());
}
- if (!getDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, datasetId_);
}
unknownFields.writeTo(output);
@@ -4434,7 +4434,7 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getFormat());
}
- if (!getDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, datasetId_);
}
size += unknownFields.getSerializedSize();
@@ -5894,7 +5894,7 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getPathBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, path_);
}
if (fileFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) {
@@ -5915,7 +5915,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (priority_ != 0) {
output.writeInt32(8, priority_);
}
- if (!getLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 9, label_);
}
if (monitoringLevel_ != com.acme.glup.MetadataProto.MonitoringLevel.DEFAULT.getNumber()) {
@@ -5930,7 +5930,7 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getPathBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, path_);
}
if (fileFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) {
@@ -5957,7 +5957,7 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(8, priority_);
}
- if (!getLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, label_);
}
if (monitoringLevel_ != com.acme.glup.MetadataProto.MonitoringLevel.DEFAULT.getNumber()) {
@@ -9786,16 +9786,16 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getInputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, inputDatasetId_);
}
- if (!getInputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, inputFormatLabel_);
}
- if (!getOutputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, outputDatasetId_);
}
- if (!getOutputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, outputFormatLabel_);
}
if (useHippoCuttleJob_ != false) {
@@ -9810,16 +9810,16 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getInputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, inputDatasetId_);
}
- if (!getInputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, inputFormatLabel_);
}
- if (!getOutputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, outputDatasetId_);
}
- if (!getOutputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, outputFormatLabel_);
}
if (useHippoCuttleJob_ != false) {
@@ -10972,16 +10972,16 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getTopicBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, topic_);
}
if (deduplicate_ != false) {
output.writeBool(3, deduplicate_);
}
- if (!getOutputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, outputDatasetId_);
}
- if (!getOutputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, outputFormatLabel_);
}
unknownFields.writeTo(output);
@@ -10993,17 +10993,17 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getTopicBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, topic_);
}
if (deduplicate_ != false) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, deduplicate_);
}
- if (!getOutputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, outputDatasetId_);
}
- if (!getOutputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, outputFormatLabel_);
}
size += unknownFields.getSerializedSize();
@@ -13657,10 +13657,10 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getTopicBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, topic_);
}
- if (!getOutputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, outputDatasetId_);
}
if (deduplicate_ != false) {
@@ -13669,7 +13669,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (config_ != null) {
output.writeMessage(4, getConfig());
}
- if (!getOutputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, outputFormatLabel_);
}
for (int i = 0; i < configPerDc_.size(); i++) {
@@ -13684,10 +13684,10 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getTopicBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, topic_);
}
- if (!getOutputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, outputDatasetId_);
}
if (deduplicate_ != false) {
@@ -13698,7 +13698,7 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, getConfig());
}
- if (!getOutputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, outputFormatLabel_);
}
for (int i = 0; i < configPerDc_.size(); i++) {
@@ -16422,7 +16422,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (from_ != null) {
output.writeMessage(1, getFrom());
}
- if (!getSourceNamespaceBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, sourceNamespace_);
}
if (getPlatformsList().size() > 0) {
@@ -16435,10 +16435,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (isBackfilling_ != false) {
output.writeBool(8, isBackfilling_);
}
- if (!getToLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 9, toLabel_);
}
- if (!getToDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toDatasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 10, toDatasetId_);
}
if (withBackfilling_ != false) {
@@ -16460,7 +16460,7 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getFrom());
}
- if (!getSourceNamespaceBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, sourceNamespace_);
}
{
@@ -16479,10 +16479,10 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(8, isBackfilling_);
}
- if (!getToLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, toLabel_);
}
- if (!getToDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toDatasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, toDatasetId_);
}
if (withBackfilling_ != false) {
@@ -18109,7 +18109,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (from_ != null) {
output.writeMessage(1, getFrom());
}
- if (!getSourceNamespaceBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, sourceNamespace_);
}
if (getPlatformsList().size() > 0) {
@@ -18132,7 +18132,7 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getFrom());
}
- if (!getSourceNamespaceBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, sourceNamespace_);
}
{
@@ -19485,10 +19485,10 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getInputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, inputDatasetId_);
}
- if (!getOutputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, outputDatasetId_);
}
if (inputFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) {
@@ -19497,10 +19497,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (outputFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) {
output.writeEnum(4, outputFormat_);
}
- if (!getInputDatasetLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, inputDatasetLabel_);
}
- if (!getOutputDatasetLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 6, outputDatasetLabel_);
}
if (isByPlatform_ != false) {
@@ -19515,10 +19515,10 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getInputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, inputDatasetId_);
}
- if (!getOutputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, outputDatasetId_);
}
if (inputFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) {
@@ -19529,10 +19529,10 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(4, outputFormat_);
}
- if (!getInputDatasetLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, inputDatasetLabel_);
}
- if (!getOutputDatasetLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, outputDatasetLabel_);
}
if (isByPlatform_ != false) {
@@ -20922,19 +20922,19 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getInputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, inputDatasetId_);
}
- if (!getInputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, inputFormatLabel_);
}
- if (!getOutputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, outputDatasetId_);
}
- if (!getOutputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, outputFormatLabel_);
}
- if (samplingRate_ != 0F) {
+ if (java.lang.Float.floatToRawIntBits(samplingRate_) != 0) {
output.writeFloat(5, samplingRate_);
}
unknownFields.writeTo(output);
@@ -20946,19 +20946,19 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getInputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, inputDatasetId_);
}
- if (!getInputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, inputFormatLabel_);
}
- if (!getOutputDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, outputDatasetId_);
}
- if (!getOutputFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, outputFormatLabel_);
}
- if (samplingRate_ != 0F) {
+ if (java.lang.Float.floatToRawIntBits(samplingRate_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeFloatSize(5, samplingRate_);
}
@@ -22298,22 +22298,22 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getLeftDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftDatasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, leftDatasetId_);
}
- if (!getLeftFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftFormatLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, leftFormatLabel_);
}
- if (!getRightDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightDatasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, rightDatasetId_);
}
- if (!getRightFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightFormatLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, rightFormatLabel_);
}
- if (!getHostnameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, hostname_);
}
- if (!getIgnoredFieldsBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ignoredFields_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 6, ignoredFields_);
}
unknownFields.writeTo(output);
@@ -22325,22 +22325,22 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getLeftDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftDatasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, leftDatasetId_);
}
- if (!getLeftFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftFormatLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, leftFormatLabel_);
}
- if (!getRightDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightDatasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, rightDatasetId_);
}
- if (!getRightFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightFormatLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, rightFormatLabel_);
}
- if (!getHostnameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, hostname_);
}
- if (!getIgnoredFieldsBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ignoredFields_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, ignoredFields_);
}
size += unknownFields.getSerializedSize();
@@ -24259,13 +24259,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
for (int i = 0; i < to_.size(); i++) {
output.writeMessage(250, to_.get(i));
}
- if (!getNamespaceBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(namespace_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 251, namespace_);
}
- if (!getStartDateBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(startDate_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 253, startDate_);
}
- if (!getStopDateBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stopDate_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 254, stopDate_);
}
if (ignoreCn_ != false) {
@@ -24324,13 +24324,13 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(250, to_.get(i));
}
- if (!getNamespaceBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(namespace_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(251, namespace_);
}
- if (!getStartDateBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(startDate_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(253, startDate_);
}
- if (!getStopDateBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stopDate_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(254, stopDate_);
}
if (ignoreCn_ != false) {
@@ -27092,10 +27092,10 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getOwnerBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, owner_);
}
- if (!getNameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_);
}
if (partitioning_ != com.acme.glup.MetadataProto.PartitionScheme.UNSUPPORTED_PARTITION_SCHEME.getNumber()) {
@@ -27122,10 +27122,10 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getOwnerBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, owner_);
}
- if (!getNameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_);
}
if (partitioning_ != com.acme.glup.MetadataProto.PartitionScheme.UNSUPPORTED_PARTITION_SCHEME.getNumber()) {
@@ -32944,7 +32944,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (pendingDeletion_ != false) {
output.writeBool(5, pendingDeletion_);
}
- if (!getAddedAtBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(addedAt_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 6, addedAt_);
}
unknownFields.writeTo(output);
@@ -32984,7 +32984,7 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(5, pendingDeletion_);
}
- if (!getAddedAtBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(addedAt_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, addedAt_);
}
size += unknownFields.getSerializedSize();
@@ -33962,7 +33962,7 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getNameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (skip_ != false) {
@@ -33977,7 +33977,7 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getNameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (skip_ != false) {
@@ -34670,7 +34670,7 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getNameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (useEnumFieldId_ != false) {
@@ -34685,7 +34685,7 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getNameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (useEnumFieldId_ != false) {
@@ -37356,10 +37356,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (consolidationEnabled_ != false) {
output.writeBool(7, consolidationEnabled_);
}
- if (!getDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 10, datasetId_);
}
- if (!getDatasetFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetFormatLabel_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 11, datasetFormatLabel_);
}
for (int i = 0; i < controlMessage_.size(); i++) {
@@ -37386,10 +37386,10 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(7, consolidationEnabled_);
}
- if (!getDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, datasetId_);
}
- if (!getDatasetFormatLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetFormatLabel_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(11, datasetFormatLabel_);
}
for (int i = 0; i < controlMessage_.size(); i++) {
@@ -38773,10 +38773,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (dc_ != com.acme.glup.MetadataProto.DataCenter.UNSUPPORTED_DATACENTER.getNumber()) {
output.writeEnum(2, dc_);
}
- if (!getLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, label_);
}
- if (!getDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, datasetId_);
}
unknownFields.writeTo(output);
@@ -38796,10 +38796,10 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(2, dc_);
}
- if (!getLabelBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, label_);
}
- if (!getDatasetIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, datasetId_);
}
size += unknownFields.getSerializedSize();
@@ -39849,13 +39849,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (ip4_ != 0) {
output.writeFixed32(2, ip4_);
}
- if (!getHostnameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, hostname_);
}
- if (!getContainerTaskBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerTask_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, containerTask_);
}
- if (!getContainerAppBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerApp_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, containerApp_);
}
unknownFields.writeTo(output);
@@ -39875,13 +39875,13 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeFixed32Size(2, ip4_);
}
- if (!getHostnameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, hostname_);
}
- if (!getContainerTaskBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerTask_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, containerTask_);
}
- if (!getContainerAppBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerApp_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, containerApp_);
}
size += unknownFields.getSerializedSize();
@@ -41107,7 +41107,7 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getKafkaTopicBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, kafkaTopic_);
}
if (datacenter_ != com.acme.glup.MetadataProto.DataCenter.UNSUPPORTED_DATACENTER.getNumber()) {
@@ -41125,7 +41125,7 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getKafkaTopicBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, kafkaTopic_);
}
if (datacenter_ != com.acme.glup.MetadataProto.DataCenter.UNSUPPORTED_DATACENTER.getNumber()) {
@@ -42494,13 +42494,13 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getTypeBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, type_);
}
- if (!getHostnameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, hostname_);
}
- if (!getKafkaTopicBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, kafkaTopic_);
}
if (partition_ != 0) {
@@ -42512,16 +42512,16 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (!processUuid_.isEmpty()) {
output.writeBytes(6, processUuid_);
}
- if (!getRegionBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 7, region_);
}
if (timestampSeconds_ != 0) {
output.writeInt32(8, timestampSeconds_);
}
- if (!getClusterBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cluster_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 9, cluster_);
}
- if (!getEnvironmentBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 10, environment_);
}
com.google.protobuf.GeneratedMessageV3
@@ -42539,13 +42539,13 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getTypeBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, type_);
}
- if (!getHostnameBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, hostname_);
}
- if (!getKafkaTopicBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, kafkaTopic_);
}
if (partition_ != 0) {
@@ -42560,17 +42560,17 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(6, processUuid_);
}
- if (!getRegionBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, region_);
}
if (timestampSeconds_ != 0) {
size += com.google.protobuf.CodedOutputStream
.computeInt32Size(8, timestampSeconds_);
}
- if (!getClusterBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cluster_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, cluster_);
}
- if (!getEnvironmentBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, environment_);
}
for (java.util.Map.Entry<java.lang.String, java.lang.String> entry
diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java
index 212e9aaec47..d712b61efa1 100644
--- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java
+++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java
@@ -236,8 +236,8 @@ private EnumUnion(
break;
}
case 16: {
- someValCase_ = 2;
someVal_ = input.readInt32();
+ someValCase_ = 2;
break;
}
case 24: {
diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java
index acc1da0958b..c90ad498263 100644
--- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java
+++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java
@@ -223,8 +223,8 @@ private UserId(
break;
}
case 16: {
- userIdCase_ = 2;
userId_ = input.readInt32();
+ userIdCase_ = 2;
break;
}
case 26: {
@@ -1293,7 +1293,7 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_);
}
unknownFields.writeTo(output);
@@ -1305,7 +1305,7 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_);
}
size += unknownFields.getSerializedSize();
@@ -1801,8 +1801,8 @@ private ComplexType(
break;
}
case 16: {
- someValCase_ = 2;
someVal_ = input.readInt32();
+ someValCase_ = 2;
break;
}
case 24: {
@@ -3229,7 +3229,7 @@ public final boolean isInitialized() {
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
- if (!getIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_);
}
if (getIdsList().size() > 0) {
@@ -3248,7 +3248,7 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getIdBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_);
}
{
@@ -3946,7 +3946,7 @@ public int getMapTypeCount() {
@java.lang.Override
public boolean containsMapType(
java.lang.String key) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
return internalGetMapType().getMap().containsKey(key);
}
/**
@@ -3973,7 +3973,7 @@ public java.util.Map<java.lang.String, java.lang.String> getMapTypeMap() {
public java.lang.String getMapTypeOrDefault(
java.lang.String key,
java.lang.String defaultValue) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
java.util.Map<java.lang.String, java.lang.String> map =
internalGetMapType().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
@@ -3985,7 +3985,7 @@ public java.lang.String getMapTypeOrDefault(
public java.lang.String getMapTypeOrThrow(
java.lang.String key) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
java.util.Map<java.lang.String, java.lang.String> map =
internalGetMapType().getMap();
if (!map.containsKey(key)) {
@@ -5133,7 +5133,7 @@ public int getMapTypeCount() {
@java.lang.Override
public boolean containsMapType(
java.lang.String key) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
return internalGetMapType().getMap().containsKey(key);
}
/**
@@ -5160,7 +5160,7 @@ public java.util.Map<java.lang.String, java.lang.String> getMapTypeMap() {
public java.lang.String getMapTypeOrDefault(
java.lang.String key,
java.lang.String defaultValue) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
java.util.Map<java.lang.String, java.lang.String> map =
internalGetMapType().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
@@ -5172,7 +5172,7 @@ public java.lang.String getMapTypeOrDefault(
public java.lang.String getMapTypeOrThrow(
java.lang.String key) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
java.util.Map<java.lang.String, java.lang.String> map =
internalGetMapType().getMap();
if (!map.containsKey(key)) {
@@ -5192,7 +5192,7 @@ public Builder clearMapType() {
public Builder removeMapType(
java.lang.String key) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
internalGetMutableMapType().getMutableMap()
.remove(key);
return this;
@@ -5211,8 +5211,8 @@ public Builder removeMapType(
public Builder putMapType(
java.lang.String key,
java.lang.String value) {
- if (key == null) { throw new java.lang.NullPointerException(); }
- if (value == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
+ if (value == null) { throw new NullPointerException("map value"); }
internalGetMutableMapType().getMutableMap()
.put(key, value);
return this;
diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java
index 1bd7557e8ed..2cfce29e32d 100644
--- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java
+++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java
@@ -238,7 +238,7 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getTestStringBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, testString_);
}
if (((bitField0_ & 0x00000001) != 0)) {
@@ -253,7 +253,7 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getTestStringBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, testString_);
}
if (((bitField0_ & 0x00000001) != 0)) {
diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java
index ae7bffb5f76..2432b6a2d5e 100644
--- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java
+++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java
@@ -475,7 +475,7 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getTestStringBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, testString_);
}
if (testBool_ != false) {
@@ -484,10 +484,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (!testBytes_.isEmpty()) {
output.writeBytes(3, testBytes_);
}
- if (testDouble_ != 0D) {
+ if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) {
output.writeDouble(4, testDouble_);
}
- if (testFloat_ != 0F) {
+ if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) {
output.writeFloat(5, testFloat_);
}
if (testFixed32_ != 0) {
@@ -529,7 +529,7 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getTestStringBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, testString_);
}
if (testBool_ != false) {
@@ -540,11 +540,11 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, testBytes_);
}
- if (testDouble_ != 0D) {
+ if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(4, testDouble_);
}
- if (testFloat_ != 0F) {
+ if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeFloatSize(5, testFloat_);
}
@@ -2091,7 +2091,7 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getTestStringBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, testString_);
}
if (testBool_ != false) {
@@ -2100,10 +2100,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output)
if (!testBytes_.isEmpty()) {
output.writeBytes(3, testBytes_);
}
- if (testDouble_ != 0D) {
+ if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) {
output.writeDouble(4, testDouble_);
}
- if (testFloat_ != 0F) {
+ if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) {
output.writeFloat(5, testFloat_);
}
if (testFixed32_ != 0) {
@@ -2148,7 +2148,7 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getTestStringBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, testString_);
}
if (testBool_ != false) {
@@ -2159,11 +2159,11 @@ public int getSerializedSize() {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, testBytes_);
}
- if (testDouble_ != 0D) {
+ if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(4, testDouble_);
}
- if (testFloat_ != 0F) {
+ if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) {
size += com.google.protobuf.CodedOutputStream
.computeFloatSize(5, testFloat_);
}
diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml
index 02b0dfd4da6..3b251c949e1 100644
--- a/protobuf-types/pom.xml
+++ b/protobuf-types/pom.xml
@@ -6,7 +6,7 @@
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-registry-parent</artifactId>
- <version>7.1.12-0</version>
+ <version>7.2.10-0</version>
diff --git a/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java b/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java
index 5fbc82985b0..01fff765271 100644
--- a/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java
+++ b/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java
@@ -250,7 +250,7 @@ public int getParamsCount() {
@java.lang.Override
public boolean containsParams(
java.lang.String key) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
return internalGetParams().getMap().containsKey(key);
}
/**
@@ -277,7 +277,7 @@ public java.util.Map<java.lang.String, java.lang.String> getParamsMap() {
public java.lang.String getParamsOrDefault(
java.lang.String key,
java.lang.String defaultValue) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
java.util.Map<java.lang.String, java.lang.String> map =
internalGetParams().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
@@ -289,7 +289,7 @@ public java.lang.String getParamsOrDefault(
public java.lang.String getParamsOrThrow(
java.lang.String key) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
java.util.Map<java.lang.String, java.lang.String> map =
internalGetParams().getMap();
if (!map.containsKey(key)) {
@@ -312,7 +312,7 @@ public final boolean isInitialized() {
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
- if (!getDocBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(doc_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, doc_);
}
com.google.protobuf.GeneratedMessageV3
@@ -330,7 +330,7 @@ public int getSerializedSize() {
if (size != -1) return size;
size = 0;
- if (!getDocBytes().isEmpty()) {
+ if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(doc_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, doc_);
}
for (java.util.Map.Entry<java.lang.String, java.lang.String> entry
@@ -760,7 +760,7 @@ public int getParamsCount() {
@java.lang.Override
public boolean containsParams(
java.lang.String key) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
return internalGetParams().getMap().containsKey(key);
}
/**
@@ -787,7 +787,7 @@ public java.util.Map<java.lang.String, java.lang.String> getParamsMap() {
public java.lang.String getParamsOrDefault(
java.lang.String key,
java.lang.String defaultValue) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
java.util.Map<java.lang.String, java.lang.String> map =
internalGetParams().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
@@ -799,7 +799,7 @@ public java.lang.String getParamsOrDefault(
public java.lang.String getParamsOrThrow(
java.lang.String key) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
java.util.Map<java.lang.String, java.lang.String> map =
internalGetParams().getMap();
if (!map.containsKey(key)) {
@@ -819,7 +819,7 @@ public Builder clearParams() {
public Builder removeParams(
java.lang.String key) {
- if (key == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
internalGetMutableParams().getMutableMap()
.remove(key);
return this;
@@ -838,8 +838,8 @@ public Builder removeParams(
public Builder putParams(
java.lang.String key,
java.lang.String value) {
- if (key == null) { throw new java.lang.NullPointerException(); }
- if (value == null) { throw new java.lang.NullPointerException(); }
+ if (key == null) { throw new NullPointerException("map key"); }
+ if (value == null) { throw new NullPointerException("map value"); }
internalGetMutableParams().getMutableMap()
.put(key, value);
return this;
diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml
new file mode 100644
index 00000000000..a4d2b6a92ac
--- /dev/null
+++ b/schema-converter/pom.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>io.confluent</groupId>
+        <artifactId>kafka-schema-registry-parent</artifactId>
+        <version>7.2.10-0</version>
+    </parent>
+
+    <licenses>
+        <license>
+            <name>Apache License 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.html</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <artifactId>kafka-schema-converter</artifactId>
+    <packaging>jar</packaging>
+    <name>kafka-schema-converter</name>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.kafka</groupId>
+            <artifactId>connect-api</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>
diff --git a/schema-converter/src/main/java/io/confluent/connect/schema/ConnectEnum.java b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectEnum.java
new file mode 100644
index 00000000000..3fd0b59b2c6
--- /dev/null
+++ b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectEnum.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2021 Confluent Inc.
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.confluent.connect.schema;
+
+import java.util.List;
+import java.util.Map;
+import org.apache.kafka.connect.data.Schema;
+import org.apache.kafka.connect.data.SchemaBuilder;
+import org.apache.kafka.connect.errors.DataException;
+
+public class ConnectEnum {
+
+ public static final String LOGICAL_PARAMETER = "org.apache.kafka.connect.data.Enum";
+
+ /**
+ * Returns a SchemaBuilder for an Enum.
+ *
+ * @param annotation an arbitrary annotation to be associated with the enum
+ * @param symbols the enum symbols
+ * @return a SchemaBuilder
+ */
+ public static SchemaBuilder builder(String annotation, List<String> symbols) {
+ SchemaBuilder builder = SchemaBuilder.string().parameter(LOGICAL_PARAMETER, annotation);
+ for (int i = 0; i < symbols.size(); i++) {
+ builder.parameter(LOGICAL_PARAMETER + "." + symbols.get(i), String.valueOf(i));
+ }
+ return builder;
+ }
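+
+ // Illustrative sketch (not part of the original patch): for a hypothetical
+ // annotation "com.acme.Color" with symbols ["RED", "GREEN"], the builder
+ // above produces a string schema carrying these parameters:
+ //   org.apache.kafka.connect.data.Enum       -> "com.acme.Color"
+ //   org.apache.kafka.connect.data.Enum.RED   -> "0"
+ //   org.apache.kafka.connect.data.Enum.GREEN -> "1"
+ //
+ //   Schema colors = ConnectEnum.builder("com.acme.Color",
+ //       java.util.Arrays.asList("RED", "GREEN")).build();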
+
+ /**
+ * Returns a SchemaBuilder for an Enum.
+ *
+ * @param annotation an arbitrary annotation to be associated with the enum
+ * @param symbols a map of enum symbol to its ordinal
+ * @return a SchemaBuilder
+ */
+ public static SchemaBuilder builder(String annotation, Map<String, Integer> symbols) {
+ SchemaBuilder builder = SchemaBuilder.string().parameter(LOGICAL_PARAMETER, annotation);
+ for (Map.Entry<String, Integer> symbol : symbols.entrySet()) {
+ builder.parameter(LOGICAL_PARAMETER + "." + symbol.getKey(),
+ String.valueOf(symbol.getValue()));
+ }
+ return builder;
+ }
+
+ /**
+ * Returns whether a schema represents an Enum.
+ *
+ * @param schema the schema
+ * @return whether the schema represents an Enum
+ */
+ public static boolean isEnum(Schema schema) {
+ return schema != null
+ && schema.parameters() != null
+ && schema.parameters().containsKey(LOGICAL_PARAMETER);
+ }
+
+ /**
+ * Returns whether a schema has an Enum symbol.
+ *
+ * @param schema the schema
+ * @param symbol the enum symbol
+ * @return whether the schema has the given Enum symbol
+ */
+ public static boolean hasEnumSymbol(Schema schema, String symbol) {
+ return schema != null
+ && schema.parameters() != null
+ && schema.parameters().containsKey(LOGICAL_PARAMETER)
+ && schema.parameters().containsKey(LOGICAL_PARAMETER + "." + symbol);
+ }
+
+ /**
+ * Convert a value from its logical format (Enum) to its encoded format.
+ *
+ * @param schema the schema
+ * @param value the logical value
+ * @return the encoded value
+ */
+ public static <T extends Enum<T>> String fromLogical(Schema schema, T value) {
+ if (!hasEnumSymbol(schema, value.name())) {
+ throw new DataException(
+ "Requested conversion of Enum object but the schema does not match.");
+ }
+ return value.name();
+ }
+
+ /**
+ * Convert a value from its encoded format to its logical format (Enum).
+ *
+ * @param schema the schema
+ * @param cls the class of the logical value
+ * @param symbol the enum symbol
+ * @return the logical value
+ */
+ public static <T extends Enum<T>> T toLogical(Schema schema, Class<T> cls,
+ String symbol) {
+ if (!hasEnumSymbol(schema, symbol)) {
+ throw new DataException(
+ "Requested conversion of Enum object but the schema does not match.");
+ }
+ return java.lang.Enum.valueOf(cls, symbol);
+ }
+
+ /**
+ * Convert a value from its encoded format to its ordinal.
+ *
+ * @param schema the schema
+ * @param symbol the enum symbol
+ * @return the ordinal
+ */
+ public static int toOrdinal(Schema schema, String symbol) {
+ if (!hasEnumSymbol(schema, symbol)) {
+ throw new DataException(
+ "Requested conversion of Enum object but the schema does not match.");
+ }
+ return Integer.parseInt(schema.parameters().get(LOGICAL_PARAMETER + "." + symbol));
+ }
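+
+ // Illustrative sketch (not part of the original patch): round-tripping a
+ // hypothetical Java enum through the conversions above.
+ //
+ //   enum Color { RED, GREEN }
+ //   Schema schema = ConnectEnum.builder("com.acme.Color",
+ //       java.util.Arrays.asList("RED", "GREEN")).build();
+ //   String encoded = ConnectEnum.fromLogical(schema, Color.GREEN); // "GREEN"
+ //   Color decoded = ConnectEnum.toLogical(schema, Color.class, encoded);
+ //   int ordinal = ConnectEnum.toOrdinal(schema, encoded);          // 1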
+}
diff --git a/schema-converter/src/main/java/io/confluent/connect/schema/ConnectUnion.java b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectUnion.java
new file mode 100644
index 00000000000..7bf388d8e7e
--- /dev/null
+++ b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectUnion.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2021 Confluent Inc.
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.confluent.connect.schema;
+
+import org.apache.kafka.connect.data.ConnectSchema;
+import org.apache.kafka.connect.data.Field;
+import org.apache.kafka.connect.data.Schema;
+import org.apache.kafka.connect.data.SchemaBuilder;
+import org.apache.kafka.connect.data.Struct;
+import org.apache.kafka.connect.errors.DataException;
+
+public class ConnectUnion {
+
+ public static final String LOGICAL_PARAMETER = "org.apache.kafka.connect.data.Union";
+
+ /**
+ * Returns a SchemaBuilder for a Union.
+ *
+ * @param annotation an arbitrary annotation to be associated with the union
+ * @return a SchemaBuilder
+ */
+ public static SchemaBuilder builder(String annotation) {
+ return SchemaBuilder.struct().parameter(LOGICAL_PARAMETER, annotation);
+ }
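+
+ // Illustrative sketch (not part of the original patch): a hypothetical union
+ // of a string and an int32, modeled as a struct with one optional field per
+ // member type so that all but the populated member can stay null.
+ //
+ //   Schema union = ConnectUnion.builder("com.acme.StringOrInt")
+ //       .field("string_member", Schema.OPTIONAL_STRING_SCHEMA)
+ //       .field("int_member", Schema.OPTIONAL_INT32_SCHEMA)
+ //       .build();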
+
+ /**
+ * Returns whether a schema represents a Union.
+ *
+ * @param schema the schema
+ * @return whether the schema represents a Union
+ */
+ public static boolean isUnion(Schema schema) {
+ return schema != null
+ && schema.parameters() != null
+ && schema.parameters().containsKey(LOGICAL_PARAMETER);
+ }
+
+ /**
+ * Convert a value from its logical format (Union) to its encoded format.
+ *
+ * @param schema the schema
+ * @param value the logical value
+ * @return the encoded value
+ */
+ public static Object fromLogical(Schema schema, Struct value) {
+ if (!isUnion(schema)) {
+ throw new DataException(
+ "Requested conversion of Union object but the schema does not match.");
+ }
+ for (Field field : schema.fields()) {
+ Object object = value.get(field);
+ if (object != null) {
+ return object;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Convert a value from its encoded format to its logical format (Union).
+ * The value is associated with the field whose schema matches the given value.
+ *
+ * @param schema the schema
+ * @param value the encoded value
+ * @return the logical value
+ */
+ public static Struct toLogical(Schema schema, Object value) {
+ if (!isUnion(schema)) {
+ throw new DataException(
+ "Requested conversion of Union object but the schema does not match.");
+ }
+ Struct struct = new Struct(schema);
+ for (Field field : schema.fields()) {
+ if (validate(field.schema(), value)) {
+ struct.put(field, value);
+ break;
+ }
+ }
+ return struct;
+ }
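+
+ // Illustrative sketch (not part of the original patch), continuing the
+ // hypothetical union above: toLogical stores the value in the first field
+ // whose schema accepts it, and fromLogical returns the first non-null member.
+ //
+ //   Struct holder = ConnectUnion.toLogical(union, 42); // fills "int_member"
+ //   Object back = ConnectUnion.fromLogical(union, holder); // 42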
+
+ private static boolean validate(Schema schema, Object value) {
+ try {
+ ConnectSchema.validateValue(schema, value);
+ } catch (DataException e) {
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Convert a value from its encoded format to its logical format (Union).
+ * The value is associated with the field with the given field name.
+ *
+ * @param schema the schema
+ * @param fieldName the field name
+ * @param value the encoded value
+ * @return the logical value
+ */
+ public static Struct toLogicalUsingName(Schema schema, String fieldName, Object value) {
+ if (!isUnion(schema)) {
+ throw new DataException(
+ "Requested conversion of Union object but the schema does not match.");
+ }
+ Struct struct = new Struct(schema);
+ for (Field field : schema.fields()) {
+ if (field.name().equals(fieldName)) {
+ struct.put(field, value);
+ break;
+ }
+ }
+ return struct;
+ }
+}
diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml
index 66a7e8cefac..c2949863a59 100644
--- a/schema-registry-console-scripts/pom.xml
+++ b/schema-registry-console-scripts/pom.xml
@@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-registry-parent</artifactId>
- <version>7.1.12-0</version>
+ <version>7.2.10-0</version>
<groupId>io.confluent</groupId>
diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml
index c6bcdb311aa..190032c68af 100644
--- a/schema-serializer/pom.xml
+++ b/schema-serializer/pom.xml
@@ -7,7 +7,7 @@
<groupId>io.confluent</groupId>
<artifactId>kafka-schema-registry-parent</artifactId>
- <version>7.1.12-0</version>
+ <version>7.2.10-0</version>