diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000000..f9105c8113f --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,2 @@ +# See go/codeowners - automatically generated for confluentinc/schema-registry: +* @confluentinc/data-governance diff --git a/avro-converter/pom.xml b/avro-converter/pom.xml index 4768a20823f..3ca6f6c74e2 100644 --- a/avro-converter/pom.xml +++ b/avro-converter/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 diff --git a/avro-data/pom.xml b/avro-data/pom.xml index 82c660fb7bd..8d5c4270f6a 100644 --- a/avro-data/pom.xml +++ b/avro-data/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 @@ -42,6 +42,11 @@ kafka-avro-serializer ${io.confluent.schema-registry.version} + + io.confluent + kafka-schema-converter + ${io.confluent.schema-registry.version} + com.google.guava diff --git a/avro-data/src/main/java/io/confluent/connect/avro/AvroData.java b/avro-data/src/main/java/io/confluent/connect/avro/AvroData.java index 687dc8376f3..caf6a0575a5 100644 --- a/avro-data/src/main/java/io/confluent/connect/avro/AvroData.java +++ b/avro-data/src/main/java/io/confluent/connect/avro/AvroData.java @@ -22,12 +22,15 @@ import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.NullNode; import com.fasterxml.jackson.databind.node.ObjectNode; +import io.confluent.connect.schema.ConnectEnum; +import io.confluent.connect.schema.ConnectUnion; import io.confluent.kafka.schemaregistry.utils.BoundedConcurrentHashMap; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import org.apache.avro.AvroTypeException; import org.apache.avro.JsonProperties; import org.apache.avro.generic.GenericData; +import org.apache.avro.generic.GenericData.EnumSymbol; import org.apache.avro.generic.GenericEnumSymbol; import org.apache.avro.generic.GenericFixed; import org.apache.avro.generic.GenericRecord; @@ -111,6 +114,12 @@ public class AvroData { public static final String AVRO_TYPE_ANYTHING = NAMESPACE + ".Anything"; + public static final String GENERALIZED_TYPE_UNION = ConnectUnion.LOGICAL_PARAMETER; + public static final String GENERALIZED_TYPE_ENUM = ConnectEnum.LOGICAL_PARAMETER; + public static final String GENERALIZED_TYPE_UNION_PREFIX = "connect_union_"; + public static final String GENERALIZED_TYPE_UNION_FIELD_PREFIX = + GENERALIZED_TYPE_UNION_PREFIX + "field_"; + private static final Map NON_AVRO_TYPES_BY_TYPE_CODE = new HashMap<>(); static { @@ -315,9 +324,12 @@ public Object convert(Schema schema, Object value) { }); } + private int unionIndex = 0; + private Map fromConnectSchemaCache; private Map toConnectSchemaCache; private boolean connectMetaData; + private boolean generalizedSumTypeSupport; private boolean enhancedSchemaSupport; private boolean scrubInvalidNames; private boolean discardTypeDocDefault; @@ -333,6 +345,7 @@ public AvroData(AvroDataConfig avroDataConfig) { fromConnectSchemaCache = new BoundedConcurrentHashMap<>(avroDataConfig.getSchemasCacheSize()); toConnectSchemaCache = new BoundedConcurrentHashMap<>(avroDataConfig.getSchemasCacheSize()); this.connectMetaData = avroDataConfig.isConnectMetaData(); + this.generalizedSumTypeSupport = avroDataConfig.isGeneralizedSumTypeSupport(); this.enhancedSchemaSupport = avroDataConfig.isEnhancedAvroSchemaSupport(); this.scrubInvalidNames = avroDataConfig.isScrubInvalidNames(); this.discardTypeDocDefault = avroDataConfig.isDiscardTypeDocDefault(); @@ -348,8 +361,7 @@ public 
Object fromConnectData(Schema schema, Object value) { } protected Object fromConnectData(Schema schema, org.apache.avro.Schema avroSchema, Object value) { - return fromConnectData(schema, avroSchema, value, true, false, - enhancedSchemaSupport, scrubInvalidNames, allowOptionalMapKey); + return fromConnectData(schema, avroSchema, value, true, false); } /** @@ -374,10 +386,7 @@ private Object fromConnectData( org.apache.avro.Schema avroSchema, Object logicalValue, boolean requireContainer, - boolean requireSchemalessContainerNull, - boolean enhancedSchemaSupport, - boolean scrubInvalidNames, - boolean allowOptionalMapKey + boolean requireSchemalessContainerNull ) { Schema.Type schemaType = schema != null ? schema.type() @@ -462,17 +471,13 @@ private Object fromConnectData( maybeWrapSchemaless(schema, value, ANYTHING_SCHEMA_BOOLEAN_FIELD), requireContainer); case STRING: - if (enhancedSchemaSupport && schema != null && schema.parameters() != null + if (generalizedSumTypeSupport && ConnectEnum.isEnum(schema)) { + String enumSchemaName = schema.parameters().get(GENERALIZED_TYPE_ENUM); + value = enumSymbol(avroSchema, value, enumSchemaName); + } else if (enhancedSchemaSupport && schema != null && schema.parameters() != null && schema.parameters().containsKey(AVRO_TYPE_ENUM)) { String enumSchemaName = schema.parameters().get(AVRO_TYPE_ENUM); - org.apache.avro.Schema enumSchema; - if (avroSchema.getType() == org.apache.avro.Schema.Type.UNION) { - int enumIndex = avroSchema.getIndexNamed(enumSchemaName); - enumSchema = avroSchema.getTypes().get(enumIndex); - } else { - enumSchema = avroSchema; - } - value = new GenericData.EnumSymbol(enumSchema, (String) value); + value = enumSymbol(avroSchema, value, enumSchemaName); } else { String stringValue = (String) value; // Check for correct type } @@ -488,13 +493,15 @@ private Object fromConnectData( int size = Integer.parseInt(schema.parameters().get(CONNECT_AVRO_FIXED_SIZE_PROP)); org.apache.avro.Schema fixedSchema = null; if (avroSchema.getType() == org.apache.avro.Schema.Type.UNION) { + int index = 0; for (org.apache.avro.Schema memberSchema : avroSchema.getTypes()) { if (memberSchema.getType() == org.apache.avro.Schema.Type.FIXED && memberSchema.getFixedSize() == size - && unionMemberFieldName(memberSchema, enhancedSchemaSupport) - .equals(unionMemberFieldName(schema, enhancedSchemaSupport))) { + && unionMemberFieldName(memberSchema, index) + .equals(unionMemberFieldName(schema, index))) { fixedSchema = memberSchema; } + index++; } if (fixedSchema == null) { throw new DataException("Fixed size " + size + " not in union " + avroSchema); @@ -526,10 +533,7 @@ && unionMemberFieldName(memberSchema, enhancedSchemaSupport) elementAvroSchema, val, false, - true, - enhancedSchemaSupport, - scrubInvalidNames, - allowOptionalMapKey + true ) ); } @@ -553,8 +557,7 @@ && unionMemberFieldName(memberSchema, enhancedSchemaSupport) // Key is a String, no conversion needed Object convertedValue = fromConnectData(schema.valueSchema(), underlyingAvroSchema.getValueType(), - entry.getValue(), false, true, enhancedSchemaSupport, scrubInvalidNames, - allowOptionalMapKey + entry.getValue(), false, true ); converted.put((String) entry.getKey(), convertedValue); } @@ -570,13 +573,10 @@ && unionMemberFieldName(memberSchema, enhancedSchemaSupport) org.apache.avro.Schema avroValueSchema = elementSchema.getField(VALUE_FIELD).schema(); for (Map.Entry entry : map.entrySet()) { Object keyConverted = fromConnectData(schema != null ? 
schema.keySchema() : null, - avroKeySchema, entry.getKey(), false, true, - enhancedSchemaSupport, scrubInvalidNames, - allowOptionalMapKey); + avroKeySchema, entry.getKey(), false, true); Object valueConverted = fromConnectData(schema != null ? schema.valueSchema() : null, avroValueSchema, entry.getValue(), false, - true, enhancedSchemaSupport, - scrubInvalidNames, allowOptionalMapKey); + true); converted.add( new GenericRecordBuilder(elementSchema) .set(KEY_FIELD, keyConverted) @@ -597,7 +597,7 @@ avroSchema, maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), } //This handles the inverting of a union which is held as a struct, where each field is // one of the union types. - if (AVRO_TYPE_UNION.equals(schema.name())) { + if (isUnionSchema(schema)) { for (Field field : schema.fields()) { Object object = struct.get(field); if (object != null) { @@ -606,15 +606,11 @@ avroSchema, maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), avroSchema, object, false, - true, - enhancedSchemaSupport, - scrubInvalidNames, - allowOptionalMapKey + true ); } } - return fromConnectData(schema, avroSchema, null, false, true, - enhancedSchemaSupport, scrubInvalidNames, allowOptionalMapKey); + return fromConnectData(schema, avroSchema, null, false, true); } else { org.apache.avro.Schema underlyingAvroSchema = avroSchemaForUnderlyingTypeIfOptional( schema, avroSchema, scrubInvalidNames); @@ -625,8 +621,7 @@ avroSchema, maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), org.apache.avro.Schema fieldAvroSchema = theField.schema(); convertedBuilder.set( fieldName, - fromConnectData(field.schema(), fieldAvroSchema, struct.get(field), false, - true, enhancedSchemaSupport, scrubInvalidNames, allowOptionalMapKey) + fromConnectData(field.schema(), fieldAvroSchema, struct.get(field), false, true) ); } return convertedBuilder.build(); @@ -641,6 +636,18 @@ avroSchema, maybeWrapSchemaless(schema, converted, ANYTHING_SCHEMA_MAP_FIELD), } } + private EnumSymbol enumSymbol( + org.apache.avro.Schema avroSchema, Object value, String enumSchemaName) { + org.apache.avro.Schema enumSchema; + if (avroSchema.getType() == org.apache.avro.Schema.Type.UNION) { + int enumIndex = avroSchema.getIndexNamed(enumSchemaName); + enumSchema = avroSchema.getTypes().get(enumIndex); + } else { + enumSchema = avroSchema; + } + return new GenericData.EnumSymbol(enumSchema, (String) value); + } + /** * MapEntry types in connect Schemas are represented as Arrays of record. * Return the array type from the union instead of the union itself. @@ -785,7 +792,7 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, return ANYTHING_SCHEMA; } - if (!AVRO_TYPE_UNION.equals(schema.name()) && !schema.isOptional()) { + if (!isUnionSchema(schema) && !schema.isOptional()) { org.apache.avro.Schema cached = fromConnectContext.schemaMap.get(schema); if (cached != null) { return cached; @@ -821,25 +828,28 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, baseSchema = org.apache.avro.SchemaBuilder.builder().booleanType(); break; case STRING: - if (enhancedSchemaSupport && schema.parameters() != null - && schema.parameters().containsKey(AVRO_TYPE_ENUM)) { + if ((generalizedSumTypeSupport || enhancedSchemaSupport) + && schema.parameters() != null + && (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM) + || schema.parameters().containsKey(AVRO_TYPE_ENUM))) { + String paramName = generalizedSumTypeSupport ? 
GENERALIZED_TYPE_ENUM : AVRO_TYPE_ENUM; List symbols = new ArrayList<>(); for (Map.Entry entry : schema.parameters().entrySet()) { - if (entry.getKey().startsWith(AVRO_TYPE_ENUM + ".")) { - symbols.add(entry.getValue()); + if (entry.getKey().startsWith(paramName + ".")) { + String enumSymbol = entry.getKey().substring(paramName.length() + 1); + symbols.add(enumSymbol); } } Pair names = getNameOrDefault(fromConnectContext, schema.name()); String name = names.getValue(); + String enumName = schema.parameters().get(paramName); String enumDoc = schema.parameters().get(AVRO_ENUM_DOC_PREFIX_PROP + name); String enumDefault = schema.parameters().get(AVRO_ENUM_DEFAULT_PREFIX_PROP + name); baseSchema = discardTypeDocDefault - ? org.apache.avro.SchemaBuilder.builder().enumeration( - schema.parameters().get(AVRO_TYPE_ENUM)) + ? org.apache.avro.SchemaBuilder.builder().enumeration(enumName) .doc(schema.parameters().get(CONNECT_ENUM_DOC_PROP)) .symbols(symbols.toArray(new String[symbols.size()])) - : org.apache.avro.SchemaBuilder.builder().enumeration( - schema.parameters().get(AVRO_TYPE_ENUM)) + : org.apache.avro.SchemaBuilder.builder().enumeration(enumName) .doc(enumDoc) .defaultSymbol(enumDefault) .symbols(symbols.toArray(new String[symbols.size()])); @@ -919,7 +929,7 @@ public org.apache.avro.Schema fromConnectSchema(Schema schema, } break; case STRUCT: - if (AVRO_TYPE_UNION.equals(schema.name())) { + if (isUnionSchema(schema)) { List unionSchemas = new ArrayList<>(); if (schema.isOptional()) { unionSchemas.add(org.apache.avro.SchemaBuilder.builder().nullType()); @@ -1312,7 +1322,7 @@ private JsonNode defaultValueFromConnect(Schema schema, Object value) { return array; } case STRUCT: { - boolean isUnion = AVRO_TYPE_UNION.equals(schema.name()); + boolean isUnion = isUnionSchema(schema); ObjectNode node = JsonNodeFactory.instance.objectNode(); Struct struct = ((Struct) defaultVal); for (Field field : (schema.fields())) { @@ -1618,23 +1628,24 @@ private Object toConnectData(Schema schema, Object value, ToConnectContext toCon case STRUCT: { // Special case support for union types - if (schema.name() != null && schema.name().equals(AVRO_TYPE_UNION)) { + if (isUnionSchema(schema)) { Schema valueRecordSchema = null; if (value instanceof IndexedRecord) { IndexedRecord valueRecord = ((IndexedRecord) value); valueRecordSchema = toConnectSchemaWithCycles( valueRecord.getSchema(), true, null, null, toConnectContext); } + int index = 0; for (Field field : schema.fields()) { Schema fieldSchema = field.schema(); - if (isInstanceOfAvroSchemaTypeForSimpleSchema( - fieldSchema, value, enhancedSchemaSupport) + if (isInstanceOfAvroSchemaTypeForSimpleSchema(fieldSchema, value, index) || (valueRecordSchema != null && schemaEquals(valueRecordSchema, fieldSchema))) { converted = new Struct(schema).put( - unionMemberFieldName(fieldSchema, enhancedSchemaSupport), + unionMemberFieldName(fieldSchema, index), toConnectData(fieldSchema, value, toConnectContext)); break; } + index++; } if (converted == null) { throw new DataException("Did not find matching union field for data"); @@ -1890,9 +1901,16 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, schema.getEnumDefault()); } } - builder.parameter(AVRO_TYPE_ENUM, schema.getFullName()); + String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : AVRO_TYPE_ENUM; + builder.parameter(paramName, schema.getFullName()); + int symbolIndex = 0; for (String enumSymbol : schema.getEnumSymbols()) { - builder.parameter(AVRO_TYPE_ENUM + "." 
+ enumSymbol, enumSymbol); + if (generalizedSumTypeSupport) { + builder.parameter(paramName + "." + enumSymbol, String.valueOf(symbolIndex)); + } else { + builder.parameter(paramName + "." + enumSymbol, enumSymbol); + } + symbolIndex++; } break; @@ -1907,13 +1925,20 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, } } } - builder = SchemaBuilder.struct().name(AVRO_TYPE_UNION); + String unionName = generalizedSumTypeSupport + ? GENERALIZED_TYPE_UNION_PREFIX + (unionIndex++) + : AVRO_TYPE_UNION; + builder = SchemaBuilder.struct().name(unionName); + if (generalizedSumTypeSupport) { + builder.parameter(GENERALIZED_TYPE_UNION, unionName); + } Set fieldNames = new HashSet<>(); + int fieldIndex = 0; for (org.apache.avro.Schema memberSchema : schema.getTypes()) { if (memberSchema.getType() == org.apache.avro.Schema.Type.NULL) { builder.optional(); } else { - String fieldName = unionMemberFieldName(memberSchema, enhancedSchemaSupport); + String fieldName = unionMemberFieldName(memberSchema, fieldIndex); if (fieldNames.contains(fieldName)) { throw new DataException("Multiple union schemas map to the Connect union field name"); } @@ -1923,6 +1948,7 @@ private Schema toConnectSchema(org.apache.avro.Schema schema, toConnectSchemaWithCycles(memberSchema, true, null, null, toConnectContext) ); } + fieldIndex++; } break; } @@ -2204,7 +2230,7 @@ private Object defaultValueFromAvroWithoutLogical(Schema schema, return null; } else { return defaultValueFromAvro( - schema.field(unionMemberFieldName(memberAvroSchema, enhancedSchemaSupport)).schema(), + schema.field(unionMemberFieldName(memberAvroSchema, 0)).schema(), memberAvroSchema, value, toConnectContext); @@ -2218,8 +2244,10 @@ private Object defaultValueFromAvroWithoutLogical(Schema schema, } - private String unionMemberFieldName(org.apache.avro.Schema schema, - boolean enhancedSchemaSupport) { + private String unionMemberFieldName(org.apache.avro.Schema schema, int index) { + if (generalizedSumTypeSupport) { + return GENERALIZED_TYPE_UNION_FIELD_PREFIX + index; + } if (schema.getType() == org.apache.avro.Schema.Type.RECORD || schema.getType() == org.apache.avro.Schema.Type.ENUM || schema.getType() == org.apache.avro.Schema.Type.FIXED) { @@ -2232,7 +2260,10 @@ private String unionMemberFieldName(org.apache.avro.Schema schema, return schema.getType().getName(); } - private String unionMemberFieldName(Schema schema, boolean enhancedSchemaSupport) { + private String unionMemberFieldName(Schema schema, int index) { + if (generalizedSumTypeSupport) { + return GENERALIZED_TYPE_UNION_FIELD_PREFIX + index; + } if (schema.type() == Schema.Type.STRUCT || isEnumSchema(schema) || isFixedSchema(schema)) { if (enhancedSchemaSupport) { return scrubFullName(schema.name(), scrubInvalidNames); @@ -2243,10 +2274,15 @@ private String unionMemberFieldName(Schema schema, boolean enhancedSchemaSupport return CONNECT_TYPES_TO_AVRO_TYPES.get(schema.type()).getName(); } + private static boolean isUnionSchema(Schema schema) { + return AVRO_TYPE_UNION.equals(schema.name()) || ConnectUnion.isUnion(schema); + } + private static boolean isEnumSchema(Schema schema) { return schema.type() == Schema.Type.STRING && schema.parameters() != null - && schema.parameters().containsKey(AVRO_TYPE_ENUM); + && (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM) + || schema.parameters().containsKey(AVRO_TYPE_ENUM)); } private static boolean isFixedSchema(Schema schema) { @@ -2258,9 +2294,10 @@ private static boolean isFixedSchema(Schema schema) { private boolean 
isInstanceOfAvroSchemaTypeForSimpleSchema(Schema fieldSchema, Object value, - boolean enhancedSchemaSupport) { + int index) { if (isEnumSchema(fieldSchema)) { - String enumSchemaName = fieldSchema.parameters().get(AVRO_TYPE_ENUM); + String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : AVRO_TYPE_ENUM; + String enumSchemaName = fieldSchema.parameters().get(paramName); if (value instanceof GenericData.EnumSymbol) { return ((GenericData.EnumSymbol) value).getSchema().getFullName().equals(enumSchemaName); } else { @@ -2276,7 +2313,7 @@ private boolean isInstanceOfAvroSchemaTypeForSimpleSchema(Schema fieldSchema, if (isFixedSchema(fieldSchema)) { if (fixedValueSizeMatch(fieldSchema, value, Integer.parseInt(fieldSchema.parameters().get(CONNECT_AVRO_FIXED_SIZE_PROP)), - enhancedSchemaSupport)) { + index)) { return true; } } else { @@ -2293,14 +2330,14 @@ private boolean isInstanceOfAvroSchemaTypeForSimpleSchema(Schema fieldSchema, private boolean fixedValueSizeMatch(Schema fieldSchema, Object value, int size, - boolean enhancedSchemaSupport) { + int index) { if (value instanceof byte[]) { return ((byte[]) value).length == size; } else if (value instanceof ByteBuffer) { return ((ByteBuffer)value).remaining() == size; } else if (value instanceof GenericFixed) { - return unionMemberFieldName(((GenericFixed) value).getSchema(), enhancedSchemaSupport) - .equals(unionMemberFieldName(fieldSchema, enhancedSchemaSupport)); + return unionMemberFieldName(((GenericFixed) value).getSchema(), index) + .equals(unionMemberFieldName(fieldSchema, index)); } else { throw new DataException("Invalid class for fixed, expecting GenericFixed, byte[]" + " or ByteBuffer but found " + value.getClass()); diff --git a/avro-data/src/main/java/io/confluent/connect/avro/AvroDataConfig.java b/avro-data/src/main/java/io/confluent/connect/avro/AvroDataConfig.java index 5edd9b198e3..8451019e394 100644 --- a/avro-data/src/main/java/io/confluent/connect/avro/AvroDataConfig.java +++ b/avro-data/src/main/java/io/confluent/connect/avro/AvroDataConfig.java @@ -25,6 +25,12 @@ public class AvroDataConfig extends AbstractConfig { + public static final String GENERALIZED_SUM_TYPE_SUPPORT_CONFIG = "generalized.sum.type.support"; + public static final boolean GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT = false; + public static final String GENERALIZED_SUM_TYPE_SUPPORT_DOC = + "Toggle for enabling/disabling generalized sum type support: interoperability of enum/union " + + "with other schema formats"; + public static final String ENHANCED_AVRO_SCHEMA_SUPPORT_CONFIG = "enhanced.avro.schema.support"; public static final boolean ENHANCED_AVRO_SCHEMA_SUPPORT_DEFAULT = false; public static final String ENHANCED_AVRO_SCHEMA_SUPPORT_DOC = @@ -61,6 +67,11 @@ public class AvroDataConfig extends AbstractConfig { public static ConfigDef baseConfigDef() { return new ConfigDef() + .define(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, + ConfigDef.Type.BOOLEAN, + GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT, + ConfigDef.Importance.MEDIUM, + GENERALIZED_SUM_TYPE_SUPPORT_DOC) .define(ENHANCED_AVRO_SCHEMA_SUPPORT_CONFIG, ConfigDef.Type.BOOLEAN, ENHANCED_AVRO_SCHEMA_SUPPORT_DEFAULT, @@ -88,6 +99,10 @@ public AvroDataConfig(Map props) { super(baseConfigDef(), props); } + public boolean isGeneralizedSumTypeSupport() { + return this.getBoolean(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG); + } + public boolean isEnhancedAvroSchemaSupport() { return this.getBoolean(ENHANCED_AVRO_SCHEMA_SUPPORT_CONFIG); } diff --git 
a/avro-data/src/test/java/io/confluent/connect/avro/AvroDataTest.java b/avro-data/src/test/java/io/confluent/connect/avro/AvroDataTest.java index c82b40fcea8..10afde264cf 100644 --- a/avro-data/src/test/java/io/confluent/connect/avro/AvroDataTest.java +++ b/avro-data/src/test/java/io/confluent/connect/avro/AvroDataTest.java @@ -26,6 +26,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableMap; import io.confluent.kafka.schemaregistry.avro.AvroSchema; +import java.util.LinkedHashMap; import org.apache.avro.LogicalTypes; import org.apache.avro.generic.GenericContainer; import org.apache.avro.generic.GenericData; @@ -211,6 +212,37 @@ public void testFromConnectEnum() { avroData); } + @Test + public void testFromConnectEnumWithGeneralizedSumTypeSupport() { + avroData = new AvroData(new AvroDataConfig.Builder() + .with(AvroDataConfig.SCHEMAS_CACHE_SIZE_CONFIG, 2) + .with(AvroDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, true) + .build()); + // Enums are just converted to strings, original enum is preserved in parameters + org.apache.avro.Schema avroSchema = org.apache.avro.SchemaBuilder.builder() + .enumeration("TestEnum") + .doc("some documentation") + .symbols("foo", "bar", "baz"); + Map params = new LinkedHashMap<>(); + params.put("io.confluent.connect.avro.enum.doc.TestEnum", "some documentation"); + params.put("org.apache.kafka.connect.data.Enum", "TestEnum"); + params.put("org.apache.kafka.connect.data.Enum.foo", "0"); + params.put("org.apache.kafka.connect.data.Enum.bar", "1"); + params.put("org.apache.kafka.connect.data.Enum.baz", "2"); + avroSchema.addProp("connect.parameters", params); + avroSchema.addProp("connect.name", "TestEnum"); + SchemaBuilder builder = SchemaBuilder.string().name("TestEnum"); + builder.parameter(AVRO_ENUM_DOC_PREFIX_PROP + "TestEnum", "some documentation"); + builder.parameter(GENERALIZED_TYPE_ENUM, "TestEnum"); + int i = 0; + for(String enumSymbol : new String[]{"foo", "bar", "baz"}) { + builder.parameter(GENERALIZED_TYPE_ENUM+"."+enumSymbol, String.valueOf(i++)); + } + + checkNonRecordConversion(avroSchema, new GenericData.EnumSymbol(avroSchema, "bar"), + builder.build(), "bar", avroData); + } + @Test public void testFromConnectMapWithStringKey() { final Schema schema = SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA); @@ -314,7 +346,54 @@ public void testFromConnectFixedUnion() { assertEquals(2, genericData.resolveUnion(unionSchema, avroData.fromConnectData(union, unionSameOther))); } - + + @Test + public void testFromConnectUnionWithGeneralizedSumTypeSupport() { + avroData = new AvroData(new AvroDataConfig.Builder() + .with(AvroDataConfig.SCHEMAS_CACHE_SIZE_CONFIG, 2) + .with(AvroDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, true) + .build()); + // Make sure we handle primitive types and named types properly by using a variety of types + org.apache.avro.Schema avroRecordSchema1 = org.apache.avro.SchemaBuilder.builder() + .record("Test1").fields().requiredInt("test").endRecord(); + // Add connect name + avroRecordSchema1.addProp("connect.name", "Test1"); + org.apache.avro.Schema avroRecordSchema2 = org.apache.avro.SchemaBuilder.builder() + .record("Test2").namespace("io.confluent").fields().requiredInt("test").endRecord(); + // Add connect name + avroRecordSchema2.addProp("connect.name", "io.confluent.Test2"); + org.apache.avro.Schema avroSchema = org.apache.avro.SchemaBuilder.builder().unionOf() + .intType().and() + .stringType().and() + .type(avroRecordSchema1).and() + 
.type(avroRecordSchema2) + .endUnion(); + + Schema recordSchema1 = SchemaBuilder.struct().name("Test1") + .field("test", Schema.INT32_SCHEMA).optional().build(); + Schema recordSchema2 = SchemaBuilder.struct().name("io.confluent.Test2") + .field("test", Schema.INT32_SCHEMA).optional().build(); + Schema schema = SchemaBuilder.struct() + .name("connect_union_0") + .parameter("org.apache.kafka.connect.data.Union", "connect_union_0") + .field("connect_union_field_0", Schema.OPTIONAL_INT32_SCHEMA) + .field("connect_union_field_1", Schema.OPTIONAL_STRING_SCHEMA) + .field("connect_union_field_2", recordSchema1) + .field("connect_union_field_3", recordSchema2) + .build(); + assertEquals(12, + avroData.fromConnectData(schema, new Struct(schema).put("connect_union_field_0", 12))); + assertEquals("teststring", + avroData.fromConnectData(schema, new Struct(schema).put("connect_union_field_1", "teststring"))); + + Struct schema1Test = new Struct(schema).put("connect_union_field_2", new Struct(recordSchema1).put("test", 12)); + GenericRecord record1Test = new GenericRecordBuilder(avroRecordSchema1).set("test", 12).build(); + Struct schema2Test = new Struct(schema).put("connect_union_field_3", new Struct(recordSchema2).put("test", 12)); + GenericRecord record2Test = new GenericRecordBuilder(avroRecordSchema2).set("test", 12).build(); + assertEquals(record1Test, avroData.fromConnectData(schema, schema1Test)); + assertEquals(record2Test, avroData.fromConnectData(schema, schema2Test)); + } + @Test public void testFromConnectWithInvalidName() { AvroDataConfig avroDataConfig = new AvroDataConfig.Builder() @@ -2123,6 +2202,51 @@ public void testToConnectUnionWithEnhanced() { avroData.toConnectData(avroSchema, record2Test)); } + @Test + public void testToConnectUnionWithGeneralizedSumTypeSupport() { + avroData = new AvroData(new AvroDataConfig.Builder() + .with(AvroDataConfig.SCHEMAS_CACHE_SIZE_CONFIG, 2) + .with(AvroDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, true) + .build()); + // Make sure we handle primitive types and named types properly by using a variety of types + org.apache.avro.Schema avroRecordSchema1 = org.apache.avro.SchemaBuilder.builder() + .record("Test1").fields().requiredInt("test").endRecord(); + org.apache.avro.Schema avroRecordSchema2 = org.apache.avro.SchemaBuilder.builder() + .record("Test2").namespace("io.confluent").fields().requiredInt("test").endRecord(); + org.apache.avro.Schema avroSchema = org.apache.avro.SchemaBuilder.builder().unionOf() + .intType().and() + .stringType().and() + .type(avroRecordSchema1).and() + .type(avroRecordSchema2) + .endUnion(); + + Schema recordSchema1 = SchemaBuilder.struct().name("Test1") + .field("test", Schema.INT32_SCHEMA).optional().build(); + Schema recordSchema2 = SchemaBuilder.struct().name("io.confluent.Test2") + .field("test", Schema.INT32_SCHEMA).optional().build(); + Schema schema = SchemaBuilder.struct() + .name("connect_union_0") + .parameter("org.apache.kafka.connect.data.Union", "connect_union_0") + .field("connect_union_field_0", Schema.OPTIONAL_INT32_SCHEMA) + .field("connect_union_field_1", Schema.OPTIONAL_STRING_SCHEMA) + .field("connect_union_field_2", recordSchema1) + .field("connect_union_field_3", recordSchema2) + .build(); + assertEquals(new SchemaAndValue(schema, new Struct(schema).put("connect_union_field_0", 12)), + avroData.toConnectData(avroSchema, 12)); + assertEquals(new SchemaAndValue(schema, new Struct(schema).put("connect_union_field_1", "teststring")), + avroData.toConnectData(avroSchema, "teststring")); + + Struct 
schema1Test = new Struct(schema).put("connect_union_field_2", new Struct(recordSchema1).put("test", 12)); + GenericRecord record1Test = new GenericRecordBuilder(avroRecordSchema1).set("test", 12).build(); + Struct schema2Test = new Struct(schema).put("connect_union_field_3", new Struct(recordSchema2).put("test", 12)); + GenericRecord record2Test = new GenericRecordBuilder(avroRecordSchema2).set("test", 12).build(); + assertEquals(new SchemaAndValue(schema, schema1Test), + avroData.toConnectData(avroSchema, record1Test)); + assertEquals(new SchemaAndValue(schema, schema2Test), + avroData.toConnectData(avroSchema, record2Test)); + } + @Test(expected = DataException.class) public void testToConnectUnionRecordConflict() { // If the records have the same name but are in different namespaces, we don't support this @@ -2221,6 +2345,31 @@ public void testToConnectEnumWithNoDoc() { avroData.toConnectData(avroSchema, new GenericData.EnumSymbol(avroSchema, "bar"))); } + @Test + public void testToConnectEnumWithGeneralizedSumTypeSupport() { + avroData = new AvroData(new AvroDataConfig.Builder() + .with(AvroDataConfig.SCHEMAS_CACHE_SIZE_CONFIG, 2) + .with(AvroDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, true) + .build()); + // Enums are just converted to strings, original enum is preserved in parameters + org.apache.avro.Schema avroSchema = org.apache.avro.SchemaBuilder.builder() + .enumeration("TestEnum") + .doc("some documentation") + .symbols("foo", "bar", "baz"); + SchemaBuilder builder = SchemaBuilder.string().name("TestEnum"); + builder.parameter(AVRO_ENUM_DOC_PREFIX_PROP + "TestEnum", "some documentation"); + builder.parameter(GENERALIZED_TYPE_ENUM, "TestEnum"); + int i = 0; + for(String enumSymbol : new String[]{"foo", "bar", "baz"}) { + builder.parameter(GENERALIZED_TYPE_ENUM+"."+enumSymbol, String.valueOf(i++)); + } + + assertEquals(new SchemaAndValue(builder.build(), "bar"), + avroData.toConnectData(avroSchema, "bar")); + assertEquals(new SchemaAndValue(builder.build(), "bar"), + avroData.toConnectData(avroSchema, new GenericData.EnumSymbol(avroSchema, "bar"))); + } + @Test public void testToConnectOptionalPrimitiveWithConnectMetadata() { Schema schema = SchemaBuilder.string(). 
diff --git a/avro-serde/pom.xml b/avro-serde/pom.xml index 0c6c9f9add7..928bfac25bb 100644 --- a/avro-serde/pom.xml +++ b/avro-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 diff --git a/avro-serializer/pom.xml b/avro-serializer/pom.xml index ed89ef8a2e7..8369d4c123c 100644 --- a/avro-serializer/pom.xml +++ b/avro-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 @@ -60,16 +60,10 @@ org.slf4j slf4j-reload4j test - - - org.apache.logging.log4j - log4j-api - - - org.apache.logging.log4j - log4j-core - - + + + io.confluent + logredactor diff --git a/benchmark/pom.xml b/benchmark/pom.xml index d980c56956b..02737bb327b 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 kafka-schema-registry-benchmark diff --git a/client-console-scripts/pom.xml b/client-console-scripts/pom.xml index c7b5ad6888e..4f608ae384f 100644 --- a/client-console-scripts/pom.xml +++ b/client-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 diff --git a/client/pom.xml b/client/pom.xml index 1a18ca177cd..1f179ba02e7 100644 --- a/client/pom.xml +++ b/client/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/SchemaProvider.java b/client/src/main/java/io/confluent/kafka/schemaregistry/SchemaProvider.java index 5a5c7c56b27..ac50a4d882f 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/SchemaProvider.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/SchemaProvider.java @@ -54,12 +54,30 @@ default void configure(Map configs) { * @param isNew whether the schema is new * @return an optional parsed schema */ - Optional parseSchema(String schemaString, - List references, - boolean isNew); + default Optional parseSchema(String schemaString, + List references, + boolean isNew) { + try { + return Optional.of(parseSchemaOrElseThrow(schemaString, references, isNew)); + } catch (Exception e) { + return Optional.empty(); + } + } default Optional parseSchema(String schemaString, List references) { return parseSchema(schemaString, references, false); } + + /** + * Parses a string representing a schema. 
+   *
+   * @param schemaString the schema
+   * @param references a list of schema references
+   * @param isNew whether the schema is new
+   * @return a parsed schema or throw an error
+   */
+  ParsedSchema parseSchemaOrElseThrow(String schemaString,
+      List references,
+      boolean isNew);
 }
diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/avro/AvroSchemaProvider.java b/client/src/main/java/io/confluent/kafka/schemaregistry/avro/AvroSchemaProvider.java
index 381780956d0..05b49dd90e0 100644
--- a/client/src/main/java/io/confluent/kafka/schemaregistry/avro/AvroSchemaProvider.java
+++ b/client/src/main/java/io/confluent/kafka/schemaregistry/avro/AvroSchemaProvider.java
@@ -18,7 +18,6 @@
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
 import io.confluent.kafka.schemaregistry.AbstractSchemaProvider;
 import io.confluent.kafka.schemaregistry.ParsedSchema;
@@ -47,16 +46,15 @@ public String schemaType() {
   }
   @Override
-  public Optional parseSchema(String schemaString,
-      List references,
-      boolean isNew) {
+  public ParsedSchema parseSchemaOrElseThrow(String schemaString,
+      List references,
+      boolean isNew) {
     try {
-      return Optional.of(
-          new AvroSchema(schemaString, references, resolveReferences(references), null,
-          validateDefaults && isNew));
+      return new AvroSchema(schemaString, references, resolveReferences(references), null,
+          validateDefaults && isNew);
     } catch (Exception e) {
       log.error("Could not parse Avro schema", e);
-      return Optional.empty();
+      throw e;
     }
   }
 }
diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java
index 2646850c18a..80a2a2f3826 100644
--- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java
+++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/CachedSchemaRegistryClient.java
@@ -20,11 +20,10 @@
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import io.confluent.kafka.schemaregistry.utils.QualifiedSubject;
+import org.apache.kafka.common.config.SslConfigs;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.kafka.common.config.SslConfigs;
-
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
@@ -204,8 +203,8 @@ public CachedSchemaRegistryClient(
         .build();
     this.providers = providers != null && !providers.isEmpty()
-        ?
providers.stream().collect(Collectors.toMap(SchemaProvider::schemaType, p -> p)) + : Collections.singletonMap(AvroSchema.TYPE, new AvroSchemaProvider()); Map schemaProviderConfigs = new HashMap<>(); schemaProviderConfigs.put(SchemaProvider.SCHEMA_VERSION_FETCHER_CONFIG, this); for (SchemaProvider provider : this.providers.values()) { @@ -474,6 +473,18 @@ public SchemaMetadata getSchemaMetadata(String subject, int version) return new SchemaMetadata(id, version, schemaType, references, schema); } + @Override + public SchemaMetadata getSchemaMetadata(String subject, int version, boolean lookupDeletedSchema) + throws IOException, RestClientException { + io.confluent.kafka.schemaregistry.client.rest.entities.Schema response + = restService.getVersion(subject, version, lookupDeletedSchema); + int id = response.getId(); + String schemaType = response.getSchemaType(); + String schema = response.getSchema(); + List references = response.getReferences(); + return new SchemaMetadata(id, version, schemaType, references, schema); + } + @Override public SchemaMetadata getLatestSchemaMetadata(String subject) throws IOException, RestClientException { @@ -522,6 +533,13 @@ public List getAllVersions(String subject) return restService.getAllVersions(subject); } + @Override + public List getAllVersions(String subject, boolean lookupDeletedSchema) + throws IOException, RestClientException { + return restService.getAllVersions(RestService.DEFAULT_REQUEST_PROPERTIES, + subject, lookupDeletedSchema); + } + @Override public int getId(String subject, ParsedSchema schema) throws IOException, RestClientException { @@ -621,7 +639,7 @@ public String getCompatibility(String subject) throws IOException, RestClientExc @Override public void deleteCompatibility(String subject) throws IOException, RestClientException { - restService.deleteSubjectConfig(subject); + restService.deleteConfig(subject); } @Override @@ -667,6 +685,12 @@ public Collection getAllSubjects() throws IOException, RestClientExcepti return restService.getAllSubjects(); } + @Override + public Collection getAllSubjects(boolean lookupDeletedSubject) + throws IOException, RestClientException { + return restService.getAllSubjects(lookupDeletedSubject); + } + @Override public Collection getAllSubjectsByPrefix(String subjectPrefix) throws IOException, RestClientException { diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java index 6866aaba5cc..9d2bcf014d7 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/MockSchemaRegistryClient.java @@ -112,7 +112,7 @@ private int getIdFromRegistry( idCache.computeIfAbsent(subject, k -> new ConcurrentHashMap<>()); if (!idSchemaMap.isEmpty()) { for (Map.Entry entry : idSchemaMap.entrySet()) { - if (entry.getValue().canonicalString().equals(schema.canonicalString())) { + if (schemasEqual(entry.getValue(), schema)) { if (registerRequest) { if (id < 0 || id == entry.getKey()) { generateVersion(subject, schema); @@ -148,6 +148,11 @@ private int getIdFromRegistry( } } + private boolean schemasEqual(ParsedSchema schema1, ParsedSchema schema2) { + return schema1.canonicalString().equals(schema2.canonicalString()) + || schema1.deepEquals(schema2); + } + private void generateVersion(String subject, ParsedSchema schema) { List versions = allVersions(subject); int 
currentVersion; @@ -359,7 +364,7 @@ public Schema getByVersion(String subject, int version, boolean lookupDeletedSch int id = -1; Map idSchemaMap = idCache.get(subject); for (Map.Entry entry : idSchemaMap.entrySet()) { - if (entry.getValue().canonicalString().equals(schema.canonicalString())) { + if (schemasEqual(entry.getValue(), schema)) { id = entry.getKey(); } } @@ -386,7 +391,7 @@ public SchemaMetadata getSchemaMetadata(String subject, int version) int id = -1; Map idSchemaMap = idCache.get(subject); for (Map.Entry entry : idSchemaMap.entrySet()) { - if (entry.getValue().canonicalString().equals(schema.canonicalString())) { + if (schemasEqual(entry.getValue(), schema)) { id = entry.getKey(); } } @@ -514,9 +519,11 @@ public synchronized List deleteSubject( throws IOException, RestClientException { schemaCache.remove(subject); idCache.remove(subject); - versionCache.remove(subject); + Map versions = versionCache.remove(subject); compatibilityCache.remove(subject); - return Collections.singletonList(0); + return versions != null + ? versions.values().stream().sorted().collect(Collectors.toList()) + : Collections.emptyList(); } @Override @@ -641,15 +648,19 @@ public Collection getAllSubjects() throws IOException, RestClientExcepti public Collection getAllSubjectsByPrefix(String subjectPrefix) throws IOException, RestClientException { Stream validSubjects = getAllSubjects().stream() - .filter(subject -> subject.startsWith(subjectPrefix)); + .filter(subject -> subjectPrefix == null || subject.startsWith(subjectPrefix)); return validSubjects.collect(Collectors.toCollection(LinkedHashSet::new)); } @Override public synchronized void reset() { schemaCache.clear(); + schemaIdCache.clear(); idCache.clear(); versionCache.clear(); + compatibilityCache.clear(); + modes.clear(); + ids.clear(); } private static String toQualifiedContext(String subject) { diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/SchemaRegistryClient.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/SchemaRegistryClient.java index c43d6241b96..a1fa430025b 100644 --- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/SchemaRegistryClient.java +++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/SchemaRegistryClient.java @@ -137,6 +137,11 @@ public SchemaMetadata getLatestSchemaMetadata(String subject) public SchemaMetadata getSchemaMetadata(String subject, int version) throws IOException, RestClientException; + default SchemaMetadata getSchemaMetadata(String subject, int version, + boolean lookupDeletedSchema) throws IOException, RestClientException { + throw new UnsupportedOperationException(); + } + @Deprecated default int getVersion(String subject, org.apache.avro.Schema schema) throws IOException, RestClientException { @@ -153,6 +158,11 @@ default int getVersion(String subject, ParsedSchema schema, boolean normalize) public List getAllVersions(String subject) throws IOException, RestClientException; + default List getAllVersions(String subject, boolean lookupDeletedSchema) + throws IOException, RestClientException { + throw new UnsupportedOperationException(); + } + @Deprecated default boolean testCompatibility(String subject, org.apache.avro.Schema schema) throws IOException, RestClientException { @@ -197,6 +207,11 @@ default void deleteMode(String subject) throws IOException, RestClientException public Collection getAllSubjects() throws IOException, RestClientException; + default Collection getAllSubjects(boolean lookupDeletedSubject) throws 
IOException,
+      RestClientException {
+    throw new UnsupportedOperationException();
+  }
+
   default Collection getAllSubjectsByPrefix(String subjectPrefix) throws IOException,
       RestClientException {
     throw new UnsupportedOperationException();
diff --git a/client/src/main/java/io/confluent/kafka/schemaregistry/client/rest/RestService.java b/client/src/main/java/io/confluent/kafka/schemaregistry/client/rest/RestService.java
index 3805a9def62..a08fbc10543 100644
--- a/client/src/main/java/io/confluent/kafka/schemaregistry/client/rest/RestService.java
+++ b/client/src/main/java/io/confluent/kafka/schemaregistry/client/rest/RestService.java
@@ -695,15 +695,15 @@ public Config getConfig(Map requestProperties,
     return config;
   }
-  public Config deleteSubjectConfig(String subject)
+  public Config deleteConfig(String subject)
       throws IOException, RestClientException {
-    return deleteSubjectConfig(DEFAULT_REQUEST_PROPERTIES, subject);
+    return deleteConfig(DEFAULT_REQUEST_PROPERTIES, subject);
   }
-  public Config deleteSubjectConfig(Map requestProperties, String subject)
+  public Config deleteConfig(Map requestProperties, String subject)
       throws IOException, RestClientException {
-    UriBuilder builder = UriBuilder.fromPath("/config/{subject}");
-    String path = builder.build(subject).toString();
+    String path = subject != null
+        ? UriBuilder.fromPath("/config/{subject}").build(subject).toString() : "/config";
     Config response = httpRequest(path, "DELETE", null, requestProperties, DELETE_SUBJECT_CONFIG_RESPONSE_TYPE);
diff --git a/core/generated/swagger-ui/schema-registry-api-spec.yaml b/core/generated/swagger-ui/schema-registry-api-spec.yaml
index ebdf30f7933..f68015592a1 100644
--- a/core/generated/swagger-ui/schema-registry-api-spec.yaml
+++ b/core/generated/swagger-ui/schema-registry-api-spec.yaml
@@ -222,6 +222,35 @@ paths:
       description: |
         Error code 50001 -- Error in the backend data store
         Error code 50003 -- Error while forwarding the request to the primary
+    delete:
+      summary: "Deletes the Global compatibility level config and\
+        \ revert to the global default."
+      description: ""
+      operationId: "deleteTopLevelConfig"
+      consumes:
+        - "application/vnd.schemaregistry.v1+json"
+        - "application/vnd.schemaregistry+json"
+        - "application/json"
+        - "application/octet-stream"
+      produces:
+        - "application/vnd.schemaregistry.v1+json"
+        - "application/vnd.schemaregistry+json; qs=0.9"
+        - "application/json; qs=0.5"
+      responses:
+        200:
+          description: "successful operation"
+          schema:
+            type: "string"
+            enum:
+            - "NONE"
+            - "BACKWARD"
+            - "BACKWARD_TRANSITIVE"
+            - "FORWARD"
+            - "FORWARD_TRANSITIVE"
+            - "FULL"
+            - "FULL_TRANSITIVE"
+        500:
+          description: "Error code 50001 -- Error in the backend datastore"
   /config/{subject}:
     get:
       summary: Get compatibility level for a subject.
diff --git a/core/pom.xml b/core/pom.xml index 1bc971105db..04470d95dba 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 kafka-schema-registry @@ -62,6 +62,10 @@ org.slf4j slf4j-reload4j + + io.confluent + logredactor + com.google.guava guava diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java b/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java index 55e90c79e86..5216dbbdd38 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/leaderelector/kafka/SchemaRegistryCoordinator.java @@ -150,10 +150,11 @@ protected void onJoinComplete( } @Override - protected Map performAssignment( + protected Map onLeaderElected( String kafkaLeaderId, // Kafka group "leader" who does assignment, *not* the SR leader String protocol, - List allMemberMetadata + List allMemberMetadata, + boolean skipAssignment ) { log.debug("Performing assignment"); @@ -211,11 +212,16 @@ protected Map performAssignment( } @Override - protected void onJoinPrepare(int generation, String memberId) { + protected boolean onJoinPrepare(Timer timer, int generation, String memberId) { log.debug("Revoking previous assignment {}", assignmentSnapshot); if (assignmentSnapshot != null) { listener.onRevoked(); } + // return true if the cleanup succeeds or if it fails with a non-retriable exception. + // return false otherwise. + // listener.onRevoked() called above removes this instance as the leader + // and even if we got an exception, it wouldn't help retrying. + return true; } @Override diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java index 3cf06a3308b..ed86f475acf 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ConfigResource.java @@ -15,6 +15,8 @@ package io.confluent.kafka.schemaregistry.rest.resources; +import static io.confluent.kafka.schemaregistry.storage.KafkaSchemaRegistry.GLOBAL_RESOURCE_NAME; + import com.google.common.base.CharMatcher; import io.confluent.kafka.schemaregistry.CompatibilityLevel; import io.confluent.kafka.schemaregistry.client.rest.Versions; @@ -95,9 +97,12 @@ public ConfigUpdateRequest updateSubjectLevelConfig( throw new RestInvalidCompatibilityException(); } - if (subject != null && CharMatcher.javaIsoControl().matchesAnyOf(subject)) { + if (subject != null && (CharMatcher.javaIsoControl().matchesAnyOf(subject) + || QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject() + .equals(GLOBAL_RESOURCE_NAME))) { throw Errors.invalidSubjectException(subject); } + subject = QualifiedSubject.normalize(schemaRegistry.tenant(), subject); try { @@ -213,6 +218,39 @@ public Config getTopLevelConfig() { return config; } + @DELETE + @Operation(summary = "Deletes the Global-level compatibility level config and " + + "revert to the global default.", responses = { + @ApiResponse(content = @Content( + schema = @Schema(implementation = CompatibilityLevel.class))), + @ApiResponse(responseCode = "500", description = "Error code 50001 -- Error in the backend " + + "datastore") + }) + public void deleteTopLevelConfig( + final @Suspended 
AsyncResponse asyncResponse, + @Context HttpHeaders headers) { + log.info("Deleting Global compatibility setting and reverting back to default"); + + Config deletedConfig; + try { + CompatibilityLevel currentCompatibility = schemaRegistry.getCompatibilityLevel(null); + Map headerProperties = requestHeaderBuilder.buildRequestHeaders( + headers, schemaRegistry.config().whitelistHeaders()); + schemaRegistry.deleteCompatibilityConfigOrForward(null, headerProperties); + deletedConfig = new Config(currentCompatibility.name); + } catch (OperationNotPermittedException e) { + throw Errors.operationNotPermittedException(e.getMessage()); + } catch (SchemaRegistryStoreException e) { + throw Errors.storeException("Failed to delete compatibility level", e); + } catch (UnknownLeaderException e) { + throw Errors.unknownLeaderException("Failed to delete compatibility level", e); + } catch (SchemaRegistryRequestForwardingException e) { + throw Errors.requestForwardingFailedException("Error while forwarding delete config request" + + " to the leader", e); + } + asyncResponse.resume(deletedConfig); + } + @DELETE @Path("/{subject}") @Operation(summary = "Delete subject compatibility level", @@ -243,7 +281,7 @@ public void deleteSubjectConfig( Map headerProperties = requestHeaderBuilder.buildRequestHeaders( headers, schemaRegistry.config().whitelistHeaders()); - schemaRegistry.deleteSubjectCompatibilityConfigOrForward(subject, headerProperties); + schemaRegistry.deleteCompatibilityConfigOrForward(subject, headerProperties); deletedConfig = new Config(currentCompatibility.name); } catch (OperationNotPermittedException e) { throw Errors.operationNotPermittedException(e.getMessage()); diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java index cda5f8cb843..69071f4422a 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/ModeResource.java @@ -15,6 +15,8 @@ package io.confluent.kafka.schemaregistry.rest.resources; +import static io.confluent.kafka.schemaregistry.storage.KafkaSchemaRegistry.GLOBAL_RESOURCE_NAME; + import com.google.common.base.CharMatcher; import io.confluent.kafka.schemaregistry.client.rest.Versions; import io.confluent.kafka.schemaregistry.client.rest.entities.Mode; @@ -95,9 +97,12 @@ public ModeUpdateRequest updateMode( @QueryParam("force") boolean force ) { - if (subject != null && CharMatcher.javaIsoControl().matchesAnyOf(subject)) { + if (subject != null && (CharMatcher.javaIsoControl().matchesAnyOf(subject) + || QualifiedSubject.create(this.schemaRegistry.tenant(), subject).getSubject() + .equals(GLOBAL_RESOURCE_NAME))) { throw Errors.invalidSubjectException(subject); } + subject = QualifiedSubject.normalize(schemaRegistry.tenant(), subject); io.confluent.kafka.schemaregistry.storage.Mode mode; diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java index d758f5e0d26..2235e8958de 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/rest/resources/SubjectVersionsResource.java @@ -15,6 +15,8 @@ package io.confluent.kafka.schemaregistry.rest.resources; +import static 
io.confluent.kafka.schemaregistry.storage.KafkaSchemaRegistry.GLOBAL_RESOURCE_NAME; + import com.google.common.base.CharMatcher; import io.confluent.kafka.schemaregistry.avro.AvroSchema; import io.confluent.kafka.schemaregistry.client.rest.Versions; @@ -322,9 +324,12 @@ public void register( subjectName, request.getVersion(), request.getId(), request.getSchemaType(), request.getSchema() == null ? 0 : request.getSchema().length()); - if (subjectName != null && CharMatcher.javaIsoControl().matchesAnyOf(subjectName)) { + if (subjectName != null && (CharMatcher.javaIsoControl().matchesAnyOf(subjectName) + || QualifiedSubject.create(this.schemaRegistry.tenant(), subjectName).getSubject() + .equals(GLOBAL_RESOURCE_NAME))) { throw Errors.invalidSubjectException(subjectName); } + subjectName = QualifiedSubject.normalize(schemaRegistry.tenant(), subjectName); Map headerProperties = requestHeaderBuilder.buildRequestHeaders( @@ -358,9 +363,9 @@ public void register( throw Errors.requestForwardingFailedException("Error while forwarding register schema request" + " to the leader", e); } catch (IncompatibleSchemaException e) { - throw Errors.incompatibleSchemaException("Schema being registered is incompatible with an" - + " earlier schema for subject " - + "\"" + subjectName + "\"", e); + throw Errors.incompatibleSchemaException("Schema being registered is incompatible with" + + " an earlier schema for subject \"" + subjectName + "\", details: " + + e.getMessage(), e); } catch (UnknownLeaderException e) { throw Errors.unknownLeaderException("Leader not known.", e); } catch (SchemaRegistryException e) { diff --git a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java index 5035af25e91..74f6eca68f2 100644 --- a/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java +++ b/core/src/main/java/io/confluent/kafka/schemaregistry/storage/KafkaSchemaRegistry.java @@ -67,6 +67,8 @@ import java.util.LinkedHashSet; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Consumer; import java.util.stream.Stream; import org.apache.avro.reflect.Nullable; import org.apache.kafka.clients.admin.AdminClient; @@ -103,6 +105,8 @@ public class KafkaSchemaRegistry implements SchemaRegistry, LeaderAwareSchemaReg * Schema versions under a particular subject are indexed from MIN_VERSION. */ public static final int MIN_VERSION = 1; + // Subject name under which global permissions are stored. + public static final String GLOBAL_RESOURCE_NAME = "__GLOBAL"; public static final int MAX_VERSION = Integer.MAX_VALUE; private static final Logger log = LoggerFactory.getLogger(KafkaSchemaRegistry.class); @@ -130,6 +134,7 @@ public class KafkaSchemaRegistry implements SchemaRegistry, LeaderAwareSchemaReg private final Map providers; private final String kafkaClusterId; private final String groupId; + private final List> leaderChangeListeners = new CopyOnWriteArrayList<>(); public KafkaSchemaRegistry(SchemaRegistryConfig config, Serializer serializer) @@ -334,6 +339,15 @@ public boolean initialized() { return kafkaStore.initialized(); } + /** + * Add a leader change listener. 
+ * + * @param listener a function that takes whether this node is a leader + */ + public void addLeaderChangeListener(Consumer listener) { + leaderChangeListeners.add(listener); + } + public boolean isLeader() { kafkaStore.leaderLock().lock(); try { @@ -365,9 +379,11 @@ public void setLeader(@Nullable SchemaRegistryIdentity newLeader) "Tried to set an ineligible node to leader: " + newLeader); } + boolean isLeader; + boolean leaderChanged; kafkaStore.leaderLock().lock(); try { - SchemaRegistryIdentity previousLeader = leaderIdentity; + final SchemaRegistryIdentity previousLeader = leaderIdentity; leaderIdentity = newLeader; if (leaderIdentity == null) { @@ -380,7 +396,9 @@ public void setLeader(@Nullable SchemaRegistryIdentity newLeader) } } - if (leaderIdentity != null && !leaderIdentity.equals(previousLeader) && isLeader()) { + isLeader = isLeader(); + leaderChanged = leaderIdentity != null && !leaderIdentity.equals(previousLeader); + if (leaderChanged && isLeader) { // The new leader may not know the exact last offset in the Kafka log. So, mark the // last offset invalid here kafkaStore.markLastWrittenOffsetInvalid(); @@ -397,6 +415,16 @@ public void setLeader(@Nullable SchemaRegistryIdentity newLeader) } finally { kafkaStore.leaderLock().unlock(); } + + if (leaderChanged) { + for (Consumer listener : leaderChangeListeners) { + try { + listener.accept(isLeader); + } catch (Exception e) { + log.error("Could not invoke leader change listener", e); + } + } + } } /** @@ -487,8 +515,10 @@ public int register(String subject, Mode mode = getModeInScope(subject); boolean isCompatible = true; + List compatibilityErrorLogs = new ArrayList<>(); if (mode != Mode.IMPORT) { - isCompatible = isCompatibleWithPrevious(subject, parsedSchema, undeletedVersions).isEmpty(); + compatibilityErrorLogs = isCompatibleWithPrevious(subject, parsedSchema, undeletedVersions); + isCompatible = compatibilityErrorLogs.isEmpty(); } try { @@ -556,8 +586,7 @@ public int register(String subject, return schema.getId(); } else { - throw new IncompatibleSchemaException( - "New schema is incompatible with an earlier schema."); + throw new IncompatibleSchemaException(compatibilityErrorLogs.toString()); } } catch (EntryTooLargeException e) { throw new SchemaTooLargeException("Write failed because schema is too large", e); @@ -655,7 +684,7 @@ public void deleteSchemaVersion(String subject, deleteMode(subject); } if (getCompatibilityLevel(subject) != null) { - deleteSubjectCompatibility(subject); + deleteCompatibility(subject); } } } else { @@ -728,7 +757,7 @@ public List deleteSubject(String subject, deleteMode(subject); } if (getCompatibilityLevel(subject) != null) { - deleteSubjectCompatibility(subject); + deleteCompatibility(subject); } } else { for (Integer version : deletedVersions) { @@ -959,7 +988,7 @@ private List forwardDeleteSubjectRequestToLeader( } } - private void forwardDeleteSubjectCompatibilityConfigToLeader( + private void forwardDeleteCompatibilityConfigToLeader( Map requestProperties, String subject ) throws SchemaRegistryRequestForwardingException { @@ -968,7 +997,7 @@ private void forwardDeleteSubjectCompatibilityConfigToLeader( log.debug(String.format("Forwarding delete subject compatibility config request %s to %s", subject, baseUrl)); try { - leaderRestService.deleteSubjectConfig(requestProperties, subject); + leaderRestService.deleteConfig(requestProperties, subject); } catch (IOException e) { throw new SchemaRegistryRequestForwardingException( String.format( @@ -1036,7 +1065,7 @@ private ParsedSchema 
canonicalizeSchema(Schema schema, boolean isNew, boolean no parsedSchema = parsedSchema.normalize(); } } catch (Exception e) { - String errMsg = "Invalid schema " + schema; + String errMsg = "Invalid schema " + schema + ", details: " + e.getMessage(); log.error(errMsg, e); throw new InvalidSchemaException(errMsg, e); } @@ -1097,10 +1126,12 @@ private ParsedSchema loadSchema( } final String type = schemaType; - ParsedSchema parsedSchema = provider.parseSchema(schema, references, isNew) - .orElseThrow(() -> new InvalidSchemaException("Invalid schema " + schema - + " with refs " + references + " of type " + type)); - return parsedSchema; + try { + return provider.parseSchemaOrElseThrow(schema, references, isNew); + } catch (Exception e) { + throw new InvalidSchemaException("Invalid schema " + schema + + " with refs " + references + " of type " + type + ", details: " + e.getMessage()); + } } public Schema getUsingContexts(String subject, int version, boolean @@ -1567,32 +1598,32 @@ public void updateConfigOrForward(String subject, CompatibilityLevel newCompatib } } - public void deleteSubjectCompatibilityConfig(String subject) + public void deleteCompatibilityConfig(String subject) throws SchemaRegistryStoreException, OperationNotPermittedException { if (isReadOnlyMode(subject)) { throw new OperationNotPermittedException("Subject " + subject + " is in read-only mode"); } try { kafkaStore.waitUntilKafkaReaderReachesLastOffset(subject, kafkaStoreTimeoutMs); - deleteSubjectCompatibility(subject); + deleteCompatibility(subject); } catch (StoreException e) { throw new SchemaRegistryStoreException("Failed to delete subject config value from store", e); } } - public void deleteSubjectCompatibilityConfigOrForward(String subject, + public void deleteCompatibilityConfigOrForward(String subject, Map headerProperties) throws SchemaRegistryStoreException, SchemaRegistryRequestForwardingException, OperationNotPermittedException, UnknownLeaderException { kafkaStore.lockFor(subject).lock(); try { if (isLeader()) { - deleteSubjectCompatibilityConfig(subject); + deleteCompatibilityConfig(subject); } else { // forward delete subject config request to the leader if (leaderIdentity != null) { - forwardDeleteSubjectCompatibilityConfigToLeader(headerProperties, subject); + forwardDeleteCompatibilityConfigToLeader(headerProperties, subject); } else { throw new UnknownLeaderException("Delete config request failed since leader is " + "unknown"); @@ -1694,7 +1725,7 @@ private void deleteMode(String subject) throws StoreException { this.kafkaStore.delete(modeKey); } - private void deleteSubjectCompatibility(String subject) throws StoreException { + private void deleteCompatibility(String subject) throws StoreException { ConfigKey configKey = new ConfigKey(subject); this.kafkaStore.delete(configKey); } diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java b/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java index 821b82e7167..87d44d0fb16 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/ClusterTestHarness.java @@ -130,17 +130,13 @@ public void setUp() throws Exception { servers.add(server); } - brokerList = - TestUtils.getBrokerListStrFromServers( - JavaConverters.asScalaBuffer(servers), - getSecurityProtocol() - ); + ListenerName listenerType = ListenerName.forSecurityProtocol(getSecurityProtocol()); + brokerList = 
TestUtils.bootstrapServers(JavaConverters.asScalaBuffer(servers), listenerType); // Initialize the rest app ourselves so we can ensure we don't pass any info about the Kafka // zookeeper. The format for this config includes the security protocol scheme in the URLs so // we can't use the pre-generated server list. String[] serverUrls = new String[servers.size()]; - ListenerName listenerType = ListenerName.forSecurityProtocol(getSecurityProtocol()); for(int i = 0; i < servers.size(); i++) { serverUrls[i] = getSecurityProtocol() + "://" + Utils.formatAddress( diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/metrics/CustomSchemaProviderMetricTest.java b/core/src/test/java/io/confluent/kafka/schemaregistry/metrics/CustomSchemaProviderMetricTest.java index ec6d6058de6..e2dcbf3a1aa 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/metrics/CustomSchemaProviderMetricTest.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/metrics/CustomSchemaProviderMetricTest.java @@ -65,5 +65,12 @@ public Optional parseSchema(String schemaString, boolean isNew) { return Optional.empty(); } + + @Override + public ParsedSchema parseSchemaOrElseThrow(String schemaString, + List references, + boolean isNew) { + return null; + } } } diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiCompatibilityTest.java b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiCompatibilityTest.java index aa0cbfba80a..4e997c5dcab 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiCompatibilityTest.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiCompatibilityTest.java @@ -22,7 +22,9 @@ import io.confluent.kafka.schemaregistry.rest.exceptions.RestInvalidSchemaException; import org.junit.Test; +import static org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType.READER_FIELD_MISSING_DEFAULT_VALUE; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class RestApiCompatibilityTest extends ClusterTestHarness { @@ -59,6 +61,8 @@ public void testCompatibility() throws Exception { assertEquals("Should get a conflict status", RestIncompatibleSchemaException.DEFAULT_ERROR_CODE, e.getStatus()); + assertTrue("Verifying error message verbosity", + e.getMessage().contains(READER_FIELD_MISSING_DEFAULT_VALUE.toString())); } // register a non-avro diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiSslTest.java b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiSslTest.java index 561329a7293..c34ea023ea8 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiSslTest.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiSslTest.java @@ -118,12 +118,18 @@ public void testRegisterWithClientSecurityWithMinimalProperties() throws Excepti clientsslConfigs.put( SchemaRegistryClientConfig.CLIENT_NAMESPACE + SchemaRegistryConfig.SSL_KEYSTORE_PASSWORD_CONFIG, props.get(SchemaRegistryConfig.SSL_KEYSTORE_PASSWORD_CONFIG)); + clientsslConfigs.put( + SchemaRegistryClientConfig.CLIENT_NAMESPACE + SchemaRegistryConfig.SSL_KEYSTORE_TYPE_CONFIG, + props.get(SchemaRegistryConfig.SSL_KEYSTORE_TYPE_CONFIG)); clientsslConfigs.put( SchemaRegistryClientConfig.CLIENT_NAMESPACE + SchemaRegistryConfig.SSL_TRUSTSTORE_LOCATION_CONFIG, props.get(SchemaRegistryConfig.SSL_TRUSTSTORE_LOCATION_CONFIG)); clientsslConfigs.put( SchemaRegistryClientConfig.CLIENT_NAMESPACE + 
SchemaRegistryConfig.SSL_TRUSTSTORE_PASSWORD_CONFIG, props.get(SchemaRegistryConfig.SSL_TRUSTSTORE_PASSWORD_CONFIG)); + clientsslConfigs.put( + SchemaRegistryClientConfig.CLIENT_NAMESPACE + SchemaRegistryConfig.SSL_TRUSTSTORE_TYPE_CONFIG, + props.get(SchemaRegistryConfig.SSL_TRUSTSTORE_TYPE_CONFIG)); CachedSchemaRegistryClient schemaRegistryClient = new CachedSchemaRegistryClient(restApp.restClient, 10, clientsslConfigs); assertEquals( diff --git a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java index ee27bdaa379..8a3b434852f 100644 --- a/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java +++ b/core/src/test/java/io/confluent/kafka/schemaregistry/rest/RestApiTest.java @@ -35,12 +35,14 @@ import io.confluent.kafka.schemaregistry.utils.TestUtils; import org.apache.avro.Schema.Parser; +import org.apache.avro.SchemaParseException; import org.junit.Test; import java.util.*; import java.net.URL; import java.net.HttpURLConnection; +import static io.confluent.kafka.schemaregistry.CompatibilityLevel.BACKWARD; import static io.confluent.kafka.schemaregistry.CompatibilityLevel.FORWARD; import static io.confluent.kafka.schemaregistry.CompatibilityLevel.NONE; import static io.confluent.kafka.schemaregistry.utils.QualifiedSubject.DEFAULT_CONTEXT; @@ -183,6 +185,57 @@ public void testRegisterBadDefault() throws Exception { } } + @Test + public void testRegisterInvalidSchemaBadType() throws Exception { + String subject = "testSubject"; + + //Invalid Field Type 'str' + String badSchemaString = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"str\",\"name\":\"field1\"}]}"; + + String expectedErrorMessage = null; + try { + new Parser().parse(badSchemaString); + fail("Parsing invalid schema string should fail with SchemaParseException"); + } catch (SchemaParseException spe) { + expectedErrorMessage = spe.getMessage(); + } + + try { + restApp.restClient.registerSchema(badSchemaString, subject); + fail("Registering schema with invalid field type should fail with " + + Errors.INVALID_SCHEMA_ERROR_CODE + + " (invalid schema)"); + } catch (RestClientException rce) { + assertEquals("Invalid schema", Errors.INVALID_SCHEMA_ERROR_CODE, rce.getErrorCode()); + assertTrue("Verify error message verbosity", rce.getMessage().contains(expectedErrorMessage)); + } + } + + @Test + public void testRegisterInvalidSchemaBadReference() throws Exception { + String subject = "testSubject"; + + //Invalid Reference + SchemaReference invalidReference = new SchemaReference("invalid.schema", "badSubject", 1); + String schemaString = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"field1\"}]}"; + + try { + restApp.restClient.registerSchema(schemaString, "AVRO", + Collections.singletonList(invalidReference), subject); + fail("Registering schema with invalid reference should fail with " + + Errors.INVALID_SCHEMA_ERROR_CODE + + " (invalid schema)"); + } catch (RestClientException rce) { + assertEquals("Invalid schema", Errors.INVALID_SCHEMA_ERROR_CODE, rce.getErrorCode()); + } + } + @Test public void testRegisterDiffSchemaType() throws Exception { String subject = "testSubject"; @@ -481,7 +534,7 @@ public void testSubjectConfigChange() throws Exception { restApp.restClient.getConfig(subject).getCompatibilityLevel()); // delete subject compatibility - restApp.restClient.deleteSubjectConfig(subject); + 
restApp.restClient.deleteConfig(subject); assertEquals("Compatibility level for this subject should be reverted to none", NONE.name, @@ -490,6 +543,33 @@ public void testSubjectConfigChange() throws Exception { .getCompatibilityLevel()); } + @Test + public void testGlobalConfigChange() throws Exception{ + assertEquals("Default compatibility level should be none for this test instance", + NONE.name, + restApp.restClient.getConfig(null).getCompatibilityLevel()); + + // change subject compatibility to forward + restApp.restClient.updateCompatibility(CompatibilityLevel.FORWARD.name, null); + assertEquals("New Global compatibility level should be forward", + FORWARD.name, + restApp.restClient.getConfig(null).getCompatibilityLevel()); + + // change subject compatibility to backward + restApp.restClient.updateCompatibility(BACKWARD.name, null); + assertEquals("New Global compatibility level should be backward", + BACKWARD.name, + restApp.restClient.getConfig(null).getCompatibilityLevel()); + + // delete Global compatibility + restApp.restClient.deleteConfig(null); + assertEquals("Global compatibility level should be reverted to none", + NONE.name, + restApp.restClient + .getConfig(RestService.DEFAULT_REQUEST_PROPERTIES, null, true) + .getCompatibilityLevel()); + } + @Test public void testGetSchemaNonExistingId() throws Exception { try { diff --git a/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Ref.java b/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Ref.java index 55c225ad27c..268eed615c8 100644 --- a/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Ref.java +++ b/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Ref.java @@ -188,7 +188,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getRefIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(refId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, refId_); } if (isActive_ != false) { @@ -203,7 +203,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getRefIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(refId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, refId_); } if (isActive_ != false) { diff --git a/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Root.java b/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Root.java index c7507ff7e36..958ae01f068 100644 --- a/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Root.java +++ b/core/src/test/java/io/confluent/kafka/serializers/protobuf/test/Root.java @@ -220,7 +220,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getRootIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rootId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, rootId_); } if (ref_ != null) { @@ -235,7 +235,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getRootIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rootId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, rootId_); } if (ref_ != null) { diff --git a/json-schema-converter/pom.xml b/json-schema-converter/pom.xml index 1afe796b83e..c257423a0e8 100644 --- 
a/json-schema-converter/pom.xml +++ b/json-schema-converter/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 @@ -36,6 +36,11 @@ kafka-json-schema-serializer ${io.confluent.schema-registry.version} + + io.confluent + kafka-schema-converter + ${io.confluent.schema-registry.version} + io.confluent kafka-schema-serializer @@ -57,8 +62,8 @@ provided - com.github.everit-org.json-schema - org.everit.json.schema + com.github.erosb + everit-json-schema org.mockito diff --git a/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java b/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java index 08496adefbb..dfbf730791a 100644 --- a/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java +++ b/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaData.java @@ -27,6 +27,8 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; import com.google.common.annotations.VisibleForTesting; +import io.confluent.connect.schema.ConnectEnum; +import io.confluent.connect.schema.ConnectUnion; import io.confluent.kafka.schemaregistry.json.jackson.Jackson; import io.confluent.kafka.schemaregistry.utils.BoundedConcurrentHashMap; import java.util.HashSet; @@ -102,9 +104,16 @@ public class JsonSchemaData { public static final String DEFAULT_ID_PREFIX = "#id"; public static final String JSON_ID_PROP = NAMESPACE + ".Id"; public static final String JSON_TYPE_ENUM = NAMESPACE + ".Enum"; - public static final String JSON_TYPE_ENUM_PREFIX = JSON_TYPE_ENUM + "."; public static final String JSON_TYPE_ONE_OF = NAMESPACE + ".OneOf"; + public static final String GENERALIZED_TYPE_UNION = ConnectUnion.LOGICAL_PARAMETER; + public static final String GENERALIZED_TYPE_ENUM = ConnectEnum.LOGICAL_PARAMETER; + public static final String GENERALIZED_TYPE_UNION_PREFIX = "connect_union_"; + public static final String GENERALIZED_TYPE_UNION_FIELD_PREFIX = + GENERALIZED_TYPE_UNION_PREFIX + "field_"; + + public static final String NULL_MARKER = ""; + private static final JsonNodeFactory JSON_NODE_FACTORY = JsonNodeFactory.withExactBigDecimals(true); @@ -182,14 +191,18 @@ public class JsonSchemaData { return result; }); TO_CONNECT_CONVERTERS.put(Schema.Type.STRUCT, (schema, value) -> { - if (schema.name() != null && schema.name().equals(JSON_TYPE_ONE_OF)) { + if (isUnionSchema(schema)) { + boolean generalizedSumTypeSupport = ConnectUnion.isUnion(schema); + String fieldNamePrefix = generalizedSumTypeSupport + ? GENERALIZED_TYPE_UNION_FIELD_PREFIX + : JSON_TYPE_ONE_OF + ".field."; int numMatchingProperties = 0; Field matchingField = null; for (Field field : schema.fields()) { Schema fieldSchema = field.schema(); if (isInstanceOfSchemaTypeForSimpleSchema(fieldSchema, value)) { - return new Struct(schema.schema()).put(JSON_TYPE_ONE_OF + ".field." + field.index(), + return new Struct(schema.schema()).put(fieldNamePrefix + field.index(), toConnectData(fieldSchema, value) ); } else { @@ -202,7 +215,7 @@ public class JsonSchemaData { } if (matchingField != null) { return new Struct(schema.schema()).put( - JSON_TYPE_ONE_OF + ".field." 
+ matchingField.index(), + fieldNamePrefix + matchingField.index(), toConnectData(matchingField.schema(), value) ); } @@ -378,6 +391,7 @@ private static int matchStructSchema(Schema fieldSchema, JsonNode value) { private final JsonSchemaDataConfig config; private final Map fromConnectSchemaCache; private final Map toConnectSchemaCache; + private final boolean generalizedSumTypeSupport; public JsonSchemaData() { this(new JsonSchemaDataConfig.Builder().with( @@ -390,6 +404,7 @@ public JsonSchemaData(JsonSchemaDataConfig jsonSchemaDataConfig) { this.config = jsonSchemaDataConfig; fromConnectSchemaCache = new BoundedConcurrentHashMap<>(jsonSchemaDataConfig.schemaCacheSize()); toConnectSchemaCache = new BoundedConcurrentHashMap<>(jsonSchemaDataConfig.schemaCacheSize()); + generalizedSumTypeSupport = jsonSchemaDataConfig.isGeneralizedSumTypeSupport(); } /** @@ -520,7 +535,7 @@ public JsonNode fromConnectData(Schema schema, Object logicalValue) { } //This handles the inverting of a union which is held as a struct, where each field is // one of the union types. - if (JSON_TYPE_ONE_OF.equals(schema.name())) { + if (isUnionSchema(schema)) { for (Field field : schema.fields()) { Object object = struct.get(field); if (object != null) { @@ -683,11 +698,18 @@ private org.everit.json.schema.Schema rawSchemaFromConnectSchema( builder = BooleanSchema.builder(); break; case STRING: - if (schema.parameters() != null && schema.parameters().containsKey(JSON_TYPE_ENUM)) { + if (schema.parameters() != null + && (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM) + || schema.parameters().containsKey(JSON_TYPE_ENUM))) { EnumSchema.Builder enumBuilder = EnumSchema.builder(); + String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : JSON_TYPE_ENUM; for (Map.Entry entry : schema.parameters().entrySet()) { - if (entry.getKey().startsWith(JSON_TYPE_ENUM_PREFIX)) { - enumBuilder.possibleValue(entry.getValue()); + if (entry.getKey().startsWith(paramName + ".")) { + String enumSymbol = entry.getKey().substring(paramName.length() + 1); + if (enumSymbol.equals(NULL_MARKER)) { + enumSymbol = null; + } + enumBuilder.possibleValue(enumSymbol); } } builder = enumBuilder; @@ -736,7 +758,7 @@ private org.everit.json.schema.Schema rawSchemaFromConnectSchema( } break; case STRUCT: - if (JSON_TYPE_ONE_OF.equals(schema.name())) { + if (isUnionSchema(schema)) { CombinedSchema.Builder combinedBuilder = CombinedSchema.builder(); combinedBuilder.criterion(CombinedSchema.ONE_CRITERION); if (schema.isOptional()) { @@ -987,19 +1009,28 @@ private Schema toConnectSchema( } else if (jsonSchema instanceof EnumSchema) { EnumSchema enumSchema = (EnumSchema) jsonSchema; builder = SchemaBuilder.string(); - builder.parameter(JSON_TYPE_ENUM, - "" - ); // JSON enums have no name, use empty string as placeholder + String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : JSON_TYPE_ENUM; + builder.parameter(paramName, ""); // JSON enums have no name, use empty string as placeholder + int symbolIndex = 0; for (Object enumObj : enumSchema.getPossibleValuesAsList()) { - String enumSymbol = enumObj.toString(); - builder.parameter(JSON_TYPE_ENUM_PREFIX + enumSymbol, enumSymbol); + String enumSymbol = enumObj != null ? enumObj.toString() : NULL_MARKER; + if (generalizedSumTypeSupport) { + builder.parameter(paramName + "." + enumSymbol, String.valueOf(symbolIndex)); + } else { + builder.parameter(paramName + "." 
+ enumSymbol, enumSymbol); + } + symbolIndex++; } } else if (jsonSchema instanceof CombinedSchema) { CombinedSchema combinedSchema = (CombinedSchema) jsonSchema; CombinedSchema.ValidationCriterion criterion = combinedSchema.getCriterion(); String name; if (criterion == CombinedSchema.ONE_CRITERION || criterion == CombinedSchema.ANY_CRITERION) { - name = JSON_TYPE_ONE_OF; + if (generalizedSumTypeSupport) { + name = GENERALIZED_TYPE_UNION_PREFIX + ctx.getAndIncrementUnionIndex(); + } else { + name = JSON_TYPE_ONE_OF; + } } else if (criterion == CombinedSchema.ALL_CRITERION) { return allOfToConnectSchema(ctx, combinedSchema, version, forceOptional); } else { @@ -1021,12 +1052,18 @@ private Schema toConnectSchema( } int index = 0; builder = SchemaBuilder.struct().name(name); + if (generalizedSumTypeSupport) { + builder.parameter(GENERALIZED_TYPE_UNION, name); + } for (org.everit.json.schema.Schema subSchema : combinedSchema.getSubschemas()) { if (subSchema instanceof NullSchema) { builder.optional(); } else { - String subFieldName = name + ".field." + index++; + String subFieldName = generalizedSumTypeSupport + ? GENERALIZED_TYPE_UNION_FIELD_PREFIX + index + : name + ".field." + index; builder.field(subFieldName, toConnectSchema(ctx, subSchema, null, true)); + index++; } } } else if (jsonSchema instanceof ArraySchema) { @@ -1235,6 +1272,10 @@ private void collectPropertySchemas( } } + private static boolean isUnionSchema(Schema schema) { + return JSON_TYPE_ONE_OF.equals(schema.name()) || ConnectUnion.isUnion(schema); + } + private interface JsonToConnectTypeConverter { Object convert(Schema schema, JsonNode value); } @@ -1399,6 +1440,7 @@ public Schema schema() { private static class ToConnectContext { private final Map schemaToStructMap; private int idIndex = 0; + private int unionIndex = 0; public ToConnectContext() { this.schemaToStructMap = new IdentityHashMap<>(); @@ -1415,6 +1457,10 @@ public void put(org.everit.json.schema.Schema schema, SchemaBuilder builder) { public int incrementAndGetIdIndex() { return ++idIndex; } + + public int getAndIncrementUnionIndex() { + return unionIndex++; + } } /** diff --git a/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaDataConfig.java b/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaDataConfig.java index 3af561dcf15..e4b02cb953e 100644 --- a/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaDataConfig.java +++ b/json-schema-converter/src/main/java/io/confluent/connect/json/JsonSchemaDataConfig.java @@ -32,6 +32,12 @@ public class JsonSchemaDataConfig extends AbstractConfig { + public static final String GENERALIZED_SUM_TYPE_SUPPORT_CONFIG = "generalized.sum.type.support"; + public static final boolean GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT = false; + public static final String GENERALIZED_SUM_TYPE_SUPPORT_DOC = + "Toggle for enabling/disabling generalized sum type support: interoperability of enum/union " + + "with other schema formats"; + public static final String OBJECT_ADDITIONAL_PROPERTIES_CONFIG = "object.additional.properties"; public static final boolean OBJECT_ADDITIONAL_PROPERTIES_DEFAULT = true; public static final String OBJECT_ADDITIONAL_PROPERTIES_DOC = @@ -54,6 +60,12 @@ public class JsonSchemaDataConfig extends AbstractConfig { public static ConfigDef baseConfigDef() { return new ConfigDef().define( + GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, + ConfigDef.Type.BOOLEAN, + GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT, + ConfigDef.Importance.MEDIUM, + GENERALIZED_SUM_TYPE_SUPPORT_DOC 
+ ).define( OBJECT_ADDITIONAL_PROPERTIES_CONFIG, ConfigDef.Type.BOOLEAN, OBJECT_ADDITIONAL_PROPERTIES_DEFAULT, @@ -86,6 +98,10 @@ public JsonSchemaDataConfig(Map props) { super(baseConfigDef(), props); } + public boolean isGeneralizedSumTypeSupport() { + return this.getBoolean(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG); + } + public boolean allowAdditionalProperties() { return getBoolean(OBJECT_ADDITIONAL_PROPERTIES_CONFIG); } diff --git a/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java b/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java index b3909177947..81897b949bd 100644 --- a/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java +++ b/json-schema-converter/src/test/java/io/confluent/connect/json/JsonSchemaDataTest.java @@ -35,7 +35,9 @@ import io.confluent.kafka.schemaregistry.json.jackson.Jackson; import java.util.ArrayList; import java.util.Date; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import org.apache.kafka.connect.data.ConnectSchema; import org.apache.kafka.connect.data.Decimal; import org.apache.kafka.connect.data.Schema; @@ -65,6 +67,10 @@ import static io.confluent.connect.json.JsonSchemaData.CONNECT_TYPE_MAP; import static io.confluent.connect.json.JsonSchemaData.CONNECT_TYPE_PROP; +import static io.confluent.connect.json.JsonSchemaData.GENERALIZED_TYPE_ENUM; +import static io.confluent.connect.json.JsonSchemaData.GENERALIZED_TYPE_UNION; +import static io.confluent.connect.json.JsonSchemaData.GENERALIZED_TYPE_UNION_FIELD_PREFIX; +import static io.confluent.connect.json.JsonSchemaData.GENERALIZED_TYPE_UNION_PREFIX; import static io.confluent.connect.json.JsonSchemaData.JSON_TYPE_ENUM; import static io.confluent.connect.json.JsonSchemaData.JSON_TYPE_ONE_OF; import static io.confluent.connect.json.JsonSchemaData.KEY_FIELD; @@ -206,6 +212,77 @@ public void testFromConnectEnum() { checkNonObjectConversion(schema, TextNode.valueOf("one"), connectSchema, "one"); } + @Test + public void testFromConnectEnumWithNull() { + EnumSchema schema = EnumSchema.builder() + .possibleValue("one") + .possibleValue("two") + .possibleValue("three") + .possibleValue(null) + .build(); + Schema connectSchema = new SchemaBuilder(Schema.Type.STRING).parameter(JSON_TYPE_ENUM, "") + .parameter(JSON_TYPE_ENUM + ".one", "one") + .parameter(JSON_TYPE_ENUM + ".two", "two") + .parameter(JSON_TYPE_ENUM + ".three", "three") + .parameter(JSON_TYPE_ENUM + ".", "") + .build(); + + checkNonObjectConversion(schema, TextNode.valueOf("one"), connectSchema, "one"); + } + + @Test + public void testFromConnectEnumWithGeneralizedSumTypeSupport() { + jsonSchemaData = + new JsonSchemaData(new JsonSchemaDataConfig( + Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true"))); + Map params = new LinkedHashMap<>(); + params.put("org.apache.kafka.connect.data.Enum", ""); + params.put("org.apache.kafka.connect.data.Enum.one", "0"); + params.put("org.apache.kafka.connect.data.Enum.two", "1"); + params.put("org.apache.kafka.connect.data.Enum.three", "2"); + EnumSchema schema = EnumSchema.builder() + .possibleValue("one") + .possibleValue("two") + .possibleValue("three") + .unprocessedProperties(Collections.singletonMap("connect.parameters", params)) + .build(); + Schema connectSchema = new SchemaBuilder(Schema.Type.STRING).parameter(GENERALIZED_TYPE_ENUM, "") + .parameter(GENERALIZED_TYPE_ENUM + ".one", "0") + .parameter(GENERALIZED_TYPE_ENUM + ".two", "1") + 
.parameter(GENERALIZED_TYPE_ENUM + ".three", "2") + .build(); + + checkNonObjectConversion(schema, TextNode.valueOf("one"), connectSchema, "one"); + } + + @Test + public void testFromConnectEnumWithNullGeneralizedSumTypeSupport() { + jsonSchemaData = + new JsonSchemaData(new JsonSchemaDataConfig( + Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true"))); + Map params = new LinkedHashMap<>(); + params.put("org.apache.kafka.connect.data.Enum", ""); + params.put("org.apache.kafka.connect.data.Enum.one", "0"); + params.put("org.apache.kafka.connect.data.Enum.two", "1"); + params.put("org.apache.kafka.connect.data.Enum.three", "2"); + params.put("org.apache.kafka.connect.data.Enum.", "3"); + EnumSchema schema = EnumSchema.builder() + .possibleValue("one") + .possibleValue("two") + .possibleValue("three") + .possibleValue(null) + .unprocessedProperties(Collections.singletonMap("connect.parameters", params)) + .build(); + Schema connectSchema = new SchemaBuilder(Schema.Type.STRING).parameter(GENERALIZED_TYPE_ENUM, "") + .parameter(GENERALIZED_TYPE_ENUM + ".one", "0") + .parameter(GENERALIZED_TYPE_ENUM + ".two", "1") + .parameter(GENERALIZED_TYPE_ENUM + ".three", "2") + .parameter(GENERALIZED_TYPE_ENUM + ".", "3") + .build(); + + checkNonObjectConversion(schema, TextNode.valueOf("one"), connectSchema, "one"); + } + @Test public void testFromConnectUnion() { NumberSchema firstSchema = NumberSchema.builder() @@ -227,6 +304,31 @@ public void testFromConnectUnion() { checkNonObjectConversion(schema, ShortNode.valueOf((short) 12), connectSchema, actual); } + @Test + public void testFromConnectUnionWithGeneralizedSumTypeSupport() { + jsonSchemaData = + new JsonSchemaData(new JsonSchemaDataConfig( + Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true"))); + NumberSchema firstSchema = NumberSchema.builder() + .requiresInteger(true) + .unprocessedProperties(ImmutableMap.of("connect.type", "int8", "connect.index", 0)) + .build(); + NumberSchema secondSchema = NumberSchema.builder() + .requiresInteger(true) + .unprocessedProperties(ImmutableMap.of("connect.type", "int16", "connect.index", 1)) + .build(); + CombinedSchema schema = CombinedSchema.oneOf(ImmutableList.of(firstSchema, secondSchema)) + .build(); + SchemaBuilder builder = SchemaBuilder.struct().name("connect_union_0"); + builder.field(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "0", Schema.INT8_SCHEMA); + builder.field(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "1", Schema.INT16_SCHEMA); + builder.parameter(GENERALIZED_TYPE_UNION, "connect_union_0"); + Schema connectSchema = builder.build(); + + Struct actual = new Struct(connectSchema).put(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "0", (byte) 12); + checkNonObjectConversion(schema, ShortNode.valueOf((short) 12), connectSchema, actual); + } + @Test public void testFromConnectUnionDifferentStruct() { NumberSchema numberSchema = NumberSchema.builder() @@ -1135,6 +1237,64 @@ public void testToConnectEnum() { checkNonObjectConversion(expectedSchema, "one", schema, TextNode.valueOf("one")); } + @Test + public void testToConnectEnumWithNull() { + EnumSchema schema = EnumSchema.builder() + .possibleValue("one") + .possibleValue("two") + .possibleValue("three") + .possibleValue(null) + .build(); + Schema expectedSchema = new SchemaBuilder(Schema.Type.STRING).parameter(JSON_TYPE_ENUM, "") + .parameter(JSON_TYPE_ENUM + ".one", "one") + .parameter(JSON_TYPE_ENUM + ".two", "two") + .parameter(JSON_TYPE_ENUM + ".three", "three") + 
.parameter(JSON_TYPE_ENUM + ".", "") + .build(); + + checkNonObjectConversion(expectedSchema, "one", schema, TextNode.valueOf("one")); + } + + @Test + public void testToConnectEnumWithGeneralizedSumTypeSupport() { + jsonSchemaData = + new JsonSchemaData(new JsonSchemaDataConfig( + Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true"))); + EnumSchema schema = EnumSchema.builder() + .possibleValue("one") + .possibleValue("two") + .possibleValue("three") + .build(); + Schema expectedSchema = new SchemaBuilder(Schema.Type.STRING).parameter(GENERALIZED_TYPE_ENUM, "") + .parameter(GENERALIZED_TYPE_ENUM + ".one", "0") + .parameter(GENERALIZED_TYPE_ENUM + ".two", "1") + .parameter(GENERALIZED_TYPE_ENUM + ".three", "2") + .build(); + + checkNonObjectConversion(expectedSchema, "one", schema, TextNode.valueOf("one")); + } + + @Test + public void testToConnectEnumWithNullGeneralizedSumTypeSupport() { + jsonSchemaData = + new JsonSchemaData(new JsonSchemaDataConfig( + Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true"))); + EnumSchema schema = EnumSchema.builder() + .possibleValue("one") + .possibleValue("two") + .possibleValue("three") + .possibleValue(null) + .build(); + Schema expectedSchema = new SchemaBuilder(Schema.Type.STRING).parameter(GENERALIZED_TYPE_ENUM, "") + .parameter(GENERALIZED_TYPE_ENUM + ".one", "0") + .parameter(GENERALIZED_TYPE_ENUM + ".two", "1") + .parameter(GENERALIZED_TYPE_ENUM + ".three", "2") + .parameter(GENERALIZED_TYPE_ENUM + ".", "3") + .build(); + + checkNonObjectConversion(expectedSchema, "one", schema, TextNode.valueOf("one")); + } + @Test public void testToConnectEnumInAllOf() { StringSchema stringSchema = StringSchema.builder().build(); @@ -1261,6 +1421,31 @@ public void testToConnectUnion() { checkNonObjectConversion(expectedSchema, expected, schema, ShortNode.valueOf((short) 12)); } + @Test + public void testToConnectUnionWithGeneralizedSumTypeSupport() { + jsonSchemaData = + new JsonSchemaData(new JsonSchemaDataConfig( + Collections.singletonMap(JsonSchemaDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true"))); + NumberSchema firstSchema = NumberSchema.builder() + .requiresInteger(true) + .unprocessedProperties(Collections.singletonMap("connect.type", "int8")) + .build(); + NumberSchema secondSchema = NumberSchema.builder() + .requiresInteger(true) + .unprocessedProperties(Collections.singletonMap("connect.type", "int16")) + .build(); + CombinedSchema schema = CombinedSchema.oneOf(ImmutableList.of(firstSchema, secondSchema)) + .build(); + SchemaBuilder builder = SchemaBuilder.struct().name("connect_union_0"); + builder.field(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "0", Schema.OPTIONAL_INT8_SCHEMA); + builder.field(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "1", Schema.OPTIONAL_INT16_SCHEMA); + builder.parameter(GENERALIZED_TYPE_UNION, "connect_union_0"); + Schema expectedSchema = builder.build(); + + Struct expected = new Struct(expectedSchema).put(GENERALIZED_TYPE_UNION_FIELD_PREFIX + "0", (byte) 12); + checkNonObjectConversion(expectedSchema, expected, schema, ShortNode.valueOf((short) 12)); + } + @Test public void testToConnectUnionSecondField() { StringSchema firstSchema = StringSchema.builder() @@ -1812,6 +1997,47 @@ public void testOptionalReferencedSchema() { assertTrue(connectSchema.field("complexNode").schema().isOptional()); } + @Test + public void testOptionalObjectOrArray() { + // From 
https://stackoverflow.com/questions/36413015/json-schema-which-allows-either-an-object-or-an-array-of-those-objects + String schema = "{\n" + + " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"assetMetadata\": {\n" + + " \"anyOf\": [\n" + + " { \"$ref\": \"#/definitions/assetMetadata\" },\n" + + " {\n" + + " \"type\": \"array\",\n" + + " \"items\": { \"$ref\": \"#/definitions/assetMetadata\" }\n" + + " },\n" + + " {\n" + + " \"type\": \"null\"\n" + + " }\n" + + " ]\n" + + " }\n" + + " },\n" + + " \"definitions\": {\n" + + " \"assetMetadata\": {\n" + + " \"type\": \"object\",\n" + + " \"additionalProperties\": false,\n" + + " \"properties\": {\n" + + " \"id\": {\n" + + " \"type\": \"string\"\n" + + " },\n" + + " \"type\": {\n" + + " \"type\": \"string\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; + JsonSchema jsonSchema = new JsonSchema(schema); + JsonSchemaData jsonSchemaData = new JsonSchemaData(); + Schema connectSchema = jsonSchemaData.toConnectSchema(jsonSchema); + assertTrue(connectSchema.field("assetMetadata").schema().isOptional()); + } + @Test public void testToConnectRecursiveSchema() { JsonSchema jsonSchema = getRecursiveJsonSchema(); diff --git a/json-schema-converter/src/test/java/io/confluent/connect/json/ResourceLoader.java b/json-schema-converter/src/test/java/io/confluent/connect/json/ResourceLoader.java index bd5499afb6e..69072721a84 100644 --- a/json-schema-converter/src/test/java/io/confluent/connect/json/ResourceLoader.java +++ b/json-schema-converter/src/test/java/io/confluent/connect/json/ResourceLoader.java @@ -36,14 +36,16 @@ public ResourceLoader(String rootPath) { this.rootPath = requireNonNull(rootPath, "rootPath cannot be null"); } - public JSONObject readJSONObject(String relPath) { - InputStream stream = getStream(relPath); - return new JSONObject(new JSONTokener(stream)); + public JSONObject readJSONObject(String relPath) throws IOException { + try (InputStream stream = getStream(relPath)) { + return new JSONObject(new JSONTokener(stream)); + } } public JsonNode readJsonNode(String relPath) throws IOException { - InputStream stream = getStream(relPath); - return new ObjectMapper().readTree(stream); + try (InputStream stream = getStream(relPath)) { + return new ObjectMapper().readTree(stream); + } } public InputStream getStream(String relPath) { diff --git a/json-schema-provider/pom.xml b/json-schema-provider/pom.xml index e20a51b3b3b..c490e8ed96e 100644 --- a/json-schema-provider/pom.xml +++ b/json-schema-provider/pom.xml @@ -5,7 +5,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 @@ -32,8 +32,8 @@ ${io.confluent.schema-registry.version} - com.github.everit-org.json-schema - org.everit.json.schema + com.github.erosb + everit-json-schema com.fasterxml.jackson.datatype diff --git a/json-schema-provider/src/main/java/io/confluent/kafka/schemaregistry/json/JsonSchemaProvider.java b/json-schema-provider/src/main/java/io/confluent/kafka/schemaregistry/json/JsonSchemaProvider.java index 97ff4891284..a08ec4bae22 100644 --- a/json-schema-provider/src/main/java/io/confluent/kafka/schemaregistry/json/JsonSchemaProvider.java +++ b/json-schema-provider/src/main/java/io/confluent/kafka/schemaregistry/json/JsonSchemaProvider.java @@ -19,7 +19,6 @@ import org.slf4j.LoggerFactory; import java.util.List; -import java.util.Optional; import io.confluent.kafka.schemaregistry.AbstractSchemaProvider; import io.confluent.kafka.schemaregistry.ParsedSchema; @@ -35,19 +34,19 
@@ public String schemaType() { } @Override - public Optional parseSchema(String schemaString, - List references, - boolean isNew) { + public ParsedSchema parseSchemaOrElseThrow(String schemaString, + List references, + boolean isNew) { try { - return Optional.of(new JsonSchema( - schemaString, - references, - resolveReferences(references), - null - )); + return new JsonSchema( + schemaString, + references, + resolveReferences(references), + null + ); } catch (Exception e) { log.error("Could not parse JSON schema", e); - return Optional.empty(); + throw e; } } } diff --git a/json-schema-provider/src/test/java/io/confluent/kafka/schemaregistry/json/JsonSchemaTest.java b/json-schema-provider/src/test/java/io/confluent/kafka/schemaregistry/json/JsonSchemaTest.java index 152dabdbdfe..9387c20bc6b 100644 --- a/json-schema-provider/src/test/java/io/confluent/kafka/schemaregistry/json/JsonSchemaTest.java +++ b/json-schema-provider/src/test/java/io/confluent/kafka/schemaregistry/json/JsonSchemaTest.java @@ -22,8 +22,9 @@ import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.NullNode; import com.fasterxml.jackson.databind.node.NumericNode; -import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; +import io.confluent.kafka.schemaregistry.ParsedSchema; +import io.confluent.kafka.schemaregistry.SchemaProvider; import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference; import io.confluent.kafka.schemaregistry.json.diff.Difference; import io.confluent.kafka.schemaregistry.json.diff.SchemaDiff; @@ -36,9 +37,12 @@ import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.Optional; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -77,6 +81,12 @@ public class JsonSchemaTest { private static final JsonSchema enumSchema = new JsonSchema(enumSchemaString); + private static final String invalidSchemaString = "{\"properties\": {\n" + + " \"string\": {\"type\": \"str\"}\n" + + " }" + + " \"additionalProperties\": false\n" + + "}"; + @Test public void testPrimitiveTypesToJsonSchema() throws Exception { Object envelope = JsonSchemaUtils.toObject((String) null, createPrimitiveSchema("null")); @@ -325,6 +335,33 @@ public void testRecursiveSchema() { assertEquals(0, diff.size()); } + @Test + public void testParseSchema() { + SchemaProvider jsonSchemaProvider = new JsonSchemaProvider(); + ParsedSchema parsedSchema = jsonSchemaProvider.parseSchemaOrElseThrow(recordSchemaString, + new ArrayList<>(), false); + Optional parsedSchemaOptional = jsonSchemaProvider.parseSchema(recordSchemaString, + new ArrayList<>(), false); + + assertNotNull(parsedSchema); + assertTrue(parsedSchemaOptional.isPresent()); + } + + @Test(expected = IllegalArgumentException.class) + public void testParseSchemaThrowException() { + SchemaProvider jsonSchemaProvider = new JsonSchemaProvider(); + jsonSchemaProvider.parseSchemaOrElseThrow(invalidSchemaString, + new ArrayList<>(), false); + } + + @Test + public void testParseSchemaSuppressException() { + SchemaProvider jsonSchemaProvider = new JsonSchemaProvider(); + Optional parsedSchema = jsonSchemaProvider.parseSchema(invalidSchemaString, + new ArrayList<>(), false); + 
assertFalse(parsedSchema.isPresent()); + } + @Test public void testSchemasDifferentFieldOrder() { String schema1 = "{\n" diff --git a/json-schema-serde/pom.xml b/json-schema-serde/pom.xml index 7f894c4e01f..06ba6d7d011 100644 --- a/json-schema-serde/pom.xml +++ b/json-schema-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 diff --git a/json-schema-serde/src/main/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerde.java b/json-schema-serde/src/main/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerde.java index 6b822909b73..d7a6028f587 100644 --- a/json-schema-serde/src/main/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerde.java +++ b/json-schema-serde/src/main/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerde.java @@ -53,11 +53,19 @@ public KafkaJsonSchemaSerde(Class specificClass) { * For testing purposes only. */ public KafkaJsonSchemaSerde(final SchemaRegistryClient client) { + this(client, null); + } + + /** + * For testing purposes only. + */ + public KafkaJsonSchemaSerde(final SchemaRegistryClient client, final Class specificClass) { if (client == null) { throw new IllegalArgumentException("schema registry client must not be null"); } + this.specificClass = specificClass; inner = Serdes.serdeFrom(new KafkaJsonSchemaSerializer<>(client), - new KafkaJsonSchemaDeserializer<>(client)); + new KafkaJsonSchemaDeserializer<>(client)); } @Override @@ -103,4 +111,4 @@ private Map withSpecificClass(final Map config, boole return newConfig; } -} \ No newline at end of file +} diff --git a/json-schema-serde/src/test/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerdeTest.java b/json-schema-serde/src/test/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerdeTest.java index 74c514ca23d..a7a5de167c3 100644 --- a/json-schema-serde/src/test/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerdeTest.java +++ b/json-schema-serde/src/test/java/io/confluent/kafka/streams/serdes/json/KafkaJsonSchemaSerdeTest.java @@ -16,12 +16,15 @@ package io.confluent.kafka.streams.serdes.json; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.databind.ObjectMapper; +import io.confluent.kafka.schemaregistry.avro.AvroSchema; import org.junit.Test; import java.io.IOException; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; @@ -49,6 +52,26 @@ public class KafkaJsonSchemaSerdeTest { private static final JsonSchema recordSchema = new JsonSchema(recordSchemaString); + @JsonIgnoreProperties(ignoreUnknown = true) + private static class SomeTestRecord { + String string; + Integer number; + private SomeTestRecord() {} + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + SomeTestRecord that = (SomeTestRecord) o; + return Objects.equals(string, that.string) && + Objects.equals(number, that.number); + } + } + private Object createJsonRecord() throws IOException { String json = "{\n" + " \"null\": null,\n" @@ -60,6 +83,17 @@ private Object createJsonRecord() throws IOException { return objectMapper.readValue(json, Object.class); } + private SomeTestRecord createJsonRecordWithClass() throws IOException { + String json = "{\n" + + " \"null\": null,\n" + + " \"boolean\": true,\n" + + " 
\"number\": 12,\n" + + " \"string\": \"string\"\n" + + "}"; + + return objectMapper.readValue(json, SomeTestRecord.class); + } + private static KafkaJsonSchemaSerde createConfiguredSerdeForRecordValues() { SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient(); KafkaJsonSchemaSerde serde = new KafkaJsonSchemaSerde<>(schemaRegistryClient); @@ -69,6 +103,16 @@ private static KafkaJsonSchemaSerde createConfiguredSerdeForRecordValues return serde; } + private static KafkaJsonSchemaSerde createConfiguredSerdeForRecordValuesWithClass() { + SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient(); + KafkaJsonSchemaSerde serde = new KafkaJsonSchemaSerde<>(schemaRegistryClient, SomeTestRecord.class); + Map serdeConfig = new HashMap<>(); + serdeConfig.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "fake"); + serde.configure(serdeConfig, false); + return serde; + } + + @Test public void shouldRoundTripRecords() throws Exception { // Given @@ -105,4 +149,22 @@ public void shouldRoundTripNullRecordsToNull() { public void shouldFailWhenInstantiatedWithNullSchemaRegistryClient() { new KafkaJsonSchemaSerde<>((SchemaRegistryClient) null); } -} \ No newline at end of file + + @Test + public void shouldLetTheAbilityToDeserializeToASpecificClass() throws IOException { + // Given + KafkaJsonSchemaSerde serde = createConfiguredSerdeForRecordValuesWithClass(); + SomeTestRecord record = createJsonRecordWithClass(); + + // When + Object roundtrippedRecord = serde.deserializer().deserialize( + ANY_TOPIC, serde.serializer().serialize(ANY_TOPIC, record)); + + // Then + assertThat(roundtrippedRecord, equalTo(record)); + + // Cleanup + serde.close(); + } + +} diff --git a/json-schema-serializer/pom.xml b/json-schema-serializer/pom.xml index 986136166fb..fe96f43f568 100644 --- a/json-schema-serializer/pom.xml +++ b/json-schema-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 diff --git a/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaDeserializer.java b/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaDeserializer.java index 51a03ecfb44..7e8a3864716 100644 --- a/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaDeserializer.java +++ b/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaDeserializer.java @@ -81,6 +81,10 @@ protected KafkaJsonSchemaDeserializerConfig deserializerConfig(Properties props) return new KafkaJsonSchemaDeserializerConfig(props); } + public ObjectMapper objectMapper() { + return objectMapper; + } + /** * Deserializes the payload without including schema information for primitive types, maps, and * arrays. Just the resulting deserialized object is returned. 
diff --git a/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaSerializer.java b/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaSerializer.java index 87777c8afa5..f031f5464f7 100644 --- a/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaSerializer.java +++ b/json-schema-serializer/src/main/java/io/confluent/kafka/serializers/json/AbstractKafkaJsonSchemaSerializer.java @@ -87,6 +87,10 @@ protected KafkaJsonSchemaSerializerConfig serializerConfig(Map props) } } + public ObjectMapper objectMapper() { + return objectMapper; + } + protected byte[] serializeImpl( String subject, T object, diff --git a/json-serializer/pom.xml b/json-serializer/pom.xml index e03a9a794bf..a0cb5463eeb 100644 --- a/json-serializer/pom.xml +++ b/json-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 diff --git a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java index dc55e888589..795f48b7ef4 100644 --- a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java +++ b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonDeserializer.java @@ -65,6 +65,10 @@ private void configure(KafkaJsonDeserializerConfig config, boolean isKey) { } } + public ObjectMapper objectMapper() { + return objectMapper; + } + @Override public T deserialize(String ignored, byte[] bytes) { if (bytes == null || bytes.length == 0) { diff --git a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java index 2fe5e4634a7..1ec76f55f27 100644 --- a/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java +++ b/json-serializer/src/main/java/io/confluent/kafka/serializers/KafkaJsonSerializer.java @@ -54,6 +54,10 @@ protected void configure(KafkaJsonSerializerConfig config) { SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, !writeDatesAsIso8601); } + public ObjectMapper objectMapper() { + return objectMapper; + } + @Override public byte[] serialize(String topic, T data) { if (data == null) { diff --git a/licenses-and-notices.html b/licenses-and-notices.html index ab2a549f6aa..e0b0f36a0e9 100644 --- a/licenses-and-notices.html +++ b/licenses-and-notices.html @@ -89,19 +89,19 @@

License Report

jopt-simple-4.9 | jar | 4.9 | The MIT License
-kafka-avro-serializer-7.1.12-0 | jar | 7.1.12-0
+kafka-avro-serializer-7.2.10-0 | jar | 7.2.10-0
-kafka-clients-7.1.12-0-ccs | jar | included file
+kafka-clients-7.2.10-0-ccs | jar | included file
-kafka-connect-avro-converter-7.1.12-0 | jar | 7.1.12-0
+kafka-connect-avro-converter-7.2.10-0 | jar | 7.2.10-0
-kafka-json-serializer-7.1.12-0 | jar | 7.1.12-0
+kafka-json-serializer-7.2.10-0 | jar | 7.2.10-0
-kafka-schema-registry-7.1.12-0 | jar | 7.1.12-0
+kafka-schema-registry-7.2.10-0 | jar | 7.2.10-0
-kafka-schema-registry-client-7.1.12-0 | jar | 7.1.12-0
+kafka-schema-registry-client-7.2.10-0 | jar | 7.2.10-0
-kafka_2.11-7.1.12-0-ccs | jar | included file
+kafka_2.11-7.2.10-0-ccs | jar | included file
log4j-1.2.17 | jar | 1.2.17 | Apache 2.0
diff --git a/maven-plugin/pom.xml b/maven-plugin/pom.xml index 1f6940a1f83..097edbd61eb 100644 --- a/maven-plugin/pom.xml +++ b/maven-plugin/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 @@ -26,7 +26,7 @@ org.apache.maven maven-plugin-api - 3.6.3 + 3.8.1 org.apache.maven.plugin-tools diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java index feab9d1e5d6..6aab458dd68 100644 --- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojo.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 Confluent Inc. + * Copyright 2022 Confluent Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -36,11 +36,9 @@ import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; -import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -55,36 +53,55 @@ public class DownloadSchemaRegistryMojo extends SchemaRegistryMojo { @Parameter(required = true) List subjectPatterns = new ArrayList<>(); + @Parameter(required = false) + List versions = new ArrayList<>(); + @Parameter(required = true) File outputDirectory; @Parameter(required = false) boolean encodeSubject = true; - Map downloadSchemas(Collection subjects) + Map downloadSchemas(List subjects, List versionsToDownload) throws MojoExecutionException { Map results = new LinkedHashMap<>(); - for (String subject : subjects) { + if (versionsToDownload.size() != subjects.size()) { + throw new MojoExecutionException("Number of versions specified should " + + "be same as number of subjects"); + } + for (int i = 0; i < subjects.size(); i++) { SchemaMetadata schemaMetadata; try { - getLog().info(String.format("Downloading latest metadata for %s.", subject)); - schemaMetadata = this.client().getLatestSchemaMetadata(subject); + getLog().info(String.format("Downloading metadata " + + "for %s.for version %s", subjects.get(i), versionsToDownload.get(i))); + schemaMetadata = this.client().getLatestSchemaMetadata(subjects.get(i)); + if (!versionsToDownload.get(i).equalsIgnoreCase("latest")) { + Integer maxVersion = schemaMetadata.getVersion(); + if (maxVersion < Integer.parseInt(versionsToDownload.get(i))) { + throw new MojoExecutionException( + String.format("Max possible version " + + "for %s is %d", subjects.get(i), maxVersion)); + } else { + schemaMetadata = this.client().getSchemaMetadata(subjects.get(i), + Integer.parseInt(versionsToDownload.get(i))); + } + } Optional schema = this.client().parseSchema( schemaMetadata.getSchemaType(), schemaMetadata.getSchema(), schemaMetadata.getReferences()); if (schema.isPresent()) { - results.put(subject, schema.get()); + results.put(subjects.get(i), schema.get()); } else { throw new MojoExecutionException( - String.format("Error while parsing schema for %s", subject) + String.format("Error while parsing schema for %s", subjects.get(i)) ); } } catch (Exception ex) { throw new MojoExecutionException( - String.format("Exception thrown while downloading metadata for %s.", subject), + String.format("Exception thrown while downloading metadata for %s.", 
subjects.get(i)), ex ); } @@ -99,26 +116,7 @@ public void execute() throws MojoExecutionException, MojoFailureException { getLog().info("Plugin execution has been skipped"); return; } - - try { - getLog().debug( - String.format("Checking if '%s' exists and is not a directory.", this.outputDirectory)); - if (outputDirectory.exists() && !outputDirectory.isDirectory()) { - throw new IllegalStateException("outputDirectory must be a directory"); - } - getLog() - .debug(String.format("Checking if outputDirectory('%s') exists.", this.outputDirectory)); - if (!outputDirectory.isDirectory()) { - getLog().debug(String.format("Creating outputDirectory('%s').", this.outputDirectory)); - if (!outputDirectory.mkdirs()) { - throw new IllegalStateException( - "Could not create output directory " + this.outputDirectory); - } - } - } catch (Exception ex) { - throw new MojoExecutionException("Exception thrown while creating outputDirectory", ex); - } - + outputDirValidation(); List patterns = new ArrayList<>(); for (String subject : subjectPatterns) { @@ -133,7 +131,6 @@ public void execute() throws MojoExecutionException, MojoFailureException { ); } } - Collection allSubjects; try { getLog().info("Getting all subjects on schema registry..."); @@ -141,33 +138,44 @@ public void execute() throws MojoExecutionException, MojoFailureException { } catch (Exception ex) { throw new MojoExecutionException("Exception thrown", ex); } - getLog().info(String.format("Schema Registry has %s subject(s).", allSubjects.size())); - Set subjectsToDownload = new LinkedHashSet<>(); + List subjectsToDownload = new ArrayList<>(); + List versionsToDownload = new ArrayList<>(); + if (!versions.isEmpty()) { + if (versions.size() != subjectPatterns.size()) { + throw new IllegalStateException("versions size should be same as subjectPatterns size"); + } + } for (String subject : allSubjects) { - for (Pattern pattern : patterns) { + for (int i = 0 ; i < patterns.size() ; i++) { getLog() - .debug(String.format("Checking '%s' against pattern '%s'", subject, pattern.pattern())); - Matcher matcher = pattern.matcher(subject); + .debug(String.format("Checking '%s' against pattern '%s'", + subject, patterns.get(i).pattern())); + Matcher matcher = patterns.get(i).matcher(subject); if (matcher.matches()) { - getLog().debug(String.format("'%s' matches pattern '%s' so downloading.", subject, - pattern.pattern())); + getLog().debug(String.format("'%s' matches " + + "pattern '%s' so downloading.", subject, + patterns.get(i).pattern())); + if (versions.isEmpty()) { + versionsToDownload.add("latest"); + } else { + versionsToDownload.add(versions.get(i)); + } subjectsToDownload.add(subject); break; } } } - - Map subjectToSchema = downloadSchemas(subjectsToDownload); + Map subjectToSchema = + downloadSchemas(subjectsToDownload, versionsToDownload); for (Map.Entry kvp : subjectToSchema.entrySet()) { String subject = kvp.getKey(); String encodedSubject = encodeSubject ? 
encode(subject) : subject; String fileName = String.format("%s%s", encodedSubject, getExtension(kvp.getValue())); File outputFile = new File(this.outputDirectory, fileName); - getLog().info( String.format("Writing schema for Subject(%s) to %s.", subject, outputFile) ); @@ -191,6 +199,27 @@ public void execute() throws MojoExecutionException, MojoFailureException { } } + public void outputDirValidation() throws MojoExecutionException, MojoFailureException { + try { + getLog().debug( + String.format("Checking if '%s' exists and is not a directory.", this.outputDirectory)); + if (outputDirectory.exists() && !outputDirectory.isDirectory()) { + throw new IllegalStateException("outputDirectory must be a directory"); + } + getLog() + .debug(String.format("Checking if outputDirectory('%s') exists.", this.outputDirectory)); + if (!outputDirectory.isDirectory()) { + getLog().debug(String.format("Creating outputDirectory('%s').", this.outputDirectory)); + if (!outputDirectory.mkdirs()) { + throw new IllegalStateException( + "Could not create output directory " + this.outputDirectory); + } + } + } catch (Exception ex) { + throw new MojoExecutionException("Exception thrown while creating outputDirectory", ex); + } + } + private String getExtension(ParsedSchema parsedSchema) { if (this.schemaExtension != null) { return schemaExtension; diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/MojoUtils.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/MojoUtils.java new file mode 100644 index 00000000000..c075de3efb0 --- /dev/null +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/MojoUtils.java @@ -0,0 +1,43 @@ +/* + * Copyright 2022 Confluent Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.confluent.kafka.schemaregistry.maven; + +import io.confluent.kafka.schemaregistry.SchemaProvider; +import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider; +import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider; +import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider; +import java.io.File; +import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.util.Arrays; +import java.util.List; + +public class MojoUtils { + + public static String readFile(File file, Charset encoding) throws IOException { + byte[] encoded = Files.readAllBytes(file.toPath()); + return new String(encoded, encoding); + } + + public static List defaultSchemaProviders() { + return Arrays.asList( + new AvroSchemaProvider(), new JsonSchemaProvider(), new ProtobufSchemaProvider() + ); + } + +} diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java index 0878268cda3..013ab684c30 100644 --- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SchemaRegistryMojo.java @@ -23,19 +23,15 @@ import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import io.confluent.kafka.schemaregistry.SchemaProvider; -import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider; import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClientConfig; -import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider; -import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider; public abstract class SchemaRegistryMojo extends AbstractMojo implements Closeable { @@ -77,7 +73,7 @@ protected SchemaRegistryClient client() { } List providers = schemaProviders != null && !schemaProviders.isEmpty() ? schemaProviders() - : defaultSchemaProviders(); + : MojoUtils.defaultSchemaProviders(); this.client = new CachedSchemaRegistryClient( this.schemaRegistryUrls, 1000, @@ -99,12 +95,6 @@ private List schemaProviders() { }).collect(Collectors.toList()); } - private List defaultSchemaProviders() { - return Arrays.asList( - new AvroSchemaProvider(), new JsonSchemaProvider(), new ProtobufSchemaProvider() - ); - } - @Override public void close() throws IOException { if (client != null) { diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java new file mode 100644 index 00000000000..4eb2cdbcd0b --- /dev/null +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojo.java @@ -0,0 +1,111 @@ +/* + * Copyright 2022 Confluent Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.confluent.kafka.schemaregistry.maven; + +import io.confluent.kafka.schemaregistry.CompatibilityLevel; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; + +import java.io.IOException; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +@Mojo(name = "set-compatibility", configurator = "custom-basic") +public class SetCompatibilityMojo extends SchemaRegistryMojo { + + @Parameter(required = true) + Map compatibilityLevels = new HashMap<>(); + + public void execute() throws MojoExecutionException { + for (Map.Entry entry : compatibilityLevels.entrySet()) { + if (entry.getValue().equalsIgnoreCase("null")) { + deleteConfig(entry.getKey()); + } else { + updateConfig(entry.getKey(), CompatibilityLevel.valueOf(entry.getValue())); + } + } + } + + public void updateConfig(String subject, CompatibilityLevel compatibility) + throws MojoExecutionException { + + try { + String updatedCompatibility; + + if (subject.equalsIgnoreCase("null") || subject.equals("__GLOBAL")) { + updatedCompatibility = this.client().updateCompatibility(null, compatibility.toString()); + getLog().info("Global Compatibility set to " + + updatedCompatibility); + } else { + Collection allSubjects = this.client().getAllSubjects(); + if (!allSubjects.contains(subject)) { + throw new MojoExecutionException( + "Subject not found" + ); + } + updatedCompatibility = this.client().updateCompatibility(subject, compatibility.toString()); + getLog().info("Compatibility of " + subject + + " set to " + updatedCompatibility); + } + } catch (RestClientException | IOException e) { + e.printStackTrace(); + throw new MojoExecutionException( + "Exception thrown while updating config", + e + ); + } + + } + + public void deleteConfig(String subject) throws MojoExecutionException { + if (getLog().isDebugEnabled()) { + getLog().info("Deleting compatibility"); + } + try { + this.client().deleteCompatibility(subject); + if (subject.equalsIgnoreCase("null") || subject.equals("__GLOBAL")) { + getLog().info("Deleted global compatibility"); + } else { + getLog().info(String.format("Deleted compatibility of %s", subject)); + } + + } catch (IOException | RestClientException e) { + throw new MojoExecutionException( + "Exception thrown while deleting config", + e + ); + } + } + + public String getConfig(String subject) throws MojoExecutionException { + if (getLog().isDebugEnabled()) { + getLog().info(String.format("Getting compatibility of %s", subject)); + } + try { + return this.client().getCompatibility(subject); + } catch (IOException | RestClientException e) { + e.printStackTrace(); + throw new MojoExecutionException( + "Exception thrown while getting config", + e + ); + } + } +} \ No newline at end of file diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java 
b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java new file mode 100644 index 00000000000..5c7d6692b71 --- /dev/null +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojo.java @@ -0,0 +1,188 @@ +/* + * Copyright 2022 Confluent Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.confluent.kafka.schemaregistry.maven; + +import io.confluent.kafka.schemaregistry.CompatibilityChecker; +import io.confluent.kafka.schemaregistry.CompatibilityLevel; +import io.confluent.kafka.schemaregistry.ParsedSchema; +import io.confluent.kafka.schemaregistry.SchemaProvider; +import io.confluent.kafka.schemaregistry.avro.AvroSchema; +import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import org.apache.maven.plugin.AbstractMojo; +import org.apache.maven.plugin.MojoExecutionException; +import org.apache.maven.plugins.annotations.Mojo; +import org.apache.maven.plugins.annotations.Parameter; + +@Mojo(name = "test-local-compatibility", configurator = "custom-basic") +public class TestLocalCompatibilityMojo extends AbstractMojo { + + @Parameter(required = true) + Map schemas = new HashMap<>(); + + @Parameter(required = false) + Map schemaTypes = new HashMap<>(); + + @Parameter(required = true) + Map previousSchemaPaths = new HashMap<>(); + + @Parameter(required = true) + Map compatibilityLevels = new HashMap<>(); + + protected Optional parseSchema( + String schemaType, + String schemaString, + List references, + Map providers) throws MojoExecutionException { + + SchemaProvider schemaProvider = providers.get(schemaType.toUpperCase()); + if (schemaProvider == null) { + throw new MojoExecutionException( + String.format("Invalid schema type %s", schemaType)); + } + + return schemaProvider.parseSchema(schemaString, references); + + } + + protected ParsedSchema loadSchema(File path, String schemaType, + Map schemaProviders) throws MojoExecutionException { + + String schemaString; + try { + schemaString = MojoUtils.readFile(path, StandardCharsets.UTF_8); + } catch (IOException e) { + throw new MojoExecutionException( + String.format("File cannot be found at: %s", path)); + } + List references = new ArrayList<>(); + Optional schema = parseSchema(schemaType, schemaString, + references, schemaProviders); + + if (schema.isPresent()) { + return schema.get(); + } + + throw new MojoExecutionException(String.format("Unable to parse schema from %s " + + "with schema type as %s", path, schemaType)); + } + + protected ArrayList getFiles(File previousSchemaPath) { + + ArrayList previousSchemaFiles = new ArrayList<>(); + + getLog().debug(String.format("Loading File %s", previousSchemaPath)); + // Add all 
files inside a directory, inside directories are skipped + if (previousSchemaPath.isDirectory()) { + + File[] fileList = previousSchemaPath.listFiles(); + if (fileList == null) { + return previousSchemaFiles; + } + + for (File f : fileList) { + if (!f.isDirectory()) { + previousSchemaFiles.add(f); + } + } + + } else { + previousSchemaFiles.add(previousSchemaPath); + } + + return previousSchemaFiles; + } + + + protected void testSchema(String key, Map schemaProviders) + throws MojoExecutionException { + + File schemaPath = schemas.get(key); + + if (!previousSchemaPaths.containsKey(key)) { + throw new MojoExecutionException(String.format("Previous schemas not found for %s", key)); + } + + File previousSchemaPath = previousSchemaPaths.get(key); + String schemaType = schemaTypes.getOrDefault(key, AvroSchema.TYPE); + + if (!compatibilityLevels.containsKey(key)) { + throw new MojoExecutionException(String.format("Compatibility Level not found for %s", key)); + } + + CompatibilityLevel compatibilityLevel = compatibilityLevels.get(key); + + ArrayList previousSchemaFiles = getFiles(previousSchemaPath); + + if (previousSchemaFiles.size() > 1 + && (compatibilityLevel == CompatibilityLevel.BACKWARD + || compatibilityLevel == CompatibilityLevel.FORWARD + || compatibilityLevel == CompatibilityLevel.FULL)) { + + throw new MojoExecutionException(String.format("Provide exactly one file for %s check " + + "for schema %s", compatibilityLevel.name.toLowerCase(), schemaPath)); + + } + + ParsedSchema schema = loadSchema(schemaPath, schemaType, schemaProviders); + ArrayList previousSchemas = new ArrayList<>(); + + for (File previousSchemaFile : previousSchemaFiles) { + previousSchemas.add(loadSchema(previousSchemaFile, schemaType, schemaProviders)); + } + + CompatibilityChecker checker = CompatibilityChecker.checker(compatibilityLevel); + List errorMessages = checker.isCompatible(schema, previousSchemas); + + boolean success = errorMessages.isEmpty(); + + if (success) { + getLog().info(String.format("Schema is %s compatible with previous schemas", + compatibilityLevel.name.toLowerCase())); + } else { + String errorLog = String.format("Schema is not %s compatible with previous schemas. 
", + compatibilityLevel.name.toLowerCase()) + errorMessages.get(0); + throw new MojoExecutionException(errorLog); + } + + } + + public void execute() throws MojoExecutionException { + + List providers = MojoUtils.defaultSchemaProviders(); + Map schemaProviders = providers.stream() + .collect(Collectors.toMap(SchemaProvider::schemaType, p -> p)); + + Set keys = schemas.keySet(); + + for (String key : keys) { + testSchema(key, schemaProviders); + } + + } + +} diff --git a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java index ad69a94c1a1..e0576d6ed40 100644 --- a/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java +++ b/maven-plugin/src/main/java/io/confluent/kafka/schemaregistry/maven/UploadSchemaRegistryMojo.java @@ -26,9 +26,7 @@ import java.io.File; import java.io.IOException; -import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -108,7 +106,7 @@ private void processSubject(String key, boolean isReference) { } return; } - String schemaString = readFile(file, StandardCharsets.UTF_8); + String schemaString = MojoUtils.readFile(file, StandardCharsets.UTF_8); Optional schema = client().parseSchema( schemaType, schemaString, schemaReferences); if (schema.isPresent()) { @@ -186,9 +184,5 @@ private List getReferences(String subject, Map return result; } - private static String readFile(File file, Charset encoding) throws IOException { - byte[] encoded = Files.readAllBytes(file.toPath()); - return new String(encoded, encoding); - } } diff --git a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java index 6b2445a9513..6e6f523a2b0 100644 --- a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java +++ b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/DownloadSchemaRegistryMojoTest.java @@ -24,11 +24,14 @@ import org.junit.Before; import org.junit.Test; +import javax.security.auth.Subject; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.List; +import java.util.regex.Matcher; public class DownloadSchemaRegistryMojoTest extends SchemaRegistryTest { DownloadSchemaRegistryMojo mojo; diff --git a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java new file mode 100644 index 00000000000..2e2907a4ced --- /dev/null +++ b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/SetCompatibilityMojoTest.java @@ -0,0 +1,53 @@ +package io.confluent.kafka.schemaregistry.maven; + +import io.confluent.kafka.schemaregistry.CompatibilityLevel; +import io.confluent.kafka.schemaregistry.avro.AvroSchema; +import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; +import org.apache.avro.Schema; +import org.apache.maven.plugin.MojoExecutionException; +import org.junit.Before; +import org.junit.Test; + +import 
java.io.IOException; + +import static org.junit.Assert.assertThrows; + +public class SetCompatibilityMojoTest extends SchemaRegistryTest{ + SetCompatibilityMojo mojo; + + @Before + public void createMojoAndFiles() { + this.mojo = new SetCompatibilityMojo(); + this.mojo.client(new MockSchemaRegistryClient()); + } + + @Test + public void specificSubjects() throws IOException, RestClientException, MojoExecutionException { + String keySubject = String.format("TestSubject-key"); + Schema keySchema = Schema.create(Schema.Type.STRING); + + this.mojo.client().register(keySubject, new AvroSchema(keySchema)); + // Compatibility not set till now and hence should throw error + assertThrows("Checking that compatibility hasn't been set", + RestClientException.class, () -> this.mojo.client().getCompatibility(keySubject)); + + // Setting compatibility & checking if it matches + this.mojo.compatibilityLevels.put(keySubject,"BACKWARD"); + this.mojo.execute(); + + assert(this.mojo.getConfig(keySubject).equals("BACKWARD")); + + //Updating to a different compatibility + this.mojo.compatibilityLevels.replace(keySubject, "BACKWARD", "FULL"); + this.mojo.execute(); + + assert(this.mojo.getConfig(keySubject).equals("FULL")); + + //Checking for Global Compatibility + this.mojo.compatibilityLevels.put("__GLOBAL", "BACKWARD_TRANSITIVE"); + this.mojo.execute(); + assert(this.mojo.getConfig(null).equals("BACKWARD_TRANSITIVE")); + + } +} diff --git a/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java new file mode 100644 index 00000000000..ab76aac682b --- /dev/null +++ b/maven-plugin/src/test/java/io/confluent/kafka/schemaregistry/maven/TestLocalCompatibilityMojoTest.java @@ -0,0 +1,387 @@ +/* + * Copyright 2022 Confluent Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package io.confluent.kafka.schemaregistry.maven; + +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import io.confluent.kafka.schemaregistry.CompatibilityLevel; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import org.apache.maven.plugin.MojoExecutionException; +import org.junit.Before; +import org.junit.Test; + +/* + * The tests for avro are taken from AvroCompatibilityTest + */ +public class TestLocalCompatibilityMojoTest extends SchemaRegistryTest{ + TestLocalCompatibilityMojo mojo; + + final String schema1 = "schema1"; + final String schema2 = "schema2"; + final String schema3 = "schema3"; + final String schema4 = "schema4"; + final String schema6 = "schema6"; + final String schema7 = "schema7"; + final String schema8 = "schema8"; + final String schema10 = "schema10"; + final String schema11 = "schema11"; + final String schema12 = "schema12"; + final String schema13 = "schema13"; + final String schema14 = "schema14"; + + String fileExtension; + + @Before + public void createMojoAndFiles() { + this.mojo = new TestLocalCompatibilityMojo(); + makeFiles(); + + for(int i=1;i<=9;i++) { + this.mojo.schemaTypes.put("schema"+i, "AVRO"); + } + + this.mojo.schemaTypes.put(schema10, "JSON"); + this.mojo.schemaTypes.put(schema13, "JSON"); + this.mojo.schemaTypes.put(schema14, "JSON"); + + } + + private void makeFile(String schemaString, String name) { + + try (FileWriter writer = new FileWriter(this.tempDirectory+"/"+name)) { + writer.write(schemaString); + } catch (IOException e) { + e.printStackTrace(); + } + + if (name.contains("1.avsc") || name.contains("2.avsc")) { + + try (FileWriter writer = new FileWriter(this.tempDirectory+"/schema12Folder/"+name)) { + writer.write(schemaString); + } catch (IOException e) { + e.printStackTrace(); + } + + } + + } + + private void makeFiles(){ + + File newFolder = new File(this.tempDirectory.toString() + "/schema12Folder"); + if( newFolder.mkdir()) { + System.out.println("New Folder avro created successfully."); + } + + String schemaString1 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"}]}"; + makeFile(schemaString1, "schema1.avsc"); + + String schemaString2 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"}," + + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"}]}"; + makeFile(schemaString2, "schema2.avsc"); + + String schemaString3 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"}," + + " {\"type\":\"string\",\"name\":\"f2\"}]}"; + makeFile(schemaString3, "schema3.avsc"); + + String schemaString4 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1_new\", \"aliases\": [\"f1\"]}]}"; + makeFile(schemaString4, "schema4.avsc"); + + String schemaString6 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":[\"null\", \"string\"],\"name\":\"f1\"," + + " \"doc\":\"doc of f1\"}]}"; + makeFile(schemaString6, "schema6.avsc"); + + String schemaString7 = "{\"type\":\"record\"," + + "\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":[\"null\", \"string\", \"int\"],\"name\":\"f1\"," + + " \"doc\":\"doc of f1\"}]}"; + makeFile(schemaString7, "schema7.avsc"); + + String schemaString8 = "{\"type\":\"record\"," + + 
"\"name\":\"myrecord\"," + + "\"fields\":" + + "[{\"type\":\"string\",\"name\":\"f1\"}," + + " {\"type\":\"string\",\"name\":\"f2\", \"default\": \"foo\"}," + + " {\"type\":\"string\",\"name\":\"f3\", \"default\": \"bar\"}]}"; + makeFile(schemaString8, "schema8.avsc"); + + String schemaString10 = "{\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"foo\": { \"type\": \"string\" },\n" + + " \"bar\": { \"type\": \"string\" }\n" + + " }\n" + + "}"; + makeFile(schemaString10, "schema10.json"); + + String schemaString11 = "{\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"foo\": { \"type\": \"string\" },\n" + + " \"bar\": { \"type\": \"string\" }\n" + + " },\n" + + " \"additionalProperties\": false\n" + + "}"; + makeFile(schemaString11, "schema11.json"); + + String schemaString12 = "{\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"foo\": { \"type\": \"string\" },\n" + + " \"bar\": { \"type\": \"string\" }\n" + + " },\n" + + " \"additionalProperties\": { \"type\": \"string\" }\n" + + "}"; + + makeFile(schemaString12, "schema12.json"); + + String schemaString13 = "{\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"foo\": { \"type\": \"string\" },\n" + + " \"bar\": { \"type\": \"string\" },\n" + + " \"zap\": { \"type\": \"string\" }\n" + + " },\n" + + " \"additionalProperties\": { \"type\": \"string\" }\n" + + "}"; + + makeFile(schemaString13, "schema13.json"); + + String schemaString14 = "{\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"foo\": { \"type\": \"string\" },\n" + + " \"bar\": { \"type\": \"string\" },\n" + + " \"zap\": { \n" + + " \"oneOf\": [ { \"type\": \"string\" }, { \"type\": \"integer\" } ] \n" + + " }\n" + + " },\n" + + " \"additionalProperties\": { \"type\": \"string\" }\n" + + "}"; + + makeFile(schemaString14, "schema14.json"); + + } + + + private void setMojo(String schema, String previousSchemas){ + + this.mojo.schemas = Collections.singletonMap(schema, new File(this.tempDirectory + "/" + schema + fileExtension)); + this.mojo.previousSchemaPaths = new HashMap<>(); + + File temp = new File(this.tempDirectory + "/" + previousSchemas); + if(temp.isDirectory()) + this.mojo.previousSchemaPaths.put(schema, new File(this.tempDirectory + "/" + previousSchemas)); + else + this.mojo.previousSchemaPaths.put(schema, new File(this.tempDirectory + "/" + previousSchemas + fileExtension)); + + } + + private boolean isCompatible(String schema, String previousSchemas, CompatibilityLevel compatibilityLevel) + throws MojoExecutionException { + + setMojo(schema, previousSchemas); + this.mojo.compatibilityLevels.put(schema, compatibilityLevel); + this.mojo.execute(); + return true; + + } + + /* + * Backward compatibility: A new schema is backward compatible if it can be used to read the data + * written in the previous schema. 
+ */ + @Test + public void testBasicBackwardsCompatibility() throws MojoExecutionException { + + fileExtension = ".avsc"; + + assertTrue("adding a field with default is a backward compatible change", + isCompatible(schema2, (schema1), CompatibilityLevel.BACKWARD)); + assertThrows("adding a field w/o default is not a backward compatible change", + MojoExecutionException.class, () -> isCompatible(schema3, (schema1), CompatibilityLevel.BACKWARD)); + assertTrue("changing field name with alias is a backward compatible change", + isCompatible(schema4, (schema1), CompatibilityLevel.BACKWARD)); + assertTrue("evolving a field type to a union is a backward compatible change", + isCompatible(schema6, (schema1), CompatibilityLevel.BACKWARD)); + assertThrows("removing a type from a union is not a backward compatible change", + MojoExecutionException.class, () -> isCompatible(schema1, (schema6), CompatibilityLevel.BACKWARD)); + assertTrue("adding a new type in union is a backward compatible change", + isCompatible(schema7, (schema6), CompatibilityLevel.BACKWARD)); + assertThrows("removing a type from a union is not a backward compatible change", + MojoExecutionException.class, () -> isCompatible(schema6, (schema7), CompatibilityLevel.BACKWARD)); + + + this.mojo.schemaTypes.put(schema10, "JSON"); + this.mojo.schemaTypes.put(schema13, "JSON"); + this.mojo.schemaTypes.put(schema14, "JSON"); + + fileExtension = ".json"; + assertTrue("setting additional properties to true from false is a backward compatible change", + isCompatible(schema10, schema11, CompatibilityLevel.BACKWARD)); + + assertTrue("adding property of string type (same as additional properties type) is " + + "a backward compatible change", isCompatible(schema13, schema12, CompatibilityLevel.BACKWARD)); + + assertTrue("adding property of string or int type (string is additional properties type) is " + + "a backward compatible change", isCompatible(schema14, schema12, CompatibilityLevel.BACKWARD)); + + } + + @Test + public void testBasicBackwardsTransitiveCompatibility() throws MojoExecutionException { + + fileExtension = ".avsc"; + + // 1 == 2, 2 == 3, 3 != 1 + assertTrue("adding a field with default is a backward compatible change", + isCompatible(schema2, (schema1), CompatibilityLevel.BACKWARD_TRANSITIVE)); + assertTrue("removing a default is a compatible change, but not transitively", + isCompatible(schema3, (schema2), CompatibilityLevel.BACKWARD_TRANSITIVE)); + + // Not compatible throws error + assertThrows("removing a default is not a transitively compatible change", + MojoExecutionException.class, () ->isCompatible(schema3, "schema12Folder", CompatibilityLevel.BACKWARD_TRANSITIVE)); + + assertTrue("Checking if schema8 is backward compatible with schema1 and schema2 present in avro folder" + , isCompatible(schema8, "schema12Folder", CompatibilityLevel.BACKWARD_TRANSITIVE )); + + + } + + /* + * Forward compatibility: A new schema is forward compatible if the previous schema can read data written in this + * schema. 
+ */ + @Test + public void testBasicForwardsCompatibility() throws MojoExecutionException { + + fileExtension = ".avsc"; + + assertTrue("adding a field is a forward compatible change", + isCompatible(schema2, (schema1), CompatibilityLevel.FORWARD)); + assertTrue("adding a field is a forward compatible change", + isCompatible(schema3, (schema1), CompatibilityLevel.FORWARD)); + assertTrue("adding a field is a forward compatible change", + isCompatible(schema3, (schema2), CompatibilityLevel.FORWARD)); + assertTrue("adding a field is a forward compatible change", + isCompatible(schema2, (schema3), CompatibilityLevel.FORWARD)); + + fileExtension = ".avsc"; + + // Only schema 2 is checked + assertThrows( MojoExecutionException.class, () -> + isCompatible(schema1, "schema12Folder", CompatibilityLevel.FORWARD)); + + fileExtension = ".json"; + this.mojo.schemaTypes.put(schema11, "JSON"); + this.mojo.schemaTypes.put(schema12, "JSON"); + this.mojo.schemaTypes.put(schema13, "JSON"); + + assertTrue("setting additional properties to false from true is a forward compatible change", + isCompatible(schema11, schema10, CompatibilityLevel.FORWARD)); + + assertTrue("removing property of string type (same as additional properties type)" + + " is a backward compatible change", isCompatible(schema13, + schema12, CompatibilityLevel.FORWARD)); + + assertTrue("removing property of string or int type (string is additional properties type) is " + + "a backward compatible change", isCompatible(schema12, + schema14, CompatibilityLevel.FORWARD)); + + } + + /* + * Forward transitive compatibility: A new schema is forward compatible if all previous schemas can read data written + * in this schema. + */ + @Test + public void testBasicForwardsTransitiveCompatibility() throws MojoExecutionException { + + fileExtension = ".avsc"; + + // 1 == 2, 2 == 3, 3 != 1 + assertTrue("adding default to a field is a compatible change", + isCompatible(schema2, (schema3), CompatibilityLevel.FORWARD_TRANSITIVE)); + assertTrue("removing a field with a default is a compatible change", + isCompatible(schema1, (schema2), CompatibilityLevel.FORWARD_TRANSITIVE)); + } + + /* + * Full compatibility: A new schema is fully compatible if it’s both backward and forward compatible. + */ + @Test + public void testBasicFullCompatibility() throws MojoExecutionException { + + fileExtension = ".avsc"; + + assertTrue("adding a field with default is a backward and a forward compatible change", + isCompatible(schema2, (schema1), CompatibilityLevel.FULL)); + + // Throws error, provide exactly one file for checking full compatibility + assertThrows(MojoExecutionException.class, () -> + isCompatible(schema3, "schema12Folder", CompatibilityLevel.FULL)); + + } + + /* + * Full transitive compatibility: A new schema is fully compatible if it’s both transitively backward + * and transitively forward compatible with the entire schema history. 
+ */ + @Test + public void testBasicFullTransitiveCompatibility() throws MojoExecutionException { + + fileExtension = ".avsc"; + + assertTrue("iteratively adding fields with defaults is a compatible change", + isCompatible(schema8, "schema12Folder", CompatibilityLevel.FULL_TRANSITIVE)); + assertTrue("adding default to a field is a compatible change", + isCompatible(schema2, (schema3), CompatibilityLevel.FULL_TRANSITIVE)); + assertTrue("removing a field with a default is a compatible change", + isCompatible(schema1, (schema2), CompatibilityLevel.FULL_TRANSITIVE)); + + assertTrue("adding a field with default is a compatible change", + isCompatible(schema2, (schema1), CompatibilityLevel.FULL_TRANSITIVE)); + assertTrue("removing a default from a field compatible change", + isCompatible(schema3, (schema2), CompatibilityLevel.FULL_TRANSITIVE)); + + assertThrows( "transitively adding a field without a default is not a compatible change", + MojoExecutionException.class, () -> isCompatible(schema3, "schema12Folder", CompatibilityLevel.FULL_TRANSITIVE)); + + } +} \ No newline at end of file diff --git a/package-kafka-serde-tools/pom.xml b/package-kafka-serde-tools/pom.xml index ea3047a22b7..3b42510945c 100644 --- a/package-kafka-serde-tools/pom.xml +++ b/package-kafka-serde-tools/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 @@ -93,6 +93,12 @@ org.apache.kafka connect-runtime provided + + + org.slf4j + slf4j-log4j12 + + org.apache.kafka diff --git a/package-schema-registry/pom.xml b/package-schema-registry/pom.xml index e6355857364..8cca8a5124c 100644 --- a/package-schema-registry/pom.xml +++ b/package-schema-registry/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 kafka-schema-registry-package @@ -20,16 +20,10 @@ org.slf4j slf4j-reload4j compile - - - org.apache.logging.log4j - log4j-api - - - org.apache.logging.log4j - log4j-core - - + + + io.confluent + logredactor io.confluent diff --git a/pom.xml b/pom.xml index 3d7beafb38d..2dfe552d513 100644 --- a/pom.xml +++ b/pom.xml @@ -7,13 +7,13 @@ io.confluent rest-utils-parent - [7.1.12-0, 7.1.13-0) + [7.2.10-0, 7.2.11-0) kafka-schema-registry-parent pom kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 Confluent, Inc. 
http://confluent.io @@ -50,6 +50,7 @@ schema-serializer avro-serializer json-serializer + schema-converter avro-data avro-converter package-schema-registry @@ -84,7 +85,7 @@ 3.4.0 4.9.7 2.1.10 - 7.1.12-0 + 7.2.10-0 1.26.0 1.77 @@ -191,8 +192,8 @@ ${jackson.version} - com.github.everit-org.json-schema - org.everit.json.schema + com.github.erosb + everit-json-schema ${json-schema.version} diff --git a/protobuf-converter/pom.xml b/protobuf-converter/pom.xml index 10c3b2b7650..9695f62ab5f 100644 --- a/protobuf-converter/pom.xml +++ b/protobuf-converter/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 @@ -60,6 +60,11 @@ org.jetbrains.kotlin kotlin-stdlib + + io.confluent + kafka-schema-converter + ${io.confluent.schema-registry.version} + io.confluent kafka-schema-serializer diff --git a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java index 9cb3c377c29..012c93e9f9f 100644 --- a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java +++ b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufData.java @@ -33,6 +33,8 @@ import com.google.protobuf.Message; import com.google.protobuf.StringValue; import com.google.protobuf.util.Timestamps; +import io.confluent.connect.schema.ConnectEnum; +import io.confluent.connect.schema.ConnectUnion; import io.confluent.kafka.schemaregistry.utils.BoundedConcurrentHashMap; import io.confluent.protobuf.MetaProto; import io.confluent.protobuf.MetaProto.Meta; @@ -122,6 +124,9 @@ public class ProtobufData { public static final String CONNECT_TYPE_INT8 = "int8"; public static final String CONNECT_TYPE_INT16 = "int16"; + public static final String GENERALIZED_TYPE_UNION = ConnectUnion.LOGICAL_PARAMETER; + public static final String GENERALIZED_TYPE_ENUM = ConnectEnum.LOGICAL_PARAMETER; + private static final long MILLIS_PER_DAY = 24 * 60 * 60 * 1000; private static final int MILLIS_PER_NANO = 1_000_000; private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); @@ -286,8 +291,10 @@ public class ProtobufData { private final Map fromConnectSchemaCache; private final Map, Schema> toConnectSchemaCache; + private boolean generalizedSumTypeSupport; private boolean enhancedSchemaSupport; private boolean scrubInvalidNames; + private boolean useIntForEnums; private boolean useOptionalForNullables; private boolean supportOptionalForProto2; private boolean useWrapperForNullables; @@ -307,8 +314,10 @@ public ProtobufData(int cacheSize) { public ProtobufData(ProtobufDataConfig protobufDataConfig) { fromConnectSchemaCache = new BoundedConcurrentHashMap<>(protobufDataConfig.schemaCacheSize()); toConnectSchemaCache = new BoundedConcurrentHashMap<>(protobufDataConfig.schemaCacheSize()); + this.generalizedSumTypeSupport = protobufDataConfig.isGeneralizedSumTypeSupportDefault(); this.enhancedSchemaSupport = protobufDataConfig.isEnhancedProtobufSchemaSupport(); this.scrubInvalidNames = protobufDataConfig.isScrubInvalidNames(); + this.useIntForEnums = protobufDataConfig.useIntForEnums(); this.useOptionalForNullables = protobufDataConfig.useOptionalForNullables(); this.supportOptionalForProto2 = protobufDataConfig.supportOptionalForProto2(); this.useWrapperForNullables = protobufDataConfig.useWrapperForNullables(); @@ -370,6 +379,10 @@ private Object fromConnectData( case INT16: case INT32: { final int intValue = ((Number) value).intValue(); // Check for correct type + if 
(schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) { + String enumType = schema.parameters().get(PROTOBUF_TYPE_ENUM); + return protobufSchema.getEnumValue(scope + enumType, intValue); + } return isWrapper ? Int32Value.newBuilder().setValue(intValue).build() : intValue; } @@ -402,9 +415,13 @@ case STRING: { final String stringValue = (String) value; // Check for correct type - if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) { - String enumType = schema.parameters().get(PROTOBUF_TYPE_ENUM); - String tag = schema.parameters().get(PROTOBUF_TYPE_ENUM_PREFIX + stringValue); + if (schema.parameters() != null + && (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM) + || schema.parameters().containsKey(PROTOBUF_TYPE_ENUM))) { + String paramName = generalizedSumTypeSupport + ? GENERALIZED_TYPE_ENUM : PROTOBUF_TYPE_ENUM; + String enumType = schema.parameters().get(paramName); + String tag = schema.parameters().get(paramName + "." + stringValue); if (tag != null) { return protobufSchema.getEnumValue(scope + enumType, Integer.parseInt(tag)); } @@ -469,10 +486,9 @@ if (!struct.schema().equals(schema)) { throw new DataException("Mismatching struct schema"); } - String structName = schema.name(); //This handles the inverting of a union which is held as a struct, where each field is // one of the union types. - if (structName != null && structName.startsWith(PROTOBUF_TYPE_UNION_PREFIX)) { + if (isUnionSchema(schema)) { for (Field field : schema.fields()) { Object object = struct.get(field); if (object != null) { @@ -709,6 +725,7 @@ private MessageDefinition messageDefinitionFromConnectSchema( if (fieldDef != null) { boolean isProto3Optional = "optional".equals(fieldDef.getLabel()); if (isProto3Optional) { + // Add a synthetic oneof MessageDefinition.OneofBuilder oneofBuilder = message.addOneof("_" + fieldDef.getName()); oneofBuilder.addField( true, @@ -791,9 +808,11 @@ private FieldDefinition fieldDefinitionFromConnectSchema( Object defaultVal = null; if (fieldSchema.type() == Schema.Type.STRUCT) { String fieldSchemaName = fieldSchema.name(); - if (fieldSchemaName != null && fieldSchemaName.startsWith(PROTOBUF_TYPE_UNION_PREFIX)) { - String unionName = - getUnqualifiedName(ctx, fieldSchemaName.substring(PROTOBUF_TYPE_UNION_PREFIX.length())); + if (isUnionSchema(fieldSchema)) { + String unionName = generalizedSumTypeSupport + ? 
fieldSchema.parameters().get(GENERALIZED_TYPE_UNION) + : getUnqualifiedName( + ctx, fieldSchemaName.substring(PROTOBUF_TYPE_UNION_PREFIX.length())); oneofDefinitionFromConnectSchema(ctx, schema, message, fieldSchema, unionName); return null; } else { @@ -810,8 +829,9 @@ private FieldDefinition fieldDefinitionFromConnectSchema( } else if (fieldSchema.type() == Schema.Type.MAP) { message.addMessageDefinition( mapDefinitionFromConnectSchema(ctx, schema, type, fieldSchema)); - } else if (fieldSchema.parameters() != null && fieldSchema.parameters() - .containsKey(PROTOBUF_TYPE_ENUM)) { + } else if (fieldSchema.parameters() != null + && (fieldSchema.parameters().containsKey(GENERALIZED_TYPE_ENUM) + || fieldSchema.parameters().containsKey(PROTOBUF_TYPE_ENUM))) { String enumName = getUnqualifiedName(ctx, fieldSchema.name()); if (!message.containsEnum(enumName)) { message.addEnumDefinition(enumDefinitionFromConnectSchema(ctx, schema, fieldSchema)); @@ -998,15 +1018,16 @@ private EnumDefinition enumDefinitionFromConnectSchema( Schema enumElem ) { String enumName = getUnqualifiedName(ctx, enumElem.name()); - EnumDefinition.Builder enumer = EnumDefinition.newBuilder(enumName); + EnumDefinition.Builder enumBuilder = EnumDefinition.newBuilder(enumName); + String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : PROTOBUF_TYPE_ENUM; for (Map.Entry entry : enumElem.parameters().entrySet()) { - if (entry.getKey().startsWith(PROTOBUF_TYPE_ENUM_PREFIX)) { - String name = entry.getKey().substring(PROTOBUF_TYPE_ENUM_PREFIX.length()); + if (entry.getKey().startsWith(paramName + ".")) { + String name = entry.getKey().substring(paramName.length() + 1); int tag = Integer.parseInt(entry.getValue()); - enumer.addValue(name, tag); + enumBuilder.addValue(name, tag); } } - return enumer.build(); + return enumBuilder.build(); } private String dataTypeFromConnectSchema( @@ -1041,6 +1062,9 @@ private String dataTypeFromConnectSchema( return useWrapperForNullables && schema.isOptional() ? PROTOBUF_INT32_WRAPPER_TYPE : FieldDescriptor.Type.INT32.toString().toLowerCase(); case INT32: + if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) { + return schema.parameters().get(PROTOBUF_TYPE_ENUM); + } defaultType = FieldDescriptor.Type.INT32.toString().toLowerCase(); if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_PROP)) { defaultType = schema.parameters().get(PROTOBUF_TYPE_PROP); @@ -1077,8 +1101,12 @@ private String dataTypeFromConnectSchema( return useWrapperForNullables && schema.isOptional() ? PROTOBUF_BOOL_WRAPPER_TYPE : FieldDescriptor.Type.BOOL.toString().toLowerCase(); case STRING: - if (schema.parameters() != null && schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) { - return schema.parameters().get(PROTOBUF_TYPE_ENUM); + if (schema.parameters() != null) { + if (schema.parameters().containsKey(GENERALIZED_TYPE_ENUM)) { + return schema.parameters().get(GENERALIZED_TYPE_ENUM); + } else if (schema.parameters().containsKey(PROTOBUF_TYPE_ENUM)) { + return schema.parameters().get(PROTOBUF_TYPE_ENUM); + } } return useWrapperForNullables && schema.isOptional() ? 
PROTOBUF_STRING_WRAPPER_TYPE : FieldDescriptor.Type.STRING.toString().toLowerCase(); @@ -1118,6 +1146,11 @@ private boolean isTimestampSchema(Schema schema) { return Timestamp.LOGICAL_NAME.equals(schema.name()); } + private static boolean isUnionSchema(Schema schema) { + return (schema.name() != null && schema.name().startsWith(PROTOBUF_TYPE_UNION)) + || ConnectUnion.isUnion(schema); + } + public SchemaAndValue toConnectData(ProtobufSchema protobufSchema, Message message) { if (message == null) { return SchemaAndValue.NULL; @@ -1160,7 +1193,13 @@ protected Object toConnectData(Schema schema, Object value) { if (value instanceof Message) { value = getWrappedValue((Message) value); } - converted = ((Number) value).intValue(); + if (value instanceof Number) { + converted = ((Number) value).intValue(); + } else if (value instanceof Enum) { + converted = ((Enum) value).ordinal(); + } else if (value instanceof EnumValueDescriptor) { + converted = ((EnumValueDescriptor) value).getNumber(); + } break; case INT64: if (value instanceof Message) { @@ -1295,8 +1334,8 @@ private void setUnionField( OneofDescriptor oneOfDescriptor, FieldDescriptor fieldDescriptor ) { - String unionName = oneOfDescriptor.getName() + "_" + oneOfDescriptor.getIndex(); - Field unionField = schema.field(unionName); + String unionFieldName = unionFieldName(oneOfDescriptor); + Field unionField = schema.field(unionFieldName); Schema unionSchema = unionField.schema(); Struct union = new Struct(unionSchema); @@ -1308,6 +1347,10 @@ private void setUnionField( result.put(unionField, union); } + private String unionFieldName(OneofDescriptor oneofDescriptor) { + return oneofDescriptor.getName() + "_" + oneofDescriptor.getIndex(); + } + private void setStructField( Schema schema, Message message, @@ -1366,8 +1409,8 @@ private SchemaBuilder toConnectSchema( builder.name(name); List oneOfDescriptors = descriptor.getRealOneofs(); for (OneofDescriptor oneOfDescriptor : oneOfDescriptors) { - String unionName = oneOfDescriptor.getName() + "_" + oneOfDescriptor.getIndex(); - builder.field(unionName, toConnectSchema(ctx, oneOfDescriptor)); + String unionFieldName = unionFieldName(oneOfDescriptor); + builder.field(unionFieldName, toConnectSchema(ctx, oneOfDescriptor)); } List fieldDescriptors = descriptor.getFields(); for (FieldDescriptor fieldDescriptor : fieldDescriptors) { @@ -1389,8 +1432,14 @@ private SchemaBuilder toConnectSchema( private Schema toConnectSchema(ToConnectContext ctx, OneofDescriptor descriptor) { SchemaBuilder builder = SchemaBuilder.struct(); - String name = enhancedSchemaSupport ? descriptor.getFullName() : descriptor.getName(); - builder.name(PROTOBUF_TYPE_UNION_PREFIX + name); + if (generalizedSumTypeSupport) { + String name = descriptor.getName(); + builder.name(name); + builder.parameter(GENERALIZED_TYPE_UNION, name); + } else { + String name = enhancedSchemaSupport ? descriptor.getFullName() : descriptor.getName(); + builder.name(PROTOBUF_TYPE_UNION_PREFIX + name); + } List fieldDescriptors = descriptor.getFields(); for (FieldDescriptor fieldDescriptor : fieldDescriptors) { builder.field(fieldDescriptor.getName(), toConnectSchema(ctx, fieldDescriptor)); @@ -1465,16 +1514,17 @@ private Schema toConnectSchema(ToConnectContext ctx, FieldDescriptor descriptor) break; case ENUM: - builder = SchemaBuilder.string(); + builder = useIntForEnums ? SchemaBuilder.int32() : SchemaBuilder.string(); EnumDescriptor enumDescriptor = descriptor.getEnumType(); String name = enhancedSchemaSupport ? 
enumDescriptor.getFullName() : enumDescriptor.getName(); builder.name(name); - builder.parameter(PROTOBUF_TYPE_ENUM, enumDescriptor.getName()); + String paramName = generalizedSumTypeSupport ? GENERALIZED_TYPE_ENUM : PROTOBUF_TYPE_ENUM; + builder.parameter(paramName, enumDescriptor.getName()); for (EnumValueDescriptor enumValueDesc : enumDescriptor.getValues()) { String enumSymbol = enumValueDesc.getName(); String enumTag = String.valueOf(enumValueDesc.getNumber()); - builder.parameter(PROTOBUF_TYPE_ENUM_PREFIX + enumSymbol, enumTag); + builder.parameter(paramName + "." + enumSymbol, enumTag); } builder.optional(); break; diff --git a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java index fd3f76e7a8d..699463a68a3 100644 --- a/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java +++ b/protobuf-converter/src/main/java/io/confluent/connect/protobuf/ProtobufDataConfig.java @@ -24,6 +24,12 @@ public class ProtobufDataConfig extends AbstractConfig { + public static final String GENERALIZED_SUM_TYPE_SUPPORT_CONFIG = "generalized.sum.type.support"; + public static final boolean GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT = false; + public static final String GENERALIZED_SUM_TYPE_SUPPORT_DOC = + "Toggle for enabling/disabling generalized sum type support: interoperability of enum/union " + + "with other schema formats"; + public static final String ENHANCED_PROTOBUF_SCHEMA_SUPPORT_CONFIG = "enhanced.protobuf.schema.support"; public static final boolean ENHANCED_PROTOBUF_SCHEMA_SUPPORT_DEFAULT = false; @@ -36,6 +42,10 @@ public class ProtobufDataConfig extends AbstractConfig { public static final String SCRUB_INVALID_NAMES_DOC = "Whether to scrub invalid names by replacing invalid characters with valid ones"; + public static final String INT_FOR_ENUMS_CONFIG = "int.for.enums"; + public static final boolean INT_FOR_ENUMS_DEFAULT = false; + public static final String INT_FOR_ENUMS_DOC = "Whether to represent enums as integers"; + public static final String OPTIONAL_FOR_NULLABLES_CONFIG = "optional.for.nullables"; public static final boolean OPTIONAL_FOR_NULLABLES_DEFAULT = false; public static final String OPTIONAL_FOR_NULLABLES_DOC = "Whether nullable fields should be " @@ -61,6 +71,11 @@ public class ProtobufDataConfig extends AbstractConfig { public static ConfigDef baseConfigDef() { return new ConfigDef() + .define(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, + ConfigDef.Type.BOOLEAN, + GENERALIZED_SUM_TYPE_SUPPORT_DEFAULT, + ConfigDef.Importance.MEDIUM, + GENERALIZED_SUM_TYPE_SUPPORT_DOC) .define(ENHANCED_PROTOBUF_SCHEMA_SUPPORT_CONFIG, ConfigDef.Type.BOOLEAN, ENHANCED_PROTOBUF_SCHEMA_SUPPORT_DEFAULT, @@ -68,6 +83,11 @@ public static ConfigDef baseConfigDef() { ENHANCED_PROTOBUF_SCHEMA_SUPPORT_DOC) .define(SCRUB_INVALID_NAMES_CONFIG, ConfigDef.Type.BOOLEAN, SCRUB_INVALID_NAMES_DEFAULT, ConfigDef.Importance.MEDIUM, SCRUB_INVALID_NAMES_DOC) + .define(INT_FOR_ENUMS_CONFIG, + ConfigDef.Type.BOOLEAN, + INT_FOR_ENUMS_DEFAULT, + ConfigDef.Importance.MEDIUM, + INT_FOR_ENUMS_DOC) .define(OPTIONAL_FOR_NULLABLES_CONFIG, ConfigDef.Type.BOOLEAN, OPTIONAL_FOR_NULLABLES_DEFAULT, @@ -100,6 +120,10 @@ public ProtobufDataConfig(Map props) { super(baseConfigDef(), props); } + public boolean isGeneralizedSumTypeSupportDefault() { + return this.getBoolean(GENERALIZED_SUM_TYPE_SUPPORT_CONFIG); + } + public boolean isEnhancedProtobufSchemaSupport() { return 
this.getBoolean(ENHANCED_PROTOBUF_SCHEMA_SUPPORT_CONFIG); } @@ -108,6 +132,10 @@ public boolean isScrubInvalidNames() { return this.getBoolean(SCRUB_INVALID_NAMES_CONFIG); } + public boolean useIntForEnums() { + return this.getBoolean(INT_FOR_ENUMS_CONFIG); + } + public boolean useOptionalForNullables() { return this.getBoolean(OPTIONAL_FOR_NULLABLES_CONFIG); } diff --git a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java index eccf610febf..dcf982ef1e4 100644 --- a/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java +++ b/protobuf-converter/src/test/java/io/confluent/connect/protobuf/ProtobufDataTest.java @@ -91,9 +91,11 @@ import io.confluent.kafka.serializers.protobuf.test.TimestampValueOuterClass.TimestampValue; import io.confluent.kafka.serializers.protobuf.test.UInt32ValueOuterClass; +import static io.confluent.connect.protobuf.ProtobufData.GENERALIZED_TYPE_UNION; import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_ENUM; import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_PROP; import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_TAG; +import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_UNION; import static io.confluent.connect.protobuf.ProtobufData.PROTOBUF_TYPE_UNION_PREFIX; import static io.confluent.kafka.serializers.protobuf.test.TimestampValueOuterClass.TimestampValue.newBuilder; import static org.junit.Assert.assertArrayEquals; @@ -212,12 +214,12 @@ private NestedMessage createEmptyNestedTestProto() throws ParseException { return message.build(); } - private Schema getExpectedNestedTestProtoSchemaStringUserId() { - return getExpectedNestedTestProtoSchema(); + private Schema getExpectedNestedTestProtoSchemaStringUserId(boolean useIntForEnums) { + return getExpectedNestedTestProtoSchema(useIntForEnums); } - private Schema getExpectedNestedTestProtoSchemaIntUserId() { - return getExpectedNestedTestProtoSchema(); + private Schema getExpectedNestedTestProtoSchemaIntUserId(boolean useIntForEnums) { + return getExpectedNestedTestProtoSchema(useIntForEnums); } private SchemaBuilder getEnumUnionSchemaBuilder() { @@ -259,6 +261,46 @@ private SchemaBuilder getEnumUnionSchemaBuilder() { return enumUnionBuilder; } + private SchemaBuilder getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport() { + final SchemaBuilder enumUnionBuilder = SchemaBuilder.struct(); + enumUnionBuilder.name("EnumUnion"); + final SchemaBuilder someValBuilder = SchemaBuilder.struct(); + someValBuilder.name("some_val"); + someValBuilder.parameter(GENERALIZED_TYPE_UNION, "some_val"); + someValBuilder.field( + "one_id", + SchemaBuilder.string().optional().parameter(PROTOBUF_TYPE_TAG, String.valueOf(1)).build() + ); + someValBuilder.field( + "other_id", + SchemaBuilder.int32().optional().parameter(PROTOBUF_TYPE_TAG, String.valueOf(2)).build() + ); + someValBuilder.field( + "some_status", + SchemaBuilder.string() + .name("Status") + .optional() + .parameter(PROTOBUF_TYPE_TAG, String.valueOf(3)) + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM, "Status") + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".ACTIVE", "0") + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".INACTIVE", "1") + .build() + ); + enumUnionBuilder.field("some_val_0", someValBuilder.optional().build()); + enumUnionBuilder.field( + "status", + SchemaBuilder.string() + .name("Status") + .optional() + 
.parameter(PROTOBUF_TYPE_TAG, String.valueOf(4)) + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM, "Status") + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".ACTIVE", "0") + .parameter(ProtobufData.GENERALIZED_TYPE_ENUM + ".INACTIVE", "1") + .build() + ); + return enumUnionBuilder; + } + private Struct getEnumUnionWithString() throws ParseException { Schema schema = getEnumUnionSchemaBuilder().build(); Struct result = new Struct(schema.schema()); @@ -269,6 +311,16 @@ private Struct getEnumUnionWithString() throws ParseException { return result; } + private Struct getEnumUnionWithStringWithGeneralizedSumTypeSupport() throws ParseException { + Schema schema = getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build(); + Struct result = new Struct(schema.schema()); + Struct union = new Struct(schema.field("some_val_0").schema()); + union.put("one_id", "ID"); + result.put("some_val_0", union); + result.put("status", "INACTIVE"); + return result; + } + private Struct getEnumUnionWithSomeStatus() throws ParseException { Schema schema = getEnumUnionSchemaBuilder().build(); Struct result = new Struct(schema.schema()); @@ -279,6 +331,16 @@ private Struct getEnumUnionWithSomeStatus() throws ParseException { return result; } + private Struct getEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport() throws ParseException { + Schema schema = getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build(); + Struct result = new Struct(schema.schema()); + Struct union = new Struct(schema.field("some_val_0").schema()); + union.put("some_status", "INACTIVE"); + result.put("some_val_0", union); + result.put("status", "INACTIVE"); + return result; + } + private SchemaBuilder getComplexTypeSchemaBuilder() { final SchemaBuilder complexTypeBuilder = SchemaBuilder.struct(); complexTypeBuilder.name("ComplexType"); @@ -316,6 +378,10 @@ private SchemaBuilder getInnerMessageSchemaBuilder() { } private Schema getExpectedNestedTestProtoSchema() { + return getExpectedNestedTestProtoSchema(false); + } + + private Schema getExpectedNestedTestProtoSchema(boolean useIntForEnums) { final SchemaBuilder builder = SchemaBuilder.struct(); builder.name("NestedMessage"); final SchemaBuilder userIdBuilder = SchemaBuilder.struct(); @@ -363,8 +429,9 @@ private Schema getExpectedNestedTestProtoSchema() { .parameter(PROTOBUF_TYPE_TAG, String.valueOf(4)) .build() ); + SchemaBuilder enumBuilder = useIntForEnums ? SchemaBuilder.int32() : SchemaBuilder.string(); builder.field("status", - SchemaBuilder.string() + enumBuilder .name("Status") .optional() .parameter(PROTOBUF_TYPE_TAG, String.valueOf(5)) @@ -403,8 +470,8 @@ private Map getTestKeyValueMap() { return result; } - private Struct getExpectedNestedProtoResultStringUserId() throws ParseException { - Schema schema = getExpectedNestedTestProtoSchemaStringUserId(); + private Struct getExpectedNestedProtoResultStringUserId(boolean useIntForEnums) throws ParseException { + Schema schema = getExpectedNestedTestProtoSchemaStringUserId(useIntForEnums); Struct result = new Struct(schema.schema()); Struct userId = new Struct(schema.field("user_id").schema()); Struct union = new Struct(schema.field("user_id").schema().field("user_id_0").schema()); @@ -422,7 +489,7 @@ private Struct getExpectedNestedProtoResultStringUserId() throws ParseException experiments.add("second experiment"); result.put("experiments_active", experiments); - result.put("status", "INACTIVE"); + result.put("status", useIntForEnums ? 
1 : "INACTIVE"); result.put("map_type", getTestKeyValueMap()); Struct inner = new Struct(schema.field("inner").schema()); @@ -432,8 +499,8 @@ private Struct getExpectedNestedProtoResultStringUserId() throws ParseException return result; } - private Struct getExpectedNestedTestProtoResultIntUserId() throws ParseException { - Schema schema = getExpectedNestedTestProtoSchemaIntUserId(); + private Struct getExpectedNestedTestProtoResultIntUserId(boolean useIntForEnums) throws ParseException { + Schema schema = getExpectedNestedTestProtoSchemaIntUserId(useIntForEnums); Struct result = new Struct(schema.schema()); Struct userId = new Struct(schema.field("user_id").schema()); Struct union = new Struct(schema.field("user_id").schema().field("user_id_0").schema()); @@ -451,7 +518,7 @@ private Struct getExpectedNestedTestProtoResultIntUserId() throws ParseException experiments.add("second experiment"); result.put("experiments_active", experiments); - result.put("status", "INACTIVE"); + result.put("status", useIntForEnums ? 1 : "INACTIVE"); result.put("map_type", getTestKeyValueMap()); Struct inner = new Struct(schema.field("inner").schema()); @@ -553,9 +620,23 @@ private SchemaAndValue getSchemaAndValue(ProtobufData protobufData, Message mess public void testToConnectDataWithNestedProtobufMessageAndStringUserId() throws Exception { NestedMessage message = createNestedTestProtoStringUserId(); SchemaAndValue result = getSchemaAndValue(message); - Schema expectedSchema = getExpectedNestedTestProtoSchemaStringUserId(); + Schema expectedSchema = getExpectedNestedTestProtoSchemaStringUserId(false); + assertSchemasEqual(expectedSchema, result.schema()); + Struct expected = getExpectedNestedProtoResultStringUserId(false); + assertEquals(expected, result.value()); + } + + @Test + public void testToConnectDataWithNestedProtobufMessageAndStringUserIdWithIntEnums() throws Exception { + NestedMessage message = createNestedTestProtoStringUserId(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true) + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue result = getSchemaAndValue(protobufData, message); + Schema expectedSchema = getExpectedNestedTestProtoSchemaStringUserId(true); assertSchemasEqual(expectedSchema, result.schema()); - Struct expected = getExpectedNestedProtoResultStringUserId(); + Struct expected = getExpectedNestedProtoResultStringUserId(true); assertEquals(expected, result.value()); } @@ -563,9 +644,25 @@ public void testToConnectDataWithNestedProtobufMessageAndStringUserId() throws E public void testToConnectDataWithNestedProtobufMessageAndIntUserId() throws Exception { NestedMessage message = createNestedTestProtoIntUserId(); SchemaAndValue result = getSchemaAndValue(message); - Schema expectedSchema = getExpectedNestedTestProtoSchemaIntUserId(); + Schema expectedSchema = getExpectedNestedTestProtoSchemaIntUserId(false); assertSchemasEqual(expectedSchema, result.schema()); - Struct expected = getExpectedNestedTestProtoResultIntUserId(); + Struct expected = getExpectedNestedTestProtoResultIntUserId(false); + assertSchemasEqual(expected.schema(), ((Struct) result.value()).schema()); + assertEquals(expected.schema(), ((Struct) result.value()).schema()); + assertEquals(expected, result.value()); + } + + @Test + public void testToConnectDataWithNestedProtobufMessageAndIntUserIdWithIntEnums() throws Exception { + NestedMessage message = createNestedTestProtoIntUserId(); + 
ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true) + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue result = getSchemaAndValue(protobufData, message); + Schema expectedSchema = getExpectedNestedTestProtoSchemaIntUserId(true); + assertSchemasEqual(expectedSchema, result.schema()); + Struct expected = getExpectedNestedTestProtoResultIntUserId(true); assertSchemasEqual(expected.schema(), ((Struct) result.value()).schema()); assertEquals(expected.schema(), ((Struct) result.value()).schema()); assertEquals(expected, result.value()); @@ -605,9 +702,23 @@ public void testToConnectDataDefaultOneOfCannotHaveTwoOneOfsSet() throws Excepti public void testToConnectEnumUnionWithString() throws Exception { EnumUnion message = createEnumUnionWithString(); SchemaAndValue result = getSchemaAndValue(message); - Schema expectedSchema = getEnumUnionSchemaBuilder().build(); + Schema expectedSchema = getEnumUnionSchemaBuilder().build(); + assertSchemasEqual(expectedSchema, result.schema()); + Struct expected = getEnumUnionWithString(); + assertEquals(expected, result.value()); + } + + @Test + public void testToConnectEnumUnionWithStringWithGeneralizedSumTypeSupport() throws Exception { + EnumUnion message = createEnumUnionWithString(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true") + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue result = getSchemaAndValue(protobufData, message); + Schema expectedSchema = getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build(); assertSchemasEqual(expectedSchema, result.schema()); - Struct expected = getEnumUnionWithString(); + Struct expected = getEnumUnionWithStringWithGeneralizedSumTypeSupport(); assertEquals(expected, result.value()); } @@ -615,9 +726,23 @@ public void testToConnectEnumUnionWithString() throws Exception { public void testToConnectEnumUnionWithSomeStatus() throws Exception { EnumUnion message = createEnumUnionWithSomeStatus(); SchemaAndValue result = getSchemaAndValue(message); - Schema expectedSchema = getEnumUnionSchemaBuilder().build(); + Schema expectedSchema = getEnumUnionSchemaBuilder().build(); + assertSchemasEqual(expectedSchema, result.schema()); + Struct expected = getEnumUnionWithSomeStatus(); + assertEquals(expected, result.value()); + } + + @Test + public void testToConnectEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport() throws Exception { + EnumUnion message = createEnumUnionWithSomeStatus(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true") + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue result = getSchemaAndValue(protobufData, message); + Schema expectedSchema = getEnumUnionSchemaBuilderWithGeneralizedSumTypeSupport().build(); assertSchemasEqual(expectedSchema, result.schema()); - Struct expected = getEnumUnionWithSomeStatus(); + Struct expected = getEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport(); assertEquals(expected, result.value()); } @@ -1091,6 +1216,7 @@ public void testRoundTripConnectUInt32Fixed32() throws Exception { assertTrue(parsedMessage.toString().contains("test_uint32: " + UNSIGNED_RESULT)); } + @Test public void testFromConnectEnumUnionWithString() throws Exception { EnumUnion message = 
createEnumUnionWithString(); SchemaAndValue schemaAndValue = getSchemaAndValue(message); @@ -1108,6 +1234,32 @@ public void testFromConnectEnumUnionWithSomeStatus() throws Exception { assertArrayEquals(messageBytes, message.toByteArray()); } + @Test + public void testFromConnectEnumUnionWithStringWithGeneralizedSumTypeSupport() throws Exception { + EnumUnion message = createEnumUnionWithString(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true") + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, message); + byte[] messageBytes = getMessageBytes(protobufData, schemaAndValue); + + assertArrayEquals(messageBytes, message.toByteArray()); + } + + @Test + public void testFromConnectEnumUnionWithSomeStatusWithGeneralizedSumTypeSupport() throws Exception { + EnumUnion message = createEnumUnionWithSomeStatus(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.GENERALIZED_SUM_TYPE_SUPPORT_CONFIG, "true") + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, message); + byte[] messageBytes = getMessageBytes(protobufData, schemaAndValue); + + assertArrayEquals(messageBytes, message.toByteArray()); + } + @Test public void testFromConnectDataWithNestedProtobufMessageAndStringUserId() throws Exception { NestedMessage nestedMessage = createNestedTestProtoStringUserId(); @@ -1117,6 +1269,19 @@ public void testFromConnectDataWithNestedProtobufMessageAndStringUserId() throws assertArrayEquals(messageBytes, nestedMessage.toByteArray()); } + @Test + public void testFromConnectDataWithNestedProtobufMessageAndStringUserIdWithIntEnums() throws Exception { + NestedMessage nestedMessage = createNestedTestProtoStringUserId(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true) + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, nestedMessage); + byte[] messageBytes = getMessageBytes(schemaAndValue); + + assertArrayEquals(messageBytes, nestedMessage.toByteArray()); + } + @Test public void testFromConnectDataWithNestedProtobufMessageAndIntUserId() throws Exception { NestedMessage nestedMessage = createNestedTestProtoIntUserId(); @@ -1126,6 +1291,19 @@ public void testFromConnectDataWithNestedProtobufMessageAndIntUserId() throws Ex assertArrayEquals(messageBytes, nestedMessage.toByteArray()); } + @Test + public void testFromConnectDataWithNestedProtobufMessageAndIntUserIdWithIntEnums() throws Exception { + NestedMessage nestedMessage = createNestedTestProtoIntUserId(); + ProtobufDataConfig protobufDataConfig = new ProtobufDataConfig.Builder() + .with(ProtobufDataConfig.INT_FOR_ENUMS_CONFIG, true) + .build(); + ProtobufData protobufData = new ProtobufData(protobufDataConfig); + SchemaAndValue schemaAndValue = getSchemaAndValue(protobufData, nestedMessage); + byte[] messageBytes = getMessageBytes(schemaAndValue); + + assertArrayEquals(messageBytes, nestedMessage.toByteArray()); + } + @Test public void testFromConnectDataWithEmptyNestedProtobufMessage() throws Exception { NestedMessage nestedMessage = createEmptyNestedTestProto(); diff --git a/protobuf-provider/pom.xml b/protobuf-provider/pom.xml index 
bc0c8a7e164..e032c9a6622 100644 --- a/protobuf-provider/pom.xml +++ b/protobuf-provider/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 diff --git a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java index c6719905f1f..376d5b0d8a8 100644 --- a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java +++ b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java @@ -36,6 +36,7 @@ import com.google.protobuf.DescriptorProtos.ServiceDescriptorProto; import com.google.protobuf.Descriptors; import com.google.protobuf.Descriptors.Descriptor; +import com.google.protobuf.Descriptors.EnumDescriptor; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.Descriptors.FileDescriptor; import com.google.protobuf.DurationProto; @@ -378,6 +379,20 @@ public ProtobufSchema(Descriptor descriptor, List references) { this.descriptor = descriptor; } + public ProtobufSchema(EnumDescriptor enumDescriptor) { + this(enumDescriptor, Collections.emptyList()); + } + + public ProtobufSchema(EnumDescriptor enumDescriptor, List references) { + Map dependencies = new HashMap<>(); + this.schemaObj = toProtoFile(enumDescriptor.getFile(), dependencies); + this.version = null; + this.name = enumDescriptor.getFullName(); + this.references = Collections.unmodifiableList(references); + this.dependencies = Collections.unmodifiableMap(dependencies); + this.descriptor = null; + } + private ProtobufSchema( ProtoFileElement schemaObj, Integer version, @@ -465,7 +480,7 @@ private ProtoFileElement toProtoFile(String schema) { byte[] bytes = base64Decoder.decode(schema); return toProtoFile(FileDescriptorProto.parseFrom(bytes)); } catch (Exception pe) { - throw new IllegalArgumentException("Could not parse Protobuf", e); + throw new IllegalArgumentException("Could not parse Protobuf - " + e.getMessage(), e); } } } @@ -964,6 +979,10 @@ public DynamicMessage.Builder newMessageBuilder(String name) { return toDynamicSchema().newMessageBuilder(name); } + public EnumDescriptor getEnumDescriptor(String enumTypeName) { + return toDynamicSchema().getEnumDescriptor(enumTypeName); + } + public Descriptors.EnumValueDescriptor getEnumValue(String enumTypeName, int enumNumber) { return toDynamicSchema().getEnumValue(enumTypeName, enumNumber); } diff --git a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java index e18063dbefb..936501bceb0 100644 --- a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java +++ b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaProvider.java @@ -17,7 +17,6 @@ package io.confluent.kafka.schemaregistry.protobuf; import java.util.List; -import java.util.Optional; import io.confluent.kafka.schemaregistry.AbstractSchemaProvider; import io.confluent.kafka.schemaregistry.ParsedSchema; @@ -35,20 +34,20 @@ public String schemaType() { } @Override - public Optional parseSchema(String schemaString, - List references, - boolean isNew) { + public ParsedSchema parseSchemaOrElseThrow(String schemaString, + List references, + boolean isNew) { try { - return Optional.of(new ProtobufSchema( - schemaString, - references, 
- resolveReferences(references), - null, - null - )); + return new ProtobufSchema( + schemaString, + references, + resolveReferences(references), + null, + null + ); } catch (Exception e) { log.error("Could not parse Protobuf schema", e); - return Optional.empty(); + throw e; } } } diff --git a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java index f65e68150d5..2bc1fd48163 100644 --- a/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java +++ b/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/dynamic/DynamicSchema.java @@ -24,6 +24,7 @@ import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import com.google.protobuf.DescriptorProtos.FileOptions; import com.google.protobuf.DescriptorProtos.FileOptions.OptimizeMode; +import com.google.protobuf.DescriptorProtos.ServiceDescriptorProto; import com.google.protobuf.Descriptors.Descriptor; import com.google.protobuf.Descriptors.DescriptorValidationException; import com.google.protobuf.Descriptors.EnumDescriptor; @@ -426,6 +427,16 @@ public Builder addEnumDefinition(EnumDefinition enumDef) { return this; } + public boolean containsService(String name) { + List services = mFileDescProtoBuilder.getServiceList(); + for (ServiceDescriptorProto service : services) { + if (service.getName().equals(name)) { + return true; + } + } + return false; + } + public Builder addServiceDefinition(ServiceDefinition serviceDef) { mFileDescProtoBuilder.addService(serviceDef.getServiceType()); return this; diff --git a/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java b/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java index f5ff2507965..2eb2c428003 100644 --- a/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java +++ b/protobuf-provider/src/test/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchemaTest.java @@ -21,9 +21,12 @@ import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.google.protobuf.ByteString; import com.google.protobuf.Descriptors.Descriptor; +import com.google.protobuf.Descriptors.EnumDescriptor; import com.google.protobuf.Descriptors.FieldDescriptor; import com.google.protobuf.DynamicMessage; import com.squareup.wire.schema.internal.parser.ProtoFileElement; +import io.confluent.kafka.schemaregistry.ParsedSchema; +import io.confluent.kafka.schemaregistry.SchemaProvider; import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference; import io.confluent.kafka.schemaregistry.protobuf.dynamic.DynamicSchema; import io.confluent.kafka.schemaregistry.protobuf.dynamic.MessageDefinition; @@ -35,6 +38,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Optional; import io.confluent.kafka.schemaregistry.CompatibilityLevel; import io.confluent.kafka.schemaregistry.protobuf.diff.ResourceLoader; @@ -42,6 +46,7 @@ import static io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema.PROTO3; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; @@ -170,6 +175,18 @@ public class ProtobufSchemaTest { private static final 
ProtobufSchema enumBeforeMessageSchema = new ProtobufSchema(enumBeforeMessageSchemaString); + private static final String invalidSchemaString = "syntax = \"proto3\";\n" + + "\n" + + "option java_package = \"io.confluent.kafka.serializers.protobuf.test\";\n" + + "option java_outer_classname = \"TestMessageProtos\";\n" + + "\n" + + "import \"google/protobuf/descriptor.proto\";\n" + + "\n" + + "message TestMessage {\n" + + " string test_string = 1 [json_name = \"test_str\"];\n" + + " int32 test_int32 = 8.01;\n" + + "}\n"; + @Test public void testRecordToProtobuf() throws Exception { String json = "{\n" @@ -1228,6 +1245,41 @@ public void testEnumAfterMessage() throws Exception { new ProtobufSchema(enumBeforeMessageSchema.toDescriptor()).canonicalString()); } + @Test + public void testParseSchema() { + SchemaProvider protobufSchemaProvider = new ProtobufSchemaProvider(); + ParsedSchema parsedSchema = protobufSchemaProvider.parseSchemaOrElseThrow(recordSchemaString, + new ArrayList<>(), false); + Optional parsedSchemaOptional = protobufSchemaProvider.parseSchema(recordSchemaString, + new ArrayList<>(), false); + + assertNotNull(parsedSchema); + assertTrue(parsedSchemaOptional.isPresent()); + } + + @Test(expected = IllegalArgumentException.class) + public void testParseSchemaThrowException() { + SchemaProvider protobufSchemaProvider = new ProtobufSchemaProvider(); + protobufSchemaProvider.parseSchemaOrElseThrow(invalidSchemaString, + new ArrayList<>(), false); + } + + @Test + public void testParseSchemaSuppressException() { + SchemaProvider protobufSchemaProvider = new ProtobufSchemaProvider(); + Optional parsedSchema = protobufSchemaProvider.parseSchema(invalidSchemaString, + new ArrayList<>(), false); + assertFalse(parsedSchema.isPresent()); + } + + @Test + public void testEnumMethods() { + EnumDescriptor enumDescriptor = enumSchema.getEnumDescriptor("TestEnum.Suit"); + ProtobufSchema enumSchema2 = new ProtobufSchema(enumDescriptor); + EnumDescriptor enumDescriptor2 = enumSchema2.getEnumDescriptor("TestEnum.Suit"); + assertEquals(enumDescriptor.getFullName(), enumDescriptor2.getFullName()); + } + private static JsonNode jsonTree(String jsonData) { try { return objectMapper.readTree(jsonData); diff --git a/protobuf-serde/pom.xml b/protobuf-serde/pom.xml index d806fe4de71..293c24724d8 100644 --- a/protobuf-serde/pom.xml +++ b/protobuf-serde/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 diff --git a/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java b/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java index 4d33daefd95..8718c886e52 100644 --- a/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java +++ b/protobuf-serde/src/main/java/io/confluent/kafka/streams/serdes/protobuf/KafkaProtobufSerde.java @@ -54,11 +54,19 @@ public KafkaProtobufSerde(Class specificProtobufClass) { * For testing purposes only. */ public KafkaProtobufSerde(final SchemaRegistryClient client) { + this(client, null); + } + + /** + * For testing purposes only. 
+ */ + public KafkaProtobufSerde(final SchemaRegistryClient client, final Class specificClass) { if (client == null) { throw new IllegalArgumentException("schema registry client must not be null"); } + this.specificProtobufClass = specificClass; inner = Serdes.serdeFrom(new KafkaProtobufSerializer<>(client), - new KafkaProtobufDeserializer<>(client)); + new KafkaProtobufDeserializer<>(client)); } @Override @@ -106,4 +114,4 @@ private Map withSpecificClass(final Map config, boole return newConfig; } -} \ No newline at end of file +} diff --git a/protobuf-serializer/pom.xml b/protobuf-serializer/pom.xml index 336ab08438e..836d5ea91e6 100644 --- a/protobuf-serializer/pom.xml +++ b/protobuf-serializer/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 diff --git a/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java b/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java index b7b65676977..f651800cde3 100644 --- a/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java +++ b/protobuf-serializer/src/test/java/com/acme/glup/ExampleProtoAcme.java @@ -583,7 +583,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (partition_ != null) { output.writeMessage(2, getPartition()); } - if (!getUidBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uid_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, uid_); } com.google.protobuf.GeneratedMessageV3 @@ -612,7 +612,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getPartition()); } - if (!getUidBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uid_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, uid_); } for (java.util.Map.Entry entry diff --git a/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java b/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java index dd53e2749ee..1ebe41d52c8 100644 --- a/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java +++ b/protobuf-serializer/src/test/java/com/acme/glup/MetadataProto.java @@ -2713,7 +2713,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_); } for (int i = 0; i < format_.size(); i++) { @@ -2722,13 +2722,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (partitionScheme_ != com.acme.glup.MetadataProto.PartitionScheme.UNSUPPORTED_PARTITION_SCHEME.getNumber()) { output.writeEnum(3, partitionScheme_); } - if (!getJavaClassBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(javaClass_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, javaClass_); } if (forTests_ != false) { output.writeBool(5, forTests_); } - if (!getOwnerBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, owner_); } if (private_ != false) { @@ -2749,7 +2749,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_); } 
for (int i = 0; i < format_.size(); i++) { @@ -2760,14 +2760,14 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(3, partitionScheme_); } - if (!getJavaClassBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(javaClass_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, javaClass_); } if (forTests_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, forTests_); } - if (!getOwnerBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, owner_); } if (private_ != false) { @@ -4414,7 +4414,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (format_ != null) { output.writeMessage(2, getFormat()); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, datasetId_); } unknownFields.writeTo(output); @@ -4434,7 +4434,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(2, getFormat()); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, datasetId_); } size += unknownFields.getSerializedSize(); @@ -5894,7 +5894,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getPathBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, path_); } if (fileFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) { @@ -5915,7 +5915,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (priority_ != 0) { output.writeInt32(8, priority_); } - if (!getLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 9, label_); } if (monitoringLevel_ != com.acme.glup.MetadataProto.MonitoringLevel.DEFAULT.getNumber()) { @@ -5930,7 +5930,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getPathBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, path_); } if (fileFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) { @@ -5957,7 +5957,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeInt32Size(8, priority_); } - if (!getLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, label_); } if (monitoringLevel_ != com.acme.glup.MetadataProto.MonitoringLevel.DEFAULT.getNumber()) { @@ -9786,16 +9786,16 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, inputDatasetId_); } - if (!getInputFormatLabelBytes().isEmpty()) { + if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, inputFormatLabel_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, outputFormatLabel_); } if (useHippoCuttleJob_ != false) { @@ -9810,16 +9810,16 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, inputDatasetId_); } - if (!getInputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, inputFormatLabel_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, outputFormatLabel_); } if (useHippoCuttleJob_ != false) { @@ -10972,16 +10972,16 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, topic_); } if (deduplicate_ != false) { output.writeBool(3, deduplicate_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, outputFormatLabel_); } unknownFields.writeTo(output); @@ -10993,17 +10993,17 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, topic_); } if (deduplicate_ != false) { size += com.google.protobuf.CodedOutputStream .computeBoolSize(3, deduplicate_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, outputFormatLabel_); } size += unknownFields.getSerializedSize(); @@ -13657,10 +13657,10 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws 
java.io.IOException { - if (!getTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, topic_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, outputDatasetId_); } if (deduplicate_ != false) { @@ -13669,7 +13669,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (config_ != null) { output.writeMessage(4, getConfig()); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, outputFormatLabel_); } for (int i = 0; i < configPerDc_.size(); i++) { @@ -13684,10 +13684,10 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(topic_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, topic_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, outputDatasetId_); } if (deduplicate_ != false) { @@ -13698,7 +13698,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(4, getConfig()); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, outputFormatLabel_); } for (int i = 0; i < configPerDc_.size(); i++) { @@ -16422,7 +16422,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (from_ != null) { output.writeMessage(1, getFrom()); } - if (!getSourceNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, sourceNamespace_); } if (getPlatformsList().size() > 0) { @@ -16435,10 +16435,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (isBackfilling_ != false) { output.writeBool(8, isBackfilling_); } - if (!getToLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 9, toLabel_); } - if (!getToDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 10, toDatasetId_); } if (withBackfilling_ != false) { @@ -16460,7 +16460,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getFrom()); } - if (!getSourceNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, sourceNamespace_); } { @@ -16479,10 +16479,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBoolSize(8, isBackfilling_); } - if (!getToLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, toLabel_); } - if (!getToDatasetIdBytes().isEmpty()) { + if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(toDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, toDatasetId_); } if (withBackfilling_ != false) { @@ -18109,7 +18109,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (from_ != null) { output.writeMessage(1, getFrom()); } - if (!getSourceNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, sourceNamespace_); } if (getPlatformsList().size() > 0) { @@ -18132,7 +18132,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, getFrom()); } - if (!getSourceNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceNamespace_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, sourceNamespace_); } { @@ -19485,10 +19485,10 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, inputDatasetId_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, outputDatasetId_); } if (inputFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) { @@ -19497,10 +19497,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (outputFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) { output.writeEnum(4, outputFormat_); } - if (!getInputDatasetLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, inputDatasetLabel_); } - if (!getOutputDatasetLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, outputDatasetLabel_); } if (isByPlatform_ != false) { @@ -19515,10 +19515,10 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, inputDatasetId_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, outputDatasetId_); } if (inputFormat_ != com.acme.glup.MetadataProto.HDFSDataFormat.UNSUPPORTED_DATA_FORMAT.getNumber()) { @@ -19529,10 +19529,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(4, outputFormat_); } - if (!getInputDatasetLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, inputDatasetLabel_); } - if (!getOutputDatasetLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, 
outputDatasetLabel_); } if (isByPlatform_ != false) { @@ -20922,19 +20922,19 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, inputDatasetId_); } - if (!getInputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, inputFormatLabel_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, outputFormatLabel_); } - if (samplingRate_ != 0F) { + if (java.lang.Float.floatToRawIntBits(samplingRate_) != 0) { output.writeFloat(5, samplingRate_); } unknownFields.writeTo(output); @@ -20946,19 +20946,19 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getInputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, inputDatasetId_); } - if (!getInputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(inputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, inputFormatLabel_); } - if (!getOutputDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, outputDatasetId_); } - if (!getOutputFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(outputFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, outputFormatLabel_); } - if (samplingRate_ != 0F) { + if (java.lang.Float.floatToRawIntBits(samplingRate_) != 0) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(5, samplingRate_); } @@ -22298,22 +22298,22 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getLeftDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, leftDatasetId_); } - if (!getLeftFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, leftFormatLabel_); } - if (!getRightDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightDatasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, rightDatasetId_); } - if (!getRightFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, rightFormatLabel_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { 
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, hostname_); } - if (!getIgnoredFieldsBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ignoredFields_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, ignoredFields_); } unknownFields.writeTo(output); @@ -22325,22 +22325,22 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getLeftDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, leftDatasetId_); } - if (!getLeftFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(leftFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, leftFormatLabel_); } - if (!getRightDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightDatasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, rightDatasetId_); } - if (!getRightFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rightFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, rightFormatLabel_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, hostname_); } - if (!getIgnoredFieldsBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(ignoredFields_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, ignoredFields_); } size += unknownFields.getSerializedSize(); @@ -24259,13 +24259,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) for (int i = 0; i < to_.size(); i++) { output.writeMessage(250, to_.get(i)); } - if (!getNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(namespace_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 251, namespace_); } - if (!getStartDateBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(startDate_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 253, startDate_); } - if (!getStopDateBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stopDate_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 254, stopDate_); } if (ignoreCn_ != false) { @@ -24324,13 +24324,13 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeMessageSize(250, to_.get(i)); } - if (!getNamespaceBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(namespace_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(251, namespace_); } - if (!getStartDateBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(startDate_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(253, startDate_); } - if (!getStopDateBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(stopDate_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(254, stopDate_); } if (ignoreCn_ != false) { @@ -27092,10 +27092,10 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getOwnerBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) { 
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, owner_); } - if (!getNameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_); } if (partitioning_ != com.acme.glup.MetadataProto.PartitionScheme.UNSUPPORTED_PARTITION_SCHEME.getNumber()) { @@ -27122,10 +27122,10 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getOwnerBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(owner_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, owner_); } - if (!getNameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_); } if (partitioning_ != com.acme.glup.MetadataProto.PartitionScheme.UNSUPPORTED_PARTITION_SCHEME.getNumber()) { @@ -32944,7 +32944,7 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (pendingDeletion_ != false) { output.writeBool(5, pendingDeletion_); } - if (!getAddedAtBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(addedAt_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, addedAt_); } unknownFields.writeTo(output); @@ -32984,7 +32984,7 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBoolSize(5, pendingDeletion_); } - if (!getAddedAtBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(addedAt_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, addedAt_); } size += unknownFields.getSerializedSize(); @@ -33962,7 +33962,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getNameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (skip_ != false) { @@ -33977,7 +33977,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getNameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (skip_ != false) { @@ -34670,7 +34670,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getNameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (useEnumFieldId_ != false) { @@ -34685,7 +34685,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getNameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (useEnumFieldId_ != false) { @@ -37356,10 +37356,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (consolidationEnabled_ != false) { output.writeBool(7, consolidationEnabled_); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 10, datasetId_); } - if (!getDatasetFormatLabelBytes().isEmpty()) { + if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetFormatLabel_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 11, datasetFormatLabel_); } for (int i = 0; i < controlMessage_.size(); i++) { @@ -37386,10 +37386,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBoolSize(7, consolidationEnabled_); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, datasetId_); } - if (!getDatasetFormatLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetFormatLabel_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(11, datasetFormatLabel_); } for (int i = 0; i < controlMessage_.size(); i++) { @@ -38773,10 +38773,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (dc_ != com.acme.glup.MetadataProto.DataCenter.UNSUPPORTED_DATACENTER.getNumber()) { output.writeEnum(2, dc_); } - if (!getLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, label_); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, datasetId_); } unknownFields.writeTo(output); @@ -38796,10 +38796,10 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeEnumSize(2, dc_); } - if (!getLabelBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(label_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, label_); } - if (!getDatasetIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(datasetId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, datasetId_); } size += unknownFields.getSerializedSize(); @@ -39849,13 +39849,13 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (ip4_ != 0) { output.writeFixed32(2, ip4_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, hostname_); } - if (!getContainerTaskBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerTask_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, containerTask_); } - if (!getContainerAppBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerApp_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, containerApp_); } unknownFields.writeTo(output); @@ -39875,13 +39875,13 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeFixed32Size(2, ip4_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, hostname_); } - if (!getContainerTaskBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerTask_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, containerTask_); } - if (!getContainerAppBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerApp_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, containerApp_); } size += 
unknownFields.getSerializedSize(); @@ -41107,7 +41107,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getKafkaTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, kafkaTopic_); } if (datacenter_ != com.acme.glup.MetadataProto.DataCenter.UNSUPPORTED_DATACENTER.getNumber()) { @@ -41125,7 +41125,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getKafkaTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, kafkaTopic_); } if (datacenter_ != com.acme.glup.MetadataProto.DataCenter.UNSUPPORTED_DATACENTER.getNumber()) { @@ -42494,13 +42494,13 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getTypeBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, type_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, hostname_); } - if (!getKafkaTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, kafkaTopic_); } if (partition_ != 0) { @@ -42512,16 +42512,16 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!processUuid_.isEmpty()) { output.writeBytes(6, processUuid_); } - if (!getRegionBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 7, region_); } if (timestampSeconds_ != 0) { output.writeInt32(8, timestampSeconds_); } - if (!getClusterBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cluster_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 9, cluster_); } - if (!getEnvironmentBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 10, environment_); } com.google.protobuf.GeneratedMessageV3 @@ -42539,13 +42539,13 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getTypeBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, type_); } - if (!getHostnameBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostname_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, hostname_); } - if (!getKafkaTopicBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kafkaTopic_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, kafkaTopic_); } if (partition_ != 0) { @@ -42560,17 +42560,17 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBytesSize(6, processUuid_); } - if (!getRegionBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, region_); } if 
(timestampSeconds_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(8, timestampSeconds_); } - if (!getClusterBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cluster_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(9, cluster_); } - if (!getEnvironmentBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, environment_); } for (java.util.Map.Entry entry diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java index 212e9aaec47..d712b61efa1 100644 --- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java +++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/EnumUnionOuter.java @@ -236,8 +236,8 @@ private EnumUnion( break; } case 16: { - someValCase_ = 2; someVal_ = input.readInt32(); + someValCase_ = 2; break; } case 24: { diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java index acc1da0958b..c90ad498263 100644 --- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java +++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/NestedTestProto.java @@ -223,8 +223,8 @@ private UserId( break; } case 16: { - userIdCase_ = 2; userId_ = input.readInt32(); + userIdCase_ = 2; break; } case 26: { @@ -1293,7 +1293,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_); } unknownFields.writeTo(output); @@ -1305,7 +1305,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_); } size += unknownFields.getSerializedSize(); @@ -1801,8 +1801,8 @@ private ComplexType( break; } case 16: { - someValCase_ = 2; someVal_ = input.readInt32(); + someValCase_ = 2; break; } case 24: { @@ -3229,7 +3229,7 @@ public final boolean isInitialized() { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_); } if (getIdsList().size() > 0) { @@ -3248,7 +3248,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getIdBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_); } { @@ -3946,7 +3946,7 @@ public int getMapTypeCount() { @java.lang.Override public boolean containsMapType( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } return 
internalGetMapType().getMap().containsKey(key); } /** @@ -3973,7 +3973,7 @@ public java.util.Map getMapTypeMap() { public java.lang.String getMapTypeOrDefault( java.lang.String key, java.lang.String defaultValue) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetMapType().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; @@ -3985,7 +3985,7 @@ public java.lang.String getMapTypeOrDefault( public java.lang.String getMapTypeOrThrow( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetMapType().getMap(); if (!map.containsKey(key)) { @@ -5133,7 +5133,7 @@ public int getMapTypeCount() { @java.lang.Override public boolean containsMapType( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } return internalGetMapType().getMap().containsKey(key); } /** @@ -5160,7 +5160,7 @@ public java.util.Map getMapTypeMap() { public java.lang.String getMapTypeOrDefault( java.lang.String key, java.lang.String defaultValue) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetMapType().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; @@ -5172,7 +5172,7 @@ public java.lang.String getMapTypeOrDefault( public java.lang.String getMapTypeOrThrow( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetMapType().getMap(); if (!map.containsKey(key)) { @@ -5192,7 +5192,7 @@ public Builder clearMapType() { public Builder removeMapType( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } internalGetMutableMapType().getMutableMap() .remove(key); return this; @@ -5211,8 +5211,11 @@ public Builder removeMapType( public Builder putMapType( java.lang.String key, java.lang.String value) { - if (key == null) { throw new java.lang.NullPointerException(); } - if (value == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } + if (value == null) { + throw new NullPointerException("map value"); +} + internalGetMutableMapType().getMutableMap() .put(key, value); return this; diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java index 1bd7557e8ed..2cfce29e32d 100644 --- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java +++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageOptionalProtos.java @@ -238,7 +238,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, testString_); } if 
(((bitField0_ & 0x00000001) != 0)) { @@ -253,7 +253,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, testString_); } if (((bitField0_ & 0x00000001) != 0)) { diff --git a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java index ae7bffb5f76..2432b6a2d5e 100644 --- a/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java +++ b/protobuf-serializer/src/test/java/io/confluent/kafka/serializers/protobuf/test/TestMessageProtos.java @@ -475,7 +475,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, testString_); } if (testBool_ != false) { @@ -484,10 +484,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!testBytes_.isEmpty()) { output.writeBytes(3, testBytes_); } - if (testDouble_ != 0D) { + if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) { output.writeDouble(4, testDouble_); } - if (testFloat_ != 0F) { + if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) { output.writeFloat(5, testFloat_); } if (testFixed32_ != 0) { @@ -529,7 +529,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, testString_); } if (testBool_ != false) { @@ -540,11 +540,11 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, testBytes_); } - if (testDouble_ != 0D) { + if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) { size += com.google.protobuf.CodedOutputStream .computeDoubleSize(4, testDouble_); } - if (testFloat_ != 0F) { + if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(5, testFloat_); } @@ -2091,7 +2091,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, testString_); } if (testBool_ != false) { @@ -2100,10 +2100,10 @@ public void writeTo(com.google.protobuf.CodedOutputStream output) if (!testBytes_.isEmpty()) { output.writeBytes(3, testBytes_); } - if (testDouble_ != 0D) { + if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) { output.writeDouble(4, testDouble_); } - if (testFloat_ != 0F) { + if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) { output.writeFloat(5, testFloat_); } if (testFixed32_ != 0) { @@ -2148,7 +2148,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getTestStringBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(testString_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(1, testString_); } if (testBool_ != false) { @@ -2159,11 +2159,11 @@ public int getSerializedSize() { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, testBytes_); } - if (testDouble_ != 0D) { + if (java.lang.Double.doubleToRawLongBits(testDouble_) != 0) { size += com.google.protobuf.CodedOutputStream .computeDoubleSize(4, testDouble_); } - if (testFloat_ != 0F) { + if (java.lang.Float.floatToRawIntBits(testFloat_) != 0) { size += com.google.protobuf.CodedOutputStream .computeFloatSize(5, testFloat_); } diff --git a/protobuf-types/pom.xml b/protobuf-types/pom.xml index 02b0dfd4da6..3b251c949e1 100644 --- a/protobuf-types/pom.xml +++ b/protobuf-types/pom.xml @@ -6,7 +6,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 diff --git a/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java b/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java index 5fbc82985b0..01fff765271 100644 --- a/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java +++ b/protobuf-types/src/main/java/io/confluent/protobuf/MetaProto.java @@ -250,7 +250,7 @@ public int getParamsCount() { @java.lang.Override public boolean containsParams( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } return internalGetParams().getMap().containsKey(key); } /** @@ -277,7 +277,7 @@ public java.util.Map getParamsMap() { public java.lang.String getParamsOrDefault( java.lang.String key, java.lang.String defaultValue) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetParams().getMap(); return map.containsKey(key) ? 
map.get(key) : defaultValue; @@ -289,7 +289,7 @@ public java.lang.String getParamsOrDefault( public java.lang.String getParamsOrThrow( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetParams().getMap(); if (!map.containsKey(key)) { @@ -312,7 +312,7 @@ public final boolean isInitialized() { @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - if (!getDocBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(doc_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, doc_); } com.google.protobuf.GeneratedMessageV3 @@ -330,7 +330,7 @@ public int getSerializedSize() { if (size != -1) return size; size = 0; - if (!getDocBytes().isEmpty()) { + if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(doc_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, doc_); } for (java.util.Map.Entry entry @@ -760,7 +760,7 @@ public int getParamsCount() { @java.lang.Override public boolean containsParams( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } return internalGetParams().getMap().containsKey(key); } /** @@ -787,7 +787,7 @@ public java.util.Map getParamsMap() { public java.lang.String getParamsOrDefault( java.lang.String key, java.lang.String defaultValue) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetParams().getMap(); return map.containsKey(key) ? map.get(key) : defaultValue; @@ -799,7 +799,7 @@ public java.lang.String getParamsOrDefault( public java.lang.String getParamsOrThrow( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } java.util.Map map = internalGetParams().getMap(); if (!map.containsKey(key)) { @@ -819,7 +819,7 @@ public Builder clearParams() { public Builder removeParams( java.lang.String key) { - if (key == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } internalGetMutableParams().getMutableMap() .remove(key); return this; @@ -838,8 +838,11 @@ public Builder removeParams( public Builder putParams( java.lang.String key, java.lang.String value) { - if (key == null) { throw new java.lang.NullPointerException(); } - if (value == null) { throw new java.lang.NullPointerException(); } + if (key == null) { throw new NullPointerException("map key"); } + if (value == null) { + throw new NullPointerException("map value"); +} + internalGetMutableParams().getMutableMap() .put(key, value); return this; diff --git a/schema-converter/pom.xml b/schema-converter/pom.xml new file mode 100644 index 00000000000..a4d2b6a92ac --- /dev/null +++ b/schema-converter/pom.xml @@ -0,0 +1,37 @@ + + + 4.0.0 + + + io.confluent + kafka-schema-registry-parent + 7.2.10-0 + + + + + Apache License 2.0 + http://www.apache.org/licenses/LICENSE-2.0.html + repo + + + + kafka-schema-converter + jar + kafka-schema-converter + + + + org.apache.kafka + connect-api + provided + + + junit + junit + test + + + diff --git a/schema-converter/src/main/java/io/confluent/connect/schema/ConnectEnum.java b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectEnum.java 
new file mode 100644 index 00000000000..3fd0b59b2c6 --- /dev/null +++ b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectEnum.java @@ -0,0 +1,135 @@ +/* + * Copyright 2021 Confluent Inc. + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.confluent.connect.schema; + +import java.util.List; +import java.util.Map; +import org.apache.kafka.connect.data.Schema; +import org.apache.kafka.connect.data.SchemaBuilder; +import org.apache.kafka.connect.errors.DataException; + +public class ConnectEnum { + + public static final String LOGICAL_PARAMETER = "org.apache.kafka.connect.data.Enum"; + + /** + * Returns a SchemaBuilder for an Enum. + * + * @param annotation an arbitrary annotation to be associated with the enum + * @param symbols the enum symbols + * @return a SchemaBuilder + */ + public static SchemaBuilder builder(String annotation, List symbols) { + SchemaBuilder builder = SchemaBuilder.string().parameter(LOGICAL_PARAMETER, annotation); + for (int i = 0; i < symbols.size(); i++) { + builder.parameter(LOGICAL_PARAMETER + "." + symbols.get(i), String.valueOf(i)); + } + return builder; + } + + /** + * Returns a SchemaBuilder for an Enum. + * + * @param annotation an arbitrary annotation to be associated with the enum + * @param symbols a map of enum symbol to its ordinal + * @return a SchemaBuilder + */ + public static SchemaBuilder builder(String annotation, Map symbols) { + SchemaBuilder builder = SchemaBuilder.string().parameter(LOGICAL_PARAMETER, annotation); + for (Map.Entry symbol : symbols.entrySet()) { + builder.parameter(LOGICAL_PARAMETER + "." + symbol.getKey(), + String.valueOf(symbol.getValue())); + } + return builder; + } + + /** + * Returns whether a schema represents an Enum. + * + * @param schema the schema + * @return whether the schema represents an Enum + */ + public static boolean isEnum(Schema schema) { + return schema != null + && schema.parameters() != null + && schema.parameters().containsKey(LOGICAL_PARAMETER); + } + + /** + * Returns whether a schema has an Enum symbol. + * + * @param schema the schema + * @param symbol the enum symbol + * @return whether the schema represents an Enum + */ + public static boolean hasEnumSymbol(Schema schema, String symbol) { + return schema != null + && schema.parameters() != null + && schema.parameters().containsKey(LOGICAL_PARAMETER) + && schema.parameters().containsKey(LOGICAL_PARAMETER + "." + symbol); + } + + /** + * Convert a value from its logical format (Enum) to its encoded format. 
+ * + * @param schema the schema + * @param value the logical value + * @return the encoded value + */ + public static > String fromLogical(Schema schema, T value) { + if (!hasEnumSymbol(schema, value.name())) { + throw new DataException( + "Requested conversion of Enum object but the schema does not match."); + } + return value.name(); + } + + /** + * Convert a value from its encoded format to its logical format (Enum). + * + * @param schema the schema + * @param cls the class of the logical value + * @param symbol the enum symbol + * @return the logical value + */ + public static > T toLogical(Schema schema, Class cls, + String symbol) { + if (!hasEnumSymbol(schema, symbol)) { + throw new DataException( + "Requested conversion of Enum object but the schema does not match."); + } + return java.lang.Enum.valueOf(cls, symbol); + } + + /** + * Convert a value from its encoded format to its ordinal. + * + * @param schema the schema + * @param symbol the enum symbol + * @return the ordinal + */ + public static int toOrdinal(Schema schema, String symbol) { + if (!hasEnumSymbol(schema, symbol)) { + throw new DataException( + "Requested conversion of Enum object but the schema does not match."); + } + return Integer.parseInt(schema.parameters().get(LOGICAL_PARAMETER + "." + symbol)); + } +} diff --git a/schema-converter/src/main/java/io/confluent/connect/schema/ConnectUnion.java b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectUnion.java new file mode 100644 index 00000000000..7bf388d8e7e --- /dev/null +++ b/schema-converter/src/main/java/io/confluent/connect/schema/ConnectUnion.java @@ -0,0 +1,131 @@ +/* + * Copyright 2021 Confluent Inc. + * + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package io.confluent.connect.schema; + +import org.apache.kafka.connect.data.ConnectSchema; +import org.apache.kafka.connect.data.Field; +import org.apache.kafka.connect.data.Schema; +import org.apache.kafka.connect.data.SchemaBuilder; +import org.apache.kafka.connect.data.Struct; +import org.apache.kafka.connect.errors.DataException; + +public class ConnectUnion { + + public static final String LOGICAL_PARAMETER = "org.apache.kafka.connect.data.Union"; + + /** + * Returns a SchemaBuilder for a Union. + * + * @param annotation an arbitrary annotation to be associated with the union + * @return a SchemaBuilder + */ + public static SchemaBuilder builder(String annotation) { + return SchemaBuilder.struct().parameter(LOGICAL_PARAMETER, annotation); + } + + /** + * Returns whether a schema represents a Union. 
+ * + * @param schema the schema + * @return whether the schema represents a Union + */ + public static boolean isUnion(Schema schema) { + return schema != null + && schema.parameters() != null + && schema.parameters().containsKey(LOGICAL_PARAMETER); + } + + /** + * Convert a value from its logical format (Union) to its encoded format. + * + * @param schema the schema + * @param value the logical value + * @return the encoded value + */ + public static Object fromLogical(Schema schema, Struct value) { + if (!isUnion(schema)) { + throw new DataException( + "Requested conversion of Union object but the schema does not match."); + } + for (Field field : schema.fields()) { + Object object = value.get(field); + if (object != null) { + return object; + } + } + return null; + } + + /** + * Convert a value from its encoded format to its logical format (Union). + * The value is associated with the field whose schema matches the given value. + * + * @param schema the schema + * @param value the encoded value + * @return the logical value + */ + public static Struct toLogical(Schema schema, Object value) { + if (!isUnion(schema)) { + throw new DataException( + "Requested conversion of Union object but the schema does not match."); + } + Struct struct = new Struct(schema); + for (Field field : schema.fields()) { + if (validate(field.schema(), value)) { + struct.put(field, value); + break; + } + } + return struct; + } + + private static boolean validate(Schema schema, Object value) { + try { + ConnectSchema.validateValue(schema, value); + } catch (DataException e) { + return false; + } + return true; + } + + /** + * Convert a value from its encoded format to its logical format (Union). + * The value is associated with the field with the given field name. + * + * @param schema the schema + * @param fieldName the field name + * @param value the encoded value + * @return the logical value + */ + public static Struct toLogicalUsingName(Schema schema, String fieldName, Object value) { + if (!isUnion(schema)) { + throw new DataException( + "Requested conversion of Union object but the schema does not match."); + } + Struct struct = new Struct(schema); + for (Field field : schema.fields()) { + if (field.name().equals(fieldName)) { + struct.put(field, value); + break; + } + } + return struct; + } +} diff --git a/schema-registry-console-scripts/pom.xml b/schema-registry-console-scripts/pom.xml index 66a7e8cefac..c2949863a59 100644 --- a/schema-registry-console-scripts/pom.xml +++ b/schema-registry-console-scripts/pom.xml @@ -7,7 +7,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0 io.confluent diff --git a/schema-serializer/pom.xml b/schema-serializer/pom.xml index c6bcdb311aa..190032c68af 100644 --- a/schema-serializer/pom.xml +++ b/schema-serializer/pom.xml @@ -7,7 +7,7 @@ io.confluent kafka-schema-registry-parent - 7.1.12-0 + 7.2.10-0
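The regenerated protobuf classes earlier in this diff replace getXxxBytes().isEmpty() with the runtime helper GeneratedMessageV3.isStringEmpty(field_) and replace value comparisons such as testDouble_ != 0D with raw-bit comparisons before a scalar field is written. For floating-point fields the raw-bit check differs from a value comparison only for negative zero: -0.0 compares equal to 0.0 but has a nonzero bit pattern, so it is serialized rather than skipped. A minimal sketch of that difference; the class and method names are illustrative and not part of the diff:

public class DefaultCheckSketch {

  // Value comparison: -0.0 == 0.0, so a field holding -0.0 would be treated as default and skipped.
  static boolean writtenWithValueCompare(double v) {
    return v != 0D;
  }

  // Raw-bit comparison: -0.0 has bit pattern 0x8000000000000000L, so the field is written.
  static boolean writtenWithRawBitsCompare(double v) {
    return Double.doubleToRawLongBits(v) != 0;
  }

  public static void main(String[] args) {
    System.out.println(writtenWithValueCompare(-0.0d));   // false
    System.out.println(writtenWithRawBitsCompare(-0.0d)); // true
  }
}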
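The new ConnectEnum helper annotates a plain STRING schema with enum metadata via schema parameters, storing each symbol's ordinal under LOGICAL_PARAMETER + "." + symbol. A minimal usage sketch, assuming a hypothetical Java enum Color and annotation "com.acme.Color" (neither appears in the diff):

import io.confluent.connect.schema.ConnectEnum;
import java.util.Arrays;
import org.apache.kafka.connect.data.Schema;

public class ConnectEnumSketch {
  // Hypothetical Java enum used only for this example.
  enum Color { RED, GREEN, BLUE }

  public static void main(String[] args) {
    // Build a string schema carrying the enum annotation and one parameter per symbol.
    Schema schema = ConnectEnum.builder("com.acme.Color",
        Arrays.asList("RED", "GREEN", "BLUE")).build();

    System.out.println(ConnectEnum.isEnum(schema));                // true
    System.out.println(ConnectEnum.hasEnumSymbol(schema, "RED"));  // true
    System.out.println(ConnectEnum.toOrdinal(schema, "GREEN"));    // 1

    Color c = ConnectEnum.toLogical(schema, Color.class, "BLUE");  // Color.BLUE
    System.out.println(ConnectEnum.fromLogical(schema, c));        // BLUE
  }
}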
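Likewise, ConnectUnion models a union as a STRUCT carrying the union annotation, with one field per member; toLogical wraps a raw value into the first field whose schema accepts it, and fromLogical unwraps the single populated field. A minimal usage sketch; the annotation "com.acme.StringOrInt" and the field names string_member/int_member are illustrative only:

import io.confluent.connect.schema.ConnectUnion;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;

public class ConnectUnionSketch {
  public static void main(String[] args) {
    // A two-branch union modeled as an annotated struct with optional member fields.
    Schema union = ConnectUnion.builder("com.acme.StringOrInt")
        .field("string_member", Schema.OPTIONAL_STRING_SCHEMA)
        .field("int_member", Schema.OPTIONAL_INT32_SCHEMA)
        .build();

    // toLogical picks the first field whose schema validates the value (int_member here).
    Struct wrapped = ConnectUnion.toLogical(union, 42);
    System.out.println(wrapped.get("int_member"));                 // 42

    // toLogicalUsingName targets an explicit branch instead of matching by schema.
    Struct named = ConnectUnion.toLogicalUsingName(union, "string_member", "hello");

    // fromLogical returns the value of the single populated branch.
    System.out.println(ConnectUnion.fromLogical(union, wrapped));  // 42
    System.out.println(ConnectUnion.fromLogical(union, named));    // hello
  }
}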