
Commit

set precombine in test
Jonathan Vexler committed Nov 27, 2023
1 parent 2a9a363 commit 38b2603
Showing 3 changed files with 11 additions and 7 deletions.
@@ -24,6 +24,7 @@
 import org.apache.hudi.common.model.HoodieRecord.HoodieRecordType;
 import org.apache.hudi.common.table.HoodieTableConfig;
 import org.apache.hudi.common.util.Option;
+import org.apache.hudi.common.util.StringUtils;
 import org.apache.hudi.common.util.collection.Pair;

 import org.apache.avro.Schema;
@@ -131,7 +132,13 @@ default String[] getMandatoryFieldsForMerging(HoodieTableConfig cfg) {
     } else {
       cfg.getRecordKeyFieldStream().forEach(requiredFields::add);
     }
-    requiredFields.add(cfg.getPreCombineField());
+    String preCombine = cfg.getPreCombineField();
+
+    //maybe throw exception otherwise
+    if (!StringUtils.isNullOrEmpty(preCombine)) {
+      requiredFields.add(preCombine);
+    }
+
     return requiredFields.toArray(new String[0]);
   }

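For context, a minimal standalone sketch of the guarded behavior (plain Java stand-ins for the HoodieTableConfig record-key stream and for StringUtils.isNullOrEmpty; illustrative only, not the actual reader code):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PreCombineGuardSketch {
  // Mirrors the changed logic: the precombine field is only added to the
  // mandatory merge fields when it is actually configured on the table.
  static String[] mandatoryFields(List<String> recordKeyFields, String preCombine) {
    List<String> requiredFields = new ArrayList<>(recordKeyFields);
    if (preCombine != null && !preCombine.isEmpty()) { // stand-in for !StringUtils.isNullOrEmpty(preCombine)
      requiredFields.add(preCombine);
    }
    return requiredFields.toArray(new String[0]);
  }

  public static void main(String[] args) {
    // Table with a precombine field configured: record_key, ts
    System.out.println(String.join(",", mandatoryFields(Arrays.asList("record_key"), "ts")));
    // Table without one: just record_key; before this change the empty value was added as well.
    System.out.println(String.join(",", mandatoryFields(Arrays.asList("record_key"), "")));
  }
}

Skipping an unset precombine field keeps an empty field name out of the mandatory list, which is presumably what avoids the "does not exist in the table schema" error shown in the next file for tables with no precombine field.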
@@ -175,7 +175,7 @@ private Schema generateRequiredSchema() {
       if (requestedSchema.getField(field) == null) {
         Schema.Field foundField = dataSchema.getField(field);
         if (foundField == null) {
-          throw new IllegalArgumentException("Filed: " + field + " does not exist in the table schema");
+          throw new IllegalArgumentException("Field: " + field + " does not exist in the table schema");
         }
         addedFields.add(new Schema.Field(foundField.name(), foundField.schema(), foundField.doc(), foundField.defaultVal()));
       }
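That exception path is reached when a mandatory field cannot be resolved against the table schema. A small illustrative Avro snippet of the same lookup (the schema and field name here are made up for the example):

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;

public class MissingFieldLookupSketch {
  public static void main(String[] args) {
    // Made-up table schema with only a record key and an ordering column.
    Schema dataSchema = SchemaBuilder.record("rec").fields()
        .requiredString("record_key")
        .requiredLong("ts")
        .endRecord();

    String field = "not_a_column"; // e.g. a misconfigured mandatory field
    if (dataSchema.getField(field) == null) {
      // Same shape as the corrected message above ("Field:" rather than "Filed:").
      throw new IllegalArgumentException("Field: " + field + " does not exist in the table schema");
    }
  }
}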
@@ -85,9 +85,7 @@ public void setUp() throws IOException {
     properties.setProperty(
         HoodieTableConfig.BASE_FILE_FORMAT.key(),
         HoodieTableConfig.BASE_FILE_FORMAT.defaultValue().toString());
-    properties.setProperty(
-        PAYLOAD_ORDERING_FIELD_PROP_KEY,
-        HoodieRecord.HoodieMetadataField.RECORD_KEY_METADATA_FIELD.getFieldName());
+    properties.setProperty(HoodieTableConfig.PRECOMBINE_FIELD.key(), "record_key");
     metaClient = getHoodieMetaClient(hadoopConf(), basePath(), HoodieTableType.MERGE_ON_READ, properties);
   }

@@ -165,8 +163,7 @@ public HoodieWriteConfig getWriteConfig(Schema avroSchema) {
         LOGFILE_DATA_BLOCK_FORMAT.key(),
         "parquet");
     extraProperties.setProperty(
-        PAYLOAD_ORDERING_FIELD_PROP_KEY,
-        HoodieRecord.HoodieMetadataField.RECORD_KEY_METADATA_FIELD.getFieldName());
+        HoodieWriteConfig.PRECOMBINE_FIELD_NAME.key(), "record_key");
     extraProperties.setProperty(
         FILE_GROUP_READER_ENABLED.key(),
         "true");
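A minimal sketch of the precombine properties the test now sets (plain java.util.Properties; the surrounding test harness such as getHoodieMetaClient is assumed and not shown, and the HoodieWriteConfig import path is an assumption):

import java.util.Properties;

import org.apache.hudi.common.table.HoodieTableConfig;
import org.apache.hudi.config.HoodieWriteConfig;

public class PrecombineTestPropsSketch {
  public static void main(String[] args) {
    // Table-level precombine field, as set in setUp() above.
    Properties tableProps = new Properties();
    tableProps.setProperty(HoodieTableConfig.PRECOMBINE_FIELD.key(), "record_key");

    // Writer-level precombine field, as set in getWriteConfig() above.
    Properties writeProps = new Properties();
    writeProps.setProperty(HoodieWriteConfig.PRECOMBINE_FIELD_NAME.key(), "record_key");

    tableProps.forEach((k, v) -> System.out.println(k + "=" + v));
    writeProps.forEach((k, v) -> System.out.println(k + "=" + v));
  }
}

Both properties point at the test's record_key column, replacing the previous payload ordering field setting.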
