Skip to content

Commit

Permalink
try fix ci
Browse files Browse the repository at this point in the history
  • Loading branch information
Jonathan Vexler committed Nov 27, 2023
1 parent 0b7f47a commit 60bdf14
Show file tree
Hide file tree
Showing 3 changed files with 10 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -152,8 +152,9 @@ class SparkFileFormatInternalRowReaderContext(readerMaps: mutable.Map[Long, Part
}

override def projectRecord(from: Schema, to: Schema): UnaryOperator[InternalRow] = {
  // Convert both Avro schemas to their Catalyst StructType equivalents and build
  // an unsafe projection that reshapes a row with the `from` layout into the
  // `to` layout.
  val projection = HoodieCatalystExpressionUtils.generateUnsafeProjection(
    AvroConversionUtils.convertAvroSchemaToStructType(from),
    AvroConversionUtils.convertAvroSchemaToStructType(to))
  // The explicit cast is needed for Spark 2.4, where the projection's apply
  // is not statically typed as returning InternalRow; on newer Spark versions
  // the cast is a no-op.
  u: InternalRow => projection(u).asInstanceOf[InternalRow]
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
import org.apache.avro.Schema;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.Closeable;
import java.io.IOException;
Expand All @@ -52,6 +53,7 @@
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.apache.hudi.common.fs.FSUtils.getRelativePartitionPath;
import static org.apache.hudi.common.table.HoodieTableConfig.RECORD_MERGER_STRATEGY;
import static org.apache.hudi.common.util.ConfigUtils.getBooleanWithAltKeys;
import static org.apache.hudi.common.util.ConfigUtils.getIntWithAltKeys;
Expand Down Expand Up @@ -106,7 +108,7 @@ public HoodieFileGroupReader(HoodieReaderContext<T> readerContext,
this.readerContext = readerContext;
this.hadoopConf = hadoopConf;
this.hoodieBaseFileOption = fileSlice.getBaseFile();
this.logFiles = fileSlice.getLogFiles().collect(Collectors.toList());
this.logFiles = fileSlice.getLogFiles().sorted(HoodieLogFile.getLogFileComparator()).collect(Collectors.toList());
this.props = props;
this.start = start;
this.length = length;
Expand Down Expand Up @@ -272,7 +274,8 @@ private void scanLogFiles() {
.withReadBlocksLazily(getBooleanWithAltKeys(props, HoodieReaderConfig.COMPACTION_LAZY_BLOCK_READ_ENABLE))
.withReverseReader(false)
.withBufferSize(getIntWithAltKeys(props, HoodieMemoryConfig.MAX_DFS_STREAM_BUFFER_SIZE))
.withPartition("needtofix")
.withPartition(getRelativePartitionPath(
new Path(readerState.tablePath), logFiles.get(0).getPath().getParent()))
.withRecordMerger(recordMerger)
.withRecordBuffer(recordBuffer)
.build();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@
import org.junit.jupiter.params.provider.ValueSource;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -157,7 +156,7 @@ private void validateOutputFromFileGroupReader(Configuration hadoopConf,
SyncableFileSystemView fsView = viewManager.getFileSystemView(metaClient);
FileSlice fileSlice = fsView.getAllFileSlices(partitionPaths[0]).findFirst().get();
List<String> logFilePathList = getLogFileListFromFileSlice(fileSlice);
Collections.sort(logFilePathList);
//Collections.sort(logFilePathList);
assertEquals(expectedLogFileNum, logFilePathList.size());

List<T> actualRecordList = new ArrayList<>();
Expand Down

0 comments on commit 60bdf14

Please sign in to comment.