fix: Fix 3.5.0 compile issues
nightscape committed Nov 14, 2023
1 parent cda2140 commit af8172a
Showing 13 changed files with 115 additions and 55 deletions.
build.sc (65 changes: 18 additions & 47 deletions)
@@ -8,50 +8,25 @@ trait SparkModule extends Cross.Module2[String, String] with SbtModule with CiRe
outer =>
override def scalaVersion = crossValue
val sparkVersion = crossValue2
-val Array(sparkMajor, sparkMinor, sparkPatch) = sparkVersion.split("\\.")
-val sparkBinaryVersion = s"$sparkMajor.$sparkMinor"

override def millSourcePath = super.millSourcePath / os.up

// Custom source layout for Spark Data Source API 2
-val sparkVersionSpecificSources = if (sparkVersion >= "3.4.0") {
-  Seq("scala", "3.0_and_up/scala", "3.1_and_up/scala", "3.2_and_up/scala", "3.3_and_up/scala", "3.4_and_up/scala")
-} else if (sparkVersion >= "3.3.0") {
-  Seq(
-    "scala",
-    "3.0_3.1_3.2_3.3/scala",
-    "3.0_and_up/scala",
-    "3.1_and_up/scala",
-    "3.2_and_up/scala",
-    "3.3_and_up/scala"
-  )
-} else if (sparkVersion >= "3.2.0") {
-  Seq(
-    "scala",
-    "3.0_3.1_3.2/scala",
-    "3.0_3.1_3.2_3.3/scala",
-    "3.0_and_up/scala",
-    "3.1_and_up/scala",
-    "3.2_and_up/scala"
-  )
-} else if (sparkVersion >= "3.1.0") {
-  Seq(
-    "scala",
-    "3.1/scala",
-    "3.0_3.1/scala",
-    "3.0_3.1_3.2_3.3/scala",
-    "3.0_3.1_3.2/scala",
-    "3.0_and_up/scala",
-    "3.1_and_up/scala"
-  )
-} else if (sparkVersion >= "3.0.0") {
-  Seq("scala", "3.0/scala", "3.0_3.1/scala", "3.0_3.1_3.2_3.3/scala", "3.0_3.1_3.2/scala", "3.0_and_up/scala")
-} else if (sparkVersion >= "2.4.0") {
-  Seq("scala", "2.4/scala")
-} else {
-  throw new UnsupportedOperationException(s"sparkVersion ${sparkVersion} is not supported")
-}
+def sparkVersionSpecificSources = T {
+  val versionSpecificDirs = os.list(os.pwd / "src" / "main")
+  val Array(sparkMajor, sparkMinor, sparkPatch) = sparkVersion.split("\\.")
+  val sparkBinaryVersion = s"$sparkMajor.$sparkMinor"
+  versionSpecificDirs.filter(_.last match {
+    case "scala" => true
+    case `sparkBinaryVersion` => true
+    case s"${sparkMaj}.${sparkMin}_and_up" => sparkMaj == sparkMajor && sparkMin <= sparkMinor
+    case s"${sparkLow}_to_${sparkHigh}" => sparkLow <= sparkVersion && sparkHigh >= sparkBinaryVersion
+    case _ => false
+  })
+}

override def sources = T.sources {
-super.sources() ++ sparkVersionSpecificSources.map(s => PathRef(millSourcePath / "src" / "main" / os.RelPath(s)))
+super.sources() ++ sparkVersionSpecificSources().map(PathRef(_))
}

override def docSources = T.sources(Seq[PathRef]())
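
A note on the new sparkVersionSpecificSources task: instead of a hard-coded if/else cascade per Spark release, source directories are now selected by matching their names against the Spark version being built. The following standalone sketch (not part of the commit; the directory names are made-up examples) shows the same selection logic, using the Scala 2.13 string-interpolator patterns the build relies on:

object SourceDirDemo extends App {
  val sparkVersion = "3.5.0"
  val Array(sparkMajor, sparkMinor, _) = sparkVersion.split("\\.")
  val sparkBinaryVersion = s"$sparkMajor.$sparkMinor" // "3.5"
  // Hypothetical entries under src/main
  val dirs = List("scala", "3.5", "3.4_and_up", "3.0_to_3.3", "2.4")
  val selected = dirs.filter {
    case "scala" => true // shared sources, always included
    case `sparkBinaryVersion` => true // exact binary-version directory
    case s"${maj}.${min}_and_up" => maj == sparkMajor && min <= sparkMinor
    case s"${low}_to_${high}" => low <= sparkVersion && high >= sparkBinaryVersion
    case _ => false
  }
  println(selected) // List(scala, 3.5, 3.4_and_up)
}

The comparisons are lexicographic on Strings, which is sound as long as major and minor version numbers stay single-digit.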
@@ -156,14 +131,10 @@ val spark32 = List("3.2.4")
val spark33 = List("3.3.3")
val spark34 = List("3.4.1")
val spark35 = List("3.5.0")
+val sparkVersions = spark24 ++ spark30 ++ spark31 ++ spark32 ++ spark33 ++ spark34 ++ spark35
+val crossMatrix =
+  sparkVersions.map(spark => (scala212, spark)) ++
+    sparkVersions.filter(_ >= "3.2").map(spark => (scala213, spark))

-val crossMatrix = {
-
-  (spark24 ++ spark30 ++ spark31 ++ spark32 ++ spark33 ++ spark34 ++ spark35).map(spark =>
-    (scala212, spark)
-  ) ++ (spark32 ++ spark33 ++ spark34 ++ spark35).map(spark => (scala213, spark))
-
-  // (spark34).map(spark => (scala212, spark))
-}

object `spark-excel` extends Cross[SparkModule](crossMatrix) {}
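
For reference, a sketch of what the rewritten crossMatrix evaluates to. The scala212/scala213 values and the 2.4/3.0/3.1 Spark patch versions are placeholders here; the real ones are defined earlier in build.sc:

val scala212 = "2.12.18" // placeholder
val scala213 = "2.13.12" // placeholder
val sparkVersions = List("2.4.8", "3.0.3", "3.1.3", "3.2.4", "3.3.3", "3.4.1", "3.5.0")
val crossMatrix =
  sparkVersions.map(spark => (scala212, spark)) ++ // every Spark version on 2.12
    sparkVersions.filter(_ >= "3.2").map(spark => (scala213, spark)) // Spark >= 3.2 also on 2.13

The rewrite also drops the commented-out leftover from the old block and states the Scala 2.13 cut-off ("3.2", compared lexicographically) exactly once.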
ExcelParserBase.scala
@@ -0,0 +1,28 @@
/*
* Copyright 2022 Martin Mauch (@nightscape)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.crealytics.spark.excel.v2

import _root_.org.apache.spark.sql.catalyst.util.BadRecordException
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.sql.catalyst.InternalRow

trait ExcelParserBase {

protected def getCurrentInput: UTF8String
def badRecord(partialResults: Array[InternalRow], baseException: Throwable): BadRecordException =
BadRecordException(() => getCurrentInput, () => partialResults.headOption, baseException)
}
ExcelParserBase.scala
@@ -0,0 +1,28 @@
/*
* Copyright 2022 Martin Mauch (@nightscape)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.crealytics.spark.excel.v2

import _root_.org.apache.spark.sql.catalyst.util.BadRecordException
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.sql.catalyst.InternalRow

trait ExcelParserBase {

protected def getCurrentInput: UTF8String
def badRecord(partialResults: Array[InternalRow], baseException: Throwable): BadRecordException =
BadRecordException(() => getCurrentInput, () => partialResults.headOption, baseException)
}
ExcelPartitionReaderFactory.scala
@@ -26,6 +26,8 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.Filter
import org.apache.spark.sql.types.StructType
import org.apache.spark.util.SerializableConfiguration

+import java.net.URI
import scala.util.control.NonFatal

/** A factory used to create Excel readers.
@@ -75,7 +77,7 @@ case class ExcelPartitionReaderFactory(
requiredSchema: StructType
): SheetData[InternalRow] = {
val excelHelper = ExcelHelper(options)
-val sheetData = excelHelper.getSheetData(conf, file.filePath.toUri)
+val sheetData = excelHelper.getSheetData(conf, URI.create(file.filePath.toString))
try {
SheetData(
ExcelParser.parseIterator(sheetData.rowIterator, parser, headerChecker, requiredSchema),
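
Context for the URI change (my inference; the commit message only says "compile issues"): from Spark 3.4 on, PartitionedFile.filePath is a SparkPath rather than a plain String, and going through toString yields code that compiles against both representations, which matters for sources shared across version trees:

// Sketch, assuming file: PartitionedFile is in scope as in the method above.
// Spark <= 3.3: filePath is a String    -> URI.create parses it directly
// Spark 3.4+:   filePath is a SparkPath -> URI.create parses its string form
val uri = java.net.URI.create(file.filePath.toString)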
ExcelParserBase.scala
@@ -0,0 +1,28 @@
/*
* Copyright 2022 Martin Mauch (@nightscape)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.crealytics.spark.excel.v2

import _root_.org.apache.spark.sql.catalyst.util.BadRecordException
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.sql.catalyst.InternalRow

trait ExcelParserBase {

protected def getCurrentInput: UTF8String
def badRecord(partialResults: Array[InternalRow], baseException: Throwable): BadRecordException =
BadRecordException(() => getCurrentInput, () => partialResults, baseException)
}
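
Why ExcelParserBase is added once per version-specific source tree (the copies above differ only in the badRecord body): Spark 3.5 changed BadRecordException so that the partial-results thunk returns Array[InternalRow] instead of Option[InternalRow]. The trait gives ExcelParser a single badRecord call site and lets each source tree adapt it to the constructor it compiles against. A self-contained sketch with stand-in types (not Spark's real classes):

object BadRecordDemo extends App {
  case class Row(v: Int)
  case class BadRecord34(partial: () => Option[Row]) // sketch of the <= 3.4 shape
  case class BadRecord35(partial: () => Array[Row])  // sketch of the 3.5 shape

  val partialResults = Array(Row(1))
  val pre35 = BadRecord34(() => partialResults.headOption) // what the <= 3.4 copies do
  val v35 = BadRecord35(() => partialResults)              // what the 3.5 copy does
  println((pre35.partial(), v35.partial().toList)) // (Some(Row(1)),List(Row(1)))
}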
ExcelPartitionReaderFactory.scala
@@ -18,7 +18,7 @@ package com.crealytics.spark.excel.v2

import org.apache.hadoop.conf.Configuration
import org.apache.spark.broadcast.Broadcast
-import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.{InternalRow, FileSourceOptions}
import org.apache.spark.sql.connector.read.PartitionReader
import org.apache.spark.sql.execution.datasources.PartitionedFile
import org.apache.spark.sql.execution.datasources.v2._
@@ -54,7 +54,10 @@ case class ExcelPartitionReaderFactory(
parsedOptions: ExcelOptions,
filters: Seq[Filter]
) extends FilePartitionReaderFactory {

+protected def options: FileSourceOptions = new FileSourceOptions(Map(
+  FileSourceOptions.IGNORE_CORRUPT_FILES -> "true",
+  FileSourceOptions.IGNORE_MISSING_FILES -> "true"
+))
override def buildReader(file: PartitionedFile): PartitionReader[InternalRow] = {
val conf = broadcastedConf.value.value
val actualDataSchema =
@@ -77,7 +80,7 @@
requiredSchema: StructType
): SheetData[InternalRow] = {
val excelHelper = ExcelHelper(parsedOptions)
-val sheetData = excelHelper.getSheetData(conf, URI.create(file.filePath))
+val sheetData = excelHelper.getSheetData(conf, URI.create(file.filePath.toString))
try {
SheetData(
ExcelParser.parseIterator(sheetData.rowIterator, parser, headerChecker, requiredSchema),
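
Background for the new options member (again an inference): FilePartitionReaderFactory in newer Spark versions expects subclasses to provide a FileSourceOptions, which is what broke compilation here. Note that hardcoding both flags to "true" always skips corrupt and missing files; a variant that defers to the session configuration instead would look roughly like this (sketch, assuming org.apache.spark.sql.internal.SQLConf is imported):

protected def options: FileSourceOptions = new FileSourceOptions(Map(
  FileSourceOptions.IGNORE_CORRUPT_FILES -> SQLConf.get.ignoreCorruptFiles.toString,
  FileSourceOptions.IGNORE_MISSING_FILES -> SQLConf.get.ignoreMissingFiles.toString
))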
ExcelParser.scala
@@ -42,7 +42,7 @@ import org.apache.poi.ss.usermodel.DateUtil
* The pushdown filters that should be applied to converted values.
*/
class ExcelParser(dataSchema: StructType, requiredSchema: StructType, val options: ExcelOptions, filters: Seq[Filter])
-  extends Logging {
+  extends Logging with ExcelParserBase {
require(
requiredSchema.toSet.subsetOf(dataSchema.toSet),
s"requiredSchema (${requiredSchema.catalogString}) should be the subset of " +
@@ -95,7 +95,7 @@ class ExcelParser(dataSchema: StructType, requiredSchema: StructType, val option
private val pushedFilters = new ExcelFilters(filters, requiredSchema)

/* Retrieve the raw record string. */
-private def getCurrentInput: UTF8String = UTF8String
+protected def getCurrentInput: UTF8String = UTF8String
.fromString("TODO: how to show the corrupted record?")

/** This parser first picks some tokens from the input tokens, according to the required schema, then parse these
@@ -352,7 +352,7 @@ class ExcelParser(dataSchema: StructType, requiredSchema: StructType, val option

private def convert(tokens: Vector[Cell]): Option[InternalRow] = {
if (tokens == null) {
-throw BadRecordException(() => getCurrentInput, () => None, new RuntimeException("Malformed Excel record"))
+throw badRecord(Array.empty, new RuntimeException("Malformed Excel record"))
}

var badRecordException: Option[Throwable] =
@@ -396,7 +396,7 @@ class ExcelParser(dataSchema: StructType, requiredSchema: StructType, val option
if (skipRow) { noRows }
else {
if (badRecordException.isDefined) {
-throw BadRecordException(() => getCurrentInput, () => requiredRow.headOption, badRecordException.get)
+throw badRecord(requiredRow.toArray, badRecordException.get)
} else { requiredRow }
}
}
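
Taken together, the parser now funnels every malformed-record error through the version-aware badRecord helper, so the V2 reader's error handling keeps working on Spark 3.5. A hypothetical end-user sketch (option names as commonly used with the spark-excel V2 source; spark is an active SparkSession):

val df = spark.read
  .format("excel")
  .option("header", "true")
  .option("mode", "PERMISSIVE") // malformed rows become partial/null rows instead of failing the job
  .load("/path/to/workbook.xlsx")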
