Common: bump schema-ddl to 0.12.0 (close #192)
chuwy committed Oct 30, 2020
1 parent 7c30cfe commit 6b98790
Showing 3 changed files with 18 additions and 18 deletions.
@@ -20,7 +20,7 @@ import cats.data._
import cats.implicits._

import com.snowplowanalytics.iglu.client.Client
-import com.snowplowanalytics.iglu.core.circe.instances._
+import com.snowplowanalytics.iglu.core.circe.implicits.{ schemaCriterionDecoder => _, _ }
import com.snowplowanalytics.iglu.core.{SelfDescribingData, SchemaCriterion}

import io.circe._
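The import above swaps iglu-core-circe's old `instances` package for `implicits`, hiding its `schemaCriterionDecoder` presumably so it cannot clash with the criterion decoder this file defines further down. A minimal, self-contained sketch of the selective-import syntax used here; the `Greeting` object is made up for illustration:

```scala
object Greeting {
  val hello = "hello"
  val bye   = "bye"
}

object ImportSketch {
  // Import everything from Greeting except `bye`: the `x => _` form hides a
  // member, the trailing `_` brings in the rest (same shape as the diff's
  // `{ schemaCriterionDecoder => _, _ }`)
  import Greeting.{ bye => _, _ }

  val greeting: String = hello
}
```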
@@ -104,7 +104,7 @@ object StorageTarget {
tcpKeepAlive: Option[Boolean],
tcpKeepAliveMinutes: Option[Int]) {
/** Either errors or list of mutators to update the `Properties` object */
-val validation: Either[ParseError, List[Properties => Unit]] = jdbcEncoder.encodeObject(this).toList.map {
+val validation: Either[ParseError, List[Properties => Unit]] = RedshiftJdbc.jdbcEncoder.encodeObject(this).toList.map {
case (property, value) => value.fold(
((_: Properties) => ()).asRight,
b => ((props: Properties) => { props.setProperty(property, b.toString); () }).asRight,
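The only change in this hunk is the `RedshiftJdbc.` qualifier, because the codecs move into the companion object in the next hunk. For context, `Json.fold` dispatches on the six JSON shapes (null, boolean, number, string, array, object); a cut-down sketch of the pattern the fold above relies on, turning each key/value pair into a `Properties` mutator. Unlike the real code, which reports arrays and objects as errors, this sketch treats them as no-ops:

```scala
import java.util.Properties
import io.circe.Json

object FoldSketch {
  // Turn one JSON field into a side-effecting Properties update; null, arrays
  // and objects become no-ops here (the real validation turns the last two
  // into ParseError values instead)
  def mutator(property: String, value: Json): Properties => Unit =
    value.fold(
      (_: Properties) => (),
      b => (props: Properties) => { props.setProperty(property, b.toString); () },
      n => (props: Properties) => { props.setProperty(property, n.toString); () },
      s => (props: Properties) => { props.setProperty(property, s); () },
      _ => (_: Properties) => (),
      _ => (_: Properties) => ()
    )
}
```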
@@ -131,20 +131,20 @@ object StorageTarget {

object RedshiftJdbc {
val empty = RedshiftJdbc(None, None, None, None, None, None, None, None, None, None, None, None)
-}

-implicit val jdbcDecoder: Decoder[RedshiftJdbc] =
-Decoder.forProduct12("BlockingRowsMode", "DisableIsValidQuery", "DSILogLevel",
-"FilterLevel", "loginTimeout", "loglevel", "socketTimeout", "ssl", "sslMode",
-"sslRootCert", "tcpKeepAlive", "TCPKeepAliveMinutes")(RedshiftJdbc.apply)
-
-implicit val jdbcEncoder: ObjectEncoder[RedshiftJdbc] =
-Encoder.forProduct12("BlockingRowsMode", "DisableIsValidQuery", "DSILogLevel",
-"FilterLevel", "loginTimeout", "loglevel", "socketTimeout", "ssl", "sslMode",
-"sslRootCert", "tcpKeepAlive", "TCPKeepAliveMinutes")((j: RedshiftJdbc) =>
-(j.blockingRows, j.disableIsValidQuery, j.dsiLogLevel,
-j.filterLevel, j.loginTimeout, j.loglevel, j.socketTimeout, j.ssl, j.sslMode,
-j.sslRootCert, j.tcpKeepAlive, j.tcpKeepAliveMinutes))
+implicit val jdbcDecoder: Decoder[RedshiftJdbc] =
+Decoder.forProduct12("BlockingRowsMode", "DisableIsValidQuery", "DSILogLevel",
+"FilterLevel", "loginTimeout", "loglevel", "socketTimeout", "ssl", "sslMode",
+"sslRootCert", "tcpKeepAlive", "TCPKeepAliveMinutes")(RedshiftJdbc.apply)
+
+implicit val jdbcEncoder: Encoder.AsObject[RedshiftJdbc] =
+Encoder.forProduct12("BlockingRowsMode", "DisableIsValidQuery", "DSILogLevel",
+"FilterLevel", "loginTimeout", "loglevel", "socketTimeout", "ssl", "sslMode",
+"sslRootCert", "tcpKeepAlive", "TCPKeepAliveMinutes")((j: RedshiftJdbc) =>
+(j.blockingRows, j.disableIsValidQuery, j.dsiLogLevel,
+j.filterLevel, j.loginTimeout, j.loglevel, j.socketTimeout, j.ssl, j.sslMode,
+j.sslRootCert, j.tcpKeepAlive, j.tcpKeepAliveMinutes))
+}

/** Reference to encrypted entity inside EC2 Parameter Store */
case class ParameterStoreConfig(parameterName: String)
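Two things change in this block: the codecs move inside the `RedshiftJdbc` companion (hence `RedshiftJdbc.jdbcEncoder` in `validation` above), and the encoder's type is written as `Encoder.AsObject`, circe's newer name for what it used to call `ObjectEncoder`. A minimal sketch of the pattern, using a made-up two-field case class in place of the twelve-field `RedshiftJdbc`:

```scala
import io.circe.{Encoder, Json}

// Hypothetical stand-in for RedshiftJdbc, just to keep the sketch short
final case class MiniJdbc(ssl: Option[Boolean], loginTimeout: Option[Int])

object MiniJdbc {
  // Encoder.forProductN returns an Encoder.AsObject, so encodeObject is
  // available and yields a JsonObject rather than a plain Json
  implicit val miniJdbcEncoder: Encoder.AsObject[MiniJdbc] =
    Encoder.forProduct2("ssl", "loginTimeout")((m: MiniJdbc) => (m.ssl, m.loginTimeout))

  // Walk the object's fields as key/value pairs, the way validation does
  val pairs: List[(String, Json)] =
    miniJdbcEncoder.encodeObject(MiniJdbc(Some(true), None)).toList
}
```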
@@ -238,7 +238,7 @@ object StorageTarget {
implicit val passwordConfigDecoder: Decoder[PasswordConfig] =
deriveDecoder[PasswordConfig]

-implicit val schemaCriterionConfigDecoder: Decoder[SchemaCriterion] =
+implicit def schemaCriterionConfigDecoder: Decoder[SchemaCriterion] =
Decoder.decodeString.emap {
s => SchemaCriterion.parse(s).toRight(s"Cannot parse [$s] as Iglu SchemaCriterion, it must have iglu:vendor/name/format/1-*-* format")
}
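A small usage sketch of the decoder above, with the decoder reproduced locally so the snippet stands on its own; the `com.acme/example_event` criterion string is made up:

```scala
import io.circe.{Decoder, Error}
import io.circe.parser.decode
import com.snowplowanalytics.iglu.core.SchemaCriterion

object CriterionSketch {
  // Same shape as the decoder in the hunk above: parse a JSON string into a
  // SchemaCriterion, or surface a readable error
  implicit val criterionDecoder: Decoder[SchemaCriterion] =
    Decoder.decodeString.emap { s =>
      SchemaCriterion.parse(s).toRight(s"Cannot parse [$s] as Iglu SchemaCriterion")
    }

  // "iglu:vendor/name/format/1-*-*" strings decode straight from JSON
  val parsed: Either[Error, SchemaCriterion] =
    decode[SchemaCriterion]("\"iglu:com.acme/example_event/jsonschema/1-*-*\"")
}
```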
@@ -90,7 +90,7 @@ object EventUtils {
* @return list of columns or flattening error
*/
def flatten[F[_]: Monad: RegistryLookup: Clock](resolver: Resolver[F], instance: SelfDescribingData[Json]): EitherT[F, FailureDetails.LoaderIgluError, List[String]] =
-getOrdered(resolver, instance.schema).map { ordered => FlatData.flatten(instance.data, ordered, Some(escape)) }
+getOrdered(resolver, instance.schema).map { ordered => FlatData.flatten(instance.data, ordered, FlatData.getString(Some(escape)), "") }

/** Prevents data with newlines and tabs from breaking the loading process */
private def escape(s: String): String =
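The `flatten` call changes shape here because schema-ddl 0.12.0 seemingly takes an explicit value getter plus a default for absent properties: the optional `escape` function is wrapped as `FlatData.getString(Some(escape))` and an empty string is supplied as the fallback. That reading is inferred from the call site in this diff rather than from schema-ddl's own documentation.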
project/Dependencies.scala: 2 changes (1 addition, 1 deletion)
@@ -31,7 +31,7 @@ object Dependencies {
// Scala (Shredder)
val spark = "3.0.1"
val eventsManifest = "0.2.0"
-val schemaDdl = "0.10.0"
+val schemaDdl = "0.12.0"

// Java (Loader)
val redshift = "1.2.36.1060"
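For reference, a version pin like `schemaDdl` above is normally consumed elsewhere in project/Dependencies.scala as an sbt ModuleID. The surrounding object and value names in this sketch are assumptions, not lines from the actual file:

```scala
import sbt._

// Sketch only: the real Dependencies.scala groups versions and ModuleIDs
// under its own names, which this diff does not show
object DependenciesSketch {
  object V {
    val schemaDdl = "0.12.0"
  }

  // schema-ddl is published under the com.snowplowanalytics organisation
  val schemaDdl: ModuleID = "com.snowplowanalytics" %% "schema-ddl" % V.schemaDdl
}
```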
