Automate release process through CI (#3440)
* Set the CI release process

* Bump SBT version up

* Remove all explicit log4j calls

* Fix Scala 2.13.7 compilation

* Add log4j1 bridge
pomadchin authored Dec 25, 2021
1 parent fa62e3a commit 3782358
Showing 16 changed files with 71 additions and 86 deletions.
15 changes: 5 additions & 10 deletions .circleci/config.yml
@@ -39,18 +39,16 @@ jobs:
      - save_cache: *save_build_cache

  publish:
-    parameters:
-      scala-version:
-        type: string
    executor: executor-gdal
-    environment:
-      SCALA_VERSION: << parameters.scala-version >>
    steps:
      - checkout
      - restore_cache: *restore_build_cache
      - run:
          name: Artifacts publish
-          command: ./sbt -Dsbt.supershell=false "++$SCALA_VERSION" publishLocal
+          command: |
+            if [[ ! -z "$SONATYPE_PASSWORD" && ! -z "$SONATYPE_USERNAME" ]]; then
+              ./sbt -Dsbt.supershell=false ci-release
+            fi
      - save_cache: *save_build_cache

workflows:
@@ -65,11 +63,8 @@ workflows:
            only: master

      - publish:
-          matrix:
-            parameters:
-              scala-version: ["2.12.15", "2.13.6"]
          filters:
            branches:
-              only: master
+              only: /.*/
+            tags:
+              only: /^v.*/
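Note on the release flow: the credential guard makes ci-release a no-op on forks and local builds without Sonatype secrets, while the widened filters (all branches, plus v* tags) match sbt-ci-release's snapshot-on-commit, release-on-tag model. A rough sketch of the build metadata the plugin expects for Sonatype follows; the developer entry and URLs are illustrative placeholders, not taken from this commit:

// Hypothetical sbt sketch; sbt-ci-release also reads SONATYPE_USERNAME,
// SONATYPE_PASSWORD, PGP_SECRET and PGP_PASSPHRASE from the CI environment.
inThisBuild(List(
  organization := "org.locationtech.geotrellis",
  homepage     := Some(url("https://github.com/locationtech/geotrellis")),
  licenses     := List("Apache-2.0" -> url("https://www.apache.org/licenses/LICENSE-2.0.html")),
  developers   := List(Developer("id", "Full Name", "dev@example.com", url("https://example.com")))
))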
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -14,7 +14,7 @@ jobs:
name: Build and Test
strategy:
matrix:
scala: ["2.12.15", "2.13.6"]
scala: ["2.12.15", "2.13.7"]
runs-on: ubuntu-latest

env:
13 changes: 1 addition & 12 deletions build.sbt
@@ -2,7 +2,7 @@ import sbt.Keys._

ThisBuild / scalaVersion := "2.12.15"
ThisBuild / organization := "org.locationtech.geotrellis"
-ThisBuild / crossScalaVersions := List("2.12.15", "2.13.6")
+ThisBuild / crossScalaVersions := List("2.12.15", "2.13.7")

lazy val root = Project("geotrellis", file("."))
.aggregate(
@@ -38,17 +38,6 @@ lazy val root = Project("geotrellis", file("."))
  .settings(Settings.commonSettings)
  .settings(publish / skip := true)
  .settings(ScalaUnidoc / unidoc / unidocProjectFilter := inAnyProject -- inProjects(mdoc))
-  .settings(
-    initialize := {
-      val curr = VersionNumber(sys.props("java.specification.version"))
-      val req = SemanticSelector("=1.8")
-      if (!curr.matchesSemVer(req)) {
-        val log = Keys.sLog.value
-        log.warn(s"Java $req required for GeoTools compatibility. Found Java $curr.\n" +
-          "Please change the version of Java running sbt.")
-      }
-    }
-  )

lazy val mdoc = project
.dependsOn(raster)
@@ -25,14 +25,11 @@ import geotrellis.raster.buffer.{BufferedTile, BufferSizes}
import geotrellis.raster.buffer.Direction._
import geotrellis.util._

-import org.apache.log4j.Logger
-
import scala.collection.mutable.ArrayBuffer

object BufferTiles extends BufferTiles

trait BufferTiles {
-  val logger = Logger.getLogger(BufferTiles.getClass)

/** Collects tile neighbors by slicing the neighboring tiles to the given
* buffer size
5 changes: 3 additions & 2 deletions project/Dependencies.scala
@@ -62,13 +62,14 @@ object Dependencies {
}

def apacheSpark(module: String) = Def.setting {
"org.apache.spark" %% s"spark-$module" % ver("3.1.1", "3.2.0").value
"org.apache.spark" %% s"spark-$module" % ver("3.1.2", "3.2.0").value
}

def scalaReflect(version: String) = "org.scala-lang" % "scala-reflect" % version

val pureconfig = "com.github.pureconfig" %% "pureconfig" % "0.14.0"
-  val log4s = "org.log4s" %% "log4s" % "1.9.0"
+  val log4s = "org.log4s" %% "log4s" % "1.10.0"
+  val log4jbridge = "org.apache.logging.log4j" % "log4j-1.2-api" % "2.17.0"
val scalatest = "org.scalatest" %% "scalatest" % "3.2.5"
val scalacheck = "org.scalacheck" %% "scalacheck" % "1.15.2"
val scalaXml = "org.scala-lang.modules" %% "scala-xml" % "1.3.0"
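The new log4jbridge artifact is log4j 2's log4j-1.2-api "bridge": it reimplements the log4j 1.x API on top of log4j 2, so transitive 1.x callers such as Spark keep logging while the vulnerable log4j 1.2 jar can be dropped, which is what the CVE comment marks where the bridge is wired into geotrellis-util below. A hedged sketch of how a downstream build might pair the bridge with an exclusion (the exclusion itself is not part of this commit):

// Downstream sbt sketch, assuming log4j-core as the runtime backend.
libraryDependencies ++= Seq(
  "org.apache.logging.log4j" % "log4j-1.2-api" % "2.17.0",
  "org.apache.logging.log4j" % "log4j-core"    % "2.17.0" % Runtime
)
// Keep the old log4j 1.2 implementation off the classpath entirely.
excludeDependencies += ExclusionRule("log4j", "log4j")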
3 changes: 2 additions & 1 deletion project/Settings.scala
@@ -99,7 +99,7 @@ object Settings {
).filter(_.asFile.canRead).map(Credentials(_)),

addCompilerPlugin("org.typelevel" % "kind-projector" % "0.13.2" cross CrossVersion.full),
addCompilerPlugin("org.scalameta" % "semanticdb-scalac" % "4.4.28" cross CrossVersion.full),
addCompilerPlugin("org.scalameta" % "semanticdb-scalac" % "4.4.31" cross CrossVersion.full),

libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, 13)) => Nil
@@ -547,6 +547,7 @@ object Settings {
name := "geotrellis-util",
libraryDependencies ++= Seq(
log4s,
+      log4jbridge, // CVE-2021-4104, CVE-2020-8908
scalaj,
spire,
scalatest % Test
2 changes: 1 addition & 1 deletion project/build.properties
@@ -1 +1 @@
-sbt.version=1.5.5
+sbt.version=1.5.8
19 changes: 10 additions & 9 deletions project/plugins.sbt
@@ -1,13 +1,14 @@
resolvers += sbt.Resolver.bintrayIvyRepo("typesafe", "sbt-plugins")

addDependencyTreePlugin
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.0.0")
addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3")
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.5.3")
addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1")
addSbtPlugin("com.thesamet" % "sbt-protoc" % "0.99.34")
addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.29")
addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.19" )
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.0.0")
addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3")
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.1")
addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1")
addSbtPlugin("com.thesamet" % "sbt-protoc" % "0.99.34")
addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.29")
addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.19" )
addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7")
libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.9.8"
15 changes: 9 additions & 6 deletions sbt
@@ -34,11 +34,11 @@

set -o pipefail

declare -r sbt_release_version="1.5.3"
declare -r sbt_unreleased_version="1.5.3"
declare -r sbt_release_version="1.5.8"
declare -r sbt_unreleased_version="1.6.0-M1"

declare -r latest_213="2.13.6"
declare -r latest_212="2.12.14"
declare -r latest_213="2.13.7"
declare -r latest_212="2.12.15"
declare -r latest_211="2.11.12"
declare -r latest_210="2.10.7"
declare -r latest_29="2.9.3"
@@ -216,7 +216,8 @@ getJavaVersion() {
# but on 9 and 10 it's 9.x.y and 10.x.y.
if [[ "$str" =~ ^1\.([0-9]+)(\..*)?$ ]]; then
echo "${BASH_REMATCH[1]}"
elif [[ "$str" =~ ^([0-9]+)(\..*)?$ ]]; then
# Fixes https://github.com/dwijnand/sbt-extras/issues/326
elif [[ "$str" =~ ^([0-9]+)(\..*)?(-ea)?$ ]]; then
echo "${BASH_REMATCH[1]}"
elif [[ -n "$str" ]]; then
echoerr "Can't parse java version from: $str"
@@ -252,7 +253,9 @@ is_apple_silicon() { [[ "$(uname -s)" == "Darwin" && "$(uname -m)" == "arm64" ]]
# MaxPermSize critical on pre-8 JVMs but incurs noisy warning on 8+
default_jvm_opts() {
local -r v="$(java_version)"
-  if [[ $v -ge 10 ]]; then
+  if [[ $v -ge 17 ]]; then
+    echo "$default_jvm_opts_common"
+  elif [[ $v -ge 10 ]]; then
if is_apple_silicon; then
# As of Dec 2020, JVM for Apple Silicon (M1) doesn't support JVMCI
echo "$default_jvm_opts_common"
@@ -26,15 +26,15 @@ import geotrellis.raster.stitch._
import geotrellis.layer.buffer.BufferTiles
import geotrellis.util._

-import org.apache.log4j.Logger
+import org.log4s._
import org.apache.spark.rdd.RDD
import org.apache.spark.Partitioner

import scala.reflect.ClassTag


object BufferTilesRDD extends BufferTiles {
-  override val logger = Logger.getLogger(BufferTilesRDD.getClass)
+  @transient private[this] lazy val logger = getLogger

def bufferWithNeighbors[
K: SpatialComponent: ClassTag,
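The same logging swap recurs in every Spark-facing object below: log4s's getLogger macro derives the logger name from the enclosing class, @transient keeps the logger out of serialized closures, and lazy re-creates it on executors after deserialization. A minimal self-contained sketch (object and method names are illustrative):

import org.log4s._

object ExampleSparkJob {
  // Never shipped inside a closure; rebuilt lazily on each executor.
  @transient private[this] lazy val logger = getLogger

  def run(): Unit = logger.info("safe on the driver and on executors")
}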
@@ -24,7 +24,7 @@ import geotrellis.layer._
import geotrellis.spark._
import geotrellis.vector._

-import org.apache.log4j.Logger
+import org.log4s._
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.AccumulatorV2
@@ -48,7 +48,7 @@ object IterativeCostDistance {
type KeyCostPair = (SpatialKey, SimpleCostDistance.Cost)
type Changes = mutable.ArrayBuffer[KeyCostPair]

-  val logger = Logger.getLogger(IterativeCostDistance.getClass)
+  @transient private[this] lazy val logger = getLogger

/**
* Compute the resolution (in meters per pixel) of a layer.
@@ -79,16 +79,6 @@ class FileCOGLayerWriter(
val path = s"${keyPath(key)}.${Extension}"

mergeFunc match {
-          case None =>
-            cog.write(path, true)
-            // collect VRT metadata
-            (0 until cog.bandCount)
-              .map { b =>
-                val idx = Index.encode(keyIndex.toIndex(key), maxWidth)
-                (idx.toLong, vrt.simpleSource(s"$idx.$Extension", b + 1, cog.cols, cog.rows, cog.extent))
-              }
-              .foreach(samplesAccumulator.add)
-
case Some(_) if !uriExists(path) =>
cog.write(path, true)
// collect VRT metadata
@@ -110,6 +100,16 @@ class FileCOGLayerWriter(
(idx.toLong, vrt.simpleSource(s"$idx.$Extension", b + 1, merged.cols, merged.rows, merged.extent))
}
.foreach(samplesAccumulator.add)

+          case _ =>
+            cog.write(path, true)
+            // collect VRT metadata
+            (0 until cog.bandCount)
+              .map { b =>
+                val idx = Index.encode(keyIndex.toIndex(key), maxWidth)
+                (idx.toLong, vrt.simpleSource(s"$idx.$Extension", b + 1, cog.cols, cog.rows, cog.extent))
+              }
+              .foreach(samplesAccumulator.add)
}
}

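This hunk (and the matching one in HadoopCOGLayerWriter below) is a pure reordering: the old case None branch duplicated the write-without-merge body, and it now lives in a trailing case _ after the guarded Some cases, so behavior is unchanged. A simplified sketch of the dispatch shape, with the signature and strings invented for illustration:

def writeTile(mergeFunc: Option[(Int, Int) => Int], exists: Boolean): String =
  mergeFunc match {
    case Some(_) if !exists => "write fresh"               // nothing on disk yet
    case Some(_)            => "read existing, merge, rewrite"
    case _                  => "write fresh"               // no merge function given
  }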
@@ -88,16 +88,6 @@ class HadoopCOGLayerWriter(
val path = new Path(s"${keyPath(key)}.${Extension}")

mergeFunc match {
-          case None =>
-            HdfsUtils.write(path, config.value) { new GeoTiffWriter(cog, _).write(true) }
-            // collect VRT metadata
-            (0 until cog.bandCount)
-              .map { b =>
-                val idx = Index.encode(keyIndex.toIndex(key), maxWidth)
-                (idx.toLong, vrt.simpleSource(s"$idx.$Extension", b + 1, cog.cols, cog.rows, cog.extent))
-              }
-              .foreach(samplesAccumulator.add)
-
case Some(_) if !HdfsUtils.pathExists(path, config.value) =>
HdfsUtils.write(path, config.value) { new GeoTiffWriter(cog, _).write(true) }
// collect VRT metadata
@@ -120,6 +110,16 @@ class HadoopCOGLayerWriter(
(idx.toLong, vrt.simpleSource(s"$idx.$Extension", b + 1, merged.cols, merged.rows, merged.extent))
}
.foreach(samplesAccumulator.add)

+          case _ =>
+            HdfsUtils.write(path, config.value) { new GeoTiffWriter(cog, _).write(true) }
+            // collect VRT metadata
+            (0 until cog.bandCount)
+              .map { b =>
+                val idx = Index.encode(keyIndex.toIndex(key), maxWidth)
+                (idx.toLong, vrt.simpleSource(s"$idx.$Extension", b + 1, cog.cols, cog.rows, cog.extent))
+              }
+              .foreach(samplesAccumulator.add)
}
}

@@ -23,7 +23,6 @@ import com.esotericsoftware.kryo.io.Input
import com.esotericsoftware.kryo.io.Output
import java.util.{Comparator, TreeMap}


/** Account for a bug in Kryo < 2.22 for serializing TreeMaps */
class XTreeMapSerializer extends MapSerializer {
override def write (kryo: Kryo, output: Output, map: java.util.Map[_, _]): Unit = {
@@ -161,13 +160,13 @@ class KryoRegistrator extends SparkKryoRegistrator {
kryo.register(classOf[Array[geotrellis.store.avro.AvroRecordCodec[Any]]])
kryo.register(classOf[Array[geotrellis.layer.SpaceTimeKey]])
kryo.register(classOf[Array[geotrellis.layer.SpatialKey]])
-    kryo.register(classOf[Array[geotrellis.vector.Feature[Any,Any]]])
+    kryo.register(classOf[Array[geotrellis.vector.Feature[_, Any]]])
kryo.register(classOf[Array[geotrellis.vector.MultiPolygon]])
kryo.register(classOf[Array[geotrellis.vector.Point]])
kryo.register(classOf[Array[geotrellis.vector.Polygon]])
kryo.register(classOf[Array[scala.collection.Seq[Any]]])
-    kryo.register(classOf[Array[scala.Tuple2[Any, Any]]])
-    kryo.register(classOf[Array[scala.Tuple3[Any, Any, Any]]])
+    kryo.register(classOf[Array[(Any, Any)]])
+    kryo.register(classOf[Array[(Any, Any, Any)]])
kryo.register(classOf[org.locationtech.jts.geom.Coordinate])
kryo.register(classOf[org.locationtech.jts.geom.Envelope])
kryo.register(classOf[org.locationtech.jts.geom.GeometryFactory])
@@ -180,24 +179,24 @@ class KryoRegistrator extends SparkKryoRegistrator {
kryo.register(classOf[org.locationtech.jts.geom.PrecisionModel])
kryo.register(classOf[org.locationtech.jts.geom.PrecisionModel.Type])
kryo.register(classOf[geotrellis.raster.histogram.FastMapHistogram])
-    kryo.register(classOf[geotrellis.raster.histogram.Histogram[Any]])
-    kryo.register(classOf[geotrellis.raster.histogram.MutableHistogram[Any]])
+    kryo.register(classOf[geotrellis.raster.histogram.Histogram[AnyVal]])
+    kryo.register(classOf[geotrellis.raster.histogram.MutableHistogram[AnyVal]])
kryo.register(classOf[geotrellis.raster.histogram.StreamingHistogram])
kryo.register(classOf[geotrellis.raster.histogram.StreamingHistogram.DeltaCompare])
kryo.register(classOf[geotrellis.raster.histogram.StreamingHistogram.Delta])
kryo.register(classOf[geotrellis.raster.histogram.StreamingHistogram.Bucket])
kryo.register(classOf[geotrellis.raster.density.KernelStamper])
-    kryo.register(classOf[geotrellis.raster.ProjectedRaster[Any]])
+    kryo.register(classOf[geotrellis.raster.ProjectedRaster[_]])
kryo.register(classOf[geotrellis.raster.TileLayout])
kryo.register(classOf[geotrellis.layer.TemporalProjectedExtent])
kryo.register(classOf[geotrellis.raster.buffer.BufferSizes])
kryo.register(classOf[geotrellis.store.avro.AvroRecordCodec[Any]])
kryo.register(classOf[geotrellis.store.avro.AvroUnionCodec[Any]])
-    kryo.register(classOf[geotrellis.store.avro.codecs.KeyValueRecordCodec[Any,Any]])
-    kryo.register(classOf[geotrellis.store.avro.codecs.TupleCodec[Any,Any]])
+    kryo.register(classOf[geotrellis.store.avro.codecs.KeyValueRecordCodec[Any, Any]])
+    kryo.register(classOf[geotrellis.store.avro.codecs.TupleCodec[Any, Any]])
kryo.register(classOf[geotrellis.layer.KeyBounds[Any]])
kryo.register(classOf[geotrellis.spark.knn.KNearestRDD.Ord[Any]])
-    kryo.register(classOf[geotrellis.vector.Feature[Any,Any]])
+    kryo.register(classOf[geotrellis.vector.Feature[_, Any]])
kryo.register(classOf[geotrellis.vector.Geometry], new GeometrySerializer[geotrellis.vector.Geometry])
kryo.register(classOf[geotrellis.vector.GeometryCollection])
kryo.register(classOf[geotrellis.vector.LineString], new GeometrySerializer[geotrellis.vector.LineString])
@@ -209,9 +208,9 @@ class KryoRegistrator extends SparkKryoRegistrator {
kryo.register(classOf[geotrellis.vector.SpatialIndex[Any]])
kryo.register(classOf[java.lang.Class[Any]])
kryo.register(classOf[java.util.TreeMap[Any, Any]])
-    kryo.register(classOf[java.util.HashMap[Any,Any]])
+    kryo.register(classOf[java.util.HashMap[Any, Any]])
    kryo.register(classOf[java.util.HashSet[Any]])
-    kryo.register(classOf[java.util.LinkedHashMap[Any,Any]])
+    kryo.register(classOf[java.util.LinkedHashMap[Any, Any]])
kryo.register(classOf[java.util.LinkedHashSet[Any]])
kryo.register(classOf[org.apache.hadoop.io.BytesWritable])
kryo.register(classOf[org.apache.hadoop.io.BigIntWritable])
@@ -238,9 +237,9 @@ class KryoRegistrator extends SparkKryoRegistrator {
kryo.register(classOf[org.locationtech.proj4j.units.DegreeUnit])
kryo.register(classOf[org.locationtech.proj4j.units.Unit])
kryo.register(classOf[scala.collection.mutable.WrappedArray.ofInt])
-    kryo.register(classOf[scala.collection.mutable.WrappedArray.ofRef[Any]])
+    kryo.register(classOf[scala.collection.mutable.WrappedArray.ofRef[AnyRef]])
kryo.register(classOf[scala.collection.Seq[Any]])
-    kryo.register(classOf[scala.Tuple3[Any, Any, Any]])
+    kryo.register(classOf[(Any, Any, Any)])
kryo.register(geotrellis.proj4.LatLng.getClass)
kryo.register(geotrellis.layer.EmptyBounds.getClass)
kryo.register(scala.collection.immutable.Nil.getClass)
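Most of the registration tweaks above are the "Fix Scala 2.13.7 compilation" part of this commit: 2.13.7 checks type arguments against their declared bounds more strictly, including inside classOf[...]. GeoTrellis declares roughly Feature[G <: Geometry, D] and Histogram[T <: AnyVal], so Feature[Any, Any] and Histogram[Any] no longer type-check and a wildcard or AnyVal is used instead. A toy reproduction under that assumption:

// Toy reproduction; Geometry and Feature are declared locally here, with
// bounds mirroring GeoTrellis.
trait Geometry
final case class Feature[G <: Geometry, D](geom: G, data: D)

object ClassOfBounds {
  // val bad = classOf[Feature[Any, Any]] // 2.13.7: type arguments do not conform to bounds
  val ok = classOf[Feature[_, Any]]       // a wildcard leaves the bounded slot open
}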
@@ -25,7 +25,7 @@ import geotrellis.spark.costdistance.IterativeCostDistance._
import geotrellis.vector._

import org.locationtech.jts.{geom => jts}
-import org.apache.log4j.Logger
+import org.log4s._
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.AccumulatorV2
@@ -65,7 +65,7 @@ object IterativeViewshed {
implicit def coordinatesToPoints(points: Seq[jts.Coordinate]): Seq[Viewpoint] =
points.map({ p => Viewpoint(p.getX, p.getY, p.getZ, 0, -1.0, Double.NegativeInfinity) })

-  private val logger = Logger.getLogger(IterativeViewshed.getClass)
+  @transient private[this] lazy val logger = getLogger

private case class Message(
target: SpatialKey,
1 change: 0 additions & 1 deletion version.sbt

This file was deleted.
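(sbt-ci-release pulls in sbt-dynver, which derives the build version from the nearest git tag, so a checked-in version.sbt is no longer needed.)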
