import Dependencies._
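// Fetch the "sources" classifier for all transitive dependencies (useful for IDE navigation).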
transitiveClassifiers in Global := Seq(Artifact.SourceClassifier)
lazy val dirSettings = Seq() // currently empty; mixed into commonSettings below
lazy val akkaApp = Project(id = "akka-app", base = file("akka-app"))
.settings(description := "Common Akka application stack: metrics, tracing, logging, and more.")
.settings(commonSettings)
.settings(libraryDependencies ++= coreTestDeps ++ akkaDeps)
.settings(publishSettings)
.disablePlugins(SbtScalariform)
lazy val jobServer = Project(id = "job-server", base = file("job-server"))
.settings(commonSettings)
.settings(revolverSettings)
// job-server is not assembled directly (the deployable fat jar comes from job-server-extras),
// so assigning null here effectively disables the assembly task for this module.
.settings(assembly := null.asInstanceOf[File])
.settings(
description := "Spark as a Service: a RESTful job server for Apache Spark",
libraryDependencies ++= sparkDeps ++ slickDeps ++ cassandraDeps ++ securityDeps ++ coreTestDeps,
test in Test := (test in Test).dependsOn(packageBin in Compile in jobServerTestJar)
.dependsOn(clean in Compile in jobServerTestJar)
.dependsOn(buildPython in jobServerPython)
.dependsOn(clean in Compile in jobServerPython)
.value,
testOnly in Test := (testOnly in Test).dependsOn(packageBin in Compile in jobServerTestJar)
.dependsOn(clean in Compile in jobServerTestJar)
.dependsOn(buildPython in jobServerPython)
.dependsOn(clean in Compile in jobServerPython)
.evaluated,
console in Compile := Defaults.consoleTask(fullClasspath in Compile, console in Compile).value,
fullClasspath in Compile := (fullClasspath in Compile).map { classpath =>
extraJarPaths ++ classpath
}.value,
fork in Test := true
)
.settings(publishSettings)
.dependsOn(akkaApp, jobServerApi)
.disablePlugins(SbtScalariform)
lazy val jobServerTestJar = Project(id = "job-server-tests", base = file("job-server-tests"))
.settings(commonSettings)
.settings(jobServerTestJarSettings)
.settings(noPublishSettings)
.dependsOn(jobServerApi)
.disablePlugins(SbtScalariform)
.disablePlugins(ScoverageSbtPlugin) // do not include in coverage report
lazy val jobServerApi = Project(id = "job-server-api", base = file("job-server-api"))
.settings(commonSettings)
.settings(jobServerApiSettings)
.settings(publishSettings)
.disablePlugins(SbtScalariform)
lazy val jobServerExtras = Project(id = "job-server-extras", base = file("job-server-extras"))
.settings(commonSettings)
.settings(jobServerExtrasSettings)
.settings(
test in Test := (test in Test)
.dependsOn(packageBin in Compile in jobServerTestJar)
.dependsOn(clean in Compile in jobServerTestJar)
.dependsOn(buildPython in jobServerPython)
.dependsOn(buildPyExamples in jobServerPython)
.dependsOn(clean in Compile in jobServerPython)
.value,
testOnly in Test := (testOnly in Test)
.dependsOn(packageBin in Compile in jobServerTestJar)
.dependsOn(clean in Compile in jobServerTestJar)
.dependsOn(buildPython in jobServerPython)
.dependsOn(buildPyExamples in jobServerPython)
.dependsOn(clean in Compile in jobServerPython)
.evaluated
)
.dependsOn(jobServerApi, jobServer % "compile->compile; test->test")
.disablePlugins(SbtScalariform)
lazy val jobServerPython = Project(id = "job-server-python", base = file("job-server-python"))
.settings(commonSettings)
.settings(jobServerPythonSettings)
.dependsOn(jobServerApi, akkaApp % "test")
.disablePlugins(SbtScalariform)
lazy val root = Project(id = "root", base = file("."))
.settings(commonSettings)
.settings(Release.settings)
.settings(noPublishSettings)
.settings(rootSettings)
.settings(dockerSettings)
.aggregate(jobServer, jobServerApi, jobServerTestJar, akkaApp, jobServerExtras, jobServerPython)
.dependsOn(jobServer, jobServerExtras)
.disablePlugins(SbtScalariform).enablePlugins(DockerPlugin)
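// Since root aggregates every sub-project, a bare "sbt test" or "sbt package" runs the task
// across all of the modules above.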
lazy val jobServerExtrasSettings = revolverSettings ++ Assembly.settings ++ publishSettings ++ Seq(
libraryDependencies ++= sparkExtraDeps,
// Extras packages up its own jar for testing itself
test in Test := (test in Test).dependsOn(packageBin in Compile).value,
fork in Test := true,
parallelExecution in Test := false,
// Temporarily disable tests for assembly builds so folks can package and get started. Some
// tests in extras are flaky, especially those involving paths.
test in assembly := {},
exportJars := true
)
lazy val jobServerApiSettings = Seq(libraryDependencies ++= sparkDeps ++ sparkExtraDeps)
lazy val testPython = taskKey[Unit]("Launch a sub process to run the Python tests")
lazy val buildPython = taskKey[Unit]("Build the python side of python support into an egg")
lazy val buildPyExamples = taskKey[Unit]("Build the examples of python jobs into an egg")
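// These tasks are implemented by the PythonTasks helper referenced below (not shown in this
// file; presumably defined under project/). Example sbt shell invocations:
//   job-server-python/buildPython
//   job-server-python/testPython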
lazy val jobServerPythonSettings = revolverSettings ++ Assembly.settings ++ publishSettings ++ Seq(
libraryDependencies ++= sparkPythonDeps,
fork in Test := true,
cancelable in Test := true,
testPython := PythonTasks.testPythonTask(baseDirectory.value),
buildPython := PythonTasks.buildPythonTask(baseDirectory.value, version.value),
buildPyExamples := PythonTasks.buildExamplesTask(baseDirectory.value, version.value),
assembly := assembly.dependsOn(buildPython).value
)
lazy val jobServerTestJarSettings = Seq(
libraryDependencies ++= sparkDeps ++ apiDeps,
description := "Test jar for Spark Job Server",
exportJars := true // use the jar instead of target/classes
)
lazy val noPublishSettings = Seq(
publishTo := Some(Resolver.file("Unused repo", file("target/unusedrepo"))),
publishArtifact := false,
publish := {},
skip in publish := true
)
lazy val dockerSettings = Seq(
// Make the docker task depend on the assembly task, which generates a fat JAR file
docker := docker.dependsOn(assembly in jobServerExtras).value,
dockerfile in docker := {
val artifact = (assemblyOutputPath in assembly in jobServerExtras).value
val artifactTargetPath = s"/app/${artifact.name}"
val sparkBuild = s"spark-${Versions.spark}"
val sparkBuildCmd = scalaBinaryVersion.value match {
case "2.11" =>
Versions.spark match {
case s if s.startsWith("1") => "./make-distribution.sh -Dscala-2.11 -Phadoop-2.7 -Phive"
case _ => "./dev/make-distribution.sh -Dscala-2.11 -Phadoop-2.7 -Phive"
}
case other => throw new RuntimeException(s"Scala version $other is not supported!")
}
new sbtdocker.mutable.Dockerfile {
from(s"openjdk:${Versions.java}")
// Dockerfile best practices: https://docs.docker.com/articles/dockerfile_best-practices/
expose(8090)
expose(9999) // for JMX
env("MESOS_VERSION", Versions.mesos)
runRaw(
"""echo "deb http://repos.mesosphere.io/ubuntu/ trusty main" > /etc/apt/sources.list.d/mesosphere.list && \
apt-key adv --keyserver keyserver.ubuntu.com --recv E56151BF && \
apt-get -y update && \
apt-get -y install mesos=${MESOS_VERSION} && \
apt-get clean
""")
env("MAVEN_VERSION","3.3.9")
runRaw(
"""mkdir -p /usr/share/maven /usr/share/maven/ref \
&& curl -fsSL http://apache.osuosl.org/maven/maven-3/$MAVEN_VERSION/binaries/apache-maven-$MAVEN_VERSION-bin.tar.gz \
| tar -xzC /usr/share/maven --strip-components=1 \
&& ln -s /usr/share/maven/bin/mvn /usr/bin/mvn
""")
env("MAVEN_HOME","/usr/share/maven")
env("MAVEN_CONFIG", "/.m2")
copy(artifact, artifactTargetPath)
copy(baseDirectory(_ / "bin" / "server_start.sh").value, file("app/server_start.sh"))
copy(baseDirectory(_ / "bin" / "server_stop.sh").value, file("app/server_stop.sh"))
copy(baseDirectory(_ / "bin" / "setenv.sh").value, file("app/setenv.sh"))
copy(baseDirectory(_ / "config" / "log4j-stdout.properties").value, file("app/log4j-server.properties"))
copy(baseDirectory(_ / "config" / "docker.conf").value, file("app/docker.conf"))
copy(baseDirectory(_ / "config" / "docker.sh").value, file("app/settings.sh"))
// Declaring envs in the Dockerfile makes them easy to override from the docker command line
env("JOBSERVER_MEMORY", "1G")
env("SPARK_HOME", "/spark")
// Use a volume to persist the database between container invocations
run("mkdir", "-p", "/database")
runRaw(
s"""
|wget http://d3kbcqa49mib13.cloudfront.net/$sparkBuild.tgz && \\
|tar -xvf $sparkBuild.tgz && \\
|cd $sparkBuild && \\
|$sparkBuildCmd && \\
|cd .. && \\
|mv $sparkBuild/dist /spark && \\
|rm $sparkBuild.tgz && \\
|rm -r $sparkBuild
""".stripMargin.trim
)
volume("/database")
entryPoint("app/server_start.sh")
}
},
imageNames in docker := Seq(
sbtdocker.ImageName(namespace = Some("velvia"),
repository = "spark-jobserver",
tag = Some(
s"${version.value}" +
s".mesos-${Versions.mesos.split('-')(0)}" +
s".spark-${Versions.spark}" +
s".scala-${scalaBinaryVersion.value}" +
s".jdk-${Versions.java}")
)
)
)
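// dockerSettings is mixed into the root project above, so a plain "sbt docker" builds the
// image. sbt-docker also provides "dockerPush" for publishing, assuming registry credentials
// are configured.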
lazy val rootSettings = Seq(
// Must run Spark tests sequentially because they compete for port 4040!
parallelExecution in Test := false,
publishArtifact := false,
concurrentRestrictions := Seq(
Tags.limit(Tags.CPU, java.lang.Runtime.getRuntime.availableProcessors()),
// Limit to one concurrent test task, even across sub-projects.
// Note: some test components seem to carry the "Untagged" tag rather than the "Test" tag,
// so we cap the sum of "Test" and "Untagged" tasks at one.
Tags.limitSum(1, Tags.Test, Tags.Untagged))
)
lazy val revolverSettings = Seq(
javaOptions in reStart += jobServerLogging,
// Give job server a bit more PermGen since it does classloading
javaOptions in reStart += "-XX:MaxPermSize=256m",
javaOptions in reStart += "-Djava.security.krb5.realm= -Djava.security.krb5.kdc=",
// This lets us add Spark back to the classpath without assembly barfing
fullClasspath in reStart := (fullClasspath in Compile).value,
mainClass in reStart := Some("spark.jobserver.JobServer")
)
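// sbt-revolver usage: "sbt reStart" forks spark.jobserver.JobServer with the options above,
// and "sbt reStop" shuts the forked process down again.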
// To add an extra jar to the classpath when running "reStart" for quick development, set the
// env var EXTRA_JAR to the absolute path of the jar.
lazy val extraJarPaths = Option(System.getenv("EXTRA_JAR"))
.map(jarpath => Seq(Attributed.blank(file(jarpath))))
.getOrElse(Nil)
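// Example (hypothetical jar path):
//   EXTRA_JAR=/opt/libs/my-extra.jar sbt reStart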
// Default Scalastyle task, wired below to run with every compile
lazy val runScalaStyle = taskKey[Unit]("Run scalastyle checks")
lazy val commonSettings = Defaults.coreDefaultSettings ++ dirSettings ++ Seq(
organization := "spark.jobserver",
crossPaths := true,
scalaVersion := sys.env.getOrElse("SCALA_VERSION", "2.11.8"),
dependencyOverrides += "org.scala-lang" % "scala-compiler" % scalaVersion.value,
// scalastyleFailOnError := true,
runScalaStyle := {
scalastyle.in(Compile).toTask("").value
},
(compile in Compile) := (compile in Compile).dependsOn(runScalaStyle).value,
// In Scala 2.10, certain language features are disabled by default, such as implicit conversions.
// Need to pass in language options or import scala.language.* to enable them.
// See SIP-18 (https://docs.google.com/document/d/1nlkvpoIRkx7at1qJEZafJwthZ3GeIklTFhqmXMvTX9Q/edit)
scalacOptions := Seq(
"-deprecation", "-feature",
"-language:implicitConversions",
"-language:postfixOps",
"-language:existentials"
),
// For building on encrypted file systems...
scalacOptions ++= Seq("-Xmax-classfile-name", "128"),
resolvers ++= Dependencies.repos,
libraryDependencies ++= apiDeps,
parallelExecution in Test := false,
testOptions in Test += Tests.Argument(TestFrameworks.ScalaTest, "-oDF"),
// We need to exclude jms/jmxtools/etc. because they cause undecipherable sbt errors :(
ivyXML :=
<dependencies>
<exclude module="jms"/>
<exclude module="jmxtools"/>
<exclude module="jmxri"/>
</dependencies>
) ++ scoverageSettings
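// scalaVersion above honors the SCALA_VERSION env var, e.g. (illustrative version number):
//   SCALA_VERSION=2.11.11 sbt clean compile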
lazy val scoverageSettings = {
// Semicolon-separated list of regexs matching classes to exclude
coverageExcludedPackages := ".+Benchmark.*;.+Example.*;.+TestJob"
}
lazy val publishSettings = Seq(
licenses += ("Apache-2.0", url("http://choosealicense.com/licenses/apache/")),
bintrayOrganization := Some("spark-jobserver")
)
// This is here so we can easily switch back to Logback when Spark fixes its log4j dependency.
lazy val jobServerLogbackLogging = "-Dlogback.configurationFile=config/logback-local.xml"
lazy val jobServerLogging = "-Dlog4j.configuration=file:config/log4j-local.properties"
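// To switch, point revolverSettings above at jobServerLogbackLogging instead of
// jobServerLogging (and make sure config/logback-local.xml exists).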