-
Notifications
You must be signed in to change notification settings - Fork 2
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Laurence Liss
committed
Dec 28, 2015
0 parents
commit 19ddc15
Showing
7 changed files
with
777 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,17 @@ | ||
*.class
*.log

# sbt specific
.cache
.history
.lib/
dist/*
target/
lib_managed/
src_managed/
project/boot/
project/plugins/project/

# Scala-IDE specific
.scala_dependencies
.worksheet
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,57 @@ | ||
# USACE Flood Modeling Geoprocessing

A [Spark Job Server](https://github.com/spark-jobserver/spark-jobserver) job for USACE flood modeling.

## Usage

First, build the assembly JAR for this project:

```bash
$ git clone https://github.com/azavea/usace-flood-geoprocessing.git
$ cd usace-flood-geoprocessing
$ ./sbt assembly
```

To use a Docker-based Scala build environment, you can use:

```bash
$ docker run \
    --rm \
    --volume ${HOME}/.ivy2:/root/.ivy2 \
    --volume ${PWD}:/usace-flood-geoprocessing \
    --workdir /usace-flood-geoprocessing \
    quay.io/azavea/scala:latest ./sbt assembly
```

Next, use the latest Spark Job Server (SJS) Docker image to launch an instance of SJS locally:

```bash
$ docker run \
    --detach \
    --volume ${PWD}/examples/conf/spark-jobserver.conf:/opt/spark-jobserver/spark-jobserver.conf:ro \
    --publish 8090:8090 \
    --name spark-jobserver \
    quay.io/azavea/spark-jobserver:latest
```

Now that the SJS service is running in the background, upload the assembly JAR and create a long-lived Spark context named `geoprocessing`:

```bash
$ curl --silent \
    --data-binary @summary/target/scala-2.10/usaceflood-geoprocessing-assembly-0.0.1.jar \
    'http://localhost:8090/jars/geoprocessing'
$ curl --silent --data "" \
    'http://localhost:8090/contexts/geoprocessing-context'
```

Once that process is complete, try submitting a job to the `geoprocessing-context`:

```bash
$ curl --silent \
    --data-binary "" \
    'http://localhost:8090/jobs?sync=true&context=geoprocessing-context&appName=geoprocessing&classPath=org.azavea.usaceflood.geoprocessing.SummaryJob'
```

## Deployments

Not yet deployed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,27 @@ | ||
###################
# spark-jobserver #
###################

spark.jobserver {
  port = 8090
  jar-store-rootdir = "/opt/spark-jobserver/jars"
  filedao.rootdir = "/opt/spark-jobserver/filedao/data"
}

#########
# spark #
#########

spark {
  home = "/opt/spark"
  master = "local[*]"

  context-settings.passthrough.spark.serializer = "org.apache.spark.serializer.KryoSerializer"
  context-settings.passthrough.spark.kryo.registrator = "geotrellis.spark.io.hadoop.KryoRegistrator"
}

#########
# spray #
#########

spray.can.server.parsing.max-content-length = 250m
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,104 @@ | ||
import sbt._ | ||
import Keys._ | ||
import scala.util.Properties | ||
|
||
// sbt-assembly | ||
import sbtassembly.Plugin._ | ||
import AssemblyKeys._ | ||
|
||
/** Dependency version numbers used throughout the build.
  *
  * Spark-related versions may be overridden at build time through
  * environment variables via the `either` helper.
  */
object Version {
  /** Returns the value of `environmentVariable` if it is set, otherwise `default`. */
  def either(environmentVariable: String, default: String): String =
    Properties.envOrElse(environmentVariable, default)

  // Pinned versions.
  val geotrellis = "0.10.0-97834e6"
  val scala      = "2.10.5"
  val scalatest  = "2.2.1"

  // Environment-overridable versions; `lazy` so the environment is only
  // consulted when the value is first used.
  lazy val jobserver = either("SPARK_JOBSERVER_VERSION", "0.5.1")
  lazy val hadoop    = either("SPARK_HADOOP_VERSION", "2.6.0")
  lazy val spark     = either("SPARK_VERSION", "1.3.1")
}
|
||
object Geoprocessing extends Build {
  // Build-wide defaults shared by every project in this build.
  override lazy val settings =
    super.settings ++ Seq(
      shellPrompt := { state => Project.extract(state).currentProject.id + " > " },
      version := "0.0.1",
      scalaVersion := Version.scala,
      organization := "org.azavea.usaceflood.geoprocessing",

      // disable annoying warnings about 2.10.x
      conflictWarning in ThisBuild := ConflictWarning.disable,
      scalacOptions ++= Seq(
        "-deprecation",
        "-unchecked",
        "-Yinline-warnings",
        "-language:implicitConversions",
        "-language:reflectiveCalls",
        "-language:higherKinds",
        "-language:postfixOps",
        "-language:existentials",
        "-feature"
      ),

      publishMavenStyle := true,
      publishArtifact in Test := false,
      pomIncludeRepository := { _ => false },
      licenses := Seq("Apache 2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0.html"))
    )

  // Extra resolvers needed for GeoTrellis and its transitive dependencies.
  val resolutionRepos = Seq(
    Resolver.bintrayRepo("azavea", "geotrellis"),
    Resolver.bintrayRepo("scalaz", "releases"),
    "OpenGeo" at "https://boundless.artifactoryonline.com/boundless/main"
  )

  // sbt-assembly configuration: skip tests during `assembly` and resolve
  // duplicate-file conflicts while building the fat JAR (concat config
  // files, drop manifests, otherwise keep the first occurrence).
  val defaultAssemblySettings =
    assemblySettings ++ Seq(
      test in assembly := {},
      mergeStrategy in assembly <<= (mergeStrategy in assembly) { defaultStrategy =>
        {
          case "reference.conf"        => MergeStrategy.concat
          case "application.conf"      => MergeStrategy.concat
          case "META-INF/MANIFEST.MF"  => MergeStrategy.discard
          case "META-INF\\MANIFEST.MF" => MergeStrategy.discard
          case _                       => MergeStrategy.first
        }
      },
      resolvers ++= resolutionRepos
    )

  // Aggregate project: building the root builds every sub-project.
  lazy val root =
    Project(id = "usaceflood-geoprocessing", base = file(".")).aggregate(summary)

  // Sub-project containing the geoprocessing summary job.
  lazy val summary =
    Project("summary", file("summary")).settings(summarySettings: _*)

  lazy val summarySettings =
    Seq(
      organization := "org.azavea.usaceflood.geoprocessing",
      name := "usaceflood-geoprocessing",
      scalaVersion := Version.scala,

      // Run in a forked JVM; raise memory limits here if necessary.
      fork := true,
      javaOptions += "-Xmx2G",
      javaOptions += "-Djava.library.path=/usr/local/lib",

      libraryDependencies ++= Seq(
        "com.azavea.geotrellis" %% "geotrellis-engine"   % Version.geotrellis,
        "com.azavea.geotrellis" %% "geotrellis-services" % Version.geotrellis,
        "com.azavea.geotrellis" %% "geotrellis-spark"    % Version.geotrellis,
        "com.azavea.geotrellis" %% "geotrellis-testkit"  % Version.geotrellis % "test",
        "org.scalatest"         %% "scalatest"           % Version.scalatest  % "test",
        "org.apache.spark"      %% "spark-core"          % Version.spark      % "provided",
        "org.apache.hadoop"     %  "hadoop-client"       % Version.hadoop     % "provided",
        "spark.jobserver"       %% "job-server-api"      % Version.jobserver  % "provided"
      )
    ) ++ defaultAssemblySettings
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,3 @@ | ||
// sbt-revolver plugin — NOTE(review): presumably used for restarting the
// application from within sbt during development; confirm against usage.
addSbtPlugin("io.spray" % "sbt-revolver" % "0.7.1")

// sbt-assembly plugin — required by the `assemblySettings`/`mergeStrategy`
// configuration in project/Build.scala to build the fat JAR.
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2")
Oops, something went wrong.