Skip to content

Commit

Permalink
Merge branch 'release/0.2.0'
Browse files Browse the repository at this point in the history
  • Loading branch information
Joe Tarricone committed Sep 8, 2016
2 parents e15f22e + 964419b commit 3b97340
Show file tree
Hide file tree
Showing 10 changed files with 140 additions and 30 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
## 0.2.0
- Release containerized image with /nlcd-agg-tiles endpoint; change the /count endpoint to accept bundled polygon requests.

## 0.1.1

- Release containerized image with /count endpoint.
Expand Down
2 changes: 1 addition & 1 deletion Dockerfile
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
FROM quay.io/azavea/spark:1.6.1

ENV VERSION 0.1.1
ENV VERSION 0.2.0

COPY geop/target/scala-2.10/usace-programanalysis-geop-assembly-${VERSION}.jar /opt/geoprocessing/usace-programanalysis-geop.jar
COPY scripts/docker-entrypoint.sh /opt/geoprocessing/
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# USACE Program Analysis Geoprocessing

[![Build Status](https://travis-ci.org/azavea/usace-program-analysis-geoprocessing.png?branch=master)](https://travis-ci.org/azavea/usace-program-analysis-geoprocessing)
[![Docker Repository on Quay](https://quay.io/repository/usace/program-analysis-geoprocessing/status "Docker Repository on Quay")](https://quay.io/repository/usace/program-analysis-geoprocessing)

This repository contains the backing geoprocessing service for the [USACE Program Analysis web app](https://github.com/azavea/usace-program-analysis). It is a [Spray](https://github.com/spray/spray) based web service that performs geoprocessing operations using [GeoTrellis](https://github.com/geotrellis/geotrellis) and [Apache Spark](http://spark.apache.org/).

Expand Down
2 changes: 1 addition & 1 deletion examples/count.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,5 @@
"rasters": [
"nlcd-2011-30m-epsg5070-0.10.0"
],
"multiPolygon": "{\"type\":\"MultiPolygon\",\"coordinates\":[[[[-75.1626205444336,39.95580659996906],[-75.25531768798828,39.94514735903112],[-75.22785186767578,39.89446035777916],[-75.1461410522461,39.88761144548104],[-75.09309768676758,39.91078961774283],[-75.09464263916016,39.93817189499188],[-75.12039184570312,39.94435771955196],[-75.1626205444336,39.95580659996906]]]]}"
"multiPolygons": ["{\"type\":\"MultiPolygon\",\"coordinates\":[[[[-75.1626205444336,39.95580659996906],[-75.25531768798828,39.94514735903112],[-75.22785186767578,39.89446035777916],[-75.1461410522461,39.88761144548104],[-75.09309768676758,39.91078961774283],[-75.09464263916016,39.93817189499188],[-75.12039184570312,39.94435771955196],[-75.1626205444336,39.95580659996906]]]]}"]
}
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
package com.azavea.usace.programanalysis.geop

import geotrellis.raster.Tile
import geotrellis.spark.io.{Intersects, _}
import geotrellis.spark.io._
import geotrellis.spark.{SpatialKey, TileLayerMetadata, _}
import geotrellis.spark.io.s3.{S3AttributeStore, S3LayerReader}
import geotrellis.vector.{Extent, MultiPolygon}

import org.apache.spark.SparkContext


object ClippedLayers {
/**
* Given a list of layer ids, a multipolygon and a spark context, returns
Expand Down Expand Up @@ -76,8 +75,8 @@ object ClippedLayers {
rootPath: String
)(implicit sc: SparkContext): S3LayerReader = {
val attributeStore = new S3AttributeStore(bucket, rootPath)
val catalog = new S3LayerReader(attributeStore)
val reader = new S3LayerReader(attributeStore)

catalog
reader
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,25 +2,44 @@ package com.azavea.usace.programanalysis.geop

import akka.actor.Actor

import geotrellis.proj4.{ConusAlbers, LatLng}

import org.apache.spark.SparkContext

import scala.concurrent.future

import spray.http.AllOrigins
import spray.http.{ AllOrigins, MediaTypes }
import spray.http.HttpHeaders.{`Access-Control-Allow-Headers`, `Access-Control-Allow-Methods`, `Access-Control-Allow-Origin`}
import spray.http.HttpMethods.{DELETE, GET, OPTIONS, POST}
import spray.json.{JsNumber, JsObject}
import spray.routing.{Directive0, HttpService, RejectionHandler}

import geotrellis.proj4.{LatLng, ConusAlbers}
import geotrellis.raster.{IntConstantNoDataCellType, Tile}
import geotrellis.raster.render.{IntColorMap, Exact}
import geotrellis.raster.render.ColorMap.Options
import geotrellis.spark.io.{TileNotFoundError, AttributeNotFoundError}


class GeopServiceActor(sc: SparkContext) extends Actor with HttpService {
import scala.concurrent.ExecutionContext.Implicits.global
import JsonProtocol._

implicit val _sc = sc

val NLCD_FOREST_COLOR: Int = 0x2B7B3D80
val NLCD_WETLANDS_COLOR: Int = 0x75A5D080
val NLCD_DISTURBED_COLOR: Int = 0xFF5D5D80
val NLCD_NO_DATA_COLOR: Int = 0xFFFFFF00

val nlcdColorMap =
new IntColorMap(
Map(
41 -> NLCD_FOREST_COLOR, 42 -> NLCD_FOREST_COLOR, 43 -> NLCD_FOREST_COLOR,
90 -> NLCD_WETLANDS_COLOR, 95 -> NLCD_WETLANDS_COLOR,
21 -> NLCD_DISTURBED_COLOR, 22 -> NLCD_DISTURBED_COLOR, 23 -> NLCD_DISTURBED_COLOR,
24 -> NLCD_DISTURBED_COLOR, 81 -> NLCD_DISTURBED_COLOR, 82 -> NLCD_DISTURBED_COLOR),
new Options(classBoundaryType = Exact, noDataColor = NLCD_NO_DATA_COLOR, fallbackColor = NLCD_NO_DATA_COLOR)
)

def actorRefFactory = context
def receive = runRoute(root)

Expand All @@ -37,6 +56,7 @@ class GeopServiceActor(sc: SparkContext) extends Actor with HttpService {

def root =
pathPrefix("count") { rasterGroupedCount } ~
pathPrefix("nlcd-agg-tiles") { tilesHandler } ~
path("ping") { complete { "OK" } }

def rasterGroupedCount =
Expand All @@ -46,16 +66,40 @@ class GeopServiceActor(sc: SparkContext) extends Actor with HttpService {
entity(as[CountArgs]) { args =>
complete {
future {
val multiPolygon = args.multiPolygon.reproject(LatLng, ConusAlbers)
val rasterLayers = ClippedLayers(args.rasters, multiPolygon, sc)
val rasterGroupedCount = RasterGroupedCount(rasterLayers, multiPolygon)

JsObject(
rasterGroupedCount
.map { case (keys, count) =>
keys.mkString(",") -> JsNumber(count)
}
)
args.multiPolygons.map(m => {
val multiPolygon = m.reproject(LatLng, ConusAlbers)
val rasterLayers = ClippedLayers(args.rasters, multiPolygon, sc)
val rasterGroupedCount = RasterGroupedCount(rasterLayers, multiPolygon)

JsObject(
rasterGroupedCount
.map { case (keys, count) =>
keys.mkString(",") -> JsNumber(count)
})
}).toVector
}
}
}
}

def tilesHandler =
pathPrefix(IntNumber / IntNumber / IntNumber) { (zoom, x, y) =>
respondWithMediaType(MediaTypes.`image/png`) {
complete {
future {
val result: Option[Tile] =
try {
Some(LayerReader(zoom, x, y, sc))
} catch {
case _: AttributeNotFoundError | _: TileNotFoundError => None
}

result.map { tile =>
tile
.convert(IntConstantNoDataCellType)
.renderPng(nlcdColorMap)
.bytes
}
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,24 +9,34 @@ import spray.httpx.SprayJsonSupport
import spray.json._
import spray.json.DefaultJsonProtocol._

import scala.collection.parallel.immutable.ParVector


// TODO Nest under "input"
case class CountArgs (rasters: Seq[LayerId], multiPolygon: MultiPolygon)
/**
  * Parsed request arguments for the /count endpoint.
  *
  * @param rasters       ids of the raster layers to count over
  * @param multiPolygons areas of interest; held in a [[ParVector]],
  *                      presumably so each polygon can be processed in
  *                      parallel — NOTE(review): confirm with callers
  */
case class CountArgs (
  rasters: Seq[LayerId],
  multiPolygons: ParVector[MultiPolygon]
)

object JsonProtocol extends SprayJsonSupport with GeoJsonSupport {
implicit object CountArgsJsonFormat extends RootJsonFormat[CountArgs] {
def write(args: CountArgs) = JsObject(
"zoom" -> JsNumber(args.rasters.head.zoom),
"rasters" -> JsArray(args.rasters.map(r => JsString(r.name)).toVector),
"multiPolygon" -> JsString(args.multiPolygon.toGeoJson())
)
def write(args: CountArgs) = args match {
case CountArgs(rasters, multiPolygons) =>
JsObject(
"zoom" -> JsNumber(rasters.head.zoom),
"rasters" -> JsArray(rasters.map(r => JsString(r.name)).toVector),
"multiPolygons" -> JsArray(multiPolygons.map(m => JsString(m.toGeoJson())).toVector)
)
case _ =>
throw new SerializationException("")
}

def read(value: JsValue) = {
value.asJsObject.getFields("zoom", "rasters", "multiPolygon") match {
case Seq(JsNumber(zoom), JsArray(rasters), JsString(multiPolygon)) =>
value.asJsObject.getFields("zoom", "rasters", "multiPolygons") match {
case Seq(JsNumber(zoom), JsArray(rasters), JsArray(multiPolygons)) =>
new CountArgs(
rasters.map { r => LayerId(r.convertTo[String], zoom.toInt) },
multiPolygon.parseGeoJson[MultiPolygon]
new ParVector[MultiPolygon](multiPolygons.map { m => m.convertTo[String].parseGeoJson[MultiPolygon] })
)
case _ =>
throw new DeserializationException("Bad Count Arguments")
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
package com.azavea.usace.programanalysis.geop

import geotrellis.raster.Tile
import geotrellis.spark.io._
import geotrellis.spark.{SpatialKey, LayerId}
import geotrellis.spark.io.s3.{S3AttributeStore, S3ValueReader}

import org.apache.spark.SparkContext

object LayerReader {
  /**
    * Reads the [[Tile]] for spatial key (x, y) at the given zoom level
    * from the default S3 catalog.
    *
    * @param zoom [[Int]] zoom level of the layer
    * @param x    [[Int]] column of the spatial key
    * @param y    [[Int]] row of the spatial key
    * @param sc   [[SparkContext]] passed through to the reader
    * @return [[Tile]] stored under that key
    */
  def apply(
    zoom: Int,
    x: Int,
    y: Int,
    sc: SparkContext
  ): Tile =
    catalog(sc, zoom).read(SpatialKey(x, y))

  /**
    * Convenience overload: builds a tile reader against the default
    * bucket ("azavea-datahub") and root path ("catalog").
    */
  def catalog(sc: SparkContext, zoom: Int): Reader[SpatialKey, Tile] =
    catalog("azavea-datahub", "catalog", zoom)(sc)

  /**
    * Builds a per-key tile reader for the "nlcd-zoomed" layer at the
    * given zoom, backed by the given S3 bucket and root path.
    */
  def catalog(
    bucket: String,
    rootPath: String,
    zoom: Int
  )(implicit sc: SparkContext): Reader[SpatialKey, Tile] = {
    val attributes = new S3AttributeStore(bucket, rootPath)
    val values = new S3ValueReader(attributes)
    // Type ascription drives inference of the key/value codecs.
    val tileReader: Reader[SpatialKey, Tile] = values.reader(LayerId("nlcd-zoomed", zoom))

    tileReader
  }
}
2 changes: 1 addition & 1 deletion project/Version.scala
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ object Version {
def either(environmentVariable: String, default: String): String =
Properties.envOrElse(environmentVariable, default)

val version = "0.1.1"
val version = "0.2.0"

val geotrellis = "0.10.0"
val scala = either("SCALA_VERSION", "2.10.6")
Expand Down
2 changes: 1 addition & 1 deletion scripts/cibuild.sh
Original file line number Diff line number Diff line change
Expand Up @@ -12,5 +12,5 @@ fi
docker build -t "quay.io/usace/program-analysis-geoprocessing:${QUAY_TAG}" .

docker push "quay.io/usace/program-analysis-geoprocessing:${QUAY_TAG}"
docker tag -f "quay.io/usace/program-analysis-geoprocessing:${QUAY_TAG}" "quay.io/usace/program-analysis-geoprocessing:latest"
docker tag "quay.io/usace/program-analysis-geoprocessing:${QUAY_TAG}" "quay.io/usace/program-analysis-geoprocessing:latest"
docker push "quay.io/usace/program-analysis-geoprocessing:latest"

0 comments on commit 3b97340

Please sign in to comment.