Merge branch 'release/1.13.0'
Hector Castro committed Jun 29, 2016
2 parents 904f369 + a6e99b0 commit b208983
Showing 81 changed files with 4,707 additions and 572 deletions.
7 changes: 0 additions & 7 deletions Vagrantfile
@@ -47,8 +47,6 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
services.vm.hostname = "services"
services.vm.network "private_network", ip: ENV.fetch("MMW_SERVICES_IP", "33.33.34.30")

-services.vm.synced_folder ".", "/vagrant", disabled: true
-
# Graphite Web
services.vm.network "forwarded_port", {
guest: 8080,
@@ -90,7 +88,6 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
worker.vm.hostname = "worker"
worker.vm.network "private_network", ip: ENV.fetch("MMW_WORKER_IP", "33.33.34.20")

worker.vm.synced_folder ".", "/vagrant", disabled: true
worker.vm.synced_folder "src/mmw", "/opt/app/"

if ENV["VAGRANT_ENV"].nil? || ENV["VAGRANT_ENV"] != "TEST"
@@ -125,8 +122,6 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
app.vm.hostname = "app"
app.vm.network "private_network", ip: ENV.fetch("MMW_APP_IP", "33.33.34.10")

-app.vm.synced_folder ".", "/vagrant", disabled: true
-
if Vagrant::Util::Platform.windows? || Vagrant::Util::Platform.cygwin?
app.vm.synced_folder "src/mmw", "/opt/app/", type: "rsync", rsync__exclude: ["node_modules/", "apps/"]
app.vm.synced_folder "src/mmw/apps", "/opt/app/apps"
@@ -167,8 +162,6 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
tiler.vm.hostname = "tiler"
tiler.vm.network "private_network", ip: ENV.fetch("MMW_TILER_IP", "33.33.34.35")

-tiler.vm.synced_folder ".", "/vagrant", disabled: true
-
if Vagrant::Util::Platform.windows? || Vagrant::Util::Platform.cygwin?
tiler.vm.synced_folder "src/tiler", "/opt/tiler/", type: "rsync", rsync__exclude: ["node_modules/"]
else
6 changes: 4 additions & 2 deletions deployment/ansible/group_vars/all
@@ -30,7 +30,9 @@ elasticsearch_cluster_name: "logstash"

nodejs_npm_version: 2.1.17

java_version: "7u95-*"
apache_version: "2.4.7-*"

java_version: "7u101-*"

graphite_carbon_version: "0.9.13-pre1"
graphite_whisper_version: "0.9.13-pre1"
@@ -44,7 +46,7 @@ sjs_host: "localhost"
sjs_port: 8090
sjs_container_image: "quay.io/azavea/spark-jobserver:0.6.1"

geop_version: "0.4.0"
geop_version: "1.2.0"

nginx_cache_dir: "/var/cache/nginx"
observation_api_url: "http://www.wikiwatershed-vs.org/"
2 changes: 1 addition & 1 deletion deployment/ansible/roles.yml
@@ -45,6 +45,6 @@
- src: azavea.beaver
version: 1.0.1
- src: azavea.java
-version: 0.2.1
+version: 0.2.5
- src: azavea.docker
version: 1.0.2
@@ -31,7 +31,7 @@ spark {
master = "local[*]"

context-settings.passthrough.spark.serializer = "org.apache.spark.serializer.KryoSerializer"
context-settings.passthrough.spark.kryo.registrator = "geotrellis.spark.io.hadoop.KryoRegistrator"
context-settings.passthrough.spark.kryo.registrator = "geotrellis.spark.io.kryo.KryoRegistrator"
}

#########
@@ -3,5 +3,8 @@
{{ tiler_log_rotate_interval }}
compress
missingok
+postrotate
+restart mmw-tiler
+endscript
notifempty
}
1 change: 1 addition & 0 deletions scripts/aws/setupdb.sh
@@ -52,6 +52,7 @@ export PUBLIC_HOSTED_ZONE_NAME=$(cat /etc/mmw.d/env/MMW_PUBLIC_HOSTED_ZONE_NAME)

# Ensure that the PostGIS extension exists
psql -c "CREATE EXTENSION IF NOT EXISTS postgis;"
psql -c "ALTER TABLE spatial_ref_sys OWNER TO ${PGUSER};"

# Run migrations
envdir /etc/mmw.d/env /opt/app/manage.py migrate
1 change: 1 addition & 0 deletions scripts/debugserver.sh
@@ -7,3 +7,4 @@ set -x

vagrant ssh app -c "sudo service mmw-app stop || /bin/true"
vagrant ssh app -c "cd /opt/app/ && envdir /etc/mmw.d/env gunicorn --config /etc/mmw.d/gunicorn.py mmw.wsgi"
vagrant ssh app -c "sudo start mmw-app"
2 changes: 1 addition & 1 deletion src/mmw/apps/core/templates/base.html
@@ -54,7 +54,7 @@

{% block javascript %}
<script type="text/javascript"
src="https://maps.googleapis.com/maps/api/js?key={{ client_settings.google_maps_api_key }}">
src="https://maps.googleapis.com/maps/api/js?key={{ google_maps_api_key }}">
</script>
<script type="text/javascript">
window.clientSettings = {{ client_settings | safe }};
1 change: 1 addition & 0 deletions src/mmw/apps/home/urls.py
@@ -12,6 +12,7 @@
url(r'^$', home_page, name='home_page'),
url(r'^projects/$', projects, name='projects'),
url(r'^project/$', project, name='project'),
+url(r'^project/new/', project, name='project'),
url(r'^project/(?P<proj_id>[0-9]+)/$', project, name='project'),
url(r'^project/(?P<proj_id>[0-9]+)/clone/?$',
project_clone, name='project_clone'),
14 changes: 11 additions & 3 deletions src/mmw/apps/home/views.py
@@ -113,6 +113,13 @@ def get_layer_url(layer):
return urljoin(tiler_base, layer['code'] + tiler_postfix)


+def get_model_packages():
+    for model_package in settings.MODEL_PACKAGES:
+        if model_package['name'] in settings.DISABLED_MODEL_PACKAGES:
+            model_package['disabled'] = True
+    return settings.MODEL_PACKAGES
+
+
def get_client_settings(request):
EMBED_FLAG = settings.ITSI['embed_flag']

@@ -126,9 +133,10 @@ def get_client_settings(request):
'stream_layers': get_layer_config(['stream', 'overlay']),
'draw_tools': settings.DRAW_TOOLS,
'map_controls': settings.MAP_CONTROLS,
-'google_maps_api_key': settings.GOOGLE_MAPS_API_KEY,
-'vizer_urls': settings.VIZER_URLS
-})
+'vizer_urls': settings.VIZER_URLS,
+'model_packages': get_model_packages(),
+}),
+'google_maps_api_key': settings.GOOGLE_MAPS_API_KEY,
}

return client_settings
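Note on the views.py change above: get_client_settings now keeps google_maps_api_key outside the JSON-serialized client_settings blob, so base.html can interpolate it directly into the Google Maps script URL, while the serialized blob gains a model_packages entry built by the new get_model_packages helper. A minimal sketch of the settings shape that helper assumes (the package names below are hypothetical, not taken from this diff):

# Hypothetical Django settings consumed by get_model_packages(); the real
# values live in the project settings and are not shown in this diff.
MODEL_PACKAGES = [
    {'name': 'model-a', 'display_name': 'Model A'},
    {'name': 'model-b', 'display_name': 'Model B'},
]

# Packages listed here are still sent to the client, but are flagged with
# 'disabled': True so the UI can grey them out.
DISABLED_MODEL_PACKAGES = ['model-b']

With these settings, get_model_packages() would return both entries, marking only model-b as disabled.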
133 changes: 83 additions & 50 deletions src/mmw/apps/modeling/geoprocessing.py
@@ -52,8 +52,83 @@
}


+@statsd.timer(__name__ + '.sjs_submit')
+def sjs_submit(host, port, args, data, retry=None):
+    """
+    Submits a job to Spark Job Server. Returns its Job ID, which
+    can be used with sjs_retrieve to get the final result.
+    """
+    url = 'http://{}:{}/jobs?{}'.format(host, port, args)
+    response = requests.post(url, data=json.dumps(data))
+
+    if response.ok:
+        job = response.json()
+    else:
+        error = response.json()
+        if error['status'] == 'NO SLOTS AVAILABLE' and retry:
+            try:
+                retry()
+            except MaxRetriesExceededError:
+                raise Exception('No slots available in Spark JobServer.\n'
+                                'Details = {}'.format(response.text))
+        else:
+            raise Exception('Unable to submit job to Spark JobServer.\n'
+                            'Details = {}'.format(response.text))
+
+    if job['status'] == 'STARTED':
+        return job['result']['jobId']
+    else:
+        raise Exception('Submitted job did not start in Spark JobServer.\n'
+                        'Details = {}'.format(response.text))
+
+
+@statsd.timer(__name__ + '.sjs_retrieve')
+def sjs_retrieve(host, port, job_id, retry=None):
+    """
+    Given a job ID, will try to retrieve its value. If the job is
+    still running, will call the optional retry function before
+    proceeding.
+    """
+    url = 'http://{}:{}/jobs/{}'.format(host, port, job_id)
+    response = requests.get(url)
+
+    if response.ok:
+        job = response.json()
+    else:
+        raise Exception('Unable to retrieve job {} from Spark JobServer.\n'
+                        'Details = {}'.format(job_id, response.text))
+
+    if job['status'] == 'FINISHED':
+        return job['result']
+    elif job['status'] == 'RUNNING':
+        if retry:
+            try:
+                retry()
+            except MaxRetriesExceededError:
+                delete = requests.delete(url)  # Job took too long, terminate
+                if delete.ok:
+                    raise Exception('Job {} timed out, '
+                                    'deleted.'.format(job_id))
+                else:
+                    raise Exception('Job {} timed out, unable to delete.\n'
+                                    'Details: {}'.format(job_id, delete.text))
+    else:
+        if job['status'] == 'ERROR':
+            status = 'ERROR ({}: {})'.format(job['result']['errorClass'],
+                                             job['result']['message'])
+        else:
+            status = job['status']
+
+        delete = requests.delete(url)  # Job in unusual state, terminate
+        if delete.ok:
+            raise Exception('Job {} was {}, deleted'.format(job_id, status))
+        else:
+            raise Exception('Job {} was {}, could not delete.\n'
+                            'Details = {}'.format(job_id, status, delete.text))
+
+
@statsd.timer(__name__ + '.histogram_start')
-def histogram_start(polygons):
+def histogram_start(polygons, retry=None):
"""
Together, histogram_start and histogram_finish implement a
function which takes a list of polygons or multipolygons as input,
@@ -62,27 +137,13 @@ def histogram_start(polygons):
This is the top-half of the function.
"""
-@statsd.timer(__name__ + '.histogram_start.sjs_post')
-def post(url, data):
-return requests.post(url, data)
-
host = settings.GEOP['host']
port = settings.GEOP['port']
-path = settings.GEOP['path']
-request = settings.GEOP['request'].copy()
-request['input']['geometry'] = polygons
-url = "http://%s:%s%s" % (host, port, path)
+args = settings.GEOP['args']['SummaryJob']
+data = settings.GEOP['json']['nlcdSoilCensus'].copy()
+data['input']['geometry'] = polygons

-response = post(url, data=json.dumps(request))
-if response.ok:
-data = response.json()
-else:
-raise Exception('Unable to communicate with SJS (top-half).')
-
-if data['status'] == 'STARTED':
-return data['result']['jobId']
-else:
-raise Exception('Job submission failed.')
+return sjs_submit(host, port, args, data, retry)


@statsd.timer(__name__ + '.histogram_finish')
@@ -97,40 +158,12 @@ def dict_to_array(d):
result.append(((k1, k2), v))
return result

-@statsd.timer(__name__ + '.histogram_finish.sjs_get')
-def get(url):
-return requests.get(url)
-
-@statsd.timer(__name__ + '.histogram_finish.sjs_delete')
-def delete(url):
-response = requests.delete(url)
-return response.ok
-
host = settings.GEOP['host']
port = settings.GEOP['port']
url = "http://%s:%s/jobs/%s" % (host, port, job_id)

-response = get(url)
-if response.ok:
-data = response.json()
-else:
-raise Exception('Unable to communicate with SJS (bottom-half).')
-
-if data['status'] == 'FINISHED':
-return [dict_to_array(d) for d in data['result']]
-elif data['status'] == 'RUNNING':
-try:
-retry()
-except MaxRetriesExceededError, X: # job took too long, terminate
-if delete(url):
-raise X
-else:
-raise Exception('Job timed out, unable to delete.')
-else:
-if delete(url): # job failed, terminate
-raise Exception('Job failed, deleted.')
-else:
-raise Exception('Job failed, unable to delete.')
+data = sjs_retrieve(host, port, job_id, retry)

+return [dict_to_array(d) for d in data]


def histogram_to_x(data, nucleus, update_rule, after_rule):
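The retry callable threaded through histogram_start, sjs_submit, and sjs_retrieve above is designed to be a Celery bound-task retry (MaxRetriesExceededError is Celery's). A minimal sketch of how a caller might wire it up; the task names, decorator arguments, and import path here are assumptions, not part of this commit:

from celery import shared_task

from apps.modeling import geoprocessing  # assumed module path


@shared_task(bind=True, default_retry_delay=1, max_retries=42)
def start_histogram(self, polygons):
    # self.retry re-queues this task (or raises MaxRetriesExceededError),
    # which sjs_submit uses to back off when SJS reports NO SLOTS AVAILABLE.
    return geoprocessing.histogram_start(polygons, retry=self.retry)


@shared_task(bind=True, default_retry_delay=1, max_retries=42)
def finish_histogram(self, job_id):
    # sjs_retrieve polls the job and calls retry while it is still RUNNING;
    # once retries are exhausted it deletes the job and raises.
    return geoprocessing.histogram_finish(job_id, retry=self.retry)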
