From 69793310f3ba76fa9165e20a7fd8c8275ade7555 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Wed, 22 Nov 2017 13:27:00 -0500 Subject: [PATCH 01/19] CLIMATE-934 Fixed error with old Bounds constructor. Fixed error with matplotlib and date starting at epoch. Added Apache license. Minor Pylint. --- examples/time_series_with_regions.py | 135 ++++++++++++++++----------- 1 file changed, 79 insertions(+), 56 deletions(-) diff --git a/examples/time_series_with_regions.py b/examples/time_series_with_regions.py index 3bb133c1..168da900 100644 --- a/examples/time_series_with_regions.py +++ b/examples/time_series_with_regions.py @@ -1,19 +1,41 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +''' + Download three netCDF files, process the files to be the same shape, + divide the data into 13 subregions and plot a monthly time series for each sub region. 
+''' + +import sys +import datetime +from os import path +from calendar import monthrange +import ssl + +import numpy as np + # Apache OCW lib immports -from ocw.dataset import Dataset, Bounds +from ocw.dataset import Bounds import ocw.data_source.local as local import ocw.data_source.rcmed as rcmed import ocw.dataset_processor as dsp -import ocw.evaluation as evaluation -import ocw.metrics as metrics import ocw.plotter as plotter import ocw.utils as utils -import datetime -import numpy as np -import numpy.ma as ma -from os import path -import sys - if sys.version_info[0] >= 3: from urllib.request import urlretrieve else: @@ -21,17 +43,17 @@ # But note that this might need an update when Python 4 # might be around one day from urllib import urlretrieve -import ssl + if hasattr(ssl, '_create_unverified_context'): ssl._create_default_https_context = ssl._create_unverified_context - # File URL leader -FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/" +FILE_LEADER = 'http://zipper.jpl.nasa.gov/dist/' + # Three Local Model Files -FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc" -FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc" -FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc" +FILE_1 = 'AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc' +FILE_2 = 'AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc' +FILE_3 = 'AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc' LAT_MIN = -45.0 LAT_MAX = 42.24 @@ -56,55 +78,56 @@ # Download necessary NetCDF file if not present if not path.exists(FILE_1): - print("Downloading %s" % (FILE_LEADER + FILE_1)) + print('Downloading %s' % (FILE_LEADER + FILE_1)) urlretrieve(FILE_LEADER + FILE_1, FILE_1) if not path.exists(FILE_2): - print("Downloading %s" % (FILE_LEADER + FILE_2)) + print('Downloading %s' % (FILE_LEADER + FILE_2)) urlretrieve(FILE_LEADER + FILE_2, FILE_2) if not path.exists(FILE_3): - print("Downloading %s" % (FILE_LEADER + FILE_3)) + print('Downloading %s' % 
(FILE_LEADER + FILE_3)) urlretrieve(FILE_LEADER + FILE_3, FILE_3) -""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list""" -target_datasets.append(local.load_file(FILE_1, varName, name="KNMI")) -target_datasets.append(local.load_file(FILE_2, varName, name="REGCM")) -target_datasets.append(local.load_file(FILE_3, varName, name="UCT")) - +# Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list +target_datasets.append(local.load_file(FILE_1, varName, name='KNMI')) +target_datasets.append(local.load_file(FILE_2, varName, name='REGCM')) +target_datasets.append(local.load_file(FILE_3, varName, name='UCT')) -""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """ -print("Working with the rcmed interface to get CRU3.1 Daily Precipitation") +# Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module +print('Working with the rcmed interface to get CRU3.1 Daily Precipitation') # the dataset_id and the parameter id were determined from # https://rcmes.jpl.nasa.gov/content/data-rcmes-database CRU31 = rcmed.parameter_dataset( 10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END) - -""" Step 3: Processing datasets so they are the same shape ... """ -print("Processing datasets so they are the same shape") +# Step 3: Processing datasets so they are the same shape +print('Processing datasets so they are the same shape') CRU31 = dsp.water_flux_unit_conversion(CRU31) CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly') for member, each_target_dataset in enumerate(target_datasets): target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS) - target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[ - member]) + target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member]) target_datasets[member] = dsp.normalize_dataset_datetimes( target_datasets[member], 'monthly') -print("... spatial regridding") +print('... 
spatial regridding') new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep) new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep) CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons) - for member, each_target_dataset in enumerate(target_datasets): target_datasets[member] = dsp.spatial_regrid( target_datasets[member], new_lats, new_lons) -# find climatology monthly for obs and models +# Find climatology monthly for obs and models. CRU31.values, CRU31.times = utils.calc_climatology_monthly(CRU31) +# Shift the day of the month to the end of the month as matplotlib does not handle +# the xticks elegantly when the first date is the epoch and tries to determine +# the start of the xticks based on a value < 1. +for index, item in enumerate(CRU31.times): + CRU31.times[index] = datetime.date(item.year, item.month, monthrange(item.year, item.month)[1]) for member, each_target_dataset in enumerate(target_datasets): target_datasets[member].values, target_datasets[ @@ -112,41 +135,41 @@ # make the model ensemble target_datasets_ensemble = dsp.ensemble(target_datasets) -target_datasets_ensemble.name = "ENS" +target_datasets_ensemble.name = 'ENS' # append to the target_datasets for final analysis target_datasets.append(target_datasets_ensemble) -""" Step 4: Subregion stuff """ +# Step 4: Subregion stuff list_of_regions = [ - Bounds(-10.0, 0.0, 29.0, 36.5), - Bounds(0.0, 10.0, 29.0, 37.5), - Bounds(10.0, 20.0, 25.0, 32.5), - Bounds(20.0, 33.0, 25.0, 32.5), - Bounds(-19.3, -10.2, 12.0, 20.0), - Bounds(15.0, 30.0, 15.0, 25.0), - Bounds(-10.0, 10.0, 7.3, 15.0), - Bounds(-10.9, 10.0, 5.0, 7.3), - Bounds(33.9, 40.0, 6.9, 15.0), - Bounds(10.0, 25.0, 0.0, 10.0), - Bounds(10.0, 25.0, -10.0, 0.0), - Bounds(30.0, 40.0, -15.0, 0.0), - Bounds(33.0, 40.0, 25.0, 35.0)] - -region_list = [["R" + str(i + 1)] for i in xrange(13)] + Bounds(lat_min=-10.0, lat_max=0.0, lon_min=29.0, lon_max=36.5), + Bounds(lat_min=0.0, lat_max=10.0, lon_min=29.0, lon_max=37.5), + Bounds(lat_min=10.0, lat_max=20.0, 
lon_min=25.0, lon_max=32.5), + Bounds(lat_min=20.0, lat_max=33.0, lon_min=25.0, lon_max=32.5), + Bounds(lat_min=-19.3, lat_max=-10.2, lon_min=12.0, lon_max=20.0), + Bounds(lat_min=15.0, lat_max=30.0, lon_min=15.0, lon_max=25.0), + Bounds(lat_min=-10.0, lat_max=10.0, lon_min=7.3, lon_max=15.0), + Bounds(lat_min=-10.9, lat_max=10.0, lon_min=5.0, lon_max=7.3), + Bounds(lat_min=33.9, lat_max=40.0, lon_min=6.9, lon_max=15.0), + Bounds(lat_min=10.0, lat_max=25.0, lon_min=0.0, lon_max=10.0), + Bounds(lat_min=10.0, lat_max=25.0, lon_min=-10.0, lon_max=0.0), + Bounds(lat_min=30.0, lat_max=40.0, lon_min=-15.0, lon_max=0.0), + Bounds(lat_min=33.0, lat_max=40.0, lon_min=25.0, lon_max=35.0)] + +region_list = [['R' + str(i + 1)] for i in xrange(13)] for regions in region_list: firstTime = True - subset_name = regions[0] + "_CRU31" - # labels.append(subset_name) #for legend, uncomment this line + subset_name = regions[0] + '_CRU31' + labels.append(subset_name) subset = dsp.subset(CRU31, list_of_regions[region_counter], subset_name) tSeries = utils.calc_time_series(subset) results.append(tSeries) tSeries = [] firstTime = False for member, each_target_dataset in enumerate(target_datasets): - subset_name = regions[0] + "_" + target_datasets[member].name - # labels.append(subset_name) #for legend, uncomment this line + subset_name = regions[0] + '_' + target_datasets[member].name + labels.append(subset_name) subset = dsp.subset(target_datasets[member], list_of_regions[region_counter], subset_name) @@ -154,8 +177,8 @@ results.append(tSeries) tSeries = [] - plotter.draw_time_series(np.array(results), CRU31.times, labels, regions[ - 0], ptitle=regions[0], fmt='png') + plotter.draw_time_series(np.array(results), CRU31.times, labels, regions[0], + label_month=True, ptitle=regions[0], fmt='png') results = [] tSeries = [] labels = [] From d41041f28532bf4fc5b5393a6ae1642116bbaa2c Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Wed, 22 Nov 2017 16:04:01 -0500 Subject: [PATCH 02/19] 
CLIMATE-935 Fixed subregion examples using old style Bounds constructor. Added Apache license and minor Pylint --- examples/subregions_portrait_diagram.py | 105 ++++++++------ examples/subregions_rectangular_boundaries.py | 56 +++++--- examples/time_series_with_regions.py | 135 ++++++++++-------- 3 files changed, 173 insertions(+), 123 deletions(-) diff --git a/examples/subregions_portrait_diagram.py b/examples/subregions_portrait_diagram.py index e85286f5..a6d3c145 100644 --- a/examples/subregions_portrait_diagram.py +++ b/examples/subregions_portrait_diagram.py @@ -1,5 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +from os import path +import urllib +import ssl +import datetime +import numpy as np + # Apache OCW lib immports -from ocw.dataset import Dataset, Bounds +from ocw.dataset import Bounds import ocw.data_source.local as local import ocw.data_source.rcmed as rcmed import ocw.dataset_processor as dsp @@ -8,24 +31,17 @@ import ocw.plotter as plotter import ocw.utils as utils -import datetime -import numpy as np -import numpy.ma as ma - -from os import path -import urllib -import ssl if hasattr(ssl, '_create_unverified_context'): ssl._create_default_https_context = ssl._create_unverified_context # File URL leader -FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/" +FILE_LEADER = 'http://zipper.jpl.nasa.gov/dist/' # Three Local Model Files -FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc" -FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc" -FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc" +FILE_1 = 'AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc' +FILE_2 = 'AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc' +FILE_3 = 'AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc' # Filename for the output image/plot (without file extension) -OUTPUT_PLOT = "portrait_diagram" +OUTPUT_PLOT = 'portrait_diagram' # Spatial and temporal configurations LAT_MIN = -45.0 @@ -34,7 +50,8 @@ LON_MAX = 60.0 START = datetime.datetime(2000, 01, 1) END = datetime.datetime(2007, 12, 31) -EVAL_BOUNDS = Bounds(LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END) +EVAL_BOUNDS = Bounds(lat_min=LAT_MIN, lat_max=LAT_MAX, lon_min=LON_MIN, + lon_max=LON_MAX, start=START, end=END) # variable that we are analyzing varName = 'pr' @@ -58,32 +75,32 @@ if not path.exists(FILE_3): urllib.urlretrieve(FILE_LEADER + FILE_3, FILE_3) -""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list""" -target_datasets.append(local.load_file(FILE_1, varName, name="KNMI")) -target_datasets.append(local.load_file(FILE_2, varName, name="REGCM")) 
-target_datasets.append(local.load_file(FILE_3, varName, name="UCT")) +# Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list +target_datasets.append(local.load_file(FILE_1, varName, name='KNMI')) +target_datasets.append(local.load_file(FILE_2, varName, name='REGCM')) +target_datasets.append(local.load_file(FILE_3, varName, name='UCT')) -""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """ -print("Working with the rcmed interface to get CRU3.1 Monthly Mean Precipitation") +# Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module +print('Working with the rcmed interface to get CRU3.1 Monthly Mean Precipitation') # the dataset_id and the parameter id were determined from # https://rcmes.jpl.nasa.gov/content/data-rcmes-database CRU31 = rcmed.parameter_dataset( 10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END) -""" Step 3: Processing Datasets so they are the same shape """ -print("Processing datasets ...") +# Step 3: Processing Datasets so they are the same shape +print('Processing datasets ...') CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly') -print("... on units") +print('... on units') CRU31 = dsp.water_flux_unit_conversion(CRU31) for member, each_target_dataset in enumerate(target_datasets): target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS) - target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[ - member]) + target_datasets[member] = \ + dsp.water_flux_unit_conversion(target_datasets[member]) target_datasets[member] = dsp.normalize_dataset_datetimes( target_datasets[member], 'monthly') -print("... spatial regridding") +print('... 
spatial regridding') new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep) new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep) CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons) @@ -97,12 +114,12 @@ _, CRU31.values = utils.calc_climatology_year(CRU31) for member, each_target_dataset in enumerate(target_datasets): - _, target_datasets[member].values = utils.calc_climatology_year(target_datasets[ - member]) + _, target_datasets[member].values = \ + utils.calc_climatology_year(target_datasets[member]) # make the model ensemble target_datasets_ensemble = dsp.ensemble(target_datasets) -target_datasets_ensemble.name = "ENS" +target_datasets_ensemble.name = 'ENS' # append to the target_datasets for final analysis target_datasets.append(target_datasets_ensemble) @@ -111,21 +128,21 @@ allNames.append(target.name) list_of_regions = [ - Bounds(-10.0, 0.0, 29.0, 36.5), - Bounds(0.0, 10.0, 29.0, 37.5), - Bounds(10.0, 20.0, 25.0, 32.5), - Bounds(20.0, 33.0, 25.0, 32.5), - Bounds(-19.3, -10.2, 12.0, 20.0), - Bounds(15.0, 30.0, 15.0, 25.0), - Bounds(-10.0, 10.0, 7.3, 15.0), - Bounds(-10.9, 10.0, 5.0, 7.3), - Bounds(33.9, 40.0, 6.9, 15.0), - Bounds(10.0, 25.0, 0.0, 10.0), - Bounds(10.0, 25.0, -10.0, 0.0), - Bounds(30.0, 40.0, -15.0, 0.0), - Bounds(33.0, 40.0, 25.0, 35.00)] - -region_list = ["R" + str(i + 1) for i in xrange(13)] + Bounds(lat_min=-10.0, lat_max=0.0, lon_min=29.0, lon_max=36.5), + Bounds(lat_min=0.0, lat_max=10.0, lon_min=29.0, lon_max=37.5), + Bounds(lat_min=10.0, lat_max=20.0, lon_min=25.0, lon_max=32.5), + Bounds(lat_min=20.0, lat_max=33.0, lon_min=25.0, lon_max=32.5), + Bounds(lat_min=-19.3, lat_max=-10.2, lon_min=12.0, lon_max=20.0), + Bounds(lat_min=15.0, lat_max=30.0, lon_min=15.0, lon_max=25.0), + Bounds(lat_min=-10.0, lat_max=10.0, lon_min=7.3, lon_max=15.0), + Bounds(lat_min=-10.9, lat_max=10.0, lon_min=5.0, lon_max=7.3), + Bounds(lat_min=33.9, lat_max=40.0, lon_min=6.9, lon_max=15.0), + Bounds(lat_min=10.0, lat_max=25.0, lon_min=0.0, lon_max=10.0), + 
Bounds(lat_min=10.0, lat_max=25.0, lon_min=-10.0, lon_max=0.0), + Bounds(lat_min=30.0, lat_max=40.0, lon_min=-15.0, lon_max=0.0), + Bounds(lat_min=33.0, lat_max=40.0, lon_min=25.0, lon_max=35.00)] + +region_list = ['R' + str(i + 1) for i in xrange(13)] # metrics pattern_correlation = metrics.PatternCorrelation() diff --git a/examples/subregions_rectangular_boundaries.py b/examples/subregions_rectangular_boundaries.py index 7fcf0e88..37eb884b 100644 --- a/examples/subregions_rectangular_boundaries.py +++ b/examples/subregions_rectangular_boundaries.py @@ -1,16 +1,26 @@ -# Apache OCW lib immports -from ocw.dataset import Dataset, Bounds -import ocw.data_source.local as local -import ocw.data_source.rcmed as rcmed -import ocw.dataset_processor as dsp -import ocw.evaluation as evaluation -import ocw.metrics as metrics -import ocw.plotter as plotter -import ocw.utils as utils +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
import datetime import numpy as np -import numpy.ma as ma + +# Apache OCW lib immports +from ocw.dataset import Bounds +import ocw.plotter as plotter OUTPUT_PLOT = "subregions" @@ -32,19 +42,19 @@ new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep) list_of_regions = [ - Bounds(-10.0, 0.0, 29.0, 36.5, START_SUB, END_SUB), - Bounds(0.0, 10.0, 29.0, 37.5, START_SUB, END_SUB), - Bounds(10.0, 20.0, 25.0, 32.5, START_SUB, END_SUB), - Bounds(20.0, 33.0, 25.0, 32.5, START_SUB, END_SUB), - Bounds(-19.3, -10.2, 12.0, 20.0, START_SUB, END_SUB), - Bounds(15.0, 30.0, 15.0, 25.0, START_SUB, END_SUB), - Bounds(-10.0, 10.0, 7.3, 15.0, START_SUB, END_SUB), - Bounds(-10.9, 10.0, 5.0, 7.3, START_SUB, END_SUB), - Bounds(33.9, 40.0, 6.9, 15.0, START_SUB, END_SUB), - Bounds(10.0, 25.0, 0.0, 10.0, START_SUB, END_SUB), - Bounds(10.0, 25.0, -10.0, 0.0, START_SUB, END_SUB), - Bounds(30.0, 40.0, -15.0, 0.0, START_SUB, END_SUB), - Bounds(33.0, 40.0, 25.0, 35.0, START_SUB, END_SUB)] + Bounds(lat_min=-10.0, lat_max=0.0, lon_min=29.0, lon_max=36.5, start=START_SUB, end=END_SUB), + Bounds(lat_min=0.0, lat_max=10.0, lon_min=29.0, lon_max=37.5, start=START_SUB, end=END_SUB), + Bounds(lat_min=10.0, lat_max=20.0, lon_min=25.0, lon_max=32.5, start=START_SUB, end=END_SUB), + Bounds(lat_min=20.0, lat_max=33.0, lon_min=25.0, lon_max=32.5, start=START_SUB, end=END_SUB), + Bounds(lat_min=-19.3, lat_max=-10.2, lon_min=12.0, lon_max=20.0, start=START_SUB, end=END_SUB), + Bounds(lat_min=15.0, lat_max=30.0, lon_min=15.0, lon_max=25.0, start=START_SUB, end=END_SUB), + Bounds(lat_min=-10.0, lat_max=10.0, lon_min=7.3, lon_max=15.0, start=START_SUB, end=END_SUB), + Bounds(lat_min=-10.9, lat_max=10.0, lon_min=5.0, lon_max=7.3, start=START_SUB, end=END_SUB), + Bounds(lat_min=33.9, lat_max=40.0, lon_min=6.9, lon_max=15.0, start=START_SUB, end=END_SUB), + Bounds(lat_min=10.0, lat_max=25.0, lon_min=0.0, lon_max=10.0, start=START_SUB, end=END_SUB), + Bounds(lat_min=10.0, lat_max=25.0, lon_min=-10.0, lon_max=0.0, 
start=START_SUB, end=END_SUB), + Bounds(lat_min=30.0, lat_max=40.0, lon_min=-15.0, lon_max=0.0, start=START_SUB, end=END_SUB), + Bounds(lat_min=33.0, lat_max=40.0, lon_min=25.0, lon_max=35.0, start=START_SUB, end=END_SUB)] # for plotting the subregions plotter.draw_subregions(list_of_regions, new_lats, diff --git a/examples/time_series_with_regions.py b/examples/time_series_with_regions.py index 3bb133c1..0799cf0c 100644 --- a/examples/time_series_with_regions.py +++ b/examples/time_series_with_regions.py @@ -1,19 +1,41 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +''' + Download three netCDF files, process the files to be the same shape, + divide the data into subregions and plot a monthly time series for each sub region. 
+''' + +import sys +import datetime +from os import path +from calendar import monthrange +import ssl + +import numpy as np + # Apache OCW lib immports -from ocw.dataset import Dataset, Bounds +from ocw.dataset import Bounds import ocw.data_source.local as local import ocw.data_source.rcmed as rcmed import ocw.dataset_processor as dsp -import ocw.evaluation as evaluation -import ocw.metrics as metrics import ocw.plotter as plotter import ocw.utils as utils -import datetime -import numpy as np -import numpy.ma as ma -from os import path -import sys - if sys.version_info[0] >= 3: from urllib.request import urlretrieve else: @@ -21,17 +43,17 @@ # But note that this might need an update when Python 4 # might be around one day from urllib import urlretrieve -import ssl + if hasattr(ssl, '_create_unverified_context'): ssl._create_default_https_context = ssl._create_unverified_context - # File URL leader -FILE_LEADER = "http://zipper.jpl.nasa.gov/dist/" +FILE_LEADER = 'http://zipper.jpl.nasa.gov/dist/' + # Three Local Model Files -FILE_1 = "AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc" -FILE_2 = "AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc" -FILE_3 = "AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc" +FILE_1 = 'AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc' +FILE_2 = 'AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc' +FILE_3 = 'AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc' LAT_MIN = -45.0 LAT_MAX = 42.24 @@ -56,55 +78,56 @@ # Download necessary NetCDF file if not present if not path.exists(FILE_1): - print("Downloading %s" % (FILE_LEADER + FILE_1)) + print('Downloading %s' % (FILE_LEADER + FILE_1)) urlretrieve(FILE_LEADER + FILE_1, FILE_1) if not path.exists(FILE_2): - print("Downloading %s" % (FILE_LEADER + FILE_2)) + print('Downloading %s' % (FILE_LEADER + FILE_2)) urlretrieve(FILE_LEADER + FILE_2, FILE_2) if not path.exists(FILE_3): - print("Downloading %s" % (FILE_LEADER + FILE_3)) + print('Downloading %s' % 
(FILE_LEADER + FILE_3)) urlretrieve(FILE_LEADER + FILE_3, FILE_3) -""" Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list""" -target_datasets.append(local.load_file(FILE_1, varName, name="KNMI")) -target_datasets.append(local.load_file(FILE_2, varName, name="REGCM")) -target_datasets.append(local.load_file(FILE_3, varName, name="UCT")) - +# Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list +target_datasets.append(local.load_file(FILE_1, varName, name='KNMI')) +target_datasets.append(local.load_file(FILE_2, varName, name='REGCM')) +target_datasets.append(local.load_file(FILE_3, varName, name='UCT')) -""" Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module """ -print("Working with the rcmed interface to get CRU3.1 Daily Precipitation") +# Step 2: Fetch an OCW Dataset Object from the data_source.rcmed module +print('Working with the rcmed interface to get CRU3.1 Daily Precipitation') # the dataset_id and the parameter id were determined from # https://rcmes.jpl.nasa.gov/content/data-rcmes-database CRU31 = rcmed.parameter_dataset( 10, 37, LAT_MIN, LAT_MAX, LON_MIN, LON_MAX, START, END) - -""" Step 3: Processing datasets so they are the same shape ... """ -print("Processing datasets so they are the same shape") +# Step 3: Processing datasets so they are the same shape +print('Processing datasets so they are the same shape') CRU31 = dsp.water_flux_unit_conversion(CRU31) CRU31 = dsp.normalize_dataset_datetimes(CRU31, 'monthly') for member, each_target_dataset in enumerate(target_datasets): target_datasets[member] = dsp.subset(target_datasets[member], EVAL_BOUNDS) - target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[ - member]) + target_datasets[member] = dsp.water_flux_unit_conversion(target_datasets[member]) target_datasets[member] = dsp.normalize_dataset_datetimes( target_datasets[member], 'monthly') -print("... spatial regridding") +print('... 
spatial regridding') new_lats = np.arange(LAT_MIN, LAT_MAX, gridLatStep) new_lons = np.arange(LON_MIN, LON_MAX, gridLonStep) CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons) - for member, each_target_dataset in enumerate(target_datasets): target_datasets[member] = dsp.spatial_regrid( target_datasets[member], new_lats, new_lons) -# find climatology monthly for obs and models +# Find climatology monthly for obs and models. CRU31.values, CRU31.times = utils.calc_climatology_monthly(CRU31) +# Shift the day of the month to the end of the month as matplotlib does not handle +# the xticks elegantly when the first date is the epoch and tries to determine +# the start of the xticks based on a value < 1. +for index, item in enumerate(CRU31.times): + CRU31.times[index] = datetime.date(item.year, item.month, monthrange(item.year, item.month)[1]) for member, each_target_dataset in enumerate(target_datasets): target_datasets[member].values, target_datasets[ @@ -112,41 +135,41 @@ # make the model ensemble target_datasets_ensemble = dsp.ensemble(target_datasets) -target_datasets_ensemble.name = "ENS" +target_datasets_ensemble.name = 'ENS' # append to the target_datasets for final analysis target_datasets.append(target_datasets_ensemble) -""" Step 4: Subregion stuff """ +# Step 4: Subregion stuff list_of_regions = [ - Bounds(-10.0, 0.0, 29.0, 36.5), - Bounds(0.0, 10.0, 29.0, 37.5), - Bounds(10.0, 20.0, 25.0, 32.5), - Bounds(20.0, 33.0, 25.0, 32.5), - Bounds(-19.3, -10.2, 12.0, 20.0), - Bounds(15.0, 30.0, 15.0, 25.0), - Bounds(-10.0, 10.0, 7.3, 15.0), - Bounds(-10.9, 10.0, 5.0, 7.3), - Bounds(33.9, 40.0, 6.9, 15.0), - Bounds(10.0, 25.0, 0.0, 10.0), - Bounds(10.0, 25.0, -10.0, 0.0), - Bounds(30.0, 40.0, -15.0, 0.0), - Bounds(33.0, 40.0, 25.0, 35.0)] - -region_list = [["R" + str(i + 1)] for i in xrange(13)] + Bounds(lat_min=-10.0, lat_max=0.0, lon_min=29.0, lon_max=36.5), + Bounds(lat_min=0.0, lat_max=10.0, lon_min=29.0, lon_max=37.5), + Bounds(lat_min=10.0, lat_max=20.0, 
lon_min=25.0, lon_max=32.5), + Bounds(lat_min=20.0, lat_max=33.0, lon_min=25.0, lon_max=32.5), + Bounds(lat_min=-19.3, lat_max=-10.2, lon_min=12.0, lon_max=20.0), + Bounds(lat_min=15.0, lat_max=30.0, lon_min=15.0, lon_max=25.0), + Bounds(lat_min=-10.0, lat_max=10.0, lon_min=7.3, lon_max=15.0), + Bounds(lat_min=-10.9, lat_max=10.0, lon_min=5.0, lon_max=7.3), + Bounds(lat_min=33.9, lat_max=40.0, lon_min=6.9, lon_max=15.0), + Bounds(lat_min=10.0, lat_max=25.0, lon_min=0.0, lon_max=10.0), + Bounds(lat_min=10.0, lat_max=25.0, lon_min=-10.0, lon_max=0.0), + Bounds(lat_min=30.0, lat_max=40.0, lon_min=-15.0, lon_max=0.0), + Bounds(lat_min=33.0, lat_max=40.0, lon_min=25.0, lon_max=35.0)] + +region_list = [['R' + str(i + 1)] for i in xrange(13)] for regions in region_list: firstTime = True - subset_name = regions[0] + "_CRU31" - # labels.append(subset_name) #for legend, uncomment this line + subset_name = regions[0] + '_CRU31' + labels.append(subset_name) subset = dsp.subset(CRU31, list_of_regions[region_counter], subset_name) tSeries = utils.calc_time_series(subset) results.append(tSeries) tSeries = [] firstTime = False for member, each_target_dataset in enumerate(target_datasets): - subset_name = regions[0] + "_" + target_datasets[member].name - # labels.append(subset_name) #for legend, uncomment this line + subset_name = regions[0] + '_' + target_datasets[member].name + labels.append(subset_name) subset = dsp.subset(target_datasets[member], list_of_regions[region_counter], subset_name) @@ -154,8 +177,8 @@ results.append(tSeries) tSeries = [] - plotter.draw_time_series(np.array(results), CRU31.times, labels, regions[ - 0], ptitle=regions[0], fmt='png') + plotter.draw_time_series(np.array(results), CRU31.times, labels, regions[0], + label_month=True, ptitle=regions[0], fmt='png') results = [] tSeries = [] labels = [] From 7d6f5ae8abc9d37cfdace59080da1c54ff7f71e4 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Sun, 26 Nov 2017 16:30:10 -0500 Subject: [PATCH 03/19] 
CLIMATE-465 Add class level documentation and Apache license to examples --- examples/GPM_WRF24_JPDF_comparison.py | 26 ++++++++++ examples/draw_climatology_map_MISR_AOD.py | 26 ++++++++++ examples/esgf_integration_example.py | 15 ++++++ examples/knmi_to_cru31_full_bias.py | 32 ++++++++++++ examples/model_ensemble_to_rcmed.py | 32 ++++++++++++ examples/multi_model_evaluation.py | 36 +++++++++++++ examples/multi_model_taylor_diagram.py | 51 +++++++++++++++++++ examples/podaac_integration_example.py | 21 ++++++++ examples/simple_model_to_model_bias.py | 29 +++++++++++ examples/simple_model_tstd.py | 24 +++++++++ examples/subregions_portrait_diagram.py | 39 ++++++++++++++ examples/subregions_rectangular_boundaries.py | 17 +++++++ examples/subset_TRMM_data_for_NCA_regions.py | 25 ++++++++- examples/taylor_diagram_example.py | 31 +++++++++++ examples/temperature_trends_over_CONUS.py | 25 +++++++++ examples/time_series_with_regions.py | 47 +++++++++++++---- 16 files changed, 466 insertions(+), 10 deletions(-) diff --git a/examples/GPM_WRF24_JPDF_comparison.py b/examples/GPM_WRF24_JPDF_comparison.py index 20b070e3..45eee894 100644 --- a/examples/GPM_WRF24_JPDF_comparison.py +++ b/examples/GPM_WRF24_JPDF_comparison.py @@ -15,6 +15,32 @@ # specific language governing permissions and limitations # under the License. +""" + GPM_WRF24_JPDF_comparison.py + + This is an example of calculating the joint probability distribution + function of rainfall intensity and duration for the Northern Great + Plains using GPM IMERG data for June/01/2015 + + In this example: + + 1. Load the GPM and WRF24 datasets with spatial filter. + 2. Load the spatial filter (Bukovsky region mask). + 3. Spatially subset the WRF data. + 4. Analyze the wet spells. + 5. Calculate the joint PDF(JPDF) of spell_duration and peak_rainfall. + 6. Visualize the JPDF. + + OCW modules demonstrated: + + 1. datasource/local + 2. dataset + 3. dataset_processor + 4. metrics + 5. 
plotter + +""" + from ocw.dataset import Bounds import ocw.data_source.local as local import ocw.dataset_processor as dsp diff --git a/examples/draw_climatology_map_MISR_AOD.py b/examples/draw_climatology_map_MISR_AOD.py index 979c0f56..c75d3b38 100644 --- a/examples/draw_climatology_map_MISR_AOD.py +++ b/examples/draw_climatology_map_MISR_AOD.py @@ -15,6 +15,32 @@ # specific language governing permissions and limitations # under the License. +""" + draw_climatology_map_MISR_AOD.py + + Use OCW to download an MISR dataset, subset the data, calculate the 16 and 5 year + mean and draw a countour map of the means and the current values. + + In this example: + + 1. Download a dataset from https://dx.doi.org/10.6084/m9.figshare.3753321.v1. + *** Note *** The dataset for this example is not downloaded as part of the example + and must be downloaded to examples directory before running the example. + *** Note *** Depending on the OS on which the example is being run, the download + may remove the - in the filename. Rename the file appropriately. + 2. Subset the data set (lat / lon / start date / end date). + 3. Calculate the 16, 5 and 1 year mean. + 4. Draw a three contour maps using the calculated means and current values. + + OCW modules demonstrated: + + 1. datasource/local + 2. dataset + 3. dataset_processor + 4. plotter + +""" + import ocw.dataset as ds import ocw.data_source.local as local import ocw.dataset_processor as dsp diff --git a/examples/esgf_integration_example.py b/examples/esgf_integration_example.py index 8247435f..e939927d 100644 --- a/examples/esgf_integration_example.py +++ b/examples/esgf_integration_example.py @@ -15,6 +15,21 @@ # specific language governing permissions and limitations # under the License. +""" + esgf_integration_example.py + + Use OCW to download an ESGF dataset into the common format of an OCW dataset object. + + In this example: + + 1. Download an ESGF (https://esgf.llnl.gov/) dataset and load it into a OCW dataset object. 
+ + OCW modules demonstrated: + + 1. datasource/esgf + +""" + import ocw.data_source.esgf as esgf from getpass import getpass import ssl diff --git a/examples/knmi_to_cru31_full_bias.py b/examples/knmi_to_cru31_full_bias.py index 95b64a8f..13b56864 100644 --- a/examples/knmi_to_cru31_full_bias.py +++ b/examples/knmi_to_cru31_full_bias.py @@ -15,6 +15,38 @@ # specific language governing permissions and limitations # under the License. +""" + knmi_to_cru31_full_bias.py + + Use OCW to download, evaluate and plot (contour map) a dataset + against a reference dataset and OCW standard metrics (bias). + + In this example: + + 1. Download a netCDF files from a local site. + AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc + 2. Load the local files into OCW dataset objects. + 3. Interface with the Regional Climate Model Evalutaion Database (https://rcmes.jpl.nasa.gov/) + to load the CRU3.1 Daily-Max Temp dataset (https://rcmes.jpl.nasa.gov/content/cru31). + 4. Process each dataset to the same same shape. + 5. Temporally rebin the datasets to a single timestep. + 6. Spatially regrid the dataset objects to a 1/2 degree grid. + 7. Build a bias metric to use for evaluation use the standard OCW metric set. + 8. Create an evaluation object using the datasets and metric. + 9. Plot the results of the evaluation (contour map). + + OCW modules demonstrated: + + 1. datasource/local + 2. datasource/rcmed + 3. dataset + 4. dataset_processor + 5. evaluation + 6. metrics + 7. plotter + +""" + import datetime import urllib from os import path diff --git a/examples/model_ensemble_to_rcmed.py b/examples/model_ensemble_to_rcmed.py index e8e3dbe0..185aa2ec 100644 --- a/examples/model_ensemble_to_rcmed.py +++ b/examples/model_ensemble_to_rcmed.py @@ -15,6 +15,38 @@ # specific language governing permissions and limitations # under the License. 
+""" + model_ensemble_to_rcmed.py + + Use OCW to download, evaluate and plot (contour map) two datasets + against a reference dataset and OCW standard metrics (bias). + + In this example: + + 1. Download two netCDF files from a local site. + AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc + AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc + 2. Load the local files into OCW dataset objects. + 3. Interface with the Regional Climate Model Evaluation Database (https://rcmes.jpl.nasa.gov/) + to load the CRU3.1 Daily-Max Temp dataset (https://rcmes.jpl.nasa.gov/content/cru31). + 4. Temporally rebin the datasets to annual. + 5. Spatially regrid the dataset objects to a 1/2 degree grid. + 6. Build a bias metric to use for evaluation use the standard OCW metric set. + 7. Create an evaluation object using the datasets and metric. + 8. Plot the results of the evaluation (contour map). + + OCW modules demonstrated: + + 1. datasource/local + 2. datasource/rcmed + 3. dataset + 4. dataset_processor + 5. metrics + 6. evaluation + 7. plotter + +""" + import datetime import math import urllib diff --git a/examples/multi_model_evaluation.py b/examples/multi_model_evaluation.py index 7756cc9f..ba6ad563 100644 --- a/examples/multi_model_evaluation.py +++ b/examples/multi_model_evaluation.py @@ -15,6 +15,42 @@ # specific language governing permissions and limitations # under the License. +""" + multi_model_evaluation.py + + Use OCW to download, evaluate and plot (contour map) two datasets + against a reference dataset and OCW standard metrics. + + In this example: + + 1. Download two netCDF files from a local site. + AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc + AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc + 2. Load the local files into OCW dataset objects. + 3. Interface with the Regional Climate Model Evaluation Database (https://rcmes.jpl.nasa.gov/) + to load the CRU3.1 Daily Precipitation dataset (https://rcmes.jpl.nasa.gov/content/cru31). 
+ 4. Process each dataset to the same same shape. + a.) Restrict the datasets re: geographic and time boundaries. + b.) Convert the dataset water flux to common units. + c.) Normalize the dataset date / times to monthly. + d.) Spatially regrid each dataset. + 5. Calculate the mean annual value for each dataset. + 6. Evaluate the datasets against the reference data set and OCW standard metric and plot + a contour map. + + OCW modules demonstrated: + + 1. datasource/local + 2. datasource/rcmed + 3. dataset + 4. dataset_processor + 5. metrics + 6. evaluation + 7. plotter + 8. utils + +""" + import datetime import urllib import numpy as np diff --git a/examples/multi_model_taylor_diagram.py b/examples/multi_model_taylor_diagram.py index 31d4020c..8edee7b4 100644 --- a/examples/multi_model_taylor_diagram.py +++ b/examples/multi_model_taylor_diagram.py @@ -1,3 +1,54 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" + multi_model_taylor_diagram.py + + Use OCW to download, normalize and evaluate three datasets + against a reference dataset and OCW standard metrics + drawing a Taylor diagram of the results of the evaluation. + + In this example: + + 1. Download three netCDF files from a local site. 
+ AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc + AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc + AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc + 2. Load the local files into OCW dataset objects. + 3. Process each dataset to the same same shape. + a.) Restrict the datasets re: geographic and time boundaries. + b.) Temporally rebin the data (monthly). + c.) Spatially regrid each dataset. + 4. Extract the metrics used for the evaluation and evaluate + against a reference dataset and standard OCW metrics. + 5. Draw evaluation results Taylor diagram. + + OCW modules demonstrated: + + 1. datasource/local + 2. dataset + 3. dataset_processor + 4. evaluation + 5. metrics + 6. plotter + 7. utils + +""" + # Apache OCW lib immports from ocw.dataset import Dataset, Bounds import ocw.data_source.local as local diff --git a/examples/podaac_integration_example.py b/examples/podaac_integration_example.py index 61663d71..be85884e 100644 --- a/examples/podaac_integration_example.py +++ b/examples/podaac_integration_example.py @@ -15,6 +15,27 @@ # specific language governing permissions and limitations # under the License. +""" + podaac_integration_example.py + + Use OCW to download a PODACC dataset, evaluate and plot (contour map). + + In this example: + + 1. Download a remote PO.DAAC (https://podaac.jpl.nasa.gov/) dataset + and read it into an OCW dataset object. + 2. Create a temporal STD metric using one of the OCW standard metrics. + 3. Evaluate the dataset against the metric and plot a contour map. + + OCW modules demonstrated: + + 1. datasource/podaac_datasource + 2. metrics + 3. evaluation + 4. 
plotter + +""" + import ocw.data_source.podaac_datasource as podaac import ocw.evaluation as evaluation import ocw.metrics as metrics diff --git a/examples/simple_model_to_model_bias.py b/examples/simple_model_to_model_bias.py index 8e834b67..ad1f29b0 100644 --- a/examples/simple_model_to_model_bias.py +++ b/examples/simple_model_to_model_bias.py @@ -15,6 +15,35 @@ # specific language governing permissions and limitations # under the License. +""" + simple_model_to_model_bias.py + + Use OCW to download, normalize and evaluate two datasets + against an OCW metric (bias) and plot the results of the + evaluation (contour map). + + In this example: + + 1. Download two netCDF files from a local site. + AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc + AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc + 2. Load the local files into OCW dataset objects. + 3. Temporally rebin the data anually. + 4. Spatially regrid the dataset objects to a 1 degree grid. + 5. Build a bias metric to use for evaluation use the standard OCW metric set. + 6. Create an evaluation object using the datasets and metric. + 7. Plot the results of the evaluation (contour map). + + OCW modules demonstrated: + + 1. datasource/local + 2. dataset_processor + 3. evaluation + 4. metrics + 5. plotter + +""" + import datetime from os import path import sys diff --git a/examples/simple_model_tstd.py b/examples/simple_model_tstd.py index fb3ce48d..6412493f 100644 --- a/examples/simple_model_tstd.py +++ b/examples/simple_model_tstd.py @@ -15,6 +15,30 @@ # specific language governing permissions and limitations # under the License. +""" + simple_model_tstd.py + + Use OCW to download, evaluate against a OCW standard + metrics (temporal STD) and plot the results (contour map). + + In this example: + + 1. Download a netCDF files from a local site. + AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc + 2. Load the local file into OCW dataset objects. + 3. 
Use the standard OCW metrics to build a metric against which to evaluation (temporal STD). + 4. Create an evaluation object of the dataset vs. the metric. + 5. Plot the results of the evaluation using a contour map. + + OCW modules demonstrated: + + 1. datasource/local + 2. metrics + 3. evaluation + 4. plotter + +""" + from os import path import urllib diff --git a/examples/subregions_portrait_diagram.py b/examples/subregions_portrait_diagram.py index a6d3c145..3e6785c8 100644 --- a/examples/subregions_portrait_diagram.py +++ b/examples/subregions_portrait_diagram.py @@ -15,6 +15,45 @@ # specific language governing permissions and limitations # under the License. +""" + subregions_portrait_diagram.py + + Use OCW to download, normalize, evaluate and plot (portrait diagram) + three local datasets against a reference dataset. + + In this example: + + 1. Download three netCDF files from a local site. + AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc + AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc + AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc + 2. Load the local files into OCW dataset objects. + 3. Interface with the Regional Climate Model Evaluation Database (https://rcmes.jpl.nasa.gov/) + to load the CRU3.1 Daily Precipitation dataset (https://rcmes.jpl.nasa.gov/content/cru31). + 4. Process each dataset to the same same shape. + a.) Restrict the datasets re: geographic and time boundaries. + b.) Convert the dataset water flux to common units. + c.) Normalize the dataset date / times to monthly. + d.) Spatially regrid each dataset. + 5. Calculate the mean annual value for each dataset. + 6. Separate each dataset into 13 subregions. + 7. Extract the metrics used for the evaluation and evaluate + against a reference dataset. + 8. Create a portrait diagram of the results of the evaluation. + + OCW modules demonstrated: + + 1. datasource/local + 2. datasource/rcmed + 3. dataset + 4. dataset_processor + 5. metrics + 6. evaluation + 7. 
plotter + 8. utils + +""" + from os import path import urllib import ssl diff --git a/examples/subregions_rectangular_boundaries.py b/examples/subregions_rectangular_boundaries.py index 37eb884b..cf396bce 100644 --- a/examples/subregions_rectangular_boundaries.py +++ b/examples/subregions_rectangular_boundaries.py @@ -15,6 +15,23 @@ # specific language governing permissions and limitations # under the License. +""" + subregions_rectangular_boundaries.py + + Use OCW to define a set a sub regions and draw the sub regions. + + In this example: + + 1. Subset the data set (lat / lon / start date / end date). + 2. Draw each sub region. + + OCW modules demonstrated: + + 1. dataset (Bounds) + 2. plotter + +""" + import datetime import numpy as np diff --git a/examples/subset_TRMM_data_for_NCA_regions.py b/examples/subset_TRMM_data_for_NCA_regions.py index 90b752be..4ae4300d 100644 --- a/examples/subset_TRMM_data_for_NCA_regions.py +++ b/examples/subset_TRMM_data_for_NCA_regions.py @@ -15,9 +15,32 @@ # specific language governing permissions and limitations # under the License. +""" + subset_TRMM_data_for_NCA_regions.py + + Use OCW to subset TRMM data (https://pmm.nasa.gov/trmm) for NCA regions and draw + a contour map for the U.S. (CA', 'NV', 'UT', 'AZ', 'NM', 'CO'), Cuba and + the Bahamas (https://scenarios.globalchange.gov/regions_nca4). + + In this example: + + 1. Interface with the Regional Climate Model Evaluation Database (https://rcmes.jpl.nasa.gov/) + to load the TRMM dataset. + 2. Define the bounds for the U.S. (CA', 'NV', 'UT', 'AZ', 'NM', 'CO'), Cuba and the Bahamas and + the start date / end date. + 3. Create a contour map of the TRMM data for the U.S., Cuba, and Bahamas. + + OCW modules demonstrated: + + 1. datasource/rcmed + 2. dataset (Bounds) + 3. dataset_processor + 4. 
plotter + +""" + # Apache OCW lib immports import ocw.dataset_processor as dsp -import ocw.utils as utils from ocw.dataset import Bounds import ocw.data_source.rcmed as rcmed import ocw.plotter as plotter diff --git a/examples/taylor_diagram_example.py b/examples/taylor_diagram_example.py index 8d5bbf0d..8f683c10 100644 --- a/examples/taylor_diagram_example.py +++ b/examples/taylor_diagram_example.py @@ -15,6 +15,37 @@ # specific language governing permissions and limitations # under the License. +""" + taylor_diagram_example.py + + Use OCW to download, normalize and evaluate two datasets + drawing a Taylor diagram of the results of the evaluation. + + In this example: + + 1. Download two netCDF files from a local site. + AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_tasmax.nc + AFRICA_UC-WRF311_CTL_ERAINT_MM_50km-rg_1989-2008_tasmax.nc + 2. Load the local files into OCW dataset objects. + 3. Process each dataset to the same same shape. + a.) Restrict the datasets re: geographic and time boundaries. + b.) Temporally rebin the data (monthly). + c.) Spatially regrid each dataset. + 4. Extract the metrics used for the evaluation and evaluate + against a reference dataset. + 5. Draw evaluation results Taylor diagram. + + OCW modules demonstrated: + + 1. datasource/local + 2. dataset + 3. dataset_processor + 4. evaluation + 5. metrics + 6. plotter + +""" + import datetime import sys from os import path diff --git a/examples/temperature_trends_over_CONUS.py b/examples/temperature_trends_over_CONUS.py index 54f58432..028c8a26 100644 --- a/examples/temperature_trends_over_CONUS.py +++ b/examples/temperature_trends_over_CONUS.py @@ -15,6 +15,31 @@ # specific language governing permissions and limitations # under the License. +""" + temperature_trends_over_CONUS.py + + Use OCW to plot the temperature trends over CONUS using the nClimDiv reference data. + + In this example: + + 1. Load the local file nClimDiv/nClimDiv_tave_1895-2005.nc into OCW Dataset Objects. 
+ *** Note *** It is assume this file exists locally in a subdirectory nClimDiv located + in the same directory as the example. + 2. Load the CMIP5 simulations into a list of OCW Dataset Objects. + 3. Spatially subset the observed dataset into state and regional boundaries. + 4. Temporally subset the observed and model datasets. + 5. Calculate and plot the temperature trend for each region. + + OCW modules demonstrated: + + 1. datasource/local + 2. dataset + 3. dataset_processor + 4. plotter + 5. utlis + +""" + import numpy as np import datetime diff --git a/examples/time_series_with_regions.py b/examples/time_series_with_regions.py index 0799cf0c..d92599b6 100644 --- a/examples/time_series_with_regions.py +++ b/examples/time_series_with_regions.py @@ -15,10 +15,38 @@ # specific language governing permissions and limitations # under the License. -''' - Download three netCDF files, process the files to be the same shape, - divide the data into subregions and plot a monthly time series for each sub region. -''' +""" + time_series_with_regions.py + + Use OCW to download and plot (time series) three local datasets against a reference dataset. + + In this example: + + 1. Download three netCDF files from a local site. + AFRICA_KNMI-RACMO2.2b_CTL_ERAINT_MM_50km_1989-2008_pr.nc + AFRICA_ICTP-REGCM3_CTL_ERAINT_MM_50km-rg_1989-2008_pr.nc + AFRICA_UCT-PRECIS_CTL_ERAINT_MM_50km_1989-2008_pr.nc + 2. Load the local files into OCW dataset objects. + 3. Interface with the Regional Climate Model Evalutaion Database (https://rcmes.jpl.nasa.gov/) + to load the CRU3.1 Daily Precipitation dataset (https://rcmes.jpl.nasa.gov/content/cru31). + 4. Process each dataset to the same same shape. + a.) Restrict the datasets re: geographic and time boundaries. + b.) Convert the dataset water flux to common units. + c.) Normalize the dataset date / times to monthly. + d.) Spatially regrid each dataset. + 5. Calculate the mean monthly value for each dataset. + 6. 
Separate each dataset into 13 subregions. + 7. Create a time series for each dataset in each subregion. + + OCW modules demonstrated: + + 1. datasource/local + 2. datasource/rcmed + 3. dataset + 4. dataset_processor + 5. plotter + +""" import sys import datetime @@ -118,8 +146,8 @@ CRU31 = dsp.spatial_regrid(CRU31, new_lats, new_lons) for member, each_target_dataset in enumerate(target_datasets): - target_datasets[member] = dsp.spatial_regrid( - target_datasets[member], new_lats, new_lons) + target_datasets[member] =\ + dsp.spatial_regrid(target_datasets[member], new_lats, new_lons) # Find climatology monthly for obs and models. CRU31.values, CRU31.times = utils.calc_climatology_monthly(CRU31) @@ -127,11 +155,12 @@ # the xticks elegantly when the first date is the epoch and tries to determine # the start of the xticks based on a value < 1. for index, item in enumerate(CRU31.times): - CRU31.times[index] = datetime.date(item.year, item.month, monthrange(item.year, item.month)[1]) + CRU31.times[index] = \ + datetime.date(item.year, item.month, monthrange(item.year, item.month)[1]) for member, each_target_dataset in enumerate(target_datasets): - target_datasets[member].values, target_datasets[ - member].times = utils.calc_climatology_monthly(target_datasets[member]) + target_datasets[member].values, target_datasets[member].times = \ + utils.calc_climatology_monthly(target_datasets[member]) # make the model ensemble target_datasets_ensemble = dsp.ensemble(target_datasets) From 41412caea5c9330ccbec28f4130ced89361789f8 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Mon, 27 Nov 2017 19:21:14 -0500 Subject: [PATCH 04/19] CLIMATE-797 Fix attribute error when dataset has no mask attribute --- ocw/dataset_processor.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/ocw/dataset_processor.py b/ocw/dataset_processor.py index 160ffb72..cf2e90e2 100755 --- a/ocw/dataset_processor.py +++ b/ocw/dataset_processor.py @@ -867,9 +867,13 @@ def 
mask_missing_data(dataset_array): mask_array = np.zeros(dataset_array[0].values.shape) for dataset in dataset_array: - index = np.where(dataset.values.mask == True) - if index[0].size > 0: - mask_array[index] = 1 + # CLIMATE-797 - Not every array passed in will be a masked array. + # For those that are, action based on the mask passed in. + # For those that are not, take no action (else AttributeError). + if hasattr(dataset.values, 'mask'): + index = np.where(dataset.values.mask == True) + if index[0].size > 0: + mask_array[index] = 1 masked_array = [] for dataset in dataset_array: dataset.values = ma.array(dataset.values, mask=mask_array) From 21ec176dfa5f3320a465ad67af4af43b60a4fc85 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Fri, 1 Dec 2017 05:22:15 -0500 Subject: [PATCH 05/19] CLIMATE-936 - Added instructions on where to find input files for the example. --- examples/temperature_trends_over_CONUS.py | 46 ++++++++++++----------- 1 file changed, 25 insertions(+), 21 deletions(-) diff --git a/examples/temperature_trends_over_CONUS.py b/examples/temperature_trends_over_CONUS.py index 028c8a26..8510294d 100644 --- a/examples/temperature_trends_over_CONUS.py +++ b/examples/temperature_trends_over_CONUS.py @@ -24,6 +24,10 @@ 1. Load the local file nClimDiv/nClimDiv_tave_1895-2005.nc into OCW Dataset Objects. *** Note *** It is assume this file exists locally in a subdirectory nClimDiv located + *** Note *** The files can be downloaded from : + https://rcmes.jpl.nasa.gov/RCMES_Turtorial_data/NCA-CMIP_examples.tar.gz + *** Note *** Additional information about the file content can be found here: + https://rcmes.jpl.nasa.gov/content/nca-cmip-analysis-using-rcmes in the same directory as the example. 2. Load the CMIP5 simulations into a list of OCW Dataset Objects. 3. Spatially subset the observed dataset into state and regional boundaries. 
@@ -51,8 +55,8 @@ import ocw.plotter as plotter import ocw.utils as utils -# nClimDiv observation file -file_obs = 'nClimDiv/nClimDiv_tave_1895-2005.nc' +# nClimGrid observation file +file_obs = 'nClimGrid/nClimGrid_tave_1895-2005.nc' # CMIP5 simulations model_file_path = 'CMIP5_historical' @@ -65,7 +69,7 @@ start_date = datetime.datetime(1979, 12, 1) end_date = datetime.datetime(2005, 8, 31) -nyear = 26 +nyear = 26 month_start = 6 # June month_end = 8 # August @@ -85,39 +89,39 @@ n_region = 7 # number of regions # CONUS regional boundaries -NW_bounds = Bounds(boundary_type='us_states', +NW_bounds = Bounds(boundary_type='us_states', us_states=regions[0]) -SW_bounds = Bounds(boundary_type='us_states', +SW_bounds = Bounds(boundary_type='us_states', us_states=regions[1]) -NGP_bounds = Bounds(boundary_type='us_states', +NGP_bounds = Bounds(boundary_type='us_states', us_states=regions[2]) -SGP_bounds = Bounds(boundary_type='us_states', +SGP_bounds = Bounds(boundary_type='us_states', us_states=regions[3]) -MW_bounds = Bounds(boundary_type='us_states', +MW_bounds = Bounds(boundary_type='us_states', us_states=regions[4]) -NE_bounds = Bounds(boundary_type='us_states', +NE_bounds = Bounds(boundary_type='us_states', us_states=regions[5]) -SE_bounds = Bounds(boundary_type='us_states', +SE_bounds = Bounds(boundary_type='us_states', us_states=regions[6]) regional_bounds = [NW_bounds, SW_bounds, NGP_bounds, SGP_bounds, MW_bounds, NE_bounds, SE_bounds] -""" Load nClimDiv file into OCW Dataset """ -obs_dataset = local.load_file(file_obs, variable_name='tave') +""" Load nClimGrid file into OCW Dataset """ +obs_dataset = local.load_file(file_obs, variable_name='tave') """ Load CMIP5 simulations into a list of OCW Datasets""" model_dataset = local.load_multiple_files(file_path=model_file_path, variable_name='tas', - dataset_name=dataset_name, variable_unit='K') + dataset_name=dataset_name, variable_unit='K') """ Temporal subset of obs_dataset """ -obs_dataset_subset = 
dsp.temporal_slice(obs_dataset, +obs_dataset_subset = dsp.temporal_slice(obs_dataset, start_time=start_date, end_time=end_date) obs_dataset_season = dsp.temporal_subset(obs_dataset_subset, month_start, month_end, average_each_year=True) """ Temporal subset of model_dataset """ -model_dataset_subset = [dsp.temporal_slice(dataset,start_time=start_date, end_time=end_date) +model_dataset_subset = [dsp.temporal_slice(dataset,start_time=start_date, end_time=end_date) for dataset in model_dataset] model_dataset_season = [dsp.temporal_subset(dataset, month_start, month_end, average_each_year=True) for dataset in model_dataset_subset] @@ -129,7 +133,7 @@ for iregion in np.arange(n_region): obs_timeseries[:, iregion] = utils.calc_time_series( - dsp.subset(obs_dataset_season, regional_bounds[iregion])) + dsp.subset(obs_dataset_season, regional_bounds[iregion])) for imodel in np.arange(nmodel): model_timeseries[imodel, :, iregion] = utils.calc_time_series( dsp.subset(model_dataset_season[imodel], regional_bounds[iregion])) @@ -150,20 +154,20 @@ regional_trends_model[imodel, iregion], regional_trends_model_error[iregion] = utils.calculate_temporal_trend_of_time_series( year, model_timeseries[imodel, :, iregion]) regional_trends_ens[iregion], regional_trends_ens_error[iregion] = utils.calculate_ensemble_temporal_trends( - model_timeseries[:, :, iregion]) + model_timeseries[:, :, iregion]) """ Generate plots """ -plotter.fill_US_states_with_color(regions, 'nClimDiv_tave_trends_JJA_1980-2005', +plotter.fill_US_states_with_color(regions, 'nClimGrid_tave_trends_JJA_1980-2005', values=regional_trends_obs, region_names=['%.3f' %(10*i) for i in regional_trends_obs]) -plotter.fill_US_states_with_color(regions, 'CMIP5_ENS_tave_trends_JJA_1980-2005', +plotter.fill_US_states_with_color(regions, 'CMIP5_ENS_tave_trends_JJA_1980-2005', values=regional_trends_ens, region_names=['%.3f' %(10*i) for i in regional_trends_ens]) bias_ens = regional_trends_ens - regional_trends_obs 
-plotter.fill_US_states_with_color(regions, 'CMIP5_ENS_tave_trends_bias_from_nClimDiv_JJA_1980-2005', +plotter.fill_US_states_with_color(regions, 'CMIP5_ENS_tave_trends_bias_from_nClimGrid_JJA_1980-2005', values=bias_ens, region_names=['%.3f' %(10*i) for i in bias_ens]) @@ -171,7 +175,7 @@ ens_data = np.vstack([regional_trends_ens, regional_trends_ens_error]) plotter.draw_plot_to_compare_trends(obs_data, ens_data, regional_trends_model, - fname='Trends_comparison_btn_CMIP5_and_nClimDiv', + fname='Trends_comparison_btn_CMIP5_and_nClimGrid', data_labels=['NW','SW','NGP','SGP','MW','NE','SE'], xlabel='NCA regions', ylabel='tas trend [K/year]') From e8a2bdc04153597d490b19a4ccdca47b2dccfabd Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Wed, 3 Jan 2018 20:32:11 -0500 Subject: [PATCH 06/19] CLIMATE-940 File List Does Not Display On File Selection Due To Property Of Undefined Error --- .../directives/predictivefilebrowserinput.js | 25 ++++++++++++------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/ocw-ui/frontend/app/scripts/directives/predictivefilebrowserinput.js b/ocw-ui/frontend/app/scripts/directives/predictivefilebrowserinput.js index 7142c15f..6fe21183 100644 --- a/ocw-ui/frontend/app/scripts/directives/predictivefilebrowserinput.js +++ b/ocw-ui/frontend/app/scripts/directives/predictivefilebrowserinput.js @@ -29,9 +29,16 @@ angular.module('ocwUiApp') .directive('predictiveFileBrowserInput', function() { var link = function($scope, $elem, $attrs) { $scope.autocomplete = []; - + // Set id to use this directive correctly in multiple places - $scope.id = 'autoCompletePath'+ $elem.context.id + /* + This had been written as $elem.context.id, but $elem is an object (jQuery.fn.init) + and the object did not have a context or id attribute. This was + throwing an error to the console and the list of files was not being displayed. + Replaced with $attrs.id. 
+ */ + $scope.id = 'autoCompletePath' + $attrs.id; + /* * We need a place to dump our auto-completion options */ @@ -43,8 +50,8 @@ angular.module('ocwUiApp') var val = $(e.target).text(); $($elem).val(val); // Need to trigger the input box's "input" event so Angular updates the model! - $elem.trigger('input'); - + $elem.trigger('input'); + // If the user selected a directory, find more results.. if (val[val.length - 1] == '/') { $scope.fetchFiles($($elem).val()); @@ -152,7 +159,7 @@ angular.module('ocwUiApp') $scope.possibleCompletes = $scope.autocomplete; }; - /* + /* * Handle presses. * * Attempt to auto-complete options when the user presses . @@ -220,7 +227,7 @@ angular.module('ocwUiApp') } }; - /* + /* * Handle all other key presses in the input box * * Filter the auto-complete options as the user types to ensure that only options @@ -236,7 +243,7 @@ angular.module('ocwUiApp') $scope.updateAutoComplete(); }; - /* + /* * When a path is auto-completed with we need to check to see if it points * to a directory. If it does, we still need to fetch results! */ @@ -247,7 +254,7 @@ angular.module('ocwUiApp') } }; - /* + /* * Calculate the greatest common prefix of the passed options. * * Params: @@ -275,7 +282,7 @@ angular.module('ocwUiApp') return longestString.slice(0, index - 1); }; - /* + /* * Filter the auto-complete options based on the current input. */ $scope.filterResults = function() { From 37d1f30f6644f1b23654b452030d2200b2909e66 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Wed, 3 Jan 2018 20:56:57 -0500 Subject: [PATCH 07/19] CLIMATE-762 --- ocw-ui/backend/run_webservices.py | 37 +++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/ocw-ui/backend/run_webservices.py b/ocw-ui/backend/run_webservices.py index dae25677..d3297364 100644 --- a/ocw-ui/backend/run_webservices.py +++ b/ocw-ui/backend/run_webservices.py @@ -16,11 +16,44 @@ # ''' OCW UI Backend web services initialization. 
''' +import os +import sys + from bottle import Bottle, response, static_file -from local_file_metadata_extractors import lfme_app + from directory_helpers import dir_app -from rcmed_helpers import rcmed_app +from local_file_metadata_extractors import lfme_app from processing import processing_app +from rcmed_helpers import rcmed_app + + +def link_to_frontend(): + """ + The backend expects a link to a directory called frontend at the same level. + Attempt to create one if it does not exist. + """ + backend_path = os.path.dirname(os.path.realpath(__file__)) + link_path = backend_path + '/frontend' + frontend_path = backend_path.replace('backend', 'frontend') + + if not os.path.isdir(link_path): + print("Expected to find a linked directory to the frontend.") + print("Checking default location %s." % frontend_path) + if os.path.isdir(frontend_path): + print("Attempting to create linked directory to %s." % frontend_path) + os.symlink(frontend_path, link_path) + else: + print("Frontend directory not found in %s." % frontend_path) + print("Either install the frontend to the default directory " + "or manually create a link in the backend directory called " + "'frontend' to the directory where the front end is installed.") + return 1 + + return 0 + + +if link_to_frontend(): + sys.exit(1) app = Bottle() app.mount('/lfme/', lfme_app) From 96ac07df4cec047adccf7a31e0fce82b634ce7e1 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Thu, 4 Jan 2018 18:22:02 -0500 Subject: [PATCH 08/19] CLIMATE-230 Line colors in time series cycles through too few colors --- ocw/plotter.py | 36 +++++++++++++++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/ocw/plotter.py b/ocw/plotter.py index 7f9b0920..f0af03fe 100755 --- a/ocw/plotter.py +++ b/ocw/plotter.py @@ -367,9 +367,24 @@ def draw_subregions(subregions, lats, lons, fname, fmt='png', ptitle='', fig.clf() +def _get_colors(num_colors): + """ + matplotlib will recycle colors after a certain number. 
This can make + line type charts confusing as colors will be reused. This function + provides a distribution of colors across the default color map + to better approximate uniqueness. + + :param num_colors: The number of unique colors to generate. + :return: A color map with num_colors. + """ + cmap = plt.get_cmap() + return [cmap(1. * i / num_colors) for i in range(num_colors)] + + def draw_time_series(results, times, labels, fname, fmt='png', gridshape=(1, 1), xlabel='', ylabel='', ptitle='', subtitles=None, - label_month=False, yscale='linear', aspect=None): + label_month=False, yscale='linear', aspect=None, + cycle_colors=True, cmap=None): ''' Draw a time series plot. :param results: 3D array of time series data. @@ -415,7 +430,22 @@ def draw_time_series(results, times, labels, fname, fmt='png', gridshape=(1, 1), :param aspect: (Optional) approximate aspect ratio of each subplot (width / height). Default is 8.5 / 5.5 :type aspect: :class:`float` + + :param cycle_colors: (Optional) flag to toggle whether to allow matlibplot + to re-use colors when plotting or force an evenly distributed range. + :type cycle_colors: :class:`bool` + + :param cmap: (Optional) string or :class:`matplotlib.colors.LinearSegmentedColormap` + instance denoting the colormap. This must be able to be recognized by + `Matplotlib's get_cmap function `_. + Maps like rainbow and spectral with wide spectrum of colors are nice choices when used with + the cycle_colors option. tab20, tab20b, and tab20c are good if the plot has less than 20 datasets. + :type cmap: :mod:`string` or :class:`matplotlib.colors.LinearSegmentedColormap` + ''' + if cmap is not None: + set_cmap(cmap) + # Handle the single plot case. 
if results.ndim == 2: results = results.reshape(1, *results.shape) @@ -448,6 +478,10 @@ def draw_time_series(results, times, labels, fname, fmt='png', gridshape=(1, 1), # Make the plots for i, ax in enumerate(grid): data = results[i] + + if not cycle_colors: + ax.set_prop_cycle('color', _get_colors(data.shape[0])) + if label_month: xfmt = mpl.dates.DateFormatter('%b') xloc = mpl.dates.MonthLocator() From 5220300be17b5a78139c6aba6eb4176bf6a6738f Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Thu, 4 Jan 2018 20:55:32 -0500 Subject: [PATCH 09/19] CLIMATE-941 Value Error When Bounds Object Is Created Without Named Inputs --- ocw-ui/backend/processing.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ocw-ui/backend/processing.py b/ocw-ui/backend/processing.py index 13ebe550..07375d82 100644 --- a/ocw-ui/backend/processing.py +++ b/ocw-ui/backend/processing.py @@ -203,12 +203,12 @@ def run_evaluation(): day_offset = end.day - 1 end -= timedelta(days=day_offset) - subset = Bounds(eval_bounds['lat_min'], - eval_bounds['lat_max'], - eval_bounds['lon_min'], - eval_bounds['lon_max'], - start, - end) + subset = Bounds(lat_min=eval_bounds['lat_min'], + lat_max=eval_bounds['lat_max'], + lon_min=eval_bounds['lon_min'], + lon_max=eval_bounds['lon_max'], + start=start, + end=end) ref_dataset = dsp.safe_subset(ref_dataset, subset) target_datasets = [dsp.safe_subset(ds, subset) From bc1cd24fe933f9def21228d8b3f4fbc3a8376b6a Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Thu, 4 Jan 2018 21:44:20 -0500 Subject: [PATCH 10/19] CLIMATE-917 Bocumentation build error with Python3 --- docs/source/conf.py | 4 ++++ ocw-ui/backend/rcmed_helpers.py | 4 ++-- ocw/dataset.py | 3 ++- ocw/dataset_loader.py | 17 ++++++++--------- ocw/utils.py | 16 ++++++++-------- 5 files changed, 24 insertions(+), 20 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index f9eaafcb..b0b511c7 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ 
-33,11 +33,15 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. + +# Note that 'sphinxcontrib.autohttp.bottle' is currently broken in Sphinx > 1.56 +# Remove from the extension list if documentation fails on Sphinx hard failure. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.ifconfig', 'sphinxcontrib.httpdomain', + 'sphinxcontrib.autohttp.bottle' ] # Add any paths that contain templates here, relative to this directory. diff --git a/ocw-ui/backend/rcmed_helpers.py b/ocw-ui/backend/rcmed_helpers.py index 32e1af8a..0c584aa6 100644 --- a/ocw-ui/backend/rcmed_helpers.py +++ b/ocw-ui/backend/rcmed_helpers.py @@ -55,7 +55,7 @@ def get_dataset_parameters(): **Example Call Format** - .. sourcecode:: javascript + .. sourcecode:: text /parameters/?dataset= @@ -121,7 +121,7 @@ def get_parameters_bounds(): **Example Call Format** - .. sourcecode:: javascript + .. sourcecode:: text /parameters/bounds/ diff --git a/ocw/dataset.py b/ocw/dataset.py index 196913a1..0a0e1a6e 100644 --- a/ocw/dataset.py +++ b/ocw/dataset.py @@ -117,6 +117,7 @@ def spatial_resolution(self): If self.lats and self.lons are from curvilinear coordinates, the output resolutions are approximate values. + :returns: The Dataset's latitudinal and longitudinal spatial resolution as a tuple of the form (lat_resolution, lon_resolution). :rtype: (:class:`float`, :class:`float`) @@ -264,7 +265,7 @@ def __init__(self, boundary_type='rectangular', start=None, end=None): '''Default Bounds constructor :param boundary_type: The type of spatial subset boundary. - :type boundary_type: :mod:`string' + :type boundary_type: :mod:`string` :param lat_min: The minimum latitude bound. 
diff --git a/ocw/dataset_loader.py b/ocw/dataset_loader.py index 4b2a9257..f84bdc40 100644 --- a/ocw/dataset_loader.py +++ b/ocw/dataset_loader.py @@ -33,33 +33,31 @@ def __init__(self, *loader_opts): Each keyword argument can be information for a dataset in dictionary form. For example: - `` + >>> loader_opt1 = {'loader_name': 'rcmed', 'name': 'cru', 'dataset_id': 10, 'parameter_id': 34} >>> loader_opt2 = {'path': './data/TRMM_v7_3B43_1980-2010.nc, 'variable': 'pcp'} >>> loader = DatasetLoader(loader_opt1, loader_opt2) - `` Or more conveniently if the loader configuration is defined in a yaml file named config_file (see RCMES examples): - `` + >>> import yaml >>> config = yaml.load(open(config_file)) >>> obs_loader_config = config['datasets']['reference'] >>> loader = DatasetLoader(*obs_loader_config) - `` As shown in the first example, the dictionary for each argument should contain a loader name and parameters specific to the particular loader. Once the configuration is entered, the datasets may be loaded using: - `` + >>> loader.load_datasets() >>> obs_datasets = loader.datasets - `` Additionally, each dataset must have a ``loader_name`` keyword. This may be one of the following: + * ``'local'`` - One or multiple dataset files in a local directory * ``'local_split'`` - A single dataset split accross multiple files in a local directory @@ -74,6 +72,7 @@ def __init__(self, *loader_opts): Users who wish to load datasets from loaders not described above may define their own custom dataset loader function and incorporate it as follows: + >>> loader.add_source_loader('my_loader_name', my_loader_func) :param loader_opts: Dictionaries containing the each dataset loader @@ -84,7 +83,7 @@ def __init__(self, *loader_opts): :type loader_opts: :class:`dict` :raises KeyError: If an invalid argument is passed to a data source - loader function. + loader function. 
''' # dataset loader config self.set_loader_opts(*loader_opts) @@ -115,8 +114,8 @@ def add_source_loader(self, loader_name, loader_func): :type loader_name: :mod:`string` :param loader_func: Reference to a custom defined function. This should - return an OCW Dataset object, and have an origin which satisfies - origin['source'] == loader_name. + return an OCW Dataset object, and have an origin which satisfies + origin['source'] == loader_name. :type loader_func: :class:`callable` ''' self._source_loaders[loader_name] = loader_func diff --git a/ocw/utils.py b/ocw/utils.py index c2b62cfe..8f2c8c14 100755 --- a/ocw/utils.py +++ b/ocw/utils.py @@ -402,7 +402,7 @@ def trim_dataset(dataset): ''' Trim datasets such that first and last year of data have all 12 months :param dataset: Dataset object - :type dataset: :class:`dataset.Dataset + :type dataset: :class:`dataset.Dataset` :returns: Slice index for trimmed dataset ''' @@ -653,7 +653,7 @@ def _force_unicode(s, encoding='utf-8'): def calculate_temporal_trends(dataset): ''' Calculate temporal trends in dataset.values :param dataset: The dataset from which time values should be extracted. - :type dataset: :class:`dataset.Dataset' + :type dataset: :class:`dataset.Dataset` :returns: Arrays of the temporal trend and standard error :rtype: :class:`numpy.ma.core.MaskedArray` @@ -675,13 +675,13 @@ def calculate_temporal_trends(dataset): def calculate_ensemble_temporal_trends(timeseries_array, number_of_samples=1000): ''' Calculate temporal trends in an ensemble of time series :param timeseries_array: Two dimensional array. 1st index: model, 2nd index: time. 
- :type timeseries_array: :class:`numpy.ndarray' + :type timeseries_array: :class:`numpy.ndarray` :param sampling: A list whose elements are one-dimensional numpy arrays - :type timeseries_array: :class:`list' + :type timeseries_array: :class:`list` :returns: temporal trend and estimated error from bootstrapping - :rtype: :float:`float','float' + :rtype: :class:`float`, :class:`float` ''' nmodels, nt = timeseries_array.shape @@ -701,13 +701,13 @@ def calculate_ensemble_temporal_trends(timeseries_array, number_of_samples=1000) def calculate_temporal_trend_of_time_series(x,y): ''' Calculate least-square trends (a) in y = ax+b and a's standard error :param x: time series - :type x: :class:`numpy.ndarray' + :type x: :class:`numpy.ndarray` :param x: time series - :type x: :class:`numpy.ndarray' + :type x: :class:`numpy.ndarray` :returns: temporal trend and standard error - :rtype: :float:`float','float' + :rtype: :class:`float`, :class:`float` ''' slope, intercept, r_value, p_value, std_err = stats.linregress(x,y) return slope, std_err From 6b30af86c198381bb8b421adef7c5866fd56dfd2 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Thu, 4 Jan 2018 21:52:36 -0500 Subject: [PATCH 11/19] CLIMATE-625 Update draw_histogram and draw_marker_on_map documentation --- ocw/plotter.py | 45 +++++++++++++++++++++++++++++++-------------- 1 file changed, 31 insertions(+), 14 deletions(-) diff --git a/ocw/plotter.py b/ocw/plotter.py index 7f9b0920..960dae15 100755 --- a/ocw/plotter.py +++ b/ocw/plotter.py @@ -565,14 +565,23 @@ def draw_barchart(results, yvalues, fname, ptitle='', fmt='png', def draw_marker_on_map(lat, lon, fname, fmt='png', location_name=' ', gridshape=(1, 1)): - ''' - Purpose:: - Draw a marker on a map + '''Draw a marker on a map. + + :param lat: Latitude for plotting a marker. + :type lat: :class:`float` + + :param lon: Longitude for plotting a marker. + :type lon: :class:`float` + + :param fname: The filename of the plot. 
+ :type fname: :class:`string` + + :param fmt: (Optional) Filetype for the output. + :type fmt: :class:`string` + + :param location_name: (Optional) A label for the map marker. + :type location_name: :class:`string` - Input:: - lat - latitude for plotting a marker - lon - longitude for plotting a marker - fname - a string specifying the filename of the plot ''' fig = plt.figure() fig.dpi = 300 @@ -1063,14 +1072,22 @@ def add_contours(self, std1, corr1, std2, corr2, **kwargs): def draw_histogram(dataset_array, data_names, fname, fmt='png', nbins=10): ''' - Purpose:: - Draw histograms + Purpose:: Draw a histogram for the input dataset. - Input:: - dataset_array - a list of data values [data1, data2, ....] - data_names - a list of data names ['name1','name2',....] - fname - a string specifying the filename of the plot - bins - number of bins + :param dataset_array: A list of data values [data1, data2, ....]. + :type dataset_array: :class:`list` of :class:`float` + + :param data_names: A list of data names ['name1','name2',....]. + :type data_names: :class:`list` of :class:`string` + + :param fname: The filename of the plot. + :type fname: :class:`string` + + :param fmt: (Optional) Filetype for the output. + :type fmt: :class:`string` + + :param bins: (Optional) Number of bins. 
+ :type bins: :class:`integer` ''' fig = plt.figure() fig.dpi = 300 From 18e3a1635b364ac0336c529400bf073338ecaf56 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Fri, 5 Jan 2018 05:42:02 -0500 Subject: [PATCH 12/19] CLIMATE-366 Add full temporal rebin option to UI --- ocw-ui/frontend/app/scripts/services/evaluationsettings.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocw-ui/frontend/app/scripts/services/evaluationsettings.js b/ocw-ui/frontend/app/scripts/services/evaluationsettings.js index b53842cc..7d40a876 100644 --- a/ocw-ui/frontend/app/scripts/services/evaluationsettings.js +++ b/ocw-ui/frontend/app/scripts/services/evaluationsettings.js @@ -42,7 +42,7 @@ angular.module('ocwUiApp') var settings = { 'metrics': [], 'temporal': { - 'options': ['daily', 'monthly', 'yearly'], + 'options': ['daily', 'monthly', 'annual', 'full'], 'selected': 'yearly', }, 'spatialSelect': null, From 410d8dc566a5cf3dd263694b4a585d22d1128e4d Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Fri, 5 Jan 2018 13:24:43 -0500 Subject: [PATCH 13/19] CLIMATE-388 Make UI backend parameters endpoint more RESTful --- ocw-ui/backend/rcmed_helpers.py | 71 +++++++++---------- .../app/scripts/controllers/rcmedselection.js | 4 +- .../test/spec/controllers/rcmedselection.js | 8 +-- 3 files changed, 41 insertions(+), 42 deletions(-) diff --git a/ocw-ui/backend/rcmed_helpers.py b/ocw-ui/backend/rcmed_helpers.py index 32e1af8a..3c66007c 100644 --- a/ocw-ui/backend/rcmed_helpers.py +++ b/ocw-ui/backend/rcmed_helpers.py @@ -15,19 +15,19 @@ # limitations under the License. # -''' Services for interacting with NASA JPL's Regional Climate Model Evaluation Database. ''' - -import ocw.data_source.rcmed as rcmed +""" Services for interacting with NASA JPL's Regional Climate Model Evaluation Database. 
""" +import requests from bottle import Bottle, request, response -import requests +import ocw.data_source.rcmed as rcmed rcmed_app = Bottle() -@rcmed_app.route('/datasets/') + +@rcmed_app.get('/datasets/') def get_observation_dataset_data(): - ''' Return a list of dataset information from JPL's RCMED. + """ Return a list of dataset information from JPL's RCMED. **Example Return JSON Format** @@ -42,20 +42,21 @@ def get_observation_dataset_data(): }, ... ] - ''' - r = requests.get('http://rcmes.jpl.nasa.gov/query-api/datasets.php') + """ + result = requests.get('http://rcmes.jpl.nasa.gov/query-api/datasets.php') - if (request.query.callback): - return "%s(%s)" % (request.query.callback, r.text) - return r.text + if hasattr(request.query, 'callback'): + return "%s(%s)" % (request.query.callback, result.text) + return result.text -@rcmed_app.route('/parameters/') -def get_dataset_parameters(): - ''' Return dataset specific parameter information from JPL's RCMED. + +@rcmed_app.get('/parameters/dataset/') +def get_dataset_parameters(name): + """ Return dataset specific parameter information from JPL's RCMED. **Example Call Format** - .. sourcecode:: javascript + .. sourcecode:: text /parameters/?dataset= @@ -73,17 +74,18 @@ def get_dataset_parameters(): } ] - ''' - url = 'http://rcmes.jpl.nasa.gov/query-api/parameters.php?dataset=' + request.query.dataset - r = requests.get(url) + """ + + url = 'http://rcmes.jpl.nasa.gov/query-api/parameters.php?dataset=' + name + result = requests.get(url) - if (request.query.callback): - return "%s(%s)" % (request.query.callback, r.text) - return r.text + if hasattr(request.query, 'callback'): + return "%s(%s)" % (request.query.callback, result.text) + return result.text def extract_bounds(parameter): - ''' This will take a parameter dictionary and return the spatial and temporal bounds. + """ This will take a parameter dictionary and return the spatial and temporal bounds. 
:param parameter: Single parameter that is returned from rcmed.get_parameters_metadata(). :type parameter: dictionary: @@ -97,11 +99,9 @@ def extract_bounds(parameter): "lon_max": 179.75, "lon_min": -179.75 } - ''' - bounds_data = {} - bounds_data['start_date'] = str(parameter['start_date']) - bounds_data['end_date'] = str(parameter['end_date']) - spatial_bounds = parameter['bounding_box'].replace('(','').replace(')','') + """ + bounds_data = {'start_date': str(parameter['start_date']), 'end_date': str(parameter['end_date'])} + spatial_bounds = parameter['bounding_box'].replace('(', '').replace(')', '') spatial_bounds = spatial_bounds.split(',') # spatial_bounds is in format: # [, , , , , , , ] @@ -110,18 +110,18 @@ def extract_bounds(parameter): bounds_data['lat_min'] = float(spatial_bounds[2]) bounds_data['lon_max'] = float(spatial_bounds[1]) bounds_data['lon_min'] = float(spatial_bounds[5]) - param_id =str(parameter['parameter_id']) + param_id = str(parameter['parameter_id']) return param_id, bounds_data -@rcmed_app.route('/parameters/bounds/') -@rcmed_app.route('/parameters/bounds') +@rcmed_app.get('/parameters/bounds/') +@rcmed_app.get('/parameters/bounds') def get_parameters_bounds(): - ''' Return temporal and spatial bounds metadata for all of JPL's RCMED parameters. + """ Return temporal and spatial bounds metadata for all of JPL's RCMED parameters. **Example Call Format** - .. sourcecode:: javascript + .. sourcecode:: text /parameters/bounds/ @@ -148,19 +148,18 @@ def get_parameters_bounds(): } } - ''' + """ parameter_bounds = {} raw_parameters = rcmed.get_parameters_metadata() for parameter in raw_parameters: - if parameter['bounding_box'] != None: + if parameter['bounding_box'] is not None: param_id, bounds_data = extract_bounds(parameter) parameter_bounds[param_id] = bounds_data - return parameter_bounds @rcmed_app.hook('after_request') def enable_cors(): - ''' Allow Cross-Origin Resource Sharing for all URLs. 
''' + """ Allow Cross-Origin Resource Sharing for all URLs. """ response.headers['Access-Control-Allow-Origin'] = '*' diff --git a/ocw-ui/frontend/app/scripts/controllers/rcmedselection.js b/ocw-ui/frontend/app/scripts/controllers/rcmedselection.js index a3b38017..81d7d213 100644 --- a/ocw-ui/frontend/app/scripts/controllers/rcmedselection.js +++ b/ocw-ui/frontend/app/scripts/controllers/rcmedselection.js @@ -81,9 +81,9 @@ angular.module('ocwUiApp') }; $scope.dataSelectUpdated = function() { - var urlString = $rootScope.baseURL + '/rcmed/parameters/?dataset=' + + var urlString = $rootScope.baseURL + '/rcmed/parameters/dataset/' + $scope.datasetSelection["shortname"] + - "&callback=JSON_CALLBACK"; + "?callback=JSON_CALLBACK"; $http.jsonp(urlString) .success(function(data) { $scope.retrievedObsParams = data; diff --git a/ocw-ui/frontend/test/spec/controllers/rcmedselection.js b/ocw-ui/frontend/test/spec/controllers/rcmedselection.js index 97011c76..ba041aae 100644 --- a/ocw-ui/frontend/test/spec/controllers/rcmedselection.js +++ b/ocw-ui/frontend/test/spec/controllers/rcmedselection.js @@ -84,9 +84,9 @@ describe('Controller: RcmedSelectionCtrl', function () { scope.datasetSelection = {shortname: 'TRMM'} // Test return with only single parameter - $httpBackend.expectJSONP($rootScope.baseURL + '/rcmed/parameters/?dataset=' + + $httpBackend.expectJSONP($rootScope.baseURL + '/rcmed/parameters/dataset/' + scope.datasetSelection['shortname'] + - '&callback=JSON_CALLBACK'). + '?callback=JSON_CALLBACK'). respond(200, ['pcp']); scope.dataSelectUpdated(); $httpBackend.flush(); @@ -94,9 +94,9 @@ describe('Controller: RcmedSelectionCtrl', function () { expect(scope.parameterSelection).toEqual('pcp'); // Test return with multiple parameters - $httpBackend.expectJSONP($rootScope.baseURL + '/rcmed/parameters/?dataset=' + + $httpBackend.expectJSONP($rootScope.baseURL + '/rcmed/parameters/dataset/' + scope.datasetSelection['shortname'] + - '&callback=JSON_CALLBACK'). 
+ '?callback=JSON_CALLBACK'). respond(200, ['pcp', 'pcp2']); scope.dataSelectUpdated(); $httpBackend.flush(); From c7c1aeb5a13d0edfaab43c4a038f34f47baac599 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Sun, 7 Jan 2018 13:57:48 -0500 Subject: [PATCH 14/19] CLIMATE-374 UI runEvaluation doesnt use $window or $location for results transition --- .../scripts/controllers/parameterselect.js | 38 +++++++++---------- 1 file changed, 18 insertions(+), 20 deletions(-) diff --git a/ocw-ui/frontend/app/scripts/controllers/parameterselect.js b/ocw-ui/frontend/app/scripts/controllers/parameterselect.js index cad97e08..565b054c 100644 --- a/ocw-ui/frontend/app/scripts/controllers/parameterselect.js +++ b/ocw-ui/frontend/app/scripts/controllers/parameterselect.js @@ -27,9 +27,9 @@ * Controller of the ocwUiApp */ angular.module('ocwUiApp') -.controller('ParameterSelectCtrl', ['$rootScope', '$scope', '$http', '$timeout', - 'selectedDatasetInformation', 'regionSelectParams', 'evaluationSettings', - function($rootScope, $scope, $http, $timeout, selectedDatasetInformation, regionSelectParams, evaluationSettings) { +.controller('ParameterSelectCtrl', ['$rootScope', '$scope', '$http', '$timeout', '$location', + 'selectedDatasetInformation', 'regionSelectParams', 'evaluationSettings', + function($rootScope, $scope, $http, $timeout, $location, selectedDatasetInformation, regionSelectParams, evaluationSettings) { $scope.datasets = selectedDatasetInformation.getDatasets(); // The min/max lat/lon values from the selected datasets @@ -145,6 +145,8 @@ angular.module('ocwUiApp') data['temporal_resolution'] = 30; } + data['temporal_resolution_type'] = temporal_res; + // Load the Metrics for the evaluation data['metrics'] = [] var metrics = settings.metrics @@ -164,21 +166,17 @@ angular.module('ocwUiApp') data['lon_min'] = $scope.displayParams.lonMin, data['lon_max'] = $scope.displayParams.lonMax, - $http.post($rootScope.baseURL + '/processing/run_evaluation/', data). 
- success(function(data) { - var evalWorkDir = data['eval_work_dir']; + $http.post(`${$rootScope.baseURL}/processing/run_evaluation/`, data). + success((data) => { + const evalWorkDir = data.eval_work_dir; $scope.runningEval = false; - $timeout(function() { - if (evalWorkDir !== undefined) { - window.location = "#/results/" + evalWorkDir; - } else { - window.location = "#/results"; - } + $timeout(() => { + let url = (evalWorkDir) ? `/results/${evalWorkDir}` : '/results'; + $location.url(url) }, 100); - - }).error(function() { + }).error(() => { $scope.runningEval = false; }); }; @@ -192,13 +190,13 @@ angular.module('ocwUiApp') if (parseFloat($scope.displayParams.latMax) > parseFloat($scope.latMax)) $scope.displayParams.latMax = $scope.latMax; - if (parseFloat($scope.displayParams.lonMin) < parseFloat($scope.lonMin)) + if (parseFloat($scope.displayParams.lonMin) < parseFloat($scope.lonMin)) $scope.displayParams.lonMin = $scope.lonMin; - if (parseFloat($scope.displayParams.lonMax) > parseFloat($scope.lonMax)) + if (parseFloat($scope.displayParams.lonMax) > parseFloat($scope.lonMax)) $scope.displayParams.lonMax = $scope.lonMax; - if ($scope.displayParams.start < $scope.start) + if ($scope.displayParams.start < $scope.start) $scope.displayParams.start = $scope.start; if ($scope.displayParams.end > $scope.end) @@ -213,8 +211,8 @@ angular.module('ocwUiApp') $rootScope.$broadcast('redrawOverlays', []); } - $scope.unwatchDatasets = $scope.$watch('datasets', - function() { + $scope.unwatchDatasets = $scope.$watch('datasets', + function() { var numDatasets = $scope.datasets.length; $scope.displayParams.areValid = false; $scope.areInUserRegridState = false; @@ -230,7 +228,7 @@ angular.module('ocwUiApp') // Get the valid lat/lon range in the selected datasets. for (var i = 0; i < numDatasets; i++) { var curDataset = $scope.datasets[i]; - + latMin = (curDataset['latlonVals']['latMin'] > latMin) ? 
curDataset['latlonVals']['latMin'] : latMin; latMax = (curDataset['latlonVals']['latMax'] < latMax) ? curDataset['latlonVals']['latMax'] : latMax; lonMin = (curDataset['latlonVals']['lonMin'] > lonMin) ? curDataset['latlonVals']['lonMin'] : lonMin; From 4a64b62bb73ee4d12448833b67e06ca4becf0528 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Sun, 7 Jan 2018 16:09:59 -0500 Subject: [PATCH 15/19] CLIMATE-374 UI runEvaluation doesnt use $window or $location for results transition --- .../app/scripts/controllers/parameterselect.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ocw-ui/frontend/app/scripts/controllers/parameterselect.js b/ocw-ui/frontend/app/scripts/controllers/parameterselect.js index 565b054c..a2554f4c 100644 --- a/ocw-ui/frontend/app/scripts/controllers/parameterselect.js +++ b/ocw-ui/frontend/app/scripts/controllers/parameterselect.js @@ -166,17 +166,17 @@ angular.module('ocwUiApp') data['lon_min'] = $scope.displayParams.lonMin, data['lon_max'] = $scope.displayParams.lonMax, - $http.post(`${$rootScope.baseURL}/processing/run_evaluation/`, data). - success((data) => { - const evalWorkDir = data.eval_work_dir; + $http.post($rootScope.baseURL + '/processing/run_evaluation/', data). + success(function(data) { + var evalWorkDir = data['eval_work_dir']; $scope.runningEval = false; - $timeout(() => { - let url = (evalWorkDir) ? `/results/${evalWorkDir}` : '/results'; + $timeout(function() { + var url = (evalWorkDir) ? 
'/results/' + evalWorkDir : '/results'; $location.url(url) }, 100); - }).error(() => { + }).error(function() { $scope.runningEval = false; }); }; From 5a632c86e4e10e545940ec4ad420ec232f22a413 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Sun, 7 Jan 2018 18:50:15 -0500 Subject: [PATCH 16/19] CLIMATE-413 Extend browser support for ocw-ui/frontend/config/karma.conf.js --- ocw-ui/frontend/package.json | 22 +++++++++------ ocw-ui/frontend/test/karma.conf.js | 43 +++++++++++++++++++++++++----- 2 files changed, 51 insertions(+), 14 deletions(-) diff --git a/ocw-ui/frontend/package.json b/ocw-ui/frontend/package.json index 8024f17a..b0133c8c 100644 --- a/ocw-ui/frontend/package.json +++ b/ocw-ui/frontend/package.json @@ -2,12 +2,12 @@ "name": "ocwui", "version": "1.2.0", "description": "A tool for the evaluation and analysis of climate models.", - "repository": { - "type" : "git", - "url" : "https://git-wip-us.apache.org/repos/asf/climate.git" + "repository": { + "type": "git", + "url": "https://git-wip-us.apache.org/repos/asf/climate.git" }, "license": "Apache-2.0", - "readme":"README.md", + "readme": "README.md", "dependencies": {}, "devDependencies": { "bower": "^1.3.9", @@ -23,7 +23,7 @@ "grunt-contrib-copy": "^0.5.0", "grunt-contrib-cssmin": "^0.9.0", "grunt-contrib-htmlmin": "^0.3.0", - "grunt-contrib-imagemin": "^0.7.0", + "grunt-contrib-imagemin": "^2.0.1", "grunt-contrib-jshint": "^0.10.0", "grunt-contrib-uglify": "^0.4.0", "grunt-contrib-watch": "^0.6.1", @@ -36,12 +36,18 @@ "grunt-usemin": "^2.1.1", "grunt-wiredep": "^1.8.0", "jshint-stylish": "^0.2.0", - "karma": "^0.12.17", + "karma": "^2.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-detect-browsers": "^2.2.6", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", "karma-jasmine": "^0.1.5", - "karma-phantomjs-launcher": "^0.1.4", + "karma-phantomjs-launcher": "^1.0.4", + "karma-safari-launcher": "^1.0.0", "load-grunt-tasks": "^0.4.0", "time-grunt": "^0.3.1", - "yo": "^1.2.1" + 
"yo": "^1.2.1", + "serve-static": "^1.13.1" }, "engines": { "node": ">=0.10.0" diff --git a/ocw-ui/frontend/test/karma.conf.js b/ocw-ui/frontend/test/karma.conf.js index 744d927b..fa03fd75 100644 --- a/ocw-ui/frontend/test/karma.conf.js +++ b/ocw-ui/frontend/test/karma.conf.js @@ -46,23 +46,54 @@ module.exports = function(config) { // web server port port: 8080, + // Start these browsers, currently available: // - Chrome - // - ChromeCanary // - Firefox - // - Opera // - Safari (only Mac) // - PhantomJS // - IE (only Windows) - browsers: [ - 'PhantomJS', - //'Chrome' - ], + // Ok to leave this empty as karma-detect-browsers will figure this out based on what is installed. + // Either set enabled to false for karma-detect-browsers and fill in a specific list or update + // the logic in the karma-detect-browsers config to remove those you don't want to test. + browsers: [ ], + + frameworks: ['jasmine', 'detectBrowsers'], + + detectBrowsers: { + // enable/disable, default is true + enabled: true, + + // enable/disable phantomjs support, default is true + usePhantomJS: true, + + // post processing of browsers list + // here you can edit the list of browsers used by karma + postDetection: function(availableBrowser) { + + var result = availableBrowser; + + //Remove PhantomJS if another browser has been detected + //if (availableBrowser.length > 1 && availableBrowser.indexOf('PhantomJS')>-1) { + // var i = result.indexOf('PhantomJS'); + + // if (i !== -1) { + // result.splice(i, 1); + // } + //} + + return result; + } + }, // Which plugins to enable plugins: [ 'karma-phantomjs-launcher', 'karma-chrome-launcher', + 'karma-firefox-launcher', + 'karma-safari-launcher', + 'karma-ie-launcher', + 'karma-detect-browsers', 'karma-jasmine' ], From 64c2a55f9b53622e46bb524934a870f928c6ae92 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Sun, 7 Jan 2018 19:32:25 -0500 Subject: [PATCH 17/19] CIMATE-917 Bocumentation build error with Python3 --- docs/source/conf.py | 2 ++ 1 file 
changed, 2 insertions(+) diff --git a/docs/source/conf.py b/docs/source/conf.py index b0b511c7..5e101e9e 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -14,6 +14,8 @@ import sys import os +autodoc_mock_imports = ["esgf"] + # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. From 13c6dd6aed9b7cff73cdfa985fe8a561a4759eab Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Sun, 7 Jan 2018 19:37:20 -0500 Subject: [PATCH 18/19] CLIMATE-917 Documentation build error with Python3 --- docs/source/conf.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 5e101e9e..3afb88bb 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -14,7 +14,10 @@ import sys import os -autodoc_mock_imports = ["esgf"] +# esgf is not currently available for Python 3 and will throw an +# error when building the documents. +if sys.version_info[0] >= 3: +    autodoc_mock_imports = ["esgf"] # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. 
If the directory is relative to the From 6a86e25d65179cdc0dccb6dc39d69edd41034cd9 Mon Sep 17 00:00:00 2001 From: Michael Anderson Date: Sun, 7 Jan 2018 21:03:59 -0500 Subject: [PATCH 19/19] CLIMATE-626 Update doc strings in Downscaling class --- docs/source/index.rst | 1 + docs/source/ocw/statistical_downscaling.rst | 8 ++++ ocw/statistical_downscaling.py | 51 ++++++++++++++------- 3 files changed, 44 insertions(+), 16 deletions(-) create mode 100644 docs/source/ocw/statistical_downscaling.rst diff --git a/docs/source/index.rst b/docs/source/index.rst index 2834ee62..9e73110c 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -18,6 +18,7 @@ Contents: ocw/evaluation ocw/metrics ocw/plotter + ocw/statistical_downscaling ocw/utils data_source/data_sources ui-backend/backend diff --git a/docs/source/ocw/statistical_downscaling.rst b/docs/source/ocw/statistical_downscaling.rst new file mode 100644 index 00000000..be631513 --- /dev/null +++ b/docs/source/ocw/statistical_downscaling.rst @@ -0,0 +1,8 @@ +Downscaling Module +****************** + +Downscaling +=========== +.. autoclass:: statistical_downscaling.Downscaling + :members: + diff --git a/ocw/statistical_downscaling.py b/ocw/statistical_downscaling.py index 57013a07..f3d701f2 100755 --- a/ocw/statistical_downscaling.py +++ b/ocw/statistical_downscaling.py @@ -15,23 +15,36 @@ # specific language governing permissions and limitations # under the License. +""" +Classes: + Downscaling - Container for applying statistical downscaling. +""" + -import ocw.utils as utils import numpy as np -from scipy.stats import percentileofscore, linregress +from scipy.stats import linregress, percentileofscore + +class Downscaling(object): + """ + Statistical downscaling infers higher resolution information from lower resolution data. + For example, data collected at a more coarse regional level applied to a more refined + local level. 
-class Downscaling: + Statistical downscaling establishes a relationship between different variables in the large scale + and the local scale and applies that relationship to the local scale. + """ def __init__(self, ref_dataset, model_present, model_future): - ''' + """ Default Downscaling constructor. + :param ref_dataset: The Dataset to use as the reference dataset (observation) :type ref_dataset: Dataset :param model_present: model simulation to be compared with observation :type model_present: Dataset :param model_future: model simulation to be calibrated for prediction :type model_future: Dataset - ''' + """ self.ref_dataset = ref_dataset[~ref_dataset.mask].ravel() self.model_present = model_present.ravel() self.model_future = model_future.ravel() @@ -39,11 +52,11 @@ def __init__(self, ref_dataset, model_present, model_future): description = "statistical downscaling methods" def Delta_addition(self): - '''Calculate the mean difference between future and present simulation, + """Calculate the mean difference between future and present simulation, then add the difference to the observed distribution :returns: downscaled model_present and model_future - ''' + """ ref = self.ref_dataset model_present = self.model_present model_future = self.model_future @@ -51,23 +64,26 @@ def Delta_addition(self): return model_present, ref + np.mean(model_future - model_present) def Delta_correction(self): - '''Calculate the mean difference between observation and present simulation, + """Calculate the mean difference between observation and present simulation, then add the difference to the future distribution :returns: downscaled model_present and model_future - ''' + """ ref = self.ref_dataset model_present = self.model_present model_future = self.model_future - return model_present + np.mean(ref) - np.mean(model_present), model_future + np.mean(ref) - np.mean(model_present) + return model_present + np.mean(ref) - np.mean(model_present), model_future + \ + np.mean(ref) - 
np.mean(model_present) def Quantile_mapping(self): - '''Remove the biases for each quantile value - Wood et al (2004) HYDROLOGIC IMPLICATIONS OF DYNAMICAL AND STATISTICAL APPROACHES TO DOWNSCALING CLIMATE MODEL OUTPUTS + """Remove the biases for each quantile value + + Wood et al (2004) HYDROLOGIC IMPLICATIONS OF DYNAMICAL + AND STATISTICAL APPROACHES TO DOWNSCALING CLIMATE MODEL OUTPUTS :returns: downscaled model_present and model_future - ''' + """ ref = self.ref_dataset model_present = self.model_present model_present_corrected = np.zeros(model_present.size) @@ -86,11 +102,14 @@ def Quantile_mapping(self): return model_present_corrected, model_future_corrected def Asynchronous_regression(self): - '''Remove the biases by fitting a linear regression model with ordered observational and model datasets - Stoner et al (2013) An asynchronous regional regression model for statistical downscaling of daily climate variables + """Remove the biases by fitting a linear regression model with ordered observational and + model datasets + + Stoner et al (2013) An asynchronous regional regression model for statistical downscaling of + daily climate variables :returns: downscaled model_present and model_future - ''' + """ ref_original = self.ref_dataset model_present = self.model_present