Merge branch 'master' into CLIMATE-912
lewismc authored Aug 9, 2017
2 parents 5c86f3a + 56989f5 commit 51d9dce
Showing 21 changed files with 306 additions and 254 deletions.
16 changes: 4 additions & 12 deletions RCMES/cli_app.py
@@ -1185,8 +1185,6 @@ def settings_screen(header):
     screen.addstr(11, x/2, "6 - Change Target dataset/s")
     screen.addstr(12, x/2, "7 - Change Metric")
     screen.addstr(13, x/2, "8 - Change Working Directory")
-    #screen.addstr(14, x/2, "9 - Change Plot Title [Coming Soon....]")
-    #screen.addstr(15, x/2, "10 - Save the processed data [Coming Soon....]")
     screen.addstr(14, x/2, "9 - Show Temporal Boundaries")
     screen.addstr(15, x/2, "10 - Show Spatial Boundaries")
     screen.addstr(16, x/2, "0 - Return to Main Menu")
@@ -1377,19 +1375,12 @@ def settings_screen(header):
     else:
         note = "Working directory has not changed"

-    if option == '9':
-        screen.addstr(25, x/2, "Please enter plot title:")
-        plot_title = screen.getstr()
-
-    #if option == '10':
-    #    screen.addstr(25, x/2, "Please enter plot title:")
-    #    plot_title = screen.getstr()
-
     if option == '9':
         models_start_time, models_end_time = get_models_temp_bound()
         line = 25
         for i, model in enumerate(model_datasets):
-            mode_name = models_info[i]['directory'].split("/")[-1]
+            #mode_name = models_info[i]['directory'].split("/")[-1]
+            mode_name = 'model %d' %(i+1)
             line += 1
             screen.addstr(line, x/2, "{0}".format(mode_name))
             line += 1
@@ -1407,7 +1398,8 @@ def settings_screen(header):
         models_bound = get_models_spatial_bound()
         line = 25
         for i, model in enumerate(model_datasets):
-            mode_name = models_info[i]['directory'].split("/")[-1]
+            #mode_name = models_info[i]['directory'].split("/")[-1]
+            mode_name = 'model %d' %(i+1)
             line += 1
             screen.addstr(line, x/2, "{0}".format(mode_name))
             line += 1
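A side note on the menu code above: under Python 3, x/2 is a float, and curses requires integer coordinates, so these addstr calls would need floor division. A minimal standalone sketch (a hypothetical snippet, not part of this commit):

    import curses

    def demo(screen):
        _, x = screen.getmaxyx()
        # x // 2 keeps the column an int; under Python 3, x / 2 is a float
        # and would make addstr raise TypeError.
        screen.addstr(11, x // 2, "6 - Change Target dataset/s")
        screen.getch()

    curses.wrapper(demo)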
(file header not captured; another NARCCAP example configuration)
@@ -22,9 +22,11 @@ regrid:
     regrid_dlat: 0.50
     regrid_dlon: 0.50
-
+# generic_dataset_name: If false, data filenames must include the elements of dataset_name list.
 datasets:
     - loader_name: local
       name: SRB
+      generic_dataset_name: True
+      dataset_name: ['SRB']
       file_path: ./data/NARCCAP_data/srb_rel3.0_shortwave_from_1983_to_2007.nc
       variable_name: sw_sfc_dn

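The generic_dataset_name comment introduced above ties the flag to filename matching. A minimal sketch of the check it describes — matches_dataset_name is a hypothetical helper for illustration, not the actual OCW loader API:

    def matches_dataset_name(filename, dataset_name, generic_dataset_name):
        # When generic_dataset_name is False, every element of the
        # dataset_name list must appear in the data filename.
        if generic_dataset_name:
            return True
        return all(part in filename for part in dataset_name)

    # The filename above contains 'srb' but not the uppercase 'SRB',
    # which is presumably why these examples set generic_dataset_name: True.
    print(matches_dataset_name(
        'srb_rel3.0_shortwave_from_1983_to_2007.nc', ['SRB'], False))  # False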
5 changes: 3 additions & 2 deletions RCMES/configuration_files/NARCCAP_examples/Fig16_summer.yaml
@@ -21,10 +21,11 @@ regrid:
     regrid_on_reference: False
     regrid_dlat: 0.50
     regrid_dlon: 0.50
-
+# generic_dataset_name: If false, data filenames must include the elements of dataset_name list.
 datasets:
     - loader_name: local
       name: SRB
+      generic_dataset_name: True
+      dataset_name: ['SRB']
       file_path: ./data/NARCCAP_data/srb_rel3.0_shortwave_from_1983_to_2007.nc
       variable_name: sw_sfc_dn
-
4 changes: 3 additions & 1 deletion RCMES/configuration_files/NARCCAP_examples/Fig16_winter.yaml
@@ -22,9 +22,11 @@ regrid:
     regrid_dlat: 0.50
     regrid_dlon: 0.50
-
+# generic_dataset_name: If false, data filenames must include the elements of dataset_name list.
 datasets:
     - loader_name: local
       name: SRB
+      generic_dataset_name: True
+      dataset_name: ['SRB']
       file_path: ./data/NARCCAP_data/srb_rel3.0_shortwave_from_1983_to_2007.nc
       variable_name: sw_sfc_dn

17 changes: 5 additions & 12 deletions RCMES/statistical_downscaling/run_statistical_downscaling.py
@@ -76,9 +76,9 @@ def extract_data_at_nearest_grid_point(target_dataset, longitude, latitude):
     :rtype: Open Climate Workbench Dataset Object
     """

-    if target_dataset.lons.ndim == 1 and target_dataset.lats.ndim == 1:
+    if target_dataset.lons.ndim == target_dataset.lats.ndim == 1:
         new_lon, new_lat = np.meshgrid(target_dataset.lons, target_dataset.lats)
-    elif target_dataset.lons.ndim == 2 and target_dataset.lats.ndim == 2:
+    elif target_dataset.lons.ndim == target_dataset.lats.ndim == 2:
         new_lon = target_dataset.lons
         new_lat = target_dataset.lats
     distance = (new_lon - longitude)**2. + (new_lat - latitude)**2.
@@ -155,16 +155,9 @@ def extract_data_at_nearest_grid_point(target_dataset, longitude, latitude):

 print(downscale_option_names[DOWNSCALE_OPTION]+": Downscaling model output")

-if DOWNSCALE_OPTION == 1:
-    downscaled_model_present, downscaled_model_future = downscale.Delta_addition()
-elif DOWNSCALE_OPTION == 2:
-    downscaled_model_present, downscaled_model_future = downscale.Delta_correction()
-elif DOWNSCALE_OPTION == 3:
-    downscaled_model_present, downscaled_model_future = downscale.Quantile_mapping()
-elif DOWNSCALE_OPTION == 4:
-    downscaled_model_present, downscaled_model_future = downscale.Asynchronous_regression()
-else:
-    sys.exit("DOWNSCALE_OPTION must be an integer between 1 and 4")
+xdownscale = [downscale.Delta_addition, downscale.Delta_correction, downscale.Quantile_mapping, downscale.Asynchronous_regression]
+if 0 < DOWNSCALE_OPTION <= len(xdownscale): downscaled_model_present, downscaled_model_future = xdownscale[DOWNSCALE_OPTION - 1]()
+else: sys.exit("DOWNSCALE_OPTION must be an integer between 1 and %d" % len(xdownscale))


 """ Step 5: Create plots and spreadsheet """
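The new dispatch list assumes the valid options stay contiguous from 1. An equivalent sketch keyed by option number, using the same downscale methods named in the diff (and assuming the surrounding script's downscale, DOWNSCALE_OPTION, and sys):

    dispatch = {
        1: downscale.Delta_addition,
        2: downscale.Delta_correction,
        3: downscale.Quantile_mapping,
        4: downscale.Asynchronous_regression,
    }
    try:
        method = dispatch[DOWNSCALE_OPTION]
    except KeyError:
        sys.exit("DOWNSCALE_OPTION must be one of %s" % sorted(dispatch))
    downscaled_model_present, downscaled_model_future = method()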
17 changes: 11 additions & 6 deletions examples/esgf_integration_example.py
@@ -18,14 +18,19 @@
 import ocw.data_source.esgf as esgf
 from getpass import getpass
 import ssl
+import sys

 if hasattr(ssl, '_create_unverified_context'):
     ssl._create_default_https_context = ssl._create_unverified_context

-dataset_id = 'obs4MIPs.CNES.AVISO.zos.mon.v20110829|esgf-data.jpl.nasa.gov'
+dataset_id = 'obs4mips.CNES.AVISO.zos.mon.v20110829|esgf-data.jpl.nasa.gov'
 variable = 'zosStderr'

-username = raw_input('Enter your ESGF OpenID:\n')
+if sys.version_info[0] >= 3:
+    username = input('Enter your ESGF OpenID:\n')
+else:
+    username = raw_input('Enter your ESGF OpenID:\n')
+
 password = getpass(prompt='Enter your ESGF Password:\n')

 # Multiple datasets are returned in a list if the ESGF dataset is
@@ -39,7 +44,7 @@
 # we only need to look at the 0-th value in the returned list.
 ds = datasets[0]

-print '\n--------\n'
-print 'Variable: ', ds.variable
-print 'Shape: ', ds.values.shape
-print 'A Value: ', ds.values[100][100][100]
+print('\n--------\n')
+print('Variable: ', ds.variable)
+print('Shape: ', ds.values.shape)
+print('A Value: ', ds.values[100][100][100])
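The version branch added above recurs for every prompt, so it can be folded into a helper that keeps each call site a one-liner. A standard-library-only sketch (prompt is a hypothetical name, not part of the example):

    import sys

    def prompt(message):
        # input() is correct on Python 3; on Python 2, raw_input() avoids
        # the eval-like behaviour of Python 2's input().
        if sys.version_info[0] >= 3:
            return input(message)
        return raw_input(message)  # noqa: F821 -- defined on Python 2 only

    # username = prompt('Enter your ESGF OpenID:\n')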
7 changes: 4 additions & 3 deletions examples/podaac_integration_example.py
@@ -24,8 +24,9 @@
 variable = 'uwnd'
 name = 'PO.DAAC_test_dataset'
 OUTPUT_PLOT = "ccmp_temporal_std"
-""" Step 1: Load Local NetCDF Files into OCW Dataset Objects """
-print("Extracting Level4 granule %s and converting it into a OCW dataset object." % datasetId)
+""" Step 1: Download remote PO.DAAC Dataset and read it into an OCW Dataset Object"""
+print("Available Level4 PO.DAAC Granules: %s" % podaac.list_available_extract_granule_dataset_ids())
+print("Extracting variable '%s' from Level4 granule '%s' and converting it into a OCW dataset object." % (variable, datasetId))
 ccmp_dataset = podaac.extract_l4_granule(
     variable=variable, dataset_id=datasetId, name=name)
 print("CCMP_Dataset.values shape: (times, lats, lons) - %s \n" %
@@ -67,7 +68,7 @@

 fname = OUTPUT_PLOT
 gridshape = (4, 5)  # 20 years worth of plots: 4 rows by 5 columns
-plot_title = "CCMP Temporal Standard Deviation"
+plot_title = "Cross-Calibrated Multi-Platform Temporal Standard Deviation"
 sub_titles = range(2002, 2010, 1)

 plotter.draw_contour_map(results, lats, lons, fname,
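Since the new print line already fetches the available granule ids, the same call can guard the extraction. A sketch using the two podaac calls shown in the diff, assuming list_available_extract_granule_dataset_ids() returns a collection of id strings:

    available_ids = podaac.list_available_extract_granule_dataset_ids()
    if datasetId not in available_ids:
        raise ValueError("Granule '%s' is not available for extraction" % datasetId)
    ccmp_dataset = podaac.extract_l4_granule(
        variable=variable, dataset_id=datasetId, name=name)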
14 changes: 11 additions & 3 deletions examples/simple_model_to_model_bias.py
@@ -17,7 +17,15 @@

 import datetime
 from os import path
-import urllib
+import sys
+
+if sys.version_info[0] >= 3:
+    from urllib.request import urlretrieve
+else:
+    # Not Python 3 - today, it is most likely to be Python 2
+    # But note that this might need an update when Python 4
+    # might be around one day
+    from urllib import urlretrieve

 import numpy as np

@@ -39,9 +47,9 @@
 FILE_2_PATH = path.join('/tmp', FILE_2)

 if not path.exists(FILE_1_PATH):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1_PATH)
+    urlretrieve(FILE_LEADER + FILE_1, FILE_1_PATH)
 if not path.exists(FILE_2_PATH):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2_PATH)
+    urlretrieve(FILE_LEADER + FILE_2, FILE_2_PATH)

 """ Step 1: Load Local NetCDF Files into OCW Dataset Objects """
 print("Loading %s into an OCW Dataset Object" % (FILE_1_PATH,))
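Where the six compatibility library is available, the eight-line version branch above collapses to a single import, and urlretrieve works identically on Python 2 and 3; a sketch:

    from six.moves.urllib.request import urlretrieve

    # Same call sites as in the diff above:
    # urlretrieve(FILE_LEADER + FILE_1, FILE_1_PATH)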
13 changes: 10 additions & 3 deletions examples/taylor_diagram_example.py
@@ -18,7 +18,14 @@
 import datetime
 import sys
 from os import path
-import urllib
+
+if sys.version_info[0] >= 3:
+    from urllib.request import urlretrieve
+else:
+    # Not Python 3 - today, it is most likely to be Python 2
+    # But note that this might need an update when Python 4
+    # might be around one day
+    from urllib import urlretrieve

 import numpy

@@ -36,10 +43,10 @@
 # Download some example NetCDF files for the evaluation
 ##########################################################################
 if not path.exists(FILE_1):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
+    urlretrieve(FILE_LEADER + FILE_1, FILE_1)

 if not path.exists(FILE_2):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
+    urlretrieve(FILE_LEADER + FILE_2, FILE_2)

 # Load the example datasets into OCW Dataset objects. We want to load
 # the 'tasmax' variable values. We'll also name the datasets for use
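The exists-then-download pattern repeats across these examples; a Python-3-only sketch that wraps it once (fetch is a hypothetical helper, not part of the example):

    from pathlib import Path
    from urllib.request import urlretrieve

    def fetch(url, destination):
        # Download only when the target file is missing.
        if not Path(destination).exists():
            urlretrieve(url, destination)

    # fetch(FILE_LEADER + FILE_1, FILE_1)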
21 changes: 16 additions & 5 deletions examples/time_series_with_regions.py
@@ -12,7 +12,15 @@
 import numpy as np
 import numpy.ma as ma
 from os import path
-import urllib
+import sys
+
+if sys.version_info[0] >= 3:
+    from urllib.request import urlretrieve
+else:
+    # Not Python 3 - today, it is most likely to be Python 2
+    # But note that this might need an update when Python 4
+    # might be around one day
+    from urllib import urlretrieve
 import ssl
 if hasattr(ssl, '_create_unverified_context'):
     ssl._create_default_https_context = ssl._create_unverified_context
@@ -29,7 +37,7 @@
 LAT_MAX = 42.24
 LON_MIN = -24.0
 LON_MAX = 60.0
-START = datetime.datetime(2000, 01, 1)
+START = datetime.datetime(2000, 1, 1)
 END = datetime.datetime(2007, 12, 31)

 EVAL_BOUNDS = Bounds(lat_min=LAT_MIN, lat_max=LAT_MAX,
@@ -48,13 +56,16 @@

 # Download necessary NetCDF file if not present
 if not path.exists(FILE_1):
-    urllib.urlretrieve(FILE_LEADER + FILE_1, FILE_1)
+    print("Downloading %s" % (FILE_LEADER + FILE_1))
+    urlretrieve(FILE_LEADER + FILE_1, FILE_1)

 if not path.exists(FILE_2):
-    urllib.urlretrieve(FILE_LEADER + FILE_2, FILE_2)
+    print("Downloading %s" % (FILE_LEADER + FILE_2))
+    urlretrieve(FILE_LEADER + FILE_2, FILE_2)

 if not path.exists(FILE_3):
-    urllib.urlretrieve(FILE_LEADER + FILE_3, FILE_3)
+    print("Downloading %s" % (FILE_LEADER + FILE_3))
+    urlretrieve(FILE_LEADER + FILE_3, FILE_3)

 """ Step 1: Load Local NetCDF File into OCW Dataset Objects and store in list"""
 target_datasets.append(local.load_file(FILE_1, varName, name="KNMI"))
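Patching ssl._create_default_https_context, as this file does, disables certificate verification process-wide. A narrower alternative, sketched for Python 3 with a placeholder URL, is to hand an unverified context to the one call that needs it:

    import ssl
    from urllib.request import urlopen

    context = ssl._create_unverified_context()
    # Only this request skips verification; other HTTPS calls are unaffected.
    with urlopen('https://example.org/some_file.nc', context=context) as response:
        payload = response.read()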
16 changes: 5 additions & 11 deletions mccsearch/code/mccSearch.py
@@ -90,6 +90,7 @@
 # graph meeting the CC criteria
 PRUNED_GRAPH = nx.DiGraph()
 #------------------------ End GLOBAL VARS -------------------------
+
 #************************ Begin Functions *************************
 #******************************************************************

@@ -123,7 +124,6 @@ def readMergData(dirname, filelist=None):
     mergTimeVarName = 'time'
     mergLatVarName = 'latitude'
     mergLonVarName = 'longitude'
-
     filelistInstructions = dirname + '/*'
     if filelist is None:
         filelist = glob.glob(filelistInstructions)
@@ -285,6 +285,7 @@ def findCloudElements(mergImgs, timelist, TRMMdirName=None):
     # NB in the TRMM files the info is hours since the time thus 00Z file has
     # in 01, 02 and 03 times
     for t in range(mergImgs.shape[0]):
+
         #-------------------------------------------------
         # #textfile name for saving the data for arcgis
         # thisFileName = MAINDIRECTORY+'/' + (str(timelist[t])).replace(" ", "_") + '.txt'
@@ -513,7 +514,6 @@ def findCloudElements(mergImgs, timelist, TRMMdirName=None):
                     finalCETRMMvalues[0,
                         cloudElementLat[lat_index],
                         cloudElementLon[lon_index]]))
-
         brightnesstemp[:] = brightnesstemp1[:]
         currNetCDFCEData.close()

@@ -964,7 +964,6 @@ def findCloudClusters(CEGraph):
     Output::
         PRUNED_GRAPH: a Networkx directed graph with CCs/MCSs
     '''
-
     seenNode = []
@@ -1253,7 +1252,6 @@ def traverseTree(subGraph, node, stack, checkedNodes=None):
     Assumptions:
         frames are ordered and are equally distributed in time e.g. hrly satellite images
     '''
-
     if len(checkedNodes) == len(subGraph):
         return checkedNodes
@@ -1273,7 +1271,6 @@ def traverseTree(subGraph, node, stack, checkedNodes=None):
             stack.insert(0, child)

     stack.insert(0, parent)
-
     for child in downOneLevel:
         if child not in checkedNodes and child not in stack:
             if len(subGraph.predecessors(child)) > 1 or node in checkedNodes:
@@ -1612,6 +1609,7 @@ def updateMCCList(
                 potentialMCCList[index]["possMCCList"].append(
                     (node, stage))
                 potentialMCCList[index]["fullMCSMCC"].append((node, stage))
+
                 if frameNum > potentialMCCList[index]["frameNum"] or potentialMCCList[index]["frameNum"] == 0:
                     potentialMCCList[index]["frameNum"] = frameNum
                     potentialMCCList[index]["highestMCCnode"] = node
@@ -1678,6 +1676,7 @@ def updateMCCList(
                         (node, 'M'))
                     potentialMCCList[index]["fullMCSMCC"].append(
                         (node, 'M'))
+
                     potentialMCCList[index]["durationAandB"] += 1
                     if frameNum > potentialMCCList[index]["frameNum"]:
                         potentialMCCList[index]["frameNum"] = frameNum
@@ -2088,7 +2087,6 @@ def addInfothisDict(thisNode, cloudElementArea, criteriaB):
         criteriaB: a masked array of floating-point numbers representing the lat,lons meeting the criteria
     Output:: None
     '''
-
     for eachdict in CLOUD_ELEMENT_GRAPH.nodes(thisNode):
         if eachdict[1]['uniqueID'] == thisNode:
@@ -3323,7 +3321,6 @@ def displaySize(finalMCCList):
             ax.set_title(title)
             ax.fmt_xdata = mdates.DateFormatter('%Y-%m-%d%H:%M:%S')
             fig.autofmt_xdate()
-
             plt.subplots_adjust(bottom=0.2)

             imgFilename = MAINDIRECTORY + '/images/' + str(count) + 'MCS.gif'
@@ -3748,7 +3745,6 @@ def plotAccuInTimeRange(starttime, endtime):
     latitude[:] = LATTRMM[:, 0]
     latitude.units = "degrees_north"
     latitude.long_name = "Latitude"
-
     rainFallacc[:] = accuPrecipRate[:]

     accuTRMMData.close()
@@ -4251,6 +4247,7 @@ def colorbar_index(ncolors, nlabels, cmap):
     Purpose::
         Utility script for creating a colorbar
         Taken from http://stackoverflow.com/questions/18704353/correcting-matplotlib-colorbar-ticks
+
     '''
     cmap = cmap_discretize(cmap, ncolors)
     mappable = cm.ScalarMappable(cmap=cmap)
@@ -4297,13 +4294,10 @@ def cmap_discretize(cmap, N):
 # NETCDF format. The files end up in a folder called mergNETCDF in the directory
 # where the raw MERG data is
 # NOTE: VERY RAW AND DIRTY
-
 # Input::
 # Directory to the location of the raw MERG files, preferably zipped
-
 # Output::
 # none
-
 # Assumptions::
 # 1 GrADS (http://www.iges.org/grads/gadoc/) and lats4D (http://opengrads.org/doc/scripts/lats4d/)
 # have been installed on the system and the user can access
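For context on the colorbar utilities touched above: Matplotlib can produce a discrete colormap directly, since get_cmap accepts a lut size. A self-contained sketch of the same idea cmap_discretize/colorbar_index implement:

    import numpy as np
    import matplotlib
    matplotlib.use('Agg')  # headless backend for the sketch
    import matplotlib.pyplot as plt

    cmap = plt.get_cmap('jet', 8)  # 'jet' quantised to 8 colours
    data = np.random.rand(10, 10)
    plt.imshow(data, cmap=cmap)
    plt.colorbar(ticks=np.linspace(0, 1, 9))
    plt.savefig('discrete_colorbar.png')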
1 change: 1 addition & 0 deletions mccsearch/code/mccSearchUI.py
@@ -18,6 +18,7 @@
 # Wizard for running the mccSearch program
 '''

+import os
 import networkx as nx
 # mccSearch modules
 from mccSearch import *
(Diffs for the remaining changed files were not loaded.)
