Skip to content

Commit 188a5ff

Browse files
committed
Merged dev to main for new release
2 parents 5f63813 + 024cf54 commit 188a5ff

Some content is hidden

Large commits have some content hidden by default. Use the searchbox below for content that may be hidden.

46 files changed

+2137
-699
lines changed

dnora/aux_funcs.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,9 @@
55
from scipy.interpolate import griddata
66
from scipy import interpolate
77
import os, re, glob
8-
import pyfimex0 as pyfi
8+
# This is imported inside the pyfimex function, since we want to avoid requiring
9+
# this dependency if someone wants to run without wind forcing
10+
#import pyfimex0 as pyfi
911
from typing import TYPE_CHECKING, Tuple, List, Union
1012
from . import file_module
1113
if TYPE_CHECKING:
@@ -630,6 +632,7 @@ def create_swan_segment_coords(boundary_mask, lon_edges, lat_edges):
630632

631633
def pyfimex(input_file, output_file, projString, xAxisValues, yAxisValues,
632634
selectVariables, reduceTime_start, reduceTime_end,ensemble_member=False):
635+
import pyfimex0 as pyfi
633636
r = pyfi.createFileReader('netcdf', input_file)
634637
inter_ll = pyfi.createInterpolator(r)
635638
inter_ll.changeProjection(pyfi.InterpolationMethod.BILINEAR,

dnora/bnd/bnd_mod.py

+12-23
Original file line numberDiff line numberDiff line change
@@ -54,8 +54,8 @@ def import_boundary(self, start_time: str, end_time: str,
5454
boundary_point_grid = Grid(lon=self.grid.lon_edges(),
5555
lat=self.grid.lat_edges(),
5656
name='boundary_points')
57-
boundary_point_grid.set_spacing(nx=self.grid.boundary_nx(),
58-
ny=self.grid.boundary_ny())
57+
#boundary_point_grid.set_spacing(nx=self.grid.boundary_nx(),
58+
# ny=self.grid.boundary_ny())
5959
boundary_reader.set_restricted_area(boundary_point_grid)
6060

6161
if write_cache or read_cache:
@@ -64,7 +64,7 @@ def import_boundary(self, start_time: str, end_time: str,
6464
if read_cache and not cache_empty:
6565
msg.info('Reading boundary data from cache!!!')
6666
original_boundary_reader = copy(boundary_reader)
67-
boundary_reader = DnoraNc(files=glob.glob(f'{cache_folder}/{cache_name}*'), convention = boundary_reader.convention())
67+
boundary_reader = DnoraNc(files=glob.glob(f'{cache_folder}/{cache_name}_bnd.nc'), convention = boundary_reader.convention())
6868

6969
self._history.append(copy(boundary_reader))
7070

@@ -88,30 +88,19 @@ def import_boundary(self, start_time: str, end_time: str,
8888
if read_cache and not cache_empty:
8989
patch_start, patch_end = aux_funcs.determine_patch_periods(self.time(), start_time, end_time)
9090
if patch_start:
91-
msg.info('Not all data found in cache. Patching from original source...')
92-
93-
lon_all, lat_all = original_boundary_reader.get_coordinates(start_time)
94-
inds = point_picker(self.grid, lon_all, lat_all)
95-
bnd_list = [self.data]
96-
for t0, t1 in zip(patch_start, patch_end):
97-
time, freq, dirs, spec, lon, lat, source = original_boundary_reader(t0, t1, inds)
98-
bnd_list.append(self.compile_to_xr(time, freq, dirs, spec, lon, lat, source))
99-
100-
self.data = xr.concat(bnd_list, dim="time").sortby('time')
91+
msg.info(
92+
f'Timesteps stored in {cache_name}_bnd.nc does not cover the entire time range. set read_cache to false and rerun.')
93+
exit(-1)
10194

10295

10396
if write_cache:
10497
msg.info('Caching data:')
105-
for month in self.months():
106-
cache_file = f"{cache_name}_{month.strftime('%Y-%m')}.nc"
107-
t0 = f"{month.strftime('%Y-%m-01')}"
108-
d1 = monthrange(int(month.strftime('%Y')), int(month.strftime('%m')))[1]
109-
t1 = f"{month.strftime(f'%Y-%m-{d1}')}"
110-
outfile = f'{cache_folder}/{cache_file}'
111-
if os.path.exists(outfile):
112-
os.remove(outfile)
113-
self.data.sel(time=slice(t0, t1)).to_netcdf(outfile)
114-
msg.to_file(outfile)
98+
cache_file = f"{cache_name}_bnd.nc"
99+
outfile = f'{cache_folder}/{cache_file}'
100+
if os.path.exists(outfile):
101+
os.remove(outfile)
102+
self.data.to_netcdf(outfile)
103+
msg.to_file(outfile)
115104

116105
#self.mask = [True]*len(self.x())
117106

dnora/bnd/pick.py

+7-2
Original file line numberDiff line numberDiff line change
@@ -31,8 +31,9 @@ class NearestGridPoint(PointPicker):
3131
"""Choose the nearest grid point to each boundary point in the grid.
3232
Set a maximum allowed distance using `max_dist` (in km) at instantiation time.
3333
"""
34-
def __init__(self, max_dist=None):
34+
def __init__(self, max_dist=None, remove_duplicate=False):
3535
self.max_dist = max_dist
36+
self.remove_duplicate = remove_duplicate
3637
pass
3738

3839
def __call__(self, grid, bnd_lon, bnd_lat):
@@ -51,7 +52,11 @@ def __call__(self, grid, bnd_lon, bnd_lat):
5152
else:
5253
msg.plain('DISCARDED, too far: '+ms)
5354

54-
inds = np.array(inds)
55+
if self.remove_duplicate == True:
56+
inds = np.unique(np.array(inds))
57+
msg.plain('*** Duplicate spectra are removed ***')
58+
else:
59+
inds = np.array(inds)
5560
return inds
5661

5762
class Area(PointPicker):

dnora/bnd/read.py

+1
Original file line numberDiff line numberDiff line change
@@ -108,6 +108,7 @@ def _crop(ds):
108108
return ds.sel(time=slice(start_time, end_time))
109109
msg.info(f"Getting boundary spectra from cached netcdf (e.g. {self.files[0]}) from {start_time} to {end_time}")
110110
ds = xr.open_mfdataset(self.files, preprocess=_crop)
111+
111112
ds = ds.sel(x=inds)
112113
return ds.time.values, ds.freq.values, ds.dirs.values, ds.spec.values, ds.lon.values, ds.lat.values, ds.source
113114

dnora/bnd/read_ec.py

+56-28
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
import cdsapi
1212
# Import auxiliary functions
1313
from .. import msg
14-
from ..aux_funcs import create_time_stamps, expand_area, int_list_of_days, int_list_of_months, int_list_of_years
14+
from ..aux_funcs import create_time_stamps, expand_area, day_list
1515

1616

1717
def renormalize_era5_spec(bnd_spec):
@@ -25,53 +25,73 @@ def reshape_bnd_spec(bnd_spec):
2525
pass
2626

2727

28-
def download_era5_from_cds(start_time, end_time, lon, lat, dlon, dlat, folder='dnora_wnd_temp') -> str:
29-
"""Downloads ERA5 10 m wind data from the Copernicus Climate Data Store for a
28+
def download_era5_from_cds(start_time, end_time, lon, lat, dlon, dlat, folder='dnora_bnd_temp') -> str:
29+
"""Downloads ERA5 spectral data from the Copernicus Climate Data Store for a
3030
given area and time period"""
3131
start_time = pd.Timestamp(start_time)
3232
end_time = pd.Timestamp(end_time)
3333
c = cdsapi.Client()
3434

35-
filename = f'{folder}/EC_ERA5.nc'
35+
days = day_list(start_time, end_time)
36+
#tt = pd.date_range(start=start_time,end=end_time)
3637

37-
years = [f'{y:4.0f}' for y in int_list_of_years(start_time, end_time)]
38-
months = [f'{m:02.0f}' for m in int_list_of_months(start_time, end_time)]
39-
days = [f'{d:02.0f}' for d in int_list_of_days(start_time, end_time)]
38+
filename = f'{folder}/EC_ERA5.nc'
4039

4140
# Create string for dates
42-
dates = []
43-
for y in years:
44-
for m in months:
45-
for d in days:
46-
dates.append(f'{y}-{m}-{d}')
47-
dates = '/'.join(dates)
41+
#dates = [days[0].strftime('%Y-%m-%d'), days[-1].strftime('%Y-%m-%d')]
42+
#dates = '/'.join(dates)
43+
dates = f'{str(start_time)[0:10]}/to/{str(end_time)[0:10]}'
4844

4945

5046
cds_command ={
5147
'class': 'ea',
5248
'date': dates,
5349
'direction': '1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24',
5450
'domain': 'g',
51+
'area': f'{lat[1]}/{lon[0]}/{lat[0]}/{lon[1]}', # north, west, south, east
52+
'grid': f'{dlat}/{dlon}',
5553
'expver': '1',
5654
'frequency': '1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30',
5755
'param': '251.140',
5856
'stream': 'wave',
59-
'time': '00:00:00/03:00:00/06:00:00/09:00:00/12:00:00/15:00:00/18:00:00/21:00:00',
60-
'area': f'{lat[1]+0.0001}/{lon[0]}/{lat[0]}/{lon[1]+0.0001}', # north, west, south, east
61-
'grid': f'{dlon}/{dlat}',
57+
'time': '00:00:00/01:00:00/02:00:00/03:00:00/04:00:00/05:00:00/06:00:00/07:00:00/08:00:00/09:00:00/10:00:00/11:00:00/12:00:00/13:00:00/14:00:00/15:00:00/16:00:00/17:00:00/18:00:00/19:00:00/20:00:00/21:00:00/22:00:00/23:00:00',
6258
'type': 'an',
6359
'format': 'netcdf',
64-
}
60+
}
61+
#print(cds_command)
62+
63+
# cds_command ={
64+
# 'class': 'ea',
65+
# 'date': dates,
66+
# 'direction': '/'.join([f'{n+1:01.0f}' for n in range(24)]),
67+
# 'domain': 'g',
68+
# 'expver': '1',
69+
# 'frequency': '/'.join([f'{n+1:01.0f}' for n in range(30)]),
70+
# 'param': '251.140',
71+
# 'stream': 'wave',
72+
# 'time': '00:00:00/03:00:00/06:00:00/09:00:00/12:00:00/15:00:00/18:00:00/21:00:00',
73+
# 'area': f'{lat[1]}/{lon[0]}/{lat[0]}/{lon[1]}', # north, west, south, east
74+
# 'grid': f'{dlon}/{dlat}',
75+
# 'type': 'an',
76+
# 'format': 'netcdf',
77+
# }
6578

6679
c.retrieve('reanalysis-era5-complete', cds_command, filename)
6780
return filename
6881
class ERA5(BoundaryReader):
82+
83+
def __init__(self):
84+
self.dlon = 0.5
85+
self.dlat = 0.5
86+
6987
def convention(self) -> str:
7088
return 'Ocean'
7189

7290
def get_coordinates(self, start_time) -> Tuple:
7391
"""Reads first time instance of first file to get longitudes and latitudes for the PointPicker"""
74-
point_list = self.get_restricted_area()._point_list()
92+
restricted_area = self.get_restricted_area()
93+
restricted_area.set_spacing(dlon=self.dlon, dlat=self.dlat)
94+
point_list = restricted_area._point_list()
7595
lon_all = point_list[:,0]
7696
lat_all = point_list[:,1]
7797

@@ -87,23 +107,31 @@ def __call__(self, start_time, end_time, inds) -> Tuple:
87107
os.mkdir(temp_folder)
88108
print("Creating folder %s..." % temp_folder)
89109

90-
msg.plain("Removing old files from temporary folder...")
91-
for f in glob.glob(f"{temp_folder}/EC_ERA5.nc"):
92-
os.remove(f)
110+
local_read = False
111+
112+
if not local_read:
113+
msg.plain("Removing old files from temporary folder...")
114+
for f in glob.glob(f"{temp_folder}/EC_ERA5.nc"):
115+
os.remove(f)
93116

94117
restricted_area = self.get_restricted_area()
118+
lon = np.floor(np.array(restricted_area.lon_edges())/self.dlon)*self.dlon
119+
lat = np.floor(np.array(restricted_area.lat_edges())/self.dlat)*self.dlat
95120

96-
nc_file = download_era5_from_cds(start_time, end_time,
97-
lon=restricted_area.lon_edges(),
98-
lat=restricted_area.lat_edges(),
99-
dlon=restricted_area.dlon(),
100-
dlat=restricted_area.dlat(),
101-
folder=temp_folder)
102121

103122

123+
if local_read:
124+
nc_file = f'{temp_folder}/EC_ERA5.nc'
125+
else:
126+
nc_file = download_era5_from_cds(start_time, end_time,
127+
lon=(lon[0]-self.dlon, lon[1]+self.dlon),
128+
lat=(lat[0]-self.dlat, lat[1]+self.dlat),
129+
dlon=self.dlon,
130+
dlat=self.dlat,
131+
folder=temp_folder)
104132

105133
bnd_spec = xr.open_dataset(nc_file)
106-
134+
bnd_spec = bnd_spec.sortby("time")
107135
bnd_spec = renormalize_era5_spec(bnd_spec)
108136

109137
lon, lat = np.meshgrid(bnd_spec.longitude.values, bnd_spec.latitude.values[::-1])

0 commit comments

Comments
 (0)