Skip to content

Commit f43f6a5

Browse files
authored
Merge pull request #145 from unity-sds/develop
release/3.6.1
2 parents e03f0e5 + 9b97d0f commit f43f6a5

File tree

5 files changed

+45
-11
lines changed

5 files changed

+45
-11
lines changed

CHANGELOG.md

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -5,6 +5,10 @@ All notable changes to this project will be documented in this file.
55
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
66
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
77

8+
## [3.6.1] - 2023-04-24
9+
### Changed
10+
- [#144](https://github.com/unity-sds/unity-data-services/pull/144) fix: downloaded stac to return local absolute path
11+
812
## [3.6.0] - 2023-04-24
913
### Added
1014
- [#142](https://github.com/unity-sds/unity-data-services/pull/142) feat: Support DAAC download files stac file, not just direct json text

cumulus_lambda_functions/stage_in_out/download_granules_daac.py

Lines changed: 11 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -63,31 +63,37 @@ def __download_one_granule(self, assets: dict):
6363
headers = {
6464
'Authorization': f'Bearer {self.__edl_token}'
6565
}
66+
local_item = {}
6667
for k, v in assets.items():
68+
local_item[k] = v
6769
try:
6870
LOGGER.debug(f'downloading: {v["href"]}')
6971
r = requests.get(v['href'], headers=headers)
7072
if r.status_code >= 400:
7173
raise RuntimeError(f'wrong response status: {r.status_code}. details: {r.content}')
7274
# TODO. how to correctly check redirecting to login page
73-
with open(os.path.join(self._download_dir, os.path.basename(v["href"])), 'wb') as fd:
75+
local_file_path = os.path.join(self._download_dir, os.path.basename(v["href"]))
76+
with open(local_file_path, 'wb') as fd:
7477
fd.write(r.content)
78+
local_item[k]['href'] = local_file_path
7579
except Exception as e:
7680
LOGGER.exception(f'failed to download {v}')
77-
v['cause'] = str(e)
81+
local_item[k]['description'] = f'download failed. {str(e)}'
7882
error_log.append(v)
79-
return error_log
83+
return local_item, error_log
8084

8185
def download(self, **kwargs) -> list:
8286
self.__set_props_from_env()
8387
LOGGER.debug(f'creating download dir: {self._download_dir}')
8488
downloading_urls = self.__get_downloading_urls(self._granules_json)
8589
error_list = []
90+
local_items = []
8691
for each in downloading_urls:
8792
LOGGER.debug(f'working on {each}')
88-
current_error_list = self.__download_one_granule(each)
93+
local_item, current_error_list = self.__download_one_granule(each)
8994
error_list.extend(current_error_list)
95+
local_items.append({'assets': local_item})
9096
if len(error_list) > 0:
9197
with open(f'{self._download_dir}/error.log', 'w') as error_file:
9298
error_file.write(json.dumps(error_list, indent=4))
93-
return downloading_urls
99+
return local_items

cumulus_lambda_functions/stage_in_out/download_granules_s3.py

Lines changed: 10 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -58,25 +58,30 @@ def __download_one_granule(self, assets: dict):
5858
:return:
5959
"""
6060
error_log = []
61+
local_item = {}
6162
for k, v in assets.items():
63+
local_item[k] = v
6264
try:
6365
LOGGER.debug(f'downloading: {v["href"]}')
64-
self.__s3.set_s3_url(v['href']).download(self._download_dir)
66+
local_file_path = self.__s3.set_s3_url(v['href']).download(self._download_dir)
67+
local_item[k]['href'] = local_file_path
6568
except Exception as e:
6669
LOGGER.exception(f'failed to download {v}')
67-
v['cause'] = str(e)
70+
local_item[k]['description'] = f'download failed. {str(e)}'
6871
error_log.append(v)
69-
return error_log
72+
return local_item, error_log
7073

7174
def download(self, **kwargs) -> list:
7275
self.__set_props_from_env()
7376
downloading_urls = self.__get_downloading_urls(self._granules_json)
7477
error_list = []
78+
local_items = []
7579
for each in downloading_urls:
7680
LOGGER.debug(f'working on {each}')
77-
current_error_list = self.__download_one_granule(each)
81+
local_item, current_error_list = self.__download_one_granule(each)
82+
local_items.append({'assets': local_item})
7883
error_list.extend(current_error_list)
7984
if len(error_list) > 0:
8085
with open(f'{self._download_dir}/error.log', 'w') as error_file:
8186
error_file.write(json.dumps(error_list, indent=4))
82-
return downloading_urls
87+
return local_items

setup.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -18,7 +18,7 @@
1818

1919
setup(
2020
name="cumulus_lambda_functions",
21-
version="3.6.0",
21+
version="3.6.1",
2222
packages=find_packages(),
2323
install_requires=install_requires,
2424
tests_require=['mock', 'nose', 'sphinx', 'sphinx_rtd_theme', 'coverage', 'pystac', 'python-dotenv', 'jsonschema'],

tests/integration_tests/test_docker_entry.py

Lines changed: 19 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -224,6 +224,10 @@ def test_02_download(self):
224224
download_result = choose_process()
225225
self.assertTrue(isinstance(download_result, list), f'download_result is not list: {download_result}')
226226
self.assertEqual(sum([len(k) for k in download_result]), len(glob(os.path.join(tmp_dir_name, '*'))), f'downloaded file does not match')
227+
self.assertTrue('assets' in download_result[0], f'no assets in download_result: {download_result}')
228+
for each_granule in zip(granule_json, download_result):
229+
remote_filename = os.path.basename(each_granule[0]['assets']['data']['href'])
230+
self.assertEqual(each_granule[1]['assets']['data']['href'], os.path.join(tmp_dir_name, remote_filename), f"mismatched: {each_granule[0]['assets']['data']['href']}")
227231
return
228232

229233
def test_02_download__daac(self):
@@ -245,6 +249,11 @@ def test_02_download__daac(self):
245249
error_file = os.path.join(tmp_dir_name, 'error.log')
246250
if FileUtils.file_exist(error_file):
247251
self.assertTrue(False, f'some downloads failed. error.log exists. {FileUtils.read_json(error_file)}')
252+
self.assertTrue('assets' in download_result[0], f'no assets in download_result: {download_result}')
253+
for each_granule in zip(granule_json, download_result):
254+
remote_filename = os.path.basename(each_granule[0]['assets']['data']['href'])
255+
self.assertEqual(each_granule[1]['assets']['data']['href'], os.path.join(tmp_dir_name, remote_filename),
256+
f"mismatched: {each_granule[0]['assets']['data']['href']}")
248257
return
249258

250259
def test_02_download__daac__from_file(self):
@@ -271,6 +280,11 @@ def test_02_download__daac__from_file(self):
271280
error_file = os.path.join(downloading_dir, 'error.log')
272281
if FileUtils.file_exist(error_file):
273282
self.assertTrue(False, f'some downloads failed. error.log exists. {FileUtils.read_json(error_file)}')
283+
self.assertTrue('assets' in download_result[0], f'no assets in download_result: {download_result}')
284+
for each_granule in zip(granule_json, download_result):
285+
remote_filename = os.path.basename(each_granule[0]['assets']['data']['href'])
286+
self.assertEqual(each_granule[1]['assets']['data']['href'], os.path.join(downloading_dir, remote_filename),
287+
f"mismatched: {each_granule[0]['assets']['data']['href']}")
274288
return
275289

276290
def test_02_download__daac_error(self):
@@ -314,6 +328,11 @@ def test_02_download__from_file(self):
314328
error_file = os.path.join(downloading_dir, 'error.log')
315329
if FileUtils.file_exist(error_file):
316330
self.assertTrue(False, f'some downloads failed. error.log exists. {FileUtils.read_json(error_file)}')
331+
self.assertTrue('assets' in download_result[0], f'no assets in download_result: {download_result}')
332+
for each_granule in zip(granule_json, download_result):
333+
remote_filename = os.path.basename(each_granule[0]['assets']['data']['href'])
334+
self.assertEqual(each_granule[1]['assets']['data']['href'], os.path.join(downloading_dir, remote_filename),
335+
f"mismatched: {each_granule[0]['assets']['data']['href']}")
317336
return
318337

319338
def test_03_upload(self):

0 commit comments

Comments (0)