Skip to content

Commit

Permalink
Merge pull request #101 from unity-sds/develop
Browse files Browse the repository at this point in the history
release/1.9.2
  • Loading branch information
ngachung authored Nov 16, 2022
2 parents 407d1ab + 10ef223 commit a672dab
Show file tree
Hide file tree
Showing 14 changed files with 129 additions and 37 deletions.
9 changes: 9 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,15 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [1.9.2] - 2022-11-16
### Fixed
- [#100](https://github.com/unity-sds/unity-data-services/pull/100) status=completed is only for granules, not for collections
## [1.9.1] - 2022-11-15
### Added
- [#94](https://github.com/unity-sds/unity-data-services/issues/94) Added DAPA lambda function names to parameter store for UCS API Gateway integration
### Fixed
- [#98](https://github.com/unity-sds/unity-data-services/issues/98) accept provider from ENV or optionally from user call

## [1.8.1] - 2022-09-27
### Added
- [#79](https://github.com/unity-sds/unity-data-services/pull/79) Collection Creation endpoint with DAPA format
Expand Down
16 changes: 15 additions & 1 deletion ci.cd/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ build_lambda:
docker run --rm -v `PWD`:"/usr/src/app/cumulus_lambda_functions":z -w "/usr/src/app/cumulus_lambda_functions" cae-artifactory.jpl.nasa.gov:17001/python:3.9 ci.cd/create_s3_zip.sh

build_lambda_public:
docker run --rm -v `PWD`:"/usr/src/app/cumulus_lambda_functions":z -w "/usr/src/app/cumulus_lambda_functions" python:3.7 ci.cd/create_s3_zip.sh
docker run --rm -v `PWD`:"/usr/src/app/cumulus_lambda_functions":z -w "/usr/src/app/cumulus_lambda_functions" python:3.9 ci.cd/create_s3_zip.sh

upload_lambda:
aws --profile saml-pub s3 cp cumulus_lambda_functions_deployment.zip s3://am-uds-dev-cumulus-tf-state/unity_cumulus_lambda/
Expand All @@ -29,3 +29,17 @@ update_lambda_function_4:
aws --profile saml-pub lambda update-function-code --s3-key unity_cumulus_lambda/cumulus_lambda_functions_deployment.zip --s3-bucket am-uds-dev-cumulus-tf-state --function-name arn:aws:lambda:us-west-2:884500545225:function:am-uds-dev-cumulus-cumulus_collections_creation_dapa --publish &>/dev/null
update_lambda_function_5:
aws --profile saml-pub lambda update-function-code --s3-key unity_cumulus_lambda/cumulus_lambda_functions_deployment.zip --s3-bucket am-uds-dev-cumulus-tf-state --function-name arn:aws:lambda:us-west-2:884500545225:function:am-uds-dev-cumulus-cumulus_collections_creation_dapa_facade --publish &>/dev/null


upload_lambda_mcp_dev:
aws s3 cp cumulus_lambda_functions_deployment.zip s3://uds-dev-cumulus-public/unity_cumulus_lambda/
update_lambda_function_mcp_dev_1:
aws lambda update-function-code --s3-key unity_cumulus_lambda/cumulus_lambda_functions_deployment.zip --s3-bucket uds-dev-cumulus-public --function-name arn:aws:lambda:us-west-2:237868187491:function:uds-dev-cumulus-cumulus_collections_dapa --publish &>/dev/null
update_lambda_function_mcp_dev_2:
aws lambda update-function-code --s3-key unity_cumulus_lambda/cumulus_lambda_functions_deployment.zip --s3-bucket uds-dev-cumulus-public --function-name arn:aws:lambda:us-west-2:237868187491:function:uds-dev-cumulus-cumulus_granules_dapa --publish &>/dev/null
update_lambda_function_mcp_dev_4:
aws lambda update-function-code --s3-key unity_cumulus_lambda/cumulus_lambda_functions_deployment.zip --s3-bucket uds-dev-cumulus-public --function-name arn:aws:lambda:us-west-2:237868187491:function:uds-dev-cumulus-cumulus_collections_creation_dapa --publish &>/dev/null
update_lambda_function_mcp_dev_5:
aws lambda update-function-code --s3-key unity_cumulus_lambda/cumulus_lambda_functions_deployment.zip --s3-bucket uds-dev-cumulus-public --function-name arn:aws:lambda:us-west-2:237868187491:function:uds-dev-cumulus-cumulus_collections_creation_dapa_facade --publish &>/dev/null


Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,14 @@ def __init__(self, event):
self.__cumulus_lambda_prefix = os.getenv('CUMULUS_LAMBDA_PREFIX')
self.__ingest_sqs_url = os.getenv('CUMULUS_WORKFLOW_SQS_URL')
self.__workflow_name = os.getenv('CUMULUS_WORKFLOW_NAME', 'CatalogGranule')
self.__provider_id = '' # TODO. need this?
self.__provider_id = os.getenv('UNITY_DEFAULT_PROVIDER', '')
self.__collection_creation_lambda_name = os.environ.get('COLLECTION_CREATION_LAMBDA_NAME', '').strip()

def execute_creation(self):
try:
cumulus_collection_doc = CollectionTransformer().from_stac(self.__request_body)
collection_transformer = CollectionTransformer()
cumulus_collection_doc = collection_transformer.from_stac(self.__request_body)
self.__provider_id = self.__provider_id if collection_transformer.output_provider is None else collection_transformer.output_provider
creation_result = self.__cumulus_collection_query.create_collection(cumulus_collection_doc, self.__cumulus_lambda_prefix)
if 'status' not in creation_result:
LOGGER.error(f'status not in creation_result: {creation_result}')
Expand All @@ -37,6 +39,7 @@ def execute_creation(self):
'message': creation_result
})
}
LOGGER.debug(f'__provider_id: {self.__provider_id}')
rule_creation_result = self.__cumulus_collection_query.create_sqs_rules(
cumulus_collection_doc,
self.__cumulus_lambda_prefix,
Expand Down
12 changes: 12 additions & 0 deletions cumulus_lambda_functions/cumulus_stac/collection_transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from urllib.parse import quote_plus, urlparse, unquote_plus

import pystac
from cumulus_lambda_functions.lib.lambda_logger_generator import LambdaLoggerGenerator
from pystac import Link

from cumulus_lambda_functions.cumulus_stac.stac_transformer_abstract import StacTransformerAbstract
Expand Down Expand Up @@ -281,6 +282,7 @@
# "items": {
# "description": "For each field only the original data type of the property can occur (except for arrays), but we can't validate that in JSON Schema yet. See the sumamry description in the STAC specification for details."
# }
LOGGER = LambdaLoggerGenerator.get_logger(__name__, LambdaLoggerGenerator.get_level_from_env())


class CollectionTransformer(StacTransformerAbstract):
Expand All @@ -290,6 +292,13 @@ def __init__(self, report_to_ems:bool = True, include_date_range=False):
self.__report_to_ems = report_to_ems
self.__include_date_range = include_date_range

self.__output_provider = None
self.__output_cumulus_collection = None

@property
def output_provider(self):
return self.__output_provider

def generate_target_link_url(self, regex: str = None, bucket: str = None):
href_link = ['unknown_bucket', 'unknown_regex']
if regex is not None and regex != '':
Expand Down Expand Up @@ -502,4 +511,7 @@ def from_stac(self, source: dict) -> dict:
output_collection_cumulus['dateFrom'] = date_interval[0].strftime(TimeUtils.MMDD_FORMAT)
if date_interval[1] is not None:
output_collection_cumulus['dateTo'] = date_interval[1].strftime(TimeUtils.MMDD_FORMAT)
LOGGER.debug(f'input_dapa_collection.providers: {input_dapa_collection.providers}')
self.__output_provider = None if input_dapa_collection.providers is None or len(input_dapa_collection.providers) < 1 else input_dapa_collection.providers[0].name
self.__output_cumulus_collection = output_collection_cumulus
return output_collection_cumulus
11 changes: 8 additions & 3 deletions cumulus_lambda_functions/cumulus_stac/item_transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -330,6 +330,11 @@ def __get_assets(self, input_dict):
}
return asset_dict

def __get_datetime_from_source(self, source: dict, datetime_key: str):
if datetime_key not in source:
return '1970-01-01T00:00:00Z'
return f"{source[datetime_key]}{'' if source[datetime_key].endswith('Z') else 'Z'}"

def to_stac(self, source: dict) -> dict:
"""
Sample: Cumulus granule
Expand Down Expand Up @@ -409,10 +414,10 @@ def to_stac(self, source: dict) -> dict:
},
"properties": {
"datetime": f"{TimeUtils.decode_datetime(source['createdAt'], False)}Z",
"start_datetime": f"{source['beginningDateTime']}{'' if source['beginningDateTime'].endswith('Z') else 'Z'}",
"end_datetime": f"{source['endingDateTime']}{'' if source['endingDateTime'].endswith('Z') else 'Z'}",
"start_datetime": self.__get_datetime_from_source(source, 'beginningDateTime'),
"end_datetime": self.__get_datetime_from_source(source, 'endingDateTime'),
"created": self.__get_datetime_from_source(source, 'productionDateTime'),
# "created": source['processingEndDateTime'], # TODO
"created": source['productionDateTime'], # TODO
},
"collection": source['collectionId'],
"links": [
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from datetime import datetime

from pystac import Link, Collection, Extent, SpatialExtent, TemporalExtent, Summaries
from pystac import Link, Collection, Extent, SpatialExtent, TemporalExtent, Summaries, Provider

from cumulus_lambda_functions.cumulus_stac.collection_transformer import CollectionTransformer
from cumulus_lambda_functions.lib.lambda_logger_generator import LambdaLoggerGenerator
Expand All @@ -18,6 +18,11 @@ def __init__(self):
self.__sample_filename = ''
self.__files = []
self.__collection_transformer = CollectionTransformer()
self.__provider_name = ''

    def with_provider(self, provider_name: str):
        """Set the provider name attached to the collection being built.

        Returns self so calls can be chained builder-style.
        """
        self.__provider_name = provider_name
        return self

def with_title(self, title: str):
self.__collection_title = title
Expand Down Expand Up @@ -56,6 +61,7 @@ def start(self):
extent=Extent(SpatialExtent([[0, 0, 0, 0]]),
TemporalExtent([[datetime.utcnow(), datetime.utcnow()]])),
title=self.__collection_title,
providers=[Provider(self.__provider_name)],
summaries=Summaries({
'granuleId': [self.__granule_id_regex],
'granuleIdExtraction': [self.__granule_id_extraction_regex],
Expand Down
51 changes: 26 additions & 25 deletions cumulus_lambda_functions/cumulus_wrapper/query_collections.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,39 +171,40 @@ def create_sqs_rules(self, new_collection: dict, private_api_prefix: str, sqs_ur
}'
:return:
"""
rule_body = {
'workflow': workflow_name,
'collection': {
'name': new_collection['name'],
'version': new_collection['version'],
},
# 'provider': provider_name,
'name': f'{new_collection["name"]}___{new_collection["version"]}___rules_sqs',
'rule': {
# 'type': 'onetime',
'type': 'sqs',
'value': sqs_url,
},
'state': 'ENABLED',
"meta": {
'retries': 1,
'visibilityTimeout': visibility_timeout,
# "provider_path": "data/SNPP_ATMS_Level0_T/ATMS_SCIENCE_Group/2016/002/",
# "publish": False,
# "distribution_endpoint": "s3://am-uds-dev-cumulus-internal/"
},

}
if provider_name is not None and provider_name != '':
rule_body['provider'] = provider_name
payload = {
'httpMethod': 'POST',
'resource': '/{proxy+}',
'path': f'/{self.__rules_key}',
'headers': {
'Content-Type': 'application/json',
},
'body': json.dumps({
'workflow': workflow_name,
'collection': {
'name': new_collection['name'],
'version': new_collection['version'],
},
# 'provider': provider_name,
'name': f'{new_collection["name"]}___{new_collection["version"]}___rules_sqs',
'rule': {
# 'type': 'onetime',
'type': 'sqs',
'value': sqs_url,
},
'state': 'ENABLED',
"meta": {
'retries': 1,
'visibilityTimeout': visibility_timeout,
# "provider_path": "data/SNPP_ATMS_Level0_T/ATMS_SCIENCE_Group/2016/002/",
# "publish": False,
# "distribution_endpoint": "s3://am-uds-dev-cumulus-internal/"
},

})
'body': json.dumps(rule_body)
}
if provider_name is not None and provider_name != '':
payload['provider'] = provider_name
LOGGER.debug(f'payload: {payload}')
try:
query_result = self._invoke_api(payload, private_api_prefix)
Expand Down
2 changes: 2 additions & 0 deletions cumulus_lambda_functions/cumulus_wrapper/query_granules.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ class GranulesQuery(CumulusBase):

    def __init__(self, cumulus_base: str, cumulus_token: str):
        """Granule-query wrapper; base URL and token are passed through to CumulusBase."""
        super().__init__(cumulus_base, cumulus_token)
        # Granule searches are always restricted to completed ingests. This
        # condition is granule-specific and is deliberately NOT applied to
        # collection queries (see CHANGELOG 1.9.2 / PR #100).
        self._conditions.append('status=completed')


def with_collection_id(self, collection_id: str):
self._conditions.append(f'{self.__collection_id_key}={collection_id}')
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

setup(
name="cumulus_lambda_functions",
version="1.8.1",
version="1.9.2",
packages=find_packages(),
install_requires=install_requires,
tests_require=['mock', 'nose', 'sphinx', 'sphinx_rtd_theme', 'coverage', 'pystac', 'python-dotenv', 'jsonschema'],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ def test_01(self):
.with_granule_id_extraction_regex("(P[0-9]{3}[0-9]{4}[A-Z]{13}T[0-9]{12}0).+") \
.with_title("P1570515ATMSSCIENCEAXT11344000000001.PDS") \
.with_process('modis') \
.with_provider('Test123')\
.add_file_type("P1570515ATMSSCIENCEAXT11344000000000.PDS.cmr.xml",
"^P[0-9]{3}[0-9]{4}[A-Z]{13}T[0-9]{12}00.PDS.cmr.xml$", 'internal', 'metadata', 'item') \
.add_file_type("P1570515ATMSSCIENCEAXT11344000000001.PDS.xml",
Expand All @@ -42,11 +43,13 @@ def test_02(self):
os.environ[Constants.PASSWORD] = '/unity/uds/user/wphyo/dwssap'
os.environ[Constants.PASSWORD_TYPE] = Constants.PARAM_STORE
os.environ[Constants.CLIENT_ID] = '7a1fglm2d54eoggj13lccivp25' # JPL Cloud
os.environ[Constants.CLIENT_ID] = '71g0c73jl77gsqhtlfg2ht388c' # MCP Dev

os.environ[Constants.COGNITO_URL] = 'https://cognito-idp.us-west-2.amazonaws.com'
bearer_token = CognitoTokenRetriever().start()
post_url = 'https://k3a3qmarxh.execute-api.us-west-2.amazonaws.com/dev'
post_url = 'https://k3a3qmarxh.execute-api.us-west-2.amazonaws.com/dev/am-uds-dapa/collections/'
post_url = 'https://k3a3qmarxh.execute-api.us-west-2.amazonaws.com/dev/am-uds-dapa/collections/' # JPL Cloud
post_url = 'https://1gp9st60gd.execute-api.us-west-2.amazonaws.com/dev/am-uds-dapa/collections/' # MCP Dev
headers = {
'Authorization': f'Bearer {bearer_token}',
# 'Content-Type': 'application/json',
Expand All @@ -58,6 +61,7 @@ def test_02(self):
.with_granule_id_extraction_regex("(P[0-9]{3}[0-9]{4}[A-Z]{13}T[0-9]{12}0).+") \
.with_title("P1570515ATMSSCIENCEAXT11344000000001.PDS") \
.with_process('modis') \
.with_provider('unity')\
.add_file_type("P1570515ATMSSCIENCEAXT11344000000000.PDS.cmr.xml",
"^P[0-9]{3}[0-9]{4}[A-Z]{13}T[0-9]{12}00.PDS.cmr.xml$", 'internal', 'metadata', 'item') \
.add_file_type("P1570515ATMSSCIENCEAXT11344000000001.PDS.xml",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def test_01(self):
"description": "Sentinel-2 is a wide-swath, high-resolution, multi-spectral\nimaging mission supporting Copernicus Land Monitoring studies,\nincluding the monitoring of vegetation, soil and water cover,\nas well as observation of inland waterways and coastal areas.\n\nThe Sentinel-2 data contain 13 UINT16 spectral bands representing\nTOA reflectance scaled by 10000. See the [Sentinel-2 User Handbook](https://sentinel.esa.int/documents/247904/685211/Sentinel-2_User_Handbook)\nfor details. In addition, three QA bands are present where one\n(QA60) is a bitmask band with cloud mask information. For more\ndetails, [see the full explanation of how cloud masks are computed.](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-2-msi/level-1c/cloud-masks)\n\nEach Sentinel-2 product (zip archive) may contain multiple\ngranules. Each granule becomes a separate Earth Engine asset.\nEE asset ids for Sentinel-2 assets have the following format:\nCOPERNICUS/S2/20151128T002653_20151128T102149_T56MNN. Here the\nfirst numeric part represents the sensing date and time, the\nsecond numeric part represents the product generation date and\ntime, and the final 6-character string is a unique granule identifier\nindicating its UTM grid reference (see [MGRS](https://en.wikipedia.org/wiki/Military_Grid_Reference_System)).\n\nFor more details on Sentinel-2 radiometric resoltuon, [see this page](https://earth.esa.int/web/sentinel/user-guides/sentinel-2-msi/resolutions/radiometric).\n",
"license": "proprietary",
# "keywords": [],
"providers": [],
"providers": [{"name": "Test123"}],
"extent": {
"spatial": {
"bbox": [[0, 0, 0, 0]]
Expand All @@ -84,12 +84,15 @@ def test_01(self):
}
converted_stac = CollectionTransformer(include_date_range=True).to_stac(source)
self.assertEqual(None, stac_validator.validate(converted_stac), f'invalid stac format: {stac_validator}')
converted_cumulus = CollectionTransformer(include_date_range=True).from_stac(converted_stac)
converted_stac['providers'].append({'name': 'Test123'})
transformer = CollectionTransformer(include_date_range=True)
converted_cumulus = transformer.from_stac(converted_stac)
for k, v in source.items():
if k in ['updatedAt', 'timestamp', 'createdAt']:
continue
self.assertTrue(k in converted_cumulus, f'missing {k}')
if k not in ['files', 'dateFrom', 'dateTo']:
self.assertEqual(v, converted_cumulus[k], f'wrong value for {k}')
self.assertEqual(sorted(json.dumps(source['files'])), sorted(json.dumps(converted_cumulus['files'])), f"wrong files content: {source['files']} vs. {converted_cumulus['files']}")
self.assertEqual(transformer.output_provider, 'Test123', f'wrong provider')
return
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ def test_01(self):
.with_graule_id_regex("^P[0-9]{3}[0-9]{4}[A-Z]{13}T[0-9]{12}0$")\
.with_granule_id_extraction_regex("(P[0-9]{3}[0-9]{4}[A-Z]{13}T[0-9]{12}0).+")\
.with_title("P1570515ATMSSCIENCEAXT11344000000001.PDS")\
.with_provider('test123')\
.with_process('modis')\
.add_file_type("P1570515ATMSSCIENCEAXT11344000000000.PDS.cmr.xml", "^P[0-9]{3}[0-9]{4}[A-Z]{13}T[0-9]{12}00.PDS.cmr.xml$", 'internal', 'metadata', 'item') \
.add_file_type("P1570515ATMSSCIENCEAXT11344000000001.PDS.xml", "^P[0-9]{3}[0-9]{4}[A-Z]{13}T[0-9]{12}01\\.PDS\\.xml$", 'internal', 'metadata', 'item') \
Expand Down
Loading

0 comments on commit a672dab

Please sign in to comment.