
Commit f02e625

Propagate through event parameters to file objects so date arguments are used

moradology committed Nov 14, 2022
1 parent 55cfa7f · commit f02e625
Showing 2 changed files with 17 additions and 2 deletions.
@@ -3,8 +3,9 @@
"collection": "nex-gddp-cmip6-crossover",
"prefix": "crossing/",
"bucket": "nex-gddp-cmip6-cog",
"filename_regex": "tas_CrossingYear_CMIP6_ssp245.tif",
"filename_regex": "^.*245.tif",
"discovery": "s3",
"single_datetime": "2015-01-01T00:00:00Z",
"properties": {
"ssp": "245"
}
@@ -13,8 +14,9 @@
"collection": "nex-gddp-cmip6-crossover",
"prefix": "crossing/",
"bucket": "nex-gddp-cmip6-cog",
"filename_regex": "tas_CrossingYear_CMIP6_ssp585.tif",
"filename_regex": "^.*585.tif",
"discovery": "s3",
"single_datetime": "2015-01-01T00:00:00Z",
"properties": {
"ssp": "585"
}
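The loosened patterns match any object key ending in "245.tif" or "585.tif" rather than one literal filename, and the new single_datetime value gives the discovery step a date to attach to each matched file. A minimal sketch of how such a pattern behaves, assuming keys are filtered with Python's re module; the sample keys and the use of re.search are illustrative assumptions, not the handler's actual matching code:

import re

# Hypothetical object keys under the "crossing/" prefix
keys = [
    "crossing/tas_CrossingYear_CMIP6_ssp245.tif",
    "crossing/tasmax_CrossingYear_CMIP6_ssp245.tif",
]

old_pattern = re.compile("tas_CrossingYear_CMIP6_ssp245.tif")
new_pattern = re.compile("^.*245.tif")

# The literal pattern only finds the single tas_... file
print([k for k in keys if old_pattern.search(k)])
# The broadened pattern matches every key ending in 245.tif
print([k for k in keys if new_pattern.search(k)])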
lambdas/s3-discovery/handler.py: 13 additions & 0 deletions
@@ -41,6 +41,18 @@ def handler(event, context):

     file_objs_size = 0
     payload = {**event, "cogify": cogify, "objects": []}
+
+    # Propagate forward optional datetime arguments
+    date_fields = {}
+    if "single_datetime" in event:
+        date_fields["single_datetime"] = event["single_datetime"]
+    if "start_datetime" in event:
+        date_fields["start_datetime"] = event["start_datetime"]
+    if "end_datetime" in event:
+        date_fields["end_datetime"] = event["end_datetime"]
+    if "datetime_range" in event:
+        date_fields["datetime_range"] = event["datetime_range"]
+
     for page in pages:
         if "Contents" not in page:
             raise Exception(f"No files found at s3://{bucket}/{prefix}")
@@ -57,6 +69,7 @@
"s3_filename": f"s3://{bucket}/{filename}",
"upload": event.get("upload", False),
"properties": properties,
**date_fields
}
payload["objects"].append(file_obj)
file_obj_size = len(json.dumps(file_obj, ensure_ascii=False).encode("utf8"))
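Taken together, the handler now copies whichever optional datetime arguments appear on the incoming event onto every discovered file object, so downstream steps see the date alongside each file. A minimal sketch of the propagation pattern in isolation, using a hypothetical event and a hard-coded s3_filename rather than the lambda's real inputs; the dict comprehension is just a condensed equivalent of the explicit checks in the commit:

# Hypothetical event, mirroring the config above
event = {
    "collection": "nex-gddp-cmip6-crossover",
    "single_datetime": "2015-01-01T00:00:00Z",
    "properties": {"ssp": "245"},
}

# Collect only the optional datetime keys that are actually present
date_fields = {
    key: event[key]
    for key in ("single_datetime", "start_datetime", "end_datetime", "datetime_range")
    if key in event
}

# Every discovered object is built with the same date fields spread in
file_obj = {
    "collection": event["collection"],
    "s3_filename": "s3://nex-gddp-cmip6-cog/crossing/tas_CrossingYear_CMIP6_ssp245.tif",
    "properties": event.get("properties", {}),
    **date_fields,
}
print(file_obj["single_datetime"])  # 2015-01-01T00:00:00Z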
