Skip to content

Commit

Permalink
feat: include HLS-VI tooling
Browse files Browse the repository at this point in the history
  • Loading branch information
sharkinsspatial authored Feb 10, 2025
1 parent d3575c2 commit 87422a7
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 8 deletions.
7 changes: 3 additions & 4 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -112,17 +112,16 @@ RUN pip3 install --upgrade awscli
RUN pip3 install click==7.1.2
RUN pip3 install rio-cogeo==1.1.10 --no-binary rasterio --user
RUN pip3 install git+https://github.com/NASA-IMPACT/[email protected]

RUN pip3 install git+https://github.com/NASA-IMPACT/[email protected]

RUN pip3 install git+https://github.com/NASA-IMPACT/[email protected]

RUN pip3 install git+https://github.com/NASA-IMPACT/[email protected]
RUN pip3 install wheel
RUN pip3 install git+https://github.com/NASA-IMPACT/[email protected]
RUN pip3 install libxml2-python3
RUN pip3 install git+https://github.com/NASA-IMPACT/[email protected]
RUN pip3 install git+https://github.com/NASA-IMPACT/[email protected]
RUN pip3 install git+https://github.com/NASA-IMPACT/[email protected]
RUN pip3 install git+https://github.com/NASA-IMPACT/[email protected]


COPY ./scripts/* ${PREFIX}/bin/
ENV OMP_NUM_THREADS=4
Expand Down
38 changes: 34 additions & 4 deletions scripts/sentinel.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/bin/bash
# shellcheck disable=SC2153
# shellcheck disable=SC1091
# shellcheck disable=SC1091
# Exit on any error
set -o errexit

Expand All @@ -10,6 +10,7 @@ bucket="$OUTPUT_BUCKET"
# shellcheck disable=SC2034
inputbucket="$INPUT_BUCKET"
workingdir="/var/scratch/${jobid}"
vidir="${workingdir}/vi"
bucket_role_arn="$GCC_ROLE_ARN"
debug_bucket="$DEBUG_BUCKET"
replace_existing="$REPLACE_EXISTING"
Expand Down Expand Up @@ -46,6 +47,7 @@ set_output_names () {
hlsversion="v2.0"
day_of_year=$(get_doy "${year}" "${month}" "${day}")
outputname="HLS.S30.${granulecomponents[5]}.${year}${day_of_year}${hms}.${hlsversion}"
vi_outputname="HLS-VI.S30.${granulecomponents[5]}.${year}${day_of_year}${hms}.${hlsversion}"
output_hdf="${workingdir}/${outputname}.hdf"
nbar_name="HLS.S30.${granulecomponents[5]}.${year}${day_of_year}.${hms}.${hlsversion}"
nbar_input="${workingdir}/${nbar_name}.hdf"
Expand All @@ -54,6 +56,7 @@ set_output_names () {
output_metadata="${workingdir}/${outputname}.cmr.xml"
output_stac_metadata="${workingdir}/${outputname}_stac.json"
bucket_key="s3://${bucket}/S30/data/${year}${day_of_year}/${outputname}${twinkey}"
vi_bucket_key="s3://${bucket}/S30_VI/data/${year}${day_of_year}/${vi_outputname}${twinkey}"
gibs_dir="${workingdir}/gibs"
gibs_bucket_key="s3://${gibs_bucket}/S30/data/${year}${day_of_year}"
# We also need to obtain the sensor for the Bandpass parameters file
Expand Down Expand Up @@ -171,7 +174,7 @@ create_thumbnail -i "$workingdir" -o "$output_thumbnail" -s S30

# Create metadata
echo "Creating metadata"
create_metadata "$output_hdf" --save "$output_metadata"
create_metadata "$output_hdf" --save "$output_metadata"

# Create STAC metadata
cmr_to_stac_item "$output_metadata" "$output_stac_metadata" \
Expand Down Expand Up @@ -202,7 +205,7 @@ if [ -z "$debug_bucket" ]; then
# Copy manifest to S3 to signal completion.
aws s3 cp "$manifest" "${bucket_key}/${manifest_name}" --profile gccprofile
else
# Create
# Create
# Convert intermediate hdf to COGs
hdf_to_cog "$resample30m" --output-dir "$workingdir" --product S30 --debug-mode
hdf_to_cog "$nbarIntermediate" --output-dir "$workingdir" --product S30 --debug-mode
Expand All @@ -227,7 +230,7 @@ for gibs_id_dir in "$gibs_dir"/* ; do
subtile_basename=$(basename "$xml" .xml)
subtile_manifest_name="${subtile_basename}.json"
subtile_manifest="${gibs_id_dir}/${subtile_manifest_name}"
gibs_id_bucket_key="$gibs_bucket_key/${gibsid}"
gibs_id_bucket_key="$gibs_bucket_key/${gibsid}"
echo "Gibs id bucket key is ${gibs_id_bucket_key}"

create_manifest "$gibs_id_dir" "$subtile_manifest" \
Expand All @@ -253,3 +256,30 @@ for gibs_id_dir in "$gibs_dir"/* ; do
fi
done
echo "All GIBS tiles created"

# ---- HLS-VI product generation ----
# Derive vegetation-index layers from the finished S30 granule in
# $workingdir, write them plus CMR/STAC metadata into $vidir, then
# publish with a manifest.
echo "Generating VI files"
vi_generate_indices -i "$workingdir" -o "$vidir" -s "$outputname"
vi_generate_metadata -i "$workingdir" -o "$vidir"
vi_generate_stac_items \
  --cmr_xml "$vidir/${vi_outputname}.cmr.xml" \
  --endpoint data.lpdaac.earthdatacloud.nasa.gov \
  --version 020 \
  --out_json "$vidir/${vi_outputname}_stac.json"

echo "Generating VI manifest"
vi_manifest_name="${vi_outputname}.json"
vi_manifest="${vidir}/${vi_manifest_name}"
create_manifest "$vidir" "$vi_manifest" "$vi_bucket_key" "HLSS30_VI" \
    "$vi_outputname" "$jobid" false

if [ -z "$debug_bucket" ]; then
  # Normal run: upload only product artifacts (tif/xml/jpg/stac json).
  aws s3 cp "$vidir" "$vi_bucket_key" --exclude "*" --include "*.tif" \
      --include "*.xml" --include "*.jpg" --include "*_stac.json" \
      --profile gccprofile --recursive

  # Copy vi manifest to S3 last so its arrival signals completion.
  aws s3 cp "$vi_manifest" "${vi_bucket_key}/${vi_manifest_name}" --profile gccprofile
else
  # Debug run: copy everything, unfiltered, to the debug bucket.
  # NOTE(review): key uses $outputname (not $vi_outputname) so VI files
  # land beside the main product's debug copy — confirm intentional.
  echo "Copy files to debug bucket"
  debug_bucket_key="s3://${debug_bucket}/${outputname}"
  aws s3 cp "$vidir" "$debug_bucket_key" --recursive --acl public-read
fi

0 comments on commit 87422a7

Please sign in to comment.