From 97b68d06301195a35975f288994ada576284768e Mon Sep 17 00:00:00 2001 From: David Frantz Date: Tue, 16 Jun 2020 16:44:10 +0200 Subject: [PATCH 01/78] function for copying strings --- Makefile | 5 +++- src/cross-level/string-cl.c | 52 +++++++++++++++++++++++++++++++++++++ src/cross-level/string-cl.h | 47 +++++++++++++++++++++++++++++++++ 3 files changed, 103 insertions(+), 1 deletion(-) create mode 100755 src/cross-level/string-cl.c create mode 100755 src/cross-level/string-cl.h diff --git a/Makefile b/Makefile index 8b583d14..8494ad7d 100755 --- a/Makefile +++ b/Makefile @@ -71,7 +71,7 @@ TA=temp-aux ### TARGETS all: temp cross lower higher aux exe -cross: enum_cl cite_cl utils_cl alloc_cl stack_cl imagefuns_cl param_cl date_cl datesys_cl lock_cl cube_cl dir_cl stats_cl pca_cl tile_cl queue_cl warp_cl sun_cl quality_cl sys_cl konami_cl download_cl read_cl +cross: string_cl enum_cl cite_cl utils_cl alloc_cl stack_cl imagefuns_cl param_cl date_cl datesys_cl lock_cl cube_cl dir_cl stats_cl pca_cl tile_cl queue_cl warp_cl sun_cl quality_cl sys_cl konami_cl download_cl read_cl lower: table_ll param_ll meta_ll cube_ll equi7_ll glance7_ll atc_ll sunview_ll read_ll radtran_ll topo_ll cloud_ll gas_ll brdf_ll atmo_ll aod_ll resmerge_ll coreg_ll coregfuns_ll acix_ll modwvp_ll higher: param_hl progress_hl tasks_hl read-aux_hl read-ard_hl quality_hl bap_hl level3_hl cso_hl tsa_hl index_hl interpolate_hl stm_hl fold_hl standardize_hl pheno_hl trend_hl ml_hl texture_hl lsm_hl lib_hl sample_hl imp_hl cfimp_hl l2imp_hl aux: param_aux param_train_aux train_aux @@ -87,6 +87,9 @@ temp: ### CROSS LEVEL COMPILE UNITS +string_cl: temp $(DC)/string-cl.c + $(GCC) $(CFLAGS) -c $(DC)/string-cl.c -o $(TC)/string_cl.o + enum_cl: temp $(DC)/enum-cl.c $(GCC) $(CFLAGS) -c $(DC)/enum-cl.c -o $(TC)/enum_cl.o diff --git a/src/cross-level/string-cl.c b/src/cross-level/string-cl.c new file mode 100755 index 00000000..b35bbb79 --- /dev/null +++ b/src/cross-level/string-cl.c @@ -0,0 +1,52 @@ 
+/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +This file is part of FORCE - Framework for Operational Radiometric +Correction for Environmental monitoring. + +Copyright (C) 2013-2020 David Frantz + +FORCE is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +FORCE is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with FORCE. If not, see . + ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + +/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +This file contains functions for string handling ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + + +#include "string-cl.h" + + +/** Copy string ++++ This function copies a source string into a destination buffer. ++++ strncpy copies as many characters from src to dst as there is space ++++ in dst. The string is padded with zeros. This way, buffer overflow ++++ won't happen. If src is longer than dst, the copy will be truncated. The ++++ truncation will be detected and the program will interrupt. 
+--- dst: destination buffer +--- size: size of destination buffer +--- src: source string ++++ Return: void ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ +void copy_string(char *dst, size_t size, const char *src){ + + strncpy(dst, src, size); + if (dst[size-1] != '\0'){ + printf("cannot copy, string too long.\n"); + exit(1); + } + + return; +} + diff --git a/src/cross-level/string-cl.h b/src/cross-level/string-cl.h new file mode 100755 index 00000000..5a1660b4 --- /dev/null +++ b/src/cross-level/string-cl.h @@ -0,0 +1,47 @@ +/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +This file is part of FORCE - Framework for Operational Radiometric +Correction for Environmental monitoring. + +Copyright (C) 2013-2020 David Frantz + +FORCE is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +FORCE is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with FORCE. If not, see . 
+ ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + +/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +String handling header ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + + +#ifndef STRING_CL_H +#define STRING_CL_H + +#include // core input and output functions +#include // standard general utilities library +#include // string handling functions + + +#ifdef __cplusplus +extern "C" { +#endif + +void copy_string(char *dst, size_t size, const char *src); + +#ifdef __cplusplus +} +#endif + +#endif + From 83e7dd54397372a6d667d8911ff62c9013c2759a Mon Sep 17 00:00:00 2001 From: David Frantz Date: Wed, 17 Jun 2020 13:22:43 +0200 Subject: [PATCH 02/78] removed strncpy --- src/cross-level/param-cl.c | 348 +++++++++++++---------------------- src/cross-level/param-cl.h | 1 + src/cross-level/sys-cl.c | 112 +++++------- src/cross-level/sys-cl.h | 1 + src/lower-level/meta-ll.c | 366 ++++++++++++++++++------------------- src/lower-level/meta-ll.h | 1 + 6 files changed, 351 insertions(+), 478 deletions(-) diff --git a/src/cross-level/param-cl.c b/src/cross-level/param-cl.c index cd1333de..0dcb4cea 100755 --- a/src/cross-level/param-cl.c +++ b/src/cross-level/param-cl.c @@ -1,6 +1,6 @@ /**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -This file is part of FORCE - Framework for Operational Radiometric +This file is part of FORCE - Framework for Operational Radiometric Correction for Environmental monitoring. Copyright (C) 2013-2020 David Frantz @@ -29,7 +29,7 @@ This file contains functions for parsing parameter files /** Number of values -+++ This function takes a parameter in tag and multi-value notation, and ++++ This function takes a parameter in tag and multi-value notation, and +++ returns the number of values. 
--- buf: buffer that holds tag / value line as read from parameter file +++ Return: number of values @@ -41,14 +41,12 @@ const char *separator = " ="; int n = -1; // start at -1 to ignore tag - if (strlen(buf) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(buffer, buf, strlen(buf)); buffer[strlen(buf)] = '\0';} + copy_string(buffer, NPOW_10, buf); buffer[strcspn(buffer, "\r\n#")] = 0; ptr = strtok(buffer, separator); - + while (ptr != NULL){ ptr = strtok(NULL, separator); n++; @@ -75,16 +73,7 @@ const char *separator = " =\n"; ptr = strtok(str, separator); while ((ptr = strtok(NULL, separator)) != NULL){ - - if (strlen(ptr) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(param[num], ptr, strlen(ptr)); - param[num][strlen(ptr)] = '\0'; - num++; - } - - } + copy_string(param[num++], NPOW_10, ptr);} *n = num; return true; @@ -103,7 +92,7 @@ int y, m, d; date_t date; - strncpy(cy, str, 4); cy[4] = '\0'; y= atoi(cy); + strncpy(cy, str, 4); cy[4] = '\0'; y = atoi(cy); strncpy(cm, str+5, 2); cm[2] = '\0'; m = atoi(cm); strncpy(cd, str+8, 2); cd[2] = '\0'; d = atoi(cd); @@ -129,7 +118,7 @@ int e; for (e=0; en < params->nmax) return; - + re_alloc((void**)¶ms->par, params->nmax, params->nmax*2, sizeof(par_t)); - + params->nmax *= 2; return; @@ -189,7 +178,7 @@ int i; } free((void*)params); params = NULL; - + } return; @@ -239,8 +228,8 @@ void allocate_par(par_t *par){ +++ Return: void +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ void free_par(par_t *par){ - - + + switch (par->type){ case _PAR_INT_: if (par->length != NULL) free((void*)par->int_vec_[0]); @@ -278,7 +267,7 @@ void free_par(par_t *par){ } -/** This function pre-screens a parameter file, and counts how often a +/** This function pre-screens a parameter file, and counts how often a --- given tag was specified --- fpar: parameter filepath --- tag: parameter tag @@ -298,7 +287,7 @@ int n = 0; if 
(strcmp(ptr, tag) == 0) n++; } fseek(fpar, 0, SEEK_SET); - + if (n == 0){ printf("pre-screening parfile failed. No tag %s was detected. ", tag); return FAILURE; @@ -322,18 +311,13 @@ void register_int_par(params_t *params, const char *name, int min, int max, int reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - + params->par[params->n].int_range[_MIN_] = min; params->par[params->n].int_range[_MAX_] = max; - + params->par[params->n].type = _PAR_INT_; params->par[params->n].length = NULL; @@ -358,15 +342,10 @@ void register_enum_par(params_t *params, const char *name, const tagged_enum_t * reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - + params->par[params->n].n_enums = n_enums; alloc((void**)¶ms->par[params->n].enums, n_enums, sizeof(tagged_enum_t)); memmove(params->par[params->n].enums, enums, sizeof(tagged_enum_t)*n_enums); @@ -395,18 +374,13 @@ void register_float_par(params_t *params, const char *name, float min, float max reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - + params->par[params->n].float_range[_MIN_] = min; params->par[params->n].float_range[_MAX_] = max; - + params->par[params->n].type = 
_PAR_FLOAT_; params->par[params->n].length = NULL; @@ -431,12 +405,7 @@ void register_double_par(params_t *params, const char *name, double min, double reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; @@ -465,23 +434,16 @@ void register_bool_par(params_t *params, const char *name, int *ptr){ reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - + params->par[params->n].n_enums = 2; alloc((void**)¶ms->par[params->n].enums, 2, sizeof(tagged_enum_t)); - - strncpy(params->par[params->n].enums[0].tag, "FALSE", 5); - params->par[params->n].enums[0].tag[5] = '\0'; - strncpy(params->par[params->n].enums[1].tag, "TRUE", 4); - params->par[params->n].enums[1].tag[4] = '\0'; - + + copy_string(params->par[params->n].enums[0].tag, NPOW_04, "FALSE"); + copy_string(params->par[params->n].enums[1].tag, NPOW_04, "TRUE"); + params->par[params->n].enums[0].en = false; params->par[params->n].enums[1].en = true; @@ -511,23 +473,13 @@ char cmax[NPOW_10]; reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - if (strlen(min) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(cmin, min, strlen(min)); 
cmin[strlen(min)] = '\0';} + copy_string(cmin, NPOW_10, min); + copy_string(cmax, NPOW_10, max); - if (strlen(max) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(cmax, max, strlen(max)); cmax[strlen(max)] = '\0';} - params->par[params->n].date_range[_MIN_] = parse_date(cmin); params->par[params->n].date_range[_MAX_] = parse_date(cmax); @@ -554,17 +506,12 @@ void register_char_par(params_t *params, const char *name, int char_test, char * reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; params->par[params->n].char_test = char_test; - + params->par[params->n].type = _PAR_CHAR_; params->par[params->n].length = NULL; @@ -591,25 +538,20 @@ void register_intvec_par(params_t *params, const char *name, int min, int max, i reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - + params->par[params->n].int_range[_MIN_] = min; params->par[params->n].int_range[_MAX_] = max; - + params->par[params->n].type = _PAR_INT_; params->par[params->n].length = ptr_length; *params->par[params->n].length = 0; params->par[params->n].int_vec_ = ptr; - + params->n++; return; @@ -630,26 +572,21 @@ void register_enumvec_par(params_t *params, const char *name, const tagged_enum_ reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - 
params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - + params->par[params->n].n_enums = n_enums; alloc((void**)¶ms->par[params->n].enums, n_enums, sizeof(tagged_enum_t)); memmove(params->par[params->n].enums, enums, sizeof(tagged_enum_t)*n_enums); - + params->par[params->n].type = _PAR_ENUM_; params->par[params->n].length = ptr_length; *params->par[params->n].length = 0; params->par[params->n].int_vec_ = ptr; - + params->n++; return; @@ -670,25 +607,20 @@ void register_floatvec_par(params_t *params, const char *name, float min, float reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - + params->par[params->n].float_range[_MIN_] = min; params->par[params->n].float_range[_MAX_] = max; - + params->par[params->n].type = _PAR_FLOAT_; params->par[params->n].length = ptr_length; *params->par[params->n].length = 0; params->par[params->n].float_vec_ = ptr; - + params->n++; return; @@ -709,25 +641,20 @@ void register_doublevec_par(params_t *params, const char *name, double min, doub reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - + params->par[params->n].double_range[_MIN_] = min; params->par[params->n].double_range[_MAX_] = max; - + params->par[params->n].type = _PAR_DOUBLE_; params->par[params->n].length = ptr_length; *params->par[params->n].length = 0; params->par[params->n].double_vec_ = ptr; - + params->n++; 
return; @@ -746,34 +673,26 @@ void register_boolvec_par(params_t *params, const char *name, int **ptr, int *pt reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - + params->par[params->n].n_enums = 2; alloc((void**)¶ms->par[params->n].enums, 2, sizeof(tagged_enum_t)); - - strncpy(params->par[params->n].enums[0].tag, "FALSE", 5); - params->par[params->n].enums[0].tag[5] = '\0'; - - strncpy(params->par[params->n].enums[1].tag, "TRUE", 4); - params->par[params->n].enums[1].tag[4] = '\0'; - + + copy_string(params->par[params->n].enums[0].tag, NPOW_04, "FALSE"); + copy_string(params->par[params->n].enums[1].tag, NPOW_04, "TRUE"); + params->par[params->n].enums[0].en = false; params->par[params->n].enums[1].en = true; - + params->par[params->n].type = _PAR_BOOL_; params->par[params->n].length = ptr_length; *params->par[params->n].length = 0; params->par[params->n].int_vec_ = ptr; - + params->n++; return; @@ -796,33 +715,23 @@ char cmax[NPOW_10]; reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - - if (strlen(min) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(cmin, min, strlen(min)); cmin[strlen(min)] = '\0';} - if (strlen(max) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(cmax, max, strlen(max)); cmax[strlen(max)] = '\0';} + copy_string(cmin, NPOW_10, min); + copy_string(cmax, NPOW_10, max); 
params->par[params->n].date_range[_MIN_] = parse_date(cmin); params->par[params->n].date_range[_MAX_] = parse_date(cmax); - + params->par[params->n].type = _PAR_DATE_; params->par[params->n].length = ptr_length; *params->par[params->n].length = 0; params->par[params->n].date_vec_ = ptr; - + params->n++; return; @@ -842,15 +751,10 @@ void register_charvec_par(params_t *params, const char *name, int char_test, cha reallocate_params(params); - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(params->par[params->n].name, name, strlen(name)); - params->par[params->n].name[strlen(name)] = '\0'; - } + copy_string(params->par[params->n].name, NPOW_10, name); params->par[params->n].set = false; - + params->par[params->n].char_test = char_test; params->par[params->n].type = _PAR_CHAR_; @@ -859,14 +763,14 @@ void register_charvec_par(params_t *params, const char *name, int char_test, cha *params->par[params->n].length = 0; params->par[params->n].char_vec_ = ptr; - + params->n++; return; } -/** This function parses a parameter. One line of the parameter file is +/** This function parses a parameter. 
One line of the parameter file is +++ compared to all registered parameters --- params: parsed parameters --- buf: buffer that holds tag / value line as read from parameter file @@ -880,10 +784,7 @@ char *ptr = NULL; const char *separator = " ="; - if (strlen(buf) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(buffer, buf, strlen(buf)); buffer[strlen(buf)] = '\0';} - + copy_string(buffer, NPOW_10, buf); buffer[strcspn(buffer, "\r\n#")] = 0; ptr = strtok(buffer, separator); @@ -892,7 +793,7 @@ const char *separator = " ="; if (tag == NULL) return; for (i=0; in; i++){ - + if (params->par[i].set) continue; if (strcmp(tag, params->par[i].name) == 0){ @@ -924,12 +825,13 @@ const char *separator = " ="; *params->par[i].date_ = parse_date(ptr); break; case _PAR_CHAR_: - if (strlen(ptr) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(*params->par[i].char_, ptr, strlen(ptr)); - (*params->par[i].char_)[strlen(ptr)] = '\0'; - } + //if (strlen(ptr) > NPOW_10-1){ + // printf("cannot copy, string too long.\n"); exit(1); + //} else { + // strncpy(*params->par[i].char_, ptr, strlen(ptr)); + // (*params->par[i].char_)[strlen(ptr)] = '\0'; + //} + copy_string(*params->par[i].char_, NPOW_10, ptr); break; default: printf("unknown datatype for par..\n"); @@ -942,10 +844,7 @@ const char *separator = " ="; if ((*params->par[i].length = length_par(buf)) < 1) return; allocate_par(¶ms->par[i]); - if (strlen(buf) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(buffer, buf, strlen(buf)); buffer[strlen(buf)] = '\0';} - + copy_string(buffer, NPOW_10, buf); buffer[strcspn(buffer, "\r\n#")] = 0; ptr = strtok(buffer, separator); @@ -976,12 +875,13 @@ const char *separator = " ="; params->par[i].date_vec_[0][n] = parse_date(ptr); break; case _PAR_CHAR_: - if (strlen(ptr) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - 
strncpy(params->par[i].char_vec_[0][n], ptr, strlen(ptr)); - params->par[i].char_vec_[0][n][strlen(ptr)] = '\0'; - } + //if (strlen(ptr) > NPOW_10-1){ + // printf("cannot copy, string too long.\n"); exit(1); + //} else { + // strncpy(params->par[i].char_vec_[0][n], ptr, strlen(ptr)); + // params->par[i].char_vec_[0][n][strlen(ptr)] = '\0'; + //} + copy_string(params->par[i].char_vec_[0][n], NPOW_10, ptr); break; default: printf("unknown datatype for par..\n"); @@ -990,7 +890,7 @@ const char *separator = " ="; n++; - } + } } @@ -1015,9 +915,9 @@ char eol[1]; printf("%d parameters were registered.\n", params->n); for (i=0; in; i++){ - + printf("%s: ", params->par[i].name); - + // scalar if (params->par[i].length == NULL){ @@ -1038,7 +938,7 @@ char eol[1]; printf("%d\n", *params->par[i].int_); break; case _PAR_DATE_: - printf("%04d-%02d-%02d\n", params->par[i].date_->year, + printf("%04d-%02d-%02d\n", params->par[i].date_->year, params->par[i].date_->month, params->par[i].date_->day); break; case _PAR_CHAR_: @@ -1053,7 +953,7 @@ char eol[1]; } else { for (n=0; n<*params->par[i].length; n++){ - + if (n == *params->par[i].length-1) eol[0] = '\n'; else eol[0] = ' '; switch (params->par[i].type){ @@ -1073,7 +973,7 @@ char eol[1]; printf("%d%s", params->par[i].int_vec_[0][n], eol); break; case _PAR_DATE_: - printf("%04d-%02d-%02d%s", params->par[i].date_vec_[0][n].year, + printf("%04d-%02d-%02d%s", params->par[i].date_vec_[0][n].year, params->par[i].date_vec_[0][n].month, params->par[i].date_vec_[0][n].day, eol); break; case _PAR_CHAR_: @@ -1089,7 +989,7 @@ char eol[1]; } } - + return; } @@ -1149,7 +1049,7 @@ char eol[1]; break; case _PAR_DATE_: if (cur < end){ - cur += snprintf(cur, end-cur, "%04d-%02d-%02d", params->par[i].date_->year, + cur += snprintf(cur, end-cur, "%04d-%02d-%02d", params->par[i].date_->year, params->par[i].date_->month, params->par[i].date_->day); } else { printf("Buffer Overflow in assembling par log\n"); exit(1);} break; @@ -1169,7 +1069,7 @@ char 
eol[1]; } else { for (n=0; n<*params->par[i].length; n++){ - + if (n == *params->par[i].length-1) eol[0] = '\0'; else eol[0] = ' '; switch (params->par[i].type){ @@ -1200,7 +1100,7 @@ char eol[1]; break; case _PAR_DATE_: if (cur < end){ - cur += snprintf(cur, end-cur, "%04d-%02d-%02d%s", params->par[i].date_vec_[0][n].year, + cur += snprintf(cur, end-cur, "%04d-%02d-%02d%s", params->par[i].date_vec_[0][n].year, params->par[i].date_vec_[0][n].month, params->par[i].date_vec_[0][n].day, eol); } else { printf("Buffer Overflow in assembling par log\n"); exit(1);} break; @@ -1221,7 +1121,7 @@ char eol[1]; } } - + return; } @@ -1235,20 +1135,20 @@ int error = 0, n; if (par->length == NULL){ - if (*par->int_ < par->int_range[_MIN_] || + if (*par->int_ < par->int_range[_MIN_] || *par->int_ > par->int_range[_MAX_]) error++; } else { for (n=0; n<*par->length; n++){ - if (par->int_vec_[0][n] < par->int_range[_MIN_] || + if (par->int_vec_[0][n] < par->int_range[_MIN_] || par->int_vec_[0][n] > par->int_range[_MAX_]) error++; } } - + if (error > 0){ - printf("parameter %s is out of bounds [%d,%d].\n", + printf("parameter %s is out of bounds [%d,%d].\n", par->name, par->int_range[_MIN_], par->int_range[_MAX_]); return FAILURE; - } + } return SUCCESS; } @@ -1276,13 +1176,13 @@ bool ok; if (!ok) error++; } } - + if (error > 0){ printf("parameter %s is out of bounds {%s", par->name, par->enums[0].tag); for (e=1; en_enums; e++) printf(",%s", par->enums[e].tag); printf("}.\n"); return FAILURE; - } + } return SUCCESS; } @@ -1297,20 +1197,20 @@ int error = 0, n; if (par->length == NULL){ - if (*par->float_ < par->float_range[_MIN_] || + if (*par->float_ < par->float_range[_MIN_] || *par->float_ > par->float_range[_MAX_]) error++; } else { for (n=0; n<*par->length; n++){ - if (par->float_vec_[0][n] < par->float_range[_MIN_] || + if (par->float_vec_[0][n] < par->float_range[_MIN_] || par->float_vec_[0][n] > par->float_range[_MAX_]) error++; } } - + if (error > 0){ - printf("parameter %s is 
out of bounds [%f,%f].\n", + printf("parameter %s is out of bounds [%f,%f].\n", par->name, par->float_range[_MIN_], par->float_range[_MAX_]); return FAILURE; - } + } return SUCCESS; } @@ -1325,20 +1225,20 @@ int error = 0, n; if (par->length == NULL){ - if (*par->double_ < par->double_range[_MIN_] || + if (*par->double_ < par->double_range[_MIN_] || *par->double_ > par->double_range[_MAX_]) error++; } else { for (n=0; n<*par->length; n++){ - if (par->double_vec_[0][n] < par->double_range[_MIN_] || + if (par->double_vec_[0][n] < par->double_range[_MIN_] || par->double_vec_[0][n] > par->double_range[_MAX_]) error++; } } - + if (error > 0){ - printf("parameter %s is out of bounds [%f,%f].\n", + printf("parameter %s is out of bounds [%f,%f].\n", par->name, par->double_range[_MIN_], par->double_range[_MAX_]); return FAILURE; - } + } return SUCCESS; } @@ -1353,22 +1253,22 @@ int error = 0, n; if (par->length == NULL){ - if (par->date_->ce < par->date_range[_MIN_].ce || + if (par->date_->ce < par->date_range[_MIN_].ce || par->date_->ce > par->date_range[_MAX_].ce) error++; } else { for (n=0; n<*par->length; n++){ - if (par->date_vec_[0][n].ce < par->date_range[_MIN_].ce || + if (par->date_vec_[0][n].ce < par->date_range[_MIN_].ce || par->date_vec_[0][n].ce > par->date_range[_MAX_].ce) error++; } } - + if (error > 0){ - printf("parameter %s is out of bounds [%04d-%02d-%02d,%04d-%02d-%02d].\n", + printf("parameter %s is out of bounds [%04d-%02d-%02d,%04d-%02d-%02d].\n", par->name, par->date_range[_MIN_].year, par->date_range[_MIN_].month, par->date_range[_MIN_].day, par->date_range[_MAX_].year, par->date_range[_MAX_].month, par->date_range[_MAX_].day); return FAILURE; - } + } return SUCCESS; } @@ -1421,7 +1321,7 @@ int n; } else { for (n=0; n<*par->length; n++){ - + switch (par->char_test){ case _CHAR_TEST_NULL_OR_EXIST_: if ((strcmp(par->char_vec_[0][n], "NULL") != 0) && !fileexist(par->char_vec_[0][n])){ @@ -1474,13 +1374,13 @@ int error = 0, i; for (i=0; in; i++){ - + if 
(params->par[i].set == false){ printf("parameter %s was not set.\n", params->par[i].name); error++; continue; } - + if (params->par[i].length != NULL && *params->par[i].length == 0){ printf("parameter %s was incorrectly parsed.\n", params->par[i].name); error++; @@ -1515,7 +1415,7 @@ int error = 0, i; } } - + if (error > 0) return FAILURE; else return SUCCESS; } diff --git a/src/cross-level/param-cl.h b/src/cross-level/param-cl.h index fde04e5e..4c292aeb 100755 --- a/src/cross-level/param-cl.h +++ b/src/cross-level/param-cl.h @@ -35,6 +35,7 @@ Parsing parameter header #include // macro constants of the integer types #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/alloc-cl.h" #include "../cross-level/date-cl.h" #include "../cross-level/dir-cl.h" diff --git a/src/cross-level/sys-cl.c b/src/cross-level/sys-cl.c index eb9db512..8be3cbf0 100755 --- a/src/cross-level/sys-cl.c +++ b/src/cross-level/sys-cl.c @@ -47,82 +47,52 @@ int i = 0, num = 6*2; // system info if (uname(&sys) != -1){ - - strncpy(stringlist[i], "Sys_system_name", 15); stringlist[i][15] = '\0'; i++; - if (strlen(sys.sysname) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stringlist[i], sys.sysname, strlen(sys.sysname)); - stringlist[i][strlen(sys.sysname)] = '\0'; i++; - } - - strncpy(stringlist[i], "Sys_host_name", 13); stringlist[i][13] = '\0'; i++; - if (strlen(sys.nodename) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stringlist[i], sys.nodename, strlen(sys.nodename)); - stringlist[i][strlen(sys.nodename)] = '\0'; i++; - } - - strncpy(stringlist[i], "Sys_OS_release", 14); stringlist[i][14] = '\0'; i++; - if (strlen(sys.release) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stringlist[i], sys.release, strlen(sys.release)); - stringlist[i][strlen(sys.release)] = '\0'; i++; - } - - strncpy(stringlist[i], "Sys_OS_version", 
14); stringlist[i][14] = '\0'; i++; - if (strlen(sys.version) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stringlist[i], sys.version, strlen(sys.version)); - stringlist[i][strlen(sys.version)] = '\0'; i++; - } - - strncpy(stringlist[i], "Sys_machine", 11); stringlist[i][11] = '\0'; i++; - if (strlen(sys.machine) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stringlist[i], sys.machine, strlen(sys.machine)); - stringlist[i][strlen(sys.machine)] = '\0'; i++; - } - + + copy_string(stringlist[i++], NPOW_10, "Sys_system_name"); + copy_string(stringlist[i++], NPOW_10, sys.sysname); + + copy_string(stringlist[i++], NPOW_10, "Sys_host_name"); + copy_string(stringlist[i++], NPOW_10, sys.nodename); + + copy_string(stringlist[i++], NPOW_10, "Sys_OS_release"); + copy_string(stringlist[i++], NPOW_10, sys.release); + + copy_string(stringlist[i++], NPOW_10, "Sys_OS_version"); + copy_string(stringlist[i++], NPOW_10, sys.version); + + copy_string(stringlist[i++], NPOW_10, "Sys_machine"); + copy_string(stringlist[i++], NPOW_10, sys.machine); + } else { - - strncpy(stringlist[i], "Sys_system_name", 15); stringlist[i][15] = '\0'; i++; - strncpy(stringlist[i], "unknown", 7); stringlist[i][7] = '\0'; i++; - - strncpy(stringlist[i], "Sys_host_name", 13); stringlist[i][13] = '\0'; i++; - strncpy(stringlist[i], "unknown", 7); stringlist[i][7] = '\0'; i++; - - strncpy(stringlist[i], "Sys_OS_release", 14); stringlist[i][14] = '\0'; i++; - strncpy(stringlist[i], "unknown", 7); stringlist[i][7] = '\0'; i++; - - strncpy(stringlist[i], "Sys_OS_version", 14); stringlist[i][14] = '\0'; i++; - strncpy(stringlist[i], "unknown", 7); stringlist[i][7] = '\0'; i++; - - strncpy(stringlist[i], "Sys_machine", 11); stringlist[i][11] = '\0'; i++; - strncpy(stringlist[i], "unknown", 7); stringlist[i][7] = '\0'; i++; - + + copy_string(stringlist[i++], NPOW_10, "Sys_system_name"); + copy_string(stringlist[i++], NPOW_10, 
"unknown"); + + copy_string(stringlist[i++], NPOW_10, "Sys_host_name"); + copy_string(stringlist[i++], NPOW_10, "unknown"); + + copy_string(stringlist[i++], NPOW_10, "Sys_OS_release"); + copy_string(stringlist[i++], NPOW_10, "unknown"); + + copy_string(stringlist[i++], NPOW_10, "Sys_OS_version"); + copy_string(stringlist[i++], NPOW_10, "unknown"); + + copy_string(stringlist[i++], NPOW_10, "Sys_machine"); + copy_string(stringlist[i++], NPOW_10, "unknown"); + } - + if (getlogin_r(user, NPOW_10) == 0){ - - strncpy(stringlist[i], "Sys_operator", 12); stringlist[i][12] = '\0'; i++; - if (strlen(user) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stringlist[i], user, strlen(user)); - stringlist[i][strlen(user)] = '\0'; i++; - } - + + copy_string(stringlist[i++], NPOW_10, "Sys_operator"); + copy_string(stringlist[i++], NPOW_10, user); + } else { - - strncpy(stringlist[i], "Sys_operator", 12); stringlist[i][12] = '\0'; i++; - strncpy(stringlist[i], "unknown", 7); stringlist[i][7] = '\0'; i++; - + + copy_string(stringlist[i++], NPOW_10, "Sys_operator"); + copy_string(stringlist[i++], NPOW_10, "unknown"); + } *n = num; diff --git a/src/cross-level/sys-cl.h b/src/cross-level/sys-cl.h index 6f48755e..8e6673a5 100755 --- a/src/cross-level/sys-cl.h +++ b/src/cross-level/sys-cl.h @@ -33,6 +33,7 @@ System info header #include // string handling functions #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/alloc-cl.h" diff --git a/src/lower-level/meta-ll.c b/src/lower-level/meta-ll.c index 54256c53..13f8501a 100755 --- a/src/lower-level/meta-ll.c +++ b/src/lower-level/meta-ll.c @@ -1,6 +1,6 @@ /**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -This file is part of FORCE - Framework for Operational Radiometric +This file is part of FORCE - Framework for Operational Radiometric Correction for Environmental monitoring. 
Copyright (C) 2013-2020 David Frantz @@ -39,7 +39,7 @@ meta_t *meta = NULL; alloc((void**)&meta, 1, sizeof(meta_t)); - init_metadata(meta); + init_metadata(meta); return meta; } @@ -80,7 +80,7 @@ printf("check that all meta is initialized, stack as well?\n"); meta->dtype = meta->fill; meta->sat = meta->fill; meta->tier = meta->fill; - + meta->cal = NULL; meta->s2.szen = NULL; @@ -103,7 +103,7 @@ cal_t *cal = NULL; int b; alloc((void**)&cal, nb, sizeof(cal_t)); - for (b=0; bfname, "NULL", 4); cal->fname[4] = '\0'; - strncpy(cal->orig_band, "NULL", 4); cal->orig_band[4] = '\0'; + copy_string(cal->fname, NPOW_10, "NULL"); + copy_string(cal->orig_band, NPOW_03, "NULL"); cal->fill = -32767; @@ -161,10 +161,10 @@ printf("init and check for stack struct, too?\n"); #endif if ((nb = get_stack_nbands(DN)) < 0){ - printf("error in retrieving number of bands. "); return FAILURE;} + printf("error in retrieving number of bands. "); return FAILURE;} if ((b_temp = find_domain(DN, "TEMP")) < 0){ - printf("error in retrieving temp band. "); return FAILURE;} + printf("error in retrieving temp band. "); return FAILURE;} // if(strcmp(meta->satellite, "NULL") == 0){ // printf("error in satellite name. "); return FAILURE;} @@ -235,10 +235,10 @@ char basename[NPOW_10]; for (b=0; bcal[b].fname, basename, NPOW_10); printf("DN: %s\n", basename); - printf(" LMAX/LMIN %.2f/%.2f, QMAX/QMIN %.2f/%.2f, R*/R+ %.5f/%.2f, K1/K2 %.2f/%.2f\n", - meta->cal[b].lmax, meta->cal[b].lmin, + printf(" LMAX/LMIN %.2f/%.2f, QMAX/QMIN %.2f/%.2f, R*/R+ %.5f/%.2f, K1/K2 %.2f/%.2f\n", + meta->cal[b].lmax, meta->cal[b].lmin, meta->cal[b].qmax, meta->cal[b].qmin, - meta->cal[b].rmul, meta->cal[b].radd, + meta->cal[b].rmul, meta->cal[b].radd, meta->cal[b].k1, meta->cal[b].k2); } @@ -289,7 +289,7 @@ GDALDatasetH fp_; if ((fp = fopen(metaname, "r")) == NULL){ printf("Unable to open Landsat metadata (MTL file)! 
"); return FAILURE;} - + // process line by line while (fgets(buffer, NPOW_10, fp) != NULL){ @@ -323,11 +323,11 @@ GDALDatasetH fp_; DN = allocate_stack(nb, 0, _DT_NONE_); nchar = snprintf(sensor, NPOW_04, "LND%02d", lid); - if (nchar < 0 || nchar >= NPOW_04){ + if (nchar < 0 || nchar >= NPOW_04){ printf("Buffer Overflow in assembling sensor\n"); return FAILURE;} - + for (b=0; bcal = allocate_calibration(nb); //alloc((void**)&meta->cal, nb, sizeof(cal_t)); @@ -360,86 +360,86 @@ GDALDatasetH fp_; b = 0; if (lid == 8){ - strncpy(meta->cal[b].orig_band, "1", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "ULTRABLUE"); + copy_string(meta->cal[b].orig_band, NPOW_03, "1"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "ULTRABLUE"); b++; - - strncpy(meta->cal[b].orig_band, "2", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "BLUE"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "2"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "BLUE"); b++; - - strncpy(meta->cal[b].orig_band, "3", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "GREEN"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "3"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "GREEN"); b++; - - strncpy(meta->cal[b].orig_band, "4", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "RED"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "4"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "RED"); b++; - - strncpy(meta->cal[b].orig_band, "5", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "NIR"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "5"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "NIR"); b++; - - strncpy(meta->cal[b].orig_band, "9", 1); meta->cal[b].orig_band[1] = 
'\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "CIRRUS"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "9"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "CIRRUS"); b++; - - strncpy(meta->cal[b].orig_band, "6", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "SWIR1"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "6"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "SWIR1"); b++; - - strncpy(meta->cal[b].orig_band, "7", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "SWIR2"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "7"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "SWIR2"); b++; - - strncpy(meta->cal[b].orig_band, "10", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "TEMP"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "10"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "TEMP"); b++; } else { - strncpy(meta->cal[b].orig_band, "1", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "BLUE"); + copy_string(meta->cal[b].orig_band, NPOW_03, "1"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "BLUE"); b++; - - strncpy(meta->cal[b].orig_band, "2", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "GREEN"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "2"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "GREEN"); b++; - - strncpy(meta->cal[b].orig_band, "3", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "RED"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "3"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "RED"); b++; - - strncpy(meta->cal[b].orig_band, "4", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = 
b_rsr++; - set_stack_domain(DN, b, "NIR"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "4"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "NIR"); b++; - - strncpy(meta->cal[b].orig_band, "5", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "SWIR1"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "5"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "SWIR1"); b++; - - strncpy(meta->cal[b].orig_band, "7", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "SWIR2"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "7"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "SWIR2"); b++; - - strncpy(meta->cal[b].orig_band, "6", 1); meta->cal[b].orig_band[1] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "TEMP"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "6"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "TEMP"); b++; } @@ -456,9 +456,9 @@ GDALDatasetH fp_; for (b=0; bd_level1, tag, tokenptr, &meta->cal[b], lid, 0); // product type - } else if (strcmp(tag, "DATA_TYPE") == 0 || + } else if (strcmp(tag, "DATA_TYPE") == 0 || strcmp(tag, "PRODUCT_TYPE") == 0){ - if (strstr(tokenptr, "L1T") != NULL || + if (strstr(tokenptr, "L1T") != NULL || strstr(tokenptr, "L1TP") != NULL){ meta->tier = 1; } else meta->tier = 2; @@ -469,23 +469,23 @@ GDALDatasetH fp_; if (strstr(tokenptr, "RT") != NULL) meta->tier = 3; // dimension variables - } else if (strcmp(tag, "PRODUCT_SAMPLES_REF") == 0 || + } else if (strcmp(tag, "PRODUCT_SAMPLES_REF") == 0 || strcmp(tag, "REFLECTIVE_SAMPLES") == 0){ set_stack_ncols(DN, atoi(tokenptr)); - } else if (strcmp(tag, "PRODUCT_LINES_REF") == 0 || + } else if (strcmp(tag, "PRODUCT_LINES_REF") == 0 || strcmp(tag, "REFLECTIVE_LINES") == 0){ set_stack_nrows(DN, atoi(tokenptr)); // resolution variables - } else if (strcmp(tag, "GRID_CELL_SIZE_REF") == 0 || + } else if 
(strcmp(tag, "GRID_CELL_SIZE_REF") == 0 || strcmp(tag, "GRID_CELL_SIZE_REFLECTIVE") == 0){ set_stack_res(DN, atoi(tokenptr)); // bounding box variables: map - } else if (strcmp(tag, "PRODUCT_UL_CORNER_MAPX") == 0 || + } else if (strcmp(tag, "PRODUCT_UL_CORNER_MAPX") == 0 || strcmp(tag, "CORNER_UL_PROJECTION_X_PRODUCT") == 0){ set_stack_ulx(DN, atof(tokenptr)-15); - } else if (strcmp(tag, "PRODUCT_UL_CORNER_MAPY") == 0 || + } else if (strcmp(tag, "PRODUCT_UL_CORNER_MAPY") == 0 || strcmp(tag, "CORNER_UL_PROJECTION_Y_PRODUCT") == 0){ set_stack_uly(DN, atof(tokenptr)+15); @@ -567,7 +567,7 @@ GDALDatasetH fp_; GDALClose(fp_); for (b=0; bparams->log); nchar = snprintf(meta->refsys, NPOW_04, "%03d%03d", path, row); - if (nchar < 0 || nchar >= NPOW_04){ + if (nchar < 0 || nchar >= NPOW_04){ printf("Buffer Overflow in assembling WRS-2\n"); return FAILURE;} @@ -666,9 +666,9 @@ int svgrid = 5000; meta->dtype = 16; meta->sat = 65535; - - + + nb = 13; DN = allocate_stack(nb, 0, _DT_NONE_); @@ -685,7 +685,7 @@ int svgrid = 5000; // scan directory for xml file if (findfile(d_top_2, "S2A", ".xml", metaname, NPOW_10) == FAILURE && findfile(d_top_2, "MTD", ".xml", metaname, NPOW_10) == FAILURE){ - printf("Finding top-level S2 metadata file failed. "); + printf("Finding top-level S2 metadata file failed. "); return FAILURE; } @@ -717,11 +717,11 @@ int svgrid = 5000; if ((atoi(tokenptr2+0)) != 2){ printf("unknown/unsupported sensor ID! 
"); return FAILURE; } - + nchar = snprintf(sensor, NPOW_04, "SEN%s", tokenptr2); - if (nchar < 0 || nchar >= NPOW_04){ + if (nchar < 0 || nchar >= NPOW_04){ printf("Buffer Overflow in assembling sensor\n"); return FAILURE;} - + for (b=0; bcal = allocate_calibration(nb); @@ -741,75 +741,75 @@ int svgrid = 5000; printf("Start of RSR array: %d\n", b_rsr); #endif - b = 0; + b = 0; - strncpy(meta->cal[b].orig_band, "01", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "ULTRABLUE"); + copy_string(meta->cal[b].orig_band, NPOW_03, "01"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "ULTRABLUE"); b++; - - strncpy(meta->cal[b].orig_band, "02", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "BLUE"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "02"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "BLUE"); b++; - - strncpy(meta->cal[b].orig_band, "03", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "GREEN"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "03"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "GREEN"); b++; - - strncpy(meta->cal[b].orig_band, "04", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "RED"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "04"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "RED"); b++; - - strncpy(meta->cal[b].orig_band, "05", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "REDEDGE1"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "05"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "REDEDGE1"); b++; - - strncpy(meta->cal[b].orig_band, "06", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "REDEDGE2"); + + copy_string(meta->cal[b].orig_band, 
NPOW_03, "06"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "REDEDGE2"); b++; - - strncpy(meta->cal[b].orig_band, "07", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "REDEDGE3"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "07"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "REDEDGE3"); b++; - - strncpy(meta->cal[b].orig_band, "08", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "BROADNIR"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "08"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "BROADNIR"); b++; - - strncpy(meta->cal[b].orig_band, "8A", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "NIR"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "8A"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "NIR"); b++; - - strncpy(meta->cal[b].orig_band, "09", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "VAPOR"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "09"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "VAPOR"); b++; - - strncpy(meta->cal[b].orig_band, "10", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "CIRRUS"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "10"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "CIRRUS"); b++; - - strncpy(meta->cal[b].orig_band, "11", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "SWIR1"); + + copy_string(meta->cal[b].orig_band, NPOW_03, "11"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "SWIR1"); b++; - - strncpy(meta->cal[b].orig_band, "12", 2); meta->cal[b].orig_band[2] = '\0'; - meta->cal[b].rsr_band = b_rsr++; - set_stack_domain(DN, b, "SWIR2"); + + copy_string(meta->cal[b].orig_band, 
NPOW_03, "12"); + meta->cal[b].rsr_band = b_rsr++; + set_stack_domain(DN, b, "SWIR2"); b++; nchar = snprintf(d_img, NPOW_10, "%s/IMG_DATA", pl2->d_level1); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling dirname\n"); return FAILURE;} // get filename @@ -818,14 +818,14 @@ int svgrid = 5000; //if (pl2->use.refbands[b]) _NO_++; nchar = snprintf(id_img, NPOW_10, "_B%s.jp2", meta->cal[b].orig_band); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling image ID\n"); return FAILURE;} if (findfile(d_img, id_img, NULL, meta->cal[b].fname, NPOW_10) == FAILURE){ printf("Unable to find image %s. ", id_img); return FAILURE;} } - + if (strlen(pl2->b_level1) > 50){ // old, long naming convention strncpy(meta->refsys, pl2->b_level1+49, 6); @@ -844,7 +844,7 @@ int svgrid = 5000; // acquisition variables } else if (strcmp(tag, "PRODUCT_START_TIME") == 0){ - tokenptr3 = strtok(tokenptr, separator3); + tokenptr3 = strtok(tokenptr, separator3); date.year = atoi(tokenptr3); tokenptr3 = strtok(NULL, separator3); date.month = atoi(tokenptr3); @@ -930,7 +930,7 @@ int svgrid = 5000; if (svgrid != atoi(tokenptr)){ printf("SUN_VIEW_GRID is incompatible with Sentinel-2 metadata. 
"); return FAILURE;} - + sv_nx = ceil(get_stack_width(DN)/(float)svgrid); sv_ny = ceil(get_stack_height(DN)/(float)svgrid); if (s_sz == NULL) alloc_2D((void***)&s_sz, sv_ny, sv_nx, sizeof(float)); @@ -1003,10 +1003,10 @@ int svgrid = 5000; left = sv_nx-1; right = 0; top = sv_ny-1; bottom = 0; - + for (i=0; i 0 && k_va[i][j] > 0){ if (j < left) left = j; if (j > right) right = j; @@ -1019,7 +1019,7 @@ int svgrid = 5000; right++; // lower-right corner of cell bottom++; // lower-right corner of cell - + if (left > 0) left--; // one to the left to fill the missing left edge while (fmod(left*svgrid, 60) != 0 && left > 0) left--; @@ -1038,9 +1038,9 @@ int svgrid = 5000; //meta->s2.ny = bottom-top+1; meta->s2.nx = right-left; meta->s2.ny = bottom-top; - + if (meta->s2.nx <= 0 || meta->s2.ny <= 0){ - printf("no valid cell after subsetting. Abort.\n"); + printf("no valid cell after subsetting. Abort.\n"); free_stack(DN); exit(SUCCESS); } @@ -1054,7 +1054,7 @@ int svgrid = 5000; // average of view angles for (i=0; is2.ny; i++){ for (j=0; js2.nx; j++){ - + ii = i+top; jj = j+left; @@ -1075,16 +1075,16 @@ int svgrid = 5000; } } - - + + // try to fill the left edge (duplicate values - silly method, but it will do for now) // average of view angles for (i=0; is2.ny; i++){ for (j=0; js2.nx; j++){ - + if ((jj = j+1) == meta->s2.nx) continue; - + if (meta->s2.vzen[i][j] == meta->s2.nodata && meta->s2.vzen[i][jj] != meta->s2.nodata){ @@ -1097,18 +1097,18 @@ int svgrid = 5000; } } - - - //right++; + + + //right++; //bottom++; meta->s2.left = left * svgrid/get_stack_res(DN); meta->s2.top = top * svgrid/get_stack_res(DN); meta->s2.right = right * svgrid/get_stack_res(DN); meta->s2.bottom = bottom * svgrid/get_stack_res(DN); - + if (meta->s2.right > get_stack_ncols(DN)) meta->s2.right = get_stack_ncols(DN); if (meta->s2.bottom > get_stack_nrows(DN)) meta->s2.bottom = get_stack_nrows(DN); @@ -1142,7 +1142,7 @@ int svgrid = 5000; GDALClose(fp_); for (b=0; bparams->log); - - + + #ifdef 
FORCE_DEBUG print_metadata(meta, nb); print_stack_info(DN); @@ -1192,8 +1192,8 @@ int nchar; if (lid == 7 && strcmp(cal->orig_band, "6") == 0){ - strncpy(add1, "_VCID_1", 7); add1[7] = '\0'; - strncpy(add2, "1", 1); add2[1] = '\0'; + copy_string(add1, NPOW_10, "_VCID_1"); + copy_string(add2, NPOW_10, "1"); } else { add1[0] = '\0'; add1[0] = '\0'; @@ -1202,24 +1202,24 @@ int nchar; if (type == 0){ if (strcmp(cal->fname, "NULL") == 0){ nchar = snprintf(str1, NPOW_10, "FILE_NAME_BAND_%s%s", cal->orig_band, add1); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} nchar = snprintf(str2, NPOW_10, "BAND%s%s_FILE_NAME", cal->orig_band, add2); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} if (strcmp(tag, str1) == 0 || strcmp(tag, str2) == 0){ nchar = snprintf(cal->fname, NPOW_10, "%s/%s", d_level1, value); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling filename\n"); exit(1);} } } } else if (type == 1){ if (cal->lmax == cal->fill){ nchar = snprintf(str1, NPOW_10, "RADIANCE_MAXIMUM_BAND_%s%s", cal->orig_band, add1); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} nchar = snprintf(str2, NPOW_10, "LMAX_BAND%s%s", cal->orig_band, add2); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} if (strcmp(tag, str1) == 0 || strcmp(tag, str2) == 0){ cal->lmax = atof(value); @@ -1228,10 +1228,10 @@ int nchar; } else if (type == 2){ if (cal->lmin == cal->fill){ nchar = snprintf(str1, NPOW_10, "RADIANCE_MINIMUM_BAND_%s%s", cal->orig_band, add1); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); 
exit(1);} nchar = snprintf(str2, NPOW_10, "LMIN_BAND%s%s", cal->orig_band, add2); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} if (strcmp(tag, str1) == 0 || strcmp(tag, str2) == 0){ cal->lmin = atof(value); @@ -1240,10 +1240,10 @@ int nchar; } else if (type == 3){ if (cal->qmax == cal->fill){ nchar = snprintf(str1, NPOW_10, "QUANTIZE_CAL_MAX_BAND_%s%s", cal->orig_band, add1); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} nchar = snprintf(str2, NPOW_10, "QCALMAX_BAND%s%s", cal->orig_band, add2); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} if (strcmp(tag, str1) == 0 || strcmp(tag, str2) == 0){ cal->qmax = atof(value); @@ -1252,10 +1252,10 @@ int nchar; } else if (type == 4){ if (cal->qmin == cal->fill){ nchar = snprintf(str1, NPOW_10, "QUANTIZE_CAL_MIN_BAND_%s%s", cal->orig_band, add1); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} nchar = snprintf(str2, NPOW_10, "QCALMIN_BAND%s%s", cal->orig_band, add2); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} if (strcmp(tag, str1) == 0 || strcmp(tag, str2) == 0){ cal->qmin = atof(value); @@ -1264,7 +1264,7 @@ int nchar; } else if (type == 5){ if (cal->rmul == cal->fill){ nchar = snprintf(str1, NPOW_10, "REFLECTANCE_MULT_BAND_%s%s", cal->orig_band, add1); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} if (strcmp(tag, str1) == 0){// || strcmp(tag, str2) == 0){ cal->rmul = atof(value); @@ -1273,7 +1273,7 @@ int nchar; } else if (type == 6){ if (cal->radd == cal->fill){ nchar = 
snprintf(str1, NPOW_10, "REFLECTANCE_ADD_BAND_%s%s", cal->orig_band, add1); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} if (strcmp(tag, str1) == 0){// || strcmp(tag, str2) == 0){ cal->radd = atof(value); @@ -1282,7 +1282,7 @@ int nchar; } else if (type == 7){ if (cal->k1 == cal->fill){ nchar = snprintf(str1, NPOW_10, "K1_CONSTANT_BAND_%s%s", cal->orig_band, add1); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} if (strcmp(tag, str1) == 0){ cal->k1 = atof(value); @@ -1291,7 +1291,7 @@ int nchar; } else if (type == 8){ if (cal->k2 == cal->fill){ nchar = snprintf(str1, NPOW_10, "K2_CONSTANT_BAND_%s%s", cal->orig_band, add1); - if (nchar < 0 || nchar >= NPOW_10){ + if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling basename\n"); exit(1);} if (strcmp(tag, str1) == 0){ cal->k2 = atof(value); diff --git a/src/lower-level/meta-ll.h b/src/lower-level/meta-ll.h index 92780d0b..2641a2bb 100755 --- a/src/lower-level/meta-ll.h +++ b/src/lower-level/meta-ll.h @@ -32,6 +32,7 @@ Level 1 metadata header #include // standard general utilities library #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/stack-cl.h" #include "../lower-level/table-ll.h" #include "../lower-level/param-ll.h" From 9b519236a933474c9b2be01d6938d7d02a080b5e Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 18 Jun 2020 16:49:07 +0200 Subject: [PATCH 03/78] removed strncpy --- src/lower-level/_level2.c | 17 +++-------------- src/lower-level/atmo-ll.c | 18 +++++++++--------- src/lower-level/atmo-ll.h | 1 + src/lower-level/cube-ll.c | 13 +++---------- src/lower-level/cube-ll.h | 1 + src/lower-level/equi7-ll.c | 5 ++--- src/lower-level/equi7-ll.h | 1 + src/lower-level/gas-ll.c | 6 +++--- src/lower-level/gas-ll.h | 1 + src/lower-level/glance7-ll.c | 5 ++--- 
src/lower-level/glance7-ll.h | 1 + src/lower-level/modwvp-ll.c | 20 ++++++++------------ src/lower-level/modwvp-ll.h | 1 + src/lower-level/param-ll.c | 11 ++--------- src/lower-level/param-ll.h | 1 + 15 files changed, 39 insertions(+), 63 deletions(-) diff --git a/src/lower-level/_level2.c b/src/lower-level/_level2.c index d578e234..774a1d9e 100755 --- a/src/lower-level/_level2.c +++ b/src/lower-level/_level2.c @@ -30,6 +30,7 @@ This program is the FORCE Level-2 Processing System (single image) #include // string handling functions #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/konami-cl.h" #include "../cross-level/cite-cl.h" #include "../cross-level/stack-cl.h" @@ -100,20 +101,8 @@ GDALDriverH driver; pl2 = allocate_param_lower(); // get command line parameters - if (strlen(argv[1]) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE;; - } else { - strncpy(pl2->d_level1, argv[1], strlen(argv[1])); - pl2->d_level1[strlen(argv[1])] = '\0'; - } - - if (strlen(argv[2]) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE;; - } else { - strncpy(pl2->f_par, argv[2], strlen(argv[2])); - pl2->f_par[strlen(argv[2])] = '\0'; - } - + copy_string(pl2->d_level1, NPOW_10, argv[1]); + copy_string(pl2->f_par, NPOW_10, argv[2]); check_arg(argv[2]); diff --git a/src/lower-level/atmo-ll.c b/src/lower-level/atmo-ll.c index cd0b2b42..dfeb74ed 100755 --- a/src/lower-level/atmo-ll.c +++ b/src/lower-level/atmo-ll.c @@ -1068,9 +1068,9 @@ stack_t *BOA = TOA; // set metadata if (pl2->doatmo){ - strncpy(product, "BOA", 3); product[3] = '\0'; + copy_string(product, NPOW_02, "BOA"); } else { - strncpy(product, "TOA", 3); product[3] = '\0'; + copy_string(product, NPOW_02, "TOA"); } set_stack_product(BOA, product); set_stack_name(BOA, "FORCE Level 2 Processing System"); @@ -1140,7 +1140,7 @@ short *qa_ = NULL; // set metadata - strncpy(product, "QAI", 3); product[3] = '\0'; + copy_string(product, 
NPOW_02, "QAI"); set_stack_product(QA, product); set_stack_name(QA, "FORCE Level 2 Processing System"); get_stack_sensor(QA, 0, sensor, NPOW_04); @@ -1204,7 +1204,7 @@ short *dst_ = NULL; // set metadata - strncpy(product, "DST", 3); product[3] = '\0'; + copy_string(product, NPOW_02, "DST"); set_stack_product(DST, product); set_stack_name(DST, "FORCE Level 2 Processing System"); get_stack_compactdate(DST, 0, date, NPOW_04); @@ -1359,7 +1359,7 @@ enum { R, G, B }; // set metadata - strncpy(product, "OVV", 3); product[3] = '\0'; + copy_string(product, NPOW_02, "OVV"); set_stack_product(OVV, product); set_stack_name(OVV, "FORCE Level 2 Processing System"); get_stack_compactdate(OVV, 0, date, NPOW_04); @@ -1536,7 +1536,7 @@ GDALDataType eOutputType = GDT_Float64; free((void*)fcoarse_); // set metadata - strncpy(product, "VZN", 3); product[3] = '\0'; + copy_string(product, NPOW_02, "VZN"); set_stack_product(VZN, product); set_stack_name(VZN, "FORCE Level 2 Processing System"); get_stack_compactdate(VZN, 0, date, NPOW_04); @@ -1627,7 +1627,7 @@ short *hot_ = NULL; // set metadata - strncpy(product, "HOT", 3); product[3] = '\0'; + copy_string(product, NPOW_02, "HOT"); set_stack_product(HOT, product); set_stack_name(HOT, "FORCE Level 2 Processing System"); get_stack_compactdate(HOT, 0, date, NPOW_04); @@ -1739,7 +1739,7 @@ float **xy_aod_ = NULL; // set metadata - strncpy(product, "AOD", 3); product[3] = '\0'; + copy_string(product, NPOW_02, "AOD"); set_stack_product(AOD, product); set_stack_name(AOD, "FORCE Level 2 Processing System"); get_stack_compactdate(AOD, 0, date, NPOW_04); @@ -1828,7 +1828,7 @@ short *wvp_ = NULL; // set metadata - strncpy(product, "WVP", 3); product[3] = '\0'; + copy_string(product, NPOW_02, "WVP"); set_stack_product(WV, product); set_stack_name(WV, "FORCE Level 2 Processing System"); get_stack_compactdate(WV, 0, date, NPOW_04); diff --git a/src/lower-level/atmo-ll.h b/src/lower-level/atmo-ll.h index c56e952a..71cd97e5 100755 --- 
a/src/lower-level/atmo-ll.h +++ b/src/lower-level/atmo-ll.h @@ -32,6 +32,7 @@ Atmospheric correction header #include // standard general utilities library #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/stack-cl.h" #include "../cross-level/cube-cl.h" #include "../lower-level/param-ll.h" diff --git a/src/lower-level/cube-ll.c b/src/lower-level/cube-ll.c index 07b680f8..5a999195 100755 --- a/src/lower-level/cube-ll.c +++ b/src/lower-level/cube-ll.c @@ -298,26 +298,19 @@ double tol = 5e-3; multicube->cover[0] = true; cube = multicube->cube[0]; - if (strlen(pl2->d_level2) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return NULL; - } else { strncpy(cube->dname, pl2->d_level2, strlen(pl2->d_level2)); cube->dname[strlen(pl2->d_level2)] = '\0';} - + copy_string(cube->dname, NPOW_10, pl2->d_level2); if (pl2->doreproj){ cube->res = pl2->res; - if (strlen(pl2->proj) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return NULL; - } else { strncpy(cube->proj, pl2->proj, strlen(pl2->proj)); cube->proj[strlen(pl2->proj)] = '\0';} + copy_string(cube->proj, NPOW_10, pl2->proj); } else { cube->res = get_stack_res(stack); get_stack_proj(stack, utm_proj, NPOW_10); - if (strlen(utm_proj) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return NULL; - } else { strncpy(cube->proj, utm_proj, strlen(utm_proj)); cube->proj[strlen(utm_proj)] = '\0';} + copy_string(cube->proj, NPOW_10, utm_proj); } diff --git a/src/lower-level/cube-ll.h b/src/lower-level/cube-ll.h index 73c81510..36c148bf 100755 --- a/src/lower-level/cube-ll.h +++ b/src/lower-level/cube-ll.h @@ -31,6 +31,7 @@ Datacube header #include // core input and output functions #include // standard general utilities library +#include "../cross-level/string-cl.h" #include "../cross-level/cube-cl.h" #include "../cross-level/tile-cl.h" #include "../cross-level/stack-cl.h" diff --git a/src/lower-level/equi7-ll.c b/src/lower-level/equi7-ll.c index 
c9b76eaf..f578613d 100755 --- a/src/lower-level/equi7-ll.c +++ b/src/lower-level/equi7-ll.c @@ -183,9 +183,8 @@ int nchar; printf("Buffer Overflow in assembling dirname\n"); return NULL;} cube->res = pl2->res; - if (strlen(proj7[c]) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return NULL; - } else { strncpy(cube->proj, proj7[c], strlen(proj7[c])); cube->proj[strlen(proj7[c])] = '\0';} + + copy_string(cube->proj, NPOW_10, proj7[c]); cube->tilesize = 100000; cube->chunksize = 2000; diff --git a/src/lower-level/equi7-ll.h b/src/lower-level/equi7-ll.h index 33f57f60..74d9e9ac 100755 --- a/src/lower-level/equi7-ll.h +++ b/src/lower-level/equi7-ll.h @@ -33,6 +33,7 @@ Equi7 header #include // boolean data type #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/cite-cl.h" #include "../cross-level/stack-cl.h" #include "../lower-level/param-ll.h" diff --git a/src/lower-level/gas-ll.c b/src/lower-level/gas-ll.c index 55c30685..4b7e93c1 100755 --- a/src/lower-level/gas-ll.c +++ b/src/lower-level/gas-ll.c @@ -646,7 +646,7 @@ int year, month, day; if (strcmp(pl2->d_wvp, "NULL") == 0){ wvp = avg = pl2->wvp; - strncpy(source, "PRM", 3); source[3] = '\0'; + copy_string(source, NPOW_02, "PRM"); #ifdef FORCE_DEBUG printf("Use wvp from parameter file: %.2f\n", wvp); @@ -693,10 +693,10 @@ int year, month, day; wvp = atof(tokenptr); if (k == 0){ avg = wvp; - strncpy(source, "AVG", 3); source[3] = '\0'; + copy_string(source, NPOW_02, "AVG"); } else { tokenptr = strtok(NULL, separator); - strncpy(source, tokenptr, 3); source[3] = '\0'; + copy_string(source, NPOW_02, tokenptr); } } diff --git a/src/lower-level/gas-ll.h b/src/lower-level/gas-ll.h index 3472c8c8..d89349c7 100755 --- a/src/lower-level/gas-ll.h +++ b/src/lower-level/gas-ll.h @@ -33,6 +33,7 @@ Atmospheric Gas header #include // boolean data type #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/stack-cl.h" 
#include "../cross-level/quality-cl.h" #include "../lower-level/meta-ll.h" diff --git a/src/lower-level/glance7-ll.c b/src/lower-level/glance7-ll.c index fb53e776..5fe55881 100755 --- a/src/lower-level/glance7-ll.c +++ b/src/lower-level/glance7-ll.c @@ -137,9 +137,8 @@ int nchar; printf("Buffer Overflow in assembling dirname\n"); return NULL;} cube->res = pl2->res; - if (strlen(proj7[c]) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return NULL; - } else { strncpy(cube->proj, proj7[c], strlen(proj7[c])); cube->proj[strlen(proj7[c])] = '\0';} + + copy_string(cube->proj, NPOW_10, proj7[c]); cube->tilesize = 150000; cube->chunksize = 1500; diff --git a/src/lower-level/glance7-ll.h b/src/lower-level/glance7-ll.h index ba83b68b..ef58aae3 100755 --- a/src/lower-level/glance7-ll.h +++ b/src/lower-level/glance7-ll.h @@ -32,6 +32,7 @@ GLANCE header #include // standard general utilities library #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/stack-cl.h" #include "../lower-level/param-ll.h" diff --git a/src/lower-level/modwvp-ll.c b/src/lower-level/modwvp-ll.c index 79c8e1e6..1130db79 100755 --- a/src/lower-level/modwvp-ll.c +++ b/src/lower-level/modwvp-ll.c @@ -263,9 +263,7 @@ float tmp, min[3]; while (tokenptr != NULL){ if (k == 0){ - if (strlen(tokenptr) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { strncpy(id[nline], tokenptr, strlen(tokenptr)); id[nline][strlen(tokenptr)] = '\0';} + copy_string(id[nline], NPOW_10, tokenptr); } else if (k == 4){ if (strcmp(tokenptr, "D") == 0 || strcmp(tokenptr, "B") == 0){ valid[nline] = true; // day image @@ -642,9 +640,7 @@ const char *separator = ","; while (fgets(buffer, NPOW_10, fp) != NULL){ if (strstr(buffer, pattern) != NULL){ str = strtok(buffer, separator); - if (strlen(str) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(basename, str, strlen(str)); basename[strlen(str)] = '\0';} + 
copy_string(basename, NPOW_10, str); ok = true; } } @@ -855,22 +851,22 @@ float ctr = 0; for (c=0; c= mydctr[c]){ WVP[c] = modavg[c]; - if (WVP[c] < 9999){ strncpy(SEN[c], "MOD", 3); SEN[3] = '\0';} + if (WVP[c] < 9999) copy_string(SEN[c], NPOW_02, "MOD"); } else { WVP[c] = mydavg[c]; - if (WVP[c] < 9999){ strncpy(SEN[c], "MYD", 3); SEN[3] = '\0';} + if (WVP[c] < 9999) copy_string(SEN[c], NPOW_02, "MYD"); } } else if (modavg[c] < 9999 && mydavg[c] >= 9999){ WVP[c] = modavg[c]; - if (WVP[c] < 9999){ strncpy(SEN[c], "MOD", 3); SEN[3] = '\0';} + if (WVP[c] < 9999) copy_string(SEN[c], NPOW_02, "MOD"); } else if (modavg[c] >= 9999 && mydavg[c] < 9999){ WVP[c] = mydavg[c]; - if (WVP[c] < 9999){ strncpy(SEN[c], "MYD", 3); SEN[3] = '\0';} + if (WVP[c] < 9999) copy_string(SEN[c], NPOW_02, "MYD"); } } if (WVP[c] < 9999) ctr++; @@ -1006,7 +1002,7 @@ double *modavg, *mydavg, *modctr, *mydctr; // initialize precipitable water with fill for (c=0; c // boolean data type #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/date-cl.h" #include "../cross-level/alloc-cl.h" #include "../cross-level/dir-cl.h" diff --git a/src/lower-level/param-ll.c b/src/lower-level/param-ll.c index 7ea437fc..cd00e0f6 100755 --- a/src/lower-level/param-ll.c +++ b/src/lower-level/param-ll.c @@ -103,9 +103,7 @@ void parse_proj(par_ll_t *pl2){ int i; - if (strlen(pl2->proj_[0]) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(pl2->proj, pl2->proj_[0], strlen(pl2->proj_[0])); pl2->proj[strlen(pl2->proj_[0])] = '\0';} + copy_string(pl2->proj, NPOW_10, pl2->proj_[0]); for (i=1; inproj_; i++){ strncat(pl2->proj, " ", NPOW_10-strlen(pl2->proj)-1); @@ -172,12 +170,7 @@ char bname[NPOW_10] = "\0"; if (findfile(pl2->d_level1, "L1C", NULL, bname, NPOW_10) != SUCCESS){ printf("Unable to dive down .SAFE file!\n"); return FAILURE;} - if (strlen(bname) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE;; - } 
else { - strncpy(pl2->d_level1, bname, strlen(bname)); - pl2->d_level1[strlen(bname)] = '\0'; - } + copy_string(pl2->d_level1, NPOW_10, bname); } diff --git a/src/lower-level/param-ll.h b/src/lower-level/param-ll.h index e54e5fb6..10e682aa 100755 --- a/src/lower-level/param-ll.h +++ b/src/lower-level/param-ll.h @@ -32,6 +32,7 @@ Level 2 Processing paramater header #include // standard general utilities library #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/param-cl.h" From 8f1069d19cf5b816ef300df298610f7ee5483212 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Wed, 24 Jun 2020 10:51:24 +0200 Subject: [PATCH 04/78] removed strncpy --- src/cross-level/cube-cl.c | 13 +- src/cross-level/cube-cl.h | 1 + src/cross-level/dir-cl.c | 12 +- src/cross-level/dir-cl.h | 1 + src/cross-level/stack-cl.c | 265 +++++++++---------------------------- src/cross-level/stack-cl.h | 1 + 6 files changed, 68 insertions(+), 225 deletions(-) diff --git a/src/cross-level/cube-cl.c b/src/cross-level/cube-cl.c index c5980247..194cde5f 100755 --- a/src/cross-level/cube-cl.c +++ b/src/cross-level/cube-cl.c @@ -325,10 +325,7 @@ FILE *fp = NULL; if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling filename\n"); return NULL;} - if (strlen(d_read) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return NULL; - } else { strncpy(cube->dname, d_read, strlen(d_read)); cube->dname[strlen(d_read)] = '\0';} - + copy_string(cube->dname, NPOW_10, d_read); if ((fp = fopen(fname, "r")) == NULL){ printf("Unable to open %s. 
", fname); @@ -340,9 +337,7 @@ FILE *fp = NULL; free_datacube(cube); return NULL; } else { buffer[strcspn(buffer, "\r\n#")] = 0; - if (strlen(buffer) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return NULL; - } else { strncpy(cube->proj, buffer, strlen(buffer)); cube->proj[strlen(buffer)] = '\0';} + copy_string(cube->proj, NPOW_10, buffer); } if (fgets(buffer, NPOW_10, fp) == NULL){ @@ -435,9 +430,7 @@ double tol = 5e-3; } - if (strlen(d_write) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return NULL; - } else { strncpy(cube->dname, d_write, strlen(d_write)); cube->dname[strlen(d_write)] = '\0';} + copy_string(cube->dname, NPOW_10, d_write); if (write_datacube_def(cube) == FAILURE){ printf("Writing datacube definition failed. "); diff --git a/src/cross-level/cube-cl.h b/src/cross-level/cube-cl.h index 56c84378..2dfd6b22 100755 --- a/src/cross-level/cube-cl.h +++ b/src/cross-level/cube-cl.h @@ -34,6 +34,7 @@ Datacube header #include // common mathematical functions #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/alloc-cl.h" #include "../cross-level/dir-cl.h" #include "../cross-level/lock-cl.h" diff --git a/src/cross-level/dir-cl.c b/src/cross-level/dir-cl.c index 3f2d53a3..ec135432 100755 --- a/src/cross-level/dir-cl.c +++ b/src/cross-level/dir-cl.c @@ -153,9 +153,7 @@ char *dot; // Locate the first dot and copy from there dot = strchr(basename, '.'); if (dot != NULL){ - if (strlen(dot) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(extension, dot, strlen(dot)); extension[strlen(dot)] = '\0';} + copy_string(extension, size, dot); } else { extension[0] = '\0'; } @@ -217,9 +215,7 @@ char *start; // copy string from starting point, add terminating 0 - if (strlen(start) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(basename, start, strlen(start)); basename[strlen(start)] = '\0';} + copy_string(basename, size, 
start); return; } @@ -241,9 +237,7 @@ char *slash; // copy path to dir - if (strlen(path) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(dirname, path, strlen(path)); dirname[strlen(path)] = '\0';} + copy_string(dirname, size, path); // Locate the last slash and set terminating 0 diff --git a/src/cross-level/dir-cl.h b/src/cross-level/dir-cl.h index fb60c9aa..8414a350 100755 --- a/src/cross-level/dir-cl.h +++ b/src/cross-level/dir-cl.h @@ -39,6 +39,7 @@ Directory/file support header #include // error numbers #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #ifdef __cplusplus diff --git a/src/cross-level/stack-cl.c b/src/cross-level/stack-cl.c index 8b8344de..726695aa 100755 --- a/src/cross-level/stack-cl.c +++ b/src/cross-level/stack-cl.c @@ -492,11 +492,13 @@ int datatype; void init_stack(stack_t *stack){ int i; - strncpy(stack->name, "NA", 2); stack->name[2] = '\0'; - strncpy(stack->product, "NA", 2); stack->product[2] = '\0'; - strncpy(stack->dname, "NA", 2); stack->dname[2] = '\0'; - strncpy(stack->fname, "NA", 2); stack->fname[2] = '\0'; - strncpy(stack->extension, "NA", 2); stack->extension[2] = '\0'; + + copy_string(stack->name, NPOW_10, "NA"); + copy_string(stack->product, NPOW_03, "NA"); + copy_string(stack->dname, NPOW_10, "NA"); + copy_string(stack->fname, NPOW_10, "NA"); + copy_string(stack->extension, NPOW_02, "NA"); + stack->sid = -1; stack->format = 0; stack->open = OPEN_FALSE; @@ -521,9 +523,10 @@ int i; stack->nchunk = 0; stack->tx = 0; stack->ty = 0; - strncpy(stack->proj, "NA", 2); stack->proj[2] = '\0'; - strncpy(stack->par, "NA", 2); stack->par[2] = '\0'; - + + copy_string(stack->proj,NPOW_10, "NA"); + copy_string(stack->par, NPOW_13, "NA"); + stack->save = NULL; stack->nodata = NULL; stack->scale = NULL; @@ -557,10 +560,10 @@ int b; stack->nodata[b] = 0; stack->scale[b] = 0; stack->wavelength[b] = 0; - strncpy(stack->unit[b], "NA", 2); stack->unit[b][2] = '\0'; - 
strncpy(stack->domain[b], "NA", 2); stack->domain[b][2] = '\0'; - strncpy(stack->bandname[b], "NA", 2); stack->bandname[b][2] = '\0'; - strncpy(stack->sensor[b], "NA", 2); stack->sensor[b][2] = '\0'; + copy_string(stack->unit[b], NPOW_04, "NA"); + copy_string(stack->domain[b], NPOW_10, "NA"); + copy_string(stack->bandname[b], NPOW_10, "NA"); + copy_string(stack->sensor[b], NPOW_04, "NA"); init_date(&stack->date[b]); } @@ -673,39 +676,19 @@ int i = 0; alloc_2DC((void***)&fp_meta, n_fp_meta, NPOW_13, sizeof(char)); alloc_2DC((void***)&band_meta, n_band_meta, NPOW_13, sizeof(char)); sys_meta = system_info(&n_sys_meta); - - strncpy(fp_meta[i], "FORCE_version", 13); fp_meta[i][13] = '\0'; i++; - if (strlen(_VERSION_) > NPOW_13-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { - strncpy(fp_meta[i], _VERSION_, strlen(_VERSION_)); - fp_meta[i][strlen(_VERSION_)] = '\0'; i++; - } + + copy_string(fp_meta[i++], NPOW_13, "FORCE_version"); + copy_string(fp_meta[i++], NPOW_13, _VERSION_); - strncpy(fp_meta[i], "FORCE_description", 17); fp_meta[i][17] = '\0'; i++; - if (strlen(stack->name) > NPOW_13-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { - strncpy(fp_meta[i], stack->name, strlen(stack->name)); - fp_meta[i][strlen(stack->name)] = '\0'; i++; - } + copy_string(fp_meta[i++], NPOW_13, "FORCE_description"); + copy_string(fp_meta[i++], NPOW_13, stack->name); - strncpy(fp_meta[i], "FORCE_product", 13); fp_meta[i][13] = '\0'; i++; - if (strlen(stack->product) > NPOW_13-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { - strncpy(fp_meta[i], stack->product, strlen(stack->product)); - fp_meta[i][strlen(stack->product)] = '\0'; i++; - } + copy_string(fp_meta[i++], NPOW_13, "FORCE_product"); + copy_string(fp_meta[i++], NPOW_13, stack->product); - strncpy(fp_meta[i], "FORCE_param", 11); fp_meta[i][11] = '\0'; i++; - if (strlen(stack->par) > NPOW_13-1){ - printf("cannot copy, string too long.\n"); return 
FAILURE; - } else { - strncpy(fp_meta[i], stack->par, strlen(stack->par)); - fp_meta[i][strlen(stack->par)] = '\0'; i++; - } + copy_string(fp_meta[i++], NPOW_13, "FORCE_param"); + copy_string(fp_meta[i++], NPOW_13, stack->par); // how many bands to output? @@ -926,48 +909,28 @@ int i = 0; i = 0; - strncpy(band_meta[i], "Domain", 6); band_meta[i][6] = '\0'; i++; - if (strlen(stack->domain[b_stack]) > NPOW_13-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { - strncpy(band_meta[i], stack->domain[b_stack], strlen(stack->domain[b_stack])); - band_meta[i][strlen(stack->domain[b_stack])] = '\0'; i++; - } + copy_string(band_meta[i++], NPOW_13, "Domain"); + copy_string(band_meta[i++], NPOW_13, stack->domain[b_stack]); - strncpy(band_meta[i], "Wavelength", 10); band_meta[i][10] = '\0'; i++; + copy_string(band_meta[i++], NPOW_13, "Wavelength"); nchar = snprintf(band_meta[i], NPOW_13, "%.3f", stack->wavelength[b_stack]); i++; if (nchar < 0 || nchar >= NPOW_13){ printf("Buffer Overflow in assembling band metadata\n"); return FAILURE;} - strncpy(band_meta[i], "Wavelength_unit", 15); band_meta[i][15] = '\0'; i++; - if (strlen(stack->unit[b_stack]) > NPOW_13-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { - strncpy(band_meta[i], stack->unit[b_stack], strlen(stack->unit[b_stack])); - band_meta[i][strlen(stack->unit[b_stack])] = '\0'; i++; - } + copy_string(band_meta[i++], NPOW_13, "Wavelength_unit"); + copy_string(band_meta[i++], NPOW_13, stack->unit[b_stack]); - strncpy(band_meta[i], "Scale", 5); band_meta[i][5] = '\0'; i++; + copy_string(band_meta[i++], NPOW_13, "Scale"); nchar = snprintf(band_meta[i], NPOW_13, "%.3f", stack->scale[b_stack]); i++; if (nchar < 0 || nchar >= NPOW_13){ printf("Buffer Overflow in assembling band metadata\n"); return FAILURE;} - strncpy(band_meta[i], "Sensor", 6); band_meta[i][6] = '\0'; i++; - if (strlen(stack->sensor[b_stack]) > NPOW_13-1){ - printf("cannot copy, string too long.\n"); return 
FAILURE; - } else { - strncpy(band_meta[i], stack->sensor[b_stack], strlen(stack->sensor[b_stack])); - band_meta[i][strlen(stack->sensor[b_stack])] = '\0'; i++; - } + copy_string(band_meta[i++], NPOW_13, "Sensor"); + copy_string(band_meta[i++], NPOW_13, stack->sensor[b_stack]); get_stack_longdate(stack, b_stack, ldate, NPOW_05-1); - strncpy(band_meta[i], "Date", 4); band_meta[i][4] = '\0'; i++; - if (strlen(ldate) > NPOW_13-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { - strncpy(band_meta[i], ldate, strlen(ldate)); - band_meta[i][strlen(ldate)] = '\0'; i++; - } + copy_string(band_meta[i++], NPOW_13, "Date"); + copy_string(band_meta[i++], NPOW_13, ldate); band = GDALGetRasterBand(fp, b_file); @@ -1767,13 +1730,8 @@ char domain_[NPOW_10]; void set_stack_name(stack_t *stack, const char *name){ - if (strlen(name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stack->name, name, strlen(name)); - stack->name[strlen(name)] = '\0'; - } - + copy_string(stack->name, NPOW_10, name); + return; } @@ -1787,12 +1745,7 @@ void set_stack_name(stack_t *stack, const char *name){ void get_stack_name(stack_t *stack, char name[], size_t size){ - if (strlen(stack->name) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(name, stack->name, strlen(stack->name)); - name[strlen(stack->name)] = '\0'; - } + copy_string(name, size, stack->name); return; } @@ -1806,13 +1759,8 @@ void get_stack_name(stack_t *stack, char name[], size_t size){ void set_stack_product(stack_t *stack, const char *product){ - if (strlen(product) > NPOW_03-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stack->product, product, strlen(product)); - stack->product[strlen(product)] = '\0'; - } - + copy_string(stack->product, NPOW_03, product); + return; } @@ -1826,12 +1774,7 @@ void set_stack_product(stack_t *stack, const char *product){ void get_stack_product(stack_t *stack, char 
product[], size_t size){ - if (strlen(stack->product) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(product, stack->product, strlen(stack->product)); - product[strlen(stack->product)] = '\0'; - } + copy_string(product, size, stack->product); return; } @@ -1845,12 +1788,7 @@ void get_stack_product(stack_t *stack, char product[], size_t size){ void set_stack_dirname(stack_t *stack, const char *dname){ - if (strlen(dname) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stack->dname, dname, strlen(dname)); - stack->dname[strlen(dname)] = '\0'; - } + copy_string(stack->dname, NPOW_10, dname); return; } @@ -1865,12 +1803,7 @@ void set_stack_dirname(stack_t *stack, const char *dname){ void get_stack_dirname(stack_t *stack, char dname[], size_t size){ - if (strlen(stack->dname) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(dname, stack->dname, strlen(stack->dname)); - dname[strlen(stack->dname)] = '\0'; - } + copy_string(dname, size, stack->dname); return; } @@ -1884,12 +1817,7 @@ void get_stack_dirname(stack_t *stack, char dname[], size_t size){ void set_stack_filename(stack_t *stack, const char *fname){ - if (strlen(fname) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stack->fname, fname, strlen(fname)); - stack->fname[strlen(fname)] = '\0'; - } + copy_string(stack->fname, NPOW_10, fname); return; } @@ -1904,12 +1832,7 @@ void set_stack_filename(stack_t *stack, const char *fname){ void get_stack_filename(stack_t *stack, char fname[], size_t size){ - if (strlen(stack->fname) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(fname, stack->fname, strlen(stack->fname)); - fname[strlen(stack->fname)] = '\0'; - } + copy_string(fname, size, stack->fname); return; } @@ -1930,13 +1853,8 @@ void set_stack_extension(stack_t *stack, const char *extension){ if (get_stack_format(stack) == 
_FMT_JPEG_ && strcmp(extension, "jpg") != 0){ printf("extension does not match with format.\n");} - if (strlen(extension) > NPOW_02-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stack->extension, extension, strlen(extension)); - stack->extension[strlen(extension)] = '\0'; - } - + copy_string(stack->extension, NPOW_02, extension); + return; } @@ -1950,12 +1868,7 @@ void set_stack_extension(stack_t *stack, const char *extension){ void get_stack_extension(stack_t *stack, char extension[], size_t size){ - if (strlen(stack->extension) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(extension, stack->extension, strlen(stack->extension)); - extension[strlen(stack->extension)] = '\0'; - } + copy_string(extension, size, stack->extension); return; } @@ -2690,12 +2603,7 @@ double get_stack_chunkheight(stack_t *stack){ void set_stack_proj(stack_t *stack, const char *proj){ - if (strlen(proj) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stack->proj, proj, strlen(proj)); - stack->proj[strlen(proj)] = '\0'; - } + copy_string(stack->proj, NPOW_10, proj); return; } @@ -2710,12 +2618,7 @@ void set_stack_proj(stack_t *stack, const char *proj){ void get_stack_proj(stack_t *stack, char proj[], size_t size){ - if (strlen(stack->proj) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(proj, stack->proj, strlen(stack->proj)); - proj[strlen(stack->proj)] = '\0'; - } + copy_string(proj, size, stack->proj); return; } @@ -2729,12 +2632,7 @@ void get_stack_proj(stack_t *stack, char proj[], size_t size){ void set_stack_par(stack_t *stack, const char *par){ - if (strlen(par) > NPOW_13-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stack->par, par, strlen(par)); - stack->par[strlen(par)] = '\0'; - } + copy_string(stack->par, NPOW_13, par); return; } @@ -2749,12 +2647,7 @@ void set_stack_par(stack_t *stack, const char 
*par){ void get_stack_par(stack_t *stack, char par[], size_t size){ - if (strlen(stack->par) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(par, stack->par, strlen(stack->par)); - par[strlen(stack->par)] = '\0'; - } + copy_string(par, size, stack->par); return; } @@ -2881,12 +2774,7 @@ float get_stack_wavelength(stack_t *stack, int b){ void set_stack_unit(stack_t *stack, int b, const char *unit){ - if (strlen(unit) > NPOW_04-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stack->unit[b], unit, strlen(unit)); - stack->unit[b][strlen(unit)] = '\0'; - } + copy_string(stack->unit[b], NPOW_04, unit); return; } @@ -2902,12 +2790,7 @@ void set_stack_unit(stack_t *stack, int b, const char *unit){ void get_stack_unit(stack_t *stack, int b, char unit[], size_t size){ - if (strlen(stack->unit[b]) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(unit, stack->unit[b], strlen(stack->unit[b])); - unit[strlen(stack->unit[b])] = '\0'; - } + copy_string(unit, size, stack->unit[b]); return; } @@ -2922,12 +2805,7 @@ void get_stack_unit(stack_t *stack, int b, char unit[], size_t size){ void set_stack_domain(stack_t *stack, int b, const char *domain){ - if (strlen(domain) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stack->domain[b], domain, strlen(domain)); - stack->domain[b][strlen(domain)] = '\0'; - } + copy_string(stack->domain[b], NPOW_10, domain); return; } @@ -2943,12 +2821,7 @@ void set_stack_domain(stack_t *stack, int b, const char *domain){ void get_stack_domain(stack_t *stack, int b, char domain[], size_t size){ - if (strlen(stack->domain[b]) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(domain, stack->domain[b], strlen(stack->domain[b])); - domain[strlen(stack->domain[b])] = '\0'; - } + copy_string(domain, size, stack->domain[b]); return; } @@ -2963,12 +2836,7 @@ void 
get_stack_domain(stack_t *stack, int b, char domain[], size_t size){ void set_stack_bandname(stack_t *stack, int b, const char *bandname){ - if (strlen(bandname) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stack->bandname[b], bandname, strlen(bandname)); - stack->bandname[b][strlen(bandname)] = '\0'; - } + copy_string(stack->bandname[b], NPOW_10, bandname); return; } @@ -2984,12 +2852,7 @@ void set_stack_bandname(stack_t *stack, int b, const char *bandname){ void get_stack_bandname(stack_t *stack, int b, char bandname[], size_t size){ - if (strlen(stack->bandname[b]) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(bandname, stack->bandname[b], strlen(stack->bandname[b])); - bandname[strlen(stack->bandname[b])] = '\0'; - } + copy_string(bandname, size, stack->bandname[b]); return; } @@ -3004,12 +2867,7 @@ void get_stack_bandname(stack_t *stack, int b, char bandname[], size_t size){ void set_stack_sensor(stack_t *stack, int b, const char *sensor){ - if (strlen(sensor) > NPOW_04-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(stack->sensor[b], sensor, strlen(sensor)); - stack->sensor[b][strlen(sensor)] = '\0'; - } + copy_string(stack->sensor[b], NPOW_04, sensor); return; } @@ -3025,12 +2883,7 @@ void set_stack_sensor(stack_t *stack, int b, const char *sensor){ void get_stack_sensor(stack_t *stack, int b, char sensor[], size_t size){ - if (strlen(stack->sensor[b]) > size-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { - strncpy(sensor, stack->sensor[b], strlen(stack->sensor[b])); - sensor[strlen(stack->sensor[b])] = '\0'; - } + copy_string(sensor, size, stack->sensor[b]); return; } diff --git a/src/cross-level/stack-cl.h b/src/cross-level/stack-cl.h index 2122702a..2d833c28 100755 --- a/src/cross-level/stack-cl.h +++ b/src/cross-level/stack-cl.h @@ -32,6 +32,7 @@ Image header #include // string handling functions #include 
"../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/date-cl.h" #include "../cross-level/alloc-cl.h" #include "../cross-level/warp-cl.h" From 4d5d7581ec9867b2178f5654755858ba563ba65a Mon Sep 17 00:00:00 2001 From: David Frantz Date: Wed, 24 Jun 2020 10:58:37 +0200 Subject: [PATCH 05/78] removed strncpy --- src/aux-level/_main.c | 4 ++-- src/aux-level/_quality-inflate.c | 10 +++------- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/src/aux-level/_main.c b/src/aux-level/_main.c index 84e5dba5..4f475883 100755 --- a/src/aux-level/_main.c +++ b/src/aux-level/_main.c @@ -31,6 +31,7 @@ This program is the general entry point to FORCE #include // standard symbolic constants and types #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/konami-cl.h" @@ -41,8 +42,7 @@ char user[NPOW_10]; if (argc >= 2) check_arg(argv[1]); - if (getlogin_r(user, NPOW_10) != 0){ - strncpy(user, "user", 4); user[4] = '\0';} + if (getlogin_r(user, NPOW_10) != 0) copy_string(user, NPOW_10, "user"); printf("\n##########################################################################\n"); diff --git a/src/aux-level/_quality-inflate.c b/src/aux-level/_quality-inflate.c index d603766d..d2a8e7a7 100755 --- a/src/aux-level/_quality-inflate.c +++ b/src/aux-level/_quality-inflate.c @@ -29,6 +29,7 @@ This program inflates QAI layers #include // standard general utilities library #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/konami-cl.h" #include "../cross-level/quality-cl.h" #include "../higher-level/read-ard-hl.h" @@ -56,13 +57,8 @@ cube_t *cube = NULL; if (argc != 3){ printf("Usage: %s QAI dir\n\n", argv[0]); exit(1);} // parse arguments - if (strlen(argv[1]) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { strncpy(iname, argv[1], strlen(argv[1])); iname[strlen(argv[1])] = '\0';} - if (strlen(argv[2]) > 
NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { strncpy(d_out, argv[2], strlen(argv[2])); d_out[strlen(argv[2])] = '\0';} - + copy_string(iname, NPOW_10, argv[1]); + copy_string(d_out, NPOW_10, argv[2]); GDALAllRegister(); From d48d6f265dc1b01bf2000ad42c46dfa05632dd36 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Wed, 24 Jun 2020 15:03:34 +0200 Subject: [PATCH 06/78] removed strncpy --- src/higher-level/cso-hl.c | 18 +++---- src/higher-level/cso-hl.h | 1 + src/higher-level/lib-hl.c | 5 +- src/higher-level/lib-hl.h | 1 + src/higher-level/ml-hl.c | 6 +-- src/higher-level/ml-hl.h | 1 + src/higher-level/param-hl.c | 86 +++++++++++++++------------------- src/higher-level/param-hl.h | 1 + src/higher-level/read-ard-hl.c | 57 ++++------------------ src/higher-level/read-ard-hl.h | 1 + 10 files changed, 67 insertions(+), 110 deletions(-) diff --git a/src/higher-level/cso-hl.c b/src/higher-level/cso-hl.c index f21ed4ee..0f88b61e 100755 --- a/src/higher-level/cso-hl.c +++ b/src/higher-level/cso-hl.c @@ -59,15 +59,15 @@ int nchar; short ***ptr[NPOW_08]; - if (phl->cso.sta.num > -1){ strncpy(prodname[phl->cso.sta.num], "NUM", 3); prodname[phl->cso.sta.num][3] = '\0';} - if (phl->cso.sta.min > -1){ strncpy(prodname[phl->cso.sta.min], "MIN", 3); prodname[phl->cso.sta.min][3] = '\0';} - if (phl->cso.sta.max > -1){ strncpy(prodname[phl->cso.sta.max], "MAX", 3); prodname[phl->cso.sta.max][3] = '\0';} - if (phl->cso.sta.rng > -1){ strncpy(prodname[phl->cso.sta.rng], "RNG", 3); prodname[phl->cso.sta.rng][3] = '\0';} - if (phl->cso.sta.iqr > -1){ strncpy(prodname[phl->cso.sta.iqr], "IQR", 3); prodname[phl->cso.sta.iqr][3] = '\0';} - if (phl->cso.sta.avg > -1){ strncpy(prodname[phl->cso.sta.avg], "AVG", 3); prodname[phl->cso.sta.avg][3] = '\0';} - if (phl->cso.sta.std > -1){ strncpy(prodname[phl->cso.sta.std], "STD", 3); prodname[phl->cso.sta.std][3] = '\0';} - if (phl->cso.sta.skw > -1){ strncpy(prodname[phl->cso.sta.skw], "SKW", 3); 
prodname[phl->cso.sta.skw][3] = '\0';} - if (phl->cso.sta.krt > -1){ strncpy(prodname[phl->cso.sta.krt], "KRT", 3); prodname[phl->cso.sta.krt][3] = '\0';} + if (phl->cso.sta.num > -1) copy_string(prodname[phl->cso.sta.num], NPOW_03, "NUM"); + if (phl->cso.sta.min > -1) copy_string(prodname[phl->cso.sta.min], NPOW_03, "MIN"); + if (phl->cso.sta.max > -1) copy_string(prodname[phl->cso.sta.max], NPOW_03, "MAX"); + if (phl->cso.sta.rng > -1) copy_string(prodname[phl->cso.sta.rng], NPOW_03, "RNG"); + if (phl->cso.sta.iqr > -1) copy_string(prodname[phl->cso.sta.iqr], NPOW_03, "IQR"); + if (phl->cso.sta.avg > -1) copy_string(prodname[phl->cso.sta.avg], NPOW_03, "AVG"); + if (phl->cso.sta.std > -1) copy_string(prodname[phl->cso.sta.std], NPOW_03, "STD"); + if (phl->cso.sta.skw > -1) copy_string(prodname[phl->cso.sta.skw], NPOW_03, "SKW"); + if (phl->cso.sta.krt > -1) copy_string(prodname[phl->cso.sta.krt], NPOW_03, "KRT"); for (q=0; qcso.sta.nquantiles; q++){ nchar = snprintf(prodname[phl->cso.sta.qxx[q]], NPOW_03, "Q%02.0f", phl->cso.sta.q[q]*100); if (nchar < 0 || nchar >= NPOW_03){ diff --git a/src/higher-level/cso-hl.h b/src/higher-level/cso-hl.h index 2ca442fd..c9c69ba7 100755 --- a/src/higher-level/cso-hl.h +++ b/src/higher-level/cso-hl.h @@ -32,6 +32,7 @@ CSO Processing header #include // standard general utilities library #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/cite-cl.h" #include "../cross-level/stack-cl.h" #include "../cross-level/stats-cl.h" diff --git a/src/higher-level/lib-hl.c b/src/higher-level/lib-hl.c index 0c99dd66..12bce8df 100755 --- a/src/higher-level/lib-hl.c +++ b/src/higher-level/lib-hl.c @@ -77,11 +77,10 @@ short ***ptr[1] ={ &lib->mae_ }; if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling domain\n"); error++;} } else { - strncpy(domain, bname, strlen(bname)); - domain[strlen(bname)] = '\0'; + copy_string(domain, NPOW_10, bname); } } else { - strncpy(domain, 
"LIBRARY-SUMMARY", 15); domain[15] = '\0'; + copy_string(domain, NPOW_10, "LIBRARY-SUMMARY"); } set_stack_domain(LIB[o], b, domain); set_stack_bandname(LIB[o], b, domain); diff --git a/src/higher-level/lib-hl.h b/src/higher-level/lib-hl.h index af6c0ba0..5ed9ab7b 100755 --- a/src/higher-level/lib-hl.h +++ b/src/higher-level/lib-hl.h @@ -33,6 +33,7 @@ Library completeness header #include "../cross-level/stack-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/stats-cl.h" #include "../higher-level/read-ard-hl.h" diff --git a/src/higher-level/ml-hl.c b/src/higher-level/ml-hl.c index 25efb529..052ba6b1 100755 --- a/src/higher-level/ml-hl.c +++ b/src/higher-level/ml-hl.c @@ -79,8 +79,7 @@ short ***ptr[5] = { &ml->mlp_, &ml->mli_, &ml->mlu_, &ml->rfp_, &ml->rfm_ }; if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling domain\n"); error++;} } else { - strncpy(domain, bname, strlen(bname)); - domain[strlen(bname)] = '\0'; + copy_string(domain, NPOW_10, bname); } set_stack_domain(ML[o], s, domain); set_stack_bandname(ML[o], s, domain); @@ -96,8 +95,7 @@ short ***ptr[5] = { &ml->mlp_, &ml->mli_, &ml->mlu_, &ml->rfp_, &ml->rfm_ }; if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling domain\n"); error++;} } else { - strncpy(domain, bname, strlen(bname)); - domain[strlen(bname)] = '\0'; + copy_string(domain, NPOW_10, bname); } set_stack_domain(ML[o], sc, domain); set_stack_bandname(ML[o], sc, domain); diff --git a/src/higher-level/ml-hl.h b/src/higher-level/ml-hl.h index d5eefd22..b7c9ee59 100755 --- a/src/higher-level/ml-hl.h +++ b/src/higher-level/ml-hl.h @@ -34,6 +34,7 @@ Machine learning header #include #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/stack-cl.h" #include "../cross-level/stats-cl.h" #include "../higher-level/param-hl.h" diff --git a/src/higher-level/param-hl.c b/src/higher-level/param-hl.c index 61c9f2ee..35249a4c 100755 --- 
a/src/higher-level/param-hl.c +++ b/src/higher-level/param-hl.c @@ -452,124 +452,124 @@ int *band_ptr[_WVL_LENGTH_] = { switch (tsa->index[idx]){ case _IDX_BLU_: v[_WVL_BLUE_] = true; - strncpy(tsa->index_name[idx] , "BLU", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "BLU"); break; case _IDX_GRN_: v[_WVL_GREEN_] = true; - strncpy(tsa->index_name[idx] , "GRN", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "GRN"); break; case _IDX_RED_: v[_WVL_RED_] = true; - strncpy(tsa->index_name[idx] , "RED", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "RED"); break; case _IDX_NIR_: v[_WVL_NIR_] = true; - strncpy(tsa->index_name[idx] , "NIR", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "NIR"); break; case _IDX_SW1_: v[_WVL_SWIR1_] = true; - strncpy(tsa->index_name[idx] , "SW1", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "SW1"); break; case _IDX_SW2_: v[_WVL_SWIR2_] = true; - strncpy(tsa->index_name[idx] , "SW2", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "SW2"); break; case _IDX_RE1_: v[_WVL_REDEDGE1_] = true; - strncpy(tsa->index_name[idx] , "RE1", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "RE1"); break; case _IDX_RE2_: v[_WVL_REDEDGE2_] = true; - strncpy(tsa->index_name[idx] , "RE2", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "RE2"); break; case _IDX_RE3_: v[_WVL_REDEDGE3_] = true; - strncpy(tsa->index_name[idx] , "RE3", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "RE3"); break; case _IDX_BNR_: v[_WVL_BNIR_] = true; - strncpy(tsa->index_name[idx] , "BNR", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "BNR"); break; case _IDX_NDV_: v[_WVL_NIR_] = v[_WVL_RED_] = true; - strncpy(tsa->index_name[idx] , "NDV", 3); 
tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "NDV"); break; case _IDX_EVI_: v[_WVL_NIR_] = v[_WVL_RED_] = v[_WVL_BLUE_] = true; - strncpy(tsa->index_name[idx] , "EVI", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "EVI"); break; case _IDX_NBR_: v[_WVL_NIR_] = v[_WVL_SWIR2_] = true; - strncpy(tsa->index_name[idx] , "NBR", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "NBR"); break; case _IDX_ARV_: v[_WVL_RED_] = v[_WVL_BLUE_] = v[_WVL_NIR_] = true; - strncpy(tsa->index_name[idx] , "ARV", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "ARV"); break; case _IDX_SAV_: v[_WVL_NIR_] = v[_WVL_RED_] = true; - strncpy(tsa->index_name[idx] , "SAV", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "SAV"); break; case _IDX_SRV_: v[_WVL_RED_] = v[_WVL_BLUE_] = v[_WVL_NIR_] = true; - strncpy(tsa->index_name[idx] , "SRV", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "SRV"); break; case _IDX_TCB_: v[_WVL_BLUE_] = v[_WVL_GREEN_] = v[_WVL_RED_] = true; v[_WVL_NIR_] = v[_WVL_SWIR1_] = v[_WVL_SWIR2_] = true; - strncpy(tsa->index_name[idx] , "TCB", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "TCB"); break; case _IDX_TCG_: v[_WVL_BLUE_] = v[_WVL_GREEN_] = v[_WVL_RED_] = true; v[_WVL_NIR_] = v[_WVL_SWIR1_] = v[_WVL_SWIR2_] = true; - strncpy(tsa->index_name[idx] , "TCG", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "TCG"); break; case _IDX_TCW_: v[_WVL_BLUE_] = v[_WVL_GREEN_] = v[_WVL_RED_] = true; v[_WVL_NIR_] = v[_WVL_SWIR1_] = v[_WVL_SWIR2_] = true; - strncpy(tsa->index_name[idx] , "TCW", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "TCW"); break; case _IDX_TCD_: v[_WVL_BLUE_] = v[_WVL_GREEN_] = v[_WVL_RED_] = true; v[_WVL_NIR_] = v[_WVL_SWIR1_] = v[_WVL_SWIR2_] = true; - 
strncpy(tsa->index_name[idx] , "TCD", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "TCD"); break; case _IDX_NDB_: v[_WVL_SWIR1_] = v[_WVL_NIR_] = true; - strncpy(tsa->index_name[idx] , "NDB", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "NDB"); break; case _IDX_NDW_: v[_WVL_GREEN_] = v[_WVL_NIR_] = true; - strncpy(tsa->index_name[idx] , "NDW", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "NDW"); break; case _IDX_MNW_: v[_WVL_GREEN_] = v[_WVL_SWIR1_] = true; - strncpy(tsa->index_name[idx] , "MNW", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "MNW"); break; case _IDX_NDS_: v[_WVL_GREEN_] = v[_WVL_SWIR1_] = true; - strncpy(tsa->index_name[idx] , "NDS", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "NDS"); break; case _IDX_SMA_: for (b=0; b= 0); - strncpy(tsa->index_name[idx] , "SMA", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "SMA"); tsa->sma.v = true; break; case _IDX_BVV_: v[_WVL_VV_] = true; - strncpy(tsa->index_name[idx] , "BVV", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "BVV"); break; case _IDX_BVH_: v[_WVL_VH_] = true; - strncpy(tsa->index_name[idx] , "BVH", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "BVH"); break; case _IDX_NDT_: v[_WVL_SWIR1_] = v[_WVL_SWIR2_] = true; - strncpy(tsa->index_name[idx] , "NDT", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "NDT"); break; case _IDX_NDM_: v[_WVL_NIR_] = v[_WVL_SWIR1_] = true; - strncpy(tsa->index_name[idx] , "NDM", 3); tsa->index_name[idx][3] = '\0'; + copy_string(tsa->index_name[idx], NPOW_02, "NDM"); break; default: printf("unknown INDEX\n"); @@ -691,14 +691,10 @@ int i, j, k; for (i=0, k=0; intags; i++){ for (j=1; jifeature[i]; j++, k++){ - if (strlen(ftr->cfeature[i][0]) > NPOW_10-1){ - 
printf("cannot copy, string too long.\n"); return FAILURE; - } else { - strncpy(ftr->bname[k], ftr->cfeature[i][0], strlen(ftr->cfeature[i][0])); - ftr->bname[k][strlen(ftr->cfeature[i][0])] = '\0'; - } - + + copy_string(ftr->bname[k], NPOW_10, ftr->cfeature[i][0]); ftr->band[k] = atoi(ftr->cfeature[i][j]); + #ifdef FORCE_DEBUG printf("Feature # %04d: %s, band %d\n", k, ftr->bname[k], ftr->band[k]); #endif @@ -1044,15 +1040,15 @@ int *band_ptr[_WVL_LENGTH_] = { // set target sensor if (sen->nb == 6){ - strncpy(sen->target, "LNDLG", 5); sen->target[5] = '\0'; + copy_string(sen->target, NPOW_10, "LNDLG"); } else if (sen->nb == 10){ - strncpy(sen->target, "SEN2L", 5); sen->target[5] = '\0'; + copy_string(sen->target, NPOW_10, "SEN2L"); } else if (sen->nb == 4){ - strncpy(sen->target, "SEN2H", 5); sen->target[5] = '\0'; + copy_string(sen->target, NPOW_10, "SEN2H"); } else if (sen->nb == 3){ - strncpy(sen->target, "R-G-B", 5); sen->target[5] = '\0'; + copy_string(sen->target, NPOW_10, "R-G-B"); } else if (sen->nb == 2){ - strncpy(sen->target, "VVVHP", 5); sen->target[5] = '\0'; + copy_string(sen->target, NPOW_10, "VVVHP"); } else { printf("unknown sensors.\n"); return FAILURE; } @@ -1065,15 +1061,11 @@ int *band_ptr[_WVL_LENGTH_] = { for (b=0, bb=0; b NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(sen->domain[bb], domains[b], strlen(domains[b])); sen->domain[bb][strlen(domains[b])] = '\0';} + copy_string(sen->domain[bb], NPOW_10, domains[b]); for (s=0, ss=0; s NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(sen->sensor[ss], upper, strlen(upper)); sen->sensor[ss][strlen(upper)] = '\0';} + copy_string(sen->sensor[ss], NPOW_10, upper); sen->band[ss][bb] = band[s][b]; ss++; } diff --git a/src/higher-level/param-hl.h b/src/higher-level/param-hl.h index 3139c960..fc05da49 100755 --- a/src/higher-level/param-hl.h +++ b/src/higher-level/param-hl.h @@ -34,6 +34,7 @@ Higher Level Processing paramater 
header #include // macro constants of the floating-point library #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/param-cl.h" diff --git a/src/higher-level/read-ard-hl.c b/src/higher-level/read-ard-hl.c index b5c1115d..b1873015 100755 --- a/src/higher-level/read-ard-hl.c +++ b/src/higher-level/read-ard-hl.c @@ -288,15 +288,7 @@ dir_t d; for (m=0, d.n=0; md_name, phl->b_mask) == 0){ - - if (strlen(d.LIST[m]->d_name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { - strncpy(d.list[d.n], d.LIST[m]->d_name, strlen(d.LIST[m]->d_name)); - d.list[d.n][strlen(d.LIST[m]->d_name)] = '\0'; - d.n++; - } - + copy_string(d.list[d.n++], NPOW_10, d.LIST[m]->d_name); break; } @@ -375,15 +367,7 @@ int nchar; if (!phl->date_doys[date.doy]) vs = false; // white-list image - if (vs){ - if (strlen(d.LIST[t]->d_name) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { - strncpy(d.list[d.n], d.LIST[t]->d_name, strlen(d.LIST[t]->d_name)); - d.list[d.n][strlen(d.LIST[t]->d_name)] = '\0'; - d.n++; - } - } + if (vs) copy_string(d.list[d.n++], NPOW_10, d.LIST[t]->d_name); } @@ -428,25 +412,14 @@ int n; for (t=0, n=0; t cemax) continue; - if (strlen(dir.list[t]) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { - strncpy(list[n], dir.list[t], strlen(dir.list[t])); - list[n][strlen(dir.list[t])] = '\0'; - n++; - } + copy_string(list[n++], NPOW_10, dir.list[t]); } for (t=0; t= n){ - strncpy(dir.list[t], "NULL", 4); dir.list[t][4] = '\0'; + copy_string(dir.list[t], NPOW_10, "NULL"); } else { - if (strlen(list[t]) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { - strncpy(dir.list[t], list[t], strlen(list[t])); - dir.list[t][strlen(list[t])] = '\0'; - } + copy_string(dir.list[t], NPOW_10, list[t]); } } free_2D((void**)list, dir.n); @@ -827,10 +800,8 @@ bool level3 = false; if (phl->prd.imp){ // backup 
filename - if (strlen(fname) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(temp, fname, strlen(fname)); temp[strlen(fname)] = '\0';} - + copy_string(temp, NPOW_10, fname); + // new filename if (strstr(fname, "BOA") != NULL) pch = strstr(fname, "BOA"); @@ -839,11 +810,7 @@ bool level3 = false; strncpy(pch, "IMP", 3); // if no improphed product exists, use normal one - if (!fileexist(fname)){ - if (strlen(temp) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); exit(1); - } else { strncpy(fname, temp, strlen(temp)); fname[strlen(temp)] = '\0';} - } + if (!fileexist(fname)) copy_string(fname, NPOW_10, temp); } @@ -1368,9 +1335,7 @@ int nchar; } // copy file name - if (strlen(file) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return NULL; - } else { strncpy(fname, file, strlen(file)); fname[strlen(file)] = '\0';} + copy_string(fname, NPOW_10, file); nchar = snprintf(c_tc, NPOW_04, "X%04d_Y%04d", tx, ty); if (nchar < 0 || nchar >= NPOW_04){ @@ -1428,9 +1393,7 @@ int nchar; printf("error in assembling filename for neighboring block.\n"); return NULL; } else strncpy(pch, c_tn, 11); - if (strlen(c_tn) > NPOW_04-1){ - printf("cannot copy, string too long.\n"); return NULL; - } else { strncpy(c_tc, c_tn, strlen(c_tn)); c_tc[strlen(c_tn)] = '\0';} + copy_string(c_tc, NPOW_04, c_tn); #ifdef FORCE_DEBUG diff --git a/src/higher-level/read-ard-hl.h b/src/higher-level/read-ard-hl.h index 28f8b500..f0e721ef 100755 --- a/src/higher-level/read-ard-hl.h +++ b/src/higher-level/read-ard-hl.h @@ -32,6 +32,7 @@ Reading ARD header #include // standard general utilities library #include "../cross-level/const-cl.h" +#include "../cross-level/string-cl.h" #include "../cross-level/stack-cl.h" #include "../cross-level/imagefuns-cl.h" #include "../cross-level/quality-cl.h" From 79da875e6ca2313f87ccf4032849a6a8dacbea37 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Mon, 3 Aug 2020 10:06:16 +0200 Subject: [PATCH 07/78] updated 
Makefile --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 5e830fdf..d51c7f83 100755 --- a/Makefile +++ b/Makefile @@ -24,7 +24,7 @@ # Modify the following lines to match your needs # Installation directory -BINDIR=/usr/local/bin +BINDIR=/develop # Libraries GDAL=-I/usr/include/gdal -L/usr/lib -Wl,-rpath=/usr/lib From 54653b8ba217531285ca3efeb014e0a31d79d3f3 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Wed, 5 Aug 2020 17:00:36 +0200 Subject: [PATCH 08/78] started working on polar metrics --- docs/source/history/v3.rst | 2 +- src/higher-level/polar-hl.c | 353 ++++++++++++++++++++++++++++++++++++ src/higher-level/polar-hl.h | 50 +++++ src/higher-level/tsa-hl.c | 2 + 4 files changed, 406 insertions(+), 1 deletion(-) create mode 100755 src/higher-level/polar-hl.c create mode 100755 src/higher-level/polar-hl.h diff --git a/docs/source/history/v3.rst b/docs/source/history/v3.rst index 83555aa2..9c6e0c95 100755 --- a/docs/source/history/v3.rst +++ b/docs/source/history/v3.rst @@ -6,7 +6,7 @@ Version 3 FORCE v. 3.4.0 -------------- -Release: 19.03.2020 +Release: 03.08.2020 * **General changes** diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c new file mode 100755 index 00000000..c6d2346c --- /dev/null +++ b/src/higher-level/polar-hl.c @@ -0,0 +1,353 @@ +/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +This file is part of FORCE - Framework for Operational Radiometric +Correction for Environmental monitoring. + +Copyright (C) 2013-2020 David Frantz + +FORCE is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +FORCE is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with FORCE. If not, see . + ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + +/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +This file contains functions for polarmetrics ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + + +#include "polar-hl.h" + + +int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min, int year_max, par_pol_t *pol); + + +/** This function derives phenometrics from an interpolated time series + +++ for each year. + --- ts: pointer to instantly useable TSA image arrays + --- mask_: mask image + --- nc: number of cells + --- ni: number of interpolation steps + --- nodata: nodata value + --- year_min: first year in the complete time series + --- year_max: last year in the complete time series + --- pol: pheno parameters + +++ Return: SUCCESS/FAILURE ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ +int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min, int year_max, par_pol_t *pol){ +int l, npol = 15; +int year; +int p; +int i, ii, i_, i0, i1, ni_; +char cdat0[NPOW_10]; +char cdat1[NPOW_10]; +int nchar, error = 0; +float *v = NULL; +float *yhat = NULL; +float *w = NULL; +float *doy = NULL; +float dce, ce0; +float ymax; +int doymax, yoff; +bool southern = false; +bool valid; +float dseg; +int nseg; +float ce_left, ce_right, ce; +float v_left, v_right; +Spline *spl; + + + valid = false; + for (l=0; lpol_[l] != NULL) valid = true; + } + + if (!valid) return CANCEL; + + + + + #pragma omp parallel private(l,i,ii,i0,i1,ni_,ce_left,ce_right,v_left,v_right,year,valid,doymax,yoff,ce,i_,ymax,v,w,doy,dce,ce0,cdat0,cdat1,spl,nchar) firstprivate(southern) shared(mask_,ts,nc,ni,year_min,year_max,nodata,pol,nseg,npol) reduction(+: error) default(none) + { + + // allocate + 
alloc((void**)&v, ni, sizeof(float)); + alloc((void**)&doy, ni, sizeof(float)); + alloc((void**)&rad, ni, sizeof(float)); + alloc((void**)&pol_x, ni, sizeof(float)); + alloc((void**)&pol_y, ni, sizeof(float)); + + + #pragma omp for + for (p=0; ppol_[l] != NULL){ + for (year=0; yearny; year++) ts->pol_[l][year][p] = nodata; + } + } + + if (mask_ != NULL && !mask_[p]) continue; + + + + valid = true; + mean_pol_x = 0; + mean_pol_y = 0; + + + /** copy ce/v to working variables + +++ and interpolate linearly to make sure **/ + for (i=0; id_tsi[i].doy; + rad[i] = ts->d_tsi[i].doy/365*2*M_PI; + + // linearly interpolate v-value + if (ts->tsi_[i][p] == nodata){ + + ce_left = ce_right = INT_MIN; + v_left = v_right = nodata; + ce = ts->d_tsi[i].ce; + + for (i_=i-1; i_>=0; i_--){ + if (ts->tsi_[i_][p] != nodata){ + ce_left = ts->d_tsi[i_].ce; + v_left = ts->tsi_[i_][p]; + break; + } + } + for (i_=i+1; i_tsi_[i_][p] != nodata){ + ce_right = ts->d_tsi[i_].ce; + v_right = ts->tsi_[i_][p]; + break; + } + } + + if (ce_left > 0 && ce_right > 0){ + v[i] = (v_left*(ce_right-ce) + v_right*(ce-ce_left))/(ce_right-ce_left); + } else if (ce_left > 0){ + v[i] = v_left; + } else if (ce_right > 0){ + v[i] = v_right; + } else { + valid = false; + } + + // copy v-value + } else { + + v[i] = ts->tsi_[i][p]; + + } + + pol_x[i] = v[i]*cos(rad[i]); + pol_y[i] = v[i]*sin(rad[i]); + + mean_pol_x += pol_x[i]; + mean_pol_y += pol_y[i]; + + } + + if (!valid) continue; + + // mean of polar coordinates + mean_pol_x /= ni; + mean_pol_y /= ni; + + // average vector + mean_rad = atan2(mean_pol_y, mean_pol_x); + if (mean_rad <= 0) mean_rad += 2*M_PI; + mean_v = sqrt(mean_pol_x*mean_pol_x + mean_pol_y*mean_pol_y); + + // diametric opposite of average vector = start of phenological year + if (mean_rad < M_PI){ + theta = mean_rad + M_PI; + } else { + theta = mean_rad - M_PI; + } + + // yoff = 0; // year offset, probably use? 
+ + + for (year=0; yearny; year++){ + + sum_ann = 0; + n = 0; + + // extract annual values + // cumulative values + for (i=i1; id_tsi[i].year < year_min+year) continue; + if (ts->d_tsi[i].year == year_min+year && rad[i] < theta) continue; + if (ts->d_tsi[i].year > year_min+year && rad[i] => theta) break; + + + rad_ann[n] = rad[i]; + v_ann[n] = v[i]; + + sum_ann += v[i]; + cum_v_ann[n++] = sum_ann; + + } + + + rad_start_grow = rad_early_grow = rad_mid_grow = rad_end_grow = rad_late_grow = rad_len_grow = -1; + v_start_grow = v_early_grow = v_mid_grow = v_end_grow = v_late_grow = -1; + mean_v_grow = var_v_grow = sd_v_grow = n_grow = 0; + mean_pol_x_grow = mean_pol_y_grow = 0; + mean_pol_x_spring = mean_pol_y_spring = 0; + mean_pol_x_fall = mean_pol_y_fall = 0; + + for (i=0; i= 0.150 && rad_start_grow < 0){ rad_start_grow = rad_ann[i]; v_start_grow = v_ann[i];} + if (cum_v[i] >= 0.500 && rad_mid_grow < 0){ rad_mid_grow = rad_ann[i]; v_mid_grow = v_ann[i];} + if (cum_v[i] >= 0.800 && rad_end_grow < 0){ rad_end_grow = rad_ann[i]; v_end_grow = v_ann[i];} + if (cum_v[i] >= 0.150 && cum_v[i] < 0.800){ + var_recurrence(v_ann[i], &mean_v_grow, &var_v_grow, ++n_grow); + mean_pol_x_grow += v_ann[i]*cos(rad_ann[i]); + mean_pol_y_grow += v_ann[i]*sin(rad_ann[i]); + n_grow + } + if (cum_v[i] >= 0.150 && cum_v[i] < 0.500){ + mean_pol_x_spring += v_ann[i]*cos(rad_ann[i]); + mean_pol_y_spring += v_ann[i]*sin(rad_ann[i]); + n_spring++; + } + if (cum_v[i] >= 0.500 && cum_v[i] < 0.800){ + mean_pol_x_fall += v_ann[i]*cos(rad_ann[i]); + mean_pol_y_fall += v_ann[i]*sin(rad_ann[i]); + n_fall++; + } + } + + rad_len_grow = rad_end - rad_start; + sd_v_grow = standdev(var_v_grow, n); + + mean_pol_x_grow /= n_grow; + mean_pol_y_grow /= n_grow; + mean_pol_x_spring /= n_spring; + mean_pol_y_spring /= n_spring; + mean_pol_x_fall /= n_fall; + mean_pol_y_fall /= n_fall; + + mean_rad_grow = atan2(mean_pol_y_grow, mean_pol_x_grow); + if (mean_rad_grow <= 0) mean_rad_grow += 2*M_PI; + mean_v_grow = 
sqrt(mean_pol_x_grow*mean_pol_x_grow + mean_pol_y_grow*mean_pol_y_grow); + + mean_rad_spring = atan2(mean_pol_y_spring, mean_pol_x_spring); + if (mean_rad_spring <= 0) mean_rad_spring += 2*M_PI; + mean_v_spring = sqrt(mean_pol_x_spring*mean_pol_x_spring + mean_pol_y_spring*mean_pol_y_spring); + + mean_rad_fall = atan2(mean_pol_y_fall, mean_pol_x_fall); + if (mean_rad_fall <= 0) mean_rad_fall += 2*M_PI; + mean_v_fall = sqrt(mean_pol_x_fall*mean_pol_x_fall + mean_pol_y_fall*mean_pol_y_fall); + + + + valid = false; + + // sanity check? + // if () valid = true; + + valid = true; + + /** copy POL if all OK **/ + if (valid){ + if (pol->odem) ts->pol_[_POL_DEM_][year][p] = (short)(ph.doy_early_min*dce+ce0); // days since 1st POL year + if (pol->odss) ts->pol_[_POL_DSS_][year][p] = (short)(ph.doy_start_green*dce+ce0); // days since 1st POL year + if (pol->odri) ts->pol_[_POL_DRI_][year][p] = (short)(ph.doy_early_flex*dce+ce0); // days since 1st POL year + if (pol->odps) ts->pol_[_POL_DPS_][year][p] = (short)(ph.doy_peak*dce+ce0); // days since 1st POL year + if (pol->odfi) ts->pol_[_POL_DFI_][year][p] = (short)(ph.doy_late_flex*dce+ce0); // days since 1st POL year + if (pol->odes) ts->pol_[_POL_DES_][year][p] = (short)(ph.doy_end_green*dce+ce0); // days since 1st POL year + if (pol->odlm) ts->pol_[_POL_DLM_][year][p] = (short)(ph.doy_late_min*dce+ce0); // days since 1st POL year + if (pol->olts) ts->pol_[_POL_LTS_][year][p] = (short)(ph.min_min_duration*dce); // days + if (pol->olgs) ts->pol_[_POL_LGS_][year][p] = (short)(ph.green_duration*dce); // days + if (pol->ovem) ts->pol_[_POL_VEM_][year][p] = (short)(ph.early_min_val); // index value + if (pol->ovss) ts->pol_[_POL_VSS_][year][p] = (short)(ph.start_green_val); // index value + if (pol->ovri) ts->pol_[_POL_VRI_][year][p] = (short)(ph.early_flex_val); // index value + if (pol->ovps) ts->pol_[_POL_VPS_][year][p] = (short)(ph.peak_val); // index value + if (pol->ovfi) ts->pol_[_POL_VFI_][year][p] = 
(short)(ph.late_flex_val); // index value + if (pol->oves) ts->pol_[_POL_VES_][year][p] = (short)(ph.end_green_val); // index value + if (pol->ovlm) ts->pol_[_POL_VLM_][year][p] = (short)(ph.late_min_val); // index value + if (pol->ovbl) ts->pol_[_POL_VBL_][year][p] = (short)(ph.latent_val); // index value + if (pol->ovsa) ts->pol_[_POL_VSA_][year][p] = (short)(ph.amplitude); // index value + if (pol->oist) ts->pol_[_POL_IST_][year][p] = (short)(ph.min_min_integral*dce*0.001); // days * index value * 10 + if (pol->oibl) ts->pol_[_POL_IBL_][year][p] = (short)(ph.latent_integral*dce*0.001); // days * index value * 10 + if (pol->oibt) ts->pol_[_POL_IBT_][year][p] = (short)(ph.total_integral*dce*0.001); // days * index value * 10 + if (pol->oigs) ts->pol_[_POL_IGS_][year][p] = (short)(ph.green_integral*dce*0.001); // days * index value * 10 + if (pol->orar) ts->pol_[_POL_RAR_][year][p] = (short)(ph.greenup_rate/dce); // index value / days + if (pol->oraf) ts->pol_[_POL_RAF_][year][p] = (short)(ph.senescence_rate/dce); // index value / days + if (pol->ormr) ts->pol_[_POL_RMR_][year][p] = (short)(ph.early_flex_rate/dce); // index value / days + if (pol->ormf) ts->pol_[_POL_RMF_][year][p] = (short)(ph.late_flex_rate/dce); // index value / days + } + + } + + + } + + /** clean **/ + free((void*)v); free((void*)doy); + free((void*)pol_x); free((void*)pol_y); + + } + + if (error > 0) return FAILURE; + + return SUCCESS; +} + + +/** public functions ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + + +/** This function derives phenometrics from an interpolated time series. + +++ The function will fail if FORCE was not compiled with SPLITS (see in- + +++ stallation instructions). Do not expect this function to work if you + +++ have fairly sparse time series. In addition, the time series needs to + +++ be long enough (at least extend into the previous and next year). Phe- + +++ nometrics are derived for each given year. 
+ --- ts: pointer to instantly useable TSA image arrays + --- mask_: mask image + --- nc: number of cells + --- ni: number of interpolation steps + --- nodata: nodata value + --- phl: HL parameters + +++ Return: SUCCESS/FAILURE ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ +int tsa_polar(tsa_t *ts, small *mask_, int nc, int ni, short nodata, par_hl_t *phl){ + + + //if (phl->tsa.pol.ospl + + // phl->tsa.pol.opol + + // phl->tsa.pol.otrd + + // phl->tsa.pol.ocat == 0) return SUCCESS; + + //cite_me(_CITE_POLAR_); + + + if (polar_ts(ts, mask_, nc, ni, nodata, + phl->date_range[_MIN_].year, phl->date_range[_MAX_].year, &phl->tsa.pol) == FAILURE) return FAILURE; + + + + return SUCCESS; +} + diff --git a/src/higher-level/polar-hl.h b/src/higher-level/polar-hl.h new file mode 100755 index 00000000..0b21e143 --- /dev/null +++ b/src/higher-level/polar-hl.h @@ -0,0 +1,50 @@ +/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +This file is part of FORCE - Framework for Operational Radiometric +Correction for Environmental monitoring. + +Copyright (C) 2013-2020 David Frantz + +FORCE is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +FORCE is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with FORCE. If not, see . 
+ ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + +/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +Polarmetrics header ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + + +#ifndef PHENO_HL_H +#define PHENO_HL_H + +#include // core input and output functions +#include // standard general utilities library + +#include "../cross-level/cite-cl.h" +#include "../higher-level/param-hl.h" +#include "../higher-level/tsa-hl.h" + + +#ifdef __cplusplus +extern "C" { +#endif + +int tsa_polar(tsa_t *ts, small *mask_, int nc, int ni, short nodata, par_hl_t *phl); + +#ifdef __cplusplus +} +#endif + +#endif + diff --git a/src/higher-level/tsa-hl.c b/src/higher-level/tsa-hl.c index 5d076876..7ac49194 100755 --- a/src/higher-level/tsa-hl.c +++ b/src/higher-level/tsa-hl.c @@ -493,6 +493,8 @@ short nodata; tsa_fold(&ts, mask_, nc, ni, nodata, phl); + tsa_polar(&ts, mask_, nc, ni, nodata, phl); + tsa_pheno(&ts, mask_, nc, ni, nodata, phl); tsa_trend(&ts, mask_, nc, nodata, phl); From ae1a2a36656dd5e8901fdf66ca381fbb6689c549 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 6 Aug 2020 08:43:07 +0200 Subject: [PATCH 09/78] added target in Makefile --- Makefile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index d51c7f83..14f3a6dd 100755 --- a/Makefile +++ b/Makefile @@ -74,7 +74,7 @@ TA=temp-aux all: temp cross lower higher aux exe cross: enum_cl cite_cl utils_cl alloc_cl stack_cl imagefuns_cl param_cl date_cl datesys_cl lock_cl cube_cl dir_cl stats_cl pca_cl tile_cl queue_cl warp_cl sun_cl quality_cl sys_cl konami_cl download_cl read_cl lower: table_ll param_ll meta_ll cube_ll equi7_ll glance7_ll atc_ll sunview_ll read_ll radtran_ll topo_ll cloud_ll gas_ll brdf_ll atmo_ll aod_ll resmerge_ll coreg_ll coregfuns_ll acix_ll modwvp_ll -higher: param_hl progress_hl tasks_hl read-aux_hl read-ard_hl quality_hl bap_hl level3_hl cso_hl tsa_hl index_hl 
interpolate_hl stm_hl fold_hl standardize_hl pheno_hl trend_hl ml_hl texture_hl lsm_hl lib_hl sample_hl imp_hl cfimp_hl l2imp_hl +higher: param_hl progress_hl tasks_hl read-aux_hl read-ard_hl quality_hl bap_hl level3_hl cso_hl tsa_hl index_hl interpolate_hl stm_hl fold_hl standardize_hl pheno_hl polar_hl trend_hl ml_hl texture_hl lsm_hl lib_hl sample_hl imp_hl cfimp_hl l2imp_hl aux: param_aux param_train_aux train_aux exe: force force-parameter force-qai-inflate force-tile-finder force-tabulate-grid force-l2ps force-higher-level force-train force-lut-modis .PHONY: temp all install install_ bash python clean build @@ -266,6 +266,9 @@ standardize_hl: temp $(DH)/standardize-hl.c pheno_hl: temp $(DH)/pheno-hl.cpp $(GPP) $(CFLAGS) $(SPLITS) -c $(DH)/pheno-hl.cpp -o $(TH)/pheno_hl.o $(LDSPLITS) +polar_hl: temp $(DH)/polar-hl.c + $(GCC) $(CFLAGS) -c $(DH)/polar-hl.c -o $(TH)/polar_hl.o + trend_hl: temp $(DH)/trend-hl.c $(GCC) $(CFLAGS) -c $(DH)/trend-hl.c -o $(TH)/trend_hl.o From 2fd129b7097b670d668ff80b32117643eda6d93b Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 6 Aug 2020 17:29:26 +0200 Subject: [PATCH 10/78] 1st polarmetrics implementation --- src/cross-level/cite-cl.c | 5 + src/cross-level/cite-cl.h | 3 +- src/cross-level/enum-cl.c | 7 + src/cross-level/enum-cl.h | 8 +- src/higher-level/param-hl.c | 86 ++++++++ src/higher-level/param-hl.h | 19 ++ src/higher-level/polar-hl.c | 407 ++++++++++++++++++++++-------------- src/higher-level/polar-hl.h | 4 +- src/higher-level/tsa-hl.c | 92 ++++++-- src/higher-level/tsa-hl.h | 12 +- 10 files changed, 468 insertions(+), 175 deletions(-) diff --git a/src/cross-level/cite-cl.c b/src/cross-level/cite-cl.c index ae0abc41..8f13bfe8 100755 --- a/src/cross-level/cite-cl.c +++ b/src/cross-level/cite-cl.c @@ -300,6 +300,11 @@ cite_t _cite_me_[_CITE_LENGTH_] = { "Gao, B. (1996). NDWI — A normalized difference water index for remote " "sensing of vegetation liquid water from space. 
Remote Sensing of Environment, " "58, 3, 257-266", + false }, + { "Polar metrics", + "Brooks, B., Lee, D., Pomara, L., Hargrove, W. (2020). Monitoring Broadscale " + "Vegetational Diversity and Change across North American Landscapes Using Land " + "Surface Phenology. Forests 11(6), 606", false } }; diff --git a/src/cross-level/cite-cl.h b/src/cross-level/cite-cl.h index bb35b505..9bb19ae5 100755 --- a/src/cross-level/cite-cl.h +++ b/src/cross-level/cite-cl.h @@ -54,7 +54,8 @@ enum { _CITE_FORCE_, _CITE_L2PS_, _CITE_ATMVAL_, _CITE_DISTURBANCE_, _CITE_NDBI_, _CITE_NDWI_, _CITE_MNDWI_, _CITE_NDSI_, _CITE_SMA_, _CITE_EQUI7_, _CITE_RESMERGE_, _CITE_LSM_, - _CITE_NDTI_, _CITE_NDMI_, _CITE_LENGTH_ }; + _CITE_NDTI_, _CITE_NDMI_, _CITE_POL_, + _CITE_LENGTH_ }; typedef struct { char description[NPOW_10]; diff --git a/src/cross-level/enum-cl.c b/src/cross-level/enum-cl.c index b5e91664..98dfd36b 100755 --- a/src/cross-level/enum-cl.c +++ b/src/cross-level/enum-cl.c @@ -116,6 +116,13 @@ const tagged_enum_t _TAGGED_ENUM_LSP_[_LSP_LENGTH_] = { {_LSP_IBT_, "IBT" }, {_LSP_IGS_, "IGS" }, {_LSP_RAR_, "RAR" }, {_LSP_RAF_, "RAF" }, {_LSP_RMR_, "RMR" }, {_LSP_RMF_, "RMF" }}; +const tagged_enum_t _TAGGED_ENUM_POL_[_POL_LENGTH_] = { + { _POL_DSS_, "DSS" }, { _POL_DMS_, "DMS" }, { _POL_DES_, "DES" }, { _POL_DEV_, "DEV" }, + { _POL_DAV_, "DAV" }, { _POL_DLV_, "DLV" }, { _POL_LGS_, "LGS" }, { _POL_LBV_, "LBV" }, + { _POL_VSS_, "VSS" }, { _POL_VMS_, "VMS" }, { _POL_VES_, "VES" }, { _POL_VEV_, "VEV" }, + { _POL_VAV_, "VAV" }, { _POL_VLV_, "VLV" }, { _POL_VGA_, "VGA" }, { _POL_VGV_, "VGV" }, + { _POL_DPY_, "DPY" }}; + const tagged_enum_t _TAGGED_ENUM_TAIL_[_TAIL_LENGTH_] = { { _TAIL_LEFT_, "LEFT" }, { _TAIL_TWO_, "TWO" }, { _TAIL_RIGHT_, "RIGHT" }}; diff --git a/src/cross-level/enum-cl.h b/src/cross-level/enum-cl.h index e2c88e16..c085b4e3 100755 --- a/src/cross-level/enum-cl.h +++ b/src/cross-level/enum-cl.h @@ -189,7 +189,12 @@ enum { _LSP_DEM_, _LSP_DSS_, _LSP_DRI_, _LSP_DPS_, _LSP_DFI_, 
_LSP_DES_, _LSP_VPS_, _LSP_VFI_, _LSP_VES_, _LSP_VLM_, _LSP_VBL_, _LSP_VSA_, _LSP_IST_, _LSP_IBL_, _LSP_IBT_, _LSP_IGS_, _LSP_RAR_, _LSP_RAF_, _LSP_RMR_, _LSP_RMF_, _LSP_LENGTH_ }; - + +// polar metrics +enum { _POL_DSS_, _POL_DMS_, _POL_DES_, _POL_DEV_, _POL_DAV_, _POL_DLV_, + _POL_LGS_, _POL_LBV_, _POL_VSS_, _POL_VMS_, _POL_VES_, _POL_VEV_, + _POL_VAV_, _POL_VLV_, _POL_VGA_, _POL_VGV_, _POL_DPY_, _POL_LENGTH_ }; + // folding enum { _FLD_YEAR_, _FLD_QUARTER_, _FLD_MONTH_, _FLD_WEEK_, _FLD_DOY_, _FLD_LENGTH_ }; @@ -262,6 +267,7 @@ extern const tagged_enum_t _TAGGED_ENUM_INT_[_INT_LENGTH_]; extern const tagged_enum_t _TAGGED_ENUM_STA_[_STA_LENGTH_]; extern const tagged_enum_t _TAGGED_ENUM_HEMI_[_HEMI_LENGTH_]; extern const tagged_enum_t _TAGGED_ENUM_LSP_[_LSP_LENGTH_]; +extern const tagged_enum_t _TAGGED_ENUM_POL_[_POL_LENGTH_]; extern const tagged_enum_t _TAGGED_ENUM_TAIL_[_TAIL_LENGTH_]; extern const tagged_enum_t _TAGGED_ENUM_STD_[_STD_LENGTH_]; extern const tagged_enum_t _TAGGED_ENUM_ML_[_ML_LENGTH_]; diff --git a/src/higher-level/param-hl.c b/src/higher-level/param-hl.c index 9e77eaf4..b2e9f3ea 100755 --- a/src/higher-level/param-hl.c +++ b/src/higher-level/param-hl.c @@ -51,6 +51,7 @@ void free_mcl(par_mcl_t *mcl); int parse_ftr(par_ftr_t *ftr); int parse_sta(par_sta_t *sta); int parse_lsp(par_lsp_t *lsp); +int parse_pol(par_pol_t *pol); int parse_txt(par_txt_t *txt); int parse_lsm(par_lsm_t *lsm); int parse_quality(par_qai_t *qai); @@ -232,6 +233,17 @@ void register_tsa(params_t *params, par_hl_t *phl){ register_bool_par(params, "OUTPUT_TRP", &phl->tsa.lsp.otrd); register_bool_par(params, "OUTPUT_CAP", &phl->tsa.lsp.ocat); + // polar parameters + register_float_par(params, "POL_START_THRESHOLD", 0.01, 0.99, &phl->tsa.pol.start); + register_float_par(params, "POL_MID_THRESHOLD", 0.01, 0.99, &phl->tsa.pol.mid); + register_float_par(params, "POL_END_THRESHOLD", 0.01, 0.99, &phl->tsa.pol.end); + register_enumvec_par(params, "POL", _TAGGED_ENUM_POL_, _POL_LENGTH_, 
&phl->tsa.pol.metrics, &phl->tsa.pol.nmetrics); + register_enum_par(params, "STANDARDIZE_POL", _TAGGED_ENUM_STD_, _STD_LENGTH_, &phl->tsa.pol.standard); + register_bool_par(params, "OUTPUT_PCT", &phl->tsa.pol.opct); + register_bool_par(params, "OUTPUT_POL", &phl->tsa.pol.opol); + register_bool_par(params, "OUTPUT_TRO", &phl->tsa.pol.otrd); + register_bool_par(params, "OUTPUT_CAO", &phl->tsa.pol.ocat); + // trend parameters register_enum_par(params, "TREND_TAIL", _TAGGED_ENUM_TAIL_, _TAIL_LENGTH_, &phl->tsa.trd.tail); register_float_par(params, "TREND_CONF", 0, 1, &phl->tsa.trd.conf); @@ -843,6 +855,60 @@ int i; } +/** This function reparses polarmetrics parameters (special para- ++++ meter that cannot be parsed with the general parser). +--- lsp: phenometrics parameters ++++ Return: SUCCESS/FAILURE ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ +int parse_pol(par_pol_t *pol){ +int i; + + + for (i=0; inmetrics; i++){ + if (pol->metrics[i] == _POL_DSS_){ + pol->odss = true; + } else if (pol->metrics[i] == _POL_DMS_){ + pol->odms = true; + } else if (pol->metrics[i] == _POL_DES_){ + pol->odes = true; + } else if (pol->metrics[i] == _POL_DEV_){ + pol->odev = true; + } else if (pol->metrics[i] == _POL_DAV_){ + pol->odav = true; + } else if (pol->metrics[i] == _POL_DLV_){ + pol->odlv = true; + } else if (pol->metrics[i] == _POL_LGS_){ + pol->olgs = true; + } else if (pol->metrics[i] == _POL_LBV_){ + pol->olbv = true; + } else if (pol->metrics[i] == _POL_VSS_){ + pol->ovss = true; + } else if (pol->metrics[i] == _POL_VMS_){ + pol->ovms = true; + } else if (pol->metrics[i] == _POL_VES_){ + pol->oves = true; + } else if (pol->metrics[i] == _POL_VEV_){ + pol->ovev = true; + } else if (pol->metrics[i] == _POL_VAV_){ + pol->ovav = true; + } else if (pol->metrics[i] == _POL_VLV_){ + pol->ovlv = true; + } else if (pol->metrics[i] == _POL_VGA_){ + pol->ovga = true; + } else if (pol->metrics[i] == _POL_VGV_){ + pol->ovgv = true; + } else if 
(pol->metrics[i] == _POL_DPY_){ + pol->odpy = true; + } else { + printf("warning: unknown pol.\n"); + } + } + + + return SUCCESS; +} + + /** This function reparses texture parameters (special para- +++ meter that cannot be parsed with the general parser). --- txt: texture parameters @@ -1322,6 +1388,8 @@ double tol = 5e-3; if (phl->type == _HL_CSO_) parse_sta(&phl->cso.sta); if (phl->type == _HL_TSA_) parse_lsp(&phl->tsa.lsp); + + if (phl->type == _HL_TSA_) parse_pol(&phl->tsa.pol); if (phl->type == _HL_TXT_) parse_txt(&phl->txt); @@ -1368,6 +1436,10 @@ double tol = 5e-3; // phenology not possible for first and last year phl->tsa.lsp.ny = phl->ny-2; + // polarmetrics not possible for one year + phl->tsa.pol.ny = phl->ny; + phl->tsa.pol.ns = phl->ny-1; + #ifdef FORCE_DEBUG printf("ny: %d, nq: %d, nm: %d, nw: %d, nd: %d\n", phl->ny, phl->nq, phl->nm, phl->nw, phl->nd); @@ -1519,6 +1591,20 @@ double tol = 5e-3; } + + if (phl->tsa.pol.opct || phl->tsa.pol.opol || phl->tsa.pol.otrd || phl->tsa.pol.ocat){ + + if (phl->tsa.pol.ns < 1){ + printf("POL cannot be estimated for one year.\n"); + printf("Time window is too short.\n"); + return FAILURE; + } + + if (phl->tsa.tsi.method == _INT_NONE_){ + printf("Polarmetrics require INTERPOLATE != NONE\n"); return FAILURE;} + + } + } diff --git a/src/higher-level/param-hl.h b/src/higher-level/param-hl.h index dd3c0d1e..24b2527e 100755 --- a/src/higher-level/param-hl.h +++ b/src/higher-level/param-hl.h @@ -200,6 +200,24 @@ typedef struct { int standard; } par_lsp_t; +// polar metrics +typedef struct { + int ny; + int ns; + float start; + float mid; + float end; + int opct; // flag: output polar coordinate transformed TS + int opol; // flag: output polar metrics + int *metrics, nmetrics; + int odss, odms, odes, odev, odav, odlv; + int olgs, olbv, ovss, ovms, oves, ovev; + int ovav, ovlv, ovga, ovgv, odpy; + int otrd; // flag: output POL trends + int ocat; // flag: output POL cats + int standard; +} par_pol_t; + // trend typedef 
struct { int tail; // tail type @@ -249,6 +267,7 @@ typedef struct { par_tsi_t tsi; par_sma_t sma; par_lsp_t lsp; + par_pol_t pol; par_trd_t trd; } par_tsa_t; diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index c6d2346c..c340cf38 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -28,7 +28,104 @@ This file contains functions for polarmetrics #include "polar-hl.h" -int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min, int year_max, par_pol_t *pol); +enum { _RAD_, _VAL_, _CUM_, _YEAR_, _DOY_, _SEASON_, _PCX_, _PCY_, _COORD_LEN_ }; + +void polar_coords(float r, float v, float yr, float polar_array[_COORD_LEN_]); +void polar_vector(float x, float y, float yr, float polar_array[_COORD_LEN_]); +void identify_seasons(float **polar, int ni, int istep, float theta); +void accumulate_seasons(float **polar, int ni); + +int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min, int year_max, par_tsi_t *tsi, par_pol_t *pol); + + + + +void polar_coords(float r, float v, float yr, float polar_array[_COORD_LEN_]){ + + + polar_array[_RAD_] = r; + polar_array[_VAL_] = v; + polar_array[_YEAR_] = yr; + polar_array[_DOY_] = r*365.0/(2.0*M_PI); + polar_array[_PCX_] = v*cos(r); + polar_array[_PCY_] = v*sin(r); + + return; +} + + +void polar_vector(float x, float y, float yr, float polar_array[_COORD_LEN_]){ +float r, v; + + + r = atan2(y, x); + if (r <= 0) r += 2*M_PI; + v = sqrt(x*x + y*y); + + polar_array[_RAD_] = r; + polar_array[_VAL_] = v; + polar_array[_YEAR_] = yr; + polar_array[_DOY_] = r*365.0/(2.0*M_PI); + polar_array[_PCX_] = x; + polar_array[_PCY_] = y; + + return; +} + + +void identify_seasons(float **polar, int ni, int istep, float theta){ +int i, s = -1; +float rstep = istep/365.0*2.0*M_PI; + + + for (i=0; i= theta && + polar[i][_RAD_]-theta <= rstep) s++; + + polar[i][_SEASON_] = s; + + } + + return; +} + +void accumulate_seasons(float **polar, int ni){ +int i, s; +float 
sum; + + + polar[0][_CUM_] = polar[0][_VAL_]; + s = polar[0][_SEASON_]; + + for (i=1; i=0; i--){ + + if (polar[i][_SEASON_] != s){ + sum = polar[i][_CUM_]; + s = polar[i][_SEASON_]; + } + + polar[i][_CUM_] /= sum; + + } + + return; +} /** This function derives phenometrics from an interpolated time series @@ -43,32 +140,34 @@ int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min --- pol: pheno parameters +++ Return: SUCCESS/FAILURE +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ -int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min, int year_max, par_pol_t *pol){ -int l, npol = 15; +int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min, int year_max, par_tsi_t *tsi, par_pol_t *pol){ +int l; int year; int p; -int i, ii, i_, i0, i1, ni_; -char cdat0[NPOW_10]; -char cdat1[NPOW_10]; -int nchar, error = 0; -float *v = NULL; -float *yhat = NULL; -float *w = NULL; -float *doy = NULL; -float dce, ce0; -float ymax; -int doymax, yoff; -bool southern = false; +int i, i_, i0; +int s, y; +float r, v; bool valid; -float dseg; -int nseg; float ce_left, ce_right, ce; float v_left, v_right; -Spline *spl; + +enum { _START_, _MID_, _END_, _EVENT_LEN_ }; +enum { _LONGTERM_, _EARLY_, _GROW_, _LATE_, _WINDOW_LEN_}; + +float theta, doy_theta; + +float timing[_EVENT_LEN_][_COORD_LEN_]; +float vector[_WINDOW_LEN_][_COORD_LEN_]; +float mean_window[_WINDOW_LEN_][2]; +int n_window[_WINDOW_LEN_]; +double recurrence[2]; + +float **polar = NULL; + valid = false; - for (l=0; lpol_[l] != NULL) valid = true; } @@ -77,22 +176,18 @@ Spline *spl; - #pragma omp parallel private(l,i,ii,i0,i1,ni_,ce_left,ce_right,v_left,v_right,year,valid,doymax,yoff,ce,i_,ymax,v,w,doy,dce,ce0,cdat0,cdat1,spl,nchar) firstprivate(southern) shared(mask_,ts,nc,ni,year_min,year_max,nodata,pol,nseg,npol) reduction(+: error) default(none) + //#pragma omp parallel 
private(l,i,i0,ni_,ce_left,ce_right,v_left,v_right,year,valid,ce,v,doy) firstprivate(southern) shared(mask_,ts,nc,ni,year_min,year_max,nodata,pol,nseg) default(none) { // allocate - alloc((void**)&v, ni, sizeof(float)); - alloc((void**)&doy, ni, sizeof(float)); - alloc((void**)&rad, ni, sizeof(float)); - alloc((void**)&pol_x, ni, sizeof(float)); - alloc((void**)&pol_y, ni, sizeof(float)); + alloc_2D((void***)&polar, ni, _COORD_LEN_, sizeof(float)); - #pragma omp for + //#pragma omp for for (p=0; ppol_[l] != NULL){ for (year=0; yearny; year++) ts->pol_[l][year][p] = nodata; } @@ -103,18 +198,13 @@ Spline *spl; valid = true; - mean_pol_x = 0; - mean_pol_y = 0; + memset(mean_window[_LONGTERM_], 0, 2*sizeof(float)); /** copy ce/v to working variables +++ and interpolate linearly to make sure **/ for (i=0; id_tsi[i].doy; - rad[i] = ts->d_tsi[i].doy/365*2*M_PI; - // linearly interpolate v-value if (ts->tsi_[i][p] == nodata){ @@ -138,177 +228,186 @@ Spline *spl; } if (ce_left > 0 && ce_right > 0){ - v[i] = (v_left*(ce_right-ce) + v_right*(ce-ce_left))/(ce_right-ce_left); + v = (v_left*(ce_right-ce) + v_right*(ce-ce_left))/(ce_right-ce_left); } else if (ce_left > 0){ - v[i] = v_left; + v = v_left; } else if (ce_right > 0){ - v[i] = v_right; + v = v_right; } else { + v = nodata; valid = false; } // copy v-value } else { - v[i] = ts->tsi_[i][p]; + v = ts->tsi_[i][p]; } - pol_x[i] = v[i]*cos(rad[i]); - pol_y[i] = v[i]*sin(rad[i]); + r = ts->d_tsi[i].doy/365.0*2.0*M_PI; + +if (p == 0) printf("doy: %d\n", ts->d_tsi[i].doy); +if (p == 0) printf("r: %f\n", r); +if (p == 0) printf("v: %f\n", v); + polar_coords(r, v, ts->d_tsi[i].year, polar[i]); +if (p == 0) printf("x: %f\n", polar[i][_PCX_]); +if (p == 0) printf("y: %f\n", polar[i][_PCY_]); - mean_pol_x += pol_x[i]; - mean_pol_y += pol_y[i]; + mean_window[_LONGTERM_][_X_] += polar[i][_PCX_]; + mean_window[_LONGTERM_][_Y_] += polar[i][_PCY_]; } if (!valid) continue; +if (p == 0) printf("valid pixel.\n"); + // mean of polar 
coordinates - mean_pol_x /= ni; - mean_pol_y /= ni; + mean_window[_LONGTERM_][_X_] /= ni; + mean_window[_LONGTERM_][_Y_] /= ni; +if (p == 0) printf("mean pol x/y: %f %f\n", mean_window[_LONGTERM_][_X_], mean_window[_LONGTERM_][_Y_]); - // average vector - mean_rad = atan2(mean_pol_y, mean_pol_x); - if (mean_rad <= 0) mean_rad += 2*M_PI; - mean_v = sqrt(mean_pol_x*mean_pol_x + mean_pol_y*mean_pol_y); + // multi-annual average vector + polar_vector(mean_window[_LONGTERM_][_X_], mean_window[_LONGTERM_][_Y_], 0, vector[_LONGTERM_]); // diametric opposite of average vector = start of phenological year - if (mean_rad < M_PI){ - theta = mean_rad + M_PI; + if (vector[_LONGTERM_][_RAD_] < M_PI){ + theta = vector[_LONGTERM_][_RAD_] + M_PI; } else { - theta = mean_rad - M_PI; + theta = vector[_LONGTERM_][_RAD_] - M_PI; } + doy_theta = (theta*365.0/(2.0*M_PI)); + +if (p == 0) printf("avg: %f %f %f\n", vector[_LONGTERM_][_RAD_], vector[_LONGTERM_][_DOY_], vector[_LONGTERM_][_VAL_]); +if (p == 0) printf("theta: %f %f\n", theta, doy_theta); - // yoff = 0; // year offset, probably use? 
- - - for (year=0; yearny; year++){ - - sum_ann = 0; - n = 0; - - // extract annual values - // cumulative values - for (i=i1; id_tsi[i].year < year_min+year) continue; - if (ts->d_tsi[i].year == year_min+year && rad[i] < theta) continue; - if (ts->d_tsi[i].year > year_min+year && rad[i] => theta) break; + identify_seasons(polar, ni, tsi->step, theta); + accumulate_seasons(polar, ni); - rad_ann[n] = rad[i]; - v_ann[n] = v[i]; - sum_ann += v[i]; - cum_v_ann[n++] = sum_ann; + for (s=0, i0=0; sns; s++){ - } + memset(timing, 0, sizeof(float)*_EVENT_LEN_*_COORD_LEN_); + memset(mean_window, 0, sizeof(float)*_WINDOW_LEN_*2); + memset(n_window, 0, sizeof(float)*_WINDOW_LEN_); + memset(recurrence, 0, sizeof(double)*2); - rad_start_grow = rad_early_grow = rad_mid_grow = rad_end_grow = rad_late_grow = rad_len_grow = -1; - v_start_grow = v_early_grow = v_mid_grow = v_end_grow = v_late_grow = -1; - mean_v_grow = var_v_grow = sd_v_grow = n_grow = 0; - mean_pol_x_grow = mean_pol_y_grow = 0; - mean_pol_x_spring = mean_pol_y_spring = 0; - mean_pol_x_fall = mean_pol_y_fall = 0; - - for (i=0; i= 0.150 && rad_start_grow < 0){ rad_start_grow = rad_ann[i]; v_start_grow = v_ann[i];} - if (cum_v[i] >= 0.500 && rad_mid_grow < 0){ rad_mid_grow = rad_ann[i]; v_mid_grow = v_ann[i];} - if (cum_v[i] >= 0.800 && rad_end_grow < 0){ rad_end_grow = rad_ann[i]; v_end_grow = v_ann[i];} - if (cum_v[i] >= 0.150 && cum_v[i] < 0.800){ - var_recurrence(v_ann[i], &mean_v_grow, &var_v_grow, ++n_grow); - mean_pol_x_grow += v_ann[i]*cos(rad_ann[i]); - mean_pol_y_grow += v_ann[i]*sin(rad_ann[i]); - n_grow + for (i=i0; i s){ i0 = i; break; } + + // start of growing season + if (polar[i][_CUM_] >= pol->start && timing[_START_][_CUM_] == 0){ + memcpy(timing[_START_], polar[i], sizeof(float)*_COORD_LEN_);} + + // mid of growing season + if (polar[i][_CUM_] >= pol->mid && timing[_MID_][_CUM_] == 0){ + memcpy(timing[_MID_], polar[i], sizeof(float)*_COORD_LEN_);} + + // end of growing season + if (polar[i][_CUM_] >= 
pol->end && timing[_END_][_CUM_] == 0){ + memcpy(timing[_END_], polar[i], sizeof(float)*_COORD_LEN_);} + + // mean, sd of val + average vector of growing season + if (polar[i][_CUM_] >= pol->start && + polar[i][_CUM_] < pol->end){ + var_recurrence(polar[i][_VAL_], &recurrence[0], &recurrence[1], ++n_window[_GROW_]); + mean_window[_GROW_][_X_] += polar[i][_PCX_]; + mean_window[_GROW_][_Y_] += polar[i][_PCY_]; } - if (cum_v[i] >= 0.150 && cum_v[i] < 0.500){ - mean_pol_x_spring += v_ann[i]*cos(rad_ann[i]); - mean_pol_y_spring += v_ann[i]*sin(rad_ann[i]); - n_spring++; + + // average vector of early growing season part + if (polar[i][_CUM_] >= pol->start && + polar[i][_CUM_] < pol->mid){ + mean_window[_EARLY_][_X_] += polar[i][_PCX_]; + mean_window[_EARLY_][_Y_] += polar[i][_PCY_]; + n_window[_EARLY_]++; } - if (cum_v[i] >= 0.500 && cum_v[i] < 0.800){ - mean_pol_x_fall += v_ann[i]*cos(rad_ann[i]); - mean_pol_y_fall += v_ann[i]*sin(rad_ann[i]); - n_fall++; + + // average vector of late growing season part + if (polar[i][_CUM_] >= pol->mid && + polar[i][_CUM_] < pol->end){ + mean_window[_LATE_][_X_] += polar[i][_PCX_]; + mean_window[_LATE_][_Y_] += polar[i][_PCY_]; + n_window[_LATE_]++; } } - rad_len_grow = rad_end - rad_start; - sd_v_grow = standdev(var_v_grow, n); - - mean_pol_x_grow /= n_grow; - mean_pol_y_grow /= n_grow; - mean_pol_x_spring /= n_spring; - mean_pol_y_spring /= n_spring; - mean_pol_x_fall /= n_fall; - mean_pol_y_fall /= n_fall; - - mean_rad_grow = atan2(mean_pol_y_grow, mean_pol_x_grow); - if (mean_rad_grow <= 0) mean_rad_grow += 2*M_PI; - mean_v_grow = sqrt(mean_pol_x_grow*mean_pol_x_grow + mean_pol_y_grow*mean_pol_y_grow); - mean_rad_spring = atan2(mean_pol_y_spring, mean_pol_x_spring); - if (mean_rad_spring <= 0) mean_rad_spring += 2*M_PI; - mean_v_spring = sqrt(mean_pol_x_spring*mean_pol_x_spring + mean_pol_y_spring*mean_pol_y_spring); + mean_window[_GROW_][_X_] /= n_window[_GROW_]; + mean_window[_GROW_][_Y_] /= n_window[_GROW_]; + 
mean_window[_EARLY_][_X_] /= n_window[_EARLY_]; + mean_window[_EARLY_][_Y_] /= n_window[_EARLY_]; + mean_window[_LATE_][_X_] /= n_window[_LATE_]; + mean_window[_LATE_][_Y_] /= n_window[_LATE_]; - mean_rad_fall = atan2(mean_pol_y_fall, mean_pol_x_fall); - if (mean_rad_fall <= 0) mean_rad_fall += 2*M_PI; - mean_v_fall = sqrt(mean_pol_x_fall*mean_pol_x_fall + mean_pol_y_fall*mean_pol_y_fall); + polar_vector(mean_window[_GROW_][_X_], mean_window[_GROW_][_Y_], 0, vector[_GROW_]); + polar_vector(mean_window[_EARLY_][_X_], mean_window[_EARLY_][_Y_], 0, vector[_EARLY_]); + polar_vector(mean_window[_LATE_][_X_], mean_window[_LATE_][_Y_], 0, vector[_LATE_]); - valid = false; - // sanity check? + //valid = false; // if () valid = true; + //valid = true; + + if (doy_theta < 182) y = s; else y = s+1; + +if (p == 0) printf("season: %d, year %d\n", s, y); +if (p == 0) printf("mean, sd, and n: %f, %f, %d\n", recurrence[0], standdev(recurrence[1], n_window[_GROW_]), n_window[_GROW_]); - valid = true; /** copy POL if all OK **/ - if (valid){ - if (pol->odem) ts->pol_[_POL_DEM_][year][p] = (short)(ph.doy_early_min*dce+ce0); // days since 1st POL year - if (pol->odss) ts->pol_[_POL_DSS_][year][p] = (short)(ph.doy_start_green*dce+ce0); // days since 1st POL year - if (pol->odri) ts->pol_[_POL_DRI_][year][p] = (short)(ph.doy_early_flex*dce+ce0); // days since 1st POL year - if (pol->odps) ts->pol_[_POL_DPS_][year][p] = (short)(ph.doy_peak*dce+ce0); // days since 1st POL year - if (pol->odfi) ts->pol_[_POL_DFI_][year][p] = (short)(ph.doy_late_flex*dce+ce0); // days since 1st POL year - if (pol->odes) ts->pol_[_POL_DES_][year][p] = (short)(ph.doy_end_green*dce+ce0); // days since 1st POL year - if (pol->odlm) ts->pol_[_POL_DLM_][year][p] = (short)(ph.doy_late_min*dce+ce0); // days since 1st POL year - if (pol->olts) ts->pol_[_POL_LTS_][year][p] = (short)(ph.min_min_duration*dce); // days - if (pol->olgs) ts->pol_[_POL_LGS_][year][p] = (short)(ph.green_duration*dce); // days - if 
(pol->ovem) ts->pol_[_POL_VEM_][year][p] = (short)(ph.early_min_val); // index value - if (pol->ovss) ts->pol_[_POL_VSS_][year][p] = (short)(ph.start_green_val); // index value - if (pol->ovri) ts->pol_[_POL_VRI_][year][p] = (short)(ph.early_flex_val); // index value - if (pol->ovps) ts->pol_[_POL_VPS_][year][p] = (short)(ph.peak_val); // index value - if (pol->ovfi) ts->pol_[_POL_VFI_][year][p] = (short)(ph.late_flex_val); // index value - if (pol->oves) ts->pol_[_POL_VES_][year][p] = (short)(ph.end_green_val); // index value - if (pol->ovlm) ts->pol_[_POL_VLM_][year][p] = (short)(ph.late_min_val); // index value - if (pol->ovbl) ts->pol_[_POL_VBL_][year][p] = (short)(ph.latent_val); // index value - if (pol->ovsa) ts->pol_[_POL_VSA_][year][p] = (short)(ph.amplitude); // index value - if (pol->oist) ts->pol_[_POL_IST_][year][p] = (short)(ph.min_min_integral*dce*0.001); // days * index value * 10 - if (pol->oibl) ts->pol_[_POL_IBL_][year][p] = (short)(ph.latent_integral*dce*0.001); // days * index value * 10 - if (pol->oibt) ts->pol_[_POL_IBT_][year][p] = (short)(ph.total_integral*dce*0.001); // days * index value * 10 - if (pol->oigs) ts->pol_[_POL_IGS_][year][p] = (short)(ph.green_integral*dce*0.001); // days * index value * 10 - if (pol->orar) ts->pol_[_POL_RAR_][year][p] = (short)(ph.greenup_rate/dce); // index value / days - if (pol->oraf) ts->pol_[_POL_RAF_][year][p] = (short)(ph.senescence_rate/dce); // index value / days - if (pol->ormr) ts->pol_[_POL_RMR_][year][p] = (short)(ph.early_flex_rate/dce); // index value / days - if (pol->ormf) ts->pol_[_POL_RMF_][year][p] = (short)(ph.late_flex_rate/dce); // index value / days - } + //if (valid){ + //if (pol->odem) ts->pol_[_POL_DEM_][y][p] = (short)0; + if (pol->odss) ts->pol_[_POL_DSS_][y][p] = (short)timing[_START_][_DOY_]; + if (pol->odms) ts->pol_[_POL_DMS_][y][p] = (short)timing[_MID_][_DOY_]; + if (pol->odes) ts->pol_[_POL_DES_][y][p] = (short)timing[_END_][_DOY_]; + if (pol->odev) 
ts->pol_[_POL_DEV_][y][p] = (short)vector[_EARLY_][_DOY_]; + if (pol->odav) ts->pol_[_POL_DAV_][y][p] = (short)vector[_GROW_][_DOY_]; + if (pol->odlv) ts->pol_[_POL_DLV_][y][p] = (short)vector[_LATE_][_DOY_]; + //if (pol->odlm) ts->pol_[_POL_DLM_][y][p] = (short)0; + if (pol->olgs) ts->pol_[_POL_LGS_][y][p] = (short)(timing[_END_][_DOY_] - timing[_START_][_DOY_]); + if (pol->olbv) ts->pol_[_POL_LBV_][y][p] = (short)(vector[_LATE_][_DOY_] - vector[_EARLY_][_DOY_]); + //if (pol->ovem) ts->pol_[_POL_VEM_][y][p] = (short)0; + if (pol->ovss) ts->pol_[_POL_VSS_][y][p] = (short)timing[_START_][_VAL_]; + if (pol->ovms) ts->pol_[_POL_VMS_][y][p] = (short)timing[_MID_][_VAL_]; + if (pol->oves) ts->pol_[_POL_VES_][y][p] = (short)timing[_END_][_VAL_]; + if (pol->ovev) ts->pol_[_POL_VEV_][y][p] = (short)vector[_EARLY_][_VAL_]; + if (pol->ovav) ts->pol_[_POL_VAV_][y][p] = (short)vector[_GROW_][_VAL_]; + if (pol->ovlv) ts->pol_[_POL_VLV_][y][p] = (short)vector[_LATE_][_VAL_]; + //if (pol->ovlm) ts->pol_[_POL_VLM_][y][p] = (short)0; + //if (pol->ovbl) ts->pol_[_POL_VBL_][y][p] = (short)0; + if (pol->ovga) ts->pol_[_POL_VGA_][y][p] = (short)recurrence[0]; + if (pol->ovgv) ts->pol_[_POL_VGV_][y][p] = (short)standdev(recurrence[1], n_window[_GROW_]); + if (pol->odpy) ts->pol_[_POL_DPY_][y][p] = (short)doy_theta; + //if (pol->oist) ts->pol_[_POL_IST_][y][p] = (short)0; + //if (pol->oibl) ts->pol_[_POL_IBL_][y][p] = (short)0; + //if (pol->oibt) ts->pol_[_POL_IBT_][y][p] = (short)0; + //if (pol->oigs) ts->pol_[_POL_IGS_][y][p] = (short)0; + //if (pol->orar) ts->pol_[_POL_RAR_][y][p] = (short)0; + //if (pol->oraf) ts->pol_[_POL_RAF_][y][p] = (short)0; + //if (pol->ormr) ts->pol_[_POL_RMR_][y][p] = (short)0; + //if (pol->ormf) ts->pol_[_POL_RMF_][y][p] = (short)0; + //} } } - /** clean **/ - free((void*)v); free((void*)doy); - free((void*)pol_x); free((void*)pol_y); + free_2D((void**)polar, ni); } - if (error > 0) return FAILURE; return SUCCESS; } @@ -335,16 +434,16 @@ Spline *spl; int 
tsa_polar(tsa_t *ts, small *mask_, int nc, int ni, short nodata, par_hl_t *phl){ - //if (phl->tsa.pol.ospl + - // phl->tsa.pol.opol + - // phl->tsa.pol.otrd + - // phl->tsa.pol.ocat == 0) return SUCCESS; + if (phl->tsa.pol.opct + + phl->tsa.pol.opol + + phl->tsa.pol.otrd + + phl->tsa.pol.ocat == 0) return SUCCESS; - //cite_me(_CITE_POLAR_); + cite_me(_CITE_POL_); if (polar_ts(ts, mask_, nc, ni, nodata, - phl->date_range[_MIN_].year, phl->date_range[_MAX_].year, &phl->tsa.pol) == FAILURE) return FAILURE; + phl->date_range[_MIN_].year, phl->date_range[_MAX_].year, &phl->tsa.tsi, &phl->tsa.pol) == FAILURE) return FAILURE; diff --git a/src/higher-level/polar-hl.h b/src/higher-level/polar-hl.h index 0b21e143..129570a1 100755 --- a/src/higher-level/polar-hl.h +++ b/src/higher-level/polar-hl.h @@ -25,8 +25,8 @@ Polarmetrics header +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ -#ifndef PHENO_HL_H -#define PHENO_HL_H +#ifndef POLAR_HL_H +#define POLAR_HL_H #include // core input and output functions #include // standard general utilities library diff --git a/src/higher-level/tsa-hl.c b/src/higher-level/tsa-hl.c index 7ac49194..db300409 100755 --- a/src/higher-level/tsa-hl.c +++ b/src/higher-level/tsa-hl.c @@ -53,13 +53,13 @@ char fdate[NPOW_10]; char sensor[NPOW_04]; char domain[NPOW_10]; int nchar; -int o, nprod = 98; +int o, nprod = 149; int error = 0; -enum { _full_, _stats_, _inter_, _year_, _quarter_, _month_, _week_, _day_, _lsp_, _trd_, _cat_ }; -int prodlen[11] = { nt, phl->tsa.stm.sta.nmetrics, ni, +enum { _full_, _stats_, _inter_, _year_, _quarter_, _month_, _week_, _day_, _lsp_, _pol_, _trd_, _cat_ }; +int prodlen[12] = { nt, phl->tsa.stm.sta.nmetrics, ni, phl->ny, phl->nq, phl->nm, phl->nw, phl->nd, - phl->tsa.lsp.ny, _TRD_LENGTH_, _CAT_LENGTH_ }; -char prodname[98][NPOW_03] = { + phl->tsa.lsp.ny, phl->tsa.pol.ny, _TRD_LENGTH_, _CAT_LENGTH_ }; +char prodname[149][NPOW_03] = { "TSS", "RMS", "STM", "TSI", "SPL", "FBY", "FBQ", "FBM", 
"FBW", "FBD", "DEM-LSP", "DSS-LSP", "DRI-LSP", "DPS-LSP", "DFI-LSP", "DES-LSP", @@ -67,20 +67,29 @@ char prodname[98][NPOW_03] = { "VPS-LSP", "VFI-LSP", "VES-LSP", "VLM-LSP", "VBL-LSP", "VSA-LSP", "IST-LSP", "IBL-LSP", "IBT-LSP", "IGS-LSP", "RAR-LSP", "RAF-LSP", "RMR-LSP", "RMF-LSP", + "DSS-POL", "DMS-POL", "DES-POL", "DEV-POL", "DAV-POL", "DLV-POL", + "LGS-POL", "LBV-POL", "VSS-POL", "VMS-POL", "VES-POL", "VEV-POL", + "VAV-POL", "VLV-POL", "VGA-POL", "VGV-POL", "DPY-POL", "DEM-TRP", "DSS-TRP", "DRI-TRP", "DPS-TRP", "DFI-TRP", "DES-TRP", "DLM-TRP", "LTS-TRP", "LGS-TRP", "VEM-TRP", "VSS-TRP", "VRI-TRP", "VPS-TRP", "VFI-TRP", "VES-TRP", "VLM-TRP", "VBL-TRP", "VSA-TRP", "IST-TRP", "IBL-TRP", "IBT-TRP", "IGS-TRP", "RAR-TRP", "RAF-TRP", "RMR-TRP", "RMF-TRP", + "DSS-TRO", "DMS-TRO", "DES-TRO", "DEV-TRO", "DAV-TRO", "DLV-TRO", + "LGS-TRO", "LBV-TRO", "VSS-TRO", "VMS-TRO", "VES-TRO", "VEV-TRO", + "VAV-TRO", "VLV-TRO", "VGA-TRO", "VGV-TRO", "DPY-TRO", "TRY", "TRQ", "TRM", "TRW", "TRD", "DEM-CAP", "DSS-CAP", "DRI-CAP", "DPS-CAP", "DFI-CAP", "DES-CAP", "DLM-CAP", "LTS-CAP", "LGS-CAP", "VEM-CAP", "VSS-CAP", "VRI-CAP", "VPS-CAP", "VFI-CAP", "VES-CAP", "VLM-CAP", "VBL-CAP", "VSA-CAP", "IST-CAP", "IBL-CAP", "IBT-CAP", "IGS-CAP", "RAR-CAP", "RAF-CAP", "RMR-CAP", "RMF-CAP", + "DSS-CAO", "DMS-CAO", "DES-CAO", "DEV-CAO", "DAV-CAO", "DLV-CAO", + "LGS-CAO", "LBV-CAO", "VSS-CAO", "VMS-CAO", "VES-CAO", "VEV-CAO", + "VAV-CAO", "VLV-CAO", "VGA-CAO", "VGV-CAO", "DPY-CAO", "CAY", "CAQ", "CAM", "CAW", "CAD" }; -int prodtype[98] = { +int prodtype[149] = { _full_, _full_, _stats_, _inter_, _inter_, _year_, _quarter_, _month_, _week_, _day_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, @@ -88,20 +97,29 @@ int prodtype[98] = { _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, + _pol_, _pol_, _pol_, _pol_, _pol_, _pol_, + _pol_, _pol_, _pol_, _pol_, _pol_, _pol_, + _pol_, _pol_, _pol_, _pol_, _pol_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, 
_trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, + _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, + _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, + _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, + _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, + _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, + _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, _cat_ }; -int enable[98] = { +int enable[149] = { true, phl->tsa.sma.orms, phl->tsa.stm.ostm, true, phl->tsa.lsp.ospl, phl->tsa.fld.ofby+phl->tsa.fld.otry+phl->tsa.fld.ocay, phl->tsa.fld.ofbq+phl->tsa.fld.otrq+phl->tsa.fld.ocaq, phl->tsa.fld.ofbm+phl->tsa.fld.otrm+phl->tsa.fld.ocam, phl->tsa.fld.ofbw+phl->tsa.fld.otrw+phl->tsa.fld.ocaw, @@ -119,20 +137,35 @@ int enable[98] = { phl->tsa.lsp.oibt*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.oigs*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.orar*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.oraf*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.ormr*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.ormf*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), + phl->tsa.pol.odss*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.odms*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), + phl->tsa.pol.odes*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.odev*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), + phl->tsa.pol.odav*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.odlv*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), + phl->tsa.pol.olgs*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), 
phl->tsa.pol.olbv*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), + phl->tsa.pol.ovss*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.ovms*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), + phl->tsa.pol.oves*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.ovev*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), + phl->tsa.pol.ovav*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.ovlv*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), + phl->tsa.pol.ovga*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.ovgv*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), + phl->tsa.pol.odpy*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.lsp.otrd*phl->tsa.lsp.odem, phl->tsa.lsp.otrd*phl->tsa.lsp.odss, phl->tsa.lsp.otrd*phl->tsa.lsp.odri, phl->tsa.lsp.otrd*phl->tsa.lsp.odps, phl->tsa.lsp.otrd*phl->tsa.lsp.odfi, phl->tsa.lsp.otrd*phl->tsa.lsp.odes, phl->tsa.lsp.otrd*phl->tsa.lsp.odlm, phl->tsa.lsp.otrd*phl->tsa.lsp.olts, phl->tsa.lsp.otrd*phl->tsa.lsp.olgs, phl->tsa.lsp.otrd*phl->tsa.lsp.ovem, phl->tsa.lsp.otrd*phl->tsa.lsp.ovss, phl->tsa.lsp.otrd*phl->tsa.lsp.ovri, phl->tsa.lsp.otrd*phl->tsa.lsp.ovps, phl->tsa.lsp.otrd*phl->tsa.lsp.ovfi, phl->tsa.lsp.otrd*phl->tsa.lsp.oves, phl->tsa.lsp.otrd*phl->tsa.lsp.ovlm, phl->tsa.lsp.otrd*phl->tsa.lsp.ovbl, phl->tsa.lsp.otrd*phl->tsa.lsp.ovsa, phl->tsa.lsp.otrd*phl->tsa.lsp.oist, phl->tsa.lsp.otrd*phl->tsa.lsp.oibl, phl->tsa.lsp.otrd*phl->tsa.lsp.oibt, phl->tsa.lsp.otrd*phl->tsa.lsp.oigs, phl->tsa.lsp.otrd*phl->tsa.lsp.orar, phl->tsa.lsp.otrd*phl->tsa.lsp.oraf, phl->tsa.lsp.otrd*phl->tsa.lsp.ormr, phl->tsa.lsp.otrd*phl->tsa.lsp.ormf, + phl->tsa.pol.otrd*phl->tsa.pol.odss, phl->tsa.pol.otrd*phl->tsa.pol.odms, phl->tsa.pol.otrd*phl->tsa.pol.odes, phl->tsa.pol.otrd*phl->tsa.pol.odev, phl->tsa.pol.otrd*phl->tsa.pol.odav, phl->tsa.pol.otrd*phl->tsa.pol.odlv, + phl->tsa.pol.otrd*phl->tsa.pol.olgs, 
phl->tsa.pol.otrd*phl->tsa.pol.olbv, phl->tsa.pol.otrd*phl->tsa.pol.ovss, phl->tsa.pol.otrd*phl->tsa.pol.ovms, phl->tsa.pol.otrd*phl->tsa.pol.oves, phl->tsa.pol.otrd*phl->tsa.pol.ovev, + phl->tsa.pol.otrd*phl->tsa.pol.ovav, phl->tsa.pol.otrd*phl->tsa.pol.ovlv, phl->tsa.pol.otrd*phl->tsa.pol.ovga, phl->tsa.pol.otrd*phl->tsa.pol.ovgv, phl->tsa.pol.otrd*phl->tsa.pol.odpy, phl->tsa.fld.otry, phl->tsa.fld.otrq, phl->tsa.fld.otrm, phl->tsa.fld.otrw, phl->tsa.fld.otrd, phl->tsa.lsp.ocat*phl->tsa.lsp.odem, phl->tsa.lsp.ocat*phl->tsa.lsp.odss, phl->tsa.lsp.ocat*phl->tsa.lsp.odri, phl->tsa.lsp.ocat*phl->tsa.lsp.odps, phl->tsa.lsp.ocat*phl->tsa.lsp.odfi, phl->tsa.lsp.ocat*phl->tsa.lsp.odes, phl->tsa.lsp.ocat*phl->tsa.lsp.odlm, phl->tsa.lsp.ocat*phl->tsa.lsp.olts, phl->tsa.lsp.ocat*phl->tsa.lsp.olgs, phl->tsa.lsp.ocat*phl->tsa.lsp.ovem, phl->tsa.lsp.ocat*phl->tsa.lsp.ovss, phl->tsa.lsp.ocat*phl->tsa.lsp.ovri, phl->tsa.lsp.ocat*phl->tsa.lsp.ovps, phl->tsa.lsp.ocat*phl->tsa.lsp.ovfi, phl->tsa.lsp.ocat*phl->tsa.lsp.oves, phl->tsa.lsp.ocat*phl->tsa.lsp.ovlm, phl->tsa.lsp.ocat*phl->tsa.lsp.ovbl, phl->tsa.lsp.ocat*phl->tsa.lsp.ovsa, phl->tsa.lsp.ocat*phl->tsa.lsp.oist, phl->tsa.lsp.ocat*phl->tsa.lsp.oibl, phl->tsa.lsp.ocat*phl->tsa.lsp.oibt, phl->tsa.lsp.ocat*phl->tsa.lsp.oigs, phl->tsa.lsp.ocat*phl->tsa.lsp.orar, phl->tsa.lsp.ocat*phl->tsa.lsp.oraf, phl->tsa.lsp.ocat*phl->tsa.lsp.ormr, phl->tsa.lsp.ocat*phl->tsa.lsp.ormf, + phl->tsa.pol.ocat*phl->tsa.pol.odss, phl->tsa.pol.ocat*phl->tsa.pol.odms, phl->tsa.pol.ocat*phl->tsa.pol.odes, phl->tsa.pol.ocat*phl->tsa.pol.odev, phl->tsa.pol.ocat*phl->tsa.pol.odav, phl->tsa.pol.ocat*phl->tsa.pol.odlv, + phl->tsa.pol.ocat*phl->tsa.pol.olgs, phl->tsa.pol.ocat*phl->tsa.pol.olbv, phl->tsa.pol.ocat*phl->tsa.pol.ovss, phl->tsa.pol.ocat*phl->tsa.pol.ovms, phl->tsa.pol.ocat*phl->tsa.pol.oves, phl->tsa.pol.ocat*phl->tsa.pol.ovev, + phl->tsa.pol.ocat*phl->tsa.pol.ovav, phl->tsa.pol.ocat*phl->tsa.pol.ovlv, phl->tsa.pol.ocat*phl->tsa.pol.ovga, 
phl->tsa.pol.ocat*phl->tsa.pol.ovgv, phl->tsa.pol.ocat*phl->tsa.pol.odpy, phl->tsa.fld.ocay, phl->tsa.fld.ocaq, phl->tsa.fld.ocam, phl->tsa.fld.ocaw, phl->tsa.fld.ocad }; -int write[98] = { +int write[149] = { phl->tsa.otss, phl->tsa.sma.orms, phl->tsa.stm.ostm, phl->tsa.tsi.otsi, phl->tsa.lsp.ospl, phl->tsa.fld.ofby, phl->tsa.fld.ofbq, phl->tsa.fld.ofbm, phl->tsa.fld.ofbw, phl->tsa.fld.ofbd, phl->tsa.lsp.olsp*phl->tsa.lsp.odem, phl->tsa.lsp.olsp*phl->tsa.lsp.odss, phl->tsa.lsp.olsp*phl->tsa.lsp.odri, phl->tsa.lsp.olsp*phl->tsa.lsp.odps, phl->tsa.lsp.olsp*phl->tsa.lsp.odfi, phl->tsa.lsp.olsp*phl->tsa.lsp.odes, @@ -140,20 +173,29 @@ int write[98] = { phl->tsa.lsp.olsp*phl->tsa.lsp.ovps, phl->tsa.lsp.olsp*phl->tsa.lsp.ovfi, phl->tsa.lsp.olsp*phl->tsa.lsp.oves, phl->tsa.lsp.olsp*phl->tsa.lsp.ovlm, phl->tsa.lsp.olsp*phl->tsa.lsp.ovbl, phl->tsa.lsp.olsp*phl->tsa.lsp.ovsa, phl->tsa.lsp.olsp*phl->tsa.lsp.oist, phl->tsa.lsp.olsp*phl->tsa.lsp.oibl, phl->tsa.lsp.olsp*phl->tsa.lsp.oibt, phl->tsa.lsp.olsp*phl->tsa.lsp.oigs, phl->tsa.lsp.olsp*phl->tsa.lsp.orar, phl->tsa.lsp.olsp*phl->tsa.lsp.oraf, phl->tsa.lsp.olsp*phl->tsa.lsp.ormr, phl->tsa.lsp.olsp*phl->tsa.lsp.ormf, + phl->tsa.pol.opol*phl->tsa.pol.odss, phl->tsa.pol.opol*phl->tsa.pol.odms, phl->tsa.pol.opol*phl->tsa.pol.odes, phl->tsa.pol.opol*phl->tsa.pol.odev, phl->tsa.pol.opol*phl->tsa.pol.odav, phl->tsa.pol.opol*phl->tsa.pol.odlv, + phl->tsa.pol.opol*phl->tsa.pol.olgs, phl->tsa.pol.opol*phl->tsa.pol.olbv, phl->tsa.pol.opol*phl->tsa.pol.ovss, phl->tsa.pol.opol*phl->tsa.pol.ovms, phl->tsa.pol.opol*phl->tsa.pol.oves, phl->tsa.pol.opol*phl->tsa.pol.ovev, + phl->tsa.pol.opol*phl->tsa.pol.ovav, phl->tsa.pol.opol*phl->tsa.pol.ovlv, phl->tsa.pol.opol*phl->tsa.pol.ovga, phl->tsa.pol.opol*phl->tsa.pol.ovgv, phl->tsa.pol.opol*phl->tsa.pol.odpy, phl->tsa.lsp.otrd*phl->tsa.lsp.odem, phl->tsa.lsp.otrd*phl->tsa.lsp.odss, phl->tsa.lsp.otrd*phl->tsa.lsp.odri, phl->tsa.lsp.otrd*phl->tsa.lsp.odps, phl->tsa.lsp.otrd*phl->tsa.lsp.odfi, 
phl->tsa.lsp.otrd*phl->tsa.lsp.odes, phl->tsa.lsp.otrd*phl->tsa.lsp.odlm, phl->tsa.lsp.otrd*phl->tsa.lsp.olts, phl->tsa.lsp.otrd*phl->tsa.lsp.olgs, phl->tsa.lsp.otrd*phl->tsa.lsp.ovem, phl->tsa.lsp.otrd*phl->tsa.lsp.ovss, phl->tsa.lsp.otrd*phl->tsa.lsp.ovri, phl->tsa.lsp.otrd*phl->tsa.lsp.ovps, phl->tsa.lsp.otrd*phl->tsa.lsp.ovfi, phl->tsa.lsp.otrd*phl->tsa.lsp.oves, phl->tsa.lsp.otrd*phl->tsa.lsp.ovlm, phl->tsa.lsp.otrd*phl->tsa.lsp.ovbl, phl->tsa.lsp.otrd*phl->tsa.lsp.ovsa, phl->tsa.lsp.otrd*phl->tsa.lsp.oist, phl->tsa.lsp.otrd*phl->tsa.lsp.oibl, phl->tsa.lsp.otrd*phl->tsa.lsp.oibt, phl->tsa.lsp.otrd*phl->tsa.lsp.oigs, phl->tsa.lsp.otrd*phl->tsa.lsp.orar, phl->tsa.lsp.otrd*phl->tsa.lsp.oraf, phl->tsa.lsp.otrd*phl->tsa.lsp.ormr, phl->tsa.lsp.otrd*phl->tsa.lsp.ormf, + phl->tsa.pol.otrd*phl->tsa.pol.odss, phl->tsa.pol.otrd*phl->tsa.pol.odms, phl->tsa.pol.otrd*phl->tsa.pol.odes, phl->tsa.pol.otrd*phl->tsa.pol.odev, phl->tsa.pol.otrd*phl->tsa.pol.odav, phl->tsa.pol.otrd*phl->tsa.pol.odlv, + phl->tsa.pol.otrd*phl->tsa.pol.olgs, phl->tsa.pol.otrd*phl->tsa.pol.olbv, phl->tsa.pol.otrd*phl->tsa.pol.ovss, phl->tsa.pol.otrd*phl->tsa.pol.ovms, phl->tsa.pol.otrd*phl->tsa.pol.oves, phl->tsa.pol.otrd*phl->tsa.pol.ovev, + phl->tsa.pol.otrd*phl->tsa.pol.ovav, phl->tsa.pol.otrd*phl->tsa.pol.ovlv, phl->tsa.pol.otrd*phl->tsa.pol.ovga, phl->tsa.pol.otrd*phl->tsa.pol.ovgv, phl->tsa.pol.otrd*phl->tsa.pol.odpy, phl->tsa.fld.otry, phl->tsa.fld.otrq, phl->tsa.fld.otrm, phl->tsa.fld.otrw, phl->tsa.fld.otrd, phl->tsa.lsp.ocat*phl->tsa.lsp.odem, phl->tsa.lsp.ocat*phl->tsa.lsp.odss, phl->tsa.lsp.ocat*phl->tsa.lsp.odri, phl->tsa.lsp.ocat*phl->tsa.lsp.odps, phl->tsa.lsp.ocat*phl->tsa.lsp.odfi, phl->tsa.lsp.ocat*phl->tsa.lsp.odes, phl->tsa.lsp.ocat*phl->tsa.lsp.odlm, phl->tsa.lsp.ocat*phl->tsa.lsp.olts, phl->tsa.lsp.ocat*phl->tsa.lsp.olgs, phl->tsa.lsp.ocat*phl->tsa.lsp.ovem, phl->tsa.lsp.ocat*phl->tsa.lsp.ovss, phl->tsa.lsp.ocat*phl->tsa.lsp.ovri, phl->tsa.lsp.ocat*phl->tsa.lsp.ovps, 
phl->tsa.lsp.ocat*phl->tsa.lsp.ovfi, phl->tsa.lsp.ocat*phl->tsa.lsp.oves, phl->tsa.lsp.ocat*phl->tsa.lsp.ovlm, phl->tsa.lsp.ocat*phl->tsa.lsp.ovbl, phl->tsa.lsp.ocat*phl->tsa.lsp.ovsa, phl->tsa.lsp.ocat*phl->tsa.lsp.oist, phl->tsa.lsp.ocat*phl->tsa.lsp.oibl, phl->tsa.lsp.ocat*phl->tsa.lsp.oibt, phl->tsa.lsp.ocat*phl->tsa.lsp.oigs, phl->tsa.lsp.ocat*phl->tsa.lsp.orar, phl->tsa.lsp.ocat*phl->tsa.lsp.oraf, phl->tsa.lsp.ocat*phl->tsa.lsp.ormr, phl->tsa.lsp.ocat*phl->tsa.lsp.ormf, + phl->tsa.pol.ocat*phl->tsa.pol.odss, phl->tsa.pol.ocat*phl->tsa.pol.odms, phl->tsa.pol.ocat*phl->tsa.pol.odes, phl->tsa.pol.ocat*phl->tsa.pol.odev, phl->tsa.pol.ocat*phl->tsa.pol.odav, phl->tsa.pol.ocat*phl->tsa.pol.odlv, + phl->tsa.pol.ocat*phl->tsa.pol.olgs, phl->tsa.pol.ocat*phl->tsa.pol.olbv, phl->tsa.pol.ocat*phl->tsa.pol.ovss, phl->tsa.pol.ocat*phl->tsa.pol.ovms, phl->tsa.pol.ocat*phl->tsa.pol.oves, phl->tsa.pol.ocat*phl->tsa.pol.ovev, + phl->tsa.pol.ocat*phl->tsa.pol.ovav, phl->tsa.pol.ocat*phl->tsa.pol.ovlv, phl->tsa.pol.ocat*phl->tsa.pol.ovga, phl->tsa.pol.ocat*phl->tsa.pol.ovgv, phl->tsa.pol.ocat*phl->tsa.pol.odpy, phl->tsa.fld.ocay, phl->tsa.fld.ocaq, phl->tsa.fld.ocam, phl->tsa.fld.ocaw, phl->tsa.fld.ocad }; -short ***ptr[98] = { +short ***ptr[149] = { &ts->tss_, &ts->rms_, &ts->stm_, &ts->tsi_, &ts->spl_, &ts->fby_, &ts->fbq_, &ts->fbm_, &ts->fbw_, &ts->fbd_, &ts->lsp_[0], &ts->lsp_[1], &ts->lsp_[2], &ts->lsp_[3], &ts->lsp_[4], &ts->lsp_[5], @@ -161,17 +203,26 @@ short ***ptr[98] = { &ts->lsp_[12], &ts->lsp_[13], &ts->lsp_[14], &ts->lsp_[15], &ts->lsp_[16], &ts->lsp_[17], &ts->lsp_[18], &ts->lsp_[19], &ts->lsp_[20], &ts->lsp_[21], &ts->lsp_[22], &ts->lsp_[23], &ts->lsp_[24], &ts->lsp_[25], + &ts->pol_[0], &ts->pol_[1], &ts->pol_[2], &ts->pol_[3], &ts->pol_[4], &ts->pol_[5], + &ts->pol_[6], &ts->pol_[7], &ts->pol_[8], &ts->pol_[9], &ts->pol_[10], &ts->pol_[11], + &ts->pol_[12], &ts->pol_[13], &ts->pol_[14], &ts->pol_[15], &ts->pol_[16], &ts->trp_[0], &ts->trp_[1], &ts->trp_[2], 
&ts->trp_[3], &ts->trp_[4], &ts->trp_[5], &ts->trp_[6], &ts->trp_[7], &ts->trp_[8], &ts->trp_[9], &ts->trp_[10], &ts->trp_[11], &ts->trp_[12], &ts->trp_[13], &ts->trp_[14], &ts->trp_[15], &ts->trp_[16], &ts->trp_[17], &ts->trp_[18], &ts->trp_[19], &ts->trp_[20], &ts->trp_[21], &ts->trp_[22], &ts->trp_[23], &ts->trp_[24], &ts->trp_[25], + &ts->tro_[0], &ts->tro_[1], &ts->tro_[2], &ts->tro_[3], &ts->tro_[4], &ts->tro_[5], + &ts->tro_[6], &ts->tro_[7], &ts->tro_[8], &ts->tro_[9], &ts->tro_[10], &ts->tro_[11], + &ts->tro_[12], &ts->tro_[13], &ts->tro_[14], &ts->tro_[15], &ts->tro_[16], &ts->try_, &ts->trq_, &ts->trm_, &ts->trw_, &ts->trd_, &ts->cap_[0], &ts->cap_[1], &ts->cap_[2], &ts->cap_[3], &ts->cap_[4], &ts->cap_[5], &ts->cap_[6], &ts->cap_[7], &ts->cap_[8], &ts->cap_[9], &ts->cap_[10], &ts->cap_[11], &ts->cap_[12], &ts->cap_[13], &ts->cap_[14], &ts->cap_[15], &ts->cap_[16], &ts->cap_[17], &ts->cap_[18], &ts->cap_[19], &ts->cap_[20], &ts->cap_[21], &ts->cap_[22], &ts->cap_[23], - &ts->cap_[24], &ts->cap_[25], + &ts->cap_[24], &ts->cap_[25], + &ts->cao_[0], &ts->cao_[1], &ts->cao_[2], &ts->cao_[3], &ts->cao_[4], &ts->cao_[5], + &ts->cao_[6], &ts->cao_[7], &ts->cao_[8], &ts->cao_[9], &ts->cao_[10], &ts->cao_[11], + &ts->cao_[12], &ts->cao_[13], &ts->cao_[14], &ts->cao_[15], &ts->cao_[16], &ts->cay_, &ts->caq_, &ts->cam_, &ts->caw_, &ts->cad_ }; @@ -190,6 +241,7 @@ short ***ptr[98] = { if (phl->nw > 0) alloc((void**)&ts->d_fbw, phl->nw, sizeof(date_t)); else ts->d_fbw = NULL; if (phl->nd > 0) alloc((void**)&ts->d_fbd, phl->nd, sizeof(date_t)); else ts->d_fbd = NULL; if (phl->tsa.lsp.ny > 0) alloc((void**)&ts->d_lsp, phl->tsa.lsp.ny, sizeof(date_t)); else ts->d_lsp = NULL; + if (phl->tsa.pol.ny > 0) alloc((void**)&ts->d_pol, phl->tsa.pol.ny, sizeof(date_t)); else ts->d_pol = NULL; //printf("scale, date, ts, bandnames, and sensor ID must be set in compile_tsa!!!\n"); @@ -197,7 +249,7 @@ short ***ptr[98] = { for (o=0; odate_range[_MIN_].year+t); + 
set_stack_sensor(TSA[o], t, "BLEND"); + copy_date(&date, &ts->d_pol[t]); + nchar = snprintf(fdate, NPOW_10, "YEAR-%04d", date.year); + if (nchar < 0 || nchar >= NPOW_10){ + printf("Buffer Overflow in assembling domain\n"); error++;} + set_stack_wavelength(TSA[o], t, date.year); + set_stack_unit(TSA[o], t, "year"); + set_stack_domain(TSA[o], t, fdate); + set_stack_bandname(TSA[o], t, fdate); + break; case _trd_: set_stack_sensor(TSA[o], t, "BLEND"); set_stack_domain(TSA[o], t, _TAGGED_ENUM_TRD_[t].tag); @@ -361,6 +425,7 @@ short ***ptr[98] = { if (ts->d_fbw != NULL){ free((void*)ts->d_fbw); ts->d_fbw = NULL;} if (ts->d_fbd != NULL){ free((void*)ts->d_fbd); ts->d_fbd = NULL;} if (ts->d_lsp != NULL){ free((void*)ts->d_lsp); ts->d_lsp = NULL;} + if (ts->d_pol != NULL){ free((void*)ts->d_pol); ts->d_pol = NULL;} return NULL; } @@ -500,10 +565,10 @@ short nodata; tsa_trend(&ts, mask_, nc, nodata, phl); tsa_cat(&ts, mask_, nc, nodata, phl); - + tsa_standardize(&ts, mask_, nc, nt, ni, nodata, phl); - + // clean temporal information if (ts.d_tss != NULL){ free((void*)ts.d_tss); ts.d_tss = NULL;} if (ts.d_tsi != NULL){ free((void*)ts.d_tsi); ts.d_tsi = NULL;} @@ -513,6 +578,7 @@ short nodata; if (ts.d_fbw != NULL){ free((void*)ts.d_fbw); ts.d_fbw = NULL;} if (ts.d_fbd != NULL){ free((void*)ts.d_fbd); ts.d_fbd = NULL;} if (ts.d_lsp != NULL){ free((void*)ts.d_lsp); ts.d_lsp = NULL;} + if (ts.d_pol != NULL){ free((void*)ts.d_pol); ts.d_pol = NULL;} } diff --git a/src/higher-level/tsa-hl.h b/src/higher-level/tsa-hl.h index 584769c8..0682d539 100755 --- a/src/higher-level/tsa-hl.h +++ b/src/higher-level/tsa-hl.h @@ -52,12 +52,15 @@ typedef struct { short **fby_, **fbq_, **fbm_, **fbw_, **fbd_; short **try_, **trq_, **trm_, **trw_, **trd_; short **cay_, **caq_, **cam_, **caw_, **cad_; - short **lsp_[26]; - short **trp_[26]; - short **cap_[26]; + short **lsp_[_LSP_LENGTH_]; + short **trp_[_LSP_LENGTH_]; + short **cap_[_LSP_LENGTH_]; + short **pol_[_POL_LENGTH_]; + short 
**tro_[_POL_LENGTH_]; + short **cao_[_POL_LENGTH_]; date_t *d_tss, *d_tsi; date_t *d_fby, *d_fbq, *d_fbm, *d_fbw, *d_fbd; - date_t *d_lsp; + date_t *d_lsp, *d_pol; } tsa_t; #include "../higher-level/index-hl.h" @@ -66,6 +69,7 @@ typedef struct { #include "../higher-level/fold-hl.h" #include "../higher-level/trend-hl.h" #include "../higher-level/pheno-hl.h" +#include "../higher-level/polar-hl.h" #include "../higher-level/standardize-hl.h" stack_t **time_series_analysis(ard_t *ard, stack_t *mask, int nt, par_hl_t *phl, aux_emb_t *endmember, cube_t *cube, int *nproduct); From 1b622b2b83df30ceb15244f14e7465f3b81d88ee Mon Sep 17 00:00:00 2001 From: David Frantz Date: Mon, 10 Aug 2020 05:40:24 +0200 Subject: [PATCH 11/78] working on polarmetrics --- src/higher-level/polar-hl.c | 117 +++++++++++++++++++++++++----------- 1 file changed, 81 insertions(+), 36 deletions(-) diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index c340cf38..9cb6f1b6 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -28,10 +28,10 @@ This file contains functions for polarmetrics #include "polar-hl.h" -enum { _RAD_, _VAL_, _CUM_, _YEAR_, _DOY_, _SEASON_, _PCX_, _PCY_, _COORD_LEN_ }; +enum { _RAD_, _VAL_, _CUM_, _YEAR_, _DOY_, _CE_, _SEASON_, _PCX_, _PCY_, _COORD_LEN_ }; void polar_coords(float r, float v, float yr, float polar_array[_COORD_LEN_]); -void polar_vector(float x, float y, float yr, float polar_array[_COORD_LEN_]); +void polar_vector(float x, float y, float yr, float doy_theta, float polar_array[_COORD_LEN_]); void identify_seasons(float **polar, int ni, int istep, float theta); void accumulate_seasons(float **polar, int ni); @@ -41,12 +41,16 @@ int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min void polar_coords(float r, float v, float yr, float polar_array[_COORD_LEN_]){ +float doy; + doy = r*365.0/(2.0*M_PI); + polar_array[_RAD_] = r; polar_array[_VAL_] = v; polar_array[_YEAR_] = yr; - polar_array[_DOY_] = 
r*365.0/(2.0*M_PI); + polar_array[_DOY_] = doy; + polar_array[_CE_] = doy2ce(doy, yr); polar_array[_PCX_] = v*cos(r); polar_array[_PCY_] = v*sin(r); @@ -54,18 +58,27 @@ void polar_coords(float r, float v, float yr, float polar_array[_COORD_LEN_]){ } -void polar_vector(float x, float y, float yr, float polar_array[_COORD_LEN_]){ -float r, v; +void polar_vector(float x, float y, float yr, float doy_theta, float polar_array[_COORD_LEN_]){ +float r, v, doy; r = atan2(y, x); if (r <= 0) r += 2*M_PI; v = sqrt(x*x + y*y); + + doy = r*365.0/(2.0*M_PI); polar_array[_RAD_] = r; polar_array[_VAL_] = v; polar_array[_YEAR_] = yr; - polar_array[_DOY_] = r*365.0/(2.0*M_PI); + polar_array[_DOY_] = doy; + + if (doy > doy_theta){ + polar_array[_CE_] = doy2ce(doy, yr); + } else { + polar_array[_CE_] = doy2ce(doy, yr+1); + } + polar_array[_PCX_] = x; polar_array[_PCY_] = y; @@ -152,9 +165,10 @@ float ce_left, ce_right, ce; float v_left, v_right; enum { _START_, _MID_, _END_, _EVENT_LEN_ }; -enum { _LONGTERM_, _EARLY_, _GROW_, _LATE_, _WINDOW_LEN_}; +enum { _LONGTERM_, _THISYEAR_, _EARLY_, _GROW_, _LATE_, _WINDOW_LEN_}; -float theta, doy_theta; +float theta, doy_theta, ce_theta; +float theta_now, doy_theta_now, ce_theta_now; float timing[_EVENT_LEN_][_COORD_LEN_]; float vector[_WINDOW_LEN_][_COORD_LEN_]; @@ -201,7 +215,7 @@ float **polar = NULL; memset(mean_window[_LONGTERM_], 0, 2*sizeof(float)); - /** copy ce/v to working variables + /** copy doy/v to working variables +++ and interpolate linearly to make sure **/ for (i=0; id_tsi[i].doy/365.0*2.0*M_PI; -if (p == 0) printf("doy: %d\n", ts->d_tsi[i].doy); -if (p == 0) printf("r: %f\n", r); -if (p == 0) printf("v: %f\n", v); - polar_coords(r, v, ts->d_tsi[i].year, polar[i]); -if (p == 0) printf("x: %f\n", polar[i][_PCX_]); -if (p == 0) printf("y: %f\n", polar[i][_PCY_]); +if (p == 367642) printf("doy: %d\n", ts->d_tsi[i].doy); +if (p == 367642) printf("r: %f\n", r); +if (p == 367642) printf("v: %f\n", v); +if (v < 0) v = 0; + 
polar_coords(r, v, ts->d_tsi[i].year-year_min, polar[i]); +if (p == 367642) printf("x: %f\n", polar[i][_PCX_]); +if (p == 367642) printf("y: %f\n", polar[i][_PCY_]); mean_window[_LONGTERM_][_X_] += polar[i][_PCX_]; mean_window[_LONGTERM_][_Y_] += polar[i][_PCY_]; @@ -261,15 +276,15 @@ if (p == 0) printf("y: %f\n", polar[i][_PCY_]); if (!valid) continue; -if (p == 0) printf("valid pixel.\n"); +if (p == 367642) printf("valid pixel.\n"); // mean of polar coordinates mean_window[_LONGTERM_][_X_] /= ni; mean_window[_LONGTERM_][_Y_] /= ni; -if (p == 0) printf("mean pol x/y: %f %f\n", mean_window[_LONGTERM_][_X_], mean_window[_LONGTERM_][_Y_]); +if (p == 367642) printf("mean pol x/y: %f %f\n", mean_window[_LONGTERM_][_X_], mean_window[_LONGTERM_][_Y_]); // multi-annual average vector - polar_vector(mean_window[_LONGTERM_][_X_], mean_window[_LONGTERM_][_Y_], 0, vector[_LONGTERM_]); + polar_vector(mean_window[_LONGTERM_][_X_], mean_window[_LONGTERM_][_Y_], 0, 0, vector[_LONGTERM_]); // diametric opposite of average vector = start of phenological year if (vector[_LONGTERM_][_RAD_] < M_PI){ @@ -279,8 +294,8 @@ if (p == 0) printf("mean pol x/y: %f %f\n", mean_window[_LONGTERM_][_X_], mean_w } doy_theta = (theta*365.0/(2.0*M_PI)); -if (p == 0) printf("avg: %f %f %f\n", vector[_LONGTERM_][_RAD_], vector[_LONGTERM_][_DOY_], vector[_LONGTERM_][_VAL_]); -if (p == 0) printf("theta: %f %f\n", theta, doy_theta); +if (p == 367642) printf("avg: %f %f %f\n", vector[_LONGTERM_][_RAD_], vector[_LONGTERM_][_DOY_], vector[_LONGTERM_][_VAL_]); +if (p == 367642) printf("theta: %f %f\n", theta, doy_theta); identify_seasons(polar, ni, tsi->step, theta); @@ -295,9 +310,12 @@ if (p == 0) printf("theta: %f %f\n", theta, doy_theta); memset(n_window, 0, sizeof(float)*_WINDOW_LEN_); memset(recurrence, 0, sizeof(double)*2); + if (doy_theta < 182) y = s; else y = s+1; + ce_theta = doy2ce(doy_theta, s); + for (i=i0; i doy_theta){ + ce_theta_now = doy2ce(doy_theta_now, s); + } else { + ce_theta_now = 
doy2ce(doy_theta_now, s+1); + } + mean_window[_GROW_][_X_] /= n_window[_GROW_]; @@ -348,9 +391,9 @@ polar[i][_SEASON_], polar[i][_RAD_], polar[i][_VAL_], polar[i][_PCX_], polar[i][ mean_window[_LATE_][_X_] /= n_window[_LATE_]; mean_window[_LATE_][_Y_] /= n_window[_LATE_]; - polar_vector(mean_window[_GROW_][_X_], mean_window[_GROW_][_Y_], 0, vector[_GROW_]); - polar_vector(mean_window[_EARLY_][_X_], mean_window[_EARLY_][_Y_], 0, vector[_EARLY_]); - polar_vector(mean_window[_LATE_][_X_], mean_window[_LATE_][_Y_], 0, vector[_LATE_]); + polar_vector(mean_window[_GROW_][_X_], mean_window[_GROW_][_Y_], s, doy_theta, vector[_GROW_]); + polar_vector(mean_window[_EARLY_][_X_], mean_window[_EARLY_][_Y_], s, doy_theta, vector[_EARLY_]); + polar_vector(mean_window[_LATE_][_X_], mean_window[_LATE_][_Y_], s, doy_theta, vector[_LATE_]); @@ -359,24 +402,25 @@ polar[i][_SEASON_], polar[i][_RAD_], polar[i][_VAL_], polar[i][_PCX_], polar[i][ // if () valid = true; //valid = true; - if (doy_theta < 182) y = s; else y = s+1; -if (p == 0) printf("season: %d, year %d\n", s, y); -if (p == 0) printf("mean, sd, and n: %f, %f, %d\n", recurrence[0], standdev(recurrence[1], n_window[_GROW_]), n_window[_GROW_]); + +if (p == 367642) printf("season: %d, year %d\n", s, y); +if (p == 367642) printf("mean, sd, and n: %f, %f, %d\n", recurrence[0], standdev(recurrence[1], n_window[_GROW_]), n_window[_GROW_]); + /** copy POL if all OK **/ //if (valid){ //if (pol->odem) ts->pol_[_POL_DEM_][y][p] = (short)0; - if (pol->odss) ts->pol_[_POL_DSS_][y][p] = (short)timing[_START_][_DOY_]; - if (pol->odms) ts->pol_[_POL_DMS_][y][p] = (short)timing[_MID_][_DOY_]; - if (pol->odes) ts->pol_[_POL_DES_][y][p] = (short)timing[_END_][_DOY_]; - if (pol->odev) ts->pol_[_POL_DEV_][y][p] = (short)vector[_EARLY_][_DOY_]; - if (pol->odav) ts->pol_[_POL_DAV_][y][p] = (short)vector[_GROW_][_DOY_]; - if (pol->odlv) ts->pol_[_POL_DLV_][y][p] = (short)vector[_LATE_][_DOY_]; + if (pol->odss) ts->pol_[_POL_DSS_][y][p] = 
(short)timing[_START_][_CE_]; + if (pol->odms) ts->pol_[_POL_DMS_][y][p] = (short)timing[_MID_][_CE_]; + if (pol->odes) ts->pol_[_POL_DES_][y][p] = (short)timing[_END_][_CE_]; + if (pol->odev) ts->pol_[_POL_DEV_][y][p] = (short)vector[_EARLY_][_CE_]; + if (pol->odav) ts->pol_[_POL_DAV_][y][p] = (short)vector[_GROW_][_CE_]; + if (pol->odlv) ts->pol_[_POL_DLV_][y][p] = (short)vector[_LATE_][_CE_]; //if (pol->odlm) ts->pol_[_POL_DLM_][y][p] = (short)0; - if (pol->olgs) ts->pol_[_POL_LGS_][y][p] = (short)(timing[_END_][_DOY_] - timing[_START_][_DOY_]); - if (pol->olbv) ts->pol_[_POL_LBV_][y][p] = (short)(vector[_LATE_][_DOY_] - vector[_EARLY_][_DOY_]); + if (pol->olgs) ts->pol_[_POL_LGS_][y][p] = (short)(timing[_END_][_CE_] - timing[_START_][_CE_]); + if (pol->olbv) ts->pol_[_POL_LBV_][y][p] = (short)(vector[_LATE_][_CE_] - vector[_EARLY_][_CE_]); //if (pol->ovem) ts->pol_[_POL_VEM_][y][p] = (short)0; if (pol->ovss) ts->pol_[_POL_VSS_][y][p] = (short)timing[_START_][_VAL_]; if (pol->ovms) ts->pol_[_POL_VMS_][y][p] = (short)timing[_MID_][_VAL_]; @@ -388,7 +432,8 @@ if (p == 0) printf("mean, sd, and n: %f, %f, %d\n", recurrence[0], standdev(recu //if (pol->ovbl) ts->pol_[_POL_VBL_][y][p] = (short)0; if (pol->ovga) ts->pol_[_POL_VGA_][y][p] = (short)recurrence[0]; if (pol->ovgv) ts->pol_[_POL_VGV_][y][p] = (short)standdev(recurrence[1], n_window[_GROW_]); - if (pol->odpy) ts->pol_[_POL_DPY_][y][p] = (short)doy_theta; + //if (pol->odpy) ts->pol_[_POL_DPY_][y][p] = (short)(ce_theta_now - ce_theta); + if (pol->odpy) ts->pol_[_POL_DPY_][y][p] = (short)(ce_theta); //if (pol->oist) ts->pol_[_POL_IST_][y][p] = (short)0; //if (pol->oibl) ts->pol_[_POL_IBL_][y][p] = (short)0; //if (pol->oibt) ts->pol_[_POL_IBT_][y][p] = (short)0; From f75770ce7e23993d631b24ac06f213d3e20b435e Mon Sep 17 00:00:00 2001 From: David Frantz Date: Mon, 10 Aug 2020 05:43:18 +0200 Subject: [PATCH 12/78] added a safety query to BAP, in case all scores are zero --- src/higher-level/param-hl.c | 4 ++++ 1 file 
changed, 4 insertions(+) diff --git a/src/higher-level/param-hl.c b/src/higher-level/param-hl.c index 9e77eaf4..d307d7bb 100755 --- a/src/higher-level/param-hl.c +++ b/src/higher-level/param-hl.c @@ -1416,6 +1416,10 @@ double tol = 5e-3; phl->bap.w.t = phl->bap.w.d + phl->bap.w.y + phl->bap.w.c + phl->bap.w.h + phl->bap.w.r + phl->bap.w.v; + if (phl->bap.w.t == 0){ + printf("ALL scoring weights are zero. This is not allowed. " + "At least, the seasonal score should be > 0.\n"); return FAILURE;} + // number of years phl->bap.Yn = (phl->bap.Yr*2)+1; From 78a6a8e72cec4746c907aaf7aac7220f92c0ff8a Mon Sep 17 00:00:00 2001 From: David Frantz Date: Tue, 11 Aug 2020 10:57:07 +0200 Subject: [PATCH 13/78] working on polar metrics --- src/higher-level/polar-hl.c | 41 ++++++++++++++++++++----------------- 1 file changed, 22 insertions(+), 19 deletions(-) diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index 9cb6f1b6..b07a1090 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -32,7 +32,7 @@ enum { _RAD_, _VAL_, _CUM_, _YEAR_, _DOY_, _CE_, _SEASON_, _PCX_, _PCY_, _COORD_ void polar_coords(float r, float v, float yr, float polar_array[_COORD_LEN_]); void polar_vector(float x, float y, float yr, float doy_theta, float polar_array[_COORD_LEN_]); -void identify_seasons(float **polar, int ni, int istep, float theta); +void identify_seasons(float **polar, int ni, int istep, float doy_theta); void accumulate_seasons(float **polar, int ni); int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min, int year_max, par_tsi_t *tsi, par_pol_t *pol); @@ -86,15 +86,18 @@ float r, v, doy; } -void identify_seasons(float **polar, int ni, int istep, float theta){ -int i, s = -1; +void identify_seasons(float **polar, int ni, int istep, float doy_theta){ +int i, s = -1, y = 0; float rstep = istep/365.0*2.0*M_PI; +float ce_theta; for (i=0; i= theta && - polar[i][_RAD_]-theta <= rstep) s++; + ce_theta = doy2ce(doy_theta, y); + + 
if (polar[i][_CE_] >= ce_theta && + polar[i][_CE_]-ce_theta <= istep){ s++; y++;} polar[i][_SEASON_] = s; @@ -261,13 +264,13 @@ float **polar = NULL; r = ts->d_tsi[i].doy/365.0*2.0*M_PI; -if (p == 367642) printf("doy: %d\n", ts->d_tsi[i].doy); -if (p == 367642) printf("r: %f\n", r); -if (p == 367642) printf("v: %f\n", v); +if (p == 375639) printf("doy: %d\n", ts->d_tsi[i].doy); +if (p == 375639) printf("r: %f\n", r); +if (p == 375639) printf("v: %f\n", v); if (v < 0) v = 0; polar_coords(r, v, ts->d_tsi[i].year-year_min, polar[i]); -if (p == 367642) printf("x: %f\n", polar[i][_PCX_]); -if (p == 367642) printf("y: %f\n", polar[i][_PCY_]); +if (p == 375639) printf("x: %f\n", polar[i][_PCX_]); +if (p == 375639) printf("y: %f\n", polar[i][_PCY_]); mean_window[_LONGTERM_][_X_] += polar[i][_PCX_]; mean_window[_LONGTERM_][_Y_] += polar[i][_PCY_]; @@ -276,12 +279,12 @@ if (p == 367642) printf("y: %f\n", polar[i][_PCY_]); if (!valid) continue; -if (p == 367642) printf("valid pixel.\n"); +if (p == 375639) printf("valid pixel.\n"); // mean of polar coordinates mean_window[_LONGTERM_][_X_] /= ni; mean_window[_LONGTERM_][_Y_] /= ni; -if (p == 367642) printf("mean pol x/y: %f %f\n", mean_window[_LONGTERM_][_X_], mean_window[_LONGTERM_][_Y_]); +if (p == 375639) printf("mean pol x/y: %f %f\n", mean_window[_LONGTERM_][_X_], mean_window[_LONGTERM_][_Y_]); // multi-annual average vector polar_vector(mean_window[_LONGTERM_][_X_], mean_window[_LONGTERM_][_Y_], 0, 0, vector[_LONGTERM_]); @@ -294,11 +297,11 @@ if (p == 367642) printf("mean pol x/y: %f %f\n", mean_window[_LONGTERM_][_X_], m } doy_theta = (theta*365.0/(2.0*M_PI)); -if (p == 367642) printf("avg: %f %f %f\n", vector[_LONGTERM_][_RAD_], vector[_LONGTERM_][_DOY_], vector[_LONGTERM_][_VAL_]); -if (p == 367642) printf("theta: %f %f\n", theta, doy_theta); +if (p == 375639) printf("avg: %f %f %f\n", vector[_LONGTERM_][_RAD_], vector[_LONGTERM_][_DOY_], vector[_LONGTERM_][_VAL_]); +if (p == 375639) printf("theta: %f %f\n", theta, 
doy_theta); - identify_seasons(polar, ni, tsi->step, theta); + identify_seasons(polar, ni, tsi->step, doy_theta); accumulate_seasons(polar, ni); @@ -315,8 +318,8 @@ if (p == 367642) printf("theta: %f %f\n", theta, doy_theta); for (i=i0; i s){ i0 = i; break; } @@ -404,8 +407,8 @@ polar[i][_SEASON_], polar[i][_RAD_], polar[i][_VAL_], polar[i][_PCX_], polar[i][ -if (p == 367642) printf("season: %d, year %d\n", s, y); -if (p == 367642) printf("mean, sd, and n: %f, %f, %d\n", recurrence[0], standdev(recurrence[1], n_window[_GROW_]), n_window[_GROW_]); +if (p == 375639) printf("season: %d, year %d\n", s, y); +if (p == 375639) printf("mean, sd, and n: %f, %f, %d\n", recurrence[0], standdev(recurrence[1], n_window[_GROW_]), n_window[_GROW_]); From e67546c698488fa11424a5dfb1e6bc1fcf658fcc Mon Sep 17 00:00:00 2001 From: David Frantz Date: Tue, 11 Aug 2020 10:58:21 +0200 Subject: [PATCH 14/78] fixed cannot copy error when parameterfile is too long --- src/cross-level/stack-cl.c | 30 +++++++++++++++--------------- src/cross-level/stack-cl.h | 2 +- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/src/cross-level/stack-cl.c b/src/cross-level/stack-cl.c index 8b8344de..6c375e42 100755 --- a/src/cross-level/stack-cl.c +++ b/src/cross-level/stack-cl.c @@ -670,13 +670,13 @@ int i = 0; //CPLPushErrorHandler(CPLQuietErrorHandler); - alloc_2DC((void***)&fp_meta, n_fp_meta, NPOW_13, sizeof(char)); - alloc_2DC((void***)&band_meta, n_band_meta, NPOW_13, sizeof(char)); + alloc_2DC((void***)&fp_meta, n_fp_meta, NPOW_14, sizeof(char)); + alloc_2DC((void***)&band_meta, n_band_meta, NPOW_14, sizeof(char)); sys_meta = system_info(&n_sys_meta); strncpy(fp_meta[i], "FORCE_version", 13); fp_meta[i][13] = '\0'; i++; - if (strlen(_VERSION_) > NPOW_13-1){ + if (strlen(_VERSION_) > NPOW_14-1){ printf("cannot copy, string too long.\n"); return FAILURE; } else { strncpy(fp_meta[i], _VERSION_, strlen(_VERSION_)); @@ -684,7 +684,7 @@ int i = 0; } strncpy(fp_meta[i], "FORCE_description", 
17); fp_meta[i][17] = '\0'; i++; - if (strlen(stack->name) > NPOW_13-1){ + if (strlen(stack->name) > NPOW_14-1){ printf("cannot copy, string too long.\n"); return FAILURE; } else { strncpy(fp_meta[i], stack->name, strlen(stack->name)); @@ -692,7 +692,7 @@ int i = 0; } strncpy(fp_meta[i], "FORCE_product", 13); fp_meta[i][13] = '\0'; i++; - if (strlen(stack->product) > NPOW_13-1){ + if (strlen(stack->product) > NPOW_14-1){ printf("cannot copy, string too long.\n"); return FAILURE; } else { strncpy(fp_meta[i], stack->product, strlen(stack->product)); @@ -700,7 +700,7 @@ int i = 0; } strncpy(fp_meta[i], "FORCE_param", 11); fp_meta[i][11] = '\0'; i++; - if (strlen(stack->par) > NPOW_13-1){ + if (strlen(stack->par) > NPOW_14-1){ printf("cannot copy, string too long.\n"); return FAILURE; } else { strncpy(fp_meta[i], stack->par, strlen(stack->par)); @@ -927,7 +927,7 @@ int i = 0; i = 0; strncpy(band_meta[i], "Domain", 6); band_meta[i][6] = '\0'; i++; - if (strlen(stack->domain[b_stack]) > NPOW_13-1){ + if (strlen(stack->domain[b_stack]) > NPOW_14-1){ printf("cannot copy, string too long.\n"); return FAILURE; } else { strncpy(band_meta[i], stack->domain[b_stack], strlen(stack->domain[b_stack])); @@ -935,12 +935,12 @@ int i = 0; } strncpy(band_meta[i], "Wavelength", 10); band_meta[i][10] = '\0'; i++; - nchar = snprintf(band_meta[i], NPOW_13, "%.3f", stack->wavelength[b_stack]); i++; - if (nchar < 0 || nchar >= NPOW_13){ + nchar = snprintf(band_meta[i], NPOW_14, "%.3f", stack->wavelength[b_stack]); i++; + if (nchar < 0 || nchar >= NPOW_14){ printf("Buffer Overflow in assembling band metadata\n"); return FAILURE;} strncpy(band_meta[i], "Wavelength_unit", 15); band_meta[i][15] = '\0'; i++; - if (strlen(stack->unit[b_stack]) > NPOW_13-1){ + if (strlen(stack->unit[b_stack]) > NPOW_14-1){ printf("cannot copy, string too long.\n"); return FAILURE; } else { strncpy(band_meta[i], stack->unit[b_stack], strlen(stack->unit[b_stack])); @@ -948,12 +948,12 @@ int i = 0; } 
strncpy(band_meta[i], "Scale", 5); band_meta[i][5] = '\0'; i++; - nchar = snprintf(band_meta[i], NPOW_13, "%.3f", stack->scale[b_stack]); i++; - if (nchar < 0 || nchar >= NPOW_13){ + nchar = snprintf(band_meta[i], NPOW_14, "%.3f", stack->scale[b_stack]); i++; + if (nchar < 0 || nchar >= NPOW_14){ printf("Buffer Overflow in assembling band metadata\n"); return FAILURE;} strncpy(band_meta[i], "Sensor", 6); band_meta[i][6] = '\0'; i++; - if (strlen(stack->sensor[b_stack]) > NPOW_13-1){ + if (strlen(stack->sensor[b_stack]) > NPOW_14-1){ printf("cannot copy, string too long.\n"); return FAILURE; } else { strncpy(band_meta[i], stack->sensor[b_stack], strlen(stack->sensor[b_stack])); @@ -962,7 +962,7 @@ int i = 0; get_stack_longdate(stack, b_stack, ldate, NPOW_05-1); strncpy(band_meta[i], "Date", 4); band_meta[i][4] = '\0'; i++; - if (strlen(ldate) > NPOW_13-1){ + if (strlen(ldate) > NPOW_14-1){ printf("cannot copy, string too long.\n"); return FAILURE; } else { strncpy(band_meta[i], ldate, strlen(ldate)); @@ -2729,7 +2729,7 @@ void get_stack_proj(stack_t *stack, char proj[], size_t size){ void set_stack_par(stack_t *stack, const char *par){ - if (strlen(par) > NPOW_13-1){ + if (strlen(par) > NPOW_14-1){ printf("cannot copy, string too long.\n"); exit(1); } else { strncpy(stack->par, par, strlen(par)); diff --git a/src/cross-level/stack-cl.h b/src/cross-level/stack-cl.h index 2122702a..2ae22aa5 100755 --- a/src/cross-level/stack-cl.h +++ b/src/cross-level/stack-cl.h @@ -77,7 +77,7 @@ typedef struct { int tx, ty; // ID of tile char proj[NPOW_10]; // projection - char par[NPOW_13]; // parameterization + char par[NPOW_14]; // parameterization bool *save; // save band? 
 int *nodata; // nodata value From 43325fe5133f5dc9c95edc53e746180fa3f8e25c Mon Sep 17 00:00:00 2001 From: Stefan Ernst Date: Thu, 13 Aug 2020 09:59:30 +0200 Subject: [PATCH 15/78] add new bash scripts for gcs download --- bash/force-level1-landsat-g.sh | 300 +++++++++++++++++++++++++++++++ bash/force-level1-sentinel2-g.sh | 292 ++++++++++++++++++++++++++++++ 2 files changed, 592 insertions(+) create mode 100755 bash/force-level1-landsat-g.sh create mode 100755 bash/force-level1-sentinel2-g.sh diff --git a/bash/force-level1-landsat-g.sh b/bash/force-level1-landsat-g.sh new file mode 100755 index 00000000..fc1f3ace --- /dev/null +++ b/bash/force-level1-landsat-g.sh @@ -0,0 +1,300 @@ +#!/bin/bash + +# ====================================================================================== +# Name: LS_queryAndDownload_gsutil.sh +# Author: Stefan Ernst +# Date: 2020-06-20 +# Last change: 2020-08-10 +# Desc: Query and download the public Google Cloud Storage Landsat archive. +# Requirements: +# 1. Google Landsat metadata catalogue: +# https://console.cloud.google.com/storage/browser/gcp-public-data-landsat +# 2. shapefile containing the Landsat WRS-2 descending orbits: +# https://www.usgs.gov/media/files/landsat-wrs-2-descending-path-row-shapefile +# 3. gsutil - available through pip and conda +# Run the command 'gsutil config' after installation to set up authorization +# with your Google account. +# 4.
gdal - specify the AOI as path/row if gdal is not available +# ====================================================================================== + + +trap "echo Exited!; exit;" SIGINT SIGTERM #make sure that CTRL-C stops the whole process + +show_help() { +cat << EOF + +Usage: `basename $0` [-d] [-u] metadata-dir level-1-datapool queue aoi + aoitype sensor starttime endtime min-cc max-cc + + metadata-dir + directory where the Landsat metadata (csv file) is stored + + level-1-datapool + An existing directory, your files will be stored here + + queue + Downloaded files are appended to a file queue, which is needed for + the Level 2 processing. The file doesn't need to exist. If it exists, + new lines will be appended on successful ingestion + + area of interest + (1) The coordinates of your study area: "X1/Y1,X2/Y2,X3/Y3,...,X1/Y1" + The polygon must be closed (first X/Y = last X/Y). X/Y must be given as + decimal degrees with negative values for West and South coordinates. + (2) a shapefile (point/polygon/line). On-the-fly reprojection is provided, + but using EPSG4326 is recommended + (3) Path/Row of the Landsat footprints of interest: "PPPRRR,PPPRRR,PPPRRR" + Make sure to keep leading zeros - correct: 181034, incorrect: 18134 + + type of area of interest + 1 - coordinates as text + 2 - shapefile + 3 - PathRow as text + + sensor + Landsat sensor identifier: + LT05 - Landsat 5 TM + LE07 - Landsat 7 ETM+ + LC08 - Landsat OLI + + starttime endtime + Dates must be given as YYYY-MM-DD + + min-cc max-cc + The cloud cover range must be specified in % + + -d dry will trigger a dry run that will only return the number of images + and their total data volume + + -u will update the metadata catalogue (download and extract from GCS) + only the metadata dir is required as argument when using this option + + -h|--help show this help + +EOF +} + + +update_meta() { + echo "Updating metadata catalogue..." 
 + gsutil -m cp gs://gcp-public-data-landsat/index.csv.gz $METADIR + gunzip $METADIR/index.csv.gz + mv $METADIR/index.csv $METADIR/metadata_LS.csv +} + + +# ============================================================ +# check for options +DRYRUN=0 +while :; do + case $1 in + -d) DRYRUN=1 ;; + -h|-\?|--help) show_help + exit 0 ;; + -u) METADIR=$2 + if [ $# -lt 2 ]; then + echo "Metadata directory not specified, exiting" + exit 1 + elif [ $# -gt 2 ]; then + echo "Error: Please only specify the metadata directory when using the update option (-u)" + exit 1 + elif ! [ -w $METADIR ]; then + echo "Can not write to metadata directory, exiting" + exit 1 + fi + update_meta + echo "Done. You can run this script without option -u to download data now." + exit ;; + -?*) printf "%s\n" "" "Incorrect option specified" "" + show_help >&2 + exit 1 ;; + *) break #no more options + esac + shift +done + + +# ============================================================ +# if wrong number of input args and -u opt not set, stop +EXPECTED_ARGS=10 +if [ $# -ne $EXPECTED_ARGS ]; then + printf "%s\n" "" "Incorrect number of input arguments provided" + show_help + exit +fi + +METADIR=$1 +POOL=$2 +QUEUE=$3 +AOI=$4 +AOITYPE=$5 +SENSIN=$6 +DATEMIN=$7 +DATEMAX=$8 +CCMIN=$9 +CCMAX=${10} + +METACAT=$METADIR"/metadata_LS.csv" + + +# ============================================================ +# Check user input +for s in $(echo $SENSIN | sed 's/,/ /g') +do + case "$s" in + LT05|LE07|LC08) continue ;; + *) printf "%s\n" "" "$s is not a valid sensor type." "Valid Sensors: LT05, LE07, LC08" "" + exit ;; + esac +done + +if ! date -d $DATEMIN &> /dev/null; then + printf "%s\n" "" "starttime ($DATEMIN) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" + exit 1 + elif ! date -d $DATEMAX &> /dev/null; then + printf "%s\n" "" "endtime ($DATEMAX) is not a valid date."
"Make sure date is formatted as YYYY-MM-DD" "" + exit 1 +fi + + +# ============================================================ +# Check if metadata catalogue exists and is up to date +if ! [ -f $METACAT ]; then + echo "Metadata catalogue does not exist." + update_meta +fi + +METADATE=$(date -d $(stat $METACAT | grep "Change: " | cut -d" " -f2) +%s) +if [ $(date -d $DATEMAX +%s) -gt $METADATE ]; then + printf "%s\n" "" "WARNING: The selected time window exceeds the last update of the metadata catalogue" "Results may be incomplete, please consider updating the metadata catalogue using the -d option." +fi + + +# ============================================================ +# Get path / rows of interest +if [ "$AOITYPE" -eq 2 ]; then + if ! [ $(basename "$AOI" | cut -d"." -f 2-) == "shp" ]; then + printf "%s\n" "" "WARNING: AOI does not seem to be a shapefile. Other filetypes supported by GDAL should work, but are untested." + fi +fi +if [ "$AOITYPE" -eq 1 ] || [ "$AOITYPE" -eq 2 ]; then + if ! [ -x "$(command -v ogr2ogr)" ]; then + printf "%s\n" "Could not find ogr2ogr, is gdal installed?" "Define the AOI polygon using coordinates (option 3) if gdal is not available." 
>&2 + exit 1 + fi +fi + + +if [ "$AOITYPE" -eq 1 ]; then + + WKT=$(echo $AOI | sed 's/,/%20/g; s/\//,/g') + WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetFeature&typename=landsat&Filter=%3Cogc:Filter%3E%3Cogc:Intersects%3E%3Cogc:PropertyName%3Eshape%3C/ogc:PropertyName%3E%3Cgml:Polygon%20srsName=%22EPSG:4326%22%3E%3Cgml:outerBoundaryIs%3E%3Cgml:LinearRing%3E%3Cgml:coordinates%3E"$WKT"%3C/gml:coordinates%3E%3C/gml:LinearRing%3E%3C/gml:outerBoundaryIs%3E%3C/gml:Polygon%3E%3C/ogc:Intersects%3E%3C/ogc:Filter%3E" + PRRAW=$(ogr2ogr -f CSV /vsistdout/ -select "PR" WFS:"$WFSURL") + PR="_"$(echo $PRRAW | sed 's/PR, //; s/ /_|_/g')"_" + +elif [ "$AOITYPE" -eq 2 ]; then + + printf "%s\n" "" "Searching for Landsat footprints intersecting with geometries of AOI shapefile..." + AOINE=$(echo $(basename "$AOI") | rev | cut -d"." -f 2- | rev) + BBOX=$(ogrinfo -so $AOI $AOINE | grep "Extent: " | sed 's/Extent: //; s/(//g; s/)//g; s/, /,/g; s/ - /,/') + WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetCapabilities&typename=landsat&bbox="$BBOX + + ogr2ogr -f "GPKG" merged.gpkg WFS:"$WFSURL" -append -update + ogr2ogr -f "GPKG" merged.gpkg $AOI -append -update + + PRRAW=$(ogr2ogr -f CSV /vsistdout/ -dialect sqlite -sql "SELECT landsat.PR FROM landsat, $AOINE WHERE ST_Intersects(landsat.geom, ST_Transform($AOINE.geom, 4326))" merged.gpkg) + PR="_"$(echo $PRRAW | sed 's/PR, //; s/ /_|_/g')"_" + rm merged.gpkg + +elif [ "$AOITYPE" -eq 3 ]; then + + PRRAW=$AOI + PR="_"$(echo $AOI | sed 's/,/_|_/g')"_" + +else + echo " Error: Please specify aoitype as 1 for coordinates of a polygon, " + echo " 2 for shapefile (point/polygon/line) or " + echo " 3 for comma-separated PATHROW " + exit 1 +fi + +SENSOR=$(echo "$SENSIN" | sed 's/,/_|/g')"_" + + +# ============================================================ +# Filter metadata and extract download links +printf "%s\n" "" 
"Querying the metadata catalogue for" "Sensor(s): "$SENSIN "Path/Row: "$(echo $PR | sed 's/_//g; s/|/,/g') "Daterange: "$DATEMIN" to "$DATEMAX "Cloud cover minimum: "$CCMIN"%, maximum: "$CCMAX"%" "" + +LINKS=$(grep -E $PR $METACAT | grep -E $SENSOR | awk -F "," '{OFS=","} {gsub("-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '$5 >= start && $5 <= stop && $6 == 01 && $7 == "T1" && $12 >= clow && $12 <= chigh') + +printf "%s" "$LINKS" > LS_filtered_meta.txt +SIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$17/1048576} END {printf "%f", s}') +#NSCENES=$(( $(printf "%s" "$LINKS" | wc -l | cut -d" " -f 1) + 1 )) +NSCENES=$(sed -n '$=' LS_filtered_meta.txt) +#rm LS_filtered_meta.txt + +# ============================================================ +# Get total number and size of scenes matching criteria +UNIT="MB" +if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="GB" +fi +if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="TB" +fi +if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="PB" +fi + +if [ -z $NSCENES ];then + printf "%s\n" "There were no Landsat Level 1 scenes found matching the search criteria" "" + exit 0 +else + printf "%s\n" "$NSCENES Landsat Level 1 scenes matching criteria found" "$SIZE $UNIT data volume found" "" +fi + +if [ $DRYRUN -eq 1 ]; then + exit 0 +fi + + +# ============================================================ +# Download scenes +echo "Starting to download "$NSCENES" Landsat Level 1 scenes" +ITER=1 +for LINK in $LINKS +do + SCENEID=$(echo $LINK | cut -d, -f 2) + PR=$(echo $SCENEID | cut -d_ -f3) + PRPATH=$POOL/$PR + URL=$(echo $LINK | cut -d, -f 18) + + # create target directory if it doesn't exist + if [ ! -w $PRPATH ]; then + mkdir $PRPATH + if [ ! -w $PRPATH ]; then + echo "$PRPATH: Creating directory failed." 
+ exit 1 + fi + fi + ABSPRPATH=$(cd $POOL/$PR; pwd) + + # Check if scene already exists + SCENEPATH=$ABSPRPATH/$SCENEID + if [ -d $SCENEPATH ]; then + echo "Scene "$SCENEID"("$ITER" of "$NSCENES") exists, skipping..." + ((ITER++)) + continue + fi + + echo "Downloading "$SCENEID"("$ITER" of "$NSCENES")..." + gsutil -m -q cp -c -L $POOL"/download_log.txt" -R $URL $ABSPRPATH + + echo "$SCENEPATH QUEUED" >> $QUEUE + + + ((ITER++)) +done diff --git a/bash/force-level1-sentinel2-g.sh b/bash/force-level1-sentinel2-g.sh new file mode 100755 index 00000000..4e8442fe --- /dev/null +++ b/bash/force-level1-sentinel2-g.sh @@ -0,0 +1,292 @@ +#!/bin/bash + +# ===================================================================================== +# Name: S2_queryAndDownload_gsutil.sh +# Author: Stefan Ernst +# Date: 2020-06-20 +# Last change: 2020-08-11 +# Desc: Query and download the public Google Cloud Storage Sentinel-2 archive. +# Products are retrieved from the gcp-public-data-sentinel-2 bucket. +# Requirements: +# 1. Google Sentinel-2 metadata catalogue: +# https://console.cloud.google.com/storage/browser/gcp-public-data-sentinel-2 +# 2. shapefile of your area of interest (only required when the AOI is +# given as a shapefile instead of coordinates or tile names) +# 3. gsutil - available through pip and conda +# Run the command 'gsutil config' after installation to set up authorization +# with your Google account. +# 4. 
gdal - specify the AOI as tile names if gdal is not available +# ===================================================================================== + + +trap "echo Exited!; exit;" SIGINT SIGTERM #make sure that CTRL-C stops the whole process + +show_help() { +cat << EOF + +Usage: `basename $0` [-d] [-u] metadata-dir level-1-datapool queue aoi + aoitype starttime endtime min-cc max-cc + + metadata-dir + directory where the Sentinel-2 metadata (csv file) is stored + + level-1-datapool + An existing directory, your files will be stored here + + queue + Downloaded files are appended to a file queue, which is needed for + the Level 2 processing. The file doesn't need to exist. If it exists, + new lines will be appended on successful ingestion + + area of interest + (1) The coordinates of your study area: "X1/Y1,X2/Y2,X3/Y3,...,X1/Y1" + The polygon must be closed (first X/Y = last X/Y). X/Y must be given as + decimal degrees with negative values for West and South coordinates. + (2) a shapefile (point/polygon/line). On-the-fly reprojection is provided, + but using EPSG4326 is recommended + (3) Names of the Sentinel-2 MGRS tiles of interest: "TTTTT,TTTTT,TTTTT" + Make sure to omit the leading T - correct: 32UNA, incorrect: T32UNA + + type of area of interest + 1 - coordinates as text + 2 - shapefile + 3 - tile names as text + + starttime endtime + Dates must be given as YYYY-MM-DD + + min-cc max-cc + The cloud cover range must be specified in % + + -d will trigger a dry run that will only return the number of images + and their total data volume + + -u will update the metadata catalogue (download and extract from GCS) + only the metadata dir is required as argument when using this option + + -h|--help show this help + +EOF +} + + +update_meta() { + echo "Updating metadata catalogue..."
+ gsutil -m cp gs://gcp-public-data-sentinel-2/index.csv.gz $METADIR + gunzip $METADIR/index.csv.gz + mv $METADIR/index.csv $METADIR/metadata_S2.csv +} + + +# ============================================================ +# check for options +DRYRUN=0 +while :; do + case $1 in + -d) DRYRUN=1 ;; + -h|-\?|--help) show_help + exit 0 ;; + -u) METADIR=$2 + if [ $# -lt 2 ]; then + echo "Metadata directory not specified, exiting" + exit 1 + elif [ $# -gt 2 ]; then + echo "Error: Please only specify the metadata directory when using the update option (-u)" + exit 1 + elif ! [ -w $METADIR ]; then + echo "Can not write to metadata directory, exiting" + exit 1 + fi + update_meta + echo "Done. You can run this script without option -u to download data now." + exit ;; + -?*) printf "%s\n" "" "Incorrect option specified" "" + show_help >&2 + exit 1 ;; + *) break #no more options + esac + shift +done + + +# ============================================================ +# if wrong number of input args and -u opt not set, stop +EXPECTED_ARGS=9 +if [ $# -ne $EXPECTED_ARGS ]; then + printf "%s\n" "" "Incorrect number of input arguments provided" + show_help + exit +fi + +METADIR=$1 +POOL=$2 +QUEUE=$3 +AOI=$4 +AOITYPE=$5 +DATEMIN=$6 +DATEMAX=$7 +CCMIN=$8 +CCMAX=$9 + +METACAT=$METADIR"/metadata_S2.csv" + + +# ============================================================ +# Check user input +for s in $(echo $SENSIN | sed 's/,/ /g') +do + case "$s" in + LT05|LE07|LC08) continue ;; + *) printf "%s\n" "" "$s is not a valid sensor type." "Valid Sensors: LT05, LE07, LC08" "" + exit ;; + esac +done + +if ! date -d $DATEMIN &> /dev/null; then + printf "%s\n" "" "starttime ($DATEMIN) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" + exit 1 + elif ! date -d $DATEMAX &> /dev/null; then + printf "%s\n" "" "endtime ($DATEMAX) is not a valid date."
"Make sure date is formatted as YYYY-MM-DD" "" + exit 1 +fi + + +# ============================================================ +# Check if metadata catalogue exists and is up to date +if ! [ -f $METACAT ]; then + echo "Metadata catalogue does not exist." + update_meta +fi + +METADATE=$(date -d $(stat $METACAT | grep "Change: " | cut -d" " -f2) +%s) +if [ $(date -d $DATEMAX +%s) -gt $METADATE ]; then + printf "%s\n" "" "WARNING: The selected time window exceeds the last update of the metadata catalogue" "Results may be incomplete, please consider updating the metadata catalogue using the -u option." +fi + + +# ============================================================ +# Get S2 MGRS tiles of interest +if [ "$AOITYPE" -eq 2 ]; then + if ! [ $(basename "$AOI" | cut -d"." -f 2-) == "shp" ]; then + printf "%s\n" "" "WARNING: AOI does not seem to be a shapefile. Other filetypes supported by GDAL should work, but are untested." + fi +fi +if [ "$AOITYPE" -eq 1 ] || [ "$AOITYPE" -eq 2 ]; then + if ! [ -x "$(command -v ogr2ogr)" ]; then + printf "%s\n" "Could not find ogr2ogr, is gdal installed?" "Define the AOI using tile names (option 3) if gdal is not available."
>&2 + exit 1 + fi +fi + + +if [ "$AOITYPE" -eq 1 ]; then + + WKT=$(echo $AOI | sed 's/,/%20/g; s/\//,/g') + WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetFeature&typename=sentinel2&Filter=%3Cogc:Filter%3E%3Cogc:Intersects%3E%3Cogc:PropertyName%3Eshape%3C/ogc:PropertyName%3E%3Cgml:Polygon%20srsName=%22EPSG:4326%22%3E%3Cgml:outerBoundaryIs%3E%3Cgml:LinearRing%3E%3Cgml:coordinates%3E"$WKT"%3C/gml:coordinates%3E%3C/gml:LinearRing%3E%3C/gml:outerBoundaryIs%3E%3C/gml:Polygon%3E%3C/ogc:Intersects%3E%3C/ogc:Filter%3E" + TILERAW=$(ogr2ogr -f CSV /vsistdout/ -select "Name" WFS:"$WFSURL") + TILES="_"$(echo $TILERAW | sed 's/Name, /T/; s/ /_|_T/g')"_" + +elif [ "$AOITYPE" -eq 2 ]; then + + printf "%s\n" "" "Searching for S2 tiles intersecting with geometries of AOI shapefile..." + AOINE=$(echo $(basename "$AOI") | rev | cut -d"." -f 2- | rev) + BBOX=$(ogrinfo -so $AOI $AOINE | grep "Extent: " | sed 's/Extent: //; s/(//g; s/)//g; s/, /,/g; s/ - /,/') + WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetCapabilities&typename=sentinel2&bbox="$BBOX + + ogr2ogr -f "GPKG" merged.gpkg WFS:"$WFSURL" -append -update + ogr2ogr -f "GPKG" merged.gpkg $AOI -append -update + + TILERAW=$(ogr2ogr -f CSV /vsistdout/ -dialect sqlite -sql "SELECT sentinel2.Name FROM sentinel2, $AOINE WHERE ST_Intersects(sentinel2.geom, ST_Transform($AOINE.geom, 4326))" merged.gpkg) + TILES="_"$(echo $TILERAW | sed 's/Name, /T/; s/ /_|_T/g')"_" + rm merged.gpkg + +elif [ "$AOITYPE" -eq 3 ]; then + + TILERAW=$AOI + TILES="_T"$(echo $AOI | sed 's/,/_|_T/g')"_" + +else + echo " Error: Please specify aoitype as 1 for coordinates of a polygon, " + echo " 2 for shapefile (point/polygon/line) or " + echo " 3 for comma-separated tile names " + exit +fi + + +# ============================================================ +# Filter metadata and extract download links +printf "%s\n" "" "Querying the 
metadata catalogue for" "Tile(s): "$(echo $TILERAW | sed 's/Name, //; s/ /,/g') "Daterange: "$DATEMIN" to "$DATEMAX "Cloud cover minimum: "$CCMIN"%, maximum: "$CCMAX"%" "" + +LINKS=$(grep -E $TILES $METACAT | awk -F "," '{OFS=","} {gsub("T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z|-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '{OFS=","} $5 >= start && $5 <= stop && $7 >= clow && $7 <= chigh') + +printf "%s" "$LINKS" > S2_filtered_meta.txt +SIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$6/1048576} END {printf "%f", s}') +NSCENES=$(sed -n '$=' S2_filtered_meta.txt) +rm S2_filtered_meta.txt + + +# ============================================================ +# Get total number and size of scenes matching criteria +UNIT="MB" +if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="GB" +fi +if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="TB" +fi +if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="PB" +fi + +if [ -z $NSCENES ];then + printf "%s\n" "There were no Sentinel-2 Level 1 scenes found matching the search criteria" "" + exit 0 +else + printf "%s\n" "$NSCENES Sentinel-2 Level 1 scenes matching criteria found" "$SIZE $UNIT data volume found" "" +fi + +if [ $DRYRUN -eq 1 ]; then + exit 0 +fi + + +# ============================================================ +# Download scenes +echo "Starting to download "$NSCENES" Sentinel-2 Level 1 scenes" +ITER=1 +for LINK in $LINKS +do + SCENEID=$(echo $LINK | cut -d, -f 2) + TILE=$(echo $LINK | cut -d, -f1 | grep -o -E "T[0-9]{2}[A-Z]{3}") + TILEPATH=$POOL/$TILE + URL=$(echo $LINK | cut -d, -f 14) + + # create target directory if it doesn't exist + if [ ! -w $TILEPATH ]; then + mkdir $TILEPATH + if [ ! -w $TILEPATH ]; then + echo "$TILEPATH: Creating directory failed." 
+ exit 1 + fi + fi + ABSTILEPATH=$(cd $POOL/$TILE; pwd) + + # Check if scene already exists + SCENEPATH=$ABSTILEPATH/$SCENEID".SAFE" + if [ -d $SCENEPATH ]; then + echo "Scene "$SCENEID"("$ITER" of "$NSCENES") exists, skipping..." + ((ITER++)) + continue + fi + + echo "Downloading "$SCENEID"("$ITER" of "$NSCENES")..." + gsutil -m -q cp -c -L $POOL"/download_log.txt" -R $URL $ABSTILEPATH + + echo "$SCENEPATH QUEUED" >> $QUEUE + + + ((ITER++)) +done From 78a3229436da51400972bb84667a7d3fcdfd18ad Mon Sep 17 00:00:00 2001 From: Stefan Ernst Date: Thu, 13 Aug 2020 11:11:53 +0200 Subject: [PATCH 16/78] remove unnecessary check for sensor --- bash/force-level1-sentinel2-g.sh | 9 --------- 1 file changed, 9 deletions(-) diff --git a/bash/force-level1-sentinel2-g.sh b/bash/force-level1-sentinel2-g.sh index 4e8442fe..7fc3b45c 100755 --- a/bash/force-level1-sentinel2-g.sh +++ b/bash/force-level1-sentinel2-g.sh @@ -133,15 +133,6 @@ METACAT=$METADIR"/metadata_S2.csv" # ============================================================ # Check user input -for s in $(echo $SENSIN | sed 's/,/ /g') -do - case "$s" in - LT05|LE07|LC08) continue ;; - *) printf "%s\n" "" "$s is not a valid sensor type." "Valid Sensors: LT05, LE07, LC08" "" - exit ;; - esac -done - if ! date -d $DATEMIN &> /dev/null; then printf "%s\n" "" "starttime ($DATEMIN) is not a valid date." 
"Make sure date is formatted as YYYY-MM-DD" "" exit 1 From 8348d7c5ee3bbd60be102d630d607bb8094eb950 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 13 Aug 2020 12:42:16 +0200 Subject: [PATCH 17/78] working implementation of polarmetrics --- src/cross-level/enum-cl.c | 29 +- src/cross-level/enum-cl.h | 10 +- src/higher-level/param-hl.c | 105 +------ src/higher-level/param-hl.h | 21 +- src/higher-level/pheno-hl.cpp | 104 +++---- src/higher-level/polar-hl.c | 507 ++++++++++++++++++++---------- src/higher-level/trend-hl.c | 22 +- src/higher-level/tsa-hl.c | 571 +++++++++++++++++++++++----------- src/higher-level/tsa-hl.h | 1 + 9 files changed, 836 insertions(+), 534 deletions(-) diff --git a/src/cross-level/enum-cl.c b/src/cross-level/enum-cl.c index 98dfd36b..1d51bbdf 100755 --- a/src/cross-level/enum-cl.c +++ b/src/cross-level/enum-cl.c @@ -108,20 +108,25 @@ const tagged_enum_t _TAGGED_ENUM_HEMI_[_HEMI_LENGTH_] = { { _HEMI_NORTH_, "NORTH" }, { _HEMI_SOUTH_, "SOUTH" }, { _HEMI_MIXED_, "MIXED" }}; const tagged_enum_t _TAGGED_ENUM_LSP_[_LSP_LENGTH_] = { - {_LSP_DEM_, "DEM" }, {_LSP_DSS_, "DSS" }, {_LSP_DRI_, "DRI" }, {_LSP_DPS_, "DPS" }, - {_LSP_DFI_, "DFI" }, {_LSP_DES_, "DES" }, {_LSP_DLM_, "DLM" }, {_LSP_LTS_, "LTS" }, - {_LSP_LGS_, "LGS" }, {_LSP_VEM_, "VEM" }, {_LSP_VSS_, "VSS" }, {_LSP_VRI_, "VRI" }, - {_LSP_VPS_, "VPS" }, {_LSP_VFI_, "VFI" }, {_LSP_VES_, "VES" }, {_LSP_VLM_, "VLM" }, - {_LSP_VBL_, "VBL" }, {_LSP_VSA_, "VSA" }, {_LSP_IST_, "IST" }, {_LSP_IBL_, "IBL" }, - {_LSP_IBT_, "IBT" }, {_LSP_IGS_, "IGS" }, {_LSP_RAR_, "RAR" }, {_LSP_RAF_, "RAF" }, - {_LSP_RMR_, "RMR" }, {_LSP_RMF_, "RMF" }}; + { _LSP_DEM_, "DEM" }, { _LSP_DSS_, "DSS" }, { _LSP_DRI_, "DRI" }, { _LSP_DPS_, "DPS" }, + { _LSP_DFI_, "DFI" }, { _LSP_DES_, "DES" }, { _LSP_DLM_, "DLM" }, { _LSP_LTS_, "LTS" }, + { _LSP_LGS_, "LGS" }, { _LSP_VEM_, "VEM" }, { _LSP_VSS_, "VSS" }, { _LSP_VRI_, "VRI" }, + { _LSP_VPS_, "VPS" }, { _LSP_VFI_, "VFI" }, { _LSP_VES_, "VES" }, { _LSP_VLM_, "VLM" }, + { 
_LSP_VBL_, "VBL" }, { _LSP_VSA_, "VSA" }, { _LSP_IST_, "IST" }, { _LSP_IBL_, "IBL" }, + { _LSP_IBT_, "IBT" }, { _LSP_IGS_, "IGS" }, { _LSP_RAR_, "RAR" }, { _LSP_RAF_, "RAF" }, + { _LSP_RMR_, "RMR" }, { _LSP_RMF_, "RMF" }}; const tagged_enum_t _TAGGED_ENUM_POL_[_POL_LENGTH_] = { - { _POL_DSS_, "DSS" }, { _POL_DMS_, "DMS" }, { _POL_DES_, "DES" }, { _POL_DEV_, "DEV" }, - { _POL_DAV_, "DAV" }, { _POL_DLV_, "DLV" }, { _POL_LGS_, "LGS" }, { _POL_LBV_, "LBV" }, - { _POL_VSS_, "VSS" }, { _POL_VMS_, "VMS" }, { _POL_VES_, "VES" }, { _POL_VEV_, "VEV" }, - { _POL_VAV_, "VAV" }, { _POL_VLV_, "VLV" }, { _POL_VGA_, "VGA" }, { _POL_VGV_, "VGV" }, - { _POL_DPY_, "DPY" }}; + { _POL_DEM_, "DEM" }, { _POL_DLM_, "DLM" }, { _POL_DPS_, "DPS" }, + { _POL_DSS_, "DSS" }, { _POL_DMS_, "DMS" }, { _POL_DES_, "DES" }, + { _POL_DEV_, "DEV" }, { _POL_DAV_, "DAV" }, { _POL_DLV_, "DLV" }, + { _POL_LTS_, "LTS" }, { _POL_LGS_, "LGS" }, { _POL_LGV_, "LGV" }, + { _POL_VEM_, "VEM" }, { _POL_VLM_, "VLM" }, { _POL_VPS_, "VPS" }, + { _POL_VSS_, "VSS" }, { _POL_VMS_, "VMS" }, { _POL_VES_, "VES" }, + { _POL_VEV_, "VEV" }, { _POL_VAV_, "VAV" }, { _POL_VLV_, "VLV" }, + { _POL_VBL_, "VBL" }, { _POL_VSA_, "VSA" }, { _POL_VPA_, "VPA" }, + { _POL_VGA_, "VGA" }, { _POL_VGV_, "VGV" }, { _POL_DPY_, "DPY" }, + { _POL_DPV_, "DPV" }}; const tagged_enum_t _TAGGED_ENUM_TAIL_[_TAIL_LENGTH_] = { { _TAIL_LEFT_, "LEFT" }, { _TAIL_TWO_, "TWO" }, { _TAIL_RIGHT_, "RIGHT" }}; diff --git a/src/cross-level/enum-cl.h b/src/cross-level/enum-cl.h index c085b4e3..23952da9 100755 --- a/src/cross-level/enum-cl.h +++ b/src/cross-level/enum-cl.h @@ -191,16 +191,18 @@ enum { _LSP_DEM_, _LSP_DSS_, _LSP_DRI_, _LSP_DPS_, _LSP_DFI_, _LSP_DES_, _LSP_RMR_, _LSP_RMF_, _LSP_LENGTH_ }; // polar metrics -enum { _POL_DSS_, _POL_DMS_, _POL_DES_, _POL_DEV_, _POL_DAV_, _POL_DLV_, - _POL_LGS_, _POL_LBV_, _POL_VSS_, _POL_VMS_, _POL_VES_, _POL_VEV_, - _POL_VAV_, _POL_VLV_, _POL_VGA_, _POL_VGV_, _POL_DPY_, _POL_LENGTH_ }; +enum { _POL_DEM_, _POL_DLM_, 
_POL_DPS_, _POL_DSS_, _POL_DMS_, _POL_DES_, + _POL_DEV_, _POL_DAV_, _POL_DLV_, _POL_LTS_, _POL_LGS_, _POL_LGV_, + _POL_VEM_, _POL_VLM_, _POL_VPS_, _POL_VSS_, _POL_VMS_, _POL_VES_, + _POL_VEV_, _POL_VAV_, _POL_VLV_, _POL_VBL_, _POL_VSA_, _POL_VPA_, + _POL_VGA_, _POL_VGV_, _POL_DPY_, _POL_DPV_, _POL_LENGTH_ }; // folding enum { _FLD_YEAR_, _FLD_QUARTER_, _FLD_MONTH_, _FLD_WEEK_, _FLD_DOY_, _FLD_LENGTH_ }; // time series parts enum { _PART_TOTAL_, _PART_BEFORE_, _PART_AFTER_, _PART_LENGTH_ }; - + // trend enum { _TRD_MEAN_, _TRD_OFFSET_, _TRD_SLOPE_, _TRD_RSQ_, _TRD_SIG_, _TRD_RMSE_, diff --git a/src/higher-level/param-hl.c b/src/higher-level/param-hl.c index b2e9f3ea..2236b60f 100755 --- a/src/higher-level/param-hl.c +++ b/src/higher-level/param-hl.c @@ -235,8 +235,9 @@ void register_tsa(params_t *params, par_hl_t *phl){ // polar parameters register_float_par(params, "POL_START_THRESHOLD", 0.01, 0.99, &phl->tsa.pol.start); - register_float_par(params, "POL_MID_THRESHOLD", 0.01, 0.99, &phl->tsa.pol.mid); - register_float_par(params, "POL_END_THRESHOLD", 0.01, 0.99, &phl->tsa.pol.end); + register_float_par(params, "POL_MID_THRESHOLD", 0.01, 0.99, &phl->tsa.pol.mid); + register_float_par(params, "POL_END_THRESHOLD", 0.01, 0.99, &phl->tsa.pol.end); + register_bool_par(params, "POL_ADAPTIVE", &phl->tsa.pol.adaptive); register_enumvec_par(params, "POL", _TAGGED_ENUM_POL_, _POL_LENGTH_, &phl->tsa.pol.metrics, &phl->tsa.pol.nmetrics); register_enum_par(params, "STANDARDIZE_POL", _TAGGED_ENUM_STD_, _STD_LENGTH_, &phl->tsa.pol.standard); register_bool_par(params, "OUTPUT_PCT", &phl->tsa.pol.opct); @@ -792,64 +793,7 @@ int parse_lsp(par_lsp_t *lsp){ int i; - for (i=0; inmetrics; i++){ - if (lsp->metrics[i] == _LSP_DEM_){ - lsp->odem = true; - } else if (lsp->metrics[i] == _LSP_DSS_){ - lsp->odss = true; - } else if (lsp->metrics[i] == _LSP_DRI_){ - lsp->odri = true; - } else if (lsp->metrics[i] == _LSP_DPS_){ - lsp->odps = true; - } else if (lsp->metrics[i] == _LSP_DFI_){ - 
lsp->odfi = true; - } else if (lsp->metrics[i] == _LSP_DES_){ - lsp->odes = true; - } else if (lsp->metrics[i] == _LSP_DLM_){ - lsp->odlm = true; - } else if (lsp->metrics[i] == _LSP_LTS_){ - lsp->olts = true; - } else if (lsp->metrics[i] == _LSP_LGS_){ - lsp->olgs = true; - } else if (lsp->metrics[i] == _LSP_VEM_){ - lsp->ovem = true; - } else if (lsp->metrics[i] == _LSP_VSS_){ - lsp->ovss = true; - } else if (lsp->metrics[i] == _LSP_VRI_){ - lsp->ovri = true; - } else if (lsp->metrics[i] == _LSP_VPS_){ - lsp->ovps = true; - } else if (lsp->metrics[i] == _LSP_VFI_){ - lsp->ovfi = true; - } else if (lsp->metrics[i] == _LSP_VES_){ - lsp->oves = true; - } else if (lsp->metrics[i] == _LSP_VLM_){ - lsp->ovlm = true; - } else if (lsp->metrics[i] == _LSP_VBL_){ - lsp->ovbl = true; - } else if (lsp->metrics[i] == _LSP_VSA_){ - lsp->ovsa = true; - } else if (lsp->metrics[i] == _LSP_IST_){ - lsp->oist = true; - } else if (lsp->metrics[i] == _LSP_IBL_){ - lsp->oibl = true; - } else if (lsp->metrics[i] == _LSP_IBT_){ - lsp->oibt = true; - } else if (lsp->metrics[i] == _LSP_IGS_){ - lsp->oigs = true; - } else if (lsp->metrics[i] == _LSP_RAR_){ - lsp->orar = true; - } else if (lsp->metrics[i] == _LSP_RAF_){ - lsp->oraf = true; - } else if (lsp->metrics[i] == _LSP_RMR_){ - lsp->ormr = true; - } else if (lsp->metrics[i] == _LSP_RMF_){ - lsp->ormf = true; - } else { - printf("warning: unknown lsp.\n"); - } - } - + for (i=0; inmetrics; i++) lsp->use[lsp->metrics[i]] = true; return SUCCESS; } @@ -864,46 +808,7 @@ int parse_pol(par_pol_t *pol){ int i; - for (i=0; inmetrics; i++){ - if (pol->metrics[i] == _POL_DSS_){ - pol->odss = true; - } else if (pol->metrics[i] == _POL_DMS_){ - pol->odms = true; - } else if (pol->metrics[i] == _POL_DES_){ - pol->odes = true; - } else if (pol->metrics[i] == _POL_DEV_){ - pol->odev = true; - } else if (pol->metrics[i] == _POL_DAV_){ - pol->odav = true; - } else if (pol->metrics[i] == _POL_DLV_){ - pol->odlv = true; - } else if (pol->metrics[i] == 
_POL_LGS_){ - pol->olgs = true; - } else if (pol->metrics[i] == _POL_LBV_){ - pol->olbv = true; - } else if (pol->metrics[i] == _POL_VSS_){ - pol->ovss = true; - } else if (pol->metrics[i] == _POL_VMS_){ - pol->ovms = true; - } else if (pol->metrics[i] == _POL_VES_){ - pol->oves = true; - } else if (pol->metrics[i] == _POL_VEV_){ - pol->ovev = true; - } else if (pol->metrics[i] == _POL_VAV_){ - pol->ovav = true; - } else if (pol->metrics[i] == _POL_VLV_){ - pol->ovlv = true; - } else if (pol->metrics[i] == _POL_VGA_){ - pol->ovga = true; - } else if (pol->metrics[i] == _POL_VGV_){ - pol->ovgv = true; - } else if (pol->metrics[i] == _POL_DPY_){ - pol->odpy = true; - } else { - printf("warning: unknown pol.\n"); - } - } - + for (i=0; inmetrics; i++) pol->use[pol->metrics[i]] = true; return SUCCESS; } diff --git a/src/higher-level/param-hl.h b/src/higher-level/param-hl.h index 24b2527e..e587c93d 100755 --- a/src/higher-level/param-hl.h +++ b/src/higher-level/param-hl.h @@ -190,11 +190,12 @@ typedef struct { int ospl; // flag: output spline fit int olsp; // flag: output LSP metrics int *metrics, nmetrics; - int odem, odss, odri, odps, odfi, odes; - int odlm, olts, olgs, ovem, ovss, ovri;; - int ovps, ovfi, oves, ovlm, ovbl, ovsa; - int oist, oibl, oibt, oigs, orar, oraf; - int ormr, ormf; + int use[_LSP_LENGTH_]; + //int odem, odss, odri, odps, odfi, odes; + //int odlm, olts, olgs, ovem, ovss, ovri;; + //int ovps, ovfi, oves, ovlm, ovbl, ovsa; + //int oist, oibl, oibt, oigs, orar, oraf; + //int ormr, ormf; int otrd; // flag: output LSP trends int ocat; // flag: output LSP cats int standard; @@ -210,12 +211,16 @@ typedef struct { int opct; // flag: output polar coordinate transformed TS int opol; // flag: output polar metrics int *metrics, nmetrics; - int odss, odms, odes, odev, odav, odlv; - int olgs, olbv, ovss, ovms, oves, ovev; - int ovav, ovlv, ovga, ovgv, odpy; + int use[_POL_LENGTH_]; + //int odem, odss, odev, odms, odav, odlv; + //int odes, odlm, olts, olgs, 
olgv, ovem; + //int ovss, ovev, ovms, ovav, ovlv, oves; + //int ovlm, ovbl, ovsa, ovga, ovgv, odpy; + //int odpv; int otrd; // flag: output POL trends int ocat; // flag: output POL cats int standard; + int adaptive; } par_pol_t; // trend diff --git a/src/higher-level/pheno-hl.cpp b/src/higher-level/pheno-hl.cpp index 5c87c4ff..6f12d72d 100755 --- a/src/higher-level/pheno-hl.cpp +++ b/src/higher-level/pheno-hl.cpp @@ -257,32 +257,32 @@ Spline *spl; /** copy LSP if all OK **/ if (valid){ - if (lsp->odem) ts->lsp_[_LSP_DEM_][y_index][p] = (short)(ph.doy_early_min*dce+ce0); // days since 1st LSP year - if (lsp->odss) ts->lsp_[_LSP_DSS_][y_index][p] = (short)(ph.doy_start_green*dce+ce0); // days since 1st LSP year - if (lsp->odri) ts->lsp_[_LSP_DRI_][y_index][p] = (short)(ph.doy_early_flex*dce+ce0); // days since 1st LSP year - if (lsp->odps) ts->lsp_[_LSP_DPS_][y_index][p] = (short)(ph.doy_peak*dce+ce0); // days since 1st LSP year - if (lsp->odfi) ts->lsp_[_LSP_DFI_][y_index][p] = (short)(ph.doy_late_flex*dce+ce0); // days since 1st LSP year - if (lsp->odes) ts->lsp_[_LSP_DES_][y_index][p] = (short)(ph.doy_end_green*dce+ce0); // days since 1st LSP year - if (lsp->odlm) ts->lsp_[_LSP_DLM_][y_index][p] = (short)(ph.doy_late_min*dce+ce0); // days since 1st LSP year - if (lsp->olts) ts->lsp_[_LSP_LTS_][y_index][p] = (short)(ph.min_min_duration*dce); // days - if (lsp->olgs) ts->lsp_[_LSP_LGS_][y_index][p] = (short)(ph.green_duration*dce); // days - if (lsp->ovem) ts->lsp_[_LSP_VEM_][y_index][p] = (short)(ph.early_min_val); // index value - if (lsp->ovss) ts->lsp_[_LSP_VSS_][y_index][p] = (short)(ph.start_green_val); // index value - if (lsp->ovri) ts->lsp_[_LSP_VRI_][y_index][p] = (short)(ph.early_flex_val); // index value - if (lsp->ovps) ts->lsp_[_LSP_VPS_][y_index][p] = (short)(ph.peak_val); // index value - if (lsp->ovfi) ts->lsp_[_LSP_VFI_][y_index][p] = (short)(ph.late_flex_val); // index value - if (lsp->oves) ts->lsp_[_LSP_VES_][y_index][p] = 
(short)(ph.end_green_val); // index value - if (lsp->ovlm) ts->lsp_[_LSP_VLM_][y_index][p] = (short)(ph.late_min_val); // index value - if (lsp->ovbl) ts->lsp_[_LSP_VBL_][y_index][p] = (short)(ph.latent_val); // index value - if (lsp->ovsa) ts->lsp_[_LSP_VSA_][y_index][p] = (short)(ph.amplitude); // index value - if (lsp->oist) ts->lsp_[_LSP_IST_][y_index][p] = (short)(ph.min_min_integral*dce*0.001); // days * index value * 10 - if (lsp->oibl) ts->lsp_[_LSP_IBL_][y_index][p] = (short)(ph.latent_integral*dce*0.001); // days * index value * 10 - if (lsp->oibt) ts->lsp_[_LSP_IBT_][y_index][p] = (short)(ph.total_integral*dce*0.001); // days * index value * 10 - if (lsp->oigs) ts->lsp_[_LSP_IGS_][y_index][p] = (short)(ph.green_integral*dce*0.001); // days * index value * 10 - if (lsp->orar) ts->lsp_[_LSP_RAR_][y_index][p] = (short)(ph.greenup_rate/dce); // index value / days - if (lsp->oraf) ts->lsp_[_LSP_RAF_][y_index][p] = (short)(ph.senescence_rate/dce); // index value / days - if (lsp->ormr) ts->lsp_[_LSP_RMR_][y_index][p] = (short)(ph.early_flex_rate/dce); // index value / days - if (lsp->ormf) ts->lsp_[_LSP_RMF_][y_index][p] = (short)(ph.late_flex_rate/dce); // index value / days + if (lsp->use[_LSP_DEM_]) ts->lsp_[_LSP_DEM_][y_index][p] = (short)(ph.doy_early_min*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DSS_]) ts->lsp_[_LSP_DSS_][y_index][p] = (short)(ph.doy_start_green*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DRI_]) ts->lsp_[_LSP_DRI_][y_index][p] = (short)(ph.doy_early_flex*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DPS_]) ts->lsp_[_LSP_DPS_][y_index][p] = (short)(ph.doy_peak*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DFI_]) ts->lsp_[_LSP_DFI_][y_index][p] = (short)(ph.doy_late_flex*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DES_]) ts->lsp_[_LSP_DES_][y_index][p] = (short)(ph.doy_end_green*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DLM_]) ts->lsp_[_LSP_DLM_][y_index][p] = 
(short)(ph.doy_late_min*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_LTS_]) ts->lsp_[_LSP_LTS_][y_index][p] = (short)(ph.min_min_duration*dce); // days + if (lsp->use[_LSP_LGS_]) ts->lsp_[_LSP_LGS_][y_index][p] = (short)(ph.green_duration*dce); // days + if (lsp->use[_LSP_VEM_]) ts->lsp_[_LSP_VEM_][y_index][p] = (short)(ph.early_min_val); // index value + if (lsp->use[_LSP_VSS_]) ts->lsp_[_LSP_VSS_][y_index][p] = (short)(ph.start_green_val); // index value + if (lsp->use[_LSP_VRI_]) ts->lsp_[_LSP_VRI_][y_index][p] = (short)(ph.early_flex_val); // index value + if (lsp->use[_LSP_VPS_]) ts->lsp_[_LSP_VPS_][y_index][p] = (short)(ph.peak_val); // index value + if (lsp->use[_LSP_VFI_]) ts->lsp_[_LSP_VFI_][y_index][p] = (short)(ph.late_flex_val); // index value + if (lsp->use[_LSP_VES_]) ts->lsp_[_LSP_VES_][y_index][p] = (short)(ph.end_green_val); // index value + if (lsp->use[_LSP_VLM_]) ts->lsp_[_LSP_VLM_][y_index][p] = (short)(ph.late_min_val); // index value + if (lsp->use[_LSP_VBL_]) ts->lsp_[_LSP_VBL_][y_index][p] = (short)(ph.latent_val); // index value + if (lsp->use[_LSP_VSA_]) ts->lsp_[_LSP_VSA_][y_index][p] = (short)(ph.amplitude); // index value + if (lsp->use[_LSP_IST_]) ts->lsp_[_LSP_IST_][y_index][p] = (short)(ph.min_min_integral*dce*0.001); // days * index value * 10 + if (lsp->use[_LSP_IBL_]) ts->lsp_[_LSP_IBL_][y_index][p] = (short)(ph.latent_integral*dce*0.001); // days * index value * 10 + if (lsp->use[_LSP_IBT_]) ts->lsp_[_LSP_IBT_][y_index][p] = (short)(ph.total_integral*dce*0.001); // days * index value * 10 + if (lsp->use[_LSP_IGS_]) ts->lsp_[_LSP_IGS_][y_index][p] = (short)(ph.green_integral*dce*0.001); // days * index value * 10 + if (lsp->use[_LSP_RAR_]) ts->lsp_[_LSP_RAR_][y_index][p] = (short)(ph.greenup_rate/dce); // index value / days + if (lsp->use[_LSP_RAF_]) ts->lsp_[_LSP_RAF_][y_index][p] = (short)(ph.senescence_rate/dce); // index value / days + if (lsp->use[_LSP_RMR_]) ts->lsp_[_LSP_RMR_][y_index][p] = 
(short)(ph.early_flex_rate/dce); // index value / days + if (lsp->use[_LSP_RMF_]) ts->lsp_[_LSP_RMF_][y_index][p] = (short)(ph.late_flex_rate/dce); // index value / days } destroy_spline(spl); @@ -534,32 +534,32 @@ Spline *spl; /** copy LSP if all OK **/ if (valid){ - if (lsp->odem) ts->lsp_[_LSP_DEM_][year][p] = (short)(ph.doy_early_min*dce+ce0); // days since 1st LSP year - if (lsp->odss) ts->lsp_[_LSP_DSS_][year][p] = (short)(ph.doy_start_green*dce+ce0); // days since 1st LSP year - if (lsp->odri) ts->lsp_[_LSP_DRI_][year][p] = (short)(ph.doy_early_flex*dce+ce0); // days since 1st LSP year - if (lsp->odps) ts->lsp_[_LSP_DPS_][year][p] = (short)(ph.doy_peak*dce+ce0); // days since 1st LSP year - if (lsp->odfi) ts->lsp_[_LSP_DFI_][year][p] = (short)(ph.doy_late_flex*dce+ce0); // days since 1st LSP year - if (lsp->odes) ts->lsp_[_LSP_DES_][year][p] = (short)(ph.doy_end_green*dce+ce0); // days since 1st LSP year - if (lsp->odlm) ts->lsp_[_LSP_DLM_][year][p] = (short)(ph.doy_late_min*dce+ce0); // days since 1st LSP year - if (lsp->olts) ts->lsp_[_LSP_LTS_][year][p] = (short)(ph.min_min_duration*dce); // days - if (lsp->olgs) ts->lsp_[_LSP_LGS_][year][p] = (short)(ph.green_duration*dce); // days - if (lsp->ovem) ts->lsp_[_LSP_VEM_][year][p] = (short)(ph.early_min_val); // index value - if (lsp->ovss) ts->lsp_[_LSP_VSS_][year][p] = (short)(ph.start_green_val); // index value - if (lsp->ovri) ts->lsp_[_LSP_VRI_][year][p] = (short)(ph.early_flex_val); // index value - if (lsp->ovps) ts->lsp_[_LSP_VPS_][year][p] = (short)(ph.peak_val); // index value - if (lsp->ovfi) ts->lsp_[_LSP_VFI_][year][p] = (short)(ph.late_flex_val); // index value - if (lsp->oves) ts->lsp_[_LSP_VES_][year][p] = (short)(ph.end_green_val); // index value - if (lsp->ovlm) ts->lsp_[_LSP_VLM_][year][p] = (short)(ph.late_min_val); // index value - if (lsp->ovbl) ts->lsp_[_LSP_VBL_][year][p] = (short)(ph.latent_val); // index value - if (lsp->ovsa) ts->lsp_[_LSP_VSA_][year][p] = (short)(ph.amplitude); // 
index value - if (lsp->oist) ts->lsp_[_LSP_IST_][year][p] = (short)(ph.min_min_integral*dce*0.001); // days * index value * 10 - if (lsp->oibl) ts->lsp_[_LSP_IBL_][year][p] = (short)(ph.latent_integral*dce*0.001); // days * index value * 10 - if (lsp->oibt) ts->lsp_[_LSP_IBT_][year][p] = (short)(ph.total_integral*dce*0.001); // days * index value * 10 - if (lsp->oigs) ts->lsp_[_LSP_IGS_][year][p] = (short)(ph.green_integral*dce*0.001); // days * index value * 10 - if (lsp->orar) ts->lsp_[_LSP_RAR_][year][p] = (short)(ph.greenup_rate/dce); // index value / days - if (lsp->oraf) ts->lsp_[_LSP_RAF_][year][p] = (short)(ph.senescence_rate/dce); // index value / days - if (lsp->ormr) ts->lsp_[_LSP_RMR_][year][p] = (short)(ph.early_flex_rate/dce); // index value / days - if (lsp->ormf) ts->lsp_[_LSP_RMF_][year][p] = (short)(ph.late_flex_rate/dce); // index value / days + if (lsp->use[_LSP_DEM_]) ts->lsp_[_LSP_DEM_][year][p] = (short)(ph.doy_early_min*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DSS_]) ts->lsp_[_LSP_DSS_][year][p] = (short)(ph.doy_start_green*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DRI_]) ts->lsp_[_LSP_DRI_][year][p] = (short)(ph.doy_early_flex*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DPS_]) ts->lsp_[_LSP_DPS_][year][p] = (short)(ph.doy_peak*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DFI_]) ts->lsp_[_LSP_DFI_][year][p] = (short)(ph.doy_late_flex*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DES_]) ts->lsp_[_LSP_DES_][year][p] = (short)(ph.doy_end_green*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_DLM_]) ts->lsp_[_LSP_DLM_][year][p] = (short)(ph.doy_late_min*dce+ce0); // days since 1st LSP year + if (lsp->use[_LSP_LTS_]) ts->lsp_[_LSP_LTS_][year][p] = (short)(ph.min_min_duration*dce); // days + if (lsp->use[_LSP_LGS_]) ts->lsp_[_LSP_LGS_][year][p] = (short)(ph.green_duration*dce); // days + if (lsp->use[_LSP_VEM_]) ts->lsp_[_LSP_VEM_][year][p] = (short)(ph.early_min_val); // 
index value + if (lsp->use[_LSP_VSS_]) ts->lsp_[_LSP_VSS_][year][p] = (short)(ph.start_green_val); // index value + if (lsp->use[_LSP_VRI_]) ts->lsp_[_LSP_VRI_][year][p] = (short)(ph.early_flex_val); // index value + if (lsp->use[_LSP_VPS_]) ts->lsp_[_LSP_VPS_][year][p] = (short)(ph.peak_val); // index value + if (lsp->use[_LSP_VFI_]) ts->lsp_[_LSP_VFI_][year][p] = (short)(ph.late_flex_val); // index value + if (lsp->use[_LSP_VES_]) ts->lsp_[_LSP_VES_][year][p] = (short)(ph.end_green_val); // index value + if (lsp->use[_LSP_VLM_]) ts->lsp_[_LSP_VLM_][year][p] = (short)(ph.late_min_val); // index value + if (lsp->use[_LSP_VBL_]) ts->lsp_[_LSP_VBL_][year][p] = (short)(ph.latent_val); // index value + if (lsp->use[_LSP_VSA_]) ts->lsp_[_LSP_VSA_][year][p] = (short)(ph.amplitude); // index value + if (lsp->use[_LSP_IST_]) ts->lsp_[_LSP_IST_][year][p] = (short)(ph.min_min_integral*dce*0.001); // days * index value * 10 + if (lsp->use[_LSP_IBL_]) ts->lsp_[_LSP_IBL_][year][p] = (short)(ph.latent_integral*dce*0.001); // days * index value * 10 + if (lsp->use[_LSP_IBT_]) ts->lsp_[_LSP_IBT_][year][p] = (short)(ph.total_integral*dce*0.001); // days * index value * 10 + if (lsp->use[_LSP_IGS_]) ts->lsp_[_LSP_IGS_][year][p] = (short)(ph.green_integral*dce*0.001); // days * index value * 10 + if (lsp->use[_LSP_RAR_]) ts->lsp_[_LSP_RAR_][year][p] = (short)(ph.greenup_rate/dce); // index value / days + if (lsp->use[_LSP_RAF_]) ts->lsp_[_LSP_RAF_][year][p] = (short)(ph.senescence_rate/dce); // index value / days + if (lsp->use[_LSP_RMR_]) ts->lsp_[_LSP_RMR_][year][p] = (short)(ph.early_flex_rate/dce); // index value / days + if (lsp->use[_LSP_RMF_]) ts->lsp_[_LSP_RMF_][year][p] = (short)(ph.late_flex_rate/dce); // index value / days } } diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index b07a1090..09004c5c 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -27,38 +27,62 @@ This file contains functions for polarmetrics #include 
"polar-hl.h" - -enum { _RAD_, _VAL_, _CUM_, _YEAR_, _DOY_, _CE_, _SEASON_, _PCX_, _PCY_, _COORD_LEN_ }; - -void polar_coords(float r, float v, float yr, float polar_array[_COORD_LEN_]); -void polar_vector(float x, float y, float yr, float doy_theta, float polar_array[_COORD_LEN_]); -void identify_seasons(float **polar, int ni, int istep, float doy_theta); -void accumulate_seasons(float **polar, int ni); +// should rather be a struct.. +//enum { _RAD_, _VAL_, _CUM_, _YEAR_, _DOY_, _CE_, _SEASON_, _PCX_, _PCY_, _COORD_LEN_ }; +typedef struct { + float rad; + float val; + float cum; + float pcx; + float pcy; + int doy; + int year; + int season; + int ce; +} polar_t; + +void print_polar(polar_t *polar); +void polar_coords(float r, float v, float yr, polar_t *polar); +void polar_vector(float x, float y, polar_t *polar); +void ce_from_polar_vector(float yr, polar_t *theta, polar_t *polar); +void identify_regular_seasons(polar_t *polar, int ni, int istep, polar_t *theta); +polar_t *identify_variable_seasons(polar_t *polar, int ni, int istep, par_pol_t *pol, polar_t *theta, bool print); +void accumulate_seasons(polar_t *polar, int ni); int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min, int year_max, par_tsi_t *tsi, par_pol_t *pol); +void print_polar(polar_t *polar){ + + + printf("polar coordinate:\n"); + printf(" season: %d, year: %d, ce: %d, doy: %d\n", + polar->season, polar->year, polar->ce, polar->doy); + printf(" rad: %.2f, val: %7.2f, , x: %7.2f, y: %7.2f, cum: %7.2f\n", + polar->rad, polar->val, polar->pcx, polar->pcy, polar->cum); + return; +} -void polar_coords(float r, float v, float yr, float polar_array[_COORD_LEN_]){ +void polar_coords(float r, float v, float yr, polar_t *polar){ float doy; doy = r*365.0/(2.0*M_PI); - polar_array[_RAD_] = r; - polar_array[_VAL_] = v; - polar_array[_YEAR_] = yr; - polar_array[_DOY_] = doy; - polar_array[_CE_] = doy2ce(doy, yr); - polar_array[_PCX_] = v*cos(r); - polar_array[_PCY_] = v*sin(r); + 
polar->rad = r; + polar->val = v; + polar->year = yr; + polar->doy = doy; + polar->ce = doy2ce(doy, yr); + polar->pcx = v*cos(r); + polar->pcy = v*sin(r); return; } -void polar_vector(float x, float y, float yr, float doy_theta, float polar_array[_COORD_LEN_]){ +void polar_vector(float x, float y, polar_t *polar){ float r, v, doy; @@ -68,75 +92,208 @@ float r, v, doy; doy = r*365.0/(2.0*M_PI); - polar_array[_RAD_] = r; - polar_array[_VAL_] = v; - polar_array[_YEAR_] = yr; - polar_array[_DOY_] = doy; + polar->rad = r; + polar->val = v; + polar->doy = doy; + + + polar->pcx = x; + polar->pcy = y; + + return; +} + + +void ce_from_polar_vector(float yr, polar_t *theta, polar_t *polar){ - if (doy > doy_theta){ - polar_array[_CE_] = doy2ce(doy, yr); + + polar->year = yr; + + if (polar->doy > theta->doy){ + polar->ce = doy2ce(polar->doy, yr); } else { - polar_array[_CE_] = doy2ce(doy, yr+1); + polar->ce = doy2ce(polar->doy, yr+1); } - polar_array[_PCX_] = x; - polar_array[_PCY_] = y; - return; } -void identify_seasons(float **polar, int ni, int istep, float doy_theta){ +void identify_regular_seasons(polar_t *polar, int ni, int istep, polar_t *theta){ int i, s = -1, y = 0; -float rstep = istep/365.0*2.0*M_PI; -float ce_theta; +//float rstep = istep/365.0*2.0*M_PI; for (i=0; ice = doy2ce(theta->doy, y); - if (polar[i][_CE_] >= ce_theta && - polar[i][_CE_]-ce_theta <= istep){ s++; y++;} + if (polar[i].ce >= theta->ce && + polar[i].ce-theta->ce <= istep){ s++; y++;} - polar[i][_SEASON_] = s; + polar[i].season = s; } return; } -void accumulate_seasons(float **polar, int ni){ + +polar_t *identify_variable_seasons(polar_t *polar, int ni, int istep, par_pol_t *pol, polar_t *theta, bool print){ +int s, i, i0, ii, i1; +float mean_pct[2], n_pct; +polar_t *alpha0 = NULL; // mean vector in pre-structured phenological year +polar_t *theta0 = NULL; // diametric opposite of alpha0 = fine-tuned start of phenological year +float opposite; +int *diff_season = NULL; +int ce_shift, i_shift, 
d_shift, d_seas; + + + alloc((void**)&theta0, pol->ns, sizeof(polar_t)); + if (!pol->adaptive) return theta0; + + alloc((void**)&alpha0, pol->ns, sizeof(polar_t)); + + + // fine-tune the start of phenological year per season + for (s=0, i0=0; sns; s++){ + + memset(mean_pct, 0, sizeof(float)*2); + n_pct = 0; + + for (i=i0; i s){ i0 = i; break; } + + mean_pct[_X_] += polar[i].pcx; + mean_pct[_Y_] += polar[i].pcy; + n_pct++; + + } + + mean_pct[_X_] /= n_pct; + mean_pct[_Y_] /= n_pct; + polar_vector(mean_pct[_X_], mean_pct[_Y_], &alpha0[s]); + + if (alpha0[s].rad < M_PI){ + theta0[s].rad = alpha0[s].rad + M_PI; + } else { + theta0[s].rad = alpha0[s].rad - M_PI; + } + opposite = alpha0[s].rad - M_PI; // opposite with sign + + theta0[s].doy = (theta0[s].rad*365.0/(2.0*M_PI)); + + if (opposite < 0 && alpha0[s].rad - theta->rad >= 0){ + theta0[s].year = s-1; + theta0[s].ce = doy2ce(theta0[s].doy, s-1); + } else if (opposite > 0 && alpha0[s].rad - theta->rad < 0){ + theta0[s].year = s+1; + theta0[s].ce = doy2ce(theta0[s].doy, s+1); + } else { + theta0[s].year = s; + theta0[s].ce = doy2ce(theta0[s].doy, s); + } +if (print) printf("season %d. alpha: %f %d. updated theta: %f %d %d\n", s, alpha0[s].rad, alpha0[s].doy, theta0[s].rad, theta0[s].doy, theta0[s].ce); + } + + + alloc((void**)&diff_season, ni, sizeof(int)); + + s = polar[0].season; + + for (i=1; i= pol->ns) break; + + theta->ce = doy2ce(theta->doy, s); + + ce_shift = theta0[s].ce - theta->ce; + i_shift = floor(abs(ce_shift)/(float)istep); + d_shift = (ce_shift > 0) ? 1 : -1; + d_seas = (ce_shift > 0) ? -1 : 1; + + if (print) printf("season %d: shift is %d days, %d positions in %d direction. 
Adding %d to season\n", + s, ce_shift, i_shift, d_shift, d_seas); + + if (ce_shift > 0){ + + for (ii=0; ii= ni) break; + diff_season[i1] = -1; + } + + } else { + + for (ii=0; ii 0){ + // ii = i+i_shift*d_shift; + // if (ii < 0 || ii >= ni) break; + // diff_season[ii] = d_seas; + // i_shift--; + //} + + } + + } + + + // eventually update season + for (i=0; i=0; i--){ - if (polar[i][_SEASON_] != s){ - sum = polar[i][_CUM_]; - s = polar[i][_SEASON_]; + if (polar[i].season != s){ + sum = polar[i].cum; + s = polar[i].season; } - polar[i][_CUM_] /= sum; + polar[i].cum /= sum; } @@ -144,6 +301,7 @@ float sum; } + /** This function derives phenometrics from an interpolated time series +++ for each year. --- ts: pointer to instantly useable TSA image arrays @@ -167,20 +325,17 @@ bool valid; float ce_left, ce_right, ce; float v_left, v_right; -enum { _START_, _MID_, _END_, _EVENT_LEN_ }; -enum { _LONGTERM_, _THISYEAR_, _EARLY_, _GROW_, _LATE_, _WINDOW_LEN_}; +enum { _LEFT_, _START_, _MID_, _SIJSIJNSI_, _END_, _RIGHT_, _EVENT_LEN_ }; +enum { _ALPHA_, _THETA_, _EARLY_, _GROW_, _LATE_, _WINDOW_LEN_ }; -float theta, doy_theta, ce_theta; -float theta_now, doy_theta_now, ce_theta_now; - -float timing[_EVENT_LEN_][_COORD_LEN_]; -float vector[_WINDOW_LEN_][_COORD_LEN_]; +polar_t timing[_EVENT_LEN_]; +polar_t vector[_WINDOW_LEN_]; float mean_window[_WINDOW_LEN_][2]; int n_window[_WINDOW_LEN_]; double recurrence[2]; -float **polar = NULL; - +polar_t *polar = NULL; +polar_t *theta0 = NULL; valid = false; @@ -197,7 +352,7 @@ float **polar = NULL; { // allocate - alloc_2D((void***)&polar, ni, _COORD_LEN_, sizeof(float)); + alloc((void**)&polar, ni, sizeof(polar_t)); //#pragma omp for @@ -215,7 +370,7 @@ float **polar = NULL; valid = true; - memset(mean_window[_LONGTERM_], 0, 2*sizeof(float)); + memset(mean_window[_ALPHA_], 0, 2*sizeof(float)); /** copy doy/v to working variables @@ -264,129 +419,131 @@ float **polar = NULL; r = ts->d_tsi[i].doy/365.0*2.0*M_PI; -if (p == 375639) 
printf("doy: %d\n", ts->d_tsi[i].doy); -if (p == 375639) printf("r: %f\n", r); -if (p == 375639) printf("v: %f\n", v); -if (v < 0) v = 0; - polar_coords(r, v, ts->d_tsi[i].year-year_min, polar[i]); -if (p == 375639) printf("x: %f\n", polar[i][_PCX_]); -if (p == 375639) printf("y: %f\n", polar[i][_PCY_]); + if (p == 404173) printf("doy: %d\n", ts->d_tsi[i].doy); + if (p == 404173) printf("r: %f\n", r); + if (p == 404173) printf("v: %f\n", v); + if (v < 0) v = 0; + polar_coords(r, v, ts->d_tsi[i].year-year_min, &polar[i]); + if (p == 404173) printf("x: %f\n", polar[i].pcx); + if (p == 404173) printf("y: %f\n", polar[i].pcy); + + if (pol->opct) ts->pcx_[i][p] = (short)polar[i].pcx; + if (pol->opct) ts->pcy_[i][p] = (short)polar[i].pcy; - mean_window[_LONGTERM_][_X_] += polar[i][_PCX_]; - mean_window[_LONGTERM_][_Y_] += polar[i][_PCY_]; + mean_window[_ALPHA_][_X_] += polar[i].pcx; + mean_window[_ALPHA_][_Y_] += polar[i].pcy; } if (!valid) continue; -if (p == 375639) printf("valid pixel.\n"); + if (p == 404173) printf("valid pixel.\n"); // mean of polar coordinates - mean_window[_LONGTERM_][_X_] /= ni; - mean_window[_LONGTERM_][_Y_] /= ni; -if (p == 375639) printf("mean pol x/y: %f %f\n", mean_window[_LONGTERM_][_X_], mean_window[_LONGTERM_][_Y_]); + mean_window[_ALPHA_][_X_] /= ni; + mean_window[_ALPHA_][_Y_] /= ni; + if (p == 404173) printf("mean pol x/y: %f %f\n", mean_window[_ALPHA_][_X_], mean_window[_ALPHA_][_Y_]); // multi-annual average vector - polar_vector(mean_window[_LONGTERM_][_X_], mean_window[_LONGTERM_][_Y_], 0, 0, vector[_LONGTERM_]); + polar_vector(mean_window[_ALPHA_][_X_], mean_window[_ALPHA_][_Y_], &vector[_ALPHA_]); // diametric opposite of average vector = start of phenological year - if (vector[_LONGTERM_][_RAD_] < M_PI){ - theta = vector[_LONGTERM_][_RAD_] + M_PI; + if (vector[_ALPHA_].rad < M_PI){ + vector[_THETA_].rad = vector[_ALPHA_].rad + M_PI; } else { - theta = vector[_LONGTERM_][_RAD_] - M_PI; + vector[_THETA_].rad = vector[_ALPHA_].rad 
- M_PI; } - doy_theta = (theta*365.0/(2.0*M_PI)); - -if (p == 375639) printf("avg: %f %f %f\n", vector[_LONGTERM_][_RAD_], vector[_LONGTERM_][_DOY_], vector[_LONGTERM_][_VAL_]); -if (p == 375639) printf("theta: %f %f\n", theta, doy_theta); + vector[_THETA_].doy = (vector[_THETA_].rad*365.0/(2.0*M_PI)); + + if (p == 404173) printf("avg: %f %d %f\n", vector[_ALPHA_].rad, vector[_ALPHA_].doy, vector[_ALPHA_].val); + if (p == 404173) printf("theta: %f %d %d\n", vector[_THETA_].rad, vector[_THETA_].doy, vector[_THETA_].ce); + + identify_regular_seasons(polar, ni, tsi->step, &vector[_THETA_]); - identify_seasons(polar, ni, tsi->step, doy_theta); + if (p == 404173){ + theta0 = identify_variable_seasons(polar, ni, tsi->step, pol, &vector[_THETA_], true); + } else { + theta0 = identify_variable_seasons(polar, ni, tsi->step, pol, &vector[_THETA_], false); + } accumulate_seasons(polar, ni); + + if (p == 404173) for (i=0; ins; s++){ - memset(timing, 0, sizeof(float)*_EVENT_LEN_*_COORD_LEN_); + memset(&timing, 0, sizeof(polar_t)*_EVENT_LEN_); memset(mean_window, 0, sizeof(float)*_WINDOW_LEN_*2); memset(n_window, 0, sizeof(float)*_WINDOW_LEN_); memset(recurrence, 0, sizeof(double)*2); - if (doy_theta < 182) y = s; else y = s+1; - ce_theta = doy2ce(doy_theta, s); + if (vector[_THETA_].doy < 182) y = s; else y = s+1; + vector[_THETA_].ce = doy2ce(vector[_THETA_].doy, s); for (i=i0; i s){ i0 = i; break; } + if (polar[i].season < s) continue; + if (polar[i].season > s){ i0 = i; break; } + + // start of phenological year + if (polar[i].cum > 0 && timing[_LEFT_].cum == 0){ + memcpy(&timing[_LEFT_], &polar[i], sizeof(polar_t));} + + // end of phenological year + if (polar[i].cum == 1){ + if (i+1 < ni){ + memcpy(&timing[_RIGHT_], &polar[i+1], sizeof(polar_t)); + } else { + memcpy(&timing[_RIGHT_], &polar[i], sizeof(polar_t)); + } + } // start of growing season - if (polar[i][_CUM_] >= pol->start && timing[_START_][_CUM_] == 0){ - memcpy(timing[_START_], polar[i], 
sizeof(float)*_COORD_LEN_);} + if (polar[i].cum >= pol->start && timing[_START_].cum == 0){ + memcpy(&timing[_START_], &polar[i], sizeof(polar_t));} // mid of growing season - if (polar[i][_CUM_] >= pol->mid && timing[_MID_][_CUM_] == 0){ - memcpy(timing[_MID_], polar[i], sizeof(float)*_COORD_LEN_);} + if (polar[i].cum >= pol->mid && timing[_MID_].cum == 0){ + memcpy(&timing[_MID_], &polar[i], sizeof(polar_t));} // end of growing season - if (polar[i][_CUM_] >= pol->end && timing[_END_][_CUM_] == 0){ - memcpy(timing[_END_], polar[i], sizeof(float)*_COORD_LEN_);} + if (polar[i].cum >= pol->end && timing[_END_].cum == 0){ + memcpy(&timing[_END_], &polar[i], sizeof(polar_t));} // mean, sd of val + average vector of growing season - if (polar[i][_CUM_] >= pol->start && - polar[i][_CUM_] < pol->end){ - var_recurrence(polar[i][_VAL_], &recurrence[0], &recurrence[1], ++n_window[_GROW_]); - mean_window[_GROW_][_X_] += polar[i][_PCX_]; - mean_window[_GROW_][_Y_] += polar[i][_PCY_]; + if (polar[i].cum >= pol->start && + polar[i].cum < pol->end){ + var_recurrence(polar[i].val, &recurrence[0], &recurrence[1], ++n_window[_GROW_]); + mean_window[_GROW_][_X_] += polar[i].pcx; + mean_window[_GROW_][_Y_] += polar[i].pcy; } + // max of season + if (polar[i].val > timing[_SIJSIJNSI_].val){ + memcpy(&timing[_SIJSIJNSI_], &polar[i], sizeof(polar_t));} + // average vector of early growing season part - if (polar[i][_CUM_] >= pol->start && - polar[i][_CUM_] < pol->mid){ - mean_window[_EARLY_][_X_] += polar[i][_PCX_]; - mean_window[_EARLY_][_Y_] += polar[i][_PCY_]; + if (polar[i].cum >= pol->start && + polar[i].cum < pol->mid){ + mean_window[_EARLY_][_X_] += polar[i].pcx; + mean_window[_EARLY_][_Y_] += polar[i].pcy; n_window[_EARLY_]++; } // average vector of late growing season part - if (polar[i][_CUM_] >= pol->mid && - polar[i][_CUM_] < pol->end){ - mean_window[_LATE_][_X_] += polar[i][_PCX_]; - mean_window[_LATE_][_Y_] += polar[i][_PCY_]; + if (polar[i].cum >= pol->mid && + 
polar[i].cum < pol->end){ + mean_window[_LATE_][_X_] += polar[i].pcx; + mean_window[_LATE_][_Y_] += polar[i].pcy; n_window[_LATE_]++; } - mean_window[_THISYEAR_][_X_] += polar[i][_PCX_]; - mean_window[_THISYEAR_][_Y_] += polar[i][_PCY_]; - n_window[_THISYEAR_]++; - - } - - - - mean_window[_THISYEAR_][_X_] /= n_window[_THISYEAR_]; - mean_window[_THISYEAR_][_Y_] /= n_window[_THISYEAR_]; - polar_vector(mean_window[_THISYEAR_][_X_], mean_window[_THISYEAR_][_Y_], s, doy_theta, vector[_THISYEAR_]); - - if (vector[_THISYEAR_][_RAD_] < M_PI){ - theta_now = vector[_THISYEAR_][_RAD_] + M_PI; - } else { - theta_now = vector[_THISYEAR_][_RAD_] - M_PI; - } - doy_theta_now = (theta_now*365.0/(2.0*M_PI)); - - if (doy_theta_now > doy_theta){ - ce_theta_now = doy2ce(doy_theta_now, s); - } else { - ce_theta_now = doy2ce(doy_theta_now, s+1); } - mean_window[_GROW_][_X_] /= n_window[_GROW_]; mean_window[_GROW_][_Y_] /= n_window[_GROW_]; mean_window[_EARLY_][_X_] /= n_window[_EARLY_]; @@ -394,9 +551,13 @@ polar[i][_SEASON_], polar[i][_RAD_], polar[i][_VAL_], polar[i][_YEAR_], polar[i] mean_window[_LATE_][_X_] /= n_window[_LATE_]; mean_window[_LATE_][_Y_] /= n_window[_LATE_]; - polar_vector(mean_window[_GROW_][_X_], mean_window[_GROW_][_Y_], s, doy_theta, vector[_GROW_]); - polar_vector(mean_window[_EARLY_][_X_], mean_window[_EARLY_][_Y_], s, doy_theta, vector[_EARLY_]); - polar_vector(mean_window[_LATE_][_X_], mean_window[_LATE_][_Y_], s, doy_theta, vector[_LATE_]); + polar_vector(mean_window[_GROW_][_X_], mean_window[_GROW_][_Y_], &vector[_GROW_]); + polar_vector(mean_window[_EARLY_][_X_], mean_window[_EARLY_][_Y_],&vector[_EARLY_]); + polar_vector(mean_window[_LATE_][_X_], mean_window[_LATE_][_Y_], &vector[_LATE_]); + + ce_from_polar_vector(s, &vector[_THETA_], &vector[_GROW_]); + ce_from_polar_vector(s, &vector[_THETA_], &vector[_EARLY_]); + ce_from_polar_vector(s, &vector[_THETA_], &vector[_LATE_]); @@ -407,52 +568,52 @@ polar[i][_SEASON_], polar[i][_RAD_], polar[i][_VAL_], 
polar[i][_YEAR_], polar[i] -if (p == 375639) printf("season: %d, year %d\n", s, y); -if (p == 375639) printf("mean, sd, and n: %f, %f, %d\n", recurrence[0], standdev(recurrence[1], n_window[_GROW_]), n_window[_GROW_]); - - - - /** copy POL if all OK **/ - //if (valid){ - //if (pol->odem) ts->pol_[_POL_DEM_][y][p] = (short)0; - if (pol->odss) ts->pol_[_POL_DSS_][y][p] = (short)timing[_START_][_CE_]; - if (pol->odms) ts->pol_[_POL_DMS_][y][p] = (short)timing[_MID_][_CE_]; - if (pol->odes) ts->pol_[_POL_DES_][y][p] = (short)timing[_END_][_CE_]; - if (pol->odev) ts->pol_[_POL_DEV_][y][p] = (short)vector[_EARLY_][_CE_]; - if (pol->odav) ts->pol_[_POL_DAV_][y][p] = (short)vector[_GROW_][_CE_]; - if (pol->odlv) ts->pol_[_POL_DLV_][y][p] = (short)vector[_LATE_][_CE_]; - //if (pol->odlm) ts->pol_[_POL_DLM_][y][p] = (short)0; - if (pol->olgs) ts->pol_[_POL_LGS_][y][p] = (short)(timing[_END_][_CE_] - timing[_START_][_CE_]); - if (pol->olbv) ts->pol_[_POL_LBV_][y][p] = (short)(vector[_LATE_][_CE_] - vector[_EARLY_][_CE_]); - //if (pol->ovem) ts->pol_[_POL_VEM_][y][p] = (short)0; - if (pol->ovss) ts->pol_[_POL_VSS_][y][p] = (short)timing[_START_][_VAL_]; - if (pol->ovms) ts->pol_[_POL_VMS_][y][p] = (short)timing[_MID_][_VAL_]; - if (pol->oves) ts->pol_[_POL_VES_][y][p] = (short)timing[_END_][_VAL_]; - if (pol->ovev) ts->pol_[_POL_VEV_][y][p] = (short)vector[_EARLY_][_VAL_]; - if (pol->ovav) ts->pol_[_POL_VAV_][y][p] = (short)vector[_GROW_][_VAL_]; - if (pol->ovlv) ts->pol_[_POL_VLV_][y][p] = (short)vector[_LATE_][_VAL_]; - //if (pol->ovlm) ts->pol_[_POL_VLM_][y][p] = (short)0; - //if (pol->ovbl) ts->pol_[_POL_VBL_][y][p] = (short)0; - if (pol->ovga) ts->pol_[_POL_VGA_][y][p] = (short)recurrence[0]; - if (pol->ovgv) ts->pol_[_POL_VGV_][y][p] = (short)standdev(recurrence[1], n_window[_GROW_]); - //if (pol->odpy) ts->pol_[_POL_DPY_][y][p] = (short)(ce_theta_now - ce_theta); - if (pol->odpy) ts->pol_[_POL_DPY_][y][p] = (short)(ce_theta); - //if (pol->oist) ts->pol_[_POL_IST_][y][p] 
= (short)0; - //if (pol->oibl) ts->pol_[_POL_IBL_][y][p] = (short)0; - //if (pol->oibt) ts->pol_[_POL_IBT_][y][p] = (short)0; - //if (pol->oigs) ts->pol_[_POL_IGS_][y][p] = (short)0; - //if (pol->orar) ts->pol_[_POL_RAR_][y][p] = (short)0; - //if (pol->oraf) ts->pol_[_POL_RAF_][y][p] = (short)0; - //if (pol->ormr) ts->pol_[_POL_RMR_][y][p] = (short)0; - //if (pol->ormf) ts->pol_[_POL_RMF_][y][p] = (short)0; - //} - + if (p == 404173) printf("season: %d, year %d\n", s, y); + if (p == 404173) printf("mean, sd, and n: %f, %f, %d\n", recurrence[0], standdev(recurrence[1], n_window[_GROW_]), n_window[_GROW_]); + + // date parameters + if (pol->use[_POL_DEM_]) ts->pol_[_POL_DEM_][y][p] = (short)timing[_LEFT_].ce; + if (pol->use[_POL_DSS_]) ts->pol_[_POL_DSS_][y][p] = (short)timing[_START_].ce; + if (pol->use[_POL_DMS_]) ts->pol_[_POL_DMS_][y][p] = (short)timing[_MID_].ce; + if (pol->use[_POL_DPS_]) ts->pol_[_POL_DPS_][y][p] = (short)timing[_SIJSIJNSI_].ce; + if (pol->use[_POL_DES_]) ts->pol_[_POL_DES_][y][p] = (short)timing[_END_].ce; + if (pol->use[_POL_DLM_]) ts->pol_[_POL_DLM_][y][p] = (short)timing[_RIGHT_].ce; + if (pol->use[_POL_DEV_]) ts->pol_[_POL_DEV_][y][p] = (short)vector[_EARLY_].ce; + if (pol->use[_POL_DAV_]) ts->pol_[_POL_DAV_][y][p] = (short)vector[_GROW_].ce; + if (pol->use[_POL_DLV_]) ts->pol_[_POL_DLV_][y][p] = (short)vector[_LATE_].ce; + if (pol->use[_POL_DPY_]) ts->pol_[_POL_DPY_][y][p] = (short)(vector[_THETA_].ce); + if (pol->use[_POL_DPV_]) ts->pol_[_POL_DPV_][y][p] = (short)(theta0[s].ce - vector[_THETA_].ce); + + // length paramaters + if (pol->use[_POL_LGS_]) ts->pol_[_POL_LGS_][y][p] = (short)(timing[_END_].ce - timing[_START_].ce); + if (pol->use[_POL_LGV_]) ts->pol_[_POL_LGV_][y][p] = (short)(vector[_LATE_].ce - vector[_EARLY_].ce); + if (pol->use[_POL_LTS_]) ts->pol_[_POL_LTS_][y][p] = (short)(timing[_RIGHT_].ce - timing[_LEFT_].ce); + + // value parameters + if (pol->use[_POL_VEM_]) ts->pol_[_POL_VEM_][y][p] = (short)timing[_LEFT_].val; + 
if (pol->use[_POL_VSS_]) ts->pol_[_POL_VSS_][y][p] = (short)timing[_START_].val; + if (pol->use[_POL_VMS_]) ts->pol_[_POL_VMS_][y][p] = (short)timing[_MID_].val; + if (pol->use[_POL_VPS_]) ts->pol_[_POL_VPS_][y][p] = (short)timing[_SIJSIJNSI_].val; + if (pol->use[_POL_VLM_]) ts->pol_[_POL_VLM_][y][p] = (short)timing[_RIGHT_].val; + if (pol->use[_POL_VES_]) ts->pol_[_POL_VES_][y][p] = (short)timing[_END_].val; + if (pol->use[_POL_VEV_]) ts->pol_[_POL_VEV_][y][p] = (short)vector[_EARLY_].val; + if (pol->use[_POL_VAV_]) ts->pol_[_POL_VAV_][y][p] = (short)vector[_GROW_].val; + if (pol->use[_POL_VLV_]) ts->pol_[_POL_VLV_][y][p] = (short)vector[_LATE_].val; + if (pol->use[_POL_VSA_]) ts->pol_[_POL_VSA_][y][p] = (short)(timing[_SIJSIJNSI_].val - + (timing[_START_].val+timing[_END_].val)/2.0); + if (pol->use[_POL_VPA_]) ts->pol_[_POL_VPA_][y][p] = (short)(timing[_SIJSIJNSI_].val - timing[_MID_].val); + if (pol->use[_POL_VBL_]) ts->pol_[_POL_VBL_][y][p] = (short)((timing[_LEFT_].val+timing[_RIGHT_].val)/2.0); + if (pol->use[_POL_VGA_]) ts->pol_[_POL_VGA_][y][p] = (short)recurrence[0]; + if (pol->use[_POL_VGV_]) ts->pol_[_POL_VGV_][y][p] = (short)standdev(recurrence[1], n_window[_GROW_]); + + } + if (theta0 != NULL) free((void*)theta0); theta0 = NULL; } - free_2D((void**)polar, ni); + free((void*)polar); } diff --git a/src/higher-level/trend-hl.c b/src/higher-level/trend-hl.c index 4d95f58b..8e7ac613 100755 --- a/src/higher-level/trend-hl.c +++ b/src/higher-level/trend-hl.c @@ -425,7 +425,7 @@ double mae, rmse; +++ Return: SUCCESS/FAILURE +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ int tsa_trend(tsa_t *ts, small *mask_, int nc, short nodata, par_hl_t *phl){ -int l, nlsp = 26; +int l; bool in_ce = false; @@ -437,11 +437,18 @@ bool in_ce = false; if (phl->tsa.lsp.otrd){ - for (l=0; llsp_[l], ts->d_lsp, mask_, nc, phl->tsa.lsp.ny, ts->trp_[l], nodata, _FLD_YEAR_, in_ce, &phl->tsa.trd); } } + + if (phl->tsa.pol.otrd){ + for (l=0; l<_POL_LENGTH_; 
l++){ + if (l < 9) in_ce = true; else in_ce = false; + trend(ts->pol_[l], ts->d_pol, mask_, nc, phl->tsa.pol.ny, ts->tro_[l], nodata, _FLD_YEAR_, in_ce, &phl->tsa.trd); + } + } return SUCCESS; } @@ -456,7 +463,7 @@ bool in_ce = false; +++ Return: SUCCESS/FAILURE +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ int tsa_cat(tsa_t *ts, small *mask_, int nc, short nodata, par_hl_t *phl){ -int l, nlsp = 26; +int l; bool in_ce = false; @@ -471,11 +478,18 @@ bool in_ce = false; if (phl->tsa.lsp.ocat){ - for (l=0; llsp_[l], ts->d_lsp, mask_, nc, phl->tsa.lsp.ny, ts->cap_[l], nodata, _FLD_YEAR_, in_ce, &phl->tsa.trd); } } + + if (phl->tsa.pol.ocat){ + for (l=0; l<_POL_LENGTH_; l++){ + if (l < 9) in_ce = true; else in_ce = false; + cat(ts->pol_[l], ts->d_pol, mask_, nc, phl->tsa.pol.ny, ts->cao_[l], nodata, _FLD_YEAR_, in_ce, &phl->tsa.trd); + } + } return SUCCESS; } diff --git a/src/higher-level/tsa-hl.c b/src/higher-level/tsa-hl.c index db300409..3532a0f7 100755 --- a/src/higher-level/tsa-hl.c +++ b/src/higher-level/tsa-hl.c @@ -31,6 +31,319 @@ This file contains functions for Level 3 processing stack_t *compile_tsa_stack(stack_t *ard, int nb, int idx, int write, char *prodname, par_hl_t *phl); stack_t **compile_tsa(ard_t *ard, tsa_t *tsa, par_hl_t *phl, cube_t *cube, int nt, int ni, int idx, int *nproduct); +typedef struct { + int prodlen; + char prodname[NPOW_03]; + int prodtype; + int enable; + int write; + short ***ptr; +} stack_compile_info_t; + +enum { _full_, _stats_, _inter_, _year_, _quarter_, _month_, _week_, _day_, _lsp_, _pol_, _trd_, _cat_ }; + + +int info_tss(stack_compile_info_t *info, int o, int nt, tsa_t *ts, par_hl_t *phl){ + + info[o].prodlen = nt; + strncpy(info[o].prodname, "TSS", 3); info[o].prodname[3] = '\0'; + info[o].prodtype = _full_; + info[o].enable = true; + info[o].write = phl->tsa.otss; + info[o].ptr = &ts->tss_; + + return o+1; +} + +int info_tsi(stack_compile_info_t *info, int o, int ni, tsa_t *ts, par_hl_t *phl){ 
+ + + info[o].prodlen = ni; + strncpy(info[o].prodname, "TSI", 3); info[o].prodname[3] = '\0'; + info[o].prodtype = _inter_; + info[o].enable = true; + info[o].write = phl->tsa.tsi.otsi; + info[o].ptr = &ts->tsi_; + + return o+1; +} + +int info_stm(stack_compile_info_t *info, int o, tsa_t *ts, par_hl_t *phl){ + + + info[o].prodlen = phl->tsa.stm.sta.nmetrics; + strncpy(info[o].prodname, "STM", 3); info[o].prodname[3] = '\0'; + info[o].prodtype = _stats_; + info[o].enable = phl->tsa.stm.ostm; + info[o].write = phl->tsa.stm.ostm; + info[o].ptr = &ts->stm_; + + return o+1; +} + +int info_rms(stack_compile_info_t *info, int o, int nt, tsa_t *ts, par_hl_t *phl){ + + + info[o].prodlen = nt; + strncpy(info[o].prodname, "RMS", 3); info[o].prodname[3] = '\0'; + info[o].prodtype = _full_; + info[o].enable = phl->tsa.sma.orms; + info[o].write = phl->tsa.sma.orms; + info[o].ptr = &ts->rms_; + + return o+1; +} + +int info_spl(stack_compile_info_t *info, int o, int ni, tsa_t *ts, par_hl_t *phl){ + + + info[o].prodlen = ni; + strncpy(info[o].prodname, "SPL", 3); info[o].prodname[3] = '\0'; + info[o].prodtype = _inter_; + info[o].enable = phl->tsa.lsp.ospl; + info[o].write = phl->tsa.lsp.ospl; + info[o].ptr = &ts->spl_; + + return o+1; +} + +int info_fby(stack_compile_info_t *info, int o, tsa_t *ts, par_hl_t *phl){ +int p = o; + + + info[p].prodlen = phl->ny; + strncpy(info[p].prodname, "FBY", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _year_; + info[p].enable = phl->tsa.fld.ofby+phl->tsa.fld.otry; + info[p].write = phl->tsa.fld.ofby; + info[p++].ptr = &ts->fby_; + + info[p].prodlen = _TRD_LENGTH_; + strncpy(info[p].prodname, "TRY", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _trd_; + info[p].enable = phl->tsa.fld.otry; + info[p].write = phl->tsa.fld.otry; + info[p++].ptr = &ts->try_; + + info[p].prodlen = _CAT_LENGTH_; + strncpy(info[p].prodname, "CAY", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _cat_; + info[p].enable = phl->tsa.fld.ocay; + 
info[p].write = phl->tsa.fld.ocay; + info[p++].ptr = &ts->cay_; + + return p; +} + +int info_fbq(stack_compile_info_t *info, int o, tsa_t *ts, par_hl_t *phl){ +int p = o; + + + info[p].prodlen = phl->nq; + strncpy(info[p].prodname, "FBQ", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _quarter_; + info[p].enable = phl->tsa.fld.ofbq+phl->tsa.fld.otrq; + info[p].write = phl->tsa.fld.ofbq; + info[p++].ptr = &ts->fbq_; + + info[p].prodlen = _TRD_LENGTH_; + strncpy(info[p].prodname, "TRQ", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _trd_; + info[p].enable = phl->tsa.fld.otrq; + info[p].write = phl->tsa.fld.otrq; + info[p++].ptr = &ts->trq_; + + info[p].prodlen = _CAT_LENGTH_; + strncpy(info[p].prodname, "CAQ", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _cat_; + info[p].enable = phl->tsa.fld.ocaq; + info[p].write = phl->tsa.fld.ocaq; + info[p++].ptr = &ts->caq_; + + return p; +} + +int info_fbm(stack_compile_info_t *info, int o, tsa_t *ts, par_hl_t *phl){ +int p = o; + + + info[p].prodlen = phl->nm; + strncpy(info[p].prodname, "FBM", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _month_; + info[p].enable = phl->tsa.fld.ofbm+phl->tsa.fld.otrm; + info[p].write = phl->tsa.fld.ofbm; + info[p++].ptr = &ts->fbm_; + + info[p].prodlen = _TRD_LENGTH_; + strncpy(info[p].prodname, "TRM", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _trd_; + info[p].enable = phl->tsa.fld.otrm; + info[p].write = phl->tsa.fld.otrm; + info[p++].ptr = &ts->trm_; + + info[p].prodlen = _CAT_LENGTH_; + strncpy(info[p].prodname, "CAM", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _cat_; + info[p].enable = phl->tsa.fld.ocam; + info[p].write = phl->tsa.fld.ocam; + info[p++].ptr = &ts->cam_; + + return p; +} + +int info_fbw(stack_compile_info_t *info, int o, tsa_t *ts, par_hl_t *phl){ +int p = o; + + + info[p].prodlen = phl->nw; + strncpy(info[p].prodname, "FBW", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _week_; + info[p].enable = 
phl->tsa.fld.ofbw+phl->tsa.fld.otrw; + info[p].write = phl->tsa.fld.ofbw; + info[p++].ptr = &ts->fbw_; + + info[p].prodlen = _TRD_LENGTH_; + strncpy(info[p].prodname, "TRW", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _trd_; + info[p].enable = phl->tsa.fld.otrw; + info[p].write = phl->tsa.fld.otrw; + info[p++].ptr = &ts->trw_; + + info[p].prodlen = _CAT_LENGTH_; + strncpy(info[p].prodname, "CAW", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _cat_; + info[p].enable = phl->tsa.fld.ocaw; + info[p].write = phl->tsa.fld.ocaw; + info[p++].ptr = &ts->caw_; + + return p; +} + +int info_fbd(stack_compile_info_t *info, int o, tsa_t *ts, par_hl_t *phl){ +int p = o; + + + info[p].prodlen = phl->nd; + strncpy(info[p].prodname, "FBD", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _day_; + info[p].enable = phl->tsa.fld.ofbd+phl->tsa.fld.otrd; + info[p].write = phl->tsa.fld.ofbd; + info[p++].ptr = &ts->fbd_; + + info[p].prodlen = _TRD_LENGTH_; + strncpy(info[p].prodname, "TRD", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _trd_; + info[p].enable = phl->tsa.fld.otrd; + info[p].write = phl->tsa.fld.otrd; + info[p++].ptr = &ts->trd_; + + info[p].prodlen = _CAT_LENGTH_; + strncpy(info[p].prodname, "CAD", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _cat_; + info[p].enable = phl->tsa.fld.ocad; + info[p].write = phl->tsa.fld.ocad; + info[p++].ptr = &ts->cad_; + + return p; +} + +int info_lsp(stack_compile_info_t *info, int o, tsa_t *ts, par_hl_t *phl){ +int l, p = o; +int nchar; + + + for (l=0; l<_LSP_LENGTH_; l++, p++){ + info[p].prodlen = phl->tsa.lsp.ny; + nchar = snprintf(info[p].prodname, NPOW_03, "%s-LSP", _TAGGED_ENUM_LSP_[l].tag); + if (nchar < 0 || nchar >= NPOW_10){ + printf("Buffer Overflow in assembling filename\n"); exit(1);} + info[p].prodtype = _lsp_; + info[p].enable = phl->tsa.lsp.use[l]*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat); + info[p].write = phl->tsa.lsp.use[l]*phl->tsa.lsp.olsp; + info[p].ptr = &ts->lsp_[l]; + 
} + + for (l=0; l<_LSP_LENGTH_; l++, p++){ + info[p].prodlen =_TRD_LENGTH_; + nchar = snprintf(info[p].prodname, NPOW_03, "%s-TRP", _TAGGED_ENUM_LSP_[l].tag); + if (nchar < 0 || nchar >= NPOW_10){ + printf("Buffer Overflow in assembling filename\n"); exit(1);} + info[p].prodtype = _trd_; + info[p].enable = phl->tsa.lsp.use[l]*phl->tsa.lsp.otrd; + info[p].write = phl->tsa.lsp.use[l]*phl->tsa.lsp.otrd; + info[p].ptr = &ts->trp_[l]; + } + + for (l=0; l<_LSP_LENGTH_; l++, p++){ + info[p].prodlen = _CAT_LENGTH_; + nchar = snprintf(info[p].prodname, NPOW_03, "%s-CAP", _TAGGED_ENUM_LSP_[l].tag); + if (nchar < 0 || nchar >= NPOW_10){ + printf("Buffer Overflow in assembling filename\n"); exit(1);} + info[p].prodtype = _cat_; + info[p].enable = phl->tsa.lsp.use[l]*phl->tsa.lsp.ocat; + info[p].write = phl->tsa.lsp.use[l]*phl->tsa.lsp.ocat; + info[p].ptr = &ts->cap_[l]; + } + + return p; +} + +int info_pol(stack_compile_info_t *info, int o, int ni, tsa_t *ts, par_hl_t *phl){ +int l, p = o; +int nchar; + + info[p].prodlen = ni; + strncpy(info[p].prodname, "PCX", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _inter_; + info[p].enable = phl->tsa.pol.opct; + info[p].write = phl->tsa.pol.opct; + info[p++].ptr = &ts->pcx_; + + info[p].prodlen = ni; + strncpy(info[p].prodname, "PCY", 3); info[p].prodname[3] = '\0'; + info[p].prodtype = _inter_; + info[p].enable = phl->tsa.pol.opct; + info[p].write = phl->tsa.pol.opct; + info[p++].ptr = &ts->pcy_; + + for (l=0; l<_POL_LENGTH_; l++, p++){ + info[p].prodlen = phl->tsa.pol.ny; + nchar = snprintf(info[p].prodname, NPOW_03, "%s-POL", _TAGGED_ENUM_POL_[l].tag); + if (nchar < 0 || nchar >= NPOW_10){ + printf("Buffer Overflow in assembling filename\n"); exit(1);} + info[p].prodtype = _pol_; + info[p].enable = phl->tsa.pol.use[l]*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat); + info[p].write = phl->tsa.pol.use[l]*phl->tsa.pol.opol; + info[p].ptr = &ts->pol_[l]; + } + + for (l=0; l<_POL_LENGTH_; l++, p++){ + info[p].prodlen 
=_TRD_LENGTH_; + nchar = snprintf(info[p].prodname, NPOW_03, "%s-TRO", _TAGGED_ENUM_POL_[l].tag); + if (nchar < 0 || nchar >= NPOW_10){ + printf("Buffer Overflow in assembling filename\n"); exit(1);} + info[p].prodtype = _trd_; + info[p].enable = phl->tsa.pol.use[l]*phl->tsa.pol.otrd; + info[p].write = phl->tsa.pol.use[l]*phl->tsa.pol.otrd; + info[p].ptr = &ts->tro_[l]; + } + + for (l=0; l<_POL_LENGTH_; l++, p++){ + info[p].prodlen = _CAT_LENGTH_; + nchar = snprintf(info[p].prodname, NPOW_03, "%s-CAO", _TAGGED_ENUM_POL_[l].tag); + if (nchar < 0 || nchar >= NPOW_10){ + printf("Buffer Overflow in assembling filename\n"); exit(1);} + info[p].prodtype = _cat_; + info[p].enable = phl->tsa.pol.use[l]*phl->tsa.pol.ocat; + info[p].write = phl->tsa.pol.use[l]*phl->tsa.pol.ocat; + info[p].ptr = &ts->cao_[l]; + } + + return p; +} + + + /** This function compiles the stacks, in which TSA results are stored. +++ It also sets metadata and sets pointers to instantly useable image @@ -53,178 +366,40 @@ char fdate[NPOW_10]; char sensor[NPOW_04]; char domain[NPOW_10]; int nchar; -int o, nprod = 149; +int o = 0, nprod; int error = 0; -enum { _full_, _stats_, _inter_, _year_, _quarter_, _month_, _week_, _day_, _lsp_, _pol_, _trd_, _cat_ }; -int prodlen[12] = { nt, phl->tsa.stm.sta.nmetrics, ni, - phl->ny, phl->nq, phl->nm, phl->nw, phl->nd, - phl->tsa.lsp.ny, phl->tsa.pol.ny, _TRD_LENGTH_, _CAT_LENGTH_ }; -char prodname[149][NPOW_03] = { - "TSS", "RMS", "STM", "TSI", "SPL", - "FBY", "FBQ", "FBM", "FBW", "FBD", - "DEM-LSP", "DSS-LSP", "DRI-LSP", "DPS-LSP", "DFI-LSP", "DES-LSP", - "DLM-LSP", "LTS-LSP", "LGS-LSP", "VEM-LSP", "VSS-LSP", "VRI-LSP", - "VPS-LSP", "VFI-LSP", "VES-LSP", "VLM-LSP", "VBL-LSP", "VSA-LSP", - "IST-LSP", "IBL-LSP", "IBT-LSP", "IGS-LSP", "RAR-LSP", "RAF-LSP", - "RMR-LSP", "RMF-LSP", - "DSS-POL", "DMS-POL", "DES-POL", "DEV-POL", "DAV-POL", "DLV-POL", - "LGS-POL", "LBV-POL", "VSS-POL", "VMS-POL", "VES-POL", "VEV-POL", - "VAV-POL", "VLV-POL", "VGA-POL", "VGV-POL", 
"DPY-POL", - "DEM-TRP", "DSS-TRP", "DRI-TRP", "DPS-TRP", "DFI-TRP", "DES-TRP", - "DLM-TRP", "LTS-TRP", "LGS-TRP", "VEM-TRP", "VSS-TRP", "VRI-TRP", - "VPS-TRP", "VFI-TRP", "VES-TRP", "VLM-TRP", "VBL-TRP", "VSA-TRP", - "IST-TRP", "IBL-TRP", "IBT-TRP", "IGS-TRP", "RAR-TRP", "RAF-TRP", - "RMR-TRP", "RMF-TRP", - "DSS-TRO", "DMS-TRO", "DES-TRO", "DEV-TRO", "DAV-TRO", "DLV-TRO", - "LGS-TRO", "LBV-TRO", "VSS-TRO", "VMS-TRO", "VES-TRO", "VEV-TRO", - "VAV-TRO", "VLV-TRO", "VGA-TRO", "VGV-TRO", "DPY-TRO", - "TRY", "TRQ", "TRM", "TRW", "TRD", - "DEM-CAP", "DSS-CAP", "DRI-CAP", "DPS-CAP", "DFI-CAP", "DES-CAP", - "DLM-CAP", "LTS-CAP", "LGS-CAP", "VEM-CAP", "VSS-CAP", "VRI-CAP", - "VPS-CAP", "VFI-CAP", "VES-CAP", "VLM-CAP", "VBL-CAP", "VSA-CAP", - "IST-CAP", "IBL-CAP", "IBT-CAP", "IGS-CAP", "RAR-CAP", "RAF-CAP", - "RMR-CAP", "RMF-CAP", - "DSS-CAO", "DMS-CAO", "DES-CAO", "DEV-CAO", "DAV-CAO", "DLV-CAO", - "LGS-CAO", "LBV-CAO", "VSS-CAO", "VMS-CAO", "VES-CAO", "VEV-CAO", - "VAV-CAO", "VLV-CAO", "VGA-CAO", "VGV-CAO", "DPY-CAO", - "CAY", "CAQ", "CAM", "CAW", "CAD" }; - -int prodtype[149] = { - _full_, _full_, _stats_, _inter_, _inter_, - _year_, _quarter_, _month_, _week_, _day_, - _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, - _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, - _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, - _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, _lsp_, - _lsp_, _lsp_, - _pol_, _pol_, _pol_, _pol_, _pol_, _pol_, - _pol_, _pol_, _pol_, _pol_, _pol_, _pol_, - _pol_, _pol_, _pol_, _pol_, _pol_, - _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, - _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, - _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, - _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, - _trd_, _trd_, - _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, - _trd_, _trd_, _trd_, _trd_, _trd_, _trd_, - _trd_, _trd_, _trd_, _trd_, _trd_, - _trd_, _trd_, _trd_, _trd_, _trd_, - _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, - _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, - _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, - _cat_, _cat_, 
_cat_, _cat_, _cat_, _cat_, - _cat_, _cat_, - _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, - _cat_, _cat_, _cat_, _cat_, _cat_, _cat_, - _cat_, _cat_, _cat_, _cat_, _cat_, - _cat_, _cat_, _cat_, _cat_, _cat_ }; - -int enable[149] = { - true, phl->tsa.sma.orms, phl->tsa.stm.ostm, true, phl->tsa.lsp.ospl, - phl->tsa.fld.ofby+phl->tsa.fld.otry+phl->tsa.fld.ocay, phl->tsa.fld.ofbq+phl->tsa.fld.otrq+phl->tsa.fld.ocaq, - phl->tsa.fld.ofbm+phl->tsa.fld.otrm+phl->tsa.fld.ocam, phl->tsa.fld.ofbw+phl->tsa.fld.otrw+phl->tsa.fld.ocaw, - phl->tsa.fld.ofbd+phl->tsa.fld.otrd+phl->tsa.fld.ocad, - phl->tsa.lsp.odem*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.odss*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.lsp.odri*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.odps*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.lsp.odfi*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.odes*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.lsp.odlm*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.olts*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.lsp.olgs*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.ovem*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.lsp.ovss*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.ovri*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.lsp.ovps*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.ovfi*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.lsp.oves*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.ovlm*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.lsp.ovbl*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.ovsa*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - 
phl->tsa.lsp.oist*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.oibl*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.lsp.oibt*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.oigs*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.lsp.orar*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.oraf*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.lsp.ormr*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), phl->tsa.lsp.ormf*(phl->tsa.lsp.olsp+phl->tsa.lsp.otrd+phl->tsa.lsp.ocat), - phl->tsa.pol.odss*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.odms*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), - phl->tsa.pol.odes*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.odev*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), - phl->tsa.pol.odav*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.odlv*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), - phl->tsa.pol.olgs*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.olbv*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), - phl->tsa.pol.ovss*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.ovms*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), - phl->tsa.pol.oves*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.ovev*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), - phl->tsa.pol.ovav*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.ovlv*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), - phl->tsa.pol.ovga*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), phl->tsa.pol.ovgv*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), - phl->tsa.pol.odpy*(phl->tsa.pol.opol+phl->tsa.pol.otrd+phl->tsa.pol.ocat), - phl->tsa.lsp.otrd*phl->tsa.lsp.odem, phl->tsa.lsp.otrd*phl->tsa.lsp.odss, 
phl->tsa.lsp.otrd*phl->tsa.lsp.odri, phl->tsa.lsp.otrd*phl->tsa.lsp.odps, phl->tsa.lsp.otrd*phl->tsa.lsp.odfi, phl->tsa.lsp.otrd*phl->tsa.lsp.odes, - phl->tsa.lsp.otrd*phl->tsa.lsp.odlm, phl->tsa.lsp.otrd*phl->tsa.lsp.olts, phl->tsa.lsp.otrd*phl->tsa.lsp.olgs, phl->tsa.lsp.otrd*phl->tsa.lsp.ovem, phl->tsa.lsp.otrd*phl->tsa.lsp.ovss, phl->tsa.lsp.otrd*phl->tsa.lsp.ovri, - phl->tsa.lsp.otrd*phl->tsa.lsp.ovps, phl->tsa.lsp.otrd*phl->tsa.lsp.ovfi, phl->tsa.lsp.otrd*phl->tsa.lsp.oves, phl->tsa.lsp.otrd*phl->tsa.lsp.ovlm, phl->tsa.lsp.otrd*phl->tsa.lsp.ovbl, phl->tsa.lsp.otrd*phl->tsa.lsp.ovsa, - phl->tsa.lsp.otrd*phl->tsa.lsp.oist, phl->tsa.lsp.otrd*phl->tsa.lsp.oibl, phl->tsa.lsp.otrd*phl->tsa.lsp.oibt, phl->tsa.lsp.otrd*phl->tsa.lsp.oigs, phl->tsa.lsp.otrd*phl->tsa.lsp.orar, phl->tsa.lsp.otrd*phl->tsa.lsp.oraf, - phl->tsa.lsp.otrd*phl->tsa.lsp.ormr, phl->tsa.lsp.otrd*phl->tsa.lsp.ormf, - phl->tsa.pol.otrd*phl->tsa.pol.odss, phl->tsa.pol.otrd*phl->tsa.pol.odms, phl->tsa.pol.otrd*phl->tsa.pol.odes, phl->tsa.pol.otrd*phl->tsa.pol.odev, phl->tsa.pol.otrd*phl->tsa.pol.odav, phl->tsa.pol.otrd*phl->tsa.pol.odlv, - phl->tsa.pol.otrd*phl->tsa.pol.olgs, phl->tsa.pol.otrd*phl->tsa.pol.olbv, phl->tsa.pol.otrd*phl->tsa.pol.ovss, phl->tsa.pol.otrd*phl->tsa.pol.ovms, phl->tsa.pol.otrd*phl->tsa.pol.oves, phl->tsa.pol.otrd*phl->tsa.pol.ovev, - phl->tsa.pol.otrd*phl->tsa.pol.ovav, phl->tsa.pol.otrd*phl->tsa.pol.ovlv, phl->tsa.pol.otrd*phl->tsa.pol.ovga, phl->tsa.pol.otrd*phl->tsa.pol.ovgv, phl->tsa.pol.otrd*phl->tsa.pol.odpy, - phl->tsa.fld.otry, phl->tsa.fld.otrq, phl->tsa.fld.otrm, phl->tsa.fld.otrw, phl->tsa.fld.otrd, - phl->tsa.lsp.ocat*phl->tsa.lsp.odem, phl->tsa.lsp.ocat*phl->tsa.lsp.odss, phl->tsa.lsp.ocat*phl->tsa.lsp.odri, phl->tsa.lsp.ocat*phl->tsa.lsp.odps, phl->tsa.lsp.ocat*phl->tsa.lsp.odfi, phl->tsa.lsp.ocat*phl->tsa.lsp.odes, - phl->tsa.lsp.ocat*phl->tsa.lsp.odlm, phl->tsa.lsp.ocat*phl->tsa.lsp.olts, phl->tsa.lsp.ocat*phl->tsa.lsp.olgs, 
phl->tsa.lsp.ocat*phl->tsa.lsp.ovem, phl->tsa.lsp.ocat*phl->tsa.lsp.ovss, phl->tsa.lsp.ocat*phl->tsa.lsp.ovri, - phl->tsa.lsp.ocat*phl->tsa.lsp.ovps, phl->tsa.lsp.ocat*phl->tsa.lsp.ovfi, phl->tsa.lsp.ocat*phl->tsa.lsp.oves, phl->tsa.lsp.ocat*phl->tsa.lsp.ovlm, phl->tsa.lsp.ocat*phl->tsa.lsp.ovbl, phl->tsa.lsp.ocat*phl->tsa.lsp.ovsa, - phl->tsa.lsp.ocat*phl->tsa.lsp.oist, phl->tsa.lsp.ocat*phl->tsa.lsp.oibl, phl->tsa.lsp.ocat*phl->tsa.lsp.oibt, phl->tsa.lsp.ocat*phl->tsa.lsp.oigs, phl->tsa.lsp.ocat*phl->tsa.lsp.orar, phl->tsa.lsp.ocat*phl->tsa.lsp.oraf, - phl->tsa.lsp.ocat*phl->tsa.lsp.ormr, phl->tsa.lsp.ocat*phl->tsa.lsp.ormf, - phl->tsa.pol.ocat*phl->tsa.pol.odss, phl->tsa.pol.ocat*phl->tsa.pol.odms, phl->tsa.pol.ocat*phl->tsa.pol.odes, phl->tsa.pol.ocat*phl->tsa.pol.odev, phl->tsa.pol.ocat*phl->tsa.pol.odav, phl->tsa.pol.ocat*phl->tsa.pol.odlv, - phl->tsa.pol.ocat*phl->tsa.pol.olgs, phl->tsa.pol.ocat*phl->tsa.pol.olbv, phl->tsa.pol.ocat*phl->tsa.pol.ovss, phl->tsa.pol.ocat*phl->tsa.pol.ovms, phl->tsa.pol.ocat*phl->tsa.pol.oves, phl->tsa.pol.ocat*phl->tsa.pol.ovev, - phl->tsa.pol.ocat*phl->tsa.pol.ovav, phl->tsa.pol.ocat*phl->tsa.pol.ovlv, phl->tsa.pol.ocat*phl->tsa.pol.ovga, phl->tsa.pol.ocat*phl->tsa.pol.ovgv, phl->tsa.pol.ocat*phl->tsa.pol.odpy, - phl->tsa.fld.ocay, phl->tsa.fld.ocaq, phl->tsa.fld.ocam, phl->tsa.fld.ocaw, phl->tsa.fld.ocad }; - -int write[149] = { - phl->tsa.otss, phl->tsa.sma.orms, phl->tsa.stm.ostm, phl->tsa.tsi.otsi, phl->tsa.lsp.ospl, - phl->tsa.fld.ofby, phl->tsa.fld.ofbq, phl->tsa.fld.ofbm, phl->tsa.fld.ofbw, phl->tsa.fld.ofbd, - phl->tsa.lsp.olsp*phl->tsa.lsp.odem, phl->tsa.lsp.olsp*phl->tsa.lsp.odss, phl->tsa.lsp.olsp*phl->tsa.lsp.odri, phl->tsa.lsp.olsp*phl->tsa.lsp.odps, phl->tsa.lsp.olsp*phl->tsa.lsp.odfi, phl->tsa.lsp.olsp*phl->tsa.lsp.odes, - phl->tsa.lsp.olsp*phl->tsa.lsp.odlm, phl->tsa.lsp.olsp*phl->tsa.lsp.olts, phl->tsa.lsp.olsp*phl->tsa.lsp.olgs, phl->tsa.lsp.olsp*phl->tsa.lsp.ovem, phl->tsa.lsp.olsp*phl->tsa.lsp.ovss, 
phl->tsa.lsp.olsp*phl->tsa.lsp.ovri, - phl->tsa.lsp.olsp*phl->tsa.lsp.ovps, phl->tsa.lsp.olsp*phl->tsa.lsp.ovfi, phl->tsa.lsp.olsp*phl->tsa.lsp.oves, phl->tsa.lsp.olsp*phl->tsa.lsp.ovlm, phl->tsa.lsp.olsp*phl->tsa.lsp.ovbl, phl->tsa.lsp.olsp*phl->tsa.lsp.ovsa, - phl->tsa.lsp.olsp*phl->tsa.lsp.oist, phl->tsa.lsp.olsp*phl->tsa.lsp.oibl, phl->tsa.lsp.olsp*phl->tsa.lsp.oibt, phl->tsa.lsp.olsp*phl->tsa.lsp.oigs, phl->tsa.lsp.olsp*phl->tsa.lsp.orar, phl->tsa.lsp.olsp*phl->tsa.lsp.oraf, - phl->tsa.lsp.olsp*phl->tsa.lsp.ormr, phl->tsa.lsp.olsp*phl->tsa.lsp.ormf, - phl->tsa.pol.opol*phl->tsa.pol.odss, phl->tsa.pol.opol*phl->tsa.pol.odms, phl->tsa.pol.opol*phl->tsa.pol.odes, phl->tsa.pol.opol*phl->tsa.pol.odev, phl->tsa.pol.opol*phl->tsa.pol.odav, phl->tsa.pol.opol*phl->tsa.pol.odlv, - phl->tsa.pol.opol*phl->tsa.pol.olgs, phl->tsa.pol.opol*phl->tsa.pol.olbv, phl->tsa.pol.opol*phl->tsa.pol.ovss, phl->tsa.pol.opol*phl->tsa.pol.ovms, phl->tsa.pol.opol*phl->tsa.pol.oves, phl->tsa.pol.opol*phl->tsa.pol.ovev, - phl->tsa.pol.opol*phl->tsa.pol.ovav, phl->tsa.pol.opol*phl->tsa.pol.ovlv, phl->tsa.pol.opol*phl->tsa.pol.ovga, phl->tsa.pol.opol*phl->tsa.pol.ovgv, phl->tsa.pol.opol*phl->tsa.pol.odpy, - phl->tsa.lsp.otrd*phl->tsa.lsp.odem, phl->tsa.lsp.otrd*phl->tsa.lsp.odss, phl->tsa.lsp.otrd*phl->tsa.lsp.odri, phl->tsa.lsp.otrd*phl->tsa.lsp.odps, phl->tsa.lsp.otrd*phl->tsa.lsp.odfi, phl->tsa.lsp.otrd*phl->tsa.lsp.odes, - phl->tsa.lsp.otrd*phl->tsa.lsp.odlm, phl->tsa.lsp.otrd*phl->tsa.lsp.olts, phl->tsa.lsp.otrd*phl->tsa.lsp.olgs, phl->tsa.lsp.otrd*phl->tsa.lsp.ovem, phl->tsa.lsp.otrd*phl->tsa.lsp.ovss, phl->tsa.lsp.otrd*phl->tsa.lsp.ovri, - phl->tsa.lsp.otrd*phl->tsa.lsp.ovps, phl->tsa.lsp.otrd*phl->tsa.lsp.ovfi, phl->tsa.lsp.otrd*phl->tsa.lsp.oves, phl->tsa.lsp.otrd*phl->tsa.lsp.ovlm, phl->tsa.lsp.otrd*phl->tsa.lsp.ovbl, phl->tsa.lsp.otrd*phl->tsa.lsp.ovsa, - phl->tsa.lsp.otrd*phl->tsa.lsp.oist, phl->tsa.lsp.otrd*phl->tsa.lsp.oibl, phl->tsa.lsp.otrd*phl->tsa.lsp.oibt, 
phl->tsa.lsp.otrd*phl->tsa.lsp.oigs, phl->tsa.lsp.otrd*phl->tsa.lsp.orar, phl->tsa.lsp.otrd*phl->tsa.lsp.oraf, - phl->tsa.lsp.otrd*phl->tsa.lsp.ormr, phl->tsa.lsp.otrd*phl->tsa.lsp.ormf, - phl->tsa.pol.otrd*phl->tsa.pol.odss, phl->tsa.pol.otrd*phl->tsa.pol.odms, phl->tsa.pol.otrd*phl->tsa.pol.odes, phl->tsa.pol.otrd*phl->tsa.pol.odev, phl->tsa.pol.otrd*phl->tsa.pol.odav, phl->tsa.pol.otrd*phl->tsa.pol.odlv, - phl->tsa.pol.otrd*phl->tsa.pol.olgs, phl->tsa.pol.otrd*phl->tsa.pol.olbv, phl->tsa.pol.otrd*phl->tsa.pol.ovss, phl->tsa.pol.otrd*phl->tsa.pol.ovms, phl->tsa.pol.otrd*phl->tsa.pol.oves, phl->tsa.pol.otrd*phl->tsa.pol.ovev, - phl->tsa.pol.otrd*phl->tsa.pol.ovav, phl->tsa.pol.otrd*phl->tsa.pol.ovlv, phl->tsa.pol.otrd*phl->tsa.pol.ovga, phl->tsa.pol.otrd*phl->tsa.pol.ovgv, phl->tsa.pol.otrd*phl->tsa.pol.odpy, - phl->tsa.fld.otry, phl->tsa.fld.otrq, phl->tsa.fld.otrm, phl->tsa.fld.otrw, phl->tsa.fld.otrd, - phl->tsa.lsp.ocat*phl->tsa.lsp.odem, phl->tsa.lsp.ocat*phl->tsa.lsp.odss, phl->tsa.lsp.ocat*phl->tsa.lsp.odri, phl->tsa.lsp.ocat*phl->tsa.lsp.odps, phl->tsa.lsp.ocat*phl->tsa.lsp.odfi, phl->tsa.lsp.ocat*phl->tsa.lsp.odes, - phl->tsa.lsp.ocat*phl->tsa.lsp.odlm, phl->tsa.lsp.ocat*phl->tsa.lsp.olts, phl->tsa.lsp.ocat*phl->tsa.lsp.olgs, phl->tsa.lsp.ocat*phl->tsa.lsp.ovem, phl->tsa.lsp.ocat*phl->tsa.lsp.ovss, phl->tsa.lsp.ocat*phl->tsa.lsp.ovri, - phl->tsa.lsp.ocat*phl->tsa.lsp.ovps, phl->tsa.lsp.ocat*phl->tsa.lsp.ovfi, phl->tsa.lsp.ocat*phl->tsa.lsp.oves, phl->tsa.lsp.ocat*phl->tsa.lsp.ovlm, phl->tsa.lsp.ocat*phl->tsa.lsp.ovbl, phl->tsa.lsp.ocat*phl->tsa.lsp.ovsa, - phl->tsa.lsp.ocat*phl->tsa.lsp.oist, phl->tsa.lsp.ocat*phl->tsa.lsp.oibl, phl->tsa.lsp.ocat*phl->tsa.lsp.oibt, phl->tsa.lsp.ocat*phl->tsa.lsp.oigs, phl->tsa.lsp.ocat*phl->tsa.lsp.orar, phl->tsa.lsp.ocat*phl->tsa.lsp.oraf, - phl->tsa.lsp.ocat*phl->tsa.lsp.ormr, phl->tsa.lsp.ocat*phl->tsa.lsp.ormf, - phl->tsa.pol.ocat*phl->tsa.pol.odss, phl->tsa.pol.ocat*phl->tsa.pol.odms, 
phl->tsa.pol.ocat*phl->tsa.pol.odes, phl->tsa.pol.ocat*phl->tsa.pol.odev, phl->tsa.pol.ocat*phl->tsa.pol.odav, phl->tsa.pol.ocat*phl->tsa.pol.odlv, - phl->tsa.pol.ocat*phl->tsa.pol.olgs, phl->tsa.pol.ocat*phl->tsa.pol.olbv, phl->tsa.pol.ocat*phl->tsa.pol.ovss, phl->tsa.pol.ocat*phl->tsa.pol.ovms, phl->tsa.pol.ocat*phl->tsa.pol.oves, phl->tsa.pol.ocat*phl->tsa.pol.ovev, - phl->tsa.pol.ocat*phl->tsa.pol.ovav, phl->tsa.pol.ocat*phl->tsa.pol.ovlv, phl->tsa.pol.ocat*phl->tsa.pol.ovga, phl->tsa.pol.ocat*phl->tsa.pol.ovgv, phl->tsa.pol.ocat*phl->tsa.pol.odpy, - phl->tsa.fld.ocay, phl->tsa.fld.ocaq, phl->tsa.fld.ocam, phl->tsa.fld.ocaw, phl->tsa.fld.ocad }; - -short ***ptr[149] = { - &ts->tss_, &ts->rms_, &ts->stm_, &ts->tsi_, &ts->spl_, - &ts->fby_, &ts->fbq_, &ts->fbm_, &ts->fbw_, &ts->fbd_, - &ts->lsp_[0], &ts->lsp_[1], &ts->lsp_[2], &ts->lsp_[3], &ts->lsp_[4], &ts->lsp_[5], - &ts->lsp_[6], &ts->lsp_[7], &ts->lsp_[8], &ts->lsp_[9], &ts->lsp_[10], &ts->lsp_[11], - &ts->lsp_[12], &ts->lsp_[13], &ts->lsp_[14], &ts->lsp_[15], &ts->lsp_[16], &ts->lsp_[17], - &ts->lsp_[18], &ts->lsp_[19], &ts->lsp_[20], &ts->lsp_[21], &ts->lsp_[22], &ts->lsp_[23], - &ts->lsp_[24], &ts->lsp_[25], - &ts->pol_[0], &ts->pol_[1], &ts->pol_[2], &ts->pol_[3], &ts->pol_[4], &ts->pol_[5], - &ts->pol_[6], &ts->pol_[7], &ts->pol_[8], &ts->pol_[9], &ts->pol_[10], &ts->pol_[11], - &ts->pol_[12], &ts->pol_[13], &ts->pol_[14], &ts->pol_[15], &ts->pol_[16], - &ts->trp_[0], &ts->trp_[1], &ts->trp_[2], &ts->trp_[3], &ts->trp_[4], &ts->trp_[5], - &ts->trp_[6], &ts->trp_[7], &ts->trp_[8], &ts->trp_[9], &ts->trp_[10], &ts->trp_[11], - &ts->trp_[12], &ts->trp_[13], &ts->trp_[14], &ts->trp_[15], &ts->trp_[16], &ts->trp_[17], - &ts->trp_[18], &ts->trp_[19], &ts->trp_[20], &ts->trp_[21], &ts->trp_[22], &ts->trp_[23], - &ts->trp_[24], &ts->trp_[25], - &ts->tro_[0], &ts->tro_[1], &ts->tro_[2], &ts->tro_[3], &ts->tro_[4], &ts->tro_[5], - &ts->tro_[6], &ts->tro_[7], &ts->tro_[8], &ts->tro_[9], &ts->tro_[10], 
&ts->tro_[11], - &ts->tro_[12], &ts->tro_[13], &ts->tro_[14], &ts->tro_[15], &ts->tro_[16], - &ts->try_, &ts->trq_, &ts->trm_, &ts->trw_, &ts->trd_, - &ts->cap_[0], &ts->cap_[1], &ts->cap_[2], &ts->cap_[3], &ts->cap_[4], &ts->cap_[5], - &ts->cap_[6], &ts->cap_[7], &ts->cap_[8], &ts->cap_[9], &ts->cap_[10], &ts->cap_[11], - &ts->cap_[12], &ts->cap_[13], &ts->cap_[14], &ts->cap_[15], &ts->cap_[16], &ts->cap_[17], - &ts->cap_[18], &ts->cap_[19], &ts->cap_[20], &ts->cap_[21], &ts->cap_[22], &ts->cap_[23], - &ts->cap_[24], &ts->cap_[25], - &ts->cao_[0], &ts->cao_[1], &ts->cao_[2], &ts->cao_[3], &ts->cao_[4], &ts->cao_[5], - &ts->cao_[6], &ts->cao_[7], &ts->cao_[8], &ts->cao_[9], &ts->cao_[10], &ts->cao_[11], - &ts->cao_[12], &ts->cao_[13], &ts->cao_[14], &ts->cao_[15], &ts->cao_[16], - &ts->cay_, &ts->caq_, &ts->cam_, &ts->caw_, &ts->cad_ }; +stack_compile_info_t *info = NULL; + + + nprod = 5 + // TSS, RMS, TSI, STM, SPL, + 5 + // folds + 5 + // trend on folds + 5 + // cat on folds + _LSP_LENGTH_ + // phenometrics + _LSP_LENGTH_ + // trend on phenometrics + _LSP_LENGTH_ + // cat on phenometrics + 2 + // polar-transformed coordinates + _POL_LENGTH_ + // polarmetrics + _POL_LENGTH_ + // trend on polarmetrics + _POL_LENGTH_; // cat on polarmetrics + + printf("%d potential products.\n", nprod); + + alloc((void**)&info, nprod, sizeof(stack_compile_info_t)); + + o = info_tss(info, o, nt, ts, phl); + o = info_rms(info, o, nt, ts, phl); + o = info_tsi(info, o, ni, ts, phl); + o = info_stm(info, o, ts, phl); + o = info_spl(info, o, ni, ts, phl); + o = info_fby(info, o, ts, phl); + o = info_fbq(info, o, ts, phl); + o = info_fbm(info, o, ts, phl); + o = info_fbw(info, o, ts, phl); + o = info_fbd(info, o, ts, phl); + o = info_lsp(info, o, ts, phl); + o = info_pol(info, o, ni, ts, phl); alloc((void**)&TSA, nprod, sizeof(stack_t*)); @@ -247,19 +422,20 @@ short ***ptr[149] = { for (o=0; otss_); +printf("%02d: ts ptr rms: %p\n", 0, ts->rms_); +printf("%02d: ts ptr tsi: %p\n", 0, 
ts->tsi_); +printf("%02d: ts ptr stm: %p\n", 0, ts->stm_); +printf("%02d: ts ptr spl: %p\n", 0, ts->spl_); +printf("%02d: ts ptr fby: %p\n", 0, ts->fby_); +printf("%02d: ts ptr fbq: %p\n", 0, ts->fbq_); +printf("%02d: ts ptr fbm: %p\n", 0, ts->fbm_); +printf("%02d: ts ptr fbw: %p\n", 0, ts->fbw_); +printf("%02d: ts ptr fbd: %p\n", 0, ts->fbd_); +printf("%02d: ts ptr try: %p\n", 0, ts->try_); +printf("%02d: ts ptr trq: %p\n", 0, ts->trq_); +printf("%02d: ts ptr trm: %p\n", 0, ts->trm_); +printf("%02d: ts ptr trw: %p\n", 0, ts->trw_); +printf("%02d: ts ptr trd: %p\n", 0, ts->trd_); +printf("%02d: ts ptr cay: %p\n", 0, ts->cay_); +printf("%02d: ts ptr caq: %p\n", 0, ts->caq_); +printf("%02d: ts ptr cam: %p\n", 0, ts->cam_); +printf("%02d: ts ptr caw: %p\n", 0, ts->caw_); +printf("%02d: ts ptr cad: %p\n", 0, ts->cad_); + +for (o=0; o<_LSP_LENGTH_; o++) printf("%02d: ts ptr lsp: %p\n", o, ts->lsp_[o]); +for (o=0; o<_LSP_LENGTH_; o++) printf("%02d: ts ptr trp: %p\n", o, ts->trp_[o]); +for (o=0; o<_LSP_LENGTH_; o++) printf("%02d: ts ptr cap: %p\n", o, ts->cap_[o]); +for (o=0; o<_POL_LENGTH_; o++) printf("%02d: ts ptr pol: %p\n", o, ts->pol_[o]); +for (o=0; o<_POL_LENGTH_; o++) printf("%02d: ts ptr tro: %p\n", o, ts->tro_[o]); +for (o=0; o<_POL_LENGTH_; o++) printf("%02d: ts ptr cao: %p\n", o, ts->cao_[o]); + + if (error > 0){ printf("%d compiling TSA product errors.\n", error); for (o=0; o Date: Thu, 13 Aug 2020 15:35:07 +0200 Subject: [PATCH 18/78] parallel polar --- src/higher-level/polar-hl.c | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index 09004c5c..dfc6b12b 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -316,7 +316,6 @@ float sum; +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min, int year_max, par_tsi_t *tsi, par_pol_t *pol){ int l; -int year; 
int p; int i, i_, i0; int s, y; @@ -348,20 +347,20 @@ polar_t *theta0 = NULL; - //#pragma omp parallel private(l,i,i0,ni_,ce_left,ce_right,v_left,v_right,year,valid,ce,v,doy) firstprivate(southern) shared(mask_,ts,nc,ni,year_min,year_max,nodata,pol,nseg) default(none) + #pragma omp parallel private(l,i,i0,i_,ce_left,ce_right,v_left,v_right,valid,ce,v,s,y,r,timing,vector,mean_window,n_window,recurrence,polar,theta0) shared(mask_,ts,nc,ni,year_min,nodata,pol,tsi) default(none) { // allocate alloc((void**)&polar, ni, sizeof(polar_t)); - //#pragma omp for + #pragma omp for for (p=0; ppol_[l] != NULL){ - for (year=0; yearny; year++) ts->pol_[l][year][p] = nodata; + for (y=0; yny; y++) ts->pol_[l][y][p] = nodata; } } From a65b7b97d9517a0544446c88384a658bf939218d Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 13 Aug 2020 15:44:51 +0200 Subject: [PATCH 19/78] removed printfs etc from polar --- src/higher-level/polar-hl.c | 68 ++++++++----------------------------- 1 file changed, 15 insertions(+), 53 deletions(-) diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index dfc6b12b..870f5c48 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -46,7 +46,7 @@ void polar_coords(float r, float v, float yr, polar_t *polar); void polar_vector(float x, float y, polar_t *polar); void ce_from_polar_vector(float yr, polar_t *theta, polar_t *polar); void identify_regular_seasons(polar_t *polar, int ni, int istep, polar_t *theta); -polar_t *identify_variable_seasons(polar_t *polar, int ni, int istep, par_pol_t *pol, polar_t *theta, bool print); +polar_t *identify_variable_seasons(polar_t *polar, int ni, int istep, par_pol_t *pol, polar_t *theta); void accumulate_seasons(polar_t *polar, int ni); int polar_ts(tsa_t *ts, small *mask_, int nc, int ni, short nodata, int year_min, int year_max, par_tsi_t *tsi, par_pol_t *pol); @@ -139,14 +139,14 @@ int i, s = -1, y = 0; } -polar_t *identify_variable_seasons(polar_t *polar, int ni, int istep, 
par_pol_t *pol, polar_t *theta, bool print){ +polar_t *identify_variable_seasons(polar_t *polar, int ni, int istep, par_pol_t *pol, polar_t *theta){ int s, i, i0, ii, i1; float mean_pct[2], n_pct; polar_t *alpha0 = NULL; // mean vector in pre-structured phenological year polar_t *theta0 = NULL; // diametric opposite of alpha0 = fine-tuned start of phenological year float opposite; int *diff_season = NULL; -int ce_shift, i_shift, d_shift, d_seas; +int ce_shift, i_shift; alloc((void**)&theta0, pol->ns, sizeof(polar_t)); @@ -195,7 +195,7 @@ int ce_shift, i_shift, d_shift, d_seas; theta0[s].year = s; theta0[s].ce = doy2ce(theta0[s].doy, s); } -if (print) printf("season %d. alpha: %f %d. updated theta: %f %d %d\n", s, alpha0[s].rad, alpha0[s].doy, theta0[s].rad, theta0[s].doy, theta0[s].ce); + } @@ -215,11 +215,7 @@ if (print) printf("season %d. alpha: %f %d. updated theta: %f %d %d\n", s, alpha ce_shift = theta0[s].ce - theta->ce; i_shift = floor(abs(ce_shift)/(float)istep); - d_shift = (ce_shift > 0) ? 1 : -1; - d_seas = (ce_shift > 0) ? -1 : 1; - - if (print) printf("season %d: shift is %d days, %d positions in %d direction. Adding %d to season\n", - s, ce_shift, i_shift, d_shift, d_seas); + if (ce_shift > 0){ @@ -238,14 +234,6 @@ if (print) printf("season %d. alpha: %f %d. 
updated theta: %f %d %d\n", s, alpha } } - - - //while (i_shift > 0){ - // ii = i+i_shift*d_shift; - // if (ii < 0 || ii >= ni) break; - // diff_season[ii] = d_seas; - // i_shift--; - //} } @@ -324,7 +312,7 @@ bool valid; float ce_left, ce_right, ce; float v_left, v_right; -enum { _LEFT_, _START_, _MID_, _SIJSIJNSI_, _END_, _RIGHT_, _EVENT_LEN_ }; +enum { _LEFT_, _START_, _MID_, _PEAK_, _END_, _RIGHT_, _EVENT_LEN_ }; enum { _ALPHA_, _THETA_, _EARLY_, _GROW_, _LATE_, _WINDOW_LEN_ }; polar_t timing[_EVENT_LEN_]; @@ -417,14 +405,10 @@ polar_t *theta0 = NULL; } r = ts->d_tsi[i].doy/365.0*2.0*M_PI; - - if (p == 404173) printf("doy: %d\n", ts->d_tsi[i].doy); - if (p == 404173) printf("r: %f\n", r); - if (p == 404173) printf("v: %f\n", v); if (v < 0) v = 0; + polar_coords(r, v, ts->d_tsi[i].year-year_min, &polar[i]); - if (p == 404173) printf("x: %f\n", polar[i].pcx); - if (p == 404173) printf("y: %f\n", polar[i].pcy); + if (pol->opct) ts->pcx_[i][p] = (short)polar[i].pcx; if (pol->opct) ts->pcy_[i][p] = (short)polar[i].pcy; @@ -436,12 +420,10 @@ polar_t *theta0 = NULL; if (!valid) continue; - if (p == 404173) printf("valid pixel.\n"); // mean of polar coordinates mean_window[_ALPHA_][_X_] /= ni; mean_window[_ALPHA_][_Y_] /= ni; - if (p == 404173) printf("mean pol x/y: %f %f\n", mean_window[_ALPHA_][_X_], mean_window[_ALPHA_][_Y_]); // multi-annual average vector polar_vector(mean_window[_ALPHA_][_X_], mean_window[_ALPHA_][_Y_], &vector[_ALPHA_]); @@ -454,22 +436,13 @@ polar_t *theta0 = NULL; } vector[_THETA_].doy = (vector[_THETA_].rad*365.0/(2.0*M_PI)); - if (p == 404173) printf("avg: %f %d %f\n", vector[_ALPHA_].rad, vector[_ALPHA_].doy, vector[_ALPHA_].val); - if (p == 404173) printf("theta: %f %d %d\n", vector[_THETA_].rad, vector[_THETA_].doy, vector[_THETA_].ce); - identify_regular_seasons(polar, ni, tsi->step, &vector[_THETA_]); - if (p == 404173){ - theta0 = identify_variable_seasons(polar, ni, tsi->step, pol, &vector[_THETA_], true); - } else { - theta0 = 
identify_variable_seasons(polar, ni, tsi->step, pol, &vector[_THETA_], false); - } + theta0 = identify_variable_seasons(polar, ni, tsi->step, pol, &vector[_THETA_]); accumulate_seasons(polar, ni); - - if (p == 404173) for (i=0; ins; s++){ @@ -521,8 +494,8 @@ polar_t *theta0 = NULL; } // max of season - if (polar[i].val > timing[_SIJSIJNSI_].val){ - memcpy(&timing[_SIJSIJNSI_], &polar[i], sizeof(polar_t));} + if (polar[i].val > timing[_PEAK_].val){ + memcpy(&timing[_PEAK_], &polar[i], sizeof(polar_t));} // average vector of early growing season part if (polar[i].cum >= pol->start && @@ -559,22 +532,11 @@ polar_t *theta0 = NULL; ce_from_polar_vector(s, &vector[_THETA_], &vector[_LATE_]); - - // sanity check? - //valid = false; - // if () valid = true; - //valid = true; - - - - if (p == 404173) printf("season: %d, year %d\n", s, y); - if (p == 404173) printf("mean, sd, and n: %f, %f, %d\n", recurrence[0], standdev(recurrence[1], n_window[_GROW_]), n_window[_GROW_]); - // date parameters if (pol->use[_POL_DEM_]) ts->pol_[_POL_DEM_][y][p] = (short)timing[_LEFT_].ce; if (pol->use[_POL_DSS_]) ts->pol_[_POL_DSS_][y][p] = (short)timing[_START_].ce; if (pol->use[_POL_DMS_]) ts->pol_[_POL_DMS_][y][p] = (short)timing[_MID_].ce; - if (pol->use[_POL_DPS_]) ts->pol_[_POL_DPS_][y][p] = (short)timing[_SIJSIJNSI_].ce; + if (pol->use[_POL_DPS_]) ts->pol_[_POL_DPS_][y][p] = (short)timing[_PEAK_].ce; if (pol->use[_POL_DES_]) ts->pol_[_POL_DES_][y][p] = (short)timing[_END_].ce; if (pol->use[_POL_DLM_]) ts->pol_[_POL_DLM_][y][p] = (short)timing[_RIGHT_].ce; if (pol->use[_POL_DEV_]) ts->pol_[_POL_DEV_][y][p] = (short)vector[_EARLY_].ce; @@ -592,15 +554,15 @@ polar_t *theta0 = NULL; if (pol->use[_POL_VEM_]) ts->pol_[_POL_VEM_][y][p] = (short)timing[_LEFT_].val; if (pol->use[_POL_VSS_]) ts->pol_[_POL_VSS_][y][p] = (short)timing[_START_].val; if (pol->use[_POL_VMS_]) ts->pol_[_POL_VMS_][y][p] = (short)timing[_MID_].val; - if (pol->use[_POL_VPS_]) ts->pol_[_POL_VPS_][y][p] = 
(short)timing[_SIJSIJNSI_].val; + if (pol->use[_POL_VPS_]) ts->pol_[_POL_VPS_][y][p] = (short)timing[_PEAK_].val; if (pol->use[_POL_VLM_]) ts->pol_[_POL_VLM_][y][p] = (short)timing[_RIGHT_].val; if (pol->use[_POL_VES_]) ts->pol_[_POL_VES_][y][p] = (short)timing[_END_].val; if (pol->use[_POL_VEV_]) ts->pol_[_POL_VEV_][y][p] = (short)vector[_EARLY_].val; if (pol->use[_POL_VAV_]) ts->pol_[_POL_VAV_][y][p] = (short)vector[_GROW_].val; if (pol->use[_POL_VLV_]) ts->pol_[_POL_VLV_][y][p] = (short)vector[_LATE_].val; - if (pol->use[_POL_VSA_]) ts->pol_[_POL_VSA_][y][p] = (short)(timing[_SIJSIJNSI_].val - + if (pol->use[_POL_VSA_]) ts->pol_[_POL_VSA_][y][p] = (short)(timing[_PEAK_].val - (timing[_START_].val+timing[_END_].val)/2.0); - if (pol->use[_POL_VPA_]) ts->pol_[_POL_VPA_][y][p] = (short)(timing[_SIJSIJNSI_].val - timing[_MID_].val); + if (pol->use[_POL_VPA_]) ts->pol_[_POL_VPA_][y][p] = (short)(timing[_PEAK_].val - timing[_MID_].val); if (pol->use[_POL_VBL_]) ts->pol_[_POL_VBL_][y][p] = (short)((timing[_LEFT_].val+timing[_RIGHT_].val)/2.0); if (pol->use[_POL_VGA_]) ts->pol_[_POL_VGA_][y][p] = (short)recurrence[0]; if (pol->use[_POL_VGV_]) ts->pol_[_POL_VGV_][y][p] = (short)standdev(recurrence[1], n_window[_GROW_]); From 0645b05d4a8b8e9304a465dbec3834f104ea3005 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 13 Aug 2020 15:47:54 +0200 Subject: [PATCH 20/78] removed printfs etc from tsa --- src/higher-level/tsa-hl.c | 66 +++++++++++++++++++++------------------ 1 file changed, 35 insertions(+), 31 deletions(-) diff --git a/src/higher-level/tsa-hl.c b/src/higher-level/tsa-hl.c index 3532a0f7..5404d49e 100755 --- a/src/higher-level/tsa-hl.c +++ b/src/higher-level/tsa-hl.c @@ -423,9 +423,12 @@ stack_compile_info_t *info = NULL; for (o=0; otss_); -printf("%02d: ts ptr rms: %p\n", 0, ts->rms_); -printf("%02d: ts ptr tsi: %p\n", 0, ts->tsi_); -printf("%02d: ts ptr stm: %p\n", 0, ts->stm_); -printf("%02d: ts ptr spl: %p\n", 0, ts->spl_); -printf("%02d: ts ptr fby: 
%p\n", 0, ts->fby_); -printf("%02d: ts ptr fbq: %p\n", 0, ts->fbq_); -printf("%02d: ts ptr fbm: %p\n", 0, ts->fbm_); -printf("%02d: ts ptr fbw: %p\n", 0, ts->fbw_); -printf("%02d: ts ptr fbd: %p\n", 0, ts->fbd_); -printf("%02d: ts ptr try: %p\n", 0, ts->try_); -printf("%02d: ts ptr trq: %p\n", 0, ts->trq_); -printf("%02d: ts ptr trm: %p\n", 0, ts->trm_); -printf("%02d: ts ptr trw: %p\n", 0, ts->trw_); -printf("%02d: ts ptr trd: %p\n", 0, ts->trd_); -printf("%02d: ts ptr cay: %p\n", 0, ts->cay_); -printf("%02d: ts ptr caq: %p\n", 0, ts->caq_); -printf("%02d: ts ptr cam: %p\n", 0, ts->cam_); -printf("%02d: ts ptr caw: %p\n", 0, ts->caw_); -printf("%02d: ts ptr cad: %p\n", 0, ts->cad_); - -for (o=0; o<_LSP_LENGTH_; o++) printf("%02d: ts ptr lsp: %p\n", o, ts->lsp_[o]); -for (o=0; o<_LSP_LENGTH_; o++) printf("%02d: ts ptr trp: %p\n", o, ts->trp_[o]); -for (o=0; o<_LSP_LENGTH_; o++) printf("%02d: ts ptr cap: %p\n", o, ts->cap_[o]); -for (o=0; o<_POL_LENGTH_; o++) printf("%02d: ts ptr pol: %p\n", o, ts->pol_[o]); -for (o=0; o<_POL_LENGTH_; o++) printf("%02d: ts ptr tro: %p\n", o, ts->tro_[o]); -for (o=0; o<_POL_LENGTH_; o++) printf("%02d: ts ptr cao: %p\n", o, ts->cao_[o]); + //printf("%02d: ts ptr tss: %p\n", 0, ts->tss_); + //printf("%02d: ts ptr rms: %p\n", 0, ts->rms_); + //printf("%02d: ts ptr tsi: %p\n", 0, ts->tsi_); + //printf("%02d: ts ptr stm: %p\n", 0, ts->stm_); + //printf("%02d: ts ptr spl: %p\n", 0, ts->spl_); + //printf("%02d: ts ptr fby: %p\n", 0, ts->fby_); + //printf("%02d: ts ptr fbq: %p\n", 0, ts->fbq_); + //printf("%02d: ts ptr fbm: %p\n", 0, ts->fbm_); + //printf("%02d: ts ptr fbw: %p\n", 0, ts->fbw_); + //printf("%02d: ts ptr fbd: %p\n", 0, ts->fbd_); + //printf("%02d: ts ptr try: %p\n", 0, ts->try_); + //printf("%02d: ts ptr trq: %p\n", 0, ts->trq_); + //printf("%02d: ts ptr trm: %p\n", 0, ts->trm_); + //printf("%02d: ts ptr trw: %p\n", 0, ts->trw_); + //printf("%02d: ts ptr trd: %p\n", 0, ts->trd_); + //printf("%02d: ts ptr cay: %p\n", 0, 
ts->cay_); + //printf("%02d: ts ptr caq: %p\n", 0, ts->caq_); + //printf("%02d: ts ptr cam: %p\n", 0, ts->cam_); + //printf("%02d: ts ptr caw: %p\n", 0, ts->caw_); + //printf("%02d: ts ptr cad: %p\n", 0, ts->cad_); + //for (o=0; o<_LSP_LENGTH_; o++) printf("%02d: ts ptr lsp: %p\n", o, ts->lsp_[o]); + //for (o=0; o<_LSP_LENGTH_; o++) printf("%02d: ts ptr trp: %p\n", o, ts->trp_[o]); + //for (o=0; o<_LSP_LENGTH_; o++) printf("%02d: ts ptr cap: %p\n", o, ts->cap_[o]); + //for (o=0; o<_POL_LENGTH_; o++) printf("%02d: ts ptr pol: %p\n", o, ts->pol_[o]); + //for (o=0; o<_POL_LENGTH_; o++) printf("%02d: ts ptr tro: %p\n", o, ts->tro_[o]); + //for (o=0; o<_POL_LENGTH_; o++) printf("%02d: ts ptr cao: %p\n", o, ts->cao_[o]); if (error > 0){ From 7cfad8774d71179c63c8f0198ff3b3ba1e35ce20 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 13 Aug 2020 15:53:28 +0200 Subject: [PATCH 21/78] removed printfs etc from tsa --- src/higher-level/tsa-hl.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/higher-level/tsa-hl.c b/src/higher-level/tsa-hl.c index 5404d49e..fe5a019e 100755 --- a/src/higher-level/tsa-hl.c +++ b/src/higher-level/tsa-hl.c @@ -384,7 +384,7 @@ stack_compile_info_t *info = NULL; _POL_LENGTH_ + // trend on polarmetrics _POL_LENGTH_; // cat on polarmetrics - printf("%d potential products.\n", nprod); + //printf("%d potential products.\n", nprod); alloc((void**)&info, nprod, sizeof(stack_compile_info_t)); From 23a17506e19cc50d3a9938d7283b0b9a81f337b7 Mon Sep 17 00:00:00 2001 From: Stefan Ernst Date: Fri, 14 Aug 2020 15:38:09 +0200 Subject: [PATCH 22/78] google: ls and s2 now in -gcs.sh, esa: now in -esa.sh, force-level1.sh acts as master script --- bash/force-level1-landsat-g.sh | 300 ----------------------- bash/force-level1-sentinel2-g.sh | 283 --------------------- bash/force-level1-sentinel2.sh | 408 ------------------------------- 3 files changed, 991 deletions(-) delete mode 100755 bash/force-level1-landsat-g.sh delete mode 100755 
bash/force-level1-sentinel2-g.sh delete mode 100755 bash/force-level1-sentinel2.sh diff --git a/bash/force-level1-landsat-g.sh b/bash/force-level1-landsat-g.sh deleted file mode 100755 index fc1f3ace..00000000 --- a/bash/force-level1-landsat-g.sh +++ /dev/null @@ -1,300 +0,0 @@ -#!/bin/bash - -# ====================================================================================== -# Name: LS_queryAndDownload_gsutil.sh -# Author: Stefan Ernst -# Date: 2020-06-20 -# Last change: 2020-08-10 -# Desc: Query and download the public Google Cloud Storage Sentinel-2 archive. -# Requirements: -# 1. Google Sentinel-2 metadata catalogue: -# https://console.cloud.google.com/storage/browser/gcp-public-data-landsat -# 2. shapefile containing the Landsat WRS-2 descending orbits: -# https://www.usgs.gov/media/files/landsat-wrs-2-descending-path-row-shapefile -# 3. gsutil - available through pip and conda -# Run the command 'gsutil config' after installation to set up authorization -# with your Google account. -# 4. gdal - specify the AOI as path/row if gdal is not available -# ====================================================================================== - - -trap "echo Exited!; exit;" SIGINT SIGTERM #make sure that CTRL-C stops the whole process - -show_help() { -cat << EOF - -Usage: `basename $0` [-d] [-u] metadata-dir level-1-datapool queue aoi - aoitype sensor starttime endtime min-cc max-cc - - metadata-dir - directory where the Landsat metadata (csv file) is stored - - level-1-datapool - An existing directory, your files will be stored here - - queue - Downloaded files are appended to a file queue, which is needed for - the Level 2 processing. The file doesn't need to exist. If it exists, - new lines will be appended on successful ingestion - - area of interest - (1) The coordinates of your study area: "X1/Y1,X2/Y2,X3/Y3,...,X1/Y1" - The polygon must be closed (first X/Y = last X/Y). 
X/Y must be given as - decimal degrees with negative values for West and South coordinates. - (2) a shapefile (point/polygon/line). On-the-fly reprojection is provided, - but using EPSG4326 is recommended - (3) Path/Row of the Landsat footprints of interest: "PPPRRR,PPPRRR,PPPRRR" - Make sure to keep leading zeros - correct: 181034, incorrect: 18134 - - type of area of interest - 1 - coordinates as text - 2 - shapefile - 3 - PathRow as text - - sensor - Landsat sensor identifier: - LT05 - Landsat 5 TM - LE07 - Landsat 7 ETM+ - LC08 - Landsat OLI - - starttime endtime - Dates must be given as YYYY-MM-DD - - min-cc max-cc - The cloud cover range must be specified in % - - -d dry will trigger a dry run that will only return the number of images - and their total data volume - - -u will update the metadata catalogue (download and extract from GCS) - only the metadata dir is required as argument when using this option - - -h|--help show this help - -EOF -} - - -update_meta() { - echo "Updating metadata catalogue..." - gsutil -m cp gs://gcp-public-data-landsat/index.csv.gz $METADIR - gunzip $METADIR/index.csv.gz - mv $METADIR/index.csv $METADIR/metadata_LS.csv -} - - -# ============================================================ -# check for options -DRYRUN=0 -while :; do - case $1 in - -d) DRYRUN=1 ;; - -h|-\?|--help) show_help - exit 0 ;; - -u) METADIR=$2 - if [ $# -lt 2 ]; then - echo "Metadata directory not specified, exiting" - exit 1 - elif [ $# -gt 2 ]; then - echo "Error: Please only specify the metadata directory when using the update option (-u)" - exit 1 - elif ! [ -w $METADIR ]; then - echo "Can not write to metadata directory, exiting" - exit 1 - fi - update_meta - echo "Done. You can run this script without option -d to download data now." 
- exit ;; - -?*) printf "%s\n" "" "Incorrect option specified" "" - show_help >&2 - exit 1 ;; - *) break #no more options - esac - shift -done - - -# ============================================================ -# if wrong number of input args and -u opt not set, stop -EXPECTED_ARGS=10 -if [ $# -ne $EXPECTED_ARGS ]; then - printf "%s\n" "" "Incorrect number of input arguments provided" - show_help - exit -fi - -METADIR=$1 -POOL=$2 -QUEUE=$3 -AOI=$4 -AOITYPE=$5 -SENSIN=$6 -DATEMIN=$7 -DATEMAX=$8 -CCMIN=$9 -CCMAX=${10} - -METACAT=$METADIR"/metadata_LS.csv" - - -# ============================================================ -# Check user input -for s in $(echo $SENSIN | sed 's/,/ /g') -do - case "$s" in - LT05|LE07|LC08) continue ;; - *) printf "%s\n" "" "$s is not a valid sensor type." "Valid Sensors: LT05, LE07, LC08" "" - exit ;; - esac -done - -if ! date -d $DATEMIN &> /dev/null; then - printf "%s\n" "" "starttime ($DATEMIN) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" - exit 1 - elif ! date -d $DATEMAX &> /dev/null; then - printf "%s\n" "" "endtime ($DATEMAX) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" - exit 1 -fi - - -# ============================================================ -# Check if metadata catalogue exists and is up to date -if ! [ -f $METACAT ]; then - echo "Metadata catalogue does not exist." - update_meta -fi - -METADATE=$(date -d $(stat $METACAT | grep "Change: " | cut -d" " -f2) +%s) -if [ $(date -d $DATEMAX +%s) -gt $METADATE ]; then - printf "%s\n" "" "WARNING: The selected time window exceeds the last update of the metadata catalogue" "Results may be incomplete, please consider updating the metadata catalogue using the -d option." -fi - - -# ============================================================ -# Get path / rows of interest -if [ "$AOITYPE" -eq 2 ]; then - if ! [ $(basename "$AOI" | cut -d"." -f 2-) == "shp" ]; then - printf "%s\n" "" "WARNING: AOI does not seem to be a shapefile. 
Other filetypes supported by GDAL should work, but are untested." - fi -fi -if [ "$AOITYPE" -eq 1 ] || [ "$AOITYPE" -eq 2 ]; then - if ! [ -x "$(command -v ogr2ogr)" ]; then - printf "%s\n" "Could not find ogr2ogr, is gdal installed?" "Define the AOI polygon using coordinates (option 3) if gdal is not available." >&2 - exit 1 - fi -fi - - -if [ "$AOITYPE" -eq 1 ]; then - - WKT=$(echo $AOI | sed 's/,/%20/g; s/\//,/g') - WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetFeature&typename=landsat&Filter=%3Cogc:Filter%3E%3Cogc:Intersects%3E%3Cogc:PropertyName%3Eshape%3C/ogc:PropertyName%3E%3Cgml:Polygon%20srsName=%22EPSG:4326%22%3E%3Cgml:outerBoundaryIs%3E%3Cgml:LinearRing%3E%3Cgml:coordinates%3E"$WKT"%3C/gml:coordinates%3E%3C/gml:LinearRing%3E%3C/gml:outerBoundaryIs%3E%3C/gml:Polygon%3E%3C/ogc:Intersects%3E%3C/ogc:Filter%3E" - PRRAW=$(ogr2ogr -f CSV /vsistdout/ -select "PR" WFS:"$WFSURL") - PR="_"$(echo $PRRAW | sed 's/PR, //; s/ /_|_/g')"_" - -elif [ "$AOITYPE" -eq 2 ]; then - - printf "%s\n" "" "Searching for Landsat footprints intersecting with geometries of AOI shapefile..." - AOINE=$(echo $(basename "$AOI") | rev | cut -d"." 
-f 2- | rev) - BBOX=$(ogrinfo -so $AOI $AOINE | grep "Extent: " | sed 's/Extent: //; s/(//g; s/)//g; s/, /,/g; s/ - /,/') - WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetCapabilities&typename=landsat&bbox="$BBOX - - ogr2ogr -f "GPKG" merged.gpkg WFS:"$WFSURL" -append -update - ogr2ogr -f "GPKG" merged.gpkg $AOI -append -update - - PRRAW=$(ogr2ogr -f CSV /vsistdout/ -dialect sqlite -sql "SELECT landsat.PR FROM landsat, $AOINE WHERE ST_Intersects(landsat.geom, ST_Transform($AOINE.geom, 4326))" merged.gpkg) - PR="_"$(echo $PRRAW | sed 's/PR, //; s/ /_|_/g')"_" - rm merged.gpkg - -elif [ "$AOITYPE" -eq 3 ]; then - - PRRAW=$AOI - PR="_"$(echo $AOI | sed 's/,/_|_/g')"_" - -else - echo " Error: Please specify aoitype as 1 for coordinates of a polygon, " - echo " 2 for shapefile (point/polygon/line) or " - echo " 3 for comma-separated PATHROW " - exit 1 -fi - -SENSOR=$(echo "$SENSIN" | sed 's/,/_|/g')"_" - - -# ============================================================ -# Filter metadata and extract download links -printf "%s\n" "" "Querying the metadata catalogue for" "Sensor(s): "$SENSIN "Path/Row: "$(echo $PR | sed 's/_//g; s/|/,/g') "Daterange: "$DATEMIN" to "$DATEMAX "Cloud cover minimum: "$CCMIN"%, maximum: "$CCMAX"%" "" - -LINKS=$(grep -E $PR $METACAT | grep -E $SENSOR | awk -F "," '{OFS=","} {gsub("-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '$5 >= start && $5 <= stop && $6 == 01 && $7 == "T1" && $12 >= clow && $12 <= chigh') - -printf "%s" "$LINKS" > LS_filtered_meta.txt -SIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$17/1048576} END {printf "%f", s}') -#NSCENES=$(( $(printf "%s" "$LINKS" | wc -l | cut -d" " -f 1) + 1 )) -NSCENES=$(sed -n '$=' LS_filtered_meta.txt) -#rm LS_filtered_meta.txt - -# ============================================================ -# Get total number and size of scenes matching criteria -UNIT="MB" -if [ ${SIZE%%.*} -gt 1024 
]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="GB" -fi -if [ ${SIZE%%.*} -gt 1024 ]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="TB" -fi -if [ ${SIZE%%.*} -gt 1024 ]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="PB" -fi - -if [ -z $NSCENES ];then - printf "%s\n" "There were no Landsat Level 1 scenes found matching the search criteria" "" - exit 0 -else - printf "%s\n" "$NSCENES Landsat Level 1 scenes matching criteria found" "$SIZE $UNIT data volume found" "" -fi - -if [ $DRYRUN -eq 1 ]; then - exit 0 -fi - - -# ============================================================ -# Download scenes -echo "Starting to download "$NSCENES" Landsat Level 1 scenes" -ITER=1 -for LINK in $LINKS -do - SCENEID=$(echo $LINK | cut -d, -f 2) - PR=$(echo $SCENEID | cut -d_ -f3) - PRPATH=$POOL/$PR - URL=$(echo $LINK | cut -d, -f 18) - - # create target directory if it doesn't exist - if [ ! -w $PRPATH ]; then - mkdir $PRPATH - if [ ! -w $PRPATH ]; then - echo "$PRPATH: Creating directory failed." - exit 1 - fi - fi - ABSPRPATH=$(cd $POOL/$PR; pwd) - - # Check if scene already exists - SCENEPATH=$ABSPRPATH/$SCENEID - if [ -d $SCENEPATH ]; then - echo "Scene "$SCENEID"("$ITER" of "$NSCENES") exists, skipping..." - ((ITER++)) - continue - fi - - echo "Downloading "$SCENEID"("$ITER" of "$NSCENES")..." - gsutil -m -q cp -c -L $POOL"/download_log.txt" -R $URL $ABSPRPATH - - echo "$SCENEPATH QUEUED" >> $QUEUE - - - ((ITER++)) -done diff --git a/bash/force-level1-sentinel2-g.sh b/bash/force-level1-sentinel2-g.sh deleted file mode 100755 index 7fc3b45c..00000000 --- a/bash/force-level1-sentinel2-g.sh +++ /dev/null @@ -1,283 +0,0 @@ -#!/bin/bash - -# ===================================================================================== -# Name: S2_queryAndDownload_gsutil.sh -# Author: Stefan Ernst -# Date: 2020-06-20 -# Last change: 2020-08-11 -# Desc: Query and download the public Google Cloud Storage Landsat archive. 
-# Only Collection 1 Tier one products are considered. -# Requirements: -# 1. Google Landsat metadata catalogue: -# https://console.cloud.google.com/storage/browser/gcp-public-data-landsat -# 2. shapefile containing the Landsat WRS-2 descending orbits: -# https://www.usgs.gov/media/files/landsat-wrs-2-descending-path-row-shapefile -# 3. gsutil - available through pip and conda -# Run the command 'gsutil config' after installation to set up authorization -# with your Google account. -# 4. gdal - specify the AOI as path/row if gdal is not available -# ===================================================================================== - - -trap "echo Exited!; exit;" SIGINT SIGTERM #make sure that CTRL-C stops the whole process - -show_help() { -cat << EOF - -Usage: `basename $0` [-d] [-u] metadata-dir level-1-datapool queue aoi - aoitype sensor starttime endtime min-cc max-cc - - metadata-dir - directory where the Sentinel-2 metadata (csv file) is stored - - level-1-datapool - An existing directory, your files will be stored here - - queue - Downloaded files are appended to a file queue, which is needed for - the Level 2 processing. The file doesn't need to exist. If it exists, - new lines will be appended on successful ingestion - - area of interest - (1) The coordinates of your study area: "X1/Y1,X2/Y2,X3/Y3,...,X1/Y1" - The polygon must be closed (first X/Y = last X/Y). X/Y must be given as - decimal degrees with negative values for West and South coordinates. - (2) a shapefile (point/polygon/line). 
On-the-fly reprojection is provided, - but using EPSG4326 is recommended - (3) Path/Row of the Landsat footprints of interest: "PPPRRR,PPPRRR,PPPRRR" - Make sure to keep leading zeros - correct: 181034, incorrect: 18134 - - type of area of interest - 1 - coordinates as text - 2 - shapefile - 3 - PathRow as text - - starttime endtime - Dates must be given as YYYY-MM-DD - - min-cc max-cc - The cloud cover range must be specified in % - - -d dry will trigger a dry run that will only return the number of images - and their total data volume - - -u will update the metadata catalogue (download and extract from GCS) - only the metadata dir is required as argument when using this option - - -h|--help show this help - -EOF -} - - -update_meta() { - echo "Updating metadata catalogue..." - gsutil -m cp gs://gcp-public-data-sentinel-2/index.csv.gz $METADIR - gunzip $METADIR/index.csv.gz - mv $METADIR/index.csv $METADIR/metadata_S2.csv -} - - -# ============================================================ -# check for options -DRYRUN=0 -while :; do - case $1 in - -d) DRYRUN=1 ;; - -h|-\?|--help) show_help - exit 0 ;; - -u) METADIR=$2 - if [ $# -lt 2 ]; then - echo "Metadata directory not specified, exiting" - exit 1 - elif [ $# -gt 2 ]; then - echo "Error: Please only specify the metadata directory when using the update option (-u)" - exit 1 - elif ! [ -w $METADIR ]; then - echo "Can not write to metadata directory, exiting" - exit 1 - fi - update_meta - echo "Done. You can run this script without option -d to download data now." 
- exit ;; - -?*) printf "%s\n" "" "Incorrect option specified" "" - show_help >&2 - exit 1 ;; - *) break #no more options - esac - shift -done - - -# ============================================================ -# if wrong number of input args and -u opt not set, stop -EXPECTED_ARGS=9 -if [ $# -ne $EXPECTED_ARGS ]; then - printf "%s\n" "" "Incorrect number of input arguments provided" - show_help - exit -fi - -METADIR=$1 -POOL=$2 -QUEUE=$3 -AOI=$4 -AOITYPE=$5 -DATEMIN=$6 -DATEMAX=$7 -CCMIN=$8 -CCMAX=$9 - -METACAT=$METADIR"/metadata_S2.csv" - - -# ============================================================ -# Check user input -if ! date -d $DATEMIN &> /dev/null; then - printf "%s\n" "" "starttime ($DATEMIN) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" - exit 1 - elif ! date -d $DATEMAX &> /dev/null; then - printf "%s\n" "" "endtime ($DATEMAX) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" - exit 1 -fi - - -# ============================================================ -# Check if metadata catalogue exists and is up to date -if ! [ -f $METACAT ]; then - echo "Metadata catalogue does not exist." - update_meta -fi - -METADATE=$(date -d $(stat $METACAT | grep "Change: " | cut -d" " -f2) +%s) -if [ $(date -d $DATEMAX +%s) -gt $METADATE ]; then - printf "%s\n" "" "WARNING: The selected time window exceeds the last update of the metadata catalogue" "Results may be incomplete, please consider updating the metadata catalogue using the -d option." -fi - - -# ============================================================ -# Get S2 MGRS tiles of interest -if [ "$AOITYPE" -eq 2 ]; then - if ! [ $(basename "$AOI" | cut -d"." -f 2-) == "shp" ]; then - printf "%s\n" "" "WARNING: AOI does not seem to be a shapefile. Other filetypes supported by GDAL should work, but are untested." - fi -fi -if [ "$AOITYPE" -eq 1 ] || [ "$AOITYPE" -eq 2 ]; then - if ! 
[ -x "$(command -v ogr2ogr)" ]; then - printf "%s\n" "Could not find ogr2ogr, is gdal installed?" "Define the AOI polygon using coordinates (option 3) if gdal is not available." >&2 - exit 1 - fi -fi - - -if [ "$AOITYPE" -eq 1 ]; then - - WKT=$(echo $AOI | sed 's/,/%20/g; s/\//,/g') - WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetFeature&typename=sentinel2&Filter=%3Cogc:Filter%3E%3Cogc:Intersects%3E%3Cogc:PropertyName%3Eshape%3C/ogc:PropertyName%3E%3Cgml:Polygon%20srsName=%22EPSG:4326%22%3E%3Cgml:outerBoundaryIs%3E%3Cgml:LinearRing%3E%3Cgml:coordinates%3E"$WKT"%3C/gml:coordinates%3E%3C/gml:LinearRing%3E%3C/gml:outerBoundaryIs%3E%3C/gml:Polygon%3E%3C/ogc:Intersects%3E%3C/ogc:Filter%3E" - TILERAW=$(ogr2ogr -f CSV /vsistdout/ -select "Name" WFS:"$WFSURL") - TILES="_"$(echo $TILERAW | sed 's/Name, /T/; s/ /_|_T/g')"_" - -elif [ "$AOITYPE" -eq 2 ]; then - - printf "%s\n" "" "Searching for S2 tiles intersecting with geometries of AOI shapefile..." - AOINE=$(echo $(basename "$AOI") | rev | cut -d"." 
-f 2- | rev) - BBOX=$(ogrinfo -so $AOI $AOINE | grep "Extent: " | sed 's/Extent: //; s/(//g; s/)//g; s/, /,/g; s/ - /,/') - WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetCapabilities&typename=sentinel2&bbox="$BBOX - - ogr2ogr -f "GPKG" merged.gpkg WFS:"$WFSURL" -append -update - ogr2ogr -f "GPKG" merged.gpkg $AOI -append -update - - TILERAW=$(ogr2ogr -f CSV /vsistdout/ -dialect sqlite -sql "SELECT sentinel2.Name FROM sentinel2, $AOINE WHERE ST_Intersects(sentinel2.geom, ST_Transform($AOINE.geom, 4326))" merged.gpkg) - TILES="_"$(echo $TILERAW | sed 's/Name, /T/; s/ /_|_T/g')"_" - rm merged.gpkg - -elif [ "$AOITYPE" -eq 3 ]; then - - TILERAW=$AOI - TILES="_T"$(echo $AOI | sed 's/,/_|_T/g')"_" - -else - echo " Error: Please specify aoitype as 1 for coordinates of a polygon, " - echo " 2 for shapefile (point/polygon/line) or " - echo " 3 for comma-separated tile names " - exit -fi - - -# ============================================================ -# Filter metadata and extract download links -printf "%s\n" "" "Querying the metadata catalogue for" "Tile(s): "$(echo $TILERAW | sed 's/Name, //; s/ /,/g') "Daterange: "$DATEMIN" to "$DATEMAX "Cloud cover minimum: "$CCMIN"%, maximum: "$CCMAX"%" "" - -LINKS=$(grep -E $TILES $METACAT | awk -F "," '{OFS=","} {gsub("T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z|-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '{OFS=","} $5 >= start && $5 <= stop && $7 >= clow && $7 <= chigh') - -printf "%s" "$LINKS" > S2_filtered_meta.txt -SIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$6/1048576} END {printf "%f", s}') -NSCENES=$(sed -n '$=' S2_filtered_meta.txt) -rm S2_filtered_meta.txt - - -# ============================================================ -# Get total number and size of scenes matching criteria -UNIT="MB" -if [ ${SIZE%%.*} -gt 1024 ]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="GB" -fi -if [ ${SIZE%%.*} -gt 1024 
]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="TB" -fi -if [ ${SIZE%%.*} -gt 1024 ]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="PB" -fi - -if [ -z $NSCENES ];then - printf "%s\n" "There were no Sentinel-2 Level 1 scenes found matching the search criteria" "" - exit 0 -else - printf "%s\n" "$NSCENES Sentinel-2 Level 1 scenes matching criteria found" "$SIZE $UNIT data volume found" "" -fi - -if [ $DRYRUN -eq 1 ]; then - exit 0 -fi - - -# ============================================================ -# Download scenes -echo "Starting to download "$NSCENES" Sentinel-2 Level 1 scenes" -ITER=1 -for LINK in $LINKS -do - SCENEID=$(echo $LINK | cut -d, -f 2) - TILE=$(echo $LINK | cut -d, -f1 | grep -o -E "T[0-9]{2}[A-Z]{3}") - TILEPATH=$POOL/$TILE - URL=$(echo $LINK | cut -d, -f 14) - - # create target directory if it doesn't exist - if [ ! -w $TILEPATH ]; then - mkdir $TILEPATH - if [ ! -w $TILEPATH ]; then - echo "$TILEPATH: Creating directory failed." - exit 1 - fi - fi - ABSTILEPATH=$(cd $POOL/$TILE; pwd) - - # Check if scene already exists - SCENEPATH=$ABSTILEPATH/$SCENEID".SAFE" - if [ -d $SCENEPATH ]; then - echo "Scene "$SCENEID"("$ITER" of "$NSCENES") exists, skipping..." - ((ITER++)) - continue - fi - - echo "Downloading "$SCENEID"("$ITER" of "$NSCENES")..." - gsutil -m -q cp -c -L $POOL"/download_log.txt" -R $URL $ABSTILEPATH - - echo "$SCENEPATH QUEUED" >> $QUEUE - - - ((ITER++)) -done diff --git a/bash/force-level1-sentinel2.sh b/bash/force-level1-sentinel2.sh deleted file mode 100755 index 0e30c65d..00000000 --- a/bash/force-level1-sentinel2.sh +++ /dev/null @@ -1,408 +0,0 @@ -#!/bin/bash - -########################################################################## -# -# This file is part of FORCE - Framework for Operational Radiometric -# Correction for Environmental monitoring. 
-# -# Copyright (C) 2013-2020 David Frantz -# -# FORCE is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# FORCE is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with FORCE. If not, see . -# -########################################################################## - -# this script downloads Sentinel-2 from ESA and maintains a clean Level-1 datapool - -EXPECTED_ARGS=7 -MAXIMUM_ARGS=8 - -# if wrong number of input args, stop -if [ $# -ne $EXPECTED_ARGS ] && [ $# -ne $MAXIMUM_ARGS ]; then - echo "" - echo "Usage: `basename $0` Level-1-Datapool queue Boundingbox" - echo " starttime endtime min-cc max-cc [dry]" - echo "" - echo " Level-1-Datapool" - echo " An existing directory, your files will be stored here" - echo "" - echo " queue" - echo " Downloaded files are appended to a file queue, which is needed for" - echo " the Level 2 processing. The file doesn't need to exist. If it exists," - echo " new lines will be appended on successful ingestion" - echo "" - echo " Boundingbox" - echo " The coordinates of your study area: \"X1/Y1,X2/Y2,X3/Y3,...,X1/Y1\"" - echo " The box must be closed (first X/Y = last X/Y). X/Y must be given as" - echo " decimal degrees with negative values for West and South coordinates." 
- echo " Note that the box doesn't have to be square, you can specify a polygon" - echo "" - echo " starttime endtime" - echo " Dates must be given as YYYY-MM-DD" - echo "" - echo " min-cc max-cc" - echo " The cloud cover range must be given in %" - echo "" - echo " dry will trigger a dry run that will only return the number of images" - echo " and their total data volume" - echo "" - echo " Your ESA credentials must be placed in \$HOME/.scihub" - echo " (OR in \$FORCE_CREDENTIALS/.scihub if the FORCE_CREDENTIALS environment" - echo " variable is defined)." - echo " First line: User name" - echo " Second line: Password, special characters might be problematic" - echo "" - exit -fi - -if [ $# -eq $MAXIMUM_ARGS ]; then - if [ $8 == dry ]; then - dryrun=1 - else - echo "unknown option, optional argument 7 must be dry" - exit - fi -else - dryrun=0 -fi - -POOL=$1 -POOLLIST=$2 -BOUND=$(echo $3 | sed 's_/_%20_g' | sed 's/ //g') -S0=$4 -S1=$5 -C0=$6 -C1=$7 - -if [ ! -w $POOL ]; then - echo "Level-1-Datapool must exist" - exit -fi - - -if [ -z "$FORCE_CREDENTIALS" ]; then - CREDDIR=$HOME/.scihub -else - CREDDIR=$FORCE_CREDENTIALS/.scihub -fi - - -if [ ! 
-r $CREDDIR ]; then - echo "Your ESA credentials were not found in $CREDDIR" - echo " First line: User name" - echo " Second line: Password, special characters might be problematic" - exit -fi - -H=$(head -n 2 $CREDDIR) -USER=$(echo $H | cut -d ' ' -f 1) -PW=$(echo $H | cut -d ' ' -f 2) -CRED="--user=$USER --password=$PW" -HUB="https://scihub.copernicus.eu/dhus/search" - - -FNAME="S2?_MSIL1C*" - -NMAX=100 -FNAMEQ="filename:"$FNAME -BOUNDQ="footprint:\"Intersects(POLYGON(("$BOUND")))\"" -DIRECQ="orbitdirection:Descending" -SENS="beginposition:["$S0"T00:00:00.000Z%20TO%20"$S1"T00:00:00.000Z]" -CC="cloudcoverpercentage:["$C0"%20TO%20"$C1"]" - -QUERY="?q=$FNAMEQ%20AND%20$BOUNDQ%20AND%20$DIRECQ%20AND%20$SENS%20AND%20$CC&rows=$NMAX" - -START=0 -NUM=1 - -SIZE=0 - -while [ $START -lt $NUM ]; do - - # current time - CTIME=$(date +"%Y-%m-%d_%H:%M:%S") - LIST=$POOL"/query_"$C0"-"$C1"-"$START"_"$CTIME".html" - - QUERY="?q=$FNAMEQ%20AND%20$BOUNDQ%20AND%20$DIRECQ%20AND%20$SENS%20AND%20$CC&rows=$NMAX&start=$START" - - wget --no-check-certificate -q -O $LIST $CRED $HUB$QUERY# - EXIT=$? - - # check exit code - if [ $EXIT -ne 0 ]; then - if [ $EXIT -eq 1 ]; then - echo "Error. Unable to query Scihub. Generic error code." - elif [ $EXIT -eq 2 ]; then - echo "Error. Unable to query Scihub. Parse error." - elif [ $EXIT -eq 3 ]; then - echo "Error. Unable to query Scihub. File I/O error." - elif [ $EXIT -eq 4 ]; then - echo "Error. Unable to query Scihub. Network failure." - elif [ $EXIT -eq 5 ]; then - echo "Error. Unable to query Scihub. SSL verification failure." - elif [ $EXIT -eq 6 ]; then - echo "Error. Unable to query Scihub. Username/password authentication failure." - elif [ $EXIT -eq 7 ]; then - echo "Error. Unable to query Scihub. Protocol errors." - elif [ $EXIT -eq 8 ]; then - echo "Error. Unable to query Scihub. Server issued an error response." - fi - rm $LIST - exit - fi - - # test if query exists - if [ ! -f $LIST ]; then - echo "Error. Unable to query Scihub." 
- exit - fi - - NUM=$(grep 'totalResults' $LIST | sed -r 's/.*>([0-9]*)<.*/\1/') - TODO=$(($NUM-$START)) - if [ $TODO -gt 100 ]; then - PAGE=100 - else - PAGE=$TODO - fi - echo "$CTIME - Found $TODO S2A/B files. Downloading $PAGE files on this page." - START=$(($START + $NMAX)) - - SIZES=(`grep 'size' $LIST | sed 's/<[^<>]*>//g' | sed 's/[A-Z ]//g'`) - UNITS=(`grep 'size' $LIST | sed 's/<[^<>]*>//g' | sed 's/[0-9. ]//g'`) - - for s in $(seq ${#SIZES[@]}); do - if [ ! ${UNITS[$s]} == "MB" ]; then - echo "warning: size not in MB. This script needs tuning" - fi - SIZE=$(echo $SIZE ${SIZES[$s]} | awk '{print $1 + $2}') - done - - - - - URL=($(grep '//')) - FNAMES=($(grep '\.SAFE' $LIST | cut -d '>' -f 2 | sed 's/SAFE.*/SAFE/')) - FNAMEZ=($(grep '\.SAFE' $LIST | cut -d '>' -f 2 | sed 's/SAFE.*/zip/')) - - rm $LIST - if [ -f $LIST ]; then - echo "Warning. Unable to delete Scihub query." - fi - - if [ $dryrun -eq 1 ]; then - continue; - fi - - #echo "$CTIME - Found ${#URL[*]} S2A/B files on this page." - if [ ${#URL[*]} -eq 0 ]; then - exit - fi - - - for i in $(seq 1 ${#URL[*]}); do - - #echo ${URL[$j]} - - j=$(($i-1)) - - # get tile id and target directory - TILE=$(echo ${FNAMES[$j]} | sed 's/.*_\(T[0-9]\{2\}[A-Z]\{3\}\)_.*/\1/') - PPATH=$POOL/$TILE - - # create target directory if it doesn't exist - if [ ! -w $PPATH ]; then - mkdir $PPATH - if [ ! -w $PPATH ]; then - echo "$PPATH: Creating directory failed." - exit - fi - #chmod 0755 $PPATH - fi - - PNAMES=$PPATH/${FNAMES[$j]} - PNAMEZ=$PPATH/${FNAMEZ[$j]} - - if [ -d $PNAMES ]; then - # file already exists, do nothing - #echo "${FNAMES[$j]}: File exists." - continue - else - - BASE=$(echo ${FNAMES[$j]} | sed 's/\(.*\)_N[0-9]\{4\}.*/\1/') - #echo $BASE - - if [ -d $PPATH/$BASE* ]; then - - PNAME_POOL=$(ls -d $PPATH/$BASE*) -# NPOOL=$(echo $PNAME_POOL | wc -w) - -# if [ $NPOOL -gt 1 ]; then -# echo "should not happen." 
-# continue -# elif [ $NPOOL -eq 1 ]; then - - VERSION_HUB=$(echo ${FNAMES[$j]} | sed 's/.*_\(N[0-9]\{4\}\)_.*/\1/') - VMAJOR_HUB=${VERSION_HUB:1:2} - VMINOR_HUB=${VERSION_HUB:3:4} - - FNAME_POOL=$(basename $PNAME_POOL) - - VERSION_POOL=$(echo $FNAME_POOL | sed 's/.*_\(N[0-9]\{4\}\)_.*/\1/') - VMAJOR_POOL=${VERSION_POOL:1:2} - VMINOR_POOL=${VERSION_POOL:3:4} - - #echo $VERSION_HUB $VMAJOR_HUB $VMINOR_HUB - #echo $VERSION_POOL $VMAJOR_POOL $VMINOR_POOL - #echo $FNAME_POOL - - if [ $VMAJOR_HUB -lt $VMAJOR_POOL ]; then - continue - elif [ $VMAJOR_HUB -eq $VMAJOR_POOL ] && [ $VMINOR_HUB -le $VMINOR_POOL ]; then - continue - fi - - #echo "delete" $PNAME_POOL - rm -r $PNAME_POOL - if [ -d $PNAME_POOL ]; then - echo "$FNAME_POOL: Could not update dataset." - continue - else - echo "$FNAME_POOL: Removed dataset." - sed -i.tmp "/$FNAME_POOL/d" $POOLLIST - chmod --reference $POOLLIST".tmp" $POOLLIST - rm $POOLLIST".tmp" - fi - - fi - - fi - - # get HTTP response, and determine whether file was pulled from LTA, or is ready to download - CTIME=$(date +"%Y%m%d%H%M%S") - CHECK=$POOL"/LTA_CHECK_"$CTIME - HTTP=$(wget --server-response --no-check-certificate -O $CHECK $CRED ${URL[$j]} 2>&1 | grep "HTTP/" | tail -n 1 | awk '{print $2}') - rm $CHECK - #HTTP=$(wget --spider --server-response --no-check-certificate $CRED ${URL[$j]} 2>&1 | grep "HTTP/" | tail -n 1 | awk '{print $2}') - - if [ $HTTP -eq 202 ]; then - echo "${FNAMES[$j]}: Pulling from Long Term Archive. Success. Rerun this program after a while" - sleep 5 - continue - elif [ $HTTP -eq 503 ]; then - echo "${FNAMES[$j]}: Pulling from Long Term Archive. Failed. The LTA archive is busy. Rerun this program after a while" - sleep 5 - continue - elif [ $HTTP -eq 403 ]; then - echo "${FNAMES[$j]}: Pulling from Long Term Archive. Failed. You have exhausted your user quota. Rerun this program after a while" - sleep 5 - continue - elif [ $HTTP -eq 500 ]; then - echo "${FNAMES[$j]}: Pulling from Long Term Archive. Failed. 
Something is not right" - sleep 5 - continue - elif [ $HTTP -eq 429 ]; then - echo "${FNAMES[$j]}: Pulling from Long Term Archive. Failed. Too Many Requests" - sleep 5 - continue - elif [ $HTTP -eq 200 ]; then - wget -q --show-progress --no-check-certificate -O $PNAMEZ $CRED ${URL[$j]} - EXIT=$? - else - echo "${FNAMES[$j]}: Pulling from Long Term Archive. Failed. HTTP code" $HTTP - sleep 5 - continue - fi - - # check exit code - if [ $EXIT -ne 0 ]; then - if [ $EXIT -eq 1 ]; then - echo "${FNAMES[$j]}: Generic error code." - elif [ $EXIT -eq 2 ]; then - echo "${FNAMES[$j]}: Parse error." - elif [ $EXIT -eq 3 ]; then - echo "${FNAMES[$j]}: File I/O error." - elif [ $EXIT -eq 4 ]; then - echo "${FNAMES[$j]}: Network failure." - elif [ $EXIT -eq 5 ]; then - echo "${FNAMES[$j]}: SSL verification failure." - elif [ $EXIT -eq 6 ]; then - echo "${FNAMES[$j]}: Username/password authentication failure." - elif [ $EXIT -eq 7 ]; then - echo "${FNAMES[$j]}: Protocol errors." - elif [ $EXIT -eq 8 ]; then - echo "${FNAMES[$j]}: Server issued an error response." - fi - rm $PNAMEZ - continue - fi - - # to be sure that file exists - if [ ! -f $PNAMEZ ]; then - echo "${FNAMES[$j]}: Error. File not downloaded." - continue - fi - - - # extract zip - #SAFE=$PPATH/$(unzip -l -q $PNAMEZ | head -n 3 | tail -n 1 | sed 's/.* //') - unzip -qq -d $PPATH $PNAMEZ 2>/dev/null - - # delete zip - rm $PNAMEZ - if [ -f $PNAMEZ ]; then - echo "Warning. Unable to delete zip file." - fi - - PNAMES=$(ls -d $PPATH/$BASE*.SAFE) - - #to be sure that extracted directory exists - if [ ! -d $PNAMES ]; then - echo "$FNAMES: Extracting zip failed." 
- exit - fi - - - # protect files - #find $PNAMES -type f -exec chmod 0644 {} \; - #find $PNAMES -type d -exec chmod 0755 {} \; - - #TILE=$(ls -d $PNAMES/GRANULE/*) - - echo "$PNAMES QUEUED" >> $POOLLIST - - done - -done - - -if [ $dryrun -eq 1 ]; then - - UNIT="MB" - if [ ${SIZE%%.*} -gt 1024 ]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="GB" - fi - if [ ${SIZE%%.*} -gt 1024 ]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="TB" - fi - if [ ${SIZE%%.*} -gt 1024 ]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="PB" - fi - - echo $NUM "Sentinel-2 A/B L1C files available" - echo $SIZE $UNIT "data volume available" - -fi - From 927111321134a6c6f2ade9f4ebde082f31e7af7f Mon Sep 17 00:00:00 2001 From: Stefan Ernst Date: Fri, 14 Aug 2020 15:41:48 +0200 Subject: [PATCH 23/78] google: ls and s2 now in -gcs.sh, esa: now in -esa.sh, force-level1.sh acts as master script --- bash/force-level1-esa.sh | 419 +++++++++++++++++++++++++++++++++++++++ bash/force-level1-gcs.sh | 319 +++++++++++++++++++++++++++++ bash/force-level1.sh | 56 ++++++ 3 files changed, 794 insertions(+) create mode 100755 bash/force-level1-esa.sh create mode 100755 bash/force-level1-gcs.sh create mode 100755 bash/force-level1.sh diff --git a/bash/force-level1-esa.sh b/bash/force-level1-esa.sh new file mode 100755 index 00000000..42297a17 --- /dev/null +++ b/bash/force-level1-esa.sh @@ -0,0 +1,419 @@ +#!/bin/bash + +########################################################################## +# +# This file is part of FORCE - Framework for Operational Radiometric +# Correction for Environmental monitoring. +# +# Copyright (C) 2013-2020 David Frantz +# +# FORCE is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# FORCE is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with FORCE. If not, see . +# +########################################################################## + +# this script downloads Sentinel-2 from ESA and maintains a clean Level-1 datapool + +show_help() { +cat << HELP + +Usage: `basename $0` [-d] Level-1-Datapool queue Boundingbox + starttime endtime min-cc max-cc + +Mandatory arguments: + Level-1-Datapool + An existing directory, your files will be stored here + + queue + Downloaded files are appended to a file queue, which is needed for + the Level 2 processing. The file doesn't need to exist. If it exists, + new lines will be appended on successful ingestion + + Boundingbox + The coordinates of your study area: \"X1/Y1,X2/Y2,X3/Y3,...,X1/Y1\" + The box must be closed (first X/Y = last X/Y). X/Y must be given as + decimal degrees with negative values for West and South coordinates. + Note that the box doesn't have to be square, you can specify a polygon + + starttime endtime + Dates must be given as YYYY-MM-DD + + min-cc max-cc + The cloud cover range must be given in % + +Optional arguments (always placed BEFORE mandatory arguments): + -d + will trigger a dry run that will only return the number of images + and their total data volume + + -h|--help + show this help + + Your ESA credentials must be placed in \$HOME/.scihub + (OR in \$FORCE_CREDENTIALS/.scihub if the FORCE_CREDENTIALS environment + variable is defined). 
+ First line: User name + Second line: Password, special characters might be problematic + +HELP +exit 1 +} + +echo $@ +# check for optional args and set dryrun var +case $1 in + -d) + dryrun=1 + shift ;; + -h|--help) + show_help ;; + *) + dryrun=0 ;; +esac + +# if wrong number of input args, stop +if [ $# -ne 7 ]; then + printf "%s\n" "" "Invalid number of input arguments specified" + show_help +fi + +POOL=$1 +POOLLIST=$2 +BOUND=$(echo $3 | sed 's_/_%20_g' | sed 's/ //g') +S0=$4 +S1=$5 +C0=$6 +C1=$7 + +if [ ! -w $POOL ]; then + echo "Level-1-Datapool must exist" + exit +fi + + +if [ -z "$FORCE_CREDENTIALS" ]; then + CREDDIR=$HOME/.scihub +else + CREDDIR=$FORCE_CREDENTIALS/.scihub +fi + + +if [ ! -r $CREDDIR ]; then + echo "Your ESA credentials were not found in $CREDDIR" + echo " First line: User name" + echo " Second line: Password, special characters might be problematic" + exit +fi + +H=$(head -n 2 $CREDDIR) +USER=$(echo $H | cut -d ' ' -f 1) +PW=$(echo $H | cut -d ' ' -f 2) +CRED="--user=$USER --password=$PW" +HUB="https://scihub.copernicus.eu/dhus/search" + + +FNAME="S2?_MSIL1C*" + +NMAX=100 +FNAMEQ="filename:"$FNAME +BOUNDQ="footprint:\"Intersects(POLYGON(("$BOUND")))\"" +DIRECQ="orbitdirection:Descending" +SENS="beginposition:["$S0"T00:00:00.000Z%20TO%20"$S1"T00:00:00.000Z]" +CC="cloudcoverpercentage:["$C0"%20TO%20"$C1"]" + +QUERY="?q=$FNAMEQ%20AND%20$BOUNDQ%20AND%20$DIRECQ%20AND%20$SENS%20AND%20$CC&rows=$NMAX" + +START=0 +NUM=1 + +SIZE=0 + +while [ $START -lt $NUM ]; do + + # current time + CTIME=$(date +"%Y-%m-%d_%H:%M:%S") + LIST=$POOL"/query_"$C0"-"$C1"-"$START"_"$CTIME".html" + + QUERY="?q=$FNAMEQ%20AND%20$BOUNDQ%20AND%20$DIRECQ%20AND%20$SENS%20AND%20$CC&rows=$NMAX&start=$START" + + wget --no-check-certificate -q -O $LIST $CRED $HUB$QUERY# + EXIT=$? + + # check exit code + if [ $EXIT -ne 0 ]; then + if [ $EXIT -eq 1 ]; then + echo "Error. Unable to query Scihub. Generic error code." + elif [ $EXIT -eq 2 ]; then + echo "Error. Unable to query Scihub. 
Parse error." + elif [ $EXIT -eq 3 ]; then + echo "Error. Unable to query Scihub. File I/O error." + elif [ $EXIT -eq 4 ]; then + echo "Error. Unable to query Scihub. Network failure." + elif [ $EXIT -eq 5 ]; then + echo "Error. Unable to query Scihub. SSL verification failure." + elif [ $EXIT -eq 6 ]; then + echo "Error. Unable to query Scihub. Username/password authentication failure." + elif [ $EXIT -eq 7 ]; then + echo "Error. Unable to query Scihub. Protocol errors." + elif [ $EXIT -eq 8 ]; then + echo "Error. Unable to query Scihub. Server issued an error response." + fi + rm $LIST + exit + fi + + # test if query exists + if [ ! -f $LIST ]; then + echo "Error. Unable to query Scihub." + exit + fi + + NUM=$(grep 'totalResults' $LIST | sed -r 's/.*>([0-9]*)<.*/\1/') + TODO=$(($NUM-$START)) + if [ $TODO -gt 100 ]; then + PAGE=100 + else + PAGE=$TODO + fi + echo "$CTIME - Found $TODO S2A/B files. Downloading $PAGE files on this page." + START=$(($START + $NMAX)) + + SIZES=(`grep 'size' $LIST | sed 's/<[^<>]*>//g' | sed 's/[A-Z ]//g'`) + UNITS=(`grep 'size' $LIST | sed 's/<[^<>]*>//g' | sed 's/[0-9. ]//g'`) + + for s in $(seq ${#SIZES[@]}); do + if [ ! ${UNITS[$s]} == "MB" ]; then + echo "warning: size not in MB. This script needs tuning" + fi + SIZE=$(echo $SIZE ${SIZES[$s]} | awk '{print $1 + $2}') + done + + + + + URL=($(grep '//')) + FNAMES=($(grep '\.SAFE' $LIST | cut -d '>' -f 2 | sed 's/SAFE.*/SAFE/')) + FNAMEZ=($(grep '\.SAFE' $LIST | cut -d '>' -f 2 | sed 's/SAFE.*/zip/')) + + rm $LIST + if [ -f $LIST ]; then + echo "Warning. Unable to delete Scihub query." + fi + + if [ $dryrun -eq 1 ]; then + continue; + fi + + #echo "$CTIME - Found ${#URL[*]} S2A/B files on this page." 
+ if [ ${#URL[*]} -eq 0 ]; then + exit + fi + + + for i in $(seq 1 ${#URL[*]}); do + + #echo ${URL[$j]} + + j=$(($i-1)) + + # get tile id and target directory + TILE=$(echo ${FNAMES[$j]} | sed 's/.*_\(T[0-9]\{2\}[A-Z]\{3\}\)_.*/\1/') + PPATH=$POOL/$TILE + + # create target directory if it doesn't exist + if [ ! -w $PPATH ]; then + mkdir $PPATH + if [ ! -w $PPATH ]; then + echo "$PPATH: Creating directory failed." + exit + fi + #chmod 0755 $PPATH + fi + + PNAMES=$PPATH/${FNAMES[$j]} + PNAMEZ=$PPATH/${FNAMEZ[$j]} + + if [ -d $PNAMES ]; then + # file already exists, do nothing + #echo "${FNAMES[$j]}: File exists." + continue + else + + BASE=$(echo ${FNAMES[$j]} | sed 's/\(.*\)_N[0-9]\{4\}.*/\1/') + #echo $BASE + + if [ -d $PPATH/$BASE* ]; then + + PNAME_POOL=$(ls -d $PPATH/$BASE*) +# NPOOL=$(echo $PNAME_POOL | wc -w) + +# if [ $NPOOL -gt 1 ]; then +# echo "should not happen." +# continue +# elif [ $NPOOL -eq 1 ]; then + + VERSION_HUB=$(echo ${FNAMES[$j]} | sed 's/.*_\(N[0-9]\{4\}\)_.*/\1/') + VMAJOR_HUB=${VERSION_HUB:1:2} + VMINOR_HUB=${VERSION_HUB:3:4} + + FNAME_POOL=$(basename $PNAME_POOL) + + VERSION_POOL=$(echo $FNAME_POOL | sed 's/.*_\(N[0-9]\{4\}\)_.*/\1/') + VMAJOR_POOL=${VERSION_POOL:1:2} + VMINOR_POOL=${VERSION_POOL:3:4} + + #echo $VERSION_HUB $VMAJOR_HUB $VMINOR_HUB + #echo $VERSION_POOL $VMAJOR_POOL $VMINOR_POOL + #echo $FNAME_POOL + + if [ $VMAJOR_HUB -lt $VMAJOR_POOL ]; then + continue + elif [ $VMAJOR_HUB -eq $VMAJOR_POOL ] && [ $VMINOR_HUB -le $VMINOR_POOL ]; then + continue + fi + + #echo "delete" $PNAME_POOL + rm -r $PNAME_POOL + if [ -d $PNAME_POOL ]; then + echo "$FNAME_POOL: Could not update dataset." + continue + else + echo "$FNAME_POOL: Removed dataset." 
+ sed -i.tmp "/$FNAME_POOL/d" $POOLLIST + chmod --reference $POOLLIST".tmp" $POOLLIST + rm $POOLLIST".tmp" + fi + + fi + + fi + + # get HTTP response, and determine whether file was pulled from LTA, or is ready to download + CTIME=$(date +"%Y%m%d%H%M%S") + CHECK=$POOL"/LTA_CHECK_"$CTIME + HTTP=$(wget --server-response --no-check-certificate -O $CHECK $CRED ${URL[$j]} 2>&1 | grep "HTTP/" | tail -n 1 | awk '{print $2}') + rm $CHECK + #HTTP=$(wget --spider --server-response --no-check-certificate $CRED ${URL[$j]} 2>&1 | grep "HTTP/" | tail -n 1 | awk '{print $2}') + + if [ $HTTP -eq 202 ]; then + echo "${FNAMES[$j]}: Pulling from Long Term Archive. Success. Rerun this program after a while" + sleep 5 + continue + elif [ $HTTP -eq 503 ]; then + echo "${FNAMES[$j]}: Pulling from Long Term Archive. Failed. The LTA archive is busy. Rerun this program after a while" + sleep 5 + continue + elif [ $HTTP -eq 403 ]; then + echo "${FNAMES[$j]}: Pulling from Long Term Archive. Failed. You have exhausted your user quota. Rerun this program after a while" + sleep 5 + continue + elif [ $HTTP -eq 500 ]; then + echo "${FNAMES[$j]}: Pulling from Long Term Archive. Failed. Something is not right" + sleep 5 + continue + elif [ $HTTP -eq 429 ]; then + echo "${FNAMES[$j]}: Pulling from Long Term Archive. Failed. Too Many Requests" + sleep 5 + continue + elif [ $HTTP -eq 200 ]; then + wget -q --show-progress --no-check-certificate -O $PNAMEZ $CRED ${URL[$j]} + EXIT=$? + else + echo "${FNAMES[$j]}: Pulling from Long Term Archive. Failed. HTTP code" $HTTP + sleep 5 + continue + fi + + # check exit code + if [ $EXIT -ne 0 ]; then + if [ $EXIT -eq 1 ]; then + echo "${FNAMES[$j]}: Generic error code." + elif [ $EXIT -eq 2 ]; then + echo "${FNAMES[$j]}: Parse error." + elif [ $EXIT -eq 3 ]; then + echo "${FNAMES[$j]}: File I/O error." + elif [ $EXIT -eq 4 ]; then + echo "${FNAMES[$j]}: Network failure." + elif [ $EXIT -eq 5 ]; then + echo "${FNAMES[$j]}: SSL verification failure." 
+ elif [ $EXIT -eq 6 ]; then + echo "${FNAMES[$j]}: Username/password authentication failure." + elif [ $EXIT -eq 7 ]; then + echo "${FNAMES[$j]}: Protocol errors." + elif [ $EXIT -eq 8 ]; then + echo "${FNAMES[$j]}: Server issued an error response." + fi + rm $PNAMEZ + continue + fi + + # to be sure that file exists + if [ ! -f $PNAMEZ ]; then + echo "${FNAMES[$j]}: Error. File not downloaded." + continue + fi + + + # extract zip + #SAFE=$PPATH/$(unzip -l -q $PNAMEZ | head -n 3 | tail -n 1 | sed 's/.* //') + unzip -qq -d $PPATH $PNAMEZ 2>/dev/null + + # delete zip + rm $PNAMEZ + if [ -f $PNAMEZ ]; then + echo "Warning. Unable to delete zip file." + fi + + PNAMES=$(ls -d $PPATH/$BASE*.SAFE) + + #to be sure that extracted directory exists + if [ ! -d $PNAMES ]; then + echo "$FNAMES: Extracting zip failed." + exit + fi + + + # protect files + #find $PNAMES -type f -exec chmod 0644 {} \; + #find $PNAMES -type d -exec chmod 0755 {} \; + + #TILE=$(ls -d $PNAMES/GRANULE/*) + + echo "$PNAMES QUEUED" >> $POOLLIST + + done + +done + + +if [ $dryrun -eq 1 ]; then + + UNIT="MB" + if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="GB" + fi + if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="TB" + fi + if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="PB" + fi + + echo $NUM "Sentinel-2 A/B L1C files available" + echo $SIZE $UNIT "data volume available" + +fi + diff --git a/bash/force-level1-gcs.sh b/bash/force-level1-gcs.sh new file mode 100755 index 00000000..ab68828d --- /dev/null +++ b/bash/force-level1-gcs.sh @@ -0,0 +1,319 @@ +trap "echo Exited!; exit;" SIGINT SIGTERM # make sure that CTRL-C breaks out of download loop +set -e # make sure script exits if any process exits unsuccessfully + +show_help() { +cat << HELP + +Usage: `basename $0` [-d] [-u] metadata-dir level-1-datapool queue aoi + aoitype sensor starttime endtime min-cc max-cc + +Mandatory 
arguments: + metadata-dir + directory where the Landsat metadata (csv file) is stored + + level-1-datapool + An existing directory, your files will be stored here + + queue + Downloaded files are appended to a file queue, which is needed for + the Level 2 processing. The file doesn't need to exist. If it exists, + new lines will be appended on successful ingestion + + area of interest + (1) The coordinates of your study area: "X1/Y1,X2/Y2,X3/Y3,...,X1/Y1" + The polygon must be closed (first X/Y = last X/Y). X/Y must be given as + decimal degrees with negative values for West and South coordinates. + (2) a shapefile (point/polygon/line). On-the-fly reprojection is provided, + but using EPSG4326 is recommended + (3) Path/Row (Landsat): "PPPRRR,PPPRRR,PPPRRR" + Make sure to keep leading zeros - correct: 181034, incorrect: 18134 + Tile name (Sentinel-2): "34UEU,33UUU" + + type of area of interest + 1 - coordinates as text + 2 - shapefile + 3 - PathRow as text + + sensor + Specify the sensor(s) to include. Separate with commas while retaining the + order below. Landsat and Sentinel-2 sensors can not be combined. + Landsat Sentinel-2 + LT05 - Landsat 5 TM S2A + LE07 - Landsat 7 ETM+ S2B + LC08 - Landsat 8 OLI + Correct: "LT05,LC08", incorrect: "LC08,LT05" or "LE07,S2B" + + starttime endtime + Dates must be given as YYYY-MM-DD + + min-cc max-cc + The cloud cover range must be specified in % + +Optional arguments (always placed AFTER platform/mirr and BEFORE mandatory arguments): + -d dry + will trigger a dry run that will only return the number of images + and their total data volume + + -u update + will update the metadata catalogue (download and extract from GCS) + only the metadata dir is required as argument when using this option + + -h|--help + show this help + +HELP +exit 1 +} + + +update_meta() { + echo "Updating metadata catalogue..." 
+ gsutil -m cp gs://gcp-public-data-$GCSNAME/index.csv.gz $METADIR + gunzip $METADIR/index.csv.gz + mv $METADIR/index.csv $METADIR/metadata_$SATELLITE.csv +} + +# set variables for urls, file names, layer names, print, ... +case $PLATFORM in + s2) + GCSNAME="sentinel-2" + SATELLITE="sentinel2" + PRINTNAME="Sentinel-2" ;; + ls) + GCSNAME="landsat" + SATELLITE="landsat" + PRINTNAME="Landsat" ;; +esac + +while :; do + case $1 in + -d) + DRYRUN=1 ;; + -h|-\?|--help) + show_help ;; + -u) + METADIR=$2 + if [ $# -lt 2 ]; then + echo "Metadata directory not specified, exiting" + exit 1 + elif [ $# -gt 2 ]; then + echo "Error: Please only specify the metadata directory when using the update option (-u)" + exit 1 + elif ! [ -w $METADIR ]; then + echo "Can not write to metadata directory, exiting" + exit 1 + else + update_meta + echo "Done. You can run this script without option -u to download data now." + exit + fi ;; + -?*) printf "%s\n" "" "Incorrect option specified" "" + show_help >&2 ;; + *) + break #no more options + esac + shift +done + +if [ $# -ne 10 ]; then + printf "%s\n" "" "Incorrect number of mandatory input arguments provided" + show_help +fi + +# ============================================================ +# Check user input and set up variables +METADIR=$1 +POOL=$2 +QUEUE=$3 +AOI=$4 +AOITYPE=$5 +SENSIN=$6 +DATEMIN=$7 +DATEMAX=$8 +CCMIN=$9 +CCMAX=${10} + +echo $PLATFORM + +SENSIN=$(echo $SENSIN | tr '[:lower:]' '[:upper:]') # convert sensor strings to upper case to prevent unnecessary headaches +case $SENSIN in + S2A|S2A,S2B|S2B) + if [ $PLATFORM = "ls" ]; then + print "%s\n" "Error: Sentinel-2 sensor names for Landsat query received" + show_help + fi ;; + LT05|LT05,LE07|LT05,LE07,LC08|LE07|LE07,LC08|LC08) + if [ $PLATFORM = "s2" ]; then + printf "%s\n" "" "Error: Landsat sensor names for Sentinel-2 query received" + show_help + fi ;; + *) + printf "%s\n" "" "Error: invalid sensor or invalid combination of sensors speficied" + show_help ;; +esac + +if ! 
date -d $DATEMIN &> /dev/null; then + printf "%s\n" "" "starttime ($DATEMIN) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" + exit 1 + elif ! date -d $DATEMAX &> /dev/null; then + printf "%s\n" "" "endtime ($DATEMAX) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" + exit 1 +fi + + +# ============================================================ +# Check if metadata catalogue exists and is up to date +METACAT=$METADIR"/metadata_$SATELLITE.csv" +if ! [ -f $METACAT ]; then + echo $METACAT + printf "%s\n" "" "Metadata catalogue does not exist. Use the -u option to download / update the metadata catalogue" "" + exit 1 +fi + +METADATE=$(date -d $(stat $METACAT | grep "Change: " | cut -d" " -f2) +%s) +if [ $(date -d $DATEMAX +%s) -gt $METADATE ]; then + printf "%s\n" "" "WARNING: The selected time window exceeds the last update of the metadata catalogue" "Results may be incomplete, please consider updating the metadata catalogue using the -d option." +fi + + +# ============================================================ +# Get tiles / footprints of interest +if [ "$AOITYPE" -eq 2 ]; then + if ! [ $(basename "$AOI" | cut -d"." -f 2-) == "shp" ]; then + printf "%s\n" "" "WARNING: AOI does not seem to be a shapefile. Other filetypes supported by GDAL should work, but are untested." + fi +fi +if [ "$AOITYPE" -eq 1 ] || [ "$AOITYPE" -eq 2 ]; then + if ! [ -x "$(command -v ogr2ogr)" ]; then + printf "%s\n" "Could not find ogr2ogr, is gdal installed?" "Define the AOI polygon using coordinates (option 3) if gdal is not available." 
>&2 + exit 1 + fi +fi + + +if [ "$AOITYPE" -eq 1 ]; then + + WKT=$(echo $AOI | sed 's/,/%20/g; s/\//,/g') + WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetFeature&typename="$SATELLITE"&Filter=%3Cogc:Filter%3E%3Cogc:Intersects%3E%3Cogc:PropertyName%3Eshape%3C/ogc:PropertyName%3E%3Cgml:Polygon%20srsName=%22EPSG:4326%22%3E%3Cgml:outerBoundaryIs%3E%3Cgml:LinearRing%3E%3Cgml:coordinates%3E"$WKT"%3C/gml:coordinates%3E%3C/gml:LinearRing%3E%3C/gml:outerBoundaryIs%3E%3C/gml:Polygon%3E%3C/ogc:Intersects%3E%3C/ogc:Filter%3E" + TILERAW=$(ogr2ogr -f CSV /vsistdout/ -select "Name" WFS:"$WFSURL") + TILES="_"$(echo $TILERAW | sed 's/Name, /T/; s/ /_|_T/g')"_" + +elif [ "$AOITYPE" -eq 2 ]; then + + printf "%s\n" "" "Searching for footprints / tiles intersecting with geometries of AOI shapefile..." + AOINE=$(echo $(basename "$AOI") | rev | cut -d"." -f 2- | rev) + BBOX=$(ogrinfo -so $AOI $AOINE | grep "Extent: " | sed 's/Extent: //; s/(//g; s/)//g; s/, /,/g; s/ - /,/') + WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetCapabilities&typename="$SATELLITE"&bbox="$BBOX + + ogr2ogr -f "GPKG" merged.gpkg WFS:"$WFSURL" -append -update + ogr2ogr -f "GPKG" merged.gpkg $AOI -append -update + + TILERAW=$(ogr2ogr -f CSV /vsistdout/ -dialect sqlite -sql "SELECT $SATELLITE.Name FROM $SATELLITE, $AOINE WHERE ST_Intersects($SATELLITE.geom, ST_Transform($AOINE.geom, 4326))" merged.gpkg) + TILES="_"$(echo $TILERAW | sed 's/Name, /T/; s/ /_|_T/g')"_" + rm merged.gpkg + +elif [ "$AOITYPE" -eq 3 ]; then + + TILERAW=$AOI + TILES="_T"$(echo $AOI | sed 's/,/_|_T/g')"_" + +else + echo " Error: Please specify aoitype as 1 for coordinates of a polygon, " + echo " 2 for shapefile (point/polygon/line) or " + echo " 3 for comma-separated tile names " + exit +fi + + +# ============================================================ +# Filter metadata and extract download links +printf 
"%s\n" "" "Querying the metadata catalogue for" "Tile(s): "$(echo $TILERAW | sed 's/Name, //; s/ /,/g') "Daterange: "$DATEMIN" to "$DATEMAX "Cloud cover minimum: "$CCMIN"%, maximum: "$CCMAX"%" "" + +if [ $PLATFORM = "s2" ]; then + LINKS=$(grep -E $TILES $METACAT | grep -E $(echo $SENSIN | sed s'/,/|/g') | awk -F "," '{OFS=","} {gsub("T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z|-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '{OFS=","} $5 >= start && $5 <= stop && $7 >= clow && $7 <= chigh') +elif [ $PLATFORM = "landsat" ]; then + LINKS=$(grep -E $TILES $METACAT | grep -E $(echo "$SENSIN" | sed 's/,/_|/g')"_" | awk -F "," '{OFS=","} {gsub("-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '$5 >= start && $5 <= stop && $6 == 01 && $7 == "T1" && $12 >= clow && $12 <= chigh') +fi + +printf "%s" "$LINKS" > filtered_metadata.txt +SIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$6/1048576} END {printf "%f", s}') +NSCENES=$(sed -n '$=' filtered_metadata.txt) +rm filtered_metadata.txt + + +# ============================================================ +# Get total number and size of scenes matching criteria +UNIT="MB" +if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="GB" +fi +if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="TB" +fi +if [ ${SIZE%%.*} -gt 1024 ]; then + SIZE=$(echo $SIZE | awk '{print $1 / 1024}') + UNIT="PB" +fi + +if [ -z $NSCENES ];then + printf "%s\n" "There were no $PRINTNAME Level 1 scenes found matching the search criteria" "" + exit 0 +else + printf "%s\n" "$NSCENES $PRINTNAME Level 1 scenes matching criteria found" "$SIZE $UNIT data volume found" "" +fi + +if [ $DRYRUN -eq 1 ]; then + exit 0 +fi + + +# ============================================================ +# Download scenes +POOL=$(cd $POOL; pwd) +echo "Starting to download "$NSCENES" "$PRINTNAME" Level 1 scenes" +ITER=1 +for LINK in 
$LINKS +do + SCENEID=$(echo $LINK | cut -d, -f 2) + + if [ $SATELLITE = "sentinel2" ]; then + TILE=$(echo $LINK | cut -d, -f 1 | grep -o -E "T[0-9]{2}[A-Z]{3}") + URL=$(echo $LINK | cut -d, -f 14) + elif [ $SATELLITE = "landsat" ]; then + TILE=$(echo $SCENEID | cut -d_ -f 3) + URL=$(echo $LINK | cut -d, -f 18) + fi + + # create target directory if it doesn't exist + TILEPATH=$POOL/$TILE + if [ ! -w $TILEPATH ]; then + mkdir $TILEPATH + if [ ! -w $TILEPATH ]; then + echo "$TILEPATH: Creating directory failed." + exit 1 + fi + fi + + # Check if scene already exists# + SCENEPATH=$TILEPATH/$SCENEID + if [ $SATELLITE = "sentinel2" ]; then + SCENEPATH=$SCENEPATH".SAFE" + fi + if [ -d $SCENEPATH ]; then + echo "Scene "$SCENEID"("$ITER" of "$NSCENES") exists, skipping..." + ((ITER++)) + continue + fi + + echo "Downloading "$SCENEID"("$ITER" of "$NSCENES")..." + gsutil -m -q cp -c -L $POOL"/download_log.txt" -R $URL $TILEPATH + + echo "$SCENEPATH QUEUED" >> $QUEUE + + ((ITER++)) +done + +printf "%s\n" "" "Finished." "" +exit 0 \ No newline at end of file diff --git a/bash/force-level1.sh b/bash/force-level1.sh new file mode 100755 index 00000000..24286780 --- /dev/null +++ b/bash/force-level1.sh @@ -0,0 +1,56 @@ +# ============================================================ +# This script will act as 'master' for Level-1 download +# It calls the ESA or Google download script +# +# Check for satellite first: +# - if ls, go for the google script +# - if s2, check for -m option (esa, gcs, later also aws) +# +# - check if arguments are in right order, format for scripts +# +# call landsat-level1-sentinel2 or landsat-level1-gcs + +#set -e # makes sure that this script stops as soon as the sub scripts exit + + +# ============================================================ +# check for options +DRYRUN=0 +PLATFORM=$1 +# check for platform and mirror, discard platform ($1) and mirror ($2:-m and $3) afterwards +case $PLATFORM in + s2) + if ! 
[ $2 = "-m" ]; then + printf "%s\n" "" "Mirror option (-m) must be set as first optional argument for Sentinel-2" "Valid mirrors: 'esa' for ESA and 'gcs' for Google Cloud Storage" "" + exit 1 + else + MIRROR=$3 + case $MIRROR in + "esa"|"gcs") + shift 2 ;; + *) + printf "%s\n" "" "Mirror must be either esa (ESA archive) or gcs (Google Cloud Storage)" "" ;; + esac + fi ;; + ls) + MIRROR="gcs" ;; + *) + printf "%s\n" "" "Platform must be either ls (Landsat) or s2 (Sentinel-2)" "" + exit 1 ;; +esac +shift + +echo $(dirname $0) +# ============================================================ +# run ESA or GCS scripts +BINDIR=$(dirname $0) +case $MIRROR in + "esa") + echo $@ + source $BINDIR"/"force-level1-esa $@ ;; + + "gcs") + echo "$@" + source $BINDIR"/"force-level1-gcs $@ ;; +esac + From 5430063555908dab8ce4ae676e1f4b116f925ba6 Mon Sep 17 00:00:00 2001 From: Stefan Ernst Date: Sat, 15 Aug 2020 01:15:37 +0200 Subject: [PATCH 24/78] fix for landsat metadata query --- bash/force-level1-gcs.sh | 61 +++++++++++++++++++++++++++++++--------- 1 file changed, 48 insertions(+), 13 deletions(-) diff --git a/bash/force-level1-gcs.sh b/bash/force-level1-gcs.sh index ab68828d..0f1b9d38 100755 --- a/bash/force-level1-gcs.sh +++ b/bash/force-level1-gcs.sh @@ -1,3 +1,30 @@ +########################################################################## +# +# This file is part of FORCE - Framework for Operational Radiometric +# Correction for Environmental monitoring. +# +# Copyright (C) 2013-2020 David Frantz +# +# FORCE is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# FORCE is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with FORCE. If not, see . +# +########################################################################## + +# Copyright (C) 2020 Stefan Ernst +# Contact: stefan.ernst@hu-berlin.de + +# This script downloads Landsat and Sentinel-2 Level 1 data from GCS + trap "echo Exited!; exit;" SIGINT SIGTERM # make sure that CTRL-C breaks out of download loop set -e # make sure script exits if any process exits unsuccessfully @@ -142,7 +169,7 @@ case $SENSIN in print "%s\n" "Error: Sentinel-2 sensor names for Landsat query received" show_help fi ;; - LT05|LT05,LE07|LT05,LE07,LC08|LE07|LE07,LC08|LC08) + LT05|LT05,LE07|LT05,LC08|LT05,LE07,LC08|LE07|LE07,LC08|LC08) if [ $PLATFORM = "s2" ]; then printf "%s\n" "" "Error: Landsat sensor names for Sentinel-2 query received" show_help @@ -178,11 +205,6 @@ fi # ============================================================ # Get tiles / footprints of interest -if [ "$AOITYPE" -eq 2 ]; then - if ! [ $(basename "$AOI" | cut -d"." -f 2-) == "shp" ]; then - printf "%s\n" "" "WARNING: AOI does not seem to be a shapefile. Other filetypes supported by GDAL should work, but are untested." - fi -fi if [ "$AOITYPE" -eq 1 ] || [ "$AOITYPE" -eq 2 ]; then if ! [ -x "$(command -v ogr2ogr)" ]; then printf "%s\n" "Could not find ogr2ogr, is gdal installed?" "Define the AOI polygon using coordinates (option 3) if gdal is not available." >&2 @@ -192,14 +214,21 @@ fi if [ "$AOITYPE" -eq 1 ]; then - + printf "%s\n" "" "Searching for footprints / tiles intersecting with input geometry..." 
WKT=$(echo $AOI | sed 's/,/%20/g; s/\//,/g') WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetFeature&typename="$SATELLITE"&Filter=%3Cogc:Filter%3E%3Cogc:Intersects%3E%3Cogc:PropertyName%3Eshape%3C/ogc:PropertyName%3E%3Cgml:Polygon%20srsName=%22EPSG:4326%22%3E%3Cgml:outerBoundaryIs%3E%3Cgml:LinearRing%3E%3Cgml:coordinates%3E"$WKT"%3C/gml:coordinates%3E%3C/gml:LinearRing%3E%3C/gml:outerBoundaryIs%3E%3C/gml:Polygon%3E%3C/ogc:Intersects%3E%3C/ogc:Filter%3E" TILERAW=$(ogr2ogr -f CSV /vsistdout/ -select "Name" WFS:"$WFSURL") - TILES="_"$(echo $TILERAW | sed 's/Name, /T/; s/ /_|_T/g')"_" + case $PLATFORM in + s2) TILES="_"$(echo $TILERAW | sed 's/Name, /T/; s/ /_|_T/g')"_" ;; + ls) TILES="_"$(echo $TILERAW | sed 's/PR, //; s/ /_|_/g')"_" ;; + esac + elif [ "$AOITYPE" -eq 2 ]; then - + + if ! [ $(basename "$AOI" | cut -d"." -f 2-) == "shp" ]; then + printf "%s\n" "" "WARNING: AOI does not seem to be a shapefile. Other filetypes supported by GDAL should work, but are untested." + fi printf "%s\n" "" "Searching for footprints / tiles intersecting with geometries of AOI shapefile..." AOINE=$(echo $(basename "$AOI") | rev | cut -d"." 
-f 2- | rev) BBOX=$(ogrinfo -so $AOI $AOINE | grep "Extent: " | sed 's/Extent: //; s/(//g; s/)//g; s/, /,/g; s/ - /,/') @@ -212,10 +241,13 @@ elif [ "$AOITYPE" -eq 2 ]; then TILES="_"$(echo $TILERAW | sed 's/Name, /T/; s/ /_|_T/g')"_" rm merged.gpkg -elif [ "$AOITYPE" -eq 3 ]; then +elif [ "$AOITYPE" -eq 3 ]; then TILERAW=$AOI - TILES="_T"$(echo $AOI | sed 's/,/_|_T/g')"_" + case $PLATFORM in + "s2") TILES="_T"$(echo $TILERAW | sed 's/,/_|_T/g')"_" ;; + "ls") TILES="_"$(echo $TILERAW | sed 's/,/_|_/g')"_" ;; + esac else echo " Error: Please specify aoitype as 1 for coordinates of a polygon, " @@ -231,12 +263,15 @@ printf "%s\n" "" "Querying the metadata catalogue for" "Tile(s): "$(echo $TILERA if [ $PLATFORM = "s2" ]; then LINKS=$(grep -E $TILES $METACAT | grep -E $(echo $SENSIN | sed s'/,/|/g') | awk -F "," '{OFS=","} {gsub("T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z|-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '{OFS=","} $5 >= start && $5 <= stop && $7 >= clow && $7 <= chigh') -elif [ $PLATFORM = "landsat" ]; then +elif [ $PLATFORM = "ls" ]; then LINKS=$(grep -E $TILES $METACAT | grep -E $(echo "$SENSIN" | sed 's/,/_|/g')"_" | awk -F "," '{OFS=","} {gsub("-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '$5 >= start && $5 <= stop && $6 == 01 && $7 == "T1" && $12 >= clow && $12 <= chigh') fi printf "%s" "$LINKS" > filtered_metadata.txt -SIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$6/1048576} END {printf "%f", s}') +case $PLATFORM in + "s2") SIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$6/1048576} END {printf "%f", s}') ;; + "ls") SIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$17/1048576} END {printf "%f", s}') ;; +esac NSCENES=$(sed -n '$=' filtered_metadata.txt) rm filtered_metadata.txt From ddb3ba1102512be4bb0a9985a0118abf0699d223 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Tue, 18 Aug 2020 20:00:38 +0200 Subject: [PATCH 25/78] added aux tool to copy over metadata --- 
Makefile | 11 ++-- src/aux-level/_md_copy.c | 106 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 114 insertions(+), 3 deletions(-) create mode 100755 src/aux-level/_md_copy.c diff --git a/Makefile b/Makefile index d51c7f83..dd779c3f 100755 --- a/Makefile +++ b/Makefile @@ -74,9 +74,9 @@ TA=temp-aux all: temp cross lower higher aux exe cross: enum_cl cite_cl utils_cl alloc_cl stack_cl imagefuns_cl param_cl date_cl datesys_cl lock_cl cube_cl dir_cl stats_cl pca_cl tile_cl queue_cl warp_cl sun_cl quality_cl sys_cl konami_cl download_cl read_cl lower: table_ll param_ll meta_ll cube_ll equi7_ll glance7_ll atc_ll sunview_ll read_ll radtran_ll topo_ll cloud_ll gas_ll brdf_ll atmo_ll aod_ll resmerge_ll coreg_ll coregfuns_ll acix_ll modwvp_ll -higher: param_hl progress_hl tasks_hl read-aux_hl read-ard_hl quality_hl bap_hl level3_hl cso_hl tsa_hl index_hl interpolate_hl stm_hl fold_hl standardize_hl pheno_hl trend_hl ml_hl texture_hl lsm_hl lib_hl sample_hl imp_hl cfimp_hl l2imp_hl +higher: param_hl progress_hl tasks_hl read-aux_hl read-ard_hl quality_hl bap_hl level3_hl cso_hl tsa_hl index_hl interpolate_hl stm_hl fold_hl standardize_hl pheno_hl polar_hl trend_hl ml_hl texture_hl lsm_hl lib_hl sample_hl imp_hl cfimp_hl l2imp_hl aux: param_aux param_train_aux train_aux -exe: force force-parameter force-qai-inflate force-tile-finder force-tabulate-grid force-l2ps force-higher-level force-train force-lut-modis +exe: force force-parameter force-qai-inflate force-tile-finder force-tabulate-grid force-l2ps force-higher-level force-train force-lut-modis force-mdcp .PHONY: temp all install install_ bash python clean build @@ -266,6 +266,9 @@ standardize_hl: temp $(DH)/standardize-hl.c pheno_hl: temp $(DH)/pheno-hl.cpp $(GPP) $(CFLAGS) $(SPLITS) -c $(DH)/pheno-hl.cpp -o $(TH)/pheno_hl.o $(LDSPLITS) +polar_hl: temp $(DH)/polar-hl.c + $(GCC) $(CFLAGS) -c $(DH)/polar-hl.c -o $(TH)/polar_hl.o + trend_hl: temp $(DH)/trend-hl.c $(GCC) $(CFLAGS) -c $(DH)/trend-hl.c -o 
$(TH)/trend_hl.o @@ -325,7 +328,7 @@ force: temp cross $(DA)/_main.c force-parameter: temp cross aux $(DA)/_parameter.c $(G11) $(CFLAGS) $(GDAL) $(GSL) $(CURL) $(OPENCV) -o $(TB)/force-parameter $(DA)/_parameter.c $(TC)/*.o $(TA)/*.o $(LDGDAL) $(LDGSL) $(LDCURL) $(LDOPENCV) - + force-tile-finder: temp cross $(DA)/_tile-finder.c $(G11) $(CFLAGS) $(GDAL) $(GSL) $(CURL) -o $(TB)/force-tile-finder $(DA)/_tile-finder.c $(TC)/*.o $(LDGDAL) $(LDGSL) $(LDCURL) @@ -347,6 +350,8 @@ force-higher-level: temp cross higher $(DH)/_higher-level.c force-lut-modis: temp cross lower $(DL)/_lut-modis.c $(G11) $(CFLAGS) $(GDAL) $(GSL) $(CURL) -o $(TB)/force-lut-modis $(DL)/_lut-modis.c $(TC)/*.o $(TL)/*.o $(LDGDAL) $(LDGSL) $(LDCURL) +force-mdcp: temp cross $(DA)/_md_copy.c + $(G11) $(CFLAGS) $(GDAL) $(GSL) $(CURL) -o $(TB)/force-mdcp $(DA)/_md_copy.c $(TC)/*.o $(LDGDAL) $(LDGSL) $(LDCURL) ### dummy code for testing stuff diff --git a/src/aux-level/_md_copy.c b/src/aux-level/_md_copy.c new file mode 100755 index 00000000..4338c873 --- /dev/null +++ b/src/aux-level/_md_copy.c @@ -0,0 +1,106 @@ +/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +This file is part of FORCE - Framework for Operational Radiometric +Correction for Environmental monitoring. + +Copyright (C) 2013-2020 David Frantz + +FORCE is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +FORCE is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with FORCE. If not, see . 
+ ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + +/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +This program copies FORCE metadata from one file to another ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + +#include // core input and output functions +#include // standard general utilities library + +#include "../cross-level/const-cl.h" +#include "../cross-level/konami-cl.h" + + +/** Geospatial Data Abstraction Library (GDAL) **/ +#include "gdal.h" // public (C callable) GDAL entry points +#include "cpl_conv.h" // various convenience functions for CPL +#include "cpl_string.h" // various convenience functions for strings + + +int main ( int argc, char *argv[] ){ +int b, nb; +GDALDatasetH src, dst; +GDALRasterBandH bsrc, bdst; +char fsrc[NPOW_10], fdst[NPOW_10]; +char **meta = NULL; +char **bmeta = NULL; +const char *bname = NULL; + + + if (argc >= 2) check_arg(argv[1]); + if (argc != 3){ printf("Usage: %s src dst\n\n", argv[0]); return FAILURE;} + + + if (strlen(argv[1]) > NPOW_10-1){ + printf("cannot copy, string too long.\n"); return FAILURE; + } else { strncpy(fsrc, argv[1], strlen(argv[1])); fsrc[strlen(argv[1])] = '\0';} + if (strlen(argv[2]) > NPOW_10-1){ + printf("cannot copy, string too long.\n"); return FAILURE; + } else { strncpy(fdst, argv[2], strlen(argv[2])); fdst[strlen(argv[2])] = '\0';} + + + GDALAllRegister(); + + if ((src = GDALOpenEx(fsrc, GDAL_OF_READONLY, NULL, NULL, NULL)) == NULL){ + printf("unable to open %s\n\n", fsrc); return FAILURE;} + + if ((dst = GDALOpenEx(fdst, GDAL_OF_UPDATE, NULL, NULL, NULL)) == NULL){ + printf("unable to open %s\n\n", fdst); return FAILURE;} + + if ((nb = GDALGetRasterCount(src)) != GDALGetRasterCount(dst)){ + printf("src and dst images have different number of bands\n\n"); + return FAILURE;} + + + // copy FORCE domain + meta = GDALGetMetadata(src, "FORCE"); + //printf("Number of metadata items: %d\n", CSLCount(meta)); + 
//CSLPrint(meta, NULL); + //CSLDestroy(meta); + GDALSetMetadata(dst, meta, "FORCE"); + + for (b=0; b Date: Tue, 18 Aug 2020 20:01:07 +0200 Subject: [PATCH 26/78] use metadata copying tool in force-mosaic --- bash/force-mosaic.sh | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/bash/force-mosaic.sh b/bash/force-mosaic.sh index c391d5d1..de8e9009 100755 --- a/bash/force-mosaic.sh +++ b/bash/force-mosaic.sh @@ -33,11 +33,13 @@ if [ $# -ne $EXPECTED_ARGS ]; then fi NOW=$PWD +BINDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" INP=$(readlink -f $1) OUT=$INP/mosaic + # input dir exists? if [ ! -d $INP ]; then echo $INP "does not exist" @@ -59,8 +61,11 @@ function mosaic_this(){ num=$1 prd=$2 + bin=$3 LIST="force-mosaic_list_$2.txt" + echo $bin + echo "mosaicking" $prd ONAME=${prd/.dat/.vrt} @@ -85,11 +90,20 @@ function mosaic_this(){ # build vrt if [ $N -gt 0 ]; then + echo $N "chips found". + + #build VRT gdalbuildvrt -q -srcnodata $NODATA -vrtnodata $NODATA -input_file_list $LIST $ONAME + + # set vrt to relative paths sed -i.tmp 's/relativeToVRT="0"/relativeToVRT="1"/g' $ONAME chmod --reference $ONAME".tmp" $ONAME rm $ONAME".tmp" + + # copy metadata + $bin"/"force-mdcp $FIRST $ONAME + else echo "no chip found." fi @@ -109,11 +123,11 @@ export -f mosaic_this PRODUCTS="force-mosaic_products.txt" find .. 
\( -name '*.dat' -o -name '*.tif' \) -exec basename {} \; | sort | uniq > $PRODUCTS -NPROD=$(wc -l $PRODUCTS) +NPROD=$(wc -l $PRODUCTS | cut -d " " -f 1) echo "mosaicking $NPROD products:" parallel -a $PRODUCTS echo {#} {} -parallel -a $PRODUCTS mosaic_this {#} {} +parallel -a $PRODUCTS mosaic_this {#} {} $BINDIR rm $PRODUCTS From 8f884f46c973197942296b3e5ee8dafb67516d08 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Tue, 18 Aug 2020 20:01:56 +0200 Subject: [PATCH 27/78] fixed erroneous gdalsetdescription call --- src/cross-level/stack-cl.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cross-level/stack-cl.c b/src/cross-level/stack-cl.c index 6c375e42..47c65dca 100755 --- a/src/cross-level/stack-cl.c +++ b/src/cross-level/stack-cl.c @@ -1024,7 +1024,7 @@ int i = 0; // in case of ENVI, update description //if (format == _FMT_ENVI_) - GDALSetDescription(fp, stack->name); + //GDALSetDescription(fp, stack->name); for (i=0; i Date: Thu, 20 Aug 2020 08:28:19 +0200 Subject: [PATCH 28/78] fixed make --- Makefile | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/Makefile b/Makefile index dd779c3f..7aafab5a 100755 --- a/Makefile +++ b/Makefile @@ -74,7 +74,7 @@ TA=temp-aux all: temp cross lower higher aux exe cross: enum_cl cite_cl utils_cl alloc_cl stack_cl imagefuns_cl param_cl date_cl datesys_cl lock_cl cube_cl dir_cl stats_cl pca_cl tile_cl queue_cl warp_cl sun_cl quality_cl sys_cl konami_cl download_cl read_cl lower: table_ll param_ll meta_ll cube_ll equi7_ll glance7_ll atc_ll sunview_ll read_ll radtran_ll topo_ll cloud_ll gas_ll brdf_ll atmo_ll aod_ll resmerge_ll coreg_ll coregfuns_ll acix_ll modwvp_ll -higher: param_hl progress_hl tasks_hl read-aux_hl read-ard_hl quality_hl bap_hl level3_hl cso_hl tsa_hl index_hl interpolate_hl stm_hl fold_hl standardize_hl pheno_hl polar_hl trend_hl ml_hl texture_hl lsm_hl lib_hl sample_hl imp_hl cfimp_hl l2imp_hl +higher: param_hl progress_hl tasks_hl read-aux_hl read-ard_hl quality_hl 
bap_hl level3_hl cso_hl tsa_hl index_hl interpolate_hl stm_hl fold_hl standardize_hl pheno_hl trend_hl ml_hl texture_hl lsm_hl lib_hl sample_hl imp_hl cfimp_hl l2imp_hl aux: param_aux param_train_aux train_aux exe: force force-parameter force-qai-inflate force-tile-finder force-tabulate-grid force-l2ps force-higher-level force-train force-lut-modis force-mdcp .PHONY: temp all install install_ bash python clean build @@ -266,9 +266,6 @@ standardize_hl: temp $(DH)/standardize-hl.c pheno_hl: temp $(DH)/pheno-hl.cpp $(GPP) $(CFLAGS) $(SPLITS) -c $(DH)/pheno-hl.cpp -o $(TH)/pheno_hl.o $(LDSPLITS) -polar_hl: temp $(DH)/polar-hl.c - $(GCC) $(CFLAGS) -c $(DH)/polar-hl.c -o $(TH)/polar_hl.o - trend_hl: temp $(DH)/trend-hl.c $(GCC) $(CFLAGS) -c $(DH)/trend-hl.c -o $(TH)/trend_hl.o From 9be40531f137b07c14c4230eaa13341b20a6120b Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 20 Aug 2020 09:02:27 +0200 Subject: [PATCH 29/78] removed strncpy --- src/aux-level/_md_copy.c | 11 ++++------- src/aux-level/_quality-inflate.c | 14 ++++++-------- src/cross-level/param-cl.c | 12 ------------ 3 files changed, 10 insertions(+), 27 deletions(-) diff --git a/src/aux-level/_md_copy.c b/src/aux-level/_md_copy.c index 4338c873..b41de584 100755 --- a/src/aux-level/_md_copy.c +++ b/src/aux-level/_md_copy.c @@ -41,7 +41,8 @@ int main ( int argc, char *argv[] ){ int b, nb; GDALDatasetH src, dst; GDALRasterBandH bsrc, bdst; -char fsrc[NPOW_10], fdst[NPOW_10]; +char *fsrc = NULL; +char *fdst = NULL; char **meta = NULL; char **bmeta = NULL; const char *bname = NULL; @@ -51,12 +52,8 @@ const char *bname = NULL; if (argc != 3){ printf("Usage: %s src dst\n\n", argv[0]); return FAILURE;} - if (strlen(argv[1]) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { strncpy(fsrc, argv[1], strlen(argv[1])); fsrc[strlen(argv[1])] = '\0';} - if (strlen(argv[2]) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { strncpy(fdst, argv[2], 
strlen(argv[2])); fdst[strlen(argv[2])] = '\0';} + fsrc = argv[1]; + fdst = argv[2]; GDALAllRegister(); diff --git a/src/aux-level/_quality-inflate.c b/src/aux-level/_quality-inflate.c index d2a8e7a7..46f0ce0e 100755 --- a/src/aux-level/_quality-inflate.c +++ b/src/aux-level/_quality-inflate.c @@ -40,10 +40,10 @@ This program inflates QAI layers int main( int argc, char *argv[] ){ double geotran[6]; -char iname[NPOW_10]; +char *iname = NULL; +char *d_out = NULL; +char *pch = NULL; char oname[NPOW_10]; -char d_out[NPOW_10]; -char *pch = NULL; const char *proj; GDALDatasetH fp; stack_t *QAI = NULL; @@ -57,8 +57,8 @@ cube_t *cube = NULL; if (argc != 3){ printf("Usage: %s QAI dir\n\n", argv[0]); exit(1);} // parse arguments - copy_string(iname, NPOW_10, argv[1]); - copy_string(d_out, NPOW_10, argv[2]); + iname = argv[1]; + d_out = argv[2]; GDALAllRegister(); @@ -81,9 +81,7 @@ cube_t *cube = NULL; cube->chunksize = cube->ny*cube->res; proj = GDALGetProjectionRef(fp); - if (strlen(proj) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { strncpy(cube->proj, proj, strlen(proj)); cube->proj[strlen(proj)] = '\0';} + copy_string(cube->proj, NPOW_10, proj); GDALClose(fp); diff --git a/src/cross-level/param-cl.c b/src/cross-level/param-cl.c index 0dcb4cea..7bbe2417 100755 --- a/src/cross-level/param-cl.c +++ b/src/cross-level/param-cl.c @@ -825,12 +825,6 @@ const char *separator = " ="; *params->par[i].date_ = parse_date(ptr); break; case _PAR_CHAR_: - //if (strlen(ptr) > NPOW_10-1){ - // printf("cannot copy, string too long.\n"); exit(1); - //} else { - // strncpy(*params->par[i].char_, ptr, strlen(ptr)); - // (*params->par[i].char_)[strlen(ptr)] = '\0'; - //} copy_string(*params->par[i].char_, NPOW_10, ptr); break; default: @@ -875,12 +869,6 @@ const char *separator = " ="; params->par[i].date_vec_[0][n] = parse_date(ptr); break; case _PAR_CHAR_: - //if (strlen(ptr) > NPOW_10-1){ - // printf("cannot copy, string too long.\n"); exit(1); - //} 
else { - // strncpy(params->par[i].char_vec_[0][n], ptr, strlen(ptr)); - // params->par[i].char_vec_[0][n][strlen(ptr)] = '\0'; - //} copy_string(params->par[i].char_vec_[0][n], NPOW_10, ptr); break; default: From 0c030f7b6fa48797e2f7dd04e0743572994032dd Mon Sep 17 00:00:00 2001 From: David Frantz Date: Sat, 22 Aug 2020 14:41:43 +0200 Subject: [PATCH 30/78] started working on aux tool to build layerstacks --- src/aux-level/_stack.c | 106 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) create mode 100755 src/aux-level/_stack.c diff --git a/src/aux-level/_stack.c b/src/aux-level/_stack.c new file mode 100755 index 00000000..5a0efff2 --- /dev/null +++ b/src/aux-level/_stack.c @@ -0,0 +1,106 @@ +/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +This file is part of FORCE - Framework for Operational Radiometric +Correction for Environmental monitoring. + +Copyright (C) 2013-2020 David Frantz + +FORCE is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +FORCE is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with FORCE. If not, see . 
+ ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + +/**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +This program stacks images ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ + +#include // core input and output functions +#include // standard general utilities library + +#include "../cross-level/const-cl.h" +#include "../cross-level/konami-cl.h" + + +/** Geospatial Data Abstraction Library (GDAL) **/ +#include "gdal.h" // public (C callable) GDAL entry points +#include "cpl_conv.h" // various convenience functions for CPL +#include "cpl_string.h" // various convenience functions for strings + + +int main ( int argc, char *argv[] ){ +int b, nb; +GDALDatasetH src, dst; +GDALRasterBandH bsrc, bdst; +char fsrc[NPOW_10], fdst[NPOW_10]; +char **meta = NULL; +char **bmeta = NULL; +const char *bname = NULL; + + + if (argc >= 2) check_arg(argv[1]); + if (argc < 2){ printf("Usage: %s file1 file2 file3 etc.\n\n", argv[0]); return FAILURE;} + + + if (strlen(argv[1]) > NPOW_10-1){ + printf("cannot copy, string too long.\n"); return FAILURE; + } else { strncpy(fsrc, argv[1], strlen(argv[1])); fsrc[strlen(argv[1])] = '\0';} + if (strlen(argv[2]) > NPOW_10-1){ + printf("cannot copy, string too long.\n"); return FAILURE; + } else { strncpy(fdst, argv[2], strlen(argv[2])); fdst[strlen(argv[2])] = '\0';} + + + GDALAllRegister(); + + if ((src = GDALOpenEx(fsrc, GDAL_OF_READONLY, NULL, NULL, NULL)) == NULL){ + printf("unable to open %s\n\n", fsrc); return FAILURE;} + + if ((dst = GDALOpenEx(fdst, GDAL_OF_UPDATE, NULL, NULL, NULL)) == NULL){ + printf("unable to open %s\n\n", fdst); return FAILURE;} + + if ((nb = GDALGetRasterCount(src)) != GDALGetRasterCount(dst)){ + printf("src and dst images have different number of bands\n\n"); + return FAILURE;} + + + // copy FORCE domain + meta = GDALGetMetadata(src, "FORCE"); + //printf("Number of metadata items: %d\n", CSLCount(meta)); + //CSLPrint(meta, NULL); 
+ //CSLDestroy(meta); + GDALSetMetadata(dst, meta, "FORCE"); + + for (b=0; b Date: Sun, 23 Aug 2020 14:56:24 +0200 Subject: [PATCH 31/78] worked on stacking implementation --- Makefile | 7 +- src/aux-level/_stack.c | 218 +++++++++++++++++++++++++++++++++++------ 2 files changed, 191 insertions(+), 34 deletions(-) diff --git a/Makefile b/Makefile index 7aafab5a..73c6c2f6 100755 --- a/Makefile +++ b/Makefile @@ -51,7 +51,7 @@ GPP=g++ G11=g++ -std=c++11 CFLAGS=-O3 -Wall -fopenmp -#CFLAGS=-g -Wall -fopenmp +CFLAGS=-g -Wall -fopenmp ### DIRECTORIES @@ -76,7 +76,7 @@ cross: enum_cl cite_cl utils_cl alloc_cl stack_cl imagefuns_cl param_cl date_cl lower: table_ll param_ll meta_ll cube_ll equi7_ll glance7_ll atc_ll sunview_ll read_ll radtran_ll topo_ll cloud_ll gas_ll brdf_ll atmo_ll aod_ll resmerge_ll coreg_ll coregfuns_ll acix_ll modwvp_ll higher: param_hl progress_hl tasks_hl read-aux_hl read-ard_hl quality_hl bap_hl level3_hl cso_hl tsa_hl index_hl interpolate_hl stm_hl fold_hl standardize_hl pheno_hl trend_hl ml_hl texture_hl lsm_hl lib_hl sample_hl imp_hl cfimp_hl l2imp_hl aux: param_aux param_train_aux train_aux -exe: force force-parameter force-qai-inflate force-tile-finder force-tabulate-grid force-l2ps force-higher-level force-train force-lut-modis force-mdcp +exe: force force-parameter force-qai-inflate force-tile-finder force-tabulate-grid force-l2ps force-higher-level force-train force-lut-modis force-mdcp force-stack .PHONY: temp all install install_ bash python clean build @@ -350,6 +350,9 @@ force-lut-modis: temp cross lower $(DL)/_lut-modis.c force-mdcp: temp cross $(DA)/_md_copy.c $(G11) $(CFLAGS) $(GDAL) $(GSL) $(CURL) -o $(TB)/force-mdcp $(DA)/_md_copy.c $(TC)/*.o $(LDGDAL) $(LDGSL) $(LDCURL) +force-stack: temp cross $(DA)/_stack.c + $(G11) $(CFLAGS) $(GDAL) $(GSL) $(CURL) -o $(TB)/force-stack $(DA)/_stack.c $(TC)/*.o $(LDGDAL) $(LDGSL) $(LDCURL) + ### dummy code for testing stuff dummy: temp cross aux src/dummy.c diff --git a/src/aux-level/_stack.c 
b/src/aux-level/_stack.c index 5a0efff2..1bfa4b9f 100755 --- a/src/aux-level/_stack.c +++ b/src/aux-level/_stack.c @@ -29,6 +29,8 @@ This program stacks images #include "../cross-level/const-cl.h" #include "../cross-level/konami-cl.h" +#include "../cross-level/alloc-cl.h" +#include "../cross-level/dir-cl.h" /** Geospatial Data Abstraction Library (GDAL) **/ @@ -37,69 +39,221 @@ This program stacks images #include "cpl_string.h" // various convenience functions for strings +typedef struct { + char fname[NPOW_10]; // file name + char dname[NPOW_10]; // directory name + char bname[NPOW_10]; // base name + int nx, ny, nb; // dimensions + char proj[NPOW_10]; // projection + double geotran[6]; // geotransformation + int b; // band +} img_t; + + int main ( int argc, char *argv[] ){ +int f, nf; int b, nb; -GDALDatasetH src, dst; -GDALRasterBandH bsrc, bdst; -char fsrc[NPOW_10], fdst[NPOW_10]; +int k; +int nx, ny; +int nodata; +GDALDriverH driver = NULL; +GDALDatasetH src = NULL; +GDALDatasetH dst = NULL; +GDALRasterBandH bsrc = NULL; +GDALRasterBandH bdst = NULL; + +img_t *inp = NULL; +img_t *out = NULL; + + + +char *f_dst = NULL; +char d_out[NPOW_10]; + char **meta = NULL; char **bmeta = NULL; const char *bname = NULL; +const char *proj_ = NULL; + + +char source[NPOW_16]; + + +int interleave; +enum { _BYFILE_, _BYBAND_, _INTERLEN_ }; + if (argc >= 2) check_arg(argv[1]); - if (argc < 2){ printf("Usage: %s file1 file2 file3 etc.\n\n", argv[0]); return FAILURE;} + if (argc < 3){ printf("Usage: %s file1 file2 [etc] outfile.\n", argv[0]); + printf(" At least two input files need to be given\n\n"); + return FAILURE;} + + // number of input files + nf = argc-2; + // output name + f_dst = argv[argc-1]; + directoryname(f_dst, d_out, NPOW_10); + + if (fileexist(f_dst)){ + printf("Output file already exists: %s\n", f_dst); + printf("Delete or user another filename\n\n"); + return FAILURE;} + + chdir(d_out); - if (strlen(argv[1]) > NPOW_10-1){ - printf("cannot copy, string too 
long.\n"); return FAILURE; - } else { strncpy(fsrc, argv[1], strlen(argv[1])); fsrc[strlen(argv[1])] = '\0';} - if (strlen(argv[2]) > NPOW_10-1){ - printf("cannot copy, string too long.\n"); return FAILURE; - } else { strncpy(fdst, argv[2], strlen(argv[2])); fdst[strlen(argv[2])] = '\0';} GDALAllRegister(); - if ((src = GDALOpenEx(fsrc, GDAL_OF_READONLY, NULL, NULL, NULL)) == NULL){ - printf("unable to open %s\n\n", fsrc); return FAILURE;} + alloc((void**)&inp, nf, sizeof(img_t)); + + // check if we stack file after file, or band after band + for (f=0, nb=0, interleave=_BYBAND_; f NPOW_10-1){ + printf("cannot copy, string too long\n\n"); return FAILURE; + } else { + strncpy(inp[f].fname, argv[f+1], strlen(argv[f+1])); + inp[f].fname[strlen(argv[f+1])] = '\0'; + } + directoryname(inp[f].fname, inp[f].dname, NPOW_10); + basename_with_ext(inp[f].fname, inp[f].bname, NPOW_10); + + if ((src = GDALOpenEx(inp[f].fname, GDAL_OF_READONLY, NULL, NULL, NULL)) == NULL){ + printf("Unable to open %s\n\n", inp[f].fname); return FAILURE;} + + nx = (inp[f].nx = GDALGetRasterXSize(src)); + ny = (inp[f].ny = GDALGetRasterYSize(src)); + nb += (inp[f].nb = GDALGetRasterCount(src)); + GDALGetGeoTransform(src, inp[f].geotran); + + proj_ = GDALGetProjectionRef(src); + if (strlen(proj_) > NPOW_10-1){ + printf("cannot copy, string too long\n\n"); return FAILURE; + } else { + strncpy(inp[f].proj, proj_, strlen(proj_)); + inp[f].proj[strlen(proj_)] = '\0'; + } + + GDALClose(src); + + + printf("file %d:\n", f+1); + printf(" %s\n", inp[f].dname); + printf(" %s\n", inp[f].bname); + printf(" %d %d %d\n", inp[f].nx, inp[f].ny, inp[f].nb); + + + // tests for consistency and interleave type + if (f > 0){ + if (strcmp(inp[f].dname, inp[0].dname) != 0){ + printf("Directories are different. This is not allowed.\n"); + printf("Dir 1: %s\nDir %d: %s\n\n", inp[0].dname, f+1, inp[f].dname); + return FAILURE;} + if (inp[f].nx != inp[0].nx){ + printf("Number of columns are different. 
This is not allowed.\n"); + printf("File 1: %d\nFile %d: %d\n\n", inp[0].nx, f+1, inp[f].nx); + return FAILURE;} + if (inp[f].ny != inp[0].ny){ + printf("Number of rows are different. This is not allowed.\n"); + printf("File 1: %d\nFile %d: %d\n\n", inp[0].ny, f+1, inp[f].ny); + return FAILURE;} + if (inp[f].nb != inp[0].nb) interleave = _BYFILE_; + } - if ((dst = GDALOpenEx(fdst, GDAL_OF_UPDATE, NULL, NULL, NULL)) == NULL){ - printf("unable to open %s\n\n", fdst); return FAILURE;} + } - if ((nb = GDALGetRasterCount(src)) != GDALGetRasterCount(dst)){ - printf("src and dst images have different number of bands\n\n"); - return FAILURE;} + if (strcmp(d_out, inp[0].dname) != 0){ + printf("Directories are different. This is not allowed.\n"); + printf("Dir input: %s\nDir output: %s\n\n", inp[0].dname, d_out); return FAILURE;} + + + + alloc((void**)&out, nb, sizeof(img_t)); + + switch (interleave){ + case _BYFILE_: + printf("\nDifferent number of bands detected. Stacking by file.\n\n"); + for (f=0, k=0; f" + " %s" + " %d" + " " + " " + " %d" + "", + out[b].bname, out[b].b, out[b].nx, out[b].ny, out[b].nx, out[b].ny, nodata); + + GDALSetMetadataItem(bdst, "source_0", source, "new_vrt_sources"); + GDALSetMetadata(bdst, bmeta, "FORCE"); GDALSetDescription(bdst, bname); + GDALClose(src); + } - GDALClose(src); GDALClose(dst); + free((void*)inp); + free((void*)out); return SUCCESS; } From 3250c508ba882b2733e40435ba1dda2ba2536847 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Sun, 23 Aug 2020 19:49:20 +0200 Subject: [PATCH 32/78] changed domain in tsa metadata --- src/higher-level/tsa-hl.c | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/src/higher-level/tsa-hl.c b/src/higher-level/tsa-hl.c index 5d076876..bc10070a 100755 --- a/src/higher-level/tsa-hl.c +++ b/src/higher-level/tsa-hl.c @@ -219,12 +219,12 @@ short ***ptr[98] = { nchar = snprintf(domain, NPOW_10, "%s_%s", fdate, sensor); if (nchar < 0 || nchar >= NPOW_10){ 
printf("Buffer Overflow in assembling domain\n"); error++;} - set_stack_domain(TSA[o], t, domain); + //set_stack_domain(TSA[o], t, domain); set_stack_bandname(TSA[o], t, domain); break; case _stats_: set_stack_sensor(TSA[o], t, "BLEND"); - set_stack_domain(TSA[o], t, _TAGGED_ENUM_STA_[phl->tsa.stm.sta.metrics[t]].tag); + //set_stack_domain(TSA[o], t, _TAGGED_ENUM_STA_[phl->tsa.stm.sta.metrics[t]].tag); set_stack_bandname(TSA[o], t, _TAGGED_ENUM_STA_[phl->tsa.stm.sta.metrics[t]].tag); break; case _inter_: @@ -238,7 +238,7 @@ short ***ptr[98] = { compact_date(date.year, date.month, date.day, fdate, NPOW_10); set_stack_wavelength(TSA[o], t, date.year + (date.doy-1)/365.0); set_stack_unit(TSA[o], t, "decimal year"); - set_stack_domain(TSA[o], t, fdate); + //set_stack_domain(TSA[o], t, fdate); set_stack_bandname(TSA[o], t, fdate); break; case _year_: @@ -250,7 +250,7 @@ short ***ptr[98] = { printf("Buffer Overflow in assembling domain\n"); error++;} set_stack_wavelength(TSA[o], t, date.year); set_stack_unit(TSA[o], t, "year"); - set_stack_domain(TSA[o], t, fdate); + //set_stack_domain(TSA[o], t, fdate); set_stack_bandname(TSA[o], t, fdate); break; case _quarter_: @@ -263,7 +263,7 @@ short ***ptr[98] = { printf("Buffer Overflow in assembling domain\n"); error++;} set_stack_wavelength(TSA[o], t, k); set_stack_unit(TSA[o], t, "quarter"); - set_stack_domain(TSA[o], t, fdate); + //set_stack_domain(TSA[o], t, fdate); set_stack_bandname(TSA[o], t, fdate); k++; break; @@ -277,7 +277,7 @@ short ***ptr[98] = { printf("Buffer Overflow in assembling domain\n"); error++;} set_stack_wavelength(TSA[o], t, k); set_stack_unit(TSA[o], t, "month"); - set_stack_domain(TSA[o], t, fdate); + //set_stack_domain(TSA[o], t, fdate); set_stack_bandname(TSA[o], t, fdate); k++; break; @@ -291,7 +291,7 @@ short ***ptr[98] = { printf("Buffer Overflow in assembling domain\n"); error++;} set_stack_wavelength(TSA[o], t, k); set_stack_unit(TSA[o], t, "week"); - set_stack_domain(TSA[o], t, fdate); + 
//set_stack_domain(TSA[o], t, fdate); set_stack_bandname(TSA[o], t, fdate); k++; break; @@ -305,7 +305,7 @@ short ***ptr[98] = { printf("Buffer Overflow in assembling domain\n"); error++;} set_stack_wavelength(TSA[o], t, k); set_stack_unit(TSA[o], t, "day of year"); - set_stack_domain(TSA[o], t, fdate); + //set_stack_domain(TSA[o], t, fdate); set_stack_bandname(TSA[o], t, fdate); k++; break; @@ -318,17 +318,17 @@ short ***ptr[98] = { printf("Buffer Overflow in assembling domain\n"); error++;} set_stack_wavelength(TSA[o], t, date.year); set_stack_unit(TSA[o], t, "year"); - set_stack_domain(TSA[o], t, fdate); + //set_stack_domain(TSA[o], t, fdate); set_stack_bandname(TSA[o], t, fdate); break; case _trd_: set_stack_sensor(TSA[o], t, "BLEND"); - set_stack_domain(TSA[o], t, _TAGGED_ENUM_TRD_[t].tag); + //set_stack_domain(TSA[o], t, _TAGGED_ENUM_TRD_[t].tag); set_stack_bandname(TSA[o], t, _TAGGED_ENUM_TRD_[t].tag); break; case _cat_: set_stack_sensor(TSA[o], t, "BLEND"); - set_stack_domain(TSA[o], t, _TAGGED_ENUM_CAT_[t].tag); + //set_stack_domain(TSA[o], t, _TAGGED_ENUM_CAT_[t].tag); set_stack_bandname(TSA[o], t, _TAGGED_ENUM_CAT_[t].tag); break; default: @@ -384,6 +384,7 @@ stack_t *stack = NULL; date_t date; char fname[NPOW_10]; char dname[NPOW_10]; +char domain[NPOW_10]; int nchar; @@ -416,10 +417,12 @@ int nchar; set_stack_explode(stack, phl->explode); set_stack_par(stack, phl->params->log); + sprintf(domain, "%s_%s", phl->tsa.index_name[idx], prodname); + for (b=0; btsa.index_name[idx]); + set_stack_domain(stack, b, domain); } return stack; From cbbb8a68c6d4ad1d16996523d9d1444e0e0ae283 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Sun, 23 Aug 2020 20:21:55 +0200 Subject: [PATCH 33/78] worked on stacking implementation --- Makefile | 2 +- src/aux-level/_stack.c | 21 ++++++++++++++++----- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/Makefile b/Makefile index 73c6c2f6..e8be533b 100755 --- a/Makefile +++ b/Makefile @@ -51,7 +51,7 @@ GPP=g++ 
G11=g++ -std=c++11 CFLAGS=-O3 -Wall -fopenmp -CFLAGS=-g -Wall -fopenmp +#CFLAGS=-g -Wall -fopenmp ### DIRECTORIES diff --git a/src/aux-level/_stack.c b/src/aux-level/_stack.c index 1bfa4b9f..1ddf98a1 100755 --- a/src/aux-level/_stack.c +++ b/src/aux-level/_stack.c @@ -69,6 +69,7 @@ img_t *out = NULL; char *f_dst = NULL; char d_out[NPOW_10]; +char e_out[NPOW_10]; char **meta = NULL; char **bmeta = NULL; @@ -95,14 +96,20 @@ enum { _BYFILE_, _BYBAND_, _INTERLEN_ }; // output name f_dst = argv[argc-1]; directoryname(f_dst, d_out, NPOW_10); + extension(f_dst, e_out, NPOW_10); + + if (strcmp(e_out, "VRT") != 0){ + printf("Output file must have .vrt extension\n\n"); + return FAILURE;} if (fileexist(f_dst)){ printf("Output file already exists: %s\n", f_dst); printf("Delete or user another filename\n\n"); return FAILURE;} - chdir(d_out); - + if (chdir(d_out) != 0){ + printf("Couldn't change to output directory\n\n"); + return FAILURE;} GDALAllRegister(); @@ -173,6 +180,7 @@ enum { _BYFILE_, _BYBAND_, _INTERLEN_ }; alloc((void**)&out, nb, sizeof(img_t)); + // choose interleave type, and build band order switch (interleave){ case _BYFILE_: printf("\nDifferent number of bands detected. 
Stacking by file.\n\n"); @@ -200,12 +208,14 @@ enum { _BYFILE_, _BYBAND_, _INTERLEN_ }; } + // create file with VRT driver if ((driver = GDALGetDriverByName("VRT")) == NULL){ printf("Error getting VRT driver.\n\n"); return FAILURE;} if ((dst = GDALCreate(driver, f_dst, nx, ny, 0, GDT_Int16, NULL)) == NULL){ printf("Error creating file %s\n\n", f_dst); return FAILURE;} + // copy file-level metadata if ((src = GDALOpenEx(inp[0].fname, GDAL_OF_READONLY, NULL, NULL, NULL)) == NULL){ printf("Unable to open %s\n\n", inp[0].fname); return FAILURE;} @@ -219,19 +229,19 @@ enum { _BYFILE_, _BYBAND_, _INTERLEN_ }; // add the bands to vrt for (b=0; b" " %s" @@ -242,6 +252,7 @@ enum { _BYFILE_, _BYBAND_, _INTERLEN_ }; "", out[b].bname, out[b].b, out[b].nx, out[b].ny, out[b].nx, out[b].ny, nodata); + // update source and metadata GDALSetMetadataItem(bdst, "source_0", source, "new_vrt_sources"); GDALSetMetadata(bdst, bmeta, "FORCE"); GDALSetDescription(bdst, bname); From c180a00c6f1934c4b66a147eb52583c36f73be62 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Mon, 24 Aug 2020 08:53:57 +0200 Subject: [PATCH 34/78] adding docs --- docs/source/refs-applied.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/source/refs-applied.rst b/docs/source/refs-applied.rst index f50b0c48..c2001625 100755 --- a/docs/source/refs-applied.rst +++ b/docs/source/refs-applied.rst @@ -8,6 +8,9 @@ This list summarizes all scientific publications that have used FORCE. If you ha * **2020** + | H. Bendini, L. Fonseca, M. Schwieder, P. Rufin, T. Korting, A. Koumrouyan, and P. Hostert(2020): Combining Environmental and Landsat Analysis Ready Data for Vegetation Mapping: A Case Study in the Brazilian Savanna Biome. The International Archives of the Photogrammetry, Remote Sensing and Spatial Information Sciences, Volume XLIII-B3-2020. + | https://doi.org/10.5194/isprs-archives-XLIII-B3-2020-953-2020 + | B. Jakimow, S. van der Linden, F. Thiel, D. 
Frantz, and Patrick Hostert (2020): Visualizing and labeling dense multi-sensor earth observation time series: The EO Time Series Viewer. Environmental Modelling & Software 125, 104631. | https://doi.org/10.1016/j.envsoft.2020.104631 From 8638a5544477c2d7581f1b84579d0eca741e2e03 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Mon, 24 Aug 2020 14:09:57 +0200 Subject: [PATCH 35/78] adding docs --- docs/source/history/v3.rst | 2 +- docs/source/history/vdev.rst | 45 ++++++++++++++++++++++++++++++++++-- 2 files changed, 44 insertions(+), 3 deletions(-) diff --git a/docs/source/history/v3.rst b/docs/source/history/v3.rst index 83555aa2..9c6e0c95 100755 --- a/docs/source/history/v3.rst +++ b/docs/source/history/v3.rst @@ -6,7 +6,7 @@ Version 3 FORCE v. 3.4.0 -------------- -Release: 19.03.2020 +Release: 03.08.2020 * **General changes** diff --git a/docs/source/history/vdev.rst b/docs/source/history/vdev.rst index 78483aba..a491ffd7 100755 --- a/docs/source/history/vdev.rst +++ b/docs/source/history/vdev.rst @@ -8,5 +8,46 @@ FORCE-dev Master release: TBA -No dev changes yet. -Develop and Master are synchronized. +* **General changes** + + Fixed a small bug that prevented program execution when the parameter file was too long. + +* **FORCE HIGHER LEVEL** + + * in force-higher-level, Level 3 sub-module: + + Included a safety query, which ensures that at least one score should be > 0. + Setting all scores to 0 resulted in some crashes. + Thanks to Jonas Ardö for reporting this. + + * in force-higher-level, TSA sub-module: + + The domain tag of the bandwise FORCE metadata domain was updated to be interoperable with a 4D data model (see force-stack below). + +* **FORCE AUX** + + * in force-mosaic: + + force-mosaic now copies all metadata to the generated mosaics. + For this, the new aux tool force-mdcp is used. + + * new program force-mdcp: + + This new helper tool copies metadata from one file to another. + This includes the FORCE metadata domains.
+ This program is now used by force-mosaic to carry the metadata to the generated mosaics. + + * new program force-stack: + + This new tool stacks files in VRT format. + This works with physical images (e.g. GeoTiffs), as well as VRT files (as e.g. generated by force-mosaic). + A variable number of input files can be stacked; wildcards are supported. + Most basically, this is e.g. useful to look at RGB combinations in QGIS, where RGB visualizations cannot be used when the channels are not in the same file. + If the number of bands in the input files is different, the images are stacked one after another, e.g. file 1 band 1, file 1 band 2, file 2 band 1. + If the number of bands in the input files is the same, the images are stacked with band interleave, e.g. file 1 band 1, file 2 band 1, file 1 band 2, file 2 band 2. + The second option implements a 4D data model (think of time series), which is consistent with the QGIS plugins Raster Time Series Manager and Raster Data Plotting (C) Andreas Rabe. + A tutorial is in development to illustrate the interoperable use between FORCE and said QGIS plugins. + + + + From 02e75655dd29eb151f43a3c1ea5011e9be09b4c0 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Tue, 25 Aug 2020 09:45:32 +0200 Subject: [PATCH 36/78] added new band to trend/cat for computing relative change. Makes most sense when input has a physical meaning like fractional cover. Does not make sense at all when offset is negative!
--- src/cross-level/enum-cl.c | 36 ++++++++++++----------- src/cross-level/enum-cl.h | 41 ++++++++++++++------------ src/higher-level/trend-hl.c | 58 ++++++++++++++++++++----------------- 3 files changed, 73 insertions(+), 62 deletions(-) diff --git a/src/cross-level/enum-cl.c b/src/cross-level/enum-cl.c index b5e91664..7d60a636 100755 --- a/src/cross-level/enum-cl.c +++ b/src/cross-level/enum-cl.c @@ -121,25 +121,27 @@ const tagged_enum_t _TAGGED_ENUM_TAIL_[_TAIL_LENGTH_] = { const tagged_enum_t _TAGGED_ENUM_TRD_[_TRD_LENGTH_] = { { _TRD_MEAN_, "MEAN"}, { _TRD_OFFSET_, "OFFSET"}, { _TRD_SLOPE_, "SLOPE"}, - { _TRD_RSQ_, "RSQ"}, { _TRD_SIG_, "SIG"}, { _TRD_RMSE_, "RMSE"}, - { _TRD_MAE_, "MAE"}, { _TRD_MAXE_, "MAXRES"}, { _TRD_NUM_, "NUM"}}; + { _TRD_GAIN_, "GAIN"}, { _TRD_RSQ_, "RSQ"}, { _TRD_SIG_, "SIG"}, + { _TRD_RMSE_, "RMSE"}, { _TRD_MAE_, "MAE"}, { _TRD_MAXE_, "MAXRES"}, + { _TRD_NUM_, "NUM"}}; const tagged_enum_t _TAGGED_ENUM_CAT_[_CAT_LENGTH_] = { - { _CAT_CHANGE_, "CHANGE"}, { _CAT_YEAR_, "YEAR-OF-CHANGE"}, - { _CAT_TOTAL_MEAN_, "TOTAL-MEAN"}, { _CAT_TOTAL_OFFSET_, "TOTAL-OFFSET"}, - { _CAT_TOTAL_SLOPE_, "TOTAL-SLOPE"}, { _CAT_TOTAL_RSQ_, "TOTAL-RSQ"}, - { _CAT_TOTAL_SIG_, "TOTAL-SIG"}, { _CAT_TOTAL_RMSE_, "TOTAL-RMSE"}, - { _CAT_TOTAL_MAE_, "TOTAL-MAE"}, { _CAT_TOTAL_MAXE_, "TOTAL-MAXRES"}, - { _CAT_TOTAL_NUM_, "TOTAL-NUM"}, { _CAT_BEFORE_MEAN_, "BEFORE-MEAN"}, - { _CAT_BEFORE_OFFSET_, "BEFORE-OFFSET"}, { _CAT_BEFORE_SLOPE_, "BEFORE-SLOPE"}, - { _CAT_BEFORE_RSQ_, "BEFORE-RSQ"}, { _CAT_BEFORE_SIG_, "BEFORE-SIG"}, - { _CAT_BEFORE_RMSE_, "BEFORE-RMSE"}, { _CAT_BEFORE_MAE_, "BEFORE-MAE"}, - { _CAT_BEFORE_MAXE_, "BEFORE-MAXRES"}, { _CAT_BEFORE_NUM_, "BEFORE-NUM"}, - { _CAT_AFTER_MEAN_, "AFTER-MEAN"}, { _CAT_AFTER_OFFSET_, "AFTER-OFFSET"}, - { _CAT_AFTER_SLOPE_, "AFTER-SLOPE"}, { _CAT_AFTER_RSQ_, "AFTER-RSQ"}, - { _CAT_AFTER_SIG_, "AFTER-SIG"}, { _CAT_AFTER_RMSE_, "AFTER-RMSE"}, - { _CAT_AFTER_MAE_, "AFTER-MAE"}, { _CAT_AFTER_MAXE_, "AFTER-MAXRES"}, - { 
_CAT_AFTER_NUM_, "AFTER-NUM"}}; + { _CAT_CHANGE_, "CHANGE"}, { _CAT_YEAR_, "YEAR-OF-CHANGE"}, + { _CAT_TOTAL_MEAN_, "TOTAL-MEAN"}, { _CAT_TOTAL_OFFSET_, "TOTAL-OFFSET"}, + { _CAT_TOTAL_SLOPE_, "TOTAL-SLOPE"}, { _CAT_TOTAL_GAIN_, "TOTAL-GAIN"}, + { _CAT_TOTAL_RSQ_, "TOTAL-RSQ"}, { _CAT_TOTAL_SIG_, "TOTAL-SIG"}, + { _CAT_TOTAL_RMSE_, "TOTAL-RMSE"}, { _CAT_TOTAL_MAE_, "TOTAL-MAE"}, + { _CAT_TOTAL_MAXE_, "TOTAL-MAXRES"}, { _CAT_TOTAL_NUM_, "TOTAL-NUM"}, + { _CAT_BEFORE_MEAN_, "BEFORE-MEAN"}, { _CAT_BEFORE_OFFSET_, "BEFORE-OFFSET"}, + { _CAT_BEFORE_SLOPE_, "BEFORE-SLOPE"}, { _CAT_BEFORE_GAIN_, "BEFORE-GAIN"}, + { _CAT_BEFORE_RSQ_, "BEFORE-RSQ"}, { _CAT_BEFORE_SIG_, "BEFORE-SIG"}, + { _CAT_BEFORE_RMSE_, "BEFORE-RMSE"}, { _CAT_BEFORE_MAE_, "BEFORE-MAE"}, + { _CAT_BEFORE_MAXE_, "BEFORE-MAXRES"}, { _CAT_BEFORE_NUM_, "BEFORE-NUM"}, + { _CAT_AFTER_MEAN_, "AFTER-MEAN"}, { _CAT_AFTER_OFFSET_, "AFTER-OFFSET"}, + { _CAT_AFTER_SLOPE_, "AFTER-SLOPE"}, { _CAT_AFTER_GAIN_, "AFTER-GAIN"}, + { _CAT_AFTER_RSQ_, "AFTER-RSQ"}, { _CAT_AFTER_SIG_, "AFTER-SIG"}, + { _CAT_AFTER_RMSE_, "AFTER-RMSE"}, { _CAT_AFTER_MAE_, "AFTER-MAE"}, + { _CAT_AFTER_MAXE_, "AFTER-MAXRES"}, { _CAT_AFTER_NUM_, "AFTER-NUM"}}; const tagged_enum_t _TAGGED_ENUM_STD_[_STD_LENGTH_] = { { _STD_NONE_, "NONE" }, {_STD_NORMAL_, "NORMALIZE"}, {_STD_CENTER_, "CENTER" }}; diff --git a/src/cross-level/enum-cl.h b/src/cross-level/enum-cl.h index e2c88e16..962ad335 100755 --- a/src/cross-level/enum-cl.h +++ b/src/cross-level/enum-cl.h @@ -197,27 +197,30 @@ enum { _FLD_YEAR_, _FLD_QUARTER_, _FLD_MONTH_, _FLD_WEEK_, _FLD_DOY_, _FLD_LENGT enum { _PART_TOTAL_, _PART_BEFORE_, _PART_AFTER_, _PART_LENGTH_ }; // trend -enum { _TRD_MEAN_, _TRD_OFFSET_, _TRD_SLOPE_, - _TRD_RSQ_, _TRD_SIG_, _TRD_RMSE_, - _TRD_MAE_, _TRD_MAXE_, _TRD_NUM_, _TRD_LENGTH_ }; +enum { _TRD_MEAN_, _TRD_OFFSET_, _TRD_SLOPE_, + _TRD_GAIN_, _TRD_RSQ_, _TRD_SIG_, + _TRD_RMSE_, _TRD_MAE_, _TRD_MAXE_, + _TRD_NUM_, _TRD_LENGTH_ }; // change, aftereffect, trend -enum { 
_CAT_CHANGE_, _CAT_YEAR_, - _CAT_TOTAL_MEAN_, _CAT_TOTAL_OFFSET_, - _CAT_TOTAL_SLOPE_, _CAT_TOTAL_RSQ_, - _CAT_TOTAL_SIG_, _CAT_TOTAL_RMSE_, - _CAT_TOTAL_MAE_, _CAT_TOTAL_MAXE_, - _CAT_TOTAL_NUM_, _CAT_BEFORE_MEAN_, - _CAT_BEFORE_OFFSET_, _CAT_BEFORE_SLOPE_, - _CAT_BEFORE_RSQ_, _CAT_BEFORE_SIG_, - _CAT_BEFORE_RMSE_, _CAT_BEFORE_MAE_, - _CAT_BEFORE_MAXE_, _CAT_BEFORE_NUM_, - _CAT_AFTER_MEAN_, _CAT_AFTER_OFFSET_, - _CAT_AFTER_SLOPE_, _CAT_AFTER_RSQ_, - _CAT_AFTER_SIG_, _CAT_AFTER_RMSE_, - _CAT_AFTER_MAE_, _CAT_AFTER_MAXE_, - _CAT_AFTER_NUM_, _CAT_LENGTH_ }; - +enum { _CAT_CHANGE_, _CAT_YEAR_, + _CAT_TOTAL_MEAN_, _CAT_TOTAL_OFFSET_, + _CAT_TOTAL_SLOPE_, _CAT_TOTAL_GAIN_, + _CAT_TOTAL_RSQ_, _CAT_TOTAL_SIG_, + _CAT_TOTAL_RMSE_, _CAT_TOTAL_MAE_, + _CAT_TOTAL_MAXE_, _CAT_TOTAL_NUM_, + _CAT_BEFORE_MEAN_, _CAT_BEFORE_OFFSET_, + _CAT_BEFORE_SLOPE_, _CAT_BEFORE_GAIN_, + _CAT_BEFORE_RSQ_, _CAT_BEFORE_SIG_, + _CAT_BEFORE_RMSE_, _CAT_BEFORE_MAE_, + _CAT_BEFORE_MAXE_, _CAT_BEFORE_NUM_, + _CAT_AFTER_MEAN_, _CAT_AFTER_OFFSET_, + _CAT_AFTER_SLOPE_, _CAT_AFTER_GAIN_, + _CAT_AFTER_RSQ_, _CAT_AFTER_SIG_, + _CAT_AFTER_RMSE_, _CAT_AFTER_MAE_, + _CAT_AFTER_MAXE_, _CAT_AFTER_NUM_, + _CAT_LENGTH_ }; + // texture metrics enum { _TXT_ERO_, _TXT_DIL_, _TXT_OPN_, _TXT_CLS_, _TXT_GRD_, _TXT_THT_, _TXT_BHT_, _TXT_LENGTH_ }; diff --git a/src/higher-level/trend-hl.c b/src/higher-level/trend-hl.c index 4d95f58b..bc59a047 100755 --- a/src/higher-level/trend-hl.c +++ b/src/higher-level/trend-hl.c @@ -1,6 +1,6 @@ /**+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -This file is part of FORCE - Framework for Operational Radiometric +This file is part of FORCE - Framework for Operational Radiometric Correction for Environmental monitoring. Copyright (C) 2013-2020 David Frantz @@ -33,7 +33,7 @@ int cat(short **fld_, date_t *d_fld, small *mask_, int nc, int nf, short **cat_, /** This function computes a trend analysis for any time series. 
Currently -+++ implemented trend parameters are mean, intercept, slope, R-squared, ++++ implemented trend parameters are mean, intercept, slope, R-squared, +++ significance of slope, RMSE, MAE, max. absolute residual and # of obs. --- fld_: folded image array --- d_fld: dates of folded time series @@ -57,7 +57,7 @@ double mae, rmse; if (trd_ == NULL) return CANCEL; - + #pragma omp parallel private(b,f,x,mx,my,vx,vy,cv,k,ssqe,sae,sxsq,maxe,seb,mae,rmse,off,slp,rsq,yhat,e,sig) shared(mask_,fld_,d_fld,trd_,nc,nf,by,nodata,in_ce,trd) default(none) { @@ -79,7 +79,7 @@ double mae, rmse; for (f=0; ffbw_, ts->d_fbw, mask_, nc, phl->nw, ts->trw_, nodata, _FLD_WEEK_, in_ce, &phl->tsa.trd); trend(ts->fbd_, ts->d_fbd, mask_, nc, phl->nd, ts->trd_, nodata, _FLD_DOY_, in_ce, &phl->tsa.trd); - + if (phl->tsa.lsp.otrd){ for (l=0; lfbw_, ts->d_fbw, mask_, nc, phl->nw, ts->caw_, nodata, _FLD_WEEK_, in_ce, &phl->tsa.trd); cat(ts->fbd_, ts->d_fbd, mask_, nc, phl->nd, ts->cad_, nodata, _FLD_DOY_, in_ce, &phl->tsa.trd); - + if (phl->tsa.lsp.ocat){ for (l=0; l Date: Wed, 26 Aug 2020 09:08:52 +0200 Subject: [PATCH 37/78] adding docs --- docs/source/history/vdev.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/source/history/vdev.rst b/docs/source/history/vdev.rst index a491ffd7..85804b9e 100755 --- a/docs/source/history/vdev.rst +++ b/docs/source/history/vdev.rst @@ -24,6 +24,12 @@ Master release: TBA The domain tag of the bandwise FORCE metadata domain was updated to be interoperable with a 4D data model (see force-stack below). + * in force-higher-level, TSA sub-module, trend and CAT analyses: + + Added a new band to for computing relative change, i.e. gain/loss relative to initial value: (slope*timesteps)/offset. + This makes most sense when the index has a physical meaning like fractional cover. + This does not make sense at all when offset is negative! Take care. 
+ * **FORCE AUX** * in force-mosaic: From 9e3fcc24d4a5491f8cea2d7a5bacd53c50f4df38 Mon Sep 17 00:00:00 2001 From: Stefan Ernst Date: Wed, 26 Aug 2020 10:31:24 +0200 Subject: [PATCH 38/78] Rewrite of GCS download --- bash/force-level1-esa.sh | 1 - bash/force-level1-gcs.sh | 581 +++++++++++++++++++++++++-------------- 2 files changed, 378 insertions(+), 204 deletions(-) diff --git a/bash/force-level1-esa.sh b/bash/force-level1-esa.sh index 42297a17..bc17a38c 100755 --- a/bash/force-level1-esa.sh +++ b/bash/force-level1-esa.sh @@ -69,7 +69,6 @@ HELP exit 1 } -echo $@ # check for optional args and set dryrun var case $1 in -d) diff --git a/bash/force-level1-gcs.sh b/bash/force-level1-gcs.sh index 0f1b9d38..3704a8b9 100755 --- a/bash/force-level1-gcs.sh +++ b/bash/force-level1-gcs.sh @@ -1,23 +1,25 @@ +#!/bin/bash + ########################################################################## -# -# This file is part of FORCE - Framework for Operational Radiometric +# +# This file is part of FORCE - Framework for Operational Radiometric # Correction for Environmental monitoring. -# +# # Copyright (C) 2013-2020 David Frantz -# +# # FORCE is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. -# +# # FORCE is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. -# +# # You should have received a copy of the GNU General Public License # along with FORCE. If not, see . 
-# +# ########################################################################## # Copyright (C) 2020 Stefan Ernst @@ -37,7 +39,7 @@ Usage: `basename $0` [-d] [-u] metadata-dir level-1-datapool queue aoi Mandatory arguments: metadata-dir directory where the Landsat metadata (csv file) is stored - + level-1-datapool An existing directory, your files will be stored here @@ -55,14 +57,14 @@ Mandatory arguments: (3) Path/Row (Landsat): "PPPRRR,PPPRRR,PPPRRR" Make sure to keep leading zeros - correct: 181034, incorrect: 18134 Tile name (Sentinel-2): "34UEU,33UUU" - + type of area of interest 1 - coordinates as text 2 - shapefile 3 - PathRow as text - + sensor - Specify the sensor(s) to include. Separate with commas while retaining the + Specify the sensor(s) to include. Separate with commas while retaining the order below. Landsat and Sentinel-2 sensors can not be combined. Landsat Sentinel-2 LT05 - Landsat 5 TM S2A @@ -77,52 +79,89 @@ Mandatory arguments: The cloud cover range must be specified in % Optional arguments (always placed AFTER platform/mirr and BEFORE mandatory arguments): - -d dry + -d dry will trigger a dry run that will only return the number of images and their total data volume - + -u update will update the metadata catalogue (download and extract from GCS) only the metadata dir is required as argument when using this option - - -h|--help + + -h|--help show this help - + + -t|--tier + Landsat collection tier level. 
Valid tiers: T1,T2,RT + Default: T1 + HELP exit 1 } +# TODO# + +# TIER for CSV check - how to handle if T1, T2, NRT + + +which_satellite() { + SENSIN=$(echo $SENSIN | tr '[:lower:]' '[:upper:]') # convert sensor strings to upper case to prevent unnecessary headaches + for SENSOR in $(echo $SENSIN | sed 's/,/ /g'); do + case $SENSOR in + S2A|S2B) + SENTINEL=1 ;; + LT04|LT05|LE07|LC08) + LANDSAT=1 ;; + *) + printf "%s\n" "Error: invalid sensor(s) specified" "Valid sensors: S2A,S2B,LT04,LT05,LE07,LC08" "" + exit 1 + esac + done +} + update_meta() { echo "Updating metadata catalogue..." - gsutil -m cp gs://gcp-public-data-$GCSNAME/index.csv.gz $METADIR + gsutil -m cp gs://gcp-public-data-$1/index.csv.gz $METADIR gunzip $METADIR/index.csv.gz - mv $METADIR/index.csv $METADIR/metadata_$SATELLITE.csv + mv $METADIR/index.csv $METADIR/metadata_$2.csv } -# set variables for urls, file names, layer names, print, ... -case $PLATFORM in - s2) - GCSNAME="sentinel-2" - SATELLITE="sentinel2" - PRINTNAME="Sentinel-2" ;; - ls) - GCSNAME="landsat" - SATELLITE="landsat" - PRINTNAME="Landsat" ;; -esac +SENSIN="LT04,LT05,LE07,LC08,S2A,S2B" +DATEMIN="1970-01-01" +DATEMAX=$(date +%Y-%m-%d) +CCMIN=0 +CCMAX=100 +TIER="T1" +DRYRUN=0 +LANDSAT=0 +SENTINEL=0 +# set variables for urls, file names, layer names, print, ... 
while :; do case $1 in - -d) + -c|--cloudcover) + CCMIN=$(echo $2 | cut -d"," -f1) + CCMAX=$(echo $2 | cut -d"," -f2) + shift ;; + -d|--daterange) + DATEMIN=$(echo $2 | cut -d"," -f1) + DATEMAX=$(echo $2 | cut -d"," -f2) + shift ;; + -n|--no-act) DRYRUN=1 ;; - -h|-\?|--help) + -h|-\?|--help) show_help ;; - -u) + -s|--sensors) + SENSIN=$2 + shift ;; + -t|--tier) + TIER=$2 + shift ;; + -u) METADIR=$2 if [ $# -lt 2 ]; then echo "Metadata directory not specified, exiting" - exit 1 + exit 1 elif [ $# -gt 2 ]; then echo "Error: Please only specify the metadata directory when using the update option (-u)" exit 1 @@ -130,76 +169,156 @@ while :; do echo "Can not write to metadata directory, exiting" exit 1 else - update_meta - echo "Done. You can run this script without option -u to download data now." - exit + which_satellite + if [ $SENTINEL -eq 1 ]; then + update_meta sentinel-2 sentinel2 + fi + if [ $LANDSAT -eq 1 ]; then + update_meta landsat landsat + fi + echo "Done. You can run this script without option -u to download data now." 
+ exit fi ;; - -?*) printf "%s\n" "" "Incorrect option specified" "" - show_help >&2 ;; - *) - break #no more options + -?*) + printf "%s\n" "" "Incorrect option specified" "" + show_help >&2 ;; + *) + break #no more options esac shift done -if [ $# -ne 10 ]; then - printf "%s\n" "" "Incorrect number of mandatory input arguments provided" +if [ $# -ne 4 ]; then + printf "%s\n" "" "Incorrect number of mandatory input arguments provided" "Expected: 4 Received: $#: $(echo "$@" | sed 's/ /,/g')" show_help fi +which_satellite + # ============================================================ # Check user input and set up variables METADIR=$1 POOL=$2 QUEUE=$3 AOI=$4 -AOITYPE=$5 -SENSIN=$6 -DATEMIN=$7 -DATEMAX=$8 -CCMIN=$9 -CCMAX=${10} - -echo $PLATFORM - -SENSIN=$(echo $SENSIN | tr '[:lower:]' '[:upper:]') # convert sensor strings to upper case to prevent unnecessary headaches -case $SENSIN in - S2A|S2A,S2B|S2B) - if [ $PLATFORM = "ls" ]; then - print "%s\n" "Error: Sentinel-2 sensor names for Landsat query received" - show_help - fi ;; - LT05|LT05,LE07|LT05,LC08|LT05,LE07,LC08|LE07|LE07,LC08|LC08) - if [ $PLATFORM = "s2" ]; then - printf "%s\n" "" "Error: Landsat sensor names for Sentinel-2 query received" - show_help - fi ;; - *) - printf "%s\n" "" "Error: invalid sensor or invalid combination of sensors speficied" - show_help ;; -esac - -if ! date -d $DATEMIN &> /dev/null; then - printf "%s\n" "" "starttime ($DATEMIN) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" + +if [[ -z $METADIR || -z $POOL || -z $QUEUE || -z $AOI || -z $CCMIN || -z $CCMAX || -z $DATEMIN || -z $DATEMAX || -z $SENSIN || -z $TIER ]]; then + printf "%s\n" "Error: One or more variables are undefined, please check:" "Metadata directory: $METADIR" "Level-1 pool: $POOL" "Queue: $QUEUE" "AOI: $AOI" "Sensors: $SENSIN" "Start date: $DATEMIN, End date: $DATEMAX" "Cloud cover minimum: $CCMIN, cloud cover maximum: $CCMAX" "Tier (Landsat only): $TIER" exit 1 - elif ! 
date -d $DATEMAX &> /dev/null; then - printf "%s\n" "" "endtime ($DATEMAX) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" +fi + + +for T in $(echo $TIER | sed 's/,/ /g'); do + case $T in + T1|T2|RT) + true ;; + *) + printf "%s\n" "Error: Invalid tier specified. Valid tiers: T1,T2,RT" "" + exit 1 ;; + esac +done + +if [ $(date -d $DATEMIN +%s) -ge $(date -d $DATEMAX +%s) ]; then + printf "%s\n" "Error: Start of date range is larger or equal to end of date range" "Start: $DATEMIN, End: $DATEMAX" "" exit 1 + elif ! date -d $DATEMIN &> /dev/null; then + printf "%s\n" "" "starttime ($DATEMIN) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" + exit 1 + elif ! date -d $DATEMAX &> /dev/null; then + printf "%s\n" "" "endtime ($DATEMAX) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" + exit 1 fi -# ============================================================ -# Check if metadata catalogue exists and is up to date -METACAT=$METADIR"/metadata_$SATELLITE.csv" -if ! [ -f $METACAT ]; then - echo $METACAT - printf "%s\n" "" "Metadata catalogue does not exist. 
Use the -u option to download / update the metadata catalogue" "" + + +# FAILS FOR FLOATING POINTS, BASH DOESN'T DO FLOAT COMPARISON + + + + +if [ $CCMIN -lt 0 ] || [ $CCMIN -gt 100 ] || [ $CCMAX -lt 0 ] || [ $CCMAX -gt 100 ]; then + printf "%s\n" "Error: Cloud cover minimum and maximum must be specified between 0 and 100" "Cloud cover minimum: $CCMIN, cloud cover maximum: $CCMAX" "" exit 1 + elif [ $CCMIN -gt $CCMAX ]; then + printf "%s\n" "Error: Cloud cover minimum is larger than cloud cover maximum" "Cloud cover minimum: $CCMIN, cloud cover maximum: $CCMAX" "" + exit 1 fi -METADATE=$(date -d $(stat $METACAT | grep "Change: " | cut -d" " -f2) +%s) -if [ $(date -d $DATEMAX +%s) -gt $METADATE ]; then - printf "%s\n" "" "WARNING: The selected time window exceeds the last update of the metadata catalogue" "Results may be incomplete, please consider updating the metadata catalogue using the -d option." +# type of area of interest +# 1 - coordinates as text +# 2 - shapefile +# 3 - PathRow as text +if [ -f $AOI ]; then + # check if AOI is GDAL readable file + if ogrinfo $AOI >& /dev/null; then + AOITYPE=2 + OGR=1 + else + # check if tile list / bounding box file contains whitespaces + if grep -q " " $AOI; then + printf "%s\n" "Error: whitespace in AOI definition detected." "Please make sure this file uses Linux style end of lines and does not contain whitespaces." "" + exit 1 + fi + AOI=$(cat $AOI | sed 's/,/./g') + OGR=0 + fi +else + # tile list / bounding box is command line input + AOI=$(echo $AOI | sed 's/,/ /g') + OGR=0 +fi + +isinrange() { + awk -v value="$1" -v lower="$2" -v upper="$3" 'BEGIN {print (lower <= value && value <= upper)}' +} +if [ $OGR -eq 0 ]; then + # check if AOI file contains bounding box coordinates and check if coords are valid lat/lon + if $(echo $AOI | grep -q "/"); then + AOITYPE=1 + for COORD in $AOI; do + LAT=$(echo COORD | cut -d"/" -f1) + LON=$(echo COORD | cut -d"/" -f2) + if ! 
grep -q "/" $COORD; then + printf "%s\n" "Error: At least one of the AOI coordinates does not seem to be in the format LAT/LON" "Coordinate: $COORD" "" + exit 1 + elif ! [ $(isinrange $LAT -90 90) -eq 1 ]; then + printf "%s\n" "Error: Latitude out of range" "Coordinate: $COORD - $LAT is not in range -90 to 90" "" + exit 1 + elif ! [ $(isinrange $LON -180 180) -eq 1 ]; then + printf "%s\n" "Error: Longitute out of range" "Coordinate: $COORD - $LON is not in range -180 to 180" "" + exit 1 + fi + done + # else, AOI file must be tile list - check if tiles are formatted correctly + else + AOITYPE=3 + for ENTRY in $AOI + do + if $(echo $ENTRY | grep -q -E "[0-2][0-9]{2}[0-2][0-9]{2}"); then + LSPATH="${ENTRY:0:3}" + LSROW="${ENTRY:3:6}" + if [ $(isinrange $LSPATH 1 233) -eq 0 ] || [ $(isinrange $LSPATH 1 248) -eq 0 ]; then + printf "%s\n" "Landsat PATH / ROW out of range. PATH not in range 1 to 233 or ROW not in range 1 to 248." "PATH / ROW received: $ENTRY" "" + exit 1 + fi + continue + elif $(echo $ENTRY | grep -q -E "T[0-6][0-9][A-Z]{3}"); then + if ! [ $(isinrange ${ENTRY:2:3} 1 60) ]; then + printf "%s\n" "MGRS tile number out of range. Valid range: 0 to 60, received: $ENTRY" "" + exit 1 + elif [[ -z "$(echo ${ENTRY:3:1} | grep -E "[C,D,E,F,G,H,J,K,L,M,N,P,Q,R,S,T,U,V,W,X]")" || -z "$(echo ${ENTRY:4:1} | grep -E "[A,B,C,D,E,F,G,H,K,L,M,N,P,Q,R,T,U,V,W,X,Y,Z]")" || -z "$(echo ${ENTRY:5:1} | grep -E "[A,B,C,D,E,F,G,H,J,K,L,M,N,P,Q,R,S,T,U,V]")" ]]; then + echo "$(echo ${ENTRY:5:1} | grep -E "[A,B,C,D,E,F,G,H,K,L,M,N,P,Q,R,T,U,V,W,X,Y,Z]")" + printf "%s\n" "Tile does not seem to be a valid Sentinel-2 tile: $ENTRY" "Please make sure all tiles exist." + exit 1 + fi + continue + else + printf "%s\n" "Tile list as AOI detected." "Error: One or more tiles seem to be formatted incorrectly." 
"Please check $ENTRY" "" + exit 1 + fi + done + fi fi @@ -213,142 +332,198 @@ if [ "$AOITYPE" -eq 1 ] || [ "$AOITYPE" -eq 2 ]; then fi -if [ "$AOITYPE" -eq 1 ]; then - printf "%s\n" "" "Searching for footprints / tiles intersecting with input geometry..." - WKT=$(echo $AOI | sed 's/,/%20/g; s/\//,/g') - WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetFeature&typename="$SATELLITE"&Filter=%3Cogc:Filter%3E%3Cogc:Intersects%3E%3Cogc:PropertyName%3Eshape%3C/ogc:PropertyName%3E%3Cgml:Polygon%20srsName=%22EPSG:4326%22%3E%3Cgml:outerBoundaryIs%3E%3Cgml:LinearRing%3E%3Cgml:coordinates%3E"$WKT"%3C/gml:coordinates%3E%3C/gml:LinearRing%3E%3C/gml:outerBoundaryIs%3E%3C/gml:Polygon%3E%3C/ogc:Intersects%3E%3C/ogc:Filter%3E" - TILERAW=$(ogr2ogr -f CSV /vsistdout/ -select "Name" WFS:"$WFSURL") - case $PLATFORM in - s2) TILES="_"$(echo $TILERAW | sed 's/Name, /T/; s/ /_|_T/g')"_" ;; - ls) TILES="_"$(echo $TILERAW | sed 's/PR, //; s/ /_|_/g')"_" ;; - esac - - -elif [ "$AOITYPE" -eq 2 ]; then - - if ! [ $(basename "$AOI" | cut -d"." -f 2-) == "shp" ]; then - printf "%s\n" "" "WARNING: AOI does not seem to be a shapefile. Other filetypes supported by GDAL should work, but are untested." - fi - printf "%s\n" "" "Searching for footprints / tiles intersecting with geometries of AOI shapefile..." - AOINE=$(echo $(basename "$AOI") | rev | cut -d"." 
-f 2- | rev) - BBOX=$(ogrinfo -so $AOI $AOINE | grep "Extent: " | sed 's/Extent: //; s/(//g; s/)//g; s/, /,/g; s/ - /,/') - WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetCapabilities&typename="$SATELLITE"&bbox="$BBOX - - ogr2ogr -f "GPKG" merged.gpkg WFS:"$WFSURL" -append -update - ogr2ogr -f "GPKG" merged.gpkg $AOI -append -update - - TILERAW=$(ogr2ogr -f CSV /vsistdout/ -dialect sqlite -sql "SELECT $SATELLITE.Name FROM $SATELLITE, $AOINE WHERE ST_Intersects($SATELLITE.geom, ST_Transform($AOINE.geom, 4326))" merged.gpkg) - TILES="_"$(echo $TILERAW | sed 's/Name, /T/; s/ /_|_T/g')"_" - rm merged.gpkg - -elif [ "$AOITYPE" -eq 3 ]; then - - TILERAW=$AOI - case $PLATFORM in - "s2") TILES="_T"$(echo $TILERAW | sed 's/,/_|_T/g')"_" ;; - "ls") TILES="_"$(echo $TILERAW | sed 's/,/_|_/g')"_" ;; +# ============================================================ +# Function get_data: +# 1. Prepare request +# 2. Query metadata catalogue +# 3. Download data +get_data() { + SATELLITE=$1 + PRINTNAME=$2 + case $SATELLITE in + landsat) SENSORS=$(echo $SENSIN | grep -o "L[C,E,T]0[4,5,7,8]") ;; + sentinel2) SENSORS=$(echo $SENSIN | grep -o "S2[A-B]") ;; esac -else - echo " Error: Please specify aoitype as 1 for coordinates of a polygon, " - echo " 2 for shapefile (point/polygon/line) or " - echo " 3 for comma-separated tile names " - exit -fi + # ============================================================ + # Check if metadata catalogue exists and is up to date + METACAT=$METADIR"/metadata_$SATELLITE.csv" + if ! [ -f $METACAT ]; then + printf "%s\n" "" "$METACAT: Metadata catalogue does not exist. 
Use the -u option to download / update the metadata catalogue" "" + exit 1 + fi -# ============================================================ -# Filter metadata and extract download links -printf "%s\n" "" "Querying the metadata catalogue for" "Tile(s): "$(echo $TILERAW | sed 's/Name, //; s/ /,/g') "Daterange: "$DATEMIN" to "$DATEMAX "Cloud cover minimum: "$CCMIN"%, maximum: "$CCMAX"%" "" - -if [ $PLATFORM = "s2" ]; then - LINKS=$(grep -E $TILES $METACAT | grep -E $(echo $SENSIN | sed s'/,/|/g') | awk -F "," '{OFS=","} {gsub("T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z|-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '{OFS=","} $5 >= start && $5 <= stop && $7 >= clow && $7 <= chigh') -elif [ $PLATFORM = "ls" ]; then - LINKS=$(grep -E $TILES $METACAT | grep -E $(echo "$SENSIN" | sed 's/,/_|/g')"_" | awk -F "," '{OFS=","} {gsub("-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '$5 >= start && $5 <= stop && $6 == 01 && $7 == "T1" && $12 >= clow && $12 <= chigh') -fi + METADATE=$(date -d $(stat $METACAT | grep "Change: " | cut -d" " -f2) +%s) + if [ $(date -d $DATEMAX +%s) -gt $METADATE ]; then + printf "%s\n" "" "WARNING: The selected time window exceeds the last update of the $PRINTNAME metadata catalogue." "Results may be incomplete, please consider updating the metadata catalogue using the -d option." + fi -printf "%s" "$LINKS" > filtered_metadata.txt -case $PLATFORM in - "s2") SIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$6/1048576} END {printf "%f", s}') ;; - "ls") SIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$17/1048576} END {printf "%f", s}') ;; -esac -NSCENES=$(sed -n '$=' filtered_metadata.txt) -rm filtered_metadata.txt + if [ "$AOITYPE" -eq 1 ]; then + printf "%s\n" "" "Searching for footprints / tiles intersecting with input geometry..." 
+ WKT=$(echo $AOI | sed 's/,/%20/g; s/\//,/g') + WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetFeature&typename="$SATELLITE"&Filter=%3Cogc:Filter%3E%3Cogc:Intersects%3E%3Cogc:PropertyName%3Eshape%3C/ogc:PropertyName%3E%3Cgml:Polygon%20srsName=%22EPSG:4326%22%3E%3Cgml:outerBoundaryIs%3E%3Cgml:LinearRing%3E%3Cgml:coordinates%3E"$WKT"%3C/gml:coordinates%3E%3C/gml:LinearRing%3E%3C/gml:outerBoundaryIs%3E%3C/gml:Polygon%3E%3C/ogc:Intersects%3E%3C/ogc:Filter%3E" + TILERAW=$(ogr2ogr -f CSV /vsistdout/ -select "Name" WFS:"$WFSURL") + TILES="_"$(echo $TILERAW | sed 's/Name, //; s/ /_|_/g')"_" + # case $SATELLITE in + # sentinel2) TILES="_T"$(echo $TILERAW | sed 's/Name, //; s/ /_|_T/g')"_" ;; + # landsat) TILES="_"$(echo $TILERAW | sed 's/Name, //; s/ /_|_/g')"_" ;; + # esac + + elif [ "$AOITYPE" -eq 2 ]; then + printf "%s\n" "" "Searching for footprints / tiles intersecting with geometries of AOI shapefile..." + AOINE=$(echo $(basename "$AOI") | rev | cut -d"." -f 2- | rev) + BBOX=$(ogrinfo -so $AOI $AOINE | grep "Extent: " | sed 's/Extent: //; s/(//g; s/)//g; s/, /,/g; s/ - /,/') + WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetCapabilities&typename="$SATELLITE"&bbox="$BBOX + + ogr2ogr -f "GPKG" merged.gpkg WFS:"$WFSURL" -append -update + ogr2ogr -f "GPKG" merged.gpkg $AOI -append -update + + TILERAW=$(ogr2ogr -f CSV /vsistdout/ -dialect sqlite -sql "SELECT $SATELLITE.Name FROM $SATELLITE, $AOINE WHERE ST_Intersects($SATELLITE.geom, ST_Transform($AOINE.geom, 4326))" merged.gpkg) + TILES="_"$(echo $TILERAW | sed 's/Name, //; s/ /_|_/g')"_" + rm merged.gpkg + + elif [ "$AOITYPE" -eq 3 ]; then + sensor_tile_mismatch() { + printf "%s\n" "" "Error: $PRINTNAME sensor(s) specified, but no $PRINTNAME tiles identified." "Check if sensors and footprints match or use the -s option to specify sensors to query." 
"" + exit 1 + } + case $SATELLITE in + landsat) + TILERAW=$(echo "$AOI" | grep -E -o "[0-9]{6}") || sensor_tile_mismatch + TILES="_"$(echo $TILERAW | sed 's/ /_|_/g')"_" ;; + sentinel2) + TILERAW=$(echo "$AOI" | grep -E -o "T[0-6][0-9][A-Z]{3}") || sensor_tile_mismatch + TILES="_"$(echo $TILERAW | sed 's/ /_|_/g')"_" ;; + + esac + fi -# ============================================================ -# Get total number and size of scenes matching criteria -UNIT="MB" -if [ ${SIZE%%.*} -gt 1024 ]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="GB" -fi -if [ ${SIZE%%.*} -gt 1024 ]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="TB" -fi -if [ ${SIZE%%.*} -gt 1024 ]; then - SIZE=$(echo $SIZE | awk '{print $1 / 1024}') - UNIT="PB" -fi - -if [ -z $NSCENES ];then - printf "%s\n" "There were no $PRINTNAME Level 1 scenes found matching the search criteria" "" - exit 0 -else - printf "%s\n" "$NSCENES $PRINTNAME Level 1 scenes matching criteria found" "$SIZE $UNIT data volume found" "" -fi + printf "%s\n" "" "Querying the metadata catalogue for $PRINTNAME data" "Sensor(s): "$(echo $SENSORS | sed 's/ /,/g') + if [ $SATELLITE == "landsat" ]; then + printf "%s\n" "Tier(s): $TIER" + fi + printf "%s\n" "Tile(s): "$(echo $TILERAW | sed 's/Name, //; s/ /,/g') "Daterange: "$DATEMIN" to "$DATEMAX "Cloud cover minimum: "$CCMIN"%, maximum: "$CCMAX"%" "" -if [ $DRYRUN -eq 1 ]; then - exit 0 -fi + # ============================================================ + # Filter metadata and extract download links + if [ $SATELLITE = "sentinel2" ]; then + LINKS=$(grep -E $TILES $METACAT | grep -E $(echo ""$SENSORS"" | sed 's/ /_|/g')"_" | awk -F "," '{OFS=","} {gsub("T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z|-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '{OFS=","} $5 >= start && $5 <= stop && $7 >= clow && $7 <= chigh') + elif [ $SATELLITE = "landsat" ]; then + LINKS=$(grep -E $TILES $METACAT | grep -E $(echo ""$SENSORS"" | sed 's/ 
/_|/g')"_" | grep -E $(echo "_"$TIER | sed 's/,/,|_/g')"," | awk -F "," '{OFS=","} {gsub("-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '$5 >= start && $5 <= stop && $6 == 01 && $12 >= clow && $12 <= chigh') + fi + printf "%s" "$LINKS" > filtered_metadata.txt + case $SATELLITE in + sentinel2) TOTALSIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$6/1048576} END {printf "%f", s}') ;; + landsat) TOTALSIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$17/1048576} END {printf "%f", s}') ;; + esac + NSCENES=$(sed -n '$=' filtered_metadata.txt) + #rm filtered_metadata.txt -# ============================================================ -# Download scenes -POOL=$(cd $POOL; pwd) -echo "Starting to download "$NSCENES" "$PRINTNAME" Level 1 scenes" -ITER=1 -for LINK in $LINKS -do - SCENEID=$(echo $LINK | cut -d, -f 2) - if [ $SATELLITE = "sentinel2" ]; then - TILE=$(echo $LINK | cut -d, -f 1 | grep -o -E "T[0-9]{2}[A-Z]{3}") - URL=$(echo $LINK | cut -d, -f 14) - elif [ $SATELLITE = "landsat" ]; then - TILE=$(echo $SCENEID | cut -d_ -f 3) - URL=$(echo $LINK | cut -d, -f 18) + # ============================================================ + # Get total number and size of scenes matching criteria + UNIT="MB" + PRSIZE=$TOTALSIZE + if [ ${PRSIZE%%.*} -gt 1024 ]; then + PRSIZE=$(echo $PRSIZE | awk '{print $1 / 1024}') + UNIT="GB" fi - - # create target directory if it doesn't exist - TILEPATH=$POOL/$TILE - if [ ! -w $TILEPATH ]; then - mkdir $TILEPATH - if [ ! -w $TILEPATH ]; then - echo "$TILEPATH: Creating directory failed." 
- exit 1 - fi + if [ ${PRSIZE%%.*} -gt 1024 ]; then + PRSIZE=$(echo $PRSIZE | awk '{print $1 / 1024}') + UNIT="TB" fi - - # Check if scene already exists# - SCENEPATH=$TILEPATH/$SCENEID - if [ $SATELLITE = "sentinel2" ]; then - SCENEPATH=$SCENEPATH".SAFE" + if [ ${PRSIZE%%.*} -gt 1024 ]; then + PRSIZE=$(echo $PRSIZE | awk '{print $1 / 1024}') + UNIT="PB" fi - if [ -d $SCENEPATH ]; then - echo "Scene "$SCENEID"("$ITER" of "$NSCENES") exists, skipping..." - ((ITER++)) - continue + + if [ -z $NSCENES ];then + printf "%s\n" "There were no $PRINTNAME Level 1 scenes found matching the search criteria." "" + else + LC_NUMERIC="en_US.UTF-8" printf "%s\n%.2f%s\n" "$NSCENES $PRINTNAME Level 1 scenes matching criteria found" "$PRSIZE" "$UNIT data volume found." fi - - echo "Downloading "$SCENEID"("$ITER" of "$NSCENES")..." - gsutil -m -q cp -c -L $POOL"/download_log.txt" -R $URL $TILEPATH - - echo "$SCENEPATH QUEUED" >> $QUEUE - - ((ITER++)) -done -printf "%s\n" "" "Finished." "" + + # ============================================================ + # Download scenes + progress() { + local width=80 perc=$(echo $1 | cut -d"." -f1); shift + printf -v dots "%*s" "$(( $perc*$width/100 ))" ""; dots=${dots// /=}; + printf "\r\e[K|%-*s| %3d %% %s" "$w" "$dots" "$p" "$*"; + } + dl_done() { + SIZEDONE=$(awk -v done=$SIZEDONE -v fsize=$FILESIZE 'BEGIN { print (done + fsize) }' ) + PERCDONE=$(awk -v total=$TOTALSIZE -v done=$SIZEDONE 'BEGIN { printf( "%.2f\n", (100 / total * done) )}') + } + PERCDONE=0 + SIZEDONE=0 + if [[ $DRYRUN -eq 0 && ! 
-z $LINKS ]]; then + + POOL=$(cd $POOL; pwd) + echo "Starting to download "$NSCENES" "$PRINTNAME" Level 1 scenes" + ITER=1 + for LINK in $LINKS + do + SCENEID=$(echo $LINK | cut -d, -f 2) + + if [ $SATELLITE = "sentinel2" ]; then + TILE=$(echo $LINK | cut -d, -f 1 | grep -o -E "T[0-9]{2}[A-Z]{3}") + URL=$(echo $LINK | cut -d, -f 14) + FILESIZE=$(( $(echo $LINK | cut -d, -f 6) / 1048576 )) + elif [ $SATELLITE = "landsat" ]; then + TILE=$(echo $SCENEID | cut -d_ -f 3) + URL=$(echo $LINK | cut -d, -f 18) + FILESIZE=$(( $(echo $LINK | cut -d, -f 17) / 1048576 )) + fi + + # create target directory if it doesn't exist + TILEPATH=$POOL/$TILE + if [ ! -w $TILEPATH ]; then + mkdir $TILEPATH + if [ ! -w $TILEPATH ]; then + echo "$TILEPATH: Creating directory failed." + exit 1 + fi + fi + + # Check if scene already exists +# Implement size check to catch broken downloads! + SCENEPATH=$TILEPATH/$SCENEID + if [ $SATELLITE = "sentinel2" ]; then + SCENEPATH=$SCENEPATH".SAFE" + fi + if [ -d $SCENEPATH ]; then + echo "Scene "$SCENEID"("$ITER" of "$NSCENES") exists, skipping..." + dl_done + ((ITER++)) + continue + fi + printf "Downloading "$SCENEID"("$ITER" of "$NSCENES")..." + gsutil -m -q cp -c -L $POOL"/download_log.txt" -R $URL $TILEPATH + + lockfile-create $QUEUE + echo "$SCENEPATH QUEUED" >> $QUEUE + lockfile-remove $QUEUE + + dl_done + progress $PERCDONE + ((ITER++)) + done + fi +} + +if [[ $LANDSAT -eq 1 && $SENTINEL -eq 1 ]]; then + printf "%s\n" "" "Landsat and Sentinel-2 data requested." "Landsat data will be queried and downloaded first." +fi +if [ $LANDSAT -eq 1 ]; then + get_data landsat Landsat +fi +if [ $SENTINEL -eq 1 ]; then + get_data sentinel2 Sentinel-2 +fi + +printf "%s\n" "" "Done." 
"" exit 0 \ No newline at end of file From 6b6330e7e2eda77a5a385021d798cb08cd3c5c3d Mon Sep 17 00:00:00 2001 From: David Frantz Date: Wed, 26 Aug 2020 12:02:43 +0200 Subject: [PATCH 39/78] force-magic-parameters.sh can handle pairwise combinations --- bash/force-magic-parameters.sh | 148 ++++++++++++++++++++++++++------- 1 file changed, 119 insertions(+), 29 deletions(-) diff --git a/bash/force-magic-parameters.sh b/bash/force-magic-parameters.sh index 688471fa..5afe5012 100755 --- a/bash/force-magic-parameters.sh +++ b/bash/force-magic-parameters.sh @@ -22,54 +22,143 @@ # ########################################################################## -EXPECTED_ARGS=1 +# functions/definitions ------------------------------------------------------------------ +PROG=`basename $0`; +BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -if [ $# -ne $EXPECTED_ARGS ] -then - echo "Usage: `basename $0` parameter-file" - echo "" - exit -fi +MANDATORY_ARGS=1 + +echoerr() { echo "$PROG: $@" 1>&2; } # warnings and/or errormessages go to STDERR + +cmd_not_found() { # check required external commands + for cmd in "$@"; do + stat=`which $cmd` + if [ $? 
!= 0 ] ; then echoerr "\"$cmd\": external command not found, terminating..."; exit 1; fi + done +} + +help () { +cat < Date: Wed, 26 Aug 2020 12:07:41 +0200 Subject: [PATCH 40/78] updated docs and version history --- .../components/auxilliary/magic-parameters.rst | 18 ++++++++++++++++-- docs/source/history/vdev.rst | 6 ++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/docs/source/components/auxilliary/magic-parameters.rst b/docs/source/components/auxilliary/magic-parameters.rst index 6d21c014..7c4d01c0 100755 --- a/docs/source/components/auxilliary/magic-parameters.rst +++ b/docs/source/components/auxilliary/magic-parameters.rst @@ -19,11 +19,25 @@ Usage force-magic-parameters - Usage: force-magic-parameters parameter-file - + Usage: force-magic-parameters.sh [-h] [-c {all,paired}] parameter-file + + -h = show this help + -c = combination type + all: all combinations (default) + paired: pairwise combinations + + * parameter-file | Any FORCE parameterfile can be used. + +* combination type + + | If this argument is not given, we will use all combinations of all replacement vectors. + | This is the same as ``-c all``. + + | If ``-c paired``, pairwise combinations are used. + | In this case, the repplacement vectors must be of the same length. Syntax diff --git a/docs/source/history/vdev.rst b/docs/source/history/vdev.rst index 85804b9e..8fd53b56 100755 --- a/docs/source/history/vdev.rst +++ b/docs/source/history/vdev.rst @@ -54,6 +54,8 @@ Master release: TBA The second option implements a 4D data model (think of time series), which is consistent with the QGIS plugins Raster Time Series Manager and Raster Data Plotting (C) Andreas Rabe. A tutorial is in development to illustrate the interoperable use between FORCE and said QGIS plugins. + * in force-magic-parameters: - - + The user can now change between "all combinations" or "paired combinations".
+ Please refer to the program description: + https://force-eo.readthedocs.io/en/latest/components/auxilliary/magic-parameters.htm \ No newline at end of file From aefdf0acde7d1adf084604a30289d182db09c719 Mon Sep 17 00:00:00 2001 From: Stefan Ernst Date: Wed, 26 Aug 2020 14:31:29 +0200 Subject: [PATCH 41/78] cmd line parsing now uses getopt --- bash/force-level1-gcs.sh | 86 +++++++++++++++++++++++----------------- 1 file changed, 49 insertions(+), 37 deletions(-) diff --git a/bash/force-level1-gcs.sh b/bash/force-level1-gcs.sh index 3704a8b9..3b02c046 100755 --- a/bash/force-level1-gcs.sh +++ b/bash/force-level1-gcs.sh @@ -1,5 +1,9 @@ #!/bin/bash +# TO DO +# 1. Sanity check for CC fails if cc is specified as float +# 2. Check filesize if a scene has been downloaded already to catch broken downloads (delete and do again or check if gsutil can handle partial downloads) + ########################################################################## # # This file is part of FORCE - Framework for Operational Radiometric @@ -30,6 +34,8 @@ trap "echo Exited!; exit;" SIGINT SIGTERM # make sure that CTRL-C breaks out of download loop set -e # make sure script exits if any process exits unsuccessfully +echoerr() { echo "$PROG: $@" 1>&2; } # warnings and/or errormessages go to STDERR + show_help() { cat << HELP @@ -137,27 +143,33 @@ DRYRUN=0 LANDSAT=0 SENTINEL=0 # set variables for urls, file names, layer names, print, ... 
+ +TEMP=`getopt --o c:d:nhs:t:u --long cloudcover:,daterange:,no-act,help,sensors:,tier:,update -n 'force-level1' -- "$@"` +eval set -- $TEMP + + +echo $@ while :; do case $1 in - -c|--cloudcover) + -c | --cloudcover) CCMIN=$(echo $2 | cut -d"," -f1) CCMAX=$(echo $2 | cut -d"," -f2) shift ;; - -d|--daterange) + -d | --daterange) DATEMIN=$(echo $2 | cut -d"," -f1) DATEMAX=$(echo $2 | cut -d"," -f2) shift ;; - -n|--no-act) + -n | --no-act) DRYRUN=1 ;; - -h|-\?|--help) + -h | --help) show_help ;; - -s|--sensors) + -s | --sensors) SENSIN=$2 shift ;; - -t|--tier) + -t | --tier) TIER=$2 shift ;; - -u) + -u | --update) METADIR=$2 if [ $# -lt 2 ]; then echo "Metadata directory not specified, exiting" @@ -179,15 +191,16 @@ while :; do echo "Done. You can run this script without option -u to download data now." exit fi ;; - -?*) - printf "%s\n" "" "Incorrect option specified" "" - show_help >&2 ;; + -- ) shift; break ;; + #-?*) + # printf "%s\n" "" "Incorrect option specified" "" + # show_help >&2 ;; *) break #no more options esac shift done - +echo $@ if [ $# -ne 4 ]; then printf "%s\n" "" "Incorrect number of mandatory input arguments provided" "Expected: 4 Received: $#: $(echo "$@" | sed 's/ /,/g')" show_help @@ -233,10 +246,6 @@ fi # FAILS FOR FLOATING POINTS, BASH DOESN'T DO FLOAT COMPARISON - - - - if [ $CCMIN -lt 0 ] || [ $CCMIN -gt 100 ] || [ $CCMAX -lt 0 ] || [ $CCMAX -gt 100 ]; then printf "%s\n" "Error: Cloud cover minimum and maximum must be specified between 0 and 100" "Cloud cover minimum: $CCMIN, cloud cover maximum: $CCMAX" "" exit 1 @@ -450,20 +459,20 @@ get_data() { # ============================================================ # Download scenes progress() { - local width=80 perc=$(echo $1 | cut -d"." 
-f1); shift - printf -v dots "%*s" "$(( $perc*$width/100 ))" ""; dots=${dots// /=}; - printf "\r\e[K|%-*s| %3d %% %s" "$w" "$dots" "$p" "$*"; - } - dl_done() { SIZEDONE=$(awk -v done=$SIZEDONE -v fsize=$FILESIZE 'BEGIN { print (done + fsize) }' ) PERCDONE=$(awk -v total=$TOTALSIZE -v done=$SIZEDONE 'BEGIN { printf( "%.2f\n", (100 / total * done) )}') - } + local WIDTH=$(($(tput cols) - 9)) PERCINT=$(( $(echo $PERCDONE | cut -d"." -f1) + 1 )) + printf -v INCREMENT "%*s" "$(( $PERCINT*$WIDTH/100 ))" ""; INCREMENT=${INCREMENT// /=} + printf "\r\e[K|%-*s| %3d %% %s" "$WIDTH" "$INCREMENT" "$PERCINT" "$*" + } + PERCDONE=0 SIZEDONE=0 if [[ $DRYRUN -eq 0 && ! -z $LINKS ]]; then POOL=$(cd $POOL; pwd) - echo "Starting to download "$NSCENES" "$PRINTNAME" Level 1 scenes" + printf "%s\n" "" "Starting to download "$NSCENES" "$PRINTNAME" Level 1 scenes" "" "" "" "" "" + ITER=1 for LINK in $LINKS do @@ -479,6 +488,20 @@ get_data() { FILESIZE=$(( $(echo $LINK | cut -d, -f 17) / 1048576 )) fi + SCENEPATH=$TILEPATH/$SCENEID + if [ $SATELLITE = "sentinel2" ]; then + SCENEPATH=$SCENEPATH".SAFE" + fi + # Check if scene already exists + # Implement size check to catch broken downloads! + if [ -d $SCENEPATH ]; then + printf "\e[4A\e[100D\e[2KScene "$SCENEID"("$ITER" of "$NSCENES") exists, skipping...\e[4B" + #dl_done + progress + ((ITER++)) + continue + fi + # create target directory if it doesn't exist TILEPATH=$POOL/$TILE if [ ! -w $TILEPATH ]; then @@ -489,27 +512,16 @@ get_data() { fi fi - # Check if scene already exists -# Implement size check to catch broken downloads! - SCENEPATH=$TILEPATH/$SCENEID - if [ $SATELLITE = "sentinel2" ]; then - SCENEPATH=$SCENEPATH".SAFE" - fi - if [ -d $SCENEPATH ]; then - echo "Scene "$SCENEID"("$ITER" of "$NSCENES") exists, skipping..." - dl_done - ((ITER++)) - continue - fi - printf "Downloading "$SCENEID"("$ITER" of "$NSCENES")..." 
+ + printf "\e[100D\e[2A\e[2KDownloading "$SCENEID"("$ITER" of "$NSCENES")...\e[2B" gsutil -m -q cp -c -L $POOL"/download_log.txt" -R $URL $TILEPATH lockfile-create $QUEUE echo "$SCENEPATH QUEUED" >> $QUEUE lockfile-remove $QUEUE - dl_done - progress $PERCDONE + #dl_done + progress ((ITER++)) done fi From f9517a7097de6922db9f2541d0cb26c72e5b4548 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Wed, 26 Aug 2020 16:00:44 +0200 Subject: [PATCH 42/78] reworked getopt --- bash/force-magic-parameters.sh | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/bash/force-magic-parameters.sh b/bash/force-magic-parameters.sh index 5afe5012..b6ec4533 100755 --- a/bash/force-magic-parameters.sh +++ b/bash/force-magic-parameters.sh @@ -57,22 +57,17 @@ exit 1 #cmd_not_found "..."; # important, check required commands !!! dies on missing # now get the options -------------------------------------------------------------------- +ARGS=`getopt -o hc: --long help,combine: -n "$0" -- "$@"` +if [ $? 
!= 0 ] ; then help; fi +eval set -- "$ARGS" + combtype='all' while :; do - case $1 in - -h|-\?|--help) - help ;; - -c|--combine) - shift - if [ $# -le $MANDATORY_ARGS ]; then - echoerr "Option -c is missing an argument or mandatory argument is missing"; help; - fi - combtype=$1 ;; - -?*) - echoerr "Incorrect option specified"; - help ;; - *) - break #no more options + case "$1" in + -h|--help) help ;; + -c|--combine) combtype="$2"; shift ;; + -- ) shift; break ;; + * ) break ;; esac shift done From 8f7c337ac71e1e49a6816a6b33abfe3d3224aed8 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Wed, 26 Aug 2020 19:44:17 +0200 Subject: [PATCH 43/78] adding docs --- docs/source/components/auxilliary/magic-parameters.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/components/auxilliary/magic-parameters.rst b/docs/source/components/auxilliary/magic-parameters.rst index 7c4d01c0..13249357 100755 --- a/docs/source/components/auxilliary/magic-parameters.rst +++ b/docs/source/components/auxilliary/magic-parameters.rst @@ -37,7 +37,7 @@ Usage | This is the same as ``-c all``. | If ``-c paired``, pairwise combinations are used. - | In this case, the repplacement vectors must be of the same length. + | In this case, the replacement vectors must be of the same length. Syntax From 0af765bb27a40a632bdb9f3cb1d35ab57a6f7f1a Mon Sep 17 00:00:00 2001 From: Stefan Ernst Date: Thu, 27 Aug 2020 16:07:07 +0200 Subject: [PATCH 44/78] check for downloaded files, cleaning up --- bash/force-level1-gcs.sh | 361 ++++++++++++++++++++------------------- bash/force-level1.sh | 56 ------ 2 files changed, 182 insertions(+), 235 deletions(-) delete mode 100755 bash/force-level1.sh diff --git a/bash/force-level1-gcs.sh b/bash/force-level1-gcs.sh index 3b02c046..2bed5da7 100755 --- a/bash/force-level1-gcs.sh +++ b/bash/force-level1-gcs.sh @@ -1,9 +1,5 @@ #!/bin/bash -# TO DO -# 1. Sanity check for CC fails if cc is specified as float -# 2. 
Check filesize if a scene has been downloaded already to catch broken downloads (delete and do again or check if gsutil can handle partial downloads) - ########################################################################## # # This file is part of FORCE - Framework for Operational Radiometric @@ -33,81 +29,119 @@ trap "echo Exited!; exit;" SIGINT SIGTERM # make sure that CTRL-C breaks out of download loop set -e # make sure script exits if any process exits unsuccessfully +echoerr() { echo "$PROG: $@" 1>&2; } -echoerr() { echo "$PROG: $@" 1>&2; } # warnings and/or errormessages go to STDERR +# ============================================================ +# check dependencies +DEPENDENCIES=('gsutil' 'gunzip' 'ogrinfo') +for DEPENDENCY in "${DEPENDENCIES[@]}"; do + if ! [ -x "$(command -v $DEPENDENCY)" ]; then + printf "%s\n" "" "Error: could not find $DEPENDENCY" "Please make sure all required external programs are installed" "Required programs: ${DEPENDENCIES[@]}" + exit 1 + fi +done +# ============================================================ +# set up functions show_help() { cat << HELP -Usage: `basename $0` [-d] [-u] metadata-dir level-1-datapool queue aoi - aoitype sensor starttime endtime min-cc max-cc +Usage: `basename $0` [optional arguments] metadata-dir level-1-datapool queue aoi Mandatory arguments: + metadata-dir - directory where the Landsat metadata (csv file) is stored + Directory where the metadata catalogues (csv file) are stored level-1-datapool An existing directory, your files will be stored here queue - Downloaded files are appended to a file queue, which is needed for - the Level 2 processing. The file doesn't need to exist. If it exists, - new lines will be appended on successful ingestion + Downloaded files are appended to a file queue, which is needed for the Level 2 + processing. The file doesn't need to exist. 
If it exists, new lines will be + appended on successful ingestion area of interest - (1) The coordinates of your study area: "X1/Y1,X2/Y2,X3/Y3,...,X1/Y1" - The polygon must be closed (first X/Y = last X/Y). X/Y must be given as - decimal degrees with negative values for West and South coordinates. + (1) The coordinates of your study area: + Path to a file containing one coordinate per line or + as comma separated command line input + The polygon must be closed (first X/Y = last X/Y). X/Y must be given as + decimal degrees with negative values for West and South coordinates. + Comma-separated if provided on command line, one line per coordinate pair + if provided in a text file. (2) a shapefile (point/polygon/line). On-the-fly reprojection is provided, - but using EPSG4326 is recommended - (3) Path/Row (Landsat): "PPPRRR,PPPRRR,PPPRRR" + but using EPSG4326 is recommended. + (3) Path/Row (Landsat): "PPPRR" Make sure to keep leading zeros - correct: 181034, incorrect: 18134 - Tile name (Sentinel-2): "34UEU,33UUU" - - type of area of interest - 1 - coordinates as text - 2 - shapefile - 3 - PathRow as text - - sensor - Specify the sensor(s) to include. Separate with commas while retaining the - order below. Landsat and Sentinel-2 sensors can not be combined. - Landsat Sentinel-2 - LT05 - Landsat 5 TM S2A - LE07 - Landsat 7 ETM+ S2B - LC08 - Landsat 8 OLI - Correct: "LT05,LC08", incorrect: "LC08,LT05" or "LE07,S2B" - - starttime endtime - Dates must be given as YYYY-MM-DD + Tile name (Sentinel-2): "TXXXXX" + Make sure to keep the leading T before the MGRS tile number + Comma-separated if provided on command line, one line per tile if + provided in a text file. 
+ + +Optional arguments (always placed BEFORE mandatory arguments): - min-cc max-cc + -c | --cloudcover + minimum,maximum The cloud cover range must be specified in % - -Optional arguments (always placed AFTER platform/mirr and BEFORE mandatory arguments): - -d dry - will trigger a dry run that will only return the number of images + Default: 0,100 + + -d | --daterange + starttime,endtime + Dates must be given in the following format: YYYY-MM-DD,YYYY-MM-DD + Default: 1970-01-01,today + + -h | --help + Show this help + + -n | --no-act + Will trigger a dry run that will only return the number of images and their total data volume + + -s | --sensor + Sensors to include in the query, comma-separated. + Valid sensors: + Landsat Sentinel-2 + LT04 - Landsat 4 TM S2A + LT05 - Landsat 5 TM S2B + LE07 - Landsat 7 ETM+ + LC08 - Landsat 8 OLI + Default: LT04,LT05,LE07,LC08,S2A,S2B - -u update - will update the metadata catalogue (download and extract from GCS) - only the metadata dir is required as argument when using this option - - -h|--help - show this help - - -t|--tier + -t | --tier Landsat collection tier level. Valid tiers: T1,T2,RT Default: T1 + + -u | --update + Will update the metadata catalogue (download and extract from GCS) + Only specify the metadata dir as argument when using this option HELP exit 1 } -# TODO# +is_in_range() { + awk -v value="$1" -v lower="$2" -v upper="$3" 'BEGIN {print (lower <= value && value <= upper)}' +} + +is_smaller() { + awk -v val1="$1" -v val2="$2" 'BEGIN {print (val1 < val2)}' +} -# TIER for CSV check - how to handle if T1, T2, NRT +show_progress() { + SIZEDONE=$(awk -v done=$SIZEDONE -v fsize=$FILESIZE 'BEGIN { print (done + fsize) }' ) + PERCDONE=$(awk -v total=$TOTALSIZE -v done=$SIZEDONE 'BEGIN { printf( "%.2f\n", (100 / total * done) )}') + local WIDTH=$(($(tput cols) - 9)) PERCINT=$(( $(echo $PERCDONE | cut -d"." 
-f1) + 1 )) + printf -v INCREMENT "%*s" "$(( $PERCINT*$WIDTH/100 ))" ""; INCREMENT=${INCREMENT// /=} + printf "\r\e[K|%-*s| %3d %% %s" "$WIDTH" "$INCREMENT" "$PERCINT" "$*" +} +update_meta() { + echo "Updating metadata catalogue..." + gsutil -m cp gs://gcp-public-data-$1/index.csv.gz $METADIR + gunzip $METADIR/index.csv.gz + mv $METADIR/index.csv $METADIR/metadata_$2.csv +} which_satellite() { SENSIN=$(echo $SENSIN | tr '[:lower:]' '[:upper:]') # convert sensor strings to upper case to prevent unnecessary headaches @@ -118,21 +152,15 @@ which_satellite() { LT04|LT05|LE07|LC08) LANDSAT=1 ;; *) - printf "%s\n" "Error: invalid sensor(s) specified" "Valid sensors: S2A,S2B,LT04,LT05,LE07,LC08" "" + printf "%s\n" "" "Error: invalid sensor(s) specified" "Sensors provided: $SENSIN" "Valid sensors: S2A,S2B,LT04,LT05,LE07,LC08" "" exit 1 esac done } -update_meta() { - echo "Updating metadata catalogue..." - gsutil -m cp gs://gcp-public-data-$1/index.csv.gz $METADIR - gunzip $METADIR/index.csv.gz - mv $METADIR/index.csv $METADIR/metadata_$2.csv -} - - +# ============================================================ +# Initialize arguments and parse command line input SENSIN="LT04,LT05,LE07,LC08,S2A,S2B" DATEMIN="1970-01-01" DATEMAX=$(date +%Y-%m-%d) @@ -142,15 +170,14 @@ TIER="T1" DRYRUN=0 LANDSAT=0 SENTINEL=0 -# set variables for urls, file names, layer names, print, ... - -TEMP=`getopt --o c:d:nhs:t:u --long cloudcover:,daterange:,no-act,help,sensors:,tier:,update -n 'force-level1' -- "$@"` -eval set -- $TEMP +UPDATE=0 +ARGS=`getopt -o c:d:nhs:t:u --long cloudcover:,daterange:,no-act,help,sensors:,tier:,update -n $0 -- "$@"` +if [ $? != 0 ] ; then echo "Error in command line options. Please check your options." 
>&2 ; show_help ; fi +eval set -- "$ARGS" -echo $@ while :; do - case $1 in + case "$1" in -c | --cloudcover) CCMIN=$(echo $2 | cut -d"," -f1) CCMAX=$(echo $2 | cut -d"," -f2) @@ -164,43 +191,47 @@ while :; do -h | --help) show_help ;; -s | --sensors) - SENSIN=$2 + SENSIN="$2" shift ;; -t | --tier) - TIER=$2 + TIER="$2" shift ;; -u | --update) - METADIR=$2 - if [ $# -lt 2 ]; then - echo "Metadata directory not specified, exiting" - exit 1 - elif [ $# -gt 2 ]; then - echo "Error: Please only specify the metadata directory when using the update option (-u)" - exit 1 - elif ! [ -w $METADIR ]; then - echo "Can not write to metadata directory, exiting" - exit 1 - else - which_satellite - if [ $SENTINEL -eq 1 ]; then - update_meta sentinel-2 sentinel2 - fi - if [ $LANDSAT -eq 1 ]; then - update_meta landsat landsat - fi - echo "Done. You can run this script without option -u to download data now." - exit - fi ;; - -- ) shift; break ;; - #-?*) - # printf "%s\n" "" "Incorrect option specified" "" - # show_help >&2 ;; + METADIR="$2" + UPDATE=1 ;; + -- ) + shift; break ;; *) - break #no more options + break esac shift done -echo $@ + +# Check for update flag and update metadata catalogue if set +if [ $UPDATE -eq 1 ]; then + if [ $# -lt 2 ]; then + echo "Metadata directory not specified, exiting" + exit 1 + elif [ $# -gt 2 ]; then + echo "Error: Please only specify the metadata directory when using the update option (-u)" + exit 1 + elif ! [ -w $METADIR ]; then + echo "Can not write to metadata directory, exiting" + exit 1 + else + which_satellite + if [ $SENTINEL -eq 1 ]; then + update_meta sentinel-2 sentinel2 + fi + if [ $LANDSAT -eq 1 ]; then + update_meta landsat landsat + fi + fi + echo "Done. You can run this script without option -u to download data now." 
+ exit 0 +fi + +# check if number of mandatory args is correct if [ $# -ne 4 ]; then printf "%s\n" "" "Incorrect number of mandatory input arguments provided" "Expected: 4 Received: $#: $(echo "$@" | sed 's/ /,/g')" show_help @@ -215,12 +246,13 @@ POOL=$2 QUEUE=$3 AOI=$4 +# check for empty options if [[ -z $METADIR || -z $POOL || -z $QUEUE || -z $AOI || -z $CCMIN || -z $CCMAX || -z $DATEMIN || -z $DATEMAX || -z $SENSIN || -z $TIER ]]; then - printf "%s\n" "Error: One or more variables are undefined, please check:" "Metadata directory: $METADIR" "Level-1 pool: $POOL" "Queue: $QUEUE" "AOI: $AOI" "Sensors: $SENSIN" "Start date: $DATEMIN, End date: $DATEMAX" "Cloud cover minimum: $CCMIN, cloud cover maximum: $CCMAX" "Tier (Landsat only): $TIER" + printf "%s\n" "" "Error: One or more variables are undefined, please check the following" "" "Metadata directory: $METADIR" "Level-1 pool: $POOL" "Queue: $QUEUE" "AOI: $AOI" "Sensors: $SENSIN" "Start date: $DATEMIN, End date: $DATEMAX" "Cloud cover minimum: $CCMIN, cloud cover maximum: $CCMAX" "Tier (Landsat only): $TIER" "" exit 1 fi - +# check for correct tier for T in $(echo $TIER | sed 's/,/ /g'); do case $T in T1|T2|RT) @@ -231,8 +263,9 @@ for T in $(echo $TIER | sed 's/,/ /g'); do esac done -if [ $(date -d $DATEMIN +%s) -ge $(date -d $DATEMAX +%s) ]; then - printf "%s\n" "Error: Start of date range is larger or equal to end of date range" "Start: $DATEMIN, End: $DATEMAX" "" +# check if dates are correct +if [ $(date -d $DATEMIN +%s) -gt $(date -d $DATEMAX +%s) ]; then + printf "%s\n" "Error: Start of date range is larger than end of date range" "Start: $DATEMIN, End: $DATEMAX" "" exit 1 elif ! date -d $DATEMIN &> /dev/null; then printf "%s\n" "" "starttime ($DATEMIN) is not a valid date." 
"Make sure date is formatted as YYYY-MM-DD" "" @@ -242,26 +275,23 @@ exit 1 fi - - - -# FAILS FOR FLOATING POINTS, BASH DOESN'T DO FLOAT COMPARISON -if [ $CCMIN -lt 0 ] || [ $CCMIN -gt 100 ] || [ $CCMAX -lt 0 ] || [ $CCMAX -gt 100 ]; then - printf "%s\n" "Error: Cloud cover minimum and maximum must be specified between 0 and 100" "Cloud cover minimum: $CCMIN, cloud cover maximum: $CCMAX" "" +# check if cloud cover is valid +if [ $(is_smaller $CCMIN 0) -eq 1 ] || [ $(is_smaller 100 $CCMIN) -eq 1 ] || [ $(is_smaller $CCMAX 0) -eq 1 ] || [ $(is_smaller 100 $CCMAX ) -eq 1 ]; then + printf "%s\n" "" "Error: Cloud cover minimum and maximum must be specified between 0 and 100" "Cloud cover minimum: $CCMIN" "Cloud cover maximum: $CCMAX" "" exit 1 - elif [ $CCMIN -gt $CCMAX ]; then - printf "%s\n" "Error: Cloud cover minimum is larger than cloud cover maximum" "Cloud cover minimum: $CCMIN, cloud cover maximum: $CCMAX" "" + elif [ $(is_smaller $CCMAX $CCMIN) -eq 1 ]; then + printf "%s\n" "" "Error: Cloud cover minimum is larger than cloud cover maximum" "Cloud cover minimum: $CCMIN" "Cloud cover maximum: $CCMAX" "" exit 1 fi -# type of area of interest -# 1 - coordinates as text -# 2 - shapefile +# check type of AOI +# 1 - shapefile +# 2 - coordinates as text # 3 - PathRow as text if [ -f $AOI ]; then # check if AOI is GDAL readable file if ogrinfo $AOI >& /dev/null; then - AOITYPE=2 + AOITYPE=1 OGR=1 else # check if tile list / bounding box file contains whitespaces @@ -272,34 +302,32 @@ if [ -f $AOI ]; then AOI=$(cat $AOI | sed 's/,/./g') OGR=0 fi +# if AOI is not a file, it's a polygon or tile list as cmd line input else - # tile list / bounding box is command line input AOI=$(echo $AOI | sed 's/,/ /g') OGR=0 fi -isinrange() { - awk -v value="$1" -v lower="$2" -v upper="$3" 'BEGIN {print (lower <= value && value <= upper)}' } if [ $OGR -eq 0 ]; then - # check if AOI file contains bounding box coordinates
and check if coords are valid lat/lon + # check if AOI input contains bounding box coordinates if $(echo $AOI | grep -q "/"); then - AOITYPE=1 + AOITYPE=2 + # are coords valid lat/lon? for COORD in $AOI; do LAT=$(echo COORD | cut -d"/" -f1) LON=$(echo COORD | cut -d"/" -f2) if ! grep -q "/" $COORD; then printf "%s\n" "Error: At least one of the AOI coordinates does not seem to be in the format LAT/LON" "Coordinate: $COORD" "" exit 1 - elif ! [ $(isinrange $LAT -90 90) -eq 1 ]; then + elif ! [ $(is_in_range $LAT -90 90) -eq 1 ]; then printf "%s\n" "Error: Latitude out of range" "Coordinate: $COORD - $LAT is not in range -90 to 90" "" exit 1 - elif ! [ $(isinrange $LON -180 180) -eq 1 ]; then + elif ! [ $(is_in_range $LON -180 180) -eq 1 ]; then printf "%s\n" "Error: Longitute out of range" "Coordinate: $COORD - $LON is not in range -180 to 180" "" exit 1 fi done - # else, AOI file must be tile list - check if tiles are formatted correctly + # else, AOI input must be tile list - check if tiles are valid Path/Row or S2 tiles else AOITYPE=3 for ENTRY in $AOI @@ -307,23 +335,22 @@ if [ $OGR -eq 0 ]; then if $(echo $ENTRY | grep -q -E "[0-2][0-9]{2}[0-2][0-9]{2}"); then LSPATH="${ENTRY:0:3}" LSROW="${ENTRY:3:6}" - if [ $(isinrange $LSPATH 1 233) -eq 0 ] || [ $(isinrange $LSPATH 1 248) -eq 0 ]; then - printf "%s\n" "Landsat PATH / ROW out of range. PATH not in range 1 to 233 or ROW not in range 1 to 248." "PATH / ROW received: $ENTRY" "" + if [ $(is_in_range $LSPATH 1 233) -eq 0 ] || [ $(is_in_range $LSPATH 1 248) -eq 0 ]; then + printf "%s\n" "" "Landsat PATH / ROW out of range. PATH not in range 1 to 233 or ROW not in range 1 to 248." "PATH / ROW received: $ENTRY" "" exit 1 fi continue elif $(echo $ENTRY | grep -q -E "T[0-6][0-9][A-Z]{3}"); then - if ! [ $(isinrange ${ENTRY:2:3} 1 60) ]; then - printf "%s\n" "MGRS tile number out of range. Valid range: 0 to 60, received: $ENTRY" "" + if ! 
[ $(is_in_range ${ENTRY:2:3} 1 60) ]; then + printf "%s\n" "" "MGRS tile number out of range. Valid range: 0 to 60, received: $ENTRY" "" exit 1 elif [[ -z "$(echo ${ENTRY:3:1} | grep -E "[C,D,E,F,G,H,J,K,L,M,N,P,Q,R,S,T,U,V,W,X]")" || -z "$(echo ${ENTRY:4:1} | grep -E "[A,B,C,D,E,F,G,H,K,L,M,N,P,Q,R,T,U,V,W,X,Y,Z]")" || -z "$(echo ${ENTRY:5:1} | grep -E "[A,B,C,D,E,F,G,H,J,K,L,M,N,P,Q,R,S,T,U,V]")" ]]; then - echo "$(echo ${ENTRY:5:1} | grep -E "[A,B,C,D,E,F,G,H,K,L,M,N,P,Q,R,T,U,V,W,X,Y,Z]")" - printf "%s\n" "Tile does not seem to be a valid Sentinel-2 tile: $ENTRY" "Please make sure all tiles exist." + printf "%s\n" "" "Tile does not seem to be a valid Sentinel-2 tile: $ENTRY" "Please make sure all tiles exist." "" exit 1 fi continue else - printf "%s\n" "Tile list as AOI detected." "Error: One or more tiles seem to be formatted incorrectly." "Please check $ENTRY" "" + printf "%s\n" "" "Tile list as AOI detected." "" "Error: One or more tiles seem to be formatted incorrectly." "Please check $ENTRY" "" exit 1 fi done @@ -331,15 +358,6 @@ if [ $OGR -eq 0 ]; then fi -# ============================================================ -# Get tiles / footprints of interest -if [ "$AOITYPE" -eq 1 ] || [ "$AOITYPE" -eq 2 ]; then - if ! [ -x "$(command -v ogr2ogr)" ]; then - printf "%s\n" "Could not find ogr2ogr, is gdal installed?" "Define the AOI polygon using coordinates (option 3) if gdal is not available." >&2 - exit 1 - fi -fi - # ============================================================ # Function get_data: @@ -369,17 +387,6 @@ get_data() { fi if [ "$AOITYPE" -eq 1 ]; then - printf "%s\n" "" "Searching for footprints / tiles intersecting with input geometry..." 
- WKT=$(echo $AOI | sed 's/,/%20/g; s/\//,/g') - WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetFeature&typename="$SATELLITE"&Filter=%3Cogc:Filter%3E%3Cogc:Intersects%3E%3Cogc:PropertyName%3Eshape%3C/ogc:PropertyName%3E%3Cgml:Polygon%20srsName=%22EPSG:4326%22%3E%3Cgml:outerBoundaryIs%3E%3Cgml:LinearRing%3E%3Cgml:coordinates%3E"$WKT"%3C/gml:coordinates%3E%3C/gml:LinearRing%3E%3C/gml:outerBoundaryIs%3E%3C/gml:Polygon%3E%3C/ogc:Intersects%3E%3C/ogc:Filter%3E" - TILERAW=$(ogr2ogr -f CSV /vsistdout/ -select "Name" WFS:"$WFSURL") - TILES="_"$(echo $TILERAW | sed 's/Name, //; s/ /_|_/g')"_" - # case $SATELLITE in - # sentinel2) TILES="_T"$(echo $TILERAW | sed 's/Name, //; s/ /_|_T/g')"_" ;; - # landsat) TILES="_"$(echo $TILERAW | sed 's/Name, //; s/ /_|_/g')"_" ;; - # esac - - elif [ "$AOITYPE" -eq 2 ]; then printf "%s\n" "" "Searching for footprints / tiles intersecting with geometries of AOI shapefile..." AOINE=$(echo $(basename "$AOI") | rev | cut -d"." -f 2- | rev) BBOX=$(ogrinfo -so $AOI $AOINE | grep "Extent: " | sed 's/Extent: //; s/(//g; s/)//g; s/, /,/g; s/ - /,/') @@ -392,6 +399,13 @@ get_data() { TILES="_"$(echo $TILERAW | sed 's/Name, //; s/ /_|_/g')"_" rm merged.gpkg + elif [ "$AOITYPE" -eq 2 ]; then + printf "%s\n" "" "Searching for footprints / tiles intersecting with input geometry..." 
+ WKT=$(echo $AOI | sed 's/,/%20/g; s/\//,/g') + WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetFeature&typename="$SATELLITE"&Filter=%3Cogc:Filter%3E%3Cogc:Intersects%3E%3Cogc:PropertyName%3Eshape%3C/ogc:PropertyName%3E%3Cgml:Polygon%20srsName=%22EPSG:4326%22%3E%3Cgml:outerBoundaryIs%3E%3Cgml:LinearRing%3E%3Cgml:coordinates%3E"$WKT"%3C/gml:coordinates%3E%3C/gml:LinearRing%3E%3C/gml:outerBoundaryIs%3E%3C/gml:Polygon%3E%3C/ogc:Intersects%3E%3C/ogc:Filter%3E" + TILERAW=$(ogr2ogr -f CSV /vsistdout/ -select "Name" WFS:"$WFSURL") + TILES="_"$(echo $TILERAW | sed 's/Name, //; s/ /_|_/g')"_" + elif [ "$AOITYPE" -eq 3 ]; then sensor_tile_mismatch() { printf "%s\n" "" "Error: $PRINTNAME sensor(s) specified, but no $PRINTNAME tiles identified." "Check if sensors and footprints match or use the -s option to specify sensors to query." "" @@ -410,9 +424,7 @@ get_data() { printf "%s\n" "" "Querying the metadata catalogue for $PRINTNAME data" "Sensor(s): "$(echo $SENSORS | sed 's/ /,/g') - if [ $SATELLITE == "landsat" ]; then - printf "%s\n" "Tier(s): $TIER" - fi + if [ $SATELLITE == "landsat" ]; then printf "%s\n" "Tier(s): $TIER"; fi printf "%s\n" "Tile(s): "$(echo $TILERAW | sed 's/Name, //; s/ /,/g') "Daterange: "$DATEMIN" to "$DATEMAX "Cloud cover minimum: "$CCMIN"%, maximum: "$CCMAX"%" "" # ============================================================ @@ -429,7 +441,7 @@ get_data() { landsat) TOTALSIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$17/1048576} END {printf "%f", s}') ;; esac NSCENES=$(sed -n '$=' filtered_metadata.txt) - #rm filtered_metadata.txt + rm filtered_metadata.txt # ============================================================ @@ -458,14 +470,6 @@ get_data() { # ============================================================ # Download scenes - progress() { - SIZEDONE=$(awk -v done=$SIZEDONE -v fsize=$FILESIZE 'BEGIN { print (done + fsize) }' ) - PERCDONE=$(awk -v total=$TOTALSIZE -v 
done=$SIZEDONE 'BEGIN { printf( "%.2f\n", (100 / total * done) )}') - local WIDTH=$(($(tput cols) - 9)) PERCINT=$(( $(echo $PERCDONE | cut -d"." -f1) + 1 )) - printf -v INCREMENT "%*s" "$(( $PERCINT*$WIDTH/100 ))" ""; INCREMENT=${INCREMENT// /=} - printf "\r\e[K|%-*s| %3d %% %s" "$WIDTH" "$INCREMENT" "$PERCINT" "$*" - } - PERCDONE=0 SIZEDONE=0 if [[ $DRYRUN -eq 0 && ! -z $LINKS ]]; then @@ -476,52 +480,51 @@ get_data() { ITER=1 for LINK in $LINKS do - SCENEID=$(echo $LINK | cut -d, -f 2) + SCENEID=$(echo $LINK | cut -d"," -f 2) if [ $SATELLITE = "sentinel2" ]; then - TILE=$(echo $LINK | cut -d, -f 1 | grep -o -E "T[0-9]{2}[A-Z]{3}") - URL=$(echo $LINK | cut -d, -f 14) - FILESIZE=$(( $(echo $LINK | cut -d, -f 6) / 1048576 )) + TILE=$(echo $LINK | cut -d"," -f 1 | grep -o -E "T[0-9]{2}[A-Z]{3}") + URL=$(echo $LINK | cut -d"," -f 14) + FILESIZEBYTE=$(echo $LINK | cut -d"," -f 6) elif [ $SATELLITE = "landsat" ]; then - TILE=$(echo $SCENEID | cut -d_ -f 3) - URL=$(echo $LINK | cut -d, -f 18) - FILESIZE=$(( $(echo $LINK | cut -d, -f 17) / 1048576 )) + TILE=$(echo $SCENEID | cut -d"_" -f 3) + URL=$(echo $LINK | cut -d"," -f 18) + FILESIZEBYTE=$(echo $LINK | cut -d, -f 17) fi - + FILESIZE=$(( $FILESIZEBYTE / 1048576 )) + + TILEPATH=$POOL/$TILE SCENEPATH=$TILEPATH/$SCENEID if [ $SATELLITE = "sentinel2" ]; then SCENEPATH=$SCENEPATH".SAFE" fi - # Check if scene already exists - # Implement size check to catch broken downloads! + # Check if scene already exists, remove and download again if gsutil temp files are present if [ -d $SCENEPATH ]; then - printf "\e[4A\e[100D\e[2KScene "$SCENEID"("$ITER" of "$NSCENES") exists, skipping...\e[4B" - #dl_done - progress - ((ITER++)) - continue + if ! ls -r $SCENEPATH | grep -q ".gstmp" && ! [ -z "$(ls -A $SCENEPATH)" ]; then + printf "\e[500D\e[4A\e[2KScene "$SCENEID"("$ITER" of "$NSCENES") exists, skipping...\e[4B" + show_progress + ((ITER++)) + continue + fi fi # create target directory if it doesn't exist - TILEPATH=$POOL/$TILE if [ ! 
-w $TILEPATH ]; then mkdir $TILEPATH if [ ! -w $TILEPATH ]; then - echo "$TILEPATH: Creating directory failed." + printf "%s\n" "" "$TILEPATH: Creating directory failed." "" exit 1 fi fi - - printf "\e[100D\e[2A\e[2KDownloading "$SCENEID"("$ITER" of "$NSCENES")...\e[2B" - gsutil -m -q cp -c -L $POOL"/download_log.txt" -R $URL $TILEPATH + printf "\e[500D\e[2A\e[2KDownloading "$SCENEID"("$ITER" of "$NSCENES")...\e[2B" + gsutil -m -q cp -c $POOL"/download_log.txt" -R $URL $TILEPATH lockfile-create $QUEUE echo "$SCENEPATH QUEUED" >> $QUEUE lockfile-remove $QUEUE - #dl_done - progress + show_progress ((ITER++)) done fi diff --git a/bash/force-level1.sh b/bash/force-level1.sh deleted file mode 100755 index 24286780..00000000 --- a/bash/force-level1.sh +++ /dev/null @@ -1,56 +0,0 @@ -# ============================================================ -# This script will act as 'master' for Level-1 download -# It calls the ESA or Google download script -# -# Check for satellite first: -# - if ls, go for the google script -# - if s2, check for -m option (esa, gcs, later also aws) -# -# - check if arguments are in right order, format for scripts -# -# call landsat-level1-sentinel2 or landsat-level1-gcs - -#set -e # makes sure that this script stops as soon as the sub scripts exit - - -# ============================================================ -# check for options -DRYRUN=0 -PLATFORM=$1 -# check for platform and mirror, discard platform ($1) and mirror ($2:-m and $3) afterwards -case $PLATFORM in - s2) - if ! 
[ $2 = "-m" ]; then - printf "%s\n" "" "Mirror option (-m) must be set as first optional argument for Sentinel-2" "Valid mirrors: 'esa' for ESA and 'gcs' for Google Cloud Storage" "" - exit 1 - else - MIRROR=$3 - case $MIRROR in - "esa"|"gcs") - shift 2 ;; - *) - printf "%s\n" "" "Mirror must be either esa (ESA archive) or gcs (Google Cloud Storage)" "" ;; - esac - fi ;; - ls) - MIRROR="gcs" ;; - *) - printf "%s\n" "" "Platform must be either ls (Landsat) or s2 (Sentinel-2)" "" - exit 1 ;; -esac -shift - -echo $(dirname $0) -# ============================================================ -# run ESA or GCS scripts -BINDIR=$(dirname $0) -case $MIRROR in - "esa") - echo $@ - source $BINDIR"/"force-level1-esa $@ ;; - - "gcs") - echo "$@" - source $BINDIR"/"force-level1-gcs $@ ;; -esac - From 2e8a391b79de5bbf96cf82a7fb7ac6bf54a9e311 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Fri, 28 Aug 2020 09:35:36 +0200 Subject: [PATCH 45/78] fixed small bug in _stack.c --- src/aux-level/_stack.c | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/src/aux-level/_stack.c b/src/aux-level/_stack.c index 1ddf98a1..694cc41d 100755 --- a/src/aux-level/_stack.c +++ b/src/aux-level/_stack.c @@ -61,30 +61,20 @@ GDALDatasetH src = NULL; GDALDatasetH dst = NULL; GDALRasterBandH bsrc = NULL; GDALRasterBandH bdst = NULL; - img_t *inp = NULL; img_t *out = NULL; - - - char *f_dst = NULL; char d_out[NPOW_10]; char e_out[NPOW_10]; - char **meta = NULL; char **bmeta = NULL; const char *bname = NULL; const char *proj_ = NULL; - - char source[NPOW_16]; - - int interleave; enum { _BYFILE_, _BYBAND_, _INTERLEN_ }; - if (argc >= 2) check_arg(argv[1]); if (argc < 3){ printf("Usage: %s file1 file2 [etc] outfile.\n", argv[0]); printf(" At least two input files need to be given\n\n"); @@ -98,7 +88,7 @@ enum { _BYFILE_, _BYBAND_, _INTERLEN_ }; directoryname(f_dst, d_out, NPOW_10); extension(f_dst, e_out, NPOW_10); - if (strcmp(e_out, "VRT") != 0){ + if (strcmp(e_out, "vrt") != 
0){ printf("Output file must have .vrt extension\n\n"); return FAILURE;} From c6a50c36f803cf7b6cfa83ea58039b513fea7103 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Fri, 28 Aug 2020 09:39:47 +0200 Subject: [PATCH 46/78] fixed small bug in _stack.c --- src/aux-level/_stack.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aux-level/_stack.c b/src/aux-level/_stack.c index 694cc41d..ed977f18 100755 --- a/src/aux-level/_stack.c +++ b/src/aux-level/_stack.c @@ -88,7 +88,7 @@ enum { _BYFILE_, _BYBAND_, _INTERLEN_ }; directoryname(f_dst, d_out, NPOW_10); extension(f_dst, e_out, NPOW_10); - if (strcmp(e_out, "vrt") != 0){ + if (strcmp(e_out, ".vrt") != 0){ printf("Output file must have .vrt extension\n\n"); return FAILURE;} From afebad0f6a0752b93948d110353185d3f1279809 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Sun, 30 Aug 2020 12:16:40 +0200 Subject: [PATCH 47/78] adding docs --- .../{higher-level.rst => hl-aux.rst} | 26 ++--- .../components/higher-level/hl-compute.rst | 37 +++++++ .../components/higher-level/hl-input.rst | 25 +++++ .../components/higher-level/hl-submodules.rst | 48 +++++++++ docs/source/components/higher-level/index.rst | 102 ++++-------------- 5 files changed, 134 insertions(+), 104 deletions(-) rename docs/source/components/higher-level/{higher-level.rst => hl-aux.rst} (85%) create mode 100755 docs/source/components/higher-level/hl-compute.rst create mode 100755 docs/source/components/higher-level/hl-input.rst create mode 100755 docs/source/components/higher-level/hl-submodules.rst diff --git a/docs/source/components/higher-level/higher-level.rst b/docs/source/components/higher-level/hl-aux.rst similarity index 85% rename from docs/source/components/higher-level/higher-level.rst rename to docs/source/components/higher-level/hl-aux.rst index 353e9825..6332b595 100755 --- a/docs/source/components/higher-level/higher-level.rst +++ b/docs/source/components/higher-level/hl-aux.rst @@ -1,28 +1,14 @@ -.. _higher-level: +.. 
_hl-aux: -force-higher-level -================== - - -Usage -^^^^^ - -.. code-block:: bash - - force-higher-level - - Usage: force-higher-level parameter-file - -* parameter-file - - | Any higher-level parameter file needs to be given as sole argument. - | Depending on the parameter file, the program will figure out which submodule to execute, e.g. time series analysis or machine learning predictions. +Auxiliary data +============== +FORCE HLPS supports the optional usage of several auxiliary datasets as explained below. .. _tilelist: - + Tile allow-list -^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^ Tile allow-lists are optional, and can be used to limit the analysis extent to non-square extents. The allow-list is intersected with the analysis extent, i.e. only tiles included in both the analysis extent AND the allow-list will be processed. diff --git a/docs/source/components/higher-level/hl-compute.rst b/docs/source/components/higher-level/hl-compute.rst new file mode 100755 index 00000000..19f8d0f6 --- /dev/null +++ b/docs/source/components/higher-level/hl-compute.rst @@ -0,0 +1,37 @@ +.. _hl-compute: + +Compute model +============= + +The conceptual figures below explain the general concept of the higher-level processing strategy, compute model, and nested parallelism. +The individual sub-figures can be enlarged by clicking on them. + +.. |hl-compute1-text| replace:: The cubed data are stored in a grid system. Each tile has a unique tile ID, which consists of an X-ID, and a Y-ID. The numbers increase from left ro right, and from top to bottom. In a first step, a rectangular extent needs to be specified using the tile X- and Y-IDs. In this example, we have selected the extent covering Belgium, i.e. 9 tiles. +.. |hl-compute2-text| replace:: If you do not want to process all tiles, you can use a :ref:`tilelist`. The allow-list is intersected with the analysis extent, i.e. only tiles included in both the analysis extent AND the allow-list will be processed. This is optional. 
+.. |hl-compute3-text| replace:: The image chips in each tile have an internal block structure for partial image access. These blocks are strips that are as wide as the ``TILE_SIZE`` and as high as the ``BLOCK_SIZE``. The blocks are the main processing units (PU), and are processed sequentially, i.e. one after another. +.. |hl-compute4-text| replace:: FORCE uses a streaming strategy, where three teams take care of reading, computing and writing data. The teams work simultaneously, e.g. input data for PU 19 is read, pre-loaded data for PU 18 is processed, and processed results for PU 17 are written - at the same time. If processing takes longer than I/O, this streaming strategy avoids idle CPUs waiting for delivery of input data. Optionally, :ref:`processing-masks` can be used, which restrict processing and analysis to certain pixels of interest. Processing units, which do not contain any active pixels, are skipped (in this case, the national territory of Belgium). +.. |hl-compute5-text| replace:: Each team can use several threads to further parallelize the work. In the input team, multiple threads read multiple input images simultaneously, e.g. different dates of ARD. In the computing team, the pixels are distributed to different threads (please note that the actual load distribution may differ from the idealized figure due to load balancing etc.). In the output team, multiple threads write multiple output products simultaneously, e.g. different Spectral Temporal Metrics. + +.. |hl-compute1-image| image:: hl-1.jpg + :width: 70% +.. |hl-compute2-image| image:: hl-2.jpg + :width: 70% +.. |hl-compute3-image| image:: hl-3.jpg + :width: 70% +.. |hl-compute4-image| image:: hl-4.jpg + :width: 70% +.. |hl-compute5-image| image:: hl-5.jpg + :width: 70% + ++----+--------------------+---------------------+ ++ 1. + |hl-compute1-text| + |hl-compute1-image| + ++----+--------------------+---------------------+ ++ 2. 
+ |hl-compute2-text| + |hl-compute2-image| + ++----+--------------------+---------------------+ ++ 3. + |hl-compute3-text| + |hl-compute3-image| + ++----+--------------------+---------------------+ ++ 4. + |hl-compute4-text| + |hl-compute4-image| + ++----+--------------------+---------------------+ ++ 5. + |hl-compute5-text| + |hl-compute5-image| + ++----+--------------------+---------------------+ + diff --git a/docs/source/components/higher-level/hl-input.rst b/docs/source/components/higher-level/hl-input.rst new file mode 100755 index 00000000..834e8717 --- /dev/null +++ b/docs/source/components/higher-level/hl-input.rst @@ -0,0 +1,25 @@ +.. _hl-input: + +Input type +========== + +Input for HLPS must be in datacube format! + +.. seealso:: Check out this `tutorial `_, which explains how we define a datacube. + + +The different :ref:`hl-submodules` either process ARD or feature datasets (see :ref:`hl-input)`: + + +1. ARD are Level 2 Analysis Ready Data. + Alternatively, Level 3 Best Available Pixel composites can be input, too. + They consist of a reflectance product (mostly BOA, but TOA, IMP, BAP are supported, too), and pixel-based quality information (mostly QAI, but INF is supported, too). + These input data need to follow a strict data format, including number of bands, naming convention with time stamp, sensor etc. + + .. seealso:: Check out this `tutorial `_, which explains what Analysis Ready Data are, and how to use the FORCE :ref:`l2ps` to generate them.. + +2. Feature datasets can be anything from individual ARD datasets to external datasets like precipitation or DEM. + Most often, features are generated by one HLPS submodule, and then used by another one, e.g. generate Spectral Temporal Metrics with :ref:`tsa`, then use these outputs as features in :ref:`ml`. + The most important constraint is: HLPS only knows 16bit signed input, thus if you import external data, you need to scale accordingly. 
+ + diff --git a/docs/source/components/higher-level/hl-submodules.rst b/docs/source/components/higher-level/hl-submodules.rst new file mode 100755 index 00000000..faec2b6e --- /dev/null +++ b/docs/source/components/higher-level/hl-submodules.rst @@ -0,0 +1,48 @@ +.. _hl-submodules: + +Submodules +========== + +There are multiple submodules available, which implement different workflows. +They all share the higher-level :ref:`hl-compute``. + + +**Table 2** Submodules of HLPS. + ++---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ Submodule + Level + :ref:`hl-input` + Short description + ++===============+=======+=========+=============================================================================================================================================+ ++ :ref:`level3` + 3 + ARD + Generate temporal aggregations of Level 2 ARD, i.e. pixel-based composites + ++---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`cso` + 3 + ARD + Statistics for Level 2-3 ARD data availability mining + ++---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`tsa` + 3-4 + ARD + Multitemporal analysis and processing based on Level 2-3 ARD + ++---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`ml` + 4 + feature + Model predictions based on any cubed features + ++---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`txt` + 3-4 + 
feature + Morphological transformations based on any cubed features + ++---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`lsm` + 3-4 + feature + Quantification of spatial patterns based on any cubed features + ++---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`smp` + / + feature + Point-based extraction of features for training/validation purposes + ++---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`cfi` + 4 + ARD + Increase the spatial resolution of coarse continuous fields (like Land Surface Phenology) to Level 2 ARD resolution using the ImproPhe code + ++---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`l2i` + 2 + ARD + Increase the spatial resolution of lower resolution Level 2 ARD using higher resolution Level 2 ARD using the ImproPhe code + ++---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ + + +.. 
toctree:: + :hidden: + :maxdepth: 2 + + l3/index.rst + tsa/index.rst + cso/index.rst + ml/index.rst + txt/index.rst + lsm/index.rst + smp/index.rst + l2i/index.rst + cfi/index.rst + diff --git a/docs/source/components/higher-level/index.rst b/docs/source/components/higher-level/index.rst index c842be48..3d52ade1 100755 --- a/docs/source/components/higher-level/index.rst +++ b/docs/source/components/higher-level/index.rst @@ -6,7 +6,7 @@ Higher Level The FORCE Higher Level Processing System (HLPS) provides functionality for Higher Level Processing. HLPS consists of one executable only, i.e. :ref:`higher-level`. -Multiple submodules are available (see table below), which either process ARD or feature datasets, both in datacube format. +Multiple :ref:`hl-submodules` are available, which either process ARD or feature datasets (see :ref:`hl-input)`, both in datacube format. **Table 1** Higher Level module. @@ -19,82 +19,21 @@ Multiple submodules are available (see table below), which either process ARD or + + :ref:`higher-level` + 3-4 + Higher Level processing + +--------+-------------------------+-------+-----------------------------------------------------------------------------------------------------+ +.. higher-level: -**Table 2** Submodules of HLPS. - -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ Submodule + Level + Input + Short description + -+===============+=======+=========+=============================================================================================================================================+ -+ :ref:`level3` + 3 + ARD + Generate temporal aggregations of Level 2 ARD, i.e. 
pixel-based composites + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`cso` + 3 + ARD + Statistics for Level 2-3 ARD data availability mining + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`tsa` + 3-4 + ARD + Multitemporal analysis and processing based on Level 2-3 ARD + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`ml` + 4 + feature + Model predictions based on any cubed features + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`txt` + 3-4 + feature + Morphological transformations based on any cubed features + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`lsm` + 3-4 + feature + Quantification of spatial patterns based on any cubed features + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`smp` + / + feature + Point-based extraction of features for training/validation purposes + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`cfi` + 4 + ARD + Increase the spatial resolution of coarse continuous fields (like Land Surface Phenology) to Level 2 ARD resolution using the ImproPhe code + 
-+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`l2i` + 2 + ARD + Increase the spatial resolution of lower resolution Level 2 ARD using higher resolution Level 2 ARD using the ImproPhe code + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ - - -**Input type** - -.. seealso:: Check out this `tutorial `_, which explains how we define a datacube. - -1. ARD are Level 2 Analysis Ready Data. - Alternatively, Level 3 Best Available Pixel composites can be input, too. - They consist of a reflectance product (mostly BOA, but TOA, IMP, BAP are supported, too), and pixel-based quality information (mostly QAI, but INF is supported, too). - These input data need to follow a strict data format, including number of bands, naming convention with time stamp, sensor etc. - - .. seealso:: Check out this `tutorial `_, which explains what Analysis Ready Data are, and how to use the FORCE :ref:`l2ps` to generate them.. - -2. Feature datasets can be anything from individual ARD datasets to external datasets like precipitation or DEM. - Most often, features are generated by one HLPS submodule, and then used by another one, e.g. generate Spectral Temporal Metrics with :ref:`tsa`, then use these outputs as features in :ref:`ml`. - The most important constraint is: HLPS only knows 16bit signed input, thus if you import external data, you need to scale accordingly. - - -**Compute Model** - -The conceptual figures below explain the general concept of the processing strategy, compute model, and nested parallelism. -The individual sub-figures can be enlarged by clicking on them. - -.. |hl-compute1-text| replace:: The cubed data are stored in a grid system. Each tile has a unique tile ID, which consists of an X-ID, and a Y-ID. 
The numbers increase from left ro right, and from top to bottom. In a first step, a rectangular extent needs to be specified using the tile X- and Y-IDs. In this example, we have selected the extent covering Belgium, i.e. 9 tiles. -.. |hl-compute2-text| replace:: If you do not want to process all tiles, you can use a :ref:`tilelist`. The allow-list is intersected with the analysis extent, i.e. only tiles included in both the analysis extent AND the allow-list will be processed. This is optional. -.. |hl-compute3-text| replace:: The image chips in each tile have an internal block structure for partial image access. These blocks are strips that are as wide as the ``TILE_SIZE`` and as high as the ``BLOCK_SIZE``. The blocks are the main processing units (PU), and are processed sequentially, i.e. one after another. -.. |hl-compute4-text| replace:: FORCE uses a streaming strategy, where three teams take care of reading, computing and writing data. The teams work simultaneously, e.g. input data for PU 19 is read, pre-loaded data for PU 18 is processed, and processed results for PU 17 are written - at the same time. If processing takes longer than I/O, this streaming strategy avoids idle CPUs waiting for delivery of input data. Optionally, :ref:`processing-masks` can be used, which restrict processing and analysis to certain pixels of interest. Processing units, which do not contain any active pixels, are skipped (in this case, the national territory of Belgium). -.. |hl-compute5-text| replace:: Each team can use several threads to further parallelize the work. In the input team, multiple threads read multiple input images simultaneously, e.g. different dates of ARD. In the computing team, the pixels are distributed to different threads (please note that the actual load distribution may differ from the idealized figure due to load balancing etc.). In the output team, multiple threads write multiple output products simultaneously, e.g. different Spectral Temporal Metrics. 
- -.. |hl-compute1-image| image:: hl-1.jpg - :width: 70% -.. |hl-compute2-image| image:: hl-2.jpg - :width: 70% -.. |hl-compute3-image| image:: hl-3.jpg - :width: 70% -.. |hl-compute4-image| image:: hl-4.jpg - :width: 70% -.. |hl-compute5-image| image:: hl-5.jpg - :width: 70% - -+----+--------------------+---------------------+ -+ 1. + |hl-compute1-text| + |hl-compute1-image| + -+----+--------------------+---------------------+ -+ 2. + |hl-compute2-text| + |hl-compute2-image| + -+----+--------------------+---------------------+ -+ 3. + |hl-compute3-text| + |hl-compute3-image| + -+----+--------------------+---------------------+ -+ 4. + |hl-compute4-text| + |hl-compute4-image| + -+----+--------------------+---------------------+ -+ 5. + |hl-compute5-text| + |hl-compute5-image| + -+----+--------------------+---------------------+ +Usage +^^^^^ +.. code-block:: bash + + force-higher-level + + Usage: force-higher-level parameter-file + +* parameter-file + + | Any higher-level parameter file needs to be given as sole argument. + | Depending on the parameter file, the program will figure out which submodule to execute, e.g. time series analysis or machine learning predictions. .. toctree:: @@ -102,13 +41,8 @@ The individual sub-figures can be enlarged by clicking on them. 
:maxdepth: 2 higher-level.rst - l3/index.rst - tsa/index.rst - cso/index.rst - ml/index.rst - txt/index.rst - lsm/index.rst - smp/index.rst - l2i/index.rst - cfi/index.rst + hl-input.rst + hl-compute.rst + hl-aux.rst + hl-submodules.rst From df77ba1bdecee9ed5e6e872cda4b6e32926dba14 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Sun, 30 Aug 2020 12:26:41 +0200 Subject: [PATCH 48/78] adding docs --- .../components/higher-level/hl-submodules.rst | 42 +++++++++---------- docs/source/components/higher-level/index.rst | 10 ++--- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/docs/source/components/higher-level/hl-submodules.rst b/docs/source/components/higher-level/hl-submodules.rst index faec2b6e..27eb7d75 100755 --- a/docs/source/components/higher-level/hl-submodules.rst +++ b/docs/source/components/higher-level/hl-submodules.rst @@ -4,32 +4,32 @@ Submodules ========== There are multiple submodules available, which implement different workflows. -They all share the higher-level :ref:`hl-compute``. +They all share the higher-level :ref:`hl-compute`. **Table 2** Submodules of HLPS. -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++---------------+-------+-----------------+---------------------------------------------------------------------------------------------------------------------------------------------+ + Submodule + Level + :ref:`hl-input` + Short description + -+===============+=======+=========+=============================================================================================================================================+ -+ :ref:`level3` + 3 + ARD + Generate temporal aggregations of Level 2 ARD, i.e. 
pixel-based composites + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`cso` + 3 + ARD + Statistics for Level 2-3 ARD data availability mining + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`tsa` + 3-4 + ARD + Multitemporal analysis and processing based on Level 2-3 ARD + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`ml` + 4 + feature + Model predictions based on any cubed features + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`txt` + 3-4 + feature + Morphological transformations based on any cubed features + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`lsm` + 3-4 + feature + Quantification of spatial patterns based on any cubed features + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`smp` + / + feature + Point-based extraction of features for training/validation purposes + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`cfi` + 4 + ARD + Increase the spatial resolution of coarse continuous fields (like Land Surface Phenology) to Level 2 ARD resolution using the ImproPhe code + 
-+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ -+ :ref:`l2i` + 2 + ARD + Increase the spatial resolution of lower resolution Level 2 ARD using higher resolution Level 2 ARD using the ImproPhe code + -+---------------+-------+---------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++===============+=======+=================+=============================================================================================================================================+ ++ :ref:`level3` + 3 + ARD + Generate temporal aggregations of Level 2 ARD, i.e. pixel-based composites + ++---------------+-------+-----------------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`cso` + 3 + ARD + Statistics for Level 2-3 ARD data availability mining + ++---------------+-------+-----------------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`tsa` + 3-4 + ARD + Multitemporal analysis and processing based on Level 2-3 ARD + ++---------------+-------+-----------------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`ml` + 4 + feature + Model predictions based on any cubed features + ++---------------+-------+-----------------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`txt` + 3-4 + feature + Morphological transformations based on any cubed features + 
++---------------+-------+-----------------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`lsm` + 3-4 + feature + Quantification of spatial patterns based on any cubed features + ++---------------+-------+-----------------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`smp` + / + feature + Point-based extraction of features for training/validation purposes + ++---------------+-------+-----------------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`cfi` + 4 + ARD + Increase the spatial resolution of coarse continuous fields (like Land Surface Phenology) to Level 2 ARD resolution using the ImproPhe code + ++---------------+-------+-----------------+---------------------------------------------------------------------------------------------------------------------------------------------+ ++ :ref:`l2i` + 2 + ARD + Increase the spatial resolution of lower resolution Level 2 ARD using higher resolution Level 2 ARD using the ImproPhe code + ++---------------+-------+-----------------+---------------------------------------------------------------------------------------------------------------------------------------------+ .. toctree:: diff --git a/docs/source/components/higher-level/index.rst b/docs/source/components/higher-level/index.rst index 3d52ade1..4b6cadf2 100755 --- a/docs/source/components/higher-level/index.rst +++ b/docs/source/components/higher-level/index.rst @@ -5,8 +5,8 @@ Higher Level The FORCE Higher Level Processing System (HLPS) provides functionality for Higher Level Processing. -HLPS consists of one executable only, i.e. :ref:`higher-level`. 
-Multiple :ref:`hl-submodules` are available, which either process ARD or feature datasets (see :ref:`hl-input)`, both in datacube format. +HLPS consists of one executable only, i.e. ``force-higher-level``. +Multiple :ref:`hl-submodules` are available, which either process ARD or feature datasets (see :ref:`hl-input`). **Table 1** Higher Level module. @@ -19,10 +19,10 @@ Multiple :ref:`hl-submodules` are available, which either process ARD or feature + + :ref:`higher-level` + 3-4 + Higher Level processing + +--------+-------------------------+-------+-----------------------------------------------------------------------------------------------------+ + .. higher-level: -Usage -^^^^^ +**Usage** .. code-block:: bash @@ -38,7 +38,7 @@ Usage .. toctree:: :hidden: - :maxdepth: 2 + :maxdepth: 3 higher-level.rst hl-input.rst From 4b382939afffe80f5efd2c4cf1c0903f690513fb Mon Sep 17 00:00:00 2001 From: David Frantz Date: Mon, 31 Aug 2020 09:13:44 +0200 Subject: [PATCH 49/78] directoryname function now return PWD if no / detected --- src/cross-level/dir-cl.c | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/cross-level/dir-cl.c b/src/cross-level/dir-cl.c index 3f2d53a3..ef9125b1 100755 --- a/src/cross-level/dir-cl.c +++ b/src/cross-level/dir-cl.c @@ -248,7 +248,14 @@ char *slash; // Locate the last slash and set terminating 0 slash = strrchr(dirname, '/'); - if (slash != NULL) *slash = '\0'; + if (slash != NULL){ + *slash = '\0'; + } else { + if (getcwd(dirname, size) == NULL){ + printf("No directoryname detected and getting current directory failed.\n"); + exit(1); + } + } return; } From b21d38530d87de811064bd6743c8110e63809e89 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 3 Sep 2020 09:12:48 +0200 Subject: [PATCH 50/78] fixed small bug in assembling filename when OUTPUT_EXPLODE --- src/cross-level/stack-cl.c | 2 +- src/higher-level/tsa-hl.c | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/cross-level/stack-cl.c 
b/src/cross-level/stack-cl.c index 6b1f0c61..2c24be25 100755 --- a/src/cross-level/stack-cl.c +++ b/src/cross-level/stack-cl.c @@ -795,7 +795,7 @@ int i = 0; for (f=0; fexplode){ - nchar = snprintf(bname, NPOW_10, "_%s", stack->domain[bands[_STACK_][f][0]]); + nchar = snprintf(bname, NPOW_10, "_%s", stack->bandname[bands[_STACK_][f][0]]); if (nchar < 0 || nchar >= NPOW_10){ printf("Buffer Overflow in assembling band ID\n"); return FAILURE;} } else bname[0] = '\0'; diff --git a/src/higher-level/tsa-hl.c b/src/higher-level/tsa-hl.c index f7814bc1..97fcc3c8 100755 --- a/src/higher-level/tsa-hl.c +++ b/src/higher-level/tsa-hl.c @@ -561,7 +561,7 @@ stack_compile_info_t *info = NULL; printf("Buffer Overflow in assembling domain\n"); error++;} set_stack_wavelength(TSA[o], t, date.year); set_stack_unit(TSA[o], t, "year"); - set_stack_domain(TSA[o], t, fdate); + //set_stack_domain(TSA[o], t, fdate); set_stack_bandname(TSA[o], t, fdate); break; case _trd_: From 86f37481cc4c6f09e7b9508a70b1b46446a21108 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 3 Sep 2020 16:14:12 +0200 Subject: [PATCH 51/78] added small tweak in polar --- src/higher-level/polar-hl.c | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index 870f5c48..5f29a792 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -494,7 +494,9 @@ polar_t *theta0 = NULL; } // max of season - if (polar[i].val > timing[_PEAK_].val){ + if (polar[i].cum >= pol->start && + polar[i].cum < pol->end && + polar[i].val > timing[_PEAK_].val){ memcpy(&timing[_PEAK_], &polar[i], sizeof(polar_t));} // average vector of early growing season part @@ -570,7 +572,8 @@ polar_t *theta0 = NULL; } - if (theta0 != NULL) free((void*)theta0); theta0 = NULL; + if (theta0 != NULL) free((void*)theta0); + theta0 = NULL; } From a0e2f1c55d1b89e8af5d92847424152146baf2fc Mon Sep 17 00:00:00 2001 From: David Frantz Date: Sat, 5 Sep 2020 11:14:56 
+0200 Subject: [PATCH 52/78] fixed a small issue when scaling mean DOY value in trend analysis + implemented integrals in polarmetrics --- src/cross-level/enum-cl.c | 4 +- src/cross-level/enum-cl.h | 3 +- src/higher-level/polar-hl.c | 74 ++++++++++++++++++++++++++++++++++--- src/higher-level/trend-hl.c | 4 +- 4 files changed, 75 insertions(+), 10 deletions(-) diff --git a/src/cross-level/enum-cl.c b/src/cross-level/enum-cl.c index f88c4813..6ee87637 100755 --- a/src/cross-level/enum-cl.c +++ b/src/cross-level/enum-cl.c @@ -126,7 +126,9 @@ const tagged_enum_t _TAGGED_ENUM_POL_[_POL_LENGTH_] = { { _POL_VEV_, "VEV" }, { _POL_VAV_, "VAV" }, { _POL_VLV_, "VLV" }, { _POL_VBL_, "VBL" }, { _POL_VSA_, "VSA" }, { _POL_VPA_, "VPA" }, { _POL_VGA_, "VGA" }, { _POL_VGV_, "VGV" }, { _POL_DPY_, "DPY" }, - { _POL_DPV_, "DPV" }}; + { _POL_DPV_, "DPV" }, { _POL_IST_, "IST" }, { _POL_IBL_, "IBL" }, + { _POL_IBT_, "IBT" }, { _POL_IGS_, "IGS" }, { _POL_IRD_, "IRD" }, + { _POL_IFD_, "IFD" }}; const tagged_enum_t _TAGGED_ENUM_TAIL_[_TAIL_LENGTH_] = { { _TAIL_LEFT_, "LEFT" }, { _TAIL_TWO_, "TWO" }, { _TAIL_RIGHT_, "RIGHT" }}; diff --git a/src/cross-level/enum-cl.h b/src/cross-level/enum-cl.h index 90dea733..60f0451d 100755 --- a/src/cross-level/enum-cl.h +++ b/src/cross-level/enum-cl.h @@ -195,7 +195,8 @@ enum { _POL_DEM_, _POL_DLM_, _POL_DPS_, _POL_DSS_, _POL_DMS_, _POL_DES_, _POL_DEV_, _POL_DAV_, _POL_DLV_, _POL_LTS_, _POL_LGS_, _POL_LGV_, _POL_VEM_, _POL_VLM_, _POL_VPS_, _POL_VSS_, _POL_VMS_, _POL_VES_, _POL_VEV_, _POL_VAV_, _POL_VLV_, _POL_VBL_, _POL_VSA_, _POL_VPA_, - _POL_VGA_, _POL_VGV_, _POL_DPY_, _POL_DPV_, _POL_LENGTH_ }; + _POL_VGA_, _POL_VGV_, _POL_DPY_, _POL_DPV_, _POL_IST_, _POL_IBL_, + _POL_IBT_, _POL_IGS_, _POL_IRD_, _POL_IFD_, _POL_LENGTH_ }; // folding enum { _FLD_YEAR_, _FLD_QUARTER_, _FLD_MONTH_, _FLD_WEEK_, _FLD_DOY_, _FLD_LENGTH_ }; diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index 5f29a792..e161276e 100755 --- a/src/higher-level/polar-hl.c 
+++ b/src/higher-level/polar-hl.c @@ -307,6 +307,7 @@ int l; int p; int i, i_, i0; int s, y; +int g; float r, v; bool valid; float ce_left, ce_right, ce; @@ -314,15 +315,18 @@ float v_left, v_right; enum { _LEFT_, _START_, _MID_, _PEAK_, _END_, _RIGHT_, _EVENT_LEN_ }; enum { _ALPHA_, _THETA_, _EARLY_, _GROW_, _LATE_, _WINDOW_LEN_ }; +enum { _GREEN_INT_, _SEASONAL_INT_, _LATENT_INT_, _TOTAL_INT_, _RISING_INT_, _FALLING_INT_, _INTEGRAL_LEN_ }; polar_t timing[_EVENT_LEN_]; polar_t vector[_WINDOW_LEN_]; float mean_window[_WINDOW_LEN_][2]; int n_window[_WINDOW_LEN_]; double recurrence[2]; +double integral[_INTEGRAL_LEN_]; polar_t *polar = NULL; polar_t *theta0 = NULL; +float green_val, base_val; valid = false; @@ -335,7 +339,7 @@ polar_t *theta0 = NULL; - #pragma omp parallel private(l,i,i0,i_,ce_left,ce_right,v_left,v_right,valid,ce,v,s,y,r,timing,vector,mean_window,n_window,recurrence,polar,theta0) shared(mask_,ts,nc,ni,year_min,nodata,pol,tsi) default(none) + #pragma omp parallel private(l,g,i,i0,i_,ce_left,ce_right,v_left,v_right,valid,ce,v,s,y,r,timing,vector,mean_window,n_window,recurrence,integral,polar,theta0,green_val,base_val) shared(mask_,ts,nc,ni,year_min,nodata,pol,tsi) default(none) { // allocate @@ -458,7 +462,7 @@ polar_t *theta0 = NULL; for (i=i0; i s){ i0 = i; break; } + if (polar[i].season > s) break; // start of phenological year if (polar[i].cum > 0 && timing[_LEFT_].cum == 0){ @@ -534,6 +538,58 @@ polar_t *theta0 = NULL; ce_from_polar_vector(s, &vector[_THETA_], &vector[_LATE_]); + green_val = (timing[_START_].val + timing[_END_].val) / 2.0; + base_val = (timing[_LEFT_].val + timing[_RIGHT_].val) / 2.0; + + memset(integral, 0, sizeof(double)*_INTEGRAL_LEN_); + + for (i=i0; i s){ i0 = i; break; } + + // green integral + if (polar[i].cum >= pol->start && + polar[i].cum < pol->end && + polar[i].val > green_val){ + integral[_GREEN_INT_] += polar[i].val*tsi->step; + } + + // min-min integral + if (polar[i].val > base_val){ + integral[_SEASONAL_INT_] += 
(polar[i].val-base_val)*tsi->step; + } + + // latent integral + if (polar[i].val > base_val){ + integral[_LATENT_INT_] += base_val*tsi->step; + } else { + integral[_LATENT_INT_] += polar[i].val*tsi->step; + } + + // total integral + integral[_TOTAL_INT_] += polar[i].val*tsi->step; + + // rising integral + if (i > 0 && polar[i].val - polar[i-1].val > 0){ + integral[_RISING_INT_] += (polar[i].val - polar[i-1].val)*tsi->step; + } + + // falling integral + if (i > 0 && polar[i].val - polar[i-1].val < 0){ + integral[_FALLING_INT_] += (polar[i-1].val - polar[i].val)*tsi->step; + } + + } + + + //scale integrals to percent in relation to a + // 365 days * 10000 value boxcar integral + for (g=0; g<_INTEGRAL_LEN_; g++){ + integral[g] = integral[g] / (1e4*365.0) * 10000; + } + + // date parameters if (pol->use[_POL_DEM_]) ts->pol_[_POL_DEM_][y][p] = (short)timing[_LEFT_].ce; if (pol->use[_POL_DSS_]) ts->pol_[_POL_DSS_][y][p] = (short)timing[_START_].ce; @@ -547,7 +603,7 @@ polar_t *theta0 = NULL; if (pol->use[_POL_DPY_]) ts->pol_[_POL_DPY_][y][p] = (short)(vector[_THETA_].ce); if (pol->use[_POL_DPV_]) ts->pol_[_POL_DPV_][y][p] = (short)(theta0[s].ce - vector[_THETA_].ce); - // length paramaters + // length parameters if (pol->use[_POL_LGS_]) ts->pol_[_POL_LGS_][y][p] = (short)(timing[_END_].ce - timing[_START_].ce); if (pol->use[_POL_LGV_]) ts->pol_[_POL_LGV_][y][p] = (short)(vector[_LATE_].ce - vector[_EARLY_].ce); if (pol->use[_POL_LTS_]) ts->pol_[_POL_LTS_][y][p] = (short)(timing[_RIGHT_].ce - timing[_LEFT_].ce); @@ -562,13 +618,19 @@ polar_t *theta0 = NULL; if (pol->use[_POL_VEV_]) ts->pol_[_POL_VEV_][y][p] = (short)vector[_EARLY_].val; if (pol->use[_POL_VAV_]) ts->pol_[_POL_VAV_][y][p] = (short)vector[_GROW_].val; if (pol->use[_POL_VLV_]) ts->pol_[_POL_VLV_][y][p] = (short)vector[_LATE_].val; - if (pol->use[_POL_VSA_]) ts->pol_[_POL_VSA_][y][p] = (short)(timing[_PEAK_].val - - (timing[_START_].val+timing[_END_].val)/2.0); + if (pol->use[_POL_VSA_]) 
ts->pol_[_POL_VSA_][y][p] = (short)(timing[_PEAK_].val - green_val); if (pol->use[_POL_VPA_]) ts->pol_[_POL_VPA_][y][p] = (short)(timing[_PEAK_].val - timing[_MID_].val); - if (pol->use[_POL_VBL_]) ts->pol_[_POL_VBL_][y][p] = (short)((timing[_LEFT_].val+timing[_RIGHT_].val)/2.0); + if (pol->use[_POL_VBL_]) ts->pol_[_POL_VBL_][y][p] = (short)base_val; if (pol->use[_POL_VGA_]) ts->pol_[_POL_VGA_][y][p] = (short)recurrence[0]; if (pol->use[_POL_VGV_]) ts->pol_[_POL_VGV_][y][p] = (short)standdev(recurrence[1], n_window[_GROW_]); + // integral parameters + if (pol->use[_POL_IST_]) ts->pol_[_POL_IST_][y][p] = (short)integral[_SEASONAL_INT_]; + if (pol->use[_POL_IBL_]) ts->pol_[_POL_IBL_][y][p] = (short)integral[_LATENT_INT_]; + if (pol->use[_POL_IBT_]) ts->pol_[_POL_IBT_][y][p] = (short)integral[_TOTAL_INT_]; + if (pol->use[_POL_IGS_]) ts->pol_[_POL_IGS_][y][p] = (short)integral[_GREEN_INT_]; + if (pol->use[_POL_IRD_]) ts->pol_[_POL_IRD_][y][p] = (short)integral[_RISING_INT_]; + if (pol->use[_POL_IFD_]) ts->pol_[_POL_IFD_][y][p] = (short)integral[_FALLING_INT_]; } diff --git a/src/higher-level/trend-hl.c b/src/higher-level/trend-hl.c index aedaba81..c57ba2b6 100755 --- a/src/higher-level/trend-hl.c +++ b/src/higher-level/trend-hl.c @@ -158,7 +158,7 @@ double mae, rmse; // account for values given in continuous days if (in_ce){ - my -= 365*(nf-1)/2; my -= d_fld[0].year; my *= 1000; + my -= 365*(nf-1)/2; slp -= 365; slp *= 1000; } @@ -385,7 +385,7 @@ double mae, rmse; // account for values given in continuous days if (in_ce){ - my -= 365*(nf-1)/2; my -= d_fld[0].year; my *= 1000; + my -= 365*(nf-1)/2; slp -= 365; slp *= 1000; } From be63eaa18a98c086f23f4c310715a79c72a0867c Mon Sep 17 00:00:00 2001 From: David Frantz Date: Sat, 5 Sep 2020 13:28:34 +0200 Subject: [PATCH 53/78] implemented rates in polarmetrics --- src/cross-level/enum-cl.c | 3 ++- src/cross-level/enum-cl.h | 3 ++- src/higher-level/polar-hl.c | 36 ++++++++++++++++++++++++++++++++++-- 3 files changed, 38 
insertions(+), 4 deletions(-) diff --git a/src/cross-level/enum-cl.c b/src/cross-level/enum-cl.c index 6ee87637..0d9e7ce7 100755 --- a/src/cross-level/enum-cl.c +++ b/src/cross-level/enum-cl.c @@ -128,7 +128,8 @@ const tagged_enum_t _TAGGED_ENUM_POL_[_POL_LENGTH_] = { { _POL_VGA_, "VGA" }, { _POL_VGV_, "VGV" }, { _POL_DPY_, "DPY" }, { _POL_DPV_, "DPV" }, { _POL_IST_, "IST" }, { _POL_IBL_, "IBL" }, { _POL_IBT_, "IBT" }, { _POL_IGS_, "IGS" }, { _POL_IRD_, "IRD" }, - { _POL_IFD_, "IFD" }}; + { _POL_IFD_, "IFD" }, { _POL_RAR_, "RAR" }, { _POL_RAF_, "RAF" }, + { _POL_RMR_, "RMR" }, { _POL_RMF_,"RMF" }}; const tagged_enum_t _TAGGED_ENUM_TAIL_[_TAIL_LENGTH_] = { { _TAIL_LEFT_, "LEFT" }, { _TAIL_TWO_, "TWO" }, { _TAIL_RIGHT_, "RIGHT" }}; diff --git a/src/cross-level/enum-cl.h b/src/cross-level/enum-cl.h index 60f0451d..76dc79a0 100755 --- a/src/cross-level/enum-cl.h +++ b/src/cross-level/enum-cl.h @@ -196,7 +196,8 @@ enum { _POL_DEM_, _POL_DLM_, _POL_DPS_, _POL_DSS_, _POL_DMS_, _POL_DES_, _POL_VEM_, _POL_VLM_, _POL_VPS_, _POL_VSS_, _POL_VMS_, _POL_VES_, _POL_VEV_, _POL_VAV_, _POL_VLV_, _POL_VBL_, _POL_VSA_, _POL_VPA_, _POL_VGA_, _POL_VGV_, _POL_DPY_, _POL_DPV_, _POL_IST_, _POL_IBL_, - _POL_IBT_, _POL_IGS_, _POL_IRD_, _POL_IFD_, _POL_LENGTH_ }; + _POL_IBT_, _POL_IGS_, _POL_IRD_, _POL_IFD_, _POL_RAR_, _POL_RAF_, + _POL_RMR_, _POL_RMF_, _POL_LENGTH_ }; // folding enum { _FLD_YEAR_, _FLD_QUARTER_, _FLD_MONTH_, _FLD_WEEK_, _FLD_DOY_, _FLD_LENGTH_ }; diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index e161276e..4ba37e5a 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -321,6 +321,9 @@ polar_t timing[_EVENT_LEN_]; polar_t vector[_WINDOW_LEN_]; float mean_window[_WINDOW_LEN_][2]; int n_window[_WINDOW_LEN_]; +float max_rate[_WINDOW_LEN_]; +float mean_rate[_WINDOW_LEN_]; +float rate; double recurrence[2]; double integral[_INTEGRAL_LEN_]; @@ -339,7 +342,7 @@ float green_val, base_val; - #pragma omp parallel 
private(l,g,i,i0,i_,ce_left,ce_right,v_left,v_right,valid,ce,v,s,y,r,timing,vector,mean_window,n_window,recurrence,integral,polar,theta0,green_val,base_val) shared(mask_,ts,nc,ni,year_min,nodata,pol,tsi) default(none) + #pragma omp parallel private(l,g,i,i0,i_,ce_left,ce_right,v_left,v_right,valid,ce,v,s,y,r,timing,vector,mean_window,n_window,max_rate,mean_rate,rate,recurrence,integral,polar,theta0,green_val,base_val) shared(mask_,ts,nc,ni,year_min,nodata,pol,tsi) default(none) { // allocate @@ -453,6 +456,8 @@ float green_val, base_val; memset(&timing, 0, sizeof(polar_t)*_EVENT_LEN_); memset(mean_window, 0, sizeof(float)*_WINDOW_LEN_*2); memset(n_window, 0, sizeof(float)*_WINDOW_LEN_); + memset(max_rate, 0, sizeof(float)*_WINDOW_LEN_); + memset(mean_rate, 0, sizeof(float)*_WINDOW_LEN_); memset(recurrence, 0, sizeof(double)*2); if (vector[_THETA_].doy < 182) y = s; else y = s+1; @@ -504,18 +509,30 @@ float green_val, base_val; memcpy(&timing[_PEAK_], &polar[i], sizeof(polar_t));} // average vector of early growing season part + // + average and maximum rising rate if (polar[i].cum >= pol->start && polar[i].cum < pol->mid){ mean_window[_EARLY_][_X_] += polar[i].pcx; mean_window[_EARLY_][_Y_] += polar[i].pcy; + if (i > 0 && polar[i].val - polar[i-1].val > 0){ + rate = (polar[i].val - polar[i-1].val)/tsi->step; + mean_rate[_EARLY_] += rate; + if (rate > max_rate[_EARLY_]) max_rate[_EARLY_] = rate; + } n_window[_EARLY_]++; } // average vector of late growing season part + // + average and maximum falling rate if (polar[i].cum >= pol->mid && polar[i].cum < pol->end){ mean_window[_LATE_][_X_] += polar[i].pcx; mean_window[_LATE_][_Y_] += polar[i].pcy; + if (i > 0 && polar[i].val - polar[i-1].val < 0){ + rate = (polar[i-1].val - polar[i].val)/tsi->step; + mean_rate[_LATE_] += rate; + if (rate > max_rate[_LATE_]) max_rate[_LATE_] = rate; + } n_window[_LATE_]++; } @@ -537,6 +554,8 @@ float green_val, base_val; ce_from_polar_vector(s, &vector[_THETA_], &vector[_EARLY_]); 
ce_from_polar_vector(s, &vector[_THETA_], &vector[_LATE_]); + mean_rate[_EARLY_] /= n_window[_EARLY_]; + mean_rate[_LATE_] /= n_window[_LATE_]; green_val = (timing[_START_].val + timing[_END_].val) / 2.0; base_val = (timing[_LEFT_].val + timing[_RIGHT_].val) / 2.0; @@ -583,12 +602,19 @@ float green_val, base_val; } - //scale integrals to percent in relation to a + // scale integrals to scaled percent in relation to a // 365 days * 10000 value boxcar integral + // 10000 -> 100% for (g=0; g<_INTEGRAL_LEN_; g++){ integral[g] = integral[g] / (1e4*365.0) * 10000; } + // adjust scaling for derivative integrals, i.e. + // scaled percent in relation to a steady increase from 0 to 10000, i.e. + // half the 365 days * 10000 value boxcar integral + integral[_RISING_INT_] *= 2; + integral[_FALLING_INT_] *= 2; + // date parameters if (pol->use[_POL_DEM_]) ts->pol_[_POL_DEM_][y][p] = (short)timing[_LEFT_].ce; @@ -632,6 +658,12 @@ float green_val, base_val; if (pol->use[_POL_IRD_]) ts->pol_[_POL_IRD_][y][p] = (short)integral[_RISING_INT_]; if (pol->use[_POL_IFD_]) ts->pol_[_POL_IFD_][y][p] = (short)integral[_FALLING_INT_]; + // rate parameters + if (pol->use[_POL_RAR_]) ts->pol_[_POL_RAR_][y][p] = (short)mean_rate[_EARLY_]; + if (pol->use[_POL_RAF_]) ts->pol_[_POL_RAF_][y][p] = (short)mean_rate[_LATE_]; + if (pol->use[_POL_RMR_]) ts->pol_[_POL_RMR_][y][p] = (short)max_rate[_EARLY_]; + if (pol->use[_POL_RMF_]) ts->pol_[_POL_RMF_][y][p] = (short)max_rate[_LATE_]; + } if (theta0 != NULL) free((void*)theta0); From 87ebf908a6eeb7f2654d02f4321725610f0b7e41 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Sat, 5 Sep 2020 13:49:45 +0200 Subject: [PATCH 54/78] scaling of rates in polarmetrics --- src/higher-level/polar-hl.c | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index 4ba37e5a..76a84a40 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -557,6 +557,14 @@ float green_val, base_val; 
mean_rate[_EARLY_] /= n_window[_EARLY_]; mean_rate[_LATE_] /= n_window[_LATE_]; + // scale the rates in relation to a steady increase from 0 to 10000, i.e. + // to 10000/365 = 27.4 per day + // values are reported in scales percent + mean_rate[_EARLY_] = mean_rate[_EARLY_] / (1e4/365.0) * 1e4; + mean_rate[_LATE_] = mean_rate[_LATE_] / (1e4/365.0) * 1e4; + max_rate[_EARLY_] = max_rate[_EARLY_] / (1e4/365.0) * 1e4; + max_rate[_LATE_] = max_rate[_LATE_] / (1e4/365.0) * 1e4; + green_val = (timing[_START_].val + timing[_END_].val) / 2.0; base_val = (timing[_LEFT_].val + timing[_RIGHT_].val) / 2.0; From 829a309dfb4a619050f06594e291c5ee6973a172 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Sat, 5 Sep 2020 16:42:10 +0200 Subject: [PATCH 55/78] worked on scaling polarmetrics --- src/cross-level/enum-cl.c | 4 ++-- src/cross-level/enum-cl.h | 2 +- src/higher-level/polar-hl.c | 45 +++++++++---------------------------- 3 files changed, 14 insertions(+), 37 deletions(-) diff --git a/src/cross-level/enum-cl.c b/src/cross-level/enum-cl.c index 0d9e7ce7..388a3af2 100755 --- a/src/cross-level/enum-cl.c +++ b/src/cross-level/enum-cl.c @@ -127,8 +127,8 @@ const tagged_enum_t _TAGGED_ENUM_POL_[_POL_LENGTH_] = { { _POL_VBL_, "VBL" }, { _POL_VSA_, "VSA" }, { _POL_VPA_, "VPA" }, { _POL_VGA_, "VGA" }, { _POL_VGV_, "VGV" }, { _POL_DPY_, "DPY" }, { _POL_DPV_, "DPV" }, { _POL_IST_, "IST" }, { _POL_IBL_, "IBL" }, - { _POL_IBT_, "IBT" }, { _POL_IGS_, "IGS" }, { _POL_IRD_, "IRD" }, - { _POL_IFD_, "IFD" }, { _POL_RAR_, "RAR" }, { _POL_RAF_, "RAF" }, + { _POL_IBT_, "IBT" }, { _POL_IGS_, "IGS" }, { _POL_IRR_, "IRR" }, + { _POL_IFR_, "IFR" }, { _POL_RAR_, "RAR" }, { _POL_RAF_, "RAF" }, { _POL_RMR_, "RMR" }, { _POL_RMF_,"RMF" }}; const tagged_enum_t _TAGGED_ENUM_TAIL_[_TAIL_LENGTH_] = { diff --git a/src/cross-level/enum-cl.h b/src/cross-level/enum-cl.h index 76dc79a0..45ebfe68 100755 --- a/src/cross-level/enum-cl.h +++ b/src/cross-level/enum-cl.h @@ -196,7 +196,7 @@ enum { _POL_DEM_, _POL_DLM_, 
_POL_DPS_, _POL_DSS_, _POL_DMS_, _POL_DES_, _POL_VEM_, _POL_VLM_, _POL_VPS_, _POL_VSS_, _POL_VMS_, _POL_VES_, _POL_VEV_, _POL_VAV_, _POL_VLV_, _POL_VBL_, _POL_VSA_, _POL_VPA_, _POL_VGA_, _POL_VGV_, _POL_DPY_, _POL_DPV_, _POL_IST_, _POL_IBL_, - _POL_IBT_, _POL_IGS_, _POL_IRD_, _POL_IFD_, _POL_RAR_, _POL_RAF_, + _POL_IBT_, _POL_IGS_, _POL_IRR_, _POL_IFR_, _POL_RAR_, _POL_RAF_, _POL_RMR_, _POL_RMF_, _POL_LENGTH_ }; // folding diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index 76a84a40..14041eef 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -307,7 +307,6 @@ int l; int p; int i, i_, i0; int s, y; -int g; float r, v; bool valid; float ce_left, ce_right, ce; @@ -342,7 +341,7 @@ float green_val, base_val; - #pragma omp parallel private(l,g,i,i0,i_,ce_left,ce_right,v_left,v_right,valid,ce,v,s,y,r,timing,vector,mean_window,n_window,max_rate,mean_rate,rate,recurrence,integral,polar,theta0,green_val,base_val) shared(mask_,ts,nc,ni,year_min,nodata,pol,tsi) default(none) + #pragma omp parallel private(l,i,i0,i_,ce_left,ce_right,v_left,v_right,valid,ce,v,s,y,r,timing,vector,mean_window,n_window,max_rate,mean_rate,rate,recurrence,integral,polar,theta0,green_val,base_val) shared(mask_,ts,nc,ni,year_min,nodata,pol,tsi) default(none) { // allocate @@ -557,14 +556,6 @@ float green_val, base_val; mean_rate[_EARLY_] /= n_window[_EARLY_]; mean_rate[_LATE_] /= n_window[_LATE_]; - // scale the rates in relation to a steady increase from 0 to 10000, i.e. 
- // to 10000/365 = 27.4 per day - // values are reported in scales percent - mean_rate[_EARLY_] = mean_rate[_EARLY_] / (1e4/365.0) * 1e4; - mean_rate[_LATE_] = mean_rate[_LATE_] / (1e4/365.0) * 1e4; - max_rate[_EARLY_] = max_rate[_EARLY_] / (1e4/365.0) * 1e4; - max_rate[_LATE_] = max_rate[_LATE_] / (1e4/365.0) * 1e4; - green_val = (timing[_START_].val + timing[_END_].val) / 2.0; base_val = (timing[_LEFT_].val + timing[_RIGHT_].val) / 2.0; @@ -610,20 +601,6 @@ float green_val, base_val; } - // scale integrals to scaled percent in relation to a - // 365 days * 10000 value boxcar integral - // 10000 -> 100% - for (g=0; g<_INTEGRAL_LEN_; g++){ - integral[g] = integral[g] / (1e4*365.0) * 10000; - } - - // adjust scaling for derivative integrals, i.e. - // scaled percent in relation to a steady increase from 0 to 10000, i.e. - // half the 365 days * 10000 value boxcar integral - integral[_RISING_INT_] *= 2; - integral[_FALLING_INT_] *= 2; - - // date parameters if (pol->use[_POL_DEM_]) ts->pol_[_POL_DEM_][y][p] = (short)timing[_LEFT_].ce; if (pol->use[_POL_DSS_]) ts->pol_[_POL_DSS_][y][p] = (short)timing[_START_].ce; @@ -659,18 +636,18 @@ float green_val, base_val; if (pol->use[_POL_VGV_]) ts->pol_[_POL_VGV_][y][p] = (short)standdev(recurrence[1], n_window[_GROW_]); // integral parameters - if (pol->use[_POL_IST_]) ts->pol_[_POL_IST_][y][p] = (short)integral[_SEASONAL_INT_]; - if (pol->use[_POL_IBL_]) ts->pol_[_POL_IBL_][y][p] = (short)integral[_LATENT_INT_]; - if (pol->use[_POL_IBT_]) ts->pol_[_POL_IBT_][y][p] = (short)integral[_TOTAL_INT_]; - if (pol->use[_POL_IGS_]) ts->pol_[_POL_IGS_][y][p] = (short)integral[_GREEN_INT_]; - if (pol->use[_POL_IRD_]) ts->pol_[_POL_IRD_][y][p] = (short)integral[_RISING_INT_]; - if (pol->use[_POL_IFD_]) ts->pol_[_POL_IFD_][y][p] = (short)integral[_FALLING_INT_]; + if (pol->use[_POL_IST_]) ts->pol_[_POL_IST_][y][p] = (short)(integral[_SEASONAL_INT_] * 10); + if (pol->use[_POL_IBL_]) ts->pol_[_POL_IBL_][y][p] = 
(short)(integral[_LATENT_INT_] * 10); + if (pol->use[_POL_IBT_]) ts->pol_[_POL_IBT_][y][p] = (short)(integral[_TOTAL_INT_] * 10); + if (pol->use[_POL_IGS_]) ts->pol_[_POL_IGS_][y][p] = (short)(integral[_GREEN_INT_] * 10); + if (pol->use[_POL_IRR_]) ts->pol_[_POL_IRR_][y][p] = (short)(integral[_RISING_INT_] * 100); + if (pol->use[_POL_IFR_]) ts->pol_[_POL_IFR_][y][p] = (short)(integral[_FALLING_INT_] * 100); // rate parameters - if (pol->use[_POL_RAR_]) ts->pol_[_POL_RAR_][y][p] = (short)mean_rate[_EARLY_]; - if (pol->use[_POL_RAF_]) ts->pol_[_POL_RAF_][y][p] = (short)mean_rate[_LATE_]; - if (pol->use[_POL_RMR_]) ts->pol_[_POL_RMR_][y][p] = (short)max_rate[_EARLY_]; - if (pol->use[_POL_RMF_]) ts->pol_[_POL_RMF_][y][p] = (short)max_rate[_LATE_]; + if (pol->use[_POL_RAR_]) ts->pol_[_POL_RAR_][y][p] = (short)(mean_rate[_EARLY_] * 100); + if (pol->use[_POL_RAF_]) ts->pol_[_POL_RAF_][y][p] = (short)(mean_rate[_LATE_] * 100); + if (pol->use[_POL_RMR_]) ts->pol_[_POL_RMR_][y][p] = (short)(max_rate[_EARLY_] * 100); + if (pol->use[_POL_RMF_]) ts->pol_[_POL_RMF_][y][p] = (short)(max_rate[_LATE_] * 100); } From 9a384929f14cc78acce323097e3d7757bea64295 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Sat, 5 Sep 2020 17:01:19 +0200 Subject: [PATCH 56/78] worked on scaling polarmetrics --- src/higher-level/polar-hl.c | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index 14041eef..5a0dc50a 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -636,18 +636,18 @@ float green_val, base_val; if (pol->use[_POL_VGV_]) ts->pol_[_POL_VGV_][y][p] = (short)standdev(recurrence[1], n_window[_GROW_]); // integral parameters - if (pol->use[_POL_IST_]) ts->pol_[_POL_IST_][y][p] = (short)(integral[_SEASONAL_INT_] * 10); - if (pol->use[_POL_IBL_]) ts->pol_[_POL_IBL_][y][p] = (short)(integral[_LATENT_INT_] * 10); - if (pol->use[_POL_IBT_]) ts->pol_[_POL_IBT_][y][p] = 
(short)(integral[_TOTAL_INT_] * 10); - if (pol->use[_POL_IGS_]) ts->pol_[_POL_IGS_][y][p] = (short)(integral[_GREEN_INT_] * 10); - if (pol->use[_POL_IRR_]) ts->pol_[_POL_IRR_][y][p] = (short)(integral[_RISING_INT_] * 100); - if (pol->use[_POL_IFR_]) ts->pol_[_POL_IFR_][y][p] = (short)(integral[_FALLING_INT_] * 100); + if (pol->use[_POL_IST_]) ts->pol_[_POL_IST_][y][p] = (short)(integral[_SEASONAL_INT_] / 365.0); // integral of year + if (pol->use[_POL_IBL_]) ts->pol_[_POL_IBL_][y][p] = (short)(integral[_LATENT_INT_] / 365.0); // integral of year + if (pol->use[_POL_IBT_]) ts->pol_[_POL_IBT_][y][p] = (short)(integral[_TOTAL_INT_] / 365.0); // integral of year + if (pol->use[_POL_IGS_]) ts->pol_[_POL_IGS_][y][p] = (short)(integral[_GREEN_INT_] / 365.0); // integral of year + if (pol->use[_POL_IRR_]) ts->pol_[_POL_IRR_][y][p] = (short)(integral[_RISING_INT_] / 365.0); + if (pol->use[_POL_IFR_]) ts->pol_[_POL_IFR_][y][p] = (short)(integral[_FALLING_INT_] / 365.0); // rate parameters - if (pol->use[_POL_RAR_]) ts->pol_[_POL_RAR_][y][p] = (short)(mean_rate[_EARLY_] * 100); - if (pol->use[_POL_RAF_]) ts->pol_[_POL_RAF_][y][p] = (short)(mean_rate[_LATE_] * 100); - if (pol->use[_POL_RMR_]) ts->pol_[_POL_RMR_][y][p] = (short)(max_rate[_EARLY_] * 100); - if (pol->use[_POL_RMF_]) ts->pol_[_POL_RMF_][y][p] = (short)(max_rate[_LATE_] * 100); + if (pol->use[_POL_RAR_]) ts->pol_[_POL_RAR_][y][p] = (short)(mean_rate[_EARLY_] * 30); // increase per month + if (pol->use[_POL_RAF_]) ts->pol_[_POL_RAF_][y][p] = (short)(mean_rate[_LATE_] * 30); // decrease per month + if (pol->use[_POL_RMR_]) ts->pol_[_POL_RMR_][y][p] = (short)(max_rate[_EARLY_] * 30); // increase per month + if (pol->use[_POL_RMF_]) ts->pol_[_POL_RMF_][y][p] = (short)(max_rate[_LATE_] * 30); // decrease per month } From dfaf9df00aec57bb2f956ee4e95141f1e9fe434c Mon Sep 17 00:00:00 2001 From: David Frantz Date: Sat, 5 Sep 2020 17:09:08 +0200 Subject: [PATCH 57/78] worked on scaling polarmetrics --- 
src/higher-level/polar-hl.c | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index 5a0dc50a..f8776441 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -636,12 +636,12 @@ float green_val, base_val; if (pol->use[_POL_VGV_]) ts->pol_[_POL_VGV_][y][p] = (short)standdev(recurrence[1], n_window[_GROW_]); // integral parameters - if (pol->use[_POL_IST_]) ts->pol_[_POL_IST_][y][p] = (short)(integral[_SEASONAL_INT_] / 365.0); // integral of year - if (pol->use[_POL_IBL_]) ts->pol_[_POL_IBL_][y][p] = (short)(integral[_LATENT_INT_] / 365.0); // integral of year - if (pol->use[_POL_IBT_]) ts->pol_[_POL_IBT_][y][p] = (short)(integral[_TOTAL_INT_] / 365.0); // integral of year - if (pol->use[_POL_IGS_]) ts->pol_[_POL_IGS_][y][p] = (short)(integral[_GREEN_INT_] / 365.0); // integral of year - if (pol->use[_POL_IRR_]) ts->pol_[_POL_IRR_][y][p] = (short)(integral[_RISING_INT_] / 365.0); - if (pol->use[_POL_IFR_]) ts->pol_[_POL_IFR_][y][p] = (short)(integral[_FALLING_INT_] / 365.0); + if (pol->use[_POL_IST_]) ts->pol_[_POL_IST_][y][p] = (short)(integral[_SEASONAL_INT_] / 365.0); // integral, unit of time: year + if (pol->use[_POL_IBL_]) ts->pol_[_POL_IBL_][y][p] = (short)(integral[_LATENT_INT_] / 365.0); // integral, unit of time: year + if (pol->use[_POL_IBT_]) ts->pol_[_POL_IBT_][y][p] = (short)(integral[_TOTAL_INT_] / 365.0); // integral, unit of time: year + if (pol->use[_POL_IGS_]) ts->pol_[_POL_IGS_][y][p] = (short)(integral[_GREEN_INT_] / 365.0); // integral, unit of time: year + if (pol->use[_POL_IRR_]) ts->pol_[_POL_IRR_][y][p] = (short)(integral[_RISING_INT_] / 30.0); // integral, unit of time: month + if (pol->use[_POL_IFR_]) ts->pol_[_POL_IFR_][y][p] = (short)(integral[_FALLING_INT_] / 30.0); // integral, unit of time: month // rate parameters if (pol->use[_POL_RAR_]) ts->pol_[_POL_RAR_][y][p] = (short)(mean_rate[_EARLY_] * 30); // increase per month From 
b6a493c105a9d20de5f6d5475ca9d27f9f03fc27 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Mon, 7 Sep 2020 08:48:14 +0200 Subject: [PATCH 58/78] fixed wrong array size for quantile array --- src/higher-level/fold-hl.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/higher-level/fold-hl.c b/src/higher-level/fold-hl.c index fd8da789..9dd69c5d 100755 --- a/src/higher-level/fold-hl.c +++ b/src/higher-level/fold-hl.c @@ -67,7 +67,7 @@ bool alloc_q_array = false; { // initialize _STAts - if (alloc_q_array) alloc((void**)&q_array, nf, sizeof(float)); + if (alloc_q_array) alloc((void**)&q_array, ni, sizeof(float)); #pragma omp for for (p=0; p Date: Tue, 8 Sep 2020 15:20:51 +0200 Subject: [PATCH 59/78] fixed GDAL 3.0 axis order issue --- Makefile | 2 +- src/cross-level/warp-cl.cpp | 8 + src/dummy.c | 386 ++---------------------------------- 3 files changed, 22 insertions(+), 374 deletions(-) diff --git a/Makefile b/Makefile index bbaa95b4..47f3f809 100755 --- a/Makefile +++ b/Makefile @@ -361,7 +361,7 @@ force-stack: temp cross $(DA)/_stack.c ### dummy code for testing stuff -dummy: temp cross aux src/dummy.c +dummy: temp cross aux higher src/dummy.c $(G11) $(CFLAGS) $(GDAL) $(GSL) $(CURL) $(SPLITS) $(OPENCV) -o $(TB)/dummy src/dummy.c $(TC)/*.o $(TA)/*.o $(TH)/*.o $(LDGDAL) $(LDGSL) $(LDCURL) $(LDSPLITS) $(LDOPENCV) diff --git a/src/cross-level/warp-cl.cpp b/src/cross-level/warp-cl.cpp index 4c428f3e..1c022566 100755 --- a/src/cross-level/warp-cl.cpp +++ b/src/cross-level/warp-cl.cpp @@ -50,6 +50,8 @@ char *wkt = dst_wkt; double x, y; + CPLSetConfigOption("OGR_CT_FORCE_TRADITIONAL_GIS_ORDER", "YES"); + if (srs_x < -180 || srs_x > 180){ printf("Longitude is out of bounds.\n"); return FAILURE;} if (srs_y < -90 || srs_y > 90){ @@ -91,6 +93,9 @@ OGRCoordinateTransformation *poCT = NULL; char *wkt = src_wkt; double x, y; + + CPLSetConfigOption("OGR_CT_FORCE_TRADITIONAL_GIS_ORDER", "YES"); + x = srs_x; y = srs_y; // set coordinate systems @@ -134,6 +139,9 
@@ char *src_css = src_wkt; char *dst_css = dst_wkt; double x, y; + + CPLSetConfigOption("OGR_CT_FORCE_TRADITIONAL_GIS_ORDER", "YES"); + x = srs_x; y = srs_y; // set coordinate systems diff --git a/src/dummy.c b/src/dummy.c index 6089b4b9..998da818 100755 --- a/src/dummy.c +++ b/src/dummy.c @@ -43,6 +43,7 @@ This program is for testing small things. Needs to be compiled on demand #include "cross-level/const-cl.h" #include "cross-level/stats-cl.h" #include "cross-level/stack-cl.h" +#include "cross-level/warp-cl.h" #include "higher-level/read-ard-hl.h" @@ -53,392 +54,31 @@ This program is for testing small things. Needs to be compiled on demand #include "ogr_spatialref.h" // coordinate systems services -unsigned crc8_slow(bool *data, int len) -{ -unsigned int crc = 0xff; -int i, k; - - for (i=0; i> 1) ^ 0xb2 : crc >> 1; - } - - return crc ^ 0xff; -} - - -stack_t *read_this_block(){ -stack_t *stack = NULL; -int nb; -int nx, ny, nc; - - nx = 3000; - ny = 300; - nc = nx*ny; - nb = 20; - - stack = allocate_stack(nb, nc, _DT_SHORT_); - - set_stack_ncols(stack, nx); - set_stack_nrows(stack, ny); - set_stack_chunkncols(stack, nx); - set_stack_chunknrows(stack, ny); - set_stack_nchunks(stack, 10); - set_stack_chunk(stack, 5); - - return stack; -} - - - -ard_t *read_data(int *nt){ -int t; -ard_t *ard = NULL; -int error = 0; - -int n = 250; - - - - - alloc((void**)&ard, n, sizeof(ard_t)); - - - #pragma omp parallel shared(ard,n) reduction(+: error) default(none) - { - - #pragma omp for - for (t=0; t 0){ - printf("%d reading errors. 
", error); - free_ard(ard, n); - *nt = -1; - return NULL; - } - - *nt = n; - return ard; -} - - -void read_this(ard_t **ARD1, int *nt1, int pu, int npu, int thread){ - - - if (pu < 0 || pu >= npu) return; - - ARD1[pu] = NULL; - nt1[pu] = 0; - - omp_set_num_threads(thread); - - - ARD1[pu] = read_data(&nt1[pu]); - - - return; -} - - -int screen_this(ard_t *ard, int nt){ -int t; -int error = 0; - - - #pragma omp parallel shared(ard,nt) reduction(+: error) default(none) - { - - #pragma omp for - for (t=0; t 0){ - printf("%d screening QAI errors. ", error); - return FAILURE; - } - - - - - return SUCCESS; -} - - - - -typedef struct { - short **cso_[NPOW_08]; -} _cso_t; - -stack_t *comp_cso_stack(stack_t *from, int nb){ -stack_t *stack = NULL; - - - if ((stack = copy_stack(from, nb, _DT_SHORT_)) == NULL) return NULL; - - return stack; -} - - -stack_t **comp_cso(ard_t *ard, _cso_t *cs, int nw, int *nproduct){ -stack_t **CSO = NULL; -int o, nprod = 15; -int error = 0; -short ***ptr[NPOW_08]; - - - - for (o=0; ocso_[o]; - - - alloc((void**)&CSO, nprod, sizeof(stack_t*)); - - - for (o=0; o 0){ - printf("%d compiling CSO product errors.\n", error); - for (o=0; o= npu) return; - - OUTPUT[pu] = NULL; - nprod[pu] = 0; - - - - omp_set_num_threads(thread); - - if (screen_this(ARD1[pu], nt1[pu]) != SUCCESS) printf("screen error\n"); - - - OUTPUT[pu] = cso(ARD1[pu], nt1[pu], &nprod[pu]); - - - - free_ard(ARD1[pu], nt1[pu]); - - - return; -} - - - -void output_this(stack_t ***OUTPUT, int *nprod, int pu, int npu, int thread){ -int o; - - - if (pu < 0 || pu >= npu) return; - - if (nprod[pu] == 0) return; - if (OUTPUT[pu] == NULL) return; - - omp_set_num_threads(thread); - - // would write output here - - for (o=0; o Date: Wed, 9 Sep 2020 15:26:24 +0200 Subject: [PATCH 60/78] fixed a small issue with computing trend significance for DOY-based phenometrics --- src/higher-level/trend-hl.c | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/higher-level/trend-hl.c 
b/src/higher-level/trend-hl.c index c57ba2b6..67000ea4 100755 --- a/src/higher-level/trend-hl.c +++ b/src/higher-level/trend-hl.c @@ -159,13 +159,16 @@ double mae, rmse; // account for values given in continuous days if (in_ce){ my -= 365*(nf-1)/2; - slp -= 365; slp *= 1000; + slp -= 365; } // standard error of slope, and significance of slope seb = sqrt(1.0/(k-2)*ssqe)/sqrt(sxsq); sig = slope_significant(1-trd->conf, trd->tail, k, slp, 0.0, seb); + // account for values given in continuous days + if (in_ce) slp *= 1000; + rmse = sqrt(ssqe/k); mae = sae/k; From 29be8d28f334e50ba5252ec8576057997d5144eb Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 10 Sep 2020 13:30:23 +0200 Subject: [PATCH 61/78] fixed small issue in polar green integral --- bash/force-pyramid.sh | 2 +- src/higher-level/polar-hl.c | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bash/force-pyramid.sh b/bash/force-pyramid.sh index 69217504..866c4029 100755 --- a/bash/force-pyramid.sh +++ b/bash/force-pyramid.sh @@ -36,7 +36,7 @@ fi for i in "$@"; do INP=$(readlink -f $i) - echo $INP + #echo $INP # input file exists? if [ ! 
-r $INP ]; then echo $INP "ist not readable/existing" diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index f8776441..09fb11a4 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -570,7 +570,7 @@ float green_val, base_val; if (polar[i].cum >= pol->start && polar[i].cum < pol->end && polar[i].val > green_val){ - integral[_GREEN_INT_] += polar[i].val*tsi->step; + integral[_GREEN_INT_] += (polar[i].val-green_val)*tsi->step; } // min-min integral From 480e62e3f0cfdb000f08dae4f18c80bb365d03c3 Mon Sep 17 00:00:00 2001 From: Stefan Ernst Date: Thu, 10 Sep 2020 13:38:57 +0200 Subject: [PATCH 62/78] rename of files, getting ready for release --- Dockerfile | 4 +- Makefile | 3 +- ...orce-level1-gcs.sh => force-level1-csd.sh} | 154 ++++++++++-------- ...evel1-esa.sh => force-level1-sentinel2.sh} | 103 ++++++------ 4 files changed, 142 insertions(+), 122 deletions(-) rename bash/{force-level1-gcs.sh => force-level1-csd.sh} (80%) rename bash/{force-level1-esa.sh => force-level1-sentinel2.sh} (84%) diff --git a/Dockerfile b/Dockerfile index 1af5d4df..5139eb72 100755 --- a/Dockerfile +++ b/Dockerfile @@ -21,8 +21,8 @@ RUN apt-get -y install wget unzip curl git build-essential libgdal-dev gdal-bin # Set python aliases for Python 3.x RUN echo 'alias python=python3' >> ~/.bashrc \ && echo 'alias pip=pip3' >> ~/.bashrc -# NumPy is needed for OpenCV -RUN pip3 install numpy==1.18.1 +# NumPy is needed for OpenCV, gsutil for Google downloads +RUN pip3 install numpy==1.18.1 gsutil # Build OpenCV from source RUN mkdir -p $INSTALL_DIR/opencv diff --git a/Makefile b/Makefile index d51c7f83..3685c2c2 100755 --- a/Makefile +++ b/Makefile @@ -24,7 +24,7 @@ # Modify the following lines to match your needs # Installation directory -BINDIR=/develop +BINDIR=/usr/local/bin # Libraries GDAL=-I/usr/include/gdal -L/usr/lib -Wl,-rpath=/usr/lib @@ -366,6 +366,7 @@ clean: bash: temp cp $(DB)/force-cube.sh $(TB)/force-cube cp $(DB)/force-l2ps_.sh 
$(TB)/force-l2ps_ + cp $(DB)/force-level1-csd.sh $(TB)/force-level1-csd cp $(DB)/force-level1-landsat.sh $(TB)/force-level1-landsat cp $(DB)/force-level1-sentinel2.sh $(TB)/force-level1-sentinel2 cp $(DB)/force-level2.sh $(TB)/force-level2 diff --git a/bash/force-level1-gcs.sh b/bash/force-level1-csd.sh similarity index 80% rename from bash/force-level1-gcs.sh rename to bash/force-level1-csd.sh index 2bed5da7..caba67db 100755 --- a/bash/force-level1-gcs.sh +++ b/bash/force-level1-csd.sh @@ -57,9 +57,9 @@ Mandatory arguments: An existing directory, your files will be stored here queue - Downloaded files are appended to a file queue, which is needed for the Level 2 - processing. The file doesn't need to exist. If it exists, new lines will be - appended on successful ingestion + Downloaded files are appended to a file queue, which is needed for the + Level 2 processing. The file doesn't need to exist. If it exists, new + lines will be appended on successful ingestion area of interest (1) The coordinates of your study area: @@ -88,8 +88,8 @@ Optional arguments (always placed BEFORE mandatory arguments): -d | --daterange starttime,endtime - Dates must be given in the following format: YYYY-MM-DD,YYYY-MM-DD - Default: 1970-01-01,today + Dates must be given in the following format: YYYYMMDD,YYYYMMDD + Default: 19700101,today -h | --help Show this help @@ -98,6 +98,11 @@ Optional arguments (always placed BEFORE mandatory arguments): Will trigger a dry run that will only return the number of images and their total data volume + -k | --keep-meta + Will write the results of the query to the metadata directory. + Two files will be created if Landsat and Sentinel-2 data is queried + at the same time. Filename: csd_metadata_YYYY-MM-DDTHH-MM-SS + -s | --sensor Sensors to include in the query, comma-separated. 
Valid sensors: @@ -114,12 +119,14 @@ Optional arguments (always placed BEFORE mandatory arguments): -u | --update Will update the metadata catalogue (download and extract from GCS) - Only specify the metadata dir as argument when using this option - + Only specify the metadata dir as mandatory argument when using this option. + Use the -s option to only update Landsat or Sentinel-2 metadata. + HELP exit 1 } + is_in_range() { awk -v value="$1" -v lower="$2" -v upper="$3" 'BEGIN {print (lower <= value && value <= upper)}' } @@ -137,10 +144,11 @@ show_progress() { } update_meta() { - echo "Updating metadata catalogue..." - gsutil -m cp gs://gcp-public-data-$1/index.csv.gz $METADIR - gunzip $METADIR/index.csv.gz - mv $METADIR/index.csv $METADIR/metadata_$2.csv + printf "%s\n" "" "Downloading ${1^} metadata catalogue..." + gsutil -m -q cp gs://gcp-public-data-$1/index.csv.gz "$METADIR" + printf "%s\n" "Extracting compressed ${1^} metadata catalogue..." + gunzip "$METADIR"/index.csv.gz + mv "$METADIR"/index.csv "$METADIR"/metadata_$2.csv } which_satellite() { @@ -162,8 +170,8 @@ which_satellite() { # ============================================================ # Initialize arguments and parse command line input SENSIN="LT04,LT05,LE07,LC08,S2A,S2B" -DATEMIN="1970-01-01" -DATEMAX=$(date +%Y-%m-%d) +DATEMIN="19700101" +DATEMAX=$(date +%F) CCMIN=0 CCMAX=100 TIER="T1" @@ -171,35 +179,37 @@ DRYRUN=0 LANDSAT=0 SENTINEL=0 UPDATE=0 +KEEPMETA=0 -ARGS=`getopt -o c:d:nhs:t:u --long cloudcover:,daterange:,no-act,help,sensors:,tier:,update -n $0 -- "$@"` -if [ $? != 0 ] ; then echo "Error in command line options. Please check your options." >&2 ; show_help ; fi +ARGS=`getopt -o c:d:nhks:t:u --long cloudcover:,daterange:,no-act,help,keep-meta,sensors:,tier:,update -n $0 -- "$@"` +if [ $? != 0 ] ; then printf "%s\n" "" "Error in command line options. Please check your options." 
>&2 ; show_help ; fi eval set -- "$ARGS" while :; do case "$1" in - -c | --cloudcover) - CCMIN=$(echo $2 | cut -d"," -f1) - CCMAX=$(echo $2 | cut -d"," -f2) + -c|--cloudcover) + CCMIN=$(echo "$2" | cut -d"," -f1) + CCMAX=$(echo "$2" | cut -d"," -f2) shift ;; - -d | --daterange) - DATEMIN=$(echo $2 | cut -d"," -f1) - DATEMAX=$(echo $2 | cut -d"," -f2) + -d|--daterange) + DATEMIN=$(echo "$2" | cut -d"," -f1) + DATEMAX=$(echo "$2" | cut -d"," -f2) shift ;; - -n | --no-act) + -n|--no-act) DRYRUN=1 ;; - -h | --help) + -h|--help) show_help ;; - -s | --sensors) + -k|--keepmeta) + KEEPMETA=1 ;; + -s|--sensors) SENSIN="$2" shift ;; - -t | --tier) + -t|--tier) TIER="$2" shift ;; - -u | --update) - METADIR="$2" + -u|--update) UPDATE=1 ;; - -- ) + --) shift; break ;; *) break @@ -207,27 +217,33 @@ while :; do shift done + # Check for update flag and update metadata catalogue if set if [ $UPDATE -eq 1 ]; then - if [ $# -lt 2 ]; then - echo "Metadata directory not specified, exiting" + METADIR="$1" + if [ $# -lt 1 ]; then + printf "%s\n" "" "Metadata directory not specified, exiting" "" exit 1 - elif [ $# -gt 2 ]; then - echo "Error: Please only specify the metadata directory when using the update option (-u)" + elif [ $# -gt 1 ]; then + printf "%s\n" "" "Error: Invalid argument." "Only specify the metadata directory when using the update option (-u)." "The only allowed optional argument is -s. Use it if you don't want to" "update the Landsat and Sentinel-2 metadata catalogues at the same time." "" + #"Please only specify the metadata directory when using the update option (-u)" "To only update either of the LS / S2 catalogues, you may also use the -s option" "" exit 1 elif ! [ -w $METADIR ]; then - echo "Can not write to metadata directory, exiting" + printf "%s\n" "" "Metadata directory does not exist, exiting" "" + exit 1 + elif ! 
[ -w $METADIR ]; then + printf "%s\n" "" "Can not write to metadata directory, exiting" "" exit 1 else which_satellite - if [ $SENTINEL -eq 1 ]; then - update_meta sentinel-2 sentinel2 - fi if [ $LANDSAT -eq 1 ]; then update_meta landsat landsat fi + if [ $SENTINEL -eq 1 ]; then + update_meta sentinel-2 sentinel2 + fi fi - echo "Done. You can run this script without option -u to download data now." + printf "%s\n" "" "Done. You can run this script without option -u to download data now." "" exit 0 fi @@ -241,14 +257,14 @@ which_satellite # ============================================================ # Check user input and set up variables -METADIR=$1 -POOL=$2 -QUEUE=$3 -AOI=$4 +METADIR="$1" +POOL="$2" +QUEUE="$3" +AOI="$4" -# check for empty options +# check for empty arguments if [[ -z $METADIR || -z $POOL || -z $QUEUE || -z $AOI || -z $CCMIN || -z $CCMAX || -z $DATEMIN || -z $DATEMAX || -z $SENSIN || -z $TIER ]]; then - printf "%s\n" "" "Error: One or more variables are undefined, please check the following" "" "Metadata directory: $METADIR" "Level-1 pool: $POOL" "Queue: $QUEUE" "AOI: $AOI" "Sensors: $SENSIN" "Start date: $DATEMIN, End date: $DATEMAX" "Cloud cover minimum: $CCMIN, cloud cover maximum: $CCMAX" "Tier (Landsat only): $TIER" "" + printf "%s\n" "" "Error: One or more arguments are undefined, please check the following" "" "Metadata directory: $METADIR" "Level-1 pool: $POOL" "Queue: $QUEUE" "AOI: $AOI" "Sensors: $SENSIN" "Start date: $DATEMIN, End date: $DATEMAX" "Cloud cover minimum: $CCMIN, cloud cover maximum: $CCMAX" "Tier (Landsat only): $TIER" "" exit 1 fi @@ -264,15 +280,18 @@ for T in $(echo $TIER | sed 's/,/ /g'); do done # check if dates are correct -if [ $(date -d $DATEMIN +%s) -gt $(date -d $DATEMAX +%s) ]; then +if ! [[ $DATEMIN =~ ^[[:digit:]]+$ ]] || ! [[ $DATEMAX =~ ^[[:digit:]]+$ ]]; then + printf "%s\n" "" "Error: One of the entered dates seems to contain non-numeric characters." "Start: $DATEMIN, End: $DATEMAX" "" + exit 1 +elif ! 
date -d $DATEMIN &> /dev/null; then + printf "%s\n" "" "starttime ($DATEMIN) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" + exit 1 +elif ! date -d $DATEMAX &> /dev/null; then + printf "%s\n" "" "endtime ($DATEMAX) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" + exit 1 +elif [ $(date -d $DATEMIN +%s) -gt $(date -d $DATEMAX +%s) ]; then printf "%s\n" "Error: Start of date range is larger than end of date range" "Start: $DATEMIN, End: $DATEMAX" "" exit 1 - elif ! date -d $DATEMIN &> /dev/null; then - printf "%s\n" "" "starttime ($DATEMIN) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" - exit 1 - elif ! date -d $DATEMAX &> /dev/null; then - printf "%s\n" "" "endtime ($DATEMAX) is not a valid date." "Make sure date is formatted as YYYY-MM-DD" "" - exit 1 fi # check if cloud cover is valid @@ -366,7 +385,7 @@ fi # 3. Download data get_data() { SATELLITE=$1 - PRINTNAME=$2 + PRINTNAME=${SATELLITE^} case $SATELLITE in landsat) SENSORS=$(echo $SENSIN | grep -o "L[C,E,T]0[4,5,7,8]") ;; sentinel2) SENSORS=$(echo $SENSIN | grep -o "S2[A-B]") ;; @@ -395,16 +414,16 @@ get_data() { ogr2ogr -f "GPKG" merged.gpkg WFS:"$WFSURL" -append -update ogr2ogr -f "GPKG" merged.gpkg $AOI -append -update - TILERAW=$(ogr2ogr -f CSV /vsistdout/ -dialect sqlite -sql "SELECT $SATELLITE.Name FROM $SATELLITE, $AOINE WHERE ST_Intersects($SATELLITE.geom, ST_Transform($AOINE.geom, 4326))" merged.gpkg) - TILES="_"$(echo $TILERAW | sed 's/Name, //; s/ /_|_/g')"_" + TILERAW=$(ogr2ogr -f CSV /vsistdout/ -dialect sqlite -sql "SELECT $SATELLITE.PRFID FROM $SATELLITE, $AOINE WHERE ST_Intersects($SATELLITE.geom, ST_Transform($AOINE.geom, 4326))" merged.gpkg) + TILES="_"$(echo $TILERAW | sed 's/PRFID, //; s/ /_|_/g')"_" rm merged.gpkg elif [ "$AOITYPE" -eq 2 ]; then printf "%s\n" "" "Searching for footprints / tiles intersecting with input geometry..." 
WKT=$(echo $AOI | sed 's/,/%20/g; s/\//,/g') WFSURL="http://ows.geo.hu-berlin.de/cgi-bin/qgis_mapserv.fcgi?MAP=/owsprojects/grids.qgs&SERVICE=WFS&REQUEST=GetFeature&typename="$SATELLITE"&Filter=%3Cogc:Filter%3E%3Cogc:Intersects%3E%3Cogc:PropertyName%3Eshape%3C/ogc:PropertyName%3E%3Cgml:Polygon%20srsName=%22EPSG:4326%22%3E%3Cgml:outerBoundaryIs%3E%3Cgml:LinearRing%3E%3Cgml:coordinates%3E"$WKT"%3C/gml:coordinates%3E%3C/gml:LinearRing%3E%3C/gml:outerBoundaryIs%3E%3C/gml:Polygon%3E%3C/ogc:Intersects%3E%3C/ogc:Filter%3E" - TILERAW=$(ogr2ogr -f CSV /vsistdout/ -select "Name" WFS:"$WFSURL") - TILES="_"$(echo $TILERAW | sed 's/Name, //; s/ /_|_/g')"_" + TILERAW=$(ogr2ogr -f CSV /vsistdout/ -select "PRFID" WFS:"$WFSURL") + TILES="_"$(echo $TILERAW | sed 's/PRFID, //; s/ /_|_/g')"_" elif [ "$AOITYPE" -eq 3 ]; then sensor_tile_mismatch() { @@ -425,23 +444,30 @@ get_data() { printf "%s\n" "" "Querying the metadata catalogue for $PRINTNAME data" "Sensor(s): "$(echo $SENSORS | sed 's/ /,/g') if [ $SATELLITE == "landsat" ]; then printf "%s\n" "Tier(s): $TIER"; fi - printf "%s\n" "Tile(s): "$(echo $TILERAW | sed 's/Name, //; s/ /,/g') "Daterange: "$DATEMIN" to "$DATEMAX "Cloud cover minimum: "$CCMIN"%, maximum: "$CCMAX"%" "" + printf "%s\n" "Tile(s): "$(echo $TILERAW | sed 's/PRFID, //; s/ /,/g') "Daterange: "$DATEMIN" to "$DATEMAX "Cloud cover minimum: "$CCMIN"%, maximum: "$CCMAX"%" "" + # ============================================================ # Filter metadata and extract download links if [ $SATELLITE = "sentinel2" ]; then - LINKS=$(grep -E $TILES $METACAT | grep -E $(echo ""$SENSORS"" | sed 's/ /_|/g')"_" | awk -F "," '{OFS=","} {gsub("T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z|-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '{OFS=","} $5 >= start && $5 <= stop && $7 >= clow && $7 <= chigh') + LINKS=$(grep -E $TILES $METACAT | grep -E $(echo ""$SENSORS"" | sed 's/ /_|/g')"_" | awk -F "," '{OFS=","} 
{gsub("T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z|-","",$5)}1' | awk -v start="$DATEMIN" -v stop="$DATEMAX" -v clow="$CCMIN" -v chigh="$CCMAX" -F "," '{OFS=","} $5 >= start && $5 <= stop && $7 >= clow && $7 <= chigh') elif [ $SATELLITE = "landsat" ]; then - LINKS=$(grep -E $TILES $METACAT | grep -E $(echo ""$SENSORS"" | sed 's/ /_|/g')"_" | grep -E $(echo "_"$TIER | sed 's/,/,|_/g')"," | awk -F "," '{OFS=","} {gsub("-","",$5)}1' | awk -v start=$DATEMIN -v stop=$DATEMAX -v clow=$CCMIN -v chigh=$CCMAX -F "," '$5 >= start && $5 <= stop && $6 == 01 && $12 >= clow && $12 <= chigh') + LINKS=$(grep -E $TILES $METACAT | grep -E $(echo ""$SENSORS"" | sed 's/ /_|/g')"_" | grep -E $(echo "_"$TIER | sed 's/,/,|_/g')"," | awk -F "," '{OFS=","} {gsub("-","",$5)}1' | awk -v start="$DATEMIN" -v stop="$DATEMAX" -v clow="$CCMIN" -v chigh="$CCMAX" -F "," '$5 >= start && $5 <= stop && $6 == 01 && $12 >= clow && $12 <= chigh') fi - printf "%s" "$LINKS" > filtered_metadata.txt + METAFNAME=$METADIR/csd_metadata_$(date +%FT%H-%M-%S).txt + printf "%s" "$LINKS" > $METAFNAME case $SATELLITE in sentinel2) TOTALSIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$6/1048576} END {printf "%f", s}') ;; landsat) TOTALSIZE=$(printf "%s" "$LINKS" | awk -F "," '{s+=$17/1048576} END {printf "%f", s}') ;; esac - NSCENES=$(sed -n '$=' filtered_metadata.txt) - rm filtered_metadata.txt + NSCENES=$(sed -n '$=' $METAFNAME) + + if [ $KEEPMETA -eq 0 ]; then + rm $METAFNAME + else + sed -i "1 s/^/$(head -n 1 $METACAT)\n/" $METAFNAME + fi # ============================================================ @@ -518,7 +544,7 @@ get_data() { fi printf "\e[500D\e[2A\e[2KDownloading "$SCENEID"("$ITER" of "$NSCENES")...\e[2B" - gsutil -m -q cp -c $POOL"/download_log.txt" -R $URL $TILEPATH + gsutil -m -q cp -c -L $POOL"/download_log.txt" -R $URL $TILEPATH lockfile-create $QUEUE echo "$SCENEPATH QUEUED" >> $QUEUE @@ -534,10 +560,10 @@ if [[ $LANDSAT -eq 1 && $SENTINEL -eq 1 ]]; then printf "%s\n" "" "Landsat and Sentinel-2 data 
requested." "Landsat data will be queried and downloaded first." fi if [ $LANDSAT -eq 1 ]; then - get_data landsat Landsat + get_data landsat fi if [ $SENTINEL -eq 1 ]; then - get_data sentinel2 Sentinel-2 + get_data sentinel2 fi printf "%s\n" "" "Done." "" diff --git a/bash/force-level1-esa.sh b/bash/force-level1-sentinel2.sh similarity index 84% rename from bash/force-level1-esa.sh rename to bash/force-level1-sentinel2.sh index bc17a38c..c4415d96 100755 --- a/bash/force-level1-esa.sh +++ b/bash/force-level1-sentinel2.sh @@ -24,66 +24,59 @@ # this script downloads Sentinel-2 from ESA and maintains a clean Level-1 datapool -show_help() { -cat << HELP +printf "%s\n" "" "This tool is deprecated and will only receive minimal support in the future." "Please consider using force-level1-csd instead" "" -Usage: `basename $0` [-d] Level-1-Datapool queue Boundingbox - starttime endtime min-cc max-cc +EXPECTED_ARGS=7 +MAXIMUM_ARGS=8 -Mandatory arguments: - Level-1-Datapool - An existing directory, your files will be stored here - - queue - Downloaded files are appended to a file queue, which is needed for - the Level 2 processing. The file doesn't need to exist. If it exists, - new lines will be appended on successful ingestion - - Boundingbox - The coordinates of your study area: \"X1/Y1,X2/Y2,X3/Y3,...,X1/Y1\" - The box must be closed (first X/Y = last X/Y). X/Y must be given as - decimal degrees with negative values for West and South coordinates. 
- Note that the box doesn't have to be square, you can specify a polygon - - starttime endtime - Dates must be given as YYYY-MM-DD - - min-cc max-cc - The cloud cover range must be given in % +# if wrong number of input args, stop +if [ $# -ne $EXPECTED_ARGS ] && [ $# -ne $MAXIMUM_ARGS ]; then + echo "" + echo "Usage: `basename $0` Level-1-Datapool queue Boundingbox" + echo " starttime endtime min-cc max-cc [dry]" + echo "" + echo " Level-1-Datapool" + echo " An existing directory, your files will be stored here" + echo "" + echo " queue" + echo " Downloaded files are appended to a file queue, which is needed for" + echo " the Level 2 processing. The file doesn't need to exist. If it exists," + echo " new lines will be appended on successful ingestion" + echo "" + echo " Boundingbox" + echo " The coordinates of your study area: \"X1/Y1,X2/Y2,X3/Y3,...,X1/Y1\"" + echo " The box must be closed (first X/Y = last X/Y). X/Y must be given as" + echo " decimal degrees with negative values for West and South coordinates." + echo " Note that the box doesn't have to be square, you can specify a polygon" + echo "" + echo " starttime endtime" + echo " Dates must be given as YYYY-MM-DD" + echo "" + echo " min-cc max-cc" + echo " The cloud cover range must be given in %" + echo "" + echo " dry will trigger a dry run that will only return the number of images" + echo " and their total data volume" + echo "" + echo " Your ESA credentials must be placed in \$HOME/.scihub" + echo " (OR in \$FORCE_CREDENTIALS/.scihub if the FORCE_CREDENTIALS environment" + echo " variable is defined)." 
+ echo " First line: User name" + echo " Second line: Password, special characters might be problematic" + echo "" + exit +fi -Optional arguments (always placed BEFORE mandatory arguments): - -d - will trigger a dry run that will only return the number of images - and their total data volume - - -h|--help - show this help - - Your ESA credentials must be placed in \$HOME/.scihub - (OR in \$FORCE_CREDENTIALS/.scihub if the FORCE_CREDENTIALS environment - variable is defined). - First line: User name - Second line: Password, special characters might be problematic - -HELP -exit 1 -} -# check for optional args and set dryrun var -case $1 in - -d) +if [ $# -eq $MAXIMUM_ARGS ]; then + if [ $8 == dry ]; then dryrun=1 - shift ;; - -h|--help) - show_help ;; - *) - dryrun=0 ;; -esac - -# if wrong number of input args, stop -if [ $# -ne 7 ]; then - printf "%s\n" "" "Invalid number of input arguments specified" - show_help + else + echo "unknown option, optional argument 7 must be dry" + exit + fi +else + dryrun=0 fi POOL=$1 From 55a4dc00539970ca290d533bce8810b5538ad6fd Mon Sep 17 00:00:00 2001 From: David Frantz <32633788+davidfrantz@users.noreply.github.com> Date: Thu, 10 Sep 2020 14:01:42 +0200 Subject: [PATCH 63/78] Update force-level1-csd.sh Small changes to usage. Question: what happens in case AOI is a line? All tiles touched by this line? --- bash/force-level1-csd.sh | 43 ++++++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 22 deletions(-) diff --git a/bash/force-level1-csd.sh b/bash/force-level1-csd.sh index caba67db..7e916ebf 100755 --- a/bash/force-level1-csd.sh +++ b/bash/force-level1-csd.sh @@ -62,25 +62,27 @@ Mandatory arguments: lines will be appended on successful ingestion area of interest - (1) The coordinates of your study area: - Path to a file containing one coordinate per line or - as comma separated command line input + (1) user-supplied coordinates of your study area: The polygon must be closed (first X/Y = last X/Y). 
X/Y must be given as decimal degrees with negative values for West and South coordinates. - Comma-separated if provided on command line, one line per coordinate pair - if provided in a text file. + You can either give the path to a file, or give the coordinates on the command line. + If in the file, put one coordinate per line. + If on the command line, give a comma separated list. (2) a shapefile (point/polygon/line). On-the-fly reprojection is provided, but using EPSG4326 is recommended. - (3) Path/Row (Landsat): "PPPRR" - Make sure to keep leading zeros - correct: 181034, incorrect: 18134 - Tile name (Sentinel-2): "TXXXXX" - Make sure to keep the leading T before the MGRS tile number - Comma-separated if provided on command line, one line per tile if - provided in a text file. - - + (3) Scene identifier. + Landsat: Path/Row as "PPPRRR". Make sure to keep leading zeros: + correct: 181034, incorrect: 18134 + Sentinel-2: MGRS tile as "TXXXXX". Make sure to keep the leading T before the MGRS tile number. + You can either give the path to a file, or give the IDs on the command line. + If in the file, put one ID per line. + If on the command line, give a comma separated list. + Optional arguments (always placed BEFORE mandatory arguments): + -h | --help + Show this help + -c | --cloudcover minimum,maximum The cloud cover range must be specified in % @@ -91,9 +93,6 @@ Optional arguments (always placed BEFORE mandatory arguments): Dates must be given in the following format: YYYYMMDD,YYYYMMDD Default: 19700101,today - -h | --help - Show this help - -n | --no-act Will trigger a dry run that will only return the number of images and their total data volume @@ -106,20 +105,20 @@ Optional arguments (always placed BEFORE mandatory arguments): -s | --sensor Sensors to include in the query, comma-separated. 
Valid sensors: - Landsat Sentinel-2 - LT04 - Landsat 4 TM S2A - LT05 - Landsat 5 TM S2B + Landsat Sentinel + LT04 - Landsat 4 TM S2A - Sentinel-2A MSI + LT05 - Landsat 5 TM S2B - Sentinel-2B MSI LE07 - Landsat 7 ETM+ LC08 - Landsat 8 OLI Default: LT04,LT05,LE07,LC08,S2A,S2B - + -t | --tier Landsat collection tier level. Valid tiers: T1,T2,RT Default: T1 -u | --update Will update the metadata catalogue (download and extract from GCS) - Only specify the metadata dir as mandatory argument when using this option. + If this option is used, only one mandatory argument is expected (metadata-dir). Use the -s option to only update Landsat or Sentinel-2 metadata. HELP @@ -567,4 +566,4 @@ if [ $SENTINEL -eq 1 ]; then fi printf "%s\n" "" "Done." "" -exit 0 \ No newline at end of file +exit 0 From f044b244119f95ca19a18df8f776f2eff548399c Mon Sep 17 00:00:00 2001 From: David Frantz <32633788+davidfrantz@users.noreply.github.com> Date: Thu, 10 Sep 2020 14:03:44 +0200 Subject: [PATCH 64/78] Update force-level1-landsat.sh --- bash/force-level1-landsat.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/bash/force-level1-landsat.sh b/bash/force-level1-landsat.sh index a1c2a41a..8b6ed231 100755 --- a/bash/force-level1-landsat.sh +++ b/bash/force-level1-landsat.sh @@ -39,6 +39,7 @@ EXP_ARGS=4 MAX_ARGS=5 +printf "%s\n" "" "This tool is deprecated and will only receive minimal support in the future." 
"Please consider using force-level1-csd instead" "" if [ $# -ne $EXP_ARGS ] && [ $# -ne $MAX_ARGS ]; then echo "Usage: `basename $0` from to queue cp/mv [dry]" From 7c8503de58ea834dffd394575747c446e299117a Mon Sep 17 00:00:00 2001 From: David Frantz <32633788+davidfrantz@users.noreply.github.com> Date: Thu, 10 Sep 2020 14:05:17 +0200 Subject: [PATCH 65/78] Update Makefile --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 3685c2c2..8c19aca3 100755 --- a/Makefile +++ b/Makefile @@ -24,7 +24,7 @@ # Modify the following lines to match your needs # Installation directory -BINDIR=/usr/local/bin +BINDIR=/develop # Libraries GDAL=-I/usr/include/gdal -L/usr/lib -Wl,-rpath=/usr/lib From 6d50aa77cc2ce952c81ac140fb94fe854b40c863 Mon Sep 17 00:00:00 2001 From: David Frantz <32633788+davidfrantz@users.noreply.github.com> Date: Thu, 10 Sep 2020 14:12:36 +0200 Subject: [PATCH 66/78] Update Dockerfile Added license and copyright to Dockerfile --- Dockerfile | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 5139eb72..5aa8a436 100755 --- a/Dockerfile +++ b/Dockerfile @@ -1,3 +1,27 @@ +########################################################################## +# +# This file is part of FORCE - Framework for Operational Radiometric +# Correction for Environmental monitoring. +# +# Copyright (C) 2013-2020 David Frantz +# +# FORCE is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# FORCE is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with FORCE. If not, see . +# +########################################################################## + +# Copyright (C) 2020 Gergely Padányi-Gulyás (github user fegyi001) + FROM ubuntu:18.04 as builder # Install folder @@ -67,4 +91,4 @@ RUN rm -rf $INSTALL_DIR RUN apt-get purge -y --auto-remove apt-utils cmake git build-essential software-properties-common # Test FORCE run -RUN force \ No newline at end of file +RUN force From e0239819a1fbbfb7dfbd8e54bca90a09b1c2aba0 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 10 Sep 2020 14:22:50 +0200 Subject: [PATCH 67/78] added dependencies --- docs/source/setup/depend.rst | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/docs/source/setup/depend.rst b/docs/source/setup/depend.rst index 56477055..398792e9 100755 --- a/docs/source/setup/depend.rst +++ b/docs/source/setup/depend.rst @@ -54,7 +54,7 @@ FORCE can also be installed on other Liunx distributions (e.g. CentOS). The inst sudo apt-get install libgdal1-dev gdal-bin python-gdal - *There are known problems with earlier releases (< 1.10.0), there are no known problems with later releases. + *There are known problems with earlier releases (< 1.10.0). FORCE < 3.5 should not be used with GDAL >= 3.0. However, the reporting of errors and warnings differs between versions, and GDAL may report many non-critical errors to stderr (e.g. ``ERROR 6 - not supported``, please refer to the GDAL error code description whether these are critical errors or just warnings that can be ignored). Please note that GDAL is a very dynamic development, therefore it is hard to develop applications that cover all cases and exceptions in all possible GDAL versions and builds. If you come across a GDAL version that does not work, please inform us.* * The **GSL library** is used for optimization purposes. 
@@ -98,6 +98,23 @@ FORCE can also be installed on other Linux distributions (e.g. CentOS). The inst sudo apt-get install rename +* **python** is used by a couple of auxiliary scripts. + python should already be installed. If not, you can install a new python version like this: + + .. code-block:: bash + + sudo apt install software-properties-common + sudo add-apt-repository ppa:deadsnakes/ppa + sudo apt-get install python3.8 python3-pip + echo 'alias python=python3' >> ~/.bashrc + echo 'alias pip=pip3' >> ~/.bashrc + +* Some **python packages** are needed: + + .. code-block:: bash + + pip install numpy gsutil + * **OpenCV** is used for machine learning and image processing tasks We developed the code using OpenCV v. 4.1. The installation process might need some more dependencies, e.g. ``cmake``. From b026534e3fec54191d4edc658a0c067ba4af4ffb Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 10 Sep 2020 14:28:36 +0200 Subject: [PATCH 68/78] removed printfs from force-mosaic --- bash/force-mosaic.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bash/force-mosaic.sh b/bash/force-mosaic.sh index de8e9009..d0be1f46 100755 --- a/bash/force-mosaic.sh +++ b/bash/force-mosaic.sh @@ -64,7 +64,7 @@ function mosaic_this(){ bin=$3 LIST="force-mosaic_list_$2.txt" - echo $bin + #echo $bin echo "mosaicking" $prd @@ -114,6 +114,8 @@ function mosaic_this(){ echo "deleting file listing failed." 
exit fi + + echo "" } From 4e33c5f78cf6cde3f49f8b7376cae320748a71f3 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 10 Sep 2020 14:39:15 +0200 Subject: [PATCH 69/78] added force-level1-csd to docs --- docs/source/components/lower-level/index.rst | 6 ++++-- docs/source/components/lower-level/level1/index.rst | 11 +++++++---- .../components/lower-level/level1/level1-csd.rst | 7 +++++++ .../components/lower-level/level1/level1-landsat.rst | 4 ++-- .../lower-level/level1/level1-sentinel2.rst | 4 ++-- 5 files changed, 22 insertions(+), 10 deletions(-) create mode 100755 docs/source/components/lower-level/level1/level1-csd.rst diff --git a/docs/source/components/lower-level/index.rst b/docs/source/components/lower-level/index.rst index 34802378..eefc6c6a 100755 --- a/docs/source/components/lower-level/index.rst +++ b/docs/source/components/lower-level/index.rst @@ -11,9 +11,11 @@ Lower Level +========+=========================+=======+=============================================================================================================================================================================================================================+ | L1AS | The FORCE :ref:`l1as` is intended to assist in organizing and maintaining a clean and consistent Level 1 data pool, downloading of Sentinel-2 data, and maintaining file queues | + +-------------------------+-------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| | :ref:`level1-landsat` | 1 | Maintenance of Level-1 data pool, Landsat | +| | :ref:`level1-csd` | 1 | Download of data from cloud storage, and maintenance of Level-1 data pool, works with Landsat and Sentinel-2 | + 
+-------------------------+-------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| | :ref:`level1-sentinel2` | 1 | Download of data and maintenance of Level-1 data pool, Sentinel-2 | +| | :ref:`level1-landsat` | 1 | Maintenance of Level-1 data pool, Landsat (deprecated, consider using :ref:`level1-csd`) | ++ +-------------------------+-------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ +| | :ref:`level1-sentinel2` | 1 | Download of data and maintenance of Level-1 data pool, Sentinel-2 (deprecated, consider using :ref:`level1-csd`) | +--------+-------------------------+-------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | L2PS | The FORCE :ref:`l2ps` is intended to generate Analysis Ready Data (ARD), i.e. harmonized, standardized and radiometrically consistent Level 2 products. 
This includes cloud and cloud shadow detection, radiometric correction and data cubing | + +-------------------------+-------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ diff --git a/docs/source/components/lower-level/level1/index.rst b/docs/source/components/lower-level/level1/index.rst index 6240ff6a..8a441c02 100755 --- a/docs/source/components/lower-level/level1/index.rst +++ b/docs/source/components/lower-level/level1/index.rst @@ -14,21 +14,24 @@ In addition, FORCE L1AS assists in building and updating the file queues needed **Figure.** FORCE Level 1 Archiving Suite (L1AS) workflow. -The main difference is that Landsat data need to be downloaded manually, while Sentinel-2 images are automatically retrieved by FORCE. +As of ``FORCE v. 3.5`` :ref:`level1-csd` replaces :ref:`level1-landsat` and :ref:`level1-sentinel2`. +The deprecated tools will be removed in a future FORCE version. We recommend to change your processes accordingly. + On successful ingestion, the image is appended to a :ref:`queue`, which controls Level 2 processing. The file queue is a text file that holds the full path to the image, as well as a processing-state flag. This flag is either ``QUEUED`` or ``DONE``, which means that it is enqueued for Level 2 processing or was already processed and will be ignored next time. -+-----------------------+-------------------------+--------------+ -+ :ref:`level1-landsat` + :ref:`level1-sentinel2` + :ref:`queue` + -+-----------------------+-------------------------+--------------+ ++-------------------+-----------------------+-------------------------+--------------+ ++ :ref:`level1-csd` + :ref:`level1-landsat` + :ref:`level1-sentinel2` + :ref:`queue` + ++-------------------+-----------------------+-------------------------+--------------+ .. 
toctree:: :maxdepth: 1 :hidden: + level1-csd.rst level1-landsat.rst level1-sentinel2.rst queue.rst diff --git a/docs/source/components/lower-level/level1/level1-csd.rst b/docs/source/components/lower-level/level1/level1-csd.rst new file mode 100755 index 00000000..0e43cbb3 --- /dev/null +++ b/docs/source/components/lower-level/level1/level1-csd.rst @@ -0,0 +1,7 @@ +.. _level1-csd: + +force-level1-csd +================ + +Usage to follow. + diff --git a/docs/source/components/lower-level/level1/level1-landsat.rst b/docs/source/components/lower-level/level1/level1-landsat.rst index 13de5782..efb8452c 100755 --- a/docs/source/components/lower-level/level1/level1-landsat.rst +++ b/docs/source/components/lower-level/level1/level1-landsat.rst @@ -1,7 +1,7 @@ .. _level1-landsat: -level1-landsat -============== +force-level1-landsat +==================== FORCE can process Level 1 Landsat data, generated using the Level 1 Product Generation System (LPGS) of the U.S. Geological Survey (USGS). diff --git a/docs/source/components/lower-level/level1/level1-sentinel2.rst b/docs/source/components/lower-level/level1/level1-sentinel2.rst index 40c58e55..f1ba9726 100755 --- a/docs/source/components/lower-level/level1/level1-sentinel2.rst +++ b/docs/source/components/lower-level/level1/level1-sentinel2.rst @@ -1,7 +1,7 @@ .. _level1-sentinel2: -level1-sentinel2 -================ +force-level1-sentinel2 +====================== FORCE can process Level 1C Sentinel-2A and Sentinel-2B MSI data as provided by ESA through their data hub. 
From 864ff0f9ee3f3ee4bdae5092d89392b08004b920 Mon Sep 17 00:00:00 2001 From: David Frantz <32633788+davidfrantz@users.noreply.github.com> Date: Thu, 10 Sep 2020 14:58:52 +0200 Subject: [PATCH 70/78] Update force-level1-csd.sh --- bash/force-level1-csd.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bash/force-level1-csd.sh b/bash/force-level1-csd.sh index 7e916ebf..855d2847 100755 --- a/bash/force-level1-csd.sh +++ b/bash/force-level1-csd.sh @@ -78,7 +78,7 @@ Mandatory arguments: If in the file, put one ID per line. If on the command line, give a comma separated list. -Optional arguments (always placed BEFORE mandatory arguments): +Optional arguments: -h | --help Show this help From 69cd107390eacbca7ecdbaa0419ee56fb6e46da1 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Thu, 10 Sep 2020 14:59:22 +0200 Subject: [PATCH 71/78] updated FORCE entry point with new tools --- src/aux-level/_main.c | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/src/aux-level/_main.c b/src/aux-level/_main.c index 736f5c40..6bb226ce 100755 --- a/src/aux-level/_main.c +++ b/src/aux-level/_main.c @@ -89,15 +89,25 @@ char user[NPOW_10]; printf("\nTutorials are available at davidfrantz.github.io/tutorials\n"); printf("\nFORCE consists of several components:\n" + "\nLevel 1 Archiving System (L1AS)\n" + "+ force-level1-csd Download from cloud storage + maintenance of Level 1 " + "Landsat and Sentinel-2 data pool\n" "+ force-level1-landsat Maintenance of Level 1 Landsat " - "data pool\n" - "+ force-level1-sentinel2 Download + maintenance of Level 1 " - "Sentinel-2 data pool\n" - "+ force-parameter Generation of parameter files\n" + "data pool (deprecated)\n" + "+ force-level1-sentinel2 Download from ESA + maintenance of Level 1 " + "Sentinel-2 data pool (deprecated)\n" + "\nLevel 2 Processing System (L2PS)\n" "+ force-level2 Level 2 processing of image archive\n" "+ force-l2ps Level 2 processing of single image\n" + "\nWater Vapor 
Database (WVDB)\n" + "+ force-lut-modis Generation and maintenance of water vapor database " + "using MODIS products\n" + "\nHigher Level Processing System (HLPS)\n" "+ force-higher-level Higher level processing (compositing, " "time series analysis, ...)\n" + "\nAuxiliary (AUX)\n" + "+ force-parameter Generation of parameter files\n" + "+ force-magic-parameters Replace variables in parameter file with vectors\n" "+ force-train Training (and validation) of Machine " "Learning models\n" "+ force-synthmix Synthetic Mixing of training data\n" @@ -111,7 +121,9 @@ char user[NPOW_10]; "cube format\n" "+ force-procmask Processing masks from raster images\n" "+ force-pyramid Generation of image pyramids\n" - "+ force-mosaic Mosaicking of image chips\n"); + "+ force-mosaic Mosaicking of image chips\n" + "+ force-stack Stack images, works with 4D data model\n" + "+ force-mdcp Copy FORCE metadata from one file to another\n"); printf("\n##########################################################################\n\n"); From 7bf4d7aa2dd743a25967a67379d557dd90bd87c9 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Fri, 11 Sep 2020 08:39:00 +0200 Subject: [PATCH 72/78] added another amplitude to polarmetrics --- src/cross-level/enum-cl.c | 12 ++++++------ src/cross-level/enum-cl.h | 8 ++++---- src/higher-level/polar-hl.c | 7 +++++-- 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/src/cross-level/enum-cl.c b/src/cross-level/enum-cl.c index 388a3af2..ea0d8bbe 100755 --- a/src/cross-level/enum-cl.c +++ b/src/cross-level/enum-cl.c @@ -124,12 +124,12 @@ const tagged_enum_t _TAGGED_ENUM_POL_[_POL_LENGTH_] = { { _POL_VEM_, "VEM" }, { _POL_VLM_, "VLM" }, { _POL_VPS_, "VPS" }, { _POL_VSS_, "VSS" }, { _POL_VMS_, "VMS" }, { _POL_VES_, "VES" }, { _POL_VEV_, "VEV" }, { _POL_VAV_, "VAV" }, { _POL_VLV_, "VLV" }, - { _POL_VBL_, "VBL" }, { _POL_VSA_, "VSA" }, { _POL_VPA_, "VPA" }, - { _POL_VGA_, "VGA" }, { _POL_VGV_, "VGV" }, { _POL_DPY_, "DPY" }, - { _POL_DPV_, "DPV" }, { 
_POL_IST_, "IST" }, { _POL_IBL_, "IBL" }, - { _POL_IBT_, "IBT" }, { _POL_IGS_, "IGS" }, { _POL_IRR_, "IRR" }, - { _POL_IFR_, "IFR" }, { _POL_RAR_, "RAR" }, { _POL_RAF_, "RAF" }, - { _POL_RMR_, "RMR" }, { _POL_RMF_,"RMF" }}; + { _POL_VBL_, "VBL" }, { _POL_VGA_, "VGA" }, { _POL_VSA_, "VSA" }, + { _POL_VPA_, "VPA" }, { _POL_VGM_, "VGM" }, { _POL_VGV_, "VGV" }, + { _POL_DPY_, "DPY" }, { _POL_DPV_, "DPV" }, { _POL_IST_, "IST" }, + { _POL_IBL_, "IBL" }, { _POL_IBT_, "IBT" }, { _POL_IGS_, "IGS" }, + { _POL_IRR_, "IRR" }, { _POL_IFR_, "IFR" }, { _POL_RAR_, "RAR" }, + { _POL_RAF_, "RAF" }, { _POL_RMR_, "RMR" }, { _POL_RMF_, "RMF" }}; const tagged_enum_t _TAGGED_ENUM_TAIL_[_TAIL_LENGTH_] = { { _TAIL_LEFT_, "LEFT" }, { _TAIL_TWO_, "TWO" }, { _TAIL_RIGHT_, "RIGHT" }}; diff --git a/src/cross-level/enum-cl.h b/src/cross-level/enum-cl.h index 45ebfe68..21fadf54 100755 --- a/src/cross-level/enum-cl.h +++ b/src/cross-level/enum-cl.h @@ -194,10 +194,10 @@ enum { _LSP_DEM_, _LSP_DSS_, _LSP_DRI_, _LSP_DPS_, _LSP_DFI_, _LSP_DES_, enum { _POL_DEM_, _POL_DLM_, _POL_DPS_, _POL_DSS_, _POL_DMS_, _POL_DES_, _POL_DEV_, _POL_DAV_, _POL_DLV_, _POL_LTS_, _POL_LGS_, _POL_LGV_, _POL_VEM_, _POL_VLM_, _POL_VPS_, _POL_VSS_, _POL_VMS_, _POL_VES_, - _POL_VEV_, _POL_VAV_, _POL_VLV_, _POL_VBL_, _POL_VSA_, _POL_VPA_, - _POL_VGA_, _POL_VGV_, _POL_DPY_, _POL_DPV_, _POL_IST_, _POL_IBL_, - _POL_IBT_, _POL_IGS_, _POL_IRR_, _POL_IFR_, _POL_RAR_, _POL_RAF_, - _POL_RMR_, _POL_RMF_, _POL_LENGTH_ }; + _POL_VEV_, _POL_VAV_, _POL_VLV_, _POL_VBL_, _POL_VGA_, _POL_VSA_, + _POL_VPA_, _POL_VGM_, _POL_VGV_, _POL_DPY_, _POL_DPV_, _POL_IST_, + _POL_IBL_, _POL_IBT_, _POL_IGS_, _POL_IRR_, _POL_IFR_, _POL_RAR_, + _POL_RAF_, _POL_RMR_, _POL_RMF_, _POL_LENGTH_ }; // folding enum { _FLD_YEAR_, _FLD_QUARTER_, _FLD_MONTH_, _FLD_WEEK_, _FLD_DOY_, _FLD_LENGTH_ }; diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index 09fb11a4..c66e2c9f 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ 
-629,12 +629,15 @@ float green_val, base_val; if (pol->use[_POL_VEV_]) ts->pol_[_POL_VEV_][y][p] = (short)vector[_EARLY_].val; if (pol->use[_POL_VAV_]) ts->pol_[_POL_VAV_][y][p] = (short)vector[_GROW_].val; if (pol->use[_POL_VLV_]) ts->pol_[_POL_VLV_][y][p] = (short)vector[_LATE_].val; - if (pol->use[_POL_VSA_]) ts->pol_[_POL_VSA_][y][p] = (short)(timing[_PEAK_].val - green_val); - if (pol->use[_POL_VPA_]) ts->pol_[_POL_VPA_][y][p] = (short)(timing[_PEAK_].val - timing[_MID_].val); if (pol->use[_POL_VBL_]) ts->pol_[_POL_VBL_][y][p] = (short)base_val; if (pol->use[_POL_VGA_]) ts->pol_[_POL_VGA_][y][p] = (short)recurrence[0]; if (pol->use[_POL_VGV_]) ts->pol_[_POL_VGV_][y][p] = (short)standdev(recurrence[1], n_window[_GROW_]); + // amplitude parameters + if (pol->use[_POL_VGA_]) ts->pol_[_POL_VGA_][y][p] = (short)(timing[_PEAK_].val - green_val); + if (pol->use[_POL_VPA_]) ts->pol_[_POL_VPA_][y][p] = (short)(timing[_PEAK_].val - timing[_MID_].val); + if (pol->use[_POL_VSA_]) ts->pol_[_POL_VSA_][y][p] = (short)(timing[_PEAK_].val - base_val); + // integral parameters if (pol->use[_POL_IST_]) ts->pol_[_POL_IST_][y][p] = (short)(integral[_SEASONAL_INT_] / 365.0); // integral, unit of time: year if (pol->use[_POL_IBL_]) ts->pol_[_POL_IBL_][y][p] = (short)(integral[_LATENT_INT_] / 365.0); // integral, unit of time: year From 3084bcd830326d2f89ef884c5c37e9f02daffba3 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Fri, 11 Sep 2020 09:20:09 +0200 Subject: [PATCH 73/78] updated parameterfile generator for polarmetrics --- src/aux-level/param-aux.c | 108 +++++++++++++++++++++++++++++++++++- src/aux-level/param-aux.h | 1 + src/higher-level/polar-hl.c | 2 +- 3 files changed, 107 insertions(+), 4 deletions(-) diff --git a/src/aux-level/param-aux.c b/src/aux-level/param-aux.c index 7a84bb81..8c77ffb7 100755 --- a/src/aux-level/param-aux.c +++ b/src/aux-level/param-aux.c @@ -1508,7 +1508,7 @@ void write_par_hl_fold(FILE *fp, bool verbose){ void write_par_hl_lsp(FILE *fp, bool 
verbose){ - fprintf(fp, "\n# LAND SURFACE PHENOLOGY PARAMETERS\n"); + fprintf(fp, "\n# LAND SURFACE PHENOLOGY PARAMETERS - SPLITS-BASED\n"); fprintf(fp, "# ------------------------------------------------------------------------\n"); fprintf(fp, "# The Land Surface Phenology (LSP) options are only available if FORCE was\n"); fprintf(fp, "# compiled with SPLITS (see installation section in the FORCE user guide).\n"); @@ -1598,7 +1598,7 @@ void write_par_hl_lsp(FILE *fp, bool verbose){ if (verbose){ fprintf(fp, "# Compute and output a linear trend analysis on the requested Phenometric time\n"); - fprintf(fp, "# series? Note that the OUTPUT_FBX parameters don't need to be TRUE to do this.\n"); + fprintf(fp, "# series? Note that the OUTPUT_LSP parameters don't need to be TRUE to do this.\n"); fprintf(fp, "# See also the TREND PARAMETERS block below.\n"); fprintf(fp, "# Type: Logical. Valid values: {TRUE,FALSE}\n"); } @@ -1607,7 +1607,7 @@ void write_par_hl_lsp(FILE *fp, bool verbose){ if (verbose){ fprintf(fp, "# Compute and output an extended Change, Aftereffect, Trend (CAT) analysis on\n"); fprintf(fp, "# the requested Phenometric time series?\n"); - fprintf(fp, "# Note that the OUTPUT_FBX parameters don't need to be TRUE to do this.\n"); + fprintf(fp, "# Note that the OUTPUT_LSP parameters don't need to be TRUE to do this.\n"); fprintf(fp, "# See also the TREND PARAMETERS block below.\n"); fprintf(fp, "# Type: Logical. Valid values: {TRUE,FALSE}\n"); } @@ -1617,6 +1617,108 @@ void write_par_hl_lsp(FILE *fp, bool verbose){ } +/** This function writes parameters into a parameter skeleton file: higher ++++ level polarmetrics pars +--- fp: parameter skeleton file +--- verbose: add description, or use more compact format for experts? 
++++ Return: void ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++**/ +void write_par_hl_pol(FILE *fp, bool verbose){ + + + fprintf(fp, "\n# LAND SURFACE PHENOLOGY PARAMETERS - POLAR-BASED\n"); + fprintf(fp, "# ------------------------------------------------------------------------\n"); + + + if (verbose){ + fprintf(fp, "# Threshold for detecting Start of Season in the cumulative time series.\n"); + fprintf(fp, "# Type: Float. Valid range: ]0,1[\n"); + } + fprintf(fp, "POL_START_THRESHOLD = 0.2\n"); + + if (verbose){ + fprintf(fp, "# Threshold for detecting Mid of Season in the cumulative time series.\n"); + fprintf(fp, "# Type: Float. Valid range: ]0,1[\n"); + } + fprintf(fp, "POL_MID_THRESHOLD = 0.5\n"); + + if (verbose){ + fprintf(fp, "# Threshold for detecting End of Season in the cumulative time series.\n"); + fprintf(fp, "# Type: Float. Valid range: ]0,1[\n"); + } + fprintf(fp, "POL_END_THRESHOLD = 0.8\n"); + + + if (verbose){ + fprintf(fp, "# Should the start of each phenological year be adapted?\n"); + fprintf(fp, "# If FALSE, the start is static, i.e. Date of Early Minimum and Date of Late\n"); + fprintf(fp, "# Minimum are the same for all years and 365 days apart. If TRUE, they differ\n"); + fprintf(fp, "# from year to year and a phenological year is not forced to be 365 days long.\n"); + fprintf(fp, "# Type: Logical. Valid values: {TRUE,FALSE}\n"); + } + fprintf(fp, "POL_ADAPTIVE = TRUE\n"); + + if (verbose){ + fprintf(fp, "# Which Polarmetrics should be computed? 
There will be a POL output file for\n"); + fprintf(fp, "# each metric (with years as bands).\n"); + fprintf(fp, "# Currently available are the dates of the early minimum, late minimum, peak of season,\n"); + fprintf(fp, "# start of season, mid of season, end of season, early average vector, average vector,\n"); + fprintf(fp, "# late average vector; lengths of the total season, green season, between average vectors;\n"); + fprintf(fp, "# values of the early minimum, late minimum, peak of season, start of season, mid of season,\n"); + fprintf(fp, "# end of season, early average vector, average vector, late average vector, base level,\n"); + fprintf(fp, "# green amplitude, seasonal amplitude, peak amplitude, green season mean, green season\n"); + fprintf(fp, "# variability, dates of start of phenological\n"); + fprintf(fp, "# year and its longterm average; integrals of the total season, base level, base+total,\n"); + fprintf(fp, "# green season, rising rate, falling rate; rates of average rising, average falling, maximum\n"); + fprintf(fp, "# rising, maximum falling.\n"); + fprintf(fp, "# Type: Character list. Valid values: {DEM,DLM,DPS,DSS,DMS,DES,DEV,DAV,DLV,LTS,\n"); + fprintf(fp, "# LGS,LGV,VEM,VLM,VPS,VSS,VMS,VES,VEV,VAV,VLV,VBL,VGA,VSA,VPA,VGM,VGV,DPY,DPV,\n"); + fprintf(fp, "# IST,IBL,IBT,IGS,IRR,IFR,RAR,RAF,RMR,RMF}\n"); + } + fprintf(fp, "POL = VSS VPS VES VSA RMR IGS\n"); + + if (verbose){ + fprintf(fp, "# Standardize the POL time series with pixel mean and/or standard deviation?\n"); + fprintf(fp, "# Type: Logical. Valid values: {NONE,NORMALIZE,CENTER}\n"); + } + fprintf(fp, "STANDARDIZE_POL = NONE\n"); + + if (verbose){ + fprintf(fp, "# Output the polar-transformed time series? These are layer stacks of cartesian X-\n"); + fprintf(fp, "# and Y-coordinates for each interpolated date. This results in two files, product\n"); + fprintf(fp, "# IDs are PCX and PCY.\n"); + fprintf(fp, "# Type: Logical. 
Valid values: {TRUE,FALSE}\n"); + } + fprintf(fp, "OUTPUT_PCT = FALSE\n"); + + if (verbose){ + fprintf(fp, "# Output the Polarmetrics? These are layer stacks per polarmetric with as many\n"); + fprintf(fp, "# bands as years.\n"); + fprintf(fp, "# Type: Logical. Valid values: {TRUE,FALSE}\n"); + } + fprintf(fp, "OUTPUT_POL = FALSE\n"); + + if (verbose){ + fprintf(fp, "# Compute and output a linear trend analysis on the requested Polarmetric time\n"); + fprintf(fp, "# series? Note that the OUTPUT_POL parameters don't need to be TRUE to do this.\n"); + fprintf(fp, "# See also the TREND PARAMETERS block below.\n"); + fprintf(fp, "# Type: Logical. Valid values: {TRUE,FALSE}\n"); + } + fprintf(fp, "OUTPUT_TRO = FALSE\n"); + + if (verbose){ + fprintf(fp, "# Compute and output an extended Change, Aftereffect, Trend (CAT) analysis on\n"); + fprintf(fp, "# the requested Polarmetric time series?\n"); + fprintf(fp, "# Note that the OUTPUT_POL parameters don't need to be TRUE to do this.\n"); + fprintf(fp, "# See also the TREND PARAMETERS block below.\n"); + fprintf(fp, "# Type: Logical. 
Valid values: {TRUE,FALSE}\n"); + } + fprintf(fp, "OUTPUT_CAO = FALSE\n"); + + return; +} + + /** This function writes parameters into a parameter skeleton file: higher +++ level trend pars --- fp: parameter skeleton file diff --git a/src/aux-level/param-aux.h b/src/aux-level/param-aux.h index 9aa62852..04bdb088 100755 --- a/src/aux-level/param-aux.h +++ b/src/aux-level/param-aux.h @@ -69,6 +69,7 @@ void write_par_hl_tsi(FILE *fp, bool verbose); void write_par_hl_stm(FILE *fp, bool verbose); void write_par_hl_fold(FILE *fp, bool verbose); void write_par_hl_lsp(FILE *fp, bool verbose); +void write_par_hl_pol(FILE *fp, bool verbose); void write_par_hl_trend(FILE *fp, bool verbose); void write_par_hl_cso(FILE *fp, bool verbose); void write_par_hl_imp(FILE *fp, bool verbose); diff --git a/src/higher-level/polar-hl.c b/src/higher-level/polar-hl.c index c66e2c9f..01c62642 100755 --- a/src/higher-level/polar-hl.c +++ b/src/higher-level/polar-hl.c @@ -630,7 +630,7 @@ float green_val, base_val; if (pol->use[_POL_VAV_]) ts->pol_[_POL_VAV_][y][p] = (short)vector[_GROW_].val; if (pol->use[_POL_VLV_]) ts->pol_[_POL_VLV_][y][p] = (short)vector[_LATE_].val; if (pol->use[_POL_VBL_]) ts->pol_[_POL_VBL_][y][p] = (short)base_val; - if (pol->use[_POL_VGA_]) ts->pol_[_POL_VGA_][y][p] = (short)recurrence[0]; + if (pol->use[_POL_VGM_]) ts->pol_[_POL_VGM_][y][p] = (short)recurrence[0]; if (pol->use[_POL_VGV_]) ts->pol_[_POL_VGV_][y][p] = (short)standdev(recurrence[1], n_window[_GROW_]); // amplitude parameters From 3e29b457e309862c8d71967bbc325fe300b72341 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Mon, 14 Sep 2020 16:52:50 +0200 Subject: [PATCH 74/78] adding docs --- .../components/higher-level/tsa/format.rst | 230 +++++++++++------- docs/source/setup/depend.rst | 2 +- 2 files changed, 145 insertions(+), 87 deletions(-) diff --git a/docs/source/components/higher-level/tsa/format.rst b/docs/source/components/higher-level/tsa/format.rst index 8b5b2da8..1018f2d4 100755 --- 
a/docs/source/components/higher-level/tsa/format.rst +++ b/docs/source/components/higher-level/tsa/format.rst @@ -135,6 +135,14 @@ Example filename: 1984-2020_182-274_HL_TSA_LNDLG_TCG_STM.tif + +---------+---------------------------------------------------------+ + + XXX-CAP + Extended CAT Analysis on Phenometrics + +----------------+---------+---------------------------------------------------------+ ++ 36-42 + Product Type: Polarmetrics (replace XXX with Table 3) + ++ +---------+---------------------------------------------------------+ ++ + XXX-POL + Polarmetrics + ++ +---------+---------------------------------------------------------+ ++ + XXX-TRO + Trend Analysis on Polarmetrics + ++ +---------+---------------------------------------------------------+ ++ + XXX-CAO + Extended CAT Analysis on Polarmetrics + ++----------------+---------+---------------------------------------------------------+ + 40-42 / 44-46 + File extension + + +---------+---------------------------------------------------------+ + + tif + image data in compressed GeoTiff format + @@ -164,61 +172,95 @@ Example filename: 1984-2020_182-274_HL_TSA_LNDLG_TCG_STM.tif **Table 3:** Phenology name tags -+-----+-----------------------------+ -+ Tag + Description + -+=====+=============================+ -+ DEM + Date of Early Minimum + -+-----+-----------------------------+ -+ DSS + Date of Start of Season + -+-----+-----------------------------+ -+ DRI + Date of Rising Inflection + -+-----+-----------------------------+ -+ DPS + Date of Peak of Season + -+-----+-----------------------------+ -+ DFI + Date of Falling Inflection + -+-----+-----------------------------+ -+ DES + Date of End of Season + -+-----+-----------------------------+ -+ DLM + Date of Late Minimum + -+-----+-----------------------------+ -+ LTS + Length of Total Season + -+-----+-----------------------------+ -+ LGS + Length of Green Season + -+-----+-----------------------------+ -+ VEM + Value of Early Minimum + 
-+-----+-----------------------------+ -+ VSS + Value of Start of Season + -+-----+-----------------------------+ -+ VRI + Value of Rising Inflection + -+-----+-----------------------------+ -+ VPS + Value of Peak of Season + -+-----+-----------------------------+ -+ VFI + Value of Falling Inflection + -+-----+-----------------------------+ -+ VES + Value of End of Season + -+-----+-----------------------------+ -+ VLM + Value of Late Minimum + -+-----+-----------------------------+ -+ VBL + Value of Base Level + -+-----+-----------------------------+ -+ VSA + Value of Seasonal Amplitude + -+-----+-----------------------------+ -+ IST + Integral of Total Season + -+-----+-----------------------------+ -+ IBL + Integral of Base Level + -+-----+-----------------------------+ -+ IBT + Integral of Base+Total + -+-----+-----------------------------+ -+ IGS + Integral of Green Season + -+-----+-----------------------------+ -+ RAR + Rate of Average Rising + -+-----+-----------------------------+ -+ RAF + Rate of Average Falling + -+-----+-----------------------------+ -+ RMR + Rate of Maximum Rising + -+-----+-----------------------------+ -+ RMF + Rate of Maximum Falling + -+-----+-----------------------------+ ++-----+---------------------------------------------------+-------+--------+ ++ Tag + Description + Polar + SPLITS + ++=====+===================================================+=======+========+ ++ DEM + Date of Early Minimum + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ DSS + Date of Start of Season + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ DRI + Date of Rising Inflection + + X + ++-----+---------------------------------------------------+-------+--------+ ++ DPS + Date of Peak of Season + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ DMS + Date of Mid of Season + X + + 
++-----+---------------------------------------------------+-------+--------+ ++ DFI + Date of Falling Inflection + + X + ++-----+---------------------------------------------------+-------+--------+ ++ DES + Date of End of Season + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ DLM + Date of Late Minimum + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ DEV + Date of Early Average Vector + X + + ++-----+---------------------------------------------------+-------+--------+ ++ DAV + Date of Average Vector + X + + ++-----+---------------------------------------------------+-------+--------+ ++ DLV + Date of Late Average Vector + X + + ++-----+---------------------------------------------------+-------+--------+ ++ DPY + Date of Start of Phenological Year + X + + ++-----+---------------------------------------------------+-------+--------+ ++ DPV + delta Date of adaptive Start of Phenological Year + X + + ++-----+---------------------------------------------------+-------+--------+ ++ LTS + Length of Total Season + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ LGS + Length of Green Season + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ LGV + Length of between early/late vectors + X + + ++-----+---------------------------------------------------+-------+--------+ ++ VEM + Value of Early Minimum + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ VSS + Value of Start of Season + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ VRI + Value of Rising Inflection + + X + ++-----+---------------------------------------------------+-------+--------+ ++ VPS + Value of Peak of Season + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ VMS + Value of Mid of Season + X + + 
++-----+---------------------------------------------------+-------+--------+ ++ VFI + Value of Falling Inflection + + X + ++-----+---------------------------------------------------+-------+--------+ ++ VES + Value of End of Season + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ VLM + Value of Late Minimum + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ VEV + Value of Early Average Vector + X + + ++-----+---------------------------------------------------+-------+--------+ ++ VAV + Value of Average Vector + X + + ++-----+---------------------------------------------------+-------+--------+ ++ VLV + Value of Late Average Vector + X + + ++-----+---------------------------------------------------+-------+--------+ ++ VBL + Value of Base Level + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ VSA + Value of Seasonal Amplitude + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ VGA + Value of Green Amplitude + X + + ++-----+---------------------------------------------------+-------+--------+ ++ VPA + Value of Peak Amplitude + X + + ++-----+---------------------------------------------------+-------+--------+ ++ VGM + Value of Green Mean + X + + ++-----+---------------------------------------------------+-------+--------+ ++ VGV + Value of Green Variability + X + + ++-----+---------------------------------------------------+-------+--------+ ++ IST + Integral of Total Season + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ IBL + Integral of Base Level + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ IBT + Integral of Base+Total + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ IGS + Integral of Green Season + X + X + 
++-----+---------------------------------------------------+-------+--------+ ++ IRR + Integral of Rising Rate + X + + ++-----+---------------------------------------------------+-------+--------+ ++ IFR + Integral of Falling Rate + X + + ++-----+---------------------------------------------------+-------+--------+ ++ RAR + Rate of Average Rising + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ RAF + Rate of Average Falling + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ RMR + Rate of Maximum Rising + X + X + ++-----+---------------------------------------------------+-------+--------+ ++ RMF + Rate of Maximum Falling + X + X + ++-----+---------------------------------------------------+-------+--------+ File format @@ -276,35 +318,51 @@ the FBW contains one band per week (up to 52, depends on MONTH_MIN/MAX and DOY_M the FBD product contains one band per DOY (up to 365, depends on MONTH_MIN/MAX and DOY_MIN/MAX), the 26 LSP products contain one band per year (do not overdo YEAR_MIN/MAX, this will give many useless bands). -Basic Statistics -The Basic Statistics (STA) product provides a summary of all observations (or the requested subset). It is a multi-layer image with following bands: -+ + 1 µ + Average of index values -+ + 2 σ + Standard deviation of index values -+ + 3 min + Minimum index value -+ + 4 max + Maximum index value -+ + 5 # of obs. + Number of good quality observations - -Trend Analysis -The Trend Analysis (TRD) product contains trend parameters. It is a multi-layer image with following bands: -+ + 1 µ + Average -+ + 2 a + Intercept -+ + 3 b + Trend -+ + 4 R² + R squared -+ + 5 sig. + Significance (-1, 0, 1) -+ + 6 RMSE + Root Mean Squared Error -+ + 7 MAE + Mean Absolute Error -+ + 8 max |e| + Maximum Absolute Residual -+ + 9 # of obs. 
+ Number of good quality observations - -Change, Aftereffect, Trend -The Change, Aftereffect, Trend (CAT) product (following Hird et al. 2016, DOI: 10.1109/jstars.2015.2419594) contains extended change and trend parameters. It detects one change per time series, splits the time series into three parts, and derives trend parameters: (1) complete time series (this is the same as the TRD product), (2) time series before change, and (3) time series after change. It is a multi-layer image with following bands: -+ + 1 Change + Magnitude of change -+ + 2 Time of change Timestamp of the change (depends on the input time series, i.e. year/month/week/day) -+ + 3–11 Trend parameters for complete time series (see TRD product) -+ + 12–20 Trend parameters for time series before change (see TRD product) -+ + 21–29 Trend parameters for time series after change (see TRD product) - -File format -The data are provided in (i) ENVI Standard format (flat binary images), or (ii) as GeoTiff (LZW compression with horizontal differencing). Each dataset consists of an image dataset (.dat/,tif) and additional metadata (.hdr). The image data have signed 16bit datatype and band sequential (BSQ) interleaving. Scaling factor is 10000 for most products. -The metadata (.hdr) are provided in ENVI Standard format as human-readable text using tag and value notation. Metadata include image characteristics like dimensions, data type, band interleave, coordinate reference system, map info, band names etc. 
+**Trend Analysis** + +The Trend Analysis product contains trend parameters: + ++------+-----------------------------+ ++ Band + Description + ++======+=============================+ ++ 1 + Average + ++------+-----------------------------+ ++ 2 + Intercept + ++------+-----------------------------+ ++ 3 + Trend + ++------+-----------------------------+ ++ 4 + relative change + ++------+-----------------------------+ ++ 5 + R-squared + ++------+-----------------------------+ ++ 6 + Significance (-1, 0, 1) + ++------+-----------------------------+ ++ 7 + Root Mean Squared Error + ++------+-----------------------------+ ++ 8 + Mean Absolute Error + ++------+-----------------------------+ ++ 9 + Maximum Absolute Residual + ++------+-----------------------------+ ++ 10 + Number of used observations + ++------+-----------------------------+ + + +**Change and Trend** + +The Change, Aftereffect, Trend (CAT) product (following [Hird et al. 2016](https://ieeexplore.ieee.org/document/7094220) contains extended change and trend parameters.
+CAT detects one change per time series, splits the time series into three parts, and derives trend parameters for the three parts: + ++-------+--------------------------------------------------------------------+ ++ Band + Description + ++=======+====================================================================+ ++ 1 + Magnitude of change + ++-------+--------------------------------------------------------------------+ ++ 2 + Time of change + ++-------+--------------------------------------------------------------------+ ++ 3–12 + Trend parameters for complete time series (see Trend product) + ++-------+--------------------------------------------------------------------+ ++ 13–22 + Trend parameters for time series before change (see Trend product) + ++-------+--------------------------------------------------------------------+ ++ 23–32 + Trend parameters for time series after change (see Trend product) + ++-------+--------------------------------------------------------------------+ diff --git a/docs/source/setup/depend.rst b/docs/source/setup/depend.rst index 398792e9..48361952 100755 --- a/docs/source/setup/depend.rst +++ b/docs/source/setup/depend.rst @@ -56,7 +56,7 @@ FORCE can also be installed on other Liunx distributions (e.g. CentOS). The inst *There are known problems with earlier releases (< 1.10.0). FORCE < 3.5 should not be used with GDAL >= 3.0. However, the reporting of errors and warnings differs between versions, and GDAL may report many non-critical errors to stderr (e.g. ``ERROR 6 - not supported``, please refer to the GDAL error code description whether these are critical errors or just warnings that can be ignored). Please note that GDAL is a very dynamic development, therefore it is hard to develop applications that cover all cases and exceptions in all possible GDAL versions and builds. If you come across a GDAL version that does not work, please inform us.* - + * The **GSL library** is used for optimization purposes. 
We developed the code using version 1.15. The software can be installed with: From 9dc6825024b6bf682ce5e38986e430ff700f01cb Mon Sep 17 00:00:00 2001 From: David Frantz Date: Mon, 14 Sep 2020 16:58:47 +0200 Subject: [PATCH 75/78] adding docs --- .../components/higher-level/tsa/format.rst | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/source/components/higher-level/tsa/format.rst b/docs/source/components/higher-level/tsa/format.rst index 1018f2d4..5e7d62ae 100755 --- a/docs/source/components/higher-level/tsa/format.rst +++ b/docs/source/components/higher-level/tsa/format.rst @@ -352,17 +352,17 @@ The Trend Analysis products contains trend parameters: The Change, Aftereffect, Trend (CAT) product (following [Hird et al. 2016](https://ieeexplore.ieee.org/document/7094220) contains extended change and trend parameters. CAT detects one change per time series, splits the time series into three parts, and derives trend parameters for the three parts: -+-------+--------------------------------------------------------------------+ -+ Band + Description + -+=======+====================================================================+ -+ 1 + Magnitude of change + -+-------+--------------------------------------------------------------------+ -+ 2 + Time of change + -+-------+--------------------------------------------------------------------+ -+ 3–12 + Trend parameters for complete time series (see Trend product) + -+-------+--------------------------------------------------------------------+ -+ 13–22 + Trend parameters for time series before change (see Trend product) + -+-------+--------------------------------------------------------------------+ -+ 23–32 + Trend parameters for time series after change (see Trend product) + -+-------+--------------------------------------------------------------------+ ++----------+--------------------------------------------------------------------+ ++ Band + Description + 
++==========+====================================================================+ ++ 1 + Magnitude of change + ++----------+--------------------------------------------------------------------+ ++ 2 + Time of change + ++----------+--------------------------------------------------------------------+ ++ 3 to 12 + Trend parameters for complete time series (see Trend product) + ++----------+--------------------------------------------------------------------+ ++ 13 to 22 + Trend parameters for time series before change (see Trend product) + ++----------+--------------------------------------------------------------------+ ++ 23 to 32 + Trend parameters for time series after change (see Trend product) + ++----------+--------------------------------------------------------------------+ From 494f5027bb4adf65d926aecf884147c3e45dfaf5 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Tue, 15 Sep 2020 09:08:06 +0200 Subject: [PATCH 76/78] adding docs --- .../components/higher-level/tsa/param.rst | 74 ++++++++++++++++++- docs/source/history/vdev.rst | 54 +++++++++++++- 2 files changed, 125 insertions(+), 3 deletions(-) diff --git a/docs/source/components/higher-level/tsa/param.rst b/docs/source/components/higher-level/tsa/param.rst index 82056b6e..0145248d 100755 --- a/docs/source/components/higher-level/tsa/param.rst +++ b/docs/source/components/higher-level/tsa/param.rst @@ -442,7 +442,7 @@ The following parameter descriptions are a print-out of ``force-parameter``, whi | ``OUTPUT_CAW = FALSE`` | ``OUTPUT_CAD = FALSE`` -* **Land surface phenology parameters** +* **Land surface phenology parameters (SPLITS)** .. note:: The Land Surface Phenology (LSP) options are only available if FORCE was compiled with SPLITS (see :ref:`install` section). @@ -521,6 +521,78 @@ The following parameter descriptions are a print-out of ``force-parameter``, whi | *Type:* Logical. Valid values: {TRUE,FALSE} | ``OUTPUT_CAP = FALSE`` + +.. _tsa-param-polar: + + .. 
note:: + The Polar-based Land Surface Phenology (LSP) metrics are always available (they do not rely on SPLITS). + + * Threshold for detecting Start of Season in the cumulative time series. + + | *Type:* Float. Valid range: ]0,1[ + | ``POL_START_THRESHOLD = 0.2`` + + * Threshold for detecting Mid of Season in the cumulative time series. + + | *Type:* Float. Valid range: ]0,1[ + | ``POL_MID_THRESHOLD = 0.5`` + + * Threshold for detecting End of Season in the cumulative time series. + + | *Type:* Float. Valid range: ]0,1[ + | ``POL_END_THRESHOLD = 0.8`` + + * Should the start of each phenological year be adapted? + If FALSE, the start is static, i.e. Date of Early Minimum and Date of Late Minimum are the same for all years and 365 days apart. + If TRUE, they differ from year to year and a phenological year is not forced to be 365 days long. + + | *Type:* Logical. Valid values: {TRUE,FALSE} + | ``POL_ADAPTIVE = TRUE`` + + * Which Polarmetrics should be computed? There will be a POL output file for each metric (with years as bands). + Currently available are the dates of the early minimum, late minimum, peak of season, start of season, mid of season, end of season, early average vector, average vector, late average vector; + lengths of the total season, green season, between average vectors; + values of the early minimum, late minimum, peak of season, start of season, mid of season, end of season, early average vector, average vector, late average vector, base level, green amplitude, seasonal amplitude, peak amplitude, green season mean, green season variability, dates of start of phenological year, difference between start of phenological year and its longterm average; + integrals of the total season, base level, base+total, green season, rising rate, falling rate; + rates of average rising, average falling, maximum rising, maximum falling. + + | *Type:* Character list.
Valid values: {DEM,DLM,DPS,DSS,DMS,DES,DEV,DAV,DLV,LTS,LGS,LGV,VEM,VLM,VPS,VSS,VMS,VES,VEV,VAV,VLV,VBL,VGA,VSA,VPA,VGM,VGV,DPY,DPV,IST,IBL,IBT,IGS,IRR,IFR,RAR,RAF,RMR,RMF} + | ``POL = VSS VPS VES VSA RMR IGS`` + + * Standardize the POL time series with pixel mean and/or standard deviation? + + | *Type:* Character. Valid values: {NONE,NORMALIZE,CENTER} + | ``STANDARDIZE_POL = NONE`` + + + * Output the polar-transformed time series? These are layer stack of cartesian X- and Y-coordinates for each interpolated date. + This results in two files, product IDs are PCX and PCY. + + | *Type:* Logical. Valid values: {TRUE,FALSE} + | ``OUTPUT_PCT = FALSE`` + + + * Output the Polarmetrics? These are layer stacks per polarmetric with as many bands as years. + + | *Type:* Logical. Valid values: {TRUE,FALSE} + | ``OUTPUT_POL = FALSE`` + + + * Compute and output a linear trend analysis on the requested Polarmetric time series? + Note that the OUTPUT_POL parameters don't need to be TRUE to do this. + See also the TREND PARAMETERS block below. + + | *Type:* Logical. Valid values: {TRUE,FALSE} + | ``OUTPUT_TRO = FALSE`` + + + * Compute and output an extended Change, Aftereffect, Trend (CAT) analysis on the requested Polarmetric time series? + Note that the OUTPUT_POL parameters don't need to be TRUE to do this. + See also the TREND PARAMETERS block below. + + | *Type:* Logical. Valid values: {TRUE,FALSE} + | ``OUTPUT_CAO = FALSE`` + * **Trend parameters** * This parameter specifies the tail-type used for significance testing of the slope in the trend analysis. diff --git a/docs/source/history/vdev.rst b/docs/source/history/vdev.rst index 8fd53b56..95325bdf 100755 --- a/docs/source/history/vdev.rst +++ b/docs/source/history/vdev.rst @@ -10,7 +10,40 @@ Master release: TBA * **General changes** - Fixed a small bug that prevented program execution when the parameterfile was too long.
+ * GDAL >= 3.0 support: + + Since GDAL >= 3.0, the coordinates from coordinate transformation operations are no longer sorted as X/Y or LON/LAT, but in the typical order of each coordinate system. + This has caused an incompatibility of FORCE with GDAL >= 3. + This is fixed now. Still, do not use FORCE < 3.5 with GDAL >= 3. Update FORCE instead. + + * Fixed a small bug that prevented program execution when the parameterfile was too long. + + * FORCE programs will now transition to a Unix-typical usage, where non-mandatory options are specified with -o or --long-option. + This will happen from time to time in the next releases. + The new program force-level1-csd is a prototype for this. + + * Reduced the amount of compiler warnings when compiling with gcc 9.3.0 under Ubuntu 20.04 LTS. + This is ongoing work. The goal is to get rid of all warnings (they are not critical, though). + +* **FORCE LEVEL 1 ARCHIVING SYSTEM** + + * new program force-level1-csd: + + The new force-level1-csd tool (cloud storage download) was kindly contributed by Stefan Ernst. + This tool goes way beyond the capabilities of the former sensor-specific scripts force-level1-landsat and force-level1-sentinel2. + It downloads Sentinel-2 AND Landsat data from the Google Cloud Storage. + You need the gutils python package, and you might need an account on Google's end. + This is currently free. After communicating with Google, we expect this to remain free in the future. + force-level1-csd is very efficient, you can use parallel downloads. + You can filter the datapool w.r.t. sensor, date, cloud cover, tier level etc. + The area of interest can be specified in a variety of ways, e.g. coordinate string as in force-level1-sentinel2, but a list of WRS-2/MGRS scenes, or vector geometries (e.g. shapefile) are also supported. + Of course, it also takes care of the file queues needed for Level 2 Processing.
+ + * deprecated programs: force-level1-landsat and force-level1-sentinel2: + + Due to the superiority of force-level1-csd, force-level1-landsat and force-level1-sentinel2 are now deprecated. + They will remain in the repo for a while, but will only receive minimal support in the future. + Users are encouraged to change their workflow accordingly. * **FORCE HIGHER LEVEL** @@ -20,16 +53,32 @@ Master release: TBA Setting all scores to 0 resulted in some crashes. Thanks to Jonas Ardö to report on this. + * in force-higher-level, TSA sub-module: + + A new suite of metrics was implemented: Land Surface Phenology descriptors based on a polar transformation, Polarmetrics it is. + This is based on the paper by Bjorn-Gustaf J. Brooks: https://www.mdpi.com/1999-4907/11/6/606, but was modified (e.g. to consider interannual shifts in the start of the phenological year) and complemented by many more metrics. + A couple of new parameters are now necessary, force-parameter has been updated to include these in the skeleton. + See :ref:`tsa-param-polar`. + * in force-higher-level, TSA sub-module: The domain tag of the bandwise FORCE metadata domain was updated to be interoperable with a 4D data model (see force-stack below). + * in force-higher-level, TSA sub-module: + + When folding the time series, the quantile statistics were broken with a memory error. + This is fixed now. Thanks to Christoph Raab and Benjamin Jakimow for reporting this. + * in force-higher-level, TSA sub-module, trend and CAT analyses: Added a new band to for computing relative change, i.e. gain/loss relative to initial value: (slope*timesteps)/offset. This makes most sense when the index has a physical meaning like fractional cover. This does not make sense at all when offset is negative! Take care. + * in force-higher-level, TSA sub-module, trend and CAT analyses: + + fixed an issue that compromised significance levels for trends, which were computed on DOY-based phenometrics.
+ * **FORCE AUX** * in force-mosaic: @@ -58,4 +107,5 @@ Master release: TBA The user can now change between "all combinations" or "paired combinations". Please refer to the program description: - https://force-eo.readthedocs.io/en/latest/components/auxilliary/magic-parameters.htm \ No newline at end of file + https://force-eo.readthedocs.io/en/latest/components/auxilliary/magic-parameters.htm + \ No newline at end of file From 0ddd6672c50b6ce68fcdaed8fd2d6e67451e4edf Mon Sep 17 00:00:00 2001 From: David Frantz Date: Tue, 15 Sep 2020 09:10:22 +0200 Subject: [PATCH 77/78] bumped version --- README.md | 2 +- docs/source/index.rst | 2 +- src/cross-level/_version-cl.h | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 3d2d63fe..1c8cb3e1 100755 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ **Framework for Operational Radiometric Correction for Environmental monitoring** -**Version 3.4.0** +**Version 3.5.0** ![FORCE Logo](/images/force.png) diff --git a/docs/source/index.rst b/docs/source/index.rst index 1d2f2ad6..f8fa326c 100755 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -5,7 +5,7 @@ FORCE documentation **FORCE: Framework for Operational Radiometric Correction for Environmental monitoring** -**Version 3.4.0** +**Version 3.5.0** `Download from Github `_. 
diff --git a/src/cross-level/_version-cl.h b/src/cross-level/_version-cl.h index aec11e17..1acf0d64 100755 --- a/src/cross-level/_version-cl.h +++ b/src/cross-level/_version-cl.h @@ -32,7 +32,7 @@ Version number extern "C" { #endif -#define _VERSION_ "3.4.0" +#define _VERSION_ "3.5.0" #ifdef __cplusplus } From 146dc8dbb06dc62b54aaf70d75d8a2438d1ea148 Mon Sep 17 00:00:00 2001 From: David Frantz Date: Tue, 15 Sep 2020 09:12:11 +0200 Subject: [PATCH 78/78] updated Makefile for merge with main --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index f3b8a7e8..ec39e7bf 100755 --- a/Makefile +++ b/Makefile @@ -24,7 +24,7 @@ # Modify the following lines to match your needs # Installation directory -BINDIR=/develop +BINDIR=/usr/local/bin # Libraries GDAL=-I/usr/include/gdal -L/usr/lib -Wl,-rpath=/usr/lib