
Commit

handled series with one element per year, for which the POT analysis is meaningless
Lorenzo Mentaschi authored and committed Mar 23, 2017
1 parent dc489ef commit e0e65f4
Showing 12 changed files with 158 additions and 113 deletions.
156 changes: 69 additions & 87 deletions tsEVstatistics.m
@@ -4,6 +4,7 @@

% in a gaussian approximation alphaCI~68% corresponds to 1 sigma confidence
% interval

defvals={[],[],.68};
minvars=1;
EVdata = [];
@@ -22,65 +23,51 @@
Tr=[5,10,20,50,100,200,500,1000];

EVmeta.Tr=Tr;
-try
-EVmeta.lon=pointData.lon;
-EVmeta.lat=pointData.lat;
-catch
-disp('lon lat not found. Cannot set them to metadata.');
-end
-
-npoints = 1;
-[nyears]=size(pointData.annualMax);

%% stationary GEV

imethod=1;
methodname='GEVstat';

-paramEsts=nan(npoints,3);
+paramEsts=nan(1,3);
paramCIs=nan*ones(2,3);
-rlvls=nan(npoints,length(Tr));
+rlvls=nan(1,length(Tr));

-criterio=zeros(npoints,1);
+criterio=0;

if ~isempty(pointData.annualMax)
-for jj=1:npoints;
-disp(['GEV - ' num2str(jj/npoints*100) '%'])

if strcmpi(gevMaxima, 'annual')
-tmpmat=pointData.annualMax(jj,:);
+tmpmat=pointData.annualMax(:);
elseif strcmpi(gevMaxima, 'monthly')
-tmpmat=pointData.monthlyMax(jj,:);
+tmpmat=pointData.monthlyMax(:);
else
error(['tsEVstatistics: invalid gevMaxima type: ' gevMaxima]);
end

iIN=~isnan(tmpmat);

if sum(iIN)>=minGEVSample

-criterio(jj)=1;
+criterio=1;

tmp=tmpmat(iIN);

-[paramEsts(jj,1:3),paramCIs]=gevfit(tmp, alphaCI);
+[paramEsts(1,1:3),paramCIs]=gevfit(tmp, alphaCI);
% paramEsts(jj,1): shape param
% paramEsts(jj,2): scale param
% paramEsts(jj,3): location param

% the second parameter returned by gevfit is the 95% confidence
% interval

-rlvls(jj,:) = gevinv(1-1./Tr,paramEsts(jj,1),paramEsts(jj,2),paramEsts(jj,3));
+rlvls(1,:) = gevinv(1-1./Tr,paramEsts(1,1),paramEsts(1,2),paramEsts(1,3));

else

disp('Skipping...')
isValid = false;

end

@@ -94,74 +81,69 @@
EVdata(imethod).values = [];
EVdata(imethod).parameters = [];
EVdata(imethod).paramCIs = [];
-criterio = ones(npoints,1);
+criterio = 1;
end

%% stationary GPD

imethod=2;
methodname='GPDstat';

-paramEstsall=nan(npoints,6);
+paramEstsall=nan(1,6);
paramCIs=nan*ones(2,2);
-rlvls=nan(npoints,length(Tr));
+rlvls=nan(1,length(Tr));

if length(EVdata)<imethod

-for ik=1:npoints
-disp(['GPD - ' num2str(ik/npoints*100) '%'])

try
+ik = 1;
if criterio(ik)==1
d1=pointData.POT(ik).peaks-pointData.POT(ik).threshold;

[paramEsts,paramCIs]=gpfit(d1, alphaCI);
% shape parameter
ksi=paramEsts(1);
% scale parameter
sgm=paramEsts(2);

if ksi < -.5
% computing anyway the confidence interval (in a rough way)
probs = [alphaCI/2; 1-alphaCI/2];
[~, acov] = gplike([ksi sgm], d1);
se = sqrt(diag(acov))';

% Compute the CI for k using a normal distribution for khat.
kci = norminv(probs, ksi, se(1));
% VERY ROUGH: clipping the lower boundary of kci at -1
kci(kci < -1) = -1;

% Compute the CI for sigma using a normal approximation for
% log(sigmahat), and transform back to the original scale.
% se(log(sigmahat)) is se(sigmahat) / sigmahat.
lnsigci = norminv(probs, log(sgm), se(2)./sgm);

paramCIs = [kci exp(lnsigci)];

end

% paramCIs: 95% confidence interval

paramEstsall(ik,:)=[sgm ksi pointData.POT(ik).threshold length(d1) length(pointData.POT(ik).peaks) pointData.POT(ik).percentile];

rlvls(ik,:) = pointData.POT(ik).threshold+(sgm/ksi).*((((length(d1)/length(pointData.POT(ik).peaks))*(1./Tr)).^(-ksi))-1);

else

disp('Skipping...')

end
catch err
disp(getReport(err));
paramEstsall(ik,:)=[0 0 0 0 0 0];
rlvls(ik,:) = zeros(1, length(Tr));
isValid = false;
end
-end

EVdata(imethod).method=methodname;
EVdata(imethod).values=rlvls;
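
For reference, the rough normal-approximation confidence interval used above for strongly bounded tails (ksi < -.5) can be reproduced on its own. The sketch below is an illustration only, not part of the commit: it assumes the Statistics Toolbox (gprnd, gpfit, gplike, norminv) and a synthetic sample of exceedances.

% Illustration with synthetic data: rough CIs for GPD parameters when ksi < -.5.
rng(1);                                   % reproducible hypothetical sample
d1 = gprnd(-0.6, 1.0, 0, 500, 1);         % exceedances from a bounded-tail GPD
alphaCI = .68;
paramEsts = gpfit(d1, alphaCI);           % [ksi sgm]
ksi = paramEsts(1);  sgm = paramEsts(2);
probs = [alphaCI/2; 1-alphaCI/2];
[~, acov] = gplike([ksi sgm], d1);        % asymptotic covariance of the MLE
se = sqrt(diag(acov))';
kci = norminv(probs, ksi, se(1));         % normal approximation for the shape
kci(kci < -1) = -1;                       % rough clipping, as in the code above
lnsigci = norminv(probs, log(sgm), se(2)./sgm);   % normal approx. on log(sigma)
paramCIs = [kci exp(lnsigci)]             % column 1: shape bounds, column 2: scale bounds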
20 changes: 20 additions & 0 deletions tsEvaComputeAnnualMaxima.m
@@ -0,0 +1,20 @@
function [annualMax, annualMaxDate, annualMaxIndx] = tsEvaComputeAnnualMaxima(timeAndSeries)

timeStamps = timeAndSeries(:,1);
srs = timeAndSeries(:,2);

tmvec = datevec(timeStamps);
years = tmvec(:,1);
srsIndices = 1:numel(srs);

function maxIndx = findMax(subIndxs)
[~, subIndxMaxIndx] = max(srs(subIndxs));
maxIndx = subIndxs(subIndxMaxIndx);
end

annualMaxIndx = accumarray(years, srsIndices, [], @findMax);
annualMaxIndx = annualMaxIndx(annualMaxIndx ~= 0);
annualMax = srs(annualMaxIndx);
annualMaxDate = timeStamps(annualMaxIndx);

end
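
A minimal usage sketch for the new helper, on hypothetical daily data (the two-column [datenum, value] layout is assumed from the rest of the toolbox):

% Hypothetical example: three years of daily values.
tms = (datenum(2000,1,1):datenum(2002,12,31))';
vals = randn(size(tms));
[annualMax, annualMaxDate, annualMaxIndx] = tsEvaComputeAnnualMaxima([tms, vals]);
% annualMax holds one maximum per calendar year, annualMaxDate the datenum of
% each maximum, and annualMaxIndx its position in the input series.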
13 changes: 13 additions & 0 deletions tsEvaDetrendTimeSeries.m
@@ -0,0 +1,13 @@
function [ detrendSeries, trendSeries, filledTimeStamps, filledSeries, nRunMn ] = tsEvaDetrendTimeSeries( timeStamps, series, timeWindow, varargin )

args.extremeLowThreshold = -Inf;
args = tsEasyParseNamedArgs(varargin, args);
extremeLowThreshold = args.extremeLowThreshold;

[trendSeries, filledTimeStamps, filledSeries, nRunMn] = tsEvaRunningMeanTrend(timeStamps, series, timeWindow);
statSeries = filledSeries;
statSeries(statSeries < extremeLowThreshold) = nan;
detrendSeries = statSeries - trendSeries;

end
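
A usage sketch for the new wrapper; the extremeLowThreshold name/value pair is the one parsed above, and the time window is assumed to be in days as elsewhere in the toolbox:

% Hypothetical example: detrend a series while ignoring values below 0.1
% in the stationary residual.
tms = (datenum(1990,1,1):datenum(2009,12,31))';
srs = 1 + 0.3*randn(size(tms));
[detrended, trend] = tsEvaDetrendTimeSeries(tms, srs, 365.25*6, 'extremeLowThreshold', 0.1);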

26 changes: 25 additions & 1 deletion tsEvaFillSeries.m
@@ -6,7 +6,31 @@
mint = min(timeStamps);
maxt = max(timeStamps);
dt = min(diff(timeStamps));
-filledTimeStamps = (mint:dt:maxt)';
+if dt >= 350 && dt <= 370
+% this is an annual series
+mindtVec = datevec(mint);
+mindtY = mindtVec(1);
+maxdtVec = datevec(maxt);
+maxdtY = maxdtVec(1);
+years = (mindtY:maxdtY)';
+dtvec = [years, ones(size(years)), ones(size(years))];
+filledTimeStamps = datenum(dtvec);
+elseif dt >= 28 && dt <= 31
+% this is a monthly series
+mindtVec = datevec(mint);
+mindtY = mindtVec(1);
+maxdtVec = datevec(maxt);
+maxdtY = maxdtVec(1);
+years = (mindtY:maxdtY);
+months = 1:12;
+[ymtx, mmtx] = meshgrid(years, months);
+ys = ymtx(:);
+ms = mmtx(:);
+dtvec = [ys, ms, ones(size(ys))];
+filledTimeStamps = datenum(dtvec);
+else
+filledTimeStamps = (mint:dt:maxt)';
+end
filledSeries = interp1(timeStamps, series, filledTimeStamps, 'nearest');
filledSeries = tsRemoveConstantSubseries(filledSeries, 4);
end
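
The new branches key on the typical spacing of the timestamps; a small standalone check of that heuristic (illustration only, plain datenum arithmetic):

% Annual stamps are 365-366 days apart, inside the [350, 370] window tested
% above; monthly stamps fall inside [28, 31]; anything denser keeps the
% original mint:dt:maxt filling.
annualStamps = datenum((2000:2010)', 1, 1);
monthlyStamps = datenum(2000, (1:24)', 1);
dtAnnual = min(diff(annualStamps))      % 365
dtMonthly = min(diff(monthlyStamps))    % 28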
6 changes: 4 additions & 2 deletions tsEvaNonStationary.m
@@ -37,10 +37,12 @@
args.minPeakDistanceInDays = -1;
args.ciPercentile = NaN;
args.potEventsPerYear = -1;
+args.extremeLowThreshold = -inf;
args = tsEasyParseNamedArgs(varargin, args);
minPeakDistanceInDays = args.minPeakDistanceInDays;
ciPercentile = args.ciPercentile;
transfType = args.transfType;
+extremeLowThreshold = args.extremeLowThreshold;
if ~( strcmpi(transfType, 'trend') || strcmpi(transfType, 'seasonal') || strcmpi(transfType, 'trendCIPercentile') || strcmpi(transfType, 'seasonalCIPercentile') )
error('nonStationaryEvaJRCApproach: transfType can be in (trend, seasonal, trendCIPercentile)');
end
@@ -56,12 +58,12 @@

if strcmpi(transfType, 'trend')
disp('evaluating long term variations of extremes');
-trasfData = tsEvaTransformSeriesToStationaryTrendOnly( timeStamps, series, timeWindow );
+trasfData = tsEvaTransformSeriesToStationaryTrendOnly( timeStamps, series, timeWindow, varargin{:} );
gevMaxima = 'annual';
potEventsPerYear = 5;
elseif strcmpi(transfType, 'seasonal')
disp('evaluating long term and seasonal variations of extremes');
-trasfData = tsEvaTransformSeriesToStationaryMultiplicativeSeasonality( timeStamps, series, timeWindow );
+trasfData = tsEvaTransformSeriesToStationaryMultiplicativeSeasonality( timeStamps, series, timeWindow, varargin{:} );
gevMaxima = 'monthly';
potEventsPerYear = 12;
elseif strcmpi(transfType, 'trendCIPercentile')
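
With this change the extremeLowThreshold option reaches the detrending step. A hedged usage sketch, assuming the toolbox's usual (timeAndSeries, timeWindow, name/value) calling convention and output names:

% Hypothetical call: non-stationary EVA where values below 0.05 should not
% influence the trend estimate.
tms = (datenum(1980,1,1):datenum(2016,12,31))';
srs = max(0, 1 + 0.3*randn(size(tms)));
timeAndSeries = [tms, srs];               % two-column [datenum, value] matrix
timeWindow = 365.25*6;                    % six-year window, in days
[nonStatEvaParams, statTransfData] = tsEvaNonStationary(timeAndSeries, timeWindow, ...
    'transfType', 'trend', 'extremeLowThreshold', 0.05);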
18 changes: 15 additions & 3 deletions tsEvaPlotGEVImageSc.m
@@ -1,6 +1,9 @@
function phandles = tsEvaPlotGEVImageSc( Y, timeStamps, epsilon, sigma, mu, varargin )
avgYearLength = 365.2425;
-args.nPlottedTimesByYear = 360;
+nyears = (max(timeStamps) - min(timeStamps))/avgYearLength;
+nelmPerYear = length(timeStamps)/nyears;
+
+args.nPlottedTimesByYear = min(360, round(nelmPerYear));
args.ylabel = 'levels (m)';
args.zlabel = 'pdf';
args.minYear = -7000;
@@ -43,6 +46,7 @@

if length(epsilon) == 1
epsilon0 = ones(npdf, 1)*epsilon;
+%epsilon0 = epsilon;
else
epsilon_ = nan*ones(npdf*navg, 1);
epsilon_(1:L) = epsilon(:);
@@ -52,11 +56,19 @@

sigma_ = interp1(timeStamps, sigma, timeStamps_plot);
sigmaMtx = reshape(sigma_, navg, []);
-sigma0 = nanmean(sigmaMtx)';
+if size(sigmaMtx, 1) > 1
+sigma0 = nanmean(sigmaMtx)';
+else
+sigma0 = sigmaMtx';
+end

mu_ = interp1(timeStamps, mu, timeStamps_plot);
muMtx = reshape(mu_, navg, []);
-mu0 = nanmean(muMtx)';
+if size(muMtx, 1) > 1
+mu0 = nanmean(muMtx)';
+else
+mu0 = muMtx';
+end

[~, epsilonMtx] = meshgrid(Y, epsilon0);
[~, sigmaMtx] = meshgrid(Y, sigma0);
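
The new nPlottedTimesByYear default keeps the plot from sampling more time slices per year than the series contains (e.g. a single slice for an annual-maxima series). The arithmetic on a hypothetical annual series:

% Hypothetical annual series: 30 yearly timestamps.
avgYearLength = 365.2425;
timeStamps = datenum((1981:2010)', 7, 1);
nyears = (max(timeStamps) - min(timeStamps))/avgYearLength;   % ~29 years spanned
nelmPerYear = length(timeStamps)/nyears;                      % ~1 element per year
nPlottedTimesByYear = min(360, round(nelmPerYear))            % 1 instead of 360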
9 changes: 2 additions & 7 deletions tsEvaSampleData.m
@@ -17,13 +17,8 @@
pointData.completeSeries = ms;
pointData.POT = POTData;
pointData.Rlargest = rLargestData;
-if ~isempty(rLargestData)
-pointData.annualMax = rLargestData.peaks(:,1)';
-pointData.annualMaxTimeStamp = rLargestData.sdpeaks(:,1)';
-else
-pointData.annualMax = [];
-pointData.annualMaxTimeStamp = [];
-end

+[pointData.annualMax, pointData.annualMaxTimeStamp, pointData.annualMaxIndexes] = tsEvaComputeAnnualMaxima(ms);
+[pointData.monthlyMax, pointData.monthlyMaxTimeStamp, pointData.monthlyMaxIndexes] = tsEvaComputeMonthlyMaxima(ms);

yrs = unique(tsYear(ms(:,1)));
3 changes: 1 addition & 2 deletions tsEvaTransformSeriesToStatSeasonal_ciPercentile.m
@@ -15,8 +15,7 @@
seasonalityTimeWindow = 2*30.4; % 2 months

disp('computing trend ...');
-[trendSeries, filledTimeStamps, filledSeries, nRunMn] = tsEvaRunningMeanTrend(timeStamps, series, timeWindow);
-statSeries = filledSeries - trendSeries;
+[statSeries, trendSeries, filledTimeStamps, filledSeries, nRunMn] = tsEvaDetrendTimeSeries(timeStamps, series, timeWindow, varargin{:});
disp('computing trend seasonality ...');
trendSeasonality = tsEstimateAverageSeasonality(filledTimeStamps, statSeries);
statSeries = statSeries - trendSeasonality;
5 changes: 2 additions & 3 deletions tsEvaTransformSeriesToStationaryMultiplicativeSeasonality.m
@@ -1,4 +1,4 @@
-function [trasfData] = tsEvaTransformSeriesToStationaryMultiplicativeSeasonality( timeStamps, series, timeWindow )
+function [trasfData] = tsEvaTransformSeriesToStationaryMultiplicativeSeasonality( timeStamps, series, timeWindow, varargin )
% this function decomposes the series into a season-dependent trend and a
% season-dependent standard deviation.
% The season-dependent standard deviation is given by a seasonal factor
@@ -15,8 +15,7 @@
seasonalityTimeWindow = 2*30.4; % 2 months

disp('computing trend ...');
-[trendSeries, filledTimeStamps, filledSeries, nRunMn] = tsEvaRunningMeanTrend(timeStamps, series, timeWindow);
-statSeries = filledSeries - trendSeries;
+[statSeries, trendSeries, filledTimeStamps, filledSeries, nRunMn] = tsEvaDetrendTimeSeries(timeStamps, series, timeWindow, varargin{:});
disp('computing trend seasonality ...');
trendSeasonality = tsEstimateAverageSeasonality(filledTimeStamps, statSeries);
statSeries = statSeries - trendSeasonality;