Update src/seismic_hazard_forecasting.py
This commit is contained in:
parent
d904101ba1
commit
a0dcc808bc
@ -1,7 +1,65 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from igfash.rate import apply_beast, datenum_to_datetime
|
from igfash.rate import datenum_to_datetime
|
||||||
|
|
||||||
|
def apply_beast(act_rate, **kwargs):
    """
    Apply BEAST changepoint detection to Gaussian-smoothed copies of an
    activity-rate series, one copy per smoothing window in the 15-20% range.

    Parameters
    ----------
    act_rate : array-like
        Activity-rate data to smooth and analyse.
    **kwargs
        Extra keyword arguments forwarded to ``rb.beast``.

    Returns
    -------
    out : list
        BEAST result object for each smoothed rate array.
    prob : list of numpy.ndarray
        For each result, a 2-column array of
        (changepoint probability, changepoint location) sorted by location;
        an empty array when no changepoints were detected.
    """
    out = []
    prob = []

    # Smoothing windows span ~15-20% of the length of the rate array.
    ll = int(np.ceil(0.15 * len(act_rate)))
    ul = int(np.ceil(0.20 * len(act_rate)))

    # Apply BEAST to each smoothed rate array.
    for win in range(ll, ul + 1):
        # Gaussian rolling mean; min_periods=1 keeps the series defined at
        # the edges where the window is truncated.
        smoothed = (
            pd.Series(act_rate)
            .rolling(window=win, win_type='gaussian', center=True, min_periods=1)
            .mean(std=win)
            .to_numpy()
        )
        # season='none' discards periodicity; only trend changepoints matter.
        out.append(rb.beast(smoothed, season='none', **kwargs))

    # Extract probabilities and changepoint locations from each BEAST result.
    # Use the median number of changepoints; if the median is 0 or NaN, fall
    # back to the 90th percentile estimate.
    for result in out:
        if result.trend.ncp_median != 0 and not np.isnan(result.trend.ncp_median):
            ncp = int(result.trend.ncp_median)
            cp_probs = result.trend.cpPr[:ncp]
            cps = result.trend.cp[:ncp]
        elif not np.isnan(result.trend.ncp_pct90):
            ncp = int(result.trend.ncp_pct90)
            cp_probs = result.trend.cpPr[:ncp]
            cps = result.trend.cp[:ncp]
        else:
            # Both estimates are NaN: no changepoints. Use numpy arrays
            # (not plain lists) so the fancy indexing below does not raise
            # TypeError when indexed with the array from np.argsort.
            cp_probs = np.array([])
            cps = np.array([])

        # Sort the changepoints (and their probabilities) by location and
        # store them as a (probability, location) column stack.
        order = np.argsort(cps)
        prob.append(np.column_stack((cp_probs[order], cps[order])))

    return out, prob
||||||
def calc_bins2(dates, unit, bin_dur, dates_calc, rate_forecast, rate_unc_high, rate_unc_low, multiplicator, filename = "activity_rate.png", **kwargs):
|
def calc_bins2(dates, unit, bin_dur, dates_calc, rate_forecast, rate_unc_high, rate_unc_low, multiplicator, filename = "activity_rate.png", **kwargs):
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user