From a0dcc808bc60cb2113ef9c147d443ea69f604277 Mon Sep 17 00:00:00 2001
From: ftong
Date: Mon, 30 Jun 2025 18:27:44 +0200
Subject: [PATCH] Update src/seismic_hazard_forecasting.py

---
 src/seismic_hazard_forecasting.py | 53 ++++++++++++++++++++++++++++++++++++-
 1 file changed, 52 insertions(+), 1 deletion(-)

diff --git a/src/seismic_hazard_forecasting.py b/src/seismic_hazard_forecasting.py
index 8cdd3a1..c45baa2 100644
--- a/src/seismic_hazard_forecasting.py
+++ b/src/seismic_hazard_forecasting.py
@@ -1,7 +1,58 @@
 # -*- coding: utf-8 -*-
 import numpy as np
+import pandas as pd
+import Rbeast as rb
 
-from igfash.rate import apply_beast, datenum_to_datetime
+from igfash.rate import datenum_to_datetime
+
+
+def apply_beast(act_rate, **kwargs):
+    """
+    Apply BEAST to the activity rate data, smoothed over a range of
+    Gaussian windows.
+
+    Input
+        act_rate : Activity rate data array to smooth and run BEAST on.
+    Output
+        out  : List of BEAST results, one per smoothed rate array.
+        prob : List of arrays of (probability, change point) pairs
+               extracted from each BEAST result.
+    """
+    out, prob = [], []
+    # Define the range of smoothing windows as 15-20% of the rate array size
+    ll = int(np.ceil(0.15 * len(act_rate)))
+    ul = int(np.ceil(0.20 * len(act_rate)))
+
+    # Apply BEAST to each smoothed rate array
+    for ii in range(ll, ul + 1):
+        # Gaussian smoothing with window size (and std) ii
+        bb = pd.Series(act_rate).rolling(window=ii, win_type='gaussian',
+                                         center=True, min_periods=1).mean(std=ii).to_numpy()
+        # Run BEAST on the smoothed array, discarding periodicity
+        out.append(rb.beast(bb, season='none', **kwargs))
+
+    # Extract change points and probabilities from each BEAST result,
+    # taking the median number of change points, or the 90th percentile
+    # when the median is zero or NaN
+    for result in out:
+        if result.trend.ncp_median != 0 and not np.isnan(result.trend.ncp_median):
+            ncp = int(result.trend.ncp_median)
+        elif not np.isnan(result.trend.ncp_pct90):
+            ncp = int(result.trend.ncp_pct90)
+        else:
+            ncp = 0  # both estimates are NaN: no change points to report
+        cp_probs = np.asarray(result.trend.cpPr[:ncp])
+        cps = np.asarray(result.trend.cp[:ncp])
+
+        # Sort the change points and corresponding probabilities
+        sorted_indices = np.argsort(cps)
+        sorted_cps = cps[sorted_indices]
+        sorted_probs = cp_probs[sorted_indices]
+
+        # Store the sorted change points and probabilities
+        prob.append(np.column_stack((sorted_probs, sorted_cps)))
+
+    return out, prob
 
 
 def calc_bins2(dates, unit, bin_dur, dates_calc, rate_forecast, rate_unc_high, rate_unc_low, multiplicator, filename = "activity_rate.png", **kwargs):
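
Note for reviewers: the snippet below is a minimal smoke test for the patched
apply_beast(); it is illustrative only and not part of the patch. The synthetic
act_rate series, the import path, and the installed packages (numpy, pandas,
scipy, Rbeast) are all assumptions made for the example.

    import numpy as np

    # Adjust the import to your local layout, e.g. with src/ on PYTHONPATH.
    from seismic_hazard_forecasting import apply_beast

    rng = np.random.default_rng(42)

    # Synthetic activity rate: a step change at index 60 plus Poisson noise,
    # standing in for a real catalogue-derived rate series.
    act_rate = np.concatenate([rng.poisson(5.0, 60),
                               rng.poisson(12.0, 40)]).astype(float)

    out, prob = apply_beast(act_rate)

    # prob has one entry per smoothing window; each row of prob[k] is a
    # (change-point probability, change-point position) pair.
    for k, pairs in enumerate(prob):
        print(f"window {k}: {len(pairs)} change point(s) detected")

With the step at index 60, each smoothing window should recover a change point
near that position, with the probability in the first column indicating how
strongly BEAST supports it.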