diff --git a/src/seismic_hazard_forecasting.py b/src/seismic_hazard_forecasting.py
index 4ebc6ba..9e43458 100644
--- a/src/seismic_hazard_forecasting.py
+++ b/src/seismic_hazard_forecasting.py
@@ -52,7 +52,6 @@ def main(catalog_file, mc_file, pdf_file, m_file, m_select, mag_label, mc, m_max
     from math import ceil, floor, isnan
     import numpy as np
     import dask
-    from dask.diagnostics import ProgressBar  # use Dask progress bar
     import kalepy as kale
     import utm
     from skimage.transform import resize
@@ -69,6 +68,7 @@ def main(catalog_file, mc_file, pdf_file, m_file, m_select, mag_label, mc, m_max
     from matplotlib.contour import ContourSet
     import xml.etree.ElementTree as ET
     import json
+    import multiprocessing as mp
 
     logger = getDefaultLogger('igfash')
 
@@ -88,9 +88,7 @@ def main(catalog_file, mc_file, pdf_file, m_file, m_select, mag_label, mc, m_max
     else:
         logger.setLevel(logging.INFO)
 
-    # temporary hard-coded configuration
-    # exclude_low_fxy = False
-    exclude_low_fxy = True
+    exclude_low_fxy = True  # skip low-probability areas of the map
     thresh_fxy = 1e-3  # minimum fxy value (location PDF) needed to do PGA estimation (to skip low probability areas); also should scale according to number of grid points
 
     # log user selections
@@ -125,10 +123,6 @@ verbose: {verbose}")
         logger.info("No magnitude label of catalog specified, therefore try Mw by default")
         mag_label = 'Mw'
 
-    # if cat_label == None:
-    #     print("No magnitude label of catalog specified, therefore try 'Catalog' by default")
-    #     cat_label='Catalog'
-
     time, mag, lat, lon, depth = read_mat_cat(catalog_file, mag_label=mag_label, catalog_label='Catalog')
 
     # check for null magnitude values
@@ -258,7 +252,7 @@ verbose: {verbose}")
     # %% compute KDE and extract PDF
     start = timer()
 
-    if xy_win_method == "TW":
+    if xy_win_method:
         logger.info("Time weighting function selected")
 
         x_weights = np.linspace(0, 15, len(t_windowed))
@@ -319,7 +313,7 @@ verbose: {verbose}")
     # run activity rate modeling
     lambdas = [None]
 
-    if custom_rate != None and forecast_select:
+    if custom_rate is not None and forecast_select and not rate_select:
         logger.info(f"Using activity rate specified by user: {custom_rate} per {time_unit}")
         lambdas = np.array([custom_rate], dtype='d')
         lambdas_perc = np.array([1], dtype='d')
@@ -454,20 +448,23 @@ verbose: {verbose}")
 
     start = timer()
 
-    use_pp = False
+    use_pp = True
 
     if use_pp:  # use dask parallel computing
-        pbar = ProgressBar()
-        pbar.register()
-        # iter = range(0,len(distances))
+        mp.set_start_method("fork", force=True)  # POSIX-only; "fork" is not available on Windows
         iter = indices
         iml_grid_raw = []  # raw ground motion grids
         for imt in products:
             logger.info(f"Estimating {imt}")
+            if imt == "PGV":
+                IMT_max = 200  # search interval max for velocity (cm/s)
+            else:
+                IMT_max = 2.0  # search interval max for acceleration (g)
+
             imls = [dask.delayed(compute_IMT_exceedance)(rx_lat[i], rx_lon[i], distances[i].flatten(), fr, p,
                                                          lambdas, forecast_len, lambdas_perc, m_range, m_pdf, m_cdf, model,
-                                                         log_level=logging.DEBUG, imt=imt, IMT_min=0.0, IMT_max=2.0, rx_label=i,
+                                                         log_level=logging.DEBUG, imt=imt, IMT_min=0.0, IMT_max=IMT_max, rx_label=i,
                                                          rtol=0.1, use_cython=True) for i in iter]
             iml = dask.compute(*imls)
             iml_grid_raw.append(list(iml))
 
@@ -476,11 +473,17 @@ verbose: {verbose}")
         iml_grid_raw = []
         iter = indices
         for imt in products:
+
+            if imt == "PGV":
+                IMT_max = 200  # search interval max for velocity (cm/s)
+            else:
+                IMT_max = 2.0  # search interval max for acceleration (g)
+
             iml = []
             for i in iter:
                 iml_i = compute_IMT_exceedance(rx_lat[i], rx_lon[i], distances[i].flatten(), fr, p, lambdas,
                                                forecast_len, lambdas_perc, m_range, m_pdf, m_cdf, model, imt=imt, IMT_min = 0.0,
-                                               IMT_max = 2.0, rx_label = i, rtol = 0.1, use_cython=True)
+                                               IMT_max = IMT_max, rx_label = i, rtol = 0.1, use_cython=True)
                 iml.append(iml_i)
                 logger.info(f"Estimated {imt} at rx {i} is {iml_i}")
             iml_grid_raw.append(iml)
@@ -515,17 +518,20 @@ verbose: {verbose}")
                                   dtype=np.float64)  # this reduces values to 8 decimal places
         iml_grid_tmp = np.nan_to_num(iml_grid[j])  # change nans to zeroes
 
-        # upscale the grid
-        up_factor = 4
-        iml_grid_hd = resize(iml_grid_tmp, (up_factor * len(iml_grid_tmp), up_factor * len(iml_grid_tmp)),
-                             mode='reflect', anti_aliasing=False)
+        # upscale, trim, and interpolate the grid only if it has at least 10 nonzero values and a value range greater than 0.1
+        if np.count_nonzero(iml_grid_tmp) >= 10 and vmax - vmin > 0.1:
+            up_factor = 4
+            iml_grid_hd = resize(iml_grid_tmp, (up_factor * len(iml_grid_tmp), up_factor * len(iml_grid_tmp)),
+                                 mode='reflect', anti_aliasing=False)
+            trim_thresh = vmin
+            iml_grid_hd[iml_grid_hd < trim_thresh] = np.nan  # NaN-out values blurred below vmin at the data edges
+        else:
+            iml_grid_hd = iml_grid_tmp
+
         iml_grid_hd[iml_grid_hd == 0.0] = np.nan  # change zeroes back to nan
 
-        # trim edges so the grid is not so blocky
-        vmin_hd = min(x for x in iml_grid_hd.flatten() if not isnan(x))
+        # vmin_hd = min(x for x in iml_grid_hd.flatten() if not isnan(x))
         vmax_hd = max(x for x in iml_grid_hd.flatten() if not isnan(x))
-        trim_thresh = vmin
-        iml_grid_hd[iml_grid_hd < trim_thresh] = np.nan
 
         # generate image overlay
         north, south = lat.max(), lat.min()  # Latitude range
@@ -538,7 +544,7 @@ verbose: {verbose}")
         cmap_name = 'YlOrRd'
         cmap = plt.get_cmap(cmap_name)
         fig, ax = plt.subplots(figsize=(6, 6))
-        ax.imshow(iml_grid_hd, origin='lower', cmap=cmap, vmin=vmin, vmax=vmax)
+        ax.imshow(iml_grid_hd, origin='lower', cmap=cmap, vmin=vmin, vmax=vmax, interpolation='bilinear')
         ax.axis('off')
 
         # Save the figure
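Two of the changes above introduce patterns worth illustrating. The sketches below are illustrative only: `site_exceedance` and the toy grids are hypothetical stand-ins, not code from this repository.

The parallel branch fans out one delayed task per receiver and gathers them with a single dask.compute call, choosing a wider root-finding interval for PGV (in cm/s) than for PGA (in g). A minimal sketch of that fan-out, assuming a toy per-site function in place of compute_IMT_exceedance:

    import dask
    import numpy as np

    def site_exceedance(lat, lon, imt, imt_min, imt_max):
        # placeholder for compute_IMT_exceedance: return the interval midpoint
        return 0.5 * (imt_min + imt_max)

    lats = np.linspace(50.0, 50.1, 4)
    lons = np.linspace(19.0, 19.1, 4)

    iml_grid_raw = []  # one list of ground-motion values per intensity measure
    for imt in ("PGA", "PGV"):
        imt_max = 200.0 if imt == "PGV" else 2.0  # cm/s for PGV, g for PGA
        tasks = [dask.delayed(site_exceedance)(la, lo, imt, 0.0, imt_max)
                 for la, lo in zip(lats, lons)]
        iml_grid_raw.append(list(dask.compute(*tasks)))  # default scheduler

The upscaling hunk now guards resize() so that nearly empty or nearly flat grids are passed through untouched, and upscaled cells that blend toward the zero background are trimmed to NaN so they render as transparent rather than as a low-value halo. A sketch of that guard on a toy grid, using the patch's own thresholds (at least 10 nonzero cells, value range above 0.1):

    import numpy as np
    from skimage.transform import resize

    grid = np.zeros((8, 8))
    grid[2:6, 2:6] = np.linspace(0.2, 0.9, 16).reshape(4, 4)
    vmin, vmax = grid[grid > 0].min(), grid.max()

    if np.count_nonzero(grid) >= 10 and vmax - vmin > 0.1:
        up_factor = 4
        grid_hd = resize(grid, (up_factor * grid.shape[0], up_factor * grid.shape[1]),
                         mode='reflect', anti_aliasing=False)
        grid_hd[grid_hd < vmin] = np.nan  # trim cells blended toward the zero background
    else:
        grid_hd = grid.copy()

    grid_hd[grid_hd == 0.0] = np.nan  # zeros become transparent in the overlay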