Compare commits

12 Commits

v2.66 ... 50930e3233
| Author | SHA1 | Date |
|---|---|---|
| | 50930e3233 | |
| | a5534212ba | |
| | d661cad991 | |
| | 3136c4985d | |
| | deb7005604 | |
| | fe9d886499 | |
| | f7eb39c43c | |
| | 00bd39a098 | |
| | 5a1f43d6cd | |
| | a1c0ae36bb | |
| | 63351ceb10 | |
| | 65759b86f1 | |
@@ -52,7 +52,6 @@ def main(catalog_file, mc_file, pdf_file, m_file, m_select, mag_label, mc, m_max
     from math import ceil, floor, isnan
     import numpy as np
     import dask
-    from dask.diagnostics import ProgressBar  # use Dask progress bar
     import kalepy as kale
     import utm
     from skimage.transform import resize
@@ -69,6 +68,7 @@ def main(catalog_file, mc_file, pdf_file, m_file, m_select, mag_label, mc, m_max
     from matplotlib.contour import ContourSet
     import xml.etree.ElementTree as ET
     import json
+    import multiprocessing as mp

     logger = getDefaultLogger('igfash')

@@ -88,9 +88,7 @@ def main(catalog_file, mc_file, pdf_file, m_file, m_select, mag_label, mc, m_max
     else:
         logger.setLevel(logging.INFO)

-    # temporary hard-coded configuration
-    # exclude_low_fxy = False
-    exclude_low_fxy = True
+    exclude_low_fxy = True # skip low probability areas of the map
     thresh_fxy = 1e-3  # minimum fxy value (location PDF) needed to do PGA estimation (to skip low probability areas); also should scale according to number of grid points

     # log user selections
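The `exclude_low_fxy` flag and the `thresh_fxy` cutoff skip ground-motion estimation wherever the location PDF is negligible. A minimal sketch of that masking idea, using illustrative array names rather than the ones computed elsewhere in this script:

```python
import numpy as np

# Illustrative only: fxy stands in for the gridded location PDF used in the script.
fxy = np.random.rand(50, 50)
fxy /= fxy.sum()                      # normalise so the grid sums to 1

thresh_fxy = 1e-3                     # minimum PDF value worth estimating ground motion for
exclude_low_fxy = True

if exclude_low_fxy:
    indices = np.flatnonzero(fxy >= thresh_fxy)   # grid cells to keep
else:
    indices = np.arange(fxy.size)                 # keep every cell

print(f"Estimating ground motion at {indices.size} of {fxy.size} grid points")
```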
@@ -125,10 +123,6 @@ verbose: {verbose}")
             logger.info("No magnitude label of catalog specified, therefore try Mw by default")
             mag_label = 'Mw'

-        # if cat_label == None:
-        #   print("No magnitude label of catalog specified, therefore try 'Catalog' by default")
-        #   cat_label='Catalog'
-
         time, mag, lat, lon, depth = read_mat_cat(catalog_file, mag_label=mag_label, catalog_label='Catalog')

         # check for null magnitude values
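The "check for null magnitude values" step that follows this hunk amounts to dropping events whose magnitude is NaN; a small illustration with made-up catalog columns:

```python
import numpy as np

# Illustrative arrays standing in for the columns returned by read_mat_cat.
mag = np.array([1.2, np.nan, 2.1, 1.7])
time = np.arange(mag.size)

keep = ~np.isnan(mag)                 # drop events whose magnitude is missing
time, mag = time[keep], mag[keep]
print(f"Dropped {np.count_nonzero(~keep)} events with null magnitudes")
```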
@@ -258,7 +252,7 @@ verbose: {verbose}")
         # %% compute KDE and extract PDF
         start = timer()

-        if xy_win_method == "TW":
+        if xy_win_method:
            logger.info("Time weighting function selected")

            x_weights = np.linspace(0, 15, len(t_windowed))
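With the weaker `if xy_win_method:` test, any truthy windowing method now enables time weighting, where later events get larger weights in the location KDE. The script builds this with kalepy; the sketch below shows the same idea with scipy's weighted `gaussian_kde` and a purely illustrative linear weight ramp (the actual weighting function is not part of this hunk):

```python
import numpy as np
from scipy.stats import gaussian_kde

rng = np.random.default_rng(0)
lon = rng.normal(19.0, 0.05, 200)     # illustrative epicentres, ordered oldest -> newest
lat = rng.normal(50.0, 0.05, 200)

# Linear ramp mirroring x_weights = np.linspace(0, 15, len(t_windowed))
x_weights = np.linspace(0, 15, len(lon))
weights = x_weights / x_weights.sum()

kde = gaussian_kde(np.vstack([lon, lat]), weights=weights)
fxy = kde(np.vstack([lon, lat]))      # location PDF evaluated at the event positions
```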
@@ -319,7 +313,7 @@ verbose: {verbose}")

     # run activity rate modeling
     lambdas = [None]
-    if custom_rate != None and forecast_select:
+    if custom_rate != None and forecast_select and not rate_select:
         logger.info(f"Using activity rate specified by user: {custom_rate} per {time_unit}")
         lambdas = np.array([custom_rate], dtype='d')
         lambdas_perc = np.array([1], dtype='d')
@@ -454,20 +448,23 @@ verbose: {verbose}")

         start = timer()

-        use_pp = False
+        use_pp = True

         if use_pp:         # use dask parallel computing
-            pbar = ProgressBar()
-            pbar.register()
-            # iter = range(0,len(distances))
+            mp.set_start_method("fork", force=True)
            iter = indices
            iml_grid_raw = []  # raw ground motion grids
            for imt in products:
                logger.info(f"Estimating {imt}")

+                if imt == "PGV":
+                    IMT_max = 200 # search interval max for velocity (cm/s)
+                else:
+                    IMT_max = 2.0 # search interval max for acceleration (g)
+
                imls = [dask.delayed(compute_IMT_exceedance)(rx_lat[i], rx_lon[i], distances[i].flatten(), fr, p, lambdas,
                                                            forecast_len, lambdas_perc, m_range, m_pdf, m_cdf, model,
-                                                            log_level=logging.DEBUG, imt=imt, IMT_min=0.0, IMT_max=2.0, rx_label=i,
+                                                            log_level=logging.DEBUG, imt=imt, IMT_min=0.0, IMT_max=IMT_max, rx_label=i,
                                                            rtol=0.1, use_cython=True) for i in iter]
                iml = dask.compute(*imls)
                iml_grid_raw.append(list(iml))
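With `use_pp = True`, one `compute_IMT_exceedance` call per receiver is wrapped in `dask.delayed` and evaluated by a single `dask.compute`, and the fork start method is forced so worker processes inherit the already-loaded arrays (POSIX only). A stripped-down sketch of that fan-out pattern with a stand-in function:

```python
import multiprocessing as mp
import dask

def estimate_iml(rx, imt, imt_max):
    """Stand-in for compute_IMT_exceedance; returns a dummy intensity value."""
    return 0.01 * rx if imt == "PGA" else 0.1 * rx

mp.set_start_method("fork", force=True)   # as in the change above; POSIX only

indices = range(8)
iml_grid_raw = []
for imt in ("PGA", "PGV"):
    # Search-interval ceiling depends on the intensity measure, mirroring the diff
    IMT_max = 200 if imt == "PGV" else 2.0
    imls = [dask.delayed(estimate_iml)(i, imt, IMT_max) for i in indices]
    iml_grid_raw.append(list(dask.compute(*imls)))   # one grid of values per product

print(iml_grid_raw)
```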
@@ -476,11 +473,17 @@ verbose: {verbose}")
            iml_grid_raw = []
            iter = indices
            for imt in products:
+
+                if imt == "PGV":
+                    IMT_max = 200 # search interval max for velocity (cm/s)
+                else:
+                    IMT_max = 2.0 # search interval max for acceleration (g)
+
                iml = []
                for i in iter:
                    iml_i = compute_IMT_exceedance(rx_lat[i], rx_lon[i], distances[i].flatten(), fr, p, lambdas, forecast_len, 
                                                    lambdas_perc, m_range, m_pdf, m_cdf, model, imt=imt, IMT_min = 0.0,
-                                                    IMT_max = 2.0, rx_label = i, rtol = 0.1, use_cython=True)
+                                                    IMT_max = IMT_max, rx_label = i, rtol = 0.1, use_cython=True)
                    iml.append(iml_i)
                    logger.info(f"Estimated {imt} at rx {i} is {iml_i}")
                iml_grid_raw.append(iml)
@@ -515,17 +518,20 @@ verbose: {verbose}")
                dtype=np.float64)  # this reduces values to 8 decimal places
            iml_grid_tmp = np.nan_to_num(iml_grid[j])  # change nans to zeroes

-            # upscale the grid
-            up_factor = 4
-            iml_grid_hd = resize(iml_grid_tmp, (up_factor * len(iml_grid_tmp), up_factor * len(iml_grid_tmp)),
-                                 mode='reflect', anti_aliasing=False)
+            # upscale the grid, trim, and interpolate if there are at least 10 grid values with range greater than 0.1
+            if np.count_nonzero(iml_grid_tmp) >= 10 and vmax-vmin > 0.1:
+                up_factor = 4
+                iml_grid_hd = resize(iml_grid_tmp, (up_factor * len(iml_grid_tmp), up_factor * len(iml_grid_tmp)),
+                                    mode='reflect', anti_aliasing=False)
+                trim_thresh = vmin
+                iml_grid_hd[iml_grid_hd < trim_thresh] = np.nan
+            else:
+                iml_grid_hd = iml_grid_tmp
+
            iml_grid_hd[iml_grid_hd == 0.0] = np.nan  # change zeroes back to nan

-            # trim edges so the grid is not so blocky
-            vmin_hd = min(x for x in iml_grid_hd.flatten() if not isnan(x))
+            #vmin_hd = min(x for x in iml_grid_hd.flatten() if not isnan(x))
            vmax_hd = max(x for x in iml_grid_hd.flatten() if not isnan(x))
-            trim_thresh = vmin
-            iml_grid_hd[iml_grid_hd < trim_thresh] = np.nan

            # generate image overlay
            north, south = lat.max(), lat.min()  # Latitude range
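The reworked block only upsamples and trims the grid when there is enough signal to interpolate (at least 10 non-zero cells and a value range above 0.1); otherwise the raw grid is passed through unchanged. A minimal reproduction of that guard on a synthetic grid:

```python
import numpy as np
from skimage.transform import resize

rng = np.random.default_rng(1)
iml_grid_tmp = rng.random((20, 20))          # synthetic stand-in for iml_grid[j]
vmin, vmax = iml_grid_tmp.min(), iml_grid_tmp.max()

if np.count_nonzero(iml_grid_tmp) >= 10 and vmax - vmin > 0.1:
    up_factor = 4                            # 4x upsampling, as in the diff
    iml_grid_hd = resize(iml_grid_tmp,
                         (up_factor * len(iml_grid_tmp), up_factor * len(iml_grid_tmp)),
                         mode='reflect', anti_aliasing=False)
    iml_grid_hd[iml_grid_hd < vmin] = np.nan  # trim interpolation artefacts below vmin
else:
    iml_grid_hd = iml_grid_tmp               # too little signal: keep the raw grid
```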
@@ -538,7 +544,7 @@ verbose: {verbose}")
            cmap_name = 'YlOrRd'
            cmap = plt.get_cmap(cmap_name)
            fig, ax = plt.subplots(figsize=(6, 6))
-            ax.imshow(iml_grid_hd, origin='lower', cmap=cmap, vmin=vmin, vmax=vmax)
+            ax.imshow(iml_grid_hd, origin='lower', cmap=cmap, vmin=vmin, vmax=vmax, interpolation='bilinear')
            ax.axis('off')

            # Save the figure
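Adding `interpolation='bilinear'` smooths cell edges when the grid is rasterised for the map overlay. A self-contained illustration (the Agg backend and the output filename are assumptions, not taken from this commit):

```python
import numpy as np
import matplotlib
matplotlib.use("Agg")                        # headless rendering; assumption only
import matplotlib.pyplot as plt

iml_grid_hd = np.random.rand(80, 80)         # stand-in for the upscaled grid
vmin, vmax = 0.0, 1.0

fig, ax = plt.subplots(figsize=(6, 6))
ax.imshow(iml_grid_hd, origin='lower', cmap=plt.get_cmap('YlOrRd'),
          vmin=vmin, vmax=vmax, interpolation='bilinear')
ax.axis('off')
fig.savefig("iml_overlay.png", bbox_inches='tight', pad_inches=0, transparent=True)
```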