Compare commits

42 Commits

SHA1 Message Date
9e75e0e768 Merge pull request 'upload Lasocki2013 ground motion model' (!20) from lasocki2013_gmm into master
Reviewed-on: official-apps/SeismicHazardForecasting#20
2025-07-11 11:28:58 +02:00
a73f17fc35 upload Lasocki2013 ground motion model 2025-07-10 11:05:36 +02:00
2fbca21917 Merge pull request 'enable cython' (!19) from ftong-patch-cython into master
Reviewed-on: official-apps/SeismicHazardForecasting#19
Reviewed-by: asia <asia@noreply.example.org>
2025-07-10 09:36:22 +02:00
910d933467 Merge branch 'master' into ftong-patch-cython 2025-07-10 08:55:13 +02:00
e06f4a5a05 edit error message for clarity 2025-07-09 15:23:42 +02:00
2906af6918 improve error message when user provides magnitude file incompatible with ground motion model 2025-07-09 15:09:44 +02:00
dd84829b6d define m_max when magnitude distribution estimation is not enabled; take from user's m_file 2025-07-09 13:27:44 +02:00
5b25b93090 convert custom activity rate from list to numpy array of type 'double' to satisfy cython input 2025-07-09 13:15:04 +02:00
9fd7de6124 Merge pull request 'Update src/seismic_hazard_forecasting.py' (!18) from ftong-patch-mc into master
Reviewed-on: official-apps/SeismicHazardForecasting#18
Reviewed-by: asia <asia@noreply.example.org>
2025-07-09 12:05:59 +02:00
d56aaeef39 Update src/seismic_hazard_forecasting.py 2025-07-09 11:25:55 +02:00
b4ef228a03 Update src/seismic_hazard_forecasting.py 2025-07-09 11:08:04 +02:00
fc2e8b53c7 Merge pull request 'July2025updates' (!17) from July2025updates into master
Reviewed-on: official-apps/SeismicHazardForecasting#17
Reviewed-by: asia <asia@noreply.example.org>
2025-07-08 14:49:31 +02:00
664ab1025b catch time_win_duration values that are too large 2025-07-07 16:42:57 +02:00
36378f4d6c Update src/seismic_hazard_forecasting.py 2025-07-07 16:36:40 +02:00
1244655a68 Update src/seismic_hazard_forecasting.py 2025-07-07 15:07:33 +02:00
70c08f47ab add info msg about m_max 2025-07-07 14:49:35 +02:00
60ae1c96cd raise exception for null magnitude values 2025-07-07 14:43:42 +02:00
1ea6c85ab2 Update src/seismic_hazard_forecasting.py 2025-07-07 13:45:17 +02:00
84440152eb Update src/seismic_hazard_forecasting.py 2025-07-07 13:40:02 +02:00
a941939493 remove processing of depth as it is not used 2025-07-07 13:25:05 +02:00
f4d2cfc3cd add check that m_max is not too large for the ground motion model 2025-07-02 15:43:05 +02:00
bd1ad26693 change default m_max from 3 to 1 mag unit above max mag in catalog 2025-07-02 15:33:42 +02:00
bc4d57a5ab catch null result from ground motion forecasting 2025-07-02 14:27:33 +02:00
921de3f423 fix - activity rate figure no longer cut off at the bottom 2025-07-01 16:24:59 +02:00
b287715f44 use distances in km instead of m 2025-07-01 16:09:24 +02:00
eb14b7d235 update package version logging, remove cpu tracking 2025-07-01 16:06:11 +02:00
3ffcf2c4db Added licence 2025-06-23 14:51:08 +02:00
6f3d95974e Merge pull request 'fixes' (#13) from fixes into master
Reviewed-on: official-apps/SeismicHazardForecasting#13
Reviewed-by: mieszkomakuch <mieszkomakuch@noreply.example.org>
2025-06-09 12:36:33 +02:00
4adfbb6a54 add 'g' as unit of colorbar for PGA and SA 2025-06-06 17:09:48 +02:00
bd468927f3 Update src/seismic_hazard_forecasting.py 2025-06-06 16:40:46 +02:00
bda00e225c Update src/seismic_hazard_forecasting.py 2025-06-06 14:08:24 +02:00
a0f29de47f Merge pull request 'ISEPOS-2373 Add coordinates to SVG overlay files' (#10) from ISEPOS-2373-add-coordinates-for-svg-overlay into master
Reviewed-on: official-apps/SeismicHazardForecasting#10
Reviewed-by: ftong <ftong@noreply.example.org>
2025-06-05 14:26:35 +02:00
86fb03c792 Merge branch 'master' into ISEPOS-2373-add-coordinates-for-svg-overlay 2025-06-05 14:09:06 +02:00
846078352b revert f1472bf250
revert add non-parallel processing and set as default; fix colourbar
2025-06-04 17:49:54 +02:00
f1472bf250 add non-parallel processing and set as default; fix colourbar 2025-06-04 17:15:19 +02:00
7a39d5a07e Update src/seismic_hazard_forecasting.py 2025-05-23 16:43:15 +02:00
9c58664770 ISEPOS-2373 Simplify code 2025-05-09 10:30:00 +02:00
17cbcc8e79 ISEPOS-2373 Simplify code 2025-05-09 10:26:27 +02:00
cce0cd258d ISEPOS-2373 Also store the bounding box as a data attribute for quick access 2025-05-07 12:59:21 +02:00
c20f7c06a7 ISEPOS-2373 Fix: json not defined 2025-05-07 12:54:42 +02:00
22fc9f7c07 ISEPOS-2373 Fix: name 'svg_path' is not defined 2025-05-07 12:52:06 +02:00
8f1ab5518a ISEPOS-2373 Add saving geographical coordinates to SVG files as metadata 2025-05-07 12:35:14 +02:00
3 changed files with 273 additions and 60 deletions

LICENCE.txt (new file, +13 lines)

@ -0,0 +1,13 @@
Copyright 2025 Institute of Geophysics, Polish Academy of Sciences

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

gsim/lasocki_2013.py (new file, +142 lines)

@ -0,0 +1,142 @@
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2013-2023 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Module exports :class:`Lasocki2013`.
"""
import numpy as np
from scipy.constants import g
from openquake.hazardlib.gsim.base import GMPE, CoeffsTable
from openquake.hazardlib import const
from openquake.hazardlib.imt import PGA, PGV, SA


def get_magnitude_energy(mag):
    """
    Converts magnitude to energy-based magnitude term.
    """
    return 1.15 + 1.96 * mag


def get_distance_term(coeffs, repi):
    """
    Computes the distance term using the given GMPE coefficients.
    """
    R_h = np.sqrt(repi ** 2 + coeffs["c7"] ** 2)
    return np.log10(R_h)


def get_standard_deviation(coeffs, coeffs_cov, magE, repi):
    """
    Computes the standard deviation term.
    """
    Cb = np.array(list(coeffs_cov)).reshape(3, 3)
    R_h = np.sqrt(repi ** 2 + coeffs["c7"] ** 2)
    X0 = np.array([1, magE[0], np.log10(R_h[0] ** 2)])
    variance_term = np.sqrt(X0 @ Cb @ X0 + coeffs["sigma"] ** 2)
    return variance_term


class Lasocki2013(GMPE):
    """
    Implement equation developed by Lasocki in "REPORT ON THE ATTENUATION
    RELATIONS OF PEAK GROUND ACCELERATION AND SPECTRAL ORDINATES OF GROUND
    MOTION FOR MINING-INDUCED SEISMIC EVENTS IN THE REGION OF THE ŻELAZNY
    MOST REPOSITORY", 2009.
    Equation coefficients provided for the random horizontal component
    """
    #: Supported tectonic region type is induced, given
    #: that the equations have been derived for the LGCD mining area
    DEFINED_FOR_TECTONIC_REGION_TYPE = const.TRT.INDUCED

    #: Supported intensity measure types are spectral acceleration,
    #: and peak ground acceleration
    DEFINED_FOR_INTENSITY_MEASURE_TYPES = {PGA, SA}

    #: Supported intensity measure component is random horizontal
    #: :attr:`~openquake.hazardlib.const.IMC.RANDOM_HORIZONTAL`,
    DEFINED_FOR_INTENSITY_MEASURE_COMPONENT = const.IMC.RANDOM_HORIZONTAL
    # DEFINED_FOR_INTENSITY_MEASURE_COMPONENT = const.IMC.VERTICAL

    #: Supported standard deviation type is total
    DEFINED_FOR_STANDARD_DEVIATION_TYPES = {const.StdDev.TOTAL}

    #: site params are not required
    REQUIRES_SITES_PARAMETERS = set()

    #: Required rupture parameter is magnitude
    REQUIRES_RUPTURE_PARAMETERS = {'mag'}

    #: Required distance measure is epicentral distance
    #: see paragraph 'Predictor Variables', page 6.
    REQUIRES_DISTANCES = {'repi'}

    def compute(self, ctx: np.recarray, imts, mean, sig, tau, phi):
        """
        Computes mean ground motion values and standard deviations for the
        Lasocki (2013) GMPE.
        Parameters:
            ctx (np.recarray): rupture/site contexts providing 'mag' and 'repi'
                (epicentral distance in km)
            imts (list): intensity measure types to compute (e.g. PGA, SA)
            mean (np.array): array to store computed mean values (natural log scale)
            sig (np.array): array to store computed total standard deviations
        """
        # Loop through each IMT and compute values
        for i, imt in enumerate(imts):
            C = self.COEFFS[imt]
            C_cov = self.COEFFS_COV[imt]
            mag = ctx.mag
            repi = ctx.repi * 1000.0  # Convert distance from km to m

            # Compute magnitude energy term
            magE = get_magnitude_energy(mag)

            # Compute GMPE terms
            mag_term = C['c1'] + C['c2'] * magE
            dist_term = C['c5'] * get_distance_term(C, repi)

            # Compute mean ground motion
            imean = mag_term + dist_term
            mean_value = np.log((10 ** imean) / g)  # Convert to natural log scale and divide by g
            mean[i] = mean_value

            # Compute standard deviation
            sigma = get_standard_deviation(C, C_cov, magE, repi)
            sig[i] = sigma

    #: coefficient table provided by report
    COEFFS = CoeffsTable(sa_damping=5, table="""\
    IMT      c1    c2     c5    c7  sigma     N
    pga    1.25  0.31  -1.34   558  0.196  1196
    0.6   -3.86  0.55  -0.65   183  0.242  1194
    1.0   -3.94  0.62  -0.66   308  0.262  1197
    2.0   -1.14  0.47  -1.02   741  0.235  1195
    5.0    0.99  0.31  -1.15   690  0.234  1206
    10.0   3.06  0.27  -1.65   906  0.203  1192
    20.0   2.62  0.27  -1.60   435  0.196  1191
    50.0   2.09  0.27  -1.48   375  0.204  1191
    """)

    COEFFS_COV = CoeffsTable(sa_damping=5, table="""\
    IMT        Cb00       Cb01       Cb02       Cb10      Cb11       Cb12       Cb20       Cb21      Cb22
    pga    0.005586  -0.000376  -0.000752  -0.000376  0.000103  -0.000111  -0.000752  -0.000111  0.000440
    0.6    0.007509  -0.000662  -0.000688  -0.000662  0.000161  -0.000154  -0.000688  -0.000154  0.000516
    1.0    0.009119  -0.00075   -0.000948  -0.00075   0.000189  -0.000187  -0.000948  -0.000187  0.000657
    2.0    0.008563  -0.000514  -0.001282  -0.000514  0.000147  -0.000164  -0.001282  -0.000164  0.000696
    5.0    0.008283  -0.000516  -0.001202  -0.000516  0.000145  -0.000161  -0.001202  -0.000161  0.000668
    10.0   0.006904  -0.00036   -0.001145  -0.00036   0.000108  -0.000126  -0.001145  -0.000126  0.000578
    20.0   0.005389  -0.000396  -0.000658  -0.000396  0.000104  -0.000107  -0.000658  -0.000107  0.000408
    50.0   0.005874  -0.000449  -0.000678  -0.000449  0.000114  -0.000114  -0.000678  -0.000114  0.000428
    """)

src/seismic_hazard_forecasting.py (+118 lines, -60 lines)

@ -17,7 +17,7 @@ def main(catalog_file, mc_file, pdf_file, m_file, m_select, mag_label, mc, m_max
then the program looks for a label of 'Mw' for magnitude in the catalog
mc: The magnitude of completeness (Mc) of the catalog
m_max:M_max. The magnitude distribution is estimated for the range from Mc to M_max. If no value is provided,
then the program sets M_max to be 3 magnitude units above the maximum magnitude value in the catalog.
then the program sets M_max to be 1 magnitude unit above the maximum magnitude value in the catalog.
m_kde_method: The kernel density estimator to use.
xy_select: If True, perform an estimation of the magnitude distribution using KDE with the chosen KDE method
grid_dim: The grid cell size (in metres) of the final ground motion product map. A smaller cell size will
@ -45,22 +45,19 @@ def main(catalog_file, mc_file, pdf_file, m_file, m_select, mag_label, mc, m_max
"""
import sys
from importlib.metadata import version
import logging
from base_logger import getDefaultLogger
from timeit import default_timer as timer
from math import ceil, floor
from math import ceil, floor, isnan
import numpy as np
import scipy
import obspy
import dask
from dask.diagnostics import ProgressBar # use Dask progress bar
import kalepy as kale
import utm
from skimage.transform import resize
import psutil
import openquake.engine
import igfash
from igfash.io import read_mat_cat, read_mat_m, read_mat_pdf, read_csv
from igfash.io import read_mat_cat, read_mat_m, read_mat_mc, read_mat_pdf, read_csv
from igfash.window import win_CTL, win_CNE
import igfash.kde as kde
from igfash.gm import compute_IMT_exceedance
@ -70,6 +67,8 @@ def main(catalog_file, mc_file, pdf_file, m_file, m_select, mag_label, mc, m_max
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator
from matplotlib.contour import ContourSet
import xml.etree.ElementTree as ET
import json
logger = getDefaultLogger('igfash')
@ -82,6 +81,7 @@ def main(catalog_file, mc_file, pdf_file, m_file, m_select, mag_label, mc, m_max
m_range = [None]
else:
m_range = read_mat_m(m_file)
m_max = m_range[-1] # take m_max from the m_file
if verbose:
logger.setLevel(logging.DEBUG)
@ -103,25 +103,13 @@ verbose: {verbose}")
# print key package version numbers
logger.debug(f"Python version {sys.version}")
logger.debug(f"Numpy version {np.__version__}")
logger.debug(f"Scipy version {scipy.__version__}")
logger.debug(f"Obspy version {obspy.__version__}")
logger.debug(f"Openquake version {openquake.engine.__version__}")
logger.debug(f"Igfash version {igfash.__version__}")
# print number of cpu cores available
ncpu = psutil.cpu_count(logical=False)
logger.debug(f"Number of cpu cores available: {ncpu}")
for process in psutil.process_iter():
with process.oneshot():
# cpu = process.cpu_percent()
cpu = process.cpu_percent() / ncpu
if cpu > 1:
logger.debug(f"{process.name()}, {cpu}")
logger.debug(f"BASELINE CPU LOAD% {psutil.cpu_percent(interval=None, percpu=True)}")
logger.debug(f"Numpy version {version('numpy')}")
logger.debug(f"Scipy version {version('scipy')}")
logger.debug(f"Obspy version {version('obspy')}")
logger.debug(f"Openquake version {version('openquake.engine')}")
logger.debug(f"Igfash version {version('igfash')}")
logger.debug(f"Rbeast version {version('rbeast')}")
logger.debug(f"Dask version {version('dask')}")
dask.config.set(scheduler='processes')
@ -143,12 +131,18 @@ verbose: {verbose}")
time, mag, lat, lon, depth = read_mat_cat(catalog_file, mag_label=mag_label, catalog_label='Catalog')
# check for null magnitude values
m_null_idx = np.where(np.isnan(mag))[0]
if len(m_null_idx) > 0:
msg = f"There are null values in the magnitude column of the catalog at indices {m_null_idx}"
logger.error(msg)
raise Exception(msg)
if mc != None:
logger.info("Mc value provided by user")
trim_to_mc = True
elif mc_file != None:
logger.info("Mc estimation output file provided; selecting largest Mc from the list")
mc = read_mc(mc_file)
mc = read_mat_mc(mc_file)
trim_to_mc = True
else:
logger.info("No Mc provided; using all magnitudes from the catalog")
@ -164,9 +158,10 @@ verbose: {verbose}")
lat = np.delete(lat, indices)
lon = np.delete(lon, indices)
# if user does not provide a m_max, set m_max to 3 magnitude units above max magnitude in catalog
# if user does not provide a m_max, set m_max to 1 magnitude unit above max magnitude in catalog
if m_max == None:
m_max = mag.max() + 3.0
m_max = mag.max() + 1.0
logger.info(f"No m_max was given. Therefore m_max is automatically set to: {m_max}")
start = timer()
@ -231,14 +226,15 @@ verbose: {verbose}")
y_min = y.min()
x_max = x.max()
y_max = y.max()
z_min = depth.min()
z_max = depth.max()
grid_x_max = int(ceil(x_max / grid_dim) * grid_dim)
grid_x_min = int(floor(x_min / grid_dim) * grid_dim)
grid_y_max = int(ceil(y_max / grid_dim) * grid_dim)
grid_y_min = int(floor(y_min / grid_dim) * grid_dim)
grid_lat_max, grid_lon_max = utm.to_latlon(grid_x_max, grid_y_max, utm_zone_number, utm_zone_letter)
grid_lat_min, grid_lon_min = utm.to_latlon(grid_x_min, grid_y_min, utm_zone_number, utm_zone_letter)
# rectangular grid
nx = int((grid_x_max - grid_x_min) / grid_dim) + 1
ny = int((grid_y_max - grid_y_min) / grid_dim) + 1
@ -325,8 +321,8 @@ verbose: {verbose}")
lambdas = [None]
if custom_rate != None and forecast_select:
logger.info(f"Using activity rate specified by user: {custom_rate} per {time_unit}")
lambdas = [custom_rate]
lambdas_perc = [1]
lambdas = np.array([custom_rate], dtype='d')
lambdas_perc = np.array([1], dtype='d')
elif rate_select:
logger.info(f"Activity rate modeling selected")
@ -344,6 +340,12 @@ verbose: {verbose}")
elif time_unit == 'years':
multiplicator = 1 / 365
# Raise an exception when time_win_duration from the user is too large relative to the catalog
if time_win_duration/multiplicator > 0.5*(time[-1] - time[0]):
msg = "Activity rate estimation time window must be less than half the catalog length. Use a shorter time window."
logger.error(msg)
raise Exception(msg)
# Selects dates in datenum format and proceeds to forecast value
start_date = datenum_data[-1] - (2 * time_win_duration / multiplicator)
dates_calc = [date for date in datenum_data if start_date <= date <= datenum_data[-1]]
@ -360,7 +362,7 @@ verbose: {verbose}")
act_rate, bin_counts, bin_edges, out, pprs, rt, idx, u_e = calc_bins(np.array(datenum_data), time_unit,
time_win_duration, dates_calc,
rate_forecast, rate_unc_high, rate_unc_low,
multiplicator, quiet=True)
multiplicator, quiet=True, figsize=(14,9))
# Assign probabilities
lambdas, lambdas_perc = lambda_probs(act_rate, dates_calc, bin_edges)
@ -372,18 +374,29 @@ verbose: {verbose}")
if forecast_select:
products = products_string.split()
logger.info(
f"Ground motion forecasting selected with ground motion model {model} and IMT products {products_string}")
logger.info(f"Ground motion forecasting selected with ground motion model {model} and IMT products {products_string}")
# validate m_max against the ground motion model
models_anthro_limited = ['Lasocki2013', 'Atkinson2015', 'ConvertitoEtAl2012Geysers'] # these models require that m_max<=4.5
if m_max > 4.5 and model in models_anthro_limited:
if m_file is None:
msg = f"The selected ground motion model {model} is only valid for magnitudes up to 4.5. Please select a lower maximum magnitude."
else:
msg = f"The selected ground motion model {model} is only valid for magnitudes up to 4.5, but the provided magnitude file includes values up to {m_max}. Please adjust the magnitude range in the file accordingly."
logger.error(msg)
raise Exception(msg)
if not xy_select:
msg = "Event location distribution modeling was not selected; cannot continue..."
logger.error(msg)
raise Exception(msg)
elif m_pdf[0] == None:
if m_pdf[0] == None:
msg = "Magnitude distribution modeling was not selected and magnitude PDF file was not provided; cannot continue..."
logger.error(msg)
raise Exception(msg)
elif lambdas[0] == None:
if lambdas[0] == None:
msg = "Activity rate modeling was not selected and custom activity rate was not provided; cannot continue..."
logger.error(msg)
raise Exception(msg)
@ -421,7 +434,10 @@ verbose: {verbose}")
rx_lat[i], rx_lon[i] = utm.to_latlon(x_rx[i], y_rx[i], utm_zone_number,
utm_zone_letter) # get receiver location as lat,lon
# experimental - compute ground motion only at grid points that have minimum probability density of thresh_fxy
# convert distances from m to km because openquake ground motion models take input distances in kilometres
distances = distances/1000.0
# compute ground motion only at grid points that have minimum probability density of thresh_fxy
if exclude_low_fxy:
indices = list(np.where(fxy.flatten() > thresh_fxy)[0])
else:
@ -434,25 +450,47 @@ verbose: {verbose}")
PGA = np.zeros(shape=(nx * ny))
# use dask parallel computing
start = timer()
pbar = ProgressBar()
pbar.register()
# iter = range(0,len(distances))
iter = indices
iml_grid_raw = [] # raw ground motion grids
for imt in products:
logger.info(f"Estimating {imt}")
imls = [dask.delayed(compute_IMT_exceedance)(rx_lat[i], rx_lon[i], distances[i].flatten(), fr, p, lambdas,
forecast_len, lambdas_perc, m_range, m_pdf, m_cdf, model,
log_level=logging.DEBUG, imt=imt, IMT_min=0.0, IMT_max=2.0,
rx_label=i) for i in iter]
iml = dask.compute(*imls)
iml_grid_raw.append(list(iml))
use_pp = False
if use_pp: # use dask parallel computing
pbar = ProgressBar()
pbar.register()
# iter = range(0,len(distances))
iter = indices
iml_grid_raw = [] # raw ground motion grids
for imt in products:
logger.info(f"Estimating {imt}")
imls = [dask.delayed(compute_IMT_exceedance)(rx_lat[i], rx_lon[i], distances[i].flatten(), fr, p, lambdas,
forecast_len, lambdas_perc, m_range, m_pdf, m_cdf, model,
log_level=logging.DEBUG, imt=imt, IMT_min=0.0, IMT_max=2.0, rx_label=i,
rtol=0.1, use_cython=True) for i in iter]
iml = dask.compute(*imls)
iml_grid_raw.append(list(iml))
else:
iml_grid_raw = []
iter = indices
for imt in products:
iml = []
for i in iter:
iml_i = compute_IMT_exceedance(rx_lat[i], rx_lon[i], distances[i].flatten(), fr, p, lambdas, forecast_len,
lambdas_perc, m_range, m_pdf, m_cdf, model, imt=imt, IMT_min = 0.0,
IMT_max = 2.0, rx_label = i, rtol = 0.1, use_cython=True)
iml.append(iml_i)
logger.info(f"Estimated {imt} at rx {i} is {iml_i}")
iml_grid_raw.append(iml)
end = timer()
logger.info(f"Ground motion exceedance computation time: {round(end - start, 1)} seconds")
if np.isnan(iml_grid_raw).all():
msg = "No valid ground motion intensity measures were forecasted. Try a different ground motion model."
logger.error(msg)
raise Exception(msg)
# create list of one empty list for each imt
iml_grid = [[] for _ in range(len(products))] # final ground motion grids
iml_grid_prep = iml_grid.copy() # temp ground motion grids
@ -481,6 +519,12 @@ verbose: {verbose}")
mode='reflect', anti_aliasing=False)
iml_grid_hd[iml_grid_hd == 0.0] = np.nan # change zeroes back to nan
# trim edges so the grid is not so blocky
vmin_hd = min(x for x in iml_grid_hd.flatten() if not isnan(x))
vmax_hd = max(x for x in iml_grid_hd.flatten() if not isnan(x))
trim_thresh = vmin
iml_grid_hd[iml_grid_hd < trim_thresh] = np.nan
# generate image overlay
north, south = lat.max(), lat.min() # Latitude range
east, west = lon.max(), lon.min() # Longitude range
@ -489,17 +533,27 @@ verbose: {verbose}")
map_center = [np.mean([north, south]), np.mean([east, west])]
# Create an image from the grid
cmap_name = 'YlOrRd'
cmap = plt.get_cmap(cmap_name)
fig, ax = plt.subplots(figsize=(6, 6))
ax.imshow(iml_grid_hd, origin='lower', cmap='viridis')
ax.imshow(iml_grid_hd, origin='lower', cmap=cmap, vmin=vmin, vmax=vmax)
ax.axis('off')
# Save the figure
fig.canvas.draw()
plt.savefig("overlay_" + str(j) + ".svg", bbox_inches="tight", pad_inches=0, transparent=True)
overlay_filename = f"overlay_{j}.svg"
plt.savefig(overlay_filename, bbox_inches="tight", pad_inches=0, transparent=True)
plt.close(fig)
# Embed geographic bounding box into the SVG
map_bounds = dict(zip(("south", "west", "north", "east"),
map(float, (grid_lat_min, grid_lon_min, grid_lat_max, grid_lon_max))))
tree = ET.parse(overlay_filename)
tree.getroot().set("data-map-bounds", json.dumps(map_bounds))
tree.write(overlay_filename, encoding="utf-8", xml_declaration=True)
logger.info(f"Saved geographic bounds to SVG metadata (data-map-bounds): {overlay_filename} → {map_bounds}")
# Make the color bar
cmap_name = 'viridis'
width = 50
height = 500
@ -507,16 +561,20 @@ verbose: {verbose}")
gradient = np.vstack((gradient, gradient)).T
gradient = np.tile(gradient, (1, width))
colorbar_title = products[j]
if "PGA" in colorbar_title or "SA" in colorbar_title:
colorbar_title = colorbar_title + " (g)"
fig, ax = plt.subplots(figsize=((width + 40) / 100.0, (height + 20) / 100.0),
dpi=100) # Increase fig size for labels
ax.imshow(gradient, aspect='auto', cmap=plt.get_cmap(cmap_name),
extent=[0, 1, vmin, vmax]) # Note: extent order is different for vertical
ax.imshow(gradient, aspect='auto', cmap=cmap.reversed(),
extent=[0, 1, vmin, vmax_hd]) # Note: extent order is different for vertical
ax.set_xticks([]) # Remove x-ticks for vertical colorbar
num_ticks = 11 # Show more ticks
tick_positions = np.linspace(vmin, vmax, num_ticks)
tick_positions = np.linspace(vmin, vmax_hd, num_ticks)
ax.set_yticks(tick_positions)
ax.set_yticklabels([f"{tick:.2f}" for tick in tick_positions]) # format tick labels
ax.set_title(imt, pad=15)
ax.set_title(colorbar_title, loc='right', pad=15)
fig.subplots_adjust(left=0.25, right=0.75, bottom=0.05, top=0.95) # Adjust Layout
fig.savefig("colorbar_" + str(j) + ".svg", bbox_inches='tight')
plt.close(fig)
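
For downstream use of the overlays produced above, a minimal sketch of reading the embedded bounds back out of an SVG; the filename overlay_0.svg is an assumption for illustration:

# Sketch: read the geographic bounds written into an overlay SVG by the change above.
import json
import xml.etree.ElementTree as ET

tree = ET.parse("overlay_0.svg")
bounds = json.loads(tree.getroot().get("data-map-bounds"))
# bounds has keys "south", "west", "north", "east"; e.g. place the overlay
# on a web map using the corner pair [[south, west], [north, east]]
corners = [[bounds["south"], bounds["west"]], [bounds["north"], bounds["east"]]]
print(corners)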