initial commit

Joanna Kocot Test 2025-06-02 15:13:13 +02:00
commit 5a17249639
6 changed files with 367 additions and 0 deletions

9
src/data_gen_01d.py Normal file

@@ -0,0 +1,9 @@
import numpy as np
def data_gen(D, x, y, elevations, picks):
"""Write the TRMLOC data file D: station index, x, y, elevation and pick time relative to the earliest pick, tab-separated."""
min_pick = np.min(picks)
x = np.array(x)
y = np.array(y)
elevations = np.array(elevations)
picks = np.array(picks)
data = np.column_stack((np.arange(1, len(x) + 1), x, y, elevations, picks - min_pick))
np.savetxt(D, data, delimiter='\t', fmt='%d\t%.6f\t%.6f\t%.6f\t%.6f')
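
For reference, a minimal usage sketch of data_gen; the station coordinates, elevations and pick times below are made-up placeholder values, not data from this commit:

# Hypothetical example: write a three-station data file with data_gen.
from obspy import UTCDateTime

from data_gen_01d import data_gen

x = [502143.7, 498877.2, 505010.9]        # UTM eastings [m]
y = [5538820.1, 5541102.6, 5536450.3]     # UTM northings [m]
elevations = [-214.0, -198.5, -230.2]     # negated elevations, as in prepare_data_file
picks = [
    UTCDateTime("2025-06-02T10:00:01.20"),
    UTCDateTime("2025-06-02T10:00:01.85"),
    UTCDateTime("2025-06-02T10:00:02.40"),
]

data_gen("data.txt", x, y, elevations, picks)
# data.txt then contains one row per station:
# index <TAB> x <TAB> y <TAB> elevation <TAB> pick time relative to the earliest pick [s]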

203
src/trmloc_input_builder.py Normal file

@@ -0,0 +1,203 @@
import utm
from obspy import read_events, read_inventory
from data_gen_01d import data_gen
from vmodel_gen_01d import vmodel_gen
from trmloc_scenario import trmloc_scenario
BOUNDARY_EXTENSION = 0.005 # ~500m extension for min/max coordinates
LOG_FILE_NAME = "log.txt"
OUT_FILE_NAME = "out.txt"
RES_FILE_NAME = "res.txt"
XCP_FILE_NAME = "xcp.txt"
INPUT_PAR_FILE_NAME = "inputPar.txt"
# Longitudes spanning (just under) one 6-degree UTM zone, used to measure the zone width in metres
MAX_LONGITUDE_IN_ZONE = 6 - 1e-9
MIN_LONGITUDE_IN_ZONE = 0
def main(network_inventory_path, picks_path, velocity_model_file_path, latitude, longitude, depth, event_name, fvi,
fdi, fpo, err, cme, cmz, ia, ip, cp, omp, fi, fh, cp_min, cp_max, cpi, cpf, ftm, fit, fsc, fec, m, e, xstep,
ystep, z1, z2, zstep, log_file_name=LOG_FILE_NAME, out_file_name=OUT_FILE_NAME, res_file_name=RES_FILE_NAME,
xcp_file_name=XCP_FILE_NAME, input_par_file_name=INPUT_PAR_FILE_NAME
):
inventory = read_inventory(network_inventory_path)
picks = read_events(picks_path).events[0].picks
waveforms_with_p_points = get_waveforms_with_points([pick for pick in picks if pick.phase_hint[0] == "P"])
picked_seismograms_no = len(waveforms_with_p_points)
if picked_seismograms_no < 3:
raise ValueError(
f"Picks have to be present on at least 3 seismograms. Found picks for only {picked_seismograms_no}.")
data_file_name = prepare_data_file(
waveforms_with_p_points, inventory, latitude, longitude
)
vmodel_file_name = prepare_vmodel_file(
inventory, velocity_model_file_path, latitude, longitude, xstep, ystep, z1, z2, zstep
)
params_file_name = prepare_params_file( data_file_name, vmodel_file_name, latitude, longitude, depth, event_name,
fvi, fdi, fpo, err, cme, cmz, ia, ip, cp, omp, fi, fh, cp_min, cp_max, cpi, cpf, ftm, fit, fsc, fec, m, e,
log_file_name, out_file_name, res_file_name, xcp_file_name, input_par_file_name
)
return params_file_name
def prepare_data_file(seismograms_with_points, inventory, latitude, longitude):
utm_zone = utm.from_latlon(latitude, longitude)[2:4]
station_data = {"station_x": [], "station_y": [], "station_elevations": [], "pick_times": []}
for points in seismograms_with_points.values():
channel = find_channel(points[0].waveform_id, inventory)
if channel is None:
raise ValueError(f"Channel {points[0].waveform_id} not found")
station_utm_coords = convert_to_utm(channel.latitude, channel.longitude, utm_zone[0], utm_zone[1])
if len(points) != 1:
raise ValueError(
f"Seismogram {points[0].waveform_id} contains more than one picked point."
" This would cause the application to produce inconsistent results"
)
for point in points:
station_data["station_x"].append(station_utm_coords[0])
station_data["station_y"].append(station_utm_coords[1])
station_data["station_elevations"].append(-channel.elevation)
station_data["pick_times"].append(point.time)
data_file_path = "data.txt"
data_gen(
data_file_path,
station_data["station_x"],
station_data["station_y"],
station_data["station_elevations"],
station_data["pick_times"],
)
return data_file_path
def prepare_vmodel_file(inventory, velocity_model_file_path, latitude, longitude, xstep, ystep, z1, z2, zstep):
utm_zone = utm.from_latlon(latitude, longitude)[2:4]
min_coords, max_coords = get_min_max_coordinates(inventory)
min_utm = convert_to_utm(min_coords["latitude"], min_coords["longitude"], utm_zone[0], utm_zone[1])
max_utm = convert_to_utm(max_coords["latitude"], max_coords["longitude"], utm_zone[0], utm_zone[1])
vmodel_file_path = "VeloMod.txt"
vmodel_gen(
vmodel_file_path, "Velocity model", 0, # Struc 0 or 1 [default set to 0. 1 is not supported yet]
round(min_utm[0]), round(max_utm[0]), xstep,
round(min_utm[1]), round(max_utm[1]), ystep,
z1, z2, zstep, velocity_model_file_path
)
return vmodel_file_path
def prepare_params_file( data_file_name, vmodel_file_name, latitude, longitude, depth, event_name,
fvi, fdi, fpo, err, cme, cmz, ia, ip, cp, omp, fi, fh, cp_min, cp_max, cpi, cpf, ftm, fit, fsc, fec, m, e,
log_file_name, out_file_name, res_file_name, xcp_file_name, input_par_file_name
):
utm_zone = utm.from_latlon(latitude, longitude)[2:4]
event_utm = convert_to_utm(latitude, longitude, utm_zone[0], utm_zone[1])
trmloc_scenario(
input_par_file_name,
data_file_name,
vmodel_file_name,
"", # cp list file inactive
log_file_name, out_file_name, res_file_name, xcp_file_name,
event_name, fvi, fdi, fpo, err, cme, cmz, ia, event_utm[0], event_utm[1], depth * 1000, ip,  # depth: km -> m
cp, omp, fi, fh, cp_min, cp_max, cpi, cpf, ftm, fit, fsc, fec, m, e
)
return input_par_file_name
def get_min_max_coordinates(inventory):
channels = get_all_channels(inventory)
latitudes = [channel.latitude for channel in channels]
longitudes = [channel.longitude for channel in channels]
# Extend boundaries by BOUNDARY_EXTENSION (0.005 degrees, ~500 m) in each direction
min_coords = {"latitude": min(latitudes) - BOUNDARY_EXTENSION, "longitude": min(longitudes) - BOUNDARY_EXTENSION}
max_coords = {"latitude": max(latitudes) + BOUNDARY_EXTENSION, "longitude": max(longitudes) + BOUNDARY_EXTENSION}
return min_coords, max_coords
def get_waveforms_with_points(picks):
stack = list(picks)
grouped_picks = {}
while stack:
pick = stack.pop()
if pick.phase_hint.endswith("e"):
stack.pop()
else:
waveform_id_string = stringify_waveform_id(pick.waveform_id)
if waveform_id_string not in grouped_picks:
grouped_picks[waveform_id_string] = []
grouped_picks[waveform_id_string].append(pick)
return grouped_picks
def find_channel(waveform_id, networks):
return next(
(
channel
for network in networks if network.code == waveform_id.network_code
for station in network.stations if station.code == waveform_id.station_code
for channel in station.channels
if channel.location_code == waveform_id.location_code
and channel.code == waveform_id.channel_code
),
None
)
def get_all_channels(networks):
return [channel for network in networks for station in network.stations for channel in station.channels]
def convert_to_utm(latitude, longitude, target_longitude_zone, target_latitude_zone):
easting, northing, zone_number, zone_letter = utm.from_latlon(latitude, longitude)
if (zone_letter >= 'N' and target_latitude_zone < 'N') or (zone_letter < 'N' and target_latitude_zone >= 'N'):
raise ValueError(
f"Points must be on the same hemisphere (N/S). Offending point latitude:{latitude}, longitude: {longitude}"
)
POLAR_ZONE_CODES = {'A', 'B', 'Y', 'Z'}
if zone_letter in POLAR_ZONE_CODES:
raise ValueError(
f"Polar regions not supported. Offending point latitude:{latitude}, longitude: {longitude}"
)
if zone_number != target_longitude_zone:
# Use the wrapped zone difference so zones adjacent across the antimeridian (e.g. 60 and 1) are accepted
if abs(get_longitude_zone_difference(zone_number, target_longitude_zone)) > 2:
raise ValueError(
f"Point latitude:{latitude}, longitude: {longitude} is too far from requested UTM zone {target_longitude_zone}"
)
# Recalculate to the requested UTM zone if necessary
easting, northing, _, _ = recalculate_to_zone(
[easting, northing, zone_number, zone_letter], target_longitude_zone, latitude
)
return [easting, northing]
def recalculate_to_zone(utm_coords, target_longitude_zone, latitude):
zone_difference = get_longitude_zone_difference(utm_coords[2], target_longitude_zone)
min_easting, _, _, _ = utm.from_latlon(latitude, MIN_LONGITUDE_IN_ZONE)
max_easting, _, _, _ = utm.from_latlon(latitude, MAX_LONGITUDE_IN_ZONE)
shift = zone_difference * (max_easting - min_easting)
adjusted_easting = utm_coords[0] + shift
return [adjusted_easting, utm_coords[1], utm_coords[2], utm_coords[3]]
def get_longitude_zone_difference(longitude_zone1, longitude_zone2):
# Wrap the difference into [-30, 30] so that e.g. zones 60 and 1 count as adjacent,
# with positive values meaning zone1 lies east of zone2
difference = longitude_zone1 - longitude_zone2
if difference < -30:
return difference + 60
if difference <= 30:
return difference
return difference - 60
def stringify_waveform_id(waveform_id):
return f"{waveform_id.network_code}{waveform_id.station_code}{waveform_id.location_code}{waveform_id.channel_code}"


@@ -0,0 +1,71 @@
import sys
import argparse
from trmloc_input_builder import main as trmloc_input_builder
def main(argv):
def str2bool(v):
if v.lower() in ("True", "TRUE", "yes", "true", "t", "y", "1"):
return True
elif v.lower() in ("False", "FALSE", "no", "false", "f", "n", "0"):
return False
else:
raise argparse.ArgumentTypeError("Boolean value expected.")
def parse_int_with_nan(value):
try:
return int(value)
except ValueError:
return float('nan')
def parse_float_with_nan(value):
try:
return float(value)
except ValueError:
return float('nan')
parser = argparse.ArgumentParser()
parser.add_argument("network_inventory_path", help="Path to input file of type inventory")
parser.add_argument("picks_path", help="Path to the picks file")
parser.add_argument("--velocity_model_file_path", help="Path to the velocity model file", type=str, required=True)
parser.add_argument("--latitude", help="Event latitude", type=parse_float_with_nan, required=True)
parser.add_argument("--longitude", help="Event longitude", type=parse_float_with_nan, required=True)
parser.add_argument("--depth", help="Event depth in kilometers", type=parse_float_with_nan, required=True)
parser.add_argument("--event_name", help="Name of the event", type=str, required=True)
parser.add_argument("--fvi", help="FVI parameter", type=parse_int_with_nan, required=True)
parser.add_argument("--fdi", help="FDI parameter", type=parse_int_with_nan, required=True)
parser.add_argument("--fpo", help="FPO parameter", type=parse_int_with_nan, required=True)
parser.add_argument("--err", help="Error parameter", type=parse_int_with_nan, required=True)
parser.add_argument("--cme", help="CME parameter", type=parse_float_with_nan, required=True)
parser.add_argument("--cmz", help="CMZ parameter", type=parse_float_with_nan, required=True)
parser.add_argument("--ia", help="IA parameter", type=str, required=True)
parser.add_argument("--ip", help="IP parameter", type=str, required=True)
parser.add_argument("--cp", help="CP parameter", type=parse_float_with_nan, required=True)
parser.add_argument("--omp", help="OMP parameter", type=parse_int_with_nan, required=True)
parser.add_argument("--fi", help="FI parameter", type=parse_int_with_nan, required=True)
parser.add_argument("--fh", help="FH parameter", type=parse_float_with_nan, required=True)
parser.add_argument("--cp_min", help="CP_MIN parameter", type=parse_float_with_nan, required=True)
parser.add_argument("--cp_max", help="CP_MAX parameter", type=parse_float_with_nan, required=True)
parser.add_argument("--cpi", help="CPI parameter", type=parse_float_with_nan, required=True)
parser.add_argument("--cpf", help="CPF parameter", type=parse_int_with_nan, required=False)
parser.add_argument("--ftm", help="FTM parameter", type=parse_float_with_nan, required=True)
parser.add_argument("--fit", help="FIT parameter", type=parse_int_with_nan, required=True)
parser.add_argument("--fsc", help="FSC parameter", type=parse_float_with_nan, required=True)
parser.add_argument("--fec", help="FEC parameter", type=parse_float_with_nan, required=True)
parser.add_argument("--m", help="M parameter", type=str2bool, required=True)
parser.add_argument("--e", help="E parameter", type=str2bool, required=True)
parser.add_argument("--xstep", help="X step size", type=parse_int_with_nan, required=True)
parser.add_argument("--ystep", help="Y step size", type=parse_int_with_nan, required=True)
parser.add_argument("--z1", help="Z1 parameter", type=parse_int_with_nan, required=True)
parser.add_argument("--z2", help="Z2 parameter", type=parse_int_with_nan, required=True)
parser.add_argument("--zstep", help="Z step size", type=parse_int_with_nan, required=True)
parser.add_argument("--log_file_name", help="log file name", type=str, required=True)
parser.add_argument("--out_file_name", help="out file name", type=str, required=True)
parser.add_argument("--res_file_name", help="res file name", type=str, required=True)
parser.add_argument("--xcp_file_name", help="xcp file name", type=str, required=True)
parser.add_argument("--input_par_file_name", help="input params file name", type=str, required=True)
args = parser.parse_args(argv[1:])
trmloc_input_builder(**vars(args))
return
if __name__ == '__main__':
main(sys.argv)
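
The integer/float helpers above fall back to NaN rather than raising, so a non-numeric placeholder can be passed through without the parser aborting. A standalone sketch of that behaviour (the helper is re-declared here because it is local to main()):

# Illustration of the NaN-fallback parsing used by the CLI above.
import argparse
import math

def parse_float_with_nan(value):
    try:
        return float(value)
    except ValueError:
        return float('nan')

parser = argparse.ArgumentParser()
parser.add_argument("--cme", type=parse_float_with_nan)

args = parser.parse_args(["--cme", "not-a-number"])
assert math.isnan(args.cme)                  # unparsable input becomes NaN instead of aborting
assert parse_float_with_nan("1.5") == 1.5    # well-formed input parses normally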

20
src/trmloc_scenario.py Normal file

@@ -0,0 +1,20 @@
def trmloc_scenario(P, D, V, C, L, O, R, X, Es, Fvi, Fdi,
Fpo, Err, Cme, Cmz, Ia, Xap, Yap, Zap, Ip, Cp, Omp,
Fi, Fh, Cpm, CpM, Cpi, Cpf, Ftm, Fit, Fsc, Fec, m, e):
"""Write the TRMLOC input parameter file P, listing the input/output file names and the numerical and switching parameters."""
with open(P, 'w') as INfile:
INfile.write(f"INPUT FILES \n -P:{P} \n -D:{D} \n -V:{V} \n -C:{C} \n")
INfile.write(f"OUTPUT FILES \n -L:{L} \n -O:{O} \n -R:{R} \n -X:{X} \n")
INfile.write(f"NUMERICAL PARAMETERS \n -Es={Es} \n -Fvi={Fvi:.6f} \n -Fdi={Fdi:.6f} \n"
f" -Fpo={Fpo:.6f} \n -Err={Err:.6f} \n")
INfile.write(f" -Cme={Cme:.6f} \n -Cmz={Cmz:.6f} \n -Ia={Ia} \n -Xap={Xap:.6f} \n"
f" -Yap={Yap:.6f} \n -Zap={Zap:.6f} \n")
INfile.write(f" -Ip={Ip} \n -Cp={Cp:.6f} \n -Omp={Omp:.6f} \n -Fi={Fi:.6f} \n -Fh={Fh:.6f} \n")
INfile.write(f" -Cpm={Cpm:.6f} \n -CpM={CpM:.6f} \n -Cpi={Cpi:.6f} \n -Cpf={Cpf:.6f} \n")
INfile.write(f" -Ftm={Ftm:.6f} \n -Fit={Fit:.6f} \n -Fsc={Fsc:.6f} \n -Fec={Fec:.6f} \n")
INfile.write("SWITCHING PARAMETERS\n")
if m:
INfile.write(" -m\n")
if e:
INfile.write(" -e\n")

64
src/vmodel_gen_01d.py Normal file

@@ -0,0 +1,64 @@
import numpy as np
from scipy.io import loadmat
def vmodel_gen(P, Discr, Struc, x1, x2, xstep, y1, y2, ystep, z1, z2, zstep, velocity_model_file_path):
"""
Generate a 1D velocity model and save it to a file.
Parameters:
- P: filename for output.
- Discr: description of the model, typed by the user [string up to 255 characters].
- Struc: structure flag (0 or 1).
- x1, x2, xstep: minimum, maximum values and step for generation of the X-Axis.
- y1, y2, ystep: minimum, maximum values and step for generation of the Y-Axis.
- z1, z2, zstep: minimum, maximum values and step for generation of the Z-Axis.
- velocity_model_file_path: path to a MATLAB .mat file with P-wave velocity values.
Two options are currently supported:
1) 0-D model: a single velocity value.
2) 1-D model: an Nx2 array giving the depth and velocity of N layers, e.g.
200 5800 (depth - velocity)
800 6200
1500 6500
"""
data = loadmat(velocity_model_file_path)
data = np.column_stack((
np.array(data['d']['Depth'][0, 0]).flatten(),
np.array(data['d']['Vp'][0, 0]).flatten()
))
data = data * 1000
szx = int(np.ceil((x2 - x1) / xstep))
szy = int(np.ceil((y2 - y1) / ystep))
szz = int(np.ceil((z2 - z1) / zstep))
z = np.arange(z1, z2 + 1, zstep)
if data.shape[0] == 1:
vz = np.full(z.shape, data[0, 1])
Dimens = 0
elif data.shape[0] > 1:
data = np.vstack((data, [1000000, data[-1, 1]]))
vz = np.zeros_like(z)
for j in range(1, len(data)):
f1 = (z < data[j, 0]) & (z >= data[j - 1, 0])
vz[f1] = data[j - 1, 1]
vz[z >= data[-1, 0]] = data[-1, 1]
Dimens = 1
else:
raise ValueError("Complex velocity models are not supported.")
if np.any(vz == 0):
vz[vz == 0] = vz[vz != 0][0]
with open(P, 'w') as file:
file.write("# TNF 1.0 \n")
file.write(f"# DIMENSION {Dimens} \n")
file.write("# FORMAT ASCII \n")
file.write(f"# DESCRIPTION {Discr} \n")
file.write(f"# STRUCTURE {Struc} \n")
file.write(f"# SIZE {szx} {szy} {szz} {szx * szy * szz} \n")
file.write(f"# XAXIS {x1} {x2} {xstep} \n")
file.write(f"# YAXIS {y1} {y2} {ystep} \n")
file.write(f"# ZAXIS {z1} {z2} {zstep} \n")
np.savetxt(file, vz, fmt="%d")
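
The expected layout of the MATLAB input can be reproduced with scipy for testing. The sketch below builds a three-layer 1-D model matching the docstring example; the file name and grid values are placeholders, and the depth/velocity values are assumed to be in km and km/s since vmodel_gen scales them by 1000:

# Hypothetical test input for vmodel_gen: a struct 'd' with 'Depth' and 'Vp'
# fields, as read above via loadmat and data['d']['Depth'][0, 0].
import numpy as np
from scipy.io import savemat

from vmodel_gen_01d import vmodel_gen

savemat("velocity_model.mat", {
    "d": {
        "Depth": np.array([0.2, 0.8, 1.5]),   # layer depths, km (scaled to m in vmodel_gen)
        "Vp": np.array([5.8, 6.2, 6.5]),      # P-wave velocities, km/s (scaled to m/s)
    }
})

vmodel_gen(
    "VeloMod.txt", "Velocity model", 0,       # output path, description, Struc flag
    500000, 510000, 500,                      # x1, x2, xstep [m]
    5530000, 5545000, 500,                    # y1, y2, ystep [m]
    0, 3000, 100,                             # z1, z2, zstep [m]
    "velocity_model.mat",
)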

BIN
trmloc Normal file

Binary file not shown.