Tools, scripts, apps and functions to run fast source and ground shaking estimation
# GPLv3
#
# The Developers, 21st Century
import logging
import re
import os
import numpy as num
from pyrocko import orthodrome as pod, util as putil, moment_tensor as pmt
from pyrocko.cake import earthradius
from pyrocko.guts import Float, Int, List, Object, StringChoice, Tuple
from pyrocko.guts_array import Array
from pyrocko.model import GNSSCampaign, GNSSComponent, GNSSStation, Station
from pyrocko.trace import Trace
from pyrocko.gf.seismosizer import map_anchor, Location
from ewrica.sources import (EwricaIDSSource, EwricaIDSPatch, EwricaIDSSTF,
EwricaIDSLog)
op = os.path
logger = logging.getLogger('ewrica.io.ids')
km = 1e3
d2r = num.pi / 180.
class WaveformResult(Location):
nslc = Tuple.T(optional=True)
observed = Trace.T(optional=True)
synthetic = Trace.T(optional=True)
misfit = Float.T(optional=True)
class WaveformResultList(Object):
results = List.T(
WaveformResult.T(),
default=[],
optional=True)
@classmethod
def primitive_merge(cls, lists):
result = cls()
for lst in lists:
result.results += lst.results
return result
def get_results(self, subset=None):
results = self.results
if subset == 'with_traces':
results = [r for r in self.results
if r.observed is not None and r.synthetic is not None]
elif subset == 'without_traces':
results = [r for r in self.results
if r.observed is None or r.synthetic is None]
return results
def iter_traces(self, *args, **kwargs):
for res in self.get_results(*args, **kwargs):
for tr in (res.observed, res.synthetic):
yield tr
def grouped_results(self, gather, *args, **kwargs):
results = self.get_results(*args, **kwargs)
sort = list(map(gather, results))
if all(map(lambda x: isinstance(x, bool), sort)):
idx = num.arange(0, len(sort))[sort]
res = [results[i] for i in idx]
yield WaveformResultList(results=res)
else:
sort_set = set(sort)
for it in sort_set:
idx = [i for i, s in enumerate(sort) if s == it]
res = [results[i] for i in idx]
yield WaveformResultList(results=res)
def grouped_traces(self, gather, *args, **kwargs):
traces = list(self.iter_traces(*args, **kwargs))
sort = list(map(gather, traces))
if all(map(lambda x: isinstance(x, bool), sort)):
idx = num.arange(0, len(sort))[sort]
yield [traces[i] for i in idx]
else:
sort_set = set(sort)
for it in sort_set:
idx = [i for i, s in enumerate(sort) if s == it]
yield [traces[i] for i in idx]
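# Usage sketch (hypothetical data; `gather` may return any hashable key,
# or booleans to select a single subset):
#
#     wrl = WaveformResultList(results=[...])
#     for group in wrl.grouped_results(lambda res: res.nslc[:2]):
#         ...  # one WaveformResultList per (network, station)
#     for traces in wrl.grouped_traces(
#             lambda tr: tr.channel, subset='with_traces'):
#         ...  # observed/synthetic traces grouped by channel code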
snapshot_parameters = [
'Latitude[deg]',
'Longitude[deg]',
'Depth[km]',
'X_strike[km]',
'Y_downdip[km]',
'Slip[m]',
'Moment[Nm]']
slipmodel_parameters = [
'Lat[deg]',
'Lon[deg]',
'Depth[km]',
'X_strike[km]',
'Y_downdip[km]',
'Length[km]',
'Width[km]',
'Strike[deg]',
'Dip[deg]',
'Rake[deg]',
'Slip[m]',
'Moment[Nm]']
earthquake_stf_parameters = [
'Time',
'Moment_Rate',
'Accu_Moment',
'Mw']
rupture_front_parameters = [
'Lat[deg]',
'Lon[deg]',
'Depth[km]',
'X_strike[km]',
'Y_downdip[km]',
'Ruptime[s]']
obssyn_nearfield_parameters = [
'TimeH',
'ObsUe',
'ObsUn',
'SynUe',
'SynUn',
'TimeZ',
'ObsUz',
'SynUz']
obssyn_farfield_parameters = [
'TimeSH',
'ObsUe',
'ObsUn',
'SynUe',
'SynUn',
'TimeP',
'ObsUz',
'SynUz']
obssyn_gnss_parameters = [
'Lat',
'Lon',
'Ue_Obs',
'Un_obs',
'Up_Obs',
'Ue_Syn',
'Un_Syn',
'Up_Syn']
wfmisfitvar_parameters = [
'Station',
'Lat[deg]',
'Lon[deg]',
'nrv_dsp_e',
'nrv_dsp_n',
'nrv_dsp_z']
class IDSUnpackError(Exception):
pass
def _validate_ids_file(fn, parameter):
with open(fn) as f:
header = f.readline().split()
if header != parameter:
        raise IDSUnpackError(
            'Parameter order in IDS file "{}" differs from the expected '
            'order "{}".'.format(fn, parameter))
def _validate_snapshot_file(fn):
_validate_ids_file(fn, snapshot_parameters)
def _validate_slipmodel_file(fn):
_validate_ids_file(fn, slipmodel_parameters)
def _validate_earthquake_stf_file(fn):
_validate_ids_file(fn, earthquake_stf_parameters)
def _validate_rupture_front_file(fn):
_validate_ids_file(fn, rupture_front_parameters)
def _validate_obssyn_nearfield_file(fn):
_validate_ids_file(fn, obssyn_nearfield_parameters)
def _validate_obssyn_farfield_file(fn):
_validate_ids_file(fn, obssyn_farfield_parameters)
def _validate_obssyn_gnss_file(fn):
_validate_ids_file(fn, obssyn_gnss_parameters)
def _validate_wf_misfit_file(fn):
_validate_ids_file(fn, wfmisfitvar_parameters)
def load_stationinfo_file(fn):
'''Load all information from IDS StationInfo.dat input file
Returns all information on input stations
All output is given in standard units.
:param fn: filename of the StationInfo file
:type fn: str
:returns: list of stations
:rtype: list of :py:class:`pyrocko.model.station.Station`
'''
stations = []
with open(fn) as f:
lines = []
for i, line in enumerate(f):
if line.startswith('#'):
continue
lines.append(line)
if len(lines[2].split('!')) > 1:
nstations = int(lines[2].split('!')[1].strip())
else:
nstations = int(lines[2].strip())
for line in lines[4:]:
line = line.strip().split()
nsl = tuple(line[0].split('.'))
net = nsl[0] if len(nsl) > 1 else ''
sta = nsl[0] if len(nsl) == 1 else nsl[1]
loc = nsl[2] if len(nsl) > 2 else ''
stations.append(
Station(
network=net,
station=sta,
location=loc,
lat=float(line[1]),
lon=float(line[2])))
if len(stations) != nstations:
raise IDSUnpackError(
'Expected and found number of stations from {} differ.'.format(fn))
return stations
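# Usage sketch ('StationInfo.dat' is the IDS input file; the path is an
# assumption):
#
#     for sta in load_stationinfo_file('StationInfo.dat'):
#         print(sta.network, sta.station, sta.lat, sta.lon)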
def load_snapshot_file(fn):
'''Load all information from IDSSnapshot or SnapshotR file
    Returns all information on the patch-wise moment release within the
    time frame of the snapshot file.
All output is given in standard units.
:param fn: filename of the snapshot file
:type fn: str
:returns: patch center latitude in [deg],
patch center longitude in [deg],
patch center depth in [m],
patch center x coordinate along strike in [m] relative to hypocenter,
patch center y coordinate along downdip in [m] relative to hypocenter,
patch slip vector length in [m],
patch moment release in [Nm]
:rtype: :py:class:`numpy.ndarray`,
:py:class:`numpy.ndarray`,
:py:class:`numpy.ndarray`,
:py:class:`numpy.ndarray`,
:py:class:`numpy.ndarray`,
:py:class:`numpy.ndarray`,
:py:class:`numpy.ndarray`
'''
_validate_snapshot_file(fn)
lats, lons, depth, x, y, slip, moment = num.genfromtxt(
fn, unpack=True, skip_header=1)
depth *= km
x *= km
y *= km
return lats, lons, depth, x, y, slip, moment
def load_slipmodel_file(fn):
'''Load all information from IDSSlipModel.dat file
    This includes the extent and orientation of the patches. Slip vector
    length and orientation are given, as well as the final patch moment
    release.
All output is given in standard units.
:param fn: filename of the slipmodel file
:type fn: str
:returns: patch center latitude in [deg],
patch center longitude in [deg],
patch center depth in [m],
patch center x coordinate along strike in [m] relative to hypocenter,
patch center y coordinate along downdip in [m] relative to hypocenter,
patch length in [m],
patch width in [m],
patch strike in [deg],
patch dip in [deg],
        patch rake in [deg],
        patch slip vector length in [m],
        patch moment release in [Nm]
    :rtype: :py:class:`numpy.ndarray`,
        :py:class:`numpy.ndarray`,
        :py:class:`numpy.ndarray`,
        :py:class:`numpy.ndarray`,
        :py:class:`numpy.ndarray`,
        :py:class:`numpy.ndarray`,
        :py:class:`numpy.ndarray`,
        :py:class:`numpy.ndarray`,
        :py:class:`numpy.ndarray`,
        :py:class:`numpy.ndarray`,
        :py:class:`numpy.ndarray`,
        :py:class:`numpy.ndarray`
'''
_validate_slipmodel_file(fn)
lats, lons, depth, x, y, ln, wd, strike, dip, rake, slip, moment = \
num.genfromtxt(fn, unpack=True, skip_header=1)
depth *= km
x *= km
y *= km
ln *= km
wd *= km
return lats, lons, depth, x, y, ln, wd, strike, dip, rake, slip, moment
def load_patch_stf_file(fn):
'''Load all information from PT_STF.dat patch source time function file
    The patch order is the same as in the corresponding slipmodel.dat file
    (see :py:func:`ewrica.io.ids.load_slipmodel_file`). Here only the
    relative time after event origin time and the corresponding patch
    moment rates are given.
:param fn: filename of the patch source time function file
:type fn: str
:returns: time after event origin in [s],
patch moment rates (source time functions) in [Nm/s]
:rtype: :py:class:`numpy.ndarray` ``(ntimes,)``,
:py:class:`numpy.ndarray` ``(ntimes, nsubfaults)``
'''
data = num.genfromtxt(fn, skip_header=1)
return data[:, 0], data[:, 1:]
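# Usage sketch: the patch moment rates sum to the total source time
# function ('PT_STF.dat' follows the IDS output naming convention):
#
#     times, moment_rates = load_patch_stf_file('PT_STF.dat')
#     total_stf = moment_rates.sum(axis=1)  # total moment rate in [Nm/s]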
def load_earthquake_stf_file(fn):
'''Load all information from EQ_STF.dat source time function file
Here only the relative time after event origin time and the corresponding
source moment rates are given.
:param fn: filename of the earthquake source time function file
:type fn: str
:returns: time after event origin in [s],
source moment rate (source time function) in [Nm/s],
source cumulative moment in [Nm]
:rtype: :py:class:`numpy.ndarray` ``(ntimes,)``,
:py:class:`numpy.ndarray` ``(ntimes,)``,
:py:class:`numpy.ndarray` ``(ntimes,)``
'''
_validate_earthquake_stf_file(fn)
time, moment_rate, moment = num.genfromtxt(
fn, unpack=True, skip_header=1, usecols=[0, 1, 2])
magnitude = num.genfromtxt(
fn, unpack=True, skip_header=(moment > 0).argmax() + 1, usecols=[3])
atol = 0.01
try:
num.testing.assert_allclose(
pmt.moment_to_magnitude(moment[1:]), magnitude, atol=atol)
except AssertionError:
        raise IDSUnpackError(
            'Read magnitude and moments converted to magnitude differ by '
            'more than {}.'.format(atol))
rtol = 0.001
dt = num.mean(num.diff(time))
try:
num.testing.assert_allclose(
num.cumsum(moment_rate)[:-1] * dt, moment[1:], rtol=rtol)
except AssertionError:
        raise IDSUnpackError(
            'Read moments and moment rates converted to moment differ by '
            'more than {}%.'.format(rtol*100))
return time, moment_rate, moment
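# Usage sketch: the final moment magnitude can be recovered from the
# cumulative moment ('EQ_STF.dat' follows the IDS output naming
# convention):
#
#     time, moment_rate, moment = load_earthquake_stf_file('EQ_STF.dat')
#     mw_final = pmt.moment_to_magnitude(moment[-1])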
def load_rupture_front_file(fn):
'''Load all information from Rup_Front.dat rupture front file
Here only the relative time after event origin time is given. The order of
patches is not the same as in :py:func:`ewrica.io.ids.load_patch_stf_file`
or :py:func:`ewrica.io.ids.load_slipmodel_file`.
:param fn: filename of the rupture front file
:type fn: str
:returns: patch center latitude in [deg],
patch center longitude in [deg],
patch center depth in [m],
patch center x coordinate along strike in [m] relative to hypocenter,
patch center y coordinate along downdip in [m] relative to hypocenter,
patch rupture arrival time in [s]
:rtype: :py:class:`numpy.ndarray`,
:py:class:`numpy.ndarray`,
:py:class:`numpy.ndarray`,
:py:class:`numpy.ndarray`,
:py:class:`numpy.ndarray`,
:py:class:`numpy.ndarray`
'''
_validate_rupture_front_file(fn)
lat, lon, depth, x, y, time = num.genfromtxt(
fn, unpack=True, skip_header=1)
depth *= km
x *= km
y *= km
# interpolator = nearest_interp(num.vstack([x, y]).T, time)
return lat, lon, depth, x, y, time # , interpolator
def load_idslog_file(fn):
    '''Load the iteration log from the idslog_iteration.dat log file
    The misfits are given as normalized residual variances.
:param fn: filename of the idslog file
:type fn: str
:returns: Detailed IDS Log information
:rtype: :py:class:`ewrica.sources.EwricaIDSLog`
'''
rows_header = 5
rows_footer = 3
data = num.genfromtxt(
fn,
unpack=True,
skip_header=rows_header,
skip_footer=rows_footer,
names=True)
names = data.dtype.names
subset_dict = dict(
nrv_swf='waveform',
nrv_osm='sm_offset',
nrv_gns='gnss',
nrv_sar='insar')
log = EwricaIDSLog(
misfit_all=num.array([i[names.index('nrv_all')] for i in data]),
misfit_subsets={
subset_dict[name]: num.array([i[names.index(name)] for i in data])
for name in subset_dict.keys() if name in names},
smoothing=num.array(
[i[names.index('smoothing')] for i in data]).astype(num.int64),
max_slip=num.array([i[names.index('max_slip')] for i in data]),
magnitude=num.array([i[names.index('Mw')] for i in data]))
    msg = ('Format of IDS file "{}" differs from the known one with {} '
           'header and {} footer rows.'.format(fn, rows_header, rows_footer))
if int(data[0][names.index('iteration')]) != 1:
raise IDSUnpackError(msg)
if int(data[-1][names.index('iteration')]) != log.n_iterations:
raise IDSUnpackError(msg)
with open(fn) as f:
line = f.readlines()[-rows_footer].strip()
if not line.startswith('iteration terminated:'):
raise IDSUnpackError(msg)
log.stop_cause = line.split(':')[1].strip().replace(' ', '_')[:-1]
# log.validate()
return log
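# Usage sketch: inspect the misfit evolution of an inversion run (file
# name as used in load_rectangular_source below):
#
#     log = load_idslog_file('idslog_iteration.dat')
#     print(log.n_iterations, log.stop_cause, log.misfit_all[-1])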
def load_earthquake_mp_file(fn):
'''Load selected information from EQ_MP.dat information file
:param fn: filename of the earthquake mp file
:type fn: str
    :returns: source cumulative moment in [Nm],
        moment magnitude,
        number of subfaults,
        moment centroid latitude in [deg],
        moment centroid longitude in [deg],
        moment centroid depth in [m],
        mean rupture velocity (based on first 10% of moment release) in [m/s]
    :rtype: float,
        float,
        int,
        float,
        float,
        float,
        float
'''
pattern_moment = (r'Seismic moment:\s{1,}(?P<moment>\d+\.\d+)[eE]'
r'(?P<moment_exp>[+-]\d+) Nm')
pattern_magnitude = r'Moment magnitude:\s{1,}(?P<mag>\d+\.\d+)'
pattern_n_subfaults = r'Number of sub-faults:\s{1,}(?P<n_subfaults>\d+)'
pattern_centroid = (
r'Centroid of moment distribution: \(\s+(?P<clat>[-]{0,1}\d+\.\d+) '
r'deg_N,\s+(?P<clon>[-]{0,1}\d+\.\d+) deg_E,\s+'
r'(?P<cdepth>[-]{0,1}\d+\.\d+) km\)')
pattern_velocity = (
r'Average rupture velocity \(based on the first \d+\% of local moment '
r'release\):\s+(?P<vrup>\d+\.\d+) km\/s')
pattern = '|'.join([
pattern_moment,
pattern_magnitude,
pattern_n_subfaults,
pattern_centroid,
pattern_velocity])
matches = []
with open(fn) as f:
for line in f:
line = line.strip()
m = re.match(pattern, line)
if m is None:
continue
matches.append(m)
d = {k: float(v) for m in matches for k, v in m.groupdict().items()
if v is not None}
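    # convert centroid depth [km -> m] and rupture velocity
    # [km/s -> m/s], and rebuild the moment from mantissa and exponent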
d['cdepth'] *= km
d['vrup'] *= km
d['moment'] *= 10**(d['moment_exp'])
atol = 0.01
try:
num.testing.assert_allclose(
pmt.moment_to_magnitude(d['moment']),
d['mag'],
atol=atol)
except AssertionError:
        raise IDSUnpackError(
            'Read magnitude and moment converted to magnitude differ by '
            'more than {}.'.format(atol))
return (d['moment'], d['mag'], int(d['n_subfaults']), d['clat'], d['clon'],
d['cdepth'], d['vrup'])
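# Usage sketch ('EQ_MP.dat' follows the IDS output naming convention):
#
#     (moment, mag, n_subfaults,
#      clat, clon, cdepth, vrup) = load_earthquake_mp_file('EQ_MP.dat')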
def load_ids_config_file(fn):
'''Load all information from ids config file
:param fn: filename of the ids config file
:type fn: str
:returns: dictionary of all settings, files, paths and parameters set in
the given config
:rtype: dict
'''
def string_or_float(string):
try:
a = float(string)
try:
return int(string)
except ValueError:
return a
except (ValueError, TypeError):
return string.replace("'", '')
data = []
with open(fn) as f:
for line in f:
if line.startswith('#'):
continue
data.append([string_or_float(d) for d in line.strip().split()])
d = {}
d['time'] = putil.stt('{}-{}-{} {}:{}:{}'.format(*data[0]))
d['hypocenter_lat'] = data[1][0]
d['hypocenter_lon'] = data[1][1]
d['hypocenter_depth'] = data[1][2] * km
d['magnitude'] = data[2][0]
d['wf_path'] = data[3][0]
d['wf_distance_range'] = tuple(d * km for d in data[4])
d['wf_static_weight'] = data[5][0]
d['gf_dyn_path'] = data[6][0]
d['gf_dyn_synthetic_filter'] = tuple(data[7])
d['gnss_sites'] = data[8][0]
d['gnss_path'] = data[8][1]
d['gnss_weight'] = data[9][0]
d['insar_grids'] = data[10][0]
d['insar_path'] = data[10][1]
d['insar_weight'] = data[11][0]
d['gf_geodetic_path'] = data[12][0]
d['gf_geodetic_filenames'] = tuple(data[13])
d['idisc'] = data[14][0]
if d['idisc'] == 1:
d['strike'], d['dip'], d['rake'] = data[15]
elif d['idisc'] == 0:
        logger.warning('Loading from sub-fault file not implemented.')
d['subfault_file'], d['n_subfaults'], d['iref'] = data[15]
d['niter_max'] = data[16][0]
d['rundir'] = data[17][0]
d['o2sdir'] = data[18][0]
d['ebcdir'] = data[19][0]
d['eqmp_file'] = data[20][0]
d['eqstf_file'] = data[21][0]
d['ptstf_file'] = data[22][0]
d['rupfront_file'] = data[23][0]
d['slipmodel_file'] = data[24][0]
d['wfvariance_file'] = data[25][0]
d['smstaticoffset_file'] = data[26][0]
d['gnssfit_file'] = data[27][0]
d['insarfit_file'] = data[28][0]
d['nsnapshots'] = data[29][0]
if len(data[30:]) != d['nsnapshots']:
        raise IDSUnpackError(
            'Number of given snapshot files does not match the number of '
            'snapshots stated in the config.')
d['snapshot_files'] = [i[0] for i in data[30:]]
d['snapshot_times'] = num.array([i[1:] for i in data[30:]])
return d
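# Usage sketch (the config file name is an assumption):
#
#     config = load_ids_config_file('ids_config.inp')
#     print(putil.tts(config['time']), config['magnitude'])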
def load_rectangular_source(
config,
lat,
lon,
lats,
lons,
depths,
slips,
strikes,
dips,
rakes,
xs,
ys,
patch_length,
patch_width,
nx,
ny,
mp_n_subfaults,
mp_vrup,
deltat,
patch_stfs,
patch_stf_times,
moments,
rundir,
*args, **kwargs):
patch_length, patch_width = patch_length[0], patch_width[0]
if nx * ny != mp_n_subfaults:
        raise IDSUnpackError('Patch grid has a different size than expected.')
source_length = nx * patch_length
source_width = ny * patch_width
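    # The hypocenter is expressed in normalized fault coordinates
    # nucleation_x (along strike) and nucleation_y (downdip), each within
    # [-1., 1.] with (0., 0.) at the fault center, following the pyrocko
    # RectangularSource convention. The patch coordinates xs/ys are given
    # relative to the hypocenter, so the minimum patch center minus half
    # a patch size is the hypocenter's distance to the fault edge.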
nucleation_x = 2 * (num.abs(
xs.min() - 0.5 * patch_length) / source_length) - 1
nucleation_y = 2 * (num.abs(
ys.min() - 0.5 * patch_width) / source_width) - 1
# Distance to new source reference point (at fault top center)
anchor = 'top'
strike, dip = config['strike'], config['dip']
anch_x, anch_y = map_anchor[anchor]
dx = (anch_x - nucleation_x) * 0.5 * source_length # along strike shift
dy = (anch_y - nucleation_y) * 0.5 * source_width # downdip shift
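    # rotate the in-plane shift (dx along strike, dy downdip) by strike
    # and dip into north/east/depth shifts of the anchor point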
anch_depth = config['hypocenter_depth'] + num.sin(d2r*dip) * dy
anch_northshift = num.cos(d2r*strike) * dx - \
num.sin(d2r*strike) * num.cos(d2r*dip) * dy
anch_eastshift = num.sin(d2r*strike) * dx + \
num.cos(d2r*strike) * num.cos(d2r*dip) * dy
anch_lat, anch_lon = pod.ne_to_latlon(
lat, lon, anch_northshift, anch_eastshift)
norths, easts = pod.latlon_to_ne_numpy(anch_lat, anch_lon, lats, lons)
patches = []
for ip in range(lats.shape[0]):
stf = EwricaIDSSTF(deltat=deltat, amplitudes=patch_stfs[:, ip])
stf_moment = stf.moment
time = None
if moments[ip] > 0.:
time = patch_stf_times[patch_stfs[:, ip] > 0.][0] + config['time']
# Based on maximum decimal digit of pt_stf file
atol = 2. * 10**(int(num.floor(num.log10(moments[ip]))) - 3)
try:
num.testing.assert_allclose(
moments[ip], stf_moment, atol=atol)
except AssertionError:
            logger.warning(
                'Summed patch source time function (moment={:e} Nm) '
                'differs by more than the maximum absolute tolerance of '
                '{:e} Nm from the moment given in the slip model file '
                '({:e} Nm). The source time function is rescaled to the '
                'slip model moment.'.format(stf_moment, atol, moments[ip]))
stf.amplitudes *= moments[ip] / stf_moment
patches.append(EwricaIDSPatch(
lat=anch_lat,
lon=anch_lon,
time=time,
depth=depths[ip],
north_shift=norths[ip],
east_shift=easts[ip],
x_coordinate=xs[ip] - dx,
y_coordinate=ys[ip] - dy,
length=patch_length,
width=patch_width,
slip=slips[ip],
strike=strikes[ip],
dip=dips[ip],
rake=rakes[ip],
anchor='center',
stf=stf))
source = EwricaIDSSource(
lat=anch_lat,
lon=anch_lon,
depth=anch_depth,
anchor=anchor,
length=source_length,
width=source_width,
time=config['time'],
strike=config['strike'],
dip=config['dip'],
rake=config['rake'],
nucleation_x=nucleation_x,
nucleation_y=nucleation_y,
patches=patches,
nx=nx,
ny=ny,
velocity=mp_vrup,
misfit_log=load_idslog_file(op.join(rundir, 'idslog_iteration.dat')))
return source
def load_curved_source(
config,
lats,
lons,
depths,
slips,
strikes,
dips,
rakes,
xs,
ys,
patch_length,
patch_width,
mp_vrup,
deltat,
patch_stfs,
patch_stf_times,
moments,
rundir,
*args, **kwargs):
anch_lat, anch_lon = pod.geographic_midpoint(lats, lons)
norths, easts = pod.latlon_to_ne_numpy(anch_lat, anch_lon, lats, lons)
patches = []
for ip in range(lats.shape[0]):
stf = EwricaIDSSTF(deltat=deltat, amplitudes=patch_stfs[:, ip])
stf_moment = stf.moment
time = None
if moments[ip] > 0.:
time = patch_stf_times[patch_stfs[:, ip] > 0.][0] + config['time']
# Based on maximum decimal digit of pt_stf file
atol = 2. * 10**(int(num.floor(num.log10(moments[ip]))) - 3)
try:
num.testing.assert_allclose(
moments[ip], stf_moment, atol=atol)
except AssertionError:
            logger.warning(
                'Summed patch source time function (moment={:e} Nm) '
                'differs by more than the maximum absolute tolerance of '
                '{:e} Nm from the moment given in the slip model file '
                '({:e} Nm). The source time function is rescaled to the '
                'slip model moment.'.format(stf_moment, atol, moments[ip]))
stf.amplitudes *= moments[ip] / stf_moment
patches.append(EwricaIDSPatch(
lat=anch_lat,
lon=anch_lon,
time=time,
depth=depths[ip],
north_shift=norths[ip],
east_shift=easts[ip],
x_coordinate=xs[ip],
y_coordinate=ys[ip],
length=patch_length[ip],
width=patch_width[ip],
slip=slips[ip],
strike=strikes[ip],
dip=dips[ip],
rake=rakes[ip],
anchor='center',
stf=stf))
source = EwricaIDSSource(
curved=True,
lat=anch_lat,
lon=anch_lon,
depth=0.,
anchor='center',
length=0.,
width=0.,
time=config['time'],
strike=None,
dip=None,
rake=None,
nucleation_x=None,
nucleation_y=None,
patches=patches,
nx=None,
ny=None,
velocity=mp_vrup,
misfit_log=load_idslog_file(op.join(rundir, 'idslog_iteration.dat')))
return source
def load_ids_source(config_fn):
    '''Load all information/results from IDS config file and run dir into
    a source
    :param config_fn: filename of the IDS config file
    :type config_fn: str
    :returns: Ewrica IDS Source object with as much information converted as