# User contributed plug-in for Pyrocko's seismic waveform browser Snuffler.
from __future__ import print_function
from builtins import zip
from builtins import range
import numpy as num
import time
from matplotlib.animation import FuncAnimation
from pyrocko.gui.snuffling import Param, Snuffling, Choice, Switch
from scipy.signal import fftconvolve, lfilter, hilbert
from scipy.interpolate import UnivariateSpline
from pyrocko import orthodrome as ortho
from pyrocko import parstack
from pyrocko import util
from pyrocko import trace
from pyrocko import model
import logging
logger = logging.getLogger('pyrocko.gui.snufflings.fk_parstack.py')
d2r = num.pi/180.
km = 1000.
def search_max_block(n_maxsearch, data):
    '''
    Find indices of maxima of *data* in groups of *n_maxsearch* samples.

    If the length of *data* is not a multiple of *n_maxsearch*, *data* is
    padded with its minimum value.

    :returns: array of indices into *data*.
    '''
    n = len(data)
    n_blocks = int(num.ceil(n / float(n_maxsearch)))
    n_missing = n_blocks * n_maxsearch - n
    if n_missing:
        a = num.pad(data, [0, n_missing], mode='minimum')
    else:
        a = data
    return num.argmax(a.reshape((-1, n_maxsearch)), axis=1) + \
        num.arange(n_blocks) * n_maxsearch
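# Illustrative example (not executed), assuming a 1-D numpy array as input:
#
#     >>> search_max_block(3, num.array([0., 5., 1., 2., 9., 0., 3.]))
#     array([1, 4, 6])
#
# i.e. the global indices of the largest sample within each block of three.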
def instantaneous_phase(signal):
analytic_signal = hilbert(signal)
return num.unwrap(num.angle(analytic_signal))
def get_instantaneous_frequency(signal, fs):
inst_phase = instantaneous_phase(signal)
return (num.diff(inst_phase) / (2.0*num.pi) * fs)
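# Sketch of the expected behaviour (hypothetical values): for a clean sine
# of frequency f0 sampled at fs Hz, get_instantaneous_frequency() returns
# values close to f0 away from the trace edges:
#
#     >>> fs, f0 = 100., 5.
#     >>> t = num.arange(0., 10., 1./fs)
#     >>> f_inst = get_instantaneous_frequency(num.sin(2.*num.pi*f0*t), fs)
#     >>> num.allclose(f_inst[50:-50], f0, atol=0.1)
#     True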
def get_center_station(stations, select_closest=False):
    '''Return the geographic center of the *stations* list (or, with
    *select_closest*, the station closest to that center).'''
n = len(stations)
slats = num.empty(n)
slons = num.empty(n)
for i, s in enumerate(stations):
slats[i] = s.lat
slons[i] = s.lon
center_lat, center_lon = ortho.geographic_midpoint(slats, slons)
if select_closest:
center_lats = num.ones(n)*center_lat
center_lons = num.ones(n)*center_lon
dists = ortho.distance_accurate50m_numpy(
center_lats, center_lons, slats, slons)
return stations[num.argmin(dists)]
else:
        return model.Station(lat=center_lat, lon=center_lon)
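# Minimal usage sketch (hypothetical coordinates, assuming pyrocko
# model.Station objects with lat/lon set):
#
#     >>> stations = [model.Station(lat=46.0, lon=11.0),
#     ...             model.Station(lat=47.0, lon=12.0)]
#     >>> center = get_center_station(stations, select_closest=True)
#
# With select_closest=True one of the input stations is returned, otherwise
# a new Station object at the geographic midpoint is created.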
def get_theoretical_backazimuth(event, stations, center_station):
return (ortho.azimuth_numpy(
event.lat, event.lon, center_station.lat, center_station.lon)
+ 180.) % 360.
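# Note: ortho.azimuth_numpy() returns the azimuth from the event towards the
# array center; adding 180 degrees (modulo 360) converts it into the
# back-azimuth observed at the array, e.g. an event due north of the array
# yields a back-azimuth of 0 degrees.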
def get_shifts(stations, center_station, bazis, slownesses):
    '''Compute per-station moveout delays in seconds for all combinations
    of back-azimuth and slowness.

    :returns: array of shape (len(bazis)*len(slownesses), len(stations)).
    '''
lats = num.array([s.lat for s in stations])
lons = num.array([s.lon for s in stations])
lat0 = num.array([center_station.lat] * len(stations))
lon0 = num.array([center_station.lon] * len(stations))
ns, es = ortho.latlon_to_ne_numpy(lat0, lon0, lats, lons)
station_vector = num.array((ns, es)).T
bazis = bazis * d2r
shifts = num.zeros((len(bazis)*len(slownesses), len(stations)))
ishift = 0
for ibazi, bazi in enumerate(bazis):
s_vector = num.array((num.cos(bazi), num.sin(bazi)))
for islowness, slowness in enumerate(slownesses):
shifts[ishift] = station_vector.dot(s_vector) * slowness
ishift += 1
return shifts
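# The rows of the returned shift table are ordered back-azimuth-major: row
# ibazi*len(slownesses) + islowness holds the per-station delays (in
# seconds) for that (back-azimuth, slowness) pair, e.g. (hypothetical
# indices):
#
#     >>> shifts = get_shifts(stations, center_station, bazis, slownesses)
#     >>> shifts[3*len(slownesses) + 7, 0]  # bazis[3], slownesses[7], sta 0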
def to_db(d):
return 10*num.log10(d/num.max(d))
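# Example: to_db(num.array([1., 10., 100.])) evaluates to
# array([-20., -10., 0.]), i.e. power relative to the maximum, in decibel.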
def lowpass_array(ydata_array, deltat, order, corner, demean=True, axis=1):
    '''
    Apply a Butterworth lowpass to each row of *ydata_array*.

    :param order: order of the filter
    :param corner: corner frequency of the filter in Hz

    The mean of each row is removed before filtering if *demean* is True.
    '''
(b, a) = trace._get_cached_filter_coefs(
order, [corner*2.0*deltat], btype='low')
data = ydata_array.astype(num.float64)
if len(a) != order+1 or len(b) != order+1:
logger.warn(
'Erroneous filter coefficients returned by '
'scipy.signal.butter(). You may need to downsample the '
'signal before filtering.')
if demean:
data -= num.mean(data, axis=1)[None].T
return lfilter(b, a, data)
def highpass_array(ydata_array, deltat, order, corner, demean=True, axis=1):
    '''
    Apply a Butterworth highpass to each row of *ydata_array*.

    :param order: order of the filter
    :param corner: corner frequency of the filter in Hz

    The mean of each row is removed before filtering if *demean* is True.
    '''
(b, a) = trace._get_cached_filter_coefs(
order, [corner*2.0*deltat], btype='high')
data = ydata_array.astype(num.float64)
if len(a) != order+1 or len(b) != order+1:
logger.warn(
'Erroneous filter coefficients returned by '
'scipy.signal.butter(). You may need to downsample the '
'signal before filtering.')
if demean:
data -= num.mean(data, axis=1)[None].T
return lfilter(b, a, data)
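# Usage sketch for the two array filters above (values are hypothetical):
# both expect *ydata_array* with one trace per row and the corner frequency
# in Hz; internally the corner is normalised by the Nyquist frequency
# (corner * 2 * deltat).
#
#     >>> filtered = lowpass_array(arrays, deltat=0.01, order=4, corner=2.0)
#     >>> filtered = highpass_array(filtered, deltat=0.01, order=4,
#     ...                           corner=0.05)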
def value_to_index(value, range_min, range_max, range_delta, clip=True):
    '''Map *value* (a scalar or an array of values) to its index in a
    regularly sampled range defined by *range_min*, *range_max* and
    *range_delta*.
    '''
indices = num.round((value-range_min)/range_delta)
if clip:
indices = num.clip(indices, 0, (range_max-range_min)/range_delta)
return num.asarray(indices, dtype=num.int)
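# Worked example: value_to_index(90., 0., 360., 2.) gives 45, because
# round((90 - 0) / 2) = 45, which lies inside the clipping range
# [0, (360 - 0) / 2].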
class FK(Snuffling):
'''
<html>
<head>
<style type="text/css">
body { margin-left:10px };
</style>
</head>
<body>
<h1 align='center'>FK ANALYSIS</h1>
<p>
Performs delay and sum in the time domain.
<u>Usage</u><br>
- Load station information at startup <br>
    - Zoom into the data until only the data you want to analyse is shown,
      or use extended markers to select time regions for analysis<br>
- Press the 'Run' button <br>
</p>
    The slowness is given in units of s/km.
    <p>
    If <b>Show</b> is activated, three images are generated for each
    processing block: a polar plot showing the maximum coherence found
    along the time axis for all slownesses and back-azimuths. The blue dot
    in that figure marks the position of the maximum. Based on that
    slowness/back-azimuth, the other two coherence maps are generated,
    showing the coherence in the slowness and back-azimuth domains for
    that specific maximum of the processing block.<br>
    </p>
    <p>
    Reference: Picinbono, B. (1997). On instantaneous amplitude and phase
    of signals. IEEE Transactions on Signal Processing, 45(3), 552.
    </p>
</body>
</html>
'''
def setup(self):
self.set_name('FK (parstack)')
self.add_parameter(Param(
'max slowness [s/km]', 'slowness_max', 0.2, 0., 1.))
self.add_parameter(Param(
'min slowness [s/km]', 'slowness_min', 0.01, 0., 1.))
self.add_parameter(Param(
'delta slowness [s/km]', 'slowness_delta', 0.002, 0., 0.2))
        self.add_parameter(Param(
            'delta backazimuth [deg]', 'delta_bazi', 2, 1, 20))
self.add_parameter(Param(
'Increment [s]', 'tinc', 60., 0.5, 60.,
high_is_none=True))
self.add_parameter(Param(
'Smoothing length [N]', 'ntaper', 0, 0, 30, low_is_none=True))
self.add_parameter(Param(
'Maximum search factor', 'search_factor', 1, 0, 3))
self.add_parameter(Choice(
'Use channels', 'want_channel', '*',
['*', '*Z', '*E', '*N', 'SHZ', 'BHZ', 'p0']))
self.add_parameter(
Choice('method', 'method', 'stack', ['stack', 'correlate'])
)
self.add_parameter(Switch('Show', 'want_all', True))
self.add_parameter(Switch('Phase weighted stack', 'want_pws', False))
self.set_live_update(False)
self.irun = 0
self.figs2draw = []
def new_figure(self, title=''):
'''Return a new Figure instance'''
fig_frame = self.pylab(name='FK: %s (%i)' %
(title, self.irun), get='figure_frame')
self.figs2draw.append(fig_frame.gcf())
return self.figs2draw[-1]
def draw_figures(self):
''' Draw all new figures and clear list.'''
for fig in self.figs2draw:
fig.canvas.draw()
self.figs2draw = []
def call(self):
self.cleanup()
figs = []
azi_theo = None
method = {'stack': 0,
'correlate': 2}[self.method]
bazis = num.arange(0., 360.+self.delta_bazi, self.delta_bazi)
slownesses = num.arange(self.slowness_min/km,
self.slowness_max/km,
self.slowness_delta/km)
n_bazis = len(bazis)
n_slow = len(slownesses)
viewer = self.get_viewer()
event = viewer.get_active_event()
stations = self.get_stations()
stations_dict = dict(zip([viewer.station_key(s) for s in stations],
stations))
traces_pile = self.get_pile()
        deltats = list(traces_pile.deltats.keys())
if len(deltats) > 1:
self.fail('sampling rates differ in dataset')
else:
deltat_cf = deltats[0]
tinc_use = self.get_tinc_use(precision=deltat_cf)
if self.ntaper:
taper = num.hanning(int(self.ntaper))
else:
taper = None
frames = None
t1 = time.time()
# make sure that only visible stations are used
use_stations = stations
center_station = get_center_station(use_stations, select_closest=True)
print('Center station: ', center_station)
shift_table = get_shifts(
stations=use_stations,
center_station=center_station,
bazis=bazis,
slownesses=slownesses)
shifts = num.round(shift_table / deltat_cf).astype(num.int32)
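        # shift_table is in seconds; parstack works on integer sample
        # shifts, hence the rounding by the common sampling interval.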
# padding from maximum shift of traces:
npad = num.max(num.abs(shifts))
tpad = npad * deltat_cf
# additional padding for cross over fading
npad_fade = 0
tpad_fade = npad_fade * deltat_cf
npad += npad_fade
tpad += tpad_fade
frames = None
tinc_add = tinc_use or 0
def trace_selector(x):
return util.match_nslc('*.*.*.%s' % self.want_channel, x.nslc_id)
for traces in self.chopper_selected_traces(
tinc=tinc_use, tpad=tpad, fallback=True,
want_incomplete=False, trace_selector=trace_selector):
if len(traces) == 0:
self.fail('No traces matched')
continue
            # time span of the current processing block
            t_min = traces[0].tmin
            t_max = traces[0].tmax
use_stations = []
for tr in traces:
try:
use_stations.append(stations_dict[viewer.station_key(tr)])
except KeyError:
                    self.fail(
                        'no station information for trace %s'
                        % '.'.join(tr.nslc_id))
shift_table = get_shifts(
stations=use_stations,
center_station=center_station,
bazis=bazis,
slownesses=slownesses)
shifts = num.round(shift_table / deltat_cf).astype(num.int32)
wmin = traces[0].tmin
wmax = wmin + tinc_add
iwmin = int(round((wmin-wmin) / deltat_cf))
iwmax = int(round((wmax-wmin) / deltat_cf))
lengthout = iwmax - iwmin
arrays = num.zeros((len(traces), lengthout + npad*2))
for itr, tr in enumerate(traces):
tr = tr.copy()
if viewer.highpass:
tr.highpass(4, viewer.highpass, demean=True)
else:
tr.ydata = num.asarray(
tr.ydata, dtype=num.float) - num.mean(tr.ydata)
if viewer.lowpass:
tr.lowpass(4, viewer.lowpass)
arrays[itr] = tr.get_ydata()
# if viewer.highpass:
# arrays = highpass_array(
# arrays, deltat_cf, 4, viewer.highpass)
# if viewer.lowpass:
# arrays = lowpass_array(arrays, deltat_cf, 4, viewer.lowpass)
_arrays = []
for itr, tr in enumerate(traces):
if taper is not None:
ydata = fftconvolve(arrays[itr], taper, mode='same')
else:
ydata = arrays[itr]
_arrays.append(num.asarray(ydata, dtype=num.float64))
arrays = _arrays
offsets = num.array(
[int(round((tr.tmin-wmin) / deltat_cf)) for tr in traces],
dtype=num.int32)
ngridpoints = len(bazis)*len(slownesses)
weights = num.ones((ngridpoints, len(traces)))
frames, ioff = parstack.parstack(
arrays, offsets, shifts, weights, method,
offsetout=iwmin,
lengthout=lengthout,
result=frames,
impl='openmp')
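            # frames holds one stacked trace per (back-azimuth, slowness)
            # grid point, i.e. shape (n_bazis*n_slow, lengthout); ioff is
            # the sample offset of the output reported by parstack.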
# theoretical bazi
if event is not None:
azi_theo = get_theoretical_backazimuth(
event, use_stations, center_station)
            print('theoretical back-azimuth %s degrees' % azi_theo)
print('processing time: %s seconds' % (time.time()-t1))
if frames is None:
self.fail('Could not process data!')
return
frames_reshaped = frames.reshape((n_bazis, n_slow, lengthout))
times = num.linspace(t_min-tpad_fade, t_max+tpad_fade, lengthout)
max_powers = num.max(frames, axis=0)
# power maxima in blocks
i_max_blocked = search_max_block(
n_maxsearch=int(npad*self.search_factor), data=max_powers)
max_powers += (num.min(max_powers)*-1)
max_powers /= num.max(max_powers)
max_powers *= max_powers
weights = max_powers[i_max_blocked]
block_max_times = times[i_max_blocked]
_argmax = num.argmax(frames, axis=0)
imax_bazi_all, imax_slow_all = num.unravel_index(
_argmax, dims=(n_bazis, n_slow))
local_max_bazi = bazis[imax_bazi_all][i_max_blocked]
local_max_slow = slownesses[imax_slow_all][i_max_blocked]*km
k_north = num.sin(local_max_bazi * d2r) * local_max_slow
k_east = num.cos(local_max_bazi * d2r) * local_max_slow
smooth = 4e7
spline_north = UnivariateSpline(
block_max_times, k_north, w=weights,
s=smooth
)
spline_east = UnivariateSpline(
block_max_times, k_east, w=weights,
s=smooth,
)
k_north_fit = spline_north(times)
k_east_fit = spline_east(times)
bazi_fitted = num.arctan2(k_east_fit, k_north_fit) / d2r
bazi_fitted -= 90.
bazi_fitted *= -1.
bazi_fitted[num.where(bazi_fitted<0.)] += 360.
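        # The arctan2/90-degree manipulation above converts the
        # mathematical angle of the fitted wavenumber vector into a
        # geographic back-azimuth (clockwise from north, in [0, 360)).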
spline_slow = UnivariateSpline(
block_max_times,
local_max_slow,
w=weights,
)
slow_fitted = spline_slow(times)
i_bazi_fitted = value_to_index(
bazi_fitted, 0., 360., self.delta_bazi)
i_slow_fitted = value_to_index(
slow_fitted, self.slowness_min, self.slowness_max,
self.slowness_delta)
i_shift = num.ravel_multi_index(
num.vstack((i_bazi_fitted, i_slow_fitted)),
(n_bazis, n_slow),
)
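        # i_shift maps every output sample to the row of the flattened
        # (back-azimuth, slowness) grid whose per-station shifts are used
        # to build the continuous beam below.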
stack_trace = num.zeros(lengthout)
i_base = num.arange(lengthout, dtype=num.int) + npad
for itr, tr in enumerate(traces):
isorting = num.clip(
i_base-shifts[i_shift, itr], npad, lengthout+npad)
stack_trace += tr.ydata[isorting]
beam_tr = trace.Trace(
tmin=t_min+tpad, ydata=stack_trace, deltat=deltat_cf)
self.add_trace(beam_tr)
if self.want_all:
# ---------------------------------------------------------
# maxima search
# ---------------------------------------------------------
fig1 = self.new_figure('Max Power')
nsubplots = 1
ax = fig1.add_subplot(nsubplots, 1, 1)
ax.plot(num.max(frames, axis=0))
# --------------------------------------------------------------
# coherence maps
# --------------------------------------------------------------
max_time = num.amax(frames, axis=0)
imax_time = num.argmax(max_time)
best_frame = num.amax(frames, axis=1)
imax_bazi_slow = num.argmax(best_frame)
imax_bazi, imax_slow = num.unravel_index(
num.argmax(best_frame),
dims=(n_bazis, n_slow))
fig2 = self.new_figure('Slowness')
data = frames_reshaped[imax_bazi, :, :]
data_max = num.amax(frames_reshaped, axis=0)
ax = fig2.add_subplot(211)
            ax.set_title('Global maximum slice')
ax.set_ylabel('slowness [s/km]')
ax.plot(times[imax_time], slownesses[imax_slow]*km, 'b.')
ax.pcolormesh(times, slownesses*km, data)
ax = fig2.add_subplot(212, sharex=ax, sharey=ax)
ax.set_ylabel('slowness [s/km]')
ax.pcolormesh(times, slownesses*km, data_max)
ax.set_title('Maximum')
# highlight block maxima
ax.plot(block_max_times, local_max_slow, 'wo')
ax.plot(times, num.clip(
slow_fitted, self.slowness_min, self.slowness_max)
)
fig3 = self.new_figure('Back-Azimuth')
data = frames_reshaped[:, imax_slow, :]
data_max = num.amax(frames_reshaped, axis=1)
ax = fig3.add_subplot(211, sharex=ax)
            ax.set_title('Global maximum slice')
ax.set_ylabel('back-azimuth')
ax.pcolormesh(times, bazis, data)
ax.plot(times[imax_time], bazis[imax_bazi], 'b.')
ax = fig3.add_subplot(212, sharex=ax, sharey=ax)
ax.set_ylabel('back-azimuth')
ax.set_title('Maximum')
ax.pcolormesh(times, bazis, data_max)
# highlight block maxima
ax.plot(block_max_times, local_max_bazi, 'wo')
ax.plot(times, num.clip(bazi_fitted, 0, 360.))
# xfmt = md.DateFormatter('%Y-%m-%d %H:%M:%S')
# ax.xaxis.set_major_formatter(xfmt)
# fig.autofmt_xdate()
# fig.subplots_adjust(hspace=0)
semblance = best_frame.reshape((n_bazis, n_slow))
fig4 = self.new_figure('Max')
theta, r = num.meshgrid(bazis, slownesses)
theta *= (num.pi/180.)
ax = fig4.add_subplot(111, projection='polar')
m = ax.pcolormesh(theta.T, r.T*km, to_db(semblance))
ax.plot(bazis[imax_bazi]*d2r, slownesses[imax_slow]*km, 'o')
            bazi_max = bazis[imax_bazi]
            slow_max = slownesses[imax_slow]*km
            ax.plot(bazi_max*d2r, slow_max, 'b.')
            ax.text(0.5, 0.01, 'Maximum at %s degrees, %s s/km' %
                    (num.round(bazi_max, 1), slow_max),
                    transform=fig4.transFigure,
                    horizontalalignment='center',
                    verticalalignment='bottom')
            if azi_theo is not None:
ax.arrow(azi_theo/180.*num.pi, num.min(slownesses), 0,
num.max(slownesses), alpha=0.5, width=0.015,
edgecolor='black', facecolor='green', lw=2,
zorder=5)
self.adjust_polar_axis(ax)
fig4.colorbar(m)
# ---------------------------------------------------------
# CF and beam forming
# ---------------------------------------------------------
fig5 = self.new_figure('Beam')
nsubplots = 4
nsubplots += self.want_pws
ax_raw = fig5.add_subplot(nsubplots, 1, 1)
ax_shifted = fig5.add_subplot(nsubplots, 1, 2)
ax_beam = fig5.add_subplot(nsubplots, 1, 3)
ax_beam_new = fig5.add_subplot(nsubplots, 1, 4)
axkwargs = dict(alpha=0.3, linewidth=0.3, color='grey')
ybeam = num.zeros(lengthout)
            ybeam_weighted = num.zeros(lengthout, dtype=num.complex128)
for i, (shift, array) in enumerate(zip(shifts.T, arrays)):
ax_raw.plot(times, array[npad: -npad], **axkwargs)
ishift = shift[imax_bazi_slow]
ax_shifted.plot(
times, array[npad-ishift: -npad-ishift], **axkwargs)
ydata = traces[i].get_ydata()[npad-ishift: -npad-ishift]
ybeam += ydata
                # phase weighting: accumulate unit phasors of the
                # instantaneous phase of each trace
                if self.want_pws:
                    ph_inst = instantaneous_phase(ydata)
                    ybeam_weighted += num.exp(1j*ph_inst)
ax_beam_new.plot(stack_trace)
ax_beam_new.set_title('continuous mode')
# ax_beam.plot(times, ybeam, color='black')
ax_beam.plot(ybeam, color='black')
ax_raw.set_title('Characteristic Function')
ax_shifted.set_title('Shifted CF')
ax_beam.set_title('Linear Stack')
            if self.want_pws:
                ax_playground = fig5.add_subplot(nsubplots, 1, 5)
                ax_playground.plot(
                    ybeam * num.abs(ybeam_weighted/len(arrays))**4)
                ax_playground.set_title('Phase Weighted Stack')
# -----------------------------------------------------------
# polar movie:
# -----------------------------------------------------------
            fig6 = self.new_figure('Polar movie')
self.polar_movie(
fig=fig6,
frames=frames,
times=times,
theta=theta.T,
r=r.T*km,
nth_frame=2,
n_bazis=n_bazis,
n_slow=n_slow,
)
self.draw_figures()
self.irun += 1
def polar_movie(self, fig, frames, times, theta, r, nth_frame,
n_bazis, n_slow):
frame_artists = []
# progress_artists = []
ax = fig.add_subplot(111, projection='polar')
iframe_min = 0
iframe_max = len(times)-1
vmin = num.min(frames)
vmax = num.max(frames)
self.adjust_polar_axis(ax)
def update(iframe):
# if iframe is not None:
if False:
frame = frames[:, iframe]
'''
if not progress_artists:
progress_artists[:] = [axes2.axvline(
tmin_frames - t0 + deltat_cf * iframe,
color=scolor('scarletred3'),
alpha=0.5,
lw=2.)]
else:
progress_artists[0].set_xdata(
tmin_frames - t0 + deltat_cf * iframe)
'''
else:
frame = frames[:, iframe].reshape((n_bazis, n_slow))
frame_artists[:] = [ax.pcolormesh(theta, r, frame, vmin=vmin,
vmax=vmax)]
# return frame_artists + progress_artists + static_artists
return frame_artists
        axf = FuncAnimation(  # noqa
            fig, update,
            frames=list(range(iframe_min, iframe_max+1))[::nth_frame],
            interval=20.,
            repeat=False,
            blit=True)
fig.canvas.draw()
def adjust_polar_axis(self, ax):
ax.set_theta_zero_location('N')
ax.set_theta_direction(-1)
ax.set_xticks([0, num.pi/2., num.pi, 3*num.pi/2])
ax.set_xticklabels(['N', 'E', 'S', 'W'])
def get_tinc_use(self, precision=1.):
        '''
        Return the time increment for data processing, rounded down to a
        multiple of *precision*.
        '''
if self.tinc is not None:
tinc = self.tinc
else:
tmin, tmax = self.get_selected_time_range(fallback=True)
if tmin == tmax:
# selected event marker
tmin, tmax = self.get_viewer().get_time_range()
tinc = tmax-tmin
return num.floor(tinc/precision) * precision
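    # Example (hypothetical values): with 'Increment [s]' set to 60 and a
    # sampling interval of 0.025 s, get_tinc_use() returns 60.0; with the
    # increment slider set to None and a 130.71 s long marker selected, it
    # returns 130.7 (floored to a multiple of the sampling interval).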
def __snufflings__():
return [FK()]