master
Marius Isken 2 years ago
parent 4f988ccc74
commit 915dbbefcb

4
.gitignore vendored

@@ -137,4 +137,8 @@ dmypy.json
# Cython debug symbols
cython_debug/
/venv
# Added by cargo
/target

@@ -8,4 +8,4 @@
"python.pythonPath": "/usr/bin/python3",
"python.linting.flake8Enabled": true,
"python.linting.enabled": true
}
}

341
Cargo.lock generated

@@ -0,0 +1,341 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "autocfg"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a"
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "cfg-if"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "indoc"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8"
dependencies = [
"indoc-impl",
"proc-macro-hack",
]
[[package]]
name = "indoc-impl"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0"
dependencies = [
"proc-macro-hack",
"proc-macro2",
"quote",
"syn",
"unindent",
]
[[package]]
name = "instant"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "libc"
version = "0.2.104"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b2f96d100e1cf1929e7719b7edb3b90ab5298072638fccd77be9ce942ecdfce"
[[package]]
name = "lock_api"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712a4d093c9976e24e7dbca41db895dabcbac38eb5f4045393d17a95bdfb1109"
dependencies = [
"scopeguard",
]
[[package]]
name = "matrixmultiply"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a8a15b776d9dfaecd44b03c5828c2199cddff5247215858aac14624f8d6b741"
dependencies = [
"rawpointer",
]
[[package]]
name = "ndarray"
version = "0.15.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08e854964160a323e65baa19a0b1a027f76d590faba01f05c0cbc3187221a8c9"
dependencies = [
"matrixmultiply",
"num-complex",
"num-integer",
"num-traits",
"rawpointer",
]
[[package]]
name = "num-complex"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26873667bbbb7c5182d4a37c1add32cdf09f841af72da53318fdb81543c15085"
dependencies = [
"num-traits",
]
[[package]]
name = "num-integer"
version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
dependencies = [
"autocfg",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
dependencies = [
"autocfg",
]
[[package]]
name = "numpy"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09c15af63aa0c74e0f7230d4e95d9a3d71a23449905f30f50b055df9a6a6a3e6"
dependencies = [
"cfg-if 0.1.10",
"libc",
"ndarray",
"num-complex",
"num-traits",
"pyo3",
]
[[package]]
name = "once_cell"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56"
[[package]]
name = "parking_lot"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
dependencies = [
"instant",
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
dependencies = [
"cfg-if 1.0.0",
"instant",
"libc",
"redox_syscall",
"smallvec",
"winapi",
]
[[package]]
name = "paste"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880"
dependencies = [
"paste-impl",
"proc-macro-hack",
]
[[package]]
name = "paste-impl"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6"
dependencies = [
"proc-macro-hack",
]
[[package]]
name = "proc-macro-hack"
version = "0.5.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
[[package]]
name = "proc-macro2"
version = "1.0.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70"
dependencies = [
"unicode-xid",
]
[[package]]
name = "pyo3"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35100f9347670a566a67aa623369293703322bb9db77d99d7df7313b575ae0c8"
dependencies = [
"cfg-if 1.0.0",
"indoc",
"libc",
"parking_lot",
"paste",
"pyo3-build-config",
"pyo3-macros",
"unindent",
]
[[package]]
name = "pyo3-build-config"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d12961738cacbd7f91b7c43bc25cfeeaa2698ad07a04b3be0aa88b950865738f"
dependencies = [
"once_cell",
]
[[package]]
name = "pyo3-macros"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc0bc5215d704824dfddddc03f93cb572e1155c68b6761c37005e1c288808ea8"
dependencies = [
"pyo3-macros-backend",
"quote",
"syn",
]
[[package]]
name = "pyo3-macros-backend"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71623fc593224afaab918aa3afcaf86ed2f43d34f6afde7f3922608f253240df"
dependencies = [
"proc-macro2",
"pyo3-build-config",
"quote",
"syn",
]
[[package]]
name = "pyrocko-das"
version = "0.1.0"
dependencies = [
"ndarray",
"numpy",
"pyo3",
]
[[package]]
name = "quote"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05"
dependencies = [
"proc-macro2",
]
[[package]]
name = "rawpointer"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3"
[[package]]
name = "redox_syscall"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff"
dependencies = [
"bitflags",
]
[[package]]
name = "scopeguard"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "smallvec"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309"
[[package]]
name = "syn"
version = "1.0.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194"
dependencies = [
"proc-macro2",
"quote",
"unicode-xid",
]
[[package]]
name = "unicode-xid"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
[[package]]
name = "unindent"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f14ee04d9415b52b3aeab06258a3f07093182b88ba0f9b8d203f211a7a7d41c7"
[[package]]
name = "winapi"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
dependencies = [
"winapi-i686-pc-windows-gnu",
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

@@ -0,0 +1,18 @@
[package]
name = "pyrocko-das"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "pyrocko_das"
crate-type = ["cdylib"]
[dependencies]
numpy = "0.14.1"
ndarray = "0.15.3"
[dependencies.pyo3]
version = "0.14.5"
features = ["extension-module"]

Binary file not shown.

@@ -0,0 +1,287 @@
import numpy as num
from scipy import interpolate, ndimage, signal
from collections.abc import Iterable
from pyrocko.guts import Object, Float, List, Tuple, String, Timestamp, Bool
from pyrocko import gf
from pyrocko.model import Location
import pyrocko.orthodrome as od
import matplotlib.pyplot as plt
r2d = 180.0 / num.pi
km = 1e3
META = {
"measure_length": None,
"start_distance": None,
"stop_distance": None,
"gauge_length": None,
"spatial_resolution": None,
"geo_lat": None,
"geo_lon": None,
"geo_elevation": None,
"channel": None,
"unit": None,
}
class QuantityType(gf.meta.QuantityType):
choices = gf.meta.QuantityType.choices + ["strain", "strain_rate"]
class Fiber(Object):
lat = Float.T(default=0.0)
lon = Float.T(default=0.0)
codes = Tuple.T(
4,
String.T(),
default=("", "", "", "HSF"),
help="network, station, location and channel codes to be set on "
"the response trace. If station code is empty it will be filled"
" by the channel number",
)
quantity = QuantityType.T(
default="strain",
help="Measurement quantity type. If not given, it is guessed from the "
"channel code. For some common cases, derivatives of the stored "
"quantities are supported by using finite difference "
"approximations (e.g. displacement to velocity or acceleration). "
"4th order central FD schemes are used.",
)
elevation = Float.T(default=0.0, help="station surface elevation in [m]")
store_id = gf.meta.StringID.T(
optional=True,
help="ID of Green's function store to use for the computation. "
"If not given, the processor may use a system default.",
)
sample_rate = Float.T(
optional=True,
help="sample rate to produce. "
"If not given the GF store's default sample rate is used. "
"GF store specific restrictions may apply.",
)
tmin = Timestamp.T(
optional=True,
help="time of first sample to request in [s]. "
"If not given, it is determined from the Green's functions.",
)
tmax = Timestamp.T(
optional=True,
help="time of last sample to request in [s]. "
"If not given, it is determined from the Green's functions.",
)
interpolation = gf.meta.InterpolationMethod.T(
default="multilinear",
help="Interpolation method between Green's functions. Supported are"
" ``nearest_neighbor`` and ``multilinear``",
)
coordinates = List.T(
help="coordinates of the cable as ``pyrocko.model.Location`` or a tuple"
" of (north_shift, east_shift, [elevation])."
)
channel_spacing = Float.T(default=4.0, help="Channel spacing [m].")
smoothing_sigma = Float.T(
default=10.0, help="Standard deviation for Gaussian kernel in [m]"
)
spectral_differences = Bool.T(
        default=False, help="Use spectral differentiation to estimate linear strain."
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._locations = []
for coord in self.coordinates:
if isinstance(coord, Location):
loc = coord
elif isinstance(coord, Iterable) and len(coord) in (2, 3):
loc = Location(
lat=self.lat,
lon=self.lon,
north_shift=coord[0],
east_shift=coord[1],
elevation=self.elevation if len(coord) == 2 else coord[2],
)
else:
raise AttributeError(
"coordinates have to be a list of pyrocko.model.Location"
" or tuples of (north_shift, east_shifts, [elevation]),"
" not %s" % type(coord)
)
self._locations.append(loc)
        assert len(self._locations) > 1, "Fiber needs at least 2 coordinates"
@classmethod
def from_stations(cls, stations, **kwargs):
return cls(coordinates=stations, **kwargs)
@property
def distances(self):
locations = self._locations
nlocs = len(locations)
dists = [0.0]
for il in range(1, nlocs):
dists.append(locations[il - 1].distance_3d_to(locations[il]))
return num.array(dists)
@property
def length(self):
return num.sum(self.distances)
@property
def nchannels(self):
dists = self.length
return int(dists // self.channel_spacing)
def interpolate_channels(self, start_channel=0):
dists = num.cumsum(self.distances)
interp_lat = interpolate.interp1d(
dists, tuple(loc.effective_lat for loc in self._locations)
)
interp_lon = interpolate.interp1d(
dists, tuple(loc.effective_lon for loc in self._locations)
)
interp_elevation = interpolate.interp1d(
dists, tuple(loc.elevation for loc in self._locations)
)
interp_distances = num.arange(0.0, dists.max(), step=self.channel_spacing)
channels = num.arange(interp_distances.size) + start_channel
nchannels = channels.size
lats = interp_lat(interp_distances)
lons = interp_lon(interp_distances)
elevations = interp_elevation(interp_distances)
azis = num.empty(nchannels)
azis[:-1] = od.azimuth_numpy(
lats[:-1],
lons[:-1],
lats[1:],
lons[1:],
)
azis[-1] = azis[-2]
ds = num.full(nchannels, self.channel_spacing)
dips = -num.arctan2(num.gradient(elevations), ds) * r2d
return lats, lons, elevations, azis, dips, channels
def get_targets(self):
lats, lons, elevations, azis, dips, channels = self.interpolate_channels()
nchannels = self.nchannels
quantity = self.quantity
if self.quantity in ("strain", "strain_rate"):
quantity = "displacement"
targets = []
for icha in range(nchannels):
codes = list(self.codes)
if not codes[1]:
codes[1] = "%05d" % icha
t = gf.Target(
lat=lats[icha],
lon=lons[icha],
elevation=elevations[icha],
azimuth=azis[icha],
dip=dips[icha],
north_shift=0.0,
east_shift=0.0,
sample_rate=self.sample_rate,
codes=codes,
store_id=self.store_id,
quantity=quantity,
interpolation=self.interpolation,
)
targets.append(t)
return targets
def get_locations(self):
return self._locations
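# Hedged usage sketch (not executed anywhere in this module): the values mirror
# the test_process_fiber test in this commit; the "das_test" GF store is an
# assumption of that test setup and has to exist in the local GF store config.
def _example_fiber_targets():
    fiber = Fiber(
        lat=0.0,
        lon=0.0,
        quantity="strain_rate",
        store_id="das_test",
        coordinates=((1100, 0), (2000, 1000), (2000, 3000)),
        channel_spacing=4.0,
    )
    # One gf.Target (requested as displacement, later differentiated to strain
    # rate by LocalEngine.process_fiber) is generated per interpolated channel.
    return fiber.get_targets()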
class LocalEngine(gf.LocalEngine):
def process_fiber(self, source, fiber, **kwargs):
        assert not isinstance(source, Iterable), "Currently only one source is supported!"
targets = fiber.get_targets()
resp = self.process(source, targets, **kwargs)
traces = resp.pyrocko_traces()
ntraces = len(traces)
deltat = traces[0].deltat
all_times = [(tr.tmin, tr.tmax) for tr in traces]
all_tmin = num.min(all_times)
all_tmax = num.max(all_times)
meta = {
"measure_length": fiber.length,
"start_distance": 0.0,
"stop_distance": fiber.length,
"gauge_length": fiber.channel_spacing,
"spatial_resolution": fiber.channel_spacing,
}
for icha, (tr, target) in enumerate(zip(traces, targets)):
tr.extend(tmin=all_tmin, tmax=all_tmax, fillmethod="repeat")
tr.meta = meta.copy()
tr.meta["channel"] = icha
tr.meta["geo_lat"] = target.lat
tr.meta["geo_lon"] = target.lon
tr.meta["geo_elevation"] = target.elevation
nsamples = set([tr.ydata.size for tr in traces])
assert len(nsamples) == 1
nsamples = nsamples.pop()
# Spatial smoothing
trs_data = num.array([tr.ydata for tr in traces])
if fiber.smoothing_sigma:
trs_data = ndimage.gaussian_filter1d(
trs_data, sigma=fiber.smoothing_sigma / fiber.channel_spacing, axis=0
)
if fiber.quantity in ("strain_rate", "strain"):
assert (
ntraces > 3
), "Too few channels in fiber for finite-difference derivation"
if fiber.spectral_differences:
kappa = num.fft.fftfreq(ntraces, fiber.channel_spacing)
trs_spec = num.fft.fft(trs_data, axis=0)
trs_spec *= 2 * num.pi * kappa[:, num.newaxis] * 1j
trs_data = num.fft.ifft(trs_spec, axis=0).real
else:
trs_data = num.gradient(trs_data, fiber.channel_spacing, axis=0)
if fiber.quantity == "strain_rate":
trs_data = num.gradient(trs_data, deltat, axis=1)
signal.detrend(trs_data, type="linear", axis=1, overwrite_data=True)
# if fiber.quantity == 'strain':
# trs_data = num.cumsum(
# trs_data, axis=1)
for itr, tr in enumerate(traces):
tr.set_ydata(trs_data[itr])
return traces
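# Hedged sketch (standalone helper, not part of the module API): illustrates
# the spectral spatial derivative used in process_fiber() above by comparing
# multiplication with 2*pi*i*kappa in the wavenumber domain against
# num.gradient for a synthetic, periodic along-cable signal.
def _spectral_derivative_demo(channel_spacing=4.0, nchannels=256):
    x = num.arange(nchannels) * channel_spacing
    # One column of "samples"; a 128 m wavelength fits the 1024 m domain exactly.
    data = num.sin(2 * num.pi * x / 128.0)[:, num.newaxis]
    kappa = num.fft.fftfreq(nchannels, channel_spacing)
    spec = num.fft.fft(data, axis=0)
    spec *= 2 * num.pi * kappa[:, num.newaxis] * 1j
    deriv_spectral = num.fft.ifft(spec, axis=0).real
    deriv_gradient = num.gradient(data, channel_spacing, axis=0)
    # Both approximate d/dx sin(2*pi*x/128) = (2*pi/128) * cos(2*pi*x/128); the
    # spectral version is exact for this periodic signal, the central finite
    # difference carries an O(channel_spacing**2) error.
    return num.max(num.abs(deriv_spectral - deriv_gradient))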

@@ -0,0 +1,149 @@
from __future__ import annotations
import os.path as op
import time
from functools import lru_cache
import matplotlib.pyplot as plt
import numpy as num
from matplotlib.widgets import Slider
from scipy import signal
from .rust import rust_func as goldstein_rust
data = num.load(op.join(op.dirname(__file__), "data", "data-DAS-gfz2020wswf.npy"))
@lru_cache
def triangular_taper(size: int, plateau: int):
if plateau > size:
raise ValueError("Plateau cannot be larger than size.")
if size % 2 or plateau % 2:
raise ValueError("Size and plateau have to be even.")
ramp_size = int((size - plateau) / 2)
ramp = num.linspace(0.0, 1.0, ramp_size)
window = num.ones(size)
window[:ramp_size] = ramp
window[size - ramp_size :] = ramp[::-1]
return window * window[:, num.newaxis]
def data_coherency(data):
ntraces = data.shape[0]
coherency = 0.0
for itr in range(ntraces - 1):
coherency += signal.coherence(data[itr], data[itr + 1])[1].mean()
return coherency / (ntraces - 1)
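# Windowing arithmetic used by goldstein_filter() below (descriptive note; the
# numbers only illustrate the defaults): with window_size=32 and overlap=14 the
# windows advance by window_stride = 32 - 14 = 18 pixels, and
# window_non_overlap = 32 - 2 * 14 = 4 pixels per window are not shared with
# any neighbour. The triangular taper keeps that plateau at full weight and
# ramps the overlapping margins so adjacent windows blend smoothly when summed.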
def goldstein_filter(data, window_size: int = 32, overlap: int = 14, exponent=0.3):
if num.log2(window_size) % 1.0 or window_size < 4:
raise ValueError("window_size has to be pow(2) and > 4.")
if overlap > window_size / 2 - 1:
raise ValueError("Overlap is large small. Minimum window_size)/ 2 - 1.")
window_stride = window_size - overlap
window_non_overlap = window_size - 2 * overlap
npx_x, npx_y = data.shape
nwin_x = npx_x // window_stride
nwin_y = npx_y // window_stride
    if npx_x % window_stride or npx_y % window_stride:
raise ValueError("Padding does not match desired data shape")
filtered_data = num.zeros_like(data)
for iwin_x in range(nwin_x):
px_x = iwin_x * window_stride
slice_x = slice(px_x, px_x + window_size)
for iwin_y in range(nwin_y):
px_y = iwin_y * window_stride
slice_y = slice(px_y, px_y + window_size)
window_data = data[slice_x, slice_y]
window_fft = num.fft.rfft2(window_data)
window_fft_abs = num.abs(window_fft) / window_fft.size
# Optional
# window_fft_abs = signal.medfilt2d(window_fft_abs, kernel_size=3)
# window_coherency = data_coherency(window_data)
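            # Adaptive spectral weighting in the spirit of the Goldstein &
            # Werner (1998) interferogram filter: each wavenumber component is
            # scaled by its own normalised amplitude raised to `exponent`, so
            # coherent energy is emphasised over incoherent noise (exponent=0
            # would leave the window unchanged).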
window_fft *= window_fft_abs ** exponent
window_filtered = num.fft.irfft2(window_fft)
taper = triangular_taper(window_size, window_non_overlap)
taper = taper[: window_filtered.shape[0], : window_filtered.shape[1]]
filtered_data[
px_x : px_x + window_filtered.shape[0],
px_y : px_y + window_filtered.shape[1],
] += (
window_filtered * taper
)
return filtered_data
def normalize_diff(data, filtered_data):
norm = num.abs(data).max()
filtered_data_norm = filtered_data / num.abs(filtered_data).max() * (1.0 / norm)
filtered_data_norm -= data / norm
return filtered_data_norm
def plot_goldstein():
exponent = 0.5
window_size = 64
overlap = 30
t = time.time()
data_filtered = goldstein_filter(
data, window_size=window_size, exponent=exponent, overlap=overlap
)
print(time.time() - t)
fig, (ax1, ax2, ax3) = plt.subplots(1, 3, sharex=True, sharey=True)
ax1.imshow(data, aspect=5)
image = ax2.imshow(data_filtered, aspect=5)
image_diff = ax3.imshow(normalize_diff(data, data_filtered), aspect=5)
ax_slider = plt.axes((0.25, 0.1, 0.65, 0.03))
ax1.set_title("Data Input")
ax2.set_title("Data Filtered")
ax3.set_title("Data Residual")
exp_slider = Slider(
ax=ax_slider,
label="Exponent",
valmin=0,
valmax=1.0,
valinit=exponent,
orientation="horizontal",
)
def update(val):
global data
data_filtered = goldstein_filter(
data, window_size=window_size, exponent=val, overlap=overlap
)
image.set_data(data_filtered)
image.set_norm(None)
image_diff.set_data(normalize_diff(data, data_filtered))
image_diff.set_norm(None)
fig.canvas.draw_idle()
exp_slider.on_changed(update)
plt.show()
def plot_taper():
window = triangular_taper(32, 4)
plt.imshow(window)
plt.show()
if __name__ == "__main__":
    # The placeholder Rust binding expects a 2-D float64 array (see src/lib.rs).
    print(goldstein_rust(data))
plot_goldstein()

@@ -1,26 +1,34 @@
import numpy as num
from scipy import signal
from .utils import traces_to_numpy_and_meta
def rad_to_de(traces, copy=False, m_per_rad=11.6e-9):
out_traces = []
for tr in traces:
assert 'gauge_length' in tr.meta, 'gauge_length not in metadata'
assert "gauge_length" in tr.meta, "gauge_length not in metadata"
if copy:
tr = tr.copy()
tr.ydata *= m_per_rad
tr.ydata /= tr.meta['gauge_length'] * tr.deltat
tr.meta['unit'] = 'strain rate (m/s/m)'
tr.ydata /= tr.meta["gauge_length"] * tr.deltat
tr.ydata /= 8192
tr.meta["unit"] = "strain rate (m/s/m)"
out_traces.append(tr)
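        # Note on the conversion above: phase in radians times m_per_rad gives
        # a differential path length in metres; dividing by gauge_length turns
        # that into strain and dividing by deltat into strain rate, matching
        # the "strain rate (m/s/m)" unit. The additional 1/8192 divisor looks
        # like an instrument-specific scale factor (assumption, undocumented).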
def de_to_e(traces, copy=False):
def de_to_e(traces, detrend=True, copy=False):
out_traces = []
for tr in traces:
data, _ = traces_to_numpy_and_meta(traces)
# if detrend:
# signal.detrend(data, type='linear', axis=1, overwrite_data=True)
data = num.cumsum(data, axis=1)
for itr, tr in enumerate(traces):
if copy:
tr = tr.copy()
tr.set_ydata(num.cumsum(tr.ydata) / tr.deltat)
tr.meta['unit'] = 'strain (m/m)'
tr.set_ydata(data[itr])
tr.meta["unit"] = "strain (m/m)"
out_traces.append(tr)
return out_traces
@@ -32,7 +40,7 @@ def de_to_acceleration_static_slowness(traces, slowness, copy=False):
if copy:
tr = tr.copy()
tr.set_ydata(tr.ydata / slowness)
tr.meta['unit'] = 'acceleration (m/s^2)'
tr.meta["unit"] = "acceleration (m/s^2)"
out_traces.append(tr)
return out_traces
@@ -42,7 +50,7 @@ def de_to_velocity_static_slowness(traces, slowness, copy=False):
out_traces = de_to_e(traces, copy)
for tr in out_traces:
tr.set_ydata(tr.ydata / slowness)
tr.meta['unit'] = 'velocity (m/s)'
tr.meta["unit"] = "velocity (m/s)"
return out_traces
@@ -54,7 +62,7 @@ def de_to_relative_displacement(traces, copy=False):
data = num.cumsum(data, axis=0) * meta.spatial_resolution
for itr, tr in enumerate(traces):
tr.set_ydata(data[itr])
tr.meta['unit'] = 'displacement (m)'
tr.meta["unit"] = "displacement (m)"
return traces
@@ -66,6 +74,6 @@ def de_to_relative_velocity(traces, copy=False):
data = num.diff(data, n=1, axis=0) / meta.spatial_resolution
for itr, tr in enumerate(traces):
tr.set_ydata(data[itr])
tr.meta['unit'] = 'velocity (m/s)'
tr.meta["unit"] = "velocity (m/s)"
return traces

@@ -9,12 +9,12 @@ class AttrDict(dict):
def traces_to_numpy_and_meta(traces):
ntraces = len(traces)
nsamples = set(tr.nsamples for tr in traces)
nsamples = set(tr.ydata.size for tr in traces)
assert len(nsamples) == 1, "Traces nsamples differ"
nsamples = nsamples[0]
nsamples = nsamples.pop()
data = num.zeros(ntraces, nsamples)
data = num.zeros((ntraces, nsamples))
for itr, tr in enumerate(traces):
data[itr, :] = tr.ydata
meta = tr.meta

@@ -1 +1,4 @@
pytest
setuptools-rust
flake8
black

@@ -1,4 +1,5 @@
import setuptools
from setuptools_rust import Binding, RustExtension
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
@@ -9,6 +10,7 @@ setuptools.setup(
author="Marius Paul Isken",
author_email="mi@gfz-potsdam.de",
description="DAS Tools for Pyrocko",
zip_safe=False,
long_description=long_description,
long_description_content_type="text/markdown",
url="https://git.pyrocko.org/pyrocko/pyrocko-das",
@@ -20,7 +22,15 @@ setuptools.setup(
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
package_dir={"pyrocko_das": "src"},
rust_extensions=[
RustExtension(
"pyrocko_das.rust",
path="Cargo.toml",
binding=Binding.PyO3,
debug=False,
)
],
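    # setuptools-rust compiles the cdylib declared in Cargo.toml and installs
    # it as the `pyrocko_das.rust` extension module (imported by goldstein.py);
    # debug=False requests a release (optimised) build.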
package_data={"pyrocko_das": ["data/*.npy"]},
packages=["pyrocko_das"],
python_requires=">=3.6",
)

@@ -1,277 +0,0 @@
import numpy as num
from scipy import interpolate, ndimage
from collections.abc import Iterable
from pyrocko.guts import Object, Float, List, Tuple, String, Timestamp
from pyrocko import gf
from pyrocko.model import Location
import pyrocko.orthodrome as od
r2d = 180./num.pi
km = 1e3
META = {
'measure_length': None,
'start_distance': None,
'stop_distance': None,
'gauge_length': None,
'spatial_resolution': None,
'geo_lat': None,
'geo_lon': None,
'geo_elevation': None,
'channel': None,
'unit': None
}
class QuantityType(gf.meta.QuantityType):
choices = gf.meta.QuantityType.choices + ['strain', 'strain_rate']
class Fiber(Object):
lat = Float.T(
default=0.)
lon = Float.T(
default=0.)
codes = Tuple.T(
4, String.T(), default=('', '', '', 'HSF'),
help='network, station, location and channel codes to be set on '
'the response trace. If station code is empty it will be filled'
' by the channel number')
quantity = QuantityType.T(
default='strain',
help='Measurement quantity type. If not given, it is guessed from the '
'channel code. For some common cases, derivatives of the stored '
'quantities are supported by using finite difference '
'approximations (e.g. displacement to velocity or acceleration). '
'4th order central FD schemes are used.')
elevation = Float.T(
default=0.0,
help='station surface elevation in [m]')
store_id = gf.meta.StringID.T(
optional=True,
help='ID of Green\'s function store to use for the computation. '
'If not given, the processor may use a system default.')
sample_rate = Float.T(
optional=True,
help='sample rate to produce. '
'If not given the GF store\'s default sample rate is used. '
'GF store specific restrictions may apply.')
tmin = Timestamp.T(
optional=True,
help='time of first sample to request in [s]. '
'If not given, it is determined from the Green\'s functions.')
tmax = Timestamp.T(
optional=True,
help='time of last sample to request in [s]. '
'If not given, it is determined from the Green\'s functions.')
interpolation = gf.meta.InterpolationMethod.T(
default='multilinear',
help='Interpolation method between Green\'s functions. Supported are'
' ``nearest_neighbor`` and ``multilinear``')
coordinates = List.T(
help='coordinates of the cable as ``pyrocko.model.Location`` or a tuple'
' of (north_shift, east_shift, [elevation]).')
channel_spacing = Float.T(
default=4.,
help='Channel spacing [m].')
smoothing_sigma = Float.T(
default=30.,
help='Standard deviation for Gaussian kernel in [m]'
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._locations = []
for coord in self.coordinates:
if isinstance(coord, Location):
loc = coord
elif isinstance(coord, Iterable) and len(coord) in (2, 3):
loc = Location(
lat=self.lat,
lon=self.lon,
north_shift=coord[0],
east_shift=coord[1],
elevation=self.elevation if len(coord) == 2 else coord[2])
else:
raise AttributeError(
'coordinates have to be a list of pyrocko.model.Location'
' or tuples of (north_shift, east_shifts, [elevation]),'
' not %s' % type(coord))
self._locations.append(loc)
assert len(self._locations) > 1, 'Fiber needs more than 1 coordinates'
@classmethod
def from_stations(cls, stations, **kwargs):
return cls(
coordinates=stations,
**kwargs)
@property
def distances(self):
locations = self._locations
nlocs = len(locations)
dists = [0.]
for il in range(1, nlocs):
dists.append(
locations[il-1].distance_3d_to(locations[il]))
return num.array(dists)
@property
def length(self):
return num.sum(self.distances)
@property
def nchannels(self):
dists = self.length
return int(dists // self.channel_spacing)
def interpolate_channels(self, start_channel=0):
dists = num.cumsum(self.distances)
interp_lat = interpolate.interp1d(
dists,
tuple(loc.effective_lat for loc in self._locations))
interp_lon = interpolate.interp1d(
dists,
tuple(loc.effective_lon for loc in self._locations))
interp_elevation = interpolate.interp1d(
dists,
tuple(loc.elevation for loc in self._locations))
interp_distances = num.arange(
0., dists.max(),
step=self.channel_spacing)
channels = num.arange(interp_distances.size) + start_channel
nchannels = channels.size
lats = interp_lat(interp_distances)
lons = interp_lon(interp_distances)
elevations = interp_elevation(interp_distances)
azis = num.empty(nchannels)
azis[:-1] = od.azimuth_numpy(
lats[:-1], lons[:-1],
lats[1:], lons[1:],
)
azis[-1] = azis[-2]
ds = num.full(nchannels, self.channel_spacing)
dips = -num.arctan2(
num.gradient(elevations), ds) * r2d
return lats, lons, elevations, azis, dips, channels
def get_targets(self):
lats, lons, elevations, azis, dips, channels = \
self.interpolate_channels()
nchannels = self.nchannels
quantity = self.quantity
if self.quantity in ('strain', 'strain_rate'):
quantity = 'displacement'
targets = []
for icha in range(nchannels):
codes = list(self.codes)
if not codes[1]:
codes[1] = '%05d' % icha
t = gf.Target(
lat=lats[icha],
lon=lons[icha],
elevation=elevations[icha],
azimuth=azis[icha],
dip=dips[icha],
north_shift=0.,
east_shift=0.,
sample_rate=self.sample_rate,
codes=codes,
store_id=self.store_id,
quantity=quantity,
interpolation=self.interpolation
)
targets.append(t)
return targets
def get_locations(self):
return self._locations
class LocalEngine(gf.LocalEngine):
def process_fiber(self, source, fiber, **kwargs):
assert isinstance(source, Iterable), \
'Currently only one source is supported!'
targets = fiber.get_targets()
resp = self.process(source, targets, **kwargs)
traces = resp.pyrocko_traces()
ntraces = len(traces)
all_times = [(tr.tmin, tr.tmax) for tr in traces]
all_tmin = num.min(all_times)
all_tmax = num.max(all_times)
meta = {
'measure_length': fiber.length,
'start_distance': 0.,
'stop_distance': fiber.length,
'gauge_length': fiber.channel_spacing,
'spatial_resolution': fiber.channel_spacing
}
for icha, (tr, target) in enumerate(zip(traces, targets)):
tr.extend(tmin=all_tmin, tmax=all_tmax, fillmethod='repeat')
tr.meta = meta.copy()
tr.meta['channel'] = icha
tr.meta['geo_lat'] = target.lat
tr.meta['geo_lon'] = target.lon
tr.meta['geo_elevation'] = target.elevation
nsamples = set([tr.ydata.size for tr in traces])
assert len(nsamples) == 1
nsamples = nsamples.pop()
# Spatial smoothing
trs_data = ndimage.gaussian_filter1d(
num.array([tr.ydata for tr in traces]),
sigma=fiber.smoothing_sigma/fiber.channel_spacing, axis=0)
for itr, tr in enumerate(traces):
tr.set_ydata(trs_data[itr])
if fiber.quantity in ('strain', 'strain_rate'):
assert ntraces > 3, \
'Too few channels in fiber for finite-difference derivation'
trs_strain_grad = num.gradient(
num.array([tr.ydata for tr in traces]), fiber.channel_spacing,
axis=0)
for itr, tr in enumerate(traces):
tr.set_ydata(trs_strain_grad[itr])
if fiber.quantity == 'strain_rate':
for tr in traces:
tr.differentiate()
return traces

@@ -0,0 +1,22 @@
use ndarray::{Array1, Array2};
use numpy::{IntoPyArray, PyArray1, PyReadonlyArray2};
use pyo3::prelude::*;
/// 2-D triangular taper with a flat central plateau; the body is a minimal
/// sketch mirroring the Python `triangular_taper` helper in goldstein.py.
fn triangular_taper(taper_size: usize, plateau: usize) -> Array2<f32> {
    assert!(plateau <= taper_size, "Plateau cannot be larger than size.");
    assert!(taper_size % 2 == 0 && plateau % 2 == 0, "Size and plateau have to be even.");
    let ramp_size = (taper_size - plateau) / 2;
    let mut window = Array1::<f32>::ones(taper_size);
    for i in 0..ramp_size {
        // Linear ramps from 0.0 to 1.0 on both ends of the 1-D window.
        let value = i as f32 / (ramp_size.max(2) - 1) as f32;
        window[i] = value;
        window[taper_size - 1 - i] = value;
    }
    // The 2-D taper is the outer product of the 1-D window with itself.
    Array2::from_shape_fn((taper_size, taper_size), |(i, j)| window[i] * window[j])
}
/// A Python module implemented in Rust.
#[pymodule]
fn rust(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
#[pyfn(m)]
fn rust_func<'py>(
py: Python<'py>,
data: PyReadonlyArray2<'py, f64>,
) -> PyResult<&'py PyArray1<f64>> {
let x = vec![1., 2., 3.].into_pyarray(py);
Ok(x)
}
Ok(())
}

@@ -0,0 +1,30 @@
import pytest
from pyrocko_das import gf
km = 1e3
@pytest.fixture
def syn_das_data():
def get_data(**kwargs):
import pyrocko.gf as pgf
default_kwargs = dict(
lat=0.0,
lon=0.0,
quantity="velocity",
store_id="das_test",
coordinates=((1100, 0), (2000, 1000), (2000, 3000)),
channel_spacing=4.0,
)
default_kwargs.update(kwargs)
fiber = gf.Fiber(**default_kwargs)
engine = gf.LocalEngine(use_config=True)
source = pgf.DCSource(lat=0.0, lon=0.0, depth=2 * km, strike=45.0, dip=30.0)
traces = engine.process_fiber(source, fiber)
return traces
return get_data
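# Hedged usage sketch: how a test module would consume the factory fixture
# above (test_strain is a hypothetical name, not part of this commit; the
# "das_test" GF store must be available locally):
#
#   def test_strain(syn_das_data):
#       traces = syn_das_data(quantity="strain", channel_spacing=8.0)
#       assert len(traces) > 0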

@@ -0,0 +1,40 @@
5J.05119. 63.82725 -22.67712 0.0 0.0
5J.04992. 63.83083 -22.67155 0.0 0.0
5J.04864. 63.83406 -22.66579 0.0 0.0
5J.04736. 63.83600 -22.65668 0.0 0.0
5J.04608. 63.83991 -22.65147 0.0 0.0
5J.04480. 63.84300 -22.64507 0.0 0.0
5J.04352. 63.84610 -22.63717 0.0 0.0
5J.04224. 63.84933 -22.63000 0.0 0.0
5J.04096. 63.85254 -22.62261 0.0 0.0
5J.03969. 63.85398 -22.61294 0.0 0.0
5J.03841. 63.85595 -22.60373 0.0 0.0
5J.03713. 63.85521 -22.59439 0.0 0.0
5J.03585. 63.85409 -22.58429 0.0 0.0
5J.03457. 63.85324 -22.57465 0.0 0.0
5J.03329. 63.85459 -22.56499 0.0 0.0
5J.03201. 63.85883 -22.56068 0.0 0.0
5J.03073. 63.86222 -22.55340 0.0 0.0
5J.02946. 63.86495 -22.54518 0.0 0.0
5J.02818. 63.86642 -22.53514 0.0 0.0
5J.02690. 63.86757 -22.52550 0.0 0.0
5J.02562. 63.86656 -22.51521 0.0 0.0
5J.02434. 63.86616 -22.50483 0.0 0.0
5J.02306. 63.86613 -22.49439 0.0 0.0
5J.02178. 63.86787 -22.48457 0.0 0.0
5J.02050. 63.86989 -22.47506 0.0 0.0
5J.01923. 63.87134 -22.46520 0.0 0.0
5J.01795. 63.87258 -22.45499 0.0 0.0
5J.01667. 63.87480 -22.44567 0.0 0.0
5J.01539. 63.87709 -22.43664 0.0 0.0
5J.01412. 63.87888 -22.43064 0.0 0.0
5J.01284. 63.87482 -22.42599 0.0 0.0
5J.01156. 63.87039 -22.42549 0.0 0.0
5J.01028. 63.86590 -22.42404 0.0 0.0
5J.00901. 63.86151 -22.42662 0.0 0.0
5J.00773. 63.85709 -22.42943 0.0 0.0
5J.00645. 63.85263 -22.43191 0.0 0.0
5J.00517. 63.85036 -22.43858 0.0 0.0
5J.00389. 63.84684 -22.44513 0.0 0.0
5J.00261. 63.84249 -22.44831 0.0 0.0
5J.00133. 63.84025 -22.44546 0.0 0.0

@@ -1,6 +1,9 @@
import pytest
import os.path as op
import numpy as num
from copy import deepcopy
from pyrocko_das import gf
from pyrocko.model import Location
from pyrocko import trace
@@ -13,27 +16,22 @@ def test_fiber():
fiber = gf.Fiber(
lat=0.5,
lon=0.5,
store_id='test',
coordinates=(
(0, 0),
(100, 0)
),
channel_spacing=1.
store_id="test",
coordinates=((0, 0), (100, 0)),
channel_spacing=1.0,
)
assert fiber.distances.size == 2
assert fiber.distances[0] == 0.
assert fiber.distances[1] == 100.
assert fiber.distances[0] == 0.0
assert fiber.distances[1] == 100.0
assert fiber.length == 100.
assert fiber.length == 100.0
assert fiber.nchannels == 100
northings, eastings, elevations, azis, dips, channels = \
fiber.interpolate_channels()
assert channels.size == 100.
northings, eastings, elevations, azis, dips, channels = fiber.interpolate_channels()
assert channels.size == 100.0
num.testing.assert_equal(azis, 0.)
num.testing.assert_equal(dips, 0.)
num.testing.assert_equal(azis, 0.0)
num.testing.assert_equal(dips, 0.0)
targets = fiber.get_targets()
@@ -41,47 +39,37 @@ def test_fiber():
assert len(targets) == fiber.nchannels
fiber = gf.Fiber(
lat=0.,
lon=0.,
store_id='test',
lat=0.0,
lon=0.0,
store_id="test",
coordinates=(
(0, 0, 0.),
(-20, 0, 20.),
(-40, 0, 40.),
(0, 0, 0.0),
(-20, 0, 20.0),
(-40, 0, 40.0),
),
channel_spacing=1.,
interpolation='multilinear'
channel_spacing=1.0,
interpolation="multilinear",
)
northings, eastings, elevations, azis, dips, channels = \
fiber.interpolate_channels()
num.testing.assert_equal(azis, 180.)
num.testing.assert_equal(dips, -45.)
northings, eastings, elevations, azis, dips, channels = fiber.interpolate_channels()
num.testing.assert_equal(azis, 180.0)
num.testing.assert_equal(dips, -45.0)
fiber = gf.Fiber(
lat=0.,
lon=0.,
store_id='test',
lat=0.0,
lon=0.0,
store_id="test",
coordinates=(
Location(
north_shift=0.,
east_shift=0,
elevation=0),
Location(
north_shift=-100.,
east_shift=0,
elevation=100),
Location(north_shift=0.0, east_shift=0, elevation=0),
Location(north_shift=-100.0, east_shift=0, elevation=100),
),
channel_spacing=1.,
interpolation='multilinear'
channel_spacing=1.0,
interpolation="multilinear",
)
northings, eastings, elevations, azis, dips, channels = \
fiber.interpolate_channels()
num.testing.assert_equal(azis, 180.)
num.testing.assert_equal(dips, -45.)
northings, eastings, elevations, azis, dips, channels = fiber.interpolate_channels()
num.testing.assert_equal(azis, 180.0)
num.testing.assert_equal(dips, -45.0)
def test_process_fiber():
@@ -90,45 +78,77 @@ def test_process_fiber():
fiber = gf.Fiber(
lat=0.0,
lon=0.0,
quantity='acceleration',
store_id='das_test',
coordinates=(
(1100, 0),
(2000, 1000),
(2000, 3000)
),
channel_spacing=4.
quantity="strain_rate",
store_id="das_test",
coordinates=((1100, 0), (2000, 1000), (2000, 3000)),
channel_spacing=4.0,
smoothing_sigma=30.0,
)
engine = gf.LocalEngine(use_config=True)
source = pgf.DCSource(
lat=0., lon=0.,
depth=2*km,
strike=45.,
dip=30.
)
source = pgf.DCSource(lat=0.0, lon=0.0, depth=2 * km, strike=45.0, dip=30.0)
traces = engine.process_fiber(source, fiber)
trace.snuffle(traces)
def test_process_fiber_fft():
import pyrocko.gf as pgf
engine = gf.LocalEngine(use_config=True)
fiber = gf.Fiber(
lat=0.0,
lon=0.0,
quantity="strain",
store_id="das_test",
coordinates=((1100, 0), (2000, 1000), (2000, 3000)),
channel_spacing=4.0,
smoothing_sigma=30.0,
spectral_differences=True,
)
source = pgf.DCSource(lat=0.0, lon=0.0, depth=2 * km, strike=45.0, dip=30.0)
traces_fft = engine.process_fiber(source, fiber)
for tr in traces_fft:
tr.set_location("SSP")
fiber.spectral_differences = False
traces_grad = engine.process_fiber(source, fiber)
for tr in traces_grad:
tr.set_location("SGR")
fiber.quantity = "displacement"
traces_u = engine.process_fiber(source, fiber)
for tr in traces_u:
tr.set_location("U")
tr_u = traces_u[0].copy()
tr_u.set_ydata((traces_u[1].ydata - traces_u[0].ydata) / fiber.channel_spacing)
tr_u.set_location("U2")
trace.snuffle(traces_grad + traces_fft + traces_u + [tr_u])