Compare commits

...

20 Commits
master ... mole

Author SHA1 Message Date
mmetz 8b72325977 ids: wrapper for fortran IDS function 10 months ago
mmetz b440eb1fd3 ids compatibility: WIP 11 months ago
mmetz 9d39194bfb flake8 11 months ago
mmetz c94f099971 sources: export strike, dip and rake of nodal planes directly in MTSource 11 months ago
mmetz f791ad62f5 WIP 11 months ago
mmetz 296a9c8bfb WIP 11 months ago
mmetz 9c6e7be4a5 mole: new functionalities in mole call and further basic work 12 months ago
mmetz b9ba505448 store: make event folder in runs for all runs 12 months ago
mmetz f720c9f9a3 mole: further work on general run setup 12 months ago
mmetz 621c0443ce mole: setup of general running procedure 1 year ago
mmetz a448a9c1e9 mole, module work, basic architecture 1 year ago
mmetz 39888cb050 mole: init 1st draft, mine WIP 1 year ago
mmetz 9e58797708 mole: bugfixes in init, WIP config 1 year ago
mmetz 5670c1e7cb ewrica: WIP 1 year ago
mmetz afdab04003 mole: added new options plus set general store directory structure 1 year ago
mmetz 3986daa79d ewrica: new folder: 1 year ago
mmetz 0dfef16b60 mole: executeable for fast source inversion wip 1 year ago
mmetz 95da28ba00 config: small changes 1 year ago
mmetz 32b2502380 config: new config file 1 year ago
mmetz cd81b468be tsumaps: docs 1 year ago
  1. README.md (1 line)
  2. doc/source/static/mole_flow_chart.odg (BIN)
  3. examples/create_ensemble_files.py (5 lines)
  4. setup.py (30 lines)
  5. src/apps/mole.py (242 lines)
  6. src/config.py (83 lines)
  7. src/core.py (0 lines)
  8. src/dataset/tsumaps.py (104 lines)
  9. src/gm/__init__.py (0 lines)
  10. src/gm/config.py (17 lines)
  11. src/gm/module.py (21 lines)
  12. src/io/grond.py (158 lines)
  13. src/io/ids.py (1584 lines)
  14. src/ls/__init__.py (0 lines)
  15. src/ls/config.py (17 lines)
  16. src/ls/module.py (22 lines)
  17. src/message/__init__.py (0 lines)
  18. src/module.py (58 lines)
  19. src/plot/ids.py (20 lines)
  20. src/si/__init__.py (0 lines)
  21. src/si/config.py (262 lines)
  22. src/si/ids/__init__.py (0 lines)
  23. src/si/ids/config.py (502 lines)
  24. src/si/ids/core.py (612 lines)
  25. src/si/ids/targets/__init__.py (7 lines)
  26. src/si/ids/targets/base.py (78 lines)
  27. src/si/ids/targets/gnss.py (21 lines)
  28. src/si/ids/targets/insar.py (21 lines)
  29. src/si/ids/targets/waveform.py (216 lines)
  30. src/si/invert.py (50 lines)
  31. src/si/module.py (83 lines)
  32. src/sources.py (357 lines)
  33. src/store.py (276 lines)
  34. src/util.py (82 lines)
  35. test/io/__init__.py (0 lines)
  36. test/io/test_ids.py (712 lines)
  37. test/si/__init__.py (0 lines)
  38. test/si/test_ids.py (63 lines)

1
README.md

@@ -0,0 +1 @@
Here could be your Readme

BIN
doc/source/static/mole_flow_chart.odg

Binary file not shown.

5
examples/create_ensemble_files.py

@@ -12,14 +12,13 @@ store_id = 'crust2_m2'
# would like to use, you can skip this step and point the *store_superdirs* in
# the next step to that directory.
# if not os.path.exists(store_id):
# ws.download_gf_store(site='kinherd', store_id=store_id)
if not os.path.exists(store_id):
ws.download_gf_store(site='kinherd', store_id=store_id)
# We need a pyrocko.gf.Engine object which provides us with the ground
# parameter. In this case we are going to use a local
# engine since we are going to query a local store.
engine = LocalEngine(store_superdirs=['.'])
engine = LocalEngine(store_superdirs=['/home/malde/src/gf_stores'])
store = engine.get_store(store_id)
# Create list of sources (should be of same type)

30
setup.py

@@ -42,18 +42,26 @@ class CustomInstallCommand(install):
packages = [
'%s' % packname,
'%s.apps' % packname,
'%s.dataset' % packname,
'%s.apps' % packname]
'%s.gm' % packname,
'%s.io' % packname,
'%s.ls' % packname,
'%s.message' % packname,
'%s.plot' % packname,
'%s.si' % packname,
'%s.si.ids' % packname,
'%s.si.ids.targets' % packname]
# entry_points = {
# 'console_scripts':
# [
# 'gm_map = %s.apps.gm_map:main' % packname]}
entry_points = {
'console_scripts':
['mole = %s.apps.mole:main' % packname]}
setup(
cmdclass={'install': CustomInstallCommand},
cmdclass={
'install': CustomInstallCommand},
name=packname,
version=version,
author='mmetz',
@@ -62,7 +70,7 @@ setup(
long_description=read('README.md'),
license='GPLv3',
keywords=[
'pyrocko', 'ewrica'],
'geophysics, fast source inversion, shakemap generation'],
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, <4',
install_requires=[],
@@ -73,16 +81,16 @@ setup(
packages=packages,
package_dir={'ewrica': 'src'},
# entry_points=entry_points,
entry_points=entry_points,
classifiers=[
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Development Status :: 1 - Planning',
'Intended Audience :: Other Audience',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 3',
'Operating System :: POSIX',
'Topic :: Games/Entertainment :: Simulation'],
'Topic :: Scientific/Engineering'],
package_data={
packname: ['data/tsumaps_all_params.*',
'data/examples/*'
] + get_readme_paths()}
)

242
src/apps/mole.py

@@ -0,0 +1,242 @@
#!/usr/bin/env python3
# GPLv3
#
# The Developers, 21st Century
import sys
import logging
from argparse import ArgumentParser
import os.path as op
from pyrocko import util
from ewrica import store
logger = logging.getLogger('ewrica.apps.mole')
def d2u(d):
return dict((k.replace('-', '_'), v) for (k, v) in d.items())
subcommand_descriptions = {
'init': 'create needed folder structure',
'go': 'run source inversion and shake map estimation'
}
subcommand_usages = {
'init': 'init <storedir> [options]',
'go': 'go [options]'
}
subcommands = subcommand_descriptions.keys()
program_name = 'mole'
usage = program_name + ''' <subcommand> <arguments> ... [options]
Subcommands:
init %(init)s
go %(go)s
To get further help and a list of available options for any subcommand run:
mole <subcommand> --help
''' % d2u(subcommand_descriptions)
def add_common_options(parser):
parser.add_argument(
'--loglevel',
action='store',
dest='loglevel',
choices=('critical', 'error', 'warning', 'info', 'debug'),
default='info',
metavar='',
help='set logger level to '
'"critical", "error", "warning", "info", or "debug". '
'default is "%(default)s"')
def process_common_options(options):
util.setup_logging(program_name, options.loglevel)
def cl_parse(command, args, setup=None, details=None):
usage = subcommand_usages[command]
descr = subcommand_descriptions[command]
if isinstance(usage, str):
usage = [usage]
susage = '%s %s' % (program_name, usage[0])
for s in usage[1:]:
susage += '\n%s%s %s' % (' '*7, program_name, s)
description = descr[0].upper() + descr[1:] + '.'
if details:
description = description + ' %s' % details
parser = ArgumentParser(
usage=susage,
description=description)
parser.format_description = lambda formatter: description
if setup:
setup(parser)
add_common_options(parser)
args = parser.parse_args(args)
process_common_options(args)
return parser, args
def die(message, err=''):
sys.exit('%s: error: %s \n %s' % (program_name, message, err))
def command_init(args):
modules = ('gm', 'ls', 'si')
def setup(parser):
parser.add_argument('storedir', help='main storage directory')
parser.add_argument(
'--force', dest='force', action='store_true',
help='overwrite existing directory and files')
parser.add_argument(
'--modules',
action='store',
dest='modules',
choices=modules + ('all',),
default='all',
metavar='',
help='initialize with support for certain modules only: '
'"si": source inversion only, '
'"gm": ground motion only, '
'"ls": landslide only, '
'"all": all modules. '
'default is "%(default)s"')
parser, args = cl_parse('init', args, setup=setup)
if 'all' in args.modules:
modules = modules
else:
modules = [args.modules]
config_fns = store.ResultStorage.create_editeables(
store_dir=args.storedir, force=args.force, modules=modules)
logger.info('(1) configure settings in files:\n %s'
% '\n '.join(config_fns))
logger.info('(2) run "mole go" in directory: "%s"' % args.storedir)
def command_go(args):
# TODO create caller for infinite running with multistep inversion etc
# Data check on a regular basis (or when xml is changed) (squirrel)
# Synchronize via seed link with GNSS data etc. -> Store in cache
# When trigger, start multistep inversion based on config
# Keep results within folder in defined structure
def setup(parser):
parser.add_argument(
'--force', dest='force', action='store_true',
help='overwrite existing run directories (if needed). '
'default is %(default)s')
parser.add_argument(
'--offline', dest='offline', action='store_true',
help='If given, run in offline instead of realtime mode')
parser.add_argument(
'--playback_sim', dest='playback', action='store_true',
help='If given, offline data is replayed as if in realtime')
parser.add_argument(
'--event', dest='event_id', action='store',
help='If given, evaluate local data in ./data/<event_id>. '
'default is %(default)s')
parser.add_argument(
'--data', dest='data_dir', action='store',
help='If given, evaluate given local data')
parser.add_argument(
'--run', dest='run_dir', action='store',
help='If given, store all results in given directory')
parser.add_argument(
'--config', dest='config', action='store',
help='Directory where all module configuration files are stored')
parser.add_argument(
'--si_conf', dest='si_conf', action='store',
help='SourceInversion config file, if config/si.conf shall not be '
'used')
parser.add_argument(
'--gm_conf', dest='gm_conf', action='store',
help='GroundMotion config file, if config/gm.conf shall not be '
'used')
parser.add_argument(
'--ls_conf', dest='ls_conf', action='store',
help='LandSlide config file, if config/ls.conf shall not be used')
parser, args = cl_parse('go', args, setup=setup)
storage = store.ResultStorage(store_dir='.')
storage.open(
conf_dir=args.config,
si_conf=args.si_conf,
gm_conf=args.gm_conf,
ls_conf=args.ls_conf)
if args.event_id:
args.offline = True
problem = store.Problem(
run_dir=args.run_dir or op.join(storage.run_dir, args.event_id),
data_dir=args.data_dir or op.join(storage.data_dir, args.event_id),
event_id=args.event_id,
offline=args.offline,
playback=args.playback,
configs=storage.configs)
problem.run(force=args.force)
# TODO capture logs, errors etc
def main(args=None):
if args is None:
args = sys.argv[1:]
if len(args) < 1:
sys.exit('Usage: %s' % usage)
command = args.pop(0)
if command in subcommands:
globals()['command_' + command](args)
elif command in ('--help', '-h', 'help'):
if command == 'help' and args:
acommand = args[0]
if acommand in subcommands:
globals()['command_' + acommand](['--help'])
sys.exit('Usage: %s' % usage)
else:
sys.exit('mole: error: no such subcommand: %s' % command)
if __name__ == '__main__':
main(sys.argv[1:])
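For orientation, a minimal usage sketch of the two subcommands defined above, driven from Python instead of the installed console script (assumed usage; the storage directory and event id are hypothetical placeholders):

# Minimal sketch (assumed usage; 'mystore' and 'ev_2020_001' are hypothetical).
from ewrica.apps import mole

# equivalent to the console call: mole init mystore --modules si
mole.main(['init', 'mystore', '--modules', 'si'])

# after editing the generated config files, run inside the store directory,
# equivalent to: mole go --offline --event ev_2020_001
# mole.main(['go', '--offline', '--event', 'ev_2020_001'])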

83
src/config.py

@@ -0,0 +1,83 @@
# GPLv3
#
# The Developers, 21st Century
import os
import os.path as op
from pyrocko.config import (
PathWithPlaceholders, ConfigBase, BadConfig, expand, processed,
mtime)
from pyrocko.guts import load, dump
from pyrocko.util import ensuredirs
try:
newstr = unicode
range = xrange
except NameError:
newstr = str
def encode_utf8(s):
return s.encode('utf-8')
def no_encode(s):
return s
ewrica_dir_tmpl = os.environ.get('EWRICA_DIR', op.join('~', '.ewrica'))
def make_conf_path_tmpl(name='config'):
return op.join(ewrica_dir_tmpl, '%s.pf' % name)
class EwricaConfig(ConfigBase):
run_dir = PathWithPlaceholders.T(
default=op.join(ewrica_dir_tmpl, 'runs'),
help='Directory where all data, results, etc. generated during '
'the run of Ewrica are stored.')
data_dir = PathWithPlaceholders.T(
default=op.join(ewrica_dir_tmpl, 'data'),
help='Storage directory. All data needed for the runs of Ewrica, such '
'as the Bayesian networks, are stored here.')
config_cls = {
'config': EwricaConfig
}
g_conf_mtime = {}
g_conf = {}
def raw_config(config_name='config'):
conf_path = expand(make_conf_path_tmpl(config_name))
if not op.exists(conf_path):
g_conf[config_name] = config_cls[config_name].default()
write_config(g_conf[config_name], config_name)
conf_mtime_now = mtime(conf_path)
if conf_mtime_now != g_conf_mtime.get(config_name, None):
g_conf[config_name] = load(filename=conf_path)
if not isinstance(g_conf[config_name], config_cls[config_name]):
raise BadConfig('config file does not contain a '
'valid "%s" section.' %
config_cls[config_name].__name__)
g_conf_mtime[config_name] = conf_mtime_now
return g_conf[config_name]
def config(config_name='config'):
return processed(raw_config(config_name))
def write_config(conf, config_name='config'):
conf_path = expand(make_conf_path_tmpl(config_name))
ensuredirs(conf_path)
dump(conf, filename=conf_path)
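For orientation, a minimal sketch of how this configuration machinery would typically be used (assumed usage; on first call a default config is written to ~/.ewrica/config.pf, or below the location given by the EWRICA_DIR environment variable; the edited path is a hypothetical placeholder):

# Minimal usage sketch (assumed): read, adjust and write back the
# global EwricaConfig defined above.
from ewrica import config as ewconfig

conf = ewconfig.config()              # processed EwricaConfig instance
print(conf.run_dir, conf.data_dir)

conf.run_dir = '/tmp/ewrica_runs'     # hypothetical path
ewconfig.write_config(conf)           # dump back to ~/.ewrica/config.pf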

0
src/core.py

104
src/dataset/tsumaps.py

@@ -1,9 +1,14 @@
# GPLv3
#
# The Developers, 21st Century
import os.path as op
import numpy as num
from scipy import io as sio
from scipy.interpolate import griddata
from pyrocko import moment_tensor as pmt
from ewrica import util
@@ -20,13 +25,47 @@ def tsumaps_file():
return fn
def load():
return sio.loadmat(tsumaps_file())
def get_probs(
lat, lon, interpolation='nearest_neighbor'):
def load(fn=None):
'''Load the tsumaps mat file into a dictionary
The tsumaps mat file stored in ewrica.data is loaded. The dictionary which
is returned has three keys:
- latlon: :py:class:`numpy.ndarray`, size: `(No points, 2)`
coordinates of each grid point in lat, lon
- fm: :py:class:`numpy.ndarray`, size: `(No focal mecs, 3)`
first nodal plane orientation for used focal mechanisms in
strike, dip, rake
- prob: :py:class:`numpy.ndarray`, size: `(No points, No focal mecs)`
probabilities for each given focal mechanism nodal plane
orientation at each point of the grid
'''
if fn is None:
fn = tsumaps_file()
return sio.loadmat(fn)
tsumap = load()
def get_probs(
lat, lon, interpolation='nearest_neighbor', cum_prob_max=1., fn=None):
'''Get tsumaps focal mechanism probabilities at a given location
:param lat: latitude of the chosen point in [deg]
:type lat: float
:param lon: longitude of the chosen point in [deg]
:type lon: float
:param interpolation: interpolation method to get probabilities at
`lat, lon`. Choices are:
- nearest_neighbor
- multilinear
:type interpolation: optional, string
:param cum_prob_max: if smaller than 1, only the most probable focal
mechanisms are returned, truncated where their cumulative
probability exceeds this threshold
:type cum_prob_max: optional, float
:param fn: alternative path to the tsumaps mat file
:type fn: optional, string
:rtype: :py:class:`numpy.ndarray`, size: `(No focal mecs, 3)`,
:py:class:`numpy.ndarray`, size: `(No focal mecs, )`
:returns: Focal mechanism nodal plane orientations (strike, dip, rake) and
the corresponding interpolated probabilities. The probabilities are
normalized to sum to 1 before the optional cum_prob_max truncation
is applied.
'''
tsumap = load(fn=fn)
latlon = tsumap['latlon']
fms = tsumap['fm']
probs = tsumap['prob']
@@ -44,7 +83,7 @@ def get_probs(
raise ValueError(
'interpolation must be either {}'.format(' or '.join(choices)))
# calculate samples with closest kagan angles
# TODO calculate samples with closest kagan angles
probs_interp = num.zeros(fms.shape[0])
for i, fm in enumerate(fms):
@@ -52,4 +91,55 @@ def get_probs(
points=latlon, values=probs[:, i], xi=num.array([lat, lon]),
method=choices[interpolation])
return fms, probs_interp
probs_interp = probs_interp / num.sum(probs_interp)
idx = num.argsort(probs_interp)[::-1]
fms = fms[idx]
probs_interp = probs_interp[idx]
if cum_prob_max < 1.:
cum_prob = num.cumsum(probs_interp)
cum_prob_max = cum_prob[cum_prob > cum_prob_max][0]
return fms[cum_prob <= cum_prob_max], probs_interp[
cum_prob <= cum_prob_max]
else:
return fms, probs_interp
def tsumaps_to_models(
n_models=1000, ranges={}, fms=None, probs=None, *args, **kwargs):
if fms is None and probs is None:
fms, probs = get_probs(*args, **kwargs)
mts = [pmt.MomentTensor(
strike=fm[0], dip=fm[1], rake=fm[2]) for fm in fms]
no_evs = num.around(
n_models * (probs / num.sum(probs)),
decimals=0).astype(int).flatten()
diff = num.sum(no_evs) - n_models
no_evs[-1] -= diff
rstate = num.random.RandomState()
xs_inject = num.zeros((n_models, len(ranges)))
for i, k in enumerate(
('time north_shift east_shift depth magnitude rmnn rmee rmdd rmne '
'rmnd rmed duration').split()):
if 'rm' in k:
ms = [getattr(mt, k[1:]) / mt.moment for mt in mts]
ms = num.repeat(ms, no_evs)
xs_inject[:, i] = ms
else:
xs_inject[:, i] = rstate.uniform(
low=ranges[k].start,
high=ranges[k].stop,
size=num.sum(no_evs))
return xs_inject
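A minimal usage sketch for the two public functions above (assumed usage; the coordinates are arbitrary example values, and `ranges` would normally come from a grond problem configuration as in src/si/config.py):

# Minimal sketch (assumed usage; lat/lon are arbitrary example values).
from ewrica.dataset import tsumaps

# nodal plane orientations (strike, dip, rake) and normalized probabilities
# at the given location, sorted by decreasing probability and truncated at
# 95 % cumulative probability
fms, probs = tsumaps.get_probs(lat=38.0, lon=15.0, cum_prob_max=0.95)

# convert them into models to inject into the sampler; 'ranges' must map the
# parameter names used above (time, north_shift, ..., duration) to Range
# objects, e.g. taken from a grond problem config
# xs_inject = tsumaps.tsumaps_to_models(
#     n_models=1000, ranges=ranges, fms=fms, probs=probs)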

0
src/gm/__init__.py

17
src/gm/config.py

@@ -0,0 +1,17 @@
# GPLv3
#
# The Developers, 21st Century
import logging
from ewrica.module import ModuleConfig
logger = logging.getLogger('ewrica.gm.config')
def get_config(*args, **kwargs):
return GroundMotionConfig(*args, **kwargs)
class GroundMotionConfig(ModuleConfig):
pass

21
src/gm/module.py

@@ -0,0 +1,21 @@
# GPLv3
#
# The Developers, 21st Century
import logging
from ..module import BaseModule
logger = logging.getLogger('ewrica.gm.module')
def get_module(*args, **kwargs):
return GroundMotionModule(*args, **kwargs)
class GroundMotionModule(BaseModule):
@property
def name(self):
return 'gm'
pass
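The gm and ls packages share this thin factory pattern on top of ewrica.module; a minimal sketch of how a module instance would be obtained and its configuration written out (assumed usage; the output file name is a hypothetical placeholder):

# Minimal sketch of the module factory pattern (assumed usage).
from ewrica.gm import config as gm_config, module as gm_module

conf = gm_config.get_config(run=True)     # GroundMotionConfig
mod = gm_module.get_module(config=conf)   # GroundMotionModule
print(mod.name)                           # -> 'gm'
mod.dump_config(filename='gm.conf')       # hypothetical output file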

158
src/io/grond.py

@@ -0,0 +1,158 @@
# GPLv3
#
# The Developers, 21st Century
import logging
import numpy as num
import grond
# from grond.core import make_stats
from grond.environment import Environment
from grond.problems.base import ProblemDataNotAvailable
# from grond.problems import DynamicRuptureProblemConfig
from pyrocko import guts # noqa
from pyrocko import moment_tensor as pmt
from pyrocko import orthodrome as pod
# from pyrocko.gf import Range
logger = logging.getLogger('ewrica.io.grond')
def _get_problem_and_history(rundir, subset='harvest'):
'''Extract models from the grond run directory
'''
env = Environment(rundir)
problem = env.get_problem()
try:
history = env.get_history(subset=subset)
except ProblemDataNotAvailable:
# TODO Check behaviour
grond.harvest(
rundir, problem=None, nbest=10, force=False, weed=0,
export_fits=[])
history = env.get_history(subset=subset)
return env, problem, history
def grond_to_sources(*args, **kwargs):
'''Convert grond history into ewrica.sources of appropriate type
'''
from ewrica import sources as ewsources
_, problem, history = _get_problem_and_history(*args, **kwargs)
xs = history.get_sorted_primary_models()
bootstrap_misfits = history.get_sorted_primary_misfits()
base_source = problem.base_source
# params = problem.problem_parameters # TODO params to extract models
if isinstance(problem, grond.problems.cmt.problem.CMTProblem):
source = ewsources.EwricaMTSource
stf = base_source.stf.T.cls
moments = pmt.magnitude_to_moment(xs[:, 4])
xs[:, 5:11] *= moments[:, num.newaxis]
lats, lons = pod.ne_to_latlon(
base_source.lat, base_source.lon, xs[:, 1], xs[:, 2])
xs[:, 1] = lats
xs[:, 2] = lons
return [source(
time=base_source.time + i[0],
lat=i[1],
lon=i[2],
depth=i[3],
m6=i[5:11],
stf=stf(duration=i[11]),
stf_mode=base_source.stf_mode,
misfit=mf)
for i, mf in zip(xs, bootstrap_misfits)]
elif isinstance(
problem,
grond.problems.dynamic_rupture.problem.DynamicRuptureProblem):
logger.warning(
'Take care: grond_to_sources for DynamicRupture problems could be erroneous')
source = ewsources.EwricaPseudoDynamicRupture
# stf = base_source.stf.T.cls
lats, lons = pod.ne_to_latlon(
base_source.lat, base_source.lon, xs[:, 1], xs[:, 0])
xs[:, 0] = lats
xs[:, 1] = lons
return [source(
anchor=base_source.anchor,
decimation_factor=base_source.decimation_factor,
nthreads=base_source.nthreads,
pure_shear=base_source.pure_shear,
smooth_rupture=base_source.smooth_rupture,
lat=i[0],
lon=i[1],
depth=i[2],
length=i[3],
width=i[4],
slip=i[5],
strike=i[6],
dip=i[7],
rake=i[8],
gamma=i[9],
nx=i[10],
ny=i[11],
nucleation_x=i[12],
nucleation_y=i[13],
time=i[14])
for i, mf in zip(xs, bootstrap_misfits)]
else:
# TODO
raise TypeError('Not implemented yet for the current problem type.')
# def point_source_to_dynrupt_gronf(*args, **kwargs):
# env, problem, history = _get_problem_and_history(*args, **kwargs)
# models = history.models
# stats = make_stats(
# problem, models,
# history.get_primary_chain_misfits())
# # TODO get GF config
# prob_conf = DynamicRuptureProblemConfig(
# name_template='',
# ranges=dict(
# time=Range(start=-15, stop=15, relative='add'),
# north_shift=Range(start=-25.e3, stop=25.e3, step=1e3),
# east_shift=Range(start=-25.e3, stop=25.e3, step=1e3),
# depth=Range(start=1.e3, stop=25.e3, step=1e3),
# slip=Range(start=.0, stop=8.0, step=0.1),
# length=Range(start=, stop=, step=),
# width=Range(start=, stop=, step=),
# nucleation_x=Range(start=-1., stop=1., step=0.05),
# nucleation_y=Range(start=-1., stop=1., step=0.05),
# gamma=Range(start=0.65, stop=0.8, step=0.025),
# nx=Range(start=2, stop=1., step=1),
# ny=Range(start=2, stop=20., step=1),
# strike=Range(start=, stop=, step=),
# dip=Range(start=, stop=, step=),
# rake=Range(start=, stop=, step=)),
# distance_min=1e3,
# norm_exponent=1,
# decimation_factor=self.decimation_factor,
# nthreads=self.nthreads,
# pure_shear=self.pure_shear,
# smooth_rupture=self.smooth_rupture)
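A minimal sketch of pulling harvested models out of a finished grond run with grond_to_sources defined above (assumed usage; the run directory path is a hypothetical placeholder):

# Minimal sketch (assumed usage; the rundir path is hypothetical).
from ewrica.io import grond as ewgrond

# for a CMT problem this yields a list of EwricaMTSource objects, each
# carrying the bootstrap misfit of the corresponding harvested model
sources = ewgrond.grond_to_sources('runs/ev_2020_001.grun', subset='harvest')
best = min(sources, key=lambda s: s.misfit)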

1584
src/io/ids.py

File diff suppressed because it is too large

0
src/ls/__init__.py

17
src/ls/config.py

@@ -0,0 +1,17 @@
# GPLv3
#
# The Developers, 21st Century
import logging
from ewrica.module import ModuleConfig
logger = logging.getLogger('ewrica.ls.config')
def get_config(*args, **kwargs):
return LandSlideConfig(*args, **kwargs)
class LandSlideConfig(ModuleConfig):
pass

22
src/ls/module.py

@@ -0,0 +1,22 @@
# GPLv3
#
# The Developers, 21st Century
import logging
from ..module import BaseModule
logger = logging.getLogger('ewrica.ls.module')
def get_module(*args, **kwargs):
return LandSlideModule(*args, **kwargs)
class LandSlideModule(BaseModule):
@property
def name(self):
return 'ls'
pass

0
src/message/__init__.py

58
src/module.py

@@ -0,0 +1,58 @@
# GPLv3
#
# The Developers, 21st Century
import logging
import os.path as op
from pyrocko.guts import load, dump, Bool, Object
from ewrica import util
logger = logging.getLogger('ewrica.module')
class ModuleConfig(Object):
'''Configuration of the module run
'''
run = Bool.T(
default=True,
help='Set to True if the module shall be run, else False')
def dump_module_config(conf, filename=None, include_help=False):
# TODO check, if help can be dumped as well
dump(conf, filename=filename, header=True)
def load_module_config(filename):
return load(filename=filename)
class BaseModule(Object):
# TODO generate basic functionalities as start function, evaluate, etc?
config = ModuleConfig.T(default=ModuleConfig())
@property
def name(self):
return ''
def setup_file_logging(self, module_logger=None, problem=None):
if module_logger is None:
module_logger = logger
if problem is None:
raise NotImplementedError
fn = op.join(problem.module_processing_dir(module=self), 'log.txt')
util.setup_file_logging(logger=module_logger, fn=fn)
def run(self, problem=None, force=False):
pass
def dump_config(self, *args, **kwargs):
dump_module_config(self.config, *args, **kwargs)
def load_config(self, *args, **kwargs):
self.config = load_module_config(*args, **kwargs)
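A minimal sketch of how a concrete module would build on BaseModule and ModuleConfig (assumed usage; the class names and the config file name are hypothetical):

# Minimal sketch (assumed usage; names are hypothetical).
from pyrocko.guts import Float
from ewrica.module import BaseModule, ModuleConfig


class MyConfig(ModuleConfig):
    threshold = Float.T(default=0.5, help='Hypothetical tuning parameter')


class MyModule(BaseModule):
    config = MyConfig.T(default=MyConfig())

    @property
    def name(self):
        return 'my'


mod = MyModule()
mod.dump_config(filename='my.conf')   # write the guts YAML configuration
mod.load_config(filename='my.conf')   # read it back into mod.config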

20
src/plot/ids.py

@@ -0,0 +1,20 @@
# GPLv3
#
# The Developers, 21st Century
import logging
import os
import numpy as num
# from pyrocko import orthodrome as pod, util as putil
# from pyrocko.gf.seismosizer import map_anchor
# from ewrica.sources import EwricaIDSSource, EwricaIDSPatch, EwricaIDSSTF
op = os.path
logger = logging.getLogger('ewrica.plot.ids')
km = 1e3
d2r = num.pi / 180.

0
src/si/__init__.py

262
src/si/config.py

@@ -0,0 +1,262 @@
# GPLv3
#
# The Developers, 21st Century
import logging
import os.path as op
from pyrocko import model
from pyrocko.guts import Float, Tuple, String, Bool, List
from pyrocko.gf import Range
from pyrocko.gf.meta import QuantityType
from grond import (
Path, read_config, Config,
DirectedSamplerPhase, HighScoreOptimiserConfig,
UniformSamplerPhase, InjectionSamplerPhase, EngineConfig,
DatasetConfig)
from grond.targets import WaveformMisfitConfig, WaveformTargetGroup
from grond.problems import CMTProblemConfig
from grond.analysers.target_balancing import TargetBalancingAnalyserConfig
from ewrica.module import ModuleConfig
from ewrica.dataset import tsumaps
logger = logging.getLogger('ewrica.si.config')
km = 1e3
def get_config(*args, **kwargs):
return SourceInversionConfig(*args, **kwargs)
class InversionConfig(ModuleConfig):
config_path = Path.T(
default='',
help='Path to the grond inversion configuration file.')
config__ = Config.T(
optional=True,
help='loaded grond inversion configuration object')
def __init__(self, *args, **kwargs):
ModuleConfig.__init__(self, *args, **kwargs)
self.config__ = None
def generate_config(self, *args, **kwargs):
raise NotImplementedError
def get_config(self, *args, **kwargs):
if self.config__:
return self.config__
if self.config_path:
self.config__ = read_config(self.config_path)
return self.config__
self.generate_config(*args, **kwargs)
return self.config__
class PointSourceInversionConfig(InversionConfig):
distance_range = Tuple.T(
default=(100*km, 200*km))
fmin = Float.T(
default=0.01,
help='Minimum waveform frequency')
fmax = Float.T(
default=0.1,
help='Maximum waveform frequency')
quantity = QuantityType.T(
default='displacement',
help='Target quantity in which synthetic and real data are compared.')
mt_type = String.T(
default='full',
help=('Components of the full moment tensor, which are inverted:'
'"full", "deviatoric" or "dc".'))
inject_models_from_tsumaps = Bool.T(
default=True,
help='If true, models based on tsumaps probabilities are injected.')
store_id = String.T(
default='ak135_2000km_1Hz',
help='Greens function store id')
gf_store_superdirs = List.T(
Path.T(),
default=['.'],
help='Greens function store super directory')
event_file = Path.T(
default='',
help='Event file path')
event_id = String.T(
default='',
help='Event ID')
station_paths = List.T(
Path.T(),
default=[],
help='List of StationXML station and response files')
waveform_paths = List.T(
Path.T(),
default=[],
help='List of paths to waveform directories')
parent_path = Path.T(
default='.',
help='Path to parent directory. Other paths can be relative to it.')
blocklist_paths = List.T(
Path.T(),
optional=True,
default=None,
help='Paths to txt files with blocked stations.')
def generate_config(self, *args, **kwargs):
target_groups = []
target_groups += [WaveformTargetGroup(
normalisation_family='td',
path='td.p',
distance_min=self.distance_range[0],
distance_max=self.distance_range[1],
channels=['Z', 'R'],
weight=1.0,
interpolation='nearest_neighbor',
store_id=self.store_id,
misfit_config=WaveformMisfitConfig(
quantity=self.quantity,
fmin=self.fmin,
fmax=self.fmax,
ffactor=1.5,
tmin='{stored:any_P}',
tmax='{stored:any_P}+50.',
domain='time_domain',
tautoshift_max=2.5,
autoshift_penalty_max=0.05,
norm_exponent=1))]
problem_conf = CMTProblemConfig(
name_template='',
ranges=dict(
time=Range(start=-15, stop=15, relative='add'),
north_shift=Range(start=-25.e3, stop=25.e3, step=1e3),
east_shift=Range(start=-25.e3, stop=25.e3, step=1e3),
depth=Range(start=1.e3, stop=25.e3, step=1e3),
magnitude=Range(start=4.0, stop=8.0, step=0.1),
rmnn=Range(start=-1.41421, stop=1.41421, step=0.05),
rmee=Range(start=-1.41421, stop=1.41421, step=0.05),
rmdd=Range(start=-1.41421, stop=1.41421, step=0.05),
rmne=Range(start=-1., stop=1., step=0.05),
rmnd=Range(start=-1., stop=1., step=0.05),
rmed=Range(start=-1., stop=1., step=0.05),
duration=Range(start=5., stop=20., step=1.)),
distance_min=1e3,
mt_type=self.mt_type,
norm_exponent=1)
analyser_confs = []
analyser_confs += [TargetBalancingAnalyserConfig(niterations=1000)]
optimiser_phases = []
optimiser_phases += [UniformSamplerPhase(niterations=1000)]
optimiser_phases += [DirectedSamplerPhase(
niterations=5000,
scatter_scale_begin=2.0,
scatter_scale_end=0.5)]
optimiser_conf = HighScoreOptimiserConfig(
nbootstrap=50, sampler_phases=optimiser_phases)
engine_conf = EngineConfig(
gf_stores_from_pyrocko_config=False,
gf_store_superdirs=self.gf_store_superdirs)
dataset_conf = DatasetConfig(
stations_stationxml_paths=self.station_paths,
responses_stationxml_paths=self.station_paths,
events_path=self.event_file,
waveform_paths=self.waveform_paths,
blacklist_paths=self.blocklist_paths or None,
apply_correction_factors=True,
extend_incomplete=True)
dataset_conf.set_basepath('/')
return Config(
rundir_template=op.join(
self.parent_path,
self.event_id, 'si', 'proc', '${problem_name}.grun'),
path_prefix='', # self.parent_path,
event_names=[self.event_id],
dataset_config=dataset_conf,
target_groups=target_groups,
problem_config=problem_conf,
analyser_configs=analyser_confs,
optimiser_config=optimiser_conf,
engine_config=engine_conf)
def optimiser_conf_from_tsumaps(self, config):
optimiser_phases = []
optimiser_phases += [UniformSamplerPhase(niterations=1000)]
if self.inject_models_from_tsumaps:
events = model.load_events(self.event_file)
event = None
for ev in events:
if ev.name == self.event_id:
event = ev
if event is None:
raise ValueError(
'No event called %s found in the given event file' %
self.event_id)
xs_inject = tsumaps.tsumaps_to_models(
lat=event.lat, lon=event.lon,
ranges=config.problem_config.ranges,
n_models=1000, cum_prob_max=1.)
optimiser_phases += [InjectionSamplerPhase(
niterations=xs_inject.shape[0], xs_inject=xs_inject)]
optimiser_phases += [DirectedSamplerPhase(
niterations=5000,
scatter_scale_begin=2.0,
scatter_scale_end=0.5)]
config.optimiser_config = HighScoreOptimiserConfig(
nbootstrap=50, sampler_phases=optimiser_phases)
return config
def get_config(self, *args, **kwargs):
if self.config__ is not None:
return self.config__
if self.config_path:
config = read_config(self.config_path)
else:
config = self.generate_config(*args, **kwargs)
if self.inject_models_from_tsumaps:
config = self.optimiser_conf_from_tsumaps(config)
self.config__ = config
return config
class SourceInversionConfig(ModuleConfig):
pointsource = PointSourceInversionConfig.T(
default=PointSourceInversionConfig(),
help='Configurations for point source inversion')
# @property
# def paths(self):
# return dict(
# grondconfig_pointsource=self.grondconfig_pointsource)
# def check_paths(self):
# for key, val in self.paths.items():
# if val is None:
# continue
# if not op.exists(val):
# raise ValueError(
# 'Current %s path "%s" is not valid' % (key, val))
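A minimal sketch of turning the point-source configuration above into a grond Config (assumed usage; all paths, ids and file names are hypothetical placeholders):

# Minimal sketch (assumed usage; all paths and ids are hypothetical).
from ewrica.si.config import SourceInversionConfig

si_conf = SourceInversionConfig()
ps = si_conf.pointsource
ps.event_file = 'data/events.txt'
ps.event_id = 'ev_2020_001'
ps.station_paths = ['data/stations.xml']
ps.waveform_paths = ['data/waveforms/']
ps.gf_store_superdirs = ['gf_stores']
ps.inject_models_from_tsumaps = False   # skip tsumaps injection in this sketch

grond_config = ps.get_config()          # grond.Config, ready to dump or run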

0
src/si/ids/__init__.py

502
src/si/ids/config.py

@@ -0,0 +1,502 @@
# GPLv3
#
# The Developers, 21st Century
import logging
import os
import os.path as op
import numpy as num
from pyrocko import model, util as putil
from pyrocko.guts import Float, List, String, Dict, StringChoice, Int, Object
from .targets import \
TargetConfig, WaveformTargetConfig, GNSSTargetConfig, InSARTargetConfig
logger = logging.getLogger('ewrica.si.ids.config')
km = 1e3
class DatasetConfig(Object):
events_path__ = String.T(default='event.txt')
event_name = String.T(default='event1')
fault_plane = StringChoice.T(
choices=['nodalplane1', 'nodalplane2'], default='nodalplane1')
_expand_paths = TargetConfig._expand_paths
def __init__(self, *args, parent=None, **kwargs):
Object.__init__(self, *args, **kwargs)
self._parent = parent
def bind_parent(self, parent):
self._parent = parent
def unbind_parent(self):
self._parent = None
@staticmethod
def example():
return DatasetConfig()
@property
def events_path(self):
return self._expand_paths(self.events_path__)
@events_path.setter
def events_path(self, events_path):
self.events_path__ = events_path
@property
def event(self):
events = model.load_events(self.events_path)
for event in events:
if event.name == self.event_name:
return event
break
raise ValueError(
'Given event_name "{}" not found in given event file'.format(
self.event_name))
class RunConfig(Object):
n_iterations = Int.T(default=100)
outpath_template__ = String.T(optional=True)
deltat_snapshots = Float.T(default=5.)
t_max_snapshots = Float.T(default=50.)
_expand_paths = TargetConfig._expand_paths
def __init__(self, *args, parent=None, **kwargs):
Object.__init__(self, *args, **kwargs)
self._parent = parent
def bind_parent(self, parent):
self._parent = parent
def unbind_parent(self):
self._parent = None
@staticmethod
def example():
return RunConfig()
@property
def outpath_template(self):
return self._expand_paths(self.outpath_template__)
@outpath_template.setter
def outpath_template(self, outpath_template):
self.outpath_template__ = outpath_template
class EngineConfig(Object):
ids_store_superdirs = List.T(String.T(), default=['.'])
pyrocko_store_superdirs = List.T(String.T(), default=['.'])
ids_waveform_store_id = String.T(default='.')
ids_geodetic_store_id = String.T(default='.')
ids_geodetic_files = Dict.T(
String.T(), String.T(),
default=dict(Z='uz', R='ur', T='ut'))
ids_synthetic_bandpass_filter = Dict.T(
String.T(), Float.T(),
default=dict(order=3., f_min=0.01, f_max=0.5))
@staticmethod
def example():
return EngineConfig(
ids_store_superdirs=['../ids_store_superdirs'],
pyrocko_store_superdirs=['../pyrocko_store_superdirs'],
ids_waveform_store_id='ids_waveform_store',
ids_geodetic_store_id='ids_geodetic_store',
ids_synthetic_bandpass_filter=dict(
order=3,
f_min=0.01,
f_max=0.5))
@property
def ids_geodetic_store(self):
for d in self.ids_store_superdirs:
store = op.join(d, self.ids_geodetic_store_id)
if op.exists(store):
if not store.endswith(os.sep):
store += os.sep
return store
@property
def ids_waveform_store(self):
for d in self.ids_store_superdirs:
store = op.join(d, self.ids_waveform_store_id)
if op.exists(store):
if not store.endswith(os.sep):
store += os.sep
return store
class IDSConfig(Object):
ids_version = String.T(default='2020')
path_prefix = String.T(
default=op.expanduser('~'),
help='Location of parent directory. All other dataset paths can be '
'given as relative paths from "path_prefix"',
optional=True)
dataset_conf = DatasetConfig.T(default=DatasetConfig())
waveform_conf = WaveformTargetConfig.T(
default=WaveformTargetConfig())
gnss_conf = GNSSTargetConfig.T(
default=GNSSTargetConfig(), optional=True)
insar_conf = InSARTargetConfig.T(
default=InSARTargetConfig(), optional=True)
run_conf = RunConfig.T(default=RunConfig())
engine_conf = EngineConfig.T(default=EngineConfig())
def __init__(self, *args, **kwargs):
Object.__init__(self, *args, **kwargs)
self.bind_parent()
def bind_parent(self):
for conf in (
self.dataset_conf,
self.waveform_conf,
self.gnss_conf,
self.insar_conf,
self.run_conf):
if conf is None:
continue
conf.bind_parent(self)
class IDSConfigFull(IDSConfig):
@property
def inputdir(self):
return 'input'
@property
def inputfn(self):
return op.join(self.inputdir, 'ids_input.inp')
@property
def waveformdir(self):
return op.join(self.inputdir, 'waveform')
@property
def gnssdir(self):
return op.join(self.inputdir, 'gnss')
@property
def insardir(self):
return op.join(self.inputdir, 'insar')
@property
def rundir(self):
return 'run'
@staticmethod
def example():
dataset_conf = DatasetConfig.example()
waveform_conf = WaveformTargetConfig.example()
gnss_conf = GNSSTargetConfig.example()
insar_conf = InSARTargetConfig.example()
engine_conf = EngineConfig.example()
run_conf = RunConfig.example()
return IDSConfigFull(
path_prefix='..',
dataset_conf=dataset_conf,
waveform_conf=waveform_conf,
gnss_conf=gnss_conf,
insar_conf=insar_conf,
engine_conf=engine_conf,
run_conf=run_conf)
def string_for_config(self):
event = self.dataset_conf.event
eng_conf = self.engine_conf
np = self.dataset_conf.fault_plane[-1]
d = dict(
origin_time=putil.tts(event.time, format='%Y %m %d %H %M %S'),
lat=event.lat,
lon=event.lon,
depth=event.depth / km,
mag=event.magnitude,
waveform_dir=op.relpath(
self.waveformdir, start=self.inputdir) + os.sep,
dist_min=self.waveform_conf.distance_min / km,
dist_max=self.waveform_conf.distance_max / km,
waveform_static_weight=self.waveform_conf.static_weight,
gf_dir=eng_conf.ids_waveform_store,
filter_order=eng_conf.ids_synthetic_bandpass_filter['order'],
filter_f_min=eng_conf.ids_synthetic_bandpass_filter['f_min'],
filter_f_max=eng_conf.ids_synthetic_bandpass_filter['f_max'],
gnss_nsta=self.gnss_conf.n_stations, # TODO
gnss_dat=self.gnss_conf.dat_path, # TODO
gnss_weight=self.gnss_conf.weight, # TODO
insar_ngrid=self.insar_conf.n_grids, # TODO
insar_dat=self.insar_conf.dat_path, # TODO
insar_weight=self.insar_conf.weight, # TODO
geodetic_gf_dir=eng_conf.ids_geodetic_store,
geodetic_gf_z=eng_conf.ids_geodetic_files['Z'],
geodetic_gf_radial=eng_conf.ids_geodetic_files['R'],
geodetic_gf_transvers=eng_conf.ids_geodetic_files['T'],
idisc=1,
subfault='#',
subfault_file="' '",
n_subfaults=0,
ref_subfault=None,
focmec='',
strike=getattr(event.moment_tensor, 'strike{}'.format(np)),
dip=getattr(event.moment_tensor, 'dip{}'.format(np)),
rake=getattr(event.moment_tensor, 'rake{}'.format(np)),
n_iter_max=self.run_conf.n_iterations,
run_dir=op.relpath(self.rundir, start=self.inputdir) + os.sep)
t_max = self.run_conf.t_max_snapshots
deltat = self.run_conf.deltat_snapshots
snaps = ["'Snapshot_{}s.dat' {:<10.1f}{:<10.1f}".format(t, 0.0, t)
for t in num.arange(deltat, t_max + deltat, deltat)]
snaps += [
"'SnapshotR_{}s.dat' {:<10.1f}{:<10.1f}".format(t, t - deltat, t)
for t in num.arange(deltat, t_max + deltat, deltat)]
d['n_snaps'] = len(snaps)
d['snapshot_lines'] = '\n '.join(snaps)
template = '''# Example for the input file of FORTRAN77 program "ids2020" for kinematic earthquake
# source imaging based on the Iterative Deconvolution and Stacking (IDS) Method using
# seismic waveform and static displacement data jointly.
#
# written by Rongjiang Wang
# Helmholtz Centre Potsdam
# GFZ German Research Centre for Geosciences
# e-mail: wang@gfz-potsdam.de
# Phone +49 331 2881209
# Fax +49 331 2881204
#
# Last modified: Potsdam, August, 2020
#
# Reference:
# Zhang and Wang et al. (2014). Automatic imaging of earthquake rupture processes
# by iterative deconvolution and stacking of high-rate GNSS and strong motion
# seismograms, JGR Solid Earth, 119(7), 5633-5650.
#
#################################################################
## ##
## If not specified otherwise, SI Unit System is used overall! ##
## ##
## ##
## Each comment line should start with "#" ##
## and no blank line is allowed! ##
## ##
#################################################################
#
#================================================================================
# EARTHQUAKE LOCATION PARAMETERS
# ------------------------------
# 1. origin time [s] (year, month, day, hour, minute [positive integer number]
# and second [positive or negative real number]), which will be also used as
# the reference time of the output data
# 2. hypocentre (lat[deg], lon[deg], depth[km])
# 3. preliminary estimate of moment magnitude
# Note: the hypocentre location and origin time (at least to minute) should be
# consistent with those given in the "StationInfo.dat" file of the
# waveform data (s. below)
#--------------------------------------------------------------------------------
{origin_time}
{lat} {lon} {depth}
{mag}
#================================================================================
# WAVEFORM DATA (HIGH-RATE-GNSS/STRONG-MOTION SEISMOGRAMS)
# --------------------------------------------------------------------
# 1. folder name of the data
# Note: this folder should include a file 'StationInfo.dat' giving all
# necessary information about the local monitoring network.
# 2. lower and upper thresholds of the epicentral distances [km] (stations
# outside the threshold distances will not be used)
# 3. weight of the static offset data (obtained automatically from the velocity
# waveforms after an empirical baseline correction) relative to the waveform
# data (suggested value 0.0-1.0)
#--------------------------------------------------------------------------------
'{waveform_dir}'
{dist_min} {dist_max}
{waveform_static_weight}
#================================================================================
# GREEN'S FUNCTION DATABASE
# -------------------------
# 1. Green's function database (folder name)
# 2. Parameters of Butterworth bandpass filter applied to the synthetics:
# order, lower and upper cutoff frequencies [Hz]
# computation [Hz]
#--------------------------------------------------------------------------------
'{gf_dir}'
{filter_order} {filter_f_min} {filter_f_max}
#================================================================================
# GNSS STATIC OFFSETS