
v0.4

obspy
braunfuss 2 years ago
parent commit 82226d08fb
6 changed files with 299 additions and 600 deletions
  1. +0 -82    Waveform/DataDir.py
  2. +0 -1     Waveform/Version.py
  3. +0 -515   Waveform/pyrocko_down.py
  4. +297 -0   example/events/EGYPT_1995-11-22T22-16-55/EGYPT_1995-11-22T22-16-55.config
  5. +2 -2     example/global.conf
  6. +0 -0     example/tttgrid/.gitkeep

+0 -82   Waveform/DataDir.py

@@ -1,82 +0,0 @@
import os
import sys
import platform

WINDOWS = (platform.system() == 'Windows')

# add local directories to import path
sys.path.append('../Common/')

#import fnmatch
import obspy.core.trace

# Import from Common
import Globals                    # Own global data
import Basic                      # Own module with basic functions
import Logfile                    # Implements logfile
from DataTypes import Station

DATA_DIR = 'data'                 # root name of data directory (relative to event dir)
FILE_NAME_FORMAT = '%s.%s.%s.%s.D.%s.%s'

# -------------------------------------------------------------------------------------------------

def filename(trace, day):
    postfix = str("%03d" % day.julday)

    if type(trace) is obspy.core.trace.Trace:
        t = trace.stats
        filename = FILE_NAME_FORMAT % (t.network, t.station, t.location, t.channel,
                                       t.starttime.year, postfix)
    else:
        Logfile.exception('DataDir.filename', str(type(trace)))
        Logfile.abort('')

    #Logfile.add(filename)
    return filename

# -------------------------------------------------------------------------------------------------

def getFileNames(eventDir=None):
    if eventDir is None:
        eventDir = Globals.EventDir()

    names = []
    path = os.path.join(eventDir, DATA_DIR)

    for root, dirs, files in os.walk(path):
        for s in files:
            names.append(s)

    #Logfile.addLines(names)
    return sorted(names)

# -------------------------------------------------------------------------------------------------

def getNetworks(eventDir=None):
    files = getFileNames(eventDir)
    networks = []

    for s in files:
        net = str.split(s, '.')[0]
        networks.append(net)

    networks = sorted(set(networks))
    #Logfile.addLines(networks)
    return networks


def isNetwork(network, eventDir=None):
    assert network is not None
    return network in getNetworks(eventDir)

# -------------------------------------------------------------------------------------------------

+0 -1   Waveform/Version.py

@@ -1 +0,0 @@
VERSION_STRING = 'Version 0.2 - 8.May 2018'

+0 -515   Waveform/pyrocko_down.py

@@ -1,515 +0,0 @@
from pyrocko.client import fdsn, catalog
import os
from pyrocko import util, io, trace, model, gf
import sys
sys.path.append('../tools/')
sys.path.append('../Common/')
import config
import Globals
import Basic
from optparse import OptionParser
from configparser import SafeConfigParser
from pyrocko import util, io, trace, cake
from config import Event, Trigger
from ConfigFile import ConfigObj, FilterCfg, OriginCfg
global options, args
from pyrocko.io import stationxml
from pyrocko.gf import ws, LocalEngine, Target, DCSource, RectangularSource
from pyrocko import util, model
from pyrocko.client import catalog
import numpy as num
def main(args):
    parser = OptionParser(usage="\npython %prog -t 2009-12-31T12:23:34 -d 5 -m SDS -k key -s all/acq")
    parser.add_option("-t", "--time", type="string", dest="time", help="time")
    parser.add_option("-d", "--duration", type="string", dest="duration", help="duration in min")
    parser.add_option("-m", "--sdsfolder", type="string", dest="sdsfolder", help="sdsfolder")
    parser.add_option("-s", "--station", type="string", dest="stationversion", help="stationversion")
    parser.add_option("-f", "--evpath", type="string", dest="eventpath", help="eventpath")
    parser.add_option("-x", "--dummy", type="string", dest="station", help="dummy")
    parser.add_option("-n", "--dummy2", type="string", dest="network", help="dummy2")

    return parser.parse_args(args)


def globalConf():
    cDict = {}
    parser = SafeConfigParser()
    parser.read(os.path.join('../', 'global.conf'))

    for section_name in parser.sections():
        for name, value in parser.items(section_name):
            cDict[name] = value

    return cDict
params = globalConf()
options, args = main(sys.argv)

Basic.checkExistsDir(options.eventpath, isAbort=True)
Globals.setEventDir(options.eventpath)

C = config.Config(options.eventpath)
Origin = C.parseConfig('origin')
Conf = globalConf()
Config = C.parseConfig('config')

filter = FilterCfg(Config)
cfg = ConfigObj(dict=Config)

minDist = float(params['mindist'])
maxDist = float(params['maxdist'])

ev = Event(Origin['lat'], Origin['lon'], Origin['depth'], Origin['time'])
event = model.Event(lat=float(ev.lat), lon=float(ev.lon),
                    depth=float(ev.depth)*1000.,
                    time=util.str_to_time(ev.time))

tmin = util.str_to_time(ev.time)-40
tmax = util.str_to_time(ev.time)+40

global_cmt_catalog = catalog.GlobalCMT()
events = global_cmt_catalog.get_events(
    time_range=(tmin, tmax),
    latmin=float(ev.lat)-1.,
    latmax=float(ev.lat)+1,
    lonmin=float(ev.lon)-1,
    lonmax=float(ev.lon)+1)
event_cat = events[0]

source = gf.DCSource(lat=event_cat.lat, lon=event_cat.lon,
                     strike=event_cat.moment_tensor.strike1,
                     rake=event_cat.moment_tensor.rake1,
                     dip=event_cat.moment_tensor.dip1,
                     magnitude=event.magnitude)

newFreq = float(filter.newFrequency())
options.time = Origin['time']
options.duration = int(Conf['duration'])

sdspath = os.path.join(options.eventpath, 'data')
try:
    os.mkdir(sdspath)
except Exception:
    pass

model.dump_events([event], sdspath+'event.pf')

if float(params['duration']) == 0:
    tmin = util.str_to_time(ev.time)+float(params['tmin'])
    tmax = util.str_to_time(ev.time)+float(params['tmax'])
else:
    tmin = util.str_to_time(ev.time)
    tmax = util.str_to_time(ev.time) + float(params['duration'])
def get_stations(site, lat, lon, rmin, rmax, tmin, tmax,
                 channel_pattern='BH*'):

    extra = {}
    if site == 'iris':
        extra.update(matchtimeseries=True)

    sx = fdsn.station(
        site=site, latitude=lat, longitude=lon,
        minradius=rmin, maxradius=rmax,
        startbefore=tmin, endafter=tmax, channel=channel_pattern,
        format='text', level='channel', includerestricted=False)

    return sx.get_pyrocko_stations()


site = 'geofon'
minDist = float(params['mindist'])
maxDist = float(params['maxdist'])
diffDist = (maxDist - minDist)/9.

displacement_geofon = []
stations_disp_geofon = []
stations_real_geofon = []
gaps = []
trs_projected_geofon = []
trs_projected_displacement_geofon = []

quantity_to_unit = {
    'displacement': 'M',
    'velocity': 'M/S',
    'acceleration': 'M/S**2'}

quantity = cfg.quantity()
# First attempt: request all GEOFON stations and waveforms for the full distance range at once.
try:
    for l in range(0, 1):
        gaps = []
        stations_geofon = get_stations(site, event.lat, event.lon, minDist,
                                       maxDist, tmin, tmax, 'BH*')

        nstations_geofon = [s for s in stations_geofon]
        selection_geofon = fdsn.make_data_selection(nstations_geofon, tmin,
                                                    tmax)
        request_waveform_geofon = fdsn.dataselect(site=site,
                                                  selection=selection_geofon)

        with open(os.path.join(sdspath,
                               'traces_geofon_part%s.mseed' % l), 'wb') as file:
            file.write(request_waveform_geofon.read())
        print('traces written')

        traces_geofon = io.load(os.path.join(sdspath,
                                             'traces_geofon_part%s.mseed' % l))

        # keep only stations for which traces actually arrived
        for tr in traces_geofon:
            for st in stations_geofon:
                for channel in st.channels:
                    if tr.station == st.station and tr.location == st.location and channel.name == tr.channel and tr.location == st.location and tr.network == st.network:
                        stations_real_geofon.append(st)
                        gaps.append(st.station)

        remove = [x for x in gaps if gaps.count(x) > 3]
        for re in remove:
            stations_real_geofon.remove(re)

        request_response = fdsn.station(
            site=site, selection=selection_geofon, level='response')
        request_response.dump(filename=os.path.join(sdspath,
                              'responses_geofon_part%s.yml' % l))
        request_response.dump_xml(filename=os.path.join(sdspath,
                                  'responses_geofon_part%s.xml' % l))

        sx = stationxml.load_xml(filename=os.path.join(sdspath,
                                 'responses_geofon_part%s.xml' % l))
        pyrocko_stations = sx.get_pyrocko_stations()

        event_origin = gf.Source(
            lat=event.lat,
            lon=event.lon)

        traces_geofon = io.load(os.path.join(sdspath,
                                'traces_geofon_part%s.mseed' % l))

        # rotate the traces to R, T, Z using the event back azimuth
        projections = []
        for station in stations_real_geofon:
            backazimuth = source.azibazi_to(station)[1]
            projections.extend(station.guess_projections_to_rtu(
                out_channels=('R', 'T', 'Z'),
                backazimuth=backazimuth))

        for matrix, in_channels, out_channels in projections:
            deps = trace.project_dependencies(
                matrix, in_channels, out_channels)
            trs_projected_geofon.extend(
                trace.project(
                    traces_geofon, matrix,
                    in_channels, out_channels))

        # restitute the traces using the downloaded station responses
        disp_rot = []
        for tr in traces_geofon:
            for station in stations_real_geofon:
                if tr.station == station.station and tr.location == station.location:
                    try:
                        polezero_response = request_response.get_pyrocko_response(
                            nslc=tr.nslc_id,
                            timespan=(tr.tmin, tr.tmax),
                            fake_input_units=quantity_to_unit[quantity])
                        restituted = tr.transfer(
                            tfade=2.,
                            freqlimits=(0.01, 0.1, 1., 2.),
                            transfer_function=polezero_response,
                            invert=True)
                        if quantity == 'velocity':
                            tr.ydata = num.diff(tr.ydata)
                        displacement_geofon.append(restituted)
                        disp_rot.append(restituted)
                        stations_disp_geofon.append(station)
                    except:
                        pass

        model.dump_stations(stations_disp_geofon,
                            os.path.join(sdspath,
                                         'stations_geofon_part%s.txt' % l))

        trs_projected_displacement_geofon.extend(
            trace.project(
                disp_rot, matrix,
                in_channels, out_channels))
# Fallback: split the distance range into rings and download each part separately.
except:
    for l in range(0, 9):
        try:
            gaps = []
            maxDist = minDist+diffDist
            stations_geofon = get_stations(site, event.lat, event.lon, minDist,
                                           maxDist, tmin, tmax, 'BH*')
            nstations_geofon = [s for s in stations_geofon]
            selection_geofon = fdsn.make_data_selection(nstations_geofon, tmin, tmax)
            request_waveform_geofon = fdsn.dataselect(site=site, selection=selection_geofon)

            with open(os.path.join(sdspath, 'traces_geofon_part%s.mseed' % l), 'wb') as file:
                file.write(request_waveform_geofon.read())
            print('traces written')

            traces_geofon = io.load(os.path.join(sdspath, 'traces_geofon_part%s.mseed' % l))

            for tr in traces_geofon:
                for st in stations_geofon:
                    for channel in st.channels:
                        if tr.station == st.station and tr.location == st.location and channel.name == tr.channel and tr.location == st.location and tr.network == st.network:
                            stations_real_geofon.append(st)
                            gaps.append(st.station)

            remove = [x for x in gaps if gaps.count(x) > 3]
            for re in remove:
                stations_real_geofon.remove(re)

            request_response = fdsn.station(
                site=site, selection=selection_geofon, level='response')
            request_response.dump(filename=os.path.join(sdspath, 'responses_geofon_part%s.yml' % l))
            request_response.dump_xml(filename=os.path.join(sdspath, 'responses_geofon_part%s.xml' % l))

            sx = stationxml.load_xml(filename=os.path.join(sdspath, 'responses_geofon_part%s.xml' % l))
            pyrocko_stations = sx.get_pyrocko_stations()

            event_origin = gf.Source(
                lat=event.lat,
                lon=event.lon)

            traces_geofon = io.load(os.path.join(sdspath, 'traces_geofon_part%s.mseed' % l))

            projections = []
            for station in stations_real_geofon:
                backazimuth = source.azibazi_to(station)[1]
                projections.extend(station.guess_projections_to_rtu(
                    out_channels=('R', 'T', 'Z'),
                    backazimuth=backazimuth))

            for matrix, in_channels, out_channels in projections:
                deps = trace.project_dependencies(
                    matrix, in_channels, out_channels)
                trs_projected_geofon.extend(
                    trace.project(
                        traces_geofon, matrix,
                        in_channels, out_channels))

            disp_rot = []
            for tr in traces_geofon:
                for station in stations_real_geofon:
                    if tr.station == station.station and tr.location == station.location:
                        try:
                            polezero_response = request_response.get_pyrocko_response(
                                nslc=tr.nslc_id,
                                timespan=(tr.tmin, tr.tmax),
                                fake_input_units=quantity_to_unit[quantity])
                            restituted = tr.transfer(
                                tfade=2.,
                                freqlimits=(0.01, 0.1, 1., 2.),
                                transfer_function=polezero_response,
                                invert=True)
                            if quantity == 'velocity':
                                tr.ydata = num.diff(tr.ydata)
                            disp_rot.append(restituted)
                            displacement_geofon.append(restituted)
                            stations_disp_geofon.append(station)
                        except:
                            pass
        except:
            pass

        model.dump_stations(stations_disp_geofon, os.path.join(sdspath, 'stations_geofon_part%s.txt' % l))
        minDist = minDist+diffDist

        try:
            trs_projected_displacement_geofon.extend(
                trace.project(
                    disp_rot, matrix,
                    in_channels, out_channels))
        except:
            pass
io.save(displacement_geofon, os.path.join(sdspath,'traces_restituted_geofon.mseed'))
model.dump_stations(stations_disp_geofon, os.path.join(sdspath,'stations_disp_geofon.txt'))
model.dump_stations(stations_disp_geofon, os.path.join(sdspath,'stations_geofon.txt'))
io.save(trs_projected_displacement_geofon, os.path.join(sdspath,'traces_restituted_rotated_geofon.mseed'))
io.save(trs_projected_geofon, os.path.join(sdspath,'traces_rotated_geofon.mseed'))
stations_sites = []
stations_real_sites = []
stations_disp_sites = []
displacement_sites = []
traces_sites = []
trs_projected = []
trs_projected_displacement = []
minDist = float(params['mindist'])
maxDist = float(params['maxdist'])
sites = ['iris','orfeus', 'resif', 'usp', 'bgr', 'ingv', 'geonet', 'ethz', 'ncedc', 'knmi', 'isc', 'ipgp', 'koeri']
# Repeat the same download, rotation and restitution for the other FDSN sites.
for site in sites:
    try:
        stations_site = get_stations(site, event.lat, event.lon, minDist, maxDist, tmin, tmax, 'BH*')
        if not stations_sites:
            stations_sites = stations_site
        else:
            stations_sites = stations_sites + stations_site

        nstations_site = [s for s in stations_site]
        selection_site = fdsn.make_data_selection(nstations_site, tmin, tmax)
        request_waveform_site = fdsn.dataselect(site=site, selection=selection_site)

        with open(os.path.join(sdspath, 'traces_%s.mseed' % site), 'wb') as file:
            file.write(request_waveform_site.read())
        print('traces written')

        traces_site = io.load(os.path.join(sdspath, 'traces_%s.mseed' % site))
        stations_real_site = []

        if not traces_sites:
            traces_sites = traces_site
        else:
            traces_sites = traces_sites+traces_site

        gaps = []
        for tr in traces_site:
            for st in stations_site:
                for channel in st.channels:
                    if tr.station == st.station and tr.location == st.location and channel.name == tr.channel and tr.location == st.location and tr.network == st.network:
                        stations_real_site.append(st)
                        stations_real_sites.append(st)
                        gaps.append(st.station)

        remove = [x for x in gaps if gaps.count(x) > 3]
        for re in remove:
            stations_real_site.remove(re)

        request_response = fdsn.station(
            site=site, selection=selection_site, level='response')
        request_response.dump(filename=os.path.join(sdspath, 'responses_%s.yml' % site))
        request_response.dump_xml(filename=os.path.join(sdspath, 'responses_%s.xml' % site))

        sx = stationxml.load_xml(filename=os.path.join(sdspath, 'responses_%s.xml' % site))
        pyrocko_stations = sx.get_pyrocko_stations()

        event_origin = gf.Source(
            lat=event.lat,
            lon=event.lon)

        traces_site = io.load(os.path.join(sdspath, 'traces_%s.mseed' % site))

        projections = []
        for station in stations_real_site:
            backazimuth = source.azibazi_to(station)[1]
            projections.extend(station.guess_projections_to_rtu(
                out_channels=('R', 'T', 'Z'),
                backazimuth=backazimuth))

        for matrix, in_channels, out_channels in projections:
            deps = trace.project_dependencies(
                matrix, in_channels, out_channels)
            trs_projected.extend(
                trace.project(
                    traces_site, matrix,
                    in_channels, out_channels))

        displacement_site = []
        stations_disp_site = []
        disp_rot = []

        for tr in traces_site:
            for station in stations_real_site:
                if tr.station == station.station and tr.location == station.location:
                    try:
                        polezero_response = request_response.get_pyrocko_response(
                            nslc=tr.nslc_id,
                            timespan=(tr.tmin, tr.tmax),
                            fake_input_units=quantity_to_unit[quantity])
                        restituted = tr.transfer(
                            tfade=2.,
                            freqlimits=(0.01, 0.1, 1., 2.),
                            transfer_function=polezero_response,
                            invert=True)
                        if quantity == 'velocity':
                            tr.ydata = num.diff(tr.ydata)
                        displacement_site.append(restituted)
                        displacement_sites.append(restituted)
                        disp_rot.append(restituted)
                        stations_disp_site.append(station)
                        stations_disp_sites.append(station)
                    except:
                        pass

        trs_projected_displacement.extend(
            trace.project(
                disp_rot, matrix,
                in_channels, out_channels))

        io.save(displacement_site, os.path.join(sdspath, 'traces_restituted_%s.mseed' % site))
        model.dump_stations(stations_disp_site, os.path.join(sdspath, 'stations_disp_%s.txt' % site))
        model.dump_stations(stations_disp_site, os.path.join(sdspath, 'stations_%s.txt' % site))
    except:
        pass
stations_all = stations_disp_sites+stations_disp_geofon
for stg in stations_real_geofon:
    for sti in stations_real_sites:
        if sti.station == stg.station and sti.location == stg.location and sti.network == stg.network:
            try:
                stations_all.remove(sti)
            except:
                pass
        else:
            pass

try:
    traces_all = traces_sites+traces_geofon
    traces_all_rot = trs_projected_geofon+trs_projected
except:
    traces_all = traces_sites
    traces_all_rot = trs_projected

for tr in traces_all:
    try:
        tr.downsample_to(newFreq)
    except:
        pass
io.save(traces_all, os.path.join(sdspath,'traces.mseed'))
model.dump_stations(stations_all, os.path.join(sdspath,'stations.txt'))
io.save(trs_projected_displacement, os.path.join(sdspath,'traces_restituted_rotated_sites.mseed'))
io.save(trs_projected, os.path.join(sdspath,'traces_rotated_sites.mseed'))
io.save(traces_all_rot, os.path.join(sdspath,'traces_rotated.mseed'))
try:
    stations_all_disp = stations_disp_sites+stations_disp_geofon
    for stg in stations_disp_geofon:
        for sti in stations_disp_sites:
            if sti.station == stg.station and sti.location == stg.location:
                try:
                    stations_all_disp.remove(sti)
                except:
                    pass
            else:
                pass

    traces_all_disp = displacement_sites+displacement_geofon
    traces_all_rot_disp = trs_projected_displacement_geofon+trs_projected_displacement

    for tr in traces_all_disp:
        tr.downsample_to(newFreq)
    for tr in traces_all_rot_disp:
        tr.downsample_to(newFreq)

    for st in stations_all_disp:
        for channel in st.channels:
            if channel.name == 'BHE':
                channel.name = 'R'
            if channel.name == 'BHN':
                channel.name = 'T'
            if channel.name == 'BHZ':
                channel.name = 'Z'

    io.save(traces_all_rot_disp, os.path.join(sdspath, 'traces_restituted_rotated.mseed'))
    io.save(traces_all_disp, os.path.join(sdspath, 'traces_restituted.mseed'))
    model.dump_stations(stations_all_disp, os.path.join(sdspath, 'stations_disp.txt'))
except:
    stations_all_disp = stations_disp_sites
    traces_all_disp = displacement_sites
    traces_all_rot_disp = trs_projected_displacement

    for tr in traces_all_disp:
        tr.downsample_to(newFreq)
    for tr in traces_all_rot_disp:
        tr.downsample_to(newFreq)

    for st in stations_all_disp:
        for channel in st.channels:
            if channel.name == 'BHE':
                channel.name = 'R'
            if channel.name == 'BHN':
                channel.name = 'T'
            if channel.name == 'BHZ':
                channel.name = 'Z'

    io.save(traces_all_rot_disp, os.path.join(sdspath, 'traces_restituted_rotated.mseed'))
    io.save(traces_all_disp, os.path.join(sdspath, 'traces_restituted.mseed'))
    model.dump_stations(stations_all_disp, os.path.join(sdspath, 'stations_disp.txt'))

model.dump_stations(stations_all_disp, os.path.join(sdspath, 'stations_cluster.txt'))

+297 -0   example/events/EGYPT_1995-11-22T22-16-55/EGYPT_1995-11-22T22-16-55.config

@@ -0,0 +1,297 @@
[clusterparameter]
maxCluster = 100
minStationAroundInitialCluster = 10
initialstationdistance = 100
cutoff = 10
runs = 1
#minimum distance of initial centroids
centroidminDistance = 5
comparedelta = 2
#maximum distance from station to cluster center
stationdistance = 60
minClusterStation = 1
[traveltime calculation options]
tttopt = -ph P
[algorithm method]
#2 kmeans
cam = 2
[data]
# Phase to consider [right now only P and S possible!]
ttphases=P
# resample data to this frequency (Hz) or sampling interval (s); should match your gf store
new_frequence = 0.5
# if the download was done with the pyrocko_download command, set to 1
pyrocko_download = 1
# if the download was done with pyrocko, you can choose between velocity and displacement
quantity = velocity
#if not using pyrocko download:
export_unfiltered=false
export_filtered=false
export_resampled=false
# if colosseo synthetics should be used, set to 1
colesseo_input = 0
# give the colosseo scenario.yml file
colosseo_scenario_yml = /media/asteinbe/data/asteinbe/mydl/scenario.yml
[synthetic test]
# do a synthetic test with the real station distribution; specify the
# parameters in the event folder in event.syn
synthetic_test = 0
# add noise to the synthetics, based on the variance of the real stations
# (noise covariance is not enabled right now)
synthetic_test_add_noise = 0
synthetic_test_pertub_arrivals = 0
shift_max = 4 # [s] also the maximum shift for empirical corrections
# weight the arrays with equal weight per azimuth. The azimuth range is divided into
# 12 blocks; all arrays in each block get a combined weight of 1.
weight_by_azimuth = 1
# bootstrap the arrays to estimate the uncertainty:
bootstrap_array_weights = 0
# number of bootstraps to carry out:
n_bootstrap = 0
[general parameter]
correct_shifts_empirical_run = 0
# enable to run an optimization for the time shifts that lead to the highest semblance (foreshock/aftershock)
correct_shifts = 0
# enable to apply shift correction, either empirical (switch below) or xcorr (default)
correct_shifts_empirical = 0
correct_shifts_empirical_manual = 0
correct_shifts_empirical_manual_station_wise = 0
# enable to apply empirical time shifts; the empirical run then needs to be disabled
# dimx of grid for empirical correction (should be the same as for the main process in most cases)
dimx_emp = 50
# dimy of grid (should be the same as for the main process in most cases)
dimy_emp = 50
# step length in s.
step_emp = 2
# window length in s.
winlen_emp = 8
#calculate semblance from all arrays in one combined step (otherwise each array's semblance is calculated
# separately and then combined, weighted by the azimuthal distribution of arrays)
combine_all = 0
#normalize all semblances to 0-1
norm_all=1
#after each depth/filter inspect the semblance
inspect_semb = 0
#depths= from,to,steps relative to depth in origin config
depths=5,5,5
# run each depth step with a number of filter(s), used for high vs. low freq.
filters=2
# dimx of grid
dimx = 5
# dimy of grid
dimy = 5
dimz = 5
# min distance to origin of stations
minDist = 0
# max distance to origin of stations
maxDist = 93
# step length in s.
step = 2
# window length in s.
winlen = 8
# step length in s.
step_f2 = 2
# window length in s.
winlen_f2 = 8
# length of data before phase onset in s.
forerun = 10
# length of data after phase onset in s.
duration = 20
security = 200
# gridspacing in degree
gridspacing = 0.025
traveltime_model = ak135-f-continental.m.nd
#apply futterman_attenuation to S waves
futterman_attenuation = 0
[Optimization parameter]
# Optimize for a model with array responses as input
# enable optimization
optimize = 0
# enable optimization of combined semblance
optimize_all = 0
[process parameter]
#number of cores for traveltime calculation
ncore = 2
# create output of compressed sensing as grid [warning: experimental]
cs = 0
#weight array contributions by noise variance (pre-event)
weight_by_noise = 0
# shift the traces to the theoretical onset, disregarding curved travel times;
# produces better cross-correlations, but is only valid for small arrays
shift_by_phase_onset = 0
# use a phase weighted stacking
shift_by_phase_pws = 0
# shift by crosscorrelation
shift_by_phase_cc = 0
# create an obspy array response
array_response = 0
[focal mechanism solution values from event file]
#not used right now
fm = 1
[automatic picker and correlation parameter]
xcorr=1
# for manual phase-shift picking for each array, set autoxcorrcorrectur to 1:
autoxcorrcorrectur = 1
# crosscorrelation threshold for excluding stations
xcorrtreshold = 0.6
#filter for referencestation for automatic picker
#should match your main filter
refstationfreqmin=0.03
refstationfreqmax=1.00
refstationcorners=2
refstationzph=false
#STA/LTA parameter
refsta=0.5
reflta=4
[array parameter]
networks=r7
r1=XA.MM04..Z|XA.MM05..Z
r1refstation=
r1phase=P
r2=IC.LSA..Z|IC.XAN..Z|IU.CHTO..Z
r2refstation=
r2phase=P
r3=GE.KBS..Z|IU.KEV..Z
r3refstation=
r3phase=P
r4=GT.BDFB..Z
r4refstation=
r4phase=P
r5=G.KOG..Z|IU.SJG..Z
r5refstation=
r5phase=P
r6=GT.DBIC..Z
r6refstation=
r6phase=P
r7=GE.LID..Z|GE.MHV..Z|GE.MLR..Z|GE.MORC..Z|GE.SUW..Z|CZ.DPC..Z|IU.KIEV..Z|IU.KONO..Z
r7refstation=
r7phase=P
r11=XA.MM12..Z|XA.MM13..Z
r11refstation=
r11phase=P
r15=IU.HRV..Z|LD.PAL..Z|XA.MM01..Z|XA.MM02..Z|XA.MM03..Z|XJ.BLUE..Z|XJ.CLER..Z
r15refstation=
r15phase=P
r19=GE.DSB..Z|IU.PAB..Z
r19refstation=
r19phase=P
r25=IU.PET..Z|IU.YSS..Z|PS.OGS..Z|PS.TSK..Z
r25refstation=
r25phase=P
r33=GT.BOSA..Z|IU.TSUM..Z
r33refstation=
r33phase=P
r42=G.ATD..Z|GT.BGCA..Z
r42refstation=
r42phase=P
r52=XA.MM08..Z|XA.MM09..Z|XA.MM10..Z
r52refstation=
r52phase=P
r61=GE.WLF..Z|GR.GRA1..Z|IU.GRFO..Z|MN.WDD..Z
r61refstation=
r61phase=P
r71=GE.BGIO..Z|KZ.AKT..Z|MN.KEG..Z
r71refstation=
r71phase=P
r84=IC.HIA..Z|IC.WMQ..Z|IU.ULN..Z|IU.YAK..Z
r84refstation=
r84phase=P
r98=IC.BJT..Z|IU.INCN..Z|IU.TATO..Z
r98refstation=
r98phase=P
[beamforming method]
#delaysum
#capon
beam = delaysum
[filterparameter]
#use dynamic filtering (attached to theoretical moment release)
dynamic_filter = 0
# define main filter for picking etc:
filterswitch=1
###############################################################
#Parameter for first filter
#bp butterworth
# low cut corner frequency
flo = 0.08
# high cut corner frequency
fhi = 0.24
# number of filter sections
ns = 4
# TRUE -> zero phase filter
zph = false
###############################################################
#Parameter for second filter
#bp butterworth
# low cut corner frequency
flo2 = 0.1
# high cut corner frequency
fhi2 = 0.5
# number of filter sections
ns2 = 4
# TRUE -> zero phase filter
zph2 = false
###############################################################
#Alternative lowpass filter example
#lowpass butterworth
l_fc = 1.5
# number of filter sections
l_ns = 4
# TRUE -> zero phase filter
l_zph = false
###############################################################
#Alternative highpass filter example
#highpass butterworth
h_fc = 2
# number of filter sections
h_ns = 4
# TRUE -> zero phase filter
h_zph = false
##################################################################

+2 -2   global.conf → example/global.conf


+0 -0   example/tttgrid/.gitkeep

