Browse Source

PEP 8 fixes, cleanup, streamlined cluster algorithm

obspy
asteinbe 2 years ago
parent
commit
4dfdba5bc8
  1. 82
      src/apps/plot.py
  2. 4
      src/cluster/ClusterProgs.py
  3. 9
      src/cluster/callcluster.py
  4. 236
      src/cluster/cluster2.py
  5. 3
      src/process/sembCalc.py
  6. 125
      src/process/semp.py
  7. 5
      src/process/stacking.py
  8. 13
      src/tools/create.py

82
src/apps/plot.py

@ -4,14 +4,11 @@ from mpl_toolkits.basemap import Basemap
import numpy as num
import numpy as np
import matplotlib.pyplot as plt
import os
from pathlib import Path
import sys
from matplotlib.pyplot import cm
from matplotlib.widgets import Slider
from pylab import plot, show, figure, scatter, axes, draw
from itertools import cycle
import random
import csv
from obspy.imaging.beachball import beach
import matplotlib.colors as colors
@ -683,7 +680,7 @@ def plot_integrated_movie():
i = 0
for k in np.nan_to_num(data[:,2]):
if k>data_int[i]:
data_int[i]= k
data_int[i] = k
i = i+1
if sys.argv[3] == 'combined':
@ -705,15 +702,17 @@ def plot_integrated_movie():
ratio_lat = num.max(northings)/num.min(northings)
ratio_lon = num.max(eastings)/num.min(eastings)
map.drawmapscale(num.min(eastings)+ratio_lon*0.25, num.min(northings)+ratio_lat*0.25, num.mean(eastings), num.mean(northings), 30)
map.drawmapscale(num.min(eastings)+ratio_lon*0.25,
num.min(northings)+ratio_lat*0.25,
num.mean(eastings), num.mean(northings), 50)
parallels = np.arange(num.min(northings),num.max(northings),0.2)
meridians = np.arange(num.min(eastings),num.max(eastings),0.2)
parallels = np.arange(num.min(northings),num.max(northings), int(ratio_lat))
meridians = np.arange(num.min(eastings),num.max(eastings), int(ratio_lon))
eastings, northings = map(eastings, northings)
map.drawparallels(parallels,labels=[1,0,0,0],fontsize=22)
map.drawmeridians(meridians,labels=[1,1,0,1],fontsize=22)
map.drawmeridians(meridians,labels=[1,1,0,1],fontsize=22, rotation=45)
x, y = map(data[:,1], data[:,0])
mins = np.max(data[:,2])
triang = tri.Triangulation(x, y)
@ -722,13 +721,10 @@ def plot_integrated_movie():
triang.set_mask(mask)
plt.tricontourf(triang, data_int, vmax=maxs, vmin=0, cmap=cm.coolwarm)
#plt.tricontourf(triang, data_int, cmap=cm.coolwarm)
m = plt.cm.ScalarMappable(cmap=cm.coolwarm)
m.set_array(data_int)
m.set_clim(0., maxs)
plt.colorbar(m, orientation="horizontal", boundaries=np.linspace(0, maxs, 10))
# plt.colorbar(orientation="horizontal")
plt.title(path_in_str)
event = 'events/'+ str(sys.argv[1]) + '/' + str(sys.argv[1])+'.origin'
@ -742,6 +738,7 @@ def plot_integrated_movie():
event_mech=[[float(s[-3:]) for s in row] for i,row in enumerate(reader) if i in desired]
x, y = map(event_cor[1][0],event_cor[0][0])
ax = plt.gca()
np1 = [event_mech[0][0], event_mech[1][0], event_mech[2][0]]
beach1 = beach(np1, xy=(x, y), width=0.09)
ax.add_collection(beach1)
@ -749,7 +746,9 @@ def plot_integrated_movie():
if argv == "--topography":
try:
xpixels = 1000
map.arcgisimage(service='World_Shaded_Relief', xpixels = xpixels, verbose= False)
map.arcgisimage(service='World_Shaded_Relief',
xpixels = xpixels,
verbose= False)
except:
pass
plt.savefig('time:'+str(i)+'_f1'+'.png', bbox_inches='tight')
@ -845,8 +844,8 @@ def plot_integrated_movie():
eastings = data[:,1]
northings = data[:,0]
plt.figure()
fig = plt.figure()
ax = fig.axes
map = Basemap(projection='merc', llcrnrlon=num.min(eastings),
llcrnrlat=num.min(northings),
urcrnrlon=num.max(eastings),
@ -867,18 +866,17 @@ def plot_integrated_movie():
x, y = map(data[:,1], data[:,0])
mins = np.max(data[:,2])
triang = tri.Triangulation(x, y)
isbad = np.less(data_int, 0.01)
isbad = np.less(data_int, 0.001)
mask = np.all(np.where(isbad[triang.triangles], True, False), axis=1)
#triang.set_mask(mask)
#plt.tricontourf(triang, data_int, vmax=maxs, vmin=0, cmap=cm.coolwarm)
plt.tricontourf(triang, data_int, cmap=cm.coolwarm)
triang.set_mask(mask)
plt.tricontourf(triang, data_int, vmax=maxs, vmin=0, cmap=cm.coolwarm)
#plt.tricontourf(triang, data_int, cmap=cm.coolwarm)
m = plt.cm.ScalarMappable(cmap=cm.coolwarm)
m.set_array(data_int)
print(maxs)
m.set_clim(0., maxs)
#plt.colorbar(m, orientation="horizontal", boundaries=np.linspace(0, maxs, 10))
plt.colorbar(orientation="horizontal")
plt.colorbar(m, orientation="horizontal", boundaries=np.linspace(0, maxs, 10))
# plt.colorbar(orientation="horizontal")
plt.title(path_in_str)
@ -905,6 +903,48 @@ def plot_integrated_movie():
pass
plt.savefig('time:'+str(i)+'_f1'+'.png', bbox_inches='tight')
if cfg.Bool('synthetic_test') is True:
from pyrocko.gf import ws, LocalEngine, Target, DCSource, RectangularSource, MTSource
Syn_in = C.parseConfig('syn')
syn_in = SynthCfg(Syn_in)
sources = []
if syn_in.source() == 'RectangularSource':
sources.append(RectangularSource(
lat=float(syn_in.lat_0()),
lon=float(syn_in.lon_0()),
east_shift=float(syn_in.east_shift_0())*1000.,
north_shift=float(syn_in.north_shift_0())*1000.,
depth=syn_in.depth_syn_0()*1000.,
strike=syn_in.strike_0(),
dip=syn_in.dip_0(),
rake=syn_in.rake_0(),
width=syn_in.width_0()*1000.,
length=syn_in.length_0()*1000.,
nucleation_x=syn_in.nucleation_x_0(),
slip=syn_in.slip_0(),
nucleation_y=syn_in.nucleation_y_0(),
velocity=syn_in.velocity_0(),
anchor=syn_in.anchor(),
time=util.str_to_time(syn_in.time_0())))
if syn_in.source() == 'DCSource':
sources.append(DCSource(
lat=float(syn_in.lat_0()),
lon=float(syn_in.lon_0()),
east_shift=float(syn_in.east_shift_0())*1000.,
north_shift=float(syn_in.north_shift_0())*1000.,
depth=syn_in.depth_syn_0()*1000.,
strike=syn_in.strike_0(),
dip=syn_in.dip_0(),
rake=syn_in.rake_0(),
time=util.str_to_time(syn_in.time_0()),
magnitude=syn_in.magnitude_0()))
for source in sources:
print(source)
n, e = source.outline(cs='latlon').T
e, n = map(e,n)
ax.fill(e, n, color=(0.5, 0.5, 0.5), lw = 2)
plt.show()
def plot_integrated():

4
src/cluster/ClusterProgs.py

@ -31,13 +31,12 @@ class Intern(object):
self.error(s)
def start(config):
intern = Intern()
if sys.argv[1] == 'cluster':
intern.checkProgramParameter(3,4)
intern.checkProgramParameter(3, 4)
workDir = [Globals.EventDir(), 'tmp2', 'cluster'] # ???
workDir = ['cluster']
@ -47,7 +46,6 @@ def start(config):
return False
Basic.changeDirectory(workDir) # create working directory
#Basic.removeFiles('.') # ... and empty it
os.system(cmd)
return True

9
src/cluster/callcluster.py

@ -1,16 +1,13 @@
import os
import sys
from optparse import OptionParser
from palantiri.tools import config
from palantiri.common import Globals
parser = OptionParser(usage="%prog -f eventpath ")
parser.add_option("-f", "--evpath", type="string", dest="evpath", help="evpath")
(options, args) = parser.parse_args()
options.evpath = args[0]
print(options, args)
def init():
@ -22,7 +19,7 @@ def init():
import palantiri
path = palantiri.__path__
at = os.path.join(path[0], 'cluster/cluster2.py')
cmd = sys.executable + ' ' + at + ' -f '+ options.evpath
cmd = sys.executable + ' ' + at + ' -f ' + options.evpath
print('cmd = ', cmd)
for i in range(tests):
@ -34,10 +31,10 @@ def init():
'cluster'))
at = os.path.join(path[0], 'cluster/evaluateCluster.py')
cmd = sys.executable + ' ' + at + ' -f '+ os.path.join(options.evpath, "cluster")
cmd = sys.executable + ' ' + at + ' -f ' + os.path.join(options.evpath,
"cluster")
os.system(cmd)
def main():
init()

236
src/cluster/cluster2.py

@ -1,5 +1,4 @@
import logging
import os.path as op
from optparse import OptionParser
from pyrocko import guts
import os
@ -8,9 +7,7 @@ import fnmatch
import random
import time
import shutil
import sys
import math
import time
from palantiri.common import Basic
from palantiri.common import Globals
@ -69,6 +66,7 @@ class Centroid(object):
self.lon = lon
self.rank = rank
def readMetaInfoFile(EventPath):
Logfile.red('Parsing MetaInfoFile')
@ -107,19 +105,30 @@ def readMetaInfoFile(EventPath):
return MetaL
def readpyrockostations(path, disp):
def readpyrockostations(path, disp, cfg):
if disp is True:
stations = model.load_stations(path+'/data/stations_cluster.txt')
else:
stations = model.load_stations(path+'/data/stations_cluster.txt')
MetaL = []
phases = cfg.Str('ttphases')
phases = phases.split(',')
for phase in phases:
if phase is 'P':
desired = 'Z'
if phase is 'S':
desired = 'T'
for sl in stations:
channel = sl.channels[0]
MetaL.append(Station(str(sl.network), str(sl.station),
str(sl.location), str(sl.channels[0].name), str(sl.lat), str(sl.lon),
str(sl.elevation),str(channel.dip),str(channel.azimuth),
str(channel.gain)))
for channel in sl.channels:
if channel.name[-1] == desired:
MetaL.append(Station(str(sl.network), str(sl.station),
str(sl.location), str(channel.name),
str(sl.lat), str(sl.lon),
str(sl.elevation), str(channel.dip),
str(channel.azimuth),
str(channel.gain)))
return MetaL
def readcolosseostations(scenario_path):
@ -127,10 +136,12 @@ def readcolosseostations(scenario_path):
MetaL = []
for sl in stations:
channel = sl.channels[2]
MetaL.append(Station(str(sl.network),str(sl.station),
str(sl.location),str(sl.channels[2])[:3],str(sl.lat),str(sl.lon),
str(sl.elevation),str(channel.dip),str(channel.azimuth),
str(channel.gain)))
MetaL.append(Station(str(sl.network), str(sl.station),
str(sl.location), str(sl.channels[2])[:3],
str(sl.lat), str(sl.lon),
str(sl.elevation), str(channel.dip),
str(channel.azimuth),
str(channel.gain)))
return MetaL
@ -139,7 +150,7 @@ def createFolder(EventPath):
Folder = {}
Logfile.red('Create working environment')
Folder['cluster'] = os.path.join(EventPath,'cluster')
Folder['cluster'] = os.path.join(EventPath, 'cluster')
if os.access(Folder['cluster'], os.F_OK) is False:
os.makedirs(Folder['cluster'])
@ -159,7 +170,7 @@ def createFolder(EventPath):
else:
Logfile.abort('create Folder: No write permissions for ' + os.getcwd())
Folder['config'] = os.path.join(os.getcwd(), '..', 'skeleton')
Folder['config'] = os.path.join(os.getcwd(), 'skeleton')
return Folder
@ -196,32 +207,117 @@ def checkStationAroundInitialCentroid(station, Config, StationMetaList):
return counter
def addOK(station,stationList,Config,MetaList):
def addOK(station, stationList, Config, MetaList):
cfg = ConfigObj(dict=Config)
minDist = 0
cfg = ConfigObj(dict=Config)
minDist = 0
minAround = cfg.UInt('minstationaroundinitialcluster')
t = 0
t = 0
for i in stationList:
sdelta = loc2degrees(station, i)
if sdelta > minDist:
aroundcounter = checkStationAroundInitialCentroid(station,Config,MetaList)
aroundcounter = checkStationAroundInitialCentroid(station,
Config,
MetaList)
if aroundcounter >= minAround:
t = 1
else:
t=0
t = 0
return t
else:
t=0
t = 0
return t
return t
def DeterminedInitialCentroids(Config, StationMetaList):
Logfile.red('Begin initial centroid search')
cfg = ConfigObj(dict=Config)
initialCentroids = []
usedIndexes = []
random.seed(time.clock())
if len(StationMetaList) == 0:
Logfile.red('Empty station list')
return initialCentroids
MAX_TIME_ALLOWED = 350
start = time.time()
radi = 0
if int(Config['maxcluster']) == 0:
to = len(StationMetaList)-1
else:
to = int(Config['maxcluster'])
while len(initialCentroids) < to:
dist_centroids = float(Config['centroidmindistance'])
randomIndex = radi = radi+random.randint(0, 5)
if randomIndex > len(StationMetaList):
randomIndex = random.randint(0, 5)
redraw = True
while redraw is True:
if randomIndex in usedIndexes:
randomIndex = random.randint(0, len(StationMetaList)-1)
else:
if len(usedIndexes) > 2:
for rdx in usedIndexes:
s1 = StationMetaList[randomIndex]
s2 = StationMetaList[rdx]
delta = loc2degrees(s1, s2)
if delta >= dist_centroids:
redraw = False
else:
redraw = False
usedIndexes.append(randomIndex)
around = checkStationAroundInitialCentroid(StationMetaList[randomIndex],
Config, StationMetaList)
found = False
if len(initialCentroids) == 0:
initialCentroids.append(StationMetaList[randomIndex])
found = True
start = time.time()
else:
t = addOK(StationMetaList[randomIndex], initialCentroids,
Config, StationMetaList)
if (time.time() - start) > MAX_TIME_ALLOWED:
break
if t == 1:
if len(usedIndexes) > 1:
for rdx in usedIndexes:
s1=StationMetaList[randomIndex]
s2=StationMetaList[rdx]
delta=loc2degrees(s1, s2)
if delta >= dist_centroids:
initialCentroids.append(StationMetaList[randomIndex])
found = True
else:
initialCentroids.append(StationMetaList[randomIndex])
found = True
else:
continue
if found:
initDist = cfg.Float('initialstationdistance')
Logfile.red('found initial cluster %d' %(len(initialCentroids)))
Logfile.red('centroid %s with %d stations around %s deegree' % (StationMetaList[randomIndex], around, initDist))
print(len(initialCentroids))
Logfile.red('Initial centroid search finished')
return initialCentroids
def createRandomInitialCentroids(Config, StationMetaList):
Logfile.red('Begin initial centroid search')
cfg = ConfigObj(dict=Config)
initialCentroids = []
usedIndexes = []
@ -231,10 +327,13 @@ def createRandomInitialCentroids(Config, StationMetaList):
Logfile.red('Empty station list')
return initialCentroids
MAX_TIME_ALLOWED = 50
MAX_TIME_ALLOWED = 350
start = time.time()
while len(initialCentroids) < int(Config['maxcluster']):
if int(Config['maxcluster']) == 0:
to = len(StationMetaList)-1
else:
to = int(Config['maxcluster'])
while len(initialCentroids) < to:
dist_centroids = float(Config['centroidmindistance'])
randomIndex = random.randint(0, len(StationMetaList)-1)
@ -247,7 +346,7 @@ def createRandomInitialCentroids(Config, StationMetaList):
for rdx in usedIndexes:
s1 = StationMetaList[randomIndex]
s2 = StationMetaList[rdx]
delta = loc2degrees(s1, s2)
delta = loc2degrees(s1, s2)
if delta >= dist_centroids:
redraw = False
else:
@ -271,9 +370,9 @@ def createRandomInitialCentroids(Config, StationMetaList):
if t == 1:
if len(usedIndexes) > 1:
for rdx in usedIndexes:
s1 = StationMetaList[randomIndex]
s2 = StationMetaList[rdx]
delta = loc2degrees(s1, s2)
s1=StationMetaList[randomIndex]
s2=StationMetaList[rdx]
delta=loc2degrees(s1, s2)
if delta >= dist_centroids:
initialCentroids.append(StationMetaList[randomIndex])
found = True
@ -285,8 +384,9 @@ def createRandomInitialCentroids(Config, StationMetaList):
continue
if found:
Logfile.red('found initial cluster %d' %(len(initialCentroids)))
Logfile.red('centroid %s with %d stations around %s deegree' %(StationMetaList[randomIndex],around,Config['centroidmindistance']))
initDist = cfg.Float('initialstationdistance')
Logfile.red('found initial cluster %d' % (len(initialCentroids)))
Logfile.red('centroid %s with %d stations around %s deegree' % (StationMetaList[randomIndex], around, initDist))
Logfile.red('Initial centroid search finished')
@ -298,23 +398,23 @@ def stationBelongTocluster(Config, CentroidList, StationMetaList):
clusterList = []
for i in StationMetaList:
mind = 100000
c= 0
mind = 0
c = 0
for j in CentroidList:
delta = loc2degrees(j, i)
c += 1
if mind > delta:
delta = abs(loc2degrees(j, i))
c += 1
if mind == 0:
mind = delta
i.member = c
#endfor
clusterList.append(Station(i.net,i.sta,i.loc,i.comp,i.lat,i.lon,i.ele,i.dip,i.azi,i.gain,i.member))
#endfor
else:
if delta < mind:
mind = delta
i.member = c
clusterList.append(Station(i.net, i.sta, i.loc, i.comp, i.lat, i.lon,
i.ele, i.dip, i.azi, i.gain, i.member))
return clusterList
# -------------------------------------------------------------------------------------------------
def calculateclusterCentre(Config, clusterStationList):
@ -342,32 +442,25 @@ def calculateclusterCentre(Config, clusterStationList):
newclusterVector.append(Centroid(scla,sclo,name))
return newclusterVector
# -------------------------------------------------------------------------------------------------
def compareclusterCentre(oldcluster, newcluster, Config):
counter = 0
for i in range(int(Config['maxcluster'])):
if newcluster[i].rank == -1: continue #hs-1
if newcluster[i].rank == -1:
continue
delta = loc2degrees(oldcluster[i], newcluster[i])
msg = str(i) + ' OLD: ' + str(oldcluster[i].lat) + ' ' + str(oldcluster[i].lon)
msg+= ' NEW: ' + str(newcluster[i].lat) + ' ' + str(newcluster[i].lon) + ' DELTA: ' + str(delta)
msg+= ' NEW: ' + str(newcluster[i].lat) + ' ' + str(newcluster[i].lon) + ' DELTA: ' + str(delta)
Logfile.add(msg)
s = 'NO'
if delta < float(Config['comparedelta']):
s = 'JO'; counter +=1
Logfile.add(s)
#endfor
if delta < float(Config['centroidmindistance']):
counter +=1
return counter
# -------------------------------------------------------------------------------------------------
def deleteFarStations(CentroidList, StationclusterList,Config):
def deleteFarStations(CentroidList, StationclusterList, Config):
cfg = ConfigObj(dict=Config)
stationdistance = int(cfg.Distance('stationdistance'))
@ -377,13 +470,13 @@ def deleteFarStations(CentroidList, StationclusterList,Config):
if i.rank == j.member:
if loc2degrees(i, j) > stationdistance:
j.member = -1
#endfor
for index,k in enumerate(StationclusterList):
if k.member == -1: del StationclusterList[index]
if k.member == -1:
del StationclusterList[index]
return StationclusterList
# -------------------------------------------------------------------------------------------------
def filterclusterStationMinimumNumber(CentroidList, StationclusterList, Config):
@ -413,6 +506,7 @@ def filterclusterStationMinimumNumber(CentroidList, StationclusterList, Config):
return newStationclusterList, newCentroidList
def calcMeanCentroidDistance(CentroidList):
sumdelta = 0
@ -423,7 +517,7 @@ def calcMeanCentroidDistance(CentroidList):
meanCentroidDistance = sumdelta / len(CentroidList)
return meanCentroidDistance
# -------------------------------------------------------------------------------------------------
def calcMinValue(CentroidList):
@ -485,13 +579,13 @@ def write4Plot(Config, Origin, StationclusterList, CentroidList, Folder, flag):
def km(Config, FilterMeta, Folder, Origin, flag):
ic = createRandomInitialCentroids(Config, FilterMeta)
ic = DeterminedInitialCentroids(Config, FilterMeta)
if len(ic) == 0:
return
counter = 0
kmean(Config,ic,FilterMeta,counter,Folder,Origin,flag)
kmean(Config, ic, FilterMeta, counter, Folder, Origin, flag)
def endcheck(inputCentroid,FilterMeta,Config,Folder,Origin,flag):
@ -501,40 +595,40 @@ def endcheck(inputCentroid,FilterMeta,Config,Folder,Origin,flag):
write4Plot(Config, Origin, FMM, NCL, Folder, flag)
def kmean(Config,inputCentroid,FilterMeta,counter,Folder,Origin,flag):
def kmean(Config, inputCentroid, FilterMeta, counter, Folder, Origin, flag):
counter += 1
Logfile.add('COUNTER ' + str(counter) + ' CUTOFF ' + Config['cutoff'])
cfg = ConfigObj(dict=Config)
if counter == cfg.UInt('cutoff'):
endcheck(inputCentroid, FilterMeta, Config, Folder, Origin, flag)
sys.exit()
scl = stationBelongTocluster(Config, inputCentroid, FilterMeta)
acounter = 1
for a in inputCentroid:
for i in scl:
if acounter == i.member:
delta = loc2degrees(i, a)
if delta > cfg.Float('initialstationdistance'):
i.member = -1
acounter+=1
if counter == cfg.UInt('cutoff'):
endcheck(inputCentroid, FilterMeta, Config, Folder, Origin, flag)
sys.exit()
nsc = calculateclusterCentre(Config, scl)
t = compareclusterCentre(inputCentroid, nsc, Config)
Logfile.add('ITERATIONSTEP: ---> ' + str(counter) + ' <-----------------------------')
while t != cfg.UInt('maxcluster'):
Logfile.add('ANZAHL DER JO in KMEAN: ' + str(t))
while t < cfg.UInt('maxcluster'):
Logfile.add('number of arrays in KMEAN: ' + str(t))
kmean(Config, nsc, FilterMeta, counter, Folder, Origin, flag)
endcheck(inputCentroid, FilterMeta, Config, Folder, Origin, flag)
sys.exit()
@ -571,7 +665,7 @@ class clusterMain(MainObj):
disp = True
else:
disp = False
Meta = readpyrockostations(self.eventpath, disp)
Meta = readpyrockostations(self.eventpath, disp, cfg)
elif cfg.colesseo_input() is True:
scenario = guts.load(filename=cfg.colosseo_scenario_yml())
scenario_path = cfg.colosseo_scenario_yml()[:-12]

3
src/process/sembCalc.py

@ -532,7 +532,6 @@ def collectSemb(SembList, Config, Origin, Folder, ntimes, arrays, switch,
fobjsembmax = open(os.path.join(folder, 'sembmax_%s_boot%s_%s.txt'
% (switch, boot, phase)), 'w')
norm = num.max(num.max(tmp, axis=1))
max_p = 0.
sum_i = 0.
@ -571,6 +570,8 @@ def collectSemb(SembList, Config, Origin, Folder, ntimes, arrays, switch,
counter_time = 0
uncert = num.std(i)
semb_cum =+ i
norm = num.max(num.max(i))
for j in range(num.shape(latv)[0]):
x = latv[j]
y = lonv[j]

125
src/process/semp.py

@ -382,75 +382,10 @@ def semblance_py(ncpus, nostat, nsamp, ntimes, nstep, dimX, dimY, mint,
Basic.writeVector(lonv_txt, lonv, '%e')
'''
trs_orgs = []
do_trad = True
k = 0
for tr in sorted(calcStreamMap):
tr_org = obspy_compat.to_pyrocko_trace(calcStreamMap[tr])
if combine is True and cfg.Bool('shift_by_phase_pws') is True:
num_step = max(index_steps)
# relstart = num.mean(traveltime[k][:])
# tmin = time+relstart-5
# tmax = time+relstart+5
# tr_org_polarity = tr_org.copy()
# tr_org_polarity.chop(tmin=tmin, tmax=tmax)
# k = k+1
# tr_org.ydata = tr_org.ydata/max(tr_org.ydata)
# if num.max(tr_org.ydata)<0.:
# tr_org.ydata = tr_org.ydata*-1.
#trld.snuffle([tr_org] + [tr_org_polarity])
#tr_org.ydata = num.gradient(abs(tr_org.ydata), max(index_steps))
# tr_org.ydata = num.gradient(tr_org.ydata)
# tr_org.ydata = abs(tr_org.ydata)
# tr_org.ydata = num.ediff1d(tr_org.ydata, to_end=tr_org.ydata[-1])
#analytic = hilbert(tr_org.ydata)
# envelope = np.sqrt(np.sum((np.square(analytic),
# np.square(tr_org.ydata)), axis=0))
# tr_org.ydata = analytic / envelope
# tr_org.ydata = envelope
# tr_org.ydata = hilbert(tr_org.ydata)
# tr_org.ydata = tr_org.ydata / np.sqrt(np.mean(np.square(tr_org.ydata)))
# tr_org.ydata = tr_org.ydata / np.max(tr_org.ydata)
# normalize = True
# if normalize:
# norm = tr_org.ydata /\
# np.sqrt(np.mean(np.square(tr_org.ydata)))
# else:
# norm = tr_org.ydata
# tr_org.ydata = abs(tr_org.ydata)
# tr_org.ydata = num.ediff1d(tr_org.ydata, to_end=tr_org.ydata[-1])
# tr_org.ydata = abs(tr_org.ydata)
tr_org.ydata = abs(tr_org.ydata)
tr_org.ydata = num.ediff1d(tr_org.ydata, to_end=tr_org.ydata[-1])
# tr_org.ydata = num.gradient(tr_org.ydata)
# tr_org.ydata = tr_org.ydata/max(tr_org.ydata)
# tr_org.ydata = abs(tr_org.ydata)
# tr_org.ydata = num.ediff1d(tr_org.ydata, to_end=tr_org.ydata[-1])
# tr_org.ydata = num.ediff1d(tr_org.ydata, to_end=tr_org.ydata[-1])
#tr_org.ydata = np.sum((norm, tr_org.ydata), axis=0)
#weight = 1.
# tr_org.ydata = tr_org.ydata *\
# np.abs(tr_org.ydata ** weight)
# tr_org.ydata = tr_org.ydata *\
# np.abs(tr_org.ydata ** weight)
# tr_org.ydata = abs(tr_org.ydata)
#tr_org.ydata = num.ediff1d(tr_org.ydata)
# tr_org.ydata = num.gradient(abs(tr_org.ydata), max(index_steps))
if combine is True and do_trad is True:
if combine is True:
# some trickery to make all waveforms have same polarity, while still
# considering constructive/destructive interferences. This is needed
# when combing all waveforms/arrays from the world at once(only then)
@ -459,23 +394,17 @@ def semblance_py(ncpus, nostat, nsamp, ntimes, nstep, dimX, dimY, mint,
# mechanism.
tr_org.ydata = tr_org.ydata / np.sqrt(np.mean(np.square(tr_org.ydata)))
# analytic = hilbert(tr_org.ydata)
# envelope = np.sqrt(np.sum((np.square(analytic),
# np.square(tr_org.ydata)), axis=0))
# tr_org.ydata = analytic / envelope
# tr_org.ydata = envelope
tr_org.ydata = abs(tr_org.ydata)
tr_org.ydata = num.ediff1d(tr_org.ydata)
if max(index_steps) % 2 == 1:
tr_org.ydata = abs(tr_org.ydata)
tr_org.ydata = num.ediff1d(tr_org.ydata)
# tr_org.ydata = abs(tr_org.ydata)
# if cfg.Bool('shift_by_phase_pws') is True:
# cfx = num.fft.fft(tr_org.ydata)
# sums_schimmel = sums_schimmel + (cfx/(abs(cfx)) * num.exp(1j*2*num.pi*cfx)))
# print('calculate pws')
trs_orgs.append(tr_org)
if nsamp == 0:
nsamp = 1
backSemb = np.ndarray(shape=(ntimes, dimX*dimY), dtype=float)
@ -491,6 +420,7 @@ def semblance_py(ncpus, nostat, nsamp, ntimes, nstep, dimX, dimY, mint,
sums = 0.
relstart = []
relstarts = nostat
sums_schimmel = 0
for k in range(nostat):
relstart = traveltime[k][j]
@ -498,35 +428,60 @@ def semblance_py(ncpus, nostat, nsamp, ntimes, nstep, dimX, dimY, mint,
ibeg = index_begins[str(j)+str(k)][0]+i*index_steps[j+k]
iend = index_begins[str(j)+str(k)][0]+index_window[j+k]+i*index_steps[j+k]
data = tr.ydata[ibeg:iend]
data = data / np.sqrt(np.mean(np.square(data)))
# normalize:
#data = data / np.sqrt(np.mean(np.square(data)))
if cfg.Bool('shift_by_phase_pws') is True:
cfx = num.fft.fft(data)
sums_schimmel = sums_schimmel + (cfx/(abs(cfx)) * num.exp(1j*2*num.pi*cfx))
try:
if do_bs_weights is True:
sums += data*bs_weights[k]
else:
sums = sums+data
except ValueError:
if num.shape(data)< num.shape(sums):
if num.shape(data) < num.shape(sums):
data = tr.ydata[ibeg:iend+1]
else:
data = tr.ydata[ibeg:iend-1]
sums = sums+data
relstarts -= relstart
sums_schimmel = abs(sums_schimmel)**2.
if cfg.Bool('shift_by_phase_pws') is True:
for k in range(nostat):
relstart = traveltime[k][j]
tr = trs_orgs[k]
ibeg = index_begins[str(j)+str(k)][0]+i*index_steps[j+k]
iend = index_begins[str(j)+str(k)][0]+index_window[j+k]+i*index_steps[j+k]
data = tr.ydata[ibeg:iend]
cfx = num.fft.fft(data) * sums_schimmel
data = num.fft.ifft(cfx)
try:
if do_bs_weights is True:
sums += data*bs_weights[k]
else:
sums = sums+data
except ValueError:
if num.shape(data) < num.shape(sums):
data = tr.ydata[ibeg:iend+1]
else:
data = tr.ydata[ibeg:iend-1]
sums = sums+data
sum = abs(num.sum(sums))
#if combine is True:
#sum = (1./nostat)* ((abs(num.sum((sums)))**2) /num.sum(sums))
if combine is True:
sum = (1./nostat)* ((abs(num.sum((sums)))**2) /num.sum(sums))
semb = sum
backSemb[i][j] = sum
if semb > sembmax:
sembmax = semb # search for maximum and position of maximum on semblance
# grid for given time step
sembmax = semb
sembmaxX = latv[j]
sembmaxY = lonv[j]
#backSemb[i][:] = backSemb[i][:]/num.max(backSemb[i][:])
Logfile.add('max semblance: ' + str(sembmax) + ' at lat/lon: ' +
str(sembmaxX) + ','+ str(sembmaxY))
str(sembmaxX) + ',' + str(sembmaxY))
backSemb = backSemb/num.max(num.max(backSemb))
return backSemb

5
src/process/stacking.py

@ -1,7 +1,8 @@
"""
Utility module of the EQcorrscan package to allow for different methods of \
stacking of seismic signal in one place.
Modified utility module of the EQcorrscan package to allow for different
methods of stacking of seismic signal in one place.
Modified by: Andreas Steinberg,
:copyright:
EQcorrscan developers.

13
src/tools/create.py

@ -39,8 +39,7 @@ def parseEvent(eventID):
eventID = eventID[1].replace('_', '')
url = 'http://service.iris.edu/fdsnws/event\
/1/query?eventid=%s&format=text' % (eventID)
url = 'http://service.iris.edu/fdsnws/event/1/query?eventid=%s&format=text' % (eventID)
data = urlopen(url).read()
data = data.decode('utf_8')
data = data.split('\n')
@ -85,8 +84,7 @@ def writeOriginFile(path, ev_id):
eventID = ev_id[1].replace('_', '')
url = 'http://service.iris.edu/fdsnws/event\
/1/query?eventid=%s&format=text' % (eventID)
url = 'http://service.iris.edu/fdsnws/event/1/query?eventid=%s&format=text' % (eventID)
data = urlopen(url).read()
data = data.decode('utf_8')
data = data.split('\n')
@ -120,8 +118,7 @@ def writeSynFile(path, ev_id):
eventID = ev_id[1].replace('_', '')
url = 'http://service.iris.edu/fdsnws/\
event/1/query?eventid=%s&format=text' % (eventID)
url = 'http://service.iris.edu/fdsnws/event/1/query?eventid=%s&format=text' % (eventID)
data = urlopen(url).read()
data = data.decode('utf_8')
data = data.split('\n')
@ -173,9 +170,7 @@ def copyConfigSkeleton(evfolder):
event = evfolder.split('/')[-1]
logger.info('\033[31mNEXT PROCESSING STEP: \n\n \
palantiri_down {evdirectory} "time" 10352.323104588522\
0.001 10 --radius-min=1110 "name"\
\n\n\033[0m'.format(evdirectory=str(event.strip('[]'))))
palantiri_down {evdirectory} "time" 10352.323104588522 0.001 10 --radius-min=1110 "name" \n\n\033[0m'.format(evdirectory=str(event.strip('[]'))))
def main():

Loading…
Cancel
Save