
v0.4

obspy
braunfuss · 2 years ago
commit 08809cf49f
72 changed files with 18192 additions and 0 deletions
  1. +5 -0  requirements.txt
  2. +27 -0  setup.cfg
  3. +79 -0  setup.py
  4. +0 -0  src/__init__.py
  5. +0 -0  src/apps/__init__.py
  6. +127 -0  src/apps/arraytool.py
  7. +1227 -0  src/apps/palantiri_down.py
  8. +1961 -0  src/apps/plot.py
  9. +53 -0  src/cluster/ClusterProgs.py
  10. +13 -0  src/cluster/TestCluster.py
  11. +0 -0  src/cluster/__init__.py
  12. +41 -0  src/cluster/callcluster.py
  13. +611 -0  src/cluster/cluster2.py
  14. +258 -0  src/cluster/evaluateCluster.py
  15. +577 -0  src/common/Basic.py
  16. +24 -0  src/common/CommonProgs.py
  17. +560 -0  src/common/ConfigFile.py
  18. +118 -0  src/common/DataTypes.py
  19. +100 -0  src/common/Globals.py
  20. +376 -0  src/common/Logfile.py
  21. +31 -0  src/common/ObspyFkt.py
  22. +102 -0  src/common/Program.py
  23. +0 -0  src/common/__init__.py
  24. +0 -0  src/data/__init__.py
  25. +145 -0  src/data/ak135-f-average-no-ocean.f.nd
  26. +33 -0  src/data/ak135-f-average-no-ocean.l.nd
  27. +46 -0  src/data/ak135-f-average-no-ocean.m.nd
  28. +149 -0  src/data/ak135-f-average.f.nd
  29. +37 -0  src/data/ak135-f-average.l.nd
  30. +50 -0  src/data/ak135-f-average.m.nd
  31. +142 -0  src/data/ak135-f-average.vf.csv
  32. +145 -0  src/data/ak135-f-average.vf.nd
  33. +144 -0  src/data/ak135-f-continental.f.nd
  34. +33 -0  src/data/ak135-f-continental.l.nd
  35. +46 -0  src/data/ak135-f-continental.m.nd
  36. +58 -0  src/data/prem-no-ocean.f.nd
  37. +32 -0  src/data/prem-no-ocean.l.nd
  38. +41 -0  src/data/prem-no-ocean.m.nd
  39. +60 -0  src/data/prem.f.nd
  40. +34 -0  src/data/prem.l.nd
  41. +43 -0  src/data/prem.m.nd
  42. +63 -0  src/process/ProcessProgs.py
  43. +1 -0  src/process/Version.py
  44. +0 -0  src/process/__init__.py
  45. +718 -0  src/process/array_crosscorrelation_v4.py
  46. +287 -0  src/process/beam_stack.py
  47. +112 -0  src/process/deserializer.py
  48. +884 -0  src/process/main.py
  49. +73 -0  src/process/noise_addition.py
  50. +159 -0  src/process/noise_analyser.py
  51. +1579 -0  src/process/optim.py
  52. +1326 -0  src/process/optim_csemb.py
  53. +1805 -0  src/process/sembCalc.py
  54. +600 -0  src/process/semp.py
  55. +163 -0  src/process/stacking.py
  56. +25 -0  src/process/times.py
  57. +29 -0  src/process/trigger.py
  58. +501 -0  src/process/ttt.py
  59. +250 -0  src/process/waveform.py
  60. +122 -0  src/process/xcorrfilter.py
  61. +0 -0  src/skeleton/__init__.py
  62. +249 -0  src/skeleton/example.config
  63. +179 -0  src/skeleton/plot_cluster.py
  64. +0 -0  src/tools/__init__.py
  65. +264 -0  src/tools/config.py
  66. +190 -0  src/tools/create.py
  67. +413 -0  src/tools/ev_wave_mt4.py
  68. +74 -0  src/tools/eventsearch.py
  69. +82 -0  src/waveform/DataDir.py
  70. +1 -0  src/waveform/Version.py
  71. +0 -0  src/waveform/__init__.py
  72. +515 -0  src/waveform/pyrocko_down.py

requirements.txt (+5 -0)

@@ -0,0 +1,5 @@
utm
mpld3
numpy
scipy
pyrocko

setup.cfg (+27 -0)

@@ -0,0 +1,27 @@
[metadata]
description-file=README.md
license-file=LICENSE
[bdist_wheel]
universal=1
[build_ext]
inplace=0
[nosetests]
verbosity=2
detailed-errors=1
#with-coverage=0
#cover-erase=1
#cover-package=grond
[coverage:report]
exclude_lines =
    def __repr__
    def __str__
    raise AssertionError
    raise NotImplementedError
    raise ValueError
    raise
    if __name__ == .__main__.:
    logger.error

setup.py (+79 -0)

@@ -0,0 +1,79 @@
#!/usr/bin/env python
from setuptools import setup
from setuptools.command.install import install


class CustomInstallCommand(install):

    def run(self):
        install.run(self)


setup(
    cmdclass={
        'install': CustomInstallCommand,
    },
    name='palantiri',
    description='A probabilistic earthquake source inversion framework. '
                'Designed and crafted in Mordor.',
    version='0.3',
    author='The palantiri Developers',
    author_email='info@pyrocko.org',
    packages=[
        'palantiri',
        'palantiri.apps',
        'palantiri.cluster',
        'palantiri.common',
        'palantiri.process',
        'palantiri.skeleton',
        'palantiri.tools',
        'palantiri.data',
        'palantiri.waveform',
    ],
    # python_requires='>=3.5',
    entry_points={
        'console_scripts': [
            'bat = palantiri.apps.arraytool:main',
            'palantiri_plot = palantiri.apps.plot:main',
            'palantiri_down = palantiri.apps.palantiri_down:main',
            'palantiri_eventsearch = palantiri.tools.eventsearch:main',
            'palantiri_cluster = palantiri.cluster.callcluster:main',
            'palantiri_create = palantiri.tools.create:main',
            'palantiri_process = palantiri.process.main:main',
        ]
    },
    package_dir={'palantiri': 'src'},
    package_data={
        'palantiri': [
            'data/*nd',
            'skeleton/example.config']},
    data_files=[],
    license='GPLv3',
    classifiers=[
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Science/Research',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: Implementation :: CPython',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Physics',
        'Topic :: Scientific/Engineering :: Visualization',
        'Topic :: Scientific/Engineering :: Information Analysis',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
    ],
    keywords=[
        'seismology, waveform analysis, earthquake modelling, geophysics,'
        ' backprojection'],
)
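The console_scripts entries above mean that each command name is only a thin wrapper around a function inside the installed palantiri package. A minimal sketch of that equivalence for the bat entry point (it assumes palantiri has been installed, for example with pip, and is not part of this commit):

# Minimal sketch: the entry point 'bat = palantiri.apps.arraytool:main' makes the
# installed `bat` command equivalent to importing arraytool and calling main().
from palantiri.apps import arraytool

if __name__ == '__main__':
    arraytool.main()   # reads sys.argv, exactly what the `bat` console script dispatches to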

src/__init__.py (+0 -0)


src/apps/__init__.py (+0 -0)


src/apps/arraytool.py (+127 -0)

@@ -0,0 +1,127 @@
import os
import fnmatch
import sys
from optparse import OptionParser
import logging
import imp
import obspy.core
from palantiri.process import ProcessProgs
from palantiri.cluster import ClusterProgs
from palantiri.common import CommonProgs
from palantiri.tools import config as config

logger = logging.getLogger(sys.argv[0])
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)

usage = ''' %prog [options] <command> [args]
%prog list                  [list all events from the event folder]
%prog search                [search for events in an online catalog, parameters from global.conf]
%prog create eventID        [eventID from the online catalog]
%prog pyrocko_down eventname
%prog plotstations eventname
%prog cluster eventname     [automatic array clustering, prints the array config]
%prog process eventname
%prog plotresult eventname
'''
parser = OptionParser(usage=usage)
(options, args) = parser.parse_args()


def folderContent(p):
    '''
    Look up the config files needed for event processing in the event folders.

    Return a list of event names for which both a config and an origin file exist.
    '''
    L = []
    for root, dirs, files in os.walk(p):
        flags = 0
        for i in files:
            if fnmatch.fnmatch(i, '*.config'):
                flags += 1
            if fnmatch.fnmatch(i, '*.origin'):
                flags += 1
        if flags == 2:
            name = root.split('/')
            L.append(name[-1:])
    return L


def listEvents():
    '''
    List the events in the event folder.

    An event is listed only if config and origin files exist in its folder.
    '''
    for item in os.listdir(os.path.join(os.getcwd(), 'events')):
        print(item)


def parseArguments(args):
    '''
    Parse the arguments of the arraytool entry script and dispatch
    the requested command.
    '''
    dir = 'tools'
    if ProcessProgs.start(config):
        return
    if ClusterProgs.start(config):
        return

    if sys.argv[1] == 'list':
        listEvents()
    elif sys.argv[1] == 'create':
        at = "palantiri_create"
        t = ''
        for i in sys.argv[2:]:
            t += i + '_'
        cmd = at + ' ' + t
        os.system(cmd)
    elif sys.argv[1] == 'cluster':
        at = "palantiri_cluster"
        path = os.path.join(os.getcwd(), 'events', sys.argv[2])  # event directory, as in the pyrocko_download branch below
        cmd = at + ' -f ' + path
        os.system(cmd)
    elif sys.argv[1] == 'pyrocko_download':
        at = os.path.join(os.getcwd(), 'Waveform', 'pyrocko_down.py')
        path = os.path.join(os.getcwd(), 'events', sys.argv[2])
        cmd = sys.executable + ' ' + at + ' -f ' + path
        os.chdir(os.path.join(os.getcwd(), "Waveform"))
        os.system(cmd)
    elif sys.argv[1] == 'search':
        os.system("palantiri_eventsearch")
    else:
        logger.info('\033[31m Option not available \033[0m')


def main():
    if len(sys.argv) > 1:
        parseArguments(sys.argv)
    else:
        cmd = 'python ' + sys.argv[0] + ' --help'
        os.system(cmd)
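folderContent() only reports an event when its folder contains both a *.config and a *.origin file. A small illustrative sketch of the expected events/ layout and the resulting return value (the event name is made up, not from this commit):

# Hypothetical layout that folderContent() matches on:
#
#   events/
#       2021-05-01_0000/
#           2021-05-01_0000.config
#           2021-05-01_0000.origin
#
from palantiri.apps.arraytool import folderContent

print(folderContent('events'))   # -> [['2021-05-01_0000']], one single-element list per complete event folder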

src/apps/palantiri_down.py (+1227 -0)
File diff suppressed because it is too large


src/apps/plot.py (+1961 -0)
File diff suppressed because it is too large


src/cluster/ClusterProgs.py (+53 -0)

@@ -0,0 +1,53 @@
import os
import sys
from palantiri.common import Basic
from palantiri.common import Globals


def Usage():
    return 'arraytool.py cluster event_name [automatic array clustering and print arrayconfig]'


class Intern(object):

    def __init__(self): dummy = 0

    def error(self, text):
        print('\nError: ' + text + '\n')
        print('Usage: ' + Usage())
        sys.exit('\n*** Program aborted ***')

    def checkProgramParameter(self, nMinParams, nMaxParams):
        eventName = sys.argv[2]

        if len(sys.argv) < nMinParams: self.error('event name missing')
        if len(sys.argv) > nMaxParams: self.error('Too many parameters')

        if not Globals.checkEventDirParameter(eventName):   # Exists event directory ?
            s = 'Invalid parameter - <' + eventName + '>'
            s += '\n   '
            s += 'Cannot find directory ' + Globals.EventDir()
            self.error(s)


def start(config):
    intern = Intern()

    if sys.argv[1] == 'cluster':
        intern.checkProgramParameter(3, 4)

        workDir = [Globals.EventDir(), 'tmp2', 'cluster']   # ???
        workDir = ['cluster']
        cmd = "palantiri_cluster" + ' -f ' + Globals.EventDir()
    else:
        return False

    Basic.changeDirectory(workDir)   # create working directory
    # Basic.removeFiles('.')         # ... and empty it
    os.system(cmd)
    return True

src/cluster/TestCluster.py (+13 -0)

@@ -0,0 +1,13 @@
import sys
import Basic
from Program import startTest # from Common
Basic.onlyForInternalUse()
sys.argv = startTest('cluster', workingDir='tools')
import cluster2 # from cluster
cluster2.MainProc()

src/cluster/__init__.py (+0 -0)


src/cluster/callcluster.py (+41 -0)

@@ -0,0 +1,41 @@
import os
import sys
from optparse import OptionParser
from palantiri.tools import config
from palantiri.common import Globals

parser = OptionParser(usage="%prog -f eventpath ")
parser.add_option("-f", "--evpath", type="string", dest="evpath", help="evpath")
(options, args) = parser.parse_args()


def init():
    C = config.Config(options.evpath)
    Config = C.parseConfig('config')
    tests = int(Config['runs'])

    import palantiri
    path = palantiri.__path__
    at = os.path.join(path[0], 'cluster/cluster2.py')
    cmd = sys.executable + ' ' + at + ' -f ' + options.evpath
    print('cmd = ', cmd)

    for i in range(tests):
        print('RUN: ', i)
        os.system(cmd)

    cmd = ('%s evaluatecluster.py -f %s') % (sys.executable,
                                             os.path.join(options.evpath,
                                                          'cluster'))
    at = os.path.join(path[0], 'cluster/evaluateCluster.py')
    cmd = sys.executable + ' ' + at + ' -f ' + os.path.join(options.evpath, "cluster")
    os.system(cmd)


def main():
    init()

src/cluster/cluster2.py (+611 -0)

@@ -0,0 +1,611 @@
import logging
import os.path as op
from optparse import OptionParser
from pyrocko import guts
import os
import sys
import fnmatch
import random
import time
import shutil
import sys
import math
import time

from palantiri.common import Basic
from palantiri.common import Globals
from palantiri.common import Logfile
from palantiri.common import DataTypes
from palantiri.common.Program import MainObj
from palantiri.common.ObspyFkt import loc2degrees
from palantiri.common.ConfigFile import ConfigObj
from pyrocko import model
from palantiri.tools import config

sys.setrecursionlimit(1500)

logger = logging.getLogger(sys.argv[0])
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(message)s")
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)


class Station(object):

    def __init__(self, net, sta, loc, comp, lat=0, lon=0, ele=0, dip=0, azi=0,
                 gain=0, member=-1):
        self.net = net
        self.sta = sta
        self.loc = loc
        self.comp = comp
        self.lat = lat
        self.lon = lon
        self.ele = ele
        self.dip = dip
        self.azi = azi
        self.gain = gain
        self.member = member

    def getName(self):
        return self.net + '.' + self.sta + '.' + self.loc + '.' + self.comp

    def __str__(self):
        return('%s.%s.%s.%s') % (self.net, self.sta, self.loc, self.comp)

    def __eq__(self, other):
        return self.getName() == other.getName()


class Centroid(object):

    def __init__(self, lat, lon, rank):
        self.lat = lat
        self.lon = lon
        self.rank = rank


def readMetaInfoFile(EventPath):
    Logfile.red('Parsing MetaInfoFile')

    try:
        for i in os.listdir(EventPath):
            if fnmatch.fnmatch(i, 'metainfo-*.meta'):
                evfile = os.path.join(EventPath, i)
                MetaL = []
                Logfile.add(evfile)

                fobj = open(evfile, 'r')
                for i in fobj:
                    line = i.split()
                    net = line[0]
                    sta = line[1]
                    loc = line[2]
                    comp = line[3]
                    lat = line[4]
                    lon = line[5]
                    ele = line[6]
                    dip = line[7]
                    azi = line[8]
                    gain = line[9]

                    if fnmatch.fnmatch(comp, '**Z'):
                        MetaL.append(Station(net, sta, loc, comp, lat, lon, ele, dip,
                                             azi, gain))

                Logfile.red('%d ENTRIES IN METAFILE FOUND' % (len(MetaL)))
    except Exception:
        Logfile.red('METAFILE NOT READABLE')

    return MetaL


def readpyrockostations(path, disp):
    if disp is True:
        stations = model.load_stations(path + '/data/stations_cluster.txt')
    else:
        stations = model.load_stations(path + '/data/stations_cluster.txt')

    MetaL = []
    for sl in stations:
        channel = sl.channels[0]
        MetaL.append(Station(str(sl.network), str(sl.station),
                     str(sl.location), str(sl.channels[0].name), str(sl.lat), str(sl.lon),
                     str(sl.elevation), str(channel.dip), str(channel.azimuth),
                     str(channel.gain)))
    return MetaL


def readcolosseostations(scenario_path):
    stations = model.load_stations(scenario_path + '/meta/stations.txt')

    MetaL = []
    for sl in stations:
        channel = sl.channels[2]
        MetaL.append(Station(str(sl.network), str(sl.station),
                     str(sl.location), str(sl.channels[2])[:3], str(sl.lat), str(sl.lon),
                     str(sl.elevation), str(channel.dip), str(channel.azimuth),
                     str(channel.gain)))
    return MetaL


def createFolder(EventPath):
    Folder = {}
    Logfile.red('Create working environment')

    Folder['cluster'] = os.path.join(EventPath, 'cluster')
    if os.access(Folder['cluster'], os.F_OK) is False:
        os.makedirs(Folder['cluster'])

    if os.access(os.getcwd(), os.W_OK):
        basedir = os.path.join(EventPath, 'work')
        sembdir = os.path.join(basedir, 'semblance')
        ascdir = os.path.join(basedir, 'asc')
        mseeddir = os.path.join(basedir, 'mseed')

        Folder['base'] = basedir
        Folder['semb'] = sembdir
        Folder['asc'] = ascdir
        Folder['mseed'] = mseeddir
        Folder['event'] = EventPath
    else:
        Logfile.abort('create Folder: No write permissions for ' + os.getcwd())

    Folder['config'] = os.path.join(os.getcwd(), '..', 'skeleton')
    return Folder


def filterStations(StationList, Config, Origin):
    F = []
    cfg = ConfigObj(dict=Config)

    minDist, maxDist = cfg.FloatRange('mindist', 'maxdist')
    origin = DataTypes.dictToLocation(Origin)

    Logfile.red('Filter stations with configured parameters')
    for i in StationList:
        sdelta = loc2degrees(origin, i)
        if sdelta > minDist and sdelta < maxDist:
            F.append(Station(i.net, i.sta, i.loc, i.comp, i.lat, i.lon, i.ele,
                             i.dip, i.azi, i.gain))

    Logfile.red('%d STATIONS LEFT IN LIST' % len(F))
    return F


def checkStationAroundInitialCentroid(station, Config, StationMetaList):
    cfg = ConfigObj(dict=Config)
    initDist = cfg.Float('initialstationdistance')
    counter = 0

    for i in StationMetaList:
        sdelta = loc2degrees(station, i)
        if sdelta < initDist:
            counter += 1
    return counter


def addOK(station, stationList, Config, MetaList):
    cfg = ConfigObj(dict=Config)
    minDist = 0
    minAround = cfg.UInt('minstationaroundinitialcluster')
    t = 0

    for i in stationList:
        sdelta = loc2degrees(station, i)

        if sdelta > minDist:
            aroundcounter = checkStationAroundInitialCentroid(station, Config, MetaList)
            if aroundcounter >= minAround:
                t = 1
            else:
                t = 0
                return t
        else:
            t = 0
            return t
    return t


def createRandomInitialCentroids(Config, StationMetaList):
    Logfile.red('Begin initial centroid search')

    initialCentroids = []
    usedIndexes = []
    random.seed(time.clock())

    if len(StationMetaList) == 0:
        Logfile.red('Empty station list')
        return initialCentroids

    MAX_TIME_ALLOWED = 50
    start = time.time()

    while len(initialCentroids) < int(Config['maxcluster']):
        dist_centroids = float(Config['centroidmindistance'])
        randomIndex = random.randint(0, len(StationMetaList) - 1)
        redraw = True

        while redraw is True:
            if randomIndex in usedIndexes:
                randomIndex = random.randint(0, len(StationMetaList) - 1)
            else:
                if len(usedIndexes) > 2:
                    for rdx in usedIndexes:
                        s1 = StationMetaList[randomIndex]
                        s2 = StationMetaList[rdx]
                        delta = loc2degrees(s1, s2)
                        if delta >= dist_centroids:
                            redraw = False
                else:
                    redraw = False
        usedIndexes.append(randomIndex)

        around = checkStationAroundInitialCentroid(StationMetaList[randomIndex],
                                                   Config, StationMetaList)
        found = False

        if len(initialCentroids) == 0:
            initialCentroids.append(StationMetaList[randomIndex])
            found = True
            start = time.time()
        else:
            t = addOK(StationMetaList[randomIndex], initialCentroids,
                      Config, StationMetaList)
            if (time.time() - start) > MAX_TIME_ALLOWED:
                break
            if t == 1:
                if len(usedIndexes) > 1:
                    for rdx in usedIndexes:
                        s1 = StationMetaList[randomIndex]
                        s2 = StationMetaList[rdx]
                        delta = loc2degrees(s1, s2)
                        if delta >= dist_centroids:
                            initialCentroids.append(StationMetaList[randomIndex])
                            found = True
                else:
                    initialCentroids.append(StationMetaList[randomIndex])
                    found = True
            else:
                continue

        if found:
            Logfile.red('found initial cluster %d' % (len(initialCentroids)))
            Logfile.red('centroid %s with %d stations around %s deegree'
                        % (StationMetaList[randomIndex], around, Config['centroidmindistance']))

    Logfile.red('Initial centroid search finished')
    return initialCentroids


def stationBelongTocluster(Config, CentroidList, StationMetaList):
    clusterList = []

    for i in StationMetaList:
        mind = 100000
        c = 0
        for j in CentroidList:
            delta = loc2degrees(j, i)
            c += 1
            if mind > delta:
                mind = delta
                i.member = c

        clusterList.append(Station(i.net, i.sta, i.loc, i.comp, i.lat, i.lon,
                                   i.ele, i.dip, i.azi, i.gain, i.member))

    return clusterList


# -------------------------------------------------------------------------------------------------

def calculateclusterCentre(Config, clusterStationList):
    newclusterVector = []
    cfg = ConfigObj(dict=Config)

    for i in range(1, cfg.Int('maxcluster') + 1):
        sumlat = 0
        sumlon = 0
        clusterstationcounter = 0

        for j in clusterStationList:
            if i == j.member:
                sumlat += float(j.lat)
                sumlon += float(j.lon)
                clusterstationcounter += 1

        if clusterstationcounter == 0:
            newclusterVector.append(Centroid(0.0, 0.0, -1))
        else:
            scla = sumlat / clusterstationcounter
            sclo = sumlon / clusterstationcounter
            name = i
            newclusterVector.append(Centroid(scla, sclo, name))

    return newclusterVector


# -------------------------------------------------------------------------------------------------

def compareclusterCentre(oldcluster, newcluster, Config):
    counter = 0

    for i in range(int(Config['maxcluster'])):
        if newcluster[i].rank == -1: continue   # hs-1

        delta = loc2degrees(oldcluster[i], newcluster[i])

        msg = str(i) + ' OLD: ' + str(oldcluster[i].lat) + ' ' + str(oldcluster[i].lon)
        msg += ' NEW: ' + str(newcluster[i].lat) + ' ' + str(newcluster[i].lon) + ' DELTA: ' + str(delta)
        Logfile.add(msg)

        s = 'NO'
        if delta < float(Config['comparedelta']):
            s = 'JO'
            counter += 1
        Logfile.add(s)

    return counter


# -------------------------------------------------------------------------------------------------

def deleteFarStations(CentroidList, StationclusterList, Config):
    cfg = ConfigObj(dict=Config)
    stationdistance = int(cfg.Distance('stationdistance'))

    for i in CentroidList:
        for j in StationclusterList:
            if i.rank == j.member:
                if loc2degrees(i, j) > stationdistance:
                    j.member = -1

    for index, k in enumerate(StationclusterList):
        if k.member == -1: del StationclusterList[index]

    return StationclusterList


# -------------------------------------------------------------------------------------------------

def filterclusterStationMinimumNumber(CentroidList, StationclusterList, Config):
    newCentroidList = []
    newStationclusterList = []

    for i in CentroidList:
        counter = 0
        for j in StationclusterList:
            if i.rank == j.member:
                counter += 1
                streamID = j.net + '.' + j.sta + '.' + j.loc + '.' + j.comp

        if counter < int(Config['minclusterstation']):
            s1 = 'OUT'
        else:
            s1 = 'IN '
            newCentroidList.append(i)
        Logfile.red('Centroid %s %d %s %.2f %5.2f' % (i.rank, counter, s1,
                                                      i.lat, i.lon))

    for i in newCentroidList:
        for j in StationclusterList:
            if i.rank == j.member:
                newStationclusterList.append(j)

    return newStationclusterList, newCentroidList


def calcMeanCentroidDistance(CentroidList):
    sumdelta = 0

    for i in CentroidList:
        for j in CentroidList:
            sumdelta += loc2degrees(i, j)

    meanCentroidDistance = sumdelta / len(CentroidList)
    return meanCentroidDistance


# -------------------------------------------------------------------------------------------------

def calcMinValue(CentroidList):
    mCD = calcMeanCentroidDistance(CentroidList)

    sumdelta = 0
    for i in CentroidList:
        for j in CentroidList:
            delta = loc2degrees(i, j)
            x = (delta - mCD)
            sumdelta += math.pow(x, 2)

    minval = sumdelta / len(CentroidList)
    return minval


def write4Plot(Config, Origin, StationclusterList, CentroidList, Folder, flag):
    plotfolder = 'plot-' + str(flag)
    p = os.path.join(Folder['cluster'], plotfolder)

    if os.access(p, os.F_OK) == False:
        os.makedirs(p)

    fobjorigin = open(os.path.join(p, 'event.orig'), 'w')
    fobjorigin.write(Origin['lat'] + ',' + Origin['lon'])
    fobjorigin.close()

    dst = os.path.join(p, 'plot_cluster.py')
    import palantiri
    path = palantiri.__path__
    src = os.path.join(path[0], 'skeleton/plot_cluster.py')
    shutil.copy(src, dst)

    fobjcentroid = open(os.path.join(p, 'event.centroid'), 'w')
    for i in CentroidList:
        fobjcentroid.write(str(i.rank) + ' ' + str(i.lat) + ' ' + str(i.lon) + '\n')
    fobjcentroid.close()

    fobjstation = open(os.path.join(p, 'event.stations'), 'w')
    for i in StationclusterList:
        if i.loc == '--': i.loc = ''
        streamID = i.net + '.' + i.sta + '.' + i.loc + '.' + i.comp
        fobjstation.write(streamID + ' ' + i.lat + ' ' + i.lon + ' ' + str(i.member) + '\n')
    fobjstation.close()

    fobjcentroid = open(os.path.join(p, 'event.statistic'), 'w')
    mCD = calcMeanCentroidDistance(CentroidList)
    mv = calcMinValue(CentroidList)
    fobjcentroid.write(str(mCD) + ' ' + str(mv) + ' ' + str(len(CentroidList)) + ' ' + str(len(StationclusterList)) + '\n')
    fobjcentroid.close()


def km(Config, FilterMeta, Folder, Origin, flag):
    ic = createRandomInitialCentroids(Config, FilterMeta)

    if len(ic) == 0:
        return
    counter = 0
    kmean(Config, ic, FilterMeta, counter, Folder, Origin, flag)


def endcheck(inputCentroid, FilterMeta, Config, Folder, Origin, flag):
    FM = deleteFarStations(inputCentroid, FilterMeta, Config)
    FMM, NCL = filterclusterStationMinimumNumber(inputCentroid, FM, Config)
    write4Plot(Config, Origin, FMM, NCL, Folder, flag)


def kmean(Config, inputCentroid, FilterMeta, counter, Folder, Origin, flag):
    counter += 1
    Logfile.add('COUNTER ' + str(counter) + ' CUTOFF ' + Config['cutoff'])

    cfg = ConfigObj(dict=Config)

    if counter == cfg.UInt('cutoff'):
        endcheck(inputCentroid, FilterMeta, Config, Folder, Origin, flag)
        sys.exit()

    scl = stationBelongTocluster(Config, inputCentroid, FilterMeta)

    acounter = 1
    for a in inputCentroid:
        for i in scl:
            if acounter == i.member:
                delta = loc2degrees(i, a)
                if delta > cfg.Float('initialstationdistance'):
                    i.member = -1
        acounter += 1

    nsc = calculateclusterCentre(Config, scl)
    t = compareclusterCentre(inputCentroid, nsc, Config)

    Logfile.add('ITERATIONSTEP: ---> ' + str(counter) + ' <-----------------------------')

    while t != cfg.UInt('maxcluster'):
        Logfile.add('ANZAHL DER JO in KMEAN: ' + str(t))
        kmean(Config, nsc, FilterMeta, counter, Folder, Origin, flag)

    endcheck(inputCentroid, FilterMeta, Config, Folder, Origin, flag)
    sys.exit()


class clusterMain(MainObj):

    def __init__(self):
        parser = OptionParser(usage="%prog -f Eventpath ")
        parser.add_option("-f", "--evpath", type="string", dest="evpath",
                          help="evpath")
        (options, args) = parser.parse_args()

        self.eventpath = options.evpath

        Basic.checkExistsDir(self.eventpath, isAbort=True)
        Globals.setEventDir(self.eventpath)
        MainObj.__init__(self, self, 'ok', 'cluster_run.log',
                         'cluster.log')

    def init(self):
        return Globals.init()

    def process(self):
        t = time.time()
        C = config.Config(self.eventpath)
        Config = C.parseConfig('config')
        cfg = ConfigObj(dict=Config)
        Origin = C.parseConfig('origin')

        if cfg.pyrocko_download() is True:
            if cfg.quantity() == 'displacement':
                disp = True
            else:
                disp = False
            Meta = readpyrockostations(self.eventpath, disp)
        elif cfg.colesseo_input() is True:
            scenario = guts.load(filename=cfg.colosseo_scenario_yml())
            scenario_path = cfg.colosseo_scenario_yml()[:-12]
            Meta = readcolosseostations(scenario_path)
            events = scenario.get_events()
            ev = events[0]
            Origin['strike'] = str(ev.moment_tensor.strike1)
            Origin['rake'] = str(ev.moment_tensor.rake1)
            Origin['dip'] = str(ev.moment_tensor.dip1)
            Origin['lat'] = str(ev.lat)
            Origin['lon'] = str(ev.lon)
            Origin['depth'] = str(ev.depth / 1000.)
        else:
            Meta = readMetaInfoFile(self.eventpath)

        Folder = createFolder(self.eventpath)
        FilterMeta = filterStations(Meta, Config, Origin)

        km(Config, FilterMeta, Folder, Origin, t)
        return True

    def finish(self):
        pass


def MainProc():
    mainObj = clusterMain()
    mainObj.run()


if __name__ == "__main__":
    MainProc()
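The core of cluster2.py is a k-means-style iteration: stationBelongTocluster() assigns every station to its nearest centroid, calculateclusterCentre() moves each centroid to the mean latitude/longitude of its members, and compareclusterCentre() counts how many centroids moved by less than comparedelta degrees. The stripped-down sketch below shows that assignment/update cycle on plain (lat, lon) tuples; the helper names, the naive Euclidean distance in place of loc2degrees(), and the fixed iteration count are illustrative only, not part of this commit:

# Stripped-down k-means cycle on (lat, lon) pairs, for illustration only.
def assign(stations, centroids):
    members = []
    for slat, slon in stations:
        dists = [((slat - clat) ** 2 + (slon - clon) ** 2) ** 0.5
                 for clat, clon in centroids]
        members.append(dists.index(min(dists)))
    return members

def update(stations, members, k):
    centroids = []
    for c in range(k):
        pts = [s for s, m in zip(stations, members) if m == c]
        if pts:
            centroids.append((sum(p[0] for p in pts) / len(pts),
                              sum(p[1] for p in pts) / len(pts)))
        else:
            centroids.append((0.0, 0.0))   # mirrors the empty-cluster Centroid(0.0, 0.0, -1) case above
    return centroids

stations = [(50.1, 6.2), (50.3, 6.0), (35.7, 139.7), (35.6, 139.8)]
centroids = [(50.0, 6.0), (36.0, 140.0)]
for _ in range(10):                        # cluster2.py instead stops via 'cutoff' and 'comparedelta'
    members = assign(stations, centroids)
    centroids = update(stations, members, len(centroids))
print(members, centroids)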

src/cluster/evaluateCluster.py (+258 -0)

@@ -0,0 +1,258 @@
import os
import sys
from optparse import OptionParser
import logging
import shutil
import fnmatch

from palantiri.common import Basic
from palantiri.common import Globals
from palantiri.common import Logfile
from palantiri.common.ObspyFkt import loc2degrees, obs_kilometer2degrees
from palantiri.tools.config import Config                    # Import from Tools
from palantiri.cluster.cluster2 import Centroid, Station     # Import from cluster
from palantiri.common.ConfigFile import ConfigObj            # Import from common

parser = OptionParser(usage="%prog -f eventpath ")
parser.add_option("-f", "--evpath", type="string", dest="evpath", help="evpath")
(options, args) = parser.parse_args()


# -------------------------------------------------------------------------------------------------

class Result(object):

    def __init__(self, meanvalue, minvalue, centroidcount, usedstationcount, path):
        self.meanvalue = meanvalue
        self.minvalue = minvalue
        self.centroidcount = centroidcount
        self.usedstationcount = usedstationcount
        self.path = path


# -------------------------------------------------------------------------------------------------

class BestSolution(object):

    def __init__(self, station, cluster, lat, lon):
        self.station = station
        self.cluster = cluster
        self.lat = lat
        self.lon = lon


def getStatistics(clusterresults):
    p = os.path.join(options.evpath, 'stat.dat')
    fobjstat = open(p, 'w')
    resDict = {}

    for root, dirs, files in os.walk(clusterresults):
        for i in files:
            if i == 'event.statistic':
                fname = os.path.join(root, i)
                bspath = os.path.join('/', *fname.split('/')[:-1])

                fobj = open(fname, 'r')
                for line in fobj:
                    line = line.split()
                    resDict[fname] = Result(line[0], line[1], line[2], line[3], bspath)
                    fobjstat.write(('%s: %s %s %s %s\n') % (fname, line[0], line[1], line[2], line[3]))
                fobj.close()

    fobjstat.close()
    return resDict


# -------------------------------------------------------------------------------------------------

def getBestSolution(resultDictionary):
    bestsolution = -100000

    for i in resultDictionary.keys():
        resultDictionary[i].meanvalue = float(resultDictionary[i].meanvalue)
        if bestsolution < resultDictionary[i].meanvalue:
            bestsolution = resultDictionary[i].meanvalue

    L = []
    for j in resultDictionary.keys():
        if bestsolution == resultDictionary[j].meanvalue:
            L.append(resultDictionary[j])

    return L[0]


# -------------------------------------------------------------------------------------------------

def copycluster2EventConfig(clusterDict, evpath):
    epath = os.path.join('/', *evpath.split('/')[:-1])
    t = epath.split(os.path.sep)[-1]
    fname = t + '.config'
    fullfname = os.path.join(epath, fname)

    L = []
    fobj = open(fullfname, 'r')
    for index, line in enumerate(fobj):
        L.append(line)
        if fnmatch.fnmatch(line, '*array parameter*'):
            # print(index, line)
            firstend = index + 1
        if fnmatch.fnmatch(line, '*beamforming method*'):
            # print(index, line)
            secondbegin = index
    fobj.close()

    Confpart1 = L[:firstend]
    Confpart2 = L[secondbegin:]

    fobj = open(fullfname, 'w')
    fobj.write(''.join(Confpart1))
    # print(Confpart1)

    nlist = ''
    for i in clusterDict.keys():
        if len(clusterDict[i]) > 0:
            nlist += 'r' + str(i) + ','
    fobj.write(('networks=%s\n') % (nlist[:-1]))
    fobj.write('\n')

    for i in clusterDict.keys():
        if len(clusterDict[i]) > 0:
            aname = 'r' + str(i)
            fobj.write(('%s=%s\n') % (aname, clusterDict[i][:-1]))
            fobj.write(('%srefstation=\n') % (aname))
            fobj.write(('%sphase=P\n') % (aname))

    fobj.write('\n')
    fobj.write(''.join(Confpart2))
    fobj.close()


def printBestSolution(solution):
    maxline = -100
    L = []
    M = []

    Logfile.add('eventpath: ', os.path.join(solution.path, 'event.stations'))

    fobj = open(os.path.join(solution.path, 'event.stations'), 'r')
    for line in fobj:
        line = line.split()
        M.append(float(line[3]))
        L.append(BestSolution(line[0], float(line[3]), float(line[1]), float(line[2])))
    fobj.close()

    maxline = max(M)
    C = {}

    fobjarrays = open(os.path.join(os.path.join('/', *solution.path.split('/')[:-2]),
                                   'arraycluster.dat'), 'w')
    for i in range(int(maxline) + 1):
        array = ''
        for j in L:
            if int(j.cluster) == i:
                array += j.station + '|'

        ar = 'r' + str(i) + '=' + array[:-1] + '\n'
        C[i] = array
        print(ar, len(ar))
        fobjarrays.write(ar)

    fobjarrays.close()
    return C


# -------------------------------------------------------------------------------------------------

def copyAndShowBestSolution(solution):
    dest = solution.path
    src = os.path.join('/', *solution.path.split('/')[:-4])
    src = os.path.join(src, 'skeleton', 'clusterplot.sh')


# -------------------------------------------------------------------------------------------------

def filterBestSolution(solution):
    evp = os.path.join('/', *solution.path.split('/')[:-2])
    C = Config(evp)
    Conf = C.parseConfig('config')
    cfg = ConfigObj(dict=Conf)

    SL = []
    M = []
    fobj = open(os.path.join(solution.path, 'event.stations'), 'r')

    for s in fobj:
        try:
            line = s.split()
            net, sta, loc, comp = line[0].split('.')
            slat = line[1]
            slon = line[2]
            smember = line[3]
            M.append(smember)
            SL.append(Station(net, sta, loc, comp, lat=slat, lon=slon, member=smember))
        except:
            Logfile.exception('filterBestSolution', '<' + s + '>')
            continue
    fobj.close()

    M = list(set(M))
    Logfile.add('number of clusters ' + str(len(M)),
                'number of stations ' + str(len(SL)))

    kd = obs_kilometer2degrees(cfg.Distance('intraclusterdistance'))
    Logfile.add('icd ' + str(kd))

    maxdist = -1
    for i in SL:
        counter = 0
        for k in SL:
            if i.member == '8' and k.member == '8':
                if i.getName() != k.getName():
                    delta = loc2degrees(i, k)
                    if delta > maxdist: maxdist = delta
                    if delta < kd: counter += 1
        print(i, 'less than allowed ', counter)
    print('maxdist ', maxdist)


# -------------------------------------------------------------------------------------------------

if __name__ == "__main__":
    rD = getStatistics(options.evpath)
    bs = getBestSolution(rD)
    CD = printBestSolution(bs)
    copyAndShowBestSolution(bs)
    copycluster2EventConfig(CD, options.evpath)
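Each clustering run writes an event.statistic file (see write4Plot() in cluster2.py) containing four space-separated numbers: the mean centroid distance, the spread value from calcMinValue(), the number of centroids and the number of used stations. getBestSolution() keeps the run with the largest first value. A small sketch of that selection with invented paths and numbers:

# Hypothetical event.statistic contents from three runs (paths and values are invented):
runs = {
    'cluster/plot-1/event.statistic': '12.4 3.1 8 210',
    'cluster/plot-2/event.statistic': '15.9 2.7 7 198',
    'cluster/plot-3/event.statistic': '11.0 4.2 9 225',
}

# Same criterion as getBestSolution(): maximise the first column (mean centroid distance).
best_path = max(runs, key=lambda p: float(runs[p].split()[0]))
print(best_path)   # -> 'cluster/plot-2/event.statistic'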

src/common/Basic.py (+577 -0)

@@ -0,0 +1,577 @@
import os
import sys
import time
import shutil
import subprocess
import getpass

from palantiri.common import Logfile
import httplib2

if sys.version_info.major >= 3:
    from urllib.request import urlopen
    import _pickle as pickle
else:
    from urllib2 import urlopen
    import cPickle as pickle


def floatToString(fList, format=None, delim=','):
    if not format:
        return delim.join(map(str, fList))
    else:
        s = []
        for val in fList:
            s.append(format % val)
        return delim.join(s)


def stringToFloat(s, delim=','):
    words = s.split(delim)
    line = []

    for i in range(len(words)):
        if words[i] == '\n':
            break
        line.append(float(words[i]))
    return line


def matrixToString(matrix, nLines, nColumns, format=None, delim=','):
    lines = []
    for i in range(nLines):
        lines.append(floatToString(matrix[i], format))
    return '\n'.join(lines)


def stringToMatrix(lines, nLines, nColumns, delim=','):
    matrix = []

    for i in range(nLines):
        vect = stringToFloat(lines[i], delim)
        assert len(vect) == nColumns
        matrix.append(vect)

    assert len(matrix) == nLines
    return matrix


def writeVector(fileName, vector, format=None):
    writeTextFile(fileName, list(floatToString(vector, format)))


def readVector(fileName):
    return stringToFloat(readTextFile(fileName, 1)[0])


def writeMatrix(fileName, matrix, nLines, nColumns, format=None):
    writeTextFile(fileName, matrixToString(matrix, nLines, nColumns, format))


def readMatrix(fileName, nLines, nColumns):
    lines = readTextFile(fileName)
    matrix = stringToMatrix(lines, nLines, nColumns)
    return matrix


def formatStrings(strings, format1):
    result = []

    try:
        for i in range(len(strings)):
            result.append(format1 % (strings[i]))
    except Exception:
        Logfile.exception('formatStrings', 'Illegal format', abortProg=True)

    return result


def selectStrings(strings, mask):
    result = []
    for i in range(len(mask)):
        if mask[i]:
            result.append(strings[i])
    return result


def _stringsEndsWith(strings, postfixList):
    assert len(postfixList) > 0

    mask = []
    for i in range(len(strings)):
        s = strings[i]
        b = False
        for postfix in postfixList:
            if s.endswith(postfix):
                b = True
                break
        mask.append(b)

    assert len(mask) == len(strings)
    return mask


def stringsEndsWith(strings, postfixList):
    if type(postfixList) is str: list1 = list(postfixList)
    else: list1 = postfixList

    return _stringsEndsWith(strings, list1)


def toStringList(arg0, arg1=None, arg2=None, arg3=None, arg4=None):
    s = []
    s.append(arg0)

    if arg1 != None: s.append(arg1)
    if arg2 != None: s.append(arg2)
    if arg3 != None: s.append(arg3)
    if arg4 != None: s.append(arg4)
    return s


def Not(mask):
    result = []
    for i in range(len(mask)):
        if mask[i]: result.append(False)
        else: result.append(True)
    return result


def And(mask):
    for i in range(len(mask)):
        if not mask[i]: return False
    return True


def baseFileName(fullName):
    basename = os.path.basename(fullName)
    filename = os.path.splitext(basename)
    return filename[0]


def isNumber(s):
    try: float(s)
    except: return False
    return True


def isInt(s):
    if not isNumber(s): return False

    try: int(s)
    except: return False
    return True


def checkIsNumber(string, minVal=None, maxVal=None):
    assert isNumber(minVal) and isNumber(maxVal)

    msg = None
    s1 = 'Value ' + string + ' '

    if not isNumber(string): msg = s1 + 'is not a number'
    else:
        val = float(string)

        if minVal == None and maxVal == None: msg = None
        elif minVal == None and val > maxVal: msg = s1 + '> ' + str(maxVal)
        elif maxVal == None and val < minVal: msg = s1 + '< ' + str(minVal)
        elif val < minVal or val > maxVal:
            msg = s1 + 'outside range [' + str(minVal) + ',' + str(maxVal) + ']'

    return msg


def checkGreaterZero(string):
    msg = None
    s1 = 'Value ' + string + ' '

    if not isNumber(string): msg = s1 + 'is not a number'
    else:
        val = float(string)

        if val == 0.0: msg = s1 + 'is zero'
        elif val < 0.0: msg = s1 + '< 0.0'

    return msg


def checkNotNegative(string):
    msg = None
    s1 = 'Value ' + string + ' '

    if not isNumber(string): msg = s1 + 'is not a number'
    elif float(string) < 0.0: msg = s1 + '< 0.0'

    return msg


# Check if keys exists in a dictionary
def checkExistsKeys(dict, keyList, isAbort=False):
    isOk = True

    for key in keyList:
        if not key in dict:
            isOk = Logfile.error('Key <' + str(key) + '> missing in config file')

    if isOk: return True
    elif isAbort: Logfile.abort()
    return False


def checkExistsDir(dirName, isAbort=False):
    if os.path.isdir(dirName): return True

    Logfile.error('Cannot find directory', dirName)

    if isAbort: Logfile.abort()
    return False


def createDirectory(dirName, optional=False):
    if os.path.isdir(dirName): return True

    os.makedirs(dirName)

    if os.path.isdir(dirName): return True
    else: Logfile.error('Cannot open directory', dirName)

    if not optional: Logfile.abort()
    return False


def changeDirectory(dirPath):
    path = []

    if not type(dirPath) is list: path.append(dirPath)
    else: path = dirPath

    for dir in path:
        createDirectory(dir)
        os.chdir(dir)

    return os.getcwd()   # return current directory


def checkFileExists(fileName, isAbort=False):
    if os.path.isfile(fileName): return True

    Logfile.fileOpenError(fileName)

    if isAbort: Logfile.abort('')
    else: return False


def openTextFile(fileName, mode):
    if mode == 'r' and not checkFileExists(fileName):
        return None

    return open(fileName, mode)


def readTextFile(fileName, maxLines=-1):
    lines = []
    fp = openTextFile(fileName, 'r')

    if fp == None: return lines

    if maxLines == -1:
        lines = fp.readlines()
    else:
        lines = fp.readlines(maxLines)

    fp.close()
    return lines


def writeTextFile(fileName, lines):
    fp = openTextFile(fileName, 'w')
    if fp == None: return

    for s in lines:
        fp.write(s)
    fp.close()


def appendToFile(fileName, lines):
    fp = openTextFile(fileName, 'a')
    if fp == None: return

    for s in lines: fp.write(s)
    fp.close()


def copyFile(srcFile, destFile, isAbort=False):
    if not checkFileExists(srcFile, isAbort): return False

    shutil.copy(srcFile, destFile)
    return True
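The vector/matrix helpers above serialise numbers as comma-separated text with one matrix row per line, so writeMatrix() and readMatrix() round-trip through writeTextFile()/readTextFile(). A short usage sketch (the file name and values are arbitrary, not from this commit):

from palantiri.common import Basic

m = [[1.0, 2.0, 3.0],
     [4.0, 5.0, 6.0]]

Basic.writeMatrix('demo_matrix.txt', m, nLines=2, nColumns=3, format='%0.2f')
print(Basic.readMatrix('demo_matrix.txt', nLines=2, nColumns=3))   # -> [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]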