# Import third-party libraries
import os
from pathlib import Path
import pandas as pd
from obspy.core import read, UTCDateTime, Stream
# --- Event setup and waveform loading --------------------------------------
evid = "49462969"  # event of interest
event_dir = evid
infile = "%s/datetime.csv" % event_dir       # we need the event origin time
station_file = "%s/station.csv" % event_dir  # station metadata table
sacdir = "%s/sac" % event_dir                # location of processed data
outdir = "%s" % event_dir  # location of filtered/cut/down-sampled data for inversion

# Initialize the stream unconditionally so the later filter/trim steps do not
# raise NameError when the data directory is missing (st just stays empty).
st = Stream()

# Check if data directory exists
P = Path(sacdir)
if P.exists():
    # Read event info and station info into Pandas tables.
    # location codes must remain strings ("" vs NaN), hence dtype/na_filter.
    df = pd.read_csv(infile, parse_dates=True)
    station_df = pd.read_csv(
        station_file, parse_dates=True, dtype={"location": str}, na_filter=False
    )
    origin_time = UTCDateTime(df["origin"][0])
    print(origin_time)
    for _, row in station_df.iterrows():
        # Wildcard pattern selects the requested components, e.g. BH[ZNE]
        st += read(
            "%s/%s.%s.%s.%s[%s]" % (
                sacdir, row.network, row.station, row.location,
                row.channel, row.component),
            format="SAC",
        )
else:
    print("Directory %s does not exist. %s does not have instrument corrected data." % (sacdir, evid))
# --- Bandpass filter, trim, and resample the observed data -----------------
# Filter corners in Hz (pass band roughly 16-40 s period)
freqmin = 0.025
freqmax = 0.0625
corners = 3
# Desired sampling interval (seconds)
dt = 0.02
# Reduction velocity in km/sec, 0 sets the reference time to origin time
vred = 0
# time before and after reference time, data will be cut before and after the reference time
time_before = 50
time_after = 250

# Slowness used to shift the reference time with distance (0 -> origin time).
# NOTE: the obspy Origin object previously created here was never used and has
# been removed, along with a redundant re-read of the origin-time CSV on every
# trace iteration (origin_time is already set in the loading step above).
p = 1 / vred if vred else 0

st.filter("bandpass", freqmin=freqmin, freqmax=freqmax,
          corners=corners, zerophase=True)
# st.taper(max_percentage=0.05)

# Trim and decimate the data, then save each trace using the tdmtpy file-name
# format: <trace id without component letter><component>.dat
for tr in st:
    # tr.decimate(factor=int(tr.stats.sampling_rate*dt), strict_length=False, no_filter=True)
    tr.resample(1 / dt)
    # tr.stats.sac.t1 = origin_time# + p*(tr.stats.sac.dist) # set reference time
    tr.trim(origin_time - time_before, origin_time + time_after,
            pad=True, fill_value=0)
    # tr.data = 100*tr.data # m/s to cm/s
    # tr.stats.sac.b = -1*(origin_time - tr.stats.starttime)
    # tr.stats.sac.o = 0
    print(tr.stats)
    sacout = "%s/%s%s.dat" % (outdir, tr.id[:-4], tr.id[-1])
    tr.write(sacout, format="SAC")
# --- Compute synthetic Green's functions (Computer Programs in Seismology) --
model = "mahesh2013"
# depths = round(df["depth"][0]) # Only compute GFs at catalog depth
depths = sorted([5, round(df["depth"][0])])  # compute GF at 5 km and at catalog depth
print(depths)
# BUGFIX: was 1028, which is NOT a power of 2 despite the comment requiring it;
# 1024 is the intended value.
npts = 1024  # number of points in the time series, must be a power of 2
t0 = 0       # used to define the first sample point, t0 + distance_in_km/vred
# dt = 0.02

# Location of synthetic Green's functions
green_dir = "%s/%s" % (event_dir, model)
Path(green_dir).mkdir(parents=True, exist_ok=True)
print(Path(green_dir))

# Create the distance file once -- its contents do not depend on source depth,
# so there is no need to rewrite it inside the depth loop.
dfile = "{dist:.0f} {dt:.2f} {npts:d} {t0:d} {vred:.1f}\n"
dfile_out = "%s/dfile" % event_dir
with open(dfile_out, "w") as f:
    for _, row in station_df.iterrows():
        f.write(dfile.format(dist=row.distance, dt=dt, npts=npts, t0=t0, vred=vred))

greens = ("ZDD", "RDD", "ZDS", "RDS", "TDS", "ZSS", "RSS", "TSS", "ZEX", "REX")
for depth in depths:
    # Generate the synthetics.
    # NOTE(review): os.system with interpolated strings is shell-injection
    # prone; acceptable here because all values are trusted local settings,
    # but subprocess.run([...]) would be safer.
    os.system("/home/leo/PROGRAMS.330/bin/hprep96 -M %s.d -d %s -HS %.4f -HR 0 -EQEX" % (model, dfile_out, depth))
    os.system("/home/leo/PROGRAMS.330/bin/hspec96")
    os.system("/home/leo/PROGRAMS.330/bin/hpulse96 -D -i > file96")
    os.system("/home/leo/PROGRAMS.330/bin/f96tosac -B file96")

    # Filter and save the synthetic Green's functions
    for index, row in station_df.iterrows():
        for j, grn in enumerate(greens):
            sacin = "B%03d%02d%s.sac" % (index + 1, j + 1, grn)
            sacout = "%s/%s.%s.%.4f" % (green_dir, row.network, row.station, depth)
            # sacout = "%s/%s.%s.%s.%.4f"%(green_dir,row.network,row.station,row.location,depth)
            tmp = read(sacin, format="SAC")
            tmp.filter('bandpass', freqmin=freqmin, freqmax=freqmax,
                       corners=corners, zerophase=True)
            # tmp.data = 100000*tmp.data
            tmp.write("%s.%s" % (sacout, grn), format="SAC")  # overwrite
    os.system("rm B*.sac")  # remove the unfiltered SAC files
# --- Write the tdmtpy input file (mtinv.in) --------------------------------
# Header section: event information plus inversion settings.
headers = dict(
    datetime=df["origin"][0],
    longitude=df["lon"][0],
    latitude=df["lat"][0],
    depth=",".join("%.4f" % d for d in depths),
    path_to_data=event_dir,
    path_to_green=green_dir,
    green="herrmann",
    components="ZRT",
    degree=5,
    weight="distance",
    plot=0,
    correlate=0,
)

# Station table: one row per station with its inversion window parameters.
pd.options.display.float_format = "{:,.2f}".format
frame = {"station": station_df[["network", "station"]].apply(lambda x: ".".join(x), axis=1)}
df_out = pd.DataFrame(frame)
df_out[["distance", "azimuth"]] = station_df[["distance", "azimuth"]]
df_out["ts"] = int(30)
# NOTE(review): npts/dt here define the inversion window per station and must
# be consistent with the length of the trimmed, resampled observed traces --
# the appended traceback shows a (201,) vs (256,) broadcast failure, which
# suggests these values disagree with the prepared data; verify before running.
df_out["npts"] = int(256)
df_out["dt"] = dt
df_out["used"] = 1
df_out[["longitude", "latitude"]] = station_df[["longitude", "latitude"]]
# print(df_out.to_string(index=False))

# write
with open("mtinv.in", "w") as f:
    for key, value in headers.items():
        f.write("{0:<15}{1}\n".format(key, value))
    f.write(df_out.to_string(index=False))

# Display the resulting file. The original used "!cat mtinv.in", which is
# IPython-only syntax and a SyntaxError in a plain Python script.
print(Path("mtinv.in").read_text())
Error output produced by the subsequent tdmtpy inversion run (tdmt.invert()):
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Cell In[9], line 4
1 # Pass the parameters to the Inversion object and launch the inversion
2 # The default is to plot all solutions
3 tdmt = mttime.Inversion(config=config)
----> 4 tdmt.invert()
5 tdmt.write()
File ~/anaconda3/envs/myenv/lib/python3.8/site-packages/mttime/core/inversion.py:113, in Inversion.invert(self, show)
111 tensors = []
112 for _depth in self.config.depth:
--> 113 self._single_inversion(_depth,tensors)
115 # Get preferred moment tensor solution
116 if len(tensors):
File ~/anaconda3/envs/myenv/lib/python3.8/site-packages/mttime/core/inversion.py:146, in Inversion._single_inversion(self, depth, tensors)
144 if self.config.correlate:
145 self._correlate()
--> 146 self._data_to_d()
147 self._weights()
149 ind = self.w != 0
File ~/anaconda3/envs/myenv/lib/python3.8/site-packages/mttime/core/inversion.py:412, in Inversion._data_to_d(self)
410 e = self.config.index2[i]
411 for d_b, d_e, component in zip(b, e, self.config.components):
--> 412 d[d_b:d_e] = self.streams[i].select(component=component)[0].data[obs_b[i]:obs_e[i]]
414 self.d = d
ValueError: could not broadcast input array from shape (201,) into shape (256,)
Diagnosis: the ValueError shows an observed trace supplied only 201 samples where the mtinv.in station table promised npts = 256 — the npts/dt values written to mtinv.in are inconsistent with the length of the trimmed, resampled data files.