{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Initialization, class definition, and declaration.\n",
"\n",
"import os\n",
"import math\n",
"from obspy import UTCDateTime\n",
"import numpy as np\n",
"import copy\n",
"\n",
"class Event(): # class of earthquake\n",
" def __init__(self):\n",
" self.name = \"nan\" # evname1 Earthquake name, recommended as \"earthquake\".\n",
" self.id = -1\n",
" self.lat = 0.0\n",
" self.lon = 0.0\n",
" self.dep = 0.0\n",
" self.mag = 0.0\n",
" self.ortime = UTCDateTime(1999,1,1,0,0,0)\n",
" self.Nt = 0 # Number of the absolute traveltime of earthquake\n",
" self.Ncs_dt = 0 # Number of the commmon source differential traveltime of earthquake\n",
" self.Ncr_dt = 0 # Number of the commmon receiver differential traveltime of earthquake\n",
" self.t = {} # stname1+phase -> (stname1, phase, time, data_weight)\n",
" self.cs_dt = {} # stname1 + stname2 + phase -> (stname1, stname2, phase, dif_time, data_weight)\n",
" self.cr_dt = {} # stname1 + evname2 + phase -> (stname1, evname2, phase, dif_time, data_weight)\n",
" self.azi_gap = 360.0 # the max azimuthal gap of each earthquake\n",
" self.misfit = {} # traveltime residual of the data, the difference between real data and synthetic data, used for evaluation. stname or stname1+stname2 or stname1+evname2 -> residual\n",
" self.tag = {} # additional tags for the earthquake, e.g., azi_gap, weight. (azimuthal gap, weight of the earthquake)\n",
"\n",
"class Station():\n",
" def __init__(self):\n",
" self.name = \"nan\" # stname1, recommend: network.stname\n",
" self.id = -1\n",
" self.lat = 0.0\n",
" self.lon = 0.0\n",
" self.ele = 0.0\n",
" self.tag = {} # additional tags for the station, e.g., wright\n",
"\n",
"\n"
]
},
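{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Added usage sketch (hypothetical names and values): how the ev_info and st_info\n",
"# dictionaries are organized throughout this notebook. Keys are event/station names;\n",
"# absolute traveltimes live in Event.t as [stname, phase, time, weight].\n",
"ev = Event()\n",
"ev.name = \"EQ001\"; ev.lat = 38.28; ev.lon = 39.02; ev.dep = 11.0\n",
"ev.t[\"NET.STA1+P\"] = [\"NET.STA1\", \"P\", 12.34, 1.0]; ev.Nt = 1\n",
"st = Station()\n",
"st.name = \"NET.STA1\"; st.lat = 38.10; st.lon = 39.00; st.ele = 1000.0\n",
"ev_info_demo = {ev.name: ev}\n",
"st_info_demo = {st.name: st}\n",
"print(ev_info_demo[\"EQ001\"].t, st_info_demo[\"NET.STA1\"].lon)"
]
},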
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Functions: some basic auxiliary functions for processing data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# function cal_dis(lat1, lon1,lat2, lon2) (in kilometers) cal_azimuth(lat1, lon1, lat2, lon2) (degree) calculate epicentral distance (km) and azimuth (degree)\n",
"\n",
"def cal_dis(lat1, lon1,lat2, lon2, R = 6371):\n",
" latitude1 = (math.pi/180)*lat1\n",
" latitude2 = (math.pi/180)*lat2\n",
" longitude1 = (math.pi/180)*lon1\n",
" longitude2= (math.pi/180)*lon2\n",
" # Therefore, the spherical distance between points A and B is:{arccos[sinb*siny+cosb*cosy*cos(a-x)]}*R\n",
" # Radius of the earth\n",
" if((lat1-lat2)**2+(lon1-lon2)**2<0.000001):\n",
" return 0\n",
"\n",
" d = math.acos(math.sin(latitude1)*math.sin(latitude2)+ math.cos(latitude1)*math.cos(latitude2)*math.cos(longitude2-longitude1))/math.pi*180\n",
" return d * 2 * math.pi * R / 360\n",
"\n",
"def cal_azimuth(lat1, lon1, lat2, lon2):\n",
" lat1_rad = lat1 * math.pi / 180\n",
" lon1_rad = lon1 * math.pi / 180\n",
" lat2_rad = lat2 * math.pi / 180\n",
" lon2_rad = lon2 * math.pi / 180\n",
"\n",
" y = math.sin(lon2_rad - lon1_rad) * math.cos(lat2_rad)\n",
" x = math.cos(lat1_rad) * math.sin(lat2_rad) - math.sin(lat1_rad) * math.cos(lat2_rad) * math.cos(lon2_rad - lon1_rad)\n",
" brng = math.atan2(y, x) * 180 / math.pi\n",
" if((lat1-lat2)**2+(lon1-lon2)**2<0.0001):\n",
" return 0\n",
" return float((brng + 360.0) % 360.0)\n"
]
},
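{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Added sanity check: one degree of arc along the equator is about 111.19 km\n",
"# (2*pi*6371/360), and the azimuth from (0,0) to (0,1) is due east (90 deg).\n",
"print(\"%.2f km\" % cal_dis(0.0, 0.0, 0.0, 1.0)) # ~111.19\n",
"print(\"%.1f deg\" % cal_azimuth(0.0, 0.0, 0.0, 1.0)) # 90.0"
]
},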
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: Coordinate rotation rotate_src_rec(ev_info, st_info, theta0, phi0, psi): rotate to the new coordinate system, satisfying the center point transformation r0, t0, p0 -> r0, 0, 0 and an anticlockwise rotation angle psi.\n",
"# Satisfying the center point transformation r0, t0, p0 -> r0, 0, 0 and an anticlockwise rotation angle psi.\n",
"\n",
"import numpy as np\n",
"\n",
"RAD2DEG = 180/np.pi\n",
"DEG2RAD = np.pi/180\n",
"R_earth = 6371.0\n",
"\n",
"# Spherical coordinates to Cartesian coordinate\n",
"def rtp2xyz(r,theta,phi):\n",
" x = r * np.cos(theta*DEG2RAD) * np.cos(phi*DEG2RAD)\n",
" y = r * np.cos(theta*DEG2RAD) * np.sin(phi*DEG2RAD)\n",
" z = r * np.sin(theta*DEG2RAD)\n",
" return (x,y,z)\n",
"\n",
"# Cartesian coordinates to Spherical coordinate\n",
"def xyz2rtp(x,y,z):\n",
" # theta: -90~90; phi: -180~180\n",
" r = np.sqrt(x**2+y**2+z**2)\n",
" theta = np.arcsin(z/r)\n",
" phi = np.arcsin(y/r/np.cos(theta))\n",
"\n",
"\n",
" idx = np.where((phi > 0) & (x*y < 0))\n",
" phi[idx] = np.pi - phi[idx]\n",
" idx = np.where((phi < 0) & (x*y > 0))\n",
" phi[idx] = -np.pi - phi[idx]\n",
"\n",
"\n",
" # for i in range(phi.size):\n",
" # if(phi[i] > 0 and x[i]*y[i] < 0):\n",
" # phi[i] = np.pi - phi[i]\n",
" # if(phi[i] < 0 and x[i]*y[i] > 0):\n",
" # phi[i] = -np.pi - phi[i]\n",
"\n",
" return (r,theta*RAD2DEG,phi*RAD2DEG)\n",
"\n",
"# anti-clockwise rotation along x-axis\n",
"def rotate_x(x,y,z,theta):\n",
" new_x = x\n",
" new_y = y * np.cos(theta*DEG2RAD) + z * -np.sin(theta*DEG2RAD)\n",
" new_z = y * np.sin(theta*DEG2RAD) + z * np.cos(theta*DEG2RAD)\n",
" return (new_x,new_y,new_z)\n",
"\n",
"# anti-clockwise rotation along y-axis\n",
"def rotate_y(x,y,z,theta):\n",
" new_x = x * np.cos(theta*DEG2RAD) + z * np.sin(theta*DEG2RAD)\n",
" new_y = y\n",
" new_z = x * -np.sin(theta*DEG2RAD) + z * np.cos(theta*DEG2RAD)\n",
" return (new_x,new_y,new_z)\n",
"\n",
"# anti-clockwise rotation along z-axis\n",
"def rotate_z(x,y,z,theta):\n",
" new_x = x * np.cos(theta*DEG2RAD) + y * -np.sin(theta*DEG2RAD)\n",
" new_y = x * np.sin(theta*DEG2RAD) + y * np.cos(theta*DEG2RAD)\n",
" new_z = z\n",
" return (new_x,new_y,new_z)\n",
"\n",
"# spherical Rotation\n",
"\n",
"# rotate to the new coordinate, satisfying the center r0,t0,p0 -> r0,0,0 and a anticlockwise angle psi\n",
"def rtp_rotation(t,p,theta0,phi0,psi):\n",
" # step 1: r,t,p -> x,y,z\n",
" (x,y,z) = rtp2xyz(1.0,t,p)\n",
"\n",
" # step 2: anti-clockwise rotation with -phi0 along z-axis: r0,t0,p0 -> r0,t0,0\n",
" (x,y,z) = rotate_z(x,y,z,-phi0)\n",
"\n",
" # step 3: anti-clockwise rotation with theta0 along y-axis: r0,t0,0 -> r0,0,0\n",
" (x,y,z) = rotate_y(x,y,z,theta0)\n",
"\n",
" # # step 4: anti-clockwise rotation with psi along x-axis\n",
" (x,y,z) = rotate_x(x,y,z,psi)\n",
"\n",
" # step 5: x,y,z -> r,t,p\n",
" (new_r,new_t,new_p) = xyz2rtp(x,y,z)\n",
"\n",
" return (new_t,new_p)\n",
"\n",
"\n",
"def rtp_rotation_reverse(new_t,new_p,theta0,phi0,psi):\n",
" # step 1: r,t,p -> x,y,z\n",
" (x,y,z) = rtp2xyz(1.0,new_t,new_p)\n",
"\n",
" # step 2: anti-clockwise rotation with -psi along x-axis\n",
" (x,y,z) = rotate_x(x,y,z,-psi)\n",
"\n",
" # step 3: anti-clockwise rotation with -theta0 along y-axis: r0,0,0 -> r0,t0,0\n",
" (x,y,z) = rotate_y(x,y,z,-theta0)\n",
"\n",
" # step 4: anti-clockwise rotation with phi0 along z-axis: r0,t0,0 -> r0,t0,p0\n",
" (x,y,z) = rotate_z(x,y,z,phi0)\n",
"\n",
" # step 5: x,y,z -> r,t,p\n",
" (r,t,p) = xyz2rtp(x,y,z)\n",
"\n",
" return (t,p)\n",
"\n",
"def rotate_src_rec(ev_info,st_info,theta0,phi0,psi):\n",
" ev_info_rotate = {}\n",
" st_info_rotate = {}\n",
"\n",
" # rotate earthquakes\n",
" for key_ev in ev_info:\n",
" ev = ev_info[key_ev]\n",
" ev_lat = np.array([ev.lat]); ev_lon = np.array([ev.lon])\n",
" (ev_lat,ev_lon,) = rtp_rotation(ev_lat,ev_lon,theta0,phi0,psi)\n",
" ev.lat = ev_lat[0]; ev.lon = ev_lon[0]\n",
" ev_info_rotate[key_ev] = ev\n",
"\n",
" # rotate stations\n",
" for key_st in st_info:\n",
" st = st_info[key_st]\n",
" st_lat = np.array([st.lat]); st_lon = np.array([st.lon])\n",
" (st_lat,st_lon) = rtp_rotation(st_lat,st_lon,theta0,phi0,psi)\n",
" st.lat = st_lat[0]; st.lon = st_lon[0]\n",
" st_info_rotate[key_st] = st\n",
"\n",
" return (ev_info_rotate,st_info_rotate)\n",
"\n",
"def rotate_src_rec_reverse(ev_info_rotate,st_info_rotate,theta0,phi0,psi):\n",
" ev_info = {}\n",
" st_info = {}\n",
"\n",
" # rotate earthquakes\n",
" for key_ev in ev_info_rotate:\n",
" ev = ev_info_rotate[key_ev]\n",
" ev_lat = np.array([ev.lat]); ev_lon = np.array([ev.lon])\n",
" (ev_lat,ev_lon,) = rtp_rotation_reverse(ev_lat,ev_lon,theta0,phi0,psi)\n",
" ev.lat = ev_lat[0]; ev.lon = ev_lon[0]\n",
" ev_info[key_ev] = ev\n",
"\n",
" # rotate stations\n",
" for key_st in st_info_rotate:\n",
" st = st_info_rotate[key_st]\n",
" st_lat = np.array([st.lat]); st_lon = np.array([st.lon])\n",
" (st_lat,st_lon) = rtp_rotation_reverse(st_lat,st_lon,theta0,phi0,psi)\n",
" st.lat = st_lat[0]; st.lon = st_lon[0]\n",
" st_info[key_st] = st\n",
"\n",
" return (ev_info,st_info)"
]
},
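{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Added sanity check: rtp_rotation() followed by rtp_rotation_reverse() with the\n",
"# same (theta0, phi0, psi) should recover the input coordinates. The rotation\n",
"# centre and angle below are hypothetical; inputs must be numpy arrays because\n",
"# xyz2rtp() uses array indexing.\n",
"t_in = np.array([12.5]); p_in = np.array([21.5])\n",
"(t_rot, p_rot) = rtp_rotation(t_in, p_in, 10.0, 20.0, 30.0)\n",
"(t_back, p_back) = rtp_rotation_reverse(t_rot, p_rot, 10.0, 20.0, 30.0)\n",
"print(np.allclose([t_back[0], p_back[0]], [12.5, 21.5])) # True"
]
},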
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# linear_regression(X,Y)\n",
"def linear_regression(X,Y):\n",
" slope,intercept = np.polyfit(X,Y,deg=1)\n",
" fitted_values = slope * X + intercept\n",
" residual = Y - fitted_values\n",
" SEE = np.std(residual)\n",
" return (slope,intercept,SEE)"
]
},
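{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Added sanity check (synthetic values): recover slope = 1/6 s/km (an apparent\n",
"# velocity of 6 km/s) and intercept = 1 s from noise-free traveltimes.\n",
"dis_demo = np.array([30.0, 60.0, 90.0, 120.0])\n",
"time_demo = dis_demo / 6.0 + 1.0\n",
"(slope_demo, intercept_demo, SEE_demo) = linear_regression(dis_demo, time_demo)\n",
"print(\"%.4f %.2f %.2e\" % (slope_demo, intercept_demo, SEE_demo)) # 0.1667 1.00 ~0"
]
},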
{
"cell_type": "markdown",
"metadata": {},
"source": [
"\n",
"Functions: obtain target information from ev_info and st_info"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# function: output the [lon,lat,dep,weight] of the earthquake\n",
"def data_lon_lat_dep_wt_ev(ev_info):\n",
" lat = []\n",
" lon = []\n",
" dep = []\n",
" weight = []\n",
" for key in ev_info:\n",
" lat.append(ev_info[key].lat)\n",
" lon.append(ev_info[key].lon)\n",
" dep.append(ev_info[key].dep)\n",
" try:\n",
" weight.append(ev_info[key].tag[\"weight\"])\n",
" except:\n",
" weight.append(1.0)\n",
" return [np.array(lon),np.array(lat),np.array(dep),np.array(weight)]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# function: output the [lon, lat, dep, ortime] of the earthquake\n",
"def data_ev_loc(ev_info):\n",
" lat = []\n",
" lon = []\n",
" dep = []\n",
" ortime = []\n",
" for key in ev_info:\n",
" lat.append(ev_info[key].lat)\n",
" lon.append(ev_info[key].lon)\n",
" dep.append(ev_info[key].dep)\n",
" ortime.append(ev_info[key].ortime.timestamp)\n",
" return [np.array(lon),np.array(lat),np.array(dep),np.array(ortime)]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# function: output the [lon,lat,dep,weight] of the station\n",
"def data_lon_lat_ele_wt_st(ev_info,st_info):\n",
" names = {}\n",
" lat = []\n",
" lon = []\n",
" ele = []\n",
" weight = []\n",
" for key_ev in ev_info:\n",
" for key_t in ev_info[key_ev].t: # absolute traveltime data\n",
" name_st = ev_info[key_ev].t[key_t][0]\n",
" names[name_st] = name_st\n",
"\n",
" for key_t in ev_info[key_ev].cs_dt: # common source differential traveltime data\n",
" name_st = ev_info[key_ev].cs_dt[key_t][0]\n",
" names[name_st] = name_st\n",
" name_st = ev_info[key_ev].cs_dt[key_t][1]\n",
" names[name_st] = name_st\n",
"\n",
" for key_t in ev_info[key_ev].cr_dt: # common receiver differential traveltime data\n",
" name_st = ev_info[key_ev].cr_dt[key_t][0]\n",
" names[name_st] = name_st\n",
"\n",
" for name in names: # only output the station which has data\n",
" lat.append(st_info[name].lat)\n",
" lon.append(st_info[name].lon)\n",
" ele.append(st_info[name].ele)\n",
" try:\n",
" weight.append(st_info[name].tag[\"weight\"])\n",
" except:\n",
" weight.append(1.0)\n",
" return [np.array(lon),np.array(lat),np.array(ele),np.array(weight)]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# function: output the [dis,time] of all data\n",
"def data_dis_time(ev_info,st_info):\n",
" all_dis = []\n",
" all_time = []\n",
" for key_ev in ev_info:\n",
" lat_ev = ev_info[key_ev].lat\n",
" lon_ev = ev_info[key_ev].lon\n",
" dep_ev = ev_info[key_ev].dep\n",
" for key_t in ev_info[key_ev].t:\n",
" all_time.append(ev_info[key_ev].t[key_t][2])\n",
" lat_st = st_info[ev_info[key_ev].t[key_t][0]].lat\n",
" lon_st = st_info[ev_info[key_ev].t[key_t][0]].lon\n",
" ele_st = st_info[ev_info[key_ev].t[key_t][0]].ele\n",
" dis = math.sqrt(cal_dis(lat_ev,lon_ev,lat_st,lon_st)**2 + (dep_ev+ele_st/1000)**2)\n",
" all_dis.append(dis)\n",
"\n",
" return [np.array(all_dis),np.array(all_time)]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# function: output the [epidis,time] of all data\n",
"def data_epidis_time(ev_info,st_info):\n",
" all_dis = []\n",
" all_time = []\n",
" for key_ev in ev_info:\n",
" lat_ev = ev_info[key_ev].lat\n",
" lon_ev = ev_info[key_ev].lon\n",
" for key_t in ev_info[key_ev].t:\n",
" all_time.append(ev_info[key_ev].t[key_t][2])\n",
" lat_st = st_info[ev_info[key_ev].t[key_t][0]].lat\n",
" lon_st = st_info[ev_info[key_ev].t[key_t][0]].lon\n",
" dis = cal_dis(lat_ev,lon_ev,lat_st,lon_st)**2\n",
" all_dis.append(dis)\n",
"\n",
" return [np.array(all_dis),np.array(all_time)]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# function: output the [cs_dt] of all data\n",
"def data_cs_dt(ev_info):\n",
" all_time = []\n",
" for key_ev in ev_info:\n",
" for key_dt in ev_info[key_ev].cs_dt:\n",
" all_time.append(ev_info[key_ev].cs_dt[key_dt][3])\n",
"\n",
" return np.array(all_time)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: data_dis_time_phase(ev_info, st_info, phase_list) Given a list of seismic phases, output the [epicentral distance, arrival time] for each phase.\n",
"def data_dis_time_phase(ev_info,st_info,phase_list):\n",
" all_dis = {}\n",
" all_time = {}\n",
" for phase in phase_list:\n",
" all_dis[phase] = []\n",
" all_time[phase] = []\n",
"\n",
" for key_ev in ev_info:\n",
" lat_ev = ev_info[key_ev].lat\n",
" lon_ev = ev_info[key_ev].lon\n",
" dep_ev = ev_info[key_ev].dep\n",
" for key_t in ev_info[key_ev].t:\n",
" phase = key_t.split(\"+\")[1]\n",
" if (not phase in phase_list):\n",
" continue\n",
"\n",
" all_time[phase].append(ev_info[key_ev].t[key_t][2])\n",
" lat_st = st_info[ev_info[key_ev].t[key_t][0]].lat\n",
" lon_st = st_info[ev_info[key_ev].t[key_t][0]].lon\n",
" ele_st = st_info[ev_info[key_ev].t[key_t][0]].ele\n",
"\n",
" dis = math.sqrt(cal_dis(lat_ev,lon_ev,lat_st,lon_st)**2 + (dep_ev+ele_st/1000)**2)\n",
" all_dis[phase].append(dis)\n",
"\n",
" for phase in phase_list:\n",
" all_dis[phase] = np.array(all_dis[phase])\n",
" all_time[phase] = np.array(all_time[phase])\n",
"\n",
" return [all_dis,all_time]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: data_lon_lat_dep_wt_ev(ev_info) Outputs the lines connecting station and earthquake for traveltime data as [line_x, line_y].\n",
"\n",
"def data_line(ev_info,st_info):\n",
" line_x = []\n",
" line_y = []\n",
"\n",
" for key_ev in ev_info:\n",
" lat_ev = ev_info[key_ev].lat\n",
" lon_ev = ev_info[key_ev].lon\n",
" for key_t in ev_info[key_ev].t:\n",
" lat_st = st_info[ev_info[key_ev].t[key_t][0]].lat\n",
" lon_st = st_info[ev_info[key_ev].t[key_t][0]].lon\n",
"\n",
" line_x.append([lon_ev,lon_st])\n",
" line_y.append([lat_ev,lat_st])\n",
"\n",
" return [line_x,line_y]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Functions: discard some data in ev_info and st_info based on selection criteria"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: limit_ev_region(ev_info, lat1, lat2, lon1, lon2, dep1, dep2) Delete the earthquakes that are out of the specified region.\n",
"\n",
"def limit_ev_region(ev_info,lat_min,lat_max,lon_min,lon_max,dep_min,dep_max):\n",
" count_delete = 0\n",
"\n",
" del_key_ev = []\n",
" for key_ev in ev_info:\n",
" ev = ev_info[key_ev]\n",
" lat = ev.lat\n",
" lon = ev.lon\n",
" dep = ev.dep\n",
"\n",
" if (lat < min(lat_min,lat_max) or lat > max(lat_min,lat_max) \\\n",
" or lon < min(lon_min,lon_max) or lon > max(lon_min,lon_max) \\\n",
" or dep < min(dep_min,dep_max) or dep > max(dep_min,dep_max)):\n",
" del_key_ev.append(key_ev)\n",
" count_delete += 1\n",
"\n",
" del_key_t = []\n",
" for key_t in ev_info[key_ev].cr_dt:\n",
" name_ev2 = ev_info[key_ev].cr_dt[key_t][1]\n",
" lat2 = ev_info[name_ev2].lat\n",
" lon2 = ev_info[name_ev2].lon\n",
" dep2 = ev_info[name_ev2].dep\n",
" if (lat2 < min(lat_min,lat_max) or lat2 > max(lat_min,lat_max) \\\n",
" or lon2 < min(lon_min,lon_max) or lon2 > max(lon_min,lon_max) \\\n",
" or dep2 < min(dep_min,dep_max) or dep2 > max(dep_min,dep_max)):\n",
"\n",
" del_key_t.append(key_t)\n",
"\n",
" for key_t in del_key_t:\n",
" del ev_info[key_ev].cr_dt[key_t]\n",
"\n",
" ev_info[key_ev].Ncr_dt = len(ev_info[key_ev].cr_dt)\n",
"\n",
" for key_ev in del_key_ev:\n",
" del ev_info[key_ev]\n",
"\n",
" print(\"delete %d events out of the region, now %d earthquakes are retained within the study region\"%(count_delete,len(ev_info)))\n",
" return ev_info"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: limit_st_region(ev_info, st_info, lat1, lat2, lon1, lon2) Delete the stations that are out of the specified region.\n",
"\n",
"def limit_st_region(ev_info,st_info,lat1,lat2,lon1,lon2):\n",
"\n",
" for key_ev in ev_info:\n",
" # delete the station out of the region in the absolute traveltime data\n",
" del_key_t = []\n",
" for key_t in ev_info[key_ev].t:\n",
" name_st = ev_info[key_ev].t[key_t][0]\n",
" lat_st = st_info[name_st].lat\n",
" lon_st = st_info[name_st].lon\n",
" if(lat_st < min(lat1,lat2) or lat_st > max(lat1,lat2) or lon_st < min(lon1,lon2) or lon_st > max(lon1,lon2)):\n",
" del_key_t.append(key_t)\n",
"\n",
" for key_t in del_key_t:\n",
" del ev_info[key_ev].t[key_t]\n",
" ev_info[key_ev].Nt = len(ev_info[key_ev].t)\n",
"\n",
" # delete the station out of the region in the common source differential traveltime data\n",
" del_key_t = []\n",
" for key_t in ev_info[key_ev].cs_dt:\n",
" name_st1 = ev_info[key_ev].cs_dt[key_t][0]\n",
" lat_st1 = st_info[name_st1].lat\n",
" lon_st1 = st_info[name_st1].lon\n",
"\n",
" name_st2 = ev_info[key_ev].cs_dt[key_t][1]\n",
" lat_st2 = st_info[name_st2].lat\n",
" lon_st2 = st_info[name_st2].lon\n",
" if(lat_st1 < min(lat1,lat2) or lat_st1 > max(lat1,lat2) or lon_st1 < min(lon1,lon2) or lon_st1 > max(lon1,lon2) \\\n",
" or lat_st2 < min(lat1,lat2) or lat_st2 > max(lat1,lat2) or lon_st2 < min(lon1,lon2) or lon_st2 > max(lon1,lon2)):\n",
" del_key_t.append(key_t)\n",
"\n",
" for key_t in del_key_t:\n",
" del ev_info[key_ev].cs_dt[key_t]\n",
" ev_info[key_ev].Ncs_dt = len(ev_info[key_ev].cs_dt)\n",
"\n",
" # delete the station out of the region in the common receiver differential traveltime data\n",
" del_key_st = []\n",
" for key_t in ev_info[key_ev].cr_dt:\n",
" name_st = ev_info[key_ev].cr_dt[key_t][0]\n",
" lat_st = st_info[name_st].lat\n",
" lon_st = st_info[name_st].lon\n",
" if(lat_st < min(lat1,lat2) or lat_st > max(lat1,lat2) or lon_st < min(lon1,lon2) or lon_st > max(lon1,lon2)):\n",
" del_key_st.append(key_t)\n",
"\n",
" for key_t in del_key_st:\n",
" del ev_info[key_ev].cr_dt[key_t]\n",
" ev_info[key_ev].Ncr_dt = len(ev_info[key_ev].cr_dt)\n",
"\n",
" return ev_info\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: limit_epi_dis(ev_info, st_info, epi_dis1, epi_dis2) Delete the stations with epicentral distance in the range from epi_dis1 to epi_dis2.\n",
"\n",
"def limit_epi_dis(ev_info,st_info,epi_dis1,epi_dis2):\n",
"\n",
" for key_ev in ev_info:\n",
" ev = ev_info[key_ev]\n",
"\n",
" lat_ev = ev.lat\n",
" lon_ev = ev.lon\n",
"\n",
" # delete the absolute traveltime data\n",
" del_key_t = []\n",
" for key_t in ev.t:\n",
" stname = ev.t[key_t][0]\n",
" lat_st = st_info[stname].lat\n",
" lon_st = st_info[stname].lon\n",
" dis = cal_dis(lat_ev, lon_ev, lat_st, lon_st)\n",
" if (dis > epi_dis1 and dis < epi_dis2):\n",
" del_key_t.append(key_t)\n",
" for key_t in del_key_t:\n",
" del ev.t[key_t]\n",
" ev.Nt = len(ev.t)\n",
"\n",
" # delete the common source differential traveltime data\n",
" del_key_t = []\n",
" for key_t in ev.cs_dt:\n",
" for i in range(2):\n",
" stname = ev.t[key_t][i]\n",
" lat_st = st_info[stname].lat\n",
" lon_st = st_info[stname].lon\n",
" dis = cal_dis(lat_ev, lon_ev, lat_st, lon_st)\n",
"\n",
" if (dis > epi_dis1 and dis < epi_dis2):\n",
" del_key_t.append(key_t)\n",
" break\n",
" for key_t in del_key_t:\n",
" del ev.cs_dt[key_t]\n",
" ev.Ncs_dt = len(ev.cs_dt)\n",
"\n",
" # delete the common receiver differential traveltime data\n",
" del_key_t = []\n",
" for key_t in ev.cr_dt:\n",
" stname = ev.cr_dt[key_t][0]\n",
" lat_st = st_info[stname].lat\n",
" lon_st = st_info[stname].lon\n",
" dis = cal_dis(lat_ev, lon_ev, lat_st, lon_st)\n",
" if (dis > epi_dis1 and dis < epi_dis2):\n",
" del_key_t.append(key_t)\n",
"\n",
" lat_ev2 = ev_info[ev.cr_dt[key_t][1]].lat\n",
" lon_ev2 = ev_info[ev.cr_dt[key_t][1]].lon\n",
" dis = cal_dis(lat_ev2, lon_ev2, lat_st, lon_st)\n",
" if (dis > epi_dis1 and dis < epi_dis2):\n",
" del_key_t.append(key_t)\n",
"\n",
" for key_t in del_key_t:\n",
" del ev.cr_dt[key_t]\n",
" ev.Ncr_dt = len(ev.cr_dt)\n",
"\n",
"\n",
" ev_info[key_ev] = ev\n",
"\n",
" return ev_info"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: limit_data_residual(ev_info, st_info, slope, intercept, up, down) Limit the data within the range defined by the line time = dis * slope + intercept and the bounds up and down.\n",
"\n",
"# remove outliers, only retain data satisfying: slope * dis + intercept + down < time < slope * dis + intercept + up\n",
"def limit_data_residual(ev_info,st_info,slope,intercept,up,down):\n",
" for key_ev in ev_info:\n",
" lat_ev = ev_info[key_ev].lat\n",
" lon_ev = ev_info[key_ev].lon\n",
" dep_ev = ev_info[key_ev].dep\n",
" del_key_t = []\n",
" for key_t in ev_info[key_ev].t:\n",
" name_st = ev_info[key_ev].t[key_t][0]\n",
" lat_st = st_info[name_st].lat\n",
" lon_st = st_info[name_st].lon\n",
" ele_st = st_info[name_st].ele\n",
" dis = math.sqrt(cal_dis(lat_ev,lon_ev,lat_st,lon_st)**2 + (dep_ev+ele_st/1000)**2)\n",
" residual = ev_info[key_ev].t[key_t][2] - (slope*dis+intercept)\n",
"\n",
" if (residual < down or residual > up):\n",
" del_key_t.append(key_t)\n",
"\n",
" for key_t in del_key_t:\n",
" del ev_info[key_ev].t[key_t]\n",
"\n",
" for key_ev in ev_info:\n",
" ev_info[key_ev].Nt = len(ev_info[key_ev].t)\n",
"\n",
" return ev_info\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: limit_data_phase(ev_info, phase_list) Retain only the specified seismic phases.\n",
"\n",
"def limit_data_phase(ev_info,phase_list):\n",
" for key_ev in ev_info:\n",
" # process the absolute traveltime data\n",
" new_t = {}\n",
" for key_t in ev_info[key_ev].t:\n",
" phase = ev_info[key_ev].t[key_t][1]\n",
" if phase in phase_list:\n",
" new_t[key_t] = ev_info[key_ev].t[key_t]\n",
"\n",
" ev_info[key_ev].t = new_t\n",
" ev_info[key_ev].Nt = len(ev_info[key_ev].t)\n",
"\n",
" # process the common source differential traveltime data\n",
" new_t = {}\n",
" for key_t in ev_info[key_ev].cs_dt:\n",
" phase = ev_info[key_ev].cs_dt[key_t][2]\n",
" phase = phase.split(\",\")[0]\n",
" if phase in phase_list:\n",
" new_t[key_t] = ev_info[key_ev].cs_dt[key_t]\n",
"\n",
" ev_info[key_ev].cs_dt = new_t\n",
" ev_info[key_ev].Ncs_dt = len(ev_info[key_ev].cs_dt)\n",
"\n",
" # process the common receiver differential traveltime data\n",
" new_t = {}\n",
" for key_t in ev_info[key_ev].cr_dt:\n",
" phase = ev_info[key_ev].cr_dt[key_t][2]\n",
" phase = phase.split(\",\")[0]\n",
" if phase in phase_list:\n",
" new_t[key_t] = ev_info[key_ev].cr_dt[key_t]\n",
"\n",
" ev_info[key_ev].cr_dt = new_t\n",
" ev_info[key_ev].Ncr_dt = len(ev_info[key_ev].cr_dt)\n",
"\n",
" return ev_info"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: limit_min_Nt(min_Nt_thd, ev_info) Delete the earthquakes with the number of data less than min_Nt_thd.\n",
"\n",
"def limit_min_Nt(min_Nt_thd, ev_info):\n",
" Nev = len(ev_info)\n",
"\n",
" del_key_ev = []\n",
" for key_ev in ev_info:\n",
" if(ev_info[key_ev].Nt < min_Nt_thd):\n",
" del_key_ev.append(key_ev)\n",
"\n",
" for key_ev in del_key_ev:\n",
" del ev_info[key_ev]\n",
"\n",
" print(\"Original data set has %d earthquakes, %d earthquakes are deleted, %d earthquakes are retained\"%(Nev,len(del_key_ev),len(ev_info)))\n",
"\n",
" return ev_info"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: limit_azi_gap(gap_thd) Calculate the azimuthal gap for all events and delete events with a gap greater than gap_thd.\n",
"def limit_azi_gap(gap_thd,ev_info,st_info):\n",
" Nev = len(ev_info)\n",
"\n",
" del_key_ev = []\n",
" for key_ev in ev_info:\n",
" ev = ev_info[key_ev]\n",
" gap = cal_azi_gap(ev,st_info)\n",
" if (gap > gap_thd):\n",
" del_key_ev.append(key_ev)\n",
" else:\n",
" ev_info[key_ev].tag[\"azi_gap\"] = gap\n",
" for key_ev in del_key_ev:\n",
" del ev_info[key_ev]\n",
"\n",
" print(\"Original data set has %d earthquakes, %d earthquakes are deleted, %d earthquakes are retained\"%(Nev,len(del_key_ev),len(ev_info)))\n",
"\n",
" return ev_info\n",
"\n",
"# Function: cal_azi_gap(ev, st_info) Calculate the azimuthal gap of a single earthquake.\n",
"def cal_azi_gap(ev,st_info):\n",
" azi_all = []\n",
" lat_ev = ev.lat\n",
" lon_ev = ev.lon\n",
" stlist = {}\n",
" for key in ev.t:\n",
" stname = ev.t[key][0]\n",
" if (not stname in stlist):\n",
" lat_st = st_info[stname].lat\n",
" lon_st = st_info[stname].lon\n",
" azi = cal_azimuth(lat_ev, lon_ev, lat_st, lon_st)\n",
" azi_all.append(azi)\n",
" stlist[stname] = 1\n",
"\n",
" azi_all.sort()\n",
" if(len(azi_all) < 2):\n",
" return 360.0\n",
" else:\n",
" gap = 0.0\n",
" for i in range(len(azi_all)-1):\n",
" gap = max(gap,azi_all[i+1] - azi_all[i])\n",
" gap = max(gap,azi_all[0] + 360 - azi_all[-1])\n",
" return gap\n",
"\n"
]
},
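{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Added usage sketch: azimuthal gap of a synthetic earthquake recorded by three\n",
"# stations (hypothetical names and coordinates). Stations due north, east and\n",
"# south of the event leave a 180-degree gap to the west.\n",
"ev_demo = Event(); ev_demo.lat = 0.0; ev_demo.lon = 0.0\n",
"st_demo = {}\n",
"for name, lat, lon in [(\"NET.A\", 1.0, 0.0), (\"NET.B\", 0.0, 1.0), (\"NET.C\", -1.0, 0.0)]:\n",
"    st = Station(); st.name = name; st.lat = lat; st.lon = lon\n",
"    st_demo[name] = st\n",
"    ev_demo.t[\"%s+P\" % name] = [name, \"P\", 0.0, 1.0]\n",
"print(\"azimuthal gap: %.1f deg\" % cal_azi_gap(ev_demo, st_demo)) # 180.0"
]
},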
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: limit_earthquake_decluster_Nt(ev_info, dlat, dlon, ddep, Top_N) Divide the region into several subdomains, sort by the number of arrival times, and retain only the top Top_N earthquakes with the most arrival times in each box.\n",
"# option 3, declustering. Divide the region into several subdomains, retain the Top N earthquakes in terms of the number of arrival times in each subdomain.\n",
"def limit_earthquake_decluster_Nt(ev_info,dlat,dlon,ddep,Top_N):\n",
" # subdivide earthquakes into different subdomains\n",
" [ev_info,tag2name] = tag_event_cluster(dlat,dlon,ddep,ev_info)\n",
"\n",
" # sort earthquakes in the same subdomain\n",
" # Sort the quality of earthquakes within each tag based on the number of arrivals.\n",
" tag2name = sort_cluster_Nt(ev_info, tag2name)\n",
"\n",
" # only retain Top_N earthquakes in each subdomain\n",
" # Within each tag, prioritize selecting the top Top_N earthquakes.\n",
" [ev_info,tag2name] = limit_decluster(ev_info, tag2name,Top_N)\n",
"\n",
" return ev_info\n",
"\n",
"\n",
"\n",
"# Function: tag_event_cluster(size_lat, size_lon, size_dep, ev_info) Subdivide the study area, assign each earthquake to a subregion, and place it in a tag.\n",
"def tag_event_cluster(size_lat,size_lon,size_dep,ev_info):\n",
" tag2name = {}\n",
" for key_ev in ev_info:\n",
" name = ev_info[key_ev].name\n",
" lat = ev_info[key_ev].lat\n",
" lon = ev_info[key_ev].lon\n",
" dep = ev_info[key_ev].dep\n",
" tag = \"%d_%d_%d\"%(math.floor(lon/size_lon),math.floor(lat/size_lat),math.floor(dep/size_dep))\n",
" ev_info[key_ev].tag[\"cluster\"] = tag\n",
"\n",
" if (tag in tag2name):\n",
" tag2name[tag].append(name)\n",
" else:\n",
" tag2name[tag] = []\n",
" tag2name[tag].append(name)\n",
"\n",
" return [ev_info,tag2name]\n",
"\n",
"# Function: sort_cluster_Nt(ev_info, tag2name) Sort the quality of earthquakes within each tag based on the number of arrivals.\n",
"def sort_cluster_Nt(ev_info, tag2name):\n",
" for key_tag in tag2name:\n",
" names_ev = tag2name[key_tag]\n",
" Nt = []\n",
" for key_ev in names_ev:\n",
" Nt.append(len(ev_info[key_ev].t))\n",
"\n",
" # Sort the earthquakes within each tag based on the number of arrivals.\n",
" sorted_Nt = sorted(enumerate(Nt), key=lambda x: x[1], reverse=True)\n",
" tag2name[key_tag] = []\n",
" for index, Nt in sorted_Nt:\n",
" tag2name[key_tag].append(names_ev[index])\n",
"\n",
" return tag2name\n",
"\n",
"# Function: limit_cluster(ev_info, tag2name, Max) Prioritize selecting the top Max earthquakes within each tag.\n",
"def limit_decluster(ev_info, tag2name, Max):\n",
" del_key_ev = []\n",
" for key_tag in tag2name:\n",
" names_ev = tag2name[key_tag]\n",
"\n",
" if(len(names_ev) > Max):\n",
" tag2name[key_tag] = names_ev[0:Max]\n",
" for i in range(Max,len(names_ev)): # Delete earthquakes that exceed the threshold in the sorted list.\n",
" del_key_ev.append(names_ev[i])\n",
"\n",
" for key_ev in del_key_ev:\n",
" del ev_info[key_ev]\n",
"\n",
" return [ev_info,tag2name]\n",
"\n"
]
},
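{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Added usage sketch: tag three synthetic earthquakes (hypothetical names and\n",
"# locations) with 1 deg x 1 deg x 10 km boxes and show the resulting cluster tags.\n",
"ev_demo = {}\n",
"for name, lat, lon, dep in [(\"EQ1\", 0.2, 0.3, 5.0), (\"EQ2\", 0.8, 0.9, 8.0), (\"EQ3\", 1.5, 0.4, 12.0)]:\n",
"    ev = Event(); ev.name = name; ev.lat = lat; ev.lon = lon; ev.dep = dep\n",
"    ev_demo[name] = ev\n",
"[ev_demo, tag2name_demo] = tag_event_cluster(1.0, 1.0, 10.0, ev_demo)\n",
"print(tag2name_demo) # EQ1 and EQ2 share tag \"0_0_0\"; EQ3 falls in \"0_1_1\""
]
},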
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Functions: assign weights to earthquakes, stations, and data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: box_weighting_ev(ev_info, dlat, dlon, ddep) Assign box-weight to the earthquakes.\n",
"def box_weighting_ev(ev_info,dlon,dlat,ddep):\n",
"\n",
" # categorization\n",
" distribute = {}\n",
" all_tag_wt = {}\n",
"\n",
" for key_ev in ev_info:\n",
" lat_id = math.floor((ev_info[key_ev].lat) / dlat)\n",
" lon_id = math.floor((ev_info[key_ev].lon) / dlon)\n",
" dep_id = math.floor((ev_info[key_ev].dep) / ddep)\n",
"\n",
" tag = '%d_%d_%d'%(lat_id,lon_id,dep_id)\n",
" if (tag in distribute):\n",
" distribute[tag] += 1\n",
" else:\n",
" distribute[tag] = 1\n",
"\n",
" max_weight = 0\n",
" for tag in distribute:\n",
" all_tag_wt[tag] = 1.0/math.sqrt(distribute[tag])\n",
" max_weight = max(max_weight,all_tag_wt[tag])\n",
"\n",
" for key_ev in ev_info:\n",
" lat_id = math.floor((ev_info[key_ev].lat) / dlat)\n",
" lon_id = math.floor((ev_info[key_ev].lon) / dlon)\n",
" dep_id = math.floor((ev_info[key_ev].dep) / ddep)\n",
"\n",
" tag = '%d_%d_%d'%(lat_id,lon_id,dep_id)\n",
"\n",
" ev_info[key_ev].tag[\"weight\"] = all_tag_wt[tag]/max_weight\n",
"\n",
" return ev_info"
]
},
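{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Added usage sketch: box-weight two clustered earthquakes and one isolated\n",
"# earthquake (hypothetical locations). The clustered pair ends up with weight\n",
"# 1/sqrt(2) relative to the isolated event.\n",
"ev_demo = {}\n",
"for name, lat, lon, dep in [(\"EQ1\", 0.1, 0.1, 5.0), (\"EQ2\", 0.2, 0.2, 6.0), (\"EQ3\", 3.5, 3.5, 5.0)]:\n",
"    ev = Event(); ev.name = name; ev.lat = lat; ev.lon = lon; ev.dep = dep\n",
"    ev_demo[name] = ev\n",
"ev_demo = box_weighting_ev(ev_demo, 1.0, 1.0, 10.0)\n",
"for name in ev_demo:\n",
"    print(name, \"%.3f\" % ev_demo[name].tag[\"weight\"]) # EQ1/EQ2 ~0.707, EQ3 1.000"
]
},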
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: geographical_weighting_ev_rough(ev_info, dlat, dlon, ddep) Assign geographical weighting to the earthquakes roughly.\n",
"def geographical_weighting_ev_rough(ev_info,dlat,dlon,ddep,coefficient = 0.5):\n",
"\n",
" # categorization\n",
" distribute = {}\n",
" all_tag_wt = {}\n",
"\n",
" for key_ev in ev_info:\n",
" lat_id = int(ev_info[key_ev].lat/dlat)\n",
" lon_id = int(ev_info[key_ev].lon/dlat)\n",
" dep_id = int(ev_info[key_ev].dep/ddep)\n",
"\n",
"\n",
" tag = '%d_%d_%d'%(lat_id,lon_id,dep_id)\n",
" if (tag in distribute):\n",
" distribute[tag] += 1\n",
" else:\n",
" distribute[tag] = 1\n",
"\n",
" # Calculate the weight of each category.\n",
" delta0 = 0\n",
" for tag1 in distribute:\n",
" tmp1 = tag1.split('_')\n",
" # evlat1 = float(tmp1[0])*dlat; evlon1 = float(tmp1[1])*dlon; evdep1 = float(tmp1[2])*ddep\n",
"\n",
" for tag2 in distribute:\n",
" tmp2 = tag2.split('_')\n",
" # evlat2 = float(tmp2[0])*dlat; evlon2 = float(tmp2[1])*dlon; evdep2 = float(tmp2[2])*ddep\n",
"\n",
" # distance of id\n",
" delta_tp = math.sqrt((int(tmp1[0]) - int(tmp2[0]))**2 + (int(tmp1[1]) - int(tmp2[1]))**2 + (int(tmp1[2]) - int(tmp2[2]))**2)\n",
" delta0 = delta0 + distribute[tag1] * distribute[tag2] * delta_tp\n",
"\n",
" delta0 = delta0/(len(ev_info)**2) * coefficient\n",
"\n",
" max_weight = 0.0\n",
" for tag1 in distribute:\n",
" tmp1 = tag1.split('_')\n",
" # evlat1 = float(tmp1[0])*dlat; evlon1 = float(tmp1[1])*dlon; evdep1 = float(tmp1[2])*ddep\n",
"\n",
" weight = 0\n",
" for tag2 in distribute:\n",
" tmp2 = tag2.split('_')\n",
" # evlat2 = float(tmp2[0])*dlat; evlon2 = float(tmp2[1])*dlon; evdep2 = float(tmp2[2])*ddep\n",
"\n",
" delta_tp = math.sqrt((int(tmp1[0]) - int(tmp2[0]))**2 + (int(tmp1[1]) - int(tmp2[1]))**2 + (int(tmp1[2]) - int(tmp2[2]))**2)\n",
" weight = weight + math.exp(-(delta_tp/delta0)**2) * distribute[tag2]\n",
"\n",
" all_tag_wt[tag1] = (1.0/weight)\n",
" max_weight = max(max_weight,1.0/weight)\n",
"\n",
" # Assign weights to each earthquake based on its tag.\n",
" for key_ev in ev_info:\n",
" lat_id = int(ev_info[key_ev].lat/dlat)\n",
" lon_id = int(ev_info[key_ev].lon/dlon)\n",
" dep_id = int(ev_info[key_ev].dep/ddep)\n",
"\n",
" tag = '%d_%d_%d'%(lat_id,lon_id,dep_id)\n",
"\n",
" ev_info[key_ev].tag[\"weight\"] = all_tag_wt[tag]/max_weight\n",
" return ev_info\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: box_weighting_st(ev_info, st_info, dlat, dlon) Assign geographical weighting to the stations roughly.\n",
"def box_weighting_st(ev_info,st_info,dlon,dlat):\n",
"\n",
" [lon_ev,lat_ev,dep_ev,wt_ev] = data_lon_lat_dep_wt_ev(ev_info)\n",
"\n",
" # Integrate all involved stations.\n",
" wt_st = {}\n",
" name_st = {}\n",
" for key_ev in ev_info:\n",
" for key_t in ev_info[key_ev].t:\n",
" name_rec = ev_info[key_ev].t[key_t][0]\n",
" wt_st[name_rec] = -1.0\n",
" name_st[name_rec] = 1\n",
"\n",
" # categorization\n",
" distribute = {}\n",
" all_tag_wt = {}\n",
"\n",
" # Count the number of stations in each subdomain.\n",
" for key_st in name_st:\n",
" lat_id = math.floor((st_info[key_st].lat) / dlat)\n",
" lon_id = math.floor((st_info[key_st].lon) / dlon)\n",
"\n",
" tag = '%d_%d'%(lat_id,lon_id)\n",
" if (tag in distribute):\n",
" distribute[tag] += 1\n",
" else:\n",
" distribute[tag] = 1\n",
"\n",
" max_weight = 0\n",
" for tag in distribute:\n",
" all_tag_wt[tag] = 1.0/math.sqrt(distribute[tag])\n",
" max_weight = max(max_weight,all_tag_wt[tag])\n",
"\n",
" # Assign weights to each station based on its tag.\n",
" for key_st in name_st:\n",
" lat_id = math.floor((st_info[key_st].lat) / dlat)\n",
" lon_id = math.floor((st_info[key_st].lon) / dlon)\n",
" tag = '%d_%d'%(lat_id,lon_id)\n",
" wt_st[key_st] = all_tag_wt[tag]/max_weight\n",
"\n",
" # modify weight tag in st_info\n",
" for key_t in wt_st:\n",
" st_info[key_t].tag[\"weight\"] = wt_st[key_t]\n",
"\n",
" # modify weight of abs data ev_info\n",
" for key_ev in ev_info:\n",
" for key_t in ev_info[key_ev].t:\n",
" name_rec = ev_info[key_ev].t[key_t][0]\n",
" ev_info[key_ev].t[key_t][3] = wt_st[name_rec]\n",
"\n",
" return [ev_info,st_info]\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: geographical_weighting_st(ev_info,st_info) Assign geographical weighting to the stations roughly.\n",
"def geographical_weighting_st(ev_info,st_info,coefficient = 0.5):\n",
"\n",
" # Integrate all involved stations.\n",
" wt_st = {}\n",
" name_st = {}\n",
" for key_ev in ev_info:\n",
" for key_t in ev_info[key_ev].t:\n",
" name_rec = ev_info[key_ev].t[key_t][0]\n",
" wt_st[name_rec] = -1.0\n",
" name_st[name_rec] = 1\n",
"\n",
" # Calculate the weight of each station.\n",
" delta0 = 0\n",
" for key_st1 in name_st:\n",
" stlat1 = st_info[key_st1].lat\n",
" stlon1 = st_info[key_st1].lon\n",
"\n",
" for key_st2 in name_st:\n",
" stlat2 = st_info[key_st2].lat\n",
" stlon2 = st_info[key_st2].lon\n",
"\n",
" delta_tp = cal_dis(stlat1,stlon1,stlat2,stlon2)\n",
" delta0 = delta0 + delta_tp\n",
"\n",
" delta0 = delta0/(len(wt_st)**2)*coefficient\n",
"\n",
" max_weight = 0.0\n",
" for key_st1 in name_st:\n",
" stlat1 = st_info[key_st1].lat\n",
" stlon1 = st_info[key_st1].lon\n",
"\n",
" weight = 0\n",
" for key_st2 in name_st:\n",
" stlat2 = st_info[key_st2].lat\n",
" stlon2 = st_info[key_st2].lon\n",
"\n",
" delta_tp = cal_dis(stlat1,stlon1,stlat2,stlon2)\n",
" weight = weight + math.exp(-(delta_tp/delta0)**2)\n",
"\n",
" wt_st[key_st1] = (1.0/weight)\n",
" max_weight = max(max_weight,1.0/weight)\n",
"\n",
" for key_st1 in wt_st:\n",
" wt_st[key_st1] = wt_st[key_st1]/max_weight\n",
"\n",
" # Add weight to each data point in the earthquakes.\n",
" for key_ev in ev_info:\n",
" for key_t in ev_info[key_ev].t:\n",
" name_rec = ev_info[key_ev].t[key_t][0]\n",
" if (not name_rec in wt_st):\n",
" ValueError(\"The station of the data is not in the calculation list\")\n",
"\n",
" if (len(ev_info[key_ev].t[key_t])==3):\n",
" ev_info[key_ev].t[key_t].append(wt_st[name_rec])\n",
" elif (len(ev_info[key_ev].t[key_t])==4):\n",
" ev_info[key_ev].t[key_t][3] = wt_st[name_rec]\n",
" else:\n",
" ValueError(\"Error in the weight information of the absolute traveltime data\")\n",
"\n",
" # modify weight tag in st_info\n",
" for key_t in wt_st:\n",
" st_info[key_t].tag[\"weight\"] = wt_st[key_t]\n",
"\n",
" # modify weight of abs data ev_info\n",
" for key_ev in ev_info:\n",
" for key_t in ev_info[key_ev].t:\n",
" name_rec = ev_info[key_ev].t[key_t][0]\n",
" ev_info[key_ev].t[key_t][3] = wt_st[name_rec]\n",
"\n",
" return [ev_info,st_info]\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Function: add noise into data"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Functionassign_gaussian_noise():\n",
"def assign_gaussian_noise(ev_info,sigma):\n",
"\n",
" # Record which seismic phases correspond to each station.\n",
" st2phase = {} # Station name -> [Keys of absolute arrival time data related to this station]\n",
"\n",
"\n",
" for key_ev in ev_info:\n",
" # Absolute arrival time noise\n",
" for key_t in ev_info[key_ev].t:\n",
" stname = ev_info[key_ev].t[key_t][0]\n",
" ev_info[key_ev].t[key_t][2] = ev_info[key_ev].t[key_t][2] + np.random.normal(0,sigma)\n",
" if(stname in st2phase):\n",
" st2phase[stname].append(key_t)\n",
" else:\n",
" st2phase[stname] = [key_t]\n",
"\n",
" for key_ev in ev_info:\n",
" # Double-difference arrival time noise\n",
" for key_dt in ev_info[key_ev].cs_dt:\n",
" stname1 = ev_info[key_ev].cs_dt[key_dt][0]\n",
" stname2 = ev_info[key_ev].cs_dt[key_dt][1]\n",
" t1 = -999\n",
" t2 = -999\n",
" # Search for the arrival time of the data.\n",
" if (stname1 in st2phase):\n",
" for key_t in st2phase[stname1]:\n",
" if (key_t in ev_info[key_ev].t):\n",
" t1 = ev_info[key_ev].t[key_t][2]\n",
" break\n",
" if (stname2 in st2phase):\n",
" for key_t in st2phase[stname2]:\n",
" if (key_t in ev_info[key_ev].t):\n",
" t2 = ev_info[key_ev].t[key_t][2]\n",
" break\n",
"\n",
" if (t1 == -999 or t2 == -999):\n",
" # If there is no absolute arrival time data, the double-difference data residuals increase by a factor of sqrt(2) in noise.\n",
" ev_info[key_ev].cs_dt[key_dt][3] = ev_info[key_ev].cs_dt[key_dt][3] + np.random.normal(0,sigma*np.sqrt(2))\n",
" print('no data: ', key_ev, key_dt)\n",
" else:\n",
" # If there is absolute arrival time data, the double-difference data is obtained by subtraction.\n",
" ev_info[key_ev].cs_dt[key_dt][3] = t1 - t2\n",
"\n",
" # Common station double-difference arrival time\n",
" for key_dt in ev_info[key_ev].cr_dt:\n",
" stname = ev_info[key_ev].cr_dt[key_dt][0]\n",
" key_ev2 = ev_info[key_ev].cr_dt[key_dt][1]\n",
"\n",
" t1 = -999\n",
" t2 = -999\n",
" # Search for the arrival time of the data.\n",
" if (stname in st2phase):\n",
" for key_t in st2phase[stname]:\n",
" if (key_t in ev_info[key_ev].t):\n",
" t1 = ev_info[key_ev].t[key_t][2]\n",
" break\n",
" else:\n",
" print('not found 1: ', key_ev, key_t)\n",
"\n",
" for key_t in st2phase[stname]:\n",
" if (key_t in ev_info[key_ev2].t):\n",
" t2 = ev_info[key_ev2].t[key_t][2]\n",
" break\n",
" else:\n",
" print('not found 2: ', key_ev, key_t)\n",
"\n",
" if (t1 == -999 or t2 == -999):\n",
" # If there is no absolute arrival time data, the double-difference data residuals increase by a factor of sqrt(2) in noise.\n",
" ev_info[key_ev].cr_dt[key_dt][3] = ev_info[key_ev].cr_dt[key_dt][3] + np.random.normal(0,sigma*np.sqrt(2))\n",
" print('no data: ', key_ev, key_dt)\n",
" else:\n",
" # If there is absolute arrival time data, the double-difference data is obtained by subtraction.\n",
" ev_info[key_ev].cr_dt[key_dt][3] = t1 - t2\n",
"\n",
" return ev_info"
]
},
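{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Added sanity check: after assign_gaussian_noise(), a common source differential\n",
"# traveltime whose two absolute picks both exist should equal their (noisy)\n",
"# difference exactly. Station/event names and times are hypothetical.\n",
"np.random.seed(0)\n",
"ev = Event(); ev.name = \"EQ1\"\n",
"ev.t[\"NET.A+P\"] = [\"NET.A\", \"P\", 10.0, 1.0]\n",
"ev.t[\"NET.B+P\"] = [\"NET.B\", \"P\", 12.0, 1.0]\n",
"ev.cs_dt[\"NET.A+NET.B+P,cs\"] = [\"NET.A\", \"NET.B\", \"P,cs\", -2.0, 1.0]\n",
"ev_info_demo = assign_gaussian_noise({\"EQ1\": ev}, 0.1)\n",
"t1 = ev_info_demo[\"EQ1\"].t[\"NET.A+P\"][2]; t2 = ev_info_demo[\"EQ1\"].t[\"NET.B+P\"][2]\n",
"print(abs(ev_info_demo[\"EQ1\"].cs_dt[\"NET.A+NET.B+P,cs\"][3] - (t1 - t2)) < 1e-12) # True"
]
},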
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Functionassign_uniform_noise_to_ev():\n",
"def assign_uniform_noise_to_ev(ev_info, range_lat, range_lon, range_dep, range_time):\n",
"\n",
" # Loop through all earthquakes and assign noise to them.\n",
" ev_noise = {} # Name of the earthquake -> noise of [lat,lon,dep,ortime]\n",
" # loop list of earthquakes\n",
" for key_ev in ev_info:\n",
" evname = key_ev\n",
" if (evname in ev_noise):\n",
" print(\"error: repeated earthquake name\")\n",
" exit()\n",
" else:\n",
" # generate noise\n",
" ev_noise[evname] = np.random.uniform(-1,1,4) * np.array([range_lat,range_lon,range_dep,range_time])\n",
"\n",
" # Add noise to each data point.\n",
" for key_ev in ev_info:\n",
"\n",
" # Absolute arrival time noise\n",
" for key_t in ev_info[key_ev].t:\n",
"\n",
" ev_info[key_ev].t[key_t][2] = ev_info[key_ev].t[key_t][2] - ev_noise[key_ev][3]\n",
"\n",
"\n",
" # Double-difference arrival time noise (double-difference arrival time remains unchanged)\n",
"\n",
" # Common station double-difference arrival time\n",
" for key_dt in ev_info[key_ev].cr_dt:\n",
" key_ev2 = ev_info[key_ev].cr_dt[key_dt][1]\n",
"\n",
" if (key_ev2 in ev_noise):\n",
" ev_info[key_ev].cr_dt[key_dt][3] = ev_info[key_ev].cr_dt[key_dt][3] - ev_noise[key_ev][3] + ev_noise[key_ev2][3]\n",
" else:\n",
" print(\"earthquake %s is not included in ev_list\"%(key_ev2))\n",
" ev_noise[key_ev2] = np.random.uniform(-1,1,4) * np.array([range_lat,range_lon,range_dep,range_time])\n",
" ev_info[key_ev].cr_dt[key_dt][3] = ev_info[key_ev].cr_dt[key_dt][3] - ev_noise[key_ev][3] + ev_noise[key_ev2][3]\n",
"\n",
"\n",
" # Add noise to each earthquake.\n",
" for key_ev in ev_noise:\n",
" ev_info[key_ev].lat = ev_info[key_ev].lat + ev_noise[key_ev][0]\n",
" ev_info[key_ev].lon = ev_info[key_ev].lon + ev_noise[key_ev][1]\n",
" ev_info[key_ev].dep = abs(ev_info[key_ev].dep + ev_noise[key_ev][2])\n",
" ev_info[key_ev].ortime = ev_info[key_ev].ortime + ev_noise[key_ev][3]\n",
"\n",
"\n",
" return ev_info"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Functions: generate differential traveltime"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: generate_cs_dif(ev_info, st_info, dis_thd, azi_thd) Generate double-difference arrival times from absolute arrival times, with inter-station distance less than dis_thd and azimuthal difference less than azi_thd.\n",
"# function: generate common source differential traveltime data from absolute traveltime data, the stations separation is less than dis_thd, the azimuth difference is less than azi_thd\n",
"def generate_cs_dif(ev_info,st_info,dis_thd,azi_thd):\n",
" count_t = 0\n",
" count_cs_dt = 0\n",
"\n",
" for key_ev in ev_info:\n",
" ev = ev_info[key_ev]\n",
"\n",
" lat_ev = ev.lat\n",
" lon_ev = ev.lon\n",
"\n",
" # traverse all arrival times\n",
" name_st_list = [] # names of stations\n",
" t_list = [] # traveltime\n",
" wt_list = [] # weight\n",
" for key_t in ev.t:\n",
" name_st_list.append(ev.t[key_t][0])\n",
" t_list.append(ev.t[key_t][2])\n",
" wt_list.append(ev.t[key_t][3])\n",
" count_t += 1\n",
"\n",
" # search for possible double-difference arrival times\n",
" for id_st1 in range(len(name_st_list)-1):\n",
" name_st1 = name_st_list[id_st1]\n",
" lat_st1 = st_info[name_st1].lat\n",
" lon_st1 = st_info[name_st1].lon\n",
" t_st1 = t_list[id_st1]\n",
" wt_st1 = wt_list[id_st1]\n",
"\n",
" for id_st2 in range(id_st1+1,len(name_st_list)):\n",
" name_st2 = name_st_list[id_st2]\n",
" lat_st2 = st_info[name_st2].lat\n",
" lon_st2 = st_info[name_st2].lon\n",
" t_st2 = t_list[id_st2]\n",
" wt_st2 = wt_list[id_st2]\n",
"\n",
" dis = cal_dis(lat_st1,lon_st1,lat_st2,lon_st2)\n",
" azi_st1 = cal_azimuth(lat_ev,lon_ev,lat_st1,lon_st1)\n",
" azi_st2 = cal_azimuth(lat_ev,lon_ev,lat_st2,lon_st2)\n",
"\n",
" azi_dif = abs(azi_st1 - azi_st2)\n",
"\n",
" if(dis < dis_thd and (azi_dif < azi_thd or (360-azi_dif) < azi_thd )):\n",
" ev.cs_dt[\"%s+%s+%s\"%(name_st1,name_st2,\"P,cs\")] = [name_st1,name_st2,\"P,cs\",t_st1-t_st2,(wt_st1+wt_st2)/2]\n",
" count_cs_dt += 1\n",
"\n",
" ev_info[key_ev].Ncs_dt = len(ev.cs_dt)\n",
"\n",
" print('we generate %d common source differential traveltimes from %s absolute traveltimes'%(count_cs_dt,count_t))\n",
" return ev_info\n"
]
},
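{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Added usage sketch: two nearby stations with similar azimuths from one\n",
"# hypothetical event yield a single common source differential traveltime\n",
"# (note that the phase label is hard-coded as \"P,cs\" above).\n",
"ev = Event(); ev.name = \"EQ1\"; ev.lat = 0.0; ev.lon = 0.0\n",
"ev.t[\"NET.A+P\"] = [\"NET.A\", \"P\", 10.0, 1.0]\n",
"ev.t[\"NET.B+P\"] = [\"NET.B\", \"P\", 12.0, 1.0]\n",
"st_demo = {}\n",
"for name, lat, lon in [(\"NET.A\", 1.0, 0.00), (\"NET.B\", 1.0, 0.05)]:\n",
"    st = Station(); st.name = name; st.lat = lat; st.lon = lon\n",
"    st_demo[name] = st\n",
"ev_info_demo = generate_cs_dif({\"EQ1\": ev}, st_demo, dis_thd=20.0, azi_thd=15.0)\n",
"print(ev_info_demo[\"EQ1\"].cs_dt) # one pair: t_A - t_B = -2.0, weight 1.0"
]
},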
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: generate_cr_dif(ev_info, st_info, dis_thd) Generate common station double-difference arrival times from absolute arrival times, with inter-event distance less than dis_thd.\n",
"# Function: generate common receiver differential traveltime data from absolute traveltime data, the earthquake separation is less than dis_thd\n",
"def generate_cr_dif(ev_info,st_info,dis_thd,azi_thd):\n",
"\n",
" # Construct mappingrec2src[name_ev] -> {name_st: [name_ev, name_st, t, wt]; name_st: [name_ev, name_st, t, wt]; ...}\n",
" rec2src = build_rec_src_map(ev_info,dis_thd)\n",
" print(\"rec to src map generation finished\")\n",
"\n",
" # Construct double-difference data association mappingrec2src_pair[key_t]\n",
" rec2src_pair = build_rec_src_pair_map(rec2src)\n",
" print(\"rec to src_pair map generation finished\")\n",
"\n",
" for key_t in rec2src_pair:\n",
" name_st = key_t.split('+')[0]\n",
" lat_st = st_info[name_st].lat\n",
" lon_st = st_info[name_st].lon\n",
"\n",
" for ev_tag in rec2src_pair[key_t]:\n",
" name_ev1 = rec2src_pair[key_t][ev_tag][0]\n",
" lat_ev1 = ev_info[name_ev1].lat\n",
" lon_ev1 = ev_info[name_ev1].lon\n",
" dep_ev1 = ev_info[name_ev1].dep\n",
"\n",
" name_ev2 = rec2src_pair[key_t][ev_tag][1]\n",
" lat_ev2 = ev_info[name_ev2].lat\n",
" lon_ev2 = ev_info[name_ev2].lon\n",
" dep_ev2 = ev_info[name_ev2].dep\n",
"\n",
" dis_xy = cal_dis(lat_ev1,lon_ev1,lat_ev2,lon_ev2)\n",
" dis_z = abs(dep_ev1 - dep_ev2)\n",
" dis = math.sqrt(dis_xy**2 + dis_z**2)\n",
" if(dis > dis_thd): # limit of the distance between two earthquakes\n",
" continue\n",
"\n",
" azi1 = cal_azimuth(lat_ev1,lon_ev1,lat_st,lon_st)\n",
" azi2 = cal_azimuth(lat_ev2,lon_ev2,lat_st,lon_st)\n",
" azi_dif = abs(azi1 - azi2)\n",
"\n",
" if(azi_dif > azi_thd and (360-azi_dif) > azi_thd): # limit of the azimuth difference between two earthquakes\n",
" continue\n",
"\n",
" t_ev1 = ev_info[name_ev1].t[key_t][2]\n",
" t_ev2 = ev_info[name_ev2].t[key_t][2]\n",
" wt_ev1 = ev_info[name_ev1].t[key_t][3] * ev_info[name_ev1].tag[\"weight\"]\n",
" wt_ev2 = ev_info[name_ev2].t[key_t][3] * ev_info[name_ev2].tag[\"weight\"]\n",
" # The actual data weight is wt_ev1 + wt_ev2, but in TomoATT calculations, we need to divide it by ev_info[name_ev1].tag[\"weight\"].\n",
" wt = (wt_ev1 + wt_ev2)/2/ev_info[name_ev1].tag[\"weight\"]\n",
"\n",
" ev_info[name_ev1].cr_dt[\"%s+%s+%s\"%(name_st,name_ev2,\"P,cr\")] = [name_st,name_ev2,\"P,cr\",t_ev1-t_ev2,wt]\n",
"\n",
" # Count the number of double-difference data points.\n",
" count_cr_dt = 0\n",
" count_t = 0\n",
" for key_ev in ev_info:\n",
" ev_info[key_ev].Ncr_dt = len(ev_info[key_ev].cr_dt)\n",
" count_cr_dt += ev_info[key_ev].Ncr_dt\n",
" count_t += ev_info[key_ev].Nt\n",
"\n",
" print('we generate %d common receiver differential traveltimes from %s absolute traveltimes'%(count_cr_dt,count_t))\n",
"\n",
" return ev_info\n",
"\n",
"# Construct mapping: rec2src = {key_t: dict_tag; key_t: dict_tag; ...}\n",
"# dict_tag = {tag: list_name_ev; tag: list_name_ev; ...}\n",
"# list_name_ev = [name_ev1, name_ev2, ...]\n",
"# Assign earthquakes to different subregions based on their locations. The subregion size is dlat * dlon * ddep. When performing common station double-difference calculations, only earthquake pairs within the same subregion or adjacent subregions will be considered.\n",
"def build_rec_src_map(ev_info,dis_thd):\n",
" rec2src = {}\n",
" for key_ev in ev_info:\n",
" name_ev = ev_info[key_ev].name\n",
" lat = ev_info[key_ev].lat\n",
" lon = ev_info[key_ev].lon\n",
" dep = ev_info[key_ev].dep\n",
" tag_dep = math.floor(dep/dis_thd)\n",
" tag_lat = math.floor(lat/180*math.pi*R_earth/dis_thd)\n",
" tag_lon = math.floor(lon/180*math.pi*R_earth*math.cos(lat)/dis_thd)\n",
" tag = \"%d_%d_%d\"%(tag_lon,tag_lat,tag_dep)\n",
"\n",
"\n",
" for key_t in ev_info[key_ev].t:\n",
"\n",
" # create dictionary\n",
" if (not key_t in rec2src):\n",
" rec2src[key_t] = {tag:[]}\n",
" elif (not tag in rec2src[key_t]):\n",
" rec2src[key_t][tag] = []\n",
"\n",
" # Add data\n",
" rec2src[key_t][tag].append(name_ev)\n",
"\n",
" return rec2src\n",
"\n",
"# Function: generate_adjacent_tag(tag) Generate tags surrounding the given tag.\n",
"def generate_adjacent_tag(tag): # Excluding the tag itself.\n",
" adjacent_tag_list = []\n",
" tmp = tag.split('_')\n",
" tag_lon = int(tmp[0])\n",
" tag_lat = int(tmp[1])\n",
" tag_dep = int(tmp[2])\n",
"\n",
" for i in range(-1,2):\n",
" for j in range(-1,2):\n",
" for k in range(-1,2):\n",
" if(i == 0 and j == 0 and k == 0):\n",
" continue\n",
" adjacent_tag_list.append(\"%d_%d_%d\"%(tag_lon+i,tag_lat+j,tag_dep+k))\n",
"\n",
" return adjacent_tag_list\n",
"\n",
"\n",
"# construct mappingrec2src_pair\n",
"def build_rec_src_pair_map(rec2src):\n",
" rec2src_pair = {}\n",
"\n",
" for key_t in rec2src:\n",
" rec2src_pair[key_t] = {}\n",
"\n",
" for tag in rec2src[key_t]:\n",
" name_ev_list1 = rec2src[key_t][tag]\n",
"\n",
" name_ev_list2 = rec2src[key_t][tag]\n",
" adjacent_tag_list = generate_adjacent_tag(tag)\n",
" for adjacent_tag in adjacent_tag_list:\n",
" if (adjacent_tag in rec2src[key_t]): # If the surrounding tag's region has earthquakes, add them to the earthquake list.\n",
" name_ev_list2 = name_ev_list2 + rec2src[key_t][adjacent_tag]\n",
"\n",
" # Find possible earthquake pairs.\n",
" for id_ev1 in range(len(name_ev_list1)-1):\n",
" name_ev1 = name_ev_list1[id_ev1]\n",
"\n",
" for id_ev2 in range(id_ev1+1,len(name_ev_list2)): # Starting from id_ev1 + 1 already excludes duplicate earthquakes within the tag.\n",
" name_ev2 = name_ev_list2[id_ev2]\n",
"\n",
" ev_tag1 = \"%s+%s\"%(name_ev1,name_ev2)\n",
" ev_tag2 = \"%s+%s\"%(name_ev2,name_ev1)\n",
"\n",
" if(ev_tag1 in rec2src_pair[key_t] or ev_tag2 in rec2src_pair[key_t]):\n",
" continue\n",
"\n",
" rec2src_pair[key_t][ev_tag1] = [name_ev1,name_ev2]\n",
"\n",
"\n",
" return rec2src_pair"
]
},
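{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Added sanity check: generate_adjacent_tag() returns the 26 neighbouring\n",
"# subregion tags of a given tag (a 3x3x3 cube minus the centre).\n",
"neighbours = generate_adjacent_tag(\"0_0_0\")\n",
"print(len(neighbours)) # 26\n",
"print(\"1_1_1\" in neighbours) # True\n",
"print(\"0_0_0\" in neighbours) # False: the tag itself is excluded"
]
},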
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Functions: read and write src_rec.dat file"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: reorder_src(ev) Reorder the earthquake IDs. If the earthquake has no data, the ID is -999.\n",
"def reorder_src(ev_info):\n",
"\n",
" ev_id = 0\n",
" for key_ev in ev_info:\n",
" ev = ev_info[key_ev]\n",
"\n",
" if(ev.Nt + ev.Ncs_dt + ev.Ncr_dt == 0):\n",
" ev.id = -999\n",
" else:\n",
" ev_info[key_ev].id = ev_id\n",
" ev_id += 1\n",
"\n",
" return ev_info\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: read_src_rec_file(fname) Read the src_rec.dat file.\n",
"#\n",
"def read_src_rec_file(fname):\n",
" ev_info = {}\n",
" st_info = {}\n",
"\n",
" tmp_ev_info = {}\n",
"\n",
" doc = open(fname,'r')\n",
" doc_input = doc.readlines()\n",
" doc.close()\n",
"\n",
" cc = 0\n",
" for info in doc_input:\n",
" tmp=info.split()\n",
" if (cc == 0): # event line\n",
" ev = Event()\n",
" # 1 2000 1 2 20 28 37.270 38.2843 39.0241 11.00 3.60 8 1725385\n",
" # id_ev = int(tmp[0])\n",
" ev.id = int(tmp[0])\n",
" year = int(tmp[1])\n",
" month = int(tmp[2])\n",
" day = int(tmp[3])\n",
" hour = int(tmp[4])\n",
" minute = int(tmp[5])\n",
" second = float(tmp[6])\n",
" ev.ortime = UTCDateTime(year,month,day,hour,minute,0) + second\n",
" ev.lat = float(tmp[7])\n",
" ev.lon = float(tmp[8])\n",
" ev.dep = float(tmp[9])\n",
" ev.mag = float(tmp[10])\n",
" ev.Nt = 0\n",
" ev.Ncs_dt = 0\n",
" ev.Ncr_dt = 0\n",
" ev.t = {}\n",
" ev.cs_dt = {}\n",
" ev.cr_dt = {}\n",
" ndata = int(tmp[11])\n",
" name_ev = tmp[12]\n",
" ev.name = name_ev\n",
" cc += 1\n",
" try:\n",
" ev.tag[\"weight\"] = float(tmp[13])\n",
" except:\n",
" pass\n",
"\n",
" if (ndata == 0):\n",
" cc = 0\n",
" ev_info[name_ev] = ev\n",
"\n",
" else: # data line\n",
" # 1 1 MYA 38.3261 38.4253 1050.0000 P 52.46 6.630 weight\n",
" if(len(tmp) < 10): # absolue traveltime data\n",
" name_st = tmp[2]\n",
" phase = tmp[6]\n",
" if (phase == \"PG\"):\n",
" phase = \"Pg\"\n",
" if (phase == \"PB\"):\n",
" phase = \"Pb\"\n",
" if (phase == \"PN\"):\n",
" phase = \"Pn\"\n",
"\n",
" if (not name_st in st_info):\n",
" st = Station()\n",
" st.name = name_st\n",
" st.id = float(tmp[1])\n",
" st.lat = float(tmp[3])\n",
" st.lon = float(tmp[4])\n",
" st.ele = float(tmp[5])\n",
" st_info[name_st] = st\n",
"\n",
" time = float(tmp[7])\n",
" if(len(tmp) == 9):\n",
" weight = float(tmp[8])\n",
" else:\n",
" weight = 1.0\n",
" ev.t[\"%s+%s\"%(name_st,phase)] = [name_st,phase,time,weight]\n",
" ev.Nt += 1\n",
"\n",
" else: # differential traveltime data\n",
" phase = tmp[11]\n",
" if (phase.__contains__(\"cr\")): # common receiver differential traveltime\n",
" # evid stid1 stname1 lat1 lon1 eve1 evid2 evname2 lat2 lon2 dep2 phase,cr diftime weight\n",
"\n",
" name_st1 = tmp[2]\n",
" if (not name_st1 in st_info): # add station to the station list\n",
" st = Station()\n",
" st.name = name_st1\n",
" st.id = float(tmp[1])\n",
" st.lat = float(tmp[3])\n",
" st.lon = float(tmp[4])\n",
" st.ele = float(tmp[5])\n",
" st_info[name_st1] = st\n",
"\n",
" name_ev2 = tmp[7]\n",
" # add earthquake to the temp earthquake list\n",
" ev2 = Event()\n",
" ev2.name = name_ev2\n",
" ev2.id = float(tmp[6])\n",
" ev2.lat = float(tmp[8])\n",
" ev2.lon = float(tmp[9])\n",
" ev2.dep = float(tmp[10])\n",
" tmp_ev_info[name_ev2] = ev2\n",
"\n",
"\n",
" dif_time = float(tmp[12])\n",
" if(len(tmp) == 14):\n",
" weight = float(tmp[13])\n",
" else:\n",
" weight = 1.0\n",
" ev.cr_dt[\"%s+%s+%s\"%(name_st1,name_ev2,phase)] = [name_st1,name_ev2,phase,dif_time,weight]\n",
" ev.Ncr_dt += 1\n",
"\n",
" else: # common source differential traveltime\n",
" # evid stid1 stname1 lat1 lon1 eve1 stid2 stname2 lat2 lon2 ele2 phase,cs diftime weight\n",
"\n",
" name_st1 = tmp[2]\n",
" if (not name_st1 in st_info):\n",
" st = Station()\n",
" st.name = name_st1\n",
" st.id = len(st_info)\n",
" st.lat = float(tmp[3])\n",
" st.lon = float(tmp[4])\n",
" st.ele = float(tmp[5])\n",
" st_info[name_st1] = st\n",
"\n",
" name_st2 = tmp[7]\n",
" if (not name_st2 in st_info):\n",
" st = Station()\n",
" st.name = name_st2\n",
" st.id = float(tmp[6])\n",
" st.lat = float(tmp[8])\n",
" st.lon = float(tmp[9])\n",
" st.ele = float(tmp[10])\n",
" st_info[name_st2] = st\n",
"\n",
" dif_time = float(tmp[12])\n",
" if(len(tmp) == 14):\n",
" weight = float(tmp[13])\n",
" else:\n",
" weight = 1.0\n",
" ev.cs_dt[\"%s+%s+%s\"%(name_st1,name_st2,phase)] = [name_st1,name_st2,phase,dif_time,weight]\n",
" ev.Ncs_dt += 1\n",
"\n",
" if (cc == ndata): # end of the event data\n",
" cc = 0\n",
" ev_info[name_ev] = ev\n",
" else:\n",
" cc += 1\n",
"\n",
" # Add earthquakes from the temporary earthquake list to the main earthquake list.\n",
" for key_ev in tmp_ev_info:\n",
" if (not key_ev in ev_info):\n",
" ev_info[key_ev] = tmp_ev_info[key_ev]\n",
"\n",
" return [ev_info,st_info]"
]
},
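{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Usage sketch: read a catalog into the event and station dictionaries.\n",
"# \"src_rec.dat\" is an assumed path; replace it with your own data file.\n",
"import os\n",
"\n",
"if os.path.exists(\"src_rec.dat\"):\n",
"    [ev_info, st_info] = read_src_rec_file(\"src_rec.dat\")\n",
"    print(\"events: %d, stations: %d\"%(len(ev_info), len(st_info)))"
]
},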
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: write_src_rec_file(fname, ev_info, st_info) Output the src_rec.dat file.\n",
"def write_src_rec_file(fname,ev_info,st_info):\n",
" ev_info = reorder_src(ev_info)\n",
" doc_src_rec = open(fname,'w')\n",
"\n",
" min_lat = 9999\n",
" max_lat = -9999\n",
" min_lon = 9999\n",
" max_lon = -9999\n",
" min_dep = 9999\n",
" max_dep = -9999\n",
"\n",
" record_ev = {}\n",
" record_st = {}\n",
" Nt_total = 0\n",
" Ncs_dt_total = 0\n",
" Ncr_dt_total = 0\n",
"\n",
" for key_ev in ev_info:\n",
" ev = ev_info[key_ev]\n",
" evid = ev.id\n",
" year = ev.ortime.year\n",
" month = ev.ortime.month\n",
" day = ev.ortime.day\n",
" hour = ev.ortime.hour\n",
" minute = ev.ortime.minute\n",
" second = ev.ortime.second\n",
" msec = ev.ortime.microsecond\n",
" lat_ev = ev.lat\n",
" lon_ev = ev.lon\n",
" dep_ev = ev.dep\n",
" mag = ev.mag\n",
" ndata = ev.Nt + ev.Ncs_dt + ev.Ncr_dt\n",
" name_ev = ev.name\n",
" try:\n",
" weight_ev = ev.tag[\"weight\"]\n",
" except:\n",
" weight_ev = 1.0\n",
"\n",
" if(ndata == 0): # if the earthquake has no data, do not output it\n",
" continue\n",
"\n",
" doc_src_rec.write('%7d %6d %2d %2d %2d %2d %5.2f %9.4f %9.4f %9.4f %5.2f %7d %s %7.3f\\n'%(\\\n",
" evid,year,month,day,hour,minute,second+msec/1000000,lat_ev,lon_ev,dep_ev,mag,ndata,name_ev,weight_ev))\n",
"\n",
" min_lat = min(min_lat, lat_ev)\n",
" max_lat = max(max_lat, lat_ev)\n",
" min_lon = min(min_lon, lon_ev)\n",
" max_lon = max(max_lon, lon_ev)\n",
" min_dep = min(min_dep, dep_ev)\n",
" max_dep = max(max_dep, dep_ev)\n",
"\n",
" record_ev[name_ev] = 1 # record this earthquake\n",
" Nt_total += ev.Nt\n",
" Ncs_dt_total += ev.Ncs_dt\n",
" Ncr_dt_total += ev.Ncr_dt\n",
"\n",
" for key_t in ev.t:\n",
" data = ev.t[key_t]\n",
" st = st_info[data[0]]\n",
" stid = st.id\n",
" name_st = st.name\n",
" lat_st = st.lat\n",
" lon_st = st.lon\n",
" ele_st = st.ele\n",
" phase = data[1]\n",
" time = data[2]\n",
" try:\n",
" weight_data = data[3]\n",
" except:\n",
" weight_data = 1.0\n",
" doc_src_rec.write('%7d %7d %6s %9.4f %9.4f %9.4f %s %8.4f %7.3f \\n'%(evid,stid,name_st,lat_st,lon_st,ele_st,phase,time,weight_data))\n",
"\n",
" min_lat = min(min_lat, lat_st)\n",
" max_lat = max(max_lat, lat_st)\n",
" min_lon = min(min_lon, lon_st)\n",
" max_lon = max(max_lon, lon_st)\n",
" min_dep = min(min_dep, -ele_st/1000)\n",
" max_dep = max(max_dep, -ele_st/1000)\n",
"\n",
" record_st[name_st] = 1 # record this station\n",
"\n",
" for key_t in ev.cs_dt:\n",
" data = ev.cs_dt[key_t]\n",
" st1 = st_info[data[0]]\n",
" stid1 = st1.id\n",
" name_st1= st1.name\n",
" lat_st1 = st1.lat\n",
" lon_st1 = st1.lon\n",
" ele_st1 = st1.ele\n",
" st2 = st_info[data[1]]\n",
" stid2 = st2.id\n",
" name_st2= st2.name\n",
" lat_st2 = st2.lat\n",
" lon_st2 = st2.lon\n",
" ele_st2 = st2.ele\n",
" phase = data[2]\n",
" time = data[3]\n",
" try:\n",
" weight_data = data[4]\n",
" except:\n",
" weight_data = 1.0\n",
" doc_src_rec.write('%7d %7d %6s %9.4f %9.4f %9.4f %7d %6s %9.4f %9.4f %9.4f %s %8.4f %7.3f \\n'%(\\\n",
" evid,stid1,name_st1,lat_st1,lon_st1,ele_st1,stid2,name_st2,lat_st2,lon_st2,ele_st2,phase,time,weight_data))\n",
"\n",
" min_lat = min(min_lat, lat_st1)\n",
" max_lat = max(max_lat, lat_st1)\n",
" min_lon = min(min_lon, lon_st1)\n",
" max_lon = max(max_lon, lon_st1)\n",
" min_dep = min(min_dep, -ele_st1/1000)\n",
" max_dep = max(max_dep, -ele_st1/1000)\n",
"\n",
" min_lat = min(min_lat, lat_st2)\n",
" max_lat = max(max_lat, lat_st2)\n",
" min_lon = min(min_lon, lon_st2)\n",
" max_lon = max(max_lon, lon_st2)\n",
" min_dep = min(min_dep, -ele_st2/1000)\n",
" max_dep = max(max_dep, -ele_st2/1000)\n",
"\n",
" record_st[name_st1] = 1 # record this station\n",
" record_st[name_st2] = 1 # record this station\n",
"\n",
" for key_t in ev.cr_dt:\n",
" data = ev.cr_dt[key_t]\n",
" st = st_info[data[0]]\n",
" stid = st.id\n",
" name_st = st.name\n",
" lat_st = st.lat\n",
" lon_st = st.lon\n",
" ele_st = st.ele\n",
" ev2 = ev_info[data[1]]\n",
" evid2 = ev2.id\n",
" name_ev2= ev2.name\n",
" lat_ev2 = ev2.lat\n",
" lon_ev2 = ev2.lon\n",
" dep_ev2 = ev2.dep\n",
" phase = data[2]\n",
" time = data[3]\n",
" try:\n",
" weight_data = data[4]\n",
" except:\n",
" weight_data = 1.0\n",
" doc_src_rec.write('%7d %7d %6s %9.4f %9.4f %9.4f %7d %6s %9.4f %9.4f %9.4f %s %8.4f %7.3f \\n'%(\\\n",
" evid,stid,name_st,lat_st,lon_st,ele_st,evid2,name_ev2,lat_ev2,lon_ev2,dep_ev2,phase,time,weight_data))\n",
"\n",
" min_lat = min(min_lat, lat_st)\n",
" max_lat = max(max_lat, lat_st)\n",
" min_lon = min(min_lon, lon_st)\n",
" max_lon = max(max_lon, lon_st)\n",
" min_dep = min(min_dep, -ele_st/1000)\n",
" max_dep = max(max_dep, -ele_st/1000)\n",
"\n",
" min_lat = min(min_lat, lat_ev2)\n",
" max_lat = max(max_lat, lat_ev2)\n",
" min_lon = min(min_lon, lon_ev2)\n",
" max_lon = max(max_lon, lon_ev2)\n",
" min_dep = min(min_dep, dep_ev2)\n",
" max_dep = max(max_dep, dep_ev2)\n",
"\n",
" record_ev[name_ev2] = 1 # record this station\n",
" record_st[name_st] = 1 # record this station\n",
"\n",
" doc_src_rec.close()\n",
"\n",
" print(\"src_rec.dat has been outputed: %d events, %d stations, %d abs traveltime, %d cs_dif traveltime, %d cr_dif traveltime. \" \\\n",
" %(len(record_ev),len(record_st),Nt_total,Ncs_dt_total,Ncr_dt_total))\n",
" print(\"earthquake and station region, lat: %6.1f - %6.1f, lon: %6.1f - %6.1f, dep: %6.1f - %6.1f\"%(min_lat,max_lat,min_lon,max_lon,min_dep,max_dep) )\n"
]
},
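{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Round-trip sketch: read a catalog, then write it back out. write_src_rec_file\n",
"# calls reorder_src internally, so events without data are dropped from the\n",
"# output file. Both file names here are placeholders.\n",
"import os\n",
"\n",
"if os.path.exists(\"src_rec.dat\"):\n",
"    [ev_info, st_info] = read_src_rec_file(\"src_rec.dat\")\n",
"    write_src_rec_file(\"src_rec_out.dat\", ev_info, st_info)"
]
},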
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: write_src_list_file(fname, ev_info) Output the event list file.\n",
"def write_src_list_file(fname,ev_info):\n",
" doc_ev_list = open(fname,'w')\n",
"\n",
" for key_ev in ev_info:\n",
" ev = ev_info[key_ev]\n",
" evid = ev.id\n",
" lat_ev = ev.lat\n",
" lon_ev = ev.lon\n",
" dep_ev = ev.dep\n",
" mag = ev.mag\n",
" name_ev = ev.name\n",
" if (ev.id == -999): # if the earthquake has no data, do not output it\n",
" continue\n",
" doc_ev_list.write(\"%7d %s %s %9.4f %9.4f %9.4f %5.2f \\n\"%(evid,name_ev,ev.ortime,lat_ev,lon_ev,dep_ev,mag))\n",
" doc_ev_list.close()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: write_rec_list_file(fname, ev_info, st_info) Output the station list file.\n",
"def write_rec_list_file(fname,ev_info,st_info):\n",
" doc_st_list = open(fname,'w')\n",
"\n",
" st_list = {}\n",
" for key_ev in ev_info:\n",
" ev = ev_info[key_ev]\n",
"\n",
" for key_t in ev.t:\n",
" data = ev.t[key_t]\n",
" st = st_info[data[0]]\n",
" name_st = st.name\n",
" lat_st = st.lat\n",
" lon_st = st.lon\n",
" ele_st = st.ele\n",
" if(not name_st in st_list):\n",
" doc_st_list.write(\"%6s %9.4f %9.4f %10.4f \\n\"%(name_st,lat_st,lon_st,ele_st))\n",
" st_list[name_st] = 1\n",
"\n",
" for key_t in ev.cs_dt:\n",
" data = ev.cs_dt[key_t]\n",
" st1 = st_info[data[0]]\n",
" name_st1= st1.name\n",
" lat_st1 = st1.lat\n",
" lon_st1 = st1.lon\n",
" ele_st1 = st1.ele\n",
" st2 = st_info[data[0]]\n",
" name_st2= st2.name\n",
" lat_st2 = st2.lat\n",
" lon_st2 = st2.lon\n",
" ele_st2 = st2.ele\n",
" if(not name_st1 in st_list):\n",
" doc_st_list.write(\"%6s %9.4f %9.4f %10.4f \\n\"%(name_st1,lat_st1,lon_st1,ele_st1))\n",
" st_list[name_st1] = 1\n",
" if(not name_st2 in st_list):\n",
" doc_st_list.write(\"%6s %9.4f %9.4f %10.4f \\n\"%(name_st2,lat_st2,lon_st2,ele_st2))\n",
" st_list[name_st2] = 1\n",
"\n",
" for key_t in ev.cr_dt:\n",
" data = ev.cr_dt[key_t]\n",
" st = st_info[data[0]]\n",
" name_st = st.name\n",
" lat_st = st.lat\n",
" lon_st = st.lon\n",
" ele_st = st.ele\n",
" if(not name_st in st_list):\n",
" doc_st_list.write(\"%6s %9.4f %9.4f %10.4f \\n\"%(name_st,lat_st,lon_st,ele_st))\n",
" st_list[name_st] = 1\n",
"\n",
" doc_st_list.close()"
]
},
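{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Usage sketch: write the event and station list files for a catalog that has\n",
"# been read with read_src_rec_file. Both output file names are placeholders.\n",
"# write_src_list_file(\"ev_list.dat\", ev_info)\n",
"# write_rec_list_file(\"st_list.dat\", ev_info, st_info)"
]
},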
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Functions: read objective function file"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: read_objective_function_file(path)\n",
"def read_objective_function_file(path):\n",
"\n",
" full_curve = []\n",
" location_curve = []\n",
" model_curve = []\n",
"\n",
" with open('%s/objective_function.txt'%(path)) as f:\n",
" for i,line in enumerate(f):\n",
" tmp = line.split(',')\n",
" if (tmp[0].__contains__(\"#\")):\n",
" continue # skip the comment line\n",
" \n",
" iter = int(tmp[0])\n",
" tag = tmp[1]\n",
" obj = float(tmp[2])\n",
" obj_abs = float(tmp[3])\n",
" obj_cs = float(tmp[4])\n",
" obj_cr = float(tmp[5])\n",
" obj_tele = float(tmp[6])\n",
" tmp2 = tmp[7].split('/')\n",
" mean = float(tmp2[0])\n",
" std = float(tmp2[1])\n",
" tmp2 = tmp[8].split('/')\n",
" mean_abs = float(tmp2[0])\n",
" std_abs = float(tmp2[1])\n",
" tmp2 = tmp[9].split('/')\n",
" mean_cs = float(tmp2[0])\n",
" std_cs = float(tmp2[1])\n",
" tmp2 = tmp[10].split('/')\n",
" mean_cr = float(tmp2[0])\n",
" std_cr = float(tmp2[1])\n",
" tmp2 = tmp[11].split('/')\n",
" mean_tele = float(tmp2[0])\n",
" std_tele = float(tmp2[1])\n",
"\n",
" full_curve.append([obj,obj_abs,obj_cs,obj_cr,obj_tele,mean,std,mean_abs,std_abs,mean_cs,std_cs,mean_cr,std_cr,mean_tele,std_tele])\n",
" if tag.__contains__(\"relocation\"):\n",
" location_curve.append([obj,obj_abs,obj_cs,obj_cr,obj_tele,mean,std,mean_abs,std_abs,mean_cs,std_cs,mean_cr,std_cr,mean_tele,std_tele])\n",
" if tag.__contains__(\"model\"):\n",
" model_curve.append([obj,obj_abs,obj_cs,obj_cr,obj_tele,mean,std,mean_abs,std_abs,mean_cs,std_cs,mean_cr,std_cr,mean_tele,std_tele])\n",
"\n",
" return np.array(full_curve),np.array(location_curve),np.array(model_curve)"
]
},
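{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: plot the objective-function history read by the function above.\n",
"# \"OUTPUT_FILES\" is an assumed inversion output directory containing\n",
"# objective_function.txt; column 0 of each curve is the total objective value.\n",
"import os\n",
"import matplotlib.pyplot as plt\n",
"\n",
"if os.path.exists(\"OUTPUT_FILES/objective_function.txt\"):\n",
"    full_curve, location_curve, model_curve = read_objective_function_file(\"OUTPUT_FILES\")\n",
"    plt.plot(full_curve[:,0], 'k.-')\n",
"    plt.xlabel(\"iteration\")\n",
"    plt.ylabel(\"objective function\")\n",
"    plt.show()"
]
},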
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Functions: read inversion grid file"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: read the inversion grid file\n",
"def read_inversion_grid_file(path):\n",
"\n",
" inv_grid_vel = []\n",
" inv_grid_ani = []\n",
"\n",
" switch = False\n",
" igrid = -1\n",
" with open('%s/inversion_grid.txt'%(path)) as f:\n",
" tmp_inv_grid = []\n",
" for i,line in enumerate(f):\n",
"\n",
" # read the number of inversion grid in dep, lat, lon directions\n",
" if(i==0):\n",
" tmp = line.split()\n",
" ndep = int(tmp[1])\n",
" nlines = 3*ndep+1 # The number of rows for each inversion grid is 3*ndep+1\n",
"\n",
" iline = i % nlines\n",
"\n",
" if(iline == 0): # info: number of inversion grid\n",
" tmp = line.split()\n",
" if (int(tmp[0]) > igrid):\n",
" igrid = int(tmp[0])\n",
" else: # change from vel to ani\n",
" switch = True\n",
" igrid = int(tmp[0])\n",
"\n",
" else: # info location of inversion grid\n",
" iline_sub = (iline-1) % 3\n",
" if(iline_sub == 0): # dep\n",
" tmp = line.split()\n",
" dep = float(tmp[0])\n",
" if(iline_sub == 1): # list of lat\n",
" lat_list = line.split()\n",
" if(iline_sub == 2): # list of lon\n",
" lon_list = line.split()\n",
"\n",
" # add inversion grid\n",
" for lat in lat_list:\n",
" for lon in lon_list:\n",
" tmp_inv_grid.append([float(lon), float(lat), dep])\n",
"\n",
" if(iline == nlines-1): # the last line of inversion grid\n",
" if(switch):\n",
" inv_grid_ani.append(tmp_inv_grid)\n",
" else:\n",
" inv_grid_vel.append(tmp_inv_grid)\n",
" tmp_inv_grid = []\n",
"\n",
" return [np.array(inv_grid_vel),np.array(inv_grid_ani)]"
]
},
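{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: scatter-plot the nodes of the first velocity inversion grid.\n",
"# \"OUTPUT_FILES\" is an assumed directory containing inversion_grid.txt;\n",
"# each node is stored as [lon, lat, dep].\n",
"import os\n",
"import matplotlib.pyplot as plt\n",
"\n",
"if os.path.exists(\"OUTPUT_FILES/inversion_grid.txt\"):\n",
"    [inv_grid_vel, inv_grid_ani] = read_inversion_grid_file(\"OUTPUT_FILES\")\n",
"    grid = inv_grid_vel[0] # (N, 3) array of nodes of the first grid\n",
"    plt.scatter(grid[:,0], grid[:,1], c=grid[:,2], cmap=\"jet\", s=5)\n",
"    plt.xlabel(\"Lon\")\n",
"    plt.ylabel(\"Lat\")\n",
"    plt.colorbar(label=\"Dep (km)\")\n",
"    plt.show()"
]
},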
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Functions: for plotting"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: fig_ev_st_distribution_dep(ev_info, st_info) Plot the distribution of the earthquakes and stations, color-coded by earthquake depth.\n",
"import matplotlib.pyplot as plt\n",
"from matplotlib.gridspec import GridSpec\n",
"\n",
"def fig_ev_st_distribution_dep(ev_info,st_info):\n",
"\n",
" [lon_ev,lat_ev,dep_ev,wt_ev] = data_lon_lat_dep_wt_ev(ev_info)\n",
" [lon_st,lat_st,ele_st,wt_st] = data_lon_lat_ele_wt_st(ev_info,st_info)\n",
"\n",
" min_lon = min(min(lon_ev),min(lon_st))\n",
" max_lon = max(max(lon_ev),max(lon_st))\n",
"\n",
" min_lat = min(min(lat_ev),min(lat_st))\n",
" max_lat = max(max(lat_ev),max(lat_st))\n",
"\n",
" max_dep = max(dep_ev)\n",
"\n",
" # Insert a value that does not affect the plot to make the colorbar range look better.\n",
" lon_ev = np.insert(lon_ev, 0, 9999); lat_ev = np.insert(lat_ev, 0, 9999); dep_ev = np.insert(dep_ev, 0, 0);\n",
"\n",
" fig = plt.figure(figsize=(12,12))\n",
" gridspace = GridSpec(12,12,figure = fig)\n",
"\n",
" xrange = max_lon - min_lon + 1.0\n",
" yrange = max_lat - min_lat + 1.0\n",
"\n",
" if (xrange > yrange):\n",
" fig_x_size = 6\n",
" fig_y_size = round(6*yrange/xrange)\n",
" else:\n",
" fig_x_size = round(6*xrange/yrange)\n",
" fig_y_size = 6\n",
"\n",
"\n",
" ax1 = fig.add_subplot(gridspace[0:fig_y_size,0:fig_x_size])\n",
"\n",
" bar_ev = ax1.scatter(lon_ev,lat_ev,c=dep_ev,cmap=\"jet\",label = \"src\",s = 3)\n",
" # ax1.plot(lon_st,lat_st,'rv',label = \"rec\",markersize = 6)\n",
" bar_st = ax1.scatter(lon_st,lat_st,c=\"red\",label = \"rec\",s = 100,marker='v',edgecolors='white')\n",
"\n",
" ax1.legend(fontsize = 14)\n",
" ax1.tick_params(axis='x',labelsize=18)\n",
" ax1.tick_params(axis='y',labelsize=18)\n",
" ax1.set_xlabel('Lon',fontsize=18)\n",
" ax1.set_ylabel('Lat',fontsize=18)\n",
" ax1.set_xlim((min_lon - (max_lon - min_lon)*0.1,max_lon + (max_lon - min_lon)*0.1))\n",
" ax1.set_ylim((min_lat - (max_lat - min_lat)*0.1,max_lat + (max_lat - min_lat)*0.1))\n",
"\n",
"\n",
" ax2 = fig.add_subplot(gridspace[0:fig_y_size, fig_x_size+1 : fig_x_size+3])\n",
"\n",
" ax2.scatter(dep_ev,lat_ev,c=dep_ev,cmap=\"jet\",label = \"src\",s = 3)\n",
"\n",
" ax2.tick_params(axis='x',labelsize=18)\n",
" ax2.tick_params(axis='y',labelsize=18)\n",
" ax2.set_xlabel('Dep',fontsize=18)\n",
" ax2.set_ylabel('Lat',fontsize=18)\n",
" ax2.set_xlim((-max_dep*0.05,max_dep*1.1))\n",
" ax2.set_ylim((min_lat - (max_lat - min_lat)*0.1,max_lat + (max_lat - min_lat)*0.1))\n",
"\n",
"\n",
" ax3 = fig.add_subplot(gridspace[fig_y_size+1:fig_y_size+3,0:fig_x_size])\n",
"\n",
" ax3.scatter(lon_ev,dep_ev,c=dep_ev,cmap=\"jet\",label = \"src\",s = 3)\n",
"\n",
" ax3.tick_params(axis='x',labelsize=18)\n",
" ax3.tick_params(axis='y',labelsize=18)\n",
" ax3.set_xlabel('Lon',fontsize=18)\n",
" ax3.set_ylabel('Dep',fontsize=18)\n",
" ax3.set_xlim((min_lon - (max_lon - min_lon)*0.1,max_lon + (max_lon - min_lon)*0.1))\n",
" ax3.set_ylim((-max_dep*0.05,max_dep*1.1))\n",
" ax3.invert_yaxis()\n",
"\n",
" # Place the colorbar on a new axis.\n",
" ax4 = fig.add_subplot(gridspace[fig_y_size+2:fig_y_size+3,fig_x_size+1:fig_x_size+3])\n",
" cbar1 = plt.colorbar(bar_ev, ax=ax4,orientation='horizontal')\n",
" cbar1.set_label('Depth of earthquakes',fontsize=16)\n",
" cbar1.ax.tick_params(axis='x', labelsize=16) # Colorbar font size.\n",
"\n",
" # Hide the borders of the axes.\n",
" ax4.spines['top'].set_visible(False)\n",
" ax4.spines['right'].set_visible(False)\n",
" ax4.spines['bottom'].set_visible(False)\n",
" ax4.spines['left'].set_visible(False)\n",
"\n",
" # Hide the tick values of the axes.\n",
" ax4.set_xticks([])\n",
" ax4.set_yticks([])\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Function: fig_ev_st_distribution_wt(ev_info, st_info) Plot the distribution of the earthquakes and stations, color-coded by weight.\n",
"import matplotlib.pyplot as plt\n",
"from matplotlib.gridspec import GridSpec\n",
"from matplotlib.colors import ListedColormap\n",
"from mpl_toolkits.axes_grid1 import make_axes_locatable\n",
"import math\n",
"\n",
"def fig_ev_st_distribution_wt(ev_info,st_info):\n",
"\n",
"\n",
" [lon_ev,lat_ev,dep_ev,wt_ev] = data_lon_lat_dep_wt_ev(ev_info)\n",
" [lon_st,lat_st,ele_st,wt_st] = data_lon_lat_ele_wt_st(ev_info,st_info)\n",
"\n",
" # Insert a value that does not affect the plot to make the colorbar range look better.\n",
" lon_ev = np.insert(lon_ev, 0, lon_ev[0]); lat_ev = np.insert(lat_ev, 0, lat_ev[0]); dep_ev = np.insert(dep_ev, 0, dep_ev[0]); wt_ev = np.insert(wt_ev, 0, 0.0)\n",
" lon_ev = np.insert(lon_ev, 0, lon_ev[0]); lat_ev = np.insert(lat_ev, 0, lat_ev[0]); dep_ev = np.insert(dep_ev, 0, dep_ev[0]); wt_ev = np.insert(wt_ev, 0, 1.0)\n",
" lon_st = np.insert(lon_st, 0, lon_st[0]); lat_st = np.insert(lat_st, 0, lat_st[0]); ele_st = np.insert(ele_st, 0, ele_st[0]); wt_st = np.insert(wt_st, 0, 0.0)\n",
" lon_st = np.insert(lon_st, 0, lon_st[0]); lat_st = np.insert(lat_st, 0, lat_st[0]); ele_st = np.insert(ele_st, 0, ele_st[0]); wt_st = np.insert(wt_st, 0, 1.0)\n",
"\n",
" min_lon = min(min(lon_ev),min(lon_st))\n",
" max_lon = max(max(lon_ev),max(lon_st))\n",
"\n",
" min_lat = min(min(lat_ev),min(lat_st))\n",
" max_lat = max(max(lat_ev),max(lat_st))\n",
"\n",
" max_dep = max(dep_ev)\n",
"\n",
" fig = plt.figure(figsize=(12,12))\n",
" gridspace = GridSpec(12,12,figure = fig)\n",
"\n",
" xrange = max_lon - min_lon + 1.0\n",
" yrange = max_lat - min_lat + 1.0\n",
"\n",
" if (xrange > yrange):\n",
" fig_x_size = 6\n",
" fig_y_size = round(6*yrange/xrange)\n",
" else:\n",
" fig_x_size = round(6*xrange/yrange)\n",
" fig_y_size = 6\n",
"\n",
"\n",
" ax1 = fig.add_subplot(gridspace[0:fig_y_size,0:fig_x_size])\n",
"\n",
" bar_ev = ax1.scatter(lon_ev,lat_ev,c=wt_ev,cmap=\"jet\",label = \"src\",s = 3)\n",
" bar_st = ax1.scatter(lon_st,lat_st,c=wt_st,cmap=\"jet\",label = \"rec\",s = 100,marker='^',edgecolors='white')\n",
"\n",
" ax1.legend(fontsize = 14)\n",
" ax1.tick_params(axis='x',labelsize=18)\n",
" ax1.tick_params(axis='y',labelsize=18)\n",
" ax1.set_xlabel('Lon',fontsize=18)\n",
" ax1.set_ylabel('Lat',fontsize=18)\n",
" ax1.set_xlim((min_lon - (max_lon - min_lon)*0.1,max_lon + (max_lon - min_lon)*0.1))\n",
" ax1.set_ylim((min_lat - (max_lat - min_lat)*0.1,max_lat + (max_lat - min_lat)*0.1))\n",
"\n",
"\n",
" ax2 = fig.add_subplot(gridspace[0:fig_y_size, fig_x_size+1 : fig_x_size+3])\n",
"\n",
" ax2.scatter(dep_ev,lat_ev,c=wt_ev,cmap=\"jet\",label = \"src\",s = 3)\n",
"\n",
" ax2.tick_params(axis='x',labelsize=18)\n",
" ax2.tick_params(axis='y',labelsize=18)\n",
" ax2.set_xlabel('Dep',fontsize=18)\n",
" ax2.set_ylabel('Lat',fontsize=18)\n",
" ax2.set_xlim((-max_dep*0.05,max_dep*1.1))\n",
" ax2.set_ylim((min_lat - (max_lat - min_lat)*0.1,max_lat + (max_lat - min_lat)*0.1))\n",
"\n",
"\n",
" ax3 = fig.add_subplot(gridspace[fig_y_size+1:fig_y_size+3,0:fig_x_size])\n",
"\n",
" ax3.scatter(lon_ev,dep_ev,c=wt_ev,cmap=\"jet\",label = \"src\",s = 3)\n",
"\n",
" ax3.tick_params(axis='x',labelsize=18)\n",
" ax3.tick_params(axis='y',labelsize=18)\n",
" ax3.set_xlabel('Lon',fontsize=18)\n",
" ax3.set_ylabel('Dep',fontsize=18)\n",
" ax3.set_xlim((min_lon - (max_lon - min_lon)*0.1,max_lon + (max_lon - min_lon)*0.1))\n",
" ax3.set_ylim((-max_dep*0.05,max_dep*1.1))\n",
" ax3.invert_yaxis()\n",
"\n",
" # Place the colorbar on a new axis.\n",
" ax4 = fig.add_subplot(gridspace[fig_y_size+2:fig_y_size+3,fig_x_size+1:fig_x_size+3])\n",
" cbar1 = plt.colorbar(bar_st, ax=ax4,orientation='horizontal')\n",
" cbar1.set_label('Weight of stations',fontsize=16)\n",
" cbar1.ax.tick_params(axis='x', labelsize=16) # colorbar font size.\n",
" # Hide the borders of the axes.\n",
" ax4.spines['top'].set_visible(False)\n",
" ax4.spines['right'].set_visible(False)\n",
" ax4.spines['bottom'].set_visible(False)\n",
" ax4.spines['left'].set_visible(False)\n",
"\n",
" # Hide the tick values of the axes.\n",
" ax4.set_xticks([])\n",
" ax4.set_yticks([])\n",
"\n",
" # Place the colorbar on a new axis.\n",
" ax5 = fig.add_subplot(gridspace[fig_y_size+1:fig_y_size+2,fig_x_size+1:fig_x_size+3])\n",
" cbar1 = plt.colorbar(bar_ev, ax=ax5,orientation='horizontal')\n",
" cbar1.set_label('Weight of earthquakes',fontsize=16)\n",
" cbar1.ax.tick_params(axis='x', labelsize=16) # colorbar font size.\n",
" # Hide the borders of the axes.\n",
" ax5.spines['top'].set_visible(False)\n",
" ax5.spines['right'].set_visible(False)\n",
" ax5.spines['bottom'].set_visible(False)\n",
" ax5.spines['left'].set_visible(False)\n",
"\n",
" # Hide the tick values of the axes.\n",
" ax5.set_xticks([])\n",
" ax5.set_yticks([])\n"
]
},
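{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Usage sketch for the two distribution plots above. They need ev_info and\n",
"# st_info from read_src_rec_file, plus the data_lon_lat_* helper functions\n",
"# defined earlier in this notebook.\n",
"# fig_ev_st_distribution_dep(ev_info, st_info) # color events by depth\n",
"# fig_ev_st_distribution_wt(ev_info, st_info) # color events/stations by weight"
]
},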
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot and function: plot the distance-time scatter plot, remove the outliers.\n",
"# Limit the data within the range defined by the line time = dis * slope + intercept and the bounds up and down.\n",
"# Remove outliers, only retain data satisfying: slope * dis + intercept + down < time < slope * dis + intercept + up.\n",
"\n",
"def fig_data_plot_remove_outliers(ev_info,st_info,slope,intercept,up,down,dis_min,dis_max):\n",
"\n",
" fig = plt.figure(figsize=(10,10))\n",
" gridspace = GridSpec(6,6,figure = fig)\n",
" ax2 = fig.add_subplot(gridspace[0:6, 0:6])\n",
"\n",
" # plot original data\n",
" [dis_obs,time_obs] = data_dis_time(ev_info,st_info)\n",
" ax2.plot(dis_obs,time_obs,'r.',markersize=1.5,label = \"discarded\")\n",
"\n",
" # remove outliers, only retain data satisfying: slope * dis + intercept + down < time < slope * dis + intercept + up\n",
" ev_info = limit_data_residual(ev_info,st_info,slope,intercept,up,down)\n",
"\n",
" [dis_obs,time_obs] = data_dis_time(ev_info,st_info)\n",
" ax2.plot(dis_obs,time_obs,'b.',markersize=1.5,label = \"retained\")\n",
"\n",
" ax2.plot([dis_min,dis_max],[slope*dis_min+intercept+up,slope*dis_max+intercept+up],'b-',linewidth=2)\n",
" ax2.plot([dis_min,dis_max],[slope*dis_min+intercept+down,slope*dis_max+intercept+down],'b-',linewidth=2)\n",
" ax2.plot([dis_min,dis_max],[slope*dis_min+intercept,slope*dis_max+intercept],'k-',linewidth=2)\n",
"\n",
" ax2.legend(fontsize = 14)\n",
" ax2.tick_params(axis='x',labelsize=18)\n",
" ax2.tick_params(axis='y',labelsize=18)\n",
" ax2.set_xlabel('Distance (km)',fontsize=18)\n",
" ax2.set_ylabel('Traveltime',fontsize=18)\n",
" ax2.set_xlim((dis_min,dis_max))\n",
" ax2.set_ylim((intercept+down-5,slope*dis_max+intercept+up+5))\n",
"\n",
" return ev_info"
]
},
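{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Usage sketch: keep only picks inside a band around a reference line\n",
"# time = slope*dis + intercept. For crustal Pg at ~6 km/s the slope is roughly\n",
"# 1/6 s/km; all values below are illustrative assumptions, not prescriptions.\n",
"# ev_info = fig_data_plot_remove_outliers(ev_info, st_info,\n",
"#                                         slope=1/6.0, intercept=1.0,\n",
"#                                         up=5.0, down=-5.0,\n",
"#                                         dis_min=0, dis_max=300)"
]
},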
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Plot: distance-time scatter plot of given phases.\n",
"\n",
"def fig_data_plot_phase(ev_info,st_info,phase_list,color_list,dis_min,dis_max):\n",
"\n",
" [dis_obs_phase,time_obs_phase] = data_dis_time_phase(ev_info,st_info,phase_list)\n",
"\n",
" regression = {}\n",
" # Calculate the least squares y = ax+b\n",
" for key_phase in phase_list:\n",
" X = dis_obs_phase[key_phase]\n",
" Y = time_obs_phase[key_phase]\n",
"\n",
" if(len(X)>20):\n",
" regression[key_phase] = linear_regression(X,Y)\n",
" else:\n",
" print(\"No enough data: %d, for %s\"%(len(X),key_phase))\n",
" regression[key_phase] = [0,0,0]\n",
"\n",
"\n",
" # draw\n",
" fig = plt.figure(figsize=(10,10))\n",
" gridspace = GridSpec(6,6,figure = fig)\n",
" ax2 = fig.add_subplot(gridspace[0:6, 0:6])\n",
" y1 = 99999; y2 = -99999\n",
"\n",
" # scatter plot\n",
" for iphase in range(len(phase_list)):\n",
" phase = phase_list[iphase]\n",
" color = color_list[iphase]\n",
" ax2.plot(dis_obs_phase[phase],time_obs_phase[phase],'%s.'%(color),markersize=1)\n",
"\n",
" # linear regression plot\n",
" for iphase in range(len(phase_list)):\n",
" phase = phase_list[iphase]\n",
" color = color_list[iphase]\n",
" (slope,intercept,SEE)= regression[phase]\n",
" ax2.plot([dis_min,dis_max],[dis_min*slope+intercept,dis_max*slope+intercept],'%s-'%(color),linewidth=2,label = \"%s: a,b,SEE=%5.2f,%5.2f,%5.2f\"%(phase,slope,intercept,SEE))\n",
" y1 = min(y1,intercept-5)\n",
" y2 = max(y2,dis_max*slope+intercept+5)\n",
"\n",
" ax2.legend(fontsize = 14)\n",
" ax2.tick_params(axis='x',labelsize=18)\n",
" ax2.tick_params(axis='y',labelsize=18)\n",
" ax2.set_xlabel('Distance (km)',fontsize=18)\n",
" ax2.set_ylabel('Traveltime (s)',fontsize=18)\n",
" ax2.set_xlim((dis_min,dis_max))\n",
"\n",
"\n",
" for iphase in range(len(phase_list)):\n",
" try:\n",
" y1 = min(y1,min(time_obs_phase[phase]))\n",
" y2 = max(y2,max(time_obs_phase[phase]))\n",
" except:\n",
" pass\n",
" ax2.set_ylim((y1,y2))\n",
"\n",
" title = \"\"\n",
" for phase in dis_obs_phase:\n",
" title = title + \"%s(%d) \"%(phase,len(dis_obs_phase[phase]))\n",
"\n",
" ax2.set_title(title)\n",
"\n",
" print(\"a is slope, b is intercept, SEE is standard error of estimate\")"
]
},
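{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Usage sketch: distance-time scatter plot with one linear regression per\n",
"# phase. Colors use matplotlib shorthand codes; the function relies on\n",
"# data_dis_time_phase and linear_regression defined earlier in this notebook.\n",
"# fig_data_plot_phase(ev_info, st_info, [\"Pg\", \"Pn\"], [\"b\", \"r\"], 0, 600)"
]
},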
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Functions: results analysis and evaluation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# plot: plot the residual histogram of the initial and final model\n",
"def fig_residual_histogram(fn_syn_init,fn_syn_final,fn_obs,range_l,range_r,Nbar,tag1 = \"initial\",tag2 = \"final\"):\n",
"\n",
" # read synthetic traveltime data in the initial model\n",
" [ev_info_syn_init, st_info_syn_init] = read_src_rec_file(fn_syn_init)\n",
" time_syn_init = data_dis_time(ev_info_syn_init,st_info_syn_init)[1]\n",
"\n",
" # read synthetic traveltime data in the final model\n",
" [ev_info_syn_final, st_info_syn_final] = read_src_rec_file(fn_syn_final)\n",
" time_syn_final = data_dis_time(ev_info_syn_final,st_info_syn_final)[1]\n",
"\n",
" # read observed traveltime data\n",
" [ev_info_obs, st_info_obs] = read_src_rec_file(fn_obs)\n",
" time_obs = data_dis_time(ev_info_obs,st_info_obs)[1]\n",
"\n",
" fig = plt.figure(figsize=(6,6))\n",
" gridspace = GridSpec(6,6,figure = fig)\n",
"\n",
" ax2 = fig.add_subplot(gridspace[0:6, 0:6])\n",
"\n",
" bins=np.linspace(range_l,range_r,Nbar)\n",
" error_init = time_syn_init - time_obs\n",
" error_final = time_syn_final - time_obs\n",
"\n",
" hist_init, _, _ = ax2.hist(error_init,bins=bins,histtype='step', edgecolor = \"red\", linewidth = 2,\n",
" label = \"%s: std = %5.3f s, mean = %5.3f s\"%(tag1,np.std(error_init),np.mean(error_init)))\n",
"\n",
" hist_final, _, _ = ax2.hist(error_final,bins=bins,alpha = 0.5, color = \"blue\",\n",
" label = \"%s: std = %5.3f s, mean = %5.3f s\"%(tag2,np.std(error_final),np.mean(error_final)))\n",
"\n",
" print(\"residual for \",tag1,\" model is: \",\"mean: \",np.mean(error_init),\"sd: \",np.std(error_init))\n",
" print(\"residual for \",tag2,\" model is: \",\"mean: \",np.mean(error_final),\"sd: \",np.std(error_final))\n",
" ax2.legend(fontsize=14)\n",
"\n",
" ax2.set_xlim(range_l - abs(range_l)*0.1,range_r + abs(range_r)*0.1)\n",
" ax2.set_ylim(0,1.3*max(max(hist_init),max(hist_final)))\n",
"\n",
" ax2.tick_params(axis='x',labelsize=18)\n",
" ax2.tick_params(axis='y',labelsize=18)\n",
" ax2.set_ylabel('Number of data',fontsize=18)\n",
" ax2.set_xlabel('Traveltime residuals (s)',fontsize=18)\n",
" ax2.set_title(\"$t_{syn} - t_{obs}$\",fontsize=18)\n",
" ax2.grid()\n"
]
},
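{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Usage sketch: compare residuals of synthetic traveltimes computed in the\n",
"# initial and final models against observations. The three file names are\n",
"# placeholders for forward-modeling outputs and the observed catalog.\n",
"# Note: the element-wise subtraction assumes all three files contain the\n",
"# same picks in the same order.\n",
"# fig_residual_histogram(\"src_rec_syn_init.dat\", \"src_rec_syn_final.dat\",\n",
"#                        \"src_rec_obs.dat\", -2.0, 2.0, 41)"
]
},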
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# plot: plot the residual histogram of the initial and final model\n",
"def fig_data_difference_histogram(fn_A,fn_B,range_l,range_r,Nbar):\n",
"\n",
" # read data A\n",
" [ev_info_A, st_info_A] = read_src_rec_file(fn_A)\n",
"\n",
" # read data B\n",
" [ev_info_B, st_info_B] = read_src_rec_file(fn_B)\n",
"\n",
" # absolute traveltime residual\n",
" error_t = []\n",
" for key_ev in ev_info_A:\n",
" for key_t in ev_info_A[key_ev].t:\n",
" data_A = ev_info_A[key_ev].t[key_t]\n",
" if (key_ev in ev_info_B and key_t in ev_info_B[key_ev].t):\n",
" data_B = ev_info_B[key_ev].t[key_t]\n",
" error_t.append(data_A[2] - data_B[2])\n",
"\n",
" # common-source differential traveltime residual\n",
" error_cs_dt = []\n",
" for key_ev in ev_info_A:\n",
" for key_dt in ev_info_A[key_ev].cs_dt:\n",
" data_A = ev_info_A[key_ev].cs_dt[key_dt]\n",
" if (key_ev in ev_info_B and key_dt in ev_info_B[key_ev].cs_dt):\n",
" data_B = ev_info_B[key_ev].cs_dt[key_dt]\n",
" error_cs_dt.append(data_A[3] - data_B[3])\n",
" else:\n",
" print(key_ev,key_dt)\n",
"\n",
" # common-receiver differential traveltime residual\n",
" error_cr_dt = []\n",
" for key_ev in ev_info_A:\n",
" for key_dt in ev_info_A[key_ev].cr_dt:\n",
" data_A = ev_info_A[key_ev].cr_dt[key_dt]\n",
" if (key_ev in ev_info_B and key_dt in ev_info_B[key_ev].cr_dt):\n",
" data_B = ev_info_B[key_ev].cr_dt[key_dt]\n",
" error_cr_dt.append(data_A[3] - data_B[3])\n",
"\n",
" # plot\n",
" fig = plt.figure(figsize=(14,6))\n",
" gridspace = GridSpec(6,14,figure = fig)\n",
"\n",
"\n",
" ax2 = fig.add_subplot(gridspace[0:6, 0:6])\n",
" bins=np.linspace(range_l,range_r,Nbar)\n",
" # hist_t, _, _ = ax2.hist(error_t,bins=bins,histtype='step', edgecolor = \"red\", linewidth = 2,\n",
" # label = \"noise: std = %5.3f s, mean = %5.3f s\"%(np.std(error_t),np.mean(error_t)))\n",
" hist_t, _, _ = ax2.hist(error_t,bins=bins,alpha = 0.5, color = \"blue\",\n",
" label = \"noise: std = %5.3f s, mean = %5.3f s\"%(np.std(error_t),np.mean(error_t)))\n",
"\n",
" ax2.legend(fontsize=14)\n",
" ax2.set_xlim(range_l - abs(range_l)*0.1,range_r + abs(range_r)*0.1)\n",
" try:\n",
" ax2.set_ylim(0,1.3*max(hist_t))\n",
" except:\n",
" ax2.set_ylim(0,1.0)\n",
" ax2.tick_params(axis='x',labelsize=18)\n",
" ax2.tick_params(axis='y',labelsize=18)\n",
" ax2.set_ylabel('Number of data',fontsize=18)\n",
" ax2.set_xlabel('Noise (s)',fontsize=18)\n",
" ax2.set_title(\"Noise of traveltime\",fontsize=18)\n",
"\n",
"\n",
" ax3 = fig.add_subplot(gridspace[0:6,8:14])\n",
" bins=np.linspace(range_l,range_r,Nbar)\n",
" # hist_t, _, _ = ax3.hist(error_t,bins=bins,histtype='step', edgecolor = \"red\", linewidth = 2,\n",
" # label = \"noise: std = %5.3f s, mean = %5.3f s\"%(np.std(error_t),np.mean(error_t)))\n",
" hist_cs_dt, _, _ = ax3.hist(error_cs_dt,bins=bins,alpha = 0.5, color = \"blue\",\n",
" label = \"noise: std = %5.3f s, mean = %5.3f s\"%(np.std(error_cs_dt),np.mean(error_cs_dt)))\n",
"\n",
" ax3.legend(fontsize=14)\n",
" ax3.set_xlim(range_l - abs(range_l)*0.1,range_r + abs(range_r)*0.1)\n",
" try:\n",
" ax3.set_ylim(0,1.3*max(hist_cs_dt))\n",
" except:\n",
" ax3.set_ylim(0,1.0)\n",
" ax3.tick_params(axis='x',labelsize=18)\n",
" ax3.tick_params(axis='y',labelsize=18)\n",
" ax3.set_ylabel('Number of data',fontsize=18)\n",
" ax3.set_xlabel('Noise (s)',fontsize=18)\n",
" ax3.set_title(\"Noise of differential traveltime\",fontsize=18)"
]
}
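,
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Usage sketch: histogram of data-by-data differences between two src_rec\n",
"# files, e.g. a noise-added catalog versus the noise-free one. File names\n",
"# are placeholders.\n",
"# fig_data_difference_histogram(\"src_rec_obs_noisy.dat\", \"src_rec_obs.dat\", -1.0, 1.0, 41)"
]
}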
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}