Skip to content

Commit

Permalink
Add reading src_rec_file from url
Browse files Browse the repository at this point in the history
  • Loading branch information
xumi1993 committed Nov 12, 2024
1 parent de447a8 commit f12f70e
Show file tree
Hide file tree
Showing 4 changed files with 60 additions and 6 deletions.
3 changes: 1 addition & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,13 +29,12 @@ dependencies = [
"ruamel.yaml",
"xarray",
"tqdm",
"obspy",
"pyproj",
"scikit-learn",
]

[project.urls]
"Homepage" = "https://your.project.homepage"
"Homepage" = "https://tomoatt.com/"


[tool.hatch.version]
Expand Down
6 changes: 4 additions & 2 deletions pytomoatt/io/seispy.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,10 @@
import pandas as pd
import numpy as np
from os.path import join, basename, dirname
from obspy.io.sac import SACTrace

try:
from obspy.io.sac import SACTrace
except ImportError:
raise ImportError('ObsPy is required for Seispy I/O')

class Seispy:
def __init__(self, rf_path:str):
Expand Down
33 changes: 31 additions & 2 deletions pytomoatt/src_rec.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,13 @@
import pandas as pd
from .distaz import DistAZ
from .setuplog import SetupLog
from .utils.src_rec_utils import define_rec_cols, setup_rec_points_dd, get_rec_points_types, update_position
from .utils.src_rec_utils import define_rec_cols, setup_rec_points_dd, \
get_rec_points_types, update_position, \
download_src_rec_file
from sklearn.metrics.pairwise import haversine_distances
import copy
from io import StringIO
import os

pd.options.mode.chained_assignment = None # default='warn'

Expand Down Expand Up @@ -220,7 +223,15 @@ def read(cls, fname: str, dist_in_data=False, name_net_and_sta=False, **kwargs):
:rtype: SrcRec
"""
sr = cls(fname=fname, **kwargs)
alldf = pd.read_table(
if not os.path.exists(fname):
sr.log.SrcReclog.info("Downloading src_rec file from {}".format(fname))
src_rec_data = download_src_rec_file(fname)
if src_rec_data is None:
sr.log.SrcReclog.error("No src_rec file found")
return sr
else:
src_rec_data = fname
alldf = pd.read_csv(
fname, sep=r"\s+", header=None, comment="#", low_memory=False
)

Expand Down Expand Up @@ -1491,6 +1502,24 @@ def add_noise(self, range_in_sec=0.1, mean_in_sec=0.0, shape="gaussian"):
)
rec_type["tt"] += noise

def add_noise_to_source(self, lat_pert=0.1, lon_pert=0.1, depth_pert=10, tau_pert=0.5):
    """Add uniform random noise to source locations and origin times.

    Each source is perturbed independently by a draw from
    ``uniform(-pert, +pert)`` for every quantity.

    :param lat_pert: Maximum perturbation on latitude in degree, defaults to 0.1
    :type lat_pert: float, optional
    :param lon_pert: Maximum perturbation on longitude in degree, defaults to 0.1
    :type lon_pert: float, optional
    :param depth_pert: Maximum perturbation on depth in km, defaults to 10
    :type depth_pert: float, optional
    :param tau_pert: Maximum perturbation on origin time in sec, defaults to 0.5
    :type tau_pert: float, optional
    """
    self.log.SrcReclog.info("Adding noise on source location...")
    n_src = self.src_points.shape[0]
    self.src_points["evla"] += np.random.uniform(-lat_pert, lat_pert, n_src)
    self.src_points["evlo"] += np.random.uniform(-lon_pert, lon_pert, n_src)
    self.src_points["evdp"] += np.random.uniform(-depth_pert, depth_pert, n_src)
    # unit='s' is required: pd.to_timedelta defaults to unit='ns', which would
    # silently shrink the requested perturbation by a factor of 1e9.
    self.src_points["origin_time"] += pd.to_timedelta(
        np.random.uniform(-tau_pert, tau_pert, n_src), unit="s"
    )

def rotate(self, clat:float, clon:float, angle:float, reverse=False):
"""Rotate sources and receivers around a center point
Expand Down
24 changes: 24 additions & 0 deletions pytomoatt/utils/src_rec_utils.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
import urllib3
import io


def define_rec_cols(dist_in_data, name_net_and_sta):
if not dist_in_data:
Expand Down Expand Up @@ -64,6 +67,7 @@ def define_rec_cols(dist_in_data, name_net_and_sta):
]
return columns, last_col


def get_rec_points_types(dist):
common_type = {
"src_index": int,
Expand All @@ -80,6 +84,7 @@ def get_rec_points_types(dist):
common_type["dist_deg"] = float
return common_type


def setup_rec_points_dd(type='cs'):
if type == 'cs':
columns = [
Expand Down Expand Up @@ -151,6 +156,7 @@ def setup_rec_points_dd(type='cs'):
raise ValueError('type should be either "cs" or "cr"')
return columns, data_type


def update_position(sr):
sr.src_points = sr.src_points.merge(
sr.sources[['event_id', 'evlo', 'evla']],
Expand Down Expand Up @@ -213,3 +219,21 @@ def update_position(sr):
sr.rec_points_cr['evlo2'] = sr.rec_points_cr['evlo']
sr.rec_points_cr['evla2'] = sr.rec_points_cr['evla']
sr.rec_points_cr.drop(columns=['evlo', 'evla', 'event_id'], inplace=True)


def download_src_rec_file(url):
    """Download a src_rec file from a URL.

    :param url: HTTP(S) URL of the src_rec text file
    :type url: str
    :return: in-memory text buffer positioned at the start, or ``None``
             if the server did not answer with status 200
    :rtype: io.StringIO or None
    """
    http = urllib3.PoolManager()
    response = http.request('GET', url, preload_content=False)
    try:
        if response.status != 200:
            return None
        raw = io.BytesIO()
        while True:
            chunk = response.read(1024)
            if not chunk:
                break
            raw.write(chunk)
        # Decode the payload in one pass AFTER all bytes are collected:
        # decoding each 1024-byte chunk separately raises UnicodeDecodeError
        # whenever a multi-byte UTF-8 character straddles a chunk boundary.
        data = io.StringIO(raw.getvalue().decode('utf-8'))
        data.seek(0)  # rewind so callers can read from the beginning
        return data
    finally:
        # Always return the connection to the pool, even on error paths.
        response.release_conn()

0 comments on commit f12f70e

Please sign in to comment.