Merge pull request #17 from MIGG-NTU/devel
Devel
xumi1993 authored Jul 14, 2024
2 parents 13d5e77 + e635014 commit 167220a
Showing 19 changed files with 116,795 additions and 258 deletions.
15 changes: 6 additions & 9 deletions .github/workflows/build-test-conda.yml
@@ -10,21 +10,17 @@ jobs:
max-parallel: 5
matrix:
os: [ubuntu-latest, macos-latest]
python-version: ["3.9", "3.10", "3.11"]
python-version: ["3.9", "3.10", "3.11", "3.12"]
runs-on: ${{matrix.os}}

steps:
- uses: actions/checkout@v3
with:
ref: devel
- name: Set up Python
uses: actions/setup-python@v3
uses: conda-incubator/setup-miniconda@v3
with:
python-version: ${{matrix.python-version}}
- name: Add conda to system path
run: |
# $CONDA is an environment variable pointing to the root of the miniconda directory
echo $CONDA/bin >> $GITHUB_PATH
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
conda install numpy scipy h5py pyyaml pandas xarray
@@ -36,11 +32,12 @@ jobs:
with:
fetch-depth: 1
- name: Set up Python
uses: actions/setup-python@v3
uses: conda-incubator/setup-miniconda@v3
with:
python-version: '3.11'
python-version: "3.12"
- name: Install dependencies
run: |
conda install numpy scipy h5py pyyaml pandas xarray
python -m pip install --upgrade pip
pip install .
pip install pytest pytest-cov
4 changes: 3 additions & 1 deletion .gitignore
@@ -132,4 +132,6 @@ dmypy.json
crust_model/
*.xmf

test_src_rec_eg
test_src_rec_eg
test_src_rec_sd.ipynb
test_src_rec_config.dat
36 changes: 22 additions & 14 deletions pytomoatt/attarray.py
@@ -6,49 +6,57 @@
class Dataset(xarray.Dataset):
"""Sub class of `xarray.Dataset <https://docs.xarray.dev/en/stable/generated/xarray.Dataset.html>`__
"""
__slots__ = ()
def __init__(self, data_vars, coords, attrs=None) -> None:

__slots__ = ()
super().__init__(data_vars, coords, attrs)

@classmethod
def from_xarray(cls, dataset):
ds = cls(dataset.data_vars, dataset.coords)
return ds

def interp_dep(self, depth:float, field:str):
def interp_dep(self, depth:float, field:str, samp_interval=0):
"""Interpolate map view with given depth
:param depth: Depth in km
:type depth: float
:param field: Field name in ATT model data
:type field: str
:param samp_interval: Sampling interval, defaults to 0
:type samp_interval: int, optional
:return: xyz data with 3 columns [lon, lat, value]
:rtype: numpy.ndarray
"""
if field not in self.data_vars.keys():
raise ValueError('Error field name of {}'.format(field))
idx = np.where(self.coords['dep'].values == depth)[0]
# resample self of xarray with given interval of ``samp_interval``

if samp_interval > 0:
resampled = self.isel(t=slice(0, None, samp_interval), p=slice(0, None, samp_interval))
else:
resampled = self
idx = np.where(resampled.coords['dep'].values == depth)[0]
if idx.size > 0:
offset = 0
data = np.zeros([self.coords['lat'].size*self.coords['lon'].size, 3])
for i, la in enumerate(self.coords['lat'].values):
for j, lo in enumerate(self.coords['lon'].values):
data[offset] = [lo, la, self.data_vars[field].values[idx[0], i, j]]
data = np.zeros([resampled.coords['lat'].size*resampled.coords['lon'].size, 3])
for i, la in enumerate(resampled.coords['lat'].values):
for j, lo in enumerate(resampled.coords['lon'].values):
data[offset] = [lo, la, resampled.data_vars[field].values[idx[0], i, j]]
offset += 1
else:
rad = 6371 - depth
points = np.zeros([self.coords['lat'].size*self.coords['lon'].size, 4])
points = np.zeros([resampled.coords['lat'].size*resampled.coords['lon'].size, 4])
offset = 0
for _, la in enumerate(self.coords['lat'].values):
for _, lo in enumerate(self.coords['lon'].values):
for _, la in enumerate(resampled.coords['lat'].values):
for _, lo in enumerate(resampled.coords['lon'].values):
points[offset] = [rad, la, lo, 0.]
offset += 1
points[:, 3] = interpn(
(self.coords['rad'].values,
self.coords['lat'].values,
self.coords['lon'].values),
self.data_vars[field].values,
(resampled.coords['rad'].values,
resampled.coords['lat'].values,
resampled.coords['lon'].values),
resampled.data_vars[field].values,
points[:, 0:3]
)
data = points[:, [2, 1, 3]]
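The new samp_interval argument decimates the latitude (t) and longitude (p) dimensions before the depth slice is taken. A minimal synthetic sketch of calling it is below; the grid values, the field name vel, and the coordinate/dimension layout (dep and rad on r, lat on t, lon on p) are illustrative assumptions consistent with the code above, not values from this commit.

```python
import numpy as np
from pytomoatt.attarray import Dataset

# Small synthetic (r, t, p) grid; 'dep' must contain the exact depth to slice at.
dep = np.array([0.0, 10.0, 20.0])
lat = np.linspace(30.0, 32.0, 21)
lon = np.linspace(100.0, 103.0, 31)
vel = 5.0 + 0.1 * dep[:, None, None] + np.zeros((dep.size, lat.size, lon.size))

ds = Dataset(
    {"vel": (["r", "t", "p"], vel)},
    {
        "dep": (["r"], dep), "rad": (["r"], 6371.0 - dep),
        "lat": (["t"], lat), "lon": (["p"], lon),
    },
)

xyz_full = ds.interp_dep(10.0, "vel")                     # every grid node
xyz_coarse = ds.interp_dep(10.0, "vel", samp_interval=3)  # keep every 3rd node in lat/lon
print(xyz_full.shape, xyz_coarse.shape)   # (651, 3) vs (77, 3); columns are lon, lat, value
```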
2 changes: 1 addition & 1 deletion pytomoatt/checkerboard.py
@@ -1,6 +1,6 @@
import h5py
import numpy as np
from .utils import init_axis, sind, cosd
from .utils.common import init_axis, sind, cosd
import copy


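The same import-path change recurs in io/asciimodel.py, io/crustmodel.py, model.py, para.py and script.py below: the helper functions move from the flat pytomoatt.utils module into a pytomoatt.utils.common submodule. A sketch of the updated external import, using only names that appear in the hunks of this commit and assuming the submodule is exposed exactly as the relative imports suggest:

```python
# After this commit the common helpers live in pytomoatt.utils.common
from pytomoatt.utils.common import init_axis, sind, cosd, ignore_nan_3d, atand, to_vtk
```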
5 changes: 2 additions & 3 deletions pytomoatt/data.py
@@ -2,7 +2,6 @@
import h5py
from .para import ATTPara
from .attarray import Dataset
from .utils import asind, acosd


class ATTData():
@@ -74,9 +73,9 @@ def read(cls, fname:str, fname_params:str,
attdata.fgrid = np.loadtxt(fname_grid)
if isinstance(dataset_name, str) and attdata.format == 'hdf5':
attdata._add_field(dataset_name)
attdata.__dict__[key], attdata.grid_glob_r, \
attdata.__dict__[dataset_name], attdata.grid_glob_r, \
attdata.grid_glob_t, attdata.grid_glob_p = \
attdata._data_retrieval(group_name=group_name, dataset_name=key)
attdata._data_retrieval(group_name=group_name, dataset_name=dataset_name)
elif isinstance(dataset_name, str) and attdata.format != 'hdf5':
attdata._add_field('data')
attdata.data, attdata.grid_glob_r, attdata.grid_glob_t, attdata.grid_glob_p = \
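The change above fixes which attribute receives the data when a single dataset_name is requested from an HDF5 output: the array is now stored under attdata.<dataset_name> and passed to _data_retrieval as dataset_name, instead of the leftover variable key. A hedged usage sketch follows; the file, group and dataset names are placeholders, and the keyword signature of ATTData.read beyond the arguments visible in this hunk is an assumption.

```python
from pytomoatt.data import ATTData

# All names below are placeholders for illustration, not values from the commit.
attdata = ATTData.read(
    "out_data_sim_0.h5",          # model/output file in HDF5 format
    "input_params.yml",           # PyTomoATT parameter file
    fname_grid="out_data_grid.dat",
    group_name="model",
    dataset_name="vel_inv_0010",
)
# With the fix, the requested dataset is exposed under its own name:
vel = attdata.vel_inv_0010
```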
2 changes: 1 addition & 1 deletion pytomoatt/io/asciimodel.py
@@ -1,5 +1,5 @@
import numpy as np
from ..utils import init_axis, ignore_nan_3d
from ..utils.common import init_axis, ignore_nan_3d
from ..setuplog import SetupLog
from scipy.interpolate import griddata

4 changes: 2 additions & 2 deletions pytomoatt/io/crustmodel.py
@@ -1,7 +1,7 @@
import numpy as np
from os.path import dirname, abspath, join
from scipy.interpolate import griddata
from ..utils import init_axis, ignore_nan_3d
from ..utils.common import init_axis, ignore_nan_3d
from ..setuplog import SetupLog
import h5py

@@ -24,7 +24,7 @@ class CrustModel():
def __init__(self, fname=join(dirname(dirname(abspath(__file__))), 'data', 'crust1.0.h5')) -> None:
"""Read internal CRUST1.0 model
:param fname: _description_, defaults to join(dirname(dirname(abspath(__file__))), 'data', 'crust1.0-vp.npz')
:param fname: Path to CRUST1.0 model, defaults to join(dirname(dirname(abspath(__file__))), 'data', 'crust1.0-vp.npz')
:type fname: str, optional
"""
with h5py.File(fname) as f:
20 changes: 12 additions & 8 deletions pytomoatt/model.py
@@ -5,7 +5,7 @@
from .io.crustmodel import CrustModel
from .io.asciimodel import ASCIIModel
from .attarray import Dataset
from .utils import init_axis, acosd, atand
from .utils.common import init_axis, atand


class ATTModel():
@@ -67,16 +67,20 @@ def to_ani(self):
"""Convert to anisotropic strength (epsilon) and azimuth (phi)
"""
self.epsilon = np.sqrt(self.eta**2+self.xi**2)
self.phi = np.zeros_like(self.epsilon)
idx = np.where(self.xi <= 0)
self.phi[idx] = 90 + 0.5*atand(self.eta[idx]/self.xi[idx])
idx = np.where((self.xi > 0) & (self.eta <= 0))
self.phi[idx] = 180 + 0.5*atand(self.eta[idx]/self.xi[idx])
idx = np.where((self.xi > 0) & (self.eta > 0))
self.phi[idx] = 0.5*atand(self.eta[idx]/self.xi[idx])
# self.phi = np.zeros_like(self.epsilon)
self.phi = np.rad2deg(0.5*np.arctan2(self.eta, self.xi))
# idx = np.where(self.xi <= 0)
# self.phi[idx] = 90 + 0.5*atand(self.eta[idx]/self.xi[idx])
# idx = np.where((self.xi > 0) & (self.eta <= 0))
# self.phi[idx] = 180 + 0.5*atand(self.eta[idx]/self.xi[idx])
# idx = np.where((self.xi > 0) & (self.eta > 0))
# self.phi[idx] = 0.5*atand(self.eta[idx]/self.xi[idx])

def to_xarray(self):
"""Convert to xarray
:return: attarray dataset
:rtype: pytomoatt.attarray.Dataset
"""
data_dict = {}
data_dict['vel'] = (["r", "t", "p"], self.vel)
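In to_ani above, the three piecewise atand branches are replaced by a single arctan2 call, so the azimuth phi is computed on one continuous branch, 0.5 * atan2(eta, xi) in degrees, giving values in (-90, 90]. A small self-contained check of the new formula; the xi and eta values are chosen only for illustration:

```python
import numpy as np

# phi = 0.5 * atan2(eta, xi) in degrees, as in the updated to_ani()
xi = np.array([0.02, -0.02, 0.02, -0.02])
eta = np.array([0.02, 0.02, -0.02, -0.02])

epsilon = np.sqrt(eta**2 + xi**2)            # anisotropic strength (unchanged)
phi = np.rad2deg(0.5 * np.arctan2(eta, xi))  # one azimuth per quadrant of (xi, eta)
print(epsilon)  # ~0.0283 everywhere
print(phi)      # [ 22.5  67.5 -22.5 -67.5]
```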
2 changes: 1 addition & 1 deletion pytomoatt/para.py
@@ -1,5 +1,5 @@
import yaml
from .utils import init_axis
from .utils.common import init_axis


class ATTPara:
4 changes: 2 additions & 2 deletions pytomoatt/script.py
@@ -8,7 +8,7 @@
from .src_rec import SrcRec
from .model import ATTModel
from .para import ATTPara
from .utils import to_vtk, init_axis
from .utils.common import to_vtk, init_axis
from .checkerboard import Checker


@@ -81,7 +81,7 @@ def create_model(self):
default='0/1/2/3', metavar='ncol_lon/ncol_lat/ncol_dep/ncol_vel')
parser.add_argument('-o', help='Path to output model, defaults to Sub_CRUST1.0_nr_nt_np.h5', default=None, metavar='fname')
parser.add_argument('-s', help='Smooth the 3D model with a Gaussian filter,'
'Sigma is the standard division of the smoothing kernel, defaults to None',
'Sigma is the standard division of the smoothing kernel in km, defaults to None',
default=None, type=float, metavar='sigma')
parser.add_argument('-t', help='Type of velocity vp or vs are available, valid for -m1', default='vp', metavar='vel_type')
args = parser.parse_args(sys.argv[2:])