Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Small updates to wkf tools #403

Merged
merged 36 commits into from
Jun 13, 2024
Merged
Show file tree
Hide file tree
Changes from 18 commits
Commits
Show all changes
36 commits
Select commit Hold shift + click to select a range
572a32a
update set_defaults to accept a list of polygons to use
kirstybayliss Feb 13, 2024
c0a9289
check if a toml file contains all necessary details and make a copy t…
kirstybayliss Feb 13, 2024
b3a9054
tidying check_toml
kirstybayliss Feb 13, 2024
50d2af3
make set_gr_params check a use list properly
kirstybayliss Feb 19, 2024
baa299a
adding focal mechanism plots
kirstybayliss Feb 21, 2024
a6ca24a
adding dependencies for focal mechanism plots
kirstybayliss Feb 21, 2024
aee91d5
adding Kaverina colour dictionary
kirstybayliss Feb 21, 2024
e67f3d3
Merge branch 'master' of github.com:GEMScienceTools/oq-mbtk into wkf_…
kirstybayliss Feb 22, 2024
64dbe4c
fix create_subcatalogues to process list of sources
kirstybayliss Feb 22, 2024
6bdc653
subcatalogues pass on hour, minute, second
kirstybayliss Feb 22, 2024
8de3034
subcatalogues pass on all columns
kirstybayliss Mar 14, 2024
20d4253
Merge branch 'master' of github.com:GEMScienceTools/oq-mbtk into wkf_…
kirstybayliss Mar 14, 2024
2f7caad
adding save_csv option to analysis_nodal_plane
kirstybayliss Mar 14, 2024
1b1befa
all wkf functions can take a list of sources
kirstybayliss Mar 18, 2024
9703ecf
adding plot=True in plot geometries where pyvista is available
kirstybayliss Mar 18, 2024
16667bf
Merge branch 'master' of github.com:GEMScienceTools/oq-mbtk into wkf_…
kirstybayliss Apr 2, 2024
e9ba6d6
fixing a value in plot_mfd
kirstybayliss Apr 5, 2024
20b81cc
Merge branch 'master' of github.com:GEMScienceTools/oq-mbtk into wkf_…
kirstybayliss Apr 8, 2024
124764f
Merge branch 'master' of github.com:GEMScienceTools/oq-mbtk into wkf_…
kirstybayliss Apr 13, 2024
0d821cf
minor changes
kirstybayliss Apr 15, 2024
8318194
Merge branch 'master' of github.com:GEMScienceTools/oq-mbtk into wkf_…
kirstybayliss Apr 17, 2024
421c57f
updates for ocean models
kirstybayliss Apr 27, 2024
3ae4e64
updates to play nicely with h3
kirstybayliss Apr 27, 2024
ce2bdb8
fix for specific sources passed to subcatalogue
kirstybayliss Apr 30, 2024
d79bf86
Merge branch 'master' of github.com:GEMScienceTools/oq-mbtk into wkf_…
kirstybayliss May 29, 2024
9d8d891
tidying
kirstybayliss Jun 3, 2024
1b6bff5
fix check_mfds so it takes correct values for each zone from the config
kirstybayliss Jun 3, 2024
60fa66c
generalising check_mfds (no longer requires config)
kirstybayliss Jun 3, 2024
39badec
adding sanity checks for seismic sources
kirstybayliss Jun 3, 2024
ff80b40
fixing stack
kirstybayliss Jun 4, 2024
794179a
tidying mosaic file
kirstybayliss Jun 4, 2024
dc30b7f
check_mfds can take individual sources
kirstybayliss Jun 4, 2024
d3499b0
create_subcatalogues will accept a multipolygon
kirstybayliss Jun 6, 2024
78b6a3f
correcting the name of the antarctic model
kirstybayliss Jun 13, 2024
93caf24
removing unnecessary import
kirstybayliss Jun 13, 2024
27b053a
fixing optional inputs
kirstybayliss Jun 13, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions openquake/mbi/wkf/analysis_nodal_plane.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@
from openquake.wkf.seismicity.nodal_plane import process_gcmt_datafames


def main(fname_folder, folder_out):
process_gcmt_datafames(fname_folder, folder_out)
def main(fname_folder, folder_out, save_csv: bool = False):
    """Run the GCMT nodal-plane analysis for a folder of input files.

    :param fname_folder:
        Name of the folder with input files
    :param folder_out:
        Folder where the outputs are created
    :param save_csv:
        When True, also save results as .csv (forwarded to
        ``process_gcmt_datafames``) — presumably per-zone nodal-plane
        tables; confirm against the callee
    """
    process_gcmt_datafames(fname_folder, folder_out, save_csv)


main.fname_folder = 'Name of the folder with input files'
Expand Down
2 changes: 1 addition & 1 deletion openquake/mbi/wkf/check_mfds.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@


def main(fname_input_pattern, fname_config, *, src_id=None):
    """Check the MFDs of the sources in a set of OQ Engine .xml files.

    :param fname_input_pattern:
        Pattern to a set of OQ Engine .xml files with a SSM
    :param fname_config:
        Name of the configuration file
    :param src_id:
        Kept for backward compatibility with existing callers.
        NOTE(review): no longer forwarded to ``check_mfds`` — confirm
        that source selection now happens inside ``check_mfds`` itself.
    """
    check_mfds(fname_input_pattern, fname_config)


main.fname_input_pattern = "Pattern to as set of OQ Engine .xml files with a SSM"
Expand Down
122 changes: 122 additions & 0 deletions openquake/mbi/wkf/check_toml.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
#!/usr/bin/env python
# coding: utf-8
# ------------------- The Model Building Toolkit ------------------------------
#
# Copyright (C) 2022 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------

import toml
import shutil
import os
from openquake.wkf.utils import get_list


HERE = os.path.dirname(__file__)

# Parameters required for each supported declustering method
DECLUSTERING_PARAMS = {
    'Zaliapin': ('fractal_dim', 'b_value', 'threshold', 'depth',
                 'output_nearest_neighbor_distances'),
    'Reasenberg': ('taumin', 'taumax', 'p', 'xk', 'xmeff', 'rfact',
                   'horiz_error', 'depth_error', 'interaction_formula',
                   'max_interaction_dist'),
    'windowing': ('time_distance_window', 'fs_time_prop'),
}

# Parameters required for each supported smoothing approach
ADAPTIVE_SMOOTHING_PARAMS = ('n_v', 'kernel', 'd_i_min', 'maxdist', 'h3res')
GAUSSIAN_SMOOTHING_PARAMS = ('kernel_maximum_distance', 'kernel_smoothing')


def _check_declustering(model):
    """Print diagnostics for the [declustering] section of the config."""
    if 'declustering' not in model:
        print("No declustering algorithm found")
        return
    declust_params = model['declustering']
    method = declust_params['method']
    if method not in DECLUSTERING_PARAMS:
        print("unrecognised declustering algorithm. Please choose from "
              "'Zaliapin', 'Reasenberg' or 'windowing'")
        return
    required = DECLUSTERING_PARAMS[method]
    if all(key in declust_params for key in required):
        print("All parameters specified for {:s} declustering".format(method))
    else:
        print(declust_params)
        keys = ", ".join("'{:s}'".format(k) for k in required)
        print("Check {:s} declustering parameters. "
              "Should contain: {:s}.".format(method, keys))


def _check_smoothing(model):
    """Print diagnostics for the [smoothing] section of the config."""
    if 'smoothing' not in model:
        print("No smoothing parameters found")
        return
    smoothing_params = model['smoothing']
    adap = all(k in smoothing_params for k in ADAPTIVE_SMOOTHING_PARAMS)
    gauss = all(k in smoothing_params for k in GAUSSIAN_SMOOTHING_PARAMS)
    if adap:
        print("Found parameters for adaptive smoothing")
    if gauss:
        print("Found parameters for Gaussian smoothing")
    if not adap and not gauss:
        print("Smoothing parameters missing. 'kernel_maximum_distance', "
              "'kernel_smoothing' needed for Gaussian smoothing. "
              "'n_v', 'kernel', 'd_i_min', 'maxdist', 'h3res' needed for "
              "adaptive smoothing.")


def _check_sources(model, use):
    """Check (and, for IDs in `use`, add in memory) the [sources] section.

    `use` is a comma-separated string of source IDs (parsed via get_list).
    """
    if len(use) > 0:
        use = get_list(use)
        # setdefault guards against configs with no [sources] table at all,
        # which previously raised a KeyError here
        sources = model.setdefault('sources', {})
        for src_id in use:
            if src_id not in sources:
                sources[src_id] = {}
        # NOTE(review): sources not listed in `use` are intentionally left
        # in place (removing them was found impractical upstream)
        print("Configured ", len(use), "sources as specified by use argument")
    elif 'sources' in model:
        print("Found ", len(model['sources']), " sources in model config")
    else:
        print("No sources found. Please add some!")


def main(fname_config: str, copy_loc: str, use: str = ''):
    """ Check config file contains all necessary parameters and make a
    working copy.

    :param fname_config:
        Location of config file describing model
    :param copy_loc:
        Location to store an editable copy of the config file
    :param use:
        Optional comma-separated string of source IDs that must appear in
        the config (missing ones are reported/added in memory)
    """
    model = toml.load(fname_config)

    # Top-level defaults every model config must provide
    if all(i in model for i in ('name', 'mmin', 'bin_width',
                                'rupture_mesh_spacing')):
        print("Checking model ", model.get('name'))
    else:
        print("missing default data (name, mmin, bin_width or "
              "rupture_mesh_spacing not found)")

    _check_declustering(model)
    _check_smoothing(model)
    _check_sources(model, use)

    # Check for msr section
    if 'msr' not in model:
        print("No magnitude scaling relationships defined")

    # NOTE(review): the in-memory edits to model['sources'] above are not
    # saved; the copy below is a verbatim duplicate of the input file —
    # confirm this is the intended behaviour.
    print("copying toml to ", copy_loc)
    shutil.copy(fname_config, copy_loc)

main.fname_config = 'location of toml file for source model'
main.copy_loc = 'location to store an editable copy of the config'
# Help string for the optional `use` argument accepted by main()
main.use = 'comma-separated string of source IDs to configure'

if __name__ == '__main__':
    # `sap` is not imported at module level in this file; import it here so
    # running the script does not raise a NameError.
    from openquake.baselib import sap
    sap.run(main)
6 changes: 3 additions & 3 deletions openquake/mbi/wkf/create_smoothing_per_zone.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@
from openquake.wkf.seismicity.smoothing import create_smoothing_per_zone


def main(fname_points: str, fname_polygons: str, folder_out: str = '/tmp', *,
         skip=None, use=None):
    """Create the per-zone smoothing output files.

    :param fname_points:
        .csv file created by the smoothing code
    :param fname_polygons:
        File (e.g. a shapefile) with the zone polygons
    :param folder_out:
        Output folder (default ``/tmp``)
    :param skip:
        Optional list of zone IDs to skip
    :param use:
        Optional list of zone IDs to process exclusively
    """
    # Mutable default arguments ([]) are replaced by None sentinels so the
    # lists are not shared between calls
    create_smoothing_per_zone(fname_points, fname_polygons, folder_out,
                              skip if skip is not None else [],
                              use if use is not None else [])


main.fname_points = ".csv file created by the smoothing code"
Expand Down
2 changes: 1 addition & 1 deletion openquake/mbi/wkf/create_subcatalogues_per_zone.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@


def main(fname_polygons: str, fname_cat: str, folder_out: str, *,
source_ids: list=[]):
source_ids: str=[]):
"""
Given a file (e.g. a shapefile) with a set of polygons and an earthquake
catalogue, it creates a set of .csv files each one containing the
Expand Down
67 changes: 67 additions & 0 deletions openquake/mbi/wkf/focal_mech_loc_plots.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
from obspy.imaging.beachball import beach
from openquake.hmtk.parsers.catalogue.gcmt_ndk_parser import ParseNDKtoGCMT
import pandas as pd
import matplotlib.pyplot as plt
from openquake.sub.utils import mecclass
from matplotlib.gridspec import GridSpec
import numpy as np

KAVERINA = {'N': 'blue',
'SS': 'green',
'R': 'red',
'N-SS': 'turquoise',
'SS-N': 'palegreen',
'R-SS': 'goldenrod',
'SS-R': 'yellow'}

def focal_mech_loc_plots(fname, figsize=(15, 10), width=0.5, size=0.1):
    """
    Produce a figure consisting of:
    1) nodal planes plotted in space (lon/lat) with Kaverina classification
       colours
    2) scatterplot of event Kaverina classifications and magnitudes
    3) scatterplot of event strike vs rake, coloured by Kaverina
       classification

    Please note that the 'width' parameter might need to be adjusted for
    different models.

    :param fname:
        .csv file with the zone events; must contain plunge_b/plunge_p/
        plunge_t, strike1, dip1, rake1, longitude, latitude and magnitude
        columns
    :param figsize:
        Size of the output figure
    :param width:
        Width of each beachball symbol
    :param size:
        Size forwarded to obspy's ``beach``
    """
    cmt_cat_zone = pd.read_csv(fname)

    # Kaverina class of each event, from the plunges of the T, B and P axes
    mclass = [mecclass(t, b, p)
              for t, b, p in zip(cmt_cat_zone['plunge_t'],
                                 cmt_cat_zone['plunge_b'],
                                 cmt_cat_zone['plunge_p'])]
    cmt_cat_zone['class'] = mclass

    # One (strike, dip, rake) triple per event for the beachball plotter
    mts = np.column_stack([cmt_cat_zone.strike1, cmt_cat_zone.dip1,
                           cmt_cat_zone.rake1])

    fig = plt.figure(layout="constrained", figsize=figsize)
    gs = GridSpec(2, 3, figure=fig)

    # Map view (spans both rows) with one beachball per event
    a0 = fig.add_subplot(gs[0:, :-1])
    lons = cmt_cat_zone['longitude']
    lats = cmt_cat_zone['latitude']
    a0.set_xlim(lons.min() - 0.1, lons.max() + 0.1)
    a0.set_ylim(lats.min() - 0.1, lats.max() + 0.1)
    a0.margins(0.05)

    for idx in range(len(mclass)):
        bcc = beach(mts[idx], xy=(lons[idx], lats[idx]), width=width,
                    linewidth=1, zorder=20, size=size,
                    facecolor=KAVERINA[mclass[idx]])
        bcc.set_alpha(0.5)
        a0.add_collection(bcc)

    # Kaverina classification vs magnitude
    a1 = fig.add_subplot(gs[0, -1])
    a1.scatter(cmt_cat_zone['class'], cmt_cat_zone['magnitude'],
               c=cmt_cat_zone['class'].map(KAVERINA))
    a1.set_xlabel("Kaverina classification")
    a1.set_ylabel("magnitude")

    # Strike vs rake, coloured by classification
    a2 = fig.add_subplot(gs[1, -1])
    a2.scatter(cmt_cat_zone['strike1'], cmt_cat_zone['rake1'],
               c=cmt_cat_zone['class'].map(KAVERINA), s=1, alpha=0.5)
    a2.set_xlabel("strike")
    a2.set_ylabel("rake")

    fig.suptitle("Zone nodal plane distribution")
    plt.show()
13 changes: 10 additions & 3 deletions openquake/mbi/wkf/set_defaults.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,13 @@
from openquake.wkf.utils import get_list


def main(fname_conf: str, fname_defaults: str):
def main(fname_conf: str, fname_defaults: str, use: str = [], skip: str = []):
kirstybayliss marked this conversation as resolved.
Show resolved Hide resolved

if len(use) > 0:
use = get_list(use)
if len(skip) > 0:
skip = get_list(skip)

# Parsing config
model = toml.load(fname_conf)
output = copy.copy(model)
Expand All @@ -17,10 +22,12 @@ def main(fname_conf: str, fname_defaults: str):
defaults = toml.load(fname_defaults)

# Adding fields
if 'sources' not in output:
output['sources'] = {}
#if 'sources' not in output:
# output['sources'] = {}

for src_id in defaults['sources']:
if (len(use) and src_id not in use) or (src_id in skip):
continue
if src_id not in output['sources']:
output['sources'][src_id] = {}
for key in defaults['sources'][src_id]:
Expand Down
2 changes: 2 additions & 0 deletions openquake/mbi/wkf/set_gr_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ def set_gr_params(fname_conf: str, use: str = "*", method: str = "weichert",
for src_id in model['sources']:
if exclude is not None and src_id in exclude:
continue
if use != "*" and src_id not in use:
continue
else:
print("src_id:", src_id, " ", method)
if use == "*" or src_id in use:
Expand Down
4 changes: 2 additions & 2 deletions openquake/mbi/wkf/set_h3_to_zones.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,12 @@
from openquake.wkf.h3.zones import discretize_zones_with_h3_grid


def main(h3_level: str, fname_poly: str, folder_out: str, *, use: str = ''):
    """
    Given a set of polygons, using H3 this creates grids representing in a
    'discrete' sense the original geometries.

    :param h3_level:
        The level of the H3 grid
    :param fname_poly:
        File with the polygons to discretise
    :param folder_out:
        Output folder
    :param use:
        Optional comma-separated string of zone IDs to process
    """
    # `use` defaults to an empty string rather than a shared mutable list;
    # the callee only tests `len(use) > 0`, so behaviour is unchanged
    discretize_zones_with_h3_grid(h3_level, fname_poly, folder_out, use)


descr = 'The level of the H3 grid'
Expand Down
29 changes: 28 additions & 1 deletion openquake/mbt/tools/mfd.py
Original file line number Diff line number Diff line change
Expand Up @@ -229,8 +229,35 @@ def get_evenlyDiscretizedMFD_from_multiMFD(mfd, bin_width=None):
else:
tmfd = EEvenlyDiscretizedMFD(min_m, binw, occ)
emfd.stack(tmfd)

elif mfd.kind == 'truncGutenbergRichterMFD':
aval = mfd.kwargs['a_val']
bval = mfd.kwargs['b_val']
min_mag = mfd.kwargs['min_mag']
max_mag = mfd.kwargs['max_mag']
binw = mfd.kwargs['bin_width'][0]

for i in range(mfd.size):
bgr = bval[0] if len(bval) == 1 else bval[i]
agr = aval[0] if len(aval) == 1 else aval[i]
min_m = min_mag[0] if len(min_mag) == 1 else min_mag[i]
max_m = max_mag[0] if len(max_mag) == 1 else max_mag[i]

left = np.arange(min_m, max_m, binw)
rates = 10.**(agr-bgr*left)-10.**(agr-bgr*(left+binw))

if i == 0:
emfd = EEvenlyDiscretizedMFD(min_m+binw/2.,
binw,
list(rates))
else:
tmfd = EEvenlyDiscretizedMFD(min_m+binw/2.,
binw,
list(rates))
emfd.stack(tmfd)

else:
raise ValueError('Unsupported MFD type')
raise ValueError('Unsupported MFD type ', mfd.kind)

return emfd

Expand Down
2 changes: 1 addition & 1 deletion openquake/mbt/tools/model_building/plt_mfd.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def _compute_mfd(cat, compl_table, mwid):
weichert_config = {'magnitude_interval': mwid,
'reference_magnitude': 0.0}
weichert = Weichert()
bval_wei, sigmab, aval_wei, sigmaa = weichert.calculate(cat,
bval_wei, sigmab, aval_wei, sigmaa = weichert.calc(cat,
weichert_config,
compl_table)
#
Expand Down
6 changes: 1 addition & 5 deletions openquake/wkf/catalogue.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,10 +150,6 @@ def create_subcatalogues(fname_polygons: str, fname_cat: str, folder_out: str,
# Read polygons
polygons_gdf = gpd.read_file(fname_polygons)

# Select point in polygon
columns = ['eventID', 'year', 'month', 'day', 'magnitude', 'longitude',
'latitude', 'depth']

# Iterate over sources
out_fnames = []
for idx, poly in polygons_gdf.iterrows():
Expand All @@ -171,7 +167,7 @@ def create_subcatalogues(fname_polygons: str, fname_cat: str, folder_out: str,
fname = f'subcatalogue_zone_{poly.id}.csv'
out_fname = os.path.join(folder_out, fname)
out_fnames.append(out_fname)
within.to_csv(out_fname, index=False, columns=columns)
within.to_csv(out_fname, index=False)

return out_fnames

Expand Down
12 changes: 9 additions & 3 deletions openquake/wkf/h3/zones.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,15 @@
import json
import shapely
import geopandas as gpd
from openquake.wkf.utils import create_folder
from openquake.wkf.utils import create_folder, get_list


def discretize_zones_with_h3_grid(h3_level: str, fname_poly: str,
folder_out: str):

folder_out: str, use: str = []):

if len(use) > 0:
use = get_list(use)

h3_level = int(h3_level)
create_folder(folder_out)

Expand All @@ -48,6 +51,9 @@ def discretize_zones_with_h3_grid(h3_level: str, fname_poly: str,
# Select point in polygon
fout = open(fname_out, 'w')
for idx, poly in polygons_gdf.iterrows():

if len(use) > 0 and poly.id not in use:
continue

tmps = shapely.geometry.mapping(poly.geometry)
geojson_poly = eval(json.dumps(tmps))
Expand Down
Loading
Loading