diff --git a/adminpager b/bin/adminpager similarity index 84% rename from adminpager rename to bin/adminpager index 6313a30..3b78bfb 100755 --- a/adminpager +++ b/bin/adminpager @@ -3,52 +3,54 @@ # stdlib imports import argparse import sys -import configparser import os.path import datetime import collections import zipfile import shutil import glob -import re from operator import attrgetter from calendar import monthrange # local imports -from losspager.utils.config import read_config, read_mail_config, write_config, get_mail_config_file +from losspager.utils.config import (read_config, read_mail_config, + get_mail_config_file) from losspager.io.pagerdata import PagerData -from losspager.utils.admin import PagerAdmin, RemoteAdmin, transfer, unset_pending, get_id_and_source +from losspager.utils.admin import (PagerAdmin, RemoteAdmin, transfer, + unset_pending, get_id_and_source) +from losspager.utils.eventpath import get_event_folder from losspager.utils.exception import PagerException # third party imports from impactutils.io.cmd import get_command_output -from impactutils.textformat.text import pop_round_short, dollar_round, round_to_nearest, commify from impactutils.comcat.query import ComCatInfo import pandas as pd DATEFMT = '%Y-%m-%d' DATETIMEFMT = '%Y-%m-%d %H:%M:%S' NDAYS = 7 -MAX_DAYS = 300*365 # maximum number of days to do queries for events -PAGER_SCRIPT = 'runpager.py' # this is a system call hack until I can refactor runpager to export a run function +MAX_DAYS = 300 * 365 # maximum number of days to do queries for events +# this is a system call hack until I can refactor runpager to export a run function +PAGER_SCRIPT = 'runpager.py' TIMEFMT = '%Y-%m-%d %H:%M:%S' HDRDICT = collections.OrderedDict([('ID', '%-10s'), - ('Ver', '%-3s'), - ('Time', '%-19s'), - ('Mag', '%-4s'), - ('Depth', '%-8s'), - ('Level', '%-7s'), - ('MMI', '%-3s'), - ('Tsunami?', '%-8s'), - ('Stopped?', '%-8s'), - ('Location', '%-42s')]) + ('Ver', '%-3s'), + ('Time', '%-19s'), + ('Mag', '%-4s'), + ('Depth', '%-8s'), + ('Level', '%-7s'), + ('MMI', '%-3s'), + ('Tsunami?', '%-8s'), + ('Stopped?', '%-8s'), + ('Location', '%-42s')]) LEVELS = {'green': 0, 'yellow': 1, 'orange': 2, 'red': 3} + def run_pager(eventid): try: ccinfo = ComCatInfo(eventid) @@ -56,26 +58,29 @@ def run_pager(eventid): cmd = 'pager %s' % gridurl print('Running command "%s"' % cmd) res, stdout, stderr = get_command_output(cmd) - return (res, stdout+stderr) - except: + return (res, stdout + stderr) + except Exception: return (False, 'Could not open connection to ComCat for event %s' % eventid) - + + def order_event_data(event_data, sort_by=('time',)): if not isinstance(sort_by, tuple): raise PagerException('sort_by option must be a tuple of strings.') sort_options = ('time', 'magnitude', 'alert', 'processing_time') for option in sort_by: - if option not in sort_options: - raise PagerException('Sort option %s not allowed.' % option) + if option not in sort_options: + raise PagerException('Sort option %s not allowed.' 
% option) event_data = sorted(event_data, key=attrgetter(*sort_by)) return event_data + def archive_event(event, archive_folder, output_folder): eventfolder = get_event_folder(event, output_folder) if eventfolder is None: return False - zipname = os.path.join(archive_folder, event+'.zip') - myzip = zipfile.ZipFile(zipname, mode='w', compression=zipfile.ZIP_DEFLATED) + zipname = os.path.join(archive_folder, event + '.zip') + myzip = zipfile.ZipFile( + zipname, mode='w', compression=zipfile.ZIP_DEFLATED) for root, dirs, files in os.walk(eventfolder): arcfolder = root[root.find(event):] for fname in files: @@ -87,6 +92,7 @@ def archive_event(event, archive_folder, output_folder): shutil.rmtree(eventfolder) return True + def is_date(datestr): try: datetime.datetime.strptime(datestr, DATETIMEFMT) @@ -97,9 +103,11 @@ def is_date(datestr): return False return True + def query_events_since(outfolder): pass + def get_all_events(outfolder): allevents = os.listdir(outfolder) events = [] @@ -108,6 +116,7 @@ def get_all_events(outfolder): events.append(event) return events + def get_event_data(eventfolder): data_blobs = [] for versionfolder in glob.glob(os.path.join(eventfolder, 'version.*')): @@ -117,6 +126,7 @@ def get_event_data(eventfolder): data_blobs.append(vdata) return data_blobs + def get_date(datestr): """datestr can be a datetime date or date/time string, OR 'all' or 'recent'. 'recent' returns last two weeks of events, 'all' gets all events. @@ -136,18 +146,23 @@ def get_date(datestr): pass return archdate + def do_archive(archive_info, archive_threshold, admin): archive_date = get_date(archive_info[0]) if archive_info[0] == 'all': narchived, nerrors = admin.archive(all_events=True) elif archive_info[0] == 'auto': - narchived, nerrors = admin.archive(events_before=archive_threshold) + tnow = datetime.datetime.utcnow() + dt = datetime.timedelta(days=archive_threshold) + archive_date = tnow - dt + narchived, nerrors = admin.archive(events_before=archive_date) elif archive_date is not None: narchived, nerrors = admin.archive(events_before=archive_date) else: narchived, nerrors = admin.archive(events=archive_info) return (narchived, nerrors) + def do_release(eventid, admin, config): # find the most recent version of PAGER for this event event_folder = admin.getEventFolder(eventid) @@ -173,11 +188,13 @@ def do_release(eventid, admin, config): pdata = PagerData() pdata.loadFromJSON(jsonfolder) try: - msg = transfer(config, pdata, authid, authsource, version_folder, release=True) + msg = transfer(config, pdata, authid, authsource, + version_folder, release=True) except Exception as e: msg = str(e) return (True, msg) + def do_force(eventid, admin, config, mailconfig): # find the most recent version of PAGER for this event event_folder = admin.getEventFolder(eventid) @@ -197,7 +214,7 @@ def do_force(eventid, admin, config, mailconfig): pdata.loadFromJSON(jsonfolder) event_time = pdata.time thresh = mailconfig['release_threshold'] - email_threshold = event_time + datetime.timedelta(seconds=thresh*3600) + email_threshold = event_time + datetime.timedelta(seconds=thresh * 3600) now_time = datetime.datetime.utcnow() user_msg = '' if now_time < email_threshold: @@ -207,14 +224,14 @@ def do_force(eventid, admin, config, mailconfig): if response != 'Y': msg = 'You chose not to force sending of emails. Exiting.' 
return (False, msg) - + try: - msg = transfer(config, pdata, authid, authsource, version_folder, force_email=True) + msg = transfer(config, pdata, authid, authsource, + version_folder, force_email=True) except Exception as e: msg = str(e) return (True, msg) - - + def do_renotify(eventid, admin, config): # find the most recent version of PAGER for this event @@ -234,11 +251,13 @@ def do_renotify(eventid, admin, config): pdata = PagerData() pdata.loadFromJSON(jsonfolder) try: - msg = transfer(config, pdata, authid, authsource, version_folder, renotify=True) + msg = transfer(config, pdata, authid, authsource, + version_folder, renotify=True) except Exception as e: msg = str(e) return (True, msg) + def do_status(status, admin): # check the pager config first current_status = admin.getStatus() @@ -280,6 +299,7 @@ def do_status(status, admin): msg += msg2 return msg + def do_stats(stats, admin, config): if 'stats_folder' not in config.keys(): print('Configure the stats_folder variable first.') @@ -298,10 +318,11 @@ def do_stats(stats, admin, config): query_year = tnow.year - 1 ndays = monthrange(query_year, query_month)[1] start_date = datetime.datetime(query_year, query_month, 1) - end_date = datetime.datetime(query_year, query_month, ndays, 23, 59, 59) + end_date = datetime.datetime( + query_year, query_month, ndays, 23, 59, 59) fname = 'monthly_%i_%i.xlsx' % (query_year, query_month) elif stats[0] == 'quarter': - this_quarter = (tnow.month-1)//3 + this_quarter = (tnow.month - 1) // 3 last_quarter = this_quarter - 1 if last_quarter == -1: query_year = tnow.year - 1 @@ -312,9 +333,11 @@ def do_stats(stats, admin, config): 3: (10, 12)} end_month_days, tmp = monthrange(query_year, quarters[last_quarter][1]) - start_date = datetime.datetime(query_year, quarters[last_quarter][0], 1) - end_date = datetime.datetime(query_year, quarters[last_quarter][1], end_month_days, 23, 59, 59) - fname = 'quarterly_%i_Q%i.xlsx' % (query_year, (last_quarter+1)) + start_date = datetime.datetime( + query_year, quarters[last_quarter][0], 1) + end_date = datetime.datetime( + query_year, quarters[last_quarter][1], end_month_days, 23, 59, 59) + fname = 'quarterly_%i_Q%i.xlsx' % (query_year, (last_quarter + 1)) elif stats[0] == 'year': query_year = tnow.year - 1 start_date = datetime.datetime(query_year, 1, 1) @@ -324,10 +347,10 @@ def do_stats(stats, admin, config): msg = 'Unsupported stats period %s.' % stats[0] res = False pdataframe, broken = admin.query(start_time=start_date, - end_time=end_date, - mag_threshold=0.0, - alert_threshold='green', - version='all') + end_time=end_date, + mag_threshold=0.0, + alert_threshold='green', + version='all') pdataframe['tmp'] = pdataframe.index pdataframe = pdataframe.sort_values(['tmp', 'Version']) pdataframe = pdataframe.drop('tmp', 1) @@ -337,12 +360,14 @@ def do_stats(stats, admin, config): res = True return (res, msg) + def do_query(query, output, admin): msg = '' pd.set_option('display.width', 1000) pd.set_option('display.max_rows', 1000) start_date = get_date(query[0]) - end_date = datetime.datetime(3000, 1, 1) # some scenarios are in the future. Sigh. + # some scenarios are in the future. Sigh. + end_date = datetime.datetime(3000, 1, 1) mag_threshold = 0.0 alert_threshold = 'green' version = 'last' @@ -357,28 +382,30 @@ def do_query(query, output, admin): mag_threshold = float(query[1]) assert mag_threshold >= 0 and mag_threshold <= 10.0 except: - msg = qsyntax+'Second argument must be a magnitude [0-10].' + msg = qsyntax + 'Second argument must be a magnitude [0-10].' 
res = False if len(query) >= 3: alert_threshold = query[2] if alert_threshold not in ['green', 'yellow', 'orange', 'red']: - msg = qsyntax+'Fourth argument must be one of (green,yellow,orange,red).' + msg = qsyntax + \ + 'Fourth argument must be one of (green,yellow,orange,red).' res = False if len(query) >= 4: end_date = get_date(query[3]) if end_date is None: - msg = qsyntax+'Third argument must be a date/time string.' + msg = qsyntax + 'Third argument must be a date/time string.' res = False if len(query) >= 5: version = query[4] if version not in ['first', 'last', 'eight', 'all']: - msg = qsyntax+'Fifth argument must be one of (first,last,eight,all).' + msg = qsyntax + \ + 'Fifth argument must be one of (first,last,eight,all).' res = False pdataframe, broken_events = admin.query(start_time=start_date, end_time=end_date, - mag_threshold=mag_threshold, - alert_threshold=alert_threshold, - version=version) + mag_threshold=mag_threshold, + alert_threshold=alert_threshold, + version=version) if output == 'screen': if len(pdataframe): print(pdataframe) @@ -392,17 +419,19 @@ def do_query(query, output, admin): else: fpath, fname = os.path.split(output) if not os.path.isdir(fpath): - msg = 'Cannot create %s in %s - directory does not exist.' % (fname, fpath) + msg = 'Cannot create %s in %s - directory does not exist.' % ( + fname, fpath) res = False pdataframe.to_excel(output) msg = '%i rows written to %s.' % (len(pdataframe), output) res = True return (res, msg) + def main(args): # Get config file loaded config = read_config() - + # figure out where the output data goes pager_folder = config['output_folder'] @@ -411,7 +440,8 @@ def main(args): # figure out auto archive threshold archive_threshold_days = config['archive_older_than'] - archive_threshold = datetime.datetime.utcnow() - datetime.timedelta(days=archive_threshold_days) + archive_threshold = datetime.datetime.utcnow( + ) - datetime.timedelta(days=archive_threshold_days) # if we're on a laptop, then status should not be set in the config at all if 'status' not in config: @@ -437,16 +467,18 @@ def main(args): action = 'unstop' eventid = args.unstop if action != '': - print('You are not on a primary system, but have PDL configured. Trying remote admin actions...') + print( + 'You are not on a primary system, but have PDL configured. Trying remote admin actions...') res, stdout, stderr = admin.sendAction(action, eventid) if not res: - print('Sending remote action %s failed. "%s", %s".' % (action, stdout, stderr)) + print('Sending remote action %s failed. "%s", %s".' % + (action, stdout, stderr)) sys.exit(1) else: - print('Successfully sent remote action %s. "%s".' % (action, stdout)) + print('Successfully sent remote action %s. "%s".' 
% + (action, stdout)) sys.exit(0) - - + admin = PagerAdmin(pager_folder, archive_folder) if args.stop: @@ -467,7 +499,7 @@ def main(args): sys.exit(1) else: sys.exit(0) - + if args.renotify: res, msg = do_renotify(args.renotify, admin, config) print(msg) @@ -494,8 +526,10 @@ def main(args): sys.exit(0) if args.archive: - narchived, nerrors = do_archive(args.archive, config['archive_older_than'], admin) - print('%i events archived to %s, %i errors' % (narchived, archive_folder, nerrors)) + narchived, nerrors = do_archive( + args.archive, config['archive_older_than'], admin) + print('%i events archived to %s, %i errors' % + (narchived, archive_folder, nerrors)) sys.exit(0) if args.release: @@ -505,7 +539,7 @@ def main(args): sys.exit(1) else: sys.exit(0) - + if args.restore: if args.restore[0] == 'all': nrestored = admin.restore(all_events=True) @@ -540,15 +574,18 @@ def main(args): print('Tsunami syntax: adminpager --tsunami EVENT on/off') sys.exit(1) # toggling re-runs PAGER, including whatever transfer may happen - result, stdout, stderr = admin.toggleTsunami(args.tsunami[0], args.tsunami[1]) - print('Tsunami status has been set to %s for event %s' % (args.tsunami[1], args.tsunami[0])) + result, stdout, stderr = admin.toggleTsunami( + args.tsunami[0], args.tsunami[1]) + print('Tsunami status has been set to %s for event %s' % + (args.tsunami[1], args.tsunami[0])) if result: lines = stdout.decode('utf-8').split('\n') print('This event has been re-run successfully, with the output:') for line in lines: print(line) if not result: - print('This event has not been re-run successfully, with the output:"%s"' % (stderr)) + print( + 'This event has not been re-run successfully, with the output:"%s"' % (stderr)) sys.exit(0) if args.stats: @@ -558,7 +595,7 @@ def main(args): sys.exit(1) else: sys.exit(0) - + if args.query: res, msg = do_query(args.query, args.output, admin) print(msg) @@ -566,9 +603,10 @@ def main(args): sys.exit(0) else: sys.exit(1) - + + if __name__ == '__main__': - desc='Administer the PAGER system with a series of subcommands.' + desc = 'Administer the PAGER system with a series of subcommands.' usage = ''' System Status: To query the system status: "adminpager --status check" @@ -637,15 +675,15 @@ if __name__ == '__main__': metavar=('EVENT', 'on/off/auto')) argparser.add_argument('--query', nargs='*', metavar='PARAM', help="List events that match the query. 
Params are [START/all/recent [MAG [ALERT [END [VERSION]]]]].") - argparser.add_argument("--history", + argparser.add_argument("--history", help="Print history of input event.", metavar='EVENT') - argparser.add_argument("--output", + argparser.add_argument("--output", help="Select output format for queries ('screen' or excel filename.", default='screen', metavar='FORMAT') - argparser.add_argument("--stats", nargs=1, + argparser.add_argument("--stats", nargs=1, help="Create dump of monthly, quarterly, or yearly PAGER results.", - choices = ('month', 'quarter', 'year'), metavar='PERIOD') + choices=('month', 'quarter', 'year'), metavar='PERIOD') argparser.add_argument("--release", help="Release orange/red alert level event.", metavar='EVENTCODE') argparser.add_argument("--force-email", help="Force sending of email to all appropriate users, ignoring email threshold (nominally 8 hours).", @@ -657,10 +695,6 @@ if __name__ == '__main__': argparser.add_argument("--cancel", nargs=1, help="Cancel event.", metavar='EVENT') - args = argparser.parse_args() main(args) - - - diff --git a/callpager b/bin/callpager similarity index 100% rename from callpager rename to bin/callpager diff --git a/emailpager b/bin/emailpager similarity index 77% rename from emailpager rename to bin/emailpager index 0cfaa8c..596a224 100755 --- a/emailpager +++ b/bin/emailpager @@ -29,9 +29,11 @@ ALERT_DICT = {'green': 0, 'orange': 2, 'red': 3} + def get_version(session, pdata, release=False, renotify=False): eventid = pdata.id - event = session.query(es.Event).filter(es.Event.eventcode == eventid).first() + event = session.query(es.Event).filter( + es.Event.eventcode == eventid).first() ccinfo = None authid = eventid if event is None: @@ -41,7 +43,8 @@ def get_version(session, pdata, release=False, renotify=False): allids.insert(0, authid) allids.remove(eventid) for testid in allids: - event = session.query(es.Event).filter(es.Event.eventcode == testid).first() + event = session.query(es.Event).filter( + es.Event.eventcode == testid).first() if event is not None: break except: @@ -67,7 +70,7 @@ def get_version(session, pdata, release=False, renotify=False): if len(sversions) and renotify: version = sversions[-1] return (version, event, ccinfo) - + prow = pdata.toSeries() country = prow['Impacted Country ($)'] # Now create a new version @@ -78,26 +81,27 @@ def get_version(session, pdata, release=False, renotify=False): if pending == 'pending': released = False was_pending = True - + alert = ALERT_DICT[pdata.summary_alert] - version = es.Version(versioncode = eventid, - time = pdata.time, - country = country, - lat = pdata.latitude, - lon = pdata.longitude, - depth = pdata.depth, - magnitude = pdata.magnitude, - number = len(event.versions) + 1, - fatlevel = ALERT_DICT[pdata.fatality_alert], - ecolevel = ALERT_DICT[pdata.fatality_alert], - summarylevel = alert, - released = released, - was_pending = was_pending, - processtime = pdata.processing_time, - maxmmi = pdata.maxmmi) + version = es.Version(versioncode=eventid, + time=pdata.time, + country=country, + lat=pdata.latitude, + lon=pdata.longitude, + depth=pdata.depth, + magnitude=pdata.magnitude, + number=len(event.versions) + 1, + fatlevel=ALERT_DICT[pdata.fatality_alert], + ecolevel=ALERT_DICT[pdata.fatality_alert], + summarylevel=alert, + released=released, + was_pending=was_pending, + processtime=pdata.processing_time, + maxmmi=pdata.maxmmi) event.versions.append(version) return (version, event, ccinfo) + def send_emails(version, addresses, properties, msg, subject, DEBUG, 
attachments=[]): props = properties.copy() props['recipients'] = [address.email for address in addresses] @@ -114,31 +118,37 @@ def send_emails(version, addresses, properties, msg, subject, DEBUG, attachments version.addresses += addresses return version + def main(args): - + DEBUG = False if args.debug: DEBUG = True - + # get all of the information from the mail config file config = read_mail_config() if 'log_folder' in config: eventid = args.eventid tnowstr = datetime.utcnow().strftime('%Y%m%d%H%M%S') - logfile = os.path.join(config['log_folder'], 'emailpager_%s_%s.log' % (eventid, tnowstr)) - logging.basicConfig(filename=logfile, level=logging.DEBUG, format='%(asctime)s %(message)s') + logfile = os.path.join( + config['log_folder'], 'emailpager_%s_%s.log' % (eventid, tnowstr)) + logging.basicConfig(filename=logfile, level=logging.DEBUG, + format='%(asctime)s %(message)s') else: - logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(message)s') - + logging.basicConfig(level=logging.DEBUG, + format='%(asctime)s %(message)s') + # check the status of the system - stop if we are NOT primary if 'status' not in config or config['status'] != 'primary': - logging.warning('This system is not configured to send email. Stopping.') + logging.warning( + 'This system is not configured to send email. Stopping.') sys.exit(0) - + # first make sure this is a losspager product - if args.type != 'losspager': - logging.warning('emailpager is only configured to work with losspager products. Exiting.') + if args.type not in ['losspager', 'losspager-admin']: + logging.warning( + 'emailpager is only configured to work with losspager/losspager-admin products. Exiting.') sys.exit(1) # TODO: Do something with delete messages @@ -149,7 +159,8 @@ def main(args): jsondir = os.path.join(args.directory, 'json') if not os.path.isdir(jsondir): - logging.warning('JSON directory "%s" containing PAGER output not found. Exiting.' % args.directory) + logging.warning( + 'JSON directory "%s" containing PAGER output not found. Exiting.' % args.directory) sys.exit(1) # check to see if a --property-renotify option has been set @@ -170,11 +181,10 @@ def main(args): # scenario events are frequently set in the future. Since we NEVER anticipate # wanting to send emails for scenario events for any reason, let's set a check # for this. Times look like this: 2018-03-11T04:45:36.000Z - etime = datetime.strptime(args.time,'%Y-%m-%dT%H:%M:%S.%fZ') + etime = datetime.strptime(args.time, '%Y-%m-%dT%H:%M:%S.%fZ') if etime > datetime.utcnow(): logging.warning('The event time stamp is in the future. Exiting.') sys.exit(1) - # Everything is cool... logging.debug('Loading from data directory...') @@ -185,21 +195,23 @@ def main(args): logging.debug('Connecting to database...') dburl = config['email']['database']['url'] session = es.get_session(url=dburl, create_db=False) - + # Find event in database, or create it if not found. Return a new version for that event, # or (if event has just been released and previous version was not released, return most recent # version with released column set to True. 
logging.debug('Finding event in database...') - version, event, ccinfo = get_version(session, pdata, release=release, renotify=renotify) + version, event, ccinfo = get_version( + session, pdata, release=release, renotify=renotify) # check to see if we forced and the event is older than the configured threshold past_email_deadline = False if force_email: nowtime = datetime.utcnow() - threshtime = version.time + timedelta(seconds=config['release_threshold']*3600) + threshtime = version.time + \ + timedelta(seconds=config['release_threshold']*3600) if nowtime > threshtime: past_email_deadline = True - + # add/commit the event for now, but we may have to delete it if we crash for any reason session.add(event) session.commit() @@ -220,9 +232,9 @@ def main(args): logging.debug('Determining which users should get emailed...') for address in all_addresses: should_alert, notified_before = address.shouldAlert(version, - renotify=renotify, - release=release, - ignore_time_limit=force_email) + renotify=renotify, + release=release, + ignore_time_limit=force_email) if should_alert: if address.format == 'short': if notified_before: @@ -240,15 +252,18 @@ def main(args): else: pdf_addresses_nonupdate.append(address) - # how many emails are we sending - logging.debug('%i new short addresses.' % (len(short_addresses_update))) - logging.debug('%i short addresses to update.' % (len(short_addresses_nonupdate))) + logging.debug('%i new short addresses.' % + (len(short_addresses_update))) + logging.debug('%i short addresses to update.' % + (len(short_addresses_nonupdate))) logging.debug('%i new long addresses.' % (len(long_addresses_update))) - logging.debug('%i long addresses to update.' % (len(long_addresses_nonupdate))) + logging.debug('%i long addresses to update.' % + (len(long_addresses_nonupdate))) logging.debug('%i new pdf addresses.' % (len(pdf_addresses_update))) - logging.debug('%i pdf addresses to update.' % (len(pdf_addresses_nonupdate))) - + logging.debug('%i pdf addresses to update.' 
% + (len(pdf_addresses_nonupdate))) + # try to find the event url logging.debug('Getting event url...') if ccinfo is not None: @@ -258,8 +273,10 @@ def main(args): # create the short and long message texts logging.debug('Creating message text and subject...') - short_msg = format_msg(version, pdata, 'short', event_url, past_email_deadline=past_email_deadline) - long_msg = format_msg(version, pdata, 'long', event_url, past_email_deadline=past_email_deadline) + short_msg = format_msg( + version, pdata, 'short', event_url, past_email_deadline=past_email_deadline) + long_msg = format_msg(version, pdata, 'long', event_url, + past_email_deadline=past_email_deadline) # create the long and short subjects subject, subject_update = generate_subject_line(version, pdata) @@ -273,16 +290,20 @@ def main(args): # send emails to all short format addresses logging.debug('Sending short addresses...') if len(short_addresses_update): - version = send_emails(version, short_addresses_update, all_props, short_msg, subject_update, DEBUG) + version = send_emails( + version, short_addresses_update, all_props, short_msg, subject_update, DEBUG) if len(short_addresses_nonupdate): - version = send_emails(version, short_addresses_nonupdate, all_props, short_msg, subject, DEBUG) + version = send_emails( + version, short_addresses_nonupdate, all_props, short_msg, subject, DEBUG) # send emails to all long format addresses logging.debug('Sending long addresses...') if len(long_addresses_update): - version = send_emails(version, long_addresses_update, all_props, long_msg, subject_update, DEBUG) + version = send_emails( + version, long_addresses_update, all_props, long_msg, subject_update, DEBUG) if len(long_addresses_nonupdate): - version = send_emails(version, long_addresses_nonupdate, all_props, long_msg, subject, DEBUG) + version = send_emails( + version, long_addresses_nonupdate, all_props, long_msg, subject, DEBUG) # send emails to all pdf format addresses logging.debug('Sending pdf addresses...') @@ -300,12 +321,13 @@ def main(args): version = send_emails(version, pdf_addresses_nonupdate, all_props, long_msg, subject, DEBUG, attachments=attachments) - + logging.debug('Done.') except Exception as e: # if we have any errors, we want to back out the event and version we added above. # todo - the event might not be new, we can't just delete it, only if its empty - print('Exception "%s" on input %s Backing out any new events/versions.' % (str(e), args.directory)) + print('Exception "%s" on input %s Backing out any new events/versions.' % + (str(e), args.directory)) session.delete(version) if len(event.versions) == 0: session.delete(event) @@ -313,15 +335,15 @@ def main(args): session.commit() session.close() sys.exit(0) - - + + if __name__ == '__main__': # pdl passes in property arguments surrounded by double quotes. At the command shell, # or in ipython, this are replaced for you. When called from PDL (presumably from a Java # system call, they are NOT, and therefore those arguments are not parsed correctly. sys.argv = [arg.replace('"', '') for arg in sys.argv] - - desc='Send emails to PAGER users.' + + desc = 'Send emails to PAGER users.' 
argparser = argparse.ArgumentParser(description=desc, formatter_class=argparse.ArgumentDefaultsHelpFormatter) @@ -329,28 +351,28 @@ if __name__ == '__main__': help='Turn off emailing, print out who *would* be notified.') argparser.add_argument("--directory", help="Directory where PAGER data can be found", metavar='DIRECTORY') - argparser.add_argument("--type", + argparser.add_argument("--type", help="Product type", metavar='TYPE') - argparser.add_argument("--code", + argparser.add_argument("--code", help="Product code", metavar='CODE') - argparser.add_argument("--source", + argparser.add_argument("--source", help="Product source", metavar='SOURCE') - - argparser.add_argument("--status", + + argparser.add_argument("--status", help="Product status", metavar='STATUS') - argparser.add_argument("--action", + argparser.add_argument("--action", help="Product action", metavar='ACTION') argparser.add_argument("--preferred-latitude", type=float, help="Event latitude", metavar='LAT', dest='lat') argparser.add_argument("--preferred-longitude", type=float, help="Event longitude", metavar='LON', dest='lon') - argparser.add_argument("--preferred-eventid", + argparser.add_argument("--preferred-eventid", help="Event ID", metavar='ID', dest='eventid') argparser.add_argument("--preferred-depth", type=float, help="Event depth", metavar='DEPTH', dest='depth') argparser.add_argument("--preferred-magnitude", type=float, help="Event magnitude", metavar='MAG', dest='magnitude') - argparser.add_argument("--preferred-eventtime", + argparser.add_argument("--preferred-eventtime", help="Event time", metavar='TIME', dest='time') argparser.add_argument("--property-renotify", dest='renotify', diff --git a/hdfupdate b/bin/hdfupdate similarity index 54% rename from hdfupdate rename to bin/hdfupdate index 39381e9..4ee4379 100755 --- a/hdfupdate +++ b/bin/hdfupdate @@ -10,39 +10,30 @@ import glob # third party imports import pandas as pd +import openpyxl +from impactutils.io.container import HDFContainer from impactutils.io.cmd import get_command_output -sheets = {'BuildingTypes': 'Code', - 'RuralNonResidential': 'CountryCode', - 'RuralResidential': 'CountryCode', - 'UrbanNonResidential': 'CountryCode', - 'UrbanResidential': 'CountryCode', - 'Workforce': 'CountryCode'} -files = {'collapse': 'BuildingCode', - 'casualty': 'BuildingCode'} +def load_container_from_excel(excelfile, outfile): + container = HDFContainer.create(outfile) + wb = openpyxl.load_workbook(excelfile) + sheets = wb.sheetnames + wb.close() + for sheet in sheets: + df = pd.read_excel(excelfile, sheet_name=sheet) + container.setDataFrame(sheet, df) + + container.close() -def load_panel_from_excel(excelfile): - paneldict = {} - xl = pd.ExcelFile(excelfile) - for sheet in xl.sheet_names: - frame = pd.read_excel(excelfile, sheetname=sheet) - if sheet in sheets: - frame = frame.set_index(sheets[sheet]) - else: - for freg in files.keys(): - if excelfile.find(freg) > -1: - frame = frame.set_index(files[freg]) - break - paneldict[sheet] = frame.copy() - #print(sheet,'ShortDescription' in frame.columns) - panel = pd.Panel(paneldict.copy()) - return panel def main(args): - homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? - excelfiles = glob.glob(os.path.join(homedir, 'losspager', 'data', 'semi_*.xlsx')) - hdffiles = glob.glob(os.path.join(homedir, 'losspager', 'data', 'semi_*.hdf')) + homedir = os.path.dirname(os.path.abspath( + __file__)) # where is this script? 
+ excelfiles = glob.glob(os.path.join( + homedir, '..', 'losspager', 'data', 'semi_*.xlsx')) + hdffiles = glob.glob(os.path.join( + homedir, '..', 'losspager', 'data', 'semi_*.hdf')) if args.undo: cmd = 'git checkout %s' % (' '.join(hdffiles)) res, stdout, stderr = get_command_output(cmd) @@ -52,21 +43,17 @@ def main(args): else: print('HDF files have been reverted back to their previous state.') sys.exit(0) - + if args.convert: for excelfile in excelfiles: fpath, fname = os.path.split(excelfile) - fbase, fext = os.path.splitext(fname) - outfile = os.path.join(fpath, fbase+'.hdf') - print('Loading data from %s...' % fname) - panel = load_panel_from_excel(excelfile) - print('Updating %s to %s' % (fname, fbase+'hdf')) - panel.to_hdf(outfile, fbase, mode='w') + fbase, _ = os.path.splitext(fname) + outfile = os.path.join(fpath, fbase + '.hdf') + print('Converting data from %s to HDF...' % fname) + load_container_from_excel(excelfile, outfile) print('Done. Do not forget to commit these changes.') sys.exit(0) - - if __name__ == '__main__': desc = '''This script is intended for developers and scientists involved @@ -87,6 +74,3 @@ if __name__ == '__main__': parser.print_help() sys.exit(0) main(pargs) - - - diff --git a/mailadmin b/bin/mailadmin similarity index 100% rename from mailadmin rename to bin/mailadmin diff --git a/pager b/bin/pager similarity index 85% rename from pager rename to bin/pager index 082645b..43c2c8a 100755 --- a/pager +++ b/bin/pager @@ -1,6 +1,9 @@ #!/usr/bin/env python # local imports +from impactutils.transfer.emailsender import EmailSender +from impactutils.comcat.query import ComCatInfo +from mapio.shake import getHeaderData from losspager.models.exposure import Exposure from losspager.models.econexposure import EconExposure from losspager.models.emploss import EmpiricalLoss @@ -35,6 +38,7 @@ import textwrap from urllib import request import tempfile import socket +import logging # third party imports import matplotlib @@ -43,23 +47,12 @@ import matplotlib # without a display matplotlib.use('Agg') -from mapio.shake import getHeaderData -from impactutils.comcat.query import ComCatInfo -from impactutils.transfer.emailsender import EmailSender - COUNTRY = Country() TIMEFMT = '%Y-%m-%d %H:%M:%S' TSUNAMI_MAG_THRESH = 7.3 -def _print_message(plog, message): - if plog is not None: - plog.Logger.info(message) - else: - print(message) - - def _is_url(gridfile): try: fh = request.urlopen(gridfile) @@ -127,7 +120,7 @@ def _get_release_status(pargs, config, # Are we past the release threshold? event_time = shake_tuple[1]['event_timestamp'] threshold_hours = datetime.timedelta( - seconds=config['release_threshold']*3600) + seconds=config['release_threshold'] * 3600) time_threshold = event_time + threshold_hours if datetime.datetime.utcnow() > time_threshold: is_released = True @@ -181,10 +174,11 @@ def _get_pop_year(event_year, popyears): popyear = popdict['population_year'] popgrid = popdict['population_grid'] if not os.path.isfile(popgrid): - print('Population grid file %s does not exist.' % popgrid) + logging.warning( + 'Population grid file %s does not exist.' 
% popgrid) sys.exit(1) - if abs(popyear-event_year) < tmin: - tmin = abs(popyear-event_year) + if abs(popyear - event_year) < tmin: + tmin = abs(popyear - event_year) pop_year = popyear popfile = popgrid return (pop_year, popfile) @@ -287,8 +281,6 @@ def _cancel(eventid, config): def main(pargs, config): # get the users home directory homedir = os.path.expanduser('~') - script_dir = os.path.dirname( - os.path.abspath(__file__)) # where is this script? # handle cancel messages if pargs.cancel: @@ -316,21 +308,14 @@ def main(pargs, config): admin = PagerAdmin(pager_folder, pager_archive) - plog = None - - # TODO: figure out how may make the stdout/stderr redirect functionality - # work again. Turned off for now. - if not pargs.debug: - # stdout will now be logged as INFO, stderr will be logged as WARNING - mail_hosts = config['mail_hosts'] - mail_from = config['mail_from'] - logfile = os.path.join(pager_folder, 'pager.log') - #print('Writing content to %s' % logfile) - plog = PagerLogger(logfile, redirect=False, - from_address=mail_from, - mail_hosts=mail_hosts) - # failover email alert system - plog.addEmailHandler(config['developers']) + # stdout will now be logged as INFO, stderr will be logged as WARNING + mail_host = config['mail_hosts'][0] + mail_from = config['mail_from'] + developers = config['developers'] + logfile = os.path.join(pager_folder, 'pager.log') + plog = PagerLogger(logfile, developers, mail_from, + mail_host, debug=pargs.debug) + logger = plog.getLogger() try: eid = None @@ -378,23 +363,22 @@ def main(pargs, config): # flag the event as a scenario and yell about it. if etime > datetime.datetime.utcnow(): is_scenario = True - print('Event origin time is in the future! Flagging this as a scenario.') + logger.warning( + 'Event origin time is in the future! Flagging this as a scenario.') if is_scenario: if re.search('scenario', location.lower()) is None: - location = 'Scenario '+location + location = 'Scenario ' + location # create the event directory (if it does not exist), and start logging there - _print_message(plog, 'Creating event directory') + logger.info('Creating event directory') event_folder = admin.createEventFolder(authid, etime) # Stop processing if there is a "stop" file in the event folder stopfile = os.path.join(event_folder, 'stop') if os.path.isfile(stopfile): - _print_message( - plog, '"stop" file found in %s. Stopping processing, returning with 1.' % (event_folder)) - print('"stop" file found in %s. Stopping processing, returning with 1.' % ( - event_folder)) + fmt = '"stop" file found in %s. Stopping processing, returning with 1.' + logger.info(fmt % (event_folder)) sys.exit(1) pager_version = get_pager_version(event_folder) @@ -402,8 +386,10 @@ def main(pargs, config): event_folder, 'version.%03d' % pager_version) os.makedirs(version_folder) event_logfile = os.path.join(version_folder, 'event.log') - if plog is not None: - plog.switchToEventFileHandler(event_logfile) + + # this will turn off the global rotating log file + # and switch to the one in the version folder. + plog.setVersionHandler(event_logfile) # Copy the grid.xml file to the version folder # sometimes (usu when testing) the input grid isn't called grid.xml. Rename it here. 
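# For orientation on the logging change in bin/pager above: the old
# _print_message(plog, ...) helper and hand-rolled email traceback block are
# replaced by a PagerLogger that hands back a standard logging.Logger. A
# minimal sketch of that flow, using only the calls visible in this hunk; the
# import path, file paths and addresses below are illustrative assumptions,
# not part of the patch.
from losspager.utils.logger import PagerLogger   # assumed import path

plog = PagerLogger('/data/pager/pager.log',       # global log file
                   ['devs@example.org'],          # developer (failover email) list
                   'pager@example.org',           # mail_from
                   'smtp.example.org',            # first entry of config['mail_hosts']
                   debug=False)
logger = plog.getLogger()
logger.info('Creating event directory')           # written to the global log
# once the version folder exists, switch logging into it
plog.setVersionHandler('/data/pager/us1000abcd/version.001/event.log')
logger.info('Calculating population exposure.')
logger.critical('Error transferring PAGER content.')  # critical entries also go out by email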
@@ -420,7 +406,6 @@ def main(pargs, config): etime = shake_tuple[1]['event_timestamp'] elat = shake_tuple[1]['lat'] elon = shake_tuple[1]['lon'] - edepth = shake_tuple[1]['depth'] emag = shake_tuple[1]['magnitude'] # get the year of the event @@ -429,11 +414,11 @@ def main(pargs, config): # find the population data collected most closely to the event_year pop_year, popfile = _get_pop_year( event_year, config['model_data']['population_data']) - _print_message( - plog, 'Population year: %i Population file: %s\n' % (pop_year, popfile)) + logger.info('Population year: %i Population file: %s\n' % + (pop_year, popfile)) # Get exposure results - _print_message(plog, 'Calculating population exposure.') + logger.info('Calculating population exposure.') isofile = config['model_data']['country_grid'] expomodel = Exposure(popfile, pop_year, isofile) exposure = None @@ -447,15 +432,15 @@ def main(pargs, config): else: ccode = cdict['ISO2'] - _print_message(plog, 'Country code at epicenter is %s' % ccode) + logger.info('Country code at epicenter is %s' % ccode) # get fatality results, if requested - _print_message(plog, 'Calculating empirical fatalities.') + logger.info('Calculating empirical fatalities.') fatmodel = EmpiricalLoss.fromDefaultFatality() fatdict = fatmodel.getLosses(exposure) # get economic results, if requested - _print_message(plog, 'Calculating economic exposure.') + logger.info('Calculating economic exposure.') econexpmodel = EconExposure(popfile, pop_year, isofile) ecomodel = EmpiricalLoss.fromDefaultEconomic() econexposure = econexpmodel.calcExposure(gridfile) @@ -463,7 +448,7 @@ def main(pargs, config): shakegrid = econexpmodel.getShakeGrid() # Get semi-empirical losses - _print_message(plog, 'Calculating semi-empirical fatalities.') + logger.info('Calculating semi-empirical fatalities.') urbanfile = config['model_data']['urban_rural_grid'] if not os.path.isfile(urbanfile): raise PagerException( @@ -474,7 +459,7 @@ def main(pargs, config): semiloss, resfat, nonresfat = semi.getLosses(gridfile) # get all of the other components of PAGER - _print_message(plog, 'Getting all comments.') + logger.info('Getting all comments.') # get the fatality and economic comments impact1, impact2 = get_impact_comments( fatdict, ecodict, econexposure, event_year, ccode) @@ -487,14 +472,14 @@ def main(pargs, config): elat, elon, emag, exposure, fatdict) # generate the probability plots - _print_message(plog, 'Drawing probability plots.') + logger.info('Drawing probability plots.') fat_probs_file, eco_probs_file = _draw_probs(fatmodel, fatdict, ecomodel, ecodict, version_folder) # generate the exposure map exposure_base = os.path.join(version_folder, 'exposure') - _print_message(plog, 'Generating exposure map...') + logger.info('Generating exposure map...') oceanfile = config['model_data']['ocean_vectors'] oceangrid = config['model_data']['ocean_grid'] cityfile = config['model_data']['city_file'] @@ -504,7 +489,7 @@ def main(pargs, config): pdf_file, png_file, mapcities = draw_contour(shake_grid, pop_grid, oceanfile, oceangrid, cityfile, exposure_base, borderfile, is_scenario=is_scenario) - _print_message(plog, 'Generated exposure map %s' % pdf_file) + logger.info('Generated exposure map %s' % pdf_file) # figure out whether this event has been "released". is_released = _get_release_status(pargs, config, @@ -514,24 +499,24 @@ def main(pargs, config): # Create a data object to encapsulate everything we know about the PAGER # results, and then serialize that to disk in the form of a number of JSON files. 
- _print_message(plog, 'Making PAGER Data object.') + logger.info('Making PAGER Data object.') doc = PagerData() timezone_file = config['model_data']['timezones_file'] elapsed = pargs.elapsed doc.setInputs(shakegrid, timezone_file, pager_version, shakegrid.getEventDict()['event_id'], authid, tsunami, location, is_released, elapsed=elapsed) - _print_message(plog, 'Setting inputs.') + logger.info('Setting inputs.') doc.setExposure(exposure, econexposure) - _print_message(plog, 'Setting exposure.') + logger.info('Setting exposure.') doc.setModelResults(fatmodel, ecomodel, fatdict, ecodict, semiloss, resfat, nonresfat) - _print_message(plog, 'Setting comments.') + logger.info('Setting comments.') doc.setComments(impact1, impact2, struct_comment, historical_comment, secondary_comment) - _print_message(plog, 'Setting map info.') + logger.info('Setting map info.') doc.setMapInfo(cityfile, mapcities) - _print_message(plog, 'Validating.') + logger.info('Validating.') doc.validate() # if we have determined that the event is a scenario (origin time is in the future) @@ -542,12 +527,12 @@ def main(pargs, config): json_folder = os.path.join(version_folder, 'json') os.makedirs(json_folder) - _print_message(plog, 'Saving output to JSON.') + logger.info('Saving output to JSON.') doc.saveToJSON(json_folder) - _print_message(plog, 'Saving output to XML.') + logger.info('Saving output to XML.') doc.saveToLegacyXML(version_folder) - _print_message(plog, 'Creating onePAGER pdf...') + logger.info('Creating onePAGER pdf...') onepager_pdf, error = create_onepager(doc, version_folder) if onepager_pdf is None: raise PagerException( @@ -564,42 +549,40 @@ def main(pargs, config): message_pager(config, onepager_pdf, doc) # run transfer, as appropriate and as specified by config - res, msg = transfer(config, doc, authid, authsource, + # the PAGER product eventsource and eventsourcecode should + # match the input ShakeMap settings for these properties. + # This can possibly cause confusion if a regional ShakeMap is + # trumped with one from NEIC, but this should happen less often + # than an NEIC origin being made authoritative over a regional one. + eventsource = network + eventsourcecode = eid + res, msg = transfer(config, doc, eventsourcecode, eventsource, version_folder, is_scenario=is_scenario) - _print_message(plog, msg) + logger.info(msg) if not res: - pass - # TODO: figure out what's broken with logging, send log email here + logger.critical('Error transferring PAGER content. 
"%s"' % msg) - if plog is not None: - plog.stopLogging() print('Created onePAGER pdf %s' % onepager_pdf) + logger.info('Created onePAGER pdf %s' % onepager_pdf) - print('Done.') + logger.info('Done.') sys.exit(0) except Exception as e: - if plog is not None: - f = io.StringIO() - traceback.print_exc(file=f) - msg = e - msg = '%s\n %s' % (str(msg), f.getvalue()) - hostname = socket.gethostname() - msg = msg + '\n' + 'Error occurred on %s\n' % (hostname) - if gridfile is not None: - msg = msg + '\n' + 'Error on file: %s\n' % (gridfile) - if eid is not None: - msg = msg + '\n' + 'Error on event: %s\n' % (eid) - if pager_version is not None: - msg = msg + '\n' + 'Error on version: %i\n' % (pager_version) - f.close() - plog.scream(msg) - plog.stopLogging() - print('Sent error to email') - - else: - sys.stderr.write(str(e)+'\n') - traceback.print_exc() - print('Something bad happened!') + f = io.StringIO() + traceback.print_exc(file=f) + msg = e + msg = '%s\n %s' % (str(msg), f.getvalue()) + hostname = socket.gethostname() + msg = msg + '\n' + 'Error occurred on %s\n' % (hostname) + if gridfile is not None: + msg = msg + '\n' + 'Error on file: %s\n' % (gridfile) + if eid is not None: + msg = msg + '\n' + 'Error on event: %s\n' % (eid) + if pager_version is not None: + msg = msg + '\n' + 'Error on version: %i\n' % (pager_version) + f.close() + logger.critical(msg) + logger.info('Sent error to email') sys.exit(1) diff --git a/pagerlite b/bin/pagerlite similarity index 100% rename from pagerlite rename to bin/pagerlite diff --git a/sync_users b/bin/sync_users similarity index 100% rename from sync_users rename to bin/sync_users diff --git a/twopager b/bin/twopager similarity index 100% rename from twopager rename to bin/twopager diff --git a/updatepager b/bin/updatepager similarity index 100% rename from updatepager rename to bin/updatepager diff --git a/install.sh b/install.sh index edeecca..748ae48 100755 --- a/install.sh +++ b/install.sh @@ -128,7 +128,7 @@ package_list=' pycrypto pyproj pytables - python=3.5 + python=3.6 pytest pytest-cov pytest-mpl diff --git a/losspager/data/expocat.xlsx b/losspager/data/expocat.xlsx index bc67f15..eb4ad1f 100644 Binary files a/losspager/data/expocat.xlsx and b/losspager/data/expocat.xlsx differ diff --git a/losspager/data/pager_regions.xlsx b/losspager/data/pager_regions.xlsx index 22b0070..864b5e5 100644 Binary files a/losspager/data/pager_regions.xlsx and b/losspager/data/pager_regions.xlsx differ diff --git a/losspager/data/semi_casualty.hdf b/losspager/data/semi_casualty.hdf index 63a5b28..6626571 100644 Binary files a/losspager/data/semi_casualty.hdf and b/losspager/data/semi_casualty.hdf differ diff --git a/losspager/data/semi_collapse_mmi.hdf b/losspager/data/semi_collapse_mmi.hdf index 02b1597..af1c774 100644 Binary files a/losspager/data/semi_collapse_mmi.hdf and b/losspager/data/semi_collapse_mmi.hdf differ diff --git a/losspager/data/semi_inventory.hdf b/losspager/data/semi_inventory.hdf index 5a474fa..478f810 100644 Binary files a/losspager/data/semi_inventory.hdf and b/losspager/data/semi_inventory.hdf differ diff --git a/losspager/data/semi_workforce.hdf b/losspager/data/semi_workforce.hdf index 06ca49a..cea7cd5 100644 Binary files a/losspager/data/semi_workforce.hdf and b/losspager/data/semi_workforce.hdf differ diff --git a/losspager/io/hazus.py b/losspager/io/hazus.py index d7a8f9a..8f9a175 100644 --- a/losspager/io/hazus.py +++ b/losspager/io/hazus.py @@ -5,6 +5,7 @@ from urllib.request import urlopen from urllib.parse import urljoin import 
shutil +import logging # third party imports import fiona @@ -97,7 +98,7 @@ def fetch_hazus(url_or_dir, version_folder): outfile = os.path.join(version_folder, href) with open(outfile, 'wb') as f: f.write(bytes) - print('Wrote %s.' % outfile) + logging.info('Wrote %s.' % outfile) files_retrieved[linkname] = outfile found_link = True break @@ -441,11 +442,11 @@ def drawHazusMap(self, shakegrid, filename, model_config): # zorder=EPICENTER_ZORDER, transform=geoproj) # save our map out to a file - print('Saving to %s' % filename) + logging.info('Saving to %s' % filename) t0 = time.time() plt.savefig(filename, dpi=300) t1 = time.time() - print('Done saving map - %.2f seconds' % (t1-t0)) + logging.info('Done saving map - %.2f seconds' % (t1-t0)) def createTaggingTables(self): df = pd.read_csv(self._occupancy_file) diff --git a/losspager/io/pagerdata.py b/losspager/io/pagerdata.py index c5aaba9..05e6462 100644 --- a/losspager/io/pagerdata.py +++ b/losspager/io/pagerdata.py @@ -3,6 +3,7 @@ from datetime import datetime import os.path import json +import logging # third party libraries from lxml import etree @@ -215,11 +216,11 @@ def validate(self): self._pagerdict['population_exposure'] = self._setPopulationExposure() self._pagerdict['economic_exposure'] = self._setEconomicExposure() self._pagerdict['model_results'] = self._setModelResults() - print('In pagerdata, getting city table.') + logging.info('In pagerdata, getting city table.') self._pagerdict['city_table'], self._pagerdict['map_cities'] = self._getCityTable() - print('In pagerdata, getting historical earthquakes.') + logging.info('In pagerdata, getting historical earthquakes.') self._pagerdict['historical_earthquakes'] = self._getHistoricalEarthquakes() - print('In pagerdata, getting comments.') + logging.info('In pagerdata, getting comments.') self._pagerdict['comments'] = self._getComments() self._is_validated = True #########Setters######## @@ -1205,13 +1206,13 @@ def _getHistoricalEarthquakes(self): # if we're re-running an older event, don't include more recent events than that in the table. 
expocat.excludeFutureEvents(self.time) clat, clon = self._event_dict['lat'], self._event_dict['lon'] - print('Select events by radius.') + logging.info('Select events by radius.') inbounds = expocat.selectByRadius(clat, clon, EVENT_RADIUS) maxmmi = self._pagerdict['pager']['maxmmi'] nmmi = self._nmmi deaths = self._fatmodel_results['TotalFatalities'] etime = self._event_dict['event_timestamp'] - print('Select historical earthquakes.') + logging.info('Select historical earthquakes.') eventlist = inbounds.getHistoricalEvents( maxmmi, nmmi, deaths, clat, clon) for event in eventlist: diff --git a/losspager/io/twopager.py b/losspager/io/twopager.py index 29ecd97..1bfdf60 100644 --- a/losspager/io/twopager.py +++ b/losspager/io/twopager.py @@ -3,6 +3,7 @@ from datetime import datetime from collections import OrderedDict from math import log10 +import logging # third party imports from impactutils.textformat.text import pop_round_short, round_to_nearest @@ -153,14 +154,14 @@ def create_twopager(pdata, hazinfo, version_dir): texify(pdict['comments']['impact2'])) # Hazus arrow color and relative position - hazdel = (hazinfo.hazloss)/LOSS_CONV + hazdel = (hazinfo.hazloss) / LOSS_CONV if hazdel < 0.1: hazdelval = 0.1 elif hazdel > 1000000: hazdelval = 1000000 else: hazdelval = hazdel - arrowloc = (((6 - log10(hazdelval))*0.83)-0.07) + arrowloc = (((6 - log10(hazdelval)) * 0.83) - 0.07) # distance (in cm) to the left from right end of the econ histogram template = template.replace("[ARROWSHIFT]", '%.2f' % arrowloc) @@ -207,7 +208,7 @@ def create_twopager(pdata, hazinfo, version_dir): try: ccinfo = ComCatInfo(eventid) eventid, allids = ccinfo.getAssociatedIds() - event_url = ccinfo.getURL()+'#pager' + event_url = ccinfo.getURL() + '#pager' except: event_url = DEFAULT_PAGER_URL @@ -228,7 +229,7 @@ def create_twopager(pdata, hazinfo, version_dir): os.chdir(version_dir) cmd = '%s -interaction nonstopmode --output-directory %s %s' % ( LATEX_TO_PDF_BIN, version_dir, tex_output) - print('Running %s...' % cmd) + logging.info('Running %s...' % cmd) res, stdout, stderr = get_command_output(cmd) os.chdir(cwd) if not res: diff --git a/losspager/models/exposure.py b/losspager/models/exposure.py index 4582cd5..c4eca6a 100644 --- a/losspager/models/exposure.py +++ b/losspager/models/exposure.py @@ -16,6 +16,7 @@ SCENARIO_WARNING = 10 # number of years after date of population data to issue a warning SCENARIO_ERROR = 20 # number of years after date of population data to raise an exception + def calc_exposure(mmidata, popdata, isodata): """Calculate population exposure to shaking per country. 
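# For orientation on the calc_exposure() hunk below: population is binned into
# one-intensity-wide MMI bands (centered on integer MMI) separately for each
# country code. A tiny self-contained illustration of the same masking idea
# with made-up 2x2 grids; the production code uses ravel_multi_index and
# intersect1d to the same effect.
import numpy as np

mmidata = np.array([[5.4, 6.1], [6.6, 7.2]])       # shaking intensity (MMI)
popdata = np.array([[100., 200.], [300., 400.]])   # population counts
isodata = np.array([[840, 840], [840, 124]])       # numeric country codes

ccode = 840
expsum = np.zeros(10, dtype=np.uint32)
cmask = (isodata == ccode)
for mmi in range(1, 11):
    band = (mmidata >= mmi - 0.5) & (mmidata < mmi + 0.5)
    expsum[mmi - 1] = int(np.nansum(popdata[cmask & band]))

# MMI 5 band [4.5,5.5) -> 100, MMI 6 band [5.5,6.5) -> 200,
# MMI 7 band [6.5,7.5) -> 300 (the 400 cell belongs to country 124)
print(dict(zip(range(1, 11), expsum)))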
@@ -37,16 +38,18 @@ def calc_exposure(mmidata, popdata, isodata): cidx = np.ravel_multi_index(np.where(isodata == ccode), isodata.shape) expsum = np.zeros((10), dtype=np.uint32) for mmi in range(1, 11): - mmi_lower = mmi-0.5 - mmi_upper = mmi+0.5 - midx = np.ravel_multi_index(np.where((mmidata >= mmi_lower) & (mmidata < mmi_upper)), mmidata.shape) + mmi_lower = mmi - 0.5 + mmi_upper = mmi + 0.5 + midx = np.ravel_multi_index( + np.where((mmidata >= mmi_lower) & (mmidata < mmi_upper)), mmidata.shape) idx = np.unravel_index(np.intersect1d(cidx, midx), mmidata.shape) popsum = np.nansum(popdata[idx]) - expsum[mmi-1] = int(popsum) + expsum[mmi - 1] = int(popsum) exposures[ccode] = expsum[:] return exposures + class Exposure(object): def __init__(self, popfile, popyear, isofile, popgrowth=None): """Create Exposure object, with population and country code grid files, @@ -72,7 +75,7 @@ def __init__(self, popfile, popyear, isofile, popgrowth=None): self._country = Country() self._pop_class = get_file_type(self._popfile) self._iso_class = get_file_type(self._isofile) - + def calcExposure(self, shakefile): """Calculate population exposure to shaking, per country, plus total exposure across all countries. @@ -87,7 +90,7 @@ def calcExposure(self, shakefile): """ # get shakemap geodict shakedict = ShakeGrid.getFileGeoDict(shakefile, adjust='res') - + # get population geodict popdict, t = self._pop_class.getFileGeoDict(self._popfile) @@ -99,7 +102,7 @@ def calcExposure(self, shakefile): if not popdict.intersects(shakedict): expdict = {'UK': np.zeros((10,)), 'TotalExposure': np.zeros((10,))} return expdict - + if popdict == shakedict == isodict: # special case, probably for testing... self._shakegrid = ShakeGrid.load(shakefile, adjust='res') @@ -133,12 +136,13 @@ def calcExposure(self, shakefile): msg = '''The input ShakeMap event year is more than %i years from the population date. PAGER results for events this far in the future are not valid. 
Stopping.''' % SCENARIO_ERROR raise PagerException(msg) - + ucodes = np.unique(isodata) for ccode in ucodes: cidx = (isodata == ccode) - popdata[cidx] = self._popgrowth.adjustPopulation(popdata[cidx], ccode, self._popyear, eventyear) - + popdata[cidx] = self._popgrowth.adjustPopulation( + popdata[cidx], ccode, self._popyear, eventyear) + exposure_dict = calc_exposure(mmidata, popdata, isodata) newdict = {} # Get rolled up exposures @@ -157,11 +161,12 @@ def calcExposure(self, shakefile): # get the maximum MMI value along any of the four map edges nrows, ncols = mmidata.shape top = mmidata[0, 0:ncols].max() - bottom = mmidata[nrows-1, 0:ncols].max() + bottom = mmidata[nrows - 1, 0:ncols].max() left = mmidata[0:nrows, 0].max() - right = mmidata[0:nrows, ncols-1].max() - newdict['maximum_border_mmi'] = np.array([top, bottom, left, right]).max() - + right = mmidata[0:nrows, ncols - 1].max() + newdict['maximum_border_mmi'] = np.array( + [top, bottom, left, right]).max() + return newdict def getPopulationGrid(self): @@ -193,7 +198,3 @@ def getShakeGrid(self): if self._shakegrid is None: raise PagerException('calcExposure() method must be called first.') return self._shakegrid - - - - diff --git a/losspager/models/semimodel.py b/losspager/models/semimodel.py index d32cea7..623dfdb 100644 --- a/losspager/models/semimodel.py +++ b/losspager/models/semimodel.py @@ -4,6 +4,7 @@ from datetime import datetime, timedelta import os.path import tempfile +import logging # third party imports import pandas as pd @@ -12,6 +13,8 @@ from mapio.gmt import GMTGrid from mapio.geodict import GeoDict +# neic imports +from impactutils.io.container import HDFContainer # local imports from .growth import PopulationGrowth @@ -188,39 +191,39 @@ def pop_dist(popi, workforce, time, dclass): FNOWFA = 0.001 if time == 'day': - respop = popi * (FDRWF * fnwf + - FDRWFI * fwf * f_ind + - FDRWFS * fwf * fser + - FDRWFA * fwf * fagr) - nrpop = popi * (FDNRWFI * fwf * f_ind + - FDNRWFS * fwf * fser + FDNRWFA * fwf * fagr + FDNRSCH * fnwf) - outpop = popi * (FDOWF * fnwf + - FDOWFI * fwf * f_ind + - FDOWFS * fwf * fser + - FDOWFA * fwf * fagr) + respop = popi * (FDRWF * fnwf + + FDRWFI * fwf * f_ind + + FDRWFS * fwf * fser + + FDRWFA * fwf * fagr) + nrpop = popi * (FDNRWFI * fwf * f_ind + + FDNRWFS * fwf * fser + FDNRWFA * fwf * fagr + FDNRSCH * fnwf) + outpop = popi * (FDOWF * fnwf + + FDOWFI * fwf * f_ind + + FDOWFS * fwf * fser + + FDOWFA * fwf * fagr) elif time == 'transit': - respop = popi * (FTRWF * fnwf + - FTRWFI * fwf * f_ind + - FTRWFS * fwf * fser + - FTRWFA * fwf * fagr) - nrpop = popi * (FTNRWFI * fwf * f_ind + - FTNRWFS * fwf * fser + FTNRWFA * fwf * fagr) - outpop = popi * (FTOWF * fnwf + - FTOWFI * fwf * f_ind + - FTOWFS * fwf * fser + - FTOWFA * fwf * fagr) + respop = popi * (FTRWF * fnwf + + FTRWFI * fwf * f_ind + + FTRWFS * fwf * fser + + FTRWFA * fwf * fagr) + nrpop = popi * (FTNRWFI * fwf * f_ind + + FTNRWFS * fwf * fser + FTNRWFA * fwf * fagr) + outpop = popi * (FTOWF * fnwf + + FTOWFI * fwf * f_ind + + FTOWFS * fwf * fser + + FTOWFA * fwf * fagr) elif time == 'night': - respop = popi * (FNRWF * fnwf + - FNRWFI * fwf * f_ind + - FNRWFS * fwf * fser + - FNRWFA * fwf * fagr) - nrpop = popi * (FNNRWFI * fwf * f_ind + - FNNRWFS * fwf * fser + FNNRWFA * fwf * fagr) - outpop = popi * (FNOWF * fnwf + - FNOWFI * fwf * f_ind + - FNOWFS * fwf * fser + - FNOWFA * fwf * fagr) + respop = popi * (FNRWF * fnwf + + FNRWFI * fwf * f_ind + + FNRWFS * fwf * fser + + FNRWFA * fwf * fagr) + nrpop = popi * (FNNRWFI * fwf * f_ind + 
+ FNNRWFS * fwf * fser + FNNRWFA * fwf * fagr) + outpop = popi * (FNOWF * fnwf + + FNOWFI * fwf * f_ind + + FNOWFS * fwf * fser + + FNOWFA * fwf * fagr) respop = np.atleast_1d(np.squeeze(respop)) nrpop = np.atleast_1d(np.squeeze(nrpop)) @@ -256,25 +259,25 @@ def get_time_of_day(dtime, lon): Tuple of time of day,local event year, and local event hour """ # inputs datetime in utc, and local longitude - toffset = lon/15 - event_time = dtime + timedelta(0, toffset*3600) + toffset = lon / 15 + event_time = dtime + timedelta(0, toffset * 3600) event_year = event_time.year event_hour = event_time.hour timeofday = None if event_hour >= DAY_START_HOUR and event_hour < DAY_END_HOUR: timeofday = 'day' - transit1 = (event_hour >= TRANSIT_START_HOUR_MORNING and event_hour < - TRANSIT_END_HOUR_MORNING) - transit2 = (event_hour >= TRANSIT_START_HOUR_EVENING and event_hour < - TRANSIT_END_HOUR_EVENING) + transit1 = (event_hour >= TRANSIT_START_HOUR_MORNING and event_hour + < TRANSIT_END_HOUR_MORNING) + transit2 = (event_hour >= TRANSIT_START_HOUR_EVENING and event_hour + < TRANSIT_END_HOUR_EVENING) if transit1 or transit2: timeofday = 'transit' - night1 = (event_hour >= NIGHT_START_HOUR_EVENING and event_hour <= - NIGHT_END_HOUR_EVENING) - night2 = (event_hour >= NIGHT_START_HOUR_MORNING and event_hour <= - NIGHT_END_HOUR_MORNING) + night1 = (event_hour >= NIGHT_START_HOUR_EVENING and event_hour + <= NIGHT_END_HOUR_EVENING) + night2 = (event_hour >= NIGHT_START_HOUR_MORNING and event_hour + <= NIGHT_END_HOUR_MORNING) if night1 or night2: timeofday = 'night' return (timeofday, event_year, event_hour) @@ -327,10 +330,10 @@ def make_test_semi_model(ccode, timeofday, density, popvalue, mmi): 'map_status': 'RELEASED', 'shakemap_event_type': 'SCENARIO'} uncdict = {'mmi': (1.0, 1)} - popdata = np.ones((2, 2), dtype=np.float32)*(popvalue)/4 - isodata = np.ones((2, 2), dtype=np.int16)*ucode - urbdata = np.ones((2, 2), dtype=np.int16)*density - mmidata = np.ones((2, 2), dtype=np.float32)*mmi + popdata = np.ones((2, 2), dtype=np.float32) * (popvalue) / 4 + isodata = np.ones((2, 2), dtype=np.int16) * ucode + urbdata = np.ones((2, 2), dtype=np.int16) * density + mmidata = np.ones((2, 2), dtype=np.float32) * mmi geodict = GeoDict({'xmin': 0.5, 'xmax': 1.5, 'ymin': 0.5, 'ymax': 1.5, 'dx': 1.0, 'dy': 1.0, 'nx': 2, 'ny': 2}) popgrid = GMTGrid(popdata, geodict) @@ -368,13 +371,13 @@ def make_test_semi_model(ccode, timeofday, density, popvalue, mmi): for key, value in resfat[ccode].items(): if value < 1: value = np.floor(value) - newresfat[ccode][key] = value/4.0 - popsum += value/4.0 + newresfat[ccode][key] = value / 4.0 + popsum += value / 4.0 for key, value in nonresfat[ccode].items(): - newnonresfat[ccode][key] = value/4.0 + newnonresfat[ccode][key] = value / 4.0 if value < 1: value = np.floor(value) - popsum += value/4.0 + popsum += value / 4.0 popsum = int(popsum) finally: files = [popfile, isofile, urbfile, shakefile] @@ -389,7 +392,7 @@ def __init__(self, inventory, collapse, casualty, workforce, growth): """Create Semi-Empirical Fatality Model object. :param inventory: - Pandas Panel, containing DataFrames named: + HDFContainer, containing DataFrames named: - 'BuildingTypes', - 'RuralNonResidential' - 'RuralResidential' @@ -407,18 +410,18 @@ def __init__(self, inventory, collapse, casualty, workforce, growth): - Building Codes, as listed and described in BuildingTypes Dataframe. 
:param collapse: - Pandas Panel, where first index is country (by two-letter country code), columns are: + HDFContainer, where first index is country (by two-letter country code), columns are: - BuildingCode: Building code as above. - (IMT)_6.0 -> (IMT)_9.0 Columns with collapse rates at each (IMT) interval, where (IMT) could be MMI,PGA,PGV,PSA1.0, etc. :param casualty: - Pandas panel, where first index is country (by two-letter country code), columns are: + HDFContainer, where first index is country (by two-letter country code), columns are: - BuildingCode: Building code as above. - CasualtyDay: Casualty rate, given collapse, during the day. - CasualtyNight: Casualty rate, given collapse, during the night. :param workforce: - Pandas panel consisting of a single dataframe, where rows are by country, and columns are: + HDFContainer consisting of a single dataframe, where rows are by country, and columns are: - CountryCode two letter ISO country code. - CountryCode name of country. - WorkforceTotal Fraction of the country population in the workforce. @@ -455,24 +458,26 @@ def fromFiles(cls, inventory_file, collapse_file, casualty_file, workforce_file) """Create SemiEmpiricalFatality object from a number of input files. :param inventory_file: - HDF5 file containing Semi-Empirical building inventory data in a Pandas Panel. (described in __init__). + HDF5 file containing Semi-Empirical building inventory data in an HDFContainer. (described in __init__). :param collapse_file: - HDF5 file containing Semi-Empirical collapse rate data in a Pandas Panel. (described in __init__). + HDF5 file containing Semi-Empirical collapse rate data in an HDFContainer. (described in __init__). :param casualty_file: - HDF5 file containing Semi-Empirical casualty rate data in a Pandas Panel.(described in __init__). + HDF5 file containing Semi-Empirical casualty rate data in an HDFContainer.(described in __init__). :param workforce_file: - HDF5 file containing Semi-Empirical workforce data in a Pandas Panel. (described in __init__). + HDF5 file containing Semi-Empirical workforce data in an HDFContainer. (described in __init__). :param growth_file: Excel spreadsheet containing population growth rate data (described in PopulationGrowth.fromUNSpreadsheet()). :returns: SemiEmpiricalFatality object. """ # turn the inventory,collapse, and casualty spreadsheets into Panels... - inventory = pd.read_hdf(inventory_file) - collapse = pd.read_hdf(collapse_file) - casualty = pd.read_hdf(casualty_file) - workforce = pd.read_hdf(workforce_file) - workforce = workforce.Workforce # extract the one dataframe from the Panel + inventory = HDFContainer.load(inventory_file) + collapse = HDFContainer.load(collapse_file) + casualty = HDFContainer.load(casualty_file) + workforce = HDFContainer.load(workforce_file) + # extract the one dataframe from the Panel + workforce = workforce.getDataFrame('Workforce') + workforce = workforce.set_index('CountryCode') # read the growth spreadsheet into a PopulationGrowth object... popgrowth = PopulationGrowth.fromDefault() @@ -515,7 +520,8 @@ def getBuildingDesc(self, btype, desctype='short'): :returns: Either a short, operational, or long description of building types. 
""" - bsheet = self._inventory.BuildingTypes + bsheet = self._inventory.getDataFrame('BuildingTypes') + bsheet = bsheet.set_index('Code') row = bsheet.loc[btype] if desctype == 'short': return row['ShortDescription'] @@ -551,7 +557,9 @@ def getCollapse(self, ccode, mmi, inventory): :returns: Pandas Series object containing the collapse rates for given building types, ccode, and MMI. """ - collapse_frame = self._collapse.loc[ccode].loc[inventory.index] + collapse_frame = self._collapse.getDataFrame(ccode) + collapse_frame = collapse_frame.set_index('BuildingCode') + collapse_frame = collapse_frame.loc[inventory.index] mmicol = 'MMI_%s' % str(mmi) collapse = collapse_frame[mmicol] return collapse @@ -568,7 +576,8 @@ def getFatalityRates(self, ccode, timeofday, inventory): :returns: Pandas Series object containing fatality rates for given country, time of day, and inventory. """ - fatalframe = self._casualty[ccode] + fatalframe = self._casualty.getDataFrame(ccode) + fatalframe = fatalframe.set_index('BuildingCode') timecol = TIMES[timeofday] fatrates = fatalframe.loc[inventory.index][timecol] return fatrates @@ -584,20 +593,20 @@ def getInventories(self, ccode, density): Two Pandas Series: 1) Residential Inventory and 2) Non-Residential Inventory. """ if density == URBAN: - resinv = self._inventory.UrbanResidential - nresinv = self._inventory.UrbanNonResidential + resinv = self._inventory.getDataFrame('UrbanResidential') + nresinv = self._inventory.getDataFrame('UrbanNonResidential') else: - resinv = self._inventory.RuralResidential - nresinv = self._inventory.RuralNonResidential - resrow = resinv.loc[ccode] # pandas series of residential inventory + resinv = self._inventory.getDataFrame('RuralResidential') + nresinv = self._inventory.getDataFrame('RuralNonResidential') + resinv = resinv.set_index('CountryCode') + nresinv = nresinv.set_index('CountryCode') + + # pandas series of residential inventory + resrow = resinv.loc[ccode] + resrow = resrow.drop('CountryName') # pandas series of non-residential inventory nresrow = nresinv.loc[ccode] - # remove the indices that aren't building type codes - these are present because - # Panels have the same columns in every dataframe - resrow = resrow.drop(['ShortDescription', 'OperationalDescription', 'LongDescription', - 'CountryName']) - nresrow = nresrow.drop(['ShortDescription', 'OperationalDescription', 'LongDescription', - 'CountryName']) + nresrow = nresrow.drop('CountryName') # now trim down the series to only include finite and non-zero values resrow = resrow[resrow.notnull()] @@ -657,7 +666,7 @@ def getLosses(self, shakefile): # should become 5.5, 5.24 should become 5.0, etc.) # TODO: Someday, make this more general to include perhaps grids of all IMT values, or # at least the ones we have collapse data for. - mmidata = np.round(shakegrid.getLayer('mmi').getData()/0.5)*0.5 + mmidata = np.round(shakegrid.getLayer('mmi').getData() / 0.5) * 0.5 # get arrays from our other grids popdata = popgrid.getData() @@ -697,7 +706,8 @@ def getLosses(self, shakefile): # get the workforce Series data for the current country wforce = self.getWorkforce(ccode) if wforce is None: - print('No workforce data for %s. Skipping.' % (cdict['Name'])) + logging.info('No workforce data for %s. Skipping.' 
% + (cdict['Name'])) continue # loop over MMI values 6-9 @@ -790,7 +800,7 @@ def getLosses(self, shakefile): nonres_fatal_by_ccode[ccode] = nonres_fatal_by_btype.copy() # increment the total number of fatalities - ntotal += int(sum(res_fatal_by_btype.values()) + - sum(nonres_fatal_by_btype.values())) + ntotal += int(sum(res_fatal_by_btype.values()) + + sum(nonres_fatal_by_btype.values())) return (ntotal, res_fatal_by_ccode, nonres_fatal_by_ccode) diff --git a/losspager/onepager/comment.py b/losspager/onepager/comment.py index 9a27a58..e5b86f0 100644 --- a/losspager/onepager/comment.py +++ b/losspager/onepager/comment.py @@ -1,5 +1,6 @@ # stdlib imports import re +import logging # third party imports from scipy.special import erf @@ -65,6 +66,7 @@ EPS = 1e-12 # if expected value is zero, take the log of this instead SEARCH_RADIUS = 400 # kilometer radius to search for historical earthquakes + def get_gdp_comment(ecodict, ecomodel, econexposure, event_year, epicode): """Create a comment on the GDP impact of a given event in the most impacted country. @@ -89,8 +91,8 @@ def get_gdp_comment(ecodict, ecomodel, econexposure, event_year, epicode): # get the country code of the country with the highest losses dccode = '' dmax = 0 - expected = ecodict['TotalDollars']/1e6 - if ecodict['TotalDollars'] > 0: + expected = ecodict['TotalDollars'] / 1e6 + if ecodict['TotalDollars'] > 0: for ccode, value in ecodict.items(): if ccode == 'TotalDollars': continue @@ -114,24 +116,27 @@ def get_gdp_comment(ecodict, ecomodel, econexposure, event_year, epicode): gdp_obj = GDP.fromDefault() gdp, outccode = gdp_obj.getGDP(dccode, event_year) country = Country() - print('ccode: %s, dccode: %s, outccode: %s' % (ccode, dccode, outccode)) + logging.info('ccode: %s, dccode: %s, outccode: %s' % + (ccode, dccode, outccode)) cinfo = country.getCountry(outccode) if cinfo != 'UK': pop = cinfo['Population'] else: pop = 0 - T = (pop * gdp)/1e6 + T = (pop * gdp) / 1e6 if T == 0: return '' - percent = erf(1/np.sqrt(2)) - plow = round(np.exp(np.log(max(expected, EPS))-eco_gvalue * invphi(percent))) - phigh =round(np.exp(eco_gvalue * invphi(percent) + np.log(max(expected, EPS)))) + percent = erf(1 / np.sqrt(2)) + plow = round(np.exp(np.log(max(expected, EPS)) + - eco_gvalue * invphi(percent))) + phigh = round(np.exp(eco_gvalue * invphi(percent) + + np.log(max(expected, EPS)))) if plow != 0: - ptlow = int(plow*1e2/T) + ptlow = int(plow * 1e2 / T) else: ptlow = 0 if phigh != 0: - pthigh = int(phigh*1e2/T) + pthigh = int(phigh * 1e2 / T) else: pthigh = 0 if dccode in ['XF', 'EU', 'WU']: @@ -152,9 +157,10 @@ def get_gdp_comment(ecodict, ecomodel, econexposure, event_year, epicode): if pthigh >= 100: strtxt = 'Estimated economic losses may exceed the GDP of %s.' % cname else: - strtxt = 'Estimated economic losses are %i-%i%% GDP of %s.' % (ptlow, pthigh, cname) + strtxt = 'Estimated economic losses are %i-%i%% GDP of %s.' % ( + ptlow, pthigh, cname) return strtxt - + def get_impact_comments(fatdict, ecodict, econexposure, event_year, ccode): """Create comments for a given event, describing economic and human (fatality) impacts. @@ -176,7 +182,7 @@ def get_impact_comments(fatdict, ecodict, econexposure, event_year, ccode): of these will be the first string. Under certain situations, the second comment could be blank. 
""" # first, figure out what the alert levels are for each loss result - + fatmodel = EmpiricalLoss.fromDefaultFatality() ecomodel = EmpiricalLoss.fromDefaultEconomic() fatlevel = fatmodel.getAlertLevel(fatdict) @@ -185,7 +191,8 @@ def get_impact_comments(fatdict, ecodict, econexposure, event_year, ccode): rlevels = {0: 'green', 1: 'yellow', 2: 'orange', 3: 'red'} fat_higher = levels[fatlevel] > levels[ecolevel] eco_higher = levels[ecolevel] > levels[fatlevel] - gdpcomment = get_gdp_comment(ecodict, ecomodel, econexposure, event_year, ccode) + gdpcomment = get_gdp_comment( + ecodict, ecomodel, econexposure, event_year, ccode) if fat_higher: if fatlevel == 'green': @@ -249,6 +256,7 @@ def get_impact_comments(fatdict, ecodict, econexposure, event_year, ccode): impact2 = impact2.replace('\n', ' ') return (impact1, impact2) + def _add_dicts(d1, d2): """Add two dictionaries of losses per building type together. Dictionaries must contain identical keys. @@ -266,6 +274,7 @@ def _add_dicts(d1, d2): df4 = df3.sort_values('fats', axis=1, ascending=False) return df4.loc['fats'] + def get_structure_comment(resfat, nonresfat, semimodel): """Create a paragraph describing the vulnerability of buildings in the most impacted country. @@ -297,28 +306,29 @@ def get_structure_comment(resfat, nonresfat, semimodel): RURAL = 1 URBAN = 2 semi = SemiEmpiricalFatality.fromDefault() - all_collapse_by_btype = pd.Series({}) - #get the inventories in this ccode for both densities and - #both residency classes - res_urban_inv,non_res_urban_inv = semi.getInventories(maxccode,URBAN) - res_rural_inv,non_res_rural_inv = semi.getInventories(maxccode,RURAL) - #find unique building types from these four Series - urban_inv_keys = set(res_urban_inv.index).union(set(non_res_urban_inv.index)) - rural_inv_keys = set(res_rural_inv.index).union(set(non_res_rural_inv.index)) + # get the inventories in this ccode for both densities and + # both residency classes + res_urban_inv, non_res_urban_inv = semi.getInventories(maxccode, URBAN) + res_rural_inv, non_res_rural_inv = semi.getInventories(maxccode, RURAL) + # find unique building types from these four Series + urban_inv_keys = set(res_urban_inv.index).union( + set(non_res_urban_inv.index)) + rural_inv_keys = set(res_rural_inv.index).union( + set(non_res_rural_inv.index)) inv_keys = set(urban_inv_keys).union(set(rural_inv_keys)) - null_inventory = pd.Series(dict.fromkeys(inv_keys,0.0)) - collapse_by_btype = pd.Series(dict.fromkeys(inv_keys,0.0)) - for mmi in np.arange(6.0,9.5,0.5): - collapse = semi.getCollapse(ccode,mmi,null_inventory) + null_inventory = pd.Series(dict.fromkeys(inv_keys, 0.0)) + collapse_by_btype = pd.Series(dict.fromkeys(inv_keys, 0.0)) + for mmi in np.arange(6.0, 9.5, 0.5): + collapse = semi.getCollapse(ccode, mmi, null_inventory) collapse_by_btype += collapse - collapse_by_btype.sort_values(ascending=False,inplace=True) + collapse_by_btype.sort_values(ascending=False, inplace=True) stypes = collapse_by_btype[0:2] else: - # get a pandas Series of all the unique building types in the + # get a pandas Series of all the unique building types in the # country of greatest impact, sorted by losses (high to low). stypes = _add_dicts(resfat[maxccode], nonresfat[maxccode]) - + pregions = PagerRegions() regioncode = pregions.getRegion(maxccode) default = pregions.getComment(regioncode) @@ -328,7 +338,8 @@ def get_structure_comment(resfat, nonresfat, semimodel): else: return 'There are likely to be no affected structures in this region.' 
- tstarts = ['W*', 'S*', 'C*', 'P*', 'RM*', 'MH', 'M*', 'A*', 'RE', 'RS*', 'DS*', 'UFB*', 'UCB', 'MS', 'TU', 'INF', 'UNK'] + tstarts = ['W*', 'S*', 'C*', 'P*', 'RM*', 'MH', 'M*', 'A*', + 'RE', 'RS*', 'DS*', 'UFB*', 'UCB', 'MS', 'TU', 'INF', 'UNK'] categories = [] btypes = [] for stype in stypes.index: @@ -361,6 +372,7 @@ def get_structure_comment(resfat, nonresfat, semimodel): regtext = fmt1 % b1 return default + ' ' + regtext + def get_secondary_hazards(expocat, mag): WAVETHRESH = .50 fireevents = expocat.selectByHazard('fire') @@ -399,6 +411,7 @@ def get_secondary_hazards(expocat, mag): return hazards + def get_secondary_comment(lat, lon, mag): expocat = ExpoCat.fromDefault() expocat = expocat.selectByRadius(lat, lon, SEARCH_RADIUS) @@ -422,6 +435,7 @@ def get_secondary_comment(lat, lon, mag): hazcomm = sfmt % fstr return hazcomm + def get_historical_comment(lat, lon, mag, expodict, fatdict): default = """There were no earthquakes with significant population exposure to shaking within a 400 km radius of this event.""" expocat = ExpoCat.fromDefault() @@ -430,8 +444,9 @@ def get_historical_comment(lat, lon, mag, expodict, fatdict): df = expocat.getDataFrame() # sort df by totaldeaths (inverse), then by maxmmmi, then by nmaxmmi. - df = df.sort_values(['TotalDeaths', 'MaxMMI', 'NumMaxMMI'], ascending=False) - + df = df.sort_values( + ['TotalDeaths', 'MaxMMI', 'NumMaxMMI'], ascending=False) + if len(df) == 0: return default if len(df) >= 1: @@ -439,22 +454,25 @@ def get_historical_comment(lat, lon, mag, expodict, fatdict): desc = get_quake_desc(worst_event, lat, lon, True) return desc + def get_quake_desc(event, lat, lon, isMainEvent): ndeaths = event['TotalDeaths'] # summarize the exposure values exposures = np.array([event['MMI1'], event['MMI2'], event['MMI3'], event['MMI4'], event['MMI5'], - event['MMI6'], event['MMI7'], event['MMI8'], event['MMI9+']]) + event['MMI6'], event['MMI7'], event['MMI8'], event['MMI9+']]) exposures = np.array([round_to_nearest(exp, 1000) for exp in exposures]) # get the highest two exposures greater than zero iexp = np.where(exposures > 0)[0][::-1] - romans = ['I', 'II', 'III', 'IV', 'V', 'VI', 'VII', 'VIII', 'IX or greater'] + romans = ['I', 'II', 'III', 'IV', 'V', + 'VI', 'VII', 'VIII', 'IX or greater'] if len(iexp) >= 2: exposures = [exposures[iexp[1]], exposures[iexp[0]]] ilevels = [romans[iexp[1]], romans[iexp[0]]] expfmt = ', with estimated population exposures of %s at intensity' expfmt = expfmt + ' %s and %s at intensity %s' - exptxt = expfmt % (commify(int(exposures[0])), ilevels[0], commify(int(exposures[1])), ilevels[1]) + exptxt = expfmt % (commify(int(exposures[0])), ilevels[0], commify( + int(exposures[1])), ilevels[1]) else: exptxt = '' @@ -466,7 +484,8 @@ def get_quake_desc(event, lat, lon, isMainEvent): etime = re.sub(' 0', ' ', etime) country = Country() if pd.isnull(event['Name']): - if event['CountryCode'] == 'UM' and event['Lat'] > 40: # hack for persistent error in expocat + # hack for persistent error in expocat + if event['CountryCode'] == 'UM' and event['Lat'] > 40: cdict = country.getCountry('US') else: cdict = country.getCountry(event['CountryCode']) @@ -476,19 +495,21 @@ def get_quake_desc(event, lat, lon, isMainEvent): cname = 'in the open ocean' else: cname = event['Name'].replace('"', '') - + cdist = round(geodetic_distance(event['Lat'], event['Lon'], lat, lon)) - cdir = get_compass_dir(lat, lon, event['Lat'], event['Lon'], format='long').lower() + cdir = get_compass_dir( + lat, lon, event['Lat'], event['Lon'], 
format='long').lower() if ndeaths and str(ndeaths) != "nan": dfmt = dfmt + ', resulting in a reported %s %s.' - + if ndeaths > 1: dstr = 'fatalities' else: dstr = 'fatality' ndeathstr = commify(int(ndeaths)) - eqdesc = dfmt % (mag, cdist, cdir, cname, etime, exptxt, ndeathstr, dstr) + eqdesc = dfmt % (mag, cdist, cdir, cname, + etime, exptxt, ndeathstr, dstr) else: dfmt = dfmt + ', with no reported fatalities.' eqdesc = dfmt % (mag, cdist, cdir, cname, etime, exptxt) diff --git a/losspager/onepager/onepager.py b/losspager/onepager/onepager.py index 6b98ea6..827e6cb 100644 --- a/losspager/onepager/onepager.py +++ b/losspager/onepager/onepager.py @@ -2,6 +2,7 @@ import os from datetime import datetime from collections import OrderedDict +import logging # third party imports from impactutils.textformat.text import pop_round_short, round_to_nearest @@ -27,13 +28,15 @@ DEFAULT_PAGER_URL = 'http://earthquake.usgs.gov/data/pager/' MIN_DISPLAY_POP = 1000 + def texify(text): newtext = text for original, replacement in LATEX_SPECIAL_CHARACTERS.items(): newtext = newtext.replace(original, replacement) return newtext -def create_onepager(pdata, version_dir, debug = False): + +def create_onepager(pdata, version_dir, debug=False): """ :param pdata: PagerData object. @@ -43,9 +46,9 @@ def create_onepager(pdata, version_dir, debug = False): bool for whether or not to add textpos boxes to onepager. """ - #--------------------------------------------------------------------------- + # --------------------------------------------------------------------------- # Sort out some paths - #--------------------------------------------------------------------------- + # --------------------------------------------------------------------------- # Locaiton of this module mod_dir, dummy = os.path.split(__file__) @@ -62,27 +65,27 @@ def create_onepager(pdata, version_dir, debug = False): # Onepager latex template file template_file = os.path.join(data_dir, 'onepager2.tex') - #--------------------------------------------------------------------------- + # --------------------------------------------------------------------------- # Read in pager data and latex template - #--------------------------------------------------------------------------- + # --------------------------------------------------------------------------- json_dir = os.path.join(version_dir, 'json') pdict = pdata._pagerdict edict = pdata.getEventInfo() - + with open(template_file, 'r') as f: template = f.read() - #--------------------------------------------------------------------------- + # --------------------------------------------------------------------------- # Fill in template values - #--------------------------------------------------------------------------- + # --------------------------------------------------------------------------- # Sort out origin time olat = edict['lat'] olon = edict['lon'] otime_utc = edict['time'] date_utc = datetime.strptime(otime_utc, "%Y-%m-%d %H:%M:%S") - + date_local = pdata.local_time DoW = date_local.strftime('%a') otime_local = date_local.strftime('%H:%M:%S') @@ -123,14 +126,16 @@ def create_onepager(pdata, version_dir, debug = False): # Tsunami warning? 
--- need to fix to be a function of tsunamic flag if edict['tsunami']: - template = template.replace("[TSUNAMI]", "FOR TSUNAMI INFORMATION, SEE: tsunami.gov") + template = template.replace( + "[TSUNAMI]", "FOR TSUNAMI INFORMATION, SEE: tsunami.gov") else: template = template.replace("[TSUNAMI]", "") if pdata.isScenario(): elapse = '' else: - elapse = "Created: " + pdict['pager']['elapsed_time'] + " after earthquake" + elapse = "Created: " + \ + pdict['pager']['elapsed_time'] + " after earthquake" template = template.replace("[ELAPSED]", elapse) template = template.replace("[IMPACT1]", texify(pdict['comments']['impact1'])) @@ -139,7 +144,6 @@ def create_onepager(pdata, version_dir, debug = False): template = template.replace("[STRUCTCOMMENT]", texify(pdict['comments']['struct_comment'])) - # Summary alert color template = template.replace("[SUMMARYCOLOR]", pdata.summary_alert.capitalize()) @@ -151,7 +155,7 @@ def create_onepager(pdata, version_dir, debug = False): explist = pdata.getTotalExposure() pophold = 0 for mmi in range(1, 11): - iexp = mmi-1 + iexp = mmi - 1 if mmi == 2: pophold += explist[iexp] continue @@ -169,12 +173,10 @@ def create_onepager(pdata, version_dir, debug = False): else: if pop < 1000: pop = round_to_nearest(pop, round_value=1000) - popstr = pop_round_short(pop)+'*' + popstr = pop_round_short(pop) + '*' else: popstr = pop_round_short(pop) template = template.replace(macro, popstr) - - # MMI color pal pal = ColorPalette.fromPreset('mmi') @@ -206,16 +208,16 @@ def create_onepager(pdata, version_dir, debug = False): mag = str(htab[i]['Magnitude']) mmi = dec_to_roman(np.round(htab[i]['MaxMMI'], 0)) col = pal.getDataColor(htab[i]['MaxMMI']) - texcol = "%s,%s,%s" %(col[0], col[1], col[2]) + texcol = "%s,%s,%s" % (col[0], col[1], col[2]) nmmi = pop_round_short(htab[i]['NumMaxMMI']) - mmicell = '%s(%s)' %(mmi, nmmi) + mmicell = '%s(%s)' % (mmi, nmmi) shakedeath = htab[i]['ShakingDeaths'] if np.isnan(shakedeath): death = "--" else: death = pop_round_short(shakedeath) row = '%s & %s & %s & \cellcolor[rgb]{%s} %s & %s \\\\ '\ - '\n' %(date, dist, mag, texcol, mmicell, death) + '\n' % (date, dist, mag, texcol, mmicell, death) tabledata = tabledata + row htex = htex.replace("[TABLEDATA]", tabledata) template = template.replace("[HISTORICAL_BLOCK]", htex) @@ -239,23 +241,24 @@ def create_onepager(pdata, version_dir, debug = False): pop = '$<$1k' else: if ctab['pop'].iloc[i] < 1000: - popnum = round_to_nearest(ctab['pop'].iloc[i], round_value=1000) + popnum = round_to_nearest( + ctab['pop'].iloc[i], round_value=1000) else: popnum = ctab['pop'].iloc[i] pop = pop_round_short(popnum) col = pal.getDataColor(ctab['mmi'].iloc[i]) - texcol = "%s,%s,%s" %(col[0], col[1], col[2]) + texcol = "%s,%s,%s" % (col[0], col[1], col[2]) if ctab['on_map'].iloc[i] == 1: if ctab['pop'].iloc[i] == 0: pop = '\\boldmath$<$\\textbf{1k}' row = '\\rowcolor[rgb]{%s}\\textbf{%s} & \\textbf{%s} & '\ - '%s\\\\ \n' %(texcol, mmi, city, pop) + '%s\\\\ \n' % (texcol, mmi, city, pop) else: row = '\\rowcolor[rgb]{%s}\\textbf{%s} & \\textbf{%s} & '\ - '\\textbf{%s}\\\\ \n' %(texcol, mmi, city, pop) + '\\textbf{%s}\\\\ \n' % (texcol, mmi, city, pop) else: row = '\\rowcolor[rgb]{%s}%s & %s & '\ - '%s\\\\ \n' %(texcol, mmi, city, pop) + '%s\\\\ \n' % (texcol, mmi, city, pop) tabledata = tabledata + row ctex = ctex.replace("[TABLEDATA]", tabledata) template = template.replace("[CITYTABLE]", ctex) @@ -267,7 +270,7 @@ def create_onepager(pdata, version_dir, debug = False): try: ccinfo = ComCatInfo(eventid) eventid, allids = 
ccinfo.getAssociatedIds() - event_url = ccinfo.getURL()+'#pager' + event_url = ccinfo.getURL() + '#pager' except: event_url = DEFAULT_PAGER_URL @@ -285,8 +288,9 @@ def create_onepager(pdata, version_dir, debug = False): try: cwd = os.getcwd() os.chdir(version_dir) - cmd = '%s -interaction nonstopmode --output-directory %s %s' % (LATEX_TO_PDF_BIN, version_dir, tex_output) - print('Running %s...' % cmd) + cmd = '%s -interaction nonstopmode --output-directory %s %s' % ( + LATEX_TO_PDF_BIN, version_dir, tex_output) + logging.info('Running %s...' % cmd) res, stdout, stderr = get_command_output(cmd) os.chdir(cwd) if not res: diff --git a/losspager/schema/emailschema.py b/losspager/schema/emailschema.py index 664f83f..67d60b7 100644 --- a/losspager/schema/emailschema.py +++ b/losspager/schema/emailschema.py @@ -5,6 +5,7 @@ import enum import json import os +import logging # third-party imports import numpy as np @@ -23,7 +24,7 @@ from losspager.utils.exception import PagerException # constants -MAX_ELAPSED_SECONDS = 8*3600 +MAX_ELAPSED_SECONDS = 8 * 3600 TIME_FORMAT = '%Y-%m-%d %H:%M:%S' MAX_EIS_LEVELS = 2 @@ -35,31 +36,36 @@ """ + class GeoThing(object): def __init__(self, d): self.__geo_interface__ = d # create our bridge tables here + # link the address and region tables address_region_bridge = Table('address_region_bridge', Base.metadata, - Column('address_id', Integer, ForeignKey('address.id')), - Column('region_id', Integer, ForeignKey('region.id'))) + Column('address_id', Integer, + ForeignKey('address.id')), + Column('region_id', Integer, ForeignKey('region.id'))) -# link the version and address tables +# link the version and address tables # sendtime column keeps track of who got notified about what when version_address_bridge = Table('version_address_bridge', Base.metadata, Column('version_id', Integer, ForeignKey('version.id'), - primary_key = True,), + primary_key=True,), Column('address_id', Integer, ForeignKey('address.id'), - primary_key = True), + primary_key=True), Column('sendtime', DateTime)) profile_region_bridge = Table('profile_region_bridge', Base.metadata, - Column('profile_id', Integer, ForeignKey('profile.id')), - Column('region_id', Integer, ForeignKey('region.id'))) + Column('profile_id', Integer, + ForeignKey('profile.id')), + Column('region_id', Integer, ForeignKey('region.id'))) + class _AlertEnum(enum.Enum): """Simple enum class for alert levels. @@ -69,13 +75,14 @@ class _AlertEnum(enum.Enum): orange = 2 red = 3 + class User(Base): """Class representing a PAGER user. - + Relationship descriptions: - A User can have many Addresses. - A User can belong to one Organization. 
- + """ __tablename__ = 'user' id = Column(Integer, primary_key=True) @@ -85,7 +92,8 @@ class User(Base): organization_id = Column(Integer, ForeignKey('organization.id')) # A User can have many addresses - addresses = relationship("Address", back_populates="user", cascade="all, delete, delete-orphan") + addresses = relationship( + "Address", back_populates="user", cascade="all, delete, delete-orphan") # A user can belong to one organization organization = relationship("Organization", back_populates="users") @@ -108,19 +116,24 @@ def __repr__(self): return fmt % tpl def fromDict(self, session, userdict): - reqfields = set(['lastname', 'firstname', 'createdon', 'org', 'addresses']) + reqfields = set( + ['lastname', 'firstname', 'createdon', 'org', 'addresses']) if reqfields <= set(userdict.keys()): pass else: missing = list(reqfields - set(userdict.keys())) - raise PagerException('Missing required fields for user: %s' % str(missing)) + raise PagerException( + 'Missing required fields for user: %s' % str(missing)) # set the user fields self.lastname = userdict['lastname'] self.firstname = userdict['firstname'] - self.createdon = datetime.strptime(userdict['createdon'], TIME_FORMAT) # will this be a string or a datetime? - org = session.query(Organization).filter(Organization.shortname == userdict['org']).first() + # will this be a string or a datetime? + self.createdon = datetime.strptime(userdict['createdon'], TIME_FORMAT) + org = session.query(Organization).filter( + Organization.shortname == userdict['org']).first() if org is None: - raise PagerException('No organization named %s exists in the database.' % userdict['org']) + raise PagerException( + 'No organization named %s exists in the database.' % userdict['org']) self.organization = org self.addresses = [] @@ -146,15 +159,16 @@ def toDict(self): userdict['addresses'] = addresses return userdict + class Address(Base): """Class representing a PAGER address. - + Relationship descriptions: - An Address has one User. - An Address can have many Versions. - An Address can have many Thresholds. - An Address can have many Regions. - + """ __tablename__ = 'address' id = Column(Integer, primary_key=True) @@ -176,7 +190,7 @@ class Address(Base): # An address can have many thresholds profiles = relationship("Profile", back_populates="address") - + def __repr__(self): return "" % self.email @@ -198,7 +212,6 @@ def shouldAlert(self, version, renotify=False, release=False, ignore_time_limit= Boolean indicating whether this address has been notified for the event before. """ levels = ['green', 'yellow', 'orange', 'red'] - # get the alert level for the most recently alerted version this user received # first get the event id for this version of an event @@ -237,12 +250,12 @@ def shouldAlert(self, version, renotify=False, release=False, ignore_time_limit= if not ignore_time_limit: if datetime.utcnow() > version.time + timedelta(seconds=MAX_ELAPSED_SECONDS): return (False, notified_before) - + # shortcut to True here if the most recent version for this event # was NOT released (i.e., pending), but only if this version has been released. 
if (len(sversions) and not sversions[-1].released) and version.released: return (True, True) - + should_alert = False for profile in self.profiles: if profile.shouldAlert(version, highest_level): @@ -252,27 +265,32 @@ def shouldAlert(self, version, renotify=False, release=False, ignore_time_limit= return (should_alert, notified_before) def fromDict(self, session, addressdict): - reqfields = set(['email', 'is_primary', 'priority', 'profiles', 'format']) + reqfields = set( + ['email', 'is_primary', 'priority', 'profiles', 'format']) if reqfields <= set(addressdict.keys()): pass else: missing = list(reqfields - set(addressdict.keys())) - raise PagerException('Missing required fields for address: %s' % str(missing)) + raise PagerException( + 'Missing required fields for address: %s' % str(missing)) # set the fields for the address object self.email = addressdict['email'] self.is_primary = addressdict['is_primary'] self.priority = addressdict['priority'] self.format = addressdict['format'] if not len(addressdict['profiles']): - print('Warning: Address %s has NO profiles in the JSON file. Continuing.' % self.email) + logging.warning( + 'Warning: Address %s has NO profiles in the JSON file. Continuing.' % self.email) for profiledict in addressdict['profiles']: profile = Profile() try: profile.fromDict(session, profiledict) except PagerException as pe: - raise PagerException('Error: "%s" when loading profile for address %s.' % (str(pe), self.email)) + raise PagerException( + 'Error: "%s" when loading profile for address %s.' % (str(pe), self.email)) if not len(profile.thresholds): - print('Warning: Address %s has NO thresholds in one of the profiles. Continuing.' % self.email) + logging.warning( + 'Warning: Address %s has NO thresholds in one of the profiles. Continuing.' % self.email) self.profiles.append(profile) def toDict(self): @@ -287,6 +305,7 @@ def toDict(self): addressdict['profiles'] = profiles return addressdict + class Profile(Base): """Class representing a user's profile. @@ -348,15 +367,17 @@ def fromDict(self, session, profiledict): pass else: missing = list(reqfields - set(profiledict.keys())) - raise PagerException('Missing required fields for profile: %s' % str(missing)) - + raise PagerException( + 'Missing required fields for profile: %s' % str(missing)) + for regiondict in profiledict['regions']: rgroup, rname = regiondict['name'].split('-') region = session.query(Region).filter(Region.name == rname).first() if region is None: - raise PagerException('No region named %s found in the database.' % regiondict['name']) + raise PagerException( + 'No region named %s found in the database.' % regiondict['name']) self.regions.append(region) - + for thresholddict in profiledict['thresholds']: threshold = Threshold() threshold.fromDict(session, thresholddict) @@ -366,7 +387,7 @@ def toDict(self): profiledict = {} regions = [] for region in self.regions: - # remember that we're not deflating a Region object, we just want the reference to + # remember that we're not deflating a Region object, we just want the reference to # it (i.e., its name). rgroup = region.regiongroup.groupname regiondict = {'name': rgroup + '-' + region.name} @@ -378,7 +399,8 @@ def toDict(self): profiledict['regions'] = regions profiledict['thresholds'] = thresholds return profiledict - + + class Organization(Base): """Class representing an organization (USGS, FEMA, etc.) 
@@ -391,8 +413,9 @@ class Organization(Base): shortname = Column(String, nullable=False) # An organization can have many users - users = relationship("User", order_by=User.id, back_populates="organization") - + users = relationship("User", order_by=User.id, + back_populates="organization") + def __repr__(self): return "" % (self.name, len(self.users)) @@ -402,12 +425,14 @@ def fromDict(self, session, orgdict): pass else: missing = list(reqfields - set(orgdict.keys())) - raise PagerException('Missing required fields for user: %s' % str(missing)) + raise PagerException( + 'Missing required fields for user: %s' % str(missing)) self.shortname = orgdict['shortname'] self.name = orgdict['name'] session.add(self) session.commit() + class Event(Base): """Class representing an earthquake event. @@ -419,11 +444,13 @@ class Event(Base): eventcode = Column(String, nullable=False) # An event can have many versions - versions = relationship("Version", back_populates="event", cascade="all, delete, delete-orphan") + versions = relationship("Version", back_populates="event", + cascade="all, delete, delete-orphan") def __repr__(self): return "" % (self.eventcode, len(self.versions)) + class Version(Base): """Class representing a version of an earthquake event. @@ -433,7 +460,8 @@ class Version(Base): """ __tablename__ = 'version' id = Column(Integer, primary_key=True) - event_id = Column(Integer, ForeignKey('event.id', ondelete='CASCADE'), nullable=False) + event_id = Column(Integer, ForeignKey( + 'event.id', ondelete='CASCADE'), nullable=False) versioncode = Column(String, nullable=False) time = Column(DateTime, nullable=False) lat = Column(Float, nullable=False) @@ -461,7 +489,8 @@ class Version(Base): def __repr__(self): return "" % (self.versioncode, self.number, str(self.time), self.magnitude) - + + class AlertScheme(Base): """Class representing an alert scheme (Magnitude, MMI, Earthquake Impact Scale, etc.). @@ -484,12 +513,14 @@ def __repr__(self): return "" % (self.name) def fromDict(self, session, schemedict): - reqfields = set(['name', 'adesc', 'valuetype', 'isdiscrete', 'minlevel', 'maxlevel']) + reqfields = set(['name', 'adesc', 'valuetype', + 'isdiscrete', 'minlevel', 'maxlevel']) if reqfields <= set(schemedict.keys()): pass else: missing = list(reqfields - set(schemedict.keys())) - raise PagerException('Missing required fields for alert scheme: %s' % str(missing)) + raise PagerException( + 'Missing required fields for alert scheme: %s' % str(missing)) tscheme = AlertScheme(name=schemedict['name'], adesc=schemedict['adesc'], valuetype=schemedict['valuetype'], @@ -506,7 +537,8 @@ def toDict(self): 'minlevel': self.minlevel, 'maxlevel': self.maxlevel} return schemedict - + + class Threshold(Base): """Class representing an alert threshold (magnitude value, EIS level, etc.) @@ -528,7 +560,7 @@ class Threshold(Base): def __repr__(self): return "" % (self.alertscheme.name, self.value) - + def isMet(self, version, highest_level): """Determine if input earthquake event version meets or exceeds this threshold. @@ -551,7 +583,7 @@ def isMet(self, version, highest_level): # this event before and the current level exceeds the threshold, then they should # be notified. If they haven't been notified and current level is below threshold, then no notification. 
# If the user HAS been notified about this event, then if the current alert level is two or more - # levels *different* from the + # levels *different* from the if self.alertscheme.name == 'eis': threshlevel = alertdict[self.value] if highest_level < 0: @@ -560,7 +592,8 @@ def isMet(self, version, highest_level): else: return False else: - two_levels_different = np.abs(version.summarylevel - highest_level) >= MAX_EIS_LEVELS + two_levels_different = np.abs( + version.summarylevel - highest_level) >= MAX_EIS_LEVELS if two_levels_different: return True else: @@ -577,16 +610,19 @@ def isMet(self, version, highest_level): def fromDict(self, session, thresholddict): tvalue = thresholddict['value'] - scheme = session.query(AlertScheme).filter(AlertScheme.name == thresholddict['alertscheme']).first() + scheme = session.query(AlertScheme).filter( + AlertScheme.name == thresholddict['alertscheme']).first() if scheme is None: - raise PagerException('No alert scheme named %s exists in the database.' % thresholddict['alertscheme']) + raise PagerException( + 'No alert scheme named %s exists in the database.' % thresholddict['alertscheme']) if not scheme.isdiscrete: if scheme.valuetype == 'Float': tvalue = float(tvalue) else: tvalue = int(tvalue) if tvalue < scheme.minlevel or tvalue > scheme.maxlevel: - raise PagerException('Threshold for %s is outside range.' % scheme.name) + raise PagerException( + 'Threshold for %s is outside range.' % scheme.name) self.alertscheme = scheme self.value = thresholddict['value'] @@ -595,6 +631,7 @@ def toDict(self): 'alertscheme': self.alertscheme.name} return thresholddict + class Level(Base): """Class representing an alert threshold (magnitude value, EIS level, etc.) @@ -606,10 +643,11 @@ class Level(Base): name = Column(String, nullable=False) alertscheme = relationship("AlertScheme", back_populates="levels") - + def __repr__(self): return "" % (self.name) - + + class RegionGroup(Base): """Class representing a group of regions (FEMA, US military, OFDA, etc.) @@ -626,6 +664,7 @@ class RegionGroup(Base): def __repr__(self): return "" % (self.groupname) + class Region(Base): """Class representing a region of interest (US military Northern Command, or NORTHCOM, for example.) @@ -645,8 +684,8 @@ class Region(Base): # A region can have many profiles profiles = relationship("Profile", - secondary=profile_region_bridge, - back_populates="regions") + secondary=profile_region_bridge, + back_populates="regions") regiongroup = relationship("RegionGroup", back_populates="regions") @@ -658,7 +697,7 @@ def getPolygon(self): polydict = json.loads(polystr) m = shape(GeoThing(polydict)) return m - + def containsPoint(self, lat, lon): """Determine whether a given lat/lon is inside the region. 
@@ -685,16 +724,18 @@ def fromDict(self, session, regiondict): pass else: missing = list(reqfields - set(regiondict.keys())) - raise PagerException('Missing required fields for region: %s' % str(missing)) + raise PagerException( + 'Missing required fields for region: %s' % str(missing)) regioninfo = regiondict['properties']['code'] rgroupname, regioncode = regioninfo.split('-') regiondesc = regiondict['properties']['desc'] # try to find this region in the database - regiongroup = session.query(RegionGroup).filter(RegionGroup.groupname == rgroupname).first() + regiongroup = session.query(RegionGroup).filter( + RegionGroup.groupname == rgroupname).first() if regiongroup is None: regiongroup = RegionGroup(groupname=rgroupname) - + poly = regiondict['geometry'] polybytes = bytes(json.dumps(poly), 'utf-8') tshape = shape(poly) @@ -718,10 +759,10 @@ def toDict(self): regiondict = {'type': 'Feature', 'geometry': polydata, 'properties': {'code': regioncode, - 'desc': self.desc}} + 'desc': self.desc}} return regiondict - - + + def get_session(url='sqlite:///:memory:', create_db=False): """Get a SQLAlchemy Session instance for input database URL. @@ -739,17 +780,18 @@ def get_session(url='sqlite:///:memory:', create_db=False): else: if create_db: create_database(url) - + engine = create_engine(url, echo=False) Base.metadata.create_all(engine) - # create a session object that we can use to insert and + # create a session object that we can use to insert and # extract information from the database Session = sessionmaker(bind=engine, autoflush=False) session = Session() return session + def create_db(url, schemadir, users_jsonfile=None, orgs_jsonfile=None): session = get_session(url, create_db=True) @@ -774,7 +816,7 @@ def create_db(url, schemadir, users_jsonfile=None, orgs_jsonfile=None): maxlevel=scheme['maxlevel']) session.add(tscheme) session.commit() - + eis = session.query(AlertScheme).filter(AlertScheme.name == 'eis').first() # Level table levels = [{'ordernum': 0, 'name': 'green'}, @@ -800,8 +842,8 @@ def create_db(url, schemadir, users_jsonfile=None, orgs_jsonfile=None): regions = json.loads(data) for regioncode, regiondict in regions.items(): region = Region() - region.fromDict(session, regiondict) # this adds to session and commits() - + # this adds to session and commits() + region.fromDict(session, regiondict) # load whatever organizations we have orgs = json.loads(open(orgs_jsonfile, 'rt').read()) @@ -815,9 +857,10 @@ def create_db(url, schemadir, users_jsonfile=None, orgs_jsonfile=None): for userdict in users: user = User() user.fromDict(session, userdict) - + return session + def serialize_users(session, jsonfile): # Back up the list of users to a JSON file users = session.query(User).all() @@ -829,6 +872,7 @@ def serialize_users(session, jsonfile): json.dump(userlist, f, indent=2) f.close() + def get_file_url(dbfile): fileurl = 'sqlite:///%s' % dbfile return fileurl diff --git a/losspager/utils/admin.py b/losspager/utils/admin.py index d5105a4..8393a52 100644 --- a/losspager/utils/admin.py +++ b/losspager/utils/admin.py @@ -8,6 +8,7 @@ import json import getpass from distutils.spawn import find_executable +import logging # local imports from losspager.utils.exception import PagerException @@ -102,7 +103,7 @@ def transfer(config, pagerdata, authid, authsource, version_folder, if 'status' in config and config['status'] == 'primary': if 'transfer' in config: if 'methods' in config['transfer']: - print('Running transfer.') + logging.info('Running transfer.') for method in 
config['transfer']['methods']: if method not in config['transfer']: sys.stderr.write( @@ -111,6 +112,8 @@ def transfer(config, pagerdata, authid, authsource, version_folder, params = config['transfer'][method] if is_scenario: params['type'] = 'losspager-scenario' + if renotify: + params['type'] = 'losspager-admin' if 'remote_directory' in params: vpath, vfolder = os.path.split(version_folder) # append the event id and version folder to our pre-specified output directory @@ -331,6 +334,18 @@ def getAllEventFolders(self): event_folders.append(event_folder) return event_folders + def getEventsBeforeDate(self, beforedate): + events = [] + event_folders = self.getAllEventFolders() + for event_folder in event_folders: + version_folder = self.getLastVersion(event_folder) + json_folder = os.path.join(version_folder,'json') + pdata = PagerData() + pdata.loadFromJSON(json_folder) + if pdata.time < beforedate: + events.append(pdata.id) + return events + def getAllEvents(self): """Get a list of event IDs from PAGER folder. @@ -377,25 +392,15 @@ def archive(self, events=[], all_events=False, events_before=None): else: nerrors += 1 else: + if not len(events): + events = self.getEventsBeforeDate(events_before) for eventid in events: eventfolder = self.getEventFolder(eventid) - if events_before is not None: - t, etimestr = eventfolder.split('_') - etime = datetime.datetime.strptime(etimestr, DATETIMEFMT) - if etime < events_before: - result = self.archiveEvent(eventid) - if result: - narchived += 1 - else: - nerrors += 1 - else: - continue + result = self.archiveEvent(eventid) + if result: + narchived += 1 else: - result = self.archiveEvent(eventid) - if result: - narchived += 1 - else: - nerrors += 1 + nerrors += 1 return (narchived, nerrors) @@ -762,7 +767,10 @@ def query(self, start_time=datetime.datetime(1800, 1, 1), end_time=datetime.date if not pdata._is_validated: broken.append(jsonfolder) - meetsLevel = levels[pdata.summary_alert] >= levels[alert_threshold] + try: + meetsLevel = levels[pdata.summary_alert] >= levels[alert_threshold] + except Exception as e: + x = 1 meetsMag = pdata.magnitude >= mag_threshold if pdata.time >= start_time and pdata.time <= end_time and meetsLevel and meetsMag: row = pdata.toSeries(processtime=do_process_time) diff --git a/losspager/utils/expocat.py b/losspager/utils/expocat.py index 5fa3768..1225dfd 100755 --- a/losspager/utils/expocat.py +++ b/losspager/utils/expocat.py @@ -100,7 +100,7 @@ def fromExcel(cls, excelfile): :returns: ExpoCat object. """ - df = pd.read_excel(excelfile, converters={0: str}) + df = pd.read_excel(excelfile, converters={'EventID': str}) #df = df.drop('Unnamed: 0',1) # cols = ['EventID','Time','Name','Lat','Lon','Depth','Magnitude','CountryCode', @@ -113,7 +113,8 @@ def fromExcel(cls, excelfile): 'MMI5', 'MMI6', 'MMI7', 'MMI8', 'MMI9+'] # find the highest MMI column in each row with at least 1000 people exposed. - mmidata = df.ix[:, mmicols].values + # mmidata = df.ix[:, mmicols].values + mmidata = df[mmicols].values tf = mmidata > 1000 nrows, ncols = mmidata.shape colmat = np.tile(np.arange(0, ncols), (nrows, 1)) @@ -148,7 +149,7 @@ def fromCSV(cls, csvfile): 'MMI5', 'MMI6', 'MMI7', 'MMI8', 'MMI9+'] # find the highest MMI column in each row with at least 1000 people exposed. 
- mmidata = df.ix[:, mmicols].values + mmidata = df[mmicols].values tf = mmidata > 1000 nrows, ncols = mmidata.shape colmat = np.tile(np.arange(0, ncols), (nrows, 1)) @@ -371,20 +372,20 @@ def getHistoricalEvents(self, maxmmi, nmmi, ndeaths, clat, clon): if less_bad is not None: lessdict = to_ordered_dict(less_bad) rgbval = colormap.getDataColor(lessdict['MaxMMI']) - rgb255 = tuple([int(c*255) for c in rgbval])[0:3] + rgb255 = tuple([int(c * 255) for c in rgbval])[0:3] lessdict['Color'] = '#%02x%02x%02x' % rgb255 events.append(lessdict) if more_bad is not None: moredict = to_ordered_dict(more_bad) rgbval = colormap.getDataColor(moredict['MaxMMI']) - rgb255 = tuple([int(c*255) for c in rgbval])[0:3] + rgb255 = tuple([int(c * 255) for c in rgbval])[0:3] moredict['Color'] = '#%02x%02x%02x' % rgb255 events.append(moredict) worstdict = to_ordered_dict(worst) rgbval = colormap.getDataColor(worstdict['MaxMMI']) - rgb255 = tuple([int(c*255) for c in rgbval])[0:3] + rgb255 = tuple([int(c * 255) for c in rgbval])[0:3] worstdict['Color'] = '#%02x%02x%02x' % rgb255 events.append(worstdict) diff --git a/losspager/utils/ftype.py b/losspager/utils/ftype.py index 5a76d6d..4e49a7b 100644 --- a/losspager/utils/ftype.py +++ b/losspager/utils/ftype.py @@ -3,10 +3,10 @@ from mapio.gmt import GMTGrid from mapio.gdal import GDALGrid - + def get_file_type(file): - """Internal method to figure out which file type (GMT or GDAL) the population/country code - grid files are. + """Internal method to figure out which file type (GMT or GDAL) + the population/country code grid files are. """ if GMTGrid.getFileType(file) == 'unknown': try: diff --git a/losspager/utils/logger.py b/losspager/utils/logger.py index 2eaaaea..f7f59f2 100644 --- a/losspager/utils/logger.py +++ b/losspager/utils/logger.py @@ -2,253 +2,59 @@ # stdlib imports import logging -from logging.handlers import SMTPHandler -import sys -import socket -from datetime import datetime -import smtplib +from logging import StreamHandler, FileHandler +from logging.handlers import (SMTPHandler, TimedRotatingFileHandler) + + +class PagerLogger(object): + def __init__(self, logfile, developers, + from_address, mail_host, debug=False): + self._fmt = '%(levelname)s -- %(asctime)s -- %(module)s.%(funcName)s -- %(message)s' + self._datefmt = '%Y-%m-%d %H:%M:%S' + self._level = logging.DEBUG + + self._formatter = logging.Formatter(self._fmt, self._datefmt) + + # turn this on only if debug is True + self._stream_handler = StreamHandler() + self._stream_handler.setFormatter(self._formatter) + self._stream_handler.setLevel(self._level) + + # turn this on only if debug is False + if mail_host is not None: + self._mail_handler = SMTPHandler(mail_host, from_address, + developers, 'PAGER Error') + self._mail_handler.setFormatter(self._formatter) + self._mail_handler.setLevel(logging.CRITICAL) + + # turn this on only if debug is False + self._global_handler = TimedRotatingFileHandler( + logfile, when='midnight') + self._global_handler.setFormatter(self._formatter) + self._global_handler.setLevel(self._level) + + # set up logger + logging.captureWarnings(True) + self._logger = logging.getLogger() + self._logger.setLevel(self._level) + if debug: + self._logger.addHandler(self._stream_handler) + else: + if mail_host is not None: + self._logger.addHandler(self._mail_handler) + self._logger.addHandler(self._global_handler) -class NullHandler(logging.Handler): - def emit(self, record): - pass - -class OutStream: - """ - Sends text to a logging object as INFO. 
- """ - def __init__(self, logger): - """ - Create an OutStream with an instance of a logger from the python logging module. - - :param logger: - Result of calling logging.getLogger('LogName') - """ - self.Logger = logger - - def write(self, text): - """ - Write output to Logger instance as INFO. - - :param text: - Text to write to Logger instance. - """ - if text and not text.isspace(): - self.Logger.info(text) - - def flush(self): - """Method to complete the stream interface. Does nothing. - - """ - pass - -class ErrStream: - """ - Sends text to a logging object as ERROR. - """ - def __init__(self, logger): - """ - Create an ErrStream with an instance of a logger from the python logging module. - - :param logger: - Result of calling logging.getLogger('LogName') - """ - self.Logger = logger - - def write(self, text): - """ - Write output to Logger instance as ERROR. - - :param text: - Text to write to Logger instance. - """ - if text and not text.isspace(): - self.Logger.error(text) - - def flush(self): - """Method to complete the stream interface. Does nothing. - - """ - pass - -class PagerLogger: - """ - Wrapper around Python logging module. - - This class simplifies some of the configuration for logging - PAGER errors. Simple usage:: - from util.logger import PagerLogger - plog = PagerLogger(logfile) - plog.addEmailHandler(['dev@developer.gov']) - plogger = plog.getLogger() - try: - print('This should show up as information!') - sys.stderr.write('This should show up as an error!') - raise Exception,"This should show up as critical (and generate an email)!" - except Exception,msg: - plogger.critical(msg) - """ - Subject = 'PAGER Error' - EmailLogLevel = logging.CRITICAL - OldOut = None - OldErr = None - isOnline = False - EventLogHandler = None - MailHandler = None - PagerLogHandler = None - def __init__(self, pagerlogfile, from_address=None, mail_hosts=None, redirect=True): - """ - Create a PagerLogger instance, and begin logging to the PAGER (not event) log file. - - Initially, without calling any other functions, this will log results only to this file. - When the version folder is created, users should call addEventFileHandler(), which will turn off - the general pager log file. - - :param pagerlogfile: - Name of local file where logging information should be written. - :param from_address: - Email address where error messages will come from. - :param mail_hosts: - String indicating the hostname or IP address of a valid SMTP server. - :param redirect: - Boolean indicating whether or not stdout and stderr should be redirected to - log file. Wise to turn off when debugging. - """ - #h = NullHandler() - self.redirected = False - logger = logging.getLogger('PagerLog') - # logger.addHandler(h) - formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') - #formatter = logging.Formatter(fmt='${levelname} ${asctime} ${message}') - self.PagerLogHandler = logging.FileHandler(pagerlogfile) - self.PagerLogHandler.setFormatter(formatter) - logger.addHandler(self.PagerLogHandler) - - logger.setLevel(logging.INFO) - self.Logger = logger - self.Formatter = formatter - if redirect: - self.enableRedirect() - - # check to verify that we are connected to a network. 
If not, email handler - # can't be added later - self.isOnline = True - if socket.gethostbyname(socket.gethostname()) == '127.0.0.1': - self.isOnline = False - - self.FromAddress = from_address - self.MailHosts = mail_hosts - - def scream(self, msg): - """ - Send out a CRITICAL level warning message, which will go through the email handler if it is turned on. - - :param msg: - Text message to be logged as CRITICAL. - """ - self.disableRedirect() - self.Logger.critical(msg) - self.enableRedirect() - - - def enableRedirect(self): - """ - Send stdout/stderr to OutStream object. - """ - self.redirected = True - self.OldOut = sys.stdout - self.OldErr = sys.stderr - sys.stdout = OutStream(self.Logger) - sys.stderr = OutStream(self.Logger) - - def disableRedirect(self): - """ - Send stdout/stderr back to original output streams. - """ - if self.OldOut is None: - return - sys.stdout = self.OldOut - sys.stderr = self.OldErr - self.OldOut = None - self.OldErr = None - - def switchToEventFileHandler(self, eventlogfile): - """ - Add event log to logger. Also removes general pager log from the logger object. - :param eventlogfile: - Desired full path to event log file. - """ - # add the event file handler - self.EventLogHandler = logging.FileHandler(eventlogfile) - self.EventLogHandler.setFormatter(self.Formatter) - self.Logger.addHandler(self.EventLogHandler) - - # remove the pager log handler - self.Logger.removeHandler(self.PagerLogHandler) - - def addEmailHandler(self, emails): - """ - Add an email handler. - - Important notes: The email handler uses the following class variables: - - FromAddress - Emails show up as being from this address. - - MailHosts - Mails are processed through one of these mail servers. - - Subject - Emails have this subject line. - - LogLevel - (IMPORTANT!) Only log messages with this log level and above - will be sent by email. - - Use this email functionality with caution. - - :param emails: - List of email addresses that should receive error notifications. - """ - if not self.isOnline: - print('Could not add mail handler as this system is not networked.') - return - self.MailHandler = None - print('Creating SMTP handler with %s,%s,%s,%s' % (self.MailHosts, self.FromAddress, emails, self.Subject)) - # we supply a list of smtp hosts, just in case some of them are off-line... here we find the first - # one that seems to be responsive. - try: - subject = self.Subject + ': %s' % datetime.utcnow().strftime('%b %d %Y %H:%M:%S') - smtphost = None - for host in self.MailHosts: - with smtplib.SMTP(host) as smtp: - # if I can connect, and send this noop message, then this server - # should be good to go. Various sources indicate that calling noop() too many - # times can be interpreted as a form of DOS attack, so use with caution. - res = smtp.noop() - smtp.close() - if res[0] == 250: - smtphost = host - break - if smtphost is None: - raise PagerException('Could not connect to any mail hosts: %s' % str(self.MailHosts)) - - self.MailHandler = SMTPHandler(smtphost, self.FromAddress, emails, subject) - self.MailHandler.setLevel(self.EmailLogLevel) - self.MailHandler.setFormatter(self.Formatter) - self.Logger.addHandler(self.MailHandler) - except: - msg = '' - print('Could not add mail handler (%s).' % (msg)) - - def stopLogging(self): - """ - Stop logging, return stdout and stderr to normal state. 
- """ - if self.PagerLogHandler: - self.Logger.removeHandler(self.PagerLogHandler) - if self.MailHandler: - self.Logger.removeHandler(self.MailHandler) - if self.EventLogHandler: - self.Logger.removeHandler(self.EventLogHandler) - if self.redirected: - sys.stdout = self.OldOut - sys.stderr = self.OldErr - - def getLogger(self): - """ - Return the logger instance. - """ - return self.Logger + return self._logger + + def setVersionHandler(self, filename): + self._logger.removeHandler(self._global_handler) + local_handler = FileHandler(filename) + local_handler.setFormatter(self._formatter) + local_handler.setLevel(self._level) + self._logger.addHandler(local_handler) + + def close(self): + for handler in self._logger.handlers: + self._logger.removeHandler(handler) + del self._logger diff --git a/setup.py b/setup.py index 48de49d..0d413fc 100644 --- a/setup.py +++ b/setup.py @@ -15,6 +15,8 @@ os.path.join('data', 'schema', '*'), os.path.join('logos', '*') ]}, - scripts=['pagerlite', 'emailpager', 'pager', 'adminpager', - 'callpager', 'updatepager', 'mailadmin', 'sync_users', - 'twopager']) + scripts=['bin/pagerlite', 'bin/emailpager', + 'bin/pager', 'bin/adminpager', + 'bin/callpager', 'bin/updatepager', + 'bin/mailadmin', 'bin/sync_users', + 'bin/twopager']) diff --git a/test/io/pagerdata_test.py b/test/io/pagerdata_test.py index 8b72fa3..57f3589 100755 --- a/test/io/pagerdata_test.py +++ b/test/io/pagerdata_test.py @@ -7,12 +7,7 @@ import shutil from datetime import datetime -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? -pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np from mapio.shake import getHeaderData @@ -32,10 +27,11 @@ DATETIMEFMT = '%Y-%m-%d %H:%M:%S' TSUNAMI_MAG_THRESH = 7.3 + def tdoc(doc, shakegrid, impact1, impact2, expdict, struct_comment, hist_comment,): eventinfo = doc.getEventInfo() assert eventinfo['mag'] == shakegrid.getEventDict()['magnitude'] - + imp1, imp2 = doc.getImpactComments() assert imp1 == impact1 and imp2 == impact2 @@ -50,7 +46,7 @@ def tdoc(doc, shakegrid, impact1, impact2, expdict, struct_comment, hist_comment scomm = doc.getStructureComment() assert scomm == struct_comment - + hcomm = doc.getHistoricalComment() assert hcomm == hist_comment @@ -64,23 +60,33 @@ def tdoc(doc, shakegrid, impact1, impact2, expdict, struct_comment, hist_comment assert doc.magnitude == shakegrid.getEventDict()['magnitude'] assert doc.time == shakegrid.getEventDict()['event_timestamp'] assert doc.summary_alert == 'yellow' - assert doc.processing_time == datetime.strptime(doc._pagerdict['pager']['processing_time'], DATETIMEFMT) + assert doc.processing_time == datetime.strptime( + doc._pagerdict['pager']['processing_time'], DATETIMEFMT) assert doc.version == doc._pagerdict['pager']['version_number'] + def test(): - homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? + homedir = os.path.dirname(os.path.abspath( + __file__)) # where is this script? 
fatfile = os.path.join(homedir, '..', 'data', 'fatality.xml') ecofile = os.path.join(homedir, '..', 'data', 'economy.xml') cityfile = os.path.join(homedir, '..', 'data', 'cities1000.txt') event = 'northridge' - shakefile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_grid.xml' % event) - popfile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_gpw.flt' % event) - isofile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_isogrid.bil' % event) - urbanfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_urban.bil') - oceanfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.json') - oceangridfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.bil') - timezonefile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_timezone.shp') - + shakefile = os.path.join(homedir, '..', 'data', + 'eventdata', event, '%s_grid.xml' % event) + popfile = os.path.join(homedir, '..', 'data', + 'eventdata', event, '%s_gpw.flt' % event) + isofile = os.path.join(homedir, '..', 'data', + 'eventdata', event, '%s_isogrid.bil' % event) + urbanfile = os.path.join(homedir, '..', 'data', + 'eventdata', 'northridge', 'northridge_urban.bil') + oceanfile = os.path.join( + homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.json') + oceangridfile = os.path.join( + homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_ocean.bil') + timezonefile = os.path.join( + homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_timezone.shp') + invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf') colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf') casfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf') @@ -93,24 +99,25 @@ def test(): results = exp.calcExposure(shakefile) shakegrid = exp.getShakeGrid() popgrid = exp.getPopulationGrid() - - pdffile, pngfile, mapcities = draw_contour(shakegrid, popgrid, oceanfile, oceangridfile, cityfile, basename) + + pdffile, pngfile, mapcities = draw_contour( + shakegrid, popgrid, oceanfile, oceangridfile, cityfile, basename) shutil.rmtree(tdir) - + popyear = 2012 shake_tuple = getHeaderData(shakefile) tsunami = shake_tuple[1]['magnitude'] >= TSUNAMI_MAG_THRESH - + semi = SemiEmpiricalFatality.fromDefault() semi.setGlobalFiles(popfile, popyear, urbanfile, isofile) semiloss, resfat, nonresfat = semi.getLosses(shakefile) - + popgrowth = PopulationGrowth.fromDefault() econexp = EconExposure(popfile, 2012, isofile) fatmodel = EmpiricalLoss.fromDefaultFatality() expobject = Exposure(popfile, 2012, isofile, popgrowth) - + expdict = expobject.calcExposure(shakefile) fatdict = fatmodel.getLosses(expdict) econexpdict = econexp.calcExposure(shakefile) @@ -142,21 +149,24 @@ def test(): location = 'At the top of the world.' 
is_released = True - + doc = PagerData() eventcode = shakegrid.getEventDict()['event_id'] versioncode = eventcode - doc.setInputs(shakegrid, timezonefile, pagerversion, versioncode, eventcode, tsunami, location, is_released) + doc.setInputs(shakegrid, timezonefile, pagerversion, + versioncode, eventcode, tsunami, location, is_released) doc.setExposure(expdict, econexpdict) doc.setModelResults(fatmodel, ecomodel, fatdict, ecodict, semiloss, resfat, nonresfat) - doc.setComments(impact1, impact2, struct_comment, hist_comment, secondary_comment) + doc.setComments(impact1, impact2, struct_comment, + hist_comment, secondary_comment) doc.setMapInfo(cityfile, mapcities) doc.validate() # let's test the property methods - tdoc(doc, shakegrid, impact1, impact2, expdict, struct_comment, hist_comment) + tdoc(doc, shakegrid, impact1, impact2, + expdict, struct_comment, hist_comment) # see if we can save this to a bunch of files then read them back in try: @@ -164,16 +174,16 @@ def test(): doc.saveToJSON(tdir) newdoc = PagerData() newdoc.loadFromJSON(tdir) - tdoc(newdoc, shakegrid, impact1, impact2, expdict, struct_comment, hist_comment) + tdoc(newdoc, shakegrid, impact1, impact2, + expdict, struct_comment, hist_comment) # test the xml saving method xmlfile = doc.saveToLegacyXML(tdir) except Exception as e: - assert 1==2 + assert 1 == 2 finally: shutil.rmtree(tdir) - - + if __name__ == '__main__': test() diff --git a/test/mail/formatter_test.py b/test/mail/formatter_test.py index 7b07ca3..e8af63e 100755 --- a/test/mail/formatter_test.py +++ b/test/mail/formatter_test.py @@ -10,19 +10,14 @@ from textwrap import dedent import re -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? 
-pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np import matplotlib.pyplot as plt import pandas as pd # local imports from losspager.mail.formatter import format_exposure, format_city_table,\ - format_earthquakes, format_msg, format_short, format_long + format_earthquakes, format_msg, format_short, format_long from losspager.schema import emailschema as es # def test_format_exposure(): @@ -137,7 +132,7 @@ # {'inside':False, 'exposure':947968}, # {'inside':False, 'exposure':54893}, # {'inside':False, 'exposure':654}] - + # expstr = format_exposure(exposures,'short') # shortmsg = format_short(version,expstr) @@ -171,11 +166,11 @@ # {'pop':550000, 'name':"Changwon", 'mmi':3.43}, # {'pop':3678555, 'name':"Busan", 'mmi':3.35}] -# historical_events = [{'distance':363.70022777,'magnitude':6.7, +# historical_events = [{'distance':363.70022777,'magnitude':6.7, # 'maxmmi':9, 'deaths':0, 'date':datetime(2000,10,6,4,30,20),'maxmmiexp':37718}, -# {'distance':120.744540337, 'magnitude':6.6, +# {'distance':120.744540337, 'magnitude':6.6, # 'maxmmi':9, 'deaths':1, 'date':datetime(2005,3,20,1,53,42), 'maxmmiexp':73948}, -# {'distance':220.632206626, 'magnitude':6.8, +# {'distance':220.632206626, 'magnitude':6.8, # 'maxmmi':8, 'deaths':2, 'date':datetime(2001,3,24,6,27,53), 'maxmmiexp':4737}] # eventinfo = {} @@ -203,11 +198,9 @@ # longmsg = format_long(version,eventinfo,expstr) # print('Long message:') # print(longmsg) - + if __name__ == '__main__': test_format_exposure() test_format_city_table() # test_format_msg() - - diff --git a/test/models/econ_test.py b/test/models/econ_test.py index 5752530..f699c18 100755 --- a/test/models/econ_test.py +++ b/test/models/econ_test.py @@ -8,12 +8,7 @@ from datetime import datetime from collections import OrderedDict -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? -pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np from mapio.geodict import GeoDict from mapio.gmt import GMTGrid @@ -26,17 +21,25 @@ from losspager.models.econexposure import EconExposure, GDP from losspager.models.growth import PopulationGrowth + def test(): event = 'northridge' - homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? + homedir = os.path.dirname(os.path.abspath( + __file__)) # where is this script? 
xmlfile = os.path.join(homedir, '..', 'data', 'economy.xml') - growthfile = os.path.join(homedir, '..', 'data', 'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls') - gdpfile = os.path.join(homedir, '..', 'data', 'API_NY.GDP.PCAP.CD_DS2_en_excel_v2.xls') - shakefile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_grid.xml' % event) - popfile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_gpw.flt' % event) - isofile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_isogrid.bil' % event) - shapefile = os.path.join(homedir, '..', 'data', 'eventdata', event, 'City_BoundariesWGS84', 'City_Boundaries.shp') - + growthfile = os.path.join(homedir, '..', 'data', + 'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls') + gdpfile = os.path.join(homedir, '..', 'data', + 'API_NY.GDP.PCAP.CD_DS2_en_excel_v2.xls') + shakefile = os.path.join(homedir, '..', 'data', + 'eventdata', event, '%s_grid.xml' % event) + popfile = os.path.join(homedir, '..', 'data', + 'eventdata', event, '%s_gpw.flt' % event) + isofile = os.path.join(homedir, '..', 'data', + 'eventdata', event, '%s_isogrid.bil' % event) + shapefile = os.path.join(homedir, '..', 'data', 'eventdata', + event, 'City_BoundariesWGS84', 'City_Boundaries.shp') + print('Test loading economic exposure from inputs...') popgrowth = PopulationGrowth.fromDefault() econexp = EconExposure(popfile, 2012, isofile) @@ -47,7 +50,7 @@ def test(): print('Passed loading empirical fatality model from XML file.') print('Testing calculating probabilities for standard PAGER ranges...') - expected = {'UK': 6819.883892*1e6, 'TotalDollars': 6819.883892*1e6} + expected = {'UK': 6819.883892 * 1e6, 'TotalDollars': 6819.883892 * 1e6} G = 2.5 probs = ecomodel.getProbabilities(expected, G) testprobs = {'0-1': 0.00020696841425738358, @@ -60,17 +63,18 @@ def test(): for key, value in probs.items(): np.testing.assert_almost_equal(value, testprobs[key]) print('Passed combining G values from all countries that contributed to losses...') - + print('Test retrieving economic model data from XML file...') model = ecomodel.getModel('af') - testmodel = LognormalModel('dummy', 9.013810, 0.100000, 4.113200, alpha=15.065400) + testmodel = LognormalModel( + 'dummy', 9.013810, 0.100000, 4.113200, alpha=15.065400) assert model == testmodel print('Passed retrieving economic model data from XML file.') print('Testing with known exposures/losses for 1994 Northridge EQ...') exposure = {'xf': np.array([0, 0, 556171936.807, 718990717350.0, 2.40385709638e+12, - 2.47073141687e+12, 1.2576210799e+12, 698888019337.0, - 1913733716.16, 0.0])} + 2.47073141687e+12, 1.2576210799e+12, 698888019337.0, + 1913733716.16, 0.0])} expodict = ecomodel.getLosses(exposure) testdict = {'xf': 25945225582} assert expodict['xf'] == testdict['xf'] @@ -100,8 +104,10 @@ def test(): for row in f: shapes.append(row) f.close() - ecoshapes, toteco = ecomodel.getLossByShapes(mmidata, popdata, isodata, shapes, popdict) - ecoshapes = sorted(ecoshapes, key=lambda shape: shape['properties']['dollars_lost'], reverse=True) + ecoshapes, toteco = ecomodel.getLossByShapes( + mmidata, popdata, isodata, shapes, popdict) + ecoshapes = sorted( + ecoshapes, key=lambda shape: shape['properties']['dollars_lost'], reverse=True) lalosses = 17323352577 for shape in ecoshapes: if shape['id'] == '312': # Los Angeles @@ -111,7 +117,6 @@ def test(): assert cname == 'Los Angeles' print('Passed assigning economic losses to polygons...') - if __name__ == '__main__': test() diff --git a/test/models/exposure_test.py 
b/test/models/exposure_test.py index d0961fc..eeba36a 100755 --- a/test/models/exposure_test.py +++ b/test/models/exposure_test.py @@ -6,18 +6,14 @@ import os.path import sys -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? -pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np # local imports from losspager.models.exposure import Exposure, calc_exposure from losspager.models.growth import PopulationGrowth + def basic_test(): print('Testing very basic exposure calculation...') mmidata = np.array([[7, 8, 8, 8, 7], @@ -25,12 +21,12 @@ def basic_test(): [8, 9, 10, 9, 8], [8, 9, 9, 8, 8], [7, 8, 8, 6, 5]], dtype=np.float32) - popdata = np.ones_like(mmidata)*1e7 + popdata = np.ones_like(mmidata) * 1e7 isodata = np.array([[4, 4, 4, 4, 4], [4, 4, 4, 4, 4], [4, 4, 156, 156, 156], [156, 156, 156, 156, 156], - [156, 156, 156, 156, 156]], dtype=np.int32) + [156, 156, 156, 156, 156]], dtype=np.int32) expdict = calc_exposure(mmidata, popdata, isodata) testdict = {4: np.array([0, 0, 0, 0, 0, 0, 2e7, 6e7, 4e7, 0]), 156: np.array([0, 0, 0, 0, 1e7, 1e7, 1e7, 6e7, 3e7, 1e7])} @@ -41,23 +37,29 @@ def basic_test(): print('Passed very basic exposure calculation...') + def test(): print('Testing Northridge exposure check (with GPW data).') events = ['northridge'] - homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? - excelfile = os.path.join(homedir, '..', 'data', 'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls') + homedir = os.path.dirname(os.path.abspath( + __file__)) # where is this script? + excelfile = os.path.join(homedir, '..', 'data', + 'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls') for event in events: - shakefile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_grid.xml' % event) - popfile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_gpw.flt' % event) - isofile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_isogrid.bil' % event) - + shakefile = os.path.join( + homedir, '..', 'data', 'eventdata', event, '%s_grid.xml' % event) + popfile = os.path.join(homedir, '..', 'data', + 'eventdata', event, '%s_gpw.flt' % event) + isofile = os.path.join(homedir, '..', 'data', + 'eventdata', event, '%s_isogrid.bil' % event) + growth = PopulationGrowth.fromDefault() exp = Exposure(popfile, 2012, isofile) results = exp.calcExposure(shakefile) - cmpexposure = [0, 0, 1817, 1767260, 5840985, 5780298, 2738374, 1559657, 4094, 0] + cmpexposure = [0, 0, 1817, 1767260, 5840985, + 5780298, 2738374, 1559657, 4094, 0] np.testing.assert_almost_equal(cmpexposure, results['TotalExposure']) print('Passed Northridge exposure check (with GPW data).') - if __name__ == '__main__': diff --git a/test/models/fatality_test.py b/test/models/fatality_test.py index 0b35f09..9638c52 100755 --- a/test/models/fatality_test.py +++ b/test/models/fatality_test.py @@ -8,12 +8,7 @@ from datetime import datetime from collections import OrderedDict -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? 
-pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np from mapio.geodict import GeoDict from mapio.gmt import GMTGrid @@ -26,11 +21,13 @@ from losspager.models.exposure import Exposure from losspager.models.growth import PopulationGrowth + def get_temp_file_name(): foo, tmpfile = tempfile.mkstemp() os.close(foo) return tmpfile + def lognormal_object_test(): model_dict = {'AF': (11.613073, 0.180683), 'CN': (10.328811, 0.100058), @@ -44,57 +41,61 @@ def lognormal_object_test(): print('Testing that fatality rates calculation is correct...') rates = model3.getLossRates(np.arange(5, 10)) - testrates = np.array([ 4.62833172e-10, 1.27558914e-06, 2.28027416e-04, 6.81282956e-03, 6.04383822e-02]) + testrates = np.array([4.62833172e-10, 1.27558914e-06, + 2.28027416e-04, 6.81282956e-03, 6.04383822e-02]) np.testing.assert_almost_equal(rates, testrates) print('Fatality rates calculation is correct.') print('Testing model fatality comparison...') - assert model1 > model2 + assert model1 > model2 print('Passed model fatality comparison.') - + print('More complete test of model fatality comparison...') mlist = [] for key, values in model_dict.items(): mlist.append(LognormalModel(key, values[0], values[1], 0.0)) - + mlist.sort() names = [m.name for m in mlist] assert names != ['JP', 'US', 'CN', 'IR', 'AF'] print('Passed more complete test of model fatality comparison.') - + print('Sorted list of country models:') - print('%5s %6s %6s %-6s %-14s' % ('Name', 'Theta', 'Beta', 'Area', 'Deaths')) + print('%5s %6s %6s %-6s %-14s' % + ('Name', 'Theta', 'Beta', 'Area', 'Deaths')) for model in mlist: exp_pop = np.array([1e6, 1e6, 1e6, 1e6, 1e6]) mmirange = np.arange(5, 10) deaths = model.getLosses(exp_pop, mmirange) - print('%5s %6.3f %6.3f %6.4f %14.4f' % (model.name, model.theta, model.beta, model.getArea(), deaths)) + print('%5s %6.3f %6.3f %6.4f %14.4f' % + (model.name, model.theta, model.beta, model.getArea(), deaths)) + def basic_test(): - + mmidata = np.array([[7, 8, 8, 8, 7], [8, 9, 9, 9, 8], [8, 9, 10, 9, 8], [8, 9, 9, 8, 8], [7, 8, 8, 6, 5]], dtype=np.float32) - popdata = np.ones_like(mmidata)*1e7 + popdata = np.ones_like(mmidata) * 1e7 isodata = np.array([[4, 4, 4, 4, 4], [4, 4, 4, 4, 4], [4, 4, 156, 156, 156], [156, 156, 156, 156, 156], - [156, 156, 156, 156, 156]], dtype=np.int32) - + [156, 156, 156, 156, 156]], dtype=np.int32) shakefile = get_temp_file_name() popfile = get_temp_file_name() isofile = get_temp_file_name() - geodict = GeoDict({'xmin': 0.5, 'xmax': 4.5, 'ymin': 0.5, 'ymax': 4.5, 'dx': 1.0, 'dy': 1.0, 'nx': 5, 'ny': 5}) + geodict = GeoDict({'xmin': 0.5, 'xmax': 4.5, 'ymin': 0.5, + 'ymax': 4.5, 'dx': 1.0, 'dy': 1.0, 'nx': 5, 'ny': 5}) layers = OrderedDict([('mmi', mmidata), ]) event_dict = {'event_id': 'us12345678', 'magnitude': 7.8, - 'depth': 10.0, 'lat': 34.123, 'lon': -118.123, - 'event_timestamp': datetime.utcnow(), - 'event_description': 'foo', - 'event_network': 'us'} + 'depth': 10.0, 'lat': 34.123, 'lon': -118.123, + 'event_timestamp': datetime.utcnow(), + 'event_description': 'foo', + 'event_network': 'us'} shake_dict = {'event_id': 'us12345678', 'shakemap_id': 'us12345678', 'shakemap_version': 1, 'code_version': '4.5', 'process_timestamp': datetime.utcnow(), 'shakemap_originator': 'us', 'map_status': 'RELEASED', 'shakemap_event_type': 'ACTUAL'} @@ -107,25 +108,27 @@ def basic_test(): 
isogrid.save(isofile) ratedict = {4: {'start': [2010, 2012, 2014, 2016], - 'end': [2012, 2014, 2016, 2018], - 'rate': [0.01, 0.02, 0.03, 0.04]}, + 'end': [2012, 2014, 2016, 2018], + 'rate': [0.01, 0.02, 0.03, 0.04]}, 156: {'start': [2010, 2012, 2014, 2016], - 'end': [2012, 2014, 2016, 2018], - 'rate': [0.02, 0.03, 0.04, 0.05]}} - + 'end': [2012, 2014, 2016, 2018], + 'rate': [0.02, 0.03, 0.04, 0.05]}} + popgrowth = PopulationGrowth(ratedict) popyear = datetime.utcnow().year exposure = Exposure(popfile, popyear, isofile, popgrowth=popgrowth) expdict = exposure.calcExposure(shakefile) - + modeldict = [LognormalModel('AF', 11.613073, 0.180683, 1.0), LognormalModel('CN', 10.328811, 0.100058, 1.0)] fatmodel = EmpiricalLoss(modeldict) # for the purposes of this test, let's override the rates # for Afghanistan and China with simpler numbers. - fatmodel.overrideModel('AF', np.array([0, 0, 0, 0, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 0], dtype=np.float32)) - fatmodel.overrideModel('CN', np.array([0, 0, 0, 0, 1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 0], dtype=np.float32)) + fatmodel.overrideModel('AF', np.array( + [0, 0, 0, 0, 1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 0], dtype=np.float32)) + fatmodel.overrideModel('CN', np.array( + [0, 0, 0, 0, 1e-7, 1e-6, 1e-5, 1e-4, 1e-3, 0], dtype=np.float32)) print('Testing very basic fatality calculation...') fatdict = fatmodel.getLosses(expdict) @@ -136,7 +139,7 @@ def basic_test(): for key, value in fatdict.items(): assert value == testdict[key] print('Passed very basic fatality calculation...') - + print('Testing grid fatality calculations...') mmidata = exposure.getShakeGrid().getLayer('mmi').getData() popdata = exposure.getPopulationGrid().getData() @@ -152,24 +155,32 @@ def basic_test(): modrates = rates * 2 # does this make event twice as deadly? # roughly the exposures from 2015-9-16 CL event - expo_pop = np.array([0, 0, 0, 1047000, 7314000, 1789000, 699000, 158000, 0, 0]) + expo_pop = np.array( + [0, 0, 0, 1047000, 7314000, 1789000, 699000, 158000, 0, 0]) mmirange = np.arange(5, 10) chile_deaths = chile.getLosses(expo_pop[4:9], mmirange) - chile_double_deaths = chile.getLosses(expo_pop[4:9], mmirange, rates=modrates) + chile_double_deaths = chile.getLosses( + expo_pop[4:9], mmirange, rates=modrates) print('Chile model fatalities: %f' % chile_deaths) print('Chile model x2 fatalities: %f' % chile_double_deaths) - + def test(): - homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? + homedir = os.path.dirname(os.path.abspath( + __file__)) # where is this script? 
xmlfile = os.path.join(homedir, '..', 'data', 'fatality.xml') - ratefile = os.path.join(homedir, '..', 'data', 'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls') + ratefile = os.path.join(homedir, '..', 'data', + 'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls') event = 'northridge' - shakefile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_grid.xml' % event) - popfile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_gpw.flt' % event) - isofile = os.path.join(homedir, '..', 'data', 'eventdata', event, '%s_isogrid.bil' % event) - shapefile = os.path.join(homedir, '..', 'data', 'eventdata', event, 'City_BoundariesWGS84', 'City_Boundaries.shp') - + shakefile = os.path.join(homedir, '..', 'data', + 'eventdata', event, '%s_grid.xml' % event) + popfile = os.path.join(homedir, '..', 'data', + 'eventdata', event, '%s_gpw.flt' % event) + isofile = os.path.join(homedir, '..', 'data', + 'eventdata', event, '%s_isogrid.bil' % event) + shapefile = os.path.join(homedir, '..', 'data', 'eventdata', + event, 'City_BoundariesWGS84', 'City_Boundaries.shp') + print('Test loading empirical fatality model from XML file...') fatmodel = EmpiricalLoss.fromDefaultFatality() print('Passed loading empirical fatality model from XML file.') @@ -179,9 +190,10 @@ def test(): assert fatmodel.getAlertLevel({'TotalFatalities': 5}) == 'yellow' assert fatmodel.getAlertLevel({'TotalFatalities': 100}) == 'orange' assert fatmodel.getAlertLevel({'TotalFatalities': 1000}) == 'red' - assert fatmodel.getAlertLevel({'TotalFatalities': 1e13}) == 'red' # 1000 times Earth's population + # 1000 times Earth's population + assert fatmodel.getAlertLevel({'TotalFatalities': 1e13}) == 'red' print('Passed getting alert level from various losses.') - + print('Test retrieving fatality model data from XML file...') model = fatmodel.getModel('af') testmodel = LognormalModel('dummy', 11.613073, 0.180683, 8.428822) @@ -189,7 +201,8 @@ def test(): print('Passed retrieving fatality model data from XML file.') print('Testing with known exposures/fatalities for 1994 Northridge EQ...') - exposure = {'xf': np.array([0, 0, 1506.0, 1946880.0, 6509154.0, 6690236.0, 3405381.0, 1892446.0, 5182.0, 0])} + exposure = {'xf': np.array( + [0, 0, 1506.0, 1946880.0, 6509154.0, 6690236.0, 3405381.0, 1892446.0, 5182.0, 0])} fatdict = fatmodel.getLosses(exposure) testdict = {'xf': 22} assert fatdict['xf'] == testdict['xf'] @@ -217,7 +230,6 @@ def test(): np.testing.assert_almost_equal(value, testprobs[key]) print('Passed combining G values from all countries that contributed to losses...') - print('Testing calculating total fatalities for Northridge...') expobject = Exposure(popfile, 2012, isofile) expdict = expobject.calcExposure(shakefile) @@ -225,7 +237,7 @@ def test(): testdict = {'XF': 18} assert fatdict['XF'] == testdict['XF'] print('Passed calculating total fatalities for Northridge...') - + print('Testing creating a fatality grid...') mmidata = expobject.getShakeGrid().getLayer('mmi').getData() popdata = expobject.getPopulationGrid().getData() @@ -241,7 +253,8 @@ def test(): for row in f: shapes.append(row) f.close() - fatshapes, totfat = fatmodel.getLossByShapes(mmidata, popdata, isodata, shapes, popdict) + fatshapes, totfat = fatmodel.getLossByShapes( + mmidata, popdata, isodata, shapes, popdict) fatalities = 12 for shape in fatshapes: if shape['id'] == '312': # Los Angeles @@ -252,7 +265,8 @@ def test(): break print('Passed assigning fatalities to polygons...') + if __name__ == '__main__': lognormal_object_test() - basic_test() + 
basic_test() test() diff --git a/test/models/growth_test.py b/test/models/growth_test.py index 9a36ca3..0ba6646 100755 --- a/test/models/growth_test.py +++ b/test/models/growth_test.py @@ -6,12 +6,7 @@ import os.path import sys -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? -pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np # local imports @@ -19,6 +14,7 @@ POPURL = 'http://esa.un.org/unpd/wpp/DVD/Files/1_Indicators%20(Standard)/EXCEL_FILES/1_Population/WPP2015_POP_F02_POPULATION_GROWTH_RATE.XLS' + def test_adjust_pop(): print('Testing simple positive population growth...') tpop = 2015 @@ -27,7 +23,7 @@ def test_adjust_pop(): pop = 1e6 rate = 0.01 # 1% growth rate newpop = adjust_pop(pop, tpop, tevent, rate) - assert newpop == pop + pop*rate + assert newpop == pop + pop * rate print('Passed simple positive population growth.') print('Testing simple negative population growth...') @@ -47,17 +43,20 @@ def test_adjust_pop(): newpop = adjust_pop(pop, tpop, tevent, rate) assert newpop == pop print('Passed simple zero population growth.') - + + def test_pop_growth(): - homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? + homedir = os.path.dirname(os.path.abspath( + __file__)) # where is this script? print('Testing loading Population Growth from UN spreadsheet...') pg = PopulationGrowth.fromDefault() print('Passed loading Population Growth from UN spreadsheet...') print('Testing getting growth rates for the US...') rate = pg.getRate(840, 1963) - assert rate == 1.373/100.0 - allrates = np.array([1.581, 1.724, 1.373, 0.987, 0.885, 0.948, 0.945, 0.985, 1.035, 1.211, 0.915, 0.907, 0.754])/100.0 + assert rate == 1.373 / 100.0 + allrates = np.array([1.581, 1.724, 1.373, 0.987, 0.885, 0.948, + 0.945, 0.985, 1.035, 1.211, 0.915, 0.907, 0.754]) / 100.0 starts, usrates = pg.getRates(840) np.testing.assert_almost_equal(usrates, allrates) print('Passed getting growth rate for the US...') @@ -74,8 +73,9 @@ def test_pop_growth(): tevent = 2016 ccode = 840 pop = 1e6 - newpop = pg.adjustPopulation(pop, ccode, tpop, tevent) # TODO: hand-verify this result! - + # TODO: hand-verify this result! + newpop = pg.adjustPopulation(pop, ccode, tpop, tevent) + # #2: time population data was "collected" is after the event time tpop = 2016 tevent = 2012 @@ -89,6 +89,7 @@ def test_pop_growth(): # pop = 1e6 # newpop = pg.adjustPopulation(pop,ccode,tpop,tevent) + if __name__ == '__main__': test_adjust_pop() test_pop_growth() diff --git a/test/models/semimodel_test.py b/test/models/semimodel_test.py index 4720e4b..86dcf20 100755 --- a/test/models/semimodel_test.py +++ b/test/models/semimodel_test.py @@ -8,12 +8,7 @@ from datetime import datetime from collections import OrderedDict -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? 
-pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np import pandas as pd from mapio.gmt import GMTGrid @@ -23,27 +18,33 @@ # local imports from losspager.models.semimodel import get_time_of_day, pop_dist, SemiEmpiricalFatality, URBAN, RURAL, make_test_semi_model + def test_times(): - tday, year, hour = get_time_of_day(datetime(2016, 6, 3, 16, 39, 0), -117.6981) + tday, year, hour = get_time_of_day( + datetime(2016, 6, 3, 16, 39, 0), -117.6981) assert tday == 'transit' assert year == 2016 assert hour == 8 - tday, year, hour = get_time_of_day(datetime(2016, 6, 3, 19, 39, 0), -117.6981) + tday, year, hour = get_time_of_day( + datetime(2016, 6, 3, 19, 39, 0), -117.6981) assert tday == 'day' assert year == 2016 assert hour == 11 - tday, year, hour = get_time_of_day(datetime(2016, 6, 4, 7, 39, 0), -117.6981) + tday, year, hour = get_time_of_day( + datetime(2016, 6, 4, 7, 39, 0), -117.6981) assert tday == 'night' assert year == 2016 assert hour == 23 - tday, year, hour = get_time_of_day(datetime(2017, 1, 1, 1, 0, 0), -117.6981) + tday, year, hour = get_time_of_day( + datetime(2017, 1, 1, 1, 0, 0), -117.6981) assert tday == 'transit' assert year == 2016 assert hour == 17 + def test_work(): popi = 2000 wforce = pd.Series({'WorkForceTotal': 0.5, @@ -58,23 +59,26 @@ def test_work(): np.testing.assert_almost_equal(outdoor, 725) - - - def test_model_real(): # test with real data popyear = 2012 - homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? + homedir = os.path.dirname(os.path.abspath( + __file__)) # where is this script? invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf') colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf') fatfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf') workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf') - growthfile = os.path.join(homedir, '..', 'data', 'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls') - popfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_gpw.flt') - shakefile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_grid.xml') - - isofile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_isogrid.bil') - urbanfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_urban.bil') + growthfile = os.path.join(homedir, '..', 'data', + 'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls') + popfile = os.path.join(homedir, '..', 'data', 'eventdata', + 'northridge', 'northridge_gpw.flt') + shakefile = os.path.join(homedir, '..', 'data', + 'eventdata', 'northridge', 'northridge_grid.xml') + + isofile = os.path.join(homedir, '..', 'data', 'eventdata', + 'northridge', 'northridge_isogrid.bil') + urbanfile = os.path.join(homedir, '..', 'data', + 'eventdata', 'northridge', 'northridge_urban.bil') semi = SemiEmpiricalFatality.fromDefault() semi.setGlobalFiles(popfile, popyear, urbanfile, isofile) @@ -86,15 +90,14 @@ def test_model_real(): def test_manual_calcs(): - homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? 
- invfile = os.path.join(homedir, '..', 'data', 'semi_inventory.hdf') - colfile = os.path.join(homedir, '..', 'data', 'semi_collapse_mmi.hdf') - fatfile = os.path.join(homedir, '..', 'data', 'semi_casualty.hdf') - workfile = os.path.join(homedir, '..', 'data', 'semi_workforce.hdf') - growthfile = os.path.join(homedir, '..', 'data', 'WPP2015_POP_F02_POPULATION_GROWTH_RATE.xls') - popfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_gpw.flt') - urbfile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_urban.bil') - isofile = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_isogrid.bil') + # where is this script? + homedir = os.path.dirname(os.path.abspath(__file__)) + popfile = os.path.join(homedir, '..', 'data', 'eventdata', + 'northridge', 'northridge_gpw.flt') + urbfile = os.path.join(homedir, '..', 'data', 'eventdata', + 'northridge', 'northridge_urban.bil') + isofile = os.path.join(homedir, '..', 'data', 'eventdata', + 'northridge', 'northridge_isogrid.bil') ccode = 'ID' timeofday = 'day' density = URBAN @@ -116,15 +119,15 @@ def test_manual_calcs(): nonres_fats = nonres * nonresinv * nonres_collapse * nonres_fat_rates # print(res_fats) # print(nonres_fats) - fatsum = int(res_fats.sum()+nonres_fats.sum()) + fatsum = int(res_fats.sum() + nonres_fats.sum()) print('Testing that "manual" calculations achieve tested result...') assert fatsum == 383 print('Passed.') - + + if __name__ == '__main__': test_times() test_work() test_manual_calcs() # test_model_single() test_model_real() - diff --git a/test/onepager/comment_test.py b/test/onepager/comment_test.py index 784da02..fc1953c 100755 --- a/test/onepager/comment_test.py +++ b/test/onepager/comment_test.py @@ -4,12 +4,7 @@ import os.path import sys -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? -pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np from impactutils.textformat.text import commify @@ -19,13 +14,16 @@ from losspager.onepager.comment import get_impact_comments, get_structure_comment, get_secondary_hazards from losspager.onepager.comment import get_historical_comment, get_secondary_comment, SEARCH_RADIUS + def test_impact(): # both impacts are green - tz_exp = np.array([0, 0, 0, 102302926316, 13976446978, 9127080479, 7567231, 0, 0, 0]) - ug_exp = np.array([0, 0, 240215309, 255785480321, 33062696103, 1965288263, 0, 0, 0, 0]) + tz_exp = np.array([0, 0, 0, 102302926316, 13976446978, + 9127080479, 7567231, 0, 0, 0]) + ug_exp = np.array([0, 0, 240215309, 255785480321, + 33062696103, 1965288263, 0, 0, 0, 0]) econexp = {'TZ': tz_exp, 'UG': ug_exp, - 'TotalEconomicExposure': tz_exp+ug_exp} + 'TotalEconomicExposure': tz_exp + ug_exp} fatdict = {'TZ': 0, 'UG': 0, 'TotalFatalities': 0} @@ -33,7 +31,8 @@ def test_impact(): 'UG': 262049, 'TotalDollars': 262332} event_year = 2016 - impact1, impact2 = get_impact_comments(fatdict, ecodict, econexp, event_year, 'TZ') + impact1, impact2 = get_impact_comments( + fatdict, ecodict, econexp, event_year, 'TZ') impact1_c1 = 'Green alert for shaking-related fatalities and economic losses. There is a low likelihood of casualties and damage.' 
assert impact1 == impact1_c1 assert impact2 == '' @@ -45,7 +44,8 @@ def test_impact(): ecodict = {'TZ': 283, 'UG': 262049, 'TotalDollars': 262332} - impact1, impact2 = get_impact_comments(fatdict, ecodict, econexp, event_year, 'TZ') + impact1, impact2 = get_impact_comments( + fatdict, ecodict, econexp, event_year, 'TZ') impact1_c2 = 'Yellow alert for shaking-related fatalities. Some casualties are possible and the impact should be relatively localized. Past events with this alert level have required a local or regional level response.' impact2_c2 = 'Green alert for economic losses. There is a low likelihood of damage.' assert impact1 == impact1_c2 @@ -58,7 +58,8 @@ def test_impact(): ecodict = {'TZ': 283, 'UG': 262049, 'TotalDollars': 262332} - impact1, impact2 = get_impact_comments(fatdict, ecodict, econexp, event_year, 'TZ') + impact1, impact2 = get_impact_comments( + fatdict, ecodict, econexp, event_year, 'TZ') impact1_c3 = 'Orange alert for shaking-related fatalities. Significant casualties are likely and the disaster is potentially widespread. Past events with this alert level have required a regional or national level response.' impact2_c3 = 'Green alert for economic losses. There is a low likelihood of damage.' assert impact1 == impact1_c3 @@ -71,7 +72,8 @@ def test_impact(): ecodict = {'TZ': 283, 'UG': 262049, 'TotalDollars': 262332} - impact1, impact2 = get_impact_comments(fatdict, ecodict, econexp, event_year, 'TZ') + impact1, impact2 = get_impact_comments( + fatdict, ecodict, econexp, event_year, 'TZ') impact1_c4 = 'Red alert for shaking-related fatalities. High casualties are probable and the disaster is likely widespread. Past events with this alert level have required a national or international level response.' impact2_c4 = 'Green alert for economic losses. There is a low likelihood of damage.' assert impact1 == impact1_c4 @@ -84,7 +86,8 @@ def test_impact(): ecodict = {'TZ': 0, 'UG': 1000001, 'TotalDollars': 1000001} - impact1, impact2 = get_impact_comments(fatdict, ecodict, econexp, event_year, 'TZ') + impact1, impact2 = get_impact_comments( + fatdict, ecodict, econexp, event_year, 'TZ') impact1_c5 = 'Yellow alert for economic losses. Some damage is possible and the impact should be relatively localized. Estimated economic losses are less than 1% of GDP of Uganda. Past events with this alert level have required a local or regional level response.' impact2_c5 = 'Green alert for shaking-related fatalities. There is a low likelihood of casualties.' assert impact1 == impact1_c5 @@ -95,9 +98,10 @@ def test_impact(): 'UG': 0, 'TotalFatalities': 0} ecodict = {'TZ': 0, - 'UG': 100e6+1, - 'TotalDollars': 100e6+1} - impact1, impact2 = get_impact_comments(fatdict, ecodict, econexp, event_year, 'TZ') + 'UG': 100e6 + 1, + 'TotalDollars': 100e6 + 1} + impact1, impact2 = get_impact_comments( + fatdict, ecodict, econexp, event_year, 'TZ') impact1_c5 = 'Orange alert for economic losses. Significant damage is likely and the disaster is potentially widespread. Estimated economic losses are 0-1% GDP of Uganda. Past events with this alert level have required a regional or national level response.' impact2_c5 = 'Green alert for shaking-related fatalities. There is a low likelihood of casualties.' 
assert impact1 == impact1_c5 @@ -108,9 +112,10 @@ def test_impact(): 'UG': 0, 'TotalFatalities': 0} ecodict = {'TZ': 0, - 'UG': 1000e6+1, - 'TotalDollars': 1000e6+1} - impact1, impact2 = get_impact_comments(fatdict, ecodict, econexp, event_year, 'TZ') + 'UG': 1000e6 + 1, + 'TotalDollars': 1000e6 + 1} + impact1, impact2 = get_impact_comments( + fatdict, ecodict, econexp, event_year, 'TZ') impact1_c5 = 'Red alert for economic losses. Extensive damage is probable and the disaster is likely widespread. Estimated economic losses are 1-10% GDP of Uganda. Past events with this alert level have required a national or international level response.' impact2_c5 = 'Green alert for shaking-related fatalities. There is a low likelihood of casualties.' assert impact1 == impact1_c5 @@ -123,7 +128,8 @@ def test_impact(): ecodict = {'TZ': 0, 'UG': 15e9, 'TotalDollars': 15e9} - impact1, impact2 = get_impact_comments(fatdict, ecodict, econexp, event_year, 'TZ') + impact1, impact2 = get_impact_comments( + fatdict, ecodict, econexp, event_year, 'TZ') impact1_c5 = 'Red alert for economic losses. Extensive damage is probable and the disaster is likely widespread. Estimated economic losses may exceed the GDP of Uganda. Past events with this alert level have required a national or international level response.' impact2_c5 = 'Green alert for shaking-related fatalities. There is a low likelihood of casualties.' assert impact1 == impact1_c5 @@ -134,9 +140,10 @@ def test_impact(): 'UG': 1, 'TotalFatalities': 1} ecodict = {'TZ': 0, - 'UG': 1e6+1, - 'TotalDollars': 1e6+1} - impact1, impact2 = get_impact_comments(fatdict, ecodict, econexp, event_year, 'TZ') + 'UG': 1e6 + 1, + 'TotalDollars': 1e6 + 1} + impact1, impact2 = get_impact_comments( + fatdict, ecodict, econexp, event_year, 'TZ') impact1_c5 = 'Yellow alert for shaking-related fatalities and economic losses. Some casualties and damage are possible and the impact should be relatively localized. Past yellow alerts have required a local or regional level response.' impact2_c5 = 'Estimated economic losses are less than 1% of GDP of Uganda.' assert impact1 == impact1_c5 @@ -147,9 +154,10 @@ def test_impact(): 'UG': 101, 'TotalFatalities': 101} ecodict = {'TZ': 0, - 'UG': 100e6+1, - 'TotalDollars': 100e6+1} - impact1, impact2 = get_impact_comments(fatdict, ecodict, econexp, event_year, 'TZ') + 'UG': 100e6 + 1, + 'TotalDollars': 100e6 + 1} + impact1, impact2 = get_impact_comments( + fatdict, ecodict, econexp, event_year, 'TZ') impact1_c5 = 'Orange alert for shaking-related fatalities and economic losses. Significant casualties and damage are likely and the disaster is potentially widespread. Past orange alerts have required a regional or national level response.' impact2_c5 = 'Estimated economic losses are 0-1% GDP of Uganda.' assert impact1 == impact1_c5 @@ -160,14 +168,16 @@ def test_impact(): 'UG': 1001, 'TotalFatalities': 1001} ecodict = {'TZ': 0, - 'UG': 1e9+1, - 'TotalDollars': 1e9+1} - impact1, impact2 = get_impact_comments(fatdict, ecodict, econexp, event_year, 'TZ') + 'UG': 1e9 + 1, + 'TotalDollars': 1e9 + 1} + impact1, impact2 = get_impact_comments( + fatdict, ecodict, econexp, event_year, 'TZ') impact1_c5 = 'Red alert for shaking-related fatalities and economic losses. High casualties and extensive damage are probable and the disaster is likely widespread. Past red alerts have required a national or international response.' impact2_c5 = 'Estimated economic losses are 1-10% GDP of Uganda.' 
assert impact1 == impact1_c5 assert impact2 == impact2_c5 + def test_structure(): resfat = {'IN': {'A1': 434, 'A2': 837}, 'NP': {'UFB': 200, 'W1': 100}} @@ -178,6 +188,7 @@ def test_structure(): cmpstr = 'Overall, the population in this region resides in structures that are vulnerable to earthquake shaking, though resistant structures exist. The predominant vulnerable building type is adobe block with light roof construction.' assert structure_comment == cmpstr + def test_hazards(): clat = 0.37 clon = -79.94 @@ -189,7 +200,8 @@ def test_hazards(): for hazard in hazards: print('Looking for %s in comment string...' % hazard) assert comment.find(hazard) > -1 - + + def test_historical(): clat = 0.37 clon = -79.94 @@ -202,12 +214,13 @@ def test_historical(): expocat = ExpoCat.fromDefault() minicat = expocat.selectByRadius(clat, clon, SEARCH_RADIUS) df = minicat.getDataFrame() - df = df.sort_values(['TotalDeaths', 'MaxMMI', 'NumMaxMMI'], ascending=False) + df = df.sort_values( + ['TotalDeaths', 'MaxMMI', 'NumMaxMMI'], ascending=False) assert histcomment.find(commify(int(df.iloc[0]['TotalDeaths']))) > -1 - + + if __name__ == '__main__': test_hazards() test_historical() test_structure() test_impact() - diff --git a/test/onepager/pagercity_test.py b/test/onepager/pagercity_test.py index 8510083..fb9ce7d 100755 --- a/test/onepager/pagercity_test.py +++ b/test/onepager/pagercity_test.py @@ -7,12 +7,7 @@ import sys from datetime import datetime -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? -pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np from mapio.basemapcity import BasemapCities from mapio.city import Cities @@ -21,11 +16,15 @@ # local imports from losspager.onepager.pagercity import PagerCities + def test(): - homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? + homedir = os.path.dirname(os.path.abspath( + __file__)) # where is this script? cityfile = os.path.join(homedir, '..', 'data', 'cities1000.txt') - shakefile1 = os.path.join(homedir, '..', 'data', 'eventdata', 'northridge', 'northridge_grid.xml') - shakefile2 = os.path.join(homedir, '..', 'data', 'eventdata', 'lomaprieta', 'lomaprieta_grid.xml') + shakefile1 = os.path.join(homedir, '..', 'data', + 'eventdata', 'northridge', 'northridge_grid.xml') + shakefile2 = os.path.join(homedir, '..', 'data', + 'eventdata', 'lomaprieta', 'lomaprieta_grid.xml') shakefiles = [shakefile1, shakefile2] lengths = [11, 11] first_city = ['Santa Clarita', 'Lexington Hills'] @@ -50,6 +49,7 @@ def test(): assert rows.iloc[-1]['name'] == last_city[ic] print('Passed.') ic += 1 - + + if __name__ == '__main__': test() diff --git a/test/schema/emailschema_test.py b/test/schema/emailschema_test.py index 700f057..755c701 100755 --- a/test/schema/emailschema_test.py +++ b/test/schema/emailschema_test.py @@ -9,12 +9,7 @@ from datetime import datetime import shutil -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? 
-pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np import matplotlib.pyplot as plt from shapely.geometry.point import Point @@ -28,6 +23,7 @@ DATEFMT = '%Y-%m-%d %H:%M:%S' + def test_create_db(userfile=None, orgfile=None): memurl = 'sqlite://' # create a user in a dictionary @@ -55,7 +51,8 @@ def test_create_db(userfile=None, orgfile=None): f.close() try: schemadir = get_data_path('schema') - session = emailschema.create_db(memurl, schemadir, users_jsonfile=userfile, orgs_jsonfile=orgfile) + session = emailschema.create_db( + memurl, schemadir, users_jsonfile=userfile, orgs_jsonfile=orgfile) print('Testing contents of in-memory database...') assert session.query(emailschema.User).count() == 1 assert session.query(emailschema.Region).count() == 47 @@ -74,24 +71,24 @@ def test_region_serialization(): [-112.060547, 31.791221], [-126.298828, 31.416944], [-125.771484, 41.957448]]] - + lon, lat = (-119.443359, 37.149371) regiondict = {'type': 'Feature', 'properties': {'code': 'US_States-California', - 'desc': 'California'}, + 'desc': 'California'}, 'geometry': {'type': 'Polygon', - 'coordinates': ca_coords}} - + 'coordinates': ca_coords}} region = emailschema.Region() region.fromDict(session, regiondict) assert region.containsPoint(lat, lon) == True - + regiondict2 = region.toDict() assert regiondict == regiondict2 - + session.close() + def test_user_serialization(): memurl = 'sqlite://' schemadir = get_data_path('schema') @@ -113,7 +110,7 @@ def test_user_serialization(): 'createdon': datetime.utcnow().strftime(emailschema.TIME_FORMAT), 'org': 'USGS', 'addresses': [address]} - + user = emailschema.User() # inflate the user from the dictionary user.fromDict(session, userdict) @@ -143,9 +140,10 @@ def test_user_serialization(): tvalue = userdict['addresses'][0]['profiles'][0]['thresholds'][0]['value'] tvalue2 = userdict2['addresses'][0]['profiles'][0]['thresholds'][0]['value'] assert tvalue == tvalue2 - + session.close() + def test_get_polygon(): memurl = 'sqlite://' schemadir = get_data_path('schema') @@ -200,17 +198,21 @@ def test_get_polygon(): 'USCENTCOM': [(21.767152, 49.350586)]} for regioncode, plist in POINTS.items(): - region = session.query(emailschema.Region).filter(emailschema.Region.name == regioncode).first() + region = session.query(emailschema.Region).filter( + emailschema.Region.name == regioncode).first() if region is None: - raise Exception('Could not find region %s in database!' % regioncode) + raise Exception( + 'Could not find region %s in database!' % regioncode) for point in plist: lat, lon = point print('Checking region %s...' % regioncode) if not region.containsPoint(lat, lon): - raise Exception('Region %s does not contain point (%.4f,%.4f).' % (regioncode, lat, lon)) + raise Exception( + 'Region %s does not contain point (%.4f,%.4f).' 
% (regioncode, lat, lon)) session.close() + def test_delete_cascade(): memurl = 'sqlite://' schemadir = get_data_path('schema') @@ -256,7 +258,6 @@ def test_delete_cascade(): assert addresses_after_delete == 0 print('No users, no addresses after deleting user.') - # test deleting cascades with events event = emailschema.Event(eventcode='us2017abcd') version = emailschema.Version(versioncode='us2017abcd', @@ -280,7 +281,7 @@ def test_delete_cascade(): versions_before_add = session.query(emailschema.Version).count() assert events_before_add == 0 assert versions_before_add == 0 - + event.versions.append(version) session.add(event) session.commit() @@ -293,15 +294,15 @@ def test_delete_cascade(): session.delete(event) session.commit() - + events_after_delete = session.query(emailschema.Event).count() versions_after_delete = session.query(emailschema.Version).count() assert events_after_delete == 0 assert versions_after_delete == 0 - + session.close() - + # def test_get_email(): # dbfile = os.path.abspath(os.path.join(homedir,'..','data','losspager_test.db')) # url = 'sqlite:///%s' % dbfile @@ -326,7 +327,8 @@ def test_delete_cascade(): # # for threshold in profile.thresholds: # # print('\t\t'+str(threshold)) # assert test_num_addresses == naddresses - + + if __name__ == '__main__': userfile = None orgfile = None @@ -334,12 +336,10 @@ def test_delete_cascade(): userfile = sys.argv[1] if len(sys.argv) > 2: orgfile = sys.argv[2] - + test_create_db(userfile=userfile, orgfile=orgfile) test_region_serialization() test_user_serialization() test_get_polygon() test_delete_cascade() # test_get_email() - - diff --git a/test/utils/compass_test.py b/test/utils/compass_test.py index 6d8baf2..1f6ccda 100755 --- a/test/utils/compass_test.py +++ b/test/utils/compass_test.py @@ -6,17 +6,13 @@ import os.path import sys -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? -pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np # local imports from losspager.utils import compass + def test(): print('Testing various different compass angles...') assert compass.get_compass_dir(0, 0, 1, 0, format='short') == 'N' @@ -30,5 +26,6 @@ def test(): assert compass.get_compass_dir(0, 0, -1, 1, format='short') == 'SE' print('Passed.') + if __name__ == '__main__': test() diff --git a/test/utils/country_test.py b/test/utils/country_test.py index c9e7f0d..dd80362 100755 --- a/test/utils/country_test.py +++ b/test/utils/country_test.py @@ -6,22 +6,20 @@ import os.path import sys -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? -pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np # local imports from losspager.utils.country import Country + def test(): - homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? 
- countryfile = os.path.abspath(os.path.join(homedir, '..', '..', 'data', 'countries.csv')) - testdict = {'ISON': 840, 'ISO3': 'USA', - 'ISO2': 'US', 'LongName': 'United States', + homedir = os.path.dirname(os.path.abspath( + __file__)) # where is this script? + countryfile = os.path.abspath(os.path.join( + homedir, '..', '..', 'data', 'countries.csv')) + testdict = {'ISON': 840, 'ISO3': 'USA', + 'ISO2': 'US', 'LongName': 'United States', 'Name': 'United States', 'Population': 324515000} print('Test creating Country object from CSV file...') @@ -61,11 +59,11 @@ def test(): # test multiple kinds of numbers... print('Make sure all kinds of numpy numbers are supported...') - numbers = [840,840.0, - np.int16(840),np.uint16(840), - np.int32(840),np.uint32(840), - np.int64(840),np.uint64(840), - np.float32(840),np.float64(840)] + numbers = [840, 840.0, + np.int16(840), np.uint16(840), + np.int32(840), np.uint32(840), + np.int64(840), np.uint64(840), + np.float32(840), np.float64(840)] for number in numbers: row_t = country.getCountry(number) assert row_t == testdict diff --git a/test/utils/expocat_test.py b/test/utils/expocat_test.py index bd34b12..a20f08d 100755 --- a/test/utils/expocat_test.py +++ b/test/utils/expocat_test.py @@ -7,37 +7,37 @@ import sys from datetime import datetime -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? -pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed pager stuff - -# third party imports +# third party imports import numpy as np # local imports from losspager.utils.expocat import ExpoCat + def commify(value): if np.isnan(value): return 'NaN' return format(int(value), ",d") + def get_max_mmi(tdict, minimum=1000): - indices = ['MMI1', 'MMI2', 'MMI3', 'MMI4', 'MMI5', 'MMI6', 'MMI7', 'MMI8', 'MMI9+'] + indices = ['MMI1', 'MMI2', 'MMI3', 'MMI4', + 'MMI5', 'MMI6', 'MMI7', 'MMI8', 'MMI9+'] exparray = np.array([tdict[idx] for idx in indices]) imax = (exparray > 1000).nonzero()[0].max() - return (imax+1, exparray[imax]) + return (imax + 1, exparray[imax]) + def test(): - homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? + homedir = os.path.dirname(os.path.abspath( + __file__)) # where is this script? expocat = ExpoCat.fromDefault() clat = 0.37 clon = -79.94 radius = 400 ndeaths = 9 minicat = expocat.selectByRadius(clat, clon, radius) - + print('Testing that historical events returned are correct...') maxmmi = 8 nmaxmmi = 103000 @@ -53,7 +53,7 @@ def test(): tsunami = expocat.selectByHazard('tsunami') liquefaction = expocat.selectByHazard('liquefaction') landslide = expocat.selectByHazard('landslide') - + assert fire._dataframe['Fire'].sum() == len(fire) assert tsunami._dataframe['Tsunami'].sum() == len(tsunami) assert liquefaction._dataframe['Liquefaction'].sum() == len(liquefaction) @@ -63,9 +63,9 @@ def test(): test_time = datetime(1994, 1, 1) expocat.excludeFutureEvents(test_time) assert expocat._dataframe['Time'].max() < test_time - + print('Passed.') - - + + if __name__ == '__main__': test() diff --git a/test/utils/logger_test.py b/test/utils/logger_test.py index 6ff971e..88e8e56 100755 --- a/test/utils/logger_test.py +++ b/test/utils/logger_test.py @@ -6,51 +6,55 @@ import sys import shutil -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? 
-pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - # local imports from losspager.utils.logger import PagerLogger -def test(email=None, host=None): +# this test is disabled for the purposes of pytest +# because if this runs before any other test code +# that calls any code that does logging, +# that code will be looking for the logger +# that we set up here. According to one mailing +# list entry, loggers persist until the shell is closed. + + +def _test(email=None, host=None): tdir = tempfile.mkdtemp() try: - logfile = os.path.join(tdir, 'logfile.log') - print('Logfile will be %s' % logfile) + global_log = os.path.join(tdir, 'global.log') + + plog = PagerLogger(global_log, email, email, + host, debug=False) + logger = plog.getLogger() + logger.info('Test 1') + + version_log = os.path.join(tdir, 'version.log') + plog.setVersionHandler(version_log) + + logger.info('Test 2') - redirect = True if email is not None: - redirect = False - - plog = PagerLogger(logfile, from_address=email, mail_host=host, redirect=redirect) - plogger = plog.getLogger() - - if not redirect: - plog.addEmailHandler([email]) - - if redirect: - print('This should show up as information! (print function)') - sys.stderr.write('This should show up as an error! (write method)') - else: - plogger.info('This should show up as information! (info method)') - plogger.error('This should show up as an error! (error method)') - try: - raise Exception("This should show up as critical (and perhaps generate an email)!") - except Exception as e: - plogger.critical(e) - - plog.stopLogging() - f = open(logfile, 'rt') - data = f.read() - print(data) - assert len(data) - except Exception as e1: + logger.critical('Help me!') + + for handler in logger.handlers: + handler.close() + + with open(global_log, 'rt') as f: + global_text = f.read() + + with open(version_log, 'rt') as f: + version_text = f.read() + + assert global_text.find('Test 1') > -1 + assert version_text.find('Test 2') > -1 + + plog.close() + + except Exception: pass finally: shutil.rmtree(tdir) + if __name__ == '__main__': if len(sys.argv) == 3: email = sys.argv[1] @@ -58,4 +62,4 @@ def test(email=None, host=None): else: email = None host = None - test(email=email, host=host) + _test(email=email, host=host) diff --git a/test/utils/probs_test.py b/test/utils/probs_test.py index 91c62db..b4774a1 100755 --- a/test/utils/probs_test.py +++ b/test/utils/probs_test.py @@ -6,12 +6,7 @@ import os.path import sys -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? 
-pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np # local imports @@ -23,9 +18,12 @@ def test(): assert phi(5.0) == 0.99999971334842808 assert invphi(0.99999971334842808) == 4.9999999999701759 assert calcEmpiricalProbFromValue(2.5, 1e6, 10e6) == 0.82148367161911606 - assert calcEmpiricalValueFromProb(2.5, 1e6, 0.82148367161911606) == 10000000.00000999 - assert calcEmpiricalProbFromRange(2.5, 1e6, [0, 1]) == 1.6362032828176688e-08 + assert calcEmpiricalValueFromProb( + 2.5, 1e6, 0.82148367161911606) == 10000000.00000999 + assert calcEmpiricalProbFromRange( + 2.5, 1e6, [0, 1]) == 1.6362032828176688e-08 print('Passed testing all probs functions.') + if __name__ == '__main__': test() diff --git a/test/utils/region_test.py b/test/utils/region_test.py index c3ba884..9825fd5 100755 --- a/test/utils/region_test.py +++ b/test/utils/region_test.py @@ -6,17 +6,13 @@ import os.path import sys -# hack the path so that I can debug these functions if I need to -homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script? -pagerdir = os.path.abspath(os.path.join(homedir, '..', '..')) -sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff - -# third party imports +# third party imports import numpy as np # local imports from losspager.utils.region import PagerRegions + def test(): pregions = PagerRegions() assert pregions.getRegion('NZ') == 1 @@ -26,13 +22,18 @@ def test(): assert pregions.getRegion('GT') == 5 assert pregions.getRegion('SS') == 6 - assert pregions.getComment(1) == 'Overall, the population in this region resides in structures that are highly resistant to earthquake shaking, though some vulnerable structures exist.' - assert pregions.getComment(2) == 'Overall, the population in this region resides in structures that are resistant to earthquake shaking, though vulnerable structures exist.' - assert pregions.getComment(3) == 'Overall, the population in this region resides in structures that are a mix of vulnerable and earthquake resistant construction.' - assert pregions.getComment(4) == 'Overall, the population in this region resides in structures that are vulnerable to earthquake shaking, though resistant structures exist.' - assert pregions.getComment(5) == 'Overall, the population in this region resides in structures that are highly vulnerable to earthquake shaking, though some resistant structures exist.' - assert pregions.getComment(6) == 'Overall, the population in this region resides in structures that are extremely vulnerable to earthquake shaking, though some resistant structures exist.' - + assert pregions.getComment( + 1) == 'Overall, the population in this region resides in structures that are highly resistant to earthquake shaking, though some vulnerable structures exist.' + assert pregions.getComment( + 2) == 'Overall, the population in this region resides in structures that are resistant to earthquake shaking, though vulnerable structures exist.' + assert pregions.getComment( + 3) == 'Overall, the population in this region resides in structures that are a mix of vulnerable and earthquake resistant construction.' + assert pregions.getComment( + 4) == 'Overall, the population in this region resides in structures that are vulnerable to earthquake shaking, though resistant structures exist.' 
+    assert pregions.getComment(
+        5) == 'Overall, the population in this region resides in structures that are highly vulnerable to earthquake shaking, though some resistant structures exist.'
+    assert pregions.getComment(
+        6) == 'Overall, the population in this region resides in structures that are extremely vulnerable to earthquake shaking, though some resistant structures exist.'
 
 
 if __name__ == '__main__':
diff --git a/test/vis/contourmap_test.py b/test/vis/contourmap_test.py
index 487d090..d71545b 100755
--- a/test/vis/contourmap_test.py
+++ b/test/vis/contourmap_test.py
@@ -3,19 +3,10 @@
 # stdlib imports
 import tempfile
 import os.path
-import sys
-from collections import OrderedDict
 import warnings
 import shutil
 
-# hack the path so that I can debug these functions if I need to
-homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script?
-pagerdir = os.path.abspath(os.path.join(homedir, '..', '..'))
-sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff
-
-# third party imports
-import numpy as np
-
+# third party imports
 # non-interactive backend
 import matplotlib
 matplotlib.use('Agg')
@@ -23,28 +14,32 @@ import matplotlib.pyplot as plt
 
 from mapio.shake import ShakeGrid
 from mapio.gdal import GDALGrid
+from mapio.gmt import GMTGrid
+from losspager.utils.ftype import get_file_type
 
 # local imports
 from losspager.vis.contourmap import draw_contour
 from losspager.models.exposure import Exposure, calc_exposure
+# from losspager.utils.logger import PagerLogger
 
 
 def test(outfolder=None):
+    homedir = os.path.dirname(os.path.abspath(__file__))
     topdata = os.path.abspath(os.path.join(homedir, '..', 'data'))
     datadir = os.path.abspath(os.path.join(topdata, 'eventdata', 'northridge'))
-    
+
     cityfile = os.path.join(topdata, 'cities1000.txt')
     oceanfile = os.path.join(datadir, 'northridge_ocean.json')
     shakefile = os.path.join(datadir, 'northridge_grid.xml')
     popfile = os.path.join(datadir, 'northridge_gpw.flt')
     ogridfile = os.path.join(datadir, 'northridge_ocean.bil')
     isofile = os.path.join(datadir, 'northridge_isogrid.bil')
-    
+
     exp = Exposure(popfile, 2012, isofile)
     results = exp.calcExposure(shakefile)
     shakegrid = exp.getShakeGrid()
     popgrid = exp.getPopulationGrid()
-    
+
     print('Testing to see if PAGER can successfully create contour map...')
     hasfolder = False
     if outfolder is not None:
@@ -53,7 +48,8 @@
     if not hasfolder:
         outfolder = tempfile.mkdtemp()
     basefile = os.path.join(outfolder, 'output')
-    pdffile, pngfile, mapcities = draw_contour(shakegrid, popgrid, oceanfile, ogridfile, cityfile, basefile)
+    pdffile, pngfile, mapcities = draw_contour(
+        shakegrid, popgrid, oceanfile, ogridfile, cityfile, basefile)
     print('Output pdf is %s, output png is %s.' % (pdffile, pngfile))
     assert os.path.isfile(pngfile) and os.path.isfile(pdffile)
 
@@ -63,7 +59,8 @@
     if os.path.isdir(outfolder) and not hasfolder:
         shutil.rmtree(outfolder)
     print('Passed.')
-    
+
+
 if __name__ == '__main__':
     warnings.filterwarnings("ignore")
     outfolder = os.path.expanduser('~')
diff --git a/test/vis/impactscale_test.py b/test/vis/impactscale_test.py
index 9adcf4b..a6fb4e1 100755
--- a/test/vis/impactscale_test.py
+++ b/test/vis/impactscale_test.py
@@ -8,12 +8,7 @@
 import hashlib
 import shutil
 
-# hack the path so that I can debug these functions if I need to
-homedir = os.path.dirname(os.path.abspath(__file__)) # where is this script?
-pagerdir = os.path.abspath(os.path.join(homedir, '..', '..'))
-sys.path.insert(0, pagerdir) # put this at the front of the system path, ignoring any installed shakemap stuff
-
-# third party imports
+# third party imports
 import numpy as np
 import matplotlib.pyplot as plt
 import matplotlib
@@ -23,8 +18,9 @@
 matplotlib.use('Agg')
 
 
+
 def img_test():
-    # NOTE: This isn't a great test, so I am turning it off for now, 
+    # NOTE: This isn't a great test, so I am turning it off for now,
     # until I can find a more reliable way to test images.
     testhash1 = b'\\\x03\xa9\x04\xe6\x8e\x99\x87r\xf2\xd9\xb9\xd9\xf8T\x83'
     testhash2 = b'\xe0\x19\xee$\x1a\xdcp\xdfX\x16\x8c\xb4\x95!t\xe0'
@@ -49,14 +45,14 @@
         print('Testing first economic plot is consistent with prior results...')
         assert m.digest() == testhash1
         print('Passed.')
-        
+
         ranges = OrderedDict([('0-1', 0.8),
-                               ('1-10', 0.1),
-                               ('10-100', 0.05),
-                               ('100-1000', 0.03),
-                               ('1000-10000', 0.02),
-                               ('10000-100000', 0.0),
-                               ('100000-10000000', 0.0)])
+                              ('1-10', 0.1),
+                              ('10-100', 0.05),
+                              ('100-1000', 0.03),
+                              ('1000-10000', 0.02),
+                              ('10000-100000', 0.0),
+                              ('100000-10000000', 0.0)])
         f = drawImpactScale(ranges, 'fatality', debug=False)
         outfile = os.path.join(homedir, 'test2.png')
         f.savefig(outfile)
@@ -69,12 +65,12 @@
         print('Passed.')
 
         ranges = OrderedDict([('0-1', 0.1),
-                               ('1-10', 0.6),
-                               ('10-100', 0.25),
-                               ('100-1000', 0.03),
-                               ('1000-10000', 0.02),
-                               ('10000-100000', 0.0),
-                               ('100000-10000000', 0.0)])
+                              ('1-10', 0.6),
+                              ('10-100', 0.25),
+                              ('100-1000', 0.03),
+                              ('1000-10000', 0.02),
+                              ('10000-100000', 0.0),
+                              ('100000-10000000', 0.0)])
         f = drawImpactScale(ranges, 'fatality', debug=False)
         outfile = os.path.join(homedir, 'test3.png')
         f.savefig(outfile)
@@ -87,12 +83,12 @@
         print('Passed.')
 
         ranges = OrderedDict([('0-1', 0.1),
-                               ('1-10', 0.6),
-                               ('10-100', 0.25),
-                               ('100-1000', 0.03),
-                               ('1000-10000', 0.0),
-                               ('10000-100000', 0.45),
-                               ('100000-10000000', 0.)])
+                              ('1-10', 0.6),
+                              ('10-100', 0.25),
+                              ('100-1000', 0.03),
+                              ('1000-10000', 0.0),
+                              ('10000-100000', 0.45),
+                              ('100000-10000000', 0.)])
         f = drawImpactScale(ranges, 'fatality', debug=False)
         outfile = os.path.join(homedir, 'test4.png')
         f.savefig(outfile)
@@ -103,12 +99,13 @@
         print('Testing third fatality plot is consistent with prior results...')
         assert m.digest() == testhash4
         print('Passed.')
-        
+
     except Exception as error:
         raise error
     finally:
         if os.path.isdir(homedir):
             shutil.rmtree(homedir)
+
 
 if __name__ == '__main__':
     img_test()
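Note on the logger test changes at the top of this patch: renaming test() to _test() keeps the function out of pytest's default "test*" collection pattern, so the logging check now runs only when the module is executed directly, and other tests no longer inherit the handlers it installs. The lines below are a minimal sketch, not part of the patch, of exercising the patched PagerLogger interface by hand; the argument order is assumed from the call in the new test body, and the e-mail address and SMTP host are placeholders rather than values from this repository.

    # Sketch only: mirrors the calls made in the patched _test() above.
    # The PagerLogger argument order is assumed from that call, and
    # 'me@example.org' / 'smtp.example.org' are placeholder values.
    import os.path
    import tempfile

    from losspager.utils.logger import PagerLogger

    tdir = tempfile.mkdtemp()
    plog = PagerLogger(os.path.join(tdir, 'global.log'),
                       'me@example.org', 'me@example.org',
                       'smtp.example.org', debug=False)
    logger = plog.getLogger()
    logger.info('written to the global log')

    # add a per-version log file, as the 'Test 2' assertion exercises
    plog.setVersionHandler(os.path.join(tdir, 'version.log'))
    logger.info('written to the version log')

    plog.close()

The underscore prefix is a lighter-weight way to keep a module-level check out of collection than a skip marker, at the cost of the check no longer appearing as skipped in test reports.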