diff --git a/caldates/master_wls.csv b/caldates/master_wls.csv new file mode 100644 index 000000000..c2f659fbf --- /dev/null +++ b/caldates/master_wls.csv @@ -0,0 +1,3 @@ +CALTAG, UT_start_date, UT_end_date, CALPATH, comment +1.0, 2000-01-01 00:00:00, 2024-02-03 00:00:00, /data/reference_fits/430LFCWLS.fits, +2.0, 2024-02-03 00:00:01, 3000-01-01 00:00:00, /data/reference_fits/Era2_RoughWLS_woCAL.fits, diff --git a/caldates/order_mask.csv b/caldates/order_mask.csv new file mode 100644 index 000000000..c2a6008e4 --- /dev/null +++ b/caldates/order_mask.csv @@ -0,0 +1,4 @@ +CALTAG, UT_start_date, UT_end_date, CALPATH, comment +0.5, 2000-01-01 00:00:00, 2024-02-03 00:00:00, /data/reference_fits/kpf_20230730_order_mask_untrimmed_made20230804.fits, +1.5, 2024-02-03 00:00:01, 2024-02-11 00:00:00, /data/reference_fits/kpf_20240206_order_mask_untrimmed_made20240206.fits, +2.0, 2024-02-11 00:00:01, 3000-01-01 00:00:00, /data/reference_fits/kpf_20240211_order_mask_untrimmed_made20240212.fits, \ No newline at end of file diff --git a/caldates/order_trace.csv b/caldates/order_trace.csv new file mode 100644 index 000000000..5dfa20006 --- /dev/null +++ b/caldates/order_trace.csv @@ -0,0 +1,3 @@ +CALTAG, UT_start_date, UT_end_date, CALPATH, comment +1.0, 2000-01-01 00:00:00, 2024-02-03 00:00:00, "['/data/reference_fits/kpf_20230920_master_flat_GREEN_CCD.csv', '/data/reference_fits/kpf_20230920_master_flat_RED_CCD.csv']" +2.0, 2024-02-03 00:00:01, 3000-01-01 00:00:00, "['/data/reference_fits/kpf_20240211_master_flat_GREEN_CCD.csv', '/data/reference_fits/kpf_20240211_master_flat_GREEN_CCD.csv']" diff --git a/caldates/smooth_lamp_pattern.csv b/caldates/smooth_lamp_pattern.csv new file mode 100644 index 000000000..c19867a30 --- /dev/null +++ b/caldates/smooth_lamp_pattern.csv @@ -0,0 +1,6 @@ +CALTAG, UT_start_date, UT_end_date, CALPATH, comment +0.5, 2000-01-01 00:00:00, 2023-06-23 00:00:00, /data/reference_fits/kpf_20230619_smooth_lamp_made20230817_float32.fits, +0.6, 2023-06-23 00:00:01, 2023-07-30 00:00:00, /data/reference_fits/kpf_20230628_smooth_lamp_made20230803_float32.fits, +1.0, 2023-07-30 00:00:01, 2024-02-03 00:00:00, /data/reference_fits/kpf_20230804_smooth_lamp_made20230808_float32.fits, +1.5, 2024-02-03 00:00:01, 2024-02-11 00:00:00, /data/reference_fits/kpf_20240206_smooth_lamp_made20240206.fits, +2.0, 2024-02-11 00:00:01, 3000-01-01 00:00:00, /data/reference_fits/kpf_20240211_smooth_lamp_made20240212.fits, \ No newline at end of file diff --git a/caldates/start_order.csv b/caldates/start_order.csv new file mode 100644 index 000000000..c5a5ab63f --- /dev/null +++ b/caldates/start_order.csv @@ -0,0 +1,3 @@ +CALTAG, UT_start_date, UT_end_date, CALPATH, comment +1.0, 2000-01-01 00:00:00, 2024-02-03 00:00:00, "[-1, 0]" +2.0, 2024-02-03 00:00:01, 3000-01-01 00:00:00, "[0, 1]" diff --git a/caldates/trace_flat.csv b/caldates/trace_flat.csv new file mode 100644 index 000000000..144dbbb76 --- /dev/null +++ b/caldates/trace_flat.csv @@ -0,0 +1,3 @@ +CALTAG, UT_start_date, UT_end_date, CALPATH, comment +1.0, 2000-01-01 00:00:00, 2024-02-03 00:00:00, /data/reference_fits/kpf_20230730_master_flat.fits, +2.0, 2024-02-03 00:00:01, 3000-01-01 00:00:00, /data/reference_fits/kpf_20240211_master_flat.fits, diff --git a/configs/era_specific.cfg b/configs/era_specific.cfg deleted file mode 100644 index 9b672b37a..000000000 --- a/configs/era_specific.cfg +++ /dev/null @@ -1,63 +0,0 @@ -[ARGUMENTS] -order_trace_flat = {'0.5': '/data/reference_fits/kpf_20230920_master_flat.fits', - '0.6': 
'/data/reference_fits/kpf_20230920_master_flat.fits', - '0.7': '/data/reference_fits/kpf_20230920_master_flat.fits', - '1.0': '/data/reference_fits/kpf_20230920_master_flat.fits', - '1.5': '/data/reference_fits/kpf_20240206_master_flat.fits', - '1.6': '/data/reference_fits/kpf_20240206_master_flat.fits', - '2.0': '/data/reference_fits/kpf_20240211_master_flat.fits'} - -order_trace_files = {'0.5': [ '/data/reference_fits/kpf_20230920_master_flat_GREEN_CCD.csv', - '/data/reference_fits/kpf_20230920_master_flat_RED_CCD.csv'], - '0.6': [ '/data/reference_fits/kpf_20230920_master_flat_GREEN_CCD.csv', - '/data/reference_fits/kpf_20230920_master_flat_RED_CCD.csv'], - '0.7': [ '/data/reference_fits/kpf_20230920_master_flat_GREEN_CCD.csv', - '/data/reference_fits/kpf_20230920_master_flat_RED_CCD.csv'], - '1.0': [ '/data/reference_fits/kpf_20230920_master_flat_GREEN_CCD.csv', - '/data/reference_fits/kpf_20230920_master_flat_RED_CCD.csv'], - '1.5': [ '/data/reference_fits/kpf_20240211_master_flat_GREEN_CCD.csv', - '/data/reference_fits/kpf_20240211_master_flat_RED_CCD.csv'], - '1.6': [ '/data/reference_fits/kpf_20240211_master_flat_GREEN_CCD.csv', - '/data/reference_fits/kpf_20240211_master_flat_RED_CCD.csv'], - '2.0': [ '/data/reference_fits/kpf_20240211_master_flat_GREEN_CCD.csv', - '/data/reference_fits/kpf_20240211_master_flat_RED_CCD.csv']} - -start_order = {'0.5': [-1, 0], - '0.6': [-1, 0], - '0.7': [-1, 0], - '1.0': [-1, 0], - '1.5': [0, 1], - '1.6': [0, 1], - '2.0': [0, 1]} - -flat_file = {'0.5': '/data/reference_fits/kpf_20230730_master_flat.fits', - '0.6': '/data/reference_fits/kpf_20230730_master_flat.fits', - '0.7': '/data/reference_fits/kpf_20230730_master_flat.fits', - '1.0': '/data/reference_fits/kpf_20230730_master_flat.fits', - '1.5': '/data/reference_fits/kpf_20240211_master_flat.fits', - '1.6': '/data/reference_fits/kpf_20240211_master_flat.fits', - '2.0': '/data/reference_fits/kpf_20240211_master_flat.fits'} - -master_wls_file = {'0.5': '/data/reference_fits/430LFCWLS.fits', - '0.6': '/data/reference_fits/430LFCWLS.fits', - '0.7': '/data/reference_fits/430LFCWLS.fits', - '1.0': '/data/reference_fits/430LFCWLS.fits', - '1.5': '/data/reference_fits/Era2_RoughWLS_woCAL.fits', - '1.6': '/data/reference_fits/Era2_RoughWLS_woCAL.fits', - '2.0': '/data/reference_fits/Era2_RoughWLS_woCAL.fits'} - -ordermask_path = {'0.5': '/data/reference_fits/kpf_20230730_order_mask_untrimmed_made20230804.fits', - '0.6': '/data/reference_fits/kpf_20230730_order_mask_untrimmed_made20230804.fits', - '0.7': '/data/reference_fits/kpf_20230730_order_mask_untrimmed_made20230804.fits', - '1.0': '/data/reference_fits/kpf_20230730_order_mask_untrimmed_made20230804.fits', - '1.5': '/data/reference_fits/kpf_20240206_order_mask_untrimmed_made20240206.fits', - '1.6': '/data/reference_fits/kpf_20240211_order_mask_untrimmed_made20240212.fits', - '2.0': '/data/reference_fits/kpf_20240211_order_mask_untrimmed_made20240212.fits'} - -smoothlamppattern_path = {'0.5': '/data/reference_fits/kpf_20230619_smooth_lamp_made20230817_float32.fits', - '0.6': '/data/reference_fits/kpf_20230619_smooth_lamp_made20230817_float32.fits', - '0.7': '/data/reference_fits/kpf_20230628_smooth_lamp_made20230803_float32.fits', - '1.0': '/data/reference_fits/kpf_20230804_smooth_lamp_made20230808_float32.fits', - '1.5': '/data/reference_fits/kpf_20240206_smooth_lamp_made20240206.fits', - '1.6': '/data/reference_fits/kpf_20240211_smooth_lamp_made20240212.fits', - '2.0': '/data/reference_fits/kpf_20240211_smooth_lamp_made20240212.fits'} diff --git 
a/configs/kpf_drp.cfg b/configs/kpf_drp.cfg index cb6e54604..e00a3bf21 100644 --- a/configs/kpf_drp.cfg +++ b/configs/kpf_drp.cfg @@ -124,7 +124,7 @@ do_order_trace = False do_spectral_extraction = True do_rv = True do_rv_reweighting = True -do_hk = True +do_hk = True do_wavecopy_in_sp = True do_bk_subtraction = True do_bc = True @@ -142,9 +142,6 @@ channel_datasec_nrows_red = 2040 channel_orientation_ref_path_red = /code/KPF-Pipeline/static/kpfsim_ccd_orient_red.txt channel_orientation_ref_path_green = /code/KPF-Pipeline/static/kpfsim_ccd_orient_green.txt do_db_query_for_master_files = True -masterbias_path = /data/masters/20240223/kpf_20240223_master_bias_autocal-bias.fits -masterdark_path = /data/masters/20240223/kpf_20240223_master_dark_autocal-dark.fits -masterflat_path = /data/masters/20240223/kpf_20240223_master_flat.fits bad_pixel_masks = [] prescan_reg = [0,4] # overscan_method = clippedmean is n_sigma clipping and average over entire post-overscan strip. @@ -179,4 +176,12 @@ quicklook = modules/quicklook/configs/default.cfg bias_subtraction = modules/bias_subtraction/configs/default.cfg flat_fielding = modules/flat_fielding/configs/default.cfg quality_control = modules/quality_control/configs/default.cfg - +calibration_lookup = modules/calibration_lookup/configs/default.cfg + +[WLS_INTERPOLATION] +cal_type = 'WLS' +object_before = 'autocal-lfc-all-eve' +object_after = 'autocal-lfc-all-morn' +#master_file_before_default = /masters/20240128/kpf_20240128_master_WLS_autocal-etalon-all-eve_L1.fits +#master_file_after_default = /masters/20240129/kpf_20240129_master_WLS_autocal-etalon-all-morn_L1.fits +do_db_query_for_before_after_master_files = True diff --git a/configs/test_wls_interpolation.cfg b/configs/test_wls_interpolation.cfg index f9204cff3..aef1254eb 100644 --- a/configs/test_wls_interpolation.cfg +++ b/configs/test_wls_interpolation.cfg @@ -26,3 +26,4 @@ wave_ext = ['GREEN_SCI_WAVE1', 'GREEN_SCI_WAVE2', 'GREEN_SCI_WAVE3', 'GREEN_SKY_ [MODULE_CONFIGS] wls = modules/wavelength_cal/configs/default.cfg +calibration_lookup = modules/calibration_lookup/configs/default.cfg diff --git a/database/modules/query_db_before_after_master_files/src/query_db_before_after_master_files_framework.py b/database/modules/query_db_before_after_master_files/src/query_db_before_after_master_files_framework.py index c35fb1af9..bc60fe529 100644 --- a/database/modules/query_db_before_after_master_files/src/query_db_before_after_master_files_framework.py +++ b/database/modules/query_db_before_after_master_files/src/query_db_before_after_master_files_framework.py @@ -10,22 +10,11 @@ from kpfpipe.models.level0 import KPF0 from kpfpipe.primitives.level0 import KPF0_Primitive from keckdrpframework.models.arguments import Arguments +from database.modules.query_db_nearest_master_files.src.query_db_nearest_master_files_framework import md5 # Global read-only variables DEFAULT_CFG_PATH = 'database/modules/query_db_before_after_master_files/configs/default.cfg' -def md5(fname): - hash_md5 = hashlib.md5() - - try: - with open(fname, "rb") as f: - for chunk in iter(lambda: f.read(4096), b""): - hash_md5.update(chunk) - return hash_md5.hexdigest() - except: - print("*** Error: Cannot open file =",fname,"; quitting...") - exit(65) - class QueryDBBeforeAfterMasterFilesFramework(KPF0_Primitive): """ diff --git a/modules/Utils/data_handler.py b/modules/Utils/data_handler.py index e08645e9a..79fd202b6 100644 --- a/modules/Utils/data_handler.py +++ b/modules/Utils/data_handler.py @@ -260,7 +260,7 @@ def __init__(self, 
KPF_Primitive.__init__(self, action, context) self.kpfobj = action.args[0] - self.key_list = [str] if isinstance(action.args[1], str) else action.args[1] + self.key_list = [action.args[1]] if isinstance(action.args[1], str) else action.args[1] self.logger = None if not self.logger: diff --git a/modules/Utils/era_specific_parameters.py b/modules/Utils/era_specific_parameters.py deleted file mode 100644 index b8092150b..000000000 --- a/modules/Utils/era_specific_parameters.py +++ /dev/null @@ -1,45 +0,0 @@ - -from datetime import datetime - -from kpfpipe.models.level0 import KPF0 -from kpfpipe.primitives.level0 import KPF0_Primitive -from keckdrpframework.models.arguments import Arguments -from kpfpipe.config.pipeline_config import ConfigClass - -import pandas as pd - -class EraSpecific(KPF0_Primitive): - """This utility looks up the KPFERA for a file and then returns the - appropriate era-specific configuration parameters. - - """ - def __init__(self, action, context): - - #Initialize parent class - KPF0_Primitive.__init__(self, action, context) - - #Input arguments - self.input_file = self.action.args[0] # L0 object - self.parameter_name = self.action.args[1] - - era_file = 'static/kpfera_definitions.csv' - config_file = 'configs/era_specific.cfg' - self.config = ConfigClass(config_file) - - self.eras = pd.read_csv(era_file, dtype='str', - sep='\s*,\s*') - - def _perform(self): - - dt = datetime.strptime(self.input_file.header['PRIMARY']['DATE-OBS'], "%Y-%m-%d") - for i,row in self.eras.iterrows(): - start = datetime.strptime(row['UT_start_date'], "%Y-%m-%d %H:%M:%S") - end = datetime.strptime(row['UT_end_date'], "%Y-%m-%d %H:%M:%S") - if dt > start and dt <= end: - break - - era = row['KPFERA'] - options = eval(self.config.ARGUMENTS[self.parameter_name]) - value = options[era] - - return Arguments(value) diff --git a/modules/calibration_lookup/__init__.py b/modules/calibration_lookup/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/modules/calibration_lookup/configs/default.cfg b/modules/calibration_lookup/configs/default.cfg new file mode 100644 index 000000000..8a3b5f3e5 --- /dev/null +++ b/modules/calibration_lookup/configs/default.cfg @@ -0,0 +1,37 @@ +[LOGGER] +log = True +log_path = logs/calibrations.log +log_level = debug +log_verbose = True + +[PARAM] +date_files = {'rough_wls': 'caldates/master_wls.csv', + 'order_trace': 'caldates/order_trace.csv', + 'start_order': 'caldates/start_order.csv', + 'order_mask': 'caldates/order_mask.csv', + 'smooth_lamp_pattern': 'caldates/smooth_lamp_pattern.csv', + 'trace_flat': 'caldates/trace_flat.csv'} + +# List of possible calibration types and how to look them up +# possible values are 'database' | 'file' | 'wls' +lookup_map = {'bias': 'database', + 'dark': 'database', + 'flat': 'database', + 'wls': 'wls', # wls has a special lookup to get before/after solutions + 'rough_wls': 'file', + 'order_trace': 'file', + 'start_order': 'file', + 'order_mask': 'file', + 'smooth_lamp_pattern': 'file', + 'trace_flat': 'file'} + +defaults = {'bias': '/data/reference_fits/kpf_20240223_master_bias_autocal-bias.fits', + 'dark': '/data/reference_fits/kpf_20240223_master_dark_autocal-dark.fits', + 'flat': '/data/reference_fits/kpf_20240223_master_flat.fits', + 'wls': ['/data/reference_fits/Era2_RoughWLS_woCAL.fits', '/data/reference_fits/Era2_RoughWLS_woCAL.fits']} + +db_cal_file_levels = [0, 0, 0] +db_cal_types = [['Bias','autocal-bias'],['Dark', 'autocal-dark'],['Flat','autocal-flat-all']] + +wls_cal_types = [['WLS','autocal-lfc-all'], 
['WLS', 'autocal-thar-all']] +max_cal_age = '3 days' \ No newline at end of file diff --git a/modules/calibration_lookup/src/__init__.py b/modules/calibration_lookup/src/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/modules/calibration_lookup/src/alg.py b/modules/calibration_lookup/src/alg.py new file mode 100644 index 000000000..19c5c29c4 --- /dev/null +++ b/modules/calibration_lookup/src/alg.py @@ -0,0 +1,438 @@ + +from datetime import datetime +import pandas as pd +import os +import re +import psycopg2 + +from database.modules.query_db_nearest_master_files.src.query_db_nearest_master_files_framework import md5 +from keckdrpframework.models.arguments import Arguments +from kpfpipe.config.pipeline_config import ConfigClass +from kpfpipe.logger import start_logger + +def query_database(date, cal_types, cal_file_levels, log): + # Get database connection parameters from environment. + # *** This code is duplicated in QueryDBNearestMasterFilesFramework and needs + # to be consolidated + + dbport = os.getenv('DBPORT') + dbname = os.getenv('DBNAME') + dbuser = os.getenv('DBUSER') + dbpass = os.getenv('DBPASS') + dbserver = os.getenv('DBSERVER') + + + # Connect to database + + try: + conn = psycopg2.connect(host=dbserver,database=dbname,port=dbport,user=dbuser,password=dbpass) + except: + log.warning("Could not connect to database...") + return Arguments(64) + + # Open database cursor. + + cur = conn.cursor() + + + # Select database version. + + q1 = 'SELECT version();' + log.debug('q1 = {}'.format(q1)) + cur.execute(q1) + db_version = cur.fetchone() + log.debug('PostgreSQL database version = {}'.format(db_version)) + + + # Check database current_user. + + q2 = 'SELECT current_user;' + log.debug('q2 = {}'.format(q2)) + cur.execute(q2) + for record in cur: + log.debug('record = {}'.format(record)) + + + # Define query template. + + query_template =\ + "select * from getCalFile(" +\ + "cast(OBSDATE as date)," +\ + "cast(LEVEL as smallint)," +\ + "cast('CALTYPE' as character varying(32))," +\ + "cast('OBJECT' as character varying(32))," +\ + "cast(CONTENTBITMASK as integer)) as " +\ + "(cId integer," +\ + " level smallint," +\ + " caltype varchar(32)," +\ + " object varchar(32)," +\ + " filename varchar(255)," +\ + " checksum varchar(32)," +\ + " infobits integer," +\ + " startDate date);" + + obsdate = "'" + date[0:4] + "-" + date[4:6] + "-" + date[6:8] + "'" + + + # Query database for all cal_types. + + contentbitmask_list = [3] # Mask values for GREEN, RED, and CA_HK together, and then for just GREEN and RED. 
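
The uppercase tokens in query_template above (OBSDATE, LEVEL, CALTYPE, OBJECT, CONTENTBITMASK) are filled in by a single regex substitution a few lines below; here is a minimal, self-contained sketch of that mechanism, using illustrative parameter values rather than anything pulled from the database:

import re

query_template = ("select * from getCalFile(cast(OBSDATE as date),"
                  "cast(LEVEL as smallint),"
                  "cast('CALTYPE' as character varying(32)),"
                  "cast('OBJECT' as character varying(32)),"
                  "cast(CONTENTBITMASK as integer)) as (...);")  # column list omitted for brevity

rep = {"OBSDATE": "'2024-02-23'",      # illustrative observation date, already quoted as in query_database()
       "LEVEL": "0",
       "CALTYPE": "Bias",
       "OBJECT": "autocal-bias",
       "CONTENTBITMASK": "3"}

# Same substitution pattern as in query_database(): escape the keys, build one regex, replace every token.
rep = dict((re.escape(k), v) for k, v in rep.items())
pattern = re.compile("|".join(rep.keys()))
query = pattern.sub(lambda m: rep[re.escape(m.group(0))], query_template)
# query -> "select * from getCalFile(cast('2024-02-23' as date),cast(0 as smallint),cast('Bias' ..."
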
+ + nearest_master_files_list = [] + + log.debug('----> self.cal_file_levels = {}'.format(cal_file_levels)) + log.debug('----> self.cal_types = {}'.format(cal_types)) + + for contentbitmask in contentbitmask_list: + for level,cal_type_pair in zip(cal_file_levels,cal_types): + log.debug('level = {}'.format(level)) + levelstr = str(level) + log.debug('cal_type_pair = {}'.format(cal_type_pair)) + cal_type = cal_type_pair[0] + object = cal_type_pair[1] + + rep = {"OBSDATE": obsdate, + "LEVEL": levelstr, + "CALTYPE": cal_type, + "OBJECT": object} + + rep["CONTENTBITMASK"] = str(contentbitmask) + + rep = dict((re.escape(k), v) for k, v in rep.items()) + pattern = re.compile("|".join(rep.keys())) + query = pattern.sub(lambda m: rep[re.escape(m.group(0))], query_template) + + log.debug('query = {}'.format(query)) + + cur.execute(query) + record = cur.fetchone() + + if record is not None: + cId = record[0] + db_level = record[1] + db_cal_type = record[2] + db_object = record[3] + filename = '/' + record[4] # docker run has -v /data/kpf/masters:/masters + checksum = record[5] + infobits = record[6] + + log.debug('cId = {}'.format(cId)) + log.debug('filename = {}'.format(filename)) + log.debug('checksum = {}'.format(checksum)) + + + # See if file exists. + + isExist = os.path.exists(filename) + log.debug('File existence = {}'.format(isExist)) + + + # Compute checksum and compare with database value. + + cksum = md5(filename) + log.debug('cksum = {}'.format(cksum)) + + if cksum == checksum: + log.debug("File checksum is correct...") + else: + log.debug("*** Error: File checksum is incorrect; quitting...") + exitcode = 64 + + cal_file_record = [cId, db_level, db_cal_type, db_object, contentbitmask, infobits, filename] + nearest_master_files_list.append(cal_file_record) + + query_db_nearest_master_files_exit_code = 0 + + + # Close database cursor and then connection. + + try: + cur.close() + except (Exception, psycopg2.DatabaseError) as error: + log.error(error) + query_db_nearest_master_files_exit_code = 1 + finally: + if conn is not None: + conn.close() + log.debug('Database connection closed.') + + exit_list = [query_db_nearest_master_files_exit_code,nearest_master_files_list] + + return exit_list + +def query_wls(datetime, cal_type, max_cal_file_age, log): + """ + Returns [exitcode_for_before_query,[before_master_file_record],exitcode_for_after_query,[after_master_file_record]]. + + """ + + # Get database connection parameters from environment. + dbport = os.getenv('DBPORT') + dbname = os.getenv('DBNAME') + dbuser = os.getenv('DBUSER') + dbpass = os.getenv('DBPASS') + dbserver = os.getenv('DBSERVER') + + # hard code some parameters for WLS lookup + # cal_type = [['WLS','autocal-lfc-all'], ['WLS', 'autocal-thar-all']] + cal_file_level = 1 # can assume WLS is in L1 format + contentbitmask = 3 + # max_cal_file_age = '3 days' + + # Connect to database + + try: + conn = psycopg2.connect(host=dbserver,database=dbname,port=dbport,user=dbuser,password=dbpass) + except: + log.warning("Could not connect to database...") + return [64] + + + # Open database cursor. + + cur = conn.cursor() + + + # Select database version. + + q1 = 'SELECT version();' + log.debug('q1 = {}'.format(q1)) + cur.execute(q1) + db_version = cur.fetchone() + log.debug('PostgreSQL database version = {}'.format(db_version)) + + + # Check database current_user. 
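
Only the first pair in wls_cal_types (the LFC entry) is used to build the before/after OBJECT names queried below; the ThAr pair is carried in the config but not consumed here. A short sketch of the naming convention, with values taken from the module config:

# Values from modules/calibration_lookup/configs/default.cfg
cal_type = [['WLS', 'autocal-lfc-all'], ['WLS', 'autocal-thar-all']]
max_cal_file_age = '3 days'   # PostgreSQL interval passed to getCalFileBefore/getCalFileAfter

object_before = cal_type[0][1] + "-eve"               # 'autocal-lfc-all-eve'
object_after = object_before.replace('eve', 'morn')   # 'autocal-lfc-all-morn'
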
+ + q2 = 'SELECT current_user;' + log.debug('q2 = {}'.format(q2)) + cur.execute(q2) + for record in cur: + log.debug('record = {}'.format(record)) + + + # Define query templates for database stored functions defined in database/schema/kpfOpsProcs.sql + + query_template_before =\ + "select * from getCalFileBefore(" +\ + "cast('OBSDATETIME' as timestamp)," +\ + "cast(LEVEL as smallint)," +\ + "cast('CALTYPE' as character varying(32))," +\ + "cast('OBJECT' as character varying(32))," +\ + "cast(CONTENTBITMASK as integer), " +\ + "cast('MAXFILEAGE' as interval)) as " +\ + "(cId integer," +\ + " level smallint," +\ + " caltype varchar(32)," +\ + " object varchar(32)," +\ + " filename varchar(255)," +\ + " checksum varchar(32)," +\ + " infobits integer," +\ + " startDate date);" + + query_template_after =\ + "select * from getCalFileAfter(" +\ + "cast('OBSDATETIME' as timestamp)," +\ + "cast(LEVEL as smallint)," +\ + "cast('CALTYPE' as character varying(32))," +\ + "cast('OBJECT' as character varying(32))," +\ + "cast(CONTENTBITMASK as integer), " +\ + "cast('MAXFILEAGE' as interval)) as " +\ + "(cId integer," +\ + " level smallint," +\ + " caltype varchar(32)," +\ + " object varchar(32)," +\ + " filename varchar(255)," +\ + " checksum varchar(32)," +\ + " infobits integer," +\ + " startDate date);" + + + # Populate query-template dictionaries with parameters. + + log.debug('----> self.cal_file_level = {}'.format(cal_file_level)) + log.debug('----> self.contentbitmask = {}'.format(contentbitmask)) + log.debug('----> self.cal_type = {}'.format(cal_type)) + + contentbitmask = contentbitmask + level = cal_file_level + cal_type = cal_type + + log.debug('level = {}'.format(level)) + levelstr = str(level) + log.debug('cal_type = {}'.format(cal_type)) + + object_before = cal_type[0][1] + "-eve" + object_after = object_before.replace('eve', 'morn') + + rep_before = {"OBSDATETIME": datetime, + "LEVEL": levelstr, + "CALTYPE": 'WLS', + "OBJECT": object_before, + "MAXFILEAGE": max_cal_file_age} + + rep_after = {"OBSDATETIME": datetime, + "LEVEL": levelstr, + "CALTYPE": 'WLS', + "OBJECT": object_after, + "MAXFILEAGE": max_cal_file_age} + + rep_before["CONTENTBITMASK"] = str(contentbitmask) + rep_after["CONTENTBITMASK"] = str(contentbitmask) + + + # Execute database queries. + + + exit_code_before,results_before = run_query(cur,rep_before,query_template_before, contentbitmask, log) + + exit_code_after,results_after = run_query(cur,rep_after,query_template_after, contentbitmask, log) + + + # Close database cursor and then connection. 
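
query_wls() returns [exit_code_before, record_before, exit_code_after, record_after], where each record is the list assembled by the run_query() helper defined further down and the file path sits at index 6. A small sketch of how the pair of WLS paths is pulled out, mirroring what extract_from_db_results() does for 'wls'; the record values here are illustrative:

# Record layout: [cId, level, caltype, object, contentbitmask, infobits, filename] -- filename at index 6
exit_list = [0,            # exit code for the 'before' query
             [17, 1, 'WLS', 'autocal-lfc-all-eve', 3, 0,
              '/masters/20240101/kpf_20240101_master_WLS_autocal-lfc-all-eve_L1.fits'],
             0,            # exit code for the 'after' query
             [18, 1, 'WLS', 'autocal-lfc-all-morn', 3, 0,
              '/masters/20240101/kpf_20240101_master_WLS_autocal-lfc-all-morn_L1.fits']]

wls_pair = [exit_list[1][6], exit_list[3][6]]   # ['...eve_L1.fits', '...morn_L1.fits']
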
+ + try: + cur.close() + except (Exception, psycopg2.DatabaseError) as error: + log.error(error) + finally: + if conn is not None: + conn.close() + log.debug('Database connection closed.') + + exit_list = [exit_code_before,results_before,exit_code_after,results_after] + + return exit_list + + +def run_query(cur,rep,query_template, contentbitmask, log): + + rep = dict((re.escape(k), v) for k, v in rep.items()) + pattern = re.compile("|".join(rep.keys())) + query = pattern.sub(lambda m: rep[re.escape(m.group(0))], query_template) + + log.debug('query = {}'.format(query)) + + cur.execute(query) + record = cur.fetchone() + + exit_code = 1 + results_list = [] + + if record is not None: + + exit_code = 0 + + cId = record[0] + db_level = record[1] + db_cal_type = record[2] + db_object = record[3] + filename = '/' + record[4] # docker run has -v /data/kpf/masters:/masters + checksum = record[5] + infobits = record[6] + + log.debug('cId = {}'.format(cId)) + log.debug('filename = {}'.format(filename)) + log.debug('checksum = {}'.format(checksum)) + + + # See if file exists. + + isExist = os.path.exists(filename) + log.debug('File existence = {}'.format(isExist)) + + + # Compute checksum and compare with database value. + + cksum = md5(filename) + log.debug('cksum = {}'.format(cksum)) + + if cksum == checksum: + log.debug("File checksum is correct...") + else: + log.debug("*** Error: File checksum is incorrect; quitting...") + exit_code = 3 + + results_list = [cId, db_level, db_cal_type, db_object, contentbitmask, infobits, filename] + + return exit_code,results_list + +def extract_from_db_results(results, cal_type): + if results[0] == 1: + return '' + elif cal_type.lower() == 'wls': + return [results[1][6], results[3][6]] + else: + cal_list = results[1] + for cal in cal_list: + if cal_type.lower() == cal[2].lower(): + return cal[6] + + cals = [] + for i in enumerate(results): + cal_list = results + +class GetCalibrations: + """This utility looks up the associated calibrations for a given datetime and + returns a dictionary with all calibration types. 
+ + """ + def __init__(self, datetime, default_config_path, logger=None): + + # Initialize DB class + # self.db_lookup = QueryDBNearestMasterFilesFramework(self.action, self.context) + + #Input arguments + self.datetime = datetime # ISO datetime string + self.config = ConfigClass(default_config_path) + if logger == None: + self.log = start_logger('GetCalibrations', default_config_path) + else: + self.log = logger + + self.caldate_files = eval(self.config['PARAM']['date_files']) + self.lookup_map = eval(self.config['PARAM']['lookup_map']) + self.db_cal_types = eval(self.config['PARAM']['db_cal_types']) + self.db_cal_file_levels = eval(self.config['PARAM']['db_cal_file_levels']) + self.wls_cal_types = eval(self.config['PARAM']['wls_cal_types']) + self.max_age = eval(self.config['PARAM']['max_cal_age']) + self.defaults = eval(self.config['PARAM']['defaults']) + + def lookup(self): + dt = datetime.strptime(self.datetime, "%Y-%m-%dT%H:%M:%S.%f") + date_str = datetime.strftime(dt, "%Y%m%d") + + output_cals = {} + db_results = None + for cal,lookup in self.lookup_map.items(): + if lookup == 'file': + filename = self.caldate_files[cal] + df = pd.read_csv(filename, header=0, skipinitialspace=True) + for i, row in df.iterrows(): + start = datetime.strptime(row['UT_start_date'], "%Y-%m-%d %H:%M:%S") + end = datetime.strptime(row['UT_end_date'], "%Y-%m-%d %H:%M:%S") + if start <= dt < end: + try: + output_cals[cal] = eval(row['CALPATH']) + except SyntaxError: + output_cals[cal] = row['CALPATH'] + elif lookup == 'database' and db_results == None: + db_results = query_database(date_str, self.db_cal_types, self.db_cal_file_levels, self.log) + if db_results[0] == 0: + output_cals[cal] = extract_from_db_results(db_results, cal) + else: + output_cals[cal] = self.defaults[cal] + elif lookup == 'database' and db_results != None: + if db_results[0] == 0: + output_cals[cal] = extract_from_db_results(db_results, cal) + else: + output_cals[cal] = self.defaults[cal] + elif lookup == 'wls': + wls_results = query_wls(self.datetime, self.wls_cal_types, self.max_age, self.log) + if wls_results[0] == 0 and wls_results[2] == 0: + output_cals[cal] = extract_from_db_results(wls_results, cal) + else: + output_cals[cal] = self.defaults[cal] + + return output_cals + diff --git a/modules/calibration_lookup/src/calibration_lookup.py b/modules/calibration_lookup/src/calibration_lookup.py new file mode 100644 index 000000000..90ab46a71 --- /dev/null +++ b/modules/calibration_lookup/src/calibration_lookup.py @@ -0,0 +1,69 @@ + +import configparser + +from kpfpipe.primitives.level0 import KPF0_Primitive +from modules.calibration_lookup.src.alg import GetCalibrations +from keckdrpframework.models.arguments import Arguments + +# Global read-only variables +DEFAULT_CFG_PATH = 'modules/calibration_lookup/configs/default.cfg' + +class CalibrationLookup(KPF0_Primitive): + """This utility looks up the associated calibrations for a given datetime and + returns a dictionary with all calibration types. 
+ + Description: + * Method `__init__`: + + CalibrationLookup constructor, the following arguments are passed to `__init__`, + + - `action (keckdrpframework.models.action.Action)`: `action.args` contains positional arguments and + keyword arguments passed by the `BarycentricCorrection` event issued in the recipe: + + - `action.args[0] (dict)`: Datetime string in ISO format + + - `context (keckdrpframework.models.processing_context.ProcessingContext)`: `context.config_path` + contains the path of the default config file defined for the CalibrationLookup module. + + + * Method `__perform`: + + CalibrationLookup returns the result in `Arguments` object which contains a dictionary of calibration file paths + for the input datetime + + Usage: + For the recipe, the CalibrationLookup primitive is called like:: + + : + dt_string = GetHeaderValue(l1_obj, 'DATE-MID') + cals = CalibrationLookup(dt_string) + : + + + """ + def __init__(self, action, context): + + #Initialize parent class + KPF0_Primitive.__init__(self, action, context) + + #Input arguments + self.datetime = self.action.args[0] # ISO datetime string + + # input configuration + self.config = configparser.ConfigParser() + try: + self.config_path = context.config_path['calibration_lookup'] + except: + self.config_path = DEFAULT_CFG_PATH + self.config.read(self.config_path) + + self.caldate_files = self.config['PARAM']['date_files'] + self.caltypes = self.config['PARAM']['lookup_map'] + + def _perform(self): + + cal_look = GetCalibrations(self.datetime, self.config_path) + output_cals = cal_look.lookup() + + return Arguments(output_cals) + diff --git a/modules/wavelength_cal/src/alg.py b/modules/wavelength_cal/src/alg.py index 77da5d970..406552a27 100644 --- a/modules/wavelength_cal/src/alg.py +++ b/modules/wavelength_cal/src/alg.py @@ -1920,7 +1920,10 @@ def wave_interpolation(self, method='linear'): deltat = (self.l1_timestamp - self.wls_timestamp[0]).total_seconds() else: self.logger.error("l1_timestamp not in a recognized format") - frac = deltat / tdiff + if tdiff == 0: + frac = 0.0 + else: + frac = deltat / tdiff # Perform linear interpolation between wls1 and wls2 new_wls_arrays = {} diff --git a/recipes/kpf_drp.recipe b/recipes/kpf_drp.recipe index 2e3403277..330763f30 100644 --- a/recipes/kpf_drp.recipe +++ b/recipes/kpf_drp.recipe @@ -17,7 +17,6 @@ from modules.Utils.data_handler import SelectObs from modules.Utils.orientation_ref_reader import OrientationReference from modules.Utils.overscan_subtract import OverscanSubtraction from modules.Utils.amplifier_mask import AmplifierMask -from modules.Utils.era_specific_parameters import EraSpecific from modules.image_processing.src.image_process import ImageProcessing from modules.order_trace.src.order_mask import OrderMask from modules.spectral_extraction.src.bary_corr import BaryCorrTable @@ -29,6 +28,9 @@ from modules.quality_control.src.quality_control_framework import QualityControl from modules.read_noise.src.read_noise_framework import ReadNoiseFramework from modules.var_exts.src.var_exts_framework import VarExtsFramework from modules.quicklook.src.diagnostics_framework import DiagnosticsFramework +from modules.calibration_lookup.src.calibration_lookup import CalibrationLookup +from modules.wavelength_cal.src.alg import WaveInterpolation +from modules.wavelength_cal.src.wavelength_cal import WaveInterpolate # set the flags for each process # note: no rv reweighting is made here. 
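
The tdiff == 0 guard added to wave_interpolation() above protects against a divide-by-zero when the before and after wavelength solutions share a timestamp. A small illustrative sketch of the fractional weighting; the timestamps and wavelength values are made up, and the final line is the standard linear-interpolation form implied by frac, not code copied from the module:

import numpy as np
from datetime import datetime

t_before = datetime(2024, 1, 1, 3, 0, 0)    # evening WLS timestamp (illustrative)
t_after  = datetime(2024, 1, 1, 15, 0, 0)   # morning WLS timestamp (illustrative)
t_obs    = datetime(2024, 1, 1, 6, 0, 0)    # DATE-MID of the science frame

tdiff  = (t_after - t_before).total_seconds()   # 43200 s
deltat = (t_obs - t_before).total_seconds()     # 10800 s
frac = 0.0 if tdiff == 0 else deltat / tdiff    # 0.25

wls_before = np.array([5000.00, 5000.02])       # illustrative wavelengths [Angstrom]
wls_after  = np.array([5000.04, 5000.06])
wls_interp = wls_before + frac * (wls_after - wls_before)
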
@@ -52,6 +54,9 @@ overwrite = config.ARGUMENT.overwrite # if copy wavelength solution data to L1 from file samples with wls data. do_sp_wavecopy = config.ARGUMENT.do_wavecopy_in_sp +# Peroform wavelength interpolation +do_db_query_for_before_after_master_files = config.WLS_INTERPOLATION.do_db_query_for_before_after_master_files + # file path for lev0 and lev1 data lev0_file_path = "" lev1_file_path = "" @@ -162,25 +167,25 @@ contentbitmask = config.ARGUMENT.contentbitmask cal_type_pairs = config.ARGUMENT.cal_type_pairs max_cal_file_age = config.ARGUMENT.max_cal_file_age -if do_db_query_for_one_nearest_wls_master_file and do_spectral_extraction: - db_wls_exit_code = 1 - for cal_type_pair in cal_type_pairs: - if db_wls_exit_code != 0: - query_one_nearest_master_file_list = QueryDBOneNearestMasterFileFramework(data_type, - date_dir_db_query, - cal_file_level, - contentbitmask, - cal_type_pair, - max_cal_file_age) +# if do_db_query_for_one_nearest_wls_master_file and do_spectral_extraction: +# db_wls_exit_code = 1 +# for cal_type_pair in cal_type_pairs: +# if db_wls_exit_code != 0: +# query_one_nearest_master_file_list = QueryDBOneNearestMasterFileFramework(data_type, +# date_dir_db_query, +# cal_file_level, +# contentbitmask, +# cal_type_pair, +# max_cal_file_age) - db_wls_exit_code = query_one_nearest_master_file_list[0] - if db_wls_exit_code == 0: +# db_wls_exit_code = query_one_nearest_master_file_list[0] +# if db_wls_exit_code == 0: - db_wls_rec = query_one_nearest_master_file_list[1] - db_wls_master_file = db_wls_rec[6] - wave_fits = [] - for wls in wls_list: - wave_fits = wave_fits + [db_wls_master_file] +# db_wls_rec = query_one_nearest_master_file_list[1] +# db_wls_master_file = db_wls_rec[6] +# wave_fits = [] +# for wls in wls_list: +# wave_fits = wave_fits + [db_wls_master_file] #### variables related to input/output and process conditions for order_trace, spectral extraction, CA-HK, rv @@ -424,9 +429,11 @@ if do_spectral_extraction or do_hk or do_bc: for input_lev0_file in lev0_files: # lev0_data is KPF0 instance for input_lev0_file lev0_data = kpf0_from_fits(input_lev0_file, data_type=data_type) - start_order = EraSpecific(lev0_data, 'start_order') - output_order_trace = EraSpecific(lev0_data, 'order_trace_files') - order_trace_flat = EraSpecific(lev0_data, 'order_trace_flat') + dt_string = GetHeaderValue(lev0_data, 'DATE-MID') + cals = CalibrationLookup(dt_string) + start_order = cals['start_order'] + output_order_trace = cals['order_trace'] + order_trace_flat = cals['trace_flat'] # prepare trace files and rectified L0 flat data for spectral extraction if do_spectral_extraction: @@ -531,13 +538,20 @@ if do_spectral_extraction or do_hk or do_bc: # copy wls to the proper extension of lev1 data if do_sp_wavecopy and exists(output_lev1_file): output_data = kpf1_from_fits(output_lev1_file, data_type = data_type) - for idx in ccd_idx: - if wave_fits[idx] != None and exists(wave_fits[idx]): - wavecal_data = kpf1_from_fits(wave_fits[idx], data_type=data_type) - for ext in wave_to_ext[idx]: - ExtCopy(wavecal_data, ext, ext, to_data_model=output_data) - - SetHeaderValue(output_data, 'WLSFILE', wave_fits[0]) + obs_date_time = GetHeaderValue(output_data,'DATE-MID') + if do_db_query_for_before_after_master_files == True: + invoke_subrecipe("recipes/wls_interpolation.subrecipe") #HTI + output_data = interpolated_l1 + SetHeaderValue(output_data, 'WLSFILE', master_file_before) # db_rec_before from subrecipe + SetHeaderValue(output_data, 'WLSFILE2', master_file_after) # db_rec_after from 
subrecipe + else: + for idx in ccd_idx: + if wave_fits[idx] != None and exists(wave_fits[idx]): + # wavecal_data will be replaced by wavelength interpolation. + wavecal_data = kpf1_from_fits(wave_fits[idx], data_type=data_type) + for ext in wave_to_ext[idx]: + ExtCopy(wavecal_data, ext, ext, to_data_model=output_data) # only this needs changed. + SetHeaderValue(output_data, 'WLSFILE', wave_fits[0]) # Quality Control - L1 level # (this should be moved below CaHK, but there are several to_fits calls for L1 -- let's fix this) diff --git a/recipes/test_wls_interpolation.recipe b/recipes/test_wls_interpolation.recipe index db4570a22..d6e2ffd39 100644 --- a/recipes/test_wls_interpolation.recipe +++ b/recipes/test_wls_interpolation.recipe @@ -1,17 +1,24 @@ # This is an example recipe showing use of the WLS interpolation framework from modules.Utils.string_proc import date_from_kpffile +from modules.Utils.data_handler import GetHeaderValue from modules.wavelength_cal.src.wavelength_cal import WaveInterpolate +from modules.calibration_lookup.src.calibration_lookup import CalibrationLookup data_type = config.ARGUMENT.data_type -wls1_file = config.ARGUMENT.input_dir + config.ARGUMENT.wls1_file -wls2_file = config.ARGUMENT.input_dir + config.ARGUMENT.wls2_file -wls1_l1 = kpf1_from_fits(wls1_file, data_type=data_type) -wls2_l1 = kpf1_from_fits(wls2_file, data_type=data_type) obsid_l1 = config.ARGUMENT.obsid_l1 datecode_l1 = date_from_kpffile(obsid_l1) + l1_file = config.ARGUMENT.input_dir + 'L1/' + datecode_l1 + '/' + obsid_l1 + '_L1.fits' l1 = kpf1_from_fits(l1_file, data_type=data_type) +datetime_string = GetHeaderValue(l1, 'DATE-MID') + +cals = CalibrationLookup(datetime_string) +wls1_file = cals['wls'][0] +wls2_file = cals['wls'][1] + +wls1_l1 = kpf1_from_fits(wls1_file, data_type=data_type) +wls2_l1 = kpf1_from_fits(wls2_file, data_type=data_type) l1_out = WaveInterpolate(wls1_l1, wls2_l1, l1) l1_out_filename = config.ARGUMENT.output_dir + 'L1/' + datecode_l1 + '/' + obsid_l1 + '_L1.fits' diff --git a/recipes/watchfor_kpf_l0.recipe b/recipes/watchfor_kpf_l0.recipe index ec0e6276e..1f1431ac8 100644 --- a/recipes/watchfor_kpf_l0.recipe +++ b/recipes/watchfor_kpf_l0.recipe @@ -15,9 +15,10 @@ lev0_ffi_ext_cahk = config.WATCHFOR_L0.lev0_ffi_ext_cahk prescan_reg = config.WATCHFOR_L0.prescan_reg gain_key = config.WATCHFOR_L0.gain_keyword do_db_query_for_master_files = config.WATCHFOR_L0.do_db_query_for_master_files -masterbias_path = config.WATCHFOR_L0.masterbias_path -masterdark_path = config.WATCHFOR_L0.masterdark_path -masterflat_path = config.WATCHFOR_L0.masterflat_path +#masterbias_path = config.WATCHFOR_L0.masterbias_path +#masterdark_path = config.WATCHFOR_L0.masterdark_path +#masterflat_path = config.WATCHFOR_L0.masterflat_path + bad_pixel_masks = config.WATCHFOR_L0.bad_pixel_masks quicklook = config.WATCHFOR_L0.quicklook n_sigma = config.WATCHFOR_L0.n_sigma @@ -25,39 +26,39 @@ n_sigma_read_noise = config.WATCHFOR_L0.n_sigma_read_noise date_dir = context.date_dir + '/' -if do_db_query_for_master_files: +# if do_db_query_for_master_files: - query_nearest_master_files_list = QueryDBNearestMasterFilesFramework(data_type, - date_dir) +# query_nearest_master_files_list = QueryDBNearestMasterFilesFramework(data_type, +# date_dir) - db_exit_code = query_nearest_master_files_list[0] +# db_exit_code = query_nearest_master_files_list[0] - flag_masterbias = 0 - flag_masterdark = 0 - flag_masterflat = 0 +# flag_masterbias = 0 +# flag_masterdark = 0 +# flag_masterflat = 0 - for db_rec in 
query_nearest_master_files_list[1]: - db_level = db_rec[1] - db_cal_type = db_rec[2] - db_master_file = db_rec[6] +# for db_rec in query_nearest_master_files_list[1]: +# db_level = db_rec[1] +# db_cal_type = db_rec[2] +# db_master_file = db_rec[6] - if flag_masterbias == 0: - if db_level == 0: - if db_cal_type == 'bias': - masterbias_path = db_master_file - flag_masterbias = 1 +# if flag_masterbias == 0: +# if db_level == 0: +# if db_cal_type == 'bias': +# masterbias_path = db_master_file +# flag_masterbias = 1 - if flag_masterdark == 0: - if db_level == 0: - if db_cal_type == 'dark': - masterdark_path = db_master_file - flag_masterdark = 1 +# if flag_masterdark == 0: +# if db_level == 0: +# if db_cal_type == 'dark': +# masterdark_path = db_master_file +# flag_masterdark = 1 - if flag_masterflat == 0: - if db_level == 0: - if db_cal_type == 'flat': - masterflat_path = db_master_file - flag_masterflat = 1 +# if flag_masterflat == 0: +# if db_level == 0: +# if db_cal_type == 'flat': +# masterflat_path = db_master_file +# flag_masterflat = 1 search_string = config.WATCHFOR_L0.input_dir + "/" + date_dir + '/*.fits' @@ -104,7 +105,11 @@ for raw_file_path in file_list: # read file l0 = kpf0_from_fits(raw_file_path, data_type) - + dt_string = GetHeaderValue(l0, 'DATE-MID') + cals = CalibrationLookup(dt_string) + masterbias_path = cals['bias'] + masterdark_path = cals['dark'] + masterflat_path = cals['flat'] # Quality Control & Diagnostics - L0 level data_level_str = 'L0' kpf_object = l0 diff --git a/recipes/wls_auto.recipe b/recipes/wls_auto.recipe index b524cd9a3..305fefd62 100755 --- a/recipes/wls_auto.recipe +++ b/recipes/wls_auto.recipe @@ -1,6 +1,8 @@ from modules.wavelength_cal.src.wavelength_cal import WaveCalibrate from modules.Utils.string_proc import str_replace -from modules.Utils.era_specific_parameters import EraSpecific +from modules.Utils.data_handler import GetHeaderValue +from modules.calibration_lookup.src.calibration_lookup import CalibrationLookup + masters_dir = config.ARGUMENT.masters_dir output_dir = config.ARGUMENT.output_dir + '/' @@ -34,7 +36,9 @@ for cal_type in ['ThAr', 'LFC', 'Etalon']: completed = False else: l1_obj = kpf1_from_fits(L1_file, data_type='KPF') - master_wls_file = EraSpecific(l1_obj, 'master_wls_file') + dt_string = GetHeaderValue(l1_obj, 'DATE-MID') + cals = CalibrationLookup(dt_string) + master_wls_file = cals['rough_wls'] full_master_wls = kpf1_from_fits(master_wls_file, data_type='KPF') base_path = masters_dir + date_dir obj_string = str_replace(L1_file, base_path + '/kpf_' + date_dir + '_master_arclamp_', '') diff --git a/recipes/wls_interpolation.subrecipe b/recipes/wls_interpolation.subrecipe new file mode 100644 index 000000000..1d711dbe9 --- /dev/null +++ b/recipes/wls_interpolation.subrecipe @@ -0,0 +1,9 @@ +# Find the nearest wavelength solutions in time and generate an interpolated wavelength solution. 
+ +cals = CalibrationLookup(obs_date_time) +master_file_before = cals['wls'][0] +master_file_after = cals['wls'][1] + +master_file_before_l1 = kpf1_from_fits(master_file_before,data_type=data_type) +master_file_after_l1 = kpf1_from_fits(master_file_after,data_type=data_type) +interpolated_l1 = WaveInterpolate(master_file_before_l1,master_file_after_l1,output_data) \ No newline at end of file diff --git a/tests/regression/test_eras.py b/tests/regression/test_eras.py deleted file mode 100644 index 52f1eba5f..000000000 --- a/tests/regression/test_eras.py +++ /dev/null @@ -1,20 +0,0 @@ -from kpfpipe.tools.recipe_test_unit import recipe_test - -recipe = """from modules.Utils.era_specific_parameters import EraSpecific - -test_param = 'order_trace_files' -fname = "/testdata/kpf/L0/20230730/KP.20230730.29130.27.fits" - -l0 = kpf0_from_fits(fname) - -value = EraSpecific(l0, test_param) - -""" - -cfg = master_stacks_config = "examples/default_recipe_test_neid.cfg" - -def test_kpf_eras(): - recipe_test(recipe) - -if __name__ == '__main__': - test_kpf_eras() \ No newline at end of file diff --git a/tests/regression/test_single_night.py b/tests/regression/test_single_night.py index cf3fb5ad2..fcdcf4a8d 100644 --- a/tests/regression/test_single_night.py +++ b/tests/regression/test_single_night.py @@ -8,8 +8,9 @@ from kpfpipe.config.pipeline_config import ConfigClass from kpfpipe.tools.recipe_test_unit import recipe_test from kpfpipe.pipelines.kpf_parse_ast import RecipeError -from .test_masters_recipe import masters_test_date +# from .test_masters_recipe import masters_test_date +masters_test_date = '20230730' drp_recipe = open('recipes/kpf_drp.recipe', 'r').read() drp_config = ConfigClass('configs/kpf_drp.cfg') diff --git a/tests/regression/test_wls_interpolation.py b/tests/regression/test_wls_interpolation.py deleted file mode 100644 index ac9eebd3f..000000000 --- a/tests/regression/test_wls_interpolation.py +++ /dev/null @@ -1,25 +0,0 @@ -""" -Test of interpolation between two wavelength solutions. -""" -from kpfpipe.tools.recipe_test_unit import recipe_test - -this_recipe = """from modules.wavelength_cal.src.wavelength_cal import WaveInterpolate - -data_type = 'KPF' -wls1_file = '/data/reference_fits/kpf_20240101_master_WLS_autocal-lfc-all-eve_L1.fits' -wls2_file = '/data/reference_fits/kpf_20240101_master_WLS_autocal-lfc-all-morn_L1.fits' -wls1_l1 = kpf1_from_fits(wls1_file, data_type=data_type) -wls2_l1 = kpf1_from_fits(wls2_file, data_type=data_type) -obsid_l1 = 'KP.20240101.24368.88' -l1_file = '/data/reference_fits/' + obsid_l1 + '_L1.fits' -l1 = kpf1_from_fits(l1_file, data_type=data_type) -l1_out = WaveInterpolate(wls1_l1, wls2_l1, l1) -""" - -this_config = "examples/default_neid.cfg" #dummy cfg - -def test_wls_interpolation(): - recipe_test(this_recipe, this_config) - -if __name__ == '__main__': - test_wls_interpolation() diff --git a/tests/regression/test_wls_interpolation_recipe.py b/tests/regression/test_wls_interpolation_recipe.py new file mode 100644 index 000000000..e58f7cb3c --- /dev/null +++ b/tests/regression/test_wls_interpolation_recipe.py @@ -0,0 +1,14 @@ +""" +Test of interpolation between two wavelength solutions. 
+""" +from kpfpipe.config.pipeline_config import ConfigClass +from kpfpipe.tools.recipe_test_unit import recipe_test + +recipe = open('recipes/test_wls_interpolation.recipe', 'r').read() +config = 'configs/test_wls_interpolation.cfg' + +def test_wls_interpolation_recipe(): + recipe_test(recipe, config, date_dir='20240101') + +if __name__ == '__main__': + test_wls_interpolation_recipe()