Merge pull request #788 from Keck-DataReductionPipelines/develop
Version 2.5.4
bjfultn authored Feb 1, 2024
2 parents 82cf70d + 6701d18 commit 58fa047
Showing 71 changed files with 6,933 additions and 2,185 deletions.
3 changes: 2 additions & 1 deletion README.md
@@ -1,4 +1,4 @@
# Data Reduction Pipeline (DRP) for the Keck Planet Finder (KPF)
# Data Reduction Pipeline (DRP) for the Keck Planet Finder (KPF)

[![Build Status](http://shrek.caltech.edu:4444/buildStatus/icon?job=KPF+CI)](http://shrek.caltech.edu:4444/job/KPF%20CI/)
[![Documentation Status](https://readthedocs.org/projects/kpf-pipeline/badge/?version=latest)](https://kpf-pipeline.readthedocs.io/en/latest/)
@@ -19,3 +19,4 @@ If you use the KPF Data Reduction Pipeline in your research, please cite the following
If there is no place to include the relevant citations in the text of the publication, please include the following acknowledgment (in LaTeX using the [provided BibTeX entry](kpf_bibliography.bib)):

"This research made use of the KPF Data Reduction Pipeline \citep{kpf:gibson2020}."

13 changes: 7 additions & 6 deletions configs/kpf_drp.cfg
@@ -68,9 +68,10 @@ orderlet_names = [['GREEN_SKY_FLUX', 'GREEN_SCI_FLUX1', 'GREEN_SCI_FLUX2', 'GREE
orderlet_widths_ccds = [[-1, -1, -1, 1, -1],[-1, -1, -1, -1, -1]]

# rectification_method: norect|vertical|normal, method to rectify the trace.
# extraction_method: summ|optimal, method to do spectral extraction.
# extraction_method: summ|optimal|fox, method to do spectral extraction.
rectification_method = norect
extraction_method = optimal
#extraction_method = fox

# - fits with wavelength calibration data
# wls_fits: [ <wavelength solution file for each ccd>].
@@ -81,8 +82,9 @@ do_db_query_for_one_nearest_wls_master_file = True
cal_file_level = 1
# contentbitmask = 3 means require at least GREEN and RED CCDs in the WLS master file database-queried nearest in time.
contentbitmask = 3
cal_type_pairs = [['WLS','cal-LFC-eve'], ['WLS', 'cal-LFC-morn'],
cal_type_pairs = [['WLS','autocal-lfc-all-eve'], ['WLS', 'autocal-lfc-all-morn'],
['WLS', 'autocal-thar-all-eve'], ['WLS', 'autocal-thar-all-morn']]
# cal_type_pairs = [['WLS', 'autocal-thar-all-eve']]
# Maximum start-date age of WLS file relative to context.date_dir at 00:00:00 UT, otherwise fall back on wls_fits.
max_cal_file_age = '4 days'

@@ -95,11 +97,10 @@ max_cal_file_age = '4 days'
orderlet_names_rv = [['GREEN_SCI_FLUX1', 'GREEN_SCI_FLUX2', 'GREEN_SCI_FLUX3', 'GREEN_CAL_FLUX', 'GREEN_SKY_FLUX'],['RED_SCI_FLUX1', 'RED_SCI_FLUX2', 'RED_SCI_FLUX3', 'RED_CAL_FLUX', 'RED_SKY_FLUX']]
rv_correct_by_cal = False
reweighting_method = ccf_static
#reweighting_enable_masks = [['espresso', 'lfc', 'thar', 'etalon'], ['espresso', 'lfc', 'thar', 'etalon']]
reweighting_enable_masks = [['espresso'], ['espresso']]
reweighting_enable_masks = [['espresso', 'sun','lfc', 'thar', 'etalon'], ['espresso','sun', 'lfc', 'thar', 'etalon']]
ccf_ext = ['GREEN_CCF', 'RED_CCF']
rv_ext = RV
static_ccf_ratio = ['/code/KPF-Pipeline/static/static_green_ccf_ratio_2.csv', '/code/KPF-Pipeline/static/static_red_ccf_ratio_2.csv']
static_ccf_ratio = ['/code/KPF-Pipeline/static/static_green_ccf_ratio_lfc_orders.csv', '/code/KPF-Pipeline/static/static_red_ccf_ratio_lfc_orders.csv']
# starting and ending location for CCF calculation, >= 0, position relative to left end of the image,
# < 0, position relative to the right end of the image
rv_start_x = 500
@@ -132,7 +133,7 @@ do_wavecopy_in_sp = True
do_bk_subtraction = True
do_bc = True
do_outlier_rejection = True
outlier_mask_path = None
outlier_mask_path = /data/outliers/

# for L0->2D process
[WATCHFOR_L0]
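The kpf_drp.cfg changes above swap the old static CCF ratio tables for the new *_lfc_orders.csv files and enable reweighting masks beyond espresso. As a rough illustration of what a static per-order CCF reweighting of this kind does, here is a minimal Python sketch; the CSV column name ("ratio") and the function itself are assumptions for illustration only, not the DRP's actual implementation.

# Minimal sketch of static per-order CCF reweighting (illustrative only).
# Assumes the ratio file holds one weight per spectral order in a "ratio" column.
import numpy as np
import pandas as pd

def weighted_ccf_sum(ccf_per_order, ratio_csv):
    """ccf_per_order: array of shape (n_orders, n_velocities)."""
    weights = pd.read_csv(ratio_csv)["ratio"].to_numpy()
    weights = weights / weights.sum()                 # keep the overall scale fixed
    return (np.asarray(ccf_per_order) * weights[:, None]).sum(axis=0)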
2 changes: 1 addition & 1 deletion configs/kpf_drp_local.cfg
@@ -93,7 +93,7 @@ reweighting_method = ccf_static
ccf_ext = ['GREEN_CCF', 'RED_CCF']
rv_ext = RV
reweighting_enable_masks = [['espresso'], ['espresso']]
static_ccf_ratio = ['/code/KPF-Pipeline/static/static_green_ccf_ratio_2.csv', '/code/KPF-Pipeline/static/static_red_ccf_ratio_2.csv']
static_ccf_ratio = ['/code/KPF-Pipeline/static/static_green_ccf_ratio_lfc_orders.csv', '/code/KPF-Pipeline/static/static_red_ccf_ratio_lfc_orders.csv']

# for ca_hk:
# hk_fiber_list: [<CA-HK spectrometer fibers>]
5 changes: 3 additions & 2 deletions configs/kpf_masters_l1.cfg
@@ -95,7 +95,8 @@ do_db_query_for_one_nearest_wls_master_file = True
cal_file_level = 1
# contentbitmask = 3 means require at least GREEN and RED CCDs in the WLS master file database-queried nearest in time.
contentbitmask = 3
cal_type_pairs = [['WLS','cal-LFC-eve'], ['WLS', 'cal-LFC-morn'], ['WLS', 'autocal-thar-all-eve'], ['WLS', 'autocal-thar-all-morn']]
cal_type_pairs = [['WLS','autocal-lfc-all-eve'], ['WLS', 'autocal-lfc-all-morn'],
['WLS', 'autocal-thar-all-eve'], ['WLS', 'autocal-thar-all-morn']]
# Maximum start-date age of WLS file relative to context.date_dir at 00:00:00 UT, otherwise fall back on wls_fits.
max_cal_file_age = '4 days'

@@ -111,7 +112,7 @@ reweighting_method = ccf_static
reweighting_enable_masks = [['espresso'], ['espresso']]
ccf_ext = ['GREEN_CCF', 'RED_CCF']
rv_ext = RV
static_ccf_ratio = ['/code/KPF-Pipeline/static/static_green_ccf_ratio_2.csv', '/code/KPF-Pipeline/static/static_red_ccf_ratio_2.csv']
static_ccf_ratio = ['/code/KPF-Pipeline/static/static_green_ccf_ratio_lfc_orders.csv', '/code/KPF-Pipeline/static/static_red_ccf_ratio_lfc_orders.csv']
# starting and ending location for CCF calculation, >= 0, position relative to left end of the image,
# < 0, position relative to the right end of the image
rv_start_x = 500
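Both kpf_drp.cfg and kpf_masters_l1.cfg now prefer the autocal-lfc-all-eve/morn WLS masters, fall back to the ThAr pairs, and, if the nearest database match is older than max_cal_file_age, fall back on the static wls_fits list. A minimal sketch of that selection logic as described by the config comments follows; the query helper and record fields are hypothetical, supplied only to make the fallback order concrete.

# Sketch of nearest-WLS selection with an age cutoff, per the config comments.
# query_nearest_wls is a caller-supplied (hypothetical) lookup returning a dict
# with 'startdate' (datetime) and 'filename', or None if nothing matches.
from datetime import datetime, timedelta

def pick_wls_file(date_dir, cal_type_pairs, wls_fits_fallback, query_nearest_wls,
                  max_age_days=4):
    cutoff = datetime.strptime(date_dir, "%Y%m%d")     # context.date_dir at 00:00:00 UT
    for cal_type, object_name in cal_type_pairs:       # e.g. ('WLS', 'autocal-lfc-all-eve')
        rec = query_nearest_wls(cal_type, object_name, cutoff)
        if rec and (cutoff - rec["startdate"]) <= timedelta(days=max_age_days):
            return rec["filename"]
    return wls_fits_fallback                           # fall back on wls_fits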
@@ -20,8 +20,8 @@ data_type = KPF
#output_fits_filename = /testdata/L1/20231030/KP.20231030.00336.79_L1.fits

data_level_str = L0
input_fits_filename = /data/L0/20230829/KP.20230829.76026.81.fits
output_fits_filename = /testdata/L0/20230829/KP.20230829.76026.81.fits
input_fits_filename = /data/L0/20231204/KP.20231204.43445.39.fits
output_fits_filename = /testdata/L0/20231204/KP.20231204.43445.39.fits

[MODULE_CONFIGS]
quality_control = modules/quality_control/configs/default.cfg
4 changes: 2 additions & 2 deletions configs/quality_control_exposure.cfg
@@ -10,9 +10,9 @@ log_directory = /data/logs/
data_type = KPF
# With the correct form of file glob, one can ingest into the L0Files database table
# a single L0 file, or all L0 files for certain date(s), or even all L0 files in a month.
#lev0_fits_file_glob = /data/L0/20230430/KP.20230430.05981.93.fits
lev0_fits_file_glob = /data/L0/20230430/KP.20230430.05981.93.fits
#lev0_fits_file_glob = /data/L0/20230525/KP.*.fits
lev0_fits_file_glob = /data/L0/202301??/KP.*.fits
#lev0_fits_file_glob = /data/L0/202301??/KP.*.fits
# The following is input list of all readout channels in the L0 file.
lev0_ffi_exts = ['GREEN_AMP1','GREEN_AMP2','GREEN_AMP3','GREEN_AMP4','RED_AMP1','RED_AMP2','CA_HK']
# The following maps to the /data/kpf/L0 subdirectory in the docker-run command option: -v /data/kpf:/data
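The commented glob alternatives above control ingestion scope (one file, one night, or a whole month). Purely as an illustration of how those patterns expand with the standard library — this is not the pipeline's ingestion code:

import glob

single_file = glob.glob("/data/L0/20230430/KP.20230430.05981.93.fits")  # one L0 file
one_night   = glob.glob("/data/L0/20230525/KP.*.fits")                  # all L0 files for one date
one_month   = glob.glob("/data/L0/202301??/KP.*.fits")                  # all L0 files in January 2023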
2 changes: 1 addition & 1 deletion configs/quicklook_match.cfg
@@ -7,7 +7,7 @@ log_directory = /data/logs_QLP/

[ARGUMENT]
# see quicklook_match.recipe for a description of how to set fullpath
fullpath = '/data/L0/202?????/KP.20231204.5????.??*.fits'
fullpath = '/data/L1/202?????/KP.20240116.?????.??*.fits'
#fullpath = '/data/masters/20230429/*.fits'

[MODULE_CONFIGS]
File renamed without changes.
4 changes: 2 additions & 2 deletions configs/wls_auto.cfg
@@ -27,13 +27,13 @@ quicklook = 0
f0_key = 'LFCCEOFR'
frep_key = 'LFCFRREF'

red_linelist = /data/reference_fits/kpfMaster_ThArLines20221005_red.npy
thar_linelist = static/stellarmasks/Thorium_mask_031921.mas

red_cal_orderlet_name = ['RED_CAL_FLUX', 'RED_SCI_FLUX1','RED_SCI_FLUX2','RED_SCI_FLUX3', 'RED_SKY_FLUX']
red_output_ext = ['RED_CAL_WAVE', 'RED_SCI_WAVE1','RED_SCI_WAVE2','RED_SCI_WAVE3', 'RED_SKY_WAVE']
red_min_order = 0
red_max_order = 31

green_linelist = /data/reference_fits/kpfMaster_ThArLines20230221_green.npy
green_cal_orderlet_name = ['GREEN_CAL_FLUX', 'GREEN_SCI_FLUX1','GREEN_SCI_FLUX2','GREEN_SCI_FLUX3', 'GREEN_SKY_FLUX']
green_output_ext = ['GREEN_CAL_WAVE', 'GREEN_SCI_WAVE1','GREEN_SCI_WAVE2','GREEN_SCI_WAVE3', 'GREEN_SKY_WAVE']
green_min_order = 0
2 changes: 1 addition & 1 deletion configs/write_headers_match_files.cfg
@@ -7,7 +7,7 @@ log_directory = /data/logs_QLP/

[ARGUMENT]
# see write_headers_match_files.cfg for a description of how to set fullpath
fullpath = '/data/L1/202?????/KP.20231104.39907.04*.fits'
fullpath = '/data/2D/202?????/KP.20231124.52767.84*.fits'
output_dir = /testdata/
do_dark_curr = True # dark current headers
do_gdhdr = True # guider headers
94 changes: 94 additions & 0 deletions cronjobs/generateWLSScriptBetweenTwoDates.pl
@@ -0,0 +1,94 @@
#! /usr/local/bin/perl

use strict;
use warnings;

my $startyyyymmdd = shift @ARGV; # YYYYMMDD command-line parameter.
my $endyyyymmdd = shift @ARGV; # YYYYMMDD command-line parameter.

if ((! (defined $startyyyymmdd)) or (! ($startyyyymmdd =~ /^\d\d\d\d\d\d\d\d$/))) {
die "startyyyymmdd either not defined or not correct format; quitting...\n";
}

if ((! (defined $endyyyymmdd)) or (! ($endyyyymmdd =~ /^\d\d\d\d\d\d\d\d$/))) {
die "endyyyymmdd either not defined or not correct format; quitting...\n";
}

my ($year, $month, $day);

($year, $month, $day) = $startyyyymmdd =~ /(\d\d\d\d)(\d\d)(\d\d)/;
my $startdate = $year . '-' . $month . '-' . $day;

($year, $month, $day) = $endyyyymmdd =~ /(\d\d\d\d)(\d\d)(\d\d)/;
my $enddate = $year . '-' . $month . '-' . $day;


my $cmdforjdstart = "sqlite3 test.db \"SELECT julianday(\'$startdate 00:00:00.0\');\"";
print "Executing cmd = [$cmdforjdstart]\n";
my $computedjdstart = `$cmdforjdstart`;
chomp $computedjdstart;
print "computedjdstart = $computedjdstart\n";


my $cmdforjdend = "sqlite3 test.db \"SELECT julianday(\'$enddate 00:00:00.0\');\"";
print "Executing cmd = [$cmdforjdend]\n";
my $computedjdend = `$cmdforjdend`;
chomp $computedjdend;
print "computedjdend = $computedjdend\n";

my (@yyyymmdd);

for (my $i = int($computedjdstart); $i <= int($computedjdend); $i++) {

my $jdi = $i + 0.5;
my $cmd = "sqlite3 test.db \"SELECT datetime($jdi);\"";

#print "Executing cmd = [$cmd]\n";

my $computedatetime = `$cmd`;
chomp $computedatetime;

my ($obsyear, $obsmonth, $obsday) = $computedatetime =~ /(\d\d\d\d)-(\d\d)-(\d\d)/;

my $obsdate = $obsyear . $obsmonth . $obsday;
print "jdi, obsdate = $jdi, $obsdate\n";

push @yyyymmdd, $obsdate;
}


my $scriptfile = "runWLSPipelineFrom" . $yyyymmdd[0] . "To" . $yyyymmdd[$#yyyymmdd] . ".sh";

if (! open(SCR, ">$scriptfile") ) {
die "*** Error: Could not open $scriptfile for writing; quitting...\n";
}

my $shebang = '#! /bin/bash -l';

print SCR "$shebang\n";

foreach my $yyyymmdd (@yyyymmdd) {
print "yyyymmdd=$yyyymmdd\n";

my @op = `cat runDailyPipelines.sh`;

foreach my $op (@op) {
if ($op =~ /^\s+$/) { next; }
if ($op =~ /^procdate/) { next; }
if ($op =~ /\/bin\/bash/) { next; }
if ($op =~ /^printenv/) { next; }
if ($op =~ /kpfmastersruncmd/) { next; }
$op =~ s/\$procdate/$yyyymmdd/g;
print SCR "$op";
}
}

if (! close(SCR) ) {
die "*** Error: Could not close $scriptfile; quitting...\n";
}

`chmod +x $scriptfile`;

exit 0;
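As written, the generator expects two YYYYMMDD arguments and assumes sqlite3, a test.db file, and runDailyPipelines.sh are available in the working directory; for example (dates illustrative), ./generateWLSScriptBetweenTwoDates.pl 20240101 20240131 would write an executable runWLSPipelineFrom20240101To20240131.sh containing one WLS pipeline block per night in the range.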


4 changes: 2 additions & 2 deletions cronjobs/kpfmasters_wls_auto.pl
@@ -84,7 +84,7 @@
# Initialize fixed parameters and read command-line parameter.

my $iam = 'kpfmasters_wls_auto.pl';
my $version = '1.8';
my $version = '1.9';

my $procdate = shift @ARGV; # YYYYMMDD command-line parameter.

@@ -127,11 +127,11 @@
"mkdir -p /data/masters/${procdate}\n" .
"cp -pr /masters/${procdate}/kpf_${procdate}*L1.fits /data/masters/${procdate}\n" .
"kpf -r $recipe -c $config --date ${procdate}\n" .
"rm /masters/${procdate}/*master_WLS*\n" .
"cp -p /data/masters/${procdate}/*master_WLS* /masters/${procdate}\n" .
"mkdir -p /masters/${procdate}/wlpixelfiles\n" .
"cp -p /data/masters/wlpixelfiles/*kpf_${procdate}* /masters/${procdate}/wlpixelfiles\n" .
"cp -p /code/KPF-Pipeline/pipeline_${procdate}.log /masters/${procdate}/pipeline_wls_auto_${procdate}.log\n" .
"find /masters/${procdate}/* -type f -mtime +7 -exec rm {} +\n" .
"rm /code/KPF-Pipeline/pipeline_${procdate}.log\n" .
"exit\n";
my $makescriptcmd = "echo \"$script\" > $dockercmdscript";
19 changes: 19 additions & 0 deletions database/configs/backfill_acf.cfg
@@ -0,0 +1,19 @@
# Pipeline logger configurations
[LOGGER]
start_log = True
log_path = pipeline.log
log_level = info
log_verbose = True
log_directory = /data/logs/

[ARGUMENT]
data_type = KPF
lev0_fits_file = /data/L0/20231212/KP.20231212.51850.00.fits
# Database primary key of L0 FITS file, as stored in the L0Files database table.
rid = 248848

[MODULE_CONFIGS]
backfill_acf = database/modules/backfill_acf/configs/default.cfg



Empty file.
16 changes: 16 additions & 0 deletions database/modules/backfill_acf/configs/default.cfg
@@ -0,0 +1,16 @@
## Default configuration for BackfillAcfFramework primitive
[LOGGER]
start_log = True
log_path = logs/backfill_acf_framework_debug.log
log_level = info
log_verbose = True


## Module related parameters
[PARAM]
# Normally keep the backfill_repopulate_db_recs parameter set to zero; otherwise, if set to one,
# the code will backfill/repopulate new columns gracffln and rdacffln in L0Files database table,
# according to the database query below, which must return rid,filename,checksum.
# Depending on the query, the database backfilling/updating may take some time.
backfill_repopulate_db_recs = 1
backfill_repopulate_db_query_template = select rid,filename,checksum from L0Files where (gracffln is null and rdacffln is null) order by mjdobs;
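With backfill_repopulate_db_recs = 1, the module is meant to run the query template above and populate gracffln/rdacffln row by row. A minimal sketch of such a backfill loop, assuming a psycopg2-style PostgreSQL connection and a caller-supplied compute_acf_filenames() helper (both assumptions; the module's actual internals are not part of this diff):

# Sketch of the backfill loop this config parameterizes (illustrative only).
# Only the query and the column names (rid, filename, checksum, gracffln,
# rdacffln) come from the config; the connection and helper are assumptions.
def backfill_acf(conn, compute_acf_filenames,
                 query=("select rid,filename,checksum from L0Files "
                        "where (gracffln is null and rdacffln is null) order by mjdobs")):
    with conn.cursor() as cur:
        cur.execute(query)
        for rid, filename, checksum in cur.fetchall():
            gr_acf, rd_acf = compute_acf_filenames(filename)   # hypothetical helper
            cur.execute("update L0Files set gracffln = %s, rdacffln = %s where rid = %s",
                        (gr_acf, rd_acf, rid))
    conn.commit()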
Empty file.
