Merge pull request #926 from Keck-DataReductionPipelines/develop
Version 2.7.2
bjfultn authored Jul 25, 2024
2 parents 4b344d1 + 60aadd7 commit 7699e5d
Showing 52 changed files with 3,774 additions and 4,272 deletions.
2 changes: 1 addition & 1 deletion configs/kpf_masters_l1.cfg
@@ -20,7 +20,7 @@ input_dir_root = /data/masters/

# for module process:
do_l0_to_2d = False
-do_order_trace = True
+do_order_trace = False
do_spectral_extraction = True
do_outlier_rejection = False
do_rv = True
14 changes: 7 additions & 7 deletions configs/qc_diagnostics_example.cfg
@@ -4,7 +4,7 @@ start_log = True
log_path = pipeline.log
log_level = info
log_verbose = True
-log_directory = /data/logs/
+log_directory = /testdata/logs/

[ARGUMENT]
data_type = KPF
@@ -15,13 +15,13 @@ data_type = KPF
#input_fits_filename = /data/L1/20230608/KP.20230608.68393.35_L1.fits
#output_fits_filename = /testdata/L1/20230608/KP.20230608.68393.35_L1.fits

-#data_level_str = L1
-#input_fits_filename = /data/L1/20231030/KP.20231030.00336.79_L1.fits
-#output_fits_filename = /testdata/L1/20231030/KP.20231030.00336.79_L1.fits
+data_level_str = L1
+input_fits_filename = /data/L1/20231030/KP.20231030.00336.79_L1.fits
+output_fits_filename = /testdata/L1/20231030/KP.20231030.00336.79_L1.fits

-data_level_str = L0
-input_fits_filename = /data/L0/20230618/KP.20230618.67942.84.fits
-output_fits_filename = /testdata/L0/20230618/KP.20230618.67942.84.fits
+#data_level_str = L0
+#input_fits_filename = /data/L0/20230618/KP.20230618.67942.84.fits
+#output_fits_filename = /testdata/L0/20230618/KP.20230618.67942.84.fits

[MODULE_CONFIGS]
quality_control = modules/quality_control/configs/default.cfg
1 change: 1 addition & 0 deletions configs/quicklook_parallel.cfg
@@ -8,6 +8,7 @@ log_verbose = True
log_directory = /data/logs_QLP/

[ARGUMENT]
+outdir= '/data/QLP/'
fullpath = 'INSERT_FITS_PATH'

[MODULE_CONFIGS]
132 changes: 132 additions & 0 deletions cronjobs/generateDailyRunScriptsBetweenTwoDates.pl
@@ -0,0 +1,132 @@
#! /usr/local/bin/perl

use strict;
use warnings;

###########################################################################################
# This perl script requires the sqlite3 command to be installed:
# [rlaher@shrek ~]$ which sqlite3
# ~/sw/anaconda3/bin/sqlite3
#
# Instructions: Provide start and end dates on command line. E.g.,
# export KPFCRONJOB_CODE=/data/user/rlaher/git/KPF-Pipeline
# mkdir -p ${KPFCRONJOB_CODE}/cronjobs/current_jobs
# cd ${KPFCRONJOB_CODE}/cronjobs/current_jobs
# ${KPFCRONJOB_CODE}/cronjobs/generateDailyRunScriptsBetweenTwoDates.pl 20230601 20230831
###########################################################################################

my $startyyyymmdd = shift @ARGV; # YYYYMMDD command-line parameter.
my $endyyyymmdd = shift @ARGV; # YYYYMMDD command-line parameter.

if ((! (defined $startyyyymmdd)) or (! ($startyyyymmdd =~ /^\d\d\d\d\d\d\d\d$/))) {
    die "startyyyymmdd either not defined or not correct format; quitting...\n";
}

if ((! (defined $endyyyymmdd)) or (! ($endyyyymmdd =~ /^\d\d\d\d\d\d\d\d$/))) {
    die "endyyyymmdd either not defined or not correct format; quitting...\n";
}

my ($year, $month, $day);

($year, $month, $day) = $startyyyymmdd =~ /(\d\d\d\d)(\d\d)(\d\d)/;
my $startdate = $year . '-' . $month . '-' . $day;

($year, $month, $day) = $endyyyymmdd =~ /(\d\d\d\d)(\d\d)(\d\d)/;
my $enddate = $year . '-' . $month . '-' . $day;


my $cmdforjdstart = "sqlite3 test.db \"SELECT julianday(\'$startdate 00:00:00.0\');\"";
print "Executing cmd = [$cmdforjdstart]\n";
my $computedjdstart = `$cmdforjdstart`;
chomp $computedjdstart;
print "computedjdstart = $computedjdstart\n";


my $cmdforjdend = "sqlite3 test.db \"SELECT julianday(\'$enddate 00:00:00.0\');\"";
print "Executing cmd = [$cmdforjdend]\n";
my $computedjdend = `$cmdforjdend`;
chomp $computedjdend;
print "computedjdend = $computedjdend\n";

my (@yyyymmdd);

for (my $i = int($computedjdstart); $i <= int($computedjdend); $i++) {

    my $jdi = $i + 0.5;
    my $cmd = "sqlite3 test.db \"SELECT datetime($jdi);\"";

    #print "Executing cmd = [$cmd]\n";

    my $computedatetime = `$cmd`;
    chomp $computedatetime;

    my ($obsyear, $obsmonth, $obsday) = $computedatetime =~ /(\d\d\d\d)-(\d\d)-(\d\d)/;

    my $obsdate = $obsyear . $obsmonth . $obsday;
    print "jdi, obsdate = $jdi, $obsdate\n";

    push @yyyymmdd, $obsdate;
}

my @reverse_yyyymmdd = reverse @yyyymmdd;


my $pwd = $ENV{"PWD"};
print "PWD=$pwd\n";

my $cronjob_code = $ENV{"KPFCRONJOB_CODE"};

my $scriptdir = $pwd;
my $scriptfile = "runMastersPipeline_From_${startyyyymmdd}_To_${endyyyymmdd}.sh";

if (! open(SCR, ">$scriptfile") ) {
    die "*** Error: Could not open $scriptfile for writing; quitting...\n";
}

my $shebang = '#! /bin/bash -l';

print SCR "$shebang\n";

foreach my $yyyymmdd (@reverse_yyyymmdd) {
    print "yyyymmdd=$yyyymmdd\n";

    my $outfile = "runDailyPipelines_${yyyymmdd}.sh";

    if (! open(OUT, ">$outfile") ) {
        die "*** Error: Could not open $outfile for writing; quitting...\n";
    }

    my @op = `cat $cronjob_code/cronjobs/runDailyPipelines.sh`;

    foreach my $op (@op) {
        if ($op =~ /^procdate/) { next; }
        $op =~ s/\$procdate/$yyyymmdd/g;
        print OUT "$op";
    }

    if (! close(OUT) ) {
        die "*** Error: Couldn't close $outfile; quitting...\n";
    }

    `chmod +x $outfile`;

    my $cmd = $scriptdir . '/runDailyPipelines_' .
              $yyyymmdd .
              '.sh >& ' . $cronjob_code . '/runDailyPipelines_' .
              $yyyymmdd .
              '.out &';

    print SCR "echo \"Executing command: $cmd\"\n";
    print SCR "$cmd\n";
    print SCR "echo \"Sleeping 2400 seconds\"\n";
    print SCR "sleep 2400\n";
}

if (! close(SCR) ) {
    die "*** Error: Could not close $scriptfile; quitting...\n";
}

`chmod +x $scriptfile`;

exit 0;
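
The script's core trick is leaning on SQLite's built-in julianday() and datetime() functions to enumerate calendar dates between the two endpoints. As a rough illustration (not part of this commit), the same round trip can be written in a few lines of Python using the standard-library sqlite3 module and an in-memory database:

# Sketch only: enumerate YYYYMMDD dates between two endpoints the same way
# generateDailyRunScriptsBetweenTwoDates.pl does, via SQLite date functions.
import sqlite3

con = sqlite3.connect(":memory:")
jd_start = con.execute("SELECT julianday('2023-06-01 00:00:00.0')").fetchone()[0]
jd_end = con.execute("SELECT julianday('2023-08-31 00:00:00.0')").fetchone()[0]

for jd in range(int(jd_start), int(jd_end) + 1):
    # jd + 0.5 lands on midnight of the civil date (Julian days begin at noon).
    ts = con.execute("SELECT datetime(?)", (jd + 0.5,)).fetchone()[0]
    print(ts[:10].replace("-", ""))  # e.g. 20230601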

2 changes: 1 addition & 1 deletion cronjobs/kpfmastersruncmd_l1.pl
@@ -176,7 +176,7 @@
"mkdir -p /data/masters/${procdate}\n" .
"cp -pr /masters/${procdate}/kpf_${procdate}*.fits /data/masters/${procdate}\n" .
"rm /data/masters/${procdate}/kpf_${procdate}_smooth_lamp.fits\n" .
"kpf -r $recipe -c $config --date ${procdate}\n" .
"kpf --ncpus 32 --watch /data/masters/${procdate}/ --reprocess --masters -r $recipe -c $config \n" .
"cp -p /data/masters/${procdate}/* /masters/${procdate}\n" .
"cp -p /data/logs/${procdate}/pipeline_${procdate}.log /masters/${procdate}/pipeline_masters_drp_l1_${procdate}.log\n" .
"exit\n";
database/modules/query_db_before_after_master_files/src/query_db_before_after_master_files_framework.py
@@ -10,11 +10,14 @@
from kpfpipe.models.level0 import KPF0
from kpfpipe.primitives.level0 import KPF0_Primitive
from keckdrpframework.models.arguments import Arguments
-from database.modules.query_db_nearest_master_files.src.query_db_nearest_master_files_framework import md5

+import database.modules.utils.kpf_db as db


# Global read-only variables
DEFAULT_CFG_PATH = 'database/modules/query_db_before_after_master_files/configs/default.cfg'


class QueryDBBeforeAfterMasterFilesFramework(KPF0_Primitive):

"""
@@ -148,7 +151,7 @@ def run_query(self,cur,rep,query_template):

# Compute checksum and compare with database value.

-cksum = md5(filename)
+cksum = db.md5(filename)
self.logger.info('cksum = {}'.format(cksum))

if cksum == checksum:
database/modules/query_db_nearest_master_files/src/query_db_nearest_master_files_framework.py
@@ -13,28 +13,18 @@
from kpfpipe.pipelines.fits_primitives import to_fits
from keckdrpframework.models.arguments import Arguments

+import database.modules.utils.kpf_db as db


# Global read-only variables
DEFAULT_CFG_PATH = 'database/modules/query_db_nearest_master_files/configs/default.cfg'

-def md5(fname):
-    hash_md5 = hashlib.md5()
-
-    try:
-        with open(fname, "rb") as f:
-            for chunk in iter(lambda: f.read(4096), b""):
-                hash_md5.update(chunk)
-        return hash_md5.hexdigest()
-    except:
-        print("*** Error: Cannot open file =",fname,"; quitting...")
-        exit(65)

class QueryDBNearestMasterFilesFramework(KPF0_Primitive):

"""
Description:
Queries the KPF pipeline-operations database for the nearest-in-time master files.
Currently, only master files made for data earlier than the observation date are returned.
Arguments:
data_type (str): Type of data (e.g., KPF).
@@ -219,7 +209,7 @@ def _perform(self):

# Compute checksum and compare with database value.

-cksum = md5(filename)
+cksum = db.md5(filename)
self.logger.info('cksum = {}'.format(cksum))

if cksum == checksum:
database/modules/query_db_one_nearest_master_file/src/query_db_one_nearest_master_file_framework.py
@@ -13,27 +13,18 @@
from kpfpipe.pipelines.fits_primitives import to_fits
from keckdrpframework.models.arguments import Arguments

+import database.modules.utils.kpf_db as db


# Global read-only variables
DEFAULT_CFG_PATH = 'database/modules/query_db_one_nearest_master_file/configs/default.cfg'

-def md5(fname):
-    hash_md5 = hashlib.md5()
-
-    try:
-        with open(fname, "rb") as f:
-            for chunk in iter(lambda: f.read(4096), b""):
-                hash_md5.update(chunk)
-        return hash_md5.hexdigest()
-    except:
-        print("*** Error: Cannot open file =",fname,"; quitting...")
-        exit(65)

class QueryDBOneNearestMasterFileFramework(KPF0_Primitive):

"""
Description:
Queries the KPF pipeline-operations database for the one nearest-in-time master file.
Currently, only a master file made for data earlier than the observation date is returned.
Arguments:
data_type (str): Type of data (e.g., KPF).
@@ -252,7 +243,7 @@ def _perform(self):

# Compute checksum and compare with database value.

-cksum = md5(filename)
+cksum = db.md5(filename)
self.logger.info('cksum = {}'.format(cksum))

if cksum == checksum:
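
All three query modules now share one checksum helper, database.modules.utils.kpf_db.md5, in place of the per-module copies deleted above. The helper's own source is not shown in this diff, but judging from the deleted definitions it presumably looks close to the sketch below (narrowing the bare except to OSError is an editorial choice here, not necessarily the repository's):

# Sketch of db.md5 as inferred from the deleted per-module copies above;
# the real implementation is expected to live in database/modules/utils/kpf_db.py.
import hashlib
import sys

def md5(fname):
    # Stream the file in 4-KB chunks so large FITS files are never held in memory.
    hash_md5 = hashlib.md5()
    try:
        with open(fname, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_md5.update(chunk)
        return hash_md5.hexdigest()
    except OSError:
        print("*** Error: Cannot open file =", fname, "; quitting...")
        sys.exit(65)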
5 changes: 5 additions & 0 deletions database/modules/utils/kpf_db.cfg
@@ -0,0 +1,5 @@
[LOGGER]
log = True
log_path = logs/database.log
log_level = debug
log_verbose = True
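
The new kpf_db.cfg carries only logger settings. As an assumption-level sketch (the consuming module isn't shown in this diff), such a [LOGGER] section would typically be read with Python's standard configparser:

# Sketch (assumption, not from this commit): reading kpf_db.cfg with the
# standard-library configparser, as KPF modules typically consume .cfg files.
import configparser

cfg = configparser.ConfigParser()
cfg.read("database/modules/utils/kpf_db.cfg")

log_enabled = cfg.getboolean("LOGGER", "log")           # True
log_path = cfg.get("LOGGER", "log_path")                # logs/database.log
log_level = cfg.get("LOGGER", "log_level")              # debug
log_verbose = cfg.getboolean("LOGGER", "log_verbose")   # True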