-
Notifications
You must be signed in to change notification settings - Fork 8
/
reduce_frame.py
489 lines (391 loc) · 16.8 KB
/
reduce_frame.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
import imp
import importlib
import logging
import os
from logging import INFO

import numpy as np
from astropy.io import fits

import config
import DrpException
import Flat
import image_lib
import nsdrp
import Order
import ReducedDataSet
import reduce_order
import wavelength_utils
logger = logging.getLogger('obj')
# main_logger = logging.getLogger('main')
def reduce_frame(raw, out_dir, flatCacher=None):
    """Fully reduce a single object frame or AB frame pair.

    Args:
        raw: RawDataSet object describing the raw object, flat and dark files.
        out_dir: Data product root directory.
        flatCacher: Optional FlatCacher object; when None (command line
            mode) the single flat named in raw is reduced from scratch.

    Returns:
        A ReducedDataSet object containing all reduced data for this frame.

    Raises:
        DrpException.DrpException: if the raw data set contains no flats.
    """
    # initialize per-object logger and check output directory
    if raw.isPair:
        init(raw.baseNames['AB'], out_dir)
    else:
        init(raw.baseNames['A'], out_dir)

    # if no flats in raw data set then fail
    if len(raw.flatFns) < 1:
        logger.error("no flats for {}".format(raw.baseName))
        raise DrpException.DrpException('no flats')

    # create reduced data set
    reduced = ReducedDataSet.ReducedDataSet(raw)

    # read raw object image data into reduced data set object
    reduced.objImg['A'] = fits.getdata(raw.objAFn, ignore_missing_end=True)
    if raw.isPair:
        reduced.objImg['B'] = fits.getdata(raw.objBFn, ignore_missing_end=True)

    # put object summary info into per-object log
    log_start_summary(reduced)

    # get fully processed flat in the form of a Flat object
    reduced.Flat = getFlat(raw, flatCacher)
    logger.info('using flat {}'.format(reduced.Flat.getBaseName()))

    # clean cosmic ray hits on object frame(s)
    if config.params['no_cosmic']:
        logger.info(
            "cosmic ray rejection on object frame inhibited by command line flag")
    else:
        logger.info('cosmic ray cleaning object frame A')
        reduced.objImg['A'], cosmicMethod = image_lib.cosmic_clean(
            reduced.objImg['A'])
        logger.debug('cosmic ray cleaning object frame A complete')
        if reduced.isPair:
            logger.info('cosmic ray cleaning object frame B')
            reduced.objImg['B'], _ = image_lib.cosmic_clean(reduced.objImg['B'])
            logger.debug('cosmic ray cleaning object frame B complete')
        reduced.cosmicCleaned = True
        logger.info(cosmicMethod)

    # if darks are available, combine them if there are more than one
    # and subtract from object frame(s) and flat
    process_darks(raw, reduced)

    # if AB pair then subtract B from A
    if reduced.isPair:
        reduced.objImg['AB'] = np.subtract(
            reduced.objImg['A'], reduced.objImg['B'])

    # reduce orders
    try:
        reduce_orders(reduced)
    except IOError:
        # propagate I/O failures to the caller, which decides whether the
        # whole pipeline run should stop
        raise

    # find and apply wavelength solution.
    # imp.reload was replaced by importlib.reload: the imp module is
    # deprecated and was removed in Python 3.12.
    importlib.reload(wavelength_utils)
    if find_global_wavelength_soln(reduced) is True:
        apply_wavelength_soln(reduced)
    else:
        logger.info('not applying wavelength solution')
        for order in reduced.orders:
            order.waveScale = order.flatOrder.gratingEqWaveScale
            order.calMethod = 'grating equation'

    return reduced
def getFlat(raw, flatCacher):
    """Given a raw data set and a flat cacher, creates and returns a Flat object.

    If there is no flat cacher then we are in command line mode where there is a
    single flat and the flats are not reused.  In this case 1. the flat image
    data is read from the flat file specified in the raw data set, 2. unless
    cosmic ray rejection is inhibited cosmic ray artifacts are removed from the
    flat, and 3. a Flat object is created from the flat image data.

    If there is a flat cacher then a possibly cached Flat object is retrieved
    from it.

    Note that in either case, the Flat object represents a fully reduced flat,
    with orders on the flat identified, traced, cut out and rectified.

    Args:
        raw: A RawDataSet object containing, among other things, one or more
            flat file names.
        flatCacher: A FlatCacher object or None.

    Returns:
        A Flat object.
    """
    if flatCacher is None:
        # in command line mode with only one flat; read the HDU once and
        # reuse its data and header rather than re-reading the file from
        # disk for each of data and header
        hdu = fits.PrimaryHDU.readfrom(
            raw.flatFns[0], ignore_missing_end=True)
        if config.params['no_cosmic']:
            logger.info(
                'cosmic ray rejection on flat inhibited by command line flag')
            flat_data = hdu.data
        else:
            logger.info('starting cosmic ray cleaning flat')
            flat_data, cosmicMethod = image_lib.cosmic_clean(hdu.data)
            logger.info(cosmicMethod)
            logger.info('cosmic ray cleaning on flat complete')
        return Flat.Flat(
            raw.flatFns[0], [raw.flatFns[0]], hdu.header, flat_data)
    else:
        return flatCacher.getFlat(raw.flatFns)
def reduce_orders(reduced):
    """Reduces each order in the frame.

    Starting order is determined from a lookup table indexed by filter name.
    The grating equation is evaluated for y-axis location of the short
    wavelength end of the order on the detector.

    If the order is on the detector then extract_order() is called to cut out
    from the full frame a rectangular array of pixels containing the entire
    order plus padding.

    Then, reduce_order() is called to reduce the order.  reduce_order()
    returns an order object which is an instance of the Order class and
    contains all of the reduced data for this order.  The order object is
    then appended to the list of order objects in the ReducedDataSet object
    representing the current frame.

    After the first order that should be on the detector is found, processing
    continues through each order in descending order number, working toward
    higher pixel row numbers on the detector, until the first off-detector
    order is found.
    """
    for flatOrder in reduced.Flat.flatOrders:
        if flatOrder.valid is not True:
            continue
        logger.info('***** order ' + str(flatOrder.orderNum) + ' *****')
        order = Order.Order(reduced.frames, reduced.baseNames, flatOrder)
        order.isPair = reduced.isPair
        for frame in order.frames:
            order.objCutout[frame] = np.array(image_lib.cut_out(reduced.objImg[frame],
                flatOrder.highestPoint, flatOrder.lowestPoint, flatOrder.cutoutPadding))
        order.integrationTime = reduced.getIntegrationTime()  # used in noise calc
        try:
            # reduce order, i.e. rectify, extract spectra, identify sky lines
            reduce_order.reduce_order(order)
            # add reduced order to list of reduced orders in Reduced object
            reduced.orders.append(order)
        except DrpException.DrpException as e:
            # str(e), not e.message: exception objects have no .message
            # attribute in Python 3, so the old code raised AttributeError
            # while handling the failure
            logger.warning('failed to reduce order {}: {}'.format(
                str(flatOrder.orderNum), str(e)))
        # end if order is on the detector
    # end for each order

    loggers = ['obj']
    if config.params['cmnd_line_mode'] is False:
        loggers.append('main')

    if len(reduced.orders) == 0:
        return

    # compute summary statistics once, outside the per-logger loop
    # (previously they were recomputed for every logger)
    snr = [o.snr['A'] for o in reduced.orders]
    reduced.snrMean = sum(snr) / len(snr)
    reduced.snrMin = np.amin(snr)

    # mean spatial peak width fails if any order lacks Gaussian parameters;
    # in that case it is reported as unknown
    wMean = None
    try:
        wMean = sum(abs(o.gaussianParams['A'][2])
                    for o in reduced.orders) / len(reduced.orders)
        reduced.wMean = wMean
    except BaseException:
        pass

    # maximum width uses only orders that have Gaussian parameters
    w = [o.gaussianParams['A'][2] for o in reduced.orders
         if o.gaussianParams['A'] is not None]
    wMax = None
    try:
        wMax = np.amax(w)
        reduced.wMax = wMax
    except BaseException:
        pass

    for l in loggers:
        log = logging.getLogger(l)
        log.log(INFO, 'mean signal-to-noise ratio = {:.1f}'.format(
            reduced.snrMean))
        log.log(INFO, 'minimum signal-to-noise ratio = {:.1f}'.format(
            reduced.snrMin))
        if wMean is not None:
            log.log(INFO, 'mean spatial peak width = {:.1f} pixels'.format(
                reduced.wMean))
        else:
            log.log(logging.WARNING, 'mean spatial peak width = unknown')
        if wMax is not None:
            log.log(INFO, 'maximum spatial peak width = {:.1f} pixels'.format(
                reduced.wMax))
        else:
            log.error('maximum spatial peak width cannot be determined')
            log.log(INFO, 'maximum spatial peak width = unknown')
    return
def find_global_wavelength_soln(reduced):
    """Fit a single two-dimensional wavelength solution to the whole frame.

    Collects all identified sky lines from every reduced order and fits
    wavelength as a function of (column, 1/order number) using
    wavelength_utils.twodfit().  On success, marks the lines used in the fit
    as non-outliers and computes the fit slope at each such line.

    Args:
        reduced: ReducedDataSet with reduced orders and identified sky lines.

    Returns:
        True if a wavelength solution was found, False otherwise.
    """
    loggers = ['obj']
    if config.params['cmnd_line_mode'] is False:
        loggers.append('main')

    # create arrays of col, 1/order, accepted wavelength
    # TODO: modify twodfit() to take list of lines rather than these
    # constructed arrays
    col = []
    centroid = []
    order_inv = []
    accepted = []
    for order in reduced.orders:
        for line in order.lines:
            col.append(line.col)
            centroid.append(line.centroid)
            order_inv.append(1.0 / order.flatOrder.orderNum)
            accepted.append(line.waveAccepted)

    reduced.nLinesFound = len(col)
    for l in loggers:
        logging.getLogger(l).log(
            INFO, 'n sky lines identified = {:d}'.format(
                reduced.nLinesFound))

    if config.params['int_c'] is True:
        logger.warning('using integer column numbers in wavelength fit')
        c = np.asarray(col, dtype='float32')
    else:
        c = np.asarray(centroid, dtype='float32')

    reduced.frameCalCoeffs, wave_fit, wave_exp, reduced.frameCalRmsRes = wavelength_utils.twodfit(
        c,
        np.asarray(order_inv, dtype='float32'),
        np.asarray(accepted, dtype='float32'))

    if wave_fit is None:
        reduced.frameCalAvailable = False
        return False

    reduced.frameCalAvailable = True
    reduced.calFrame = 'self'
    reduced.nLinesUsed = len(wave_fit)

    # bug fix: previously logged unconditionally to ['main', 'obj'],
    # ignoring the cmnd_line_mode check that built the loggers list above
    for l in loggers:
        logging.getLogger(l).log(INFO, 'n lines used in wavelength fit = {:d}'.format(
            reduced.nLinesUsed))
        logging.getLogger(l).log(INFO, 'rms wavelength fit residual = {:.3f}'.format(
            reduced.frameCalRmsRes))

    # associate each fitted wavelength with the sky line it came from
    # (matched by accepted wavelength within 0.1)
    for i, exp in enumerate(wave_exp):
        found = False
        for order in reduced.orders:
            for line in order.lines:
                if abs(line.waveAccepted - exp) <= 0.1:
                    line.frameFitOutlier = False
                    line.frameWaveFit = wave_fit[i]
                    found = True
                    break
            if found:
                break
        if not found:
            logger.error(
                'cannot find line for expected wavelength ' +
                str(exp))

    # for each line used in the fit, compute slope of wavelength solution at
    # that point
    for order in reduced.orders:
        for line in order.lines:
            if line.frameFitOutlier is False:
                line.frameFitSlope = (
                    reduced.frameCalCoeffs[1] +
                    (2.0 * reduced.frameCalCoeffs[2] * line.col) +
                    (reduced.frameCalCoeffs[4] / order.flatOrder.orderNum) +
                    (2.0 * reduced.frameCalCoeffs[5] * line.col /
                     order.flatOrder.orderNum))
    return True
def apply_wavelength_soln(reduced):
    """Apply the frame-level wavelength solution to every reduced order.

    Evaluates the two-dimensional polynomial fit in (column, 1/order number)
    on columns 0-1023 for each order.  If the resulting wavelength scale is
    monotonic it becomes the order's wavelength scale; otherwise the order
    falls back to the theoretical grating equation scale.
    """
    coeffs = reduced.frameCalCoeffs
    if coeffs is None:
        return

    cols = np.arange(1024)
    for order in reduced.orders:
        inv_order = 1.0 / order.flatOrder.orderNum
        order.frameCalWaveScale = np.ravel(
            coeffs[0] +
            coeffs[1] * cols +
            coeffs[2] * cols ** 2 +
            coeffs[3] * inv_order +
            coeffs[4] * cols * inv_order +
            coeffs[5] * (cols ** 2) * inv_order)

        steps = np.diff(order.frameCalWaveScale)
        if np.all(steps <= 0) or np.all(steps >= 0):
            # wavelength scale is monotonic
            order.waveScale = order.frameCalWaveScale
            order.calMethod = 'frame sky line cal'
        else:
            logger.warning('wavelength scale not monotonic for order {}'.format(
                order.flatOrder.orderNum))
            logger.warning('using theoretical wavelength scale ')
            order.waveScale = order.flatOrder.gratingEqWaveScale
            order.calMethod = 'grating equation'
    return
def init(baseName, out_dir):
    """Set up the per-object logger and ensure the output directory exists.

    Args:
        baseName: base name of the object frame, used to name the log file.
        out_dir: data product root directory; created if it does not exist.
    """
    # if output directory does not exist try to create it.
    # bug fix: this must happen BEFORE the FileHandler is created below,
    # otherwise opening the log file inside a non-existent out_dir fails
    if not os.path.exists(out_dir):
        try:
            os.mkdir(out_dir)
        except BaseException:
            logger.critical('output directory cannot be created')

    # set up obj logger
    logger.handlers = []
    if config.params['debug']:
        logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(asctime)s ' +
                                      '%(levelname)s - %(filename)s:%(lineno)s - %(message)s')
    else:
        logger.setLevel(logging.INFO)
        formatter = logging.Formatter(
            '%(asctime)s %(levelname)s - %(message)s')

    if config.params['cmnd_line_mode'] is True:
        fn = out_dir + '/nsdrp.log'
    else:
        fn = out_dir + '/' + baseName + '.log'

    # without per-object subdirectories, log files go in a 'log' subdirectory
    if config.params['subdirs'] is False and config.params['cmnd_line_mode'] is False:
        parts = fn.split('/')
        parts.insert(len(parts) - 1, 'log')
        fn = '/'.join(parts)

    # preserve the previous log for this object, if any
    if os.path.exists(fn):
        os.rename(fn, fn + '.prev')

    fh = logging.FileHandler(filename=fn)
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)
    logger.addHandler(fh)

    # optional console logging
    if config.params['verbose'] is True:
        if config.params['debug']:
            sformatter = logging.Formatter(
                '%(asctime)s %(levelname)s - %(filename)s:%(lineno)s - %(message)s')
        else:
            sformatter = logging.Formatter(
                '%(asctime)s %(levelname)s - %(message)s')
        sh = logging.StreamHandler()
        sh.setLevel(logging.DEBUG)
        sh.setFormatter(sformatter)
        logger.addHandler(sh)
    return
def log_start_summary(reduced):
    """Write a summary of the observation to the per-object (and main) logs."""
    # TODO: structure and format this like nsdrp_cmnd.write_summary()
    logger.info('nsdrp version {}'.format(nsdrp.VERSION))

    targets = ['obj']
    if config.params['cmnd_line_mode'] is False:
        targets.append('main')

    # base name is the same for every logger, so resolve it once
    if reduced.isPair:
        baseName = reduced.baseNames['AB']
    else:
        baseName = reduced.baseNames['A']

    for name in targets:
        log = logging.getLogger(name)
        log.log(INFO, 'starting reduction of ' + baseName)
        log.log(INFO, 'time of observation = ' +
                str(reduced.getDate()) + ' ' + str(reduced.getTime()) + ' UT')
        log.log(INFO, 'target name = ' + str(reduced.getTargetName()))
        log.log(INFO, 'filter name = ' + str(reduced.getFullFilterName()))
        log.log(INFO, 'slit name = ' + str(reduced.getSlit()))
        log.log(INFO, 'integration time = ' + str(reduced.getITime()) + ' sec')
        log.log(INFO, 'n coadds = ' + str(reduced.getNCoadds()))

    # instrument geometry goes to the per-object log only
    logger.info('echelle angle = ' + str(reduced.getEchPos()) + ' deg')
    logger.info('cross disperser angle = ' +
                str(reduced.getDispPos()) + ' deg')
    return
def process_darks(raw, reduced):
    """Combine any dark frames and subtract the result from object and flat."""
    # TODO if there are < 3 darks should cosmic clean
    nDarks = len(raw.darkFns)
    if nDarks > 0:
        reduced.hasDark = True
        shortNames = [s[s.find("NS"):s.rfind(".")] for s in raw.darkFns]
        logger.info(str(nDarks) + ' darks: ' +
                    ', '.join(str(x) for x in shortNames))
        reduced.dark = raw.combineDarks()
        if nDarks > 1:
            logger.info(str(nDarks) +
                        ' darks have been median combined')
    else:
        logger.info('no darks')

    # if dark(s) exist, subtract from obj and flat
    if reduced.hasDark:
        reduced.subtractDark()
        logger.info('dark subtracted from obj and flat')
    return