Create new release candidate 351rc1 (#1492)
* Reset CRDER units (#1485)

* Report the correct value with the correct units for CRDER keywords

* Add comment to CRDER keywords

* Update WFPC2 RT to work with new DRZ name (#1487)

* Avoid running runastrodriz on calibration data (#1488)

* Fix logic in astroquery_utils to properly support regression testing (#1486)

* Handle empty images more gracefully (#1490)

* Update to use photutils catalog wcs correctly (#1489)

* Let photeq fail without crashing pipeline (#1491)

Co-authored-by: Warren J. Hack <[email protected]>
mdlpstsci and stsci-hack authored Jan 19, 2023
1 parent 5fcff69 commit b637827
Showing 8 changed files with 171 additions and 83 deletions.
93 changes: 69 additions & 24 deletions drizzlepac/align.py
@@ -80,7 +80,6 @@ def check_and_get_data(input_list: list, **pars: object) -> list:
candidate_list = [] # File names gathered from *_asn.fits file
ipppssoot_list = [] # ipppssoot names used to avoid duplicate downloads
total_input_list = [] # Output full filename list of data on disk
member_suffix = '_flc.fits'

# Loop over the input_list to determine if the item in the input_list is a full association file
# (*_asn.fits), a full individual image file (aka singleton, *_flt.fits), or a root name specification
@@ -99,24 +98,7 @@ def check_and_get_data(input_list: list, **pars: object) -> list:
# in this manner (vs just the ipppssoot of the association).
# This "if" block just collects the wanted full file names.
if suffix == 'asn':
try:
asntab = Table.read(input_item, format='fits')
except FileNotFoundError:
log.error('File {} not found.'.format(input_item))
return(empty_list)
for row in asntab:
if row['MEMTYPE'].startswith('PROD'):
continue
memname = row['MEMNAME'].lower().strip()
# Need to check if the MEMNAME is a full filename or an ipppssoot
if memname.find('_') != -1:
candidate_list.append(memname)
else:
# Define suffix for all members based on what files are present
if not os.path.exists(memname + member_suffix):
member_suffix = '_flt.fits'

candidate_list.append(memname + member_suffix)
candidate_list.extend(_get_asn_members(input_item))
elif suffix in ['flc', 'flt', 'c0m']:
if lc_input_item not in candidate_list:
candidate_list.append(lc_input_item)
@@ -132,6 +114,7 @@ def check_and_get_data(input_list: list, **pars: object) -> list:
elif len(input_item) == 9:
try:
if input_item not in ipppssoot_list:
input_item = input_item.lower()
# An ipppssoot of an individual file which is part of an association cannot be
# retrieved from MAST
retrieve_list = aqutils.retrieve_observation(input_item, **pars)
@@ -144,8 +127,28 @@ def check_and_get_data(input_list: list, **pars: object) -> list:
total_input_list += retrieve_list
ipppssoot_list.append(input_item)
else:
log.error('File {} cannot be retrieved from MAST.'.format(input_item))
return(empty_list)
# log.error('File {} cannot be retrieved from MAST.'.format(input_item))
# return(empty_list)
log.warn('File {} cannot be retrieved from MAST.'.format(input_item))
log.warn(f" using pars: {pars}")
# look for already downloaded ASN and related files instead
# ASN filenames are the only ones that end in a digit
if input_item[-1].isdigit():
_asn_name = f"{input_item}_asn.fits"
if not os.path.exists(_asn_name):
_ = aqutils.retrieve_observation([f"{input_item}"],
suffix=['ASN'],
clobber=True)
_local_files = _get_asn_members(_asn_name)
if _local_files:
log.warn(f"Using local files instead:\n {_local_files}")
total_input_list.extend(_local_files)
else:
_lfiles = os.listdir()
log.error(f"No suitable files found for input {input_item}")
log.error(f" in directory with files: \n {_lfiles}")
return(total_input_list)

except Exception:
exc_type, exc_value, exc_tb = sys.exc_info()
traceback.print_exception(exc_type, exc_value, exc_tb, file=sys.stdout)
@@ -162,7 +165,38 @@ def check_and_get_data(input_list: list, **pars: object) -> list:
return(empty_list)

log.info("TOTAL INPUT LIST: {}".format(total_input_list))
return(total_input_list)
return total_input_list

# ----------------------------------------------------------------------------------------------------------


def _get_asn_members(asnfile):

# default ASN member type
member_suffix = '_flc.fits'

candidate_list = []
try:
asntab = Table.read(asnfile, format='fits')
except FileNotFoundError:
log.error('File {} not found.'.format(asnfile))
return ([])
for row in asntab:
if row['MEMTYPE'].startswith('PROD'):
continue
memname = row['MEMNAME'].lower().strip()
# Need to check if the MEMNAME is a full filename or an ipppssoot
if memname.find('_') != -1:
candidate_list.append(memname)
else:
# Define suffix for all members based on what files are present
if not os.path.exists(memname + member_suffix):
member_suffix = '_flt.fits'

candidate_list.append(memname + member_suffix)

return candidate_list



# ------------------------------------------------------------------------------------------------------------
@@ -292,6 +326,7 @@ def perform_align(input_list, catalog_list, num_sources, archive=False, clobber=
log.info(str(starting_dt))
imglist = check_and_get_data(input_list, archive=archive, clobber=clobber, product_type=product_type)
log.info("SUCCESS")
log.info(f"Processing: {imglist}")

log.info(make_label('Processing time of [STEP 1]', starting_dt))
starting_dt = datetime.datetime.now()
@@ -1063,6 +1098,13 @@ def generate_astrometric_catalog(imglist, **pars):
imglist : list
List of one or more calibrated fits images that will be used for catalog generation.
output : str, optional
If specified as part of the input pars dict, it provides the name of the output catalog file.
overwrite : bool, optional
If specified as part of the input pars dict, it specifies whether or not to overwrite any
catalog file already present with the same path/filename as specified in `output`.
Returns
=======
ref_table : object
@@ -1075,12 +1117,15 @@ def generate_astrometric_catalog(imglist, **pars):
pars['output'] = 'ref_cat.ecsv'
else:
pars['output'] = None

overwrite = pars.get('clobber', True)

out_catalog = amutils.create_astrometric_catalog(imglist, **pars)
pars = temp_pars.copy()
# if the catalog has contents, write the catalog to ascii text file
if len(out_catalog) > 0 and pars['output']:
catalog_filename = "refcatalog.cat"
out_catalog.write(catalog_filename, format="ascii.fast_commented_header")
out_catalog.write(catalog_filename, format="ascii.fast_commented_header", overwrite=overwrite)
log.info("Wrote reference catalog {}.".format(catalog_filename))

return(out_catalog)
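
For context on the overwrite change above, a standalone sketch (not part of the commit; the one-row table is made up) of the failure mode being avoided: astropy refuses to replace an existing file unless overwrite=True is passed, so rerunning alignment in a directory that already holds refcatalog.cat used to raise OSError at this write.

from astropy.table import Table

cat = Table({'RA': [10.684], 'DEC': [41.269]})   # hypothetical reference sources
# Without overwrite=True, a second run with refcatalog.cat already on disk raises OSError.
cat.write("refcatalog.cat", format="ascii.fast_commented_header", overwrite=True)
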
@@ -1102,4 +1147,4 @@ def get_default_pars(instrument, detector, step='alignment',
apars = par_class(full_cfg_index[step], condition, hap_pipeline_name,
pars_dir, step, True, None)

return apars.outpars
return apars.outpars
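
Taken together, the align.py changes above amount to a more forgiving entry point. A hedged usage sketch (not part of the commit; the rootname and filename are hypothetical): when MAST cannot serve an association rootname, check_and_get_data now falls back to a locally available *_asn.fits table and its member exposures instead of returning an empty list.

from drizzlepac import align

# 'idxy01010' stands in for an association rootname, 'idxy01abq_flt.fits' for a singleton.
input_list = ['idxy01010', 'idxy01abq_flt.fits']
files = align.check_and_get_data(input_list, archive=False, clobber=False)
print(files)   # full FLT/FLC member list found on disk or retrieved from MAST
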
14 changes: 10 additions & 4 deletions drizzlepac/haputils/align_utils.py
@@ -1320,12 +1320,18 @@ def update_image_wcs_info(tweakwcs_output, headerlet_filenames=None, fit_label=N
updatehdr.update_wcs(hdulist, sci_extn, item.wcs, wcsname=wcs_name, reusename=True)
info = item.meta['fit_info']
if info['catalog'] and info['catalog'] != '':
rms_ra_val = info['RMS_RA'].value if info['RMS_RA'] is not None else -1.0
rms_dec_val = info['RMS_DEC'].value if info['RMS_DEC'] is not None else -1.0
# Explicitly report the RMS values in units of mas.
rms_ra_val = info['RMS_RA'].mas if info['RMS_RA'] is not None else -1.0
rms_dec_val = info['RMS_DEC'].mas if info['RMS_DEC'] is not None else -1.0
hdulist[sci_extn].header['RMS_RA'] = (rms_ra_val, RMS_RA_COMMENT)
hdulist[sci_extn].header['RMS_DEC'] = (rms_dec_val, RMS_DEC_COMMENT)
hdulist[sci_extn].header['CRDER1'] = info['RMS_RA'].value/3600. if info['RMS_RA'] is not None else -1.0
hdulist[sci_extn].header['CRDER2'] = info['RMS_DEC'].value/3600. if info['RMS_DEC'] is not None else -1.0
# convert RMS values from units of mas to deg in order to be consistent
# with CUNIT keyword value, as per FITS Paper I standard.
# https://www.aanda.org/articles/aa/full/2002/45/aah3859/aah3859.right.html Section 2.6
cr1_comment = RMS_RA_COMMENT.replace('mas', 'deg')
cr2_comment = RMS_DEC_COMMENT.replace('mas', 'deg')
hdulist[sci_extn].header['CRDER1'] = (info['RMS_RA'].deg, cr1_comment) if info['RMS_RA'] is not None else -1.0
hdulist[sci_extn].header['CRDER2'] = (info['RMS_DEC'].deg, cr2_comment) if info['RMS_DEC'] is not None else -1.0
hdulist[sci_extn].header['NMATCHES'] = len(info['ref_mag']) if info['ref_mag'] is not None else 0
hdulist[sci_extn].header['FITGEOM'] = info['fitgeom'] if info['fitgeom'] is not None else 'N/A'
else:
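
The unit handling behind the CRDER fix, as a standalone sketch (not part of the commit; the 8.3 mas value is made up): the fit RMS is carried as an angular Quantity, reported in mas for RMS_RA/RMS_DEC but converted to degrees for CRDER1/CRDER2 so the error keywords share the units declared by CUNIT1/CUNIT2.

from astropy import units as u

rms_ra = 8.3 * u.mas                  # hypothetical fit RMS along RA
print(rms_ra.to_value(u.mas))         # 8.3          -> RMS_RA keyword (mas)
print(rms_ra.to_value(u.deg))         # ~2.306e-06   -> CRDER1 keyword (deg, matching CUNIT1)
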
5 changes: 3 additions & 2 deletions drizzlepac/haputils/analyze.py
@@ -617,9 +617,10 @@ def verify_guiding(filename, min_length=33):
# Trying to ignore small sources (<= 4x4 pixels in size, or npixels < 17)
# which are either noise peaks or head-on CRs.
segm = detect_sources(imgarr, 0, npixels=17)
log.debug(f'Detected {segm.nlabels} raw sources in {filename}')
if segm.nlabels < 2:
if segm is None or segm.nlabels < 2:
log.debug(f'Did NOT detect enough raw sources in {filename} for guiding verification.')
return False
log.debug(f'Detected {segm.nlabels} raw sources in {filename} for guiding verification.')

src_cat = SourceCatalog(imgarr, segm)
# Remove likely cosmic-rays based on central_moments classification
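
A standalone sketch of the guard added in verify_guiding (not part of the commit; the all-zero image is synthetic): photutils' detect_sources returns None when nothing is detected, so nlabels may only be read after the None check.

import numpy as np
from photutils.segmentation import detect_sources

imgarr = np.zeros((64, 64))                       # empty image: nothing to detect
segm = detect_sources(imgarr, threshold=0, npixels=17)
if segm is None or segm.nlabels < 2:
    print("Not enough raw sources for guiding verification.")
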
14 changes: 14 additions & 0 deletions drizzlepac/haputils/astroquery_utils.py
@@ -107,6 +107,7 @@ def retrieve_observation(obsid, suffix=['FLC'], archive=False, clobber=False,
log.info(
"WARNING: No FLC or FLT files found for {}.".format(obsid))
return local_files

all_images = data_products_by_id['productFilename'].tolist()
log.info(all_images)
if not clobber:
@@ -118,9 +119,22 @@ def retrieve_observation(obsid, suffix=['FLC'], archive=False, clobber=False,
rows_to_remove.append(row_idx)
data_products_by_id.remove_rows(rows_to_remove)

# Protect against cases where all requested observations are already
# present on local disk and clobber was turned off, so there are no
# files to be downloaded.
if len(data_products_by_id) == 0:
log.warn("No new files identified to be retrieved.")
return local_files

manifest = Observations.download_products(data_products_by_id,
mrp_only=False)

# Protect against any other problems with finding files to retrieve based on the
# input file specification.
if not manifest:
log.warn(f"File {data_products_by_id} could not be retrieved. No files returned.")
return local_files

if not clobber:
for rownum in rows_to_remove[::-1]:
if manifest:
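
As a hedged usage sketch (not part of the commit; the ipppssoot is hypothetical), the two guards above mean retrieve_observation returns whatever is already available locally rather than handing an empty product table to Observations.download_products or dereferencing a missing manifest.

from drizzlepac.haputils import astroquery_utils as aqutils

# With clobber=False and the FLC files already on disk, this now logs a warning
# and returns the local copies instead of attempting an empty download.
files = aqutils.retrieve_observation('idxy01010', suffix=['FLC'], clobber=False)
print(files)
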
9 changes: 6 additions & 3 deletions drizzlepac/haputils/catalog_utils.py
@@ -2937,7 +2937,10 @@ def enforce_icrs_compatibility(catalog):
# header keyword REFFRAME can be populated with anything specified by the user in the original
# proposal.
"""
if catalog._wcs.wcs.radesys.upper() not in RADESYS_OPTIONS:
catalog._wcs.wcs.radesys = 'ICRS'
log.warning(f"Assuming input coordinates are ICRS, instead of {catalog._wcs.wcs.radesys}")
# We need to check whether the catalog was generated with photutils<1.6.0,
# since photutils 1.6.0 (apparently) renamed 'catalog._wcs' to 'catalog.wcs'.
cat_wcs = catalog.wcs if hasattr(catalog, 'wcs') else catalog._wcs
if cat_wcs.wcs.radesys.upper() not in RADESYS_OPTIONS:
cat_wcs.wcs.radesys = 'ICRS'
log.warning(f"Assuming input coordinates are ICRS, instead of {cat_wcs.wcs.radesys}")
log.warning(f"Sky coordinates of source objects may not be accurate.")