diff --git a/tessreduce/catalog_tools.py b/tessreduce/catalog_tools.py
index a56356a..7915350 100755
--- a/tessreduce/catalog_tools.py
+++ b/tessreduce/catalog_tools.py
@@ -31,10 +31,8 @@ def Get_Catalogue(tpf, Catalog = 'gaia'):
 		Gmag array
 			Gmags of sources
 	"""
 	c1 = SkyCoord(tpf.ra, tpf.dec, frame='icrs', unit='deg')
-	# Use pixel scale for query size
-	pix_scale = 4.0 # arcseconds / pixel for Kepler, default
-	if tpf.mission == 'TESS':
-		pix_scale = 21.0
+	# Use pixel scale for query size
+	pix_scale = 21.0 # We are querying with a diameter as the radius, overfilling by 2x.
 	from astroquery.vizier import Vizier
 	Vizier.ROW_LIMIT = -1
@@ -50,10 +48,10 @@ def Get_Catalogue(tpf, Catalog = 'gaia'):
 		raise ValueError("{} not recognised as a catalog. Available options: 'gaia', 'dist','ps1'")
 	if Catalog == 'gaia':
 		result = Vizier.query_region(c1, catalog=[catalog],
-									 radius=Angle(1890, "arcsec"),column_filters={'Gmag':'<19'})
+									 radius=Angle(np.max(tpf.shape[1:]) * pix_scale + 60, "arcsec"),column_filters={'Gmag':'<19'})
 	else:
 		result = Vizier.query_region(c1, catalog=[catalog],
-									 radius=Angle(np.max(tpf.shape[1:]) * pix_scale, "arcsec"))
+									 radius=Angle(np.max(tpf.shape[1:]) * pix_scale + 60, "arcsec"))
 
 	no_targets_found_message = ValueError('Either no sources were found in the query region '
 										  'or Vizier is unavailable')
diff --git a/tessreduce/helpers.py b/tessreduce/helpers.py
new file mode 100644
index 0000000..621a6fe
--- /dev/null
+++ b/tessreduce/helpers.py
@@ -0,0 +1,656 @@
+import os
+import pandas as pd
+import matplotlib.pyplot as plt
+import numpy as np
+
+from photutils.detection import StarFinder
+
+from copy import deepcopy
+from scipy.ndimage import shift
+from scipy.ndimage import gaussian_filter
+from skimage.restoration import inpaint
+
+from scipy.signal import savgol_filter
+from scipy.optimize import minimize # used by difference_shifts(); was missing -> NameError
+
+
+from scipy.interpolate import interp1d
+from scipy.interpolate import griddata
+
+from astropy.stats import sigma_clipped_stats
+from astropy.stats import sigma_clip
+
+from tess_stars2px import tess_stars2px_function_entry as focal_plane
+from tabulate import tabulate
+
+package_directory = os.path.dirname(os.path.abspath(__file__)) + '/'
+
+from astropy.coordinates import SkyCoord
+from astropy import units as u
+from astropy.time import Time
+
+import requests
+import json
+
+def strip_units(data):
+	if type(data) != np.ndarray:
+		data = data.value
+	return data
+
+def sigma_mask(data,sigma=3):
+	"""
+	Just does a sigma clip on an array.
+
+	Parameters
+	----------
+	data : array
+		A single image
+
+	sigma : float
+		sigma used in the sigma clipping
+
+	Returns
+	-------
+	clipped : array
+		A boolean array to mask the original array
+	"""
+	clipped = ~sigma_clip(data,sigma=sigma).mask
+	return clipped
+
+def Source_mask(Data, grid=0):
+	"""
+	Makes a mask of sources in the image using conditioning on percentiles.
+	The grid option breakes the image up into sections the size of grid to
+	do a basic median subtraction of the background. This is useful for
+	large fovs where the background has a lot of structure.
+
+	Parameters
+	----------
+	data : array
+		A single image
+
+	grid : int
+		size of the averaging square used to do a median background subtraction
+		before finding the sources.
+
+	Returns
+	-------
+	mask : array
+		Boolean mask array for the sources in the image
+	"""
+	data = deepcopy(Data)
+	# catch for if there are no pixels that escape the mask
+	if np.nansum(np.isfinite(data)) > 10:
+		if grid > 0:
+			data[data<0] = 0
+			data[data >= np.nanpercentile(data,95)] = np.nan
+			size = grid # save the integer grid size BEFORE grid is reused as an array
+			grid = np.zeros_like(data)
+			for i in range(grid.shape[0]//size):
+				for j in range(grid.shape[1]//size):
+					section = data[i*size:(i+1)*size,j*size:(j+1)*size]
+					section = section[np.isfinite(section)]
+					lim = np.nanpercentile(section,1)
+					grid[i*size:(i+1)*size,j*size:(j+1)*size] = lim
+			thing = data - grid
+		else:
+			thing = data
+		ind = np.isfinite(thing)
+		mask = ((thing <= np.percentile(thing[ind],95,axis=0)) &
+				(thing <= np.percentile(thing[ind],10))) * 1.
+	else:
+		mask = np.zeros_like(data)
+
+	return mask
+
+def unknown_mask(image):
+	mask = np.zeros_like(image)
+	for i in range(image.shape[1]):
+		d = image.copy()
+		m = np.array([])
+		masked = image.copy()
+		x = np.arange(image.shape[0])
+		y = d * 1.
+
+		y[y==0] = np.nan
+		g = np.gradient(y)
+
+		m = np.append(m,sigma_clip(g,sigma=3).mask)
+
+		masked[m>0] = np.nan
+		for k in range(5):
+			nonan = np.isfinite(masked)
+			filled = interp1d(x[nonan],masked[nonan],bounds_error=False,fill_value='extrapolate',kind='linear')
+			filled = filled(x)
+
+			sav = savgol_filter(filled,image.shape[1]//2+1,2)
+			dif = masked-sav
+			m2 = sigma_clip(dif,sigma=3).mask
+			mm = np.zeros(len(masked))
+			mm[nonan] = 1
+			mask[:,i][m2] = 1
+	return mask
+
+
+def _parallel_bkg3(data,mask):
+	data[mask] = np.nan
+	estimate = inpaint.inpaint_biharmonic(data,mask)
+	return estimate
+
+def Smooth_bkg(data, gauss_smooth=2, interpolate=False, extrapolate=True):
+	"""
+	Interpolate over the masked objects to derive a background estimate.
+
+	Parameters
+	----------
+	data : array
+		A single image
+
+	extrapolate: Bool
+		switch for using extrapolation in the background
+
+	Returns
+	-------
+	estimate : array
+		an estimate of the smooth background in the TESS image
+
+	bitmask : array
+		an array indicating where extrapolation was used
+
+	"""
+	#data[data == 0] = np.nan
+	if (~np.isnan(data)).any():
+		x = np.arange(0, data.shape[1])
+		y = np.arange(0, data.shape[0])
+		arr = np.ma.masked_invalid(deepcopy(data))
+		xx, yy = np.meshgrid(x, y)
+		#get only the valid values
+		x1 = xx[~arr.mask]
+		y1 = yy[~arr.mask]
+		newarr = arr[~arr.mask]
+		if (len(x1) > 10) & (len(y1) > 10):
+			if interpolate:
+				estimate = griddata((x1, y1), newarr.ravel(),
+									(xx, yy),method='linear')
+				nearest = griddata((x1, y1), newarr.ravel(),
+									(xx, yy),method='nearest')
+				if extrapolate:
+					estimate[np.isnan(estimate)] = nearest[np.isnan(estimate)]
+
+				estimate = gaussian_filter(estimate,gauss_smooth)
+
+				#estimate = median_filter(estimate,5)
+			else:
+				# try inpaint stuff
+				mask = deepcopy(arr.mask)
+				mask = mask.astype(bool)
+				# end inpaint
+				estimate = inpaint.inpaint_biharmonic(data,mask)
+				#estimate = signal.fftconvolve(estimate,self.prf,mode='same')
+				estimate = gaussian_filter(estimate,gauss_smooth)
+		else:
+			estimate = np.zeros_like(data) * np.nan
+	else:
+		estimate = np.zeros_like(data) #* np.nan
+
+	return estimate
+
+def Calculate_shifts(data,mx,my,finder):
+	"""
+	Calculate the offsets of sources identified by photutils from a reference
+
+	Parameters
+	----------
+	data : array
+		a single frame from the tpf
+
+	mx : array
+		mean row positions for the centroids from the reference image
+
+	my : array
+		mean col positions for the centroids from the reference image
+
+	finder : StarFinder
+		module to find the centroid positions
+
+	Returns
+	-------
+	shifts : array
+		row and col shift to match the data centroids to the reference image
+
+	"""
+	shifts = np.zeros((2,len(mx))) * np.nan
+	if np.nansum(data) > 0:
+		mean, med, std = sigma_clipped_stats(data, sigma=3.0)
+		try:
+			#s = daofind(data - med)
+			s = finder.find_stars(deepcopy(data)-med)
+		except:
+			s = None
+			print('bad frame')
+		if type(s) != type(None):
+			x = s['xcentroid']
+			y = s['ycentroid']
+			dist = np.zeros((len(mx),len(x)))
+			dist = dist + np.sqrt((x[np.newaxis,:] - mx[:,np.newaxis])**2 +
+					(y[np.newaxis,:] - my[:,np.newaxis])**2)
+			ind = np.argmin(dist,axis=1)
+			indo = (np.nanmin(dist,axis=1) < 1) # per-source match test; without axis=1 this collapses to a scalar
+			ind = ind[indo]
+			shifts[1,indo] = mx[indo] - x[ind]
+			shifts[0,indo] = my[indo] - y[ind]
+		else:
+			shifts[0,:] = np.nan # indo is not defined on this branch, so index the full row
+			shifts[1,:] = np.nan
+	return shifts
+
+def image_sub(theta, image, ref):
+	dx, dy = theta
+	s = shift(image,([dx,dy]),order=5)
+	#translation = np.float64([[1,0,dx],[0,1, dy]])
+	#s = cv2.warpAffine(image, translation, image.shape[::-1], flags=cv2.INTER_CUBIC,borderValue=0)
+	diff = (ref-s)**2
+	return np.nansum(diff[20:-20,20:-20])
+
+def difference_shifts(image,ref):
+	"""
+	Find the sub-pixel (row, col) shift that best aligns an image to a
+	reference frame by minimising the summed squared difference.
+
+	Parameters
+	----------
+	image : array
+		a single frame from the tpf
+
+	ref : array
+		reference image to align to
+
+	Notes
+	-----
+	Uses Powell minimisation on image_sub; the bounds variable is kept
+	for reference but is not passed to the optimiser.
+
+	Returns
+	-------
+	s : array
+		row and col shift to match the image to the reference image;
+		nan if the image is empty
+	"""
+	if np.nansum(abs(image)) > 0:
+		x0= [0,0]
+		bds = [(-2,2),(-2,2)]
+		res = minimize(image_sub,x0,args=(image,ref),method = 'Powell')#,bounds= bds)
+		s = res.x
+	else:
+		s = np.zeros((2)) * np.nan
+	return s
+
+def Smooth_motion(Centroids,tpf):
+	"""
+	Calculate the smoothed centroid shift
+
+	Parameters
+	----------
+	Centroids : array
+		centroid shifts from all frames
+
+
+	TPF : lightkurve targetpixelfile
+		tpf
+
+	Returns
+	-------
+	smoothed : array
+		smoothed displacement of the centroids
+
+	"""
+	smoothed = np.zeros_like(Centroids) * np.nan
+	try:
+		try:
+			split = np.where(np.diff(tpf.time.mjd) > 0.5)[0][0] + 1
+			# ugly, but who cares
+			ind1 = np.nansum(tpf.flux[:split],axis=(1,2))
+			ind1 = np.where(ind1 != 0)[0]
+			ind2 = np.nansum(tpf.flux[split:],axis=(1,2))
+			ind2 = np.where(ind2 != 0)[0] + split
+			smoothed[ind1,0] = savgol_filter(Centroids[ind1,0],25,3)
+			smoothed[ind2,0] = savgol_filter(Centroids[ind2,0],25,3)
+
+			smoothed[ind1,1] = savgol_filter(Centroids[ind1,1],25,3)
+			smoothed[ind2,1] = savgol_filter(Centroids[ind2,1],25,3)
+		except:
+			split = np.where(np.diff(tpf.time.mjd) > 0.5)[0][0] + 1
+			# ugly, but who cares
+			ind1 = np.nansum(tpf.flux[:split],axis=(1,2))
+			ind1 = np.where(ind1 != 0)[0]
+			ind2 = np.nansum(tpf.flux[split:],axis=(1,2))
+			ind2 = np.where(ind2 != 0)[0] + split
+			smoothed[ind1,0] = savgol_filter(Centroids[ind1,0],11,3)
+			smoothed[ind2,0] = savgol_filter(Centroids[ind2,0],11,3)
+
+			smoothed[ind1,1] = savgol_filter(Centroids[ind1,1],11,3)
+			smoothed[ind2,1] = savgol_filter(Centroids[ind2,1],11,3)
+
+	except IndexError:
+		smoothed[:,0] = savgol_filter(Centroids[:,0],25,3)
+		smoothed[:,1] = savgol_filter(Centroids[:,1],25,3)
+	return smoothed
+
+
+def smooth_zp(zp,time):
+	"""
+	Calculate the smoothed centroid shift
+
+	Parameters
+	----------
+	zp : array
+		centroid shifts from all frames
+
+
+	time : lightkurve targetpixelfile
+		tpf
+
+	Returns
+	-------
+	smoothed : array
+		smoothed displacement of the centroids
+
+	"""
+	smoothed = np.zeros_like(zp) * np.nan
+	plt.figure()
+	plt.plot(time,zp,'.')
+	try:
+		split = np.where(np.diff(time) > 0.5)[0][0] + 1
+		# ugly, but who cares
+		ind1 = np.isfinite(zp[:split])
+		ind2 = np.isfinite(zp[split:]) + split
+
+		smoothed[ind1] = savgol_filter(zp[ind1],15,3)
+		smoothed[ind2] = savgol_filter(zp[ind2],15,3)
+
+		smoothed[ind1] = savgol_filter(zp[ind1],15,3)
+		smoothed[ind2] = savgol_filter(zp[ind2],15,3)
+	except IndexError:
+		smoothed[:] = savgol_filter(zp[:],15,3)
+		smoothed[:] = savgol_filter(zp[:],15,3)
+	err = np.nanstd(zp - smoothed)
+
+	return smoothed, err
+
+
+
+def grads_rad(flux):
+	rad = np.sqrt(np.gradient(flux)**2+np.gradient(np.gradient(flux))**2)
+	return rad
+
+def grad_flux_rad(flux):
+	rad = np.sqrt(flux**2+np.gradient(flux)**2)
+	return rad
+
+
+def sn_lookup(name,buffer=10,print_table=True):
+	"""
+	Check for overlapping TESS observations for a transient. Uses TNS for
+	discovery time and coordinates.
+
+	------
+	Inputs
+	------
+	name : str
+		catalog name
+	time : str
+		reference time to use, can be either disc, or max
+	buffer : float
+		overlap buffer time in days
+
+	-------
+	Options
+	-------
+	print_table : bool
+		if true then the lookup table is printed
+
+	-------
+	Returns
+	-------
+	tr_list : list
+		list of ra, dec, and sector that can be put into tessreduce.
+	"""
+	url = f'https://www.wis-tns.org/object/{name[name.index("2"):]}' # hard coding in that the event is in the 2000s
+	headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}
+	result = requests.get(url, headers=headers)
+	if result.ok:
+		ra, dec = result.text.split('