diff --git a/notebooks/psf_photometry/NIRCam_PSF_Photometry_Example.ipynb b/notebooks/psf_photometry/NIRCam_PSF_Photometry_Example.ipynb index 2afd8b07b..000bd9923 100644 --- a/notebooks/psf_photometry/NIRCam_PSF_Photometry_Example.ipynb +++ b/notebooks/psf_photometry/NIRCam_PSF_Photometry_Example.ipynb @@ -54,109 +54,54 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Data Download Functions" + "## Imports" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "scrolled": true + }, "outputs": [], "source": [ + "import glob as glob\n", "import os\n", "import tarfile\n", - "import urllib.request\n", - "\n", - "# Set environmental variables\n", - "os.environ[\"WEBBPSF_PATH\"] = \"./webbpsf-data/webbpsf-data\"\n", - "os.environ[\"PYSYN_CDBS\"] = \"./grp/redcat/trds/\"\n", - "\n", - "# WEBBPSF Data\n", - "boxlink = 'https://stsci.box.com/shared/static/34o0keicz2iujyilg4uz617va46ks6u9.gz' \n", - "boxfile = './webbpsf-data/webbpsf-data-1.0.0.tar.gz'\n", - "synphot_url = 'http://ssb.stsci.edu/trds/tarfiles/synphot5.tar.gz'\n", - "synphot_file = './synphot5.tar.gz'\n", - "\n", - "webbpsf_folder = './webbpsf-data'\n", - "synphot_folder = './grp'\n", - "\n", - "# Gather webbpsf files\n", - "psfExist = os.path.exists(webbpsf_folder)\n", - "if not psfExist:\n", - " os.makedirs(webbpsf_folder)\n", - " urllib.request.urlretrieve(boxlink, boxfile)\n", - " gzf = tarfile.open(boxfile)\n", - " gzf.extractall(webbpsf_folder)\n", - "\n", - "# Gather synphot files\n", - "synExist = os.path.exists(synphot_folder)\n", - "if not synExist:\n", - " os.makedirs(synphot_folder)\n", - " urllib.request.urlretrieve(synphot_url, synphot_file)\n", - " gzf = tarfile.open(synphot_file)\n", - " gzf.extractall('./')\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Import Functions" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "import sys\n", "import time\n", + "from urllib import request\n", "\n", "import numpy as np\n", - "\n", "import pandas as pd\n", - "\n", - "import glob as glob\n", - "\n", - "import jwst\n", - "from jwst.datamodels import ImageModel\n", - "\n", - "import tarfile\n", - "\n", - "import urllib.request\n", - "\n", - "from astropy import wcs\n", + "import webbpsf\n", "from astropy import units as u\n", + "from astropy.coordinates import SkyCoord, match_coordinates_sky\n", "from astropy.io import fits\n", - "from astropy.visualization import (ZScaleInterval, SqrtStretch, ImageNormalize)\n", - "from astropy.visualization import simple_norm\n", - "from astropy.nddata import Cutout2D, NDData\n", - "from astropy.stats import gaussian_sigma_to_fwhm\n", - "from astropy.table import Table, QTable\n", "from astropy.modeling.fitting import LevMarLSQFitter\n", - "from astropy.wcs.utils import pixel_to_skycoord\n", - "from astropy.coordinates import SkyCoord, match_coordinates_sky\n", + "from astropy.nddata import NDData\n", "from astropy.stats import sigma_clipped_stats\n", - "\n", - "from photutils import CircularAperture, EPSFBuilder, find_peaks, CircularAnnulus\n", - "from photutils.detection import DAOStarFinder, IRAFStarFinder\n", - "from photutils.psf import DAOGroup, IntegratedGaussianPRF, extract_stars, IterativelySubtractedPSFPhotometry\n", - "from photutils.background import MMMBackground, MADStdBackgroundRMS\n", - "from photutils.centroids import centroid_2dg\n", - "from photutils import aperture_photometry\n", - "\n", - "from ipywidgets 
import interact\n", - "\n", - "import webbpsf\n", - "from webbpsf.utils import to_griddedpsfmodel" + "from astropy.table import QTable, Table\n", + "from astropy.visualization import simple_norm\n", + "from jwst.datamodels import ImageModel\n", + "from photutils.aperture import (CircularAnnulus, CircularAperture,\n", + " aperture_photometry)\n", + "from photutils.background import MADStdBackgroundRMS, MMMBackground\n", + "from photutils.detection import DAOStarFinder\n", + "from photutils.psf import (EPSFBuilder, IterativePSFPhotometry, SourceGrouper,\n", + " extract_stars)" ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "execution": { + "iopub.execute_input": "2024-02-22T19:19:46.987188Z", + "iopub.status.busy": "2024-02-22T19:19:46.986921Z", + "iopub.status.idle": "2024-02-22T19:19:46.990183Z", + "shell.execute_reply": "2024-02-22T19:19:46.989446Z", + "shell.execute_reply.started": "2024-02-22T19:19:46.987167Z" + } + }, "source": [ "## Import Plotting Functions" ] @@ -168,8 +113,7 @@ "outputs": [], "source": [ "%matplotlib inline\n", - "from matplotlib import style, pyplot as plt\n", - "import matplotlib.patches as patches\n", + "from matplotlib import pyplot as plt\n", "import matplotlib.ticker as ticker\n", "\n", "plt.rcParams['image.cmap'] = 'viridis'\n", @@ -181,6 +125,47 @@ "font2 = {'family': 'helvetica', 'color': 'black', 'weight': 'normal', 'size': '20'}" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download WebbPSF and Synphot Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Set environmental variables\n", + "os.environ[\"WEBBPSF_PATH\"] = \"./webbpsf-data/webbpsf-data\"\n", + "os.environ[\"PYSYN_CDBS\"] = \"./grp/redcat/trds/\"\n", + "\n", + "# WEBBPSF Data\n", + "boxlink = 'https://stsci.box.com/shared/static/qxpiaxsjwo15ml6m4pkhtk36c9jgj70k.gz'\n", + "boxfile = './webbpsf-data/webbpsf-data-LATEST.tar.gz'\n", + "synphot_url = 'http://ssb.stsci.edu/trds/tarfiles/synphot5.tar.gz'\n", + "synphot_file = './synphot5.tar.gz'\n", + "\n", + "webbpsf_folder = './webbpsf-data'\n", + "synphot_folder = './grp'\n", + "\n", + "# Gather webbpsf files\n", + "if not os.path.exists(webbpsf_folder):\n", + " os.makedirs(webbpsf_folder)\n", + " request.urlretrieve(boxlink, boxfile)\n", + " gzf = tarfile.open(boxfile)\n", + " gzf.extractall(webbpsf_folder)\n", + "\n", + "# Gather synphot files\n", + "if not os.path.exists(synphot_folder):\n", + " os.makedirs(synphot_folder)\n", + " request.urlretrieve(synphot_url, synphot_file)\n", + " gzf = tarfile.open(synphot_file)\n", + " gzf.extractall('./')" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -221,12 +206,11 @@ "filtlist_long = []\n", "\n", "if not glob.glob('./*cal*fits'):\n", - "\n", " print(\"Downloading images\")\n", "\n", " boxlink_images_lev2 = 'https://data.science.stsci.edu/redirect/JWST/jwst-data_analysis_tools/stellar_photometry/images_level2.tar.gz'\n", " boxfile_images_lev2 = './images_level2.tar.gz'\n", - " urllib.request.urlretrieve(boxlink_images_lev2, boxfile_images_lev2)\n", + " request.urlretrieve(boxlink_images_lev2, boxfile_images_lev2)\n", "\n", " tar = tarfile.open(boxfile_images_lev2, 'r')\n", " tar.extractall()\n", @@ -235,12 +219,10 @@ " images = sorted(glob.glob(os.path.join(images_dir, \"*cal.fits\")))\n", "\n", "else:\n", - "\n", " images_dir = './'\n", " images = sorted(glob.glob(os.path.join(images_dir, \"*cal.fits\")))\n", "\n", "for image in images:\n", - "\n", " im = 
fits.open(image)\n", " f = im[0].header['FILTER']\n", " d = im[0].header['DETECTOR']\n", @@ -252,7 +234,7 @@ " else:\n", " d = d\n", "\n", - " wv = np.float(f[1:3])\n", + " wv = float(f[1:3])\n", "\n", " if wv > 24: \n", " ff_long.append(f)\n", @@ -269,9 +251,7 @@ " unique_list_filters_long = []\n", "\n", " for x in ff_short:\n", - "\n", " if x not in unique_list_filters_short:\n", - "\n", " dict_filter_short.setdefault(x, {})\n", "\n", " for x in ff_long:\n", @@ -350,56 +330,7 @@ "source": [ "## Display the images\n", "\n", - "To check that our images do not present artifacts and can be used in the analysis, we display them using an interactive cursor that allows to shuffle through the different images for each filter.\n", - "\n", - "### Note for developers: \n", - "\n", - "this is only a sketch of what I would like to show (I am not very familiar with ipywidgets). Would it be possible to show both filters at the same time, in a 2 window panel as in the static plot below? Or even better, have a widget control that allows to select the filters available and then use interact to cycle through the images? " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# cell for display images using ipywidgets\n", - "\n", - "def browse_images(images):\n", - " n = len(images)\n", - "\n", - " def view_image(image):\n", - " det = 'NRCB1'\n", - " filt = 'F115W'\n", - " im = fits.open(dict_images[det][filt]['images'][image])\n", - "\n", - " data_sb = im[1].data\n", - " norm = simple_norm(data_sb, 'sqrt', percent=99.) \n", - " plt.figure(figsize=(10, 10))\n", - "\n", - " plt.title(filt)\n", - " plt.imshow(data_sb, norm=norm, cmap='Greys') \n", - " plt.show()\n", - "\n", - " interact(view_image, image=(0, n - 1))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "browse_images(dict_images['NRCB1']['F115W']['images'])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Note for developers: \n", - "\n", - "Cell below should be removed once we finalize the interactive one above." + "Check that our images do not present artifacts and can be used in the analysis." 
] }, { @@ -412,7 +343,6 @@ "\n", "for det in dets_short:\n", " for i, filt in enumerate(filts_short):\n", - "\n", " image = fits.open(dict_images[det][filt]['images'][0])\n", " data_sb = image[1].data\n", "\n", @@ -477,33 +407,28 @@ "outputs": [], "source": [ "def create_psf_model(fov=11, create_grid=False, num=9, save_psf=False, detsampled=False):\n", - "\n", " nrc = webbpsf.NIRCam()\n", - "\n", " nrc.detector = det \n", " nrc.filter = filt\n", "\n", " src = webbpsf.specFromSpectralType('G5V', catalog='phoenix')\n", " if detsampled:\n", " print(\"Creating a detector sampled PSF\")\n", - " aa = 'detector sampled'\n", " fov = 21\n", " else:\n", - " print(\"Creating a oversampled PSF\")\n", - " aa = 'oversampled'\n", + " print(\"Creating an oversampled PSF\")\n", " fov = fov\n", "\n", - " print(\"Using a {field}\".format(field=fov), \"px fov\")\n", + " print(f\"Using a {fov} px fov\")\n", "\n", " if create_grid:\n", " print(\"\")\n", - " print(\"Creating a grid of PSF for filter {filt} and detector {det}\".format(filt=filt, det=det))\n", + " print(f\"Creating a grid of PSF for filter {filt} and detector {det}\")\n", " print(\"\")\n", " num = num\n", "\n", " if save_psf:\n", - "\n", - " outname = \"./PSF_%s_samp4_G5V_fov%d_npsfs%d.fits\" % (filt, fov, num)\n", + " outname = f\"./PSF_{filt}_samp4_G5V_fov{fov}_npsfs{num}.fits\"\n", " nrc.psf_grid(num_psfs=num, oversample=4, source=src, all_detectors=False, fov_pixels=fov,\n", " save=True, outfile=outname, use_detsampled_psf=detsampled)\n", " else:\n", @@ -512,11 +437,11 @@ " dict_psfs_webbpsf[det][filt]['psf model grid'] = grid_psf\n", " else:\n", " print(\"\")\n", - " print(\"Creating a single PSF for filter {filt} and detector {det}\".format(filt=filt, det=det))\n", + " print(f\"Creating a single PSF for filter {filt} and detector {det}\")\n", " print(\"\")\n", " num = 1\n", " if save_psf:\n", - " outname = \"./PSF_%s_samp4_G5V_fov%d_npsfs%d.fits\" % (filt, fov, num)\n", + " outname = f\"./PSF_{filt}_samp4_G5V_fov{fov}_npsfs{num}.fits\"\n", " nrc.psf_grid(num_psfs=num, oversample=4, source=src, all_detectors=False, fov_pixels=fov,\n", " save=True, outfile=outname, use_detsampled_psf=detsampled)\n", " else:\n", @@ -600,15 +525,10 @@ ] }, { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Developer Note: Currently, display_psf_grid has a bug which has been fixed, but will not merged into the latest WebbPSF release until after commissioning. We have turned off this cell for the time being." - ] - }, - { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "metadata": {}, + "outputs": [], "source": [ "webbpsf.gridded_library.display_psf_grid(dict_psfs_webbpsf[dets_short[0]][filts_short[0]]['psf model grid'],\n", " zoom_in=False, figsize=(14, 14))" @@ -620,7 +540,7 @@ "source": [ "## II. Create the PSF model building an Effective PSF (ePSF)\n", "\n", - "More information on the PhotUtils Effective PSF can be found [here](https://photutils.readthedocs.io/en/stable/epsf.html).\n", + "More information on the photutils Effective PSF can be found [here](https://photutils.readthedocs.io/en/stable/epsf.html).\n", "\n", "* Select the stars from the images we want to use for building the PSF. We use the [DAOStarFinder](https://photutils.readthedocs.io/en/stable/api/photutils.detection.DAOStarFinder.html) function to find bright stars in the images (setting a high detection threshold). 
[DAOStarFinder](https://photutils.readthedocs.io/en/stable/api/photutils.detection.DAOStarFinder.html#photutils.detection.DAOStarFinder) detects stars in an image using the DAOFIND ([Stetson 1987](https://ui.adsabs.harvard.edu/abs/1987PASP...99..191S/abstract)) algorithm. DAOFIND searches images for local density maxima that have a peak amplitude greater than `threshold` (approximately; threshold is applied to a convolved image) and have a size and shape similar to the defined 2D Gaussian kernel. \\\n", " **Note**: The threshold and the maximum distance to the closest neighbour depend on the user science case (i.e.; number of stars in the field of view, crowding, number of bright sources, minimum number of stars required to build the ePSF, etc.) and must be modified accordingly. \n", @@ -680,14 +600,15 @@ " data_sb = image[1].data\n", " imh = image[1].header\n", "\n", - " print(\"Finding PSF stars on image {number} of filter {f}, detector {d}\".format(number=img_num + 1, f=filt, d=det))\n", + " print(f\"Finding PSF stars on image {img_num + 1} of filter {filt}, detector {det}\")\n", "\n", " data = data_sb / imh['PHOTMJSR']\n", - " print(\"Conversion factor from {units} to DN/s for filter {f}:\".format(units=imh['BUNIT'], f=filt), imh['PHOTMJSR'])\n", + " units = imh['BUNIT']\n", + " print(f\"Conversion factor from {units} to DN/s for filter {filt}: {imh['PHOTMJSR']}\")\n", "\n", " sigma_psf = dict_utils[filt]['psf fwhm']\n", "\n", - " print(\"FWHM for the filter {f}:\".format(f=filt), sigma_psf, \"px\")\n", + " print(f\"FWHM for the filter {filt}: {sigma_psf} px\")\n", "\n", " std = bkgrms(data)\n", " bkg = mmm_bkg(data)\n", @@ -728,12 +649,12 @@ "\n", " print(\"Minimum distance required:\", min_sep[filt_num], \"px\")\n", " print(\"\")\n", - " print(\"Number of isolated sources found in the image used to build ePSF for {f}:\".format(f=filt), len(psf_stars))\n", + " print(f\"Number of isolated sources found in the image used to build ePSF for {filt}: {len(psf_stars)}\")\n", " print(\"-----------------------------------------------------\")\n", " print(\"\")\n", " else:\n", " print(\"\")\n", - " print(\"Number of sources used to build ePSF for {f}:\".format(f=filt), len(psf_stars))\n", + " print(f\"Number of sources used to build ePSF for {filt}: {len(psf_stars)}\")\n", " print(\"--------------------------------------------\")\n", " print(\"\")" ] @@ -802,7 +723,7 @@ " nddata = NDData(data=data_bkgsub)\n", " stars = extract_stars(nddata, stars_tbl, size=sizes[j])\n", "\n", - " print(\"Creating ePSF for image {number} of filter {f}, detector {d}\".format(number=i + 1, f=filt, d=det))\n", + " print(f\"Creating ePSF for image {i + 1} of filter {filt}, detector {det}\")\n", "\n", " epsf_builder = EPSFBuilder(oversampling=oversample, maxiters=3, progress_bar=False)\n", "\n", @@ -898,7 +819,7 @@ " num_grid_calc = num_grid_calc[::-1]\n", "\n", " for num in num_grid_calc:\n", - " print(\"Calculating the number of PSF stars in a %d x %d grid:\" % (num, num))\n", + " print(f\"Calculating the number of PSF stars in a {num} x {num} grid\")\n", " print(\"\")\n", "\n", " image = fits.open(dict_images[det][filt]['images'][i])\n", @@ -914,7 +835,7 @@ "\n", " x = dict_psfs_epsf[det][filt]['table psf stars'][i + 1]['xcentroid']\n", " y = dict_psfs_epsf[det][filt]['table psf stars'][i + 1]['ycentroid']\n", - " flux = dict_psfs_epsf[det][filt]['table psf stars'][i + 1]['flux']\n", + " # flux = dict_psfs_epsf[det][filt]['table psf stars'][i + 1]['flux']\n", "\n", " half_size = (size - 1) / 2\n", "\n", @@ -925,13 
+846,11 @@ "\n", " test = (x > lim1) & (x < lim2) & (y > lim3) & (y < lim4)\n", "\n", - " # if np.count_nonzero(test) < min_numpsf:\n", - " # raise ValueError(\"Not enough PSF stars in all the cells (> %d): Decrease your grid size or the minimum number of PSF stars in each cell or change parameters in the finder\" %(min_numpsf))\n", " if np.count_nonzero(test) < min_numpsf:\n", - " print(\"Center Coordinates of grid cell %d are (%d, %d) --- Not enough PSF stars in the cell (number of PSF stars < %d)\" % (i + 1, val[0], val[1], min_numpsf))\n", + " print(f\"Center Coordinates of grid cell {i + 1} are ({val[0]}, {val[1]}) --- Not enough PSF stars in the cell (number of PSF stars < {min_numpsf})\")\n", "\n", " else:\n", - " print(\"Center Coordinate of grid cell %d are (%d, %d) --- Number of PSF stars:\" % (n + 1, val[0], val[1]), np.count_nonzero(test)) \n", + " print(f\"Center Coordinate of grid cell {n + 1} are ({val[0]}, {val[1]}) --- Number of PSF stars: {np.count_nonzero(test)}\") \n", " print(\"\")" ] }, @@ -945,7 +864,7 @@ " for j, filt in enumerate(filts_short):\n", " for i in np.arange(0, len(dict_images[det][filt]['images']), 1):\n", "\n", - " print(\"Analyzing image {number} of filter {f}, detector {d} \".format(number=i + 1, f=filt, d=det))\n", + " print(f\"Analyzing image {i + 1} of filter {filt}, detector {det}\")\n", " print(\"\")\n", "\n", " count_PSFstars_grid(grid_points=5, size=15, min_numpsf=40)" ] }, @@ -1005,7 +924,6 @@ " dict_phot[det][filt]['output photometry tables'] = {}\n", "\n", " for i in np.arange(0, len(dict_images[det][filt]['images']), 1):\n", - "\n", " dict_phot[det][filt]['residual images'][i + 1] = None\n", " dict_phot[det][filt]['output photometry tables'][i + 1] = None" ] }, @@ -1014,7 +932,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**Note**: since performing the PSF photometry on the images takes some time (for the 8 images in this example ~ 4 hours), to speed up the notebook, we use a high threshold in the finding algorithm (threshold ~ 2000) and we will use in the analyis below the catalogs obtained with a sigma threshold = 10 from a previous reduction run. To perform a meaningful data reduction, the user should modify the threshold accordingly. \n", + "**Note**: to speed up the notebook, we use a high threshold in the finding algorithm (threshold ~ 2000), and in the analysis below we use the catalogs obtained with a sigma threshold = 10 from a previous reduction run. To perform a meaningful data reduction, the user should modify the threshold accordingly. \n", "\n", "Here we use as PSF model the grid of WebbPSF PSFs, but the users can change the model and use the others available (i.e., single WebbPSF PSF, single ePSF) modifying the `psf` parameter in the function."
] @@ -1043,11 +961,12 @@ "\n", " data = data_sb / imh['PHOTMJSR']\n", "\n", - " print(\"Conversion factor from {units} to DN/s for filter {f}:\".format(units=imh['BUNIT'], f=filt), imh['PHOTMJSR'])\n", + " units = imh['BUNIT']\n", + " print(f\"Conversion factor from {units} to DN/s for filter {filt}: {imh['PHOTMJSR']}\")\n", " print(\"Applying conversion to the data\")\n", " \n", " sigma_psf = dict_utils[filt]['psf fwhm']\n", - " print(\"FWHM for the filter {f}:\".format(f=filt), sigma_psf)\n", + " print(f\"FWHM for the filter {filt}: {sigma_psf}\")\n", " \n", " std = bkgrms(data)\n", " bkg = mmm_bkg(data)\n", @@ -1055,7 +974,7 @@ " daofind = DAOStarFinder(threshold=th * std + bkg, fwhm=sigma_psf, roundhi=1.0, roundlo=-1.0,\n", " sharplo=0.30, sharphi=1.40)\n", " \n", - " daogroup = DAOGroup(5.0 * sigma_psf)\n", + " grouper = SourceGrouper(5.0 * sigma_psf)\n", " \n", " # grid PSF\n", "\n", @@ -1075,22 +994,25 @@ " print(\"Using as PSF model single ePSF\")\n", " psf_model = dict_psfs_epsf[det][filt]['epsf single'][i + 1].copy()\n", "\n", - " print(\"Performing the photometry on image {number} of filter {f}, detector {d}\".format(number=i + 1, f=filt, d=det))\n", + " print(f\"Performing the photometry on image {i + 1} of filter {filt}, detector {det}\")\n", " \n", " tic = time.perf_counter()\n", + "\n", + " data_sub = data - mmm_bkg(data)\n", + " psf_shape = (11, 11)\n", " \n", - " phot = IterativelySubtractedPSFPhotometry(finder=daofind, group_maker=daogroup,\n", - " bkg_estimator=mmm_bkg, psf_model=psf_model,\n", - " fitter=LevMarLSQFitter(),\n", - " niters=2, fitshape=(11, 11), aperture_radius=ap_radius[j])\n", - " result = phot(data)\n", + " phot = IterativePSFPhotometry(psf_model, psf_shape, daofind,\n", + " grouper=grouper, fitter=fitter,\n", + " maxiters=2, aperture_radius=ap_radius[j])\n", + " result = phot(data_sub)\n", " \n", " toc = time.perf_counter()\n", - " \n", - " print(\"Time needed to perform photometry on image {number}:\".format(number=i + 1), \"%.2f\" % ((toc - tic) / 3600), \"hours\")\n", - " print(\"Number of sources detected in image {number} for filter {f}:\".format(number=i + 1, f=filt), len(result))\n", + "\n", + " dtime = (toc - tic)\n", + " print(f\"Time needed to perform photometry on image {i + 1}: {dtime:.2f} sec\")\n", + " print(f\"Number of sources detected in image {i + 1} for filter {filt}: {len(result)}\")\n", " \n", - " residual_image = phot.get_residual_image()\n", + " residual_image = phot.make_residual_image(data_sub, psf_shape)\n", " \n", " dict_phot[det][filt]['residual images'][i + 1] = residual_image\n", " dict_phot[det][filt]['output photometry tables'][i + 1] = result\n", @@ -1100,16 +1022,14 @@ " if save_residuals:\n", " hdu = fits.PrimaryHDU(residual_image)\n", " hdul = fits.HDUList([hdu])\n", - " residual_outname = 'residual_%s_%s_webbPSF_gridPSF_%dof%d_%dof%d.fits' % (d, filt, prim_dith_pos, prim_dith_num, subpx_dith_pos, subpx_dith_num)\n", + " residual_outname = f'residual_{d}_{filt}_webbPSF_gridPSF_{prim_dith_pos}of{prim_dith_num}_{subpx_dith_pos}of{subpx_dith_num}.fits'\n", "\n", " dir_output_phot = './'\n", "\n", " hdul.writeto(os.path.join(dir_output_phot, residual_outname))\n", - "\n", - " outname = 'phot_%s_%s_webbPSF_gridPSF_level2_%dof%d_%dof%d.pkl' % (d, filt, prim_dith_pos, prim_dith_num, subpx_dith_pos, subpx_dith_num)\n", + " outname = 'phot_{d}_{filt}_webbPSF_gridPSF_level2_{prim_dith_pos}of{prim_dith_num}_{subpx_dith_pos}of{subpx_dith_num}.pkl'\n", "\n", " # save the output photometry Tables\n", - "\n", " if save_output:\n", 
" tab = result.to_pandas()\n", " tab.to_pickle(os.path.join(dir_output_phot, outname))" @@ -1138,19 +1058,16 @@ " psf_phot(det=det, filt=filt, th=2000, psf='grid_webbpsf', save_residuals=True, save_output=False) \n", "\n", "toc_tot = time.perf_counter()\n", - "print(\"Time elapsed to perform the photometry of the {number} images:\".format(number=(len(filts_short) * len(dict_images[det][filt]['images']))), \"%.2f\" % ((toc_tot - tic_tot) / 3600), \"hours\") " + "number = len(filts_short) * len(dict_images[det][filt]['images'])\n", + "dtime = (toc_tot - tic_tot)\n", + "print(f\"Time elapsed to perform the photometry of the {number} images: {dtime:.2f} sec\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Output Photometry Table\n", - "\n", - "\n", - "### Note for developer: \n", - "\n", - "It would be really useful, if PhotUtils can provide some diagnostics to identify the quality of the photometry in the final catalog for each source (similarly to all the other PSF photometry programs available)." + "## Output Photometry Table" ] }, { @@ -1243,14 +1160,14 @@ "\n", " boxlink_cat_f115w = 'https://data.science.stsci.edu/redirect/JWST/jwst-data_analysis_tools/stellar_photometry/phot_cat_F115W.tar.gz'\n", " boxfile_cat_f115w = './phot_cat_F115W.tar.gz'\n", - " urllib.request.urlretrieve(boxlink_cat_f115w, boxfile_cat_f115w)\n", + " request.urlretrieve(boxlink_cat_f115w, boxfile_cat_f115w)\n", "\n", " tar = tarfile.open(boxfile_cat_f115w, 'r')\n", " tar.extractall()\n", "\n", " boxlink_cat_f200w = 'https://data.science.stsci.edu/redirect/JWST/jwst-data_analysis_tools/stellar_photometry/phot_cat_F200W.tar.gz'\n", " boxfile_cat_f200w = './phot_cat_F200W.tar.gz'\n", - " urllib.request.urlretrieve(boxlink_cat_f200w, boxfile_cat_f200w)\n", + " request.urlretrieve(boxlink_cat_f200w, boxfile_cat_f200w)\n", "\n", " tar = tarfile.open(boxfile_cat_f200w, 'r')\n", " tar.extractall()\n", @@ -1450,7 +1367,7 @@ "\n", " ax.set_xlabel(filt1 + '-' + filt2, fontdict=font2)\n", " ax.set_ylabel(filt1, fontdict=font2)\n", - " ax.text(xlim0 + 0.1, -8.65, \"Image %s\" % j, fontdict=font2)\n", + " ax.text(xlim0 + 0.1, -8.65, f\"Image {j}\", fontdict=font2)\n", " \n", "plt.tight_layout()" ] @@ -1501,12 +1418,12 @@ "\n", "ax1.scatter(delta_x_f115w, delta_y_f115w, s=1, color='gray')\n", "\n", - "ax1.set_xlabel('$\\Delta$ X (px)', fontdict=font2)\n", - "ax1.set_ylabel('$\\Delta$ Y (px)', fontdict=font2)\n", + "ax1.set_xlabel(r'$\\Delta$ X (px)', fontdict=font2)\n", + "ax1.set_ylabel(r'$\\Delta$ Y (px)', fontdict=font2)\n", "ax1.set_title(filt1, fontdict=font2)\n", - "ax1.text(xlim0 + 0.05, ylim1 - 0.15, ' $\\Delta$ X = %5.3f $\\pm$ %5.3f' % (d_x_f115w, sigma_d_x_f115w),\n", + "ax1.text(xlim0 + 0.05, ylim1 - 0.15, rf'$\\Delta$ X = {d_x_f115w:5.3f} $\\pm$ {sigma_d_x_f115w:5.3f}',\n", " color='k', fontdict=font2)\n", - "ax1.text(xlim0 + 0.05, ylim1 - 0.30, ' $\\Delta$ Y = %5.3f $\\pm$ %5.3f' % (d_y_f115w, sigma_d_y_f115w),\n", + "ax1.text(xlim0 + 0.05, ylim1 - 0.30, rf'$\\Delta$ Y = {d_y_f115w:5.3f} $\\pm$ {sigma_d_y_f115w:5.3f}',\n", " color='k', fontdict=font2)\n", "ax1.plot([0, 0], [ylim0, ylim1], color='k', lw=2, ls='--')\n", "ax1.plot([xlim0, xlim1], [0, 0], color='k', lw=2, ls='--')\n", @@ -1534,15 +1451,15 @@ "_, d_y_f200w, sigma_d_y_f200w = sigma_clipped_stats(delta_y_f200w)\n", "\n", "ax2.scatter(delta_x_f200w, delta_y_f200w, s=1, color='gray')\n", - "ax2.text(xlim0 + 0.05, ylim1 - 0.15, ' $\\Delta$ X = %5.3f $\\pm$ %5.3f' % (d_x_f200w, sigma_d_x_f200w),\n", + "ax2.text(xlim0 + 0.05, ylim1 - 0.15, 
rf'$\\Delta$ X = {d_x_f200w:5.3f} $\\pm$ {sigma_d_x_f200w:5.3f}',\n", " color='k', fontdict=font2)\n", - "ax2.text(xlim0 + 0.05, ylim1 - 0.30, ' $\\Delta$ Y = %5.3f $\\pm$ %5.3f' % (d_y_f200w, sigma_d_y_f200w),\n", + "ax2.text(xlim0 + 0.05, ylim1 - 0.30, rf'$\\Delta$ Y = {d_y_f200w:5.3f} $\\pm$ {sigma_d_y_f200w:5.3f}',\n", " color='k', fontdict=font2)\n", "ax2.plot([0, 0], [ylim0, ylim1], color='k', lw=2, ls='--')\n", "ax2.plot([xlim0, xlim1], [0, 0], color='k', lw=2, ls='--')\n", "\n", - "ax2.set_xlabel('$\\Delta$ X (px)', fontdict=font2)\n", - "ax2.set_ylabel('$\\Delta$ Y (px)', fontdict=font2)\n", + "ax2.set_xlabel(r'$\\Delta$ X (px)', fontdict=font2)\n", + "ax2.set_ylabel(r'$\\Delta$ Y (px)', fontdict=font2)\n", "ax2.set_title(filt2, fontdict=font2)\n", "\n", "plt.tight_layout()" @@ -1577,7 +1494,7 @@ "ax1.plot([xlim0, xlim1], [0, 0], color='k', lw=2, ls='--')\n", "\n", "ax1.set_xlabel(filt1 + '_inst', fontdict=font2)\n", - "ax1.set_ylabel('$\\Delta$ X (px)', fontdict=font2)\n", + "ax1.set_ylabel(r'$\\Delta$ X (px)', fontdict=font2)\n", "\n", "ax2 = plt.subplot(2, 2, 2)\n", "\n", @@ -1593,7 +1510,7 @@ "ax2.plot([xlim0, xlim1], [0, 0], color='k', lw=2, ls='--')\n", "\n", "ax2.set_xlabel(filt1 + '_inst', fontdict=font2)\n", - "ax2.set_ylabel('$\\Delta$ Y (px)', fontdict=font2)\n", + "ax2.set_ylabel(r'$\\Delta$ Y (px)', fontdict=font2)\n", "\n", "ax3 = plt.subplot(2, 2, 3)\n", "\n", @@ -1611,7 +1528,7 @@ "ax3.plot([xlim0, xlim1], [0, 0], color='k', lw=2, ls='--')\n", "\n", "ax3.set_xlabel(filt2 + '_inst', fontdict=font2)\n", - "ax3.set_ylabel('$\\Delta$ X (px)', fontdict=font2)\n", + "ax3.set_ylabel(r'$\\Delta$ X (px)', fontdict=font2)\n", "\n", "ax4 = plt.subplot(2, 2, 4)\n", "\n", @@ -1627,7 +1544,7 @@ "ax4.plot([xlim0, xlim1], [0, 0], color='k', lw=2, ls='--')\n", "\n", "ax4.set_xlabel(filt2 + '_inst', fontdict=font2)\n", - "ax4.set_ylabel('$\\Delta$ Y (px)', fontdict=font2)\n", + "ax4.set_ylabel(r'$\\Delta$ Y (px)', fontdict=font2)\n", "\n", "plt.tight_layout()" ] @@ -1813,7 +1730,7 @@ "ax2 = plt.subplot(2, 2, 2)\n", "\n", "ax2.set_xlabel(filt1 + '_inst', fontdict=font2)\n", - "ax2.set_ylabel('$\\sigma$' + filt1, fontdict=font2)\n", + "ax2.set_ylabel(r'$\\sigma$' + filt1, fontdict=font2)\n", "\n", "xlim0 = -9\n", "xlim1 = -1.5\n", @@ -1833,7 +1750,7 @@ "ax3 = plt.subplot(2, 2, 4)\n", "\n", "ax3.set_xlabel(filt2 + '_inst', fontdict=font2)\n", - "ax3.set_ylabel('$\\sigma$' + filt2, fontdict=font2)\n", + "ax3.set_ylabel(r'$\\sigma$' + filt2, fontdict=font2)\n", "\n", "ax3.set_xlim(xlim0, xlim1)\n", "ax3.set_ylim(ylim0, ylim1)\n", @@ -1896,7 +1813,7 @@ "\n", " boxlink_images_lev3 = 'https://data.science.stsci.edu/redirect/JWST/jwst-data_analysis_tools/stellar_photometry/images_level3.tar.gz'\n", " boxfile_images_lev3 = './images_level3.tar.gz'\n", - " urllib.request.urlretrieve(boxlink_images_lev3, boxfile_images_lev3)\n", + " request.urlretrieve(boxlink_images_lev3, boxfile_images_lev3)\n", "\n", " tar = tarfile.open(boxfile_images_lev3, 'r')\n", " tar.extractall()\n", @@ -1922,7 +1839,7 @@ " else:\n", " d = d\n", "\n", - " wv = np.float(f[1:3])\n", + " wv = float(f[1:3])\n", "\n", " if wv > 24:\n", " ff_long.append(f)\n", @@ -2056,14 +1973,15 @@ " data_sb = image[1].data\n", " imh = image[1].header\n", "\n", - " print(\"Selecting stars for aperture photometry on image {number} of filter {f}, detector {d}\".format(number=i + 1, f=filt, d=det))\n", + " print(f\"Selecting stars for aperture photometry on image {i + 1} of filter {filt}, detector {det}\")\n", "\n", " data = data_sb / 
imh['PHOTMJSR']\n", - " print(\"Conversion factor from {units} to DN/s for filter {f}:\".format(units=imh['BUNIT'], f=filt), imh['PHOTMJSR'])\n", + " units = imh['BUNIT']\n", + " print(f\"Conversion factor from {units} to DN/s for filter {filt}: {imh['PHOTMJSR']}\")\n", "\n", " sigma_psf = dict_utils[filt]['psf fwhm']\n", "\n", - " print(\"FWHM for the filter {f}:\".format(f=filt), sigma_psf, \"px\")\n", + " print(f\"FWHM for the filter {filt}: {sigma_psf} px\")\n", "\n", " std = bkgrms(data)\n", " bkg = mmm_bkg(data)\n", @@ -2104,12 +2022,12 @@ "\n", " print(\"Minimum distance required:\", min_sep[j], \"px\")\n", " print(\"\")\n", - " print(\"Number of bright isolated sources found in the image for {f}:\".format(f=filt), len(apcorr_stars))\n", + " print(f\"Number of bright isolated sources found in the image for {filt}: {len(apcorr_stars)}\")\n", " print(\"-----------------------------------------------------\")\n", " print(\"\")\n", " else:\n", " print(\"\")\n", - " print(\"Number of bright sources found in the image for {f}:\".format(f=filt), len(apcorr_stars))\n", + " print(f\"Number of bright sources found in the image for {filt}: {len(apcorr_stars)}\")\n", " print(\"--------------------------------------------\")\n", " print(\"\") \n", " \n", @@ -2208,10 +2126,10 @@ "\n", " boxlink_apcorr_table = 'https://data.science.stsci.edu/redirect/JWST/jwst-data_analysis_tools/stellar_photometry/aperture_correction_table.txt'\n", " boxfile_apcorr_table = './aperture_correction_table.txt'\n", - " urllib.request.urlretrieve(boxlink_apcorr_table, boxfile_apcorr_table)\n", + " request.urlretrieve(boxlink_apcorr_table, boxfile_apcorr_table)\n", " ap_tab = './aperture_correction_table.txt'\n", "\n", - "aper_table = pd.read_csv(ap_tab, header=None, sep='\\s+', index_col=0,\n", + "aper_table = pd.read_csv(ap_tab, header=None, sep=r'\\s+', index_col=0,\n", " names=['filter', 'pupil', 'wave', 'r10', 'r20', 'r30', 'r40', 'r50', 'r60', 'r70', 'r80',\n", " 'r85', 'r90', 'sky_flux_px', 'apcorr10', 'apcorr20', 'apcorr30', 'apcorr40',\n", " 'apcorr50', 'apcorr60', 'apcorr70', 'apcorr80', 'apcorr85', 'apcorr90', 'sky_in',\n", @@ -2256,7 +2174,7 @@ " table_aper = Table()\n", "\n", " for ee, radius in ee_radii.items():\n", - " print(\"Performing aperture photometry for radius equivalent to EE = {0}% for filter {1}\".format(ee, filt))\n", + " print(f\"Performing aperture photometry for radius equivalent to EE = {ee}% for filter {filt}\")\n", " aperture = CircularAperture(positions, r=radius)\n", " annulus_aperture = CircularAnnulus(positions, r_in=sky[\"sky_in\"], r_out=sky[\"sky_out\"])\n", " annulus_mask = annulus_aperture.to_mask(method='center')\n", @@ -2351,7 +2269,7 @@ "\n", "ax1.scatter(f115w_psf_matched, diff_f115w, s=50, color='k')\n", "ax1.plot([xlim0, xlim1], [zp_f115w, zp_f115w], color='r', lw=5, ls='--')\n", - "ax1.text(xlim0 + 0.05, ylim1 - 0.15, filt1 + ' Zeropoint = %5.3f $\\pm$ %5.3f' % (zp_f115w, zp_sigma_f115w), color='k', fontdict=font2)\n", + "ax1.text(xlim0 + 0.05, ylim1 - 0.15, filt1 + rf' Zeropoint = {zp_f115w:5.3f} $\\pm$ {zp_sigma_f115w:5.3f}', color='k', fontdict=font2)\n", " \n", "ax2 = plt.subplot(2, 1, 2)\n", "\n", @@ -2383,7 +2301,7 @@ "\n", "ax2.scatter(f200w_psf_matched, diff_f200w, s=50, color='k')\n", "ax2.plot([xlim0, xlim1], [zp_f200w, zp_f200w], color='r', lw=5, ls='--')\n", - "ax2.text(xlim0 + 0.05, ylim1 - 0.15, filt2 + ' Zeropoint = %5.3f $\\pm$ %5.3f' % (zp_f200w, zp_sigma_f200w), color='k', fontdict=font2)\n", + "ax2.text(xlim0 + 0.05, ylim1 - 0.15, filt2 + rf' Zeropoint 
= {zp_f200w:5.3f} $\\pm$ {zp_sigma_f200w:5.3f}', color='k', fontdict=font2)\n", " \n", "plt.tight_layout()" ] @@ -2410,11 +2328,11 @@ "\n", " boxlink_input_cat = 'https://data.science.stsci.edu/redirect/JWST/jwst-data_analysis_tools/stellar_photometry/pointsource.cat'\n", " boxfile_input_cat = './pointsource.cat'\n", - " urllib.request.urlretrieve(boxlink_input_cat, boxfile_input_cat)\n", + " request.urlretrieve(boxlink_input_cat, boxfile_input_cat)\n", " input_cat = './pointsource.cat'\n", "\n", - "cat = pd.read_csv(input_cat, header=None, sep='\\s+', names=['ra_in', 'dec_in', 'f070w_in', 'f115w_in',\n", - " 'f200w_in', 'f277w_in', 'f356w_in', 'f444w_in'],\n", + "cat = pd.read_csv(input_cat, header=None, sep=r'\\s+', names=['ra_in', 'dec_in', 'f070w_in', 'f115w_in',\n", + " 'f200w_in', 'f277w_in', 'f356w_in', 'f444w_in'],\n", " comment='#', skiprows=7, usecols=range(0, 8))\n", "\n", "cat.head()" @@ -2545,7 +2463,7 @@ "ax1 = plt.subplot(2, 1, 1)\n", "\n", "ax1.set_xlabel(filt1, fontdict=font2)\n", - "ax1.set_ylabel('$\\Delta$ Mag', fontdict=font2)\n", + "ax1.set_ylabel(r'$\\Delta$ Mag', fontdict=font2)\n", "\n", "radec_input = SkyCoord(cat_sel['ra_in'], cat_sel['dec_in'], unit='deg')\n", "\n", @@ -2574,13 +2492,13 @@ "\n", "ax1.scatter(f115w_psf_cfr, diff_f115w_cfr, s=5, color='k')\n", "ax1.plot([xlim0, xlim1], [0, 0], color='r', lw=5, ls='--')\n", - "ax1.text(xlim0 + 0.05, ylim1 - 0.15, filt1 + ' $\\Delta$ Mag = %5.3f $\\pm$ %5.3f'\n", - " % (med_diff_f115w_cfr, sig_diff_f115w_cfr), color='k', fontdict=font2)\n", + "text = rf'{filt1} $\\Delta$ Mag = {med_diff_f115w_cfr:5.3f} $\\pm$ {sig_diff_f115w_cfr:5.3f}'\n", + "ax1.text(xlim0 + 0.05, ylim1 - 0.15, text, color='k', fontdict=font2)\n", "\n", "ax2 = plt.subplot(2, 1, 2)\n", "\n", "ax2.set_xlabel(filt2, fontdict=font2)\n", - "ax2.set_ylabel('$\\Delta$ Mag', fontdict=font2)\n", + "ax2.set_ylabel(r'$\\Delta$ Mag', fontdict=font2)\n", "\n", "idx_f200w_cfr, d2d_f200w_cfr, _ = match_coordinates_sky(radec_input, radec_f200w)\n", "\n", @@ -2607,8 +2525,10 @@ "\n", "ax2.scatter(f200w_psf_cfr, diff_f200w_cfr, s=5, color='k')\n", "ax2.plot([xlim0, xlim1], [0, 0], color='r', lw=5, ls='--')\n", - "ax2.text(xlim0 + 0.05, ylim1 - 0.15, filt2 + ' $\\Delta$ Mag = %5.3f $\\pm$ %5.3f'\n", - " % (med_diff_f200w_cfr, sig_diff_f200w_cfr), color='k', fontdict=font2)\n", + "\n", + "text = rf'{filt2} $\\Delta$ Mag = {med_diff_f200w_cfr:5.3f} $\\pm$ {sig_diff_f200w_cfr:5.3f}'\n", + "\n", + "ax2.text(xlim0 + 0.05, ylim1 - 0.15, text, color='k', fontdict=font2)\n", "\n", "plt.tight_layout()" ] @@ -2636,8 +2556,8 @@ "ax1.yaxis.set_major_locator(ticker.AutoLocator())\n", "ax1.yaxis.set_minor_locator(ticker.AutoMinorLocator())\n", "\n", - "ax1.set_xlabel('$\\Delta$ RA (mas)', fontdict=font2)\n", - "ax1.set_ylabel('$\\Delta$ Dec (mas)', fontdict=font2)\n", + "ax1.set_xlabel(r'$\\Delta$ RA (mas)', fontdict=font2)\n", + "ax1.set_ylabel(r'$\\Delta$ Dec (mas)', fontdict=font2)\n", "ax1.set_title(filt1, fontdict=font2)\n", "\n", "ra_f115w_inp_cfr = np.array(cat_sel['ra_in'][sep_f115w_cfr])\n", @@ -2660,10 +2580,11 @@ "ax1.plot([0, 0], [ylim0, ylim1], color='k', lw=2, ls='--')\n", "ax1.plot([xlim0, xlim1], [0, 0], color='k', lw=2, ls='--')\n", "\n", - "ax1.text(xlim0 + 0.05, ylim1 - 1.50, ' $\\Delta$ RA (mas) = %5.3f $\\pm$ %5.3f'\n", - " % (med_diffra_f115w_cfr, sig_diffra_f115w_cfr), color='k', fontdict=font2)\n", - "ax1.text(xlim0 + 0.05, ylim1 - 3.0, ' $\\Delta$ Dec (mas) = %5.3f $\\pm$ %5.3f'\n", - " % (med_diffdec_f115w_cfr, sig_diffdec_f115w_cfr), color='k', 
fontdict=font2)\n", + "text = rf'$\\Delta$ RA (mas) = {med_diffra_f115w_cfr:5.3f} $\\pm$ {sig_diffra_f115w_cfr:5.3f}'\n", + "ax1.text(xlim0 + 0.05, ylim1 - 1.50, text, color='k', fontdict=font2)\n", + "\n", + "text = rf'$\\Delta$ Dec (mas) = {med_diffdec_f115w_cfr:5.3f} $\\pm$ {sig_diffdec_f115w_cfr:5.3f}'\n", + "ax1.text(xlim0 + 0.05, ylim1 - 3.0, text, color='k', fontdict=font2)\n", "\n", "ax2 = plt.subplot(1, 2, 2)\n", "\n", @@ -2681,8 +2602,8 @@ "ax2.yaxis.set_major_locator(ticker.AutoLocator())\n", "ax2.yaxis.set_minor_locator(ticker.AutoMinorLocator())\n", "\n", - "ax2.set_xlabel('$\\Delta$ RA (mas)', fontdict=font2)\n", - "ax2.set_ylabel('$\\Delta$ Dec (mas)', fontdict=font2)\n", + "ax2.set_xlabel(r'$\\Delta$ RA (mas)', fontdict=font2)\n", + "ax2.set_ylabel(r'$\\Delta$ Dec (mas)', fontdict=font2)\n", "\n", "ra_f200w_inp_cfr = np.array(cat_sel['ra_in'][sep_f200w_cfr])\n", "ra_f200w_psf_cfr = np.array(radec_f200w.ra[idx_f200w_cfr[sep_f200w_cfr]])\n", @@ -2704,10 +2625,11 @@ "ax2.plot([0, 0], [ylim0, ylim1], color='k', lw=2, ls='--')\n", "ax2.plot([xlim0, xlim1], [0, 0], color='k', lw=2, ls='--')\n", "\n", - "ax2.text(xlim0 + 0.05, ylim1 - 1.50, ' $\\Delta$ RA (mas) = %5.3f $\\pm$ %5.3f'\n", - " % (med_diffra_f200w_cfr, sig_diffra_f200w_cfr), color='k', fontdict=font2)\n", - "ax2.text(xlim0 + 0.05, ylim1 - 3.0, ' $\\Delta$ Dec (mas) = %5.3f $\\pm$ %5.3f'\n", - " % (med_diffdec_f200w_cfr, sig_diffdec_f200w_cfr), color='k', fontdict=font2)\n", + "text = rf'$\\Delta$ Dec (mas) = {med_diffdec_f200w_cfr:5.3f} $\\pm$ {sig_diffdec_f200w_cfr:5.3f}'\n", + "ax2.text(xlim0 + 0.05, ylim1 - 1.50, text, color='k', fontdict=font2)\n", + "\n", + "text = rf'$\\Delta$ RA (mas) = {med_diffra_f200w_cfr:5.3f} $\\pm$ {sig_diffra_f200w_cfr:5.3f}'\n", + "ax2.text(xlim0 + 0.05, ylim1 - 3.0, text, color='k', fontdict=font2)\n", "\n", "plt.tight_layout()" ] }, @@ -2718,7 +2640,7 @@ "source": [ "## Final notes\n", "\n", - "This notebook provides a general overview on how to perform PSF photometry using the [PhotUtils](https://photutils.readthedocs.io/en/stable/) package. The choice of the different parameters adopted in all the reduction steps as well as the choice of the PSF model depend on the specific user science case. Moreover, a detailed analysis that allow to provide recommendations on how to set those parameters and outline the differences in the output photometry when different PSF models are adopted (single vs PSF grid, number of PSFs in the grid, etc.) will be possible only when real data will be available after the instrument commissioning." + "This notebook provides a general overview of how to perform PSF photometry using the [photutils](https://photutils.readthedocs.io/en/stable/) package. The choice of the different parameters adopted in all the reduction steps, as well as the choice of the PSF model, depends on the specific user science case. Moreover, a detailed analysis that allows us to provide recommendations on how to set those parameters and to outline the differences in the output photometry when different PSF models are adopted (single PSF vs PSF grid, number of PSFs in the grid, etc.) will be possible only once real data are available after instrument commissioning. 
In this context, we note that one of the selected ERS programs (ERS 1334 - The Resolved Stellar Populations Early Release Science Program) will provide a fundamental test benchmark to explore how the different choices outlined above will impact the quality of the PSF photometry in a crowded stellar region." ] }, { @@ -2757,7 +2679,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.10" + "version": "3.11.7" } }, "nbformat": 4, diff --git a/notebooks/psf_photometry/requirements.txt b/notebooks/psf_photometry/requirements.txt index ef0875748..17553e36e 100644 --- a/notebooks/psf_photometry/requirements.txt +++ b/notebooks/psf_photometry/requirements.txt @@ -1,9 +1,9 @@ -numpy>=1.22.4 -pandas>=1.4.2 -jwst==1.5.2 -astropy>=5.1 -photutils==1.4.0 -ipywidgets>=7.7.0 -webbpsf>=1.0.0 -matplotlib>=3.5.2 -stsynphot==1.1.0 +numpy>=1.25.2 +pandas>=2.1.0 +jwst>=1.11.4 +astropy>=5.3.3 +photutils>=1.11.0 +ipywidgets>=8.1.1 +matplotlib>=3.7.2 +webbpsf>=1.2.1 +stsynphot>=1.2.0
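
For reference, the new photutils interface adopted above (IterativePSFPhotometry, SourceGrouper, and make_residual_image in place of IterativelySubtractedPSFPhotometry, DAOGroup, and get_residual_image) can be exercised end to end on a small synthetic image with the standalone sketch below. It is illustrative only and not part of the notebook: the IntegratedGaussianPRF stand-in PSF model, the synthetic sources, and all numeric parameters are assumptions chosen for the demo, and the call signatures are those of photutils ~1.11 as pinned in the updated requirements.txt (later releases may differ).

# Standalone sanity check of the photutils >= 1.9 PSF-photometry call pattern used in this PR.
# Everything here is synthetic/illustrative; only the API usage mirrors the notebook's psf_phot().
import numpy as np
from astropy.table import Table
from photutils.datasets import make_gaussian_sources_image, make_noise_image
from photutils.detection import DAOStarFinder
from photutils.psf import IntegratedGaussianPRF, IterativePSFPhotometry, SourceGrouper

sigma_psf = 1.5                                   # assumed Gaussian PSF width (pixels)
shape = (101, 101)

# Build a small synthetic scene: four Gaussian stars plus Gaussian noise.
sources = Table()
sources['flux'] = [700.0, 800.0, 700.0, 800.0]
sources['x_mean'] = [12.0, 17.0, 80.0, 90.0]
sources['y_mean'] = [15.0, 15.0, 60.0, 90.0]
sources['x_stddev'] = sigma_psf
sources['y_stddev'] = sigma_psf
sources['theta'] = 0.0
data_sub = (make_gaussian_sources_image(shape, sources)
            + make_noise_image(shape, distribution='gaussian', mean=0.0,
                               stddev=0.1, seed=123))

# Same building blocks as in the notebook: a finder, a grouper, and a PSF model.
psf_model = IntegratedGaussianPRF(sigma=sigma_psf)
daofind = DAOStarFinder(threshold=10.0, fwhm=2.35 * sigma_psf)
grouper = SourceGrouper(min_separation=5.0 * sigma_psf)

# fit_shape replaces the old `fitshape` keyword; maxiters replaces `niters`.
fit_shape = (11, 11)
phot = IterativePSFPhotometry(psf_model, fit_shape, daofind,
                              grouper=grouper, maxiters=2, aperture_radius=4)

result = phot(data_sub)                           # table of fitted positions and fluxes
residual = phot.make_residual_image(data_sub, fit_shape)
print(len(result), residual.shape)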