diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..ae70d9f
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,10 @@
+RELIC-INFO
+relic/
+*/version.py
+build/
+dist/
+*.pyc
+*.o
+*.so
+*.egg-info
+*.swp
diff --git a/Jenkinsfile b/Jenkinsfile
new file mode 100644
index 0000000..9cb6183
--- /dev/null
+++ b/Jenkinsfile
@@ -0,0 +1,22 @@
+if (utils.scm_checkout()) return
+
+
+data_config = new DataConfig()
+data_config.server_id = "bytesalad"
+data_config.root = 'tests_output'
+data_config.match_prefix = '(.*)_result' // .json is appended automatically
+
+
+bc0 = new BuildConfig()
+bc0.nodetype = "linux"
+bc0.name = "Install"
+bc0.test_configs = [data_config]
+bc0.build_cmds = ["conda env update --file=doc/environment.yml -q",
+                  "with_env -n stistools python setup.py install",
+                  "with_env -n stistools conda install -q -y -c http://ssb.stsci.edu/astroconda hstcal",
+                  "with_env -n stistools conda install -q -y -c http://ssb.stsci.edu/astroconda crds"]
+bc0.test_cmds = ["with_env -n stistools conda install -q -y pytest",
+                 "with_env -n stistools conda install -q -y pytest-remotedata",
+                 "with_env -n stistools pytest --basetemp=tests_output --junitxml results.xml --bigdata"]
+
+utils.run([bc0])
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..93362df
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+include RELIC-INFO
+exclude stistools/version.py
+recursive-include stistools/pars *.cfg*
+
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..c755454
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,50 @@
+"""Project default for pytest"""
+import os
+import pytest
+
+from astropy.tests.plugins.display import PYTEST_HEADER_MODULES
+from astropy.tests.helper import enable_deprecations_as_exceptions
+
+
+# Treat all DeprecationWarnings as exceptions
+enable_deprecations_as_exceptions()
+
+def pytest_addoption(parser):
+    # Add option to run slow tests
+    parser.addoption(
+        "--runslow",
+        action="store_true",
+        help="run slow tests"
+    )
+
+    parser.addoption(
+        "--slow",
+        action="store_true",
+        help="run slow tests"
+    )
+
+    # Add option to use big data sets
+    parser.addoption(
+        "--bigdata",
+        action="store_true",
+        help="use big data sets (intranet)"
+    )
+
+    parser.addoption(
+        "--env",
+        choices=['dev', 'stable', ''],
+        default='',
+        help="specify what environment to test"
+    )
+
+
+@pytest.fixture(scope='function', autouse=True)
+def _jail(tmpdir):
+    """ Perform test in a pristine temporary working directory
+    """
+    os.chdir(tmpdir.strpath)
+    yield
+
+@pytest.fixture
+def envopt(request):
+    return request.config.getoption("env")
diff --git a/defsetup.py b/defsetup.py
deleted file mode 100644
index 4783607..0000000
--- a/defsetup.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from __future__ import division # confidence high
-
-pkg = "stistools"
-
-setupargs = {
-    'version' : "1.1",
-    'description' : "Python Tools for STIS Data",
-    'author' : "Paul Barrett, Phil Hodge",
-    'author_email' : "help@stsci.edu",
-    'license' : "http://www.stsci.edu/resources/software_hardware/pyraf/LICENSE",
-    'platforms' : ["Linux","Solaris","Mac OS X","Win"],
-    'package_dir' : { 'stistools' : 'lib/stistools', },
-
-}
-
-
diff --git a/distribute_setup.py b/distribute_setup.py
deleted file mode 100644
index bbb6f3c..0000000
--- a/distribute_setup.py
+++ /dev/null
@@ -1,485 +0,0 @@
-#!python
-"""Bootstrap distribute installation
-
-If you want to use setuptools in your package's setup.py, just include this
-file in the same directory with it, and add this
to the top of your setup.py:: - - from distribute_setup import use_setuptools - use_setuptools() - -If you want to require a specific version of setuptools, set a download -mirror, or use an alternate download directory, you can do so by supplying -the appropriate options to ``use_setuptools()``. - -This file can also be run as a script to install or upgrade setuptools. -""" -import os -import sys -import time -import fnmatch -import tempfile -import tarfile -from distutils import log - -try: - from site import USER_SITE -except ImportError: - USER_SITE = None - -try: - import subprocess - - def _python_cmd(*args): - args = (sys.executable,) + args - return subprocess.call(args) == 0 - -except ImportError: - # will be used for python 2.3 - def _python_cmd(*args): - args = (sys.executable,) + args - # quoting arguments if windows - if sys.platform == 'win32': - def quote(arg): - if ' ' in arg: - return '"%s"' % arg - return arg - args = [quote(arg) for arg in args] - return os.spawnl(os.P_WAIT, sys.executable, *args) == 0 - -DEFAULT_VERSION = "0.6.19" -DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/" -SETUPTOOLS_FAKED_VERSION = "0.6c11" - -SETUPTOOLS_PKG_INFO = """\ -Metadata-Version: 1.0 -Name: setuptools -Version: %s -Summary: xxxx -Home-page: xxx -Author: xxx -Author-email: xxx -License: xxx -Description: xxx -""" % SETUPTOOLS_FAKED_VERSION - - -def _install(tarball): - # extracting the tarball - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - tar = tarfile.open(tarball) - _extractall(tar) - tar.close() - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now working in %s', subdir) - - # installing - log.warn('Installing Distribute') - if not _python_cmd('setup.py', 'install'): - log.warn('Something went wrong during the installation.') - log.warn('See the error message above.') - finally: - os.chdir(old_wd) - - -def _build_egg(egg, tarball, to_dir): - # extracting the tarball - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - tar = tarfile.open(tarball) - _extractall(tar) - tar.close() - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now working in %s', subdir) - - # building an egg - log.warn('Building a Distribute egg in %s', to_dir) - _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) - - finally: - os.chdir(old_wd) - # returning the result - log.warn(egg) - if not os.path.exists(egg): - raise IOError('Could not build the egg.') - - -def _do_download(version, download_base, to_dir, download_delay): - egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg' - % (version, sys.version_info[0], sys.version_info[1])) - if not os.path.exists(egg): - tarball = download_setuptools(version, download_base, - to_dir, download_delay) - _build_egg(egg, tarball, to_dir) - sys.path.insert(0, egg) - import setuptools - setuptools.bootstrap_install_from = egg - - -def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, download_delay=15, no_fake=True): - # making sure we use the absolute path - to_dir = os.path.abspath(to_dir) - was_imported = 'pkg_resources' in sys.modules or \ - 'setuptools' in sys.modules - try: - try: - import pkg_resources - if not hasattr(pkg_resources, '_distribute'): - if not no_fake: - _fake_setuptools() - raise ImportError - except ImportError: 
- return _do_download(version, download_base, to_dir, download_delay) - try: - pkg_resources.require("distribute>="+version) - return - except pkg_resources.VersionConflict: - e = sys.exc_info()[1] - if was_imported: - sys.stderr.write( - "The required version of distribute (>=%s) is not available,\n" - "and can't be installed while this script is running. Please\n" - "install a more recent version first, using\n" - "'easy_install -U distribute'." - "\n\n(Currently using %r)\n" % (version, e.args[0])) - sys.exit(2) - else: - del pkg_resources, sys.modules['pkg_resources'] # reload ok - return _do_download(version, download_base, to_dir, - download_delay) - except pkg_resources.DistributionNotFound: - return _do_download(version, download_base, to_dir, - download_delay) - finally: - if not no_fake: - _create_fake_setuptools_pkg_info(to_dir) - -def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, delay=15): - """Download distribute from a specified location and return its filename - - `version` should be a valid distribute version number that is available - as an egg for download under the `download_base` URL (which should end - with a '/'). `to_dir` is the directory where the egg will be downloaded. - `delay` is the number of seconds to pause before an actual download - attempt. - """ - # making sure we use the absolute path - to_dir = os.path.abspath(to_dir) - try: - from urllib.request import urlopen - except ImportError: - from urllib2 import urlopen - tgz_name = "distribute-%s.tar.gz" % version - url = download_base + tgz_name - saveto = os.path.join(to_dir, tgz_name) - src = dst = None - if not os.path.exists(saveto): # Avoid repeated downloads - try: - log.warn("Downloading %s", url) - src = urlopen(url) - # Read/write all in one block, so we don't create a corrupt file - # if the download is interrupted. 
- data = src.read() - dst = open(saveto, "wb") - dst.write(data) - finally: - if src: - src.close() - if dst: - dst.close() - return os.path.realpath(saveto) - -def _no_sandbox(function): - def __no_sandbox(*args, **kw): - try: - from setuptools.sandbox import DirectorySandbox - if not hasattr(DirectorySandbox, '_old'): - def violation(*args): - pass - DirectorySandbox._old = DirectorySandbox._violation - DirectorySandbox._violation = violation - patched = True - else: - patched = False - except ImportError: - patched = False - - try: - return function(*args, **kw) - finally: - if patched: - DirectorySandbox._violation = DirectorySandbox._old - del DirectorySandbox._old - - return __no_sandbox - -def _patch_file(path, content): - """Will backup the file then patch it""" - existing_content = open(path).read() - if existing_content == content: - # already patched - log.warn('Already patched.') - return False - log.warn('Patching...') - _rename_path(path) - f = open(path, 'w') - try: - f.write(content) - finally: - f.close() - return True - -_patch_file = _no_sandbox(_patch_file) - -def _same_content(path, content): - return open(path).read() == content - -def _rename_path(path): - new_name = path + '.OLD.%s' % time.time() - log.warn('Renaming %s into %s', path, new_name) - os.rename(path, new_name) - return new_name - -def _remove_flat_installation(placeholder): - if not os.path.isdir(placeholder): - log.warn('Unkown installation at %s', placeholder) - return False - found = False - for file in os.listdir(placeholder): - if fnmatch.fnmatch(file, 'setuptools*.egg-info'): - found = True - break - if not found: - log.warn('Could not locate setuptools*.egg-info') - return - - log.warn('Removing elements out of the way...') - pkg_info = os.path.join(placeholder, file) - if os.path.isdir(pkg_info): - patched = _patch_egg_dir(pkg_info) - else: - patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO) - - if not patched: - log.warn('%s already patched.', pkg_info) - return False - # now let's move the files out of the way - for element in ('setuptools', 'pkg_resources.py', 'site.py'): - element = os.path.join(placeholder, element) - if os.path.exists(element): - _rename_path(element) - else: - log.warn('Could not find the %s element of the ' - 'Setuptools distribution', element) - return True - -_remove_flat_installation = _no_sandbox(_remove_flat_installation) - -def _after_install(dist): - log.warn('After install bootstrap.') - placeholder = dist.get_command_obj('install').install_purelib - _create_fake_setuptools_pkg_info(placeholder) - -def _create_fake_setuptools_pkg_info(placeholder): - if not placeholder or not os.path.exists(placeholder): - log.warn('Could not find the install location') - return - pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1]) - setuptools_file = 'setuptools-%s-py%s.egg-info' % \ - (SETUPTOOLS_FAKED_VERSION, pyver) - pkg_info = os.path.join(placeholder, setuptools_file) - if os.path.exists(pkg_info): - log.warn('%s already exists', pkg_info) - return - - log.warn('Creating %s', pkg_info) - f = open(pkg_info, 'w') - try: - f.write(SETUPTOOLS_PKG_INFO) - finally: - f.close() - - pth_file = os.path.join(placeholder, 'setuptools.pth') - log.warn('Creating %s', pth_file) - f = open(pth_file, 'w') - try: - f.write(os.path.join(os.curdir, setuptools_file)) - finally: - f.close() - -_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info) - -def _patch_egg_dir(path): - # let's check if it's already patched - pkg_info = os.path.join(path, 
'EGG-INFO', 'PKG-INFO') - if os.path.exists(pkg_info): - if _same_content(pkg_info, SETUPTOOLS_PKG_INFO): - log.warn('%s already patched.', pkg_info) - return False - _rename_path(path) - os.mkdir(path) - os.mkdir(os.path.join(path, 'EGG-INFO')) - pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') - f = open(pkg_info, 'w') - try: - f.write(SETUPTOOLS_PKG_INFO) - finally: - f.close() - return True - -_patch_egg_dir = _no_sandbox(_patch_egg_dir) - -def _before_install(): - log.warn('Before install bootstrap.') - _fake_setuptools() - - -def _under_prefix(location): - if 'install' not in sys.argv: - return True - args = sys.argv[sys.argv.index('install')+1:] - for index, arg in enumerate(args): - for option in ('--root', '--prefix'): - if arg.startswith('%s=' % option): - top_dir = arg.split('root=')[-1] - return location.startswith(top_dir) - elif arg == option: - if len(args) > index: - top_dir = args[index+1] - return location.startswith(top_dir) - if arg == '--user' and USER_SITE is not None: - return location.startswith(USER_SITE) - return True - - -def _fake_setuptools(): - log.warn('Scanning installed packages') - try: - import pkg_resources - except ImportError: - # we're cool - log.warn('Setuptools or Distribute does not seem to be installed.') - return - ws = pkg_resources.working_set - try: - setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools', - replacement=False)) - except TypeError: - # old distribute API - setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools')) - - if setuptools_dist is None: - log.warn('No setuptools distribution found') - return - # detecting if it was already faked - setuptools_location = setuptools_dist.location - log.warn('Setuptools installation detected at %s', setuptools_location) - - # if --root or --preix was provided, and if - # setuptools is not located in them, we don't patch it - if not _under_prefix(setuptools_location): - log.warn('Not patching, --root or --prefix is installing Distribute' - ' in another location') - return - - # let's see if its an egg - if not setuptools_location.endswith('.egg'): - log.warn('Non-egg installation') - res = _remove_flat_installation(setuptools_location) - if not res: - return - else: - log.warn('Egg installation') - pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO') - if (os.path.exists(pkg_info) and - _same_content(pkg_info, SETUPTOOLS_PKG_INFO)): - log.warn('Already patched.') - return - log.warn('Patching...') - # let's create a fake egg replacing setuptools one - res = _patch_egg_dir(setuptools_location) - if not res: - return - log.warn('Patched done.') - _relaunch() - - -def _relaunch(): - log.warn('Relaunching...') - # we have to relaunch the process - # pip marker to avoid a relaunch bug - if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']: - sys.argv[0] = 'setup.py' - args = [sys.executable] + sys.argv - sys.exit(subprocess.call(args)) - - -def _extractall(self, path=".", members=None): - """Extract all members from the archive to the current working - directory and set owner, modification time and permissions on - directories afterwards. `path' specifies a different directory - to extract to. `members' is optional and must be a subset of the - list returned by getmembers(). - """ - import copy - import operator - from tarfile import ExtractError - directories = [] - - if members is None: - members = self - - for tarinfo in members: - if tarinfo.isdir(): - # Extract directories with a safe mode. 
- directories.append(tarinfo) - tarinfo = copy.copy(tarinfo) - tarinfo.mode = 448 # decimal for oct 0700 - self.extract(tarinfo, path) - - # Reverse sort directories. - if sys.version_info < (2, 4): - def sorter(dir1, dir2): - return cmp(dir1.name, dir2.name) - directories.sort(sorter) - directories.reverse() - else: - directories.sort(key=operator.attrgetter('name'), reverse=True) - - # Set correct owner, mtime and filemode on directories. - for tarinfo in directories: - dirpath = os.path.join(path, tarinfo.name) - try: - self.chown(tarinfo, dirpath) - self.utime(tarinfo, dirpath) - self.chmod(tarinfo, dirpath) - except ExtractError: - e = sys.exc_info()[1] - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) - - -def main(argv, version=DEFAULT_VERSION): - """Install or upgrade setuptools and EasyInstall""" - tarball = download_setuptools() - _install(tarball) - - -if __name__ == '__main__': - main(sys.argv[1:]) diff --git a/doc/source/conf.py b/doc/source/conf.py index a6ff7f7..037ce47 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -12,7 +12,7 @@ # serve to show the default. import sys, os -sys.path.insert(0, os.path.abspath('../../lib')) +sys.path.insert(0, os.path.abspath('../../stistools')) #from stsci.sphinxext.conf import * import stsci_rtd_theme diff --git a/setup.cfg b/setup.cfg index bc9ead2..2d902eb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,36 +1,9 @@ -[metadata] -name = stistools -version = 1.1 -author = Paul Barrett, Phil Hodge -author-email = help@stsci.edu -home-page = http://www.stsci.edu/resources/software_hardware/stsci_python -summary = Tools for STIS (Space Telescope Imaging Spectrograph) -classifier = - Intended Audience :: Science/Research - License :: OSI Approved :: BSD License - Operating System :: OS Independent - Programming Language :: Python - Topic :: Scientific/Engineering :: Astronomy - Topic :: Software Development :: Libraries :: Python Modules -requires-python = >=2.6 -requires-dist = - stsci.tools - numpy (>=1.5.1) - astropy (>=0.3.1) - scipy (>=0.14) - -[files] -packages_root = lib -packages = stistools -data_files = - stistools/pars = lib/stistools/pars/* - -[install_data] -pre-hook.glob-data-files = stsci.distutils.hooks.glob_data_files - -[global] -setup_hooks = - stsci.distutils.hooks.use_packages_root - stsci.distutils.hooks.tag_svn_revision - stsci.distutils.hooks.version_setup_hook - +[bdist_wheel] +# This flag says that the code is written to work on both Python 2 and Python +# 3. If at all possible, it is good practice to do this. If you cannot, you +# will need to generate wheels for each Python version that you support. 
+universal=1 + +[tool:pytest] +minversion = 3 +norecursedirs = .eggs build docs/_build relic \ No newline at end of file diff --git a/setup.py b/setup.py index 26bc0d2..b0e1c36 100755 --- a/setup.py +++ b/setup.py @@ -1,15 +1,64 @@ #!/usr/bin/env python +import os +import pkgutil +import sys +from glob import glob +from setuptools import setup, find_packages, Extension +from subprocess import check_call, CalledProcessError -try: - from setuptools import setup -except ImportError: - from distribute_setup import use_setuptools - use_setuptools() - from setuptools import setup + +if not pkgutil.find_loader('relic'): + relic_local = os.path.exists('relic') + relic_submodule = (relic_local and + os.path.exists('.gitmodules') and + not os.listdir('relic')) + try: + if relic_submodule: + check_call(['git', 'submodule', 'update', '--init', '--recursive']) + elif not relic_local: + check_call(['git', 'clone', 'https://github.com/spacetelescope/relic.git']) + + sys.path.insert(1, 'relic') + except CalledProcessError as e: + print(e) + exit(1) + +import relic.release + +version = relic.release.get_info() +relic.release.write_template(version, 'stistools') setup( - setup_requires=['d2to1>=0.2.9', 'stsci.distutils>=0.3.2'], - d2to1=True, - use_2to3=False, - zip_safe=False + name = 'stistools', + version = version.pep386, + author = 'Paul Barrett, Phil Hodge', + author_email = 'help@stsci.edu', + description = 'Tools for STIS (Space Telescope Imaging Spectrograph)', + url = 'https://github.com/spacetelescope/stistools', + classifiers = [ + 'Intended Audience :: Science/Research', + 'License :: OSI Approved :: BSD License', + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Topic :: Scientific/Engineering :: Astronomy', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + install_requires = [ + 'astropy', + 'nose', + 'numpy', + 'numpydoc', + 'scipy', + 'sphinx', + 'sphinx_rtd_theme', + 'stsci_rtd_theme', + 'stsci.tools', + ], + packages = find_packages(), + package_data = { + 'stistools': [ + 'pars/*', + 'LICENSE.txt', + ] + }, ) diff --git a/lib/stistools/__init__.py b/stistools/__init__.py similarity index 100% rename from lib/stistools/__init__.py rename to stistools/__init__.py diff --git a/lib/stistools/basic2d.py b/stistools/basic2d.py similarity index 92% rename from lib/stistools/basic2d.py rename to stistools/basic2d.py index d46d6fd..e8e86c6 100755 --- a/lib/stistools/basic2d.py +++ b/stistools/basic2d.py @@ -1,13 +1,13 @@ #! /usr/bin/env python -from __future__ import division, print_function # confidence unknown + import os import sys import getopt import glob import subprocess -from stsci.tools import parseinput,teal +from stsci.tools import parseinput, teal """ Perform basic 2-D calibration of STIS data. @@ -43,6 +43,7 @@ __vdate__ = "13-November-2013" __author__ = "Phil Hodge, STScI, November 2013." 
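
The refactored module keeps the same calling convention as before the move out of lib/. A minimal use of the function defined below might look like this; the file names are hypothetical, and all calibration switches keep their defaults:

    from stistools import basic2d

    # Run basic 2-D calibration on a raw STIS image; writes the
    # flat-fielded product and returns a nonzero status on failure.
    status = basic2d.basic2d('o66p01020_raw.fits',
                             output='o66p01020_flt.fits',
                             verbose=True)
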
+ def main(args): if len(args) < 1: @@ -87,6 +88,7 @@ def main(args): sys.exit(status) + def prtOptions(): """Print a list of command-line options and arguments.""" @@ -102,15 +104,16 @@ def prtOptions(): print("One or more output file names may be specified (the same number") print(" as the input file names).") + def basic2d(input, output="", outblev="", - dqicorr="perform", atodcorr="omit", blevcorr="perform", - doppcorr="perform", - lorscorr="perform", glincorr="perform", lflgcorr="perform", - biascorr="perform", darkcorr="perform", flatcorr="perform", - shadcorr="omit", photcorr="perform", statflag=True, - darkscale="", - verbose=False, timestamps=False, - trailer="", print_version=False, print_revision=False): + dqicorr="perform", atodcorr="omit", blevcorr="perform", + doppcorr="perform", + lorscorr="perform", glincorr="perform", lflgcorr="perform", + biascorr="perform", darkcorr="perform", flatcorr="perform", + shadcorr="omit", photcorr="perform", statflag=True, + darkscale="", + verbose=False, timestamps=False, + trailer="", print_version=False, print_revision=False): """Perform basic 2-D calibration of STIS raw data. Some calibration steps are relevant only for CCD or only for MAMA, and @@ -241,7 +244,7 @@ def basic2d(input, output="", outblev="", files = glob.glob(in2) infiles.extend(files) if input1 and not infiles: - print("No file name matched the string '%s'" % input) + print("No file name matched the string '{}'".format(input)) return 2 if output: @@ -272,8 +275,8 @@ def basic2d(input, output="", outblev="", n_infiles = len(infiles) if outfiles and len(outfiles) != n_infiles: same_length = False - print("You specified %d input files but %d output files." % - (n_infiles, len(outfiles))) + print("You specified {} input files but {} output files.".format( + n_infiles, len(outfiles))) print("The number of input and output files must be the same.") if outblev_txt and len(outblev_txt) != n_infiles: same_length = False @@ -283,7 +286,7 @@ def basic2d(input, output="", outblev="", if trailer: if verbose and os.access(trailer, os.F_OK): - print("Appending to trailer file %s" % trailer) + print("Appending to trailer file {}".format(trailer)) f_trailer = open(trailer, "a") fd_trailer = f_trailer.fileno() else: @@ -333,14 +336,14 @@ def basic2d(input, output="", outblev="", arglist.append("-stat") if verbose: - print("Running basic2d on %s" % infile) - print(" %s" % str(arglist)) + print("Running basic2d on {}".format(infile)) + print(" {}".format(str(arglist))) status = subprocess.call(arglist, stdout=fd_trailer, stderr=subprocess.STDOUT) if status: cumulative_status = 1 if verbose: - print("Warning: status = %d" % status) + print("Warning: status = {}".format(status)) if f_trailer is not None: f_trailer.close() @@ -351,10 +354,12 @@ def basic2d(input, output="", outblev="", # Interfaces used by TEAL # #-------------------------# + def getHelpAsString(fulldoc=True): """Return documentation on the basic2d function.""" return basic2d.__doc__ + def run(configobj=None): """TEAL interface for the basic2d function.""" basic2d(input=configobj["input"], diff --git a/lib/stistools/calstis.py b/stistools/calstis.py similarity index 94% rename from lib/stistools/calstis.py rename to stistools/calstis.py index a77082e..6adc08b 100755 --- a/lib/stistools/calstis.py +++ b/stistools/calstis.py @@ -1,13 +1,12 @@ #! 
/usr/bin/env python -from __future__ import division, print_function # confidence unknown import os import sys import getopt import glob import subprocess -from stsci.tools import parseinput,teal +from stsci.tools import parseinput, teal """ Calibrate STIS data. @@ -49,6 +48,7 @@ __vdate__ = "13-November-2013" __author__ = "Phil Hodge, STScI, November 2013." + def main(args): if len(args) < 1: @@ -99,6 +99,7 @@ def main(args): sys.exit(status) + def prtOptions(): """Print a list of command-line options and arguments.""" @@ -116,6 +117,7 @@ def prtOptions(): print("An output directory (include a trailing '/') or a root name for") print(" the output files may be specified.") + def calstis(input, wavecal="", outroot="", savetmp=False, verbose=False, timestamps=False, trailer="", print_version=False, print_revision=False): @@ -189,12 +191,12 @@ def calstis(input, wavecal="", outroot="", savetmp=False, files = glob.glob(in2) infiles.extend(files) if input1 and not infiles: - print("No file name matched the string '%s'" % input) + print("No file name matched the string '{}'".format(input)) return 2 if trailer: if verbose and os.access(trailer, os.F_OK): - print("Appending to trailer file %s" % trailer) + print("Appending to trailer file {}".format(trailer)) f_trailer = open(trailer, "a") fd_trailer = f_trailer.fileno() else: @@ -218,14 +220,14 @@ def calstis(input, wavecal="", outroot="", savetmp=False, arglist.append("%s" % wavecal) if verbose: - print("Running calstis on %s" % infile) - print(" %s" % str(arglist)) + print("Running calstis on {}".format(infile)) + print(" {}".format(str(arglist))) status = subprocess.call(arglist, stdout=fd_trailer, stderr=subprocess.STDOUT) if status: cumulative_status = 1 if verbose: - print("Warning: status = %d" % status) + print("Warning: status = {}".format(status)) if f_trailer is not None: f_trailer.close() @@ -240,6 +242,7 @@ def getHelpAsString(fulldoc=True): """Return documentation on the calstis function.""" return calstis.__doc__ + def run(configobj=None): """TEAL interface for the calstis function.""" calstis(input=configobj["input"], diff --git a/lib/stistools/evaldisp.py b/stistools/evaldisp.py similarity index 90% rename from lib/stistools/evaldisp.py rename to stistools/evaldisp.py index bb5f87c..4cbdd75 100644 --- a/lib/stistools/evaldisp.py +++ b/stistools/evaldisp.py @@ -1,6 +1,6 @@ -from __future__ import division # confidence high -def newton (x, coeff, cenwave, niter=4): + +def newton(x, coeff, cenwave, niter=4): """Return the wavelength corresponding to pixel x. The dispersion solution is evaluated iteratively, and the slope @@ -28,18 +28,19 @@ def newton (x, coeff, cenwave, niter=4): """ wl = cenwave - x0 = evalDisp (coeff, wl) + x0 = evalDisp(coeff, wl) delta_wl = 1. # one Angstrom - for i in range (niter): - x1 = evalDisp (coeff, wl+delta_wl) + for i in range(niter): + x1 = evalDisp(coeff, wl+delta_wl) dispersion = delta_wl / (x1 - x0) wl += dispersion * (x - x0) - x0 = evalDisp (coeff, wl) + x0 = evalDisp(coeff, wl) return wl -def evalDisp (coeff, wl): + +def evalDisp(coeff, wl): """Return the pixel corresponding to wavelength wl. 
Notes diff --git a/lib/stistools/gettable.py b/stistools/gettable.py similarity index 73% rename from lib/stistools/gettable.py rename to stistools/gettable.py index 3a604dd..f2d36b3 100644 --- a/lib/stistools/gettable.py +++ b/stistools/gettable.py @@ -1,16 +1,13 @@ -from __future__ import division, print_function # confidence high -import os import math - -import numpy as N - -from astropy.io import fits as pyfits +import numpy as np +from astropy.io import fits STRING_WILDCARD = "ANY" INT_WILDCARD = -1 -def getTable (table, filter, sortcol=None, - exactly_one=False, at_least_one=False): + +def getTable(table, filter, sortcol=None, + exactly_one=False, at_least_one=False): """Return row(s) of a table that match the filter. Rows that match every item in the filter (a dictionary of @@ -54,7 +51,7 @@ def getTable (table, filter, sortcol=None, """ - fd = pyfits.open (table, mode="readonly") + fd = fits.open(table, mode="readonly") data = fd[1].data # There will be one element of select_arrays for each non-trivial @@ -65,69 +62,71 @@ def getTable (table, filter, sortcol=None, if filter[key] == STRING_WILDCARD: continue - column = data.field (key) - if len (column) == 0: + column = data.field(key) + if len(column) == 0: return None selected = (column == filter[key]) # Test for for wildcards in the table. wild = None - if isinstance (column, N.chararray): + if isinstance(column, np.chararray): wild = (column == STRING_WILDCARD) - elif isinstance (column[0], N.integer): + elif isinstance(column[0], np.integer): wild = (column == INT_WILDCARD) if wild is not None: - selected = N.logical_or (selected, wild) + selected = np.logical_or(selected, wild) - select_arrays.append (selected) + select_arrays.append(selected) - if len (select_arrays) > 0: + if len(select_arrays) > 0: selected = select_arrays[0] for sel_i in select_arrays[1:]: - selected = N.logical_and (selected, sel_i) + selected = np.logical_and(selected, sel_i) newdata = data[selected] else: newdata = fd[1].data.copy() fd.close() - nselect = len (newdata) + nselect = len(newdata) if nselect < 1: newdata = None if (exactly_one or at_least_one) and nselect < 1: message = "Table has no matching row;\n" + \ "table name is " + table + "\n" + \ - "row selection is " + repr (filter) + "row selection is " + repr(filter) raise RuntimeError(message) if exactly_one and nselect > 1: print("Table has more than one matching row;") print("table name is", table) - print("row selection is", repr (filter)) + print("row selection is", repr(filter)) print("only the first will be used.") - if len (newdata) > 1 and sortcol is not None: - newdata = sortrows (newdata, sortcol) + if len(newdata) > 1 and sortcol is not None: + newdata = sortrows(newdata, sortcol) return newdata -def sortrows (rowdata, sortcol, ascend=True): + +def sortrows(rowdata, sortcol, ascend=True): """Return a copy of rowdata, sorted on sortcol.""" - if len (rowdata) <= 1: + if len(rowdata) <= 1: return rowdata - column = rowdata.field (sortcol) + column = rowdata.field(sortcol) index = column.argsort() if not ascend: - ind = list (index) + ind = list(index) ind.reverse() - index = N.array (ind) + index = np.array(ind) return rowdata[index] -def rotateTrace (trace_info, expstart): + +def rotateTrace(trace_info, expstart): """Rotate a2displ, if MJD and DEGPERYR are in the trace table. Parameters @@ -146,18 +145,18 @@ def rotateTrace (trace_info, expstart): # If these columns are not in the table, just return. 
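
As a usage note for the cleaned-up module: getTable selects reference-table rows by matching a dictionary of column/value pairs, honoring the ANY and -1 wildcards handled above. A minimal sketch, with a hypothetical table name and filter:

    from stistools.gettable import getTable

    # Return the matching row(s); with exactly_one=True a RuntimeError
    # is raised if nothing matches.
    row = getTable('apdestab.fits', {'APERTURE': '52X0.2'},
                   exactly_one=True)
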
names = [] for name in trace_info.names: - names.append (name.lower()) + names.append(name.lower()) if "degperyr" in names and "mjd" in names: - degperyr = trace_info.field ("degperyr") - mjd = trace_info.field ("mjd") + degperyr = trace_info.field("degperyr") + mjd = trace_info.field("mjd") else: return - a2displ = trace_info.field ("a2displ") - nelem = trace_info.field ("nelem") - for i in range (len (trace_info)): + a2displ = trace_info.field("a2displ") + nelem = trace_info.field("nelem") + for i in range(len(trace_info)): angle = (degperyr[i] * (expstart - mjd[i]) / 365.25) - tan_angle = math.tan (angle * math.pi / 180.) - x = N.arange (nelem[i], dtype=N.float64) + tan_angle = math.tan(angle * math.pi / 180.) + x = np.arange(nelem[i], dtype=np.float64) x -= (nelem[i] // 2) a2displ[i][:] -= (x * tan_angle) diff --git a/lib/stistools/mktrace.py b/stistools/mktrace.py similarity index 66% rename from lib/stistools/mktrace.py rename to stistools/mktrace.py index 34984db..9dd84e9 100644 --- a/lib/stistools/mktrace.py +++ b/stistools/mktrace.py @@ -26,10 +26,10 @@ - Python version: Nadia Dencheva """ -from __future__ import division, print_function # confidence high -import numpy as N -from astropy.io import fits as pyfits + +import numpy as np +from astropy.io import fits import os.path from scipy import signal from scipy import ndimage as ni @@ -37,22 +37,18 @@ from stsci.tools import gfit, linefit from stsci.tools import fileutil as fu -__version__ = '1.2' -__vdate__ = '2016-03-02' +__version__ = '2.0.0' +__vdate__ = '2017-03-20' def mktrace(fname, tracecen=0.0, weights=None): """ Refine a stis spectroscopic trace. """ - #import time - - #start=time.time() - try: - hdulist = pyfits.open(fname) + hdulist = fits.open(fname) except IOError: - print("\nUNABLE TO OPEN FITS FILE: %s \n" % fname) + print("\nUNABLE TO OPEN FITS FILE: {} \n".format(fname)) return data = hdulist[1].data @@ -63,45 +59,44 @@ def mktrace(fname, tracecen=0.0, weights=None): kwinfo = getKWInfo(hdr0, hdr1) if kwinfo['instrument'] != 'STIS': print("This trace tool works only on STIS spectroscopic observations.\n") - print("Not processing file %s.\n" %fname) + print("Not processing file {}.\n".format(fname)) return sizex, sizey = data.shape - if weights == None: - wei = N.ones(sizey) + if weights is None: + wei = np.ones(sizey) else: if not iterable(weights) or not iterable(weights[0]): print("Weights must be a list of tuples, for example:\n") - print("weights=[(23,45),(300,670)] \n") + print("weights=[(23, 45),(300, 670)] \n") return - wei = N.zeros(sizey) + wei = np.zeros(sizey) - for i in N.arange(len(weights)): - for j in N.arange(weights[i][0], weights[i][1]): + for i in np.arange(len(weights)): + for j in np.arange(weights[i][0], weights[i][1]): wei[j] = 1 - #wind are weights indices in the image frame which may be a subarray - wind = N.nonzero(wei)[0] + # wind are weights indices in the image frame which may be a subarray + wind = np.nonzero(wei)[0] tr = Trace(fname, kwinfo) - a2center, trace1024 = tr.generateTrace(data,kwinfo, tracecen=tracecen, wind=wind) - #compute the full frame a2center + a2center, trace1024 = tr.generateTrace(data, kwinfo, tracecen=tracecen, wind=wind) + # compute the full frame a2center ffa2center = a2center*kwinfo['binaxis2'] tr_ind, a2disp_ind = tr.getTraceInd(ffa2center) - #print 'tr_ind', tr_ind tr2 = tr.readTrace(tr_ind) if tr_ind != a2disp_ind[0]: - tr1 = tr.readTrace(tr_ind -1) + tr1 = tr.readTrace(tr_ind - 1) interp_trace = trace_interp(tr1, tr2, ffa2center) else: interp_trace = tr2 - 
#convert the weights array into full frame - ind = N.nonzero(wei)[0] * kwinfo['binaxis1'] - w = N.zeros(1024) + # convert the weights array into full frame + ind = np.nonzero(wei)[0] * kwinfo['binaxis1'] + w = np.zeros(1024) w[ind] = 1 - X = N.arange(1024).astype(N.float) + X = np.arange(1024).astype(np.float) sparams = linefit.linefit(X, trace1024, weights=w) rparams = linefit.linefit(X, interp_trace, weights=w) sciline = sparams[0] + sparams[1] * X @@ -109,8 +104,8 @@ def mktrace(fname, tracecen=0.0, weights=None): deltaline = sciline - refline - #create a complete trace similar to a row in a _1dt file - #used only for debugging + # create a complete trace similar to a row in a _1dt file + # used only for debugging tr._a2displ = trace1024 tr._a1center = tr1['a1center'] tr._a2center = a2center @@ -118,12 +113,13 @@ def mktrace(fname, tracecen=0.0, weights=None): tr._pedigree = tr1['pedigree'] tr._snr_thresh = tr1['snr_thresh'] - tr.writeTrace(fname, sciline, refline, interp_trace, trace1024, tr_ind, a2disp_ind) + tr.writeTrace(fname, sciline, refline, interp_trace, + trace1024, tr_ind, a2disp_ind) #print 'time', time.time()-start #the minus sign is for consistency withthe way x2d reports the rotation - print("Traces were rotated by %f degrees \n" % (-(sparams[1]-rparams[1])*180 / N.pi)) - print('trace is centered on row %f' % tr._a2center) + print("Traces were rotated by {:.10f} degrees \n".format((-(sparams[1]-rparams[1])*180 / np.pi))) + print('trace is centered on row {:.10f}'.format(tr._a2center)) return tr @@ -135,35 +131,35 @@ def iterable(v): return False -def interp(y,n): +def interp(y, n): """ - Given a 1D array of size m, interpolates it to a size n (m a2center)[0][0] + ind[0][0] + ind = np.nonzero(a2disp_ind) + i = np.nonzero(self.sptrctab[ind].field('A2CENTER') > a2center)[0][0] + ind[0][0] return i, a2disp_ind @@ -269,8 +263,9 @@ def readTrace(self, tr_ind): return tr + def writeTrace(self, fname, sciline, refline, interp_trace, trace1024, + tr_ind, a2disp_ind): - def writeTrace(self, fname, sciline, refline, interp_trace, trace1024, tr_ind, a2disp_ind): """ The 'writeTrace' method performs the following steps: @@ -290,105 +285,111 @@ def writeTrace(self, fname, sciline, refline, interp_trace, trace1024, tr_ind, a infile = fname.split('.') newname = infile[0] + '_1dt.' + infile[1] - #refine all traces for this CENWAVE, OPT_ELEM + # refine all traces for this CENWAVE, OPT_ELEM fu.copyFile(fpath, newname) - hdulist = pyfits.open(newname, mode='update') + hdulist = fits.open(newname, mode='update') tab = hdulist[1].data - ind = N.nonzero(a2disp_ind)[0] - for i in N.arange(ind[0], ind[-1]+1): - tab[i].setfield('A2DISPL', tab[i].field('A2DISPL') + (sciline-refline)) + ind = np.nonzero(a2disp_ind)[0] + for i in np.arange(ind[0], ind[-1] + 1): + tab[i].setfield('A2DISPL', tab[i].field('A2DISPL') + (sciline - refline)) if 'DEGPERYR' in tab.names: - for i in N.arange(ind[0], ind[-1]+1): + for i in np.arange(ind[0], ind[-1] + 1): tab[i].setfield('DEGPERYR', 0.0) hdulist.flush() hdulist.close() - #update SPTRCTAB keyword in the science file primary header - hdulist = pyfits.open(fname, mode='update') + # update SPTRCTAB keyword in the science file primary header + hdulist = fits.open(fname, mode='update') hdr0 = hdulist[0].header hdr0['SPTRCTAB'] = newname hdulist.close() - #write out the fit to the interpolated trace ('_interpfit' file) - refhdu = pyfits.PrimaryHDU(refline) - refname=infile[0] + '_1dt_interpfit.' 
+ infile[1] + # write out the fit to the interpolated trace ('_interpfit' file) + refhdu = fits.PrimaryHDU(refline) + refname = infile[0] + '_1dt_interpfit.' + infile[1] if os.path.exists(refname): os.remove(refname) refhdu.writeto(refname) - #write out the interpolated trace ('_interp' file) - inthdu = pyfits.PrimaryHDU(interp_trace) - intname=infile[0] + '_1dt_interp.' + infile[1] + # write out the interpolated trace ('_interp' file) + inthdu = fits.PrimaryHDU(interp_trace) + intname = infile[0] + '_1dt_interp.' + infile[1] if os.path.exists(intname): os.remove(intname) inthdu.writeto(intname) - #write out the the fit to the science trace ('_scifit' file) - scihdu = pyfits.PrimaryHDU(sciline) + # write out the the fit to the science trace ('_scifit' file) + scihdu = fits.PrimaryHDU(sciline) sciname = infile[0] + '_1dt_scifit.' + infile[1] if os.path.exists(sciname): os.unlink(sciname) scihdu.writeto(sciname) - #write out the science trace ('_sci' file) - trhdu = pyfits.PrimaryHDU(trace1024) + # write out the science trace ('_sci' file) + trhdu = fits.PrimaryHDU(trace1024) trname = infile[0] + '_1dt_sci.' + infile[1] if os.path.exists(trname): os.unlink(trname) trhdu.writeto(trname) - def generateTrace(self, data, kwinfo, tracecen=0.0, wind=None): """ Generates a trace from a science file. """ - if kwinfo['sizaxis2'] != None and kwinfo['sizaxis2'] < 1023: + if kwinfo['sizaxis2'] is not None and kwinfo['sizaxis2'] < 1023: subarray = True - else: subarray = False + else: + subarray = False if tracecen == 0: if subarray: - _tracecen = kwinfo['sizaxis2']/2.0 + _tracecen = kwinfo['sizaxis2'] / 2.0 else: _tracecen = kwinfo['crpix2'] else: _tracecen = tracecen - sizex,sizey = data.shape + sizex, sizey = data.shape subim_size = 40 y1 = int(_tracecen - subim_size/2.) y2 = int(_tracecen + subim_size/2.) - if y1 < 0: y1 = 0 - if y2 > (sizex -1): y2 = sizex - 1 - specimage = data[y1:y2+1,:] + if y1 < 0: + y1 = 0 + if y2 > (sizex -1): + y2 = sizex - 1 + specimage = data[y1:y2+1, :] smoytrace = self.gFitTrace(specimage, y1, y2) - yshift = int(N.median(smoytrace) - 20) + yshift = int(np.median(smoytrace) - 20) y1 = y1 + yshift y2 = y2 + yshift - if (y1 < 0): y1 = 0 - if y2 > sizex: y2 = sizex - specimage = data[y1:y2+1,:] + if y1 < 0: + y1 = 0 + if y2 > sizex: + y2 = sizex + specimage = data[y1:y2+1, :] smoytrace = self.gFitTrace(specimage, y1, y2) - med11smoytrace = ni.median_filter(smoytrace,11) + med11smoytrace = ni.median_filter(smoytrace, 11) med11smoytrace[0] = med11smoytrace[2] diffmed = abs(smoytrace - med11smoytrace) - tolerence = 3 * N.median(abs(smoytrace[wind] - med11smoytrace[wind])) - if tolerence < 0.1: tolerence = 0.1 - badpoint = N.where(diffmed > tolerence)[0] + tolerence = 3 * np.median(abs(smoytrace[wind] - med11smoytrace[wind])) + if tolerence < 0.1: + tolerence = 0.1 + badpoint = np.where(diffmed > tolerence)[0] if len(badpoint) != 0: - N.put(smoytrace, badpoint, med11smoytrace[badpoint]) + np.put(smoytrace, badpoint, med11smoytrace[badpoint]) - #convolve with a gaussian to smooth it + # Convolve with a gaussian to smooth it. fwhm = 10. - sigma = fwhm/2.355 + sigma = fwhm / 2.355 gaussconvxsmoytrace = ni.gaussian_filter1d(smoytrace, sigma) - #compute the trace center as the median of the pixels with nonzero weights - tracecen = N.median(gaussconvxsmoytrace[wind]) + # Compute the trace center as the median of the pixels + # with nonzero weights. 
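
The smoothing applied above reduces to three steps: median-filter the fitted column centers, replace outliers, and Gaussian-smooth. A self-contained sketch of that chain, assuming a hypothetical 1-D input array yc of per-column trace centers:

    import numpy as np
    from scipy import ndimage as ni

    def smooth_trace(yc, fwhm=10.0):
        # Median-filter the fitted centers, clip points that deviate by
        # more than three times the median deviation, then smooth with
        # a Gaussian of the given FWHM.
        med = ni.median_filter(yc, 11)
        dev = np.abs(yc - med)
        yc = np.where(dev > 3 * np.median(dev), med, yc)
        return ni.gaussian_filter1d(yc, fwhm / 2.355)
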
+ tracecen = np.median(gaussconvxsmoytrace[wind]) gaussconvxsmoytrace = gaussconvxsmoytrace - tracecen - trace1024 = interp(gaussconvxsmoytrace,1024) * kwinfo['binaxis2'] - tracecen = tracecen + y1 +1.0 + trace1024 = interp(gaussconvxsmoytrace, 1024) * kwinfo['binaxis2'] + tracecen = tracecen + y1 + 1.0 if subarray: tracecen = tracecen - kwinfo['ltv2'] self.trace1024 = trace1024 @@ -399,15 +400,15 @@ def gFitTrace(self, specimage, y1, y2): Fit a gaussian to each column of an image. """ - sizex,sizey = specimage.shape - smoytrace = N.zeros(sizey).astype(N.float) + sizex, sizey = specimage.shape + smoytrace = np.zeros(sizey).astype(np.float) boxcar_kernel = signal.boxcar(3) / 3.0 - for c in N.arange(sizey): - col = specimage[:,c] - col = col - N.median(col) - smcol = ni.convolve(col, boxcar_kernel).astype(N.float) + for c in np.arange(sizey): + col = specimage[:, c] + col = col - np.median(col) + smcol = ni.convolve(col, boxcar_kernel).astype(np.float) fit = gfit.gfit1d(smcol, quiet=1, maxiter=15) smoytrace[c] = fit.params[1] - return N.array(smoytrace) + return np.array(smoytrace) diff --git a/lib/stistools/ocrreject.py b/stistools/ocrreject.py similarity index 93% rename from lib/stistools/ocrreject.py rename to stistools/ocrreject.py index 0d4d347..1d46e15 100755 --- a/lib/stistools/ocrreject.py +++ b/stistools/ocrreject.py @@ -1,13 +1,13 @@ #! /usr/bin/env python -from __future__ import division, print_function # confidence unknown + import os import sys import getopt import glob import subprocess -from stsci.tools import parseinput,teal +from stsci.tools import parseinput, teal """ Add STIS exposures, rejecting cosmic rays. @@ -43,6 +43,7 @@ __vdate__ = "13-November-2013" __author__ = "Phil Hodge, STScI, November 2013." + def main(args): if len(args) < 2: @@ -84,6 +85,7 @@ def main(args): sys.exit(status) + def prtOptions(): """Print a list of command-line options and arguments.""" @@ -97,6 +99,7 @@ def prtOptions(): print(" (enclosed in quotes if more than one file name is specified") print(" and/or if wildcards are used) and one output file name.") + def ocrreject(input, output, all=True, crrejtab="", scalense="", initgues="", skysub="", crsigmas="", @@ -215,7 +218,7 @@ def ocrreject(input, output, files = glob.glob(in2) infiles.extend(files) if input1 and not infiles: - print("No file name matched the string '%s'" % input) + print("No file name matched the string '{}'".format(input)) return 2 outfiles = [] @@ -230,21 +233,21 @@ def ocrreject(input, output, n_outfiles = len(outfiles) if all: if n_outfiles != 1: - print("You specified %d output files; when all is True," % - n_outfiles) + print("You specified {} output files; when all is True,".format( + n_outfiles)) print("output must be exactly one file name.") return 2 else: n_infiles = len(infiles) if n_outfiles != n_infiles: - print("You specified %d input files but %d output files;" % - (n_infiles, n_outfiles)) + print("You specified {} input files but {} output files;".format( + n_infiles, n_outfiles)) print("the number of input and output files must be the same.") return 2 if trailer: if verbose and os.access(trailer, os.F_OK): - print("Appending to trailer file %s" % trailer) + print("Appending to trailer file {}".format(trailer)) f_trailer = open(trailer, "a") fd_trailer = f_trailer.fileno() else: @@ -303,13 +306,13 @@ def ocrreject(input, output, arglist.extend(optional_args) if verbose: - print("'%s'" % str(arglist)) - print("Running ocrreject on %s" % infilestr) - del(infilestr) + print("'{}'".format(str(arglist))) + 
print("Running ocrreject on {}".format(infilestr)) + del infilestr status = subprocess.call(arglist, stdout=fd_trailer, stderr=subprocess.STDOUT) if status and verbose: - print("Warning: status = %d" % status) + print("Warning: status = {}".format(status)) cumulative_status = status else: @@ -326,14 +329,14 @@ def ocrreject(input, output, arglist.extend(optional_args) if verbose: - print("Running ocrreject on %s" % infile) - print(" %s" % str(arglist)) + print("Running ocrreject on {}".format(infile)) + print(" {}".format(str(arglist))) status = subprocess.call(arglist, stdout=fd_trailer, stderr=subprocess.STDOUT) if status: cumulative_status = 1 if verbose: - print("Warning: status = %d" % status) + print("Warning: status = {}".format(status)) if f_trailer is not None: f_trailer.close() @@ -344,10 +347,12 @@ def ocrreject(input, output, # Interfaces used by TEAL # #-------------------------# + def getHelpAsString(fulldoc=True): """Return documentation on the ocrreject function.""" return ocrreject.__doc__ + def run(configobj=None): """TEAL interface for the ocrreject function.""" ocrreject(input=configobj["input"], diff --git a/lib/stistools/pars/basic2d.cfg b/stistools/pars/basic2d.cfg similarity index 100% rename from lib/stistools/pars/basic2d.cfg rename to stistools/pars/basic2d.cfg diff --git a/lib/stistools/pars/basic2d.cfgspc b/stistools/pars/basic2d.cfgspc similarity index 100% rename from lib/stistools/pars/basic2d.cfgspc rename to stistools/pars/basic2d.cfgspc diff --git a/lib/stistools/pars/calstis.cfg b/stistools/pars/calstis.cfg similarity index 100% rename from lib/stistools/pars/calstis.cfg rename to stistools/pars/calstis.cfg diff --git a/lib/stistools/pars/calstis.cfgspc b/stistools/pars/calstis.cfgspc similarity index 100% rename from lib/stistools/pars/calstis.cfgspc rename to stistools/pars/calstis.cfgspc diff --git a/lib/stistools/pars/ocrreject.cfg b/stistools/pars/ocrreject.cfg similarity index 100% rename from lib/stistools/pars/ocrreject.cfg rename to stistools/pars/ocrreject.cfg diff --git a/lib/stistools/pars/ocrreject.cfgspc b/stistools/pars/ocrreject.cfgspc similarity index 100% rename from lib/stistools/pars/ocrreject.cfgspc rename to stistools/pars/ocrreject.cfgspc diff --git a/lib/stistools/pars/wavecal.cfg b/stistools/pars/wavecal.cfg similarity index 100% rename from lib/stistools/pars/wavecal.cfg rename to stistools/pars/wavecal.cfg diff --git a/lib/stistools/pars/wavecal.cfgspc b/stistools/pars/wavecal.cfgspc similarity index 100% rename from lib/stistools/pars/wavecal.cfgspc rename to stistools/pars/wavecal.cfgspc diff --git a/lib/stistools/pars/x1d.cfg b/stistools/pars/x1d.cfg similarity index 100% rename from lib/stistools/pars/x1d.cfg rename to stistools/pars/x1d.cfg diff --git a/lib/stistools/pars/x1d.cfgspc b/stistools/pars/x1d.cfgspc similarity index 100% rename from lib/stistools/pars/x1d.cfgspc rename to stistools/pars/x1d.cfgspc diff --git a/lib/stistools/pars/x2d.cfg b/stistools/pars/x2d.cfg similarity index 100% rename from lib/stistools/pars/x2d.cfg rename to stistools/pars/x2d.cfg diff --git a/lib/stistools/pars/x2d.cfgspc b/stistools/pars/x2d.cfgspc similarity index 100% rename from lib/stistools/pars/x2d.cfgspc rename to stistools/pars/x2d.cfgspc diff --git a/lib/stistools/r_util.py b/stistools/r_util.py similarity index 63% rename from lib/stistools/r_util.py rename to stistools/r_util.py index bfc0511..fc9e516 100644 --- a/lib/stistools/r_util.py +++ b/stistools/r_util.py @@ -1,9 +1,10 @@ -from __future__ import division # confidence 
high import os import os.path import copy -def expandFileName (filename): + + +def expandFileName(filename): """Expand environment variable in a file name. If the input file name begins with either a Unix-style or IRAF-style @@ -13,31 +14,32 @@ def expandFileName (filename): Parameters ----------- - filename : string - a file name, possibly including an environment variable + filename : str + A file name, possibly including an environment variable. Returns -------- - fullname : string - the file name with environment variable expanded + fullname : str + The file name with environment variable expanded. """ - n = filename.find ("$") + n = filename.find("$") if n == 0: if filename != NOT_APPLICABLE: # Unix-style file name. - filename = os.path.expandvars (filename) + filename = os.path.expandvars(filename) elif n > 0: # IRAF-style file name. temp = "$" + filename[0:n] + os.sep + filename[n+1:] - filename = os.path.expandvars (temp) + filename = os.path.expandvars(temp) # If filename contains "//", delete all of them. double_sep = os.sep + os.sep - filename = filename.replace(double_sep,os.sep) + filename = filename.replace(double_sep, os.sep) return filename -def interpolate (x, values, xp): + +def interpolate(x, values, xp): """Interpolate. Linear interpolation is used. If the specified indpendent variable @@ -47,36 +49,36 @@ def interpolate (x, values, xp): Parameters ----------- x : a sequence object, e.g. an array, int or float - array of independent variable values + Array of independent variable values. values : a sequence object, e.g. an array (not character) - array of dependent variable values + Array of dependent variable values. xp : int or float - independent variable value at which to interpolate + Independent variable value at which to interpolate. Returns ------- interp_vals : the same type as one element of values - linearly interpolated value + Linearly interpolated value. """ - nvalues = len (values) + nvalues = len(values) if nvalues == 1 or xp <= x.item(0): - value = copy.deepcopy (values[0]) + value = copy.deepcopy(values[0]) elif xp >= x.item(nvalues-1): - value = copy.deepcopy (values[nvalues-1]) + value = copy.deepcopy(values[nvalues-1]) else: # search for independent variable values that bracket the specified xp - for i in range (nvalues-1): + for i in range(nvalues-1): x0 = x.item(i) x1 = x.item(i+1) if xp >= x0 and xp <= x1: if x0 == x1: - value = copy.deepcopy (values[i]) + value = copy.deepcopy(values[i]) else: - p = float (x1 - xp) - q = float (xp - x0) + p = float(x1 - xp) + q = float(xp - x0) value = (p * values[i] + q * values[i+1]) / (p + q) break diff --git a/lib/stistools/radialvel.py b/stistools/radialvel.py similarity index 71% rename from lib/stistools/radialvel.py rename to stistools/radialvel.py index 2fc53f6..38ca43b 100644 --- a/lib/stistools/radialvel.py +++ b/stistools/radialvel.py @@ -1,4 +1,3 @@ -from __future__ import division # confidence high import numpy as N DEG_RAD = N.pi / 180. # degrees to radians @@ -8,7 +7,8 @@ KM_AU = 1.4959787e8 # kilometers per astronomical unit SEC_DAY = 86400. # seconds per day -def radialVel (ra_targ, dec_targ, mjd): + +def radialVel(ra_targ, dec_targ, mjd): """Compute the heliocentric velocity of the Earth. This function computes the radial velocity of a target based on the @@ -35,23 +35,24 @@ def radialVel (ra_targ, dec_targ, mjd): # Convert target position to rectangular coordinate unit vector. 
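
radialVel combines three pieces: a unit vector toward the target, precession of that vector to the observation epoch, and a dot product with the Earth's velocity. Calling it is a one-liner; the coordinates and MJD below are made up:

    from stistools.radialvel import radialVel

    # Heliocentric radial velocity (km/s) toward a target at the given
    # J2000 position on the given Modified Julian Date.
    v_helio = radialVel(ra_targ=201.36, dec_targ=-43.02, mjd=51544.0)
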
ra = ra_targ * DEG_RAD dec = dec_targ * DEG_RAD - target = N.zeros (3, dtype=N.float64) - target[0] = N.cos (dec) * N.cos (ra) - target[1] = N.cos (dec) * N.sin (ra) - target[2] = N.sin (dec) + target = N.zeros(3, dtype=N.float64) + target[0] = N.cos(dec) * N.cos(ra) + target[1] = N.cos(dec) * N.sin(ra) + target[2] = N.sin(dec) # Precess the target coordinates from J2000 to the current date. - target = precess (mjd, target) + target = precess(mjd, target) # Get the Earth's velocity vector (km/sec). - velocity = earthVel (mjd) + velocity = earthVel(mjd) # Dot product. - vel_r = N.dot (velocity, target) + vel_r = N.dot(velocity, target) + + return -vel_r - return (-vel_r) -def earthVel (mjd): +def earthVel(mjd): """Compute and return the velocity of the Earth at the specified time. This function computes the Earth's orbital velocity around the Sun @@ -124,33 +125,34 @@ def earthVel (mjd): L = ((280.461 + 0.9856474 * tdays) % 360.) * DEG_RAD # 1.915 degrees 0.02 degree - elong = L + 0.033423 * N.sin (g) + 0.000349 * N.sin (2.*g) + elong = L + 0.033423 * N.sin(g) + 0.000349 * N.sin(2.*g) elong_dot = L_dot + \ - 0.033423 * N.cos (g) * g_dot + \ - 0.000349 * N.cos (2.*g) * 2.*g_dot + 0.033423 * N.cos(g) * g_dot + \ + 0.000349 * N.cos(2.*g) * 2.*g_dot - radius = 1.00014 - 0.01671 * N.cos (g) - 0.00014 * N.cos (2.*g) - radius_dot = 0.01671 * N.sin (g) * g_dot + \ - 0.00014 * N.sin (2.*g) * 2.*g_dot + radius = 1.00014 - 0.01671 * N.cos(g) - 0.00014 * N.cos(2.*g) + radius_dot = 0.01671 * N.sin(g) * g_dot + \ + 0.00014 * N.sin(2.*g) * 2.*g_dot - x_dot = radius_dot * N.cos (elong) - \ - radius * N.sin (elong) * elong_dot + x_dot = radius_dot * N.cos(elong) - \ + radius * N.sin(elong) * elong_dot - y_dot = radius_dot * N.cos (eps) * N.sin (elong) + \ - radius * N.cos (eps) * N.cos (elong) * elong_dot + y_dot = radius_dot * N.cos(eps) * N.sin(elong) + \ + radius * N.cos(eps) * N.cos(elong) * elong_dot - z_dot = radius_dot * N.sin (eps) * N.sin (elong) + \ - radius * N.sin (eps) * N.cos (elong) * elong_dot + z_dot = radius_dot * N.sin(eps) * N.sin(elong) + \ + radius * N.sin(eps) * N.cos(elong) * elong_dot # Convert to km/sec with Sun as origin. - velocity = N.zeros (3, dtype=N.float64) + velocity = N.zeros(3, dtype=N.float64) velocity[0] = -x_dot * KM_AU / SEC_DAY velocity[1] = -y_dot * KM_AU / SEC_DAY velocity[2] = -z_dot * KM_AU / SEC_DAY return velocity -def precess (mjd, target): + +def precess(mjd, target): """Precess target coordinates from J2000 to the date mjd. Notes @@ -185,44 +187,44 @@ def precess (mjd, target): i.e. either (3,) or (n,3) """ - target_j2000 = N.array (target, dtype=N.float64) + target_j2000 = N.array(target, dtype=N.float64) target_mjd = target_j2000.copy() dt = (mjd - REFDATE) / 36525. dt2 = dt**2 dt3 = dt**3 - zeta = (2306.2181 * dt + 0.30188 * dt2 + 0.017998 * dt3) * ARCSEC_RAD + zeta = (2306.2181 * dt + 0.30188 * dt2 + 0.017998 * dt3) * ARCSEC_RAD - z = (2306.2181 * dt + 1.09468 * dt2 + 0.018203 * dt3) * ARCSEC_RAD + z = (2306.2181 * dt + 1.09468 * dt2 + 0.018203 * dt3) * ARCSEC_RAD theta = (2004.3109 * dt - 0.42665 * dt2 - 0.041833 * dt3) * ARCSEC_RAD - cos_zeta = N.cos (zeta) - sin_zeta = N.sin (zeta) - cos_z = N.cos (z) - sin_z = N.sin (z) - cos_theta = N.cos (theta) - sin_theta = N.sin (theta) + cos_zeta = N.cos(zeta) + sin_zeta = N.sin(zeta) + cos_z = N.cos(z) + sin_z = N.sin(z) + cos_theta = N.cos(theta) + sin_theta = N.sin(theta) # Create the rotation matrix. 
- a = N.identity (3, dtype=N.float64) + a = N.identity(3, dtype=N.float64) - a[0,0] = cos_z * cos_theta * cos_zeta - sin_z * sin_zeta - a[0,1] = -cos_z * cos_theta * sin_zeta - sin_z * cos_zeta - a[0,2] = -cos_z * sin_theta + a[0, 0] = cos_z * cos_theta * cos_zeta - sin_z * sin_zeta + a[0, 1] = -cos_z * cos_theta * sin_zeta - sin_z * cos_zeta + a[0, 2] = -cos_z * sin_theta - a[1,0] = sin_z * cos_theta * cos_zeta + cos_z * sin_zeta - a[1,1] = -sin_z * cos_theta * sin_zeta + cos_z * cos_zeta - a[1,2] = -sin_z * sin_theta + a[1, 0] = sin_z * cos_theta * cos_zeta + cos_z * sin_zeta + a[1, 1] = -sin_z * cos_theta * sin_zeta + cos_z * cos_zeta + a[1, 2] = -sin_z * sin_theta - a[2,0] = sin_theta * cos_zeta - a[2,1] = -sin_theta * sin_zeta - a[2,2] = cos_theta + a[2, 0] = sin_theta * cos_zeta + a[2, 1] = -sin_theta * sin_zeta + a[2, 2] = cos_theta # Convert to matrix objects. - m_a = N.matrix (a) - m_target_j2000 = N.matrix (target_j2000) + m_a = N.matrix(a) + m_target_j2000 = N.matrix(target_j2000) # The prefix "m_" indicates that the product is actually a matrix. m_target_mjd = m_a * m_target_j2000.T diff --git a/lib/stistools/sshift.py b/stistools/sshift.py similarity index 79% rename from lib/stistools/sshift.py rename to stistools/sshift.py index 85913f2..d47eb05 100755 --- a/lib/stistools/sshift.py +++ b/stistools/sshift.py @@ -6,38 +6,38 @@ POSTARG2 keyword is used to determine the number of rows to be shifted. """ -from __future__ import division, print_function # confidence high -from astropy.io import fits as pyfits +from astropy.io import fits __version__ = '1.7 (2010-Apr-27)' + def shiftimage(infile, outfile, shift=0): """ -Shift each image extension of an input file by N rows and write the -new image extension to the output file. -""" + Shift each image extension of an input file by N rows and write the + new image extension to the output file. + """ - fin = pyfits.open(infile) # flat-fielded file + fin = fits.open(infile) # flat-fielded file - fout = pyfits.HDUList() # shifted flat-field file + fout = fits.HDUList() # shifted flat-field file phdr = fin[0].header phdr.add_history('SSHIFT complete ...') phdr.add_history(' all extensions were shifted by %d rows' % shift) - fout.append(pyfits.PrimaryHDU(header=phdr)) + fout.append(fits.PrimaryHDU(header=phdr)) for exten in fin[1:]: image = exten.data.copy() - image[:,:] = 0 - if shift > 0: + image[:, :] = 0 + if shift > 0: image[shift:] = exten.data[:-shift] elif shift < 0: image[:shift] = exten.data[-shift:] else: image[:] = exten.data[:] - fout.append(pyfits.ImageHDU(header=exten.header, data=image)) + fout.append(fits.ImageHDU(header=exten.header, data=image)) fout.writeto(outfile) @@ -92,9 +92,6 @@ def sshift(input, output=None, shifts=None, platescale=None, # check for binned data and non-integral # shifts. # 2004/09/24 PEB - version 1.6 - add keyword consistency checks - - import types - # Setup input and output filename lists, so iteration can be done # over a list of zipped filenames. @@ -102,7 +99,7 @@ def sshift(input, output=None, shifts=None, platescale=None, input = [input] elif not input: raise ValueError( - 'No input files found. Possibly using wrong directory.') + 'No input files found. 
Possibly using wrong directory.') if output is None: output = len(input)*[None] @@ -119,7 +116,7 @@ def sshift(input, output=None, shifts=None, platescale=None, if len(input) != len(output): raise ValueError( - 'number of output files is not equal to number input files') + 'number of output files is not equal to number input files') if shifts is not None: for shift in shifts: @@ -128,9 +125,8 @@ def sshift(input, output=None, shifts=None, platescale=None, xposs, yposs, xpos0, ypos0 = [], [], None, None proposid, obset_id, targname = None, None, None - propaper, opt_elem, cenwave = None, None, None + propaper, opt_elem, cenwave = None, None, None binaxis1, binaxis2 = None, None - samefiles = [] for infile in input: # Read the POSTARG2 keyword (in the primary header) and the @@ -138,18 +134,18 @@ def sshift(input, output=None, shifts=None, platescale=None, # relative shift of each input file. Choose a reference # position that is closest to Y-pixel 512. - infil = pyfits.open(infile) - phdr = infil[0].header + infil = fits.open(infile) + phdr = infil[0].header if platescale is None: - #platescale = phdr['PLATESC'] + # platescale = phdr['PLATESC'] platescale = 0.05077 else: platescale = float(platescale) if phdr['FLATCORR'].upper() != 'COMPLETE': raise ValueError( - 'Input file has not been flat-fielded corrected.') + 'Input file has not been flat-fielded corrected.') # Check that TARGNAME is the same. if targname is None: @@ -162,17 +158,17 @@ def sshift(input, output=None, shifts=None, platescale=None, proposid = phdr['PROPOSID'] obset_id = phdr['OBSET_ID'] elif proposid != phdr['PROPOSID'] or obset_id != phdr['OBSET_ID']: - raise ValueError('%s %s' % - ('Not all exposures are from the same visit;', - 'placement of the spectrum on the detector will differ.')) + raise ValueError(' Not all exposures are from the same visit;' + ' placement of the spectrum on the detector will' + ' differ.') - # Check that PROPAPER, OPT_ELEM, CENWAVE are the same. + # Check that PROPAPER, OPT_ELEM, CENWAVE are the same. if propaper is None: propaper = phdr['PROPAPER'] opt_elem = phdr['OPT_ELEM'] - cenwave = phdr['CENWAVE'] + cenwave = phdr['CENWAVE'] elif propaper != phdr['PROPAPER'] or opt_elem != phdr['OPT_ELEM'] or \ - cenwave != phdr['CENWAVE']: + cenwave != phdr['CENWAVE']: raise ValueError('Different observing configurations have been used.') # Check that BINAXIS1 and BINAXIS2 are the same. @@ -183,16 +179,16 @@ def sshift(input, output=None, shifts=None, platescale=None, raise ValueError('Different binnings have been used.') # Check that all POSTARG1 values are the same (within reason). - xpos = phdr['POSTARG1'] + xpos = phdr['POSTARG1'] if xpos0 is None: xpos0 = xpos elif abs(xpos - xpos0) > 0.05: raise ValueError('POSTARG1 values of input files are not equal.') # Get the POSTARG2 values and the one that is nearest to row 512. 
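The hunk below converts each POSTARG2 offset (arcsec) to a pixel shift via the plate scale, and the later tolerance check rejects shifts that are not close to an integer number of rows. A simplified sketch of that arithmetic; the real code measures each shift relative to a reference exposure and uses a slightly different rounding expression, so the helper name and the single-offset form here are illustrative only:

```python
def postarg2_to_shift(postarg2, platescale=0.05077, tolerance=0.1):
    """Convert a POSTARG2 offset in arcsec to an integer row shift (sketch)."""
    ypos = postarg2 / platescale          # shift in pixels
    nearest = int(ypos + 0.5) if ypos >= 0 else -int(-ypos + 0.5)
    if abs(ypos - nearest) > tolerance:
        raise ValueError("POSTARG2 shift not within tolerance "
                         "{} pix of integer-pixel shift".format(tolerance))
    return nearest

print(postarg2_to_shift(0.1524))   # ~3.002 pixels -> 3
```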
- ypos = phdr['POSTARG2']/platescale - ypix = infil[1].header['CRPIX2']-512 - if ypos0 is None or abs(ypix+ypos) < abs(ypix+ypos0): + ypos = phdr['POSTARG2'] / platescale + ypix = infil[1].header['CRPIX2'] - 512 + if ypos0 is None or abs(ypix + ypos) < abs(ypix + ypos0): ypos0 = ypos yposs.append(ypos) @@ -204,10 +200,9 @@ def sshift(input, output=None, shifts=None, platescale=None, for ypos in yposs: dypos = ypos - ypos0 if abs(abs(dypos) - int(abs(dypos)+0.5)) > tolerance: - raise ValueError('%s (%s pix) %s' % \ - ('POSTARG2 shift not within the specified tolerance', - tolerance, 'of integer-pixel shift')) - #'POSTARG2 shift greater than specified tolerance: %d' % tolerance + raise ValueError("POSTARG2 shift not within the specified " + "tolerance {} pix of integer-pixel shift".format(tolerance)) + # 'POSTARG2 shift greater than specified tolerance: %d' % tolerance # 'non-integral POSTARG2 value or incorrect plate scale.' if dypos < 0.: ishift = -int(dypos-0.5) @@ -220,7 +215,7 @@ def sshift(input, output=None, shifts=None, platescale=None, # Process each file using corresponding pixel shift. print('input-file pixel-shift') for infile, outfile, npixel in zip(input, output, shifts): - fin = pyfits.open(infile) + fin = fits.open(infile) # Use default output file name. if outfile is None: @@ -228,15 +223,16 @@ def sshift(input, output=None, shifts=None, platescale=None, outfile = re.sub('flt\.', 'sfl.', infile, count=1) if binaxis2 == 1: - print('%18s: %3d' % (infile, npixel)) + print('{:>18}: {:3}'.format(infile, npixel)) else: - print('%18s: %3d binned' % (infile, npixel)) + print('{:>18}: {:3} binned'.format(infile, npixel)) shiftimage(infile, outfile, shift=npixel) fin.close() if __name__ == '__main__': - import sys, getopt + import sys + import getopt output, shifts, scale, toler = None, None, None, None @@ -263,4 +259,4 @@ def sshift(input, output=None, shifts=None, platescale=None, tolerance=toler) else: print("""Usage: sshift [-o|--output 'files'] [-s|--shifts 'shifts'] - [-p|--platescale scale] [-t|--tolerance tol] [-h|--help] input-files""") + [-p|--platescale scale] [-t|--tolerance tol] [-h|--help] input-files""") diff --git a/lib/stistools/stisnoise.py b/stistools/stisnoise.py similarity index 81% rename from lib/stistools/stisnoise.py rename to stistools/stisnoise.py index ce2e1c7..1d734c7 100755 --- a/lib/stistools/stisnoise.py +++ b/stistools/stisnoise.py @@ -1,9 +1,8 @@ #!/usr/bin/env python -from __future__ import division, print_function # confidence medium import math -from astropy.io import fits as pyfits +from astropy.io import fits import numpy import numpy.fft as fft from scipy import ndimage @@ -43,13 +42,13 @@ def wipefilter(time_series, image_type, sst, freqmin, freqmax, scale): ntimep = ntime+14 else: ntimep = ntime+7 - t2 = numpy.zeros(ntimep, numpy.float64) + t2 = numpy.zeros(ntimep, numpy.float64) t2[:ntime] = time_series - freq = numpy.arange(ntimep)/(ntimep*sst*1.0e-6) + freq = numpy.arange(ntimep)/(ntimep*sst*1.0e-6) freq[ntimep//2+1:ntimep] = freq[1:ntimep//2][::-1] - tran = fft.fft(t2) / float(len(t2)) + tran = fft.fft(t2) / float(len(t2)) # apply filter - ind = numpy.nonzero((freq > freqmin)*(freq < freqmax)) + ind = numpy.nonzero((freq > freqmin)*(freq < freqmax)) tran[ind] = tran[ind]*scale # inverse transform time_series = fft.ifft(tran).real[:ntime+2] @@ -78,27 +77,27 @@ def windowfilter(time_series, image_type, sst, freqpeak, width, taper): ntimep = ntime+14 else: ntimep = ntime+7 - t2 = numpy.zeros(ntimep, numpy.float64) + t2 = numpy.zeros(ntimep, 
numpy.float64) t2[:ntime] = time_series - freq = numpy.arange(ntimep, dtype=numpy.float64) / (ntimep*sst*1.0e-6) + freq = numpy.arange(ntimep, dtype=numpy.float64) / (ntimep*sst*1.0e-6) freq[ntimep//2+1:ntimep] = freq[1:ntimep//2][::-1] - tran = fft.fft(t2) / float(len(t2)) + tran = fft.fft(t2) / float(len(t2)) # apply filter filter = numpy.ones(ntimep, numpy.float64) - ind = numpy.nonzero((freq > (freqpeak-width/2.0)) * \ - (freq < (freqpeak+width/2.0))) + ind = numpy.nonzero((freq > (freqpeak - width / 2.0)) * + (freq < (freqpeak + width / 2.0))) filter[ind] = 0.0 - freqstep = 1.0/(ntimep*sst*1.0e-6) - width = taper/freqstep # specify window width in freq steps + freqstep = 1.0 / (ntimep * sst * 1.0e-6) + width = taper / freqstep # specify window width in freq steps sigma = width/2.354820044 # convert fwhm to sigma kernw = int(5*sigma) # make kernel have width of 5 sigma - if kernw%2 == 0: - kernw = kernw+1 # make kernel odd + if kernw % 2 == 0: + kernw = kernw + 1 # make kernel odd kernx = numpy.arange(kernw) kerny = gauss(kernx, kernw//2, sigma, 1.0) # gaussian kernel kerny = kerny/numpy.sum(kerny) filterc = signal.correlate(filter, kerny, mode='same') - tran = tran*filterc + tran = tran * filterc # inverse transform time_series = fft.ifft(tran).real[:ntime+2] time_series *= time_series.shape[0] @@ -187,7 +186,7 @@ def stisnoise(infile, exten=1, outfile=None, dc=1, verbose=1, # from __future__ import division # Check filter options - if ((boxcar > 0) + (wipe != None) + (window != None)) > 1: + if ((boxcar > 0) + (wipe is not None) + (window is not None)) > 1: raise ValueError('conflicting filter options') # Define physical characteristics of STIS CCD @@ -200,49 +199,49 @@ def stisnoise(infile, exten=1, outfile=None, dc=1, verbose=1, pps = pst/sst # number of serial shift intervals in parallel interval # Retrieve exposure information from header - fin = pyfits.open(infile) + fin = fits.open(infile) extname = fin[exten].header['EXTNAME'] inimage = fin[exten].data - himage = fin[0].data + himage = fin[0].data - amp = fin[0].header['CCDAMP'] + amp = fin[0].header['CCDAMP'] if verbose == 1: - print('Target: %s, Amp: %s, Gain: %d' % \ - (fin[0].header['TARGNAME'], amp, fin[0].header['CCDGAIN'])) + print('Target: {}, Amp: {}, Gain: {}'.format( + fin[0].header['TARGNAME'], amp, fin[0].header['CCDGAIN'])) # Check to ensure the SCI extension is being used if extname != 'SCI': raise RuntimeError( - 'You should only run this on a SCI extension, not %s.'%extname) + 'You should only run this on a SCI extension, not %s.' 
% extname) nr, nc = inimage.shape - if (nr, nc) == (nr0, nc0): + if (nr, nc) == (nr0, nc0): image_type = 'raw' elif (nr, nc) == (fltxy, fltxy): image_type = 'flt' else: raise RuntimeError('This program should be run on 1062x1044 ' - 'or 1024x1024 data only.') + 'or 1024x1024 data only.') # Pad data with fake "OVERSCAN" if data have been overscan trimmed if image_type == 'flt': temp = numpy.zeros((fltxy, nc0), numpy.float32) for row in range(fltxy): - temp[row,:] = _median(inimage[row,:]) - temp[:,nos:nc0-nos] = inimage + temp[row, :] = _median(inimage[row, :]) + temp[:, nos:nc0-nos] = inimage nc = nc0 else: temp = inimage # Translate frame so that it is in readout order - if amp == 'A': + if amp == 'A': image = temp # amp A data -> leave as is elif amp == 'B': - image = temp[::-1,:] # amp B data -> flip left<->right + image = temp[::-1, :] # amp B data -> flip left<->right elif amp == 'C': - image = temp[:,::-1] # amp C data -> flip top<->bottom + image = temp[:, ::-1] # amp C data -> flip top<->bottom elif amp == 'D': - image = temp[::-1,::-1] # amp D data -> rotate by 180 degrees + image = temp[::-1, ::-1] # amp D data -> rotate by 180 degrees else: raise RuntimeError('No amplifier given in header.') @@ -253,26 +252,26 @@ def stisnoise(infile, exten=1, outfile=None, dc=1, verbose=1, for i in range(nr): k = int(i*nx) # (note that non-integer nx prevents phase wandering) - time_series[k:k+nc] = image[i,:] + time_series[k:k+nc] = image[i, :] # pad dead-time - medval = _median(image[i,:]) + medval = _median(image[i, :]) time_series[k+nc:int(k+nc+pps)] = ds + medval if int((i+1)*nx) != int(k+nc+pps): time_series[int((i+1)*nx)-1] = medval # Begin filtering options *************** - #if median != None: + # if median is not None: # time_series = medianfilter(time_series, median) if boxcar > 0: boxcar_filter = signal.boxcar(boxcar) / boxcar time_series = ndimage.convolve(time_series, boxcar_filter) - elif wipe != None: + elif wipe is not None: time_series = wipefilter(time_series, image_type, sst, wipe[0], wipe[1], wipe[2]) - elif window != None: + elif window is not None: time_series = windowfilter(time_series, image_type, sst, window[0], window[1], window[2]) @@ -281,19 +280,19 @@ def stisnoise(infile, exten=1, outfile=None, dc=1, verbose=1, # Recreate 2-D image from time series outimage = numpy.zeros((nr, nc), numpy.float32) for i in range(nr): - outimage[i,:] = time_series[int(i*nx):int(i*nx+nc)] + outimage[i, :] = time_series[int(i*nx):int(i*nx+nc)] if image_type == 'flt': - outimage = outimage[:,nos:(nc0-nos)] + outimage = outimage[:, nos:(nc0-nos)] # Restore original image orientation - if amp == 'A': + if amp == 'A': pass # amp A data -> leave as is elif amp == 'B': - outimage = outimage[::-1,:] # amp B data -> flip left<->right + outimage = outimage[::-1, :] # amp B data -> flip left<->right elif amp == 'C': - outimage = outimage[:,::-1] # amp C data -> flip top<->bottom + outimage = outimage[:, ::-1] # amp C data -> flip top<->bottom elif amp == 'D': - outimage = outimage[::-1,::-1] # amp D data -> rotate by 180 degrees + outimage = outimage[::-1, ::-1] # amp D data -> rotate by 180 degrees # Trim vector to power of 2 for FFT # (this is the fastest fft calculation but it doesn't preserve all @@ -312,9 +311,9 @@ def stisnoise(infile, exten=1, outfile=None, dc=1, verbose=1, if outfile: # write primary header then append ext - fout = pyfits.HDUList() - fout.append(pyfits.PrimaryHDU(header=fin[0].header)) - fout.append(pyfits.ImageHDU(header=fin[1].header, data=outimage)) + fout = 
fits.HDUList() + fout.append(fits.PrimaryHDU(header=fin[0].header)) + fout.append(fits.ImageHDU(header=fin[1].header, data=outimage)) fout.writeto(outfile) - return (freq, magnitude) + return freq, magnitude diff --git a/lib/stistools/wavecal.py b/stistools/wavecal.py similarity index 93% rename from lib/stistools/wavecal.py rename to stistools/wavecal.py index 8a6cd01..cd89688 100755 --- a/lib/stistools/wavecal.py +++ b/stistools/wavecal.py @@ -1,6 +1,5 @@ #! /usr/bin/env python -from __future__ import division, print_function # confidence unknown import os import sys import getopt @@ -8,9 +7,9 @@ import subprocess import numpy.random as rn # used by mkRandomName -from astropy.io import fits as pyfits +from astropy.io import fits -from stsci.tools import parseinput,teal +from stsci.tools import parseinput, teal """ Perform wavelength calibration of STIS data. @@ -51,6 +50,7 @@ # MJD after which the external shutter was closed for CCD HITM wavecals. SH_CLOSED = 51126.0 + def main(args): if len(args) < 2: @@ -102,6 +102,7 @@ def main(args): sys.exit(status) + def prtOptions(): """Print a list of command-line options and arguments.""" @@ -115,6 +116,7 @@ def prtOptions(): print("Following the options, list the input flt file names and") print(" the associated raw (or calibrated) wavecal file names.") + def wavecal(input, wavecal, debugfile="", savetmp=False, option="linear", angle=None, verbose=False, timestamps=False, @@ -219,7 +221,7 @@ def wavecal(input, wavecal, debugfile="", savetmp=False, files = glob.glob(in2) infiles.extend(files) if input1 and not infiles: - print("No file name matched the string '%s'" % input) + print("No file name matched the string '{}'".format(input)) return 2 wavecal_files = [] @@ -243,7 +245,7 @@ def wavecal(input, wavecal, debugfile="", savetmp=False, n_infiles = len(infiles) if wavecal_files and len(wavecal_files) != n_infiles: same_length = False - print("You specified %d input files but %d wavecal files." % + print("You specified {} input files but {} wavecal files.".format (n_infiles, len(wavecal_files))) print("The number of input and wavecal files must be the same.") if dbgfiles and len(dbgfiles) != n_infiles: @@ -254,7 +256,7 @@ def wavecal(input, wavecal, debugfile="", savetmp=False, if trailer: if verbose and os.access(trailer, os.F_OK): - print("Appending to trailer file %s" % trailer) + print("Appending to trailer file {}".format(trailer)) f_trailer = open(trailer, "a") fd_trailer = f_trailer.fileno() else: @@ -290,18 +292,19 @@ def wavecal(input, wavecal, debugfile="", savetmp=False, if not savetmp: for tmp_file in tempfnames: if verbose: - print(" ... deleting temporary file %s" % tmp_file) + print(" ... deleting temporary file {}".format(tmp_file)) try: os.remove(tmp_file) except OSError: - print("Warning: couldn't delete temporary file %s." % - tmp_file) + print("Warning: couldn't delete temporary file {}.".format + (tmp_file)) if f_trailer is not None: f_trailer.close() return 0 + def mkRandomNameW(prefix="wavecal_", suffix="_tmp.fits", n=100000000): MAX_TRIES = 100 done = False @@ -320,12 +323,13 @@ def mkRandomNameW(prefix="wavecal_", suffix="_tmp.fits", n=100000000): else: return None + def runBasic2d(wavecal, tempfnames, verbose, timestamps, fd_trailer): flag = False # initial value # First check whether the wavecal file is already calibrated. 
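The lines that follow read the calibration-switch keywords from the wavecal's primary header to decide whether basic 2-D calibration still needs to run. A standalone sketch of the same check, assuming the usual STIS switch semantics (a value of "PERFORM" means the step is still pending); the exact switch list consulted by the module may differ from the three shown in the hunk:

```python
from astropy.io import fits

def needs_basic2d(wavecal):
    """True if any basic 2-D calibration switch is still set to PERFORM."""
    with fits.open(wavecal) as fd:
        phdr = fd[0].header
        switches = [phdr.get(key, default="missing")
                    for key in ("dqicorr", "blevcorr", "darkcorr")]
    return any(value.upper() == "PERFORM" for value in switches)
```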
- fd = pyfits.open(wavecal) + fd = fits.open(wavecal) dqicorr = fd[0].header.get("dqicorr", default="missing") blevcorr = fd[0].header.get("blevcorr", default="missing") darkcorr = fd[0].header.get("darkcorr", default="missing") @@ -368,22 +372,23 @@ def runBasic2d(wavecal, tempfnames, verbose, timestamps, fd_trailer): arglist.append("-flat") if verbose: - print("Running cs1.e on %s" % wavecal) - print(" %s" % str(arglist)) + print("Running cs1.e on {}".format(wavecal)) + print(" {}".format(arglist)) status = subprocess.call(arglist, stdout=fd_trailer, stderr=subprocess.STDOUT) if status: - raise RuntimeError("status = %d from cs1.e" % status) + raise RuntimeError("status = {} from cs1.e".format(status)) tempfnames.append(fwv_file) flag = True - return (flag, fwv_file) + return flag, fwv_file + def runCs11(fwv_file, infile, tempfnames, verbose, timestamps, fd_trailer): """Subtract a fraction of the science image from the wavecal image.""" # Check whether we need to run cs11.e. - fd = pyfits.open(fwv_file) + fd = fits.open(fwv_file) sclamp = fd[0].header.get("sclamp", default="missing") detector = fd[0].header.get("detector", default="missing") texpstrt = fd[0].header.get("texpstrt", default="missing") @@ -409,25 +414,26 @@ def runCs11(fwv_file, infile, tempfnames, verbose, timestamps, fd_trailer): if verbose: print("Running cs11.e") - print(" %s" % str(arglist)) + print(" {}".format(arglist)) status = subprocess.call(arglist, stdout=fd_trailer, stderr=subprocess.STDOUT) if status: - raise RuntimeError("status = %d from cs11.e" % status) + raise RuntimeError("status = {} from cs11.e".format(status)) tempfnames.append(cwv_file) flag = True else: cwv_file = fwv_file flag = False - return (flag, cwv_file) + return flag, cwv_file + def runX2d(cwv_file, angle, tempfnames, verbose, timestamps, fd_trailer): flag = False # initial value - fd = pyfits.open(cwv_file) + fd = fits.open(cwv_file) opt_elem = fd[0].header.get("opt_elem", default="missing") x2dcorr = fd[0].header.get("x2dcorr", default="missing") fd.close() @@ -466,8 +472,8 @@ def runX2d(cwv_file, angle, tempfnames, arglist.append("%.20g" % angle) if verbose: - print("Running x2d on %s" % cwv_file) - print(" %s" % str(arglist)) + print("Running x2d on {}".format(cwv_file)) + print(" {}".format(arglist)) status = subprocess.call(arglist, stdout=fd_trailer, stderr=subprocess.STDOUT) if status: @@ -475,7 +481,8 @@ def runX2d(cwv_file, angle, tempfnames, tempfnames.append(w2d_file) flag = True - return (flag, w2d_file) + return flag, w2d_file + def runWavecal(w2d_file, dbg, angle, verbose, timestamps, fd_trailer): @@ -493,13 +500,14 @@ def runWavecal(w2d_file, dbg, angle, verbose, timestamps, fd_trailer): arglist.append(dbg) if verbose: - print("Running cs4.e on %s" % w2d_file) - print(" %s" % str(arglist)) + print("Running cs4.e on {}".format(w2d_file)) + print(" {}".format(arglist)) status = subprocess.call(arglist, stdout=fd_trailer, stderr=subprocess.STDOUT) if status: raise RuntimeError("status = %d from cs4.e" % status) + def runCs12(w2d_file, infile, option, verbose, timestamps, fd_trailer): arglist = ["cs12.e"] @@ -514,7 +522,7 @@ def runCs12(w2d_file, infile, option, verbose, timestamps, fd_trailer): if verbose: print("Running cs12.e") - print(" %s" % str(arglist)) + print(" {}".format(arglist)) status = subprocess.call(arglist, stdout=fd_trailer, stderr=subprocess.STDOUT) if status: @@ -524,10 +532,12 @@ def runCs12(w2d_file, infile, option, verbose, timestamps, fd_trailer): # Interfaces used by TEAL # #-------------------------# + 
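Before the TEAL interface definitions below, note that each of the run* helpers above shells out to a calstis executable with the same pattern: build an argument list, route stdout and stderr to the trailer file descriptor, and raise on a nonzero exit status. A condensed sketch of that pattern; the executable name and arguments are illustrative:

```python
import subprocess

def run_tool(arglist, fd_trailer=None, verbose=False):
    """Run a calstis-style executable, mirroring the run* helpers in wavecal.py."""
    if verbose:
        print("Running {}".format(arglist[0]))
        print("  {}".format(arglist))
    status = subprocess.call(arglist, stdout=fd_trailer,
                             stderr=subprocess.STDOUT)
    if status:
        raise RuntimeError("status = {} from {}".format(status, arglist[0]))

# e.g. run_tool(["cs4.e", "wavecal_tmp.fits"], verbose=True)
```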
def getHelpAsString(fulldoc=True): """Return documentation on the wavecal function.""" return wavecal.__doc__ + def run(configobj=None): """TEAL interface for the wavecal function.""" wavecal(input=configobj["input"], diff --git a/lib/stistools/wavelen.py b/stistools/wavelen.py similarity index 55% rename from lib/stistools/wavelen.py rename to stistools/wavelen.py index b9e9b06..0c40af0 100644 --- a/lib/stistools/wavelen.py +++ b/stistools/wavelen.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import, division, print_function # confidence high -import os import numpy as N @@ -11,16 +9,17 @@ DEG_RAD = N.pi / 180. # degrees to radians SPEED_OF_LIGHT = 299792.458 # km / s -def compute_wavelengths (shape, phdr, hdr, helcorr): + +def compute_wavelengths(shape, phdr, hdr, helcorr): """Compute a 2-D array of wavelengths, one value for each image pixel. Parameters ---------- shape : tuple of two ints the number of rows and columns in the output image - phdr : pyfits Header object + phdr : fits Header object primary header - hdr : pyfits Header object + hdr : fits Header object extension header helcorr : string "PERFORM" if heliocentric correction should be done @@ -38,34 +37,34 @@ def compute_wavelengths (shape, phdr, hdr, helcorr): (nrows, ncols) = shape - opt_elem = phdr.get ("opt_elem", default="keyword_missing") - cenwave = phdr.get ("cenwave", default=0) - aperture = phdr.get ("aperture", default="keyword_missing") - propaper = phdr.get ("propaper", default="keyword_missing") - sclamp = phdr.get ("sclamp", default="NONE") - disptab = phdr.get ("disptab", default="keyword_missing") - apdestab = phdr.get ("apdestab", default="keyword_missing") - inangtab = phdr.get ("inangtab", default="keyword_missing") - ra_targ = phdr.get ("ra_targ", default="keyword_missing") - dec_targ = phdr.get ("dec_targ", default="keyword_missing") - - expstart = hdr.get ("expstart", default=0.) - expend = hdr.get ("expend", default=0.) - crpix2 = hdr.get ("crpix2", default=0.) - ltm = hdr.get ("ltm1_1", default=1.) - ltv1 = hdr.get ("ltv1", default=0.) - ltv2 = hdr.get ("ltv2", default=0.) - shifta1 = hdr.get ("shifta1", default=0.) - shifta2 = hdr.get ("shifta2", default=0.) - - disptab = r_util.expandFileName (disptab) - apdestab = r_util.expandFileName (apdestab) - inangtab = r_util.expandFileName (inangtab) + opt_elem = phdr.get("opt_elem", default="keyword_missing") + cenwave = phdr.get("cenwave", default=0) + aperture = phdr.get("aperture", default="keyword_missing") + propaper = phdr.get("propaper", default="keyword_missing") + sclamp = phdr.get("sclamp", default="NONE") + disptab = phdr.get("disptab", default="keyword_missing") + apdestab = phdr.get("apdestab", default="keyword_missing") + inangtab = phdr.get("inangtab", default="keyword_missing") + ra_targ = phdr.get("ra_targ", default="keyword_missing") + dec_targ = phdr.get("dec_targ", default="keyword_missing") + + expstart = hdr.get("expstart", default=0.) + expend = hdr.get("expend", default=0.) + crpix2 = hdr.get("crpix2", default=0.) + ltm = hdr.get("ltm1_1", default=1.) + ltv1 = hdr.get("ltv1", default=0.) + ltv2 = hdr.get("ltv2", default=0.) + shifta1 = hdr.get("shifta1", default=0.) + shifta2 = hdr.get("shifta2", default=0.) + + disptab = r_util.expandFileName(disptab) + apdestab = r_util.expandFileName(apdestab) + inangtab = r_util.expandFileName(inangtab) # Modify ltv and crpix2 for zero-indexed pixels. ltv1 += (ltm - 1.) crpix2 -= 1. - binaxis1 = round (1. / ltm) # should be 1, 2, 4 or 8 + binaxis1 = round(1. 
/ ltm) # should be 1, 2, 4 or 8 # These offsets have not been converted from one-indexing to # zero-indexing, but that's OK because the input image must not @@ -73,14 +72,14 @@ def compute_wavelengths (shape, phdr, hdr, helcorr): offset = shifta2 - ltv2 if sclamp != "NONE": - nchar = len (propaper) + nchar = len(propaper) ending = propaper[nchar-2:nchar] if ending in PSEUDOAPERTURES: aperture = aperture + ending if helcorr == "PERFORM": - v_helio = radialvel.radialVel (ra_targ, dec_targ, - (expstart + expend) / 2.) + v_helio = radialvel.radialVel(ra_targ, dec_targ, + (expstart + expend) / 2.) hfactor = (1. - v_helio / SPEED_OF_LIGHT) else: hfactor = 1. @@ -89,53 +88,54 @@ def compute_wavelengths (shape, phdr, hdr, helcorr): # a set of coefficients at several positions along the slit, i.e. # disp_coeff[i] is the set of coefficients at position a2center[i]. filter = {"opt_elem": opt_elem, "cenwave": cenwave} - disp_info = gettable.getTable (disptab, filter, - sortcol="a2center", at_least_one=True) - ref_aper = disp_info.field ("ref_aper")[0] # name of reference aperture - a2center = disp_info.field ("a2center") - 1. # zero indexing - ncoeff = disp_info.field ("ncoeff")[0] # same for all rows - disp_coeff = disp_info.field ("coeff") - delta_offset1 = get_delta_offset1 (apdestab, aperture, ref_aper) - - apdes_info = gettable.getTable (apdestab, {"aperture": aperture}, - exactly_one=True) + disp_info = gettable.getTable(disptab, filter, + sortcol="a2center", at_least_one=True) + ref_aper = disp_info.field("ref_aper")[0] # name of reference aperture + a2center = disp_info.field("a2center") - 1. # zero indexing + ncoeff = disp_info.field("ncoeff")[0] # same for all rows + disp_coeff = disp_info.field("coeff") + delta_offset1 = get_delta_offset1(apdestab, aperture, ref_aper) + + apdes_info = gettable.getTable(apdestab, {"aperture": aperture}, + exactly_one=True) # Check whether ANGLE is a column in this table. names = [] for name in apdes_info.names: - names.append (name.lower()) + names.append(name.lower()) if "angle" in names: - angle = apdes_info.field ("angle")[0] + angle = apdes_info.field("angle")[0] else: print("Warning: Column ANGLE not found in", apdestab) angle = REF_ANGLE del names - delta_tan = N.tan (angle * DEG_RAD) - N.tan (REF_ANGLE * DEG_RAD); + delta_tan = N.tan(angle * DEG_RAD) - N.tan(REF_ANGLE * DEG_RAD) # Note: this assumes a first-order spectrum, but at the time of # writing there's actually no distinction in any of the iac tables. filter = {"opt_elem": opt_elem, "cenwave": cenwave, "sporder": 1} - inang_info = gettable.getTable (inangtab, filter, exactly_one=True) + inang_info = gettable.getTable(inangtab, filter, exactly_one=True) - wavelengths = N.zeros ((nrows, ncols), dtype=N.float64) - image_pixels = N.arange (ncols, dtype=N.float64) + wavelengths = N.zeros((nrows, ncols), dtype=N.float64) + image_pixels = N.arange(ncols, dtype=N.float64) # Convert from image pixels to reference pixels (but zero indexed). pixels = (image_pixels - ltv1) * binaxis1 - for j in range (nrows): - row = float (j) + offset # account for possible subarray + for j in range(nrows): + row = float(j) + offset # account for possible subarray # Interpolate to get dispersion relation for current (0-indexed) row. - coeff = r_util.interpolate (a2center, disp_coeff, row) + coeff = r_util.interpolate(a2center, disp_coeff, row) # Apply corrections. 
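The heliocentric factor computed above, `hfactor = 1 - v_helio / SPEED_OF_LIGHT`, is applied to every row's wavelengths after the per-row dispersion corrections below. A small worked example of the scale of the effect; `radialVel` itself lives in radialvel.py, so the velocity here is just a given number:

```python
SPEED_OF_LIGHT = 299792.458   # km/s, as in wavelen.py

def heliocentric_factor(v_helio):
    """Scale factor applied to the wavelengths; sign convention as in the module."""
    return 1. - v_helio / SPEED_OF_LIGHT

# A radial velocity of 20 km/s changes the wavelengths by about 67 parts
# per million:
print(heliocentric_factor(20.0))   # 0.99993329...
```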
- adjust_disp (ncoeff, coeff, delta_offset1, shifta1, inang_info, - delta_tan, row-crpix2, binaxis1) + adjust_disp(ncoeff, coeff, delta_offset1, shifta1, inang_info, + delta_tan, row-crpix2, binaxis1) # Compute wavelength from pixel numbers. - wl = evaldisp.newton (pixels, coeff, cenwave) + wl = evaldisp.newton(pixels, coeff, cenwave) wl *= hfactor wavelengths[j] = wl.copy() return wavelengths -def get_delta_offset1 (apdestab, aperture, ref_aper): + +def get_delta_offset1(apdestab, aperture, ref_aper): """Get the incidence angle offset. Parameters @@ -156,20 +156,21 @@ def get_delta_offset1 (apdestab, aperture, ref_aper): """ # Get the offset for the aperture that was used for the observation. - apdes_info = gettable.getTable (apdestab, {"aperture": aperture}, - exactly_one=True) - aperture_offset1 = apdes_info.field ("offset1")[0] + apdes_info = gettable.getTable(apdestab, {"aperture": aperture}, + exactly_one=True) + aperture_offset1 = apdes_info.field("offset1")[0] # Get the offset for the aperture that was used for creating the # dispersion relation. - apdes_info = gettable.getTable (apdestab, {"aperture": ref_aper}, - exactly_one=True) - ref_aper_offset1 = apdes_info.field ("offset1")[0] + apdes_info = gettable.getTable(apdestab, {"aperture": ref_aper}, + exactly_one=True) + ref_aper_offset1 = apdes_info.field("offset1")[0] return aperture_offset1 - ref_aper_offset1 -def adjust_disp (ncoeff, coeff, delta_offset1, shifta1, inang_info, - delta_tan, delta_row, binaxis1): + +def adjust_disp(ncoeff, coeff, delta_offset1, shifta1, inang_info, + delta_tan, delta_row, binaxis1): """Adjust the dispersion coefficients. The changes to the coefficients are for the incidence angle @@ -197,12 +198,12 @@ def adjust_disp (ncoeff, coeff, delta_offset1, shifta1, inang_info, """ - iac_ncoeff1 = inang_info.field ("ncoeff1")[0] - iac_coeff1 = inang_info.field ("coeff1")[0] - iac_ncoeff2 = inang_info.field ("ncoeff2")[0] - iac_coeff2 = inang_info.field ("coeff2")[0] + iac_ncoeff1 = inang_info.field("ncoeff1")[0] + iac_coeff1 = inang_info.field("coeff1")[0] + iac_ncoeff2 = inang_info.field("ncoeff2")[0] + iac_coeff2 = inang_info.field("coeff2")[0] - for i in range (iac_ncoeff1): + for i in range(iac_ncoeff1): coeff[i] += iac_coeff1[i] * delta_offset1 if iac_ncoeff2 > 0: diff --git a/lib/stistools/wx2d.py b/stistools/wx2d.py similarity index 65% rename from lib/stistools/wx2d.py rename to stistools/wx2d.py index 05eb8a8..74f5654 100755 --- a/lib/stistools/wx2d.py +++ b/stistools/wx2d.py @@ -1,6 +1,5 @@ #! /usr/bin/env python -from __future__ import absolute_import, division, print_function # confidence high import sys import os import os.path @@ -9,17 +8,18 @@ import numpy as N from scipy import signal as convolve -from astropy.io import fits as pyfits +from astropy.io import fits from . import gettable from . import wavelen from . import r_util __version__ = "1.3 (2016 Feb 24)" -def wx2d (input, output, wavelengths=None, helcorr="", - algorithm="wavelet", - trace=None, order=7, subdiv=8, psf_width=0., rows=None, - subsampled=None, convolved=None): + +def wx2d(input, output, wavelengths=None, helcorr="", + algorithm="wavelet", + trace=None, order=7, subdiv=8, psf_width=0., rows=None, + subsampled=None, convolved=None): """Resample the input, correcting for geometric distortion. 
Parameters @@ -62,69 +62,69 @@ def wx2d (input, output, wavelengths=None, helcorr="", if algorithm != "wavelet" \ and (subsampled is not None or convolved is not None): raise ValueError( - "cannot specify 'subsampled' or 'convolved' if algorithm='kd'") + "cannot specify 'subsampled' or 'convolved' if algorithm='kd'") helcorr = helcorr.upper() - ft = pyfits.open (input) - nextend = ft[0].header.get ("nextend", default=3) + ft = fits.open(input) + nextend = ft[0].header.get("nextend", default=3) n_imsets = nextend // 3 # number of image sets - if ft[1].header.get ("ltm2_2", default=1.0) != 1.0: + if ft[1].header.get("ltm2_2", default=1.0) != 1.0: raise RuntimeError("can't handle data binned in the Y direction") phdu = ft[0] # primary header/data unit # If trace was not specified, get the file name and selection keywords # from the header. - tracefile = trace_name (trace, phdu.header) + tracefile = trace_name(trace, phdu.header) # Update the primary header, in preparation for writing to a new file. - phdu.header.update ("WX2DCORR", "COMPLETE", - comment="this file is output from wx2d", - before="X2DCORR") - is_an_array = isinstance (tracefile, N.ndarray) + phdu.header.set("WX2DCORR", "COMPLETE", "this file is output from wx2d", + before="X2DCORR") + is_an_array = isinstance(tracefile, N.ndarray) if is_an_array: - phdu.header.add_history ("trace array was specified explicitly") + phdu.header.add_history("trace array was specified explicitly") else: - phdu.header.add_history ("trace file " + tracefile) - phdu.header.add_history ("order = " + repr (order)) - phdu.header.add_history ("subdiv = " + repr (subdiv)) - phdu.header.add_history ("psf_width = " + repr (psf_width)) + phdu.header.add_history("trace file " + tracefile) + phdu.header.add_history("order = " + repr(order)) + phdu.header.add_history("subdiv = " + repr(subdiv)) + phdu.header.add_history("psf_width = " + repr(psf_width)) # Write the primary header to the output file(s); # we'll append each extension in wx2d_imset. 
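A recurring change in this file is the move from the long-deprecated positional `Header.update(key, value)` call to `Header.set`, which takes the value and optional comment positionally plus `before`/`after` keywords to control placement. A small sketch of the new idiom on a current astropy (the keyword names here mirror the ones in the hunk):

```python
from astropy.io import fits

hdr = fits.Header()
# old (removed in modern astropy):  hdr.update("WX2DCORR", "COMPLETE", ...)
hdr.set("WX2DCORR", "COMPLETE", "this file is output from wx2d")
hdr.set("X2DCORR", "PERFORM", after="WX2DCORR")   # before=/after= place the card
print(repr(hdr))
```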
- phdu.header.update ("nextend", 0) # no extensions yet - phdu.header.update ("filename", os.path.basename (output)) - phdu.writeto (output) + phdu.header.set("nextend", 0) # no extensions yet + phdu.header.set("filename", os.path.basename(output)) + phdu.writeto(output) if wavelengths is not None: - phdu.header.update ("filename", os.path.basename (wavelengths)) - phdu.writeto (wavelengths) + phdu.header.set("filename", os.path.basename(wavelengths)) + phdu.writeto(wavelengths) if subsampled is not None: - phdu.header.update ("filename", os.path.basename (subsampled)) - phdu.writeto (subsampled) + phdu.header.set("filename", os.path.basename(subsampled)) + phdu.writeto(subsampled) if convolved is not None: - phdu.header.update ("filename", os.path.basename (convolved)) - phdu.writeto (convolved) + phdu.header.set("filename", os.path.basename(convolved)) + phdu.writeto(convolved) - for imset0 in range (n_imsets): - wx2d_imset (ft, imset0+1, output, wavelengths, helcorr, - algorithm, - tracefile, order, subdiv, psf_width, rows, - subsampled, convolved) + for imset0 in range(n_imsets): + wx2d_imset(ft, imset0+1, output, wavelengths, helcorr, + algorithm, + tracefile, order, subdiv, psf_width, rows, + subsampled, convolved) ft.close() -def wx2d_imset (ft, imset, output, wavelengths, helcorr, - algorithm, - tracefile, order, subdiv, psf_width, rows, - subsampled, convolved): + +def wx2d_imset(ft, imset, output, wavelengths, helcorr, + algorithm, + tracefile, order, subdiv, psf_width, rows, + subsampled, convolved): """Resample one image set, and append to output file(s). Parameters ---------- ft : HDUList - pyfits HDUList object for the input file + Fits HDUList object for the input file. imset : int one-indexed image set number @@ -153,8 +153,8 @@ def wx2d_imset (ft, imset, output, wavelengths, helcorr, """ - hdu = ft[("SCI",imset)] - errhdu = ft[("ERR",imset)] + hdu = ft[("SCI", imset)] + errhdu = ft[("ERR", imset)] header = hdu.header nrows, ncols = hdu.data.shape original_nrows = nrows @@ -162,10 +162,10 @@ def wx2d_imset (ft, imset, output, wavelengths, helcorr, if rows is None: rows = (0, nrows) else: - rows = (max (rows[0], 0), min (rows[1], nrows)) + rows = (max(rows[0], 0), min(rows[1], nrows)) nrows = rows[1] - rows[0] if original_nrows > nrows and imset == 1: - ft[0].header.add_history ("rows from %d to %d" % (rows[0]+1, rows[1])) + ft[0].header.add_history("rows from %d to %d" % (rows[0]+1, rows[1])) # extract the subset if original_nrows > nrows: @@ -176,81 +176,82 @@ def wx2d_imset (ft, imset, output, wavelengths, helcorr, errimg = errhdu.data # offset of a row from nominal - shifta2 = header.get ("shifta2", default=0.) + shifta2 = header.get("shifta2", default=0.) # offset to be applied to the trace array - offset = rows[0] - header.get ("ltv2", default=0.) + shifta2 + offset = rows[0] - header.get("ltv2", default=0.) + shifta2 # Read the array of spectral traces (a2displ), and bin if the input # data are binned in the dispersion direction. 
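The output files above are built incrementally: the primary HDU is written first with NEXTEND=0, then each image set's extensions are appended by reopening the file in update mode, as the hunks below do for the SCI, ERR, and DQ arrays. A compact sketch of that pattern; the filename and array are demo values, and `overwrite=` is a convenience for the demo only:

```python
import numpy as np
from astropy.io import fits

phdu = fits.PrimaryHDU()
phdu.header.set("nextend", 0)              # no extensions yet
phdu.writeto("out.fits", overwrite=True)

# later, append one extension per image set and bump NEXTEND
with fits.open("out.fits", mode="update") as ofd:
    ofd[0].header.set("nextend", 1)
    ofd.append(fits.ImageHDU(data=np.zeros((4, 4), dtype=np.float32)))
```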
- (a2center, a2displ) = get_trace (tracefile, ft[0].header, header) + (a2center, a2displ) = get_trace(tracefile, ft[0].header, header) if algorithm == "wavelet": - (hdu.data, errhdu.data) = wavelet_resampling (hdu, img, errimg, - original_nrows, nrows, ncols, rows, - a2center, a2displ, offset, shifta2, - imset, order, subdiv, psf_width, - subsampled, convolved) + (hdu.data, errhdu.data) = wavelet_resampling(hdu, img, errimg, + original_nrows, nrows, ncols, rows, + a2center, a2displ, offset, shifta2, + imset, order, subdiv, psf_width, + subsampled, convolved) else: - (hdu.data, errhdu.data) = kd_resampling (img, errimg, - original_nrows, nrows, ncols, rows, - a2center, a2displ, offset, shifta2) + (hdu.data, errhdu.data) = kd_resampling(img, errimg, + original_nrows, nrows, ncols, rows, + a2center, a2displ, offset, shifta2) del img # Write the SCI and ERR HDUs to the output file. - ofd = pyfits.open (output, mode="update") - ofd.append (hdu) - ofd.append (errhdu) + ofd = fits.open(output, mode="update") + ofd.append(hdu) + ofd.append(errhdu) ofd.close() if wavelengths is not None: # Write an array of wavelengths. - wl_hdu = pyfits.ImageHDU (header=hdu.header) + wl_hdu = fits.ImageHDU(header=hdu.header) if not helcorr: - helcorr = ft[0].header.get ("helcorr", default="OMIT").upper() - if ft[0].header.get ("sclamp", default="NONE") != "NONE": + helcorr = ft[0].header.get("helcorr", default="OMIT").upper() + if ft[0].header.get("sclamp", default="NONE") != "NONE": helcorr = "OMIT" - wl_hdu.data = wavelen.compute_wavelengths ((original_nrows, ncols), + wl_hdu.data = wavelen.compute_wavelengths((original_nrows, ncols), ft[0].header, header, helcorr) - ofd = pyfits.open (wavelengths, mode="update") - ofd[0].header.update ("nextend", imset) + ofd = fits.open(wavelengths, mode="update") + ofd[0].header.set("nextend", imset) if helcorr == "PERFORM": - ofd[0].header.update ("helcorr", "COMPLETE") + ofd[0].header.set("helcorr", "COMPLETE") else: - ofd[0].header.update ("helcorr", "OMIT") - ofd.append (wl_hdu) + ofd[0].header.set("helcorr", "OMIT") + ofd.append(wl_hdu) ofd.close() # Create data quality array. - hdu = ft[("DQ",imset)] + hdu = ft[("DQ", imset)] im3 = hdu.data[rows[0]:rows[1]] nrows, ncols = im3.shape - image3 = N.zeros ((subdiv*nrows,ncols), dtype=im3.dtype) + image3 = N.zeros((subdiv*nrows, ncols), dtype=im3.dtype) for j in range(subdiv): - image3[j::subdiv,:] = im3 + image3[j::subdiv, :] = im3 del im3 - hdu.data[:,:] = 4 # "bad detector pixel or beyond aperture" + hdu.data[:, :] = 4 # "bad detector pixel or beyond aperture" hdu.data[rows[0]:rows[1]] = \ - apply_trace (image3, a2center, a2displ, subdiv, offset, shifta2, "DQ") + apply_trace(image3, a2center, a2displ, subdiv, offset, shifta2, "DQ") del image3 # Write the DQ HDU to the output file. - ofd = pyfits.open (output, mode="update") - ofd[0].header.update ("nextend", imset*3) - ofd.append (hdu) + ofd = fits.open(output, mode="update") + ofd[0].header.set("nextend", imset*3) + ofd.append(hdu) ofd.close() -def wavelet_resampling (hdu, img, errimg, - original_nrows, nrows, ncols, rows, - a2center, a2displ, offset, shifta2, - imset, order, subdiv, psf_width, - subsampled, convolved): + +def wavelet_resampling(hdu, img, errimg, + original_nrows, nrows, ncols, rows, + a2center, a2displ, offset, shifta2, + imset, order, subdiv, psf_width, + subsampled, convolved): """Resample img and errimg using wavelets. 
Parameters ---------- - hdu : pyfits header/data unit object + hdu : fits header/data unit object header/data unit for a SCI extension img : ndarray SCI image array (could be a subset of full image) @@ -313,21 +314,21 @@ def wavelet_resampling (hdu, img, errimg, # readout noise divided by the square root of the number of images # that have been combined, but for MAMA data it could be 0. # minerr is this minimum value, but set a lower limit of 1. - minerr = max (1., errimg.min()) + minerr = max(1., errimg.min()) step = subdiv while step > 1: - tmp = N.zeros ((nrows*2*subdiv//step, ncols), dtype=N.float32) + tmp = N.zeros((nrows*2*subdiv//step, ncols), dtype=N.float32) err = tmp.copy() tmp[::2] = sub5[::step] tmp[1::2] = inv_avg_interp(order, tmp[::2]) tmp = inv_haar(tmp) # for computing the error, use minerr as the minimum flux - err_tmp0 = N.maximum (tmp[0::2], minerr) - err_tmp1 = N.maximum (tmp[1::2], minerr) - err[0::2] = N.sqrt (2.*err_tmp0 / (err_tmp0 + err_tmp1)) * err5[::step] - err[1::2] = N.sqrt (2.*err_tmp1 / (err_tmp0 + err_tmp1)) * err5[::step] + err_tmp0 = N.maximum(tmp[0::2], minerr) + err_tmp1 = N.maximum(tmp[1::2], minerr) + err[0::2] = N.sqrt(2.*err_tmp0 / (err_tmp0 + err_tmp1)) * err5[::step] + err[1::2] = N.sqrt(2.*err_tmp1 / (err_tmp0 + err_tmp1)) * err5[::step] step //= 2 sub5[::step] = tmp @@ -340,47 +341,48 @@ def wavelet_resampling (hdu, img, errimg, if subsampled is not None: hdu.data = sub5.copy() - ofd = pyfits.open (subsampled, mode="update") - ofd[0].header.update ("nextend", imset) - ofd.append (hdu) + ofd = fits.open(subsampled, mode="update") + ofd[0].header.set("nextend", imset) + ofd.append(hdu) ofd.close() # Optional PSF convolution if psf_width > 0.: cnv = N.zeros(sub5.shape, dtype=N.float32) - krn = N.array([stis_psf(float (j), psf_width*subdiv) \ + krn = N.array([stis_psf(float(j), psf_width*subdiv) for j in range(-32, 32)]) - krn = krn/N.sum(krn) + krn = krn / N.sum(krn) for j in range(sub5.shape[1]): - cnv[:,j] = convolve.convolve (sub5[:,j], krn, mode='same') # same size + cnv[:, j] = convolve.convolve(sub5[:, j], krn, mode='same') # same size if convolved is not None: hdu.data = cnv.copy() - ofd = pyfits.open (convolved, mode="update") - ofd[0].header.update ("nextend", imset) - ofd.append (hdu) + ofd = fits.open(convolved, mode="update") + ofd[0].header.set("nextend", imset) + ofd.append(hdu) ofd.close() else: cnv = sub5 if original_nrows > nrows: - result = N.zeros ((original_nrows, ncols), dtype=N.float32) - err_result = N.zeros ((original_nrows, ncols), dtype=N.float32) - result[rows[0]:rows[1]] = apply_trace (cnv, a2center, a2displ, - subdiv, offset, shifta2, "SCI") - err_result[rows[0]:rows[1]] = apply_trace (err5, a2center, a2displ, - subdiv, offset, shifta2, "ERR") + result = N.zeros((original_nrows, ncols), dtype=N.float32) + err_result = N.zeros((original_nrows, ncols), dtype=N.float32) + result[rows[0]:rows[1]] = apply_trace(cnv, a2center, a2displ, + subdiv, offset, shifta2, "SCI") + err_result[rows[0]:rows[1]] = apply_trace(err5, a2center, a2displ, + subdiv, offset, shifta2, "ERR") else: - result = apply_trace (cnv, a2center, a2displ, - subdiv, offset, shifta2, "SCI") - err_result = apply_trace (err5, a2center, a2displ, - subdiv, offset, shifta2, "ERR") + result = apply_trace(cnv, a2center, a2displ, + subdiv, offset, shifta2, "SCI") + err_result = apply_trace(err5, a2center, a2displ, + subdiv, offset, shifta2, "ERR") - return (result, err_result) + return result, err_result -def kd_resampling (img, errimg, - original_nrows, nrows, 
ncols, rows, - a2center, a2displ, offset, shifta2): + +def kd_resampling(img, errimg, + original_nrows, nrows, ncols, rows, + a2center, a2displ, offset, shifta2): """Apply Kris Davidson's resampling method. Parameters @@ -417,25 +419,26 @@ def kd_resampling (img, errimg, # image2 is for the error image subdiv = 8 - image2 = N.zeros ((subdiv*nrows,ncols), dtype=N.float32) - for j in range (subdiv): - image2[j::subdiv,:] = errimg + image2 = N.zeros((subdiv*nrows, ncols), dtype=N.float32) + for j in range(subdiv): + image2[j::subdiv, :] = errimg if original_nrows > nrows: - result = N.zeros ((original_nrows, ncols), dtype=N.float32) - err_result = N.zeros ((original_nrows, ncols), dtype=N.float32) - result[rows[0]:rows[1]] = kd_apply_trace (img, a2center, a2displ, - offset, shifta2) - err_result[rows[0]:rows[1]] = apply_trace (image2, a2center, a2displ, - subdiv, offset, shifta2, "ERR") + result = N.zeros((original_nrows, ncols), dtype=N.float32) + err_result = N.zeros((original_nrows, ncols), dtype=N.float32) + result[rows[0]:rows[1]] = kd_apply_trace(img, a2center, a2displ, + offset, shifta2) + err_result[rows[0]:rows[1]] = apply_trace(image2, a2center, a2displ, + subdiv, offset, shifta2, "ERR") else: - result = kd_apply_trace (img, a2center, a2displ, offset, shifta2) - err_result = apply_trace (image2, a2center, a2displ, - subdiv, offset, shifta2, "ERR") + result = kd_apply_trace(img, a2center, a2displ, offset, shifta2) + err_result = apply_trace(image2, a2center, a2displ, + subdiv, offset, shifta2, "ERR") + + return result, err_result - return (result, err_result) -def kd_apply_trace (image, a2center, a2displ, offset=0., shifta2=0.): +def kd_apply_trace(image, a2center, a2displ, offset=0., shifta2=0.): """Kris Davidson's resampling algorithm, following the trace. Parameters @@ -461,53 +464,54 @@ def kd_apply_trace (image, a2center, a2displ, offset=0., shifta2=0.): """ shape = image.shape - x2d_shape = N.array (shape) - x2d = N.zeros (x2d_shape, dtype=N.float32) + x2d_shape = N.array(shape) + x2d = N.zeros(x2d_shape, dtype=N.float32) total = shape[0] * shape[1] - flat_im = N.ravel (image) + flat_im = N.ravel(image) - for i in range (x2d_shape[0]): + for i in range(x2d_shape[0]): # y is the location in the cross-dispersion direction. - trace = interpolate_trace (a2center, a2displ, float (i) + offset, - x2d_shape[1]) - y = float (i) + shifta2 + trace + trace = interpolate_trace(a2center, a2displ, float(i) + offset, + x2d_shape[1]) + y = float(i) + shifta2 + trace - nint_y = N.around (y) + nint_y = N.around(y) s = y - nint_y - n = nint_y.astype (N.int32) - col_range = N.arange (x2d_shape[1], dtype=N.int32) + n = nint_y.astype(N.int32) + col_range = N.arange(x2d_shape[1], dtype=N.int32) # These are indices into the flattened image. 
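kd_apply_trace works on the flattened image, so a (row, col) pair maps to the flat index `row * ncols + col`; the nm2..np2 arrays in the lines that follow are exactly that mapping for the five rows bracketing the trace. A quick check of the equivalence on a toy array:

```python
import numpy as np

image = np.arange(12.).reshape(3, 4)      # 3 rows, 4 columns
flat_im = np.ravel(image)
ncols = image.shape[1]

n = np.array([1, 2, 0, 1])                # nearest row for each column
col_range = np.arange(ncols)
idx = n * ncols + col_range               # flat indices, one per column
assert np.array_equal(flat_im[idx], image[n, col_range])
```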
nm2 = (n-2) * x2d_shape[1] + col_range nm1 = (n-1) * x2d_shape[1] + col_range - n0 = n * x2d_shape[1] + col_range + n0 = n * x2d_shape[1] + col_range np1 = (n+1) * x2d_shape[1] + col_range np2 = (n+2) * x2d_shape[1] + col_range - nm2 = N.where (nm2 < 0, 0, nm2) - nm1 = N.where (nm1 < 0, 0, nm1) - n0 = N.where (n0 < 0, 0, n0) - np1 = N.where (np1 < 0, 0, np1) - np2 = N.where (np2 < 0, 0, np2) - - nm2 = N.where (nm2 >= total, total-1, nm2) - nm1 = N.where (nm1 >= total, total-1, nm1) - n0 = N.where (n0 >= total, total-1, n0) - np1 = N.where (np1 >= total, total-1, np1) - np2 = N.where (np2 >= total, total-1, np2) - - a = -0.050 * flat_im[nm2] + 0.165 * flat_im[nm1] + 0.77 * flat_im[n0] \ - + 0.165 * flat_im[np1] - 0.050 * flat_im[np2] - b = 0.08 * flat_im[nm2] - 0.66 * flat_im[nm1] \ - + 0.66 * flat_im[np1] - 0.08 * flat_im[np2] - c = 0.04 * flat_im[nm2] + 0.34 * flat_im[nm1] - 0.76 * flat_im[n0] \ - +0.34 * flat_im[np1] + 0.04 * flat_im[np2] + nm2 = N.where(nm2 < 0, 0, nm2) + nm1 = N.where(nm1 < 0, 0, nm1) + n0 = N.where(n0 < 0, 0, n0) + np1 = N.where(np1 < 0, 0, np1) + np2 = N.where(np2 < 0, 0, np2) + + nm2 = N.where(nm2 >= total, total-1, nm2) + nm1 = N.where(nm1 >= total, total-1, nm1) + n0 = N.where(n0 >= total, total-1, n0) + np1 = N.where(np1 >= total, total-1, np1) + np2 = N.where(np2 >= total, total-1, np2) + + a = - 0.050 * flat_im[nm2] + 0.165 * flat_im[nm1] + 0.77 * flat_im[n0] \ + + 0.165 * flat_im[np1] - 0.050 * flat_im[np2] + b = 0.08 * flat_im[nm2] - 0.66 * flat_im[nm1] \ + + 0.66 * flat_im[np1] - 0.08 * flat_im[np2] + c = 0.04 * flat_im[nm2] + 0.34 * flat_im[nm1] - 0.76 * flat_im[n0] \ + + 0.34 * flat_im[np1] + 0.04 * flat_im[np2] x2d[i] = a + b * s + c * s**2 return x2d + def stis_psf(x, a): """Evaluate the cross-dispersion PSF at x. @@ -528,8 +532,9 @@ def stis_psf(x, a): return (1. + (x/float(a))**2)**-2 -def apply_trace (image, a2center, a2displ, subdiv, - offset=0., shifta2=0., extname="SCI"): + +def apply_trace(image, a2center, a2displ, subdiv, + offset=0., shifta2=0., extname="SCI"): """Add together 'subdiv' rows of 'image', following the trace. Parameters @@ -580,30 +585,31 @@ def apply_trace (image, a2center, a2displ, subdiv, """ shape = image.shape - x2d_shape = N.array (shape) + x2d_shape = N.array(shape) x2d_shape[0] //= subdiv if extname == "DQ": - x2d = N.zeros (x2d_shape, dtype=N.int16) + x2d = N.zeros(x2d_shape, dtype=N.int16) else: - x2d = N.zeros (x2d_shape, dtype=N.float32) + x2d = N.zeros(x2d_shape, dtype=N.float32) - for i in range (x2d_shape[0]): + for i in range(x2d_shape[0]): # y is the location in the output, binned image (x2d), while # locn is the location in the input, oversampled image. - trace = interpolate_trace (a2center, a2displ, float (i) + offset, - x2d_shape[1]) - y = float (i) + shifta2 + trace + trace = interpolate_trace(a2center, a2displ, float(i) + offset, + x2d_shape[1]) + y = float(i) + shifta2 + trace locn = y * subdiv + (subdiv - 1.) / 2. if extname == "SCI": - x2d[i] = extract (image, locn, subdiv) + x2d[i] = extract(image, locn, subdiv) elif extname == "ERR": - x2d[i] = extract_err (image, locn, subdiv) + x2d[i] = extract_err(image, locn, subdiv) else: - x2d[i] = extract_i16 (image, locn, subdiv) + x2d[i] = extract_i16(image, locn, subdiv) return x2d -def extract (image, locn, subdiv): + +def extract(image, locn, subdiv): """Add together 'subdiv' rows of 'image', centered on 'locn'. 
Parameters @@ -630,13 +636,13 @@ def extract (image, locn, subdiv): shape = image.shape hw = subdiv // 2 - fhw = float (hw) + fhw = float(hw) # floating point locations of upper and lower edges fhigh = locn0 + fhw flow = locn0 - fhw # integer endpoints of range of whole pixels - high = N.floor (fhigh).astype (N.int32) - low = N.ceil (flow).astype (N.int32) + high = N.floor(fhigh).astype(N.int32) + low = N.ceil(flow).astype(N.int32) # fractions of pixels at upper and lower edges dhigh = fhigh - high dlow = low - flow @@ -647,13 +653,14 @@ def extract (image, locn, subdiv): s_high = high.item(j) if s_low < 1 or s_high > shape[0]-1: continue - spec[j] = N.sum(image[s_low:s_high,j]) - spec[j] += image.item(s_high,j) * dhigh.item(j) - spec[j] += image.item(s_low-1,j) * dlow.item(j) + spec[j] = N.sum(image[s_low: s_high, j]) + spec[j] += image.item(s_high, j) * dhigh.item(j) + spec[j] += image.item(s_low-1, j) * dlow.item(j) return spec -def extract_err (image, locn, subdiv): + +def extract_err(image, locn, subdiv): """Average 'subdiv' rows of 'image', centered on 'locn'. Parameters @@ -684,30 +691,31 @@ def extract_err (image, locn, subdiv): shape = image.shape hw = subdiv // 2 - fhw = float (hw) + fhw = float(hw) # floating point locations of upper and lower edges fhigh = locn0 + fhw flow = locn0 - fhw # integer endpoints of range of whole pixels - high = N.floor (fhigh).astype (N.int32) - low = N.ceil (flow).astype (N.int32) + high = N.floor(fhigh).astype(N.int32) + low = N.ceil(flow).astype(N.int32) - spec = N.zeros (shape[1], dtype=N.float32) - for j in range (shape[1]): + spec = N.zeros(shape[1], dtype=N.float32) + for j in range(shape[1]): s_low = low.item(j) s_high = high.item(j) if s_low < 1 or s_high > shape[0]-1: continue sum = 0. - for i in range (s_low, s_high+1): - s_image = image.item(i,j) + for i in range(s_low, s_high+1): + s_image = image.item(i, j) sum += s_image**2 sum /= (s_high - s_low + 1.) - spec[j] = math.sqrt (sum) + spec[j] = math.sqrt(sum) return spec -def extract_i16 (image, locn, subdiv): + +def extract_i16(image, locn, subdiv): """Bitwise OR 'subdiv' rows of 'image', centered on 'locn'. Parameters @@ -733,28 +741,29 @@ def extract_i16 (image, locn, subdiv): shape = image.shape hw = subdiv // 2 - fhw = float (hw) + fhw = float(hw) # floating point locations of upper and lower edges fhigh = locn0 + fhw flow = locn0 - fhw # integer endpoints of range of whole pixels - high = N.floor (fhigh).astype (N.int32) - low = N.ceil (flow).astype (N.int32) + high = N.floor(fhigh).astype(N.int32) + low = N.ceil(flow).astype(N.int32) - spec = N.zeros (shape[1], dtype=N.int16) - for j in range (shape[1]): + spec = N.zeros(shape[1], dtype=N.int16) + for j in range(shape[1]): s_low = low.item(j) s_high = high.item(j) if s_low < 1 or s_high > shape[0]-1: continue sum = 0 - for i in range (s_low, s_high+1): - sum |= image.item(i,j) + for i in range(s_low, s_high+1): + sum |= image.item(i, j) spec[j] = sum return spec -def interpolate_trace (a2center, a2displ, y, length): + +def interpolate_trace(a2center, a2displ, y, length): """Interpolate within the array of traces, and return a trace. 
Parameters @@ -770,15 +779,16 @@ def interpolate_trace (a2center, a2displ, y, length): """ - if len (a2displ) < 1: - trace = N.zeros (length, dtype=N.float32) + if len(a2displ) < 1: + trace = N.zeros(length, dtype=N.float32) else: # interpolate to get the trace at y - trace = r_util.interpolate (a2center, a2displ, y) + trace = r_util.interpolate(a2center, a2displ, y) return trace -def trace_name (trace, phdr): + +def trace_name(trace, phdr): """Return the 1dt table name or array. Parameters @@ -788,7 +798,7 @@ def trace_name (trace, phdr): gotten from phdr; else if this is a string it should be the name of a trace file (possibly using an environment variable); otherwise, it should be a trace, in which case it will be returned unchanged - phdr : pyfits Header object + phdr : fits Header object primary header, used only if trace is None Returns @@ -804,25 +814,26 @@ def trace_name (trace, phdr): tracefile = phdr["sptrctab"] except KeyError: raise ValueError("Keyword SPTRCTAB not found; specify trace explicitly.") - tracefile = r_util.expandFileName (tracefile) - elif isinstance (trace, str): - tracefile = r_util.expandFileName (trace) + tracefile = r_util.expandFileName(tracefile) + elif isinstance(trace, str): + tracefile = r_util.expandFileName(trace) else: # trace could already be an array object tracefile = trace return tracefile -def get_trace (tracefile, phdr, hdr): + +def get_trace(tracefile, phdr, hdr): """Read 1-D traces from the 1dt table (sptrctab). Parameters ---------- tracefile : string or array either a trace array or the name of a FITS 1dt table - phdr : pyfits Header object + phdr : fits Header object primary header of input file - hdr : pyfits Header object + hdr : fits Header object extension header of input image (for binning info and time of exposure) @@ -846,37 +857,38 @@ def get_trace (tracefile, phdr, hdr): # If the input is already an array object, return a2center # (arbitrarily set to 0) and the trace array. - is_an_array = isinstance (tracefile, N.ndarray) + is_an_array = isinstance(tracefile, N.ndarray) if is_an_array: a2center = [0.] a2displ = [tracefile] - return (a2center, a2displ) + return a2center, a2displ # These are the row-selection values. - opt_elem = phdr.get ("opt_elem", default="") - cenwave = phdr.get ("cenwave", default=0) + opt_elem = phdr.get("opt_elem", default="") + cenwave = phdr.get("cenwave", default=0) # These describe the binning in the dispersion direction. - ltm = hdr.get ("ltm1_1", default=1.) - ltv = hdr.get ("ltv1", default=0.) # one indexing - binaxis1 = int (round (1. / ltm)) + ltm = hdr.get("ltm1_1", default=1.) + ltv = hdr.get("ltv1", default=0.) # one indexing + binaxis1 = int(round(1. / ltm)) if binaxis1 not in [1, 2, 4, 8]: raise ValueError("LTM1_1 should be either 1., 0.5, 0.25, or 0.125") filter = {"opt_elem": opt_elem, "cenwave": cenwave} - trace_info = gettable.getTable (tracefile, filter, - sortcol="a2center", at_least_one=True) - expstart = hdr.get ("expstart", default=-1.) - gettable.rotateTrace (trace_info, expstart) - a2center = trace_info.field ("a2center") - 1. # zero indexing - a2displ = trace_info.field ("a2displ") + trace_info = gettable.getTable(tracefile, filter, + sortcol="a2center", at_least_one=True) + expstart = hdr.get("expstart", default=-1.) + gettable.rotateTrace(trace_info, expstart) + a2center = trace_info.field("a2center") - 1. 
# zero indexing + a2displ = trace_info.field("a2displ") + + if binaxis1 > 1 and len(a2displ) > 0: + a2displ = bin_traces(a2displ, binaxis1, ltv) - if binaxis1 > 1 and len (a2displ) > 0: - a2displ = bin_traces (a2displ, binaxis1, ltv) + return a2center, a2displ - return (a2center, a2displ) -def bin_traces (a2displ, binaxis1, ltv): +def bin_traces(a2displ, binaxis1, ltv): """bin the traces by the factor binaxis1 Parameters @@ -896,10 +908,10 @@ def bin_traces (a2displ, binaxis1, ltv): """ - if len (a2displ) < 1 or binaxis1 == 1: + if len(a2displ) < 1 or binaxis1 == 1: return a2displ - oldlen = len (a2displ[0]) + oldlen = len(a2displ[0]) if binaxis1 == 2: newlen = 511 elif binaxis1 == 4: @@ -917,30 +929,32 @@ def bin_traces (a2displ, binaxis1, ltv): # the offset of our binned image (in reference pixel units). left_edge = (0.5 - ltv) * binaxis1 offset = left_edge - 0.5 # should be an integer - offset = int (round (offset)) + offset = int(round(offset)) for trace in a2displ: - newtrace = N.zeros (newlen, dtype=N.float32) - for i in range (binaxis1): + newtrace = N.zeros(newlen, dtype=N.float32) + for i in range(binaxis1): # this slice can be longer than newlen temp = trace[i+offset:oldlen:binaxis1] newtrace += temp[0:newlen] - newtrace /= float (binaxis1) - newtraces.append (newtrace) + newtrace /= float(binaxis1) + newtraces.append(newtrace) + + return N.array(newtraces) - return N.array (newtraces) def inv_haar(image): image[0::2] = (image[0::2] + image[1::2]) / 2. image[1::2] = (image[0::2] - image[1::2]) return image + def inv_avg_interp(order, image): side, j0, j1 = (order-1)//2, (order-1)//2, (order+1)//2 rows, cols = image.shape - order_2 = float (order) / 2. + order_2 = float(order) / 2. n = order + 1 x = N.arange(n, dtype=N.float64) @@ -952,6 +966,7 @@ def inv_avg_interp(order, image): d[j] = -((y[j1] + y[j0]) - 2. * polynomial(x, y, order_2, n)) return d + def polynomial(x, y, z, n): """used for interpolation @@ -979,7 +994,7 @@ def polynomial(x, y, z, n): # Note that the command-line options do not include all of the # wx2d function arguments. - if len (sys.argv) < 3 or len (sys.argv) > 7: + if len(sys.argv) < 3 or len(sys.argv) > 7: print("Syntax: wx2d.py input output " "[wavelengths trace minrow maxrow]") sys.exit() @@ -989,18 +1004,18 @@ def polynomial(x, y, z, n): wavelengths = None trace = None - if len (sys.argv) > 3: + if len(sys.argv) > 3: if sys.argv[3] != "None": wavelengths = sys.argv[3] - if len (sys.argv) > 4: + if len(sys.argv) > 4: if sys.argv[4] != "None": trace = sys.argv[4] - if len (sys.argv) == 7: - row0 = int (sys.argv[5]) - 1 - row1 = int (sys.argv[6]) - wx2d (sys.argv[1], sys.argv[2], wavelengths=wavelengths, trace=trace, - rows=(row0,row1)) + if len(sys.argv) == 7: + row0 = int(sys.argv[5]) - 1 + row1 = int(sys.argv[6]) + wx2d(sys.argv[1], sys.argv[2], wavelengths=wavelengths, trace=trace, + rows=(row0, row1)) else: - wx2d (sys.argv[1], sys.argv[2], wavelengths=wavelengths, trace=trace) + wx2d(sys.argv[1], sys.argv[2], wavelengths=wavelengths, trace=trace) diff --git a/lib/stistools/x1d.py b/stistools/x1d.py similarity index 95% rename from lib/stistools/x1d.py rename to stistools/x1d.py index d83db09..0311015 100644 --- a/lib/stistools/x1d.py +++ b/stistools/x1d.py @@ -1,13 +1,12 @@ #! /usr/bin/env python -from __future__ import division, print_function # confidence unknown import os import sys import getopt import glob import subprocess -from stsci.tools import parseinput,teal +from stsci.tools import parseinput, teal """ Extract 1-D spectrum. 
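bin_traces, in the wx2d hunks above, bins a reference-pixel trace down by `binaxis1` by averaging `binaxis1` interleaved slices. A sketch of the core loop; the LTV1-derived edge offset that the real code folds into the slice start is omitted here for brevity, and the toy trace length is illustrative:

```python
import numpy as np

def bin_trace(trace, binaxis1, newlen):
    """Average binaxis1 interleaved samples, as bin_traces does per trace."""
    newtrace = np.zeros(newlen, dtype=np.float32)
    for i in range(binaxis1):
        part = trace[i::binaxis1]          # every binaxis1-th sample
        newtrace += part[:newlen]
    return newtrace / float(binaxis1)

trace = np.arange(8., dtype=np.float32)    # toy trace, length 8
print(bin_trace(trace, 2, 4))              # [0.5 2.5 4.5 6.5]
```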
@@ -43,6 +42,7 @@ __vdate__ = "13-November-2013" __author__ = "Phil Hodge, STScI, November 2013." + def main(args): if len(args) < 1: @@ -86,6 +86,7 @@ def main(args): sys.exit(status) + def prtOptions(): """Print a list of command-line options and arguments.""" @@ -101,6 +102,7 @@ def prtOptions(): print("One or more output file names may be specified (the same number") print(" as the input file names).") + def x1d(input, output="", backcorr="perform", ctecorr="perform", dispcorr="perform", helcorr="perform", fluxcorr="perform", @@ -240,7 +242,7 @@ def x1d(input, output="", files = glob.glob(in2) infiles.extend(files) if input1 and not infiles: - print("No file name matched the string '%s'" % input) + print("No file name matched the string '{}'".format(input)) return 2 if output: @@ -257,14 +259,14 @@ def x1d(input, output="", n_infiles = len(infiles) if outfiles and len(outfiles) != n_infiles: - print("You specified %d input files but %d output files." % + print("You specified {} input files but {} output files.".format (n_infiles, len(outfiles))) print("The number of input and output files must be the same.") return 2 if trailer: if verbose and os.access(trailer, os.F_OK): - print("Appending to trailer file %s" % trailer) + print("Appending to trailer file {}".format(trailer)) f_trailer = open(trailer, "a") fd_trailer = f_trailer.fileno() else: @@ -362,7 +364,7 @@ def x1d(input, output="", arglist.append("%d" % bksorder) else: raise RuntimeError("bksmode must be one of 'off'," - " 'median', 'average'; you specified '%s'" % bksmode) + " 'median', 'average'; you specified '%s'" % bksmode) if algorithm: if algorithm == "unweighted": @@ -374,21 +376,21 @@ def x1d(input, output="", arglist.append("-idt") else: raise RuntimeError("algorithm must be either 'unweighted'" - " or 'sc2d'; you specified '%s'" % algorithm) + " or 'sc2d'; you specified '%s'" % algorithm) if xoffset is not None: arglist.append("-st") arglist.append("%.10g" % xoffset) if verbose: - print("Running x1d on %s" % infile) - print(" %s" % str(arglist)) + print("Running x1d on {}".format(infile)) + print(" {}".format(arglist)) status = subprocess.call(arglist, stdout=fd_trailer, stderr=subprocess.STDOUT) if status: cumulative_status = 1 if verbose: - print("Warning: status = %d" % status) + print("Warning: status = {}".format(status)) if f_trailer is not None: f_trailer.close() @@ -399,10 +401,12 @@ def x1d(input, output="", # Interfaces used by TEAL # #-------------------------# + def getHelpAsString(fulldoc=True): """Return documentation on the x1d function.""" return x1d.__doc__ + def run(configobj=None): """TEAL interface for the x1d function.""" x1d(input=configobj["input"], diff --git a/lib/stistools/x2d.py b/stistools/x2d.py similarity index 94% rename from lib/stistools/x2d.py rename to stistools/x2d.py index 4fe58a2..fa921ac 100755 --- a/lib/stistools/x2d.py +++ b/stistools/x2d.py @@ -1,13 +1,12 @@ #! /usr/bin/env python -from __future__ import division, print_function # confidence unknown import os import sys import getopt import glob import subprocess -from stsci.tools import parseinput,teal +from stsci.tools import parseinput, teal """ Rectify 2-D STIS spectral data. @@ -43,6 +42,7 @@ __vdate__ = "13-November-2013" __author__ = "Phil Hodge, STScI, November 2013." 
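# The x1d wrapper above and the x2d wrapper below share one pattern: build an
# argument list for the compiled calstis executable and hand it to
# subprocess.call(), with stdout/stderr redirected into an optional "trailer"
# log opened in append mode.  A minimal sketch of that pattern (the executable
# and file names in the example are placeholders, not taken from this diff):
import subprocess

def run_with_trailer(arglist, trailer=None):
    """Run `arglist`, appending the child's stdout/stderr to `trailer`."""
    f_trailer = open(trailer, "a") if trailer else None
    fd_trailer = f_trailer.fileno() if f_trailer is not None else None
    try:
        # Fold stderr into stdout so the trailer captures both streams.
        return subprocess.call(arglist, stdout=fd_trailer,
                               stderr=subprocess.STDOUT)
    finally:
        if f_trailer is not None:
            f_trailer.close()

# Example: run_with_trailer(["cs7.e", "input_flt.fits", "output_x2d.fits"],
#                           trailer="run.trl")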
+ def main(args): if len(args) < 1: @@ -87,6 +87,7 @@ def main(args): sys.exit(status) + def prtOptions(): """Print a list of command-line options and arguments.""" @@ -102,6 +103,7 @@ def prtOptions(): print("One or more output file names may be specified (the same number") print(" as the input file names).") + def x2d(input, output="", helcorr="perform", fluxcorr="perform", statflag=True, center=False, blazeshift=None, err_alg="wgt_var", @@ -198,7 +200,7 @@ def x2d(input, output="", files = glob.glob(in2) infiles.extend(files) if input1 and not infiles: - print("No file name matched the string '%s'" % input) + print("No file name matched the string '{}'".format(input)) return 2 if output: @@ -215,14 +217,14 @@ def x2d(input, output="", n_infiles = len(infiles) if outfiles and len(outfiles) != n_infiles: - print("You specified %d input files but %d output files." % + print("You specified {} input files but {} output files.".format (n_infiles, len(outfiles))) print("The number of input and output files must be the same.") return 2 if trailer: if verbose and os.access(trailer, os.F_OK): - print("Appending to trailer file %s" % trailer) + print("Appending to trailer file {}".format(trailer)) f_trailer = open(trailer, "a") fd_trailer = f_trailer.fileno() else: @@ -257,21 +259,21 @@ def x2d(input, output="", arglist.append("-wgt_err") elif err_alg != "wgt_var": raise RuntimeError("err_alg must be either 'wgt_err'" - " or 'wgt_var'; you specified '%s'" % err_alg) + " or 'wgt_var'; you specified '%s'" % err_alg) if blazeshift is not None: arglist.append("-b") arglist.append("%.10g" % blazeshift) if verbose: - print("Running x2d on %s" % infile) - print(" %s" % str(arglist)) + print("Running x2d on {}".format(infile)) + print(" {}".format(arglist)) status = subprocess.call(arglist, stdout=fd_trailer, stderr=subprocess.STDOUT) if status: cumulative_status = 1 if verbose: - print("Warning: status = %d" % status) + print("Warning: status = {}".format(status)) if f_trailer is not None: f_trailer.close() @@ -282,10 +284,12 @@ def x2d(input, output="", # Interfaces used by TEAL # #-------------------------# + def getHelpAsString(fulldoc=True): """Return documentation on the x2d function.""" return x2d.__doc__ + def run(configobj=None): """TEAL interface for the x2d function.""" x2d(input=configobj["input"], diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/helpers/__init__.py b/tests/helpers/__init__.py new file mode 100644 index 0000000..19a513b --- /dev/null +++ b/tests/helpers/__init__.py @@ -0,0 +1,3 @@ +from .io import * +from .mark import * +from .utils import * diff --git a/tests/helpers/io.py b/tests/helpers/io.py new file mode 100644 index 0000000..2ab1437 --- /dev/null +++ b/tests/helpers/io.py @@ -0,0 +1,118 @@ +import copy +import json +import os +import shutil + +from .utils import check_url, download + +UPLOAD_SCHEMA = {"files": [ + {"pattern": "", + "target": "", + "props": None, + "recursive": "false", + "flat": "true", + "regexp": "false", + "explode": "false", + "excludePatterns": [] + } + ] + } + +__all__ = ['BigdataError', 'get_bigdata', 'upload_results'] + + +BIGDATA_PATHS = [ + os.environ.get('TEST_BIGDATA', '/srv/rt/betadrizzle'), + 'https://bytesalad.stsci.edu/artifactory/datb-stistools' +] + + +class BigdataError(Exception): + pass + + +def _select_bigdata(): + """ Find and return the path to the nearest available big data set + """ + for path in BIGDATA_PATHS: + if os.path.exists(path) or check_url(path): + return path + +
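+ # Fall through: no big data source was reachable on disk or over the network.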
return None + + +def get_bigdata(*args): + """ Acquire requested data from a managed resource + Usage: + filename = get_bigdata('abc', '123', 'sample.fits') + with open(filename, 'rb') as data: + example = data.read() + Returns: + Absolute path to local copy of data (i.e. /path/to/example.fits) + """ + src = os.path.join(_select_bigdata(), *args) + filename = os.path.basename(src) + dest = os.path.abspath(os.path.join(os.curdir, filename)) + + if os.path.exists(src): + if src == dest: + raise BigdataError('Source and destination paths are identical: ' + '{}'.format(src)) + shutil.copy2(src, dest) + + elif check_url(src): + download(src, dest) + + else: + raise BigdataError('Failed to retrieve data: {}'.format(src)) + + return dest + + +def upload_results(**kwargs): + """Write out JSON file to upload results from test to storage area. + This function relies on the JFROG JSON schema for uploading data into + artifactory using the Jenkins plugin. Docs can be found at:: + https://www.jfrog.com/confluence/display/RTF/Using+File+Specs + Parameters + ---------- + pattern : str or list of strings + Specifies the local file system path to test results which should be + uploaded to Artifactory. You can specify multiple artifacts by using + wildcards or a regular expression as designated by the regexp property. + target : str + Specifies the target path in Artifactory in the following format: + [repository_name]/[repository_path] + testname : str + Name of the test that generated the results. This will be used to create + the name of the JSON file to enable these results to be uploaded to + Artifactory. + recursive : bool, optional + Specify whether or not to identify files listed in sub-directories + for uploading. Default: False + """ + # Interpret mandatory inputs + pattern = kwargs.get("pattern") + target = kwargs.get("target") + testname = kwargs.get("testname") + + # Finish interpreting inputs + jsonfile = "{}_results.json".format(testname) + recursive = repr(kwargs.get("recursive", False)).lower() + + if isinstance(pattern, list): + # Populate schema for this test's data + upload_schema = {"files": []} + + for p in pattern: + temp_schema = copy.deepcopy(UPLOAD_SCHEMA["files"][0]) + temp_schema.update({"pattern": p, "target": target, "recursive": recursive}) + upload_schema["files"].append(temp_schema) + + else: + # Populate schema for this test's data + upload_schema = copy.deepcopy(UPLOAD_SCHEMA) + upload_schema["files"][0].update({"pattern": pattern, "target": target, "recursive": recursive}) + + # Write out JSON file with description of test results + with open(jsonfile, 'w') as outfile: + json.dump(upload_schema, outfile) diff --git a/tests/helpers/mark.py b/tests/helpers/mark.py new file mode 100644 index 0000000..6ac793b --- /dev/null +++ b/tests/helpers/mark.py @@ -0,0 +1,52 @@ +import crds +import os +import pytest +import re + +__all__ = ['runslow', 'slow', 'require_bigdata', + 'not_under_travis', 'require_crds_context'] + + +# Decorator to indicate slow tests +runslow = pytest.mark.skipif( + not pytest.config.getoption("--runslow"), + reason="need --runslow option to run" +) + +# pytest marker to mark resource-intensive tests that should not be +# executed with every commit.
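+# Usage: decorate a test with @slow; marked tests are skipped unless pytest +# is invoked with the --runslow option.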
+slow = runslow + +# Decorator to indicate BIGDATA required +require_bigdata = pytest.mark.skipif( + not pytest.config.getoption('--bigdata'), + reason='need --bigdata option to run' +) + +# Decorator to skip a test if running under TravisCI +not_under_travis = pytest.mark.skipif( + "TRAVIS" in os.environ and os.environ["TRAVIS"] == "true", + reason='Temporarily disabled due to performance issues' +) + + +# Decorator to skip if CRDS_CONTEXT is not at least a certain level. +def require_crds_context(required_context): + """Ensure the CRDS context is at least a certain level + Parameters + ---------- + required_context: int + The minimal context level required + Returns + ------- + pytest.mark.skipif decorator + """ + current_context_string = crds.get_context_name('jwst') + match = re.match(r'jwst_(\d\d\d\d)\.pmap', current_context_string) + current_context = int(match.group(1)) + return pytest.mark.skipif( + current_context < required_context, + reason='CRDS context {} less than required context {}'.format( + current_context_string, required_context + ) + ) diff --git a/tests/helpers/utils.py b/tests/helpers/utils.py new file mode 100644 index 0000000..265c800 --- /dev/null +++ b/tests/helpers/utils.py @@ -0,0 +1,222 @@ +import os +import re +import requests +from astropy.io import fits + +__all__ = ['cmp_fitshdr', 'cmp_gen_hdrkeywords', + 'word_precision_check', 'abspath', + 'download', 'check_url'] + +RE_URL = re.compile(r'\w+://\S+') + +default_compare = dict( + ignore_keywords=['DATE', 'CAL_VER', 'CAL_VCS', 'CRDS_VER', 'CRDS_CTX'], + keys=['primary', 'sci', 'dq'], + rtol=0.000001, +) + + +def cmp_fitshdr(left, right, **kwargs): + """Compare FITS header values using keywords + Parameters + ---------- + left, right: str + The FITS files to compare + keys: list + The header keywords to compare `left` and `right` + (See `default_compare` for initial values) + rtol: float + The relative difference to allow when comparing two float values either + in header values, image arrays, or table columns.
+ (See `default_compare` for initial values) + no_assert: boolean + Return result object instead of asserting + kwargs: dict + Additional arguments to be passed to `FITSDiff` + Returns + ------- + None + Assert left and right are identical + """ + assert isinstance(left, str) + assert isinstance(right, str) + + # Pop the options handled here so they are not passed to FITSDiff a + # second time through **kwargs. + no_assert = kwargs.pop('no_assert', False) + local_kwargs = dict( + keys=kwargs.pop('keys', default_compare['keys']), + rtol=kwargs.pop('rtol', default_compare['rtol']), + ignore_keywords=kwargs.pop('ignore_keywords', + default_compare['ignore_keywords']) + ) + + keys = local_kwargs['keys'] + rtol = local_kwargs['rtol'] + ignore_keywords = local_kwargs['ignore_keywords'] + + assert isinstance(keys, list) + assert isinstance(rtol, float) + assert isinstance(ignore_keywords, list) + + with fits.open(left) as a: + with fits.open(right) as b: + result = fits.diff.FITSDiff(fits.HDUList([a[kw] for kw in keys]), + fits.HDUList([b[kw] for kw in keys]), + ignore_keywords=ignore_keywords, + rtol=rtol, + **kwargs) + + if no_assert: + return result + + assert result.identical, result.report() + + +def cmp_gen_hdrkeywords(fitsobj, base, keywords, limit=0, start=0): + """Generate list of FITS header elements to compare + TODO: Use correct terminology + Parameters + ---------- + fitsobj: HDUList + TODO + base: str + Primary keyword + keywords: list + Additional keywords to use + limit: int + Number of extensions + Note: 1-indexed + start: int + Start with extension number + Returns + ------- + output: list + Keywords to compare + """ + assert isinstance(fitsobj, fits.HDUList) + assert isinstance(base, str) + assert isinstance(keywords, list) + + output = list(fitsobj[base]) + + if limit and not start: + start += 1 + + for idx in range(start, limit + 1): + for key in keywords: + if not limit: + output.append(fitsobj[key]) + else: + output.append(fitsobj[key, idx]) + + return output + + +# Check strings based on words using length precision +def word_precision_check(str1, str2, length=5): + """Check two strings word-by-word based on word length + The strings are checked word for word, but only for the first + `length` characters + Parameters + ---------- + str1, str2: str + The strings to compare + length: int + The number of characters in each word to check.
+ Returns + ------- + match: boolean + True if the strings match + """ + words1 = str1.split() + words2 = str2.split() + if len(words1) != len(words2): + return False + for w1, w2 in zip(words1, words2): + if w1[:length] != w2[:length]: + break + else: + return True + return False + + +def test_word_precision_check(): + """Test word_precision_check""" + s1 = "a b c" + s2 = "aa bb cc" + s3 = "aa bb cc dd" + s4 = "aazz bbzz cczz" + + assert word_precision_check(s1, s1) + assert not word_precision_check(s1, s2) + assert word_precision_check(s1, s2, length=1) + assert not word_precision_check(s2, s3) + assert word_precision_check(s2, s4, length=2) + + +def abspath(filepath): + """Get the absolute file path""" + return os.path.abspath(os.path.expanduser(os.path.expandvars(filepath))) + + +def download(url, dest): + """Simple http/https downloader + """ + dest = os.path.abspath(dest) + + with requests.get(url, stream=True) as r: + with open(dest, 'w+b') as data: + for chunk in r.iter_content(chunk_size=0x4000): + data.write(chunk) + + return dest + + +def check_url(url): + """ Determine if `url` can be resolved without error + """ + if RE_URL.match(url) is None: + return False + + r = requests.head(url, allow_redirects=True) + if r.status_code >= 400: + return False + return True + + +def add_suffix(fname, suffix, range=None): + """Add suffix to file name + Parameters + ---------- + fname: str + The file name to add the suffix to + suffix: str + The suffix to add + range: range + If specified, the set of indexes will be added to the + outputs. + Returns + ------- + fname, fname_with_suffix + 2-tuple of the original file name and name with suffix. + If `range` is defined, `fname_with_suffix` will be a list. + """ + fname_root, fname_ext = os.path.splitext(fname) + if range is None: + with_suffix = ''.join([ + fname_root, + '_', + suffix, + fname_ext + ]) + else: + with_suffix = [] + for idx in range: + with_suffix.append(''.join([ + fname_root, + '_', + str(idx), + '_', + suffix, + fname_ext + ])) + + return fname, with_suffix diff --git a/tests/resources.py b/tests/resources.py new file mode 100644 index 0000000..28c2e90 --- /dev/null +++ b/tests/resources.py @@ -0,0 +1,375 @@ +"""HSTCAL regression test helpers.""" +from astropy.extern.six.moves import urllib + +import getpass +import os +import sys +import math +from io import StringIO +import shutil +import datetime +from os.path import splitext +from difflib import unified_diff + +import pytest +import requests +from astropy.io import fits +from astropy.io.fits import FITSDiff +from astropy.table import Table +from astropy.utils.data import conf + + +from .helpers.mark import require_bigdata +from .helpers.io import get_bigdata, upload_results + +__all__ = ['download_crds', 'ref_from_image', 'raw_from_asn', + 'BaseCal', 'BaseSTIS'] + + +def _download_file(url, filename, filemode='wb', timeout=None): + """Generic remote data download.""" + if url.startswith('http'): + r = requests.get(url, timeout=timeout) + with open(filename, filemode) as fout: + fout.write(r.content) + elif url.startswith('ftp'): # TODO: Support filemode and timeout. + urllib.request.urlretrieve(url, filename=filename) + else: # pragma: no cover + raise ValueError('Unsupported protocol for {}'.format(url)) + + +def download_crds(refdir, refname, timeout=None): + """Download a CRDS file from HTTP or FTP to current directory.""" + # CRDS file for given name never changes, so no need to re-download.
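+ # Try the HTTP mirror first; on any failure, fall back to anonymous FTP.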
+ if os.path.exists(refname): + return + + try: + url = 'http://ssb.stsci.edu/cdbs/{}/{}'.format(refdir, refname) + local_file = os.path.abspath(refname) + print("Downloading CRDS file: {}".format(local_file)) + _download_file(url, refname, timeout=timeout) + except Exception: # Fall back to FTP + url = 'ftp://ftp.stsci.edu/cdbs/{}/{}'.format(refdir, refname) + _download_file(url, refname, timeout=timeout) + + +def _get_reffile(hdr, key): + """Get ref file from given key in given FITS header.""" + ref_file = None + if key in hdr: # Keyword might not exist + ref_file = hdr[key].strip() + if ref_file.upper() == 'N/A': # Not all ref files are defined + ref_file = None + return ref_file + + +def ref_from_image(input_image): + """ + Return a list of reference filenames, as defined in the primary + header of the given input image, necessary for calibration; i.e., + only those associated with ``*CORR`` set to ``PERFORM`` will be + considered. + """ + # NOTE: Add additional mapping as needed. + # Map mandatory CRDS reference files for instrument/detector combo. + + reffile_lookup = ['BPIXTAB', 'DARKFILE', 'PFLTFILE', 'LFLTFILE', 'PHOTTAB', + 'IMPHTTAB', 'APERTAB', 'CCDTAB', 'BIASFILE', 'CRREJTAB', + 'IDCTAB', 'TDSTAB', 'SPTRCTAB', 'SDCTAB', + 'PCTAB', 'TDCTAB', 'MLINTAB', 'GACTAB', 'WCPTAB', + 'LAMPTAB', 'APDESTAB', 'XTRACTAB', 'DISPTAB', 'INANGTAB', + 'CDSTAB', 'ECHSCTAB', 'EXSTAB', 'HALOTAB', 'TELTAB', + 'RIPTAB', 'SRWTAB'] + + ref_files = [] + hdr = fits.getheader(input_image, ext=0) + + for reffile in reffile_lookup: + s = _get_reffile(hdr, reffile) + if s is not None: + ref_files.append(s) + + return ref_files + + +def raw_from_asn(asn_file, suffix='_raw.fits'): + """Return a list of RAW input files in a given ASN.""" + raw_files = [] + tab = Table.read(asn_file, format='fits') + + for row in tab: + if row['MEMTYPE'].startswith('PROD'): + continue + pfx = row['MEMNAME'].lower().strip().replace('\x00', '') + raw_files.append(pfx + suffix) + + return raw_files + + +# Base classes for actual tests. +# NOTE: Named in a way so pytest will not pick them up here. +# @pytest.mark.require_bigdata +class BaseCal(object): + prevdir = os.getcwd() + use_ftp_crds = True + timeout = 30 # seconds + tree = '' + results_root = 'datb-stistools/results' + + # Numpy default for allclose comparison + rtol = 5e-7 + + atol = 0 + + # To be defined by instrument + refstr = '' + prevref = '' + input_loc = '' + ref_loc = '' + ignore_keywords = [] + + # To be defined by individual test + subdir = '' + + @pytest.fixture(autouse=True) + def setup_class(self, tmpdir, envopt): + """ + Run test in own dir so we can keep results separate from + other tests. + """ + if not tmpdir.ensure(self.subdir, dir=True): + p = tmpdir.mkdir(self.subdir).strpath + else: + p = tmpdir.join(self.subdir).strpath + os.chdir(p) + + # NOTE: This could be explicitly controlled using pytest fixture + # but too many ways to do the same thing would be confusing. + # Refine this logic if using pytest fixture. + # HSTCAL cannot open remote CRDS on FTP but central storage is okay. + # So use central storage if available to avoid FTP.
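+ # If no CRDS cache is configured, or the configured one is a remote URL, + # point the reference-file environment variable at the test directory and + # note that reference files must be fetched on demand.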
+ if self.prevref is None or self.prevref.startswith(('ftp', 'http')): + os.environ[self.refstr] = p + os.sep + self.use_ftp_crds = True + + # Turn off Astrometry updates + os.environ['ASTROMETRY_STEP_CONTROL'] = 'OFF' + + # This controls astropy.io.fits timeout + conf.remote_timeout = self.timeout + + # Update tree to point to correct environment + self.tree = envopt + + def teardown_class(self): + """Reset path and variables.""" + conf.reset('remote_timeout') + os.chdir(self.prevdir) + if self.use_ftp_crds and self.prevref is not None: + os.environ[self.refstr] = self.prevref + + def get_data(self, *args): + """ + Download `filename` into working directory using + `helpers/io/get_bigdata`. This will then return the full path to + the local copy of the file. + """ + local_file = get_bigdata(self.tree, self.input_loc, *args) + + return local_file + + def get_input_file(self, *args, refsep='$'): + """ + Download or copy input file (e.g., RAW) into the working directory. + The associated CRDS reference files in ``refstr`` are also + downloaded, if necessary. + """ + filename = self.get_data(*args) + + print(filename) + + ref_files = ref_from_image(filename) + print("Looking for REF_FILES: {}".format(ref_files)) + + for ref_file in ref_files: + if ref_file.strip() == '': + continue + if refsep not in ref_file: # Local file + refname = self.get_data('customRef', ref_file) + else: # Download from FTP, if applicable + s = ref_file.split(refsep) + refdir = s[0] + refname = s[1] + if self.use_ftp_crds: + download_crds(refdir, refname, timeout=self.timeout) + return filename + + def compare_outputs(self, outputs, raise_error=True, delete_history=False): + """ + Compare output with "truth" using appropriate + diff routine; namely, + ``fitsdiff`` for FITS file comparisons and + ``unified_diff`` for ASCII products. + Parameters + ---------- + outputs : list of tuple + A list of tuples, each containing filename (without path) + of CALXXX output and truth, in that order. + raise_error : bool + Raise ``AssertionError`` if difference is found. + delete_history : bool + If `True`, delete the HISTORY cards from both primary headers + before comparing FITS files. + Returns + ------- + report : str + Report from ``fitsdiff``. + This is part of error message if ``raise_error=True``. + """ + all_okay = True + creature_report = '' + # Create instructions for uploading results to artifactory for use + # as new comparison/truth files + testpath, testname = os.path.split(os.path.abspath(os.curdir)) + # organize results by day test was run...could replace with git-hash + whoami = getpass.getuser() or 'nobody' + dt = datetime.datetime.now().strftime("%d%b%YT") + ttime = datetime.datetime.now().strftime("%H_%M_%S") + user_tag = 'NOT_CI_{}_{}'.format(whoami, ttime) + build_tag = os.environ.get('BUILD_TAG', user_tag) + build_suffix = os.environ.get('BUILD_MATRIX_SUFFIX', 'standalone') + testdir = "{}_{}_{}".format(testname, build_tag, build_suffix) + tree = os.path.join(self.results_root, self.input_loc, + dt, testdir) + os.sep + + updated_outputs = [] + for actual, desired in outputs: + # Get "truth" image + s = self.get_data('truth', desired) + if s is not None: + desired = s + + if actual.endswith('fits'): + # Working with FITS files...
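+ # HISTORY cards record run-specific processing details, so they can be + # stripped from both primary headers before the comparison.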
+ if delete_history is True: + # Remember the file names; 'actual' and 'desired' are + # rebound to open HDULists for the comparison. + actual_name, desired_name = actual, desired + actual = fits.open(actual) + desired = fits.open(desired) + if 'HISTORY' in actual[0].header: + del actual[0].header['HISTORY'] + if 'HISTORY' in desired[0].header: + del desired[0].header['HISTORY'] + + fdiff = FITSDiff(actual, desired, rtol=self.rtol, + atol=self.atol, + ignore_keywords=self.ignore_keywords) + + if delete_history is True: + actual.close() + desired.close() + # Restore the names so the failure bookkeeping below records + # file paths rather than closed HDUList objects. + actual, desired = actual_name, desired_name + + creature_report += fdiff.report() + if not fdiff.identical: + # Only keep track of failed results which need to + # be used to replace the truth files (if OK). + updated_outputs.append((actual, desired)) + if not fdiff.identical and all_okay: + all_okay = False + else: + # Try ASCII-based diff + with open(actual) as afile: + actual_lines = afile.readlines() + with open(desired) as dfile: + desired_lines = dfile.readlines() + udiff = unified_diff(actual_lines, desired_lines, + fromfile=actual, tofile=desired) + + old_stdout = sys.stdout + udiffIO = StringIO() + sys.stdout = udiffIO + sys.stdout.writelines(udiff) + sys.stdout = old_stdout + udiff_report = udiffIO.getvalue() + creature_report += udiff_report + if len(udiff_report) > 2 and all_okay: + all_okay = False + if len(udiff_report) > 2: + # Only keep track of failed results which need to + # be used to replace the truth files (if OK). + updated_outputs.append((actual, desired)) + + if not all_okay: + # Write out JSON file to enable retention of different results + new_truths = [os.path.abspath(i[1]) for i in updated_outputs] + for files in updated_outputs: + print("Renaming {} as new 'truth' file: {}".format( + files[0], files[1])) + shutil.move(files[0], files[1]) + log_pattern = [os.path.join(os.path.dirname(x), '*.log') + for x in new_truths] + upload_results(pattern=new_truths + log_pattern, + testname=testname, + target=tree) + + if not all_okay and raise_error: + raise AssertionError(os.linesep + creature_report) + + return creature_report + + +class BaseSTIS(BaseCal): + + refstr = 'oref' + prevref = os.environ.get(refstr) + input_loc = '' + ref_loc = '/ref' + ignore_keywords = ['date', 'filename', 'iraf-tlm', 'fitsdate', 'history'] + # 'cal_ver' + + def read_image(self, filename): + """ + Read the image from a fits file + """ + hdu = fits.open(filename) + + image = hdu[1].data + hdu.close() + return image + + +def add_suffix(fname, suffix, range=None): + """Add suffix to file name + Parameters + ---------- + fname: str + The file name to add the suffix to + suffix: str + The suffix to add + range: range + If specified, the set of indexes will be added to the + outputs. + Returns + ------- + fname, fname_with_suffix + 2-tuple of the original file name and name with suffix. + If `range` is defined, `fname_with_suffix` will be a list.
+ """ + fname_root, fname_ext = splitext(fname) + if range is None: + with_suffix = ''.join([ + fname_root, + '_', + suffix, + fname_ext + ]) + else: + with_suffix = [] + for idx in range: + with_suffix.append(''.join([ + fname_root, + '_', + str(idx), + '_', + suffix, + fname_ext + ])) + + return fname, with_suffix diff --git a/tests/test_basic2d.py b/tests/test_basic2d.py new file mode 100644 index 0000000..41d1e27 --- /dev/null +++ b/tests/test_basic2d.py @@ -0,0 +1,130 @@ +from stistools.basic2d import basic2d +from .resources import BaseSTIS +from .helpers.mark import require_bigdata + + +@require_bigdata +class TestBasic2d(BaseSTIS): + + input_loc = 'basic2d' + ref_loc = 'basic2d/ref' + + def test_basic2d_lev1a(self): + """ + BASIC2D - stsdas/hst_calib/stis/basic2d: level 1a + This regression test for this level of this task is a running of the + task with ALL parameters blazing (equal to 'perform'). This includes + the additional creation of an output test file for bias levels. + The input data for this test is a STIS NUV_MAMA CCD image. + """ + + # Prepare input files. + self.get_input_file("input", "o6d806030_raw.fits") + + # Run basic2d + basic2d("o6d806030_raw.fits", output="basic2d_lev1a_flt.fits") + + # Compare results + outputs = [("basic2d_lev1a_flt.fits", "basic2d_lev1a_flt_ref.fits")] + self.compare_outputs(outputs) + + def test_basic2d_lev3a_blev(self): + """ + The regression test for this level of this task is a running of the + task with ONLY the bias level parameter parameter set (equal to + 'perform'). This includes the additional creation of an output test + file for bias levels. The input data for this test is a STIS CCD image. + """ + + # Prepare input files. + self.get_input_file("input", "o3wd01060_raw.fits") + self.get_data("input", "o3wd01060_wav.fits") + self.get_data("input", "o3wd01060_spt.fits") + + # Run basic2d + basic2d('o3wd01060_raw.fits', output="basic2d_lev3a_blev.fits", + outblev="basic2d_lev3a_blev.dat", dqicorr="omit", + doppcorr="omit", lorscorr="omit", glincorr="omit", + lflgcorr="omit", biascorr="omit", darkcorr="omit", + flatcorr="omit", photcorr="omit") + + # Compare results + outputs = [("basic2d_lev3a_blev.fits", "basic2d_lev3a_blev_ref.fits"), + ("basic2d_lev3a_blev.dat", "basic2d_lev3a_blev_ref.dat")] + self.compare_outputs(outputs) + + def test_basic2d_lev3b_blev(self): + """ + The regression test for this level of this task is a running of the + task with the bias level parameter and Doppler smoothing correction + parameters set (equal to 'perform'). This includes the additional + creation of an output test file for bias levels. The input data for + this test is a STIS CCD image. + """ + + # Prepare input files. + self.get_input_file("input", "o3wd01060_raw.fits") + self.get_data("input", "o3wd01060_wav.fits") + self.get_data("input", "o3wd01060_spt.fits") + + # Run basic2d + basic2d('o3wd01060_raw.fits', output="basic2d_lev3b_blev.fits", + outblev="basic2d_lev3b_blev.dat", dqicorr="omit", + lorscorr="omit", glincorr="omit", lflgcorr="omit", + biascorr="omit", darkcorr="omit", flatcorr="omit", + photcorr="omit") + + # Compare results + outputs = [("basic2d_lev3b_blev.fits", "basic2d_lev3b_blev_ref.fits"), + ("basic2d_lev3b_blev.dat", "basic2d_lev3b_blev_ref.dat")] + self.compare_outputs(outputs) + + def test_basic2d_lev3c_blev(self): + """ + The regression test for this level of this task is a running of the + task with the bias level, Doppler smoothing correction and BIAS image + sub. parameters set (equal to 'perform'). 
This includes the additional + creation of an output test file for bias levels. The input data for + this test is a STIS CCD image. + """ + + # Prepare input files. + self.get_input_file("input", "o3wd01060_raw.fits") + self.get_data("input", "o3wd01060_wav.fits") + self.get_data("input", "o3wd01060_spt.fits") + + # Run basic2d + basic2d('o3wd01060_raw.fits', output="basic2d_lev3c_blev.fits", + outblev="basic2d_lev3c_blev.dat", dqicorr="omit", + lorscorr="omit", glincorr="omit", lflgcorr="omit", + darkcorr="omit", flatcorr="omit", photcorr="omit") + + # Compare results + outputs = [("basic2d_lev3c_blev.fits", "basic2d_lev3c_blev_ref.fits"), + ("basic2d_lev3c_blev.dat", "basic2d_lev3c_blev_ref.dat")] + self.compare_outputs(outputs) + + def test_basic2d_lev3d_blev(self): + """ + The regression test for this level of this task is a running of the + task with the bias level, Doppler smoothing correction, BIAS image + subtraction, and dark subtraction parameters set (equal to 'perform'). + This includes the additional creation of an output test file for bias + levels. The input data for this test is a STIS CCD image. + """ + + # Prepare input files. + self.get_input_file("input", "o3wd01060_raw.fits") + self.get_data("input", "o3wd01060_wav.fits") + self.get_data("input", "o3wd01060_spt.fits") + + # Run basic2d + basic2d('o3wd01060_raw.fits', output="basic2d_lev3d_blev.fits", + outblev="basic2d_lev3d_blev.dat", dqicorr="omit", + lorscorr="omit", glincorr="omit", lflgcorr="omit", + flatcorr="omit", photcorr="omit") + + # Compare results + outputs = [("basic2d_lev3d_blev.fits", "basic2d_lev3d_blev_ref.fits"), + ("basic2d_lev3d_blev.dat", "basic2d_lev3d_blev_ref.dat")] + self.compare_outputs(outputs) diff --git a/tests/test_calstis.py b/tests/test_calstis.py new file mode 100644 index 0000000..af13507 --- /dev/null +++ b/tests/test_calstis.py @@ -0,0 +1,129 @@ +from stistools.calstis import calstis +from .resources import BaseSTIS +from .helpers.mark import require_bigdata + + +@require_bigdata +class TestCalstis(BaseSTIS): + + input_loc = 'calstis' + ref_loc = 'calstis/ref' + + def test_ccd_imaging(self): + """ + This test is for calstis on CCD imaging data + """ + + # Prepare input files. + self.get_input_file("input", "oci402010_raw.fits") + self.get_data("input", "oci402dnj_epc.fits") + self.get_data("input", "oci402doj_epc.fits") + + outroot = "stis_test1" + outfile = outroot + "_flt.fits" + outcrj = outroot + "_crj.fits" + outsx2 = outroot + "_sx2.fits" + reffile_flt = 'reference_stis_01_flt.fits' + reffile_crj = 'reference_stis_01_crj.fits' + reffile_sx2 = 'reference_stis_01_sx2.fits' + + calstis("oci402010_raw.fits", outroot=outroot) + + # Compare results + outputs = [(outfile, reffile_flt), (outcrj, reffile_crj), + (outsx2, reffile_sx2)] + self.compare_outputs(outputs) + + # def test1_lev1_FUV(self): + # """ + # This test is for level 1? FUV data + # """ + # + # # Prepare input files. + # self.get_input_file("input", "o5cl02040_raw.fits") + # self.get_input_file("input", "o5cl02040_wav.fits") + # outroot = "calstis_lev1_FUVspec" + # + # # Run test + # calstis("o5cl02040_raw.fits", outroot=outroot) + # + # # Compare results + # outputs = [(outroot+"_flt.fits", outroot+"_flt_ref.fits"), + # (outroot+"_x1d.fits", outroot+"_x1d_ref.fits")] + # self.compare_outputs(outputs) + + + def test2_lev1_FUV(self): + """ + This test is for level 1? FUV data + """ + + # Prepare input files. 
+ self.get_input_file("input", "odj102010_raw.fits") + self.get_input_file("input", "odj102010_wav.fits") + outroot = "calstis_1lev1_FUVspec" + + # Run test + calstis("odj102010_raw.fits", outroot=outroot) + + # Compare results + outputs = [(outroot+"_flt.fits", outroot+"_flt_ref.fits"), + (outroot+"_x1d.fits", outroot+"_x1d_ref.fits")] + self.compare_outputs(outputs) + + def test_lev2_CCD(self): + """ + This test is for level 2 CCD data + """ + + # Prepare input files. + self.get_input_file("input", "o3wd01060_raw.fits") + self.get_input_file("input", "o3wd01060_wav.fits") + self.get_data("input", "o3wd01060_spt.fits") + outroot = "calstis_lev2_CCD" + + # Run test + calstis("o3wd01060_raw.fits", outroot=outroot) + + # Compare results + outputs = [(outroot + "_flt.fits", outroot + "_flt_ref.fits"), + (outroot + "_crj.fits", outroot + "_crj_ref.fits"), + (outroot + "_sx1.fits", outroot + "_sx1_ref.fits"), + (outroot + "_sx2.fits", outroot + "_sx2_ref.fits")] + self.compare_outputs(outputs) + + def test_lev3_FUV(self): + """ + This test is for level 3 FUV data + """ + + # Prepare input files. + self.get_input_file("input", "o5in01tnq_raw.fits") + outroot = "calstis_lev3_FUV" + + # Run test + calstis("o5in01tnq_raw.fits", outroot=outroot) + + # Compare results + outputs = [(outroot + "_flt.fits", outroot + "_flt_ref.fits"), + (outroot + "_x2d.fits", outroot + "_x2d_ref.fits")] + self.compare_outputs(outputs) + + def test_lev3_NUV(self): + """ + This test is for level 3 NUV data + """ + + # Prepare input files. + self.get_input_file("input", "o6d806030_raw.fits") + self.get_data("input", "o6d806030_wav.fits") + outroot = "calstis_lev3_NUV" + + # Run test + calstis("o6d806030_raw.fits", outroot=outroot) + + # Compare results + outputs = [(outroot + "_flt.fits", outroot + "_flt_ref.fits"), + (outroot + "_x1d.fits", outroot + "_x1d_ref.fits"), + (outroot + "_x2d.fits", outroot + "_x2d_ref.fits")] + self.compare_outputs(outputs) diff --git a/tests/test_mktrace.py b/tests/test_mktrace.py new file mode 100644 index 0000000..baf2bc2 --- /dev/null +++ b/tests/test_mktrace.py @@ -0,0 +1,68 @@ +from stistools.mktrace import mktrace +from .resources import BaseSTIS +from .helpers.mark import require_bigdata + + +@require_bigdata +class TestMktrace(BaseSTIS): + + input_loc = 'mktrace' + ref_loc = 'mktrace' + + atol = 1e-14 + + def test_mktrace_t1(self, capsys): + """ + This tests basic usage of stis.mktrace. To plot the results, run + plot*.py + """ + + # Prepare input files. + self.get_input_file("input", "o45a03010_crj.fits") + + capsys.readouterr() + + # Run mktrace + mktrace('o45a03010_crj.fits') + + # Compare results + captured = capsys.readouterr() + assert captured.out == "Traces were rotated by 0.0066273929 degrees \n" \ + "\n" \ + "trace is centered on row 509.2475494293\n" + + outputs = [("o45a03010_crj_1dt.fits", "o45a03010_crj_1dt_ref.fits"), + ("o45a03010_crj_1dt_interp.fits", "o45a03010_crj_1dt_interp_ref.fits"), + ("o45a03010_crj_1dt_interpfit.fits", "o45a03010_crj_1dt_interpfit_ref.fits"), + ("o45a03010_crj_1dt_sci.fits", "o45a03010_crj_1dt_sci_ref.fits"), + ("o45a03010_crj_1dt_scifit.fits", "o45a03010_crj_1dt_scifit_ref.fits")] + self.compare_outputs(outputs) + + def test_mktrace_t2(self, capsys): + """ + This test uses the E1 aperture position and places the + target near row 900. Originally there was a problem + with the interpolated trace due to incorrect determination + of the index of the trace in the trace table. + """ + + # Prepare input files.
+ self.get_input_file("input", "o8pp31020_crj.fits") + + capsys.readouterr() + + # Run mktrace + mktrace('o8pp31020_crj.fits') + + # Compare results + captured = capsys.readouterr() + assert captured.out == "Traces were rotated by 0.0317233207 degrees \n" \ + "\n" \ + "trace is centered on row 893.7059688464\n" + + outputs = [("o8pp31020_crj_1dt.fits", "o8pp31020_crj_1dt_ref.fits"), + ("o8pp31020_crj_1dt_interp.fits", "o8pp31020_crj_1dt_interp_ref.fits"), + ("o8pp31020_crj_1dt_interpfit.fits", "o8pp31020_crj_1dt_interpfit_ref.fits"), + ("o8pp31020_crj_1dt_sci.fits", "o8pp31020_crj_1dt_sci_ref.fits"), + ("o8pp31020_crj_1dt_scifit.fits", "o8pp31020_crj_1dt_scifit_ref.fits")] + self.compare_outputs(outputs) diff --git a/tests/test_ocrreject.py b/tests/test_ocrreject.py new file mode 100644 index 0000000..af20750 --- /dev/null +++ b/tests/test_ocrreject.py @@ -0,0 +1,61 @@ +from stistools.ocrreject import ocrreject +from .resources import BaseSTIS +from .helpers.mark import require_bigdata + + +@require_bigdata +class TestOcrreject(BaseSTIS): + + input_loc = 'ocrreject' + ref_loc = 'ocrreject/ref' + + input_list = ["o58i01q7q_flt.fits", "o58i01q8q_flt.fits", + "o58i01q9q_flt.fits", "o58i01qcq_flt.fits", + "o58i01qdq_flt.fits", "o58i01qeq_flt.fits", + "o58i01qhq_flt.fits", "o58i01qiq_flt.fits", + "o58i01qjq_flt.fits", "o58i01qmq_flt.fits", + "o58i01qnq_flt.fits", "o58i01qoq_flt.fits", + "o58i01qrq_flt.fits", "o58i01qsq_flt.fits"] + + # Make input file string + input_file_string = ", ".join(input_list) + + def test_ocrreject_lev2(self): + """ + This regression test for this level of this task differs from + level three in two ways: two parameters are set to give an initial + guess for the sky value and to define a sky subtraction method. It also + removes cosmic rays from 14 STIS/CCD images and creates a single + 'clean' image which is compared to a reference file using 'FITSDIFF'. + """ + + # Prepare input files. + for filename in self.input_list: + self.get_input_file("input", filename) + + # Run ocrreject + ocrreject(self.input_file_string, output="ocrreject_lev2_crj.fits", + initgues="med", skysub="mode") + + # Compare results + outputs = [("ocrreject_lev2_crj.fits", "ocrreject_lev2_crj_ref.fits")] + self.compare_outputs(outputs) + + def test_ocrreject_lev3(self): + """ + This regression test for this level of this task is a simple + default-parameter execution of the task. It attempts to remove cosmic + rays from 14 STIS/CCD images. The resulting calibration is compared to + a reference file using 'FITSDIFF'. + """ + + # Prepare input files. + for filename in self.input_list: + self.get_input_file("input", filename) + + # Run ocrreject + ocrreject(self.input_file_string, output="ocrreject_lev3_crj.fits") + + # Compare results + outputs = [("ocrreject_lev3_crj.fits", "ocrreject_lev3_crj_ref.fits")] + self.compare_outputs(outputs) diff --git a/tests/test_stisnoise.py b/tests/test_stisnoise.py new file mode 100644 index 0000000..3f367ec --- /dev/null +++ b/tests/test_stisnoise.py @@ -0,0 +1,49 @@ +from stistools.stisnoise import stisnoise +from .resources import BaseSTIS +from .helpers.mark import require_bigdata + + +@require_bigdata +class TestStisnoise(BaseSTIS): + + input_loc = 'stisnoise' + ref_loc = 'stisnoise' + + # Really should add some output and return array checks for these tests + + def test_stisnoise_t1(self, capsys): + """ + Test stisnoise with boxcar smoothing and check the printed output + """ + + # Prepare input files.
+ self.get_input_file("input", "o6ih10060_crj.fits") + + capsys.readouterr() + + # Run stisnoise + stisnoise('o6ih10060_crj.fits', outfile='o6ih10060_fcrj1.fits', + boxcar=5) + + # Compare results + captured = capsys.readouterr() + assert captured.out == "Target: V1016-CYG, Amp: D, Gain: 1\n" + + outputs = [("o6ih10060_fcrj1.fits", "o6ih10060_fcrj1_ref.fits")] + self.compare_outputs(outputs) + + def test_stisnoise_t2(self): + """ + Test stisnoise with the window parameter + """ + + # Prepare input files. + self.get_input_file("input", "o6ih10060_crj.fits") + + # Run stisnoise + stisnoise('o6ih10060_crj.fits', outfile='o6ih10060_fcrj2.fits', + window=[0.5, 0.5, 0.1]) + + # Compare results + outputs = [("o6ih10060_fcrj2.fits", "o6ih10060_fcrj2_ref.fits")] + self.compare_outputs(outputs) diff --git a/tests/test_wx2d.py b/tests/test_wx2d.py new file mode 100644 index 0000000..cf86ec5 --- /dev/null +++ b/tests/test_wx2d.py @@ -0,0 +1,48 @@ +import pytest + +from stistools.wx2d import wx2d +from .resources import BaseSTIS +from .helpers.mark import require_bigdata + + +@require_bigdata +class TestWx2d(BaseSTIS): + + input_loc = 'wx2d' + ref_loc = 'wx2d' + + + def test_wx2d_t1(self): + """ + Test for wx2d using the rows parameter + """ + + # Prepare input files. + self.get_input_file("../stisnoise/input", "o6ih10060_crj.fits") + + # Run wx2d + wx2d("o6ih10060_crj.fits", "o6ih10060_wx2d.fits", + wavelengths="o6ih10060_wl.fits", helcorr="perform", + rows=(843, 942)) + + # Compare results + outputs = [("o6ih10060_wx2d.fits", "o6ih10060_wx2d_ref.fits"), + ("o6ih10060_wl.fits", "o6ih10060_wl_ref.fits")] + self.compare_outputs(outputs, delete_history=True) + + + def test_wx2d_t2(self): + """ + Test for wx2d + """ + + # Prepare input files. + self.get_input_file("input", "o4d301030_crj.fits") + + # Run wx2d + wx2d("o4d301030_crj.fits", "o4d301030_wx2d.fits", helcorr="perform") + + # Compare results + outputs = [("o4d301030_wx2d.fits", "o4d301030_wx2d_ref.fits")] + self.compare_outputs(outputs, delete_history=True) diff --git a/tests/test_x1d.py b/tests/test_x1d.py new file mode 100644 index 0000000..9a28e91 --- /dev/null +++ b/tests/test_x1d.py @@ -0,0 +1,25 @@ +from stistools.x1d import x1d +from .resources import BaseSTIS +from .helpers.mark import require_bigdata + + +@require_bigdata +class TestX1d(BaseSTIS): + + input_loc = 'x1d' + ref_loc = 'x1d/ref' + + def test_x1d(self): + """ + Basic x1d test, mostly using default parameters + """ + + # Prepare input files. + self.get_input_file("input", "o56j02020_flt.fits") + + # Run x1d + x1d("o56j02020_flt.fits", output="x1d_lev3.fits", ctecorr="omit") + + # Compare results + outputs = [("x1d_lev3.fits", "x1d_lev3_ref.fits")] + self.compare_outputs(outputs)
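# The test modules above all follow the same shape; a minimal template for
# adding another one (the module, dataset, and truth-file names below are
# placeholders, not files that exist in the repository):
from stistools.x2d import x2d
from .resources import BaseSTIS
from .helpers.mark import require_bigdata


@require_bigdata
class TestExample(BaseSTIS):

    input_loc = 'example'       # Artifactory subdirectory holding input data
    ref_loc = 'example/ref'     # subdirectory holding the truth files

    def test_example(self):
        # Fetch the input exposure plus any CRDS reference files it names.
        self.get_input_file("input", "example_raw.fits")

        # Run the task under test.
        x2d("example_raw.fits", output="example_x2d.fits")

        # Compare the product against the stored truth file via FITSDiff.
        outputs = [("example_x2d.fits", "example_x2d_ref.fits")]
        self.compare_outputs(outputs)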