Skip to content

Commit

Permalink
Merge pull request #13 from kadrlica/delve
Browse files Browse the repository at this point in the history
Merging DELVE developments for 2020A
  • Loading branch information
kadrlica authored Jan 27, 2020
2 parents fdbdac7 + f60af24 commit e34cbbc
Show file tree
Hide file tree
Showing 52 changed files with 7,282 additions and 14,161 deletions.
6 changes: 6 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,12 @@
build/
dist/

### Tests
field_test.json
chunk_test.json
test_target_fields.csv
test_windows.csv

### Latex
*.fls
*.aux
Expand Down
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ install:
- conda config --set always_yes yes --set changeps1 no
- conda update -q conda
- conda info -a
- conda create -q -n travis-env python=$TRAVIS_PYTHON_VERSION numpy scipy pandas matplotlib basemap ephem astropy=1.3 fitsio=0.9.8 healpy=1.9.1 nose -c astropy -c kadrlica
- conda create -q -n travis-env python=$TRAVIS_PYTHON_VERSION numpy scipy pandas matplotlib=1.5.3 basemap=1.0.7 ephem astropy=1.3 fitsio=0.9.8 healpy=1.9.1 psycopg2 nose -c astropy -c kadrlica
- source activate travis-env
- python setup.py install
- export OBZTAK_SURVEY='obztak'
Expand Down
34 changes: 34 additions & 0 deletions bin/auto-obztak
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
#!/usr/bin/env python
"""
Automated obztak scheduling
"""
__author__ = "Alex Drlica-Wagner"

from obztak import get_survey
from obztak.auto import AutoObz
from obztak.utils.parser import Parser

def main():
    """Build the command line, configure an AutoObz scheduler, and run it."""
    cli = Parser()
    cli.add_argument("config", help="the configuration file")
    cli.add_argument('-k', '--chunk', default=6., type=float,
                     help='duration of chunk to schedule (minutes)')
    cli.add_argument("-q", "--min-queue-len", default=25, type=int,
                     help="minimum number of exposures in queue")
    cli.add_argument("-t", "--min-queue-time", default=30, type=int,
                     help="minimum queue time (minutes)")
    cli.add_argument('-m', '--mode', default=None,
                     help='mode for scheduler tactician.')
    opts = cli.parse_args()

    # The operator must arm SISPI before automated exposures will execute.
    print("REMINDER: Press 'Enable Auto' on the SISPI Exposure Control tab")

    scheduler = AutoObz(opts.config)
    scheduler.chunk = opts.chunk
    # Only override the AutoObz defaults when a truthy value was supplied.
    if opts.mode:
        scheduler.mode = opts.mode
    if opts.min_queue_len:
        scheduler.min_queue_len = opts.min_queue_len
    if opts.min_queue_time:
        scheduler.min_queue_time = opts.min_queue_time

    # AutoObz instances are callable; invoking one starts the scheduling loop.
    scheduler()

if __name__ == "__main__":
    main()
8 changes: 4 additions & 4 deletions bin/fields2sispi
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,15 @@ Convert a csv field file to a SISPI json file.
"""
from os.path import splitext

from obztak.field import fields2sispi
from obztak.utils.parser import Parser
from obztak.utils.fileio import fields2sispi

parser = Parser(description=__doc__)
parser.add_argument('infile',help='Input file (csv format).')
parser.add_argument('filename',help='input file (csv or fits format).')
parser.add_argument('-f','--force',action='store_true',
help='Input file (csv format).')
help='overwrite output.')

if __name__ == "__main__":
args = parser.parse_args()
fields2sispi(args.infile,force=args.force)
fields2sispi(args.filename,force=args.force)

10 changes: 7 additions & 3 deletions bin/plot_json
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,9 @@ Plot json files.
"""
import argparse
import os
import pylab as plt
import numpy as np
import matplotlib
import pylab as plt

from obztak import get_survey
from obztak.field import FieldArray
Expand Down Expand Up @@ -40,11 +41,14 @@ if __name__ == "__main__":
ext = os.path.splitext(args.outfile)[-1] if args.outfile else None

idx = []
fields = FieldArray()
fields = field_factory(survey)
for filename in args.infiles:
fields = fields + FieldArray.read(filename)
fields = fields + field_factory(survey).read(filename)
if args.chunk: idx.append(len(fields)-1)

if np.any(fields['RA']) < 0:
raise ValueError("Negative RA not allowed by SISPI.")

completed_fields = field_factory(survey)
if args.complete is not None:
for f in args.complete:
Expand Down
113 changes: 11 additions & 102 deletions bin/plot_nightsum
Original file line number Diff line number Diff line change
Expand Up @@ -2,22 +2,18 @@
"""
Plot a night summary.
"""
import argparse
import os
import numpy as np
import matplotlib
matplotlib.use('Agg')
import pylab as plt

import ephem
import numpy as np

from obztak import get_survey
from obztak.factory import field_factory
from obztak.field import FieldArray
from obztak.utils.ortho import makePlot
from obztak.utils.ortho import plot_nightsum
from obztak.utils.date import get_nite, nite2utc, utc2nite, datestr
from obztak.utils.parser import Parser, DatetimeAction
from obztak.utils.database import Database
from obztak.utils.ortho import plot_bliss_coverage, DECamOrtho
from obztak.utils.ortho import plot_maglites_nightsum
from obztak.utils.constants import COLORS

if __name__ == "__main__":
parser = Parser(description=__doc__)
Expand All @@ -32,105 +28,18 @@ if __name__ == "__main__":
date = nite2utc(args.nite)
else:
# Yesterday...? Would 12*ephem.hour be better?
date = ephem.Date(ephem.now() - 8*ephem.hour)
date = ephem.Date(ephem.now() - 12*ephem.hour)
nitestr = utc2nite(date)

print "Plotting nightsum for: %s"%nitestr

#if args.nite:
# now = nite2utc(args.nite) if args.nite else ephem.now()
#nite = "%d%02d%d"%get_nite(now).tuple()[:3]
#date = "%d/%02d/%02d"%now.tuple()[:3]
#time = "%d/%02d/%02d"%date.tuple()[:3] + ' 00:00:00'

fields = field_factory(survey).load_database()
if survey == 'maglites':
plot_maglites_nightsum(fields,nitestr)
elif survey == 'bliss':

plot_bliss_coverage(fields)
plt.suptitle('Coverage (%s)'%nitestr,fontsize=18)
plt.savefig('nightsum_coverage_%s.png'%nitestr)

fig,axes = plt.subplots(1,2,figsize=(12,5))
plt.sca(axes[0])
bmap = DECamOrtho(date='2017/02/08 07:00:00')
for b in np.unique(fields['FILTER']):
f = fields[fields['FILTER']==b]
bmap.draw_focal_planes(f['RA'],f['DEC'],color=COLORS[b],alpha=0.3)
bmap.draw_bliss()
bmap.draw_galaxy()
bmap.draw_des()

plt.sca(axes[1])
bmap = DECamOrtho(date='2017/02/08 19:00:00')
for b in np.unique(fields['FILTER']):
f = fields[fields['FILTER']==b]
bmap.draw_focal_planes(f['RA'],f['DEC'],color=COLORS[b],alpha=0.3)
bmap.draw_bliss()
bmap.draw_galaxy()
bmap.draw_des()
plt.suptitle('Coverage (%s)'%nitestr,fontsize=16)
plt.savefig('nightsum_summary_%s.png'%nitestr)

new = (np.array(map(utc2nite,fields['DATE'])) == nitestr)
new_fields = fields[new]
old_fields = fields[~new]

db = Database()
db.connect()

query = """select id, qc_fwhm as psf, qc_teff as teff, filter from exposure
where exptime = 90 and delivered = True and propid = '%s'
and qc_teff is not NULL and qc_fwhm is not NULL
and to_timestamp(utc_beg) %s '%s'
"""

new = db.query2recarray(query%(fields.PROPID,'>',datestr(date)))
try:
old = db.query2recarray(query%(fields.PROPID,'<',date))
except ValueError as e:
print(e)
old = np.recarray(0,dtype=new.dtype)


nbins = 35
kwargs = dict(normed=True)
step_kwargs = dict(kwargs,histtype='step',lw=3.5)
fill_kwargs = dict(kwargs,histtype='stepfilled',lw=1.0,alpha=0.7)

plt.figure()
step_kwargs['bins'] = np.linspace(0.5,2.5,nbins)
fill_kwargs['bins'] = np.linspace(0.5,2.5,nbins)
plt.hist(new['psf'],color='green',zorder=10, label='Observed tonight', **fill_kwargs)
plt.hist(new['psf'],color='green',zorder=10, **step_kwargs)
plt.hist(old['psf'],color='0.5', label='Observed previously', **fill_kwargs)
plt.hist(old['psf'],color='0.5', **step_kwargs)
plt.axvline(1.20,ls='--',lw=2,color='gray')
plt.legend()
plt.title('Seeing (%s)'%nitestr)
plt.xlabel('FWHM (arcsec)')
plt.ylabel('Normalized Number of Exposures')
plt.savefig('nightsum_psf_%s.png'%nitestr,bbox_inches='tight')

plt.figure()
step_kwargs['bins'] = np.linspace(0,1.5,nbins)
fill_kwargs['bins'] = np.linspace(0,1.5,nbins)
plt.hist(new['teff'],color='green',zorder=10,label='Observed tonight', **fill_kwargs)
plt.hist(new['teff'],color='green',zorder=10, **step_kwargs)
plt.hist(old['teff'],color='0.5',label='Observed previously', **fill_kwargs)
plt.hist(old['teff'],color='0.5', **step_kwargs)
plt.axvline(0.25,ls='--',lw=2,color='gray')
plt.legend()
plt.title('Effective Depth (%s)'%nitestr)
plt.xlabel('Teff')
plt.ylabel('Normalized Number of Exposures')
plt.savefig('nightsum_teff_%s.png'%nitestr,bbox_inches='tight')

for b in ['g','r','i','z']:
f = new[new['filter'] == b]
print ' %s-band:'%b, len(f)
fields = fields[fields['PRIORITY'] >= 0]
plot_nightsum(fields,nitestr,date)

if survey in ['maglites'] :
from obztak import maglites
maglites.plot_nightsum(fields,nitestr)

if args.inspect:
raw_input(' ...finish...')
26 changes: 26 additions & 0 deletions bin/qcInv
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
#!/usr/bin/env python
"""
Query the database
"""
import logging
import pandas as pd
from obztak.utils.database import Database
from obztak.utils.parser import Parser

# Command-line interface: choose the exposure database, a lookback window,
# and (optionally) a single proposal ID to restrict the inventory query.
parser = Parser(description=__doc__)
parser.add_argument('--db', default='fnal', choices=['ctio','fnal'],
                    help='database to query for exposures')
parser.add_argument('-t', '--timedelta', default='12h',
                    help='time to query')
parser.add_argument('-p', '--propid', default=None,
                    help='propid for query')
args = parser.parse_args()

# Connect to the selected database; names follow the 'db-<site>' convention.
db = Database(dbname='db-'+args.db)
db.connect()
df = db.qcInv(timedelta=args.timedelta, propid=args.propid)

#pd.set_option('max_colwidth',18)
# Let pandas auto-detect terminal width so wide tables are not wrapped.
pd.set_option('display.width', None)
fmt = dict(index=False, float_format='{:.2f}'.format, justify='right')
table = df.fillna('').to_string(**fmt)
# NOTE(review): emitted via logging.info — presumably Parser configures the
# root logger level; with a default WARNING level this would print nothing.
logging.info(table)
6 changes: 6 additions & 0 deletions bin/schedule_field
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import logging
from obztak import get_survey
from obztak.scheduler import Scheduler
from obztak.factory import scheduler_factory
from obztak.utils.date import datestring

def main():
parser = Scheduler.parser()
Expand All @@ -21,6 +22,10 @@ def main():

logging.info("Scheduling field for survey: '%s'"%get_survey())
date = ephem.Date(args.utc_start) if args.utc_start else ephem.now()
datestr = datestring(date,0)

logging.info("Start Time (UTC): %s"%(datestr))

#scheduler = Scheduler(args.fields,None,None)
scheduler = scheduler_factory(target_fields=args.fields)
field = scheduler.schedule_field(args.hex,args.tiling,band=args.band,date=date,plot=args.plot)
Expand All @@ -33,6 +38,7 @@ def main():
outfile = '%s_'%field['ID'][0]
outfile += '%4d%02d%02d_%02d:%02d:%02d.json'%date.tuple()

logging.info("Writing %s..."%(outfile))
field.write(outfile)

if __name__ == "__main__":
Expand Down
9 changes: 7 additions & 2 deletions bin/schedule_night
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,13 @@ def main():

if args.utc_start:
date = ephem.Date(args.utc_start)
start = date
elif args.nite:
date = nite2utc(str(args.nite))
start = None
else:
date = ephem.now()
start = None

nitestr = utc2nite(date)
datestr = datestring(date,0)
Expand All @@ -40,9 +43,11 @@ def main():
scheduler = scheduler_factory(target_fields=args.fields,
windows=args.windows,
completed_fields=args.complete)
chunks = scheduler.schedule_nite(date,chunk=args.chunk,plot=args.plot,mode=args.mode)
chunks = scheduler.schedule_nite(date,start=start,chunk=args.chunk,
plot=args.plot,mode=args.mode)




if not args.outfile:
outdir = nitestr
outfile = os.path.join(outdir,nitestr+'.json')
Expand Down
23 changes: 8 additions & 15 deletions bin/schedule_survey
Original file line number Diff line number Diff line change
Expand Up @@ -19,32 +19,25 @@ def main():
scheduler = scheduler_factory(target_fields=args.fields,
windows=args.windows,
completed_fields=args.complete)

basedir = args.outfile if args.outfile else 'survey'
if not os.path.exists(basedir): os.makedirs(basedir)
logging.info(basedir)

try:
survey = scheduler.schedule_survey(start=args.utc_start,end=args.utc_end,
chunk=args.chunk,plot=args.plot,
mode=args.mode)
mode=args.mode,write=True,
dirname=basedir)
except ValueError as error:
logging.warn(str(error))
survey = scheduler.scheduled_nites

# Write all completed fields
scheduler.completed_fields.write('survey_fields.csv')

basedir = args.outfile if args.outfile else 'survey'
if not os.path.exists(basedir): os.makedirs(basedir)
print(basedir)

for nite,chunks in survey.items():
outdir = os.path.join(basedir,nite)
if not os.path.exists(outdir): os.makedirs(outdir)
outfile = os.path.join(outdir,nite+'.json')
base,ext = os.path.splitext(outfile)

for i,chunk in enumerate(chunks):
if len(chunks) > 1:
outfile = base+'_%02d'%(i+1)+ext
logging.debug("Writing %s..."%outfile)
chunk.write(outfile)
survey.write_nite(nite,chunks,dirname=basedir)

return survey

Expand Down
Loading

0 comments on commit e34cbbc

Please sign in to comment.