"""
dbsp.py is a pyraf-based reduction pipeline for spectra taken with the
Palomar 200-inch Double Spectrograph.
"""
# Authors: Eric Bellm <[email protected]>,
# Branimir Sesar <[email protected]>
# License: MIT.
from pyraf import iraf
import numpy as np
import os
import subprocess
import shutil
import inspect
import pyfits
from scipy.optimize.minpack import leastsq
import copy
from glob import glob
import cosmics
# directory where the reduction code is stored
BASE_DIR = os.path.dirname(os.path.abspath(inspect.getfile(
inspect.currentframe())))
def is_new_red_camera():
"""Utility for determining red camera version from image size."""
ids = range(15)
for id in ids:
name = 'red{:04d}.fits'.format(id)
if os.path.exists(name):
hdr = pyfits.getheader(name)
if hdr['NAXIS1'] == 4141 or hdr['NAXIS1'] == 4114:
return True
elif hdr['NAXIS1'] == 1024 or hdr['NAXIS1'] == 1124:
return False
else:
raise ValueError('Unexpected image size')
else:
continue
# raise ValueError('Could not locate red side files')
print 'Could not locate red side files--defaulting to new camera'
return True
NEW_RED_SIDE = is_new_red_camera()
# load IRAF packages
iraf.noao(_doprint=0)
iraf.imred(_doprint=0)
iraf.ccdred(_doprint=0)
iraf.twodspec(_doprint=0)
iraf.longslit(_doprint=0)
iraf.kpnoslit(_doprint=0)
iraf.astutil(_doprint=0)
iraf.onedspec(_doprint=0)
# use my modified sproc to avoid annoying doslit prompts
#PkgName = iraf.curpack(); PkgBinary = iraf.curPkgbinary()
#iraf.reset(doslit = BASE_DIR+"/cl/doslit/")
#iraf.task(doslitDOTpkg = 'doslit$doslit.cl', PkgName=PkgName,PkgBinary=PkgBinary)
# defaults
# (blue trace is usually around 250-260)
det_pars = {'blue': {'gain': 0.8, 'readnoise': 2.7, 'trace': 253,
'crval': 4345, 'cdelt': -1.072, 'arc': 'FeAr_0.5.fits',
'fwhm_arc': 2.8, 'pixel_size': 15.}}
if NEW_RED_SIDE:
det_pars['red'] = {'gain': 2.8, 'readnoise': 8.5, 'trace': 166,
'crval': 7502, 'cdelt': 1.530, 'arc': 'HeNeAr_0.5.fits',
'biassec': '[4128:4141,1:440]', 'fwhm_arc': 2.4,
'pixel_size': 15.}
else:
# old CCD
det_pars['red'] = {'gain': 2.05, 'readnoise': 7.8, 'trace': 130,
'crval': 6600, 'cdelt': 2.46, 'arc': 'HeNeAr_0.5.fits',
'biassec': '[1080:1124,1:300]', 'fwhm_arc': 1.6,
'pixel_size': 24.}
# crval is in Angstrom, cdelt is Angstrom/pixel
# pixel size is in micron
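# The defaults above can be adjusted before running the pipeline.  For
# example (illustrative value only), if the blue trace sits near row 260
# on a given night:
#   det_pars['blue']['trace'] = 260
# check_gratings_angles() below refines crval/cdelt using the grating and
# angle recorded in the image headers.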
def check_gratings_angles():
"""Check header values for grating and angles and update dispersion values
accordingly."""
ids = range(15)
for side in ['red','blue']:
for id in ids:
name = '{}{:04d}.fits'.format(side,id)
if os.path.exists(name):
hdr = pyfits.getheader(name)
grating = np.int(hdr['GRATING'].split('/')[0])
assert(grating in [158, 300, 316, 600, 1200])
angle_toks = hdr['ANGLE'].split()
deg = angle_toks[0]
if len(angle_toks) > 2:
# ANGLE = '27 deg 17 min' / Grating Angle
min = angle_toks[2]
else:
# ANGLE = '39.2 deg' / Grating Angle
min = 0.
                angle = np.float(deg) + np.float(min)/60.
central_wavelength, dispersion = calculate_dispersion(
grating, angle, side=side)
crval = np.round(central_wavelength)
cdelt = dispersion / 1.E3 * det_pars[side]['pixel_size']
if side == 'blue':
cdelt *= -1.
det_pars[side]['crval'] = crval
det_pars[side]['cdelt'] = cdelt
else:
continue
def calculate_dispersion(grating, angle, side='blue', order=1):
"""Calculate parameters needed to initialize dispersion solution.
Modifies global variable det_pars with the computed values.
See http://www.astro.caltech.edu/palomar/200inch/dbl_spec/dbspoverview.html
for details.
Parameters
----------
grating : integer
grating lines/mm
angle : float
grating angle in degrees
side : {'blue' (default), 'red'}
'blue' or 'red' to indicate the arm of the spectrograph
order : int (default = 1)
Spectral order
Returns
-------
central_wavelength : float
central wavelength in Angstroms
dispersion : float
dispersion in Angstroms / mm
"""
MM_TO_ANGSTROM = 10000000
INCHES_TO_MM = 25.4
diffracted_angle = {'red':35.00, 'blue':38.50} # degrees
focal_length = {'red':12.*INCHES_TO_MM, 'blue':9.*INCHES_TO_MM}
line_spacing = 1./grating # mm
theta_m = diffracted_angle[side] - angle
central_wavelength = order * np.abs((np.sin(np.radians(theta_m)) -
np.sin(np.radians(angle))) * line_spacing * MM_TO_ANGSTROM) # Angstrom
dispersion = (line_spacing * np.cos(np.radians(theta_m)) /
(focal_length[side] * order)) * MM_TO_ANGSTROM
return central_wavelength, dispersion
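# Example (grating/angle values are hypothetical): a first-order solution for
# a 316 l/mm grating tilted to 24.6 degrees on the red arm could be computed as
#   central_wavelength, dispersion = calculate_dispersion(316, 24.6, side='red')
# dispersion is returned in Angstrom/mm; multiplying by the pixel size in mm
# (det_pars['red']['pixel_size']/1.E3) gives Angstrom/pixel, as done in
# check_gratings_angles() above.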
def mark_bad(imgID_list, side='blue'):
"""Utility for excluding specific files from further analysis.
Saturated or mis-configured exposures are suffixed .bad so file searches
do not find them.
Parameters
----------
imgID_list : list of ints or int
image id(s) to be marked as bad.
side : {'blue' (default), 'red'}
'blue' or 'red' to indicate the arm of the spectrograph
"""
assert (side in ['blue', 'red'])
try:
for num in imgID_list:
name = '{:s}{:04d}.fits'.format(side, num)
os.rename(name, name+'.bad')
except TypeError:
# single number
num = imgID_list
name = '{:s}{:04d}.fits'.format(side, num)
os.rename(name, name+'.bad')
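# Example usage (hypothetical image IDs): flag a saturated red exposure, or
# several at once, so later searches for red????.fits skip them:
#   mark_bad(52, side='red')
#   mark_bad([52, 53], side='red')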
# run automatically
check_gratings_angles()
def create_arc_dome(side='both', trace=None, arcslit='0.5', overwrite=True):
"""Convenience function which subtracts bias, creates dome flats, and
creates arc frames.
Parameters
----------
side : {'both' (default), 'blue', 'red'}
'blue' or 'red' to indicate the arm of the spectrograph;
'both' to reduce both
trace : int
row or column of the spectral trace, if different from default.
arcslit : {'0.5','1.0', '1.5', '2.0'}
string indicating the preferred arc slit width
overwrite : boolean (default True)
If True, overwrites any existing files; if False, skips processing.
"""
assert ((side == 'both') or (side == 'blue') or (side == 'red'))
if side == 'both':
if len(glob('blue????.fits')) > 0:
create_arc_dome(side='blue', trace=trace, arcslit=arcslit,
overwrite=overwrite)
else:
print 'No blue side files found.'
if len(glob('red????.fits')) > 0:
create_arc_dome(side='red', trace=trace, arcslit=arcslit,
overwrite=overwrite)
else:
print 'No red side files found.'
return
if trace is None:
trace = det_pars[side]['trace']
bias_subtract(side=side, trace=trace)
if side == 'blue':
fix_bad_column_blue()
make_flats(side=side, overwrite=overwrite)
if side == 'blue':
make_arcs_blue(slit=arcslit, overwrite=overwrite)
else:
make_arcs_red(slit=arcslit, overwrite=overwrite)
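# Example usage (options are illustrative), run from the directory containing
# the raw blue????.fits / red????.fits frames:
#   create_arc_dome()                            # both sides, 0.5" arc slit
#   create_arc_dome(side='blue', arcslit='1.0')  # one side, different arc slit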
def bias_subtract(side='blue', trace=None):
"""Use iraf.ccdproc to subtract bias from all frames.
Parameters
----------
side : {'blue' (default), 'red'}
'blue' or 'red' to indicate the arm of the spectrograph
trace : int
row or column of the spectral trace, if different from default.
"""
# update the headers
iraf.asthedit('%s????.fits' % side, BASE_DIR + '/cal/DBSP.hdr')
if side == 'blue':
iraf.hedit('blue*.fits', 'DISPAXIS', 2, update="yes",
verify="no", add="yes", show="no")
else:
iraf.hedit('red*.fits', 'DISPAXIS', 1, update="yes",
verify="no", add="yes", show="no")
# Need to define an instrument translation file in iraf 2.16.1
iraf.unlearn('setinst')
iraf.setinst.instrument = 'kpnoheaders'
iraf.setinst.review = 'no'
iraf.setinst.mode = 'h'
iraf.setinst()
# bias subtraction using the overscan
filenames = glob("%s????.fits" % side)
hdr = pyfits.getheader(filenames[0])
iraf.unlearn('ccdproc')
iraf.ccdproc.zerocor = "no"
iraf.ccdproc.flatcor = "no"
iraf.ccdproc.fixpix = "no"
# iraf.ccdproc.fixfile = "../bluebpm"
if side == 'blue':
iraf.ccdproc.biassec = hdr['BSEC1']
iraf.ccdproc.trimsec = "[%d:%d,*]" % (trace-100, trace+100)
iraf.ccdproc.function = "spline3"
iraf.ccdproc.order = 3
else:
# trim the specified region
iraf.ccdproc.biassec = det_pars['red']['biassec'] # this may not work for the old camera...
tsec_x = hdr['TSEC1'].split(',')[0]
iraf.ccdproc.trimsec = tsec_x + ",%d:%d]" % (trace-100, trace+100)
iraf.ccdproc.function = "legendre"
iraf.ccdproc.order = 1
iraf.ccdproc.darkcor = "no"
iraf.ccdproc.ccdtype = ""
iraf.ccdproc.niterate = 3
iraf.ccdproc('%s????.fits' % side)
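# bias_subtract is normally invoked via create_arc_dome(), but it can be run
# on its own, e.g. (trace value shown is the red-side default):
#   bias_subtract(side='red', trace=166)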
def fix_bad_column_blue():
"""Uses a science exposure to find the bad column in the blue CCD.
Automatically corrects all blue exposures with iraf.fixpix.
"""
science = iraf.hselect('blue????.fits', '$I', 'TURRET == "APERTURE" & LAMPS == "0000000"', Stdout=1)
if len(science) > 0:
f = open('science_dump', 'w')
for fn in science:
f.write('%s\n' % fn)
f.close()
science = iraf.hselect('@science_dump', '$I', 'TURRET == "APERTURE" & LAMPS == "0000000" & AIRMASS > 1.01', Stdout=1)
os.unlink('science_dump')
f = pyfits.open(science[0])
bad_column = f[0].data[1608,:].argmin() + 1
f.close()
f = open('bluebpm', 'w')
f.write('%d %d 1 2835\n' % (bad_column, bad_column))
f.close()
iraf.fixpix('blue????.fits', "bluebpm")
def find_flats(aperture, side='blue'):
"""Finds flat images for the selected aperture and side using
header keywords.
Uses dome flats if present, otherwise falls back on internal flats.
Parameters
----------
aperture : {'0.5','1.0', '1.5', '2.0'}
string indicating the slit width
side : {'blue' (default), 'red'}
'blue' or 'red' to indicate the arm of the spectrograph
"""
# find dome flat images
domeflats = iraf.hselect('%s????.fits' % side, '$I', 'TURRET == "APERTURE" & APERTURE == "%s" & LAMPS == "0000000" & AIRMASS < 1.01 & IMGTYPE == "flat"' % aperture, Stdout=1)
# find internal flat (incandescent lamp) images
intflats = iraf.hselect('%s????.fits' % side, '$I', 'TURRET == "LAMPS" & APERTURE == "%s" & LAMPS == "0000001" & AIRMASS < 1.01' % aperture, Stdout=1)
    # dome flats are preferred over internal flats
flats = []
    if (len(intflats) > 0) and (len(domeflats) == 0):
flats = intflats
print "Using %d internal flats for the %s arcsec slit." % (len(intflats), aperture)
if len(domeflats) > 3:
flats = domeflats
print "Using %d dome flats for the %s arcsec slit." % (len(domeflats), aperture)
return flats
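# Example (slit width is illustrative): list the flat exposures that would be
# combined for the red side with the 1.0 arcsec slit:
#   flats = find_flats('1.0', side='red')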
def make_flats(side='blue',overwrite=False):
"""Creates dome flat images using iraf.flatcombine.
Creates flats for all slit widths in ['0.5','1.0', '1.5', '2.0'] if present.
Uses dome flats if present, otherwise falls back on internal flats.
Stores flats as flat_{side}_{aper}.fits.
Parameters
----------
side : {'blue' (default), 'red'}
'blue' or 'red' to indicate the arm of the spectrograph
overwrite : boolean
If True, overwrites existing flats; if False, skips processing for
all slit widths with existing flats.
"""
iraf.unlearn('flatcombine')
iraf.flatcombine.ccdtype = ""
iraf.flatcombine.process = "no"
iraf.flatcombine.subsets = "no"
iraf.flatcombine.rdnoise = "RON"
iraf.flatcombine.gain = "GAIN"
for aperture in ['0.5', '1.0', '1.5', '2.0']:
flats = find_flats(aperture, side=side)
if len(flats) > 0:
if overwrite:
iraf.delete('flat_%s_%s.fits' % (side, aperture), verify='no')
if len(flats) < 3:
iraf.flatcombine(','.join(flats), output='temp', reject='pclip')
if len(flats) >= 3:
iraf.flatcombine(','.join(flats), output='temp', reject='avsigclip')
# normalize the flat
iraf.unlearn('response')
iraf.response.function = "spline3"
iraf.response.order = 100
iraf.response.niterate = 3
iraf.response.low_rej = 3
iraf.response.high_rej = 3
if side == 'blue':
iraf.twodspec.longslit.dispaxis = 2
else:
iraf.twodspec.longslit.dispaxis = 1
iraf.response('temp[0]', 'temp[0]',
'flat_%s_%s.fits' % (side, aperture), interactive="no")
os.rename('temp.fits', 'raw_flat_%s_%s.fits' % (side, aperture))
# measure flat-field error from sigma images
iraf.unlearn('imcombine')
iraf.imcombine.reject = 'avsigclip'
iraf.imcombine(','.join(flats), output='flat', sigma='sigma', scale='mode')
iraf.imarith('sigma', '/', 'flat', 'frac')
s = iraf.imstat('frac.fits', fields="mean", nclip=20, Stdout=1, format="no")
print 'Flat field error: ', np.float(s[0])
iraf.delete('flat.fits', verify="no")
iraf.delete('sigma.fits', verify="no")
iraf.delete('frac.fits', verify="no")
else:
print "No dome or internal flats for the %s arcsec slit." % aperture
def make_arcs_blue(slit='0.5', overwrite=False, zenith_only=True):  # DaveC: set zenith_only=False if no arcs were taken at zenith
"""Creates the master FeAr arc with iraf.imcombine.
Stores arc as FeAr_{slit}.fits.
Parameters
----------
slit : {'0.5' (default),'1.0', '1.5', '2.0'}
string indicating the slit width
overwrite : boolean
If True, overwrites existing arc; if False, skips processing.
zenith_only : boolean, default True
If True, only use arcs taken at zenith.
(Combining arcs taken at different elevations is not recommended
due to flexure.)
"""
aperture = slit
iraf.unlearn('imcombine')
iraf.imcombine.rdnoise = det_pars['blue']['readnoise']
iraf.imcombine.gain = det_pars['blue']['gain']
arcs = iraf.hselect('blue????.fits', '$I', 'TURRET == "LAMPS" & APERTURE == "{aperture}" & LAMPS == "0100000"'.format(aperture=aperture), Stdout=1)
if zenith_only:
try:
arcs = iraf.hselect(','.join(arcs), '$I', 'TURRET == "LAMPS" & APERTURE == "{aperture}" & LAMPS == "0100000" & AIRMASS < 1.01'.format(aperture=aperture), Stdout=1)
        except:
            # fall back to all arcs if the zenith-only selection fails
            pass
if overwrite:
iraf.delete('FeAr_{aperture}.fits'.format(aperture=aperture),
verify='no')
iraf.imcombine(','.join(arcs), 'FeAr_{}'.format(aperture), reject="none")
def make_arcs_red(slit='0.5', overwrite=False, zenith_only=True):  # DaveC: set zenith_only=False if no arcs were taken at zenith
"""Creates the master HeNeAr arc with iraf.imcombine.
Stores arc as HeNeAr_{slit}.fits.
Parameters
----------
slit : {'0.5' (default), '1.0', '1.5', '2.0'}
string indicating the slit width
overwrite : boolean
If True, overwrites existing arc; if False, skips processing.
zenith_only : boolean, default True
If True, only use arcs taken at zenith.
(Combining arcs taken at different elevations is not recommended
due to flexure.)
"""
aperture = slit
iraf.unlearn('imcombine')
iraf.imcombine.rdnoise = det_pars['red']['readnoise']
iraf.imcombine.gain = det_pars['red']['gain']
arcs = iraf.hselect('red????.fits', '$I', 'TURRET == "LAMPS" & APERTURE == "{aperture}" & LAMPS == "0001110"'.format(aperture=aperture), Stdout=1)
if zenith_only:
try:
arcs = iraf.hselect(','.join(arcs), '$I', 'TURRET == "LAMPS" & APERTURE == "{aperture}" & LAMPS == "0001110" & AIRMASS < 1.01'.format(aperture=aperture), Stdout=1)
        except:
            # fall back to all arcs if the zenith-only selection fails
            pass
if overwrite:
iraf.delete('HeNeAr_{aperture}.fits'.format(aperture=aperture),
verify='no')
iraf.imcombine(','.join(arcs), 'HeNeAr_{}'.format(aperture), reject="none")
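# Example (slit choice is illustrative): build the master arcs for the 1.0
# arcsec slit on both sides; these are normally created by create_arc_dome():
#   make_arcs_blue(slit='1.0', overwrite=True)
#   make_arcs_red(slit='1.0', overwrite=True)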
def preprocess_image(filename, side='blue', flatcor = 'yes',
remove_cosmics=True, trace=None):
"""Remove instrumental signatures from a CCD image.
Performs bias subtraction and flat correction,
adds header info if needed, and removes cosmic rays.
Parameters
----------
filename : string
Name of CCD image to process
side : {'blue' (default), 'red'}
'blue' or 'red' to indicate the arm of the spectrograph
flatcor : {'yes' (default), 'no'}
Divide the image by the flat field?
remove_cosmics : boolean
Apply cosmic ray rejection?
trace : int
Row or column of the spectral trace, if different from default.
"""
assert(flatcor in ['yes', 'no'])
if trace is None:
trace = det_pars[side]['trace']
# Need to define an instrument translation file in iraf 2.16.1
iraf.unlearn('setinst')
iraf.setinst.instrument = 'kpnoheaders'
iraf.setinst.review = 'no'
iraf.setinst.mode = 'h'
iraf.setinst()
# bias subtraction using the overscan
hdr = pyfits.getheader(filename)
iraf.unlearn('ccdproc')
iraf.ccdproc.zerocor = "no"
iraf.ccdproc.flatcor = flatcor
iraf.ccdproc.fixpix = "no"
iraf.hedit(filename, 'GAIN', det_pars[side]['gain'],
update="yes", verify="no", show="no")
iraf.hedit(filename, 'RON', det_pars[side]['readnoise'],
update="yes", verify="no", show="no")
if side == 'blue':
# update the header
iraf.hedit(filename, 'DISPAXIS', 2, update="yes", verify="no", add="yes", show="no")
# trim the specified region
iraf.ccdproc.biassec = hdr['BSEC1']
iraf.ccdproc.trimsec = "[%d:%d,*]" % (trace-100, trace+100)
iraf.ccdproc.function = "spline3"
iraf.ccdproc.order = 3
else:
# update the header
iraf.hedit(filename, 'DISPAXIS', 1, update="yes", verify="no", add="yes", show='no')
# trim the specified region
iraf.ccdproc.biassec = det_pars['red']['biassec']
tsec_x = hdr['TSEC1'].split(',')[0]
iraf.ccdproc.trimsec = tsec_x + ",%d:%d]" % (trace-100, trace+100)
iraf.ccdproc.function = "legendre"
iraf.ccdproc.order = 1
iraf.ccdproc.ccdtype = ""
iraf.ccdproc.darkcor = "no"
iraf.ccdproc.niterate = 3
iraf.ccdproc(filename,
flat="flat_%s_%s" % (side, hdr['APERTURE']))
if (side == 'blue') and ('FIXPIX' not in hdr):
iraf.fixpix('blue????.fits', "bluebpm")
if 'OBSERVAT' not in hdr:
# update the headers
iraf.asthedit(filename, BASE_DIR + '/cal/DBSP.hdr')
# remove cosmic rays with LA Cosmic
if remove_cosmics and ('COSMIC' not in hdr) and (hdr['EXPTIME'] > 60) and \
(hdr['TURRET'] == 'APERTURE'):
array, header = pyfits.getdata(filename, header=True)
c = cosmics.cosmicsimage(array, gain=det_pars[side]['gain'],
readnoise=det_pars[side]['readnoise'],
sigclip = 4.5, sigfrac = 0.5, objlim = 2.0, satlevel=60000,
skyOrder = 0, objectOrder = 0)
c.run(maxiter = 3)
        header['COSMIC'] = 1  # 1 if we ran LA Cosmic
pyfits.writeto(filename, c.cleanarray, header, clobber=True)
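# preprocess_image is normally called by extract1D/store_standards, but a
# single frame can be cleaned by hand, e.g. (hypothetical file name):
#   preprocess_image('red0042.fits', side='red')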
def store_standards(imgID_list, side='blue', trace=None,
arc=None, splot='no', redo='no', resize='yes',
crval=None, cdelt=None, extract=True, telluric_cal_id=None):
"""Extract spectra for spectroscopic standard stars and determine
corrections needed for fluxing.
Wraps iraf.standard and iraf.sensfunc.
After extracting spectra for each standard star, the code will query you
for the name of the standards and prompt you to edit the bandpasses.
The calibrated data are stored in std-{side}.
Next, the software fits a sensitivity function to the calibrated intensity
bandpasses, prompting you to edit the fit as needed. The fit sensitivity
is stored in sens-{side}.
See the README for step-by-step instructions for interacting with iraf.
Parameters
----------
imgID_list : list of ints
List of file numbers for images of spectroscopic standards,
e.g., red0011.fits and red0015.fits -> [11,15]
side : {'blue' (default), 'red'}
'blue' or 'red' to indicate the arm of the spectrograph
trace : int
Row or column of the spectral trace, if different from default.
arc : string
Arc image to use if different from the default, e.g. 'HeNeAr_0.5.fits'
splot : {'yes', 'no' (default)}
Plot extracted spectra with iraf.splot?
redo : {'yes', 'no' (default)}
Redo the spectral extraction from scratch? Passed to iraf.doslit.
**Warning**--discards previous wavelength solution!
Use extract=False if you simply want to redefine calibration bandpasses
and refit the sensitivity function.
resize : {'yes' (default), 'no'}
Resize the extraction aperture? Passed to iraf.doslit.
crval : int or None (default)
Spectrum reference dispersion coordinate, if different from default
[Angstroms of central pixel]
cdelt : int or None (default)
Spectral dispersion, if different from default [Angstroms per pixel]
extract : boolean (default True)
Extract spectra? If False, use existing extractions.
Useful for redefining the calibration bandpasses and refitting
the sensitivity function.
telluric_cal_id : int or None (default)
If defined, use specified image id to perform telluric correction.
"""
# first extract all the standard spectra
if extract:
for i, imgID in enumerate(imgID_list):
# only redo the first one so we don't have to keep re-defining the
# dispersion solution
            if i > 0:
                redo = 'no'
extract1D(imgID, side=side, trace=trace, arc=arc, splot=splot,
redo=redo, resize=resize, flux=False, crval=crval, cdelt=cdelt,
telluric_cal_id = telluric_cal_id, quicklook = 'no')
iraf.delete('std-{}'.format(side), verify='no')
iraf.delete('sens-{}'.format(side), verify='no')
iraf.unlearn('standard')
iraf.standard.caldir = "onedstds$oke1990/"
iraf.standard.output = 'std-{}'.format(side)
# use the tabulated bandpasses for the standards
iraf.standard.bandwidth = "INDEF"
iraf.standard.bandsep = "INDEF"
# try these one at a time
for imgID in imgID_list:
# use the extracted spectrum!
iraf.standard('%s%04d.spec.fits' % (side, imgID))
iraf.unlearn('sensfunc')
iraf.sensfunc.standards = 'std-{}'.format(side)
iraf.sensfunc.sensitivity = 'sens-{}'.format(side)
if side == 'blue':
iraf.sensfunc.order = 3
else:
iraf.sensfunc.order = 6
# varun says to use ignoreaps, but it's causing me problems downstream
# iraf.sensfunc.ignoreaps = 'yes'
iraf.sensfunc()
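# Example (hypothetical image IDs): extract the flux standards taken on the
# blue side and fit the sensitivity function:
#   store_standards([41, 42, 43], side='blue')
# To redefine the calibration bandpasses and refit without re-extracting:
#   store_standards([41, 42, 43], side='blue', extract=False)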
def estimateFWHM(imgID, side='blue'):
"""Use IRAF's imexam to measure the FWHM of the trace.
Parameters
----------
imgID : int
File number of image to process, e.g., red0011.fits -> 11
side : {'blue' (default), 'red'}
'blue' or 'red' to indicate the arm of the spectrograph
"""
iraf.unlearn('imexam')
iraf.rimexam.fittype = "gaussian"
iraf.delete('trace.xy', verify="no")
iraf.delete('fwhm.log', verify="no")
# extract the position of the trace
f = open('database/ap%s%04d' % (side, imgID), 'r')
dat = f.read()
xy = dat.split('\n')[5].split()[1:3]
f.close()
f = open('trace.xy', 'w')
f.write('%s %s\n' % (xy[0], xy[1]))
f.close()
# run imexam
if side == 'blue':
defkey = 'j'
else:
defkey = 'k'
iraf.imexam('%s%04d' % (side, imgID), '1', logfile='fwhm.log', keeplog="yes", defkey=defkey, imagecur='trace.xy', use_display="no", autoredraw="no")
# load values
f = open('fwhm.log', 'r')
dat = f.read()
fwhm = float(dat.split('\n')[1].split('=')[4].split()[0])
f.close()
# cleanup
os.unlink("fwhm.log")
os.unlink("trace.xy")
# update the header
f = pyfits.open('%s%04d.spec.fits' % (side, imgID))
    f[0].header['FWHM'] = np.round(fwhm, 2)  # FWHM estimate of the trace [pix]
f.writeto('%s%04d.spec.fits' % (side, imgID), clobber=True)
f.close()
if os.access('%s%04d_flux.spec.fits' % (side, imgID), os.F_OK):
f = pyfits.open('%s%04d_flux.spec.fits' % (side, imgID))
        f[0].header['FWHM'] = np.round(fwhm, 2)  # FWHM estimate of the trace [pix]
f.writeto('%s%04d_flux.spec.fits' % (side, imgID), clobber=True)
f.close()
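# Example (hypothetical image ID): record the trace FWHM in the headers of an
# already-extracted spectrum:
#   estimateFWHM(61, side='red')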
def extract1D(imgID, side='blue', trace=None, arc=None, splot='no',
resize='yes', flux=True, telluric_cal_id=None, reextract=False,
redo='no', crval=None, cdelt=None, quicklook='no'):
"""Extract spectra for science objects and apply flux and telluric
corrections if requested.
Wraps preprocess_image, iraf.doslit, iraf.calibrate, iraf.dispcor,
iraf.telluric, and some others.
Uses sky lines to correct the wavelength solution and estimate its
uncertainty.
Generates text and fits spectra of the form {side}####.spec.{fits/txt}
and uncertainties of the form {side}####.err.{fits/txt}. Fluxed
spectra and errors are stored as {side}####_flux.spec.{fits/txt}
and {side}####_flux.err.{fits/txt} in units of erg/cm2/sec/Ang.
Parameters
----------
imgID : int
File number of image to process, e.g., red0011.fits -> 11
side : {'blue' (default), 'red'}
'blue' or 'red' to indicate the arm of the spectrograph
trace : int
Row or column of the spectral trace, if different from default.
arc : string
Arc image to use if different from the default, e.g. 'HeNeAr_0.5.fits'
splot : {'yes', 'no' (default)}
Plot extracted spectra with iraf.splot?
resize : {'yes' (default), 'no'}
Resize the extraction aperture? Passed to iraf.doslit.
telluric_cal_id : int or None (default)
If defined, use specified image id to perform telluric correction.
    reextract : boolean (default False)
        Re-extract spectra?  If False, use existing aperture definitions
        if they exist.  If True, delete any stored aperture definition for
        this image so the trace is re-fit.
redo : {'yes', 'no' (default)}
Redo the spectral extraction from scratch? Passed to iraf.doslit.
**Warning**--discards previous wavelength solution! This is probably
not what you want!
Use reextract=True if you simply want to redefine the apertures.
crval : int or None (default)
Spectrum reference dispersion coordinate, if different from default
[Angstroms of central pixel]
cdelt : int or None (default)
Spectral dispersion, if different from default [Angstroms per pixel]
quicklook : {'yes', 'no' (default)}
Non-interactive aperture selection, tracing, and dispersion? Passed
to iraf.doslit.
"""
assert (side in ['blue', 'red'])
assert (splot in ['yes', 'no'])
assert (redo in ['yes', 'no'])
assert (resize in ['yes', 'no'])
assert (quicklook in ['yes', 'no'])
rootname = '%s%04d' % (side, imgID)
if trace is None:
trace = det_pars[side]['trace']
if arc is None:
arc = det_pars[side]['arc']
if crval is None:
crval = det_pars[side]['crval']
if cdelt is None:
cdelt = det_pars[side]['cdelt']
if reextract:
apfile = 'database/ap'+rootname
if os.path.exists(apfile):
os.remove(apfile)
# preprocess the science image
preprocess_image(rootname+'.fits', side=side, trace=trace)
# preprocess the arc image
preprocess_image(arc, side=side, trace=trace, flatcor='no',
remove_cosmics=False)
# set up doslit
fwhm = 5.6
iraf.unlearn('doslit')
iraf.unlearn('sparams')
iraf.unlearn('aidpars')
iraf.doslit.quicklook = quicklook
iraf.doslit.readnoise = "RON"
iraf.doslit.gain = "GAIN"
iraf.doslit.width = 1.4*fwhm
iraf.doslit.crval = crval
iraf.doslit.cdelt = cdelt
iraf.sparams.nsum = 50
iraf.doslit.clean = "yes"
if side == 'blue':
iraf.doslit.dispaxis = 2
else:
iraf.doslit.dispaxis = 1
iraf.sparams.extras = "yes"
iraf.sparams.lower = -1*int(round(iraf.doslit.width/2.))
iraf.sparams.upper = int(round(iraf.doslit.width/2.))
iraf.sparams.t_function = "legendre"
iraf.sparams.t_niter = 3
iraf.sparams.t_order = 4
iraf.sparams.t_high = 2
iraf.sparams.t_low = 2
iraf.sparams.weights = "variance"
iraf.sparams.b_order = 3
iraf.sparams.b_niterate = 1
iraf.sparams.select = "average"
    annulus_start = fwhm*2
    xL = np.floor(np.linspace(-80, -1*annulus_start, 10))
    xR = np.floor(np.linspace(annulus_start, 80, 10))
background_range = ''
for i in np.arange(xL.size-1):
background_range += '%d:%d,' % (np.int(xL[i]), np.int(xL[i+1]-1))
for i in np.arange(xR.size-1):
background_range += '%d:%d,' % (np.int(xR[i]+1), np.int(xR[i+1]))
iraf.sparams.b_sample = background_range[:-1]
iraf.sparams.i_function = "legendre"
iraf.sparams.i_order = 4
if side == 'blue':
iraf.sparams.coordlist = BASE_DIR + '/cal/FeAr_dbsp.dat'
fwhm_arc = det_pars['blue']['fwhm_arc'] # input FWHM of arc lines here (in pixels)
else:
iraf.sparams.coordlist = BASE_DIR + '/cal/HeNeAr_dbsp.dat'
fwhm_arc = det_pars['red']['fwhm_arc'] # input FWHM of arc lines here (in pixels)
iraf.sparams.fwidth = fwhm_arc
iraf.sparams.match = 10. # positive number is angstrom, negative is pix
iraf.sparams.i_niterate = 5
iraf.sparams.addfeatures = 'no'
iraf.sparams.linearize = "yes"
# extract 1D spectrum
iraf.doslit(rootname+'.fits', arcs=arc, splot=splot, redo=redo, resize=resize)
# extract the trace from the flat-field image
hdr = pyfits.getheader(rootname + '.fits')
iraf.apall('raw_flat_%s_%s.fits' % (side, hdr['APERTURE']), interactive='no', extras='no', edit="no", nsum=10, recenter="no", trace="no", background="none", output='trace_flat', find="no", reference=rootname)
# normalize the response with mean
m = np.float(iraf.imstat('trace_flat.fits', fields='mean', Stdout=1, format="no")[0])
iraf.imarith('trace_flat.fits', '/', m, 'trace_flat.fits')
# transform from pixel to wavelength coordinates
iraf.hedit('trace_flat.fits', 'REFSPEC1', '%s%s.ms' % (rootname, os.path.splitext(arc)[0]), add="yes", verify="no", show="no")
iraf.dispcor('trace_flat.fits', 'd_trace_flat.fits', table=rootname + '.ms.fits')
# normalize the response with a low-order spline
iraf.unlearn('continuum')
iraf.continuum.order = 5
iraf.continuum.high_rej = 5
iraf.continuum.low_rej = 2
iraf.continuum.niterate = 10
iraf.continuum.type = "ratio"
iraf.continuum('d_trace_flat.fits', 'norm_d_trace_flat.fits',
interactive="no")
# correct tellurics, if requested
if telluric_cal_id is not None and side == 'red':
tell_rootname = '%s%04d' % (side, telluric_cal_id)
if not os.path.exists('norm_' + tell_rootname + '.fits'):
normalize_to_continuum(telluric_cal_id, side=side)
iraf.unlearn('telluric')
iraf.telluric.input = rootname + '.ms.fits'
iraf.telluric.output = ""
iraf.telluric.sample = "6277:6288,6860:7000,7584:7678,9252:9842"
iraf.telluric.interactive = "no"
iraf.telluric.cal = 'norm_%s.fits' % tell_rootname
iraf.telluric.ignoreaps = 'yes'
iraf.telluric.xcorr = 'yes'
iraf.telluric.tweakrms = 'yes'
iraf.telluric.threshold = 0.01
iraf.telluric()
# measure shift with sky lines *before* fluxing to avoid floating point errors
    # measure the position and width of sky lines (do this only for exposures longer than 2 min)
hdr = pyfits.getheader(rootname + '.fits')
midpoint_loc = {'blue':4750,'red':7400}
if hdr['EXPTIME'] > 120:
iraf.unlearn('scopy')
# background band
iraf.delete(rootname + '.2001.fits', verify='no')
iraf.scopy(rootname + '.ms.fits', rootname, band=3, format="onedspec")
iraf.unlearn('fitprofs')
iraf.fitprofs.gfwhm = fwhm_arc
iraf.fitprofs.nerrsample = 100
iraf.fitprofs.sigma0 = det_pars[side]['readnoise']
iraf.fitprofs.invgain = 1./det_pars[side]['gain']
iraf.delete('skyfit*.dat', verify='no')
sky_lines = {'blue':
{'wavelength':[4046.565, 4358.335, 5460.750, 5577.340],
'regs':['4040 4054', '4350 4365', '5455 5469', '5572 5583']},
'red':
{'wavelength':[6923.21, 7340.885, 7821.51, 8430.174, 8885.83],
'regs':['6917 6930', '7333 7348', '7813 7830', '8422 8439', '8875 8895']}}
offsets = []
for i in range(len(sky_lines[side]['wavelength'])):
iraf.fitprofs(rootname + '.2001.fits',
reg=sky_lines[side]['regs'][i],
logfile='skyfit_{:s}_{:1d}.dat'.format(side, i),
pos=BASE_DIR + '/cal/skyline_{:s}_{:1d}.dat'.format(side, i),
verbose='no')
# dump useful data from skyfit?.dat (center width err_center err_width)
os.system('fgrep -v "#" skyfit_{side:s}_{num:1d}.dat |perl -pe "s/0\.\n/0\./g;s/^ +//;s/\(/ /g;s/\)/ /g;s/ +/ /g;" |cut -d" " -f1,6,8,13 > wavelength_offset_{side:s}_{num:1d}.dat'.format(side=side, num=i))
try:
dat = np.genfromtxt('wavelength_offset_{side:s}_{num:1d}.dat'.format(side=side, num=i), usecols=(0, 2), names="center, error")
except:
# keep going if there's a bad profile fit
print "Warning: bad fit for wavelength_offset_{side:s}_{num:1d}.dat".format(side=side, num=i)
continue
assert (dat['center'].size == 1)
offsets.append(dat['center'] - sky_lines[side]['wavelength'][i])
print offsets
offset_final = np.mean(offsets)
        error_at_mid = np.std(offsets, ddof=1)/np.sqrt(len(offsets)) / \
            midpoint_loc[side]*299792.458  # uncertainty in km/s at midpoint_loc[side]
# add wavelength shifts/ uncertainty in km/s to headers
# (CRVAL1 doesn't seem to apply the shift correctly?)
f = pyfits.open(rootname + '.ms.fits')
hdr = f[0].header
if hdr['EXPTIME'] > 120:
        # WOFF: wavelength offset from sky lines [Angstrom]; VERR: uncertainty [km/s]
        f[0].header['WOFF'] = '%.2f' % offset_final
        f[0].header['VERR'] = '%.2f' % error_at_mid
    else:
        # no sky-line fit for short exposures: flag with sentinel values
        f[0].header['WOFF'] = -99.99
        f[0].header['VERR'] = -99.99
f.writeto(rootname + '.ms.fits', clobber=True)
f.close()
# extract counts and uncertainties
iraf.unlearn('scopy')
iraf.delete(rootname + '.0001.fits', verify='no')
iraf.delete(rootname + '.2001.fits', verify='no')
iraf.delete(rootname + '.3001.fits', verify='no')
iraf.scopy(rootname + '.ms.fits', rootname, band=1, format="onedspec")
iraf.scopy(rootname + '.ms.fits', rootname, band=4, format="onedspec")
# correct wavelength calibration using sky lines
f = pyfits.open(rootname + '.0001.fits')
g = pyfits.open(rootname + '.3001.fits')
hdr = f[0].header
if hdr['EXPTIME'] > 120:
        # WOFF: wavelength offset from sky lines [Angstrom]; VERR: uncertainty [km/s]
        f[0].header['WOFF'] = '%.2f' % offset_final
        f[0].header['VERR'] = '%.2f' % error_at_mid
        g[0].header['WOFF'] = '%.2f' % offset_final
        g[0].header['VERR'] = '%.2f' % error_at_mid
iraf.specshift(rootname + '.0001', '%.3f' % (-offset_final))
iraf.specshift(rootname + '.3001', '%.3f' % (-offset_final))
else:
        # sentinel values: no sky-line fit for short exposures
        f[0].header['WOFF'] = -99.99
        f[0].header['VERR'] = -99.99
        g[0].header['WOFF'] = -99.99
        g[0].header['VERR'] = -99.99
f.writeto(rootname + '.0001.fits', clobber=True)
g.writeto(rootname + '.3001.fits', clobber=True)
f.close()
g.close()
# normalize the spectrum and the uncertainty using the response function
iraf.imarith(rootname + '.0001', '/', 'norm_d_trace_flat.fits', rootname + '.0001')
iraf.imarith(rootname + '.3001', '/', 'norm_d_trace_flat.fits', rootname + '.3001')
iraf.delete('*trace_flat.fits', verify="no")
# flux, if requested
if flux:
iraf.unlearn('calibrate')
# mode switch to make the input noninteractive
iraf.calibrate.mode = 'h'
iraf.calibrate.input = rootname+'.0001,'+rootname+'.3001'
iraf.calibrate.output = rootname+'_flux.0001,'+rootname+'_flux.3001'
# TODO: run setairmass; ensure extinction is set up correctly
iraf.calibrate.extinction = ''
# I'm not sure yet why this gets moved to .0001...
iraf.calibrate.sensitivity = 'sens-{}[0]'.format(side)
iraf.calibrate.ignoreaps = 'yes'
iraf.calibrate()