
Commit

Updating fix_columns.py
ericmorganson committed Jun 24, 2016
1 parent 22b2cef commit ebfa79a
Showing 1 changed file with 31 additions and 21 deletions.
52 changes: 31 additions & 21 deletions python/pixcorrect/fix_columns.py
@@ -18,7 +18,6 @@
from pixcorrect.corr_util import logger, do_once, items_must_match
from despyfits.DESImage import DESDataImage, DESImage, DESBPMImage
from pixcorrect.PixCorrectDriver import PixCorrectImStep
#from pixcorrect.clippedMean import clippedMean
from despyfits import maskbits

# Which section of the config file to read for this step
@@ -38,14 +37,15 @@ class FixColumns(PixCorrectImStep):
step_name = config_section
# BPM flag for correctable pixels
CORR = maskbits.BPMDEF_CORR
# These BPM flags may be present for correctable pixels
BPMOK = CORR | maskbits.BPMDEF_BIAS_COL | maskbits.BPMDEF_FUNKY_COL | maskbits.BPMDEF_SUSPECT
BPMOK |= maskbits.BPMDEF_NEAREDGE
# These image mask flags indicate bad pixels in the reference columns
#REJECT = maskbits.BADPIX_BPM + \
# maskbits.BADPIX_SATURATE +\
# maskbits.BADPIX_BADAMP + \
# maskbits.BADPIX_SUSPECT # bitmask for comparison pixels
# These BPM flags define pixels that are not correctable
BPMBAD = maskbits.BPMDEF_FLAT_MIN | \
maskbits.BPMDEF_FLAT_MAX | \
maskbits.BPMDEF_FLAT_MASK | \
maskbits.BPMDEF_BIAS_HOT | \
maskbits.BPMDEF_BIAS_WARM | \
maskbits.BPMDEF_BIAS_MASK | \
maskbits.BPMDEF_WACKY_PIX

MINIMUM_PIXELS = 100 # Smallest number of pixels in column to correct
CLIP_SIGMA = 4 # Rejection threshold for mean statistics
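
A minimal sketch (not part of this commit) of how the CORR and BPMBAD flag sets are combined further down in this diff to pick out correctable pixels; the flag constants come from despyfits.maskbits, which the module already imports:

    import numpy as np
    from despyfits import maskbits

    CORR = maskbits.BPMDEF_CORR
    BPMBAD = (maskbits.BPMDEF_FLAT_MIN | maskbits.BPMDEF_FLAT_MAX |
              maskbits.BPMDEF_FLAT_MASK | maskbits.BPMDEF_BIAS_HOT |
              maskbits.BPMDEF_BIAS_WARM | maskbits.BPMDEF_BIAS_MASK |
              maskbits.BPMDEF_WACKY_PIX)

    def correctable_rows(bpm_col):
        """True where a pixel carries the CORR flag and none of the BPMBAD flags."""
        bpm_col = np.asarray(bpm_col)
        return ((bpm_col & CORR) != 0) & ((bpm_col & BPMBAD) == 0)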

@@ -100,7 +100,8 @@ def _valid_pix(cls, image, bpm, icol):
"""
Return boolean array saying which pixels in column icol are useful for sky stats
"""
use = (bpm.mask[:,icol] & ~maskbits.BADPIX_NEAREDGE) == 0
#Allow NEAREDGE columns for reference
use = (image.mask[:,icol] & ~maskbits.BADPIX_NEAREDGE)==0
use &= ~np.isinf(image.data[:,icol])
use &= ~np.isnan(image.data[:,icol])
return use
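
The test added here reads "clear the NEAREDGE bit, then require no other flag to be set", so pixels whose only defect is being near the edge still count as usable sky. A small illustration, not part of the commit, using BADPIX_SATURATE as a stand-in for any other flag:

    import numpy as np
    from despyfits import maskbits

    mask_col = np.array([0,                          # clean pixel
                         maskbits.BADPIX_NEAREDGE,   # only NEAREDGE: still usable
                         maskbits.BADPIX_SATURATE])  # any other flag: rejected

    use = (mask_col & ~maskbits.BADPIX_NEAREDGE) == 0
    # use -> array([ True,  True, False])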
@@ -124,15 +125,21 @@ def __call__(cls, image, bpm):
#Change VAR_TOLERANCE from 0.5 to 0.25
#Remove lower limit on correction
#Change mask bit usage
# -- correct all correctable pixels, but use only "good" pixels to compute correction
#Correct all correctable pixels, but use only "good" pixels to compute correction

logger.info('Fixing columns')

NEIGHBORS = 6 # Number of comparison columns to seek
RANGE = 12 # Farthest away to look for comparison columns
# Largest allowable fractional difference in variance between the fixable column
# and its neighbors:
VAR_TOLERANCE = 0.25
#We require the number of sky pixels in the target column to be at least this
#fraction of the average number of sky pixels in the reference columns.
#Without this check, if the pixel values of a column vary in a bistable way,
#the high pixels may be interpreted as "objects" and the high variance may go
#unnoticed.
COUNT_TOL = 0.80


if image.mask is None:
raise FixColumnsError('Input image does not have mask')
@@ -160,27 +167,24 @@ def __call__(cls, image, bpm):
# Also, we do not use any bad pixels.
coldata = image.data[:,icol]
colbpm = bpm.mask[:,icol]
#Pixels that can't be corrected
ignore = np.logical_or(colbpm & ~cls.BPMOK, np.isinf(coldata))
ignore = np.logical_or(colbpm & cls.BPMBAD, np.isinf(coldata))
ignore |= np.isnan(coldata)
corr_rows = np.logical_and(colbpm & cls.CORR, ~ignore)
#Additional pixels that are not used to compute the average
ignore |= image.mask[:,icol] & ~(maskbits.BADPIX_BPM | maskbits.BADPIX_NEAREDGE)
ignore |= image.mask[:,icol] & ~maskbits.BADPIX_BPM
use_rows = np.logical_and(colbpm & cls.CORR, ~ignore)

if np.count_nonzero(use_rows) < cls.MINIMUM_PIXELS:
logger.info("Not enough pixels to fix column {:d}".format(icol))
continue

# Get a robust estimate of mean level and slope in target column
#col_mean, col_var, col_n = clippedMean(coldata[use_rows], cls.CLIP_SIGMA)
y = colord[use_rows]
z = coldata[use_rows]
col_mean, col_slope, col_var, col_n = cls._clippedLine(y,z,doslope,cls.CLIP_SIGMA)
if col_var <= 0.0:
logger.info("Error in clipped line fit for column {:d}".format(icol))
continue

# Now want to collect stats on up to NEIGHBORS nearby columns
norm_stats = []
ilow = icol
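
cls._clippedLine, called a few lines above, is defined elsewhere in the file and is not shown in this diff. The sketch below is only a guess at its behaviour, matching the call signature and return order used here (mean level, slope, residual variance, number of unclipped pixels, with a non-positive variance signalling failure):

    import numpy as np

    def clipped_line(y, z, doslope, nsigma, max_iter=10):
        """Iteratively sigma-clipped straight-line fit of z against y (illustrative only)."""
        y0 = y.mean()                      # evaluate the mean level at a fixed ordinate
        keep = np.isfinite(z)
        for _ in range(max_iter):
            if keep.sum() < 3:
                return 0.0, 0.0, -1.0, 0   # too few points: report failure
            if doslope:
                slope, mean = np.polyfit(y[keep] - y0, z[keep], 1)
            else:
                slope, mean = 0.0, z[keep].mean()
            resid = z - (mean + slope * (y - y0))
            var = resid[keep].var()
            new_keep = keep & (np.abs(resid) < nsigma * np.sqrt(var))
            if np.array_equal(new_keep, keep):
                break                      # clipping has converged
            keep = new_keep
        return mean, slope, var, keep.sum()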
@@ -202,7 +206,6 @@
ref_col,ref_slope,ref_var, ref_n = cls._clippedLine(y,z,doslope,cls.CLIP_SIGMA)
if ref_var<=0.0: continue
norm_stats.append([ref_col,ref_slope,ref_var,ref_n])
#norm_stats.append(clippedMean(image.data[:,ilow][use],cls.CLIP_SIGMA))
break

while ihigh<high_limit:
@@ -219,8 +222,8 @@
ref_col,ref_slope,ref_var,ref_n = cls._clippedLine(y,z,doslope,cls.CLIP_SIGMA)
if ref_var<=0.0: continue
norm_stats.append([ref_col,ref_slope,ref_var,ref_n])

break

if len(norm_stats) < NEIGHBORS:
# Don't fix the column if we did not get comparison columns
logger.info('Not enough comparison columns to fix col {:d}'.format(icol))
@@ -238,9 +241,16 @@
# different from the comparison columns
norm_var = np.sum(var*wt)/np.sum(wt)
if np.abs(col_var - norm_var) > VAR_TOLERANCE * norm_var:
logger.info('Too much variance to fix col {:d}'.format(icol))
logger.info('Too much variance to fix column {:d}'.format(icol))
continue
#Check that number of target column sky pixels is not much less than
#the average of the reference columns
norm_n = np.sum(nc)/np.size(nc)
if col_n < COUNT_TOL*norm_n:
logger.info('Too few sky pixels to fix column {:d}'.format(icol))
continue

#Valid correction. Calculate correction & error estimate
norm_mean = np.sum(mean*wt)/np.sum(wt)
correction = norm_mean - col_mean
correction_var = 1./np.sum(wt) + col_var/col_n
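
For context, the closing lines turn the reference-column statistics into an additive correction plus an error estimate. The weight array wt is built in a part of the file this diff does not show; the sketch below assumes it is an inverse-variance weight on each reference-column mean (roughly n/var), which is an assumption rather than something the commit states:

    import numpy as np

    def column_correction(mean, var, nc, col_mean, col_var, col_n):
        """Weighted-mean correction for one fixable column (illustrative only)."""
        wt = nc / var                               # assumed ~ 1 / Var(reference mean)
        norm_mean = np.sum(mean * wt) / np.sum(wt)  # weighted sky level of the neighbours
        correction = norm_mean - col_mean           # offset to add to the bad column
        correction_var = 1.0 / np.sum(wt) + col_var / col_n
        return correction, correction_var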
