scripts to feather ALMA 12m + GBT #372

Open · wants to merge 1 commit into base: main
106 changes: 106 additions & 0 deletions aces/joint_deconvolution_cont/crop.py
@@ -0,0 +1,106 @@
from astropy.io import fits
from tqdm.auto import tqdm
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord
from astropy.nddata import Cutout2D
import astropy.units as u
import gc

def get_croppeddata(hdu, region, square=False):

    hdu_crop = hdu.copy()  # Work on a copy so the input HDU is left untouched

    wcs = WCS(hdu_crop)  # Build a WCS object from the HDU header

    try:
        if square:
            size = max([region['width'], region['height']])  # Side length of the square cutout
            cutout = Cutout2D(hdu_crop.data, region['position'], size, wcs=wcs)  # Square cutout
        else:
            # Cutout2D expects the size in (ny, nx) order, i.e. (height, width)
            cutout = Cutout2D(hdu_crop.data, region['position'], [region['height'], region['width']], wcs=wcs)
    except Exception:
        print('[INFO] No overlap between the requested region and the image')
        return None

    hdu_crop.data = cutout.data  # Update the data in the cropped HDU object
    hdu_crop.header.update(cutout.wcs.to_header())  # Update the header with the cutout's WCS information

    # Carry the beam and unit keywords over from the original (pre-crop) header
    header = hdu.header
    header_out = hdu_crop.header
    for key in ['BMAJ', 'BMIN', 'BPA', 'BUNIT']:
        if key in header:
            header_out[key] = header[key]

    # Default to Jy/beam if the original header did not specify a unit
    if 'BUNIT' not in header:
        header_out['BUNIT'] = 'Jy/beam'

    hdu_crop.header = header_out

    del cutout  # Delete the cutout to free up memory
    _ = gc.collect()  # Perform garbage collection

    return hdu_crop  # Return the cropped HDU object

def get_region(l, b, w, h, frame='galactic'):
    region = {'position': SkyCoord(l=l * u.deg, b=b * u.deg, frame=frame),
              'width': w * u.deg,
              'height': h * u.deg}
    return region

# The two continuum maps to crop: the single-dish (TP) and ALMA 12m images
files = ['../data/processed/cont_tp.fits', '../data/processed/cont_12m.fits']

# Define the Galactic coordinates of the field centre and the angular size of the
# desired region (all in degrees)
l = 0.4697169  # Galactic longitude
b = -0.0125704  # Galactic latitude
width = 1.1808514   # extent in longitude
height = 1.1487904  # extent in latitude

# Build the region dictionary used by get_croppeddata()
region = get_region(l, b, width, height)

# Loop over each file. We're using tqdm to create a progress bar.
# tqdm automatically determines the total number of iterations from the length of the 'files' list.
for file in tqdm(files, desc="Processing files", unit="file"):

    # Print the name of the current file being processed
    print('[INFO] infile - %s' % file)

    # Open the FITS file and take the primary HDU
    hdu = fits.open(file)[0]

    # Crop the data in the FITS file to the defined region
    hdu_crop = get_croppeddata(hdu, region)

    # Skip this file if the requested region does not overlap the image
    if hdu_crop is None:
        continue

    # Create a WCS (World Coordinate System) object from the cropped HDU
    wcs = WCS(hdu_crop)

    # Calculate the central pixel coordinates of the cropped image (0-indexed)
    y_mid, x_mid = hdu_crop.data.shape[0] / 2, hdu_crop.data.shape[1] / 2

    # Convert the central pixel coordinates to Galactic coordinates
    centre = wcs.pixel_to_world(x_mid, y_mid)
    l_mid, b_mid = centre.l, centre.b

    # Update the header information of the cropped HDU
    hdu_crop.header['BUNIT'] = 'Jy/beam'     # unit of the data
    hdu_crop.header['CRPIX1'] = x_mid + 1    # x reference pixel (FITS CRPIX is 1-indexed)
    hdu_crop.header['CRPIX2'] = y_mid + 1    # y reference pixel (FITS CRPIX is 1-indexed)
    hdu_crop.header['CRVAL1'] = l_mid.value  # Galactic longitude of the reference pixel
    hdu_crop.header['CRVAL2'] = b_mid.value  # Galactic latitude of the reference pixel

    # Construct the output name by replacing '.fits' with '_cropped.fits' and write it out,
    # overwriting any existing file
    output_file = file.replace('.fits', '_cropped.fits')
    hdu_crop.writeto(output_file, overwrite=True)
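As a quick sanity check on the outputs of crop.py (a sketch, not part of this PR; it assumes the `_cropped.fits` files written above exist), the centre of a cropped map can be compared against the requested Galactic position:

from astropy.io import fits
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord
import astropy.units as u

# Sketch: check that a cropped map is centred on the requested Galactic position
hdu = fits.open('../data/processed/cont_12m_cropped.fits')[0]
wcs = WCS(hdu.header)
centre = wcs.pixel_to_world(hdu.data.shape[1] / 2, hdu.data.shape[0] / 2)
target = SkyCoord(l=0.4697169 * u.deg, b=-0.0125704 * u.deg, frame='galactic')
print('Offset from requested centre:', centre.separation(target).to(u.arcsec))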
23 changes: 23 additions & 0 deletions aces/joint_deconvolution_cont/feather.py
@@ -0,0 +1,23 @@
import os
from casatasks import importfits, feather, exportfits
from casatasks import importfits, feather, exportfits

# Remove stale CASA logs and image artefacts from previous runs
os.system('rm -rf *.last')
os.system('rm -rf *.log')
os.system('rm -rf *.img')

# Convert the cropped FITS images into CASA image format
inputfile = './../data/processed/cont_tp_cropped.fits'
print(inputfile)
importfits(inputfile, inputfile.replace('fits', 'img'), overwrite=True)

inputfile = './../data/processed/cont_12m_cropped.fits'
print(inputfile)
importfits(inputfile, inputfile.replace('fits', 'img'), overwrite=True)

# Feather the high-resolution ALMA 12m image with the low-resolution single-dish image
feather(imagename='./../data/feathered/cont_12mtp.img',
        highres='./../data/processed/cont_12m_cropped.img',
        lowres='./../data/processed/cont_tp_cropped.img')

# Export the feathered CASA image back to FITS
exportfits(imagename='./../data/feathered/cont_12mtp.img',
           fitsimage='./../data/feathered/cont_12mtp.fits',
           overwrite=True)
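A common check after feathering is that the feathered map recovers roughly the same spatially integrated flux as the single-dish map. The sketch below is not part of the PR and assumes both images are in Jy/beam and carry BMAJ/BMIN/CDELT keywords:

import numpy as np
from astropy.io import fits

def total_flux_jy(filename):
    # Sum in Jy/beam, then convert to Jy using the Gaussian beam area
    hdu = fits.open(filename)[0]
    hdr = hdu.header
    pix_area = abs(hdr['CDELT1'] * hdr['CDELT2'])                    # deg^2 per pixel
    beam_area = np.pi * hdr['BMAJ'] * hdr['BMIN'] / (4 * np.log(2))  # deg^2 per beam
    return np.nansum(np.squeeze(hdu.data)) * pix_area / beam_area

print('single dish:', total_flux_jy('./../data/processed/cont_tp_cropped.fits'))
print('feathered  :', total_flux_jy('./../data/feathered/cont_12mtp.fits'))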
51 changes: 51 additions & 0 deletions aces/joint_deconvolution_cont/finalise.py
@@ -0,0 +1,51 @@
# importing required libraries
from astropy.io import fits
from reproject import reproject_interp
import numpy as np

# defining the path of the FITS file to be opened and processed
file = './../data/feathered/cont_12mtp.fits'

# opening the FITS file and extracting the first Header Data Unit (HDU)
hdu_12mtp = fits.open(file)[0]

# removing the singleton dimensions of the data
hdu_12mtp.data = np.squeeze(hdu_12mtp.data)

# remove header keywords for the degenerate third and fourth axes (e.g. frequency and Stokes)
# that CASA writes on export
del hdu_12mtp.header['*3*']
del hdu_12mtp.header['*4*']

# write the modified HDU back to a FITS file, overwriting it if it already exists
hdu_12mtp.writeto('./../data/feathered/cont_12mtp_final.fits', overwrite=True)

# defining the path of another FITS file to be opened
file = './../data/processed/cont_tp_cropped.fits'

# opening the second FITS file and extracting the first HDU
hdu_tp = fits.open(file)[0]

# reproject the single-dish HDU onto the pixel grid of the feathered image,
# ignoring the footprint (the second output of reproject_interp)
data, _ = reproject_interp(hdu_tp, hdu_12mtp.header)

# ratio of the beam solid angles (BMAJ x BMIN products) of the two images, used to
# rescale the single-dish data from Jy/beam in its own beam to Jy/beam in the
# feathered image's beam
beam_ratio = (hdu_12mtp.header['BMAJ']*hdu_12mtp.header['BMIN'])/(hdu_tp.header['BMAJ']*hdu_tp.header['BMIN'])

# rescale the reprojected data by the beam ratio
data = data * beam_ratio

# fill NaN pixels in the feathered map (e.g. regions outside the 12m coverage)
# with the corresponding rescaled single-dish values
hdu_12mtp.data[np.isnan(hdu_12mtp.data)] = data[np.isnan(hdu_12mtp.data)]

# write the final filled HDU to a new FITS file, overwriting it if it already exists
hdu_12mtp.writeto('./../data/feathered/cont_12mtp_final_filled.fits', overwrite=True)
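The beam_ratio scaling in finalise.py follows from the fact that a map in Jy/beam scales linearly with the beam solid angle, and for Gaussian beams the pi/(4 ln 2) factor cancels in the ratio. A minimal sketch of that conversion factor, with illustrative values only:

def jybeam_scale(bmaj_to, bmin_to, bmaj_from, bmin_from):
    # Factor converting Jy/beam measured in the 'from' beam to Jy/beam in the 'to' beam;
    # the Gaussian beam solid angle is pi*bmaj*bmin/(4 ln 2), so the constant cancels
    return (bmaj_to * bmin_to) / (bmaj_from * bmin_from)

# e.g. rescaling a 9" single-dish map onto a hypothetical 2.5" beam:
print(jybeam_scale(2.5, 2.5, 9.0, 9.0))  # ~0.077: the same surface brightness in a smaller beam gives a lower Jy/beam value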
28 changes: 28 additions & 0 deletions aces/joint_deconvolution_cont/process_alma.py
@@ -0,0 +1,28 @@
# importing required libraries
from astropy.io import fits

# defining the path of the FITS file to be opened and processed
file = '../data/raw/12m_continuum_commonbeam_circular_reimaged_mosaic.fits'

# opening the FITS file and extracting the first Header Data Unit (HDU)
# HDU is the highest level component of the FITS file structure
hdu = fits.open(file)[0]

# setting the BUNIT (brightness unit) to 'Jy/beam'
# This indicates that the data is in units of Janskys per beam
hdu.header['BUNIT'] = 'Jy/beam'

# writing the modified HDU back to a FITS file, with the specified name,
# in the specified location, overwriting the file if it already exists.
hdu.writeto('../data/processed/cont_12m.fits', overwrite=True)
38 changes: 38 additions & 0 deletions aces/joint_deconvolution_cont/process_mustang.py
@@ -0,0 +1,38 @@
# importing required libraries
from astropy.io import fits

# defining the path of the FITS file to be opened and processed
file = '../data/raw/SgrB2_5pass_1_.0.2_10mJy_10mJy_w_session5_final_smooth4_PlanckCombined_10feb2020.fits'

# opening the FITS file and extracting the first Header Data Unit (HDU)
# HDU is the highest level component of the FITS file structure
hdu = fits.open(file)[0]

# modifying the BMAJ and BMIN header values
# BMAJ and BMIN typically represent the major and minor axes of the beam,
# in degrees. Here they are set to 9 arcseconds.
hdu.header['BMAJ'] = 9/3600
hdu.header['BMIN'] = 9/3600

# setting the BPA (Beam Position Angle) header value to 0
# This angle is usually measured in degrees from north through east
hdu.header['BPA'] = 0

# setting the BUNIT (brightness unit) to 'Jy/beam'
# This indicates that the data is in units of Janskys per beam
hdu.header['BUNIT'] = 'Jy/beam'

# writing the modified HDU back to a FITS file, with the specified name,
# in the specified location, overwriting the file if it already exists.
hdu.writeto('../data/processed/cont_tp.fits', overwrite=True)
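A brief read-back of the keywords written above (a sketch, not part of the PR; the path is the one used in process_mustang.py) confirms the beam and unit metadata before feathering:

from astropy.io import fits

# Sketch: confirm the beam/unit keywords written by process_mustang.py
hdr = fits.getheader('../data/processed/cont_tp.fits')
for key in ('BMAJ', 'BMIN', 'BPA', 'BUNIT'):
    print(key, '=', hdr.get(key))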