October 2024 bugfixes & additions (#432)
* boost cyclefactor for field z and make rms map list verbose

* record casaversion.  increase cube velo ranges

* some switches to /red

* statsgrid: don't allow nans.  add ns+ mosaic

* cube mosaicing

* updates - especially, change parameters for giant cube analyses

* refactor - no masked max, add prints

* Updated masking

* try some dask refactoring

* make sure non-dask mode works, and increase chunk size dramatically to decrease graph size

* major refactor of giantcube to try to debug dask issues.  also refactored Ash's contributions for added verbosity

* try the save-to-tmpdir approach

* get_noise needs howargs

* reorder operations

* significant refactor of masking

* remove redundant moment

* more performance tweaks, plus skipping a step we decided was unnecessary on telecon

* more testing code

* writing out dask files requires a different approach

* disable rechunk step, save pngs

* weight mosaic -> pb mosaic

* primary beam mosaics and some figure cleanup

* add acetaldehyde

* add noTP HCO+ cube

* add purely downsampling code

* add latex_table (again?) and reorg downsample script

* fix units

* bugfix in cube downsampling & table fixups

* flake8

* next tables

* flake

* tablemaking, contmosaic alpha

* regions

* comment out latex stuff

* new aggregate clean commands, zoom regions

* minor: zoom region rename

* fix major unit error in common-beam FITS images

* whitespace

* flake

* seriously, ignore that indentation, it is right.

---------

Co-authored-by: Ashley Barnes <[email protected]>
keflavich and ashleythomasbarnes authored Dec 10, 2024
1 parent 4f93de0 commit b676d21
Showing 21 changed files with 14,021 additions and 193 deletions.
aces/analysis/continuum_selection_diagnostic_plots.py (3 changes: 0 additions & 3 deletions)

@@ -176,9 +176,6 @@ def assemble_new_contsels(convert_to_native=False, allow_missing_maxspec=False):
     gous = os.getenv('GOUS') or 'A001_X1590_X30a9'
 
     for sbname, allpars in cmds.items():
-        if 'TM1' not in sbname:
-            # handle 7m separately
-            continue
         mous_ = allpars['mous']
         mous = mous_[6:].replace("/", "_")
         assert len(mous) in (14, 15, 16)
aces/analysis/cube_stats_grid.py (2 changes: 1 addition & 1 deletion)

@@ -192,7 +192,7 @@ def save_tbl(rows, colnames):
         config = config.split(" ")[0]
         rerun = 'original' in sbname
 
-        for suffix in (".image", ):  # ".contsub.image"):#, ".contsub.JvM.image.fits", ".JvM.image.fits"):
+        for suffix in (".image", ".image.pbcor.statcont.contsub.fits"):  # ".contsub.image"):#, ".contsub.JvM.image.fits", ".JvM.image.fits"):
             globblob = f'{fullpath}/calibrated/working/*.iter1{suffix}'
             fns = glob.glob(globblob)
             globblob2 = f'{fullpath}/reclean/*.iter1{suffix}'
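As a minimal sketch (not the repository code) of what the added suffix buys: each suffix in the tuple is globbed under both the calibrated/working and reclean directories, so the new .image.pbcor.statcont.contsub.fits products get collected alongside the plain .image cubes. The fullpath value below is a made-up placeholder.

import glob

# Hypothetical MOUS directory; in the real script this comes from the per-SB bookkeeping.
fullpath = "/path/to/member_mous"

for suffix in (".image", ".image.pbcor.statcont.contsub.fits"):
    # Same two search locations as the script: the pipeline working area and any recleans.
    fns = (glob.glob(f"{fullpath}/calibrated/working/*.iter1{suffix}") +
           glob.glob(f"{fullpath}/reclean/*.iter1{suffix}"))
    for fn in fns:
        print(f"would compute cube statistics for {fn}")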
aces/analysis/cube_stats_grid_feathered.py (17 changes: 14 additions & 3 deletions)

@@ -138,6 +138,12 @@ def save_tbl(rows, colnames):
 if start_from_cached and os.path.exists(tbldir / 'feathered_cube_stats.ecsv'):
     tbl = Table.read(tbldir / 'feathered_cube_stats.ecsv')
     print(tbl)
+
+    if np.any(np.isnan(tbl['min'])):
+        print(f"There are {np.isnan(tbl['min']).sum()} NaNs in the table. Will recompute those. len(tbl)={len(tbl)}")
+        tbl = tbl[np.isfinite(tbl['min'])]
+        print(f"Cut-down table length = {len(tbl)}")
+
     if len(tbl.colnames) != NCOLS:
         warnings.warn("Cached file is BAD! Moving it.")
         shutil.move(tbldir / 'feathered_cube_stats.ecsv',
@@ -167,7 +173,7 @@ def save_tbl(rows, colnames):
                                 (tbl['Config'] == config) &
                                 (tbl['spw'] == spw) &
                                 (tbl['suffix'] == suffix))
-                if any(row_matches):
+                if any(row_matches) and not np.all(np.isfinite(tbl[row_matches]['min'])):
                     print(f"Skipping {fullpath} as complete: {tbl[row_matches]}", flush=True)
                     continue
 
@@ -220,8 +226,8 @@ def save_tbl(rows, colnames):
             dt(cube)
             dt(noiseest_cube)
 
-            minfreq = cube.spectral_axis.min()
-            maxfreq = cube.spectral_axis.max()
+            minfreq = cube.with_spectral_unit(u.GHz).spectral_axis.min()
+            maxfreq = cube.with_spectral_unit(u.GHz).spectral_axis.max()
             restfreq = cube.wcs.wcs.restfrq
 
             # print("getting filled data")
@@ -237,6 +243,8 @@ def save_tbl(rows, colnames):
             dt("finished cube stats")
             min = stats['min']
             max = stats['max']
+            if np.isnan(min) or np.isnan(max):
+                raise ValueError("Cube stats reported a NAN min/max")
             std = stats['sigma']
             sum = stats['sum']
             mean = stats['mean']
Expand Down Expand Up @@ -280,6 +288,9 @@ def save_tbl(rows, colnames):
print(f'len(rows): {len(rows)}, len(colnames): {len(colnames)}')
tbl = save_tbl(rows, colnames)

if np.any(np.isnan(tbl['min'])):
print(f"After processing {fn}, there are {np.isnan(tbl['min']).sum()} NaNs in the table.")

cache_stats_file.close()

print(tbl)
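A note on the spectral-axis change above: spectral-cube can return the spectral axis in any compatible unit, so requesting GHz keeps the recorded min/max frequencies unit-consistent even when a cube's native axis is stored in Hz. A minimal, self-contained illustration (the filename is a placeholder):

from astropy import units as u
from spectral_cube import SpectralCube

cube = SpectralCube.read("example_feathered_cube.fits")  # placeholder filename
freqs = cube.with_spectral_unit(u.GHz).spectral_axis     # spectral axis converted to GHz
print(f"frequency coverage: {freqs.min():.4f} to {freqs.max():.4f}")
print(f"rest frequency from the WCS [Hz]: {cube.wcs.wcs.restfrq}")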
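The cached-table handling added in this file boils down to a simple pattern: rows whose statistics came back as NaN are dropped from the cached ECSV so that the main loop recomputes them instead of treating them as complete. A stand-alone sketch of that pattern, using the column name and filename from the diff above:

import numpy as np
from astropy.table import Table

tbl = Table.read("feathered_cube_stats.ecsv")  # cached statistics table
bad = np.isnan(tbl['min'])
if bad.any():
    print(f"{bad.sum()} of {len(tbl)} cached rows have NaN stats; they will be recomputed")
    tbl = tbl[np.isfinite(tbl['min'])]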
(Diffs for the remaining 18 changed files are not shown.)
