Merge pull request macauff#80 from macauff/pytest_fixes
Fix pytest warnings
Onoddil authored Feb 27, 2024
2 parents d0e9cc2 + e0de7d1 commit 9163236
Showing 7 changed files with 35 additions and 34 deletions.
2 changes: 1 addition & 1 deletion src/macauff/derive_psf_auf_params.py
@@ -861,7 +861,7 @@ def plot_fits(self): # pylint: disable=too-many-statements
diff[i, 2] = _li
diff[i, 3] = x
diff[i, 4] = dx
- diff[i, 5] = dx_fit
+ diff[i, 5] = dx_fit[0]

pool.join()

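Note on this hunk: the fix stores `dx_fit[0]` instead of letting NumPy collapse what is presumably a length-1 fit result into the scalar slot `diff[i, 5]`. The exact call that produced the warning isn't visible from this hunk, but NumPy 1.25+ deprecates implicit conversion of an array with ndim > 0 to a scalar, and pytest surfaces that DeprecationWarning. A minimal sketch with an assumed shape for dx_fit:

    import numpy as np

    diff = np.zeros((1, 6))
    dx_fit = np.array([0.123])   # hypothetical length-1 fit output

    # On NumPy >= 1.25, implicit scalar conversion of an ndim > 0 array
    # (e.g. float(dx_fit)) raises a DeprecationWarning; indexing the
    # element is the explicit, warning-free way to take the scalar.
    diff[0, 5] = dx_fit[0]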
4 changes: 2 additions & 2 deletions src/macauff/get_trilegal_wrapper.py
@@ -169,8 +169,8 @@ def trilegal_webcall(trilegal_version, l, b, area, binaries, av, sigma_av, filte
while not complete: # pylint: disable=too-many-nested-blocks
notconnected = True
busy = True
print("TRILEGAL is being called with \n l={l} deg, b={b} deg, area={area} sqrdeg\n "
"Av={av} with {sigma_av} fractional r.m.s. spread \n in the {filterset} system, complete "
print(f"TRILEGAL is being called with \n l={l} deg, b={b} deg, area={area} sqrdeg\n "
f"Av={av} with {sigma_av} fractional r.m.s. spread \n in the {filterset} system, complete "
f"down to mag={maglim} in its {magnum}th filter, use_binaries set to {binaries}.")
sp.Popen(cmd, shell=True).wait() # pylint: disable=consider-using-with
if (os.path.exists(f'{outfolder}/tmpfile') and
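Note on this hunk: the first two lines of the print call were plain strings, so the {l}, {b}, {area}, {av} and {sigma_av} placeholders were printed literally; only the third line already had the f prefix. Adding f makes the whole message interpolate as intended. Quick illustration with made-up values:

    l, b, area = 45.0, 10.0, 0.5

    print("l={l} deg, b={b} deg, area={area} sqrdeg")    # -> l={l} deg, b={b} deg, area={area} sqrdeg
    print(f"l={l} deg, b={b} deg, area={area} sqrdeg")   # -> l=45.0 deg, b=10.0 deg, area=0.5 sqrdeg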
3 changes: 1 addition & 2 deletions src/macauff/group_sources.py
@@ -411,8 +411,7 @@ def _clean_overlaps(inds, size, n_pool):
y = len(unique_inds)
inds[:y, i] = unique_inds
inds[y:, i] = -1
- if y > maxsize:
- maxsize = y
+ maxsize = max(maxsize, y)
size[i] = y

pool.join()
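Note on this hunk: replacing the two-line if with the max builtin is presumably the refactor suggested by pylint's consider-using-max-builtin check; the behaviour is unchanged. Sketch:

    maxsize, y = 4, 7

    # Before (the pattern pylint's consider-using-max-builtin flags):
    if y > maxsize:
        maxsize = y

    # After: identical result in a single expression.
    maxsize = max(maxsize, y)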
16 changes: 8 additions & 8 deletions src/macauff/parse_catalogue.py
@@ -284,9 +284,9 @@ def npy_to_csv(input_csv_folders, input_match_folder, output_folder, csv_filenam
match_df = pd.DataFrame(columns=cols, index=np.arange(0, n_matches))

for i in column_name_lists[0]:
- match_df[i].iloc[:] = cat_a[i].iloc[ac].values
+ match_df.loc[:, i] = cat_a.loc[ac, i].values
for i in column_name_lists[1]:
- match_df[i].iloc[:] = cat_b[i].iloc[bc].values
+ match_df.loc[:, i] = cat_b.loc[bc, i].values
match_df.iloc[:, 6+n_amags+n_bmags] = p
match_df.iloc[:, 6+n_amags+n_bmags+1] = seps
match_df.iloc[:, 6+n_amags+n_bmags+2] = eta
@@ -299,10 +299,10 @@ def npy_to_csv(input_csv_folders, input_match_folder, output_folder, csv_filenam
match_df.iloc[:, 6+n_amags+n_bmags+6+acontprob.shape[0]+i] = bcontprob[i, :]
if extra_col_name_lists[0] is not None:
for i in extra_col_name_lists[0]:
- match_df[i].iloc[:] = cat_a[i].iloc[ac].values
+ match_df.loc[:, i] = cat_a.loc[ac, i].values
if extra_col_name_lists[1] is not None:
for i in extra_col_name_lists[1]:
- match_df[i].iloc[:] = cat_b[i].iloc[bc].values
+ match_df.loc[:, i] = cat_b.loc[bc, i].values

# FIT_SIG are the last 0-2 columns, after [ID+coords(x2)+mag(xN)]x2 +
# Q match-made columns, plus len(extra_col_name_lists)x2.
@@ -339,15 +339,15 @@ def npy_to_csv(input_csv_folders, input_match_folder, output_folder, csv_filenam
n_anonmatches = len(af)
a_nonmatch_df = pd.DataFrame(columns=cols, index=np.arange(0, n_anonmatches))
for i in column_name_lists[0]:
- a_nonmatch_df[i].iloc[:] = cat_a[i].iloc[af].values
+ a_nonmatch_df.loc[:, i] = cat_a.loc[af, i].values
a_nonmatch_df.iloc[:, 3+n_amags] = p
a_nonmatch_df.iloc[:, 3+n_amags+1] = seps
a_nonmatch_df.iloc[:, 3+n_amags+2] = afeta
a_nonmatch_df.iloc[:, 3+n_amags+3] = afxi
a_nonmatch_df.iloc[:, 3+n_amags+4] = a_avg_cont
if extra_col_name_lists[0] is not None:
for i in extra_col_name_lists[0]:
- a_nonmatch_df[i].iloc[:] = cat_a[i].iloc[af].values
+ a_nonmatch_df.loc[:, i] = cat_a.loc[af, i].values

if input_npy_folders[0] is not None:
ind = (len(column_name_lists[0]) + len(our_columns) +
@@ -373,15 +373,15 @@ def npy_to_csv(input_csv_folders, input_match_folder, output_folder, csv_filenam
n_bnonmatches = len(bf)
b_nonmatch_df = pd.DataFrame(columns=cols, index=np.arange(0, n_bnonmatches))
for i in column_name_lists[1]:
- b_nonmatch_df[i].iloc[:] = cat_b[i].iloc[bf].values
+ b_nonmatch_df.loc[:, i] = cat_b.loc[bf, i].values
b_nonmatch_df.iloc[:, 3+n_bmags] = p
b_nonmatch_df.iloc[:, 3+n_bmags+1] = seps
b_nonmatch_df.iloc[:, 3+n_bmags+2] = bfeta
b_nonmatch_df.iloc[:, 3+n_bmags+3] = bfxi
b_nonmatch_df.iloc[:, 3+n_bmags+4] = b_avg_cont
if extra_col_name_lists[1] is not None:
for i in extra_col_name_lists[1]:
- b_nonmatch_df[i].iloc[:] = cat_b[i].iloc[bf].values
+ b_nonmatch_df.loc[:, i] = cat_b.loc[bf, i].values

if input_npy_folders[1] is not None:
ind = (len(column_name_lists[1]) + len(our_columns) +
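Note on this file: all eight hunks make the same substitution, turning chained indexing (df[col].iloc[...] = ...) into a single .loc assignment on both the read and the write side. Writing through a chained selection goes via an intermediate object, which pandas can warn about (SettingWithCopyWarning, and chained-assignment warnings under copy-on-write); one .loc call per step avoids that. A reduced sketch with hypothetical column names and match indices:

    import numpy as np
    import pandas as pd

    cat_a = pd.DataFrame({"RA": np.arange(5.0), "Dec": 2 * np.arange(5.0)})
    ac = np.array([3, 1, 4])                               # assumed match indices
    match_df = pd.DataFrame(columns=["RA", "Dec"], index=np.arange(len(ac)))

    # Chained form pandas may warn about:
    #   match_df["RA"].iloc[:] = cat_a["RA"].iloc[ac].values
    # Single-step form used after this change; .loc works with positional
    # indices here because the catalogue index is a default RangeIndex.
    match_df.loc[:, "RA"] = cat_a.loc[ac, "RA"].values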
13 changes: 7 additions & 6 deletions tests/macauff/test_counterpart_pairing.py
@@ -4,6 +4,7 @@
'''

import itertools
+ import math
import os

import numpy as np
@@ -662,7 +663,7 @@ def test_pair_sources(self): # pylint: disable=too-many-statements
def test_f90_comb():
for n in [2, 3, 4, 5]:
for k in range(2, n+1, 1):
- n_combs = np.math.factorial(n) / np.math.factorial(k) / np.math.factorial(n - k)
+ n_combs = math.factorial(n) / math.factorial(k) / math.factorial(n - k)
combs = cpf.calc_combs(n, n_combs, k).T
new_combs = combs[np.lexsort([combs[:, i] for i in range(k)])]

@@ -674,8 +675,8 @@ def test_f90_comb():
def test_f90_perm_comb():
for n in [2, 3, 4, 5]:
for k in range(2, n+1, 1):
- n_combs = np.math.factorial(n) / np.math.factorial(k) / np.math.factorial(n - k)
- n_perms_per_comb = np.math.factorial(k)
+ n_combs = math.factorial(n) / math.factorial(k) / math.factorial(n - k)
+ n_perms_per_comb = math.factorial(k)
perms = cpf.calc_permcombs(n, k, n_perms_per_comb, n_combs).T
new_perms = perms[np.lexsort([perms[:, i] for i in range(k)])]

@@ -688,12 +689,12 @@ def test_f90_perm_comb():

def test_factorial():
for k in range(21):
- assert np.math.factorial(k) == cpf.factorial(k, k-1)
- assert np.math.factorial(k) == cpf.factorial(k, k)
+ assert math.factorial(k) == cpf.factorial(k, k-1)
+ assert math.factorial(k) == cpf.factorial(k, k)

for k in range(21):
assert cpf.factorial(k, 1) == k

for k in range(21):
for l in range(1, k+1):
- assert cpf.factorial(k, l) == np.math.factorial(k) / np.math.factorial(k - l)
+ assert cpf.factorial(k, l) == math.factorial(k) / math.factorial(k - l)
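Note on this file: np.math was only a re-export of the standard library math module; NumPy deprecated the alias (it emits a DeprecationWarning on recent releases and is removed in NumPy 2.0), so the tests now import math directly and the computed values are unchanged. For instance:

    import math

    n, k = 5, 3
    n_combs = math.factorial(n) / math.factorial(k) / math.factorial(n - k)
    assert n_combs == math.comb(n, k)   # both give 10 for n=5, k=3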
6 changes: 3 additions & 3 deletions tests/macauff/test_make_set_list.py
@@ -4,6 +4,7 @@
'''

import os
+ import warnings

import numpy as np
import pytest
@@ -55,11 +56,10 @@ def test_set_list_maximum_exceeded():
f'{n_b}/{n_b+2} catalogue b stars'):
alist, blist, agrplen, bgrplen, _, _ = set_list(a_overlaps, b_overlaps, a_num, b_num, 2)
else:
- with pytest.warns(None) as record:
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
# pylint: disable-next=unbalanced-tuple-unpacking
alist, blist, agrplen, bgrplen = set_list(a_overlaps, b_overlaps, a_num, b_num, 2)
- # Should be empty if no warnings were raised.
- assert not record
if i != 2:
assert np.all(agrplen == np.array([1, 1, 0]))
assert np.all(bgrplen == np.array([1, 0, 1]))
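Note on this hunk: pytest 7 deprecated pytest.warns(None) as a way of asserting that no warning is raised. The replacement used here, warnings.catch_warnings() plus simplefilter("error"), turns any warning raised inside the block into an exception, which pytest then reports as a test failure. Minimal illustration with a stand-in for the real set_list call:

    import warnings

    def quiet_call():
        return 42        # stand-in for set_list; illustrative only

    with warnings.catch_warnings():
        warnings.simplefilter("error")   # any warning now raises
        result = quiet_call()
    assert result == 42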
25 changes: 13 additions & 12 deletions tests/macauff/test_perturbation_auf.py
@@ -5,6 +5,7 @@

# pylint: disable=too-many-lines,duplicate-code

+ import math
import os

import numpy as np
@@ -102,9 +103,9 @@ def test_no_perturb_outputs(self):
def test_perturb_aufs():
# Poisson distribution with mean 0.08 gives 92.3% zero, 7.4% one, and 0.3% two draws.
mean = 0.08
- prob_0_draw = mean**0 * np.exp(-mean) / np.math.factorial(0)
- prob_1_draw = mean**1 * np.exp(-mean) / np.math.factorial(1)
- prob_2_draw = mean**2 * np.exp(-mean) / np.math.factorial(2)
+ prob_0_draw = mean**0 * np.exp(-mean) / math.factorial(0)
+ prob_1_draw = mean**1 * np.exp(-mean) / math.factorial(1)
+ prob_2_draw = mean**2 * np.exp(-mean) / math.factorial(2)

n = np.array([1.0])
m = np.array([0.0])
@@ -553,9 +554,9 @@ def test_psf_algorithm(self): # pylint: disable=too-many-locals
'trilegal_auf_simulation_faint.dat', "w", encoding='utf-8') as f:
f.write(text)

- prob_0_draw = psf_mean**0 * np.exp(-psf_mean) / np.math.factorial(0)
- prob_1_draw = psf_mean**1 * np.exp(-psf_mean) / np.math.factorial(1)
- prob_2_draw = psf_mean**2 * np.exp(-psf_mean) / np.math.factorial(2)
+ prob_0_draw = psf_mean**0 * np.exp(-psf_mean) / math.factorial(0)
+ prob_1_draw = psf_mean**1 * np.exp(-psf_mean) / math.factorial(1)
+ prob_2_draw = psf_mean**2 * np.exp(-psf_mean) / math.factorial(2)

ax1, ax2 = self.auf_points[0]

@@ -701,9 +702,9 @@ def test_compute_local_density(self, precompute_tri_hists):
'trilegal_auf_simulation_faint.dat', "w", encoding='utf-8') as f:
f.write(text)

- prob_0_draw = psf_mean**0 * np.exp(-psf_mean) / np.math.factorial(0)
- prob_1_draw = psf_mean**1 * np.exp(-psf_mean) / np.math.factorial(1)
- prob_2_draw = psf_mean**2 * np.exp(-psf_mean) / np.math.factorial(2)
+ prob_0_draw = psf_mean**0 * np.exp(-psf_mean) / math.factorial(0)
+ prob_1_draw = psf_mean**1 * np.exp(-psf_mean) / math.factorial(1)
+ prob_2_draw = psf_mean**2 * np.exp(-psf_mean) / math.factorial(2)

ax1, ax2 = self.auf_points[0]

@@ -927,9 +928,9 @@ def test_with_galaxy_counts(self):
'trilegal_auf_simulation_faint.dat', "w", encoding='utf-8') as f:
f.write(text)

- prob_0_draw = psf_mean**0 * np.exp(-psf_mean) / np.math.factorial(0)
- prob_1_draw = psf_mean**1 * np.exp(-psf_mean) / np.math.factorial(1)
- prob_2_draw = psf_mean**2 * np.exp(-psf_mean) / np.math.factorial(2)
+ prob_0_draw = psf_mean**0 * np.exp(-psf_mean) / math.factorial(0)
+ prob_1_draw = psf_mean**1 * np.exp(-psf_mean) / math.factorial(1)
+ prob_2_draw = psf_mean**2 * np.exp(-psf_mean) / math.factorial(2)

ax1, ax2 = self.auf_points[0]

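Note on this file: the same np.math to math swap as in test_counterpart_pairing.py, here applied to the Poisson terms. Since factorial(0) and factorial(1) are both 1, only the two-draw term actually needs the factorial, and the values can be cross-checked against scipy's Poisson PMF (assuming scipy is available in the test environment):

    import math
    import numpy as np
    from scipy.stats import poisson

    mean = 0.08
    probs = [mean**k * np.exp(-mean) / math.factorial(k) for k in range(3)]
    assert np.allclose(probs, poisson.pmf([0, 1, 2], mean))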