improve test for solvers
PierreMarchand20 committed Nov 14, 2023
1 parent 958fea9 · commit a8faa59
Showing 2 changed files with 124 additions and 27 deletions.
9 changes: 9 additions & 0 deletions CMakeLists.txt
@@ -78,6 +78,15 @@ target_include_directories(
${MPI4PY_INCLUDE_DIR})
target_link_libraries(Htool PRIVATE ${MPI_LIBRARIES} ${BLAS_LIBRARIES} ${LAPACK_LIBRARIES} ${ARPACK_LIBRARIES} ${OpenMP_CXX_LIBRARIES})

if("${BLA_VENDOR}" STREQUAL "Intel10_32"
OR "${BLA_VENDOR}" STREQUAL "Intel10_64lp"
OR "${BLA_VENDOR}" STREQUAL "Intel10_64lp_seq"
OR "${BLA_VENDOR}" STREQUAL "Intel10_64ilp"
OR "${BLA_VENDOR}" STREQUAL "Intel10_64ilp_seq"
OR "${BLA_VENDOR}" STREQUAL "Intel10_64_dyn")
target_compile_definitions(htool PRIVATE "-DHPDDM_MKL -DHTOOL_MKL")
endif()

target_compile_definitions(Htool PRIVATE "-DPYTHON_INTERFACE" "-DWITH_HPDDM")

if(CODE_COVERAGE AND (CMAKE_C_COMPILER_ID MATCHES "GNU" OR CMAKE_CXX_COMPILER_ID MATCHES "GNU"))
142 changes: 115 additions & 27 deletions tests/test_ddm_solver.py
@@ -12,63 +12,131 @@
import Htool


@pytest.mark.parametrize("epsilon", [1e-3, 1e-6])
@pytest.mark.parametrize("epsilon", [1e-6])
@pytest.mark.parametrize("eta", [10])
@pytest.mark.parametrize("tol", [1e-10])
@pytest.mark.parametrize("tol", [1e-6])
@pytest.mark.parametrize(
"mu,symmetry,setup_solver_dependencies,hpddm_schwarz_method,hpddm_schwarz_coarse_correction",
"mu,symmetry,overlap,hpddm_schwarz_method,hpddm_schwarz_coarse_correction",
[
(1, "S", True, "none", "none"),
(1, "S", True, "asm", "none"),
(1, "S", True, "ras", "none"),
(10, "S", True, "none", "none"),
(10, "S", True, "asm", "none"),
(10, "S", True, "ras", "none"),
(1, "N", True, "none", "none"),
(1, "N", False, "none", "none"),
(1, "N", False, "asm", "none"),
(1, "N", False, "ras", "none"),
(1, "N", True, "asm", "none"),
(1, "N", True, "ras", "none"),
(10, "N", True, "none", "none"),
(10, "N", False, "none", "none"),
(10, "N", False, "asm", "none"),
(10, "N", False, "ras", "none"),
(10, "N", True, "asm", "none"),
(10, "N", True, "ras", "none"),
# (1, "S", False, "asm", "additive"),
# (1, "S", False, "ras", "additive"),
# (10, "S", False, "asm", "additive"),
# (10, "S", False, "ras", "additive"),
(1, "S", False, "none", "none"),
(1, "S", False, "asm", "none"),
(1, "S", False, "ras", "none"),
(1, "S", True, "asm", "none"),
(1, "S", True, "ras", "none"),
(10, "S", False, "none", "none"),
(10, "S", False, "asm", "none"),
(10, "S", False, "ras", "none"),
(10, "S", True, "asm", "none"),
(10, "S", True, "ras", "none"),
(1, "S", True, "asm", "additive"),
(1, "S", True, "ras", "additive"),
(10, "S", True, "asm", "additive"),
(10, "S", True, "ras", "additive"),
],
indirect=["setup_solver_dependencies"],
# indirect=["setup_solver_dependencies"],
)
def test_ddm_solver(
setup_solver_dependencies,
load_data_solver,
epsilon,
eta,
mu,
overlap,
symmetry,
tol,
hpddm_schwarz_method,
hpddm_schwarz_coarse_correction,
):
(
solver,
# (
# solver,
# x_ref,
# f,
# distributed_operator,
# local_neumann_matrix,
# ) = setup_solver_dependencies

# Setup
[
A,
x_ref,
f,
distributed_operator,
cluster,
neighbors,
intersections,
symmetry,
UPLO,
cluster_to_ovr_subdomain,
ovr_subdomain_to_global,
local_neumann_matrix,
) = setup_solver_dependencies
] = load_data_solver

generator = GeneratorFromMatrix(cluster.get_permutation(), A)
default_approximation = Htool.ComplexDefaultApproximationBuilder(
generator,
cluster,
cluster,
epsilon,
eta,
symmetry,
UPLO,
mpi4py.MPI.COMM_WORLD,
)

solver = None
if not overlap:
default_solver_builder = Htool.ComplexDefaultSolverBuilder(
default_approximation.distributed_operator,
default_approximation.block_diagonal_hmatrix,
)
solver = default_solver_builder.solver
else:
default_solver_builder = Htool.ComplexDefaultDDMSolverBuilder(
default_approximation.distributed_operator,
default_approximation.block_diagonal_hmatrix,
generator,
ovr_subdomain_to_global,
cluster_to_ovr_subdomain,
neighbors,
intersections,
)
solver = default_solver_builder.solver

distributed_operator = default_approximation.distributed_operator

# Solver
x = np.zeros(len(f), dtype="complex128", order="F")
if mu > 1:
x = np.zeros((len(f), mu), dtype="complex128", order="F")

solver.set_hpddm_args(
"-hpddm_schwarz_method "
iterative_solver = "gmres"
restart = "" if (symmetry == "S") else " -hpddm_gmres_restart 200 "
hpddm_args = (
"-hpddm_krylov_method "
+ iterative_solver
+ restart
+ " -hpddm_schwarz_method "
+ hpddm_schwarz_method
+ " -hpddm_max_it 200 -hpddm_gmres_restart 200 -hpddm_tol "
+ " -hpddm_max_it 200 -hpddm_variant right -hpddm_compute_residual l2 -hpddm_tol "
+ str(tol)
)

# if mpi4py.MPI.COMM_WORLD.rank == 0:
# hpddm_args += " -hpddm_verbosity 100 "

solver.set_hpddm_args(hpddm_args)

if hpddm_schwarz_coarse_correction != "none" and mpi4py.MPI.COMM_WORLD.size > 1:
solver.set_hpddm_args(
"-hpddm_schwarz_coarse_correction " + hpddm_schwarz_coarse_correction
)
# print(local_neumann_matrix, local_neumann_matrix.shape)
solver.build_coarse_space(local_neumann_matrix)

if hpddm_schwarz_method == "asm" or hpddm_schwarz_method == "ras":
@@ -90,9 +158,29 @@ def test_ddm_solver(
distributed_operator @ x - f
) / np.linalg.norm(f)
solution_error = np.linalg.norm(x[:, 1] - x_ref) / np.linalg.norm(x_ref)
# error = np.linalg.norm(distributed_operator * x - f)
if mpi4py.MPI.COMM_WORLD.rank == 0:
print(
iterative_solver,
hpddm_schwarz_method,
hpddm_schwarz_coarse_correction,
epsilon,
solver.get_information("Nb_it"),
# error,
# np.linalg.norm(f),
# error / np.linalg.norm(f),
# hpddm_args,
)
# print(
# np.linalg.norm(distributed_operator * x - f),
# np.linalg.norm(f),
# tol,
# mpi4py.MPI.COMM_WORLD.rank,
# )
if mpi4py.MPI.COMM_WORLD.rank == 0:
print(solver.get_information())
assert convergence_error < tol
assert solution_error < epsilon * 10
# x.fill(0)

# # DDM one level ASM wo overlap
# if rank == 0:
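For reference, a minimal, self-contained sketch (not part of the commit) of the two acceptance checks the updated test ends with: a relative residual compared against the Krylov tolerance tol, and a relative error against the reference solution compared against ten times the compression tolerance epsilon. The 2x2 dense system, the direct numpy solve, and the variable names A, f, x_ref, tol and epsilon below are illustrative stand-ins for the fixtures, parameters, and HPDDM solver used in the test.

# Illustrative sketch only; A, f, x_ref, tol and epsilon stand in for the
# fixtures/parameters of test_ddm_solver, and np.linalg.solve stands in for
# the HPDDM Krylov solve.
import numpy as np

tol = 1e-6      # Krylov stopping tolerance (passed via -hpddm_tol)
epsilon = 1e-6  # H-matrix compression tolerance

A = np.array([[4.0, 1.0], [1.0, 3.0]], dtype="complex128")
x_ref = np.array([1.0, 2.0], dtype="complex128")
f = A @ x_ref

x = np.linalg.solve(A, f)

# Relative residual: checked against the solver tolerance.
convergence_error = np.linalg.norm(A @ x - f) / np.linalg.norm(f)
# Relative error w.r.t. the reference solution: checked against 10 * epsilon.
solution_error = np.linalg.norm(x - x_ref) / np.linalg.norm(x_ref)

assert convergence_error < tol
assert solution_error < epsilon * 10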
