Merge pull request #90 from PyLops/solvers-stacked
CG/CGLS supporting StackedDistributedArray
mrava87 authored Apr 12, 2024
2 parents cca6287 + 3ffdf8e commit 2d2b184
Showing 6 changed files with 220 additions and 70 deletions.
6 changes: 5 additions & 1 deletion examples/plot_cgls.py
@@ -45,7 +45,11 @@
# are then obtained in a :py:class:`pylops_mpi.DistributedArray`. To obtain the
# overall inversion of the entire MPIBlockDiag, you can utilize the ``asarray()``
# function of the DistributedArray as shown below.
xinv, istop, niter, r1norm, r2norm, cost = pylops_mpi.cgls(BDiag, y, niter=15, tol=1e-10, show=True)

# Set initial guess `x0` to zeroes
x0 = pylops_mpi.DistributedArray(BDiag.shape[1], dtype=np.float128)
x0[:] = 0
xinv, istop, niter, r1norm, r2norm, cost = pylops_mpi.cgls(BDiag, y, x0=x0, niter=15, tol=1e-10, show=True)
xinv_array = xinv.asarray()

if rank == 0:
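With this change, ``cgls`` also accepts stacked data when the operator is an MPIStackedLinearOperator. Below is a minimal usage sketch (illustrative only, not from the changed files); it assumes that ``pylops_mpi.MPIStackedVStack`` is available at the top-level namespace and takes a list of MPILinearOperators, and it reuses the ``MPIBlockDiag``/``MatrixMult`` pattern of the example above.

import numpy as np
from mpi4py import MPI
import pylops
import pylops_mpi

rank = MPI.COMM_WORLD.Get_rank()
size = MPI.COMM_WORLD.Get_size()

# Block-diagonal MPI operator built from one dense block per rank
n = 10
A = (rank + 2) * np.eye(n)
BDiag = pylops_mpi.MPIBlockDiag(ops=[pylops.MatrixMult(A)])

# Vertical stack of two block-diagonal operators (assumed constructor)
VStack = pylops_mpi.MPIStackedVStack([BDiag, BDiag])

# Model, stacked data, and a zero initial guess of model size
x = pylops_mpi.DistributedArray(global_shape=n * size, dtype=np.float64)
x[:] = 1.0
y = VStack.matvec(x)  # StackedDistributedArray with one block per stacked operator

x0 = pylops_mpi.DistributedArray(global_shape=n * size, dtype=np.float64)
x0[:] = 0
xinv, istop, niter, r1norm, r2norm, cost = pylops_mpi.cgls(
    VStack, y, x0=x0, niter=15, tol=1e-10, show=True)
xinv_array = xinv.asarray()

Here the model and the initial guess remain plain DistributedArrays, while the data produced by the stacked operator is a StackedDistributedArray.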
13 changes: 11 additions & 2 deletions pylops_mpi/DistributedArray.py
@@ -593,12 +593,17 @@ class StackedDistributedArray:
----------
distarrays : :obj:`list`
List of :class:`pylops_mpi.DistributedArray` objects.
base_comm : :obj:`mpi4py.MPI.Comm`, optional
Base MPI Communicator.
Defaults to ``mpi4py.MPI.COMM_WORLD``.
"""

def __init__(self, distarrays: List):
def __init__(self, distarrays: List, base_comm: MPI.Comm = MPI.COMM_WORLD):
self.distarrays = distarrays
self.narrays = len(distarrays)
self.base_comm = base_comm
self.rank = base_comm.Get_rank()
self.size = base_comm.Get_size()

def __getitem__(self, index):
return self.distarrays[index]
@@ -674,6 +679,8 @@ def iadd(self, stacked_array):
return self

def multiply(self, stacked_array):
"""Stacked Distributed Multiplication of arrays
"""
if isinstance(stacked_array, StackedDistributedArray):
self._check_stacked_size(stacked_array)
ProductArray = self.copy()
@@ -689,6 +696,8 @@ def multiply(self, stacked_array):
return ProductArray

def dot(self, stacked_array):
"""Dot Product of Stacked Distributed Arrays
"""
self._check_stacked_size(stacked_array)
dotprod = 0.
for iarr in range(self.narrays):
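The new ``base_comm`` argument also gives a StackedDistributedArray its own ``rank`` and ``size``, next to the element-wise ``multiply`` and global ``dot`` documented above. A minimal sketch of how these pieces fit together (illustrative only; the global shapes are arbitrary):

import numpy as np
from mpi4py import MPI
import pylops_mpi

# Two distributed arrays with different global sizes
ar1 = pylops_mpi.DistributedArray(global_shape=10, dtype=np.float64)
ar1[:] = 1.0
ar2 = pylops_mpi.DistributedArray(global_shape=6, dtype=np.float64)
ar2[:] = 2.0

# Stack them; base_comm defaults to MPI.COMM_WORLD
arstack = pylops_mpi.StackedDistributedArray([ar1, ar2], base_comm=MPI.COMM_WORLD)

prod = arstack.multiply(arstack)  # element-wise product, still a StackedDistributedArray
dp = arstack.dot(arstack)         # scalar dot product summed over all blocks and ranks

if arstack.rank == 0:
    print("dot product:", dp)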
29 changes: 17 additions & 12 deletions pylops_mpi/optimization/basic.py
@@ -1,14 +1,19 @@
from typing import Callable, Optional, Tuple, Union

from pylops.utils import NDArray
from pylops_mpi import MPILinearOperator, DistributedArray, StackedDistributedArray
from pylops_mpi import (
MPILinearOperator,
DistributedArray,
StackedDistributedArray,
MPIStackedLinearOperator
)
from pylops_mpi.optimization.cls_basic import CG, CGLS


def cg(
Op: MPILinearOperator,
y: Union[DistributedArray, StackedDistributedArray] ,
x0: Optional[DistributedArray] = None,
Op: Union[MPILinearOperator, MPIStackedLinearOperator],
y: Union[DistributedArray, StackedDistributedArray],
x0: Union[DistributedArray, StackedDistributedArray],
niter: int = 10,
tol: float = 1e-4,
show: bool = False,
@@ -17,16 +22,16 @@ def cg(
) -> Tuple[Union[DistributedArray, StackedDistributedArray], int, NDArray]:
r"""Conjugate gradient
Solve a square system of equations given an MPILinearOperator ``Op`` and
Solve a square system of equations given either an MPILinearOperator or an MPIStackedLinearOperator ``Op`` and
distributed data ``y`` using conjugate gradient iterations.
Parameters
----------
Op : :obj:`pylops_mpi.MPILinearOperator`
Op : :obj:`pylops_mpi.MPILinearOperator` or :obj:`pylops_mpi.MPIStackedLinearOperator`
Operator to invert of size :math:`[N \times N]`
y : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
DistributedArray of size (N,)
x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`, optional
x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
Initial guess
niter : :obj:`int`, optional
Number of iterations
@@ -66,9 +71,9 @@ def cg(


def cgls(
Op: MPILinearOperator,
Op: Union[MPILinearOperator, MPIStackedLinearOperator],
y: Union[DistributedArray, StackedDistributedArray],
x0: Optional[Union[DistributedArray, StackedDistributedArray]] = None,
x0: Union[DistributedArray, StackedDistributedArray],
niter: int = 10,
damp: float = 0.0,
tol: float = 1e-4,
@@ -78,16 +83,16 @@ def cgls(
) -> Tuple[DistributedArray, int, int, float, float, NDArray]:
r"""Conjugate gradient least squares
Solve an overdetermined system of equations given a MPILinearOperator ``Op`` and
Solve an overdetermined system of equations given either an MPILinearOperator or an MPIStackedLinearOperator ``Op`` and
distributed data ``y`` using conjugate gradient iterations.
Parameters
----------
Op : :obj:`pylops_mpi.MPILinearOperator`
Op : :obj:`pylops_mpi.MPILinearOperator` or :obj:`pylops_mpi.MPIStackedLinearOperator`
MPI Linear Operator to invert of size :math:`[N \times M]`
y : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
DistributedArray of size (N,)
x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`, optional
x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
Initial guess
niter : :obj:`int`, optional
Number of iterations
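In summary, ``x0`` is now a required argument of both ``cg`` and ``cgls``, and ``Op`` may be either an MPILinearOperator or an MPIStackedLinearOperator. A minimal sketch of the updated ``cg`` call on a square block-diagonal operator (illustrative only; the three return values follow the annotated signature, and the operator construction mirrors the CGLS example):

import numpy as np
from mpi4py import MPI
import pylops
import pylops_mpi

rank = MPI.COMM_WORLD.Get_rank()
size = MPI.COMM_WORLD.Get_size()

# Square, well-conditioned block on every rank
n = 10
Op = pylops_mpi.MPIBlockDiag(ops=[pylops.MatrixMult((rank + 2) * np.eye(n))])

# Distributed data and the (now mandatory) zero initial guess
y = pylops_mpi.DistributedArray(global_shape=n * size, dtype=np.float64)
y[:] = 1.0
x0 = pylops_mpi.DistributedArray(global_shape=n * size, dtype=np.float64)
x0[:] = 0

xinv, iiter, cost = pylops_mpi.cg(Op, y, x0=x0, niter=10, tol=1e-10, show=True)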
83 changes: 36 additions & 47 deletions pylops_mpi/optimization/cls_basic.py
@@ -11,12 +11,12 @@
class CG(Solver):
r"""Conjugate gradient
Solve a square system of equations given an MPILinearOperator ``Op`` and
Solve a square system of equations given either an MPILinearOperator or an MPIStackedLinearOperator ``Op`` and
distributed data ``y`` using conjugate gradient iterations.
Parameters
----------
Op : :obj:`pylops_mpi.MPILinearOperator`
Op : :obj:`pylops_mpi.MPILinearOperator` or :obj:`pylops_mpi.MPIStackedLinearOperator`
Operator to invert of size :math:`[N \times N]`
Notes
@@ -42,14 +42,16 @@ def _print_setup(self, xcomplex: bool = False) -> None:
print(head1)

def _print_step(self, x: Union[DistributedArray, StackedDistributedArray]) -> None:
if isinstance(x, StackedDistributedArray):
x = x.distarrays[0]
strx = f"{x[0]:1.2e} " if np.iscomplexobj(x.local_array) else f"{x[0]:11.4e} "
msg = f"{self.iiter:6g} " + strx + f"{self.cost[self.iiter]:11.4e}"
print(msg)

def setup(
self,
y: Union[DistributedArray, StackedDistributedArray],
x0: Optional[Union[DistributedArray, StackedDistributedArray]] = None,
x0: Union[DistributedArray, StackedDistributedArray],
niter: Optional[int] = None,
tol: float = 1e-4,
show: bool = False,
@@ -60,9 +62,8 @@ def setup(
----------
y : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
Data of size (N,)
x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`, optional
Initial guess of size (N,). If ``None``, initialize
internally as zero vector (will always default to :obj:`pylops_mpi.DistributedArray`)
x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
Initial guess of size (N,).
niter : :obj:`int`, optional
Number of iterations (default to ``None`` in case a user wants to manually step over the solver)
tol : :obj:`float`, optional
Expand All @@ -81,16 +82,8 @@ def setup(
self.niter = niter
self.tol = tol

if x0 is None:
self.r = self.y.copy()
x = DistributedArray(global_shape=self.Op.shape[1],
partition=self.r.partition,
local_shapes=self.r.local_shapes,
dtype=y.dtype)
x[:] = 0
else:
x = x0.copy()
self.r = self.y - self.Op.matvec(x)
x = x0.copy()
self.r = self.y - self.Op.matvec(x)
self.rank = x.rank
self.c = self.r.copy()
self.kold = np.abs(self.r.dot(self.r.conj()))
@@ -101,7 +94,10 @@
self.iiter = 0

if show and self.rank == 0:
self._print_setup(np.iscomplexobj(x.local_array))
if isinstance(x, StackedDistributedArray):
self._print_setup(np.iscomplexobj([x1.local_array for x1 in x.distarrays]))
else:
self._print_setup(np.iscomplexobj(x.local_array))
return x

def step(self, x: Union[DistributedArray, StackedDistributedArray],
@@ -204,21 +200,20 @@ def finalize(self, show: bool = False) -> None:
def solve(
self,
y: Union[DistributedArray, StackedDistributedArray],
x0: Optional[Union[DistributedArray, StackedDistributedArray]] = None,
x0: Union[DistributedArray, StackedDistributedArray],
niter: int = 10,
tol: float = 1e-4,
show: bool = False,
itershow: Tuple[int, int, int] = (10, 10, 10),
) -> Tuple[DistributedArray, int, NDArray]:
) -> Tuple[Union[DistributedArray, StackedDistributedArray], int, NDArray]:
r"""Run entire solver
Parameters
----------
y : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
Data of size (N,)
x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`, optional
Initial guess of size (N,). If ``None``, initialize
internally as zero vector (will always default to :obj:`pylops_mpi.DistributedArray`)
x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
Initial guess of size (N,).
niter : :obj:`int`, optional
Number of iterations
tol : :obj:`float`, optional
@@ -250,12 +245,12 @@ def solve(
class CGLS(Solver):
r"""Conjugate gradient least squares
Solve an overdetermined system of equations given a MPILinearOperator ``Op``
Solve an overdetermined system of equations given either an MPILinearOperator or an MPIStackedLinearOperator ``Op``
and distributed data ``y`` using conjugate gradient iterations.
Parameters
----------
Op : :obj:`pylops_mpi.MPILinearOperator`
Op : :obj:`pylops_mpi.MPILinearOperator` or :obj:`pylops_mpi.MPIStackedLinearOperator`
Operator to invert of size :math:`[N \times M]`
Notes
@@ -288,6 +283,8 @@ def _print_setup(self, xcomplex: bool = False) -> None:
print(head1)

def _print_step(self, x: Union[DistributedArray, StackedDistributedArray]) -> None:
if isinstance(x, StackedDistributedArray):
x = x.distarrays[0]
strx = f"{x[0]:1.2e} " if np.iscomplexobj(x.local_array) else f"{x[0]:11.4e} "
msg = (
f"{self.iiter:6g} "
@@ -298,21 +295,20 @@ def _print_step(self, x: Union[DistributedArray, StackedDistributedArray]) -> None:

def setup(self,
y: Union[DistributedArray, StackedDistributedArray],
x0: Optional[DistributedArray] = None,
x0: Union[DistributedArray, StackedDistributedArray],
niter: Optional[int] = None,
damp: float = 0.0,
tol: float = 1e-4,
show: bool = False,
) -> DistributedArray:
) -> Union[DistributedArray, StackedDistributedArray]:
r"""Setup solver
Parameters
----------
y : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
Data of size :math:`[N \times 1]`
x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`, optional
Initial guess of size (M,). If ``None``, Defaults to a zero vector
(will always default to :obj:`pylops_mpi.DistributedArray`)
x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
Initial guess of size (M,).
niter : :obj:`int`, optional
Number of iterations (default to ``None`` in case a user wants to
manually step over the solver)
@@ -334,19 +330,10 @@ def setup(self,
self.tol = tol
self.niter = niter

# initialize solver
if x0 is None:
self.s = y.copy()
r = self.Op.rmatvec(self.s)
x = DistributedArray(global_shape=self.Op.shape[1], dtype=y.dtype,
local_shapes=r.local_shapes,
partition=r.partition)
x[:] = 0
else:
x = x0.copy()
self.s = self.y - self.Op.matvec(x)
damped_x = damp * x
r = self.Op.rmatvec(self.s) - damped_x
x = x0.copy()
self.s = self.y - self.Op.matvec(x)
damped_x = x * damp
r = self.Op.rmatvec(self.s) - damped_x
self.rank = x.rank
self.c = r.copy()
self.q = self.Op.matvec(self.c)
@@ -361,7 +348,10 @@

# print setup
if show and self.rank == 0:
self._print_setup(np.iscomplexobj(x.local_array))
if isinstance(x, StackedDistributedArray):
self._print_setup(np.iscomplexobj([x1.local_array for x1 in x.distarrays]))
else:
self._print_setup(np.iscomplexobj(x.local_array))
return x

def step(self, x: Union[DistributedArray, StackedDistributedArray],
@@ -423,7 +413,7 @@ def run(self,
Returns
-------
x : :obj:`pylops_mpi.DistributedArray`
x : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
Estimated model of size (M, ).
"""
@@ -467,7 +457,7 @@ def finalize(self, show: bool = False, **kwargs) -> None:

def solve(self,
y: Union[DistributedArray, StackedDistributedArray],
x0: Optional[Union[DistributedArray, StackedDistributedArray]] = None,
x0: Union[DistributedArray, StackedDistributedArray],
niter: int = 10,
damp: float = 0.0,
tol: float = 1e-4,
@@ -481,8 +471,7 @@ def solve(self,
y : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
Data of size (N, )
x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
Initial guess of size (M, ). If ``None``, initialize internally as zero vector
(will always default to :obj:`pylops_mpi.DistributedArray`)
Initial guess of size (M, ).
niter : :obj:`int`, optional
Number of iterations (default to ``None`` in case a user wants to
manually step over the solver)
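The functional wrappers in ``basic.py`` delegate to these classes, which can also be driven directly, for instance to step through the iterations manually after ``setup``. A minimal sketch of the one-shot ``solve`` of ``CGLS`` (illustrative only; it assumes the class-level ``solve`` returns the same six values as the ``pylops_mpi.cgls`` wrapper):

import numpy as np
from mpi4py import MPI
import pylops
import pylops_mpi
from pylops_mpi.optimization.cls_basic import CGLS

rank = MPI.COMM_WORLD.Get_rank()
size = MPI.COMM_WORLD.Get_size()

# Same block-diagonal setup as in the sketches above
n = 10
Op = pylops_mpi.MPIBlockDiag(ops=[pylops.MatrixMult((rank + 2) * np.eye(n))])
y = pylops_mpi.DistributedArray(global_shape=n * size, dtype=np.float64)
y[:] = 1.0
x0 = pylops_mpi.DistributedArray(global_shape=n * size, dtype=np.float64)
x0[:] = 0

# Instantiate the solver on the operator, then run it end to end
cglssolve = CGLS(Op)
xinv, istop, iiter, r1norm, r2norm, cost = cglssolve.solve(
    y, x0=x0, niter=15, tol=1e-10, show=True)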