Commit

deploy: 2d2b184

mrava87 committed Apr 13, 2024
1 parent 0103540 commit 8a46da2
Showing 81 changed files with 185 additions and 159 deletions.
2 changes: 1 addition & 1 deletion .buildinfo
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
config: 6d660a6109fc62d4ade0f1a8479bb7b8
config: bba4525ffe87efa3cfac04f45c957dff
tags: 645f666f9bcd5a90fca523b33c5a78b7
Binary file modified .doctrees/api/generated/pylops_mpi.optimization.basic.cg.doctree
Binary file modified .doctrees/api/generated/pylops_mpi.optimization.basic.cgls.doctree
Binary file modified .doctrees/api/index.doctree
Binary file modified .doctrees/environment.pickle
Binary file modified .doctrees/gallery/plot_cgls.doctree
Binary file modified .doctrees/gallery/plot_derivative.doctree
Binary file modified .doctrees/gallery/plot_distributed_array.doctree
Binary file modified .doctrees/gallery/plot_mpilinop.doctree
Binary file modified .doctrees/gallery/plot_stacked_array.doctree
Binary file modified .doctrees/gallery/plot_stacking.doctree
Binary file modified .doctrees/gallery/sg_execution_times.doctree
Binary file modified .doctrees/sg_execution_times.doctree
Binary file modified .doctrees/tutorials/lsm.doctree
Binary file modified .doctrees/tutorials/poststack.doctree
Binary file modified .doctrees/tutorials/sg_execution_times.doctree
@@ -69,7 +69,7 @@
},
"outputs": [],
"source": [
"xinv, istop, niter, r1norm, r2norm, cost = pylops_mpi.cgls(BDiag, y, niter=15, tol=1e-10, show=True)\nxinv_array = xinv.asarray()\n\nif rank == 0:\n print(f\"CGLS Solution xinv={xinv_array}\")\n # Visualize\n plt.figure(figsize=(18, 5))\n plt.plot(cost, lw=2, label=\"CGLS\")\n plt.title(\"Cost Function\")\n plt.legend()\n plt.tight_layout()"
"# Set initial guess `x0` to zeroes\nx0 = pylops_mpi.DistributedArray(BDiag.shape[1], dtype=np.float128)\nx0[:] = 0\nxinv, istop, niter, r1norm, r2norm, cost = pylops_mpi.cgls(BDiag, y, x0=x0, niter=15, tol=1e-10, show=True)\nxinv_array = xinv.asarray()\n\nif rank == 0:\n print(f\"CGLS Solution xinv={xinv_array}\")\n # Visualize\n plt.figure(figsize=(18, 5))\n plt.plot(cost, lw=2, label=\"CGLS\")\n plt.title(\"Cost Function\")\n plt.legend()\n plt.tight_layout()"
]
}
],
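Editorial note: the notebook change above makes the starting guess explicit rather than implicit. A minimal, self-contained sketch of the same pattern follows; the MPIBlockDiag built from a random MatrixMult, the sizes, and the variable names are illustrative assumptions, not part of this commit.

# Hedged sketch: explicit zero starting guess for pylops_mpi.cgls
# (operator and sizes are illustrative; run with e.g. `mpirun -n 2 python script.py`)
import numpy as np
import pylops
import pylops_mpi

# each rank contributes one block to the block-diagonal operator
A = pylops.MatrixMult(np.random.normal(0.0, 1.0, (6, 4)), dtype=np.float64)
BDiag = pylops_mpi.MPIBlockDiag(ops=[A])

# distributed model and data
x = pylops_mpi.DistributedArray(global_shape=BDiag.shape[1], dtype=np.float64)
x[:] = 1.0
y = BDiag @ x

# x0 must now be created and zero-filled by the caller before invoking cgls
x0 = pylops_mpi.DistributedArray(global_shape=BDiag.shape[1], dtype=np.float64)
x0[:] = 0
xinv = pylops_mpi.cgls(BDiag, y, x0=x0, niter=15, tol=1e-10, show=False)[0]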
5 changes: 4 additions & 1 deletion _downloads/afacec718d3958430d53150fa3b27203/lsm.py
@@ -164,7 +164,10 @@
# solver.

# Inverse
minv_dist = pylops_mpi.cgls(VStack, d_dist, niter=100, show=True)[0]
# Initializing x0 to zeroes
x0 = pylops_mpi.DistributedArray(VStack.shape[1], partition=pylops_mpi.Partition.BROADCAST)
x0[:] = 0
minv_dist = pylops_mpi.cgls(VStack, d_dist, x0=x0, niter=100, show=True)[0]
minv = minv_dist.asarray().reshape((nx, nz))
d_inv_dist = VStack @ minv_dist
d_inv = d_inv_dist.asarray().reshape(nstot, nr, nt)
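In the lsm.py change above, x0 is created with partition=pylops_mpi.Partition.BROADCAST because an MPIVStack applies every rank's block of operators to one and the same model vector, so the starting guess has to be replicated identically on all ranks rather than scattered. A hedged, self-contained sketch of that pattern follows; the MPIVStack built from a random MatrixMult and all sizes are illustrative assumptions, not taken from this commit.

# Hedged sketch: replicated (BROADCAST) zero starting guess for an MPIVStack inversion
import numpy as np
import pylops
import pylops_mpi

# each rank owns one block of rows; the model is shared by all ranks
A = pylops.MatrixMult(np.random.normal(0.0, 1.0, (10, 5)), dtype=np.float64)
VStack = pylops_mpi.MPIVStack(ops=[A])

x = pylops_mpi.DistributedArray(global_shape=VStack.shape[1],
                                partition=pylops_mpi.Partition.BROADCAST)
x[:] = np.arange(5, dtype=np.float64)
d_dist = VStack @ x  # data comes back scattered, one block of rows per rank

# replicated zero starting guess, as in the updated lsm.py
x0 = pylops_mpi.DistributedArray(VStack.shape[1],
                                 partition=pylops_mpi.Partition.BROADCAST)
x0[:] = 0
minv_dist = pylops_mpi.cgls(VStack, d_dist, x0=x0, niter=20, show=False)[0]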
2 changes: 1 addition & 1 deletion _downloads/d5dae76a02f3b395d667034f44fd5555/lsm.ipynb
@@ -80,7 +80,7 @@
},
"outputs": [],
"source": [
"# Inverse\nminv_dist = pylops_mpi.cgls(VStack, d_dist, niter=100, show=True)[0]\nminv = minv_dist.asarray().reshape((nx, nz))\nd_inv_dist = VStack @ minv_dist\nd_inv = d_inv_dist.asarray().reshape(nstot, nr, nt)"
"# Inverse\n# Initializing x0 to zeroes\nx0 = pylops_mpi.DistributedArray(VStack.shape[1], partition=pylops_mpi.Partition.BROADCAST)\nx0[:] = 0\nminv_dist = pylops_mpi.cgls(VStack, d_dist, x0=x0, niter=100, show=True)[0]\nminv = minv_dist.asarray().reshape((nx, nz))\nd_inv_dist = VStack @ minv_dist\nd_inv = d_inv_dist.asarray().reshape(nstot, nr, nt)"
]
},
{
6 changes: 5 additions & 1 deletion _downloads/df45e4bd89b90a14841f9e115c4b2a59/plot_cgls.py
@@ -45,7 +45,11 @@
# are then obtained in a :py:class:`pylops_mpi.DistributedArray`. To obtain the
# overall inversion of the entire MPIBlockDiag, you can utilize the ``asarray()``
# function of the DistributedArray as shown below.
xinv, istop, niter, r1norm, r2norm, cost = pylops_mpi.cgls(BDiag, y, niter=15, tol=1e-10, show=True)

# Set initial guess `x0` to zeroes
x0 = pylops_mpi.DistributedArray(BDiag.shape[1], dtype=np.float128)
x0[:] = 0
xinv, istop, niter, r1norm, r2norm, cost = pylops_mpi.cgls(BDiag, y, x0=x0, niter=15, tol=1e-10, show=True)
xinv_array = xinv.asarray()

if rank == 0:
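One detail of the plot_cgls.py change above is worth a note: the starting guess is created with an explicit dtype, and np.float128 is not available in NumPy builds on every platform (Windows being the usual example). A more portable variant, offered here only as a hypothetical alternative and not as part of this commit, is to reuse the dtype of the data (assuming, as elsewhere in pylops-mpi, that DistributedArray exposes its dtype):

# Hypothetical, more portable variant of the snippet above
x0 = pylops_mpi.DistributedArray(global_shape=BDiag.shape[1], dtype=y.dtype)
x0[:] = 0
xinv, istop, niter, r1norm, r2norm, cost = pylops_mpi.cgls(BDiag, y, x0=x0, niter=15, tol=1e-10, show=True)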
Binary file modified _images/sphx_glr_lsm_001.png
Binary file modified _images/sphx_glr_lsm_002.png
Binary file modified _images/sphx_glr_lsm_003.png
Binary file modified _images/sphx_glr_lsm_004.png
Binary file modified _images/sphx_glr_lsm_005.png
Binary file modified _images/sphx_glr_plot_cgls_001.png
Binary file modified _images/sphx_glr_plot_derivative_001.png
Binary file modified _images/sphx_glr_plot_derivative_002.png
Binary file modified _images/sphx_glr_plot_derivative_003.png
Binary file modified _images/sphx_glr_plot_derivative_004.png
Binary file modified _images/sphx_glr_plot_distributed_array_001.png
Binary file modified _images/sphx_glr_plot_distributed_array_002.png
Binary file modified _images/sphx_glr_plot_distributed_array_003.png
Binary file modified _images/sphx_glr_plot_distributed_array_004.png
Binary file modified _images/sphx_glr_plot_distributed_array_005.png
Binary file modified _images/sphx_glr_plot_distributed_array_006.png
Binary file modified _images/sphx_glr_plot_distributed_array_007.png
Binary file modified _images/sphx_glr_plot_distributed_array_008.png
Binary file modified _images/sphx_glr_plot_distributed_array_009.png
Binary file modified _images/sphx_glr_plot_distributed_array_010.png
Binary file modified _images/sphx_glr_plot_mpilinop_001.png
Binary file modified _images/sphx_glr_plot_mpilinop_002.png
Binary file modified _images/sphx_glr_plot_stacking_001.png
Binary file modified _images/sphx_glr_plot_stacking_002.png
Binary file modified _images/sphx_glr_plot_stacking_003.png
Binary file modified _images/sphx_glr_poststack_001.png
13 changes: 11 additions & 2 deletions _modules/pylops_mpi/DistributedArray.html
@@ -1047,12 +1047,17 @@ Source code for pylops_mpi.DistributedArray
    ----------
    distarrays : :obj:`list`
        List of :class:`pylops_mpi.DistributedArray` objects.

    base_comm : :obj:`mpi4py.MPI.Comm`, optional
        Base MPI Communicator.
        Defaults to ``mpi4py.MPI.COMM_WORLD``.
    """

    def __init__(self, distarrays: List):
    def __init__(self, distarrays: List, base_comm: MPI.Comm = MPI.COMM_WORLD):
        self.distarrays = distarrays
        self.narrays = len(distarrays)
        self.base_comm = base_comm
        self.rank = base_comm.Get_rank()
        self.size = base_comm.Get_size()

    def __getitem__(self, index):
        return self.distarrays[index]
@@ -1128,6 +1133,8 @@ Source code for pylops_mpi.DistributedArray
        return self

    def multiply(self, stacked_array):
        """Stacked Distributed Multiplication of arrays
        """
        if isinstance(stacked_array, StackedDistributedArray):
            self._check_stacked_size(stacked_array)
        ProductArray = self.copy()
@@ -1143,6 +1150,8 @@ Source code for pylops_mpi.DistributedArray
        return ProductArray

    def dot(self, stacked_array):
        """Dot Product of Stacked Distributed Arrays
        """
        self._check_stacked_size(stacked_array)
        dotprod = 0.
        for iarr in range(self.narrays):
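The newly documented multiply and dot operate array by array across the stack: multiply returns a new StackedDistributedArray holding the element-wise products, while dot accumulates the per-array dot products into a single scalar. A hedged usage sketch, continuing the illustrative stacked array built earlier:

# Illustrative: element-wise product and dot product of stacked arrays
prod = stacked.multiply(stacked)  # new StackedDistributedArray of element-wise squares
dp = stacked.dot(stacked)         # 1.0**2 * 8 + 2.0**2 * 4 = 24.0 for the arrays above
if stacked.rank == 0:
    print(dp)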
29 changes: 17 additions & 12 deletions _modules/pylops_mpi/optimization/basic.html
@@ -447,16 +447,21 @@ Source code for pylops_mpi.optimization.basic
from typing import Callable, Optional, Tuple, Union

from pylops.utils import NDArray
from pylops_mpi import MPILinearOperator, DistributedArray, StackedDistributedArray
from pylops_mpi import (
    MPILinearOperator,
    DistributedArray,
    StackedDistributedArray,
    MPIStackedLinearOperator
)
from pylops_mpi.optimization.cls_basic import CG, CGLS


def cg(
    Op: MPILinearOperator,
    y: Union[DistributedArray, StackedDistributedArray],
    x0: Optional[DistributedArray] = None,
    Op: Union[MPILinearOperator, MPIStackedLinearOperator],
    y: Union[DistributedArray, StackedDistributedArray],
    x0: Union[DistributedArray, StackedDistributedArray],
    niter: int = 10,
    tol: float = 1e-4,
    show: bool = False,
@@ -465,16 +470,16 @@ Source code for pylops_mpi.optimization.basic
) -> Tuple[Union[DistributedArray, StackedDistributedArray], int, NDArray]:
    r"""Conjugate gradient

    Solve a square system of equations given an MPILinearOperator ``Op`` and
    Solve a square system of equations given either an MPILinearOperator or an MPIStackedLinearOperator ``Op`` and
    distributed data ``y`` using conjugate gradient iterations.

    Parameters
    ----------
    Op : :obj:`pylops_mpi.MPILinearOperator`
    Op : :obj:`pylops_mpi.MPILinearOperator` or :obj:`pylops_mpi.MPIStackedLinearOperator`
        Operator to invert of size :math:`[N \times N]`
    y : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
        DistributedArray of size (N,)
    x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`, optional
    x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
        Initial guess
    niter : :obj:`int`, optional
        Number of iterations
@@ -517,9 +522,9 @@ Source code for pylops_mpi.optimization.basic
def cgls(
    Op: MPILinearOperator,
    Op: Union[MPILinearOperator, MPIStackedLinearOperator],
    y: Union[DistributedArray, StackedDistributedArray],
    x0: Optional[Union[DistributedArray, StackedDistributedArray]] = None,
    x0: Union[DistributedArray, StackedDistributedArray],
    niter: int = 10,
    damp: float = 0.0,
    tol: float = 1e-4,
@@ -529,16 +534,16 @@ Source code for pylops_mpi.optimization.basic
) -> Tuple[DistributedArray, int, int, float, float, NDArray]:
    r"""Conjugate gradient least squares

    Solve an overdetermined system of equations given a MPILinearOperator ``Op`` and
    Solve an overdetermined system of equations given either an MPILinearOperator or an MPIStackedLinearOperator``Op`` and
    distributed data ``y`` using conjugate gradient iterations.

    Parameters
    ----------
    Op : :obj:`pylops_mpi.MPILinearOperator`
    Op : :obj:`pylops_mpi.MPILinearOperator` or :obj:`pylops_mpi.MPIStackedLinearOperator`
        MPI Linear Operator to invert of size :math:`[N \times M]`
    y : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
        DistributedArray of size (N,)
    x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`, optional
    x0 : :obj:`pylops_mpi.DistributedArray` or :obj:`pylops_mpi.StackedDistributedArray`
        Initial guess
    niter : :obj:`int`, optional
        Number of iterations
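Taken together, the signature changes above mean that x0 is no longer optional: callers of cg and cgls must build and zero-fill the starting guess themselves, and both solvers are now typed to accept an MPIStackedLinearOperator with StackedDistributedArray data as well. A hedged migration sketch with a small illustrative operator follows; the Diagonal-based MPIBlockDiag and all sizes are assumptions, not from this commit.

# Illustrative migration: x0 must now be passed explicitly to cg/cgls
import numpy as np
import pylops
import pylops_mpi

Op = pylops_mpi.MPIBlockDiag(ops=[pylops.Diagonal(np.full(5, 2.0))])
y = pylops_mpi.DistributedArray(global_shape=Op.shape[0])
y[:] = 1.0

# before this commit the call could omit x0 (the old signature defaulted it to None):
# xinv = pylops_mpi.cgls(Op, y, niter=10)[0]

# after this commit the zero starting guess is built by the caller
x0 = pylops_mpi.DistributedArray(global_shape=Op.shape[1])
x0[:] = 0
xinv = pylops_mpi.cgls(Op, y, x0=x0, niter=10)[0]

# cg follows the same calling pattern; Op here is square, so the same x0 applies
xinv_cg = pylops_mpi.cg(Op, y, x0=x0, niter=10)[0]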