From b020b7bdddbf39be38c7e9f55f7ec3d4384b79bb Mon Sep 17 00:00:00 2001
From: gnthibault
Date: Fri, 17 May 2024 11:52:35 +0200
Subject: [PATCH] WIP

---
 DifferentiatingPerceptron.ipynb          |  6 +++---
 InformationTheoryOptimization.ipynb      |  2 +-
 OptimalTransportWasserteinDistance.ipynb |  4 ++--
 RegularizationByDenoising.ipynb          | 13 ++++++++-----
 bayesian_ab_testing.ipynb                |  2 +-
 5 files changed, 15 insertions(+), 12 deletions(-)

diff --git a/DifferentiatingPerceptron.ipynb b/DifferentiatingPerceptron.ipynb
index f5beb97..ee8b438 100644
--- a/DifferentiatingPerceptron.ipynb
+++ b/DifferentiatingPerceptron.ipynb
@@ -655,7 +655,7 @@
    "views": {}
   },
   "kernelspec": {
-   "display_name": "Python 3",
+   "display_name": "Python 3 (ipykernel)",
    "language": "python",
    "name": "python3"
   },
@@ -669,9 +669,9 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.6.7"
+   "version": "3.12.1"
   }
  },
  "nbformat": 4,
- "nbformat_minor": 1
+ "nbformat_minor": 4
 }
diff --git a/InformationTheoryOptimization.ipynb b/InformationTheoryOptimization.ipynb
index 968d39a..87bbc23 100644
--- a/InformationTheoryOptimization.ipynb
+++ b/InformationTheoryOptimization.ipynb
@@ -191,7 +191,7 @@
     "\\end{align*}\n",
     "where $p_1$ and $p_2$ are probability mass functions and $\\lambda \\in [0,1]$\n",
     "\n",
-    "Proof: Let $X$ be a discrete random variable with possible outcomes $\\mathcal{X} := {x_i, i \\in 0,1,\\dots N-1}$ and let $u(x)$ be the probability mass function of a discrete uniform distribution on $X \\in \\mathcal{X}$. Then, the entropy of an arbitrary probability mass function $p(x)$ can be rewritten as\n",
+    "Proof: Let $X$ be a discrete random variable with possible outcomes $\\mathcal{X} := {x_i, i \\in 0,1,\\dots N-1}$ and let $u(x)$ be the probability mass function of a discrete uniform distribution on $X \\in \\mathcal{X}$, i.e. $u(x_i)=\\frac{1}{N}$. Then, the entropy of an arbitrary probability mass function $p(x)$ can be rewritten as\n",
     "\n",
     "\\begin{align*} \\tag{1.2}\n",
     "    H(X) &= - \\sum_{i=0}^{N-1} p(x_i)log(p(x_i)) \\\\\n",
diff --git a/OptimalTransportWasserteinDistance.ipynb b/OptimalTransportWasserteinDistance.ipynb
index 5c61bfc..b3064ba 100644
--- a/OptimalTransportWasserteinDistance.ipynb
+++ b/OptimalTransportWasserteinDistance.ipynb
@@ -559,7 +559,7 @@
    "metadata": {},
    "source": [
     "### OT and statistical concepts\n",
-    "Some of the basics to understand the following statements can be found in the notebook \"InformationTheoryOptimization\" this part is also partly a direct reproduction of Marco Cuturi famous article \"Sinkhorn Distances: Lightspeed Computation of Optimal Transport\"\n",
+    "Some of the basics to understand the following statements can be found in the notebook \"InformationTheoryOptimization\"; this part is also partly a direct reproduction of Marco Cuturi's famous article \"Sinkhorn Distances: Lightspeed Computation of Optimal Transport\".\n",
     "\n",
     "I would like to stop and mention that as we now interpret $P$ as a joint probability matrix, we can define its entropy, the marginal probabiilty entropy, and KL-divergence between two different transportation matrix. These takes the form of\n",
     "\n",
@@ -579,7 +579,7 @@
     "\\begin{align*} \\tag{1.5}\n",
     "    \\forall r,c \\in \\Sigma_d, \\forall P \\in U(r,c), h(P) \\leq h(r) + h(c)\n",
     "\\end{align*}\n",
-    "ie, by using log-sum inequality, proved in the notebook called InformationTheoryOptimization\n",
+    "i.e., by using the log-sum inequality, which we proved in the notebook called InformationTheoryOptimization:\n",
     "\\begin{align*}\\tag{1.6}\n",
     "    \\sum_{i=0}^{N-1} a_i log\\left(\\frac{a_i}{b_i}\\right) &\\geq \\left(\\sum_{i=0}^{N-1} a_i\\right) log\\left(\\frac{\\sum_{i=0}^{N-1}a_i}{\\sum_{i=0}^{N-1}b_i}\\right)\n",
     "\\end{align*}\n",
diff --git a/RegularizationByDenoising.ipynb b/RegularizationByDenoising.ipynb
index a36354f..3734172 100644
--- a/RegularizationByDenoising.ipynb
+++ b/RegularizationByDenoising.ipynb
@@ -15,7 +15,7 @@
     "\n",
     "## Introduction\n",
     "\n",
-    "This notebook intends to show what are the next steps in terms of regularized image reconstruction. We will try to focus especially in a framework that allows the introduction of deep learning in a proper mathematical framework that allows for prior and data fitting mitigation called: regularization by denoising (RED).\n",
+    "This notebook intends to show the next steps in regularized image reconstruction. We will focus especially on a framework that allows the introduction of deep learning into a proper mathematical setting that balances prior and data-fidelity terms, called regularization by denoising (RED), among other approaches.\n",
     "\n",
     "The following paper guided us to write this notebook:\n",
     "\n",
@@ -38,7 +38,10 @@
     "https://ieeexplore.ieee.org/document/9107406\n",
     "\n",
     "* Recovery Analysis for Plug-and-Play Priors using the Restricted Eigenvalue Condition\n",
-    "https://arxiv.org/abs/2106.03668"
+    "https://arxiv.org/abs/2106.03668\n",
+    "\n",
+    "* Deep inverse\n",
+    "https://github.com/deepinv/deepinv"
    ]
   },
   {
@@ -51,7 +54,7 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3",
+   "display_name": "Python 3 (ipykernel)",
    "language": "python",
    "name": "python3"
   },
@@ -65,9 +68,9 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.8.10"
+   "version": "3.12.1"
   }
  },
  "nbformat": 4,
- "nbformat_minor": 2
+ "nbformat_minor": 4
 }
diff --git a/bayesian_ab_testing.ipynb b/bayesian_ab_testing.ipynb
index b7ed817..83d151c 100644
--- a/bayesian_ab_testing.ipynb
+++ b/bayesian_ab_testing.ipynb
@@ -1960,7 +1960,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.8.6"
+   "version": "3.12.1"
   }
  },
  "nbformat": 4,