From 7cda0088ce211cde7f4b1b1f7e708d1026ae3e79 Mon Sep 17 00:00:00 2001
From: Chad Scherrer
Date: Sat, 16 Apr 2022 07:34:09 -0700
Subject: [PATCH] dev (#175)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Dirichlet(k::Integer, α) = Dirichlet(Fill(α, k))
* export TransformVariables as TV
* drop redundant import
* 0.0 => zero(Float64)
* drop outdated Dists.logpdf
* update StudentT
* drop redundant import
* update Uniform
* bump MeasureBase version
* reworking beta
* small update to StudentT
* basemeasure for discrete Distributions
* using LogExpFunctions => import LogExpFunctions
* quoteof(::Chain)
* prettyprinting and chain-mucking
* Some refactoring for Markov chains
* import MeasureBase: ≪
* version bound for PrettyPrinting
* copy(rng) might change its type (e.g. GLOBAL_RNG)
* tests pass
* cleaning up
* more cleanup
* big update
* get tests passing
* formatting
* oops typo
* move affine to MeasureTheory
* updating
* Val => StaticSymbol
* more fixes
* fix fix fix
* more logdensity => logdensity_def
* more logdensity fixes
* debugging
* formatting
* bugfixes
* working on tests
* updates
* working on tests
* tests passing!
* refactor
* working on tests
* drop static weight for now
* fix sampling from ProductMeasure{<:Base.Generator}
* tests passing!!
* more stuff
* constructor => constructorof
* fix constructorof typo
* updates
* working on tests
* fix Dirichlet
* update Bernoulli
* working on tests
* bugfixes for RealizedSamples
* tests passing!!
* tighten down inference
* as(::PowerMeasure)
* drop type-level stuff
* using InverseFunctions.jl
* update license
* affero
* copyright
* update CI to 1.6
* xform => TV.as
* oops missed a conflict
* fix merge corruption
* typo
* fix license
* Update README.md
* merge
* enumerate instead of zip
* bugfix
* inline rand
* drop `static` from `insupport` results
* update proxies
* Move ConditionalMeasure to MeasureBase
* IfElse.ifelse(p::Bernoulli, t, f)
* IfElseMeasure
* update some base measures
* test broken :(
* fix some redundancies
* instance_type => Core.Typeof
* update testvalue for Bernoulli and Binomial
* un-break broken test (now passing)
* Fall-back `For` method for when inference fails
* drop extra spaces
* more whitespace
* bump MeasureBase dependency version
* add newline
* tidy up
* ifelse tests
* EOF newline
* avoid type piracy
* add Julia 1.7 to CI
* make Julia 1.6 happy
* approx instead of ==
* Require at least Julia 1.6
* Try Sebastian's idea: test_measures = Any[]
* Another Any[]
* Drop Likelihood test
* drop 1.7 CI (seems buggy?)
* bump version
---
 .github/workflows/ci.yml           |  1 -
 LICENSE                            |  4 ++--
 Project.toml                       |  8 +++++---
 src/MeasureTheory.jl               |  9 ++++++---
 src/combinators/affine.jl          |  2 +-
 src/combinators/conditional.jl     | 14 --------------
 src/combinators/for.jl             | 10 ++++++----
 src/combinators/ifelse.jl          | 22 ++++++++++++++++++++++
 src/combinators/product.jl         | 14 +++++++-------
 src/combinators/transforms.jl      | 10 +++++-----
 src/combinators/weighted.jl        |  2 +-
 src/const.jl                       |  2 +-
 src/distproxy.jl                   | 25 ++-----------------------
 src/parameterized.jl               |  2 +-
 src/parameterized/bernoulli.jl     | 12 +++++++-----
 src/parameterized/beta.jl          |  6 +++---
 src/parameterized/binomial.jl      | 10 ++++++----
 src/parameterized/cauchy.jl        |  6 +++---
 src/parameterized/dirichlet.jl     |  6 +++---
 src/parameterized/exponential.jl   | 17 +++++++++--------
 src/parameterized/gumbel.jl        |  4 ++--
 src/parameterized/inverse-gamma.jl |  2 +-
 src/parameterized/laplace.jl       |  2 +-
 src/parameterized/lkj-cholesky.jl  |  6 +++---
 src/parameterized/multinomial.jl   |  2 +-
 src/parameterized/normal.jl        | 10 +++++-----
 src/parameterized/poisson.jl       |  2 +-
 src/parameterized/studentt.jl      | 10 +++------
 src/parameterized/uniform.jl       | 20 +++++++++++++------
 src/smart-constructors.jl          |  2 +-
 src/transforms/ordered.jl          |  4 ++--
 test/runtests.jl                   | 13 ++++++++++---
 32 files changed, 133 insertions(+), 126 deletions(-)
 delete mode 100644 src/combinators/conditional.jl
 create mode 100644 src/combinators/ifelse.jl

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 4c8cba3d..ce867052 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -23,7 +23,6 @@ jobs:
       matrix:
         version:
           - '1.6'
-          - '1'
         os:
           - ubuntu-latest
          - macOS-latest
diff --git a/LICENSE b/LICENSE
index 27ce3d56..4ec33e17 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2020-2021 Chad Scherrer and contributors
+Copyright (c) 2020-2022 Chad Scherrer and contributors
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -18,4 +18,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
\ No newline at end of file
+SOFTWARE.
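Before the source diffs, a note for downstream users: the two most visible renames in this release are xform => TV.as (measures now extend TransformVariables.as directly, avoiding type piracy) and distproxy => proxy (delegation to Distributions.jl). A minimal before/after sketch, illustrative only and not part of the patch; it assumes the `TV` alias and `proxy` are exported, as the commit list and the diffs below indicate:

    using MeasureTheory        # v0.15, with this patch applied

    d = Beta(α = 2.0, β = 3.0)

    t = TV.as(d)    # v0.14 spelled this xform(d); for Beta it is as𝕀, the unit-interval transform
    p = proxy(d)    # v0.14 spelled this distproxy(d); gives Dists.Beta(2.0, 3.0)
    rand(d)         # sampling is unchanged
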
diff --git a/Project.toml b/Project.toml
index 2bd59bc9..ac237fab 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "MeasureTheory"
 uuid = "eadaa1a4-d27c-401d-8699-e962e1bbc33b"
 authors = ["Chad Scherrer and contributors"]
-version = "0.14.0"
+version = "0.15.0"
 
 [deps]
 Accessors = "7d9f7c33-5ae7-4f3b-8dc6-eff91059b697"
@@ -14,6 +14,7 @@ Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
 DynamicIterators = "6c76993d-992e-5bf1-9e63-34920a5a5a38"
 FLoops = "cc61a311-1640-44b5-9fba-1b764f453329"
 FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
+IfElse = "615f187c-cbe4-4ef1-ba3b-2fcf58d6d173"
 Infinities = "e1ba4f0e-776d-440f-acd9-e1d2e9742647"
 InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
 InverseFunctions = "3587e190-3f89-42d0-90ee-14403ec27112"
@@ -49,6 +50,7 @@ Distributions = "0.25"
 DynamicIterators = "0.4"
 FLoops = "0.2"
 FillArrays = "0.12, 0.13"
+IfElse = "0.1"
 Infinities = "0.1"
 InverseFunctions = "0.1"
 KeywordCalls = "0.2"
@@ -57,7 +59,7 @@ LogExpFunctions = "0.3.3"
 MLStyle = "0.4"
 MacroTools = "0.5"
 MappedArrays = "0.4"
-MeasureBase = "0.6"
+MeasureBase = "0.7"
 NamedTupleTools = "0.13, 0.14"
 NestedTuples = "0.3"
 PositiveFactorizations = "0.2"
@@ -69,7 +71,7 @@ StaticArrays = "1.3"
 StatsFuns = "0.9"
 TransformVariables = "0.5, 0.6"
 Tricks = "0.1"
-julia = "1.5"
+julia = "1.6"
 
 [extras]
 Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
diff --git a/src/MeasureTheory.jl b/src/MeasureTheory.jl
index 99c25f3f..a5c49cae 100644
--- a/src/MeasureTheory.jl
+++ b/src/MeasureTheory.jl
@@ -45,8 +45,9 @@ import LogExpFunctions
 import NamedTupleTools
 import InverseFunctions: inverse
 export inverse
+export ifelse
 
-import MeasureBase: insupport, instance_type, instance, marginals
+import MeasureBase: insupport, instance, marginals
 import MeasureBase: testvalue, logdensity_def, density_def, basemeasure, kleisli, params, paramnames, ∫, 𝒹, ∫exp
 import MeasureBase: ≪
@@ -64,6 +65,8 @@ import Base: rand
 
 using Reexport
 @reexport using MeasureBase
+import IfElse: ifelse
+@reexport using IfElse
 
 using Tricks: static_hasmethod
 
@@ -114,7 +117,6 @@ include("combinators/weighted.jl")
 include("combinators/product.jl")
 include("combinators/transforms.jl")
 include("combinators/exponential-families.jl")
-include("combinators/conditional.jl")
 
 include("resettable-rng.jl")
 include("realized.jl")
@@ -123,7 +125,6 @@ include("combinators/chain.jl")
 include("distributions.jl")
 include("smart-constructors.jl")
 
-
 include("parameterized/normal.jl")
 include("parameterized/studentt.jl")
 include("parameterized/cauchy.jl")
@@ -142,6 +143,8 @@ include("parameterized/multinomial.jl")
 include("parameterized/lkj-cholesky.jl")
 include("parameterized/negativebinomial.jl")
 
+include("combinators/ifelse.jl")
+
 include("transforms/corrcholesky.jl")
 include("transforms/ordered.jl")
diff --git a/src/combinators/affine.jl b/src/combinators/affine.jl
index c4f538d3..d437658f 100644
--- a/src/combinators/affine.jl
+++ b/src/combinators/affine.jl
@@ -291,4 +291,4 @@ end
 
 @inline function insupport(d::Affine, x)
     insupport(d.parent, inverse(d.f)(x))
-end
\ No newline at end of file
+end
diff --git a/src/combinators/conditional.jl b/src/combinators/conditional.jl
deleted file mode 100644
index e227f4b5..00000000
--- a/src/combinators/conditional.jl
+++ /dev/null
@@ -1,14 +0,0 @@
-using NestedTuples: lazymerge
-
-struct ConditionalMeasure{M,C} <: AbstractMeasure
-    parent::M
-    constraint::C
-end
-
-Base.:|(μ::AbstractMeasure, constraint) = ConditionalMeasure(μ, constraint)
-
-@inline function logdensity_def(cm::ConditionalMeasure, x)
-    logdensity_def(cm.parent, lazymerge(cm.constraint, x))
-end
-
-@inline basemeasure(cm::ConditionalMeasure) = basemeasure(cm.parent) | cm.constraint
\ No newline at end of file
diff --git a/src/combinators/for.jl b/src/combinators/for.jl
index 0c7cb320..9af0766c 100644
--- a/src/combinators/for.jl
+++ b/src/combinators/for.jl
@@ -8,17 +8,19 @@ struct For{T, F, I} <: AbstractProductMeasure
     inds::I
 
     @inline function For{T}(f::F, inds::I) where {T,F,I<:Tuple}
-        new{T,instance_type(f),I}(f, inds)
+        new{T,Core.Typeof(f),I}(f, inds)
     end
 
     @inline For{T,F,I}(f::F, inds::I) where {T,F,I} = new{T,F,I}(f,inds)
 
     function For{Union{},F,I}(f::F, inds::I) where {F,I}
         println("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")
+        @warn "Empty `For` construction. This should not be happening"
+        @show f(first(zip(inds...))...)
         println.(stacktrace())
         println("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")
-        @show f(first(zip(inds...))...)
-        @error "Empty `For` construction"
+        # @error "Empty `For` construction"
+        return ProductMeasure(mappedarray(i -> f(Tuple(i)...), CartesianIndices(inds...)))
     end
 end
@@ -267,7 +269,7 @@ julia> For(eachrow(rand(4,2))) do x Normal(x[1], x[2]) end |> marginals |> colle
 function Random.rand!(rng::AbstractRNG, d::For{T,F,I}, x) where {T,F,I}
     mar = marginals(d)
-    @inbounds for (dⱼ, j) in zip(mar, eachindex(x))
+    @inbounds for (j, dⱼ) in enumerate(mar)
         x[j] = rand(rng, dⱼ)
     end
     return x
diff --git a/src/combinators/ifelse.jl b/src/combinators/ifelse.jl
new file mode 100644
index 00000000..a0cb89ec
--- /dev/null
+++ b/src/combinators/ifelse.jl
@@ -0,0 +1,22 @@
+struct IfElseMeasure{B,T,F} <: AbstractMeasure
+    b::B
+    t::T
+    f::F
+end
+
+insupport(d::IfElseMeasure, x) = insupport(d.t, x) || insupport(d.f, x)
+
+function logdensity_def(d::IfElseMeasure, x)
+    p = mean(d.b)
+    logdensity_def(p * d.t + (1 - p) * d.f, x)
+end
+
+basemeasure(d::IfElseMeasure) = d.t + d.f
+
+@inline function Base.rand(rng::AbstractRNG, ::Type{T}, m::IfElseMeasure) where {T}
+    c = rand(rng, T, m.b)
+    result = ifelse(c, m.t, m.f)
+    rand(rng, T, result)
+end
+
+IfElse.ifelse(b::Bernoulli, t, f) = IfElseMeasure(b, t, f)
diff --git a/src/combinators/product.jl b/src/combinators/product.jl
index 709f1331..0a0e19a8 100644
--- a/src/combinators/product.jl
+++ b/src/combinators/product.jl
@@ -1,21 +1,21 @@
-function xform(d::PowerMeasure)
-    as(Array, xform(d.parent), length.(d.axes)...)
+function TV.as(d::PowerMeasure)
+    as(Array, as(d.parent), length.(d.axes)...)
 end
 
-function xform(d::ProductMeasure{A}) where {A<:AbstractArray}
+function TV.as(d::ProductMeasure{A}) where {A<:AbstractArray}
     d1 = marginals(d).f(first(marginals(d).data))
-    as(Array, xform(d1), size(marginals(d))...)
+    as(Array, TV.as(d1), size(marginals(d))...)
 end
 
 ###############################################################################
 # I <: Base.Generator
 
-function xform(d::ProductMeasure{<:Base.Generator})
+function TV.as(d::ProductMeasure{<:Base.Generator})
     d1 = marginals(d).f(first(marginals(d).iter))
-    as(Array, xform(d1), size(marginals(d))...)
+    as(Array, as(d1), size(marginals(d))...)
 end
 
-# function xform(d::ProductMeasure{Returns{T},F,A}) where {T,F,A<:AbstractArray}
+# function TV.as(d::ProductMeasure{Returns{T},F,A}) where {T,F,A<:AbstractArray}
 #     as(Array, as(d.f.f.value), size(d.xs))
 # end
diff --git a/src/combinators/transforms.jl b/src/combinators/transforms.jl
index c92d5788..f9f88c7c 100644
--- a/src/combinators/transforms.jl
+++ b/src/combinators/transforms.jl
@@ -28,7 +28,7 @@ end
 
 Pullback(f, ν) = Pullback(f, ν, True())
 
-insupport(d::Pullback, x) = insupport(d.μ, d.f(x))
+insupport(d::Pullback, x) = insupport(d.ν, d.f(x))
 
 function Pretty.tile(pf::Pullback{<:TV.CallableTransform})
     Pretty.list_layout(Pretty.tile.([pf.f.t, pf.ν, pf.logjac]); prefix=:Pullback)
@@ -87,14 +87,14 @@ basemeasure(μ::Pullback) = Pullback(μ.f, basemeasure(μ.ν), False())
 
 basemeasure(ν::Pushforward) = Pushforward(ν.f, basemeasure(ν.μ), False())
 
-xform(ν::Pushforward) = ν.f ∘ as(ν.μ)
+TV.as(ν::Pushforward) = ν.f ∘ as(ν.μ)
 
-xform(μ::Pullback) = TV.inverse(μ.f) ∘ μ.ν
+TV.as(μ::Pullback) = TV.inverse(μ.f) ∘ μ.ν
 
-xform(::Lebesgue) = asℝ
+TV.as(::Lebesgue) = asℝ
 
 # TODO: Make this work for affine embeddings
-xform(d::Affine) = _as_affine(_firstval(d))
+TV.as(d::Affine) = _as_affine(_firstval(d))
 
 _firstval(d::Affine) = first(values(getfield(getfield(d, :f), :par)))
 _as_affine(x::Real) = asℝ
diff --git a/src/combinators/weighted.jl b/src/combinators/weighted.jl
index 250e4cef..84aae8c4 100644
--- a/src/combinators/weighted.jl
+++ b/src/combinators/weighted.jl
@@ -1,2 +1,2 @@
-xform(μ::AbstractWeightedMeasure) = xform(μ.base)
+TV.as(μ::AbstractWeightedMeasure) = TV.as(μ.base)
diff --git a/src/const.jl b/src/const.jl
index 66adf330..5f4e2add 100644
--- a/src/const.jl
+++ b/src/const.jl
@@ -5,7 +5,7 @@ end
 
 asConst(x) = AsConst(x)
 
-xform(c::Returns) = AsConst(c.value)
+TV.as(d::Dirac) = AsConst(d.x)
 
 TV.dimension(t::AsConst) = 0
diff --git a/src/distproxy.jl b/src/distproxy.jl
index 09703018..052f3d47 100644
--- a/src/distproxy.jl
+++ b/src/distproxy.jl
@@ -1,7 +1,5 @@
-export distproxy
-function distproxy end
-
-# import MonteCarloMeasurements
+export proxy
+function proxy end
 
 PROXIES = Dict(
     :Distributions => [
@@ -10,9 +8,6 @@ PROXIES = Dict(
         :entropy
         :cdf
     ],
-    # :MonteCarloMeasurements => [
-    #     :Particles
-    # ]
 )
 
 for m in keys(PROXIES)
@@ -24,19 +19,3 @@ for m in keys(PROXIES)
         end
     end
 end
-
-# MonteCarloMeasurements.Particles(N::Int, d::AbstractMeasure) = MonteCarloMeasurements.Particles(N, distproxy(d))
-
-# using MonteCaroMeasurements
-
-# MonteCaroMeasurementsPROXIES = [
-#     :Particles
-# ]
-
-# for f in DistributionsPROXIES
-#     @eval begin
-#         import Distributions: $f
-#         export $f
-#         Distributions.$f(d::AbstractMeasure) = Distributions.$f(MeasureTheory.distproxy(d))
-#     end
-# end
diff --git a/src/parameterized.jl b/src/parameterized.jl
index 6d6ad147..354f7d01 100644
--- a/src/parameterized.jl
+++ b/src/parameterized.jl
@@ -74,4 +74,4 @@ function asparams(μ::M, nt::NamedTuple = NamedTuple()) where {M<:ParameterizedM
     asparams(constructorof(M), nt)
 end
 
-xform(::Half) = asℝ₊
+TV.as(::Half) = asℝ₊
diff --git a/src/parameterized/bernoulli.jl b/src/parameterized/bernoulli.jl
index ec606e85..7f83a25c 100644
--- a/src/parameterized/bernoulli.jl
+++ b/src/parameterized/bernoulli.jl
@@ -7,7 +7,11 @@ import Base
 
 @kwstruct Bernoulli(p)
 
-basemeasure(::Bernoulli) = Counting(Bool)
+basemeasure(::Bernoulli) = CountingMeasure()
+
+testvalue(::Bernoulli) = true
+
+insupport(::Bernoulli, x) = x == true || x == false
 
 @inline function logdensity_def(d::Bernoulli{(:p,)}, y)
     p = d.p
@@ -40,7 +44,5 @@ Base.rand(rng::AbstractRNG, T::Type, d::Bernoulli{(:logitp,)}) =
 asparams(::Type{<:Bernoulli}, ::StaticSymbol{:p}) = as𝕀
 asparams(::Type{<:Bernoulli}, ::StaticSymbol{:logitp}) = asℝ
 
-distproxy(d::Bernoulli{(:p,)}) = Dists.Bernoulli(d.p)
-distproxy(d::Bernoulli{(:logitp,)}) = Dists.Bernoulli(logistic(d.logitp))
-
-insupport(::Bernoulli, x) = x ∈ (true, false)
\ No newline at end of file
+proxy(d::Bernoulli{(:p,)}) = Dists.Bernoulli(d.p)
+proxy(d::Bernoulli{(:logitp,)}) = Dists.Bernoulli(logistic(d.logitp))
diff --git a/src/parameterized/beta.jl b/src/parameterized/beta.jl
index 271ff717..5a9d9dc9 100644
--- a/src/parameterized/beta.jl
+++ b/src/parameterized/beta.jl
@@ -13,7 +13,7 @@ export Beta
     beta => β
 ]
 
-xform(::Beta) = as𝕀
+TV.as(::Beta) = as𝕀
 
 @inline function logdensity_def(d::Beta{(:α, :β),Tuple{A,B}}, x::X) where {A,B,X}
     return xlogy(d.α - 1, x) + xlog1py(d.β - 1, -x)
@@ -28,10 +28,10 @@ end
 
 Base.rand(rng::AbstractRNG, T::Type, μ::Beta) = rand(rng, Dists.Beta(μ.α, μ.β))
 
-distproxy(d::Beta{(:α, :β)}) = Dists.Beta(d.α, d.β)
+proxy(d::Beta{(:α, :β)}) = Dists.Beta(d.α, d.β)
 
 asparams(::Type{<:Beta}, ::StaticSymbol{:α}) = asℝ₊
 asparams(::Type{<:Beta}, ::StaticSymbol{:β}) = asℝ₊
 
 insupport(::Beta, x) = in𝕀(x)
-insupport(::Beta) = in𝕀
\ No newline at end of file
+insupport(::Beta) = in𝕀
diff --git a/src/parameterized/binomial.jl b/src/parameterized/binomial.jl
index a378c13f..e3ee5a7a 100644
--- a/src/parameterized/binomial.jl
+++ b/src/parameterized/binomial.jl
@@ -9,7 +9,9 @@ probit(p) = sqrt2 * erfinv(2p - 1)
 
 @parameterized Binomial(n, p)
 
-basemeasure(d::Binomial) = Counting(BoundedInts(static(0), d.n))
+basemeasure(d::Binomial) = CountingMeasure()
+
+testvalue(::Binomial) = 0
 
 ###############################################################################
 @kwstruct Binomial(n, p)
@@ -53,9 +55,9 @@ function Base.rand(rng::AbstractRNG, ::Type, d::Binomial{(:n, :probitp), Tuple{I
     rand(rng, Dists.Binomial(d.n, Φ(d.probitp)))
 end
 
-distproxy(d::Binomial{(:n, :p), Tuple{I, A}}) where {I<:Integer, A} = Dists.Binomial(d.n, d.p)
-distproxy(d::Binomial{(:n, :logitp), Tuple{I, A}}) where {I<:Integer, A} = Dists.Binomial(d.n, logistic(d.logitp))
-distproxy(d::Binomial{(:n, :probitp), Tuple{I, A}}) where {I<:Integer, A} = Dists.Binomial(d.n, Φ(d.probitp))
+proxy(d::Binomial{(:n, :p), Tuple{I, A}}) where {I<:Integer, A} = Dists.Binomial(d.n, d.p)
+proxy(d::Binomial{(:n, :logitp), Tuple{I, A}}) where {I<:Integer, A} = Dists.Binomial(d.n, logistic(d.logitp))
+proxy(d::Binomial{(:n, :probitp), Tuple{I, A}}) where {I<:Integer, A} = Dists.Binomial(d.n, Φ(d.probitp))
 
 asparams(::Type{<:Binomial}, ::StaticSymbol{:p}) = as𝕀
 asparams(::Type{<:Binomial}, ::StaticSymbol{:logitp}) = asℝ
diff --git a/src/parameterized/cauchy.jl b/src/parameterized/cauchy.jl
index 538c4f6e..207258df 100644
--- a/src/parameterized/cauchy.jl
+++ b/src/parameterized/cauchy.jl
@@ -40,12 +40,12 @@ end
 
 ≪(::Cauchy, ::Lebesgue{X}) where {X<:Real} = true
 
-xform(::Cauchy) = asℝ
+TV.as(::Cauchy) = asℝ
 
 @half Cauchy
 
 HalfCauchy(σ) = HalfCauchy(σ = σ)
 
-distproxy(d::Cauchy{()}) = Dists.Cauchy()
+proxy(d::Cauchy{()}) = Dists.Cauchy()
 
-insupport(::Cauchy, x) = true
\ No newline at end of file
+insupport(::Cauchy, x) = true
diff --git a/src/parameterized/dirichlet.jl b/src/parameterized/dirichlet.jl
index 241e663c..078fec7b 100644
--- a/src/parameterized/dirichlet.jl
+++ b/src/parameterized/dirichlet.jl
@@ -7,11 +7,11 @@ using FillArrays
 
 @parameterized Dirichlet(α)
 
-xform(d::Dirichlet{(:α,)}) = TV.UnitSimplex(length(d.α))
+TV.as(d::Dirichlet{(:α,)}) = TV.UnitSimplex(length(d.α))
 
 @inline function basemeasure(μ::Dirichlet{(:α,)})
     α = μ.α
-    t = xform(μ)
+    t = as(μ)
     d = TV.dimension(t)
     logw = loggamma(sum(α)) - sum(loggamma, α)
     return WeightedMeasure(logw, Lebesgue(Simplex()))
@@ -32,7 +32,7 @@ end
 
 Base.rand(rng::AbstractRNG, T::Type, μ::Dirichlet) = rand(rng, Dists.Dirichlet(μ.α))
 
-distproxy(d::Dirichlet{(:α,)}) = Dists.Dirichlet(d.α)
+proxy(d::Dirichlet{(:α,)}) = Dists.Dirichlet(d.α)
 
 function testvalue(d::Dirichlet{(:α,)})
     n = length(d.α)
diff --git a/src/parameterized/exponential.jl b/src/parameterized/exponential.jl
index bdd4fd22..d2748d3f 100644
--- a/src/parameterized/exponential.jl
+++ b/src/parameterized/exponential.jl
@@ -3,7 +3,10 @@
 export Exponential
 
-@parameterized Exponential(β) ≃ Lebesgue(ℝ₊)
+@parameterized Exponential(β)
+
+insupport(::Exponential, x) = x ≥ 0
+basemeasure(::Exponential) = Lebesgue()
 
 @kwstruct Exponential()
 
@@ -13,7 +16,7 @@ end
 
 Base.rand(rng::AbstractRNG, T::Type, μ::Exponential{()}) = randexp(rng, T)
 
-xform(::Exponential) = asℝ₊
+TV.as(::Exponential) = asℝ₊
 
 ##########################
 # Scale β
@@ -29,7 +32,7 @@ end
     return logdensity_def(Exponential(), z) - log(d.β)
 end
 
-distproxy(d::Exponential{(:β,)}) = Dists.Exponential(d.β)
+proxy(d::Exponential{(:β,)}) = Dists.Exponential(d.β)
 
 asparams(::Type{<:Exponential}, ::StaticSymbol{:β}) = asℝ₊
 
@@ -47,7 +50,7 @@ end
     return logdensity_def(Exponential(), z) - d.logβ
 end
 
-distproxy(d::Exponential{(:logβ,)}) = Dists.Exponential(exp(d.logβ))
+proxy(d::Exponential{(:logβ,)}) = Dists.Exponential(exp(d.logβ))
 
 asparams(::Type{<:Exponential}, ::StaticSymbol{:logβ}) = asℝ
 
@@ -65,7 +68,7 @@ end
     return logdensity_def(Exponential(), z) + log(d.λ)
 end
 
-distproxy(d::Exponential{(:λ,)}) = Dists.Exponential(1 / d.λ)
+proxy(d::Exponential{(:λ,)}) = Dists.Exponential(1 / d.λ)
 
 asparams(::Type{<:Exponential}, ::StaticSymbol{:λ}) = asℝ₊
 
@@ -83,8 +86,6 @@ end
     return logdensity_def(Exponential(), z) + d.logλ
 end
 
-distproxy(d::Exponential{(:logλ,)}) = Dists.Exponential(exp(-d.logλ))
+proxy(d::Exponential{(:logλ,)}) = Dists.Exponential(exp(-d.logλ))
 
 asparams(::Type{<:Exponential}, ::StaticSymbol{:logλ}) = asℝ
-
-insupport(::Exponential, x) = x ≥ 0
\ No newline at end of file
diff --git a/src/parameterized/gumbel.jl b/src/parameterized/gumbel.jl
index bcad375a..2ed3dd44 100644
--- a/src/parameterized/gumbel.jl
+++ b/src/parameterized/gumbel.jl
@@ -33,10 +33,10 @@ function Base.rand(rng::AbstractRNG, d::Gumbel{()})
     -log(-log(u))
 end
 
-xform(::Gumbel) = asℝ
+TV.as(::Gumbel) = asℝ
 
 ≪(::Gumbel, ::Lebesgue{X}) where {X<:Real} = true
 
-distproxy(::Gumbel{()}) = Dists.Gumbel()
+proxy(::Gumbel{()}) = Dists.Gumbel()
 
 insupport(::Gumbel, x) = true
\ No newline at end of file
diff --git a/src/parameterized/inverse-gamma.jl b/src/parameterized/inverse-gamma.jl
index 1bd3140f..f2e5b87c 100644
--- a/src/parameterized/inverse-gamma.jl
+++ b/src/parameterized/inverse-gamma.jl
@@ -17,6 +17,6 @@ Base.rand(rng::AbstractRNG, T::Type, μ::InverseGamma{(:shape,)}) =
 
 ≪(::InverseGamma, ::Lebesgue{X}) where {X<:Real} = true
 
-xform(::InverseGamma) = asℝ₊
+TV.as(::InverseGamma) = asℝ₊
 
 # @μσ_methods InverseGamma(shape)
diff --git a/src/parameterized/laplace.jl b/src/parameterized/laplace.jl
index 7fe8c967..2c39ff6f 100644
--- a/src/parameterized/laplace.jl
+++ b/src/parameterized/laplace.jl
@@ -39,4 +39,4 @@ Base.rand(rng::AbstractRNG, ::Type{T}, μ::Laplace) where {T} = Base.rand(rng, T
 
 ≪(::Laplace, ::Lebesgue{X}) where {X<:Real} = true
 
-xform(::Laplace) = asℝ
+TV.as(::Laplace) = asℝ
diff --git a/src/parameterized/lkj-cholesky.jl b/src/parameterized/lkj-cholesky.jl
index 2e91a11b..78c46154 100644
--- a/src/parameterized/lkj-cholesky.jl
+++ b/src/parameterized/lkj-cholesky.jl
@@ -77,17 +77,17 @@ end
     return s
 end
 
-xform(d::LKJCholesky) = CorrCholesky(d.k)
+TV.as(d::LKJCholesky) = CorrCholesky(d.k)
 
 @inline function basemeasure(d::LKJCholesky{(:k, :η)})
-    t = xform(d)
+    t = TV.as(d)
     base = Pushforward(t, Lebesgue(ℝ)^TV.dimension(t), False())
     WeightedMeasure(Dists.lkj_logc0(d.k, d.η), base)
 end
 
 @inline function basemeasure(d::LKJCholesky{(:k, :logη)})
-    t = xform(d)
+    t = TV.as(d)
     η = exp(d.logη)
     base = Pushforward(t, Lebesgue(ℝ)^TV.dimension(t), False())
     WeightedMeasure(Dists.lkj_logc0(d.k, η), base)
diff --git a/src/parameterized/multinomial.jl b/src/parameterized/multinomial.jl
index 520a1dc3..6ececb92 100644
--- a/src/parameterized/multinomial.jl
+++ b/src/parameterized/multinomial.jl
@@ -27,7 +27,7 @@ end
 
 Base.rand(rng::AbstractRNG, T::Type, μ::Multinomial) = rand(rng, Dists.Multinomial(μ.n, μ.p))
 
-distproxy(d::Multinomial{(:p,)}) = Dists.Multinomial(d.n, d.p)
+proxy(d::Multinomial{(:p,)}) = Dists.Multinomial(d.n, d.p)
 
 # Based on
 # https://github.com/JuliaMath/Combinatorics.jl/blob/c2114a71ccfc93052efb9a9379e62b81b9388ef8/src/factorials.jl#L99
diff --git a/src/parameterized/normal.jl b/src/parameterized/normal.jl
index b2e5acd6..1947dadd 100644
--- a/src/parameterized/normal.jl
+++ b/src/parameterized/normal.jl
@@ -32,9 +32,9 @@ for N in AFFINEPARS
     end
 end
 
-insupport(d::Normal, x) = static(true)
+insupport(d::Normal, x) = true
 
-insupport(d::Normal) = Returns(static(true))
+insupport(d::Normal) = Returns(true)
 
 @inline logdensity_def(d::Normal{()}, x) = -x^2 / 2
 @inline basemeasure(::Normal{()}) = WeightedMeasure(static(-0.5 * log2π), Lebesgue(ℝ))
@@ -51,7 +51,7 @@ Normal(μ, σ) = Normal((μ = μ, σ = σ))
 
 Normal(nt::NamedTuple{N,Tuple{Vararg{AbstractArray}}}) where {N} = MvNormal(nt)
 
-xform(::Normal) = asℝ
+TV.as(::Normal) = asℝ
 
 # `@kwalias` defines some alias names, giving users flexibility in the names
 # they use. For example, σ² is standard notation for the variance parameter, but
@@ -86,13 +86,13 @@ asparams(::Type{<:Normal}, ::StaticSymbol{:τ}) = asℝ₊
 asparams(::Type{<:Normal}, ::StaticSymbol{:logτ}) = asℝ
 
 # Rather than try to reimplement everything in Distributions, measures can have
-# a `distproxy` method. This just delegates some methods to the corresponding
+# a `proxy` method. This just delegates some methods to the corresponding
 # Distributions.jl methods. For example,
 #
 #     julia> entropy(Normal(2,4))
 #     2.805232894324563
 #
-distproxy(d::Normal{()}) = Dists.Normal()
+proxy(d::Normal{()}) = Dists.Normal()
 
 ###############################################################################
 # Some distributions have a "standard" version that takes no parameters
diff --git a/src/parameterized/poisson.jl b/src/parameterized/poisson.jl
index 0358b720..e4248867 100644
--- a/src/parameterized/poisson.jl
+++ b/src/parameterized/poisson.jl
@@ -32,4 +32,4 @@ Base.rand(rng::AbstractRNG, T::Type, d::Poisson{(:logλ,)}) =
 
 @inline function insupport(::Poisson, x)
     isinteger(x) && x ≥ 0
-end
\ No newline at end of file
+end
diff --git a/src/parameterized/studentt.jl b/src/parameterized/studentt.jl
index fae83093..034a9d32 100644
--- a/src/parameterized/studentt.jl
+++ b/src/parameterized/studentt.jl
@@ -59,12 +59,7 @@ xform(::StudentT) = asℝ
 
 Base.rand(rng::AbstractRNG, T::Type, μ::StudentT{(:ν,)}) = rand(rng, Dists.TDist(μ.ν))
 
-distproxy(d::StudentT{(:ν,)}) = Dists.TDist(d.ν)
-distproxy(d::StudentT{(:ν, :μ)}) = Dists.LocationScale(d.μ, 1.0, Dists.TDist(d.ν))
-distproxy(d::StudentT{(:ν, :σ)}) = Dists.LocationScale(0.0, d.σ, Dists.TDist(d.ν))
-distproxy(d::StudentT{(:ν, :ω)}) = Dists.LocationScale(0.0, inv(d.ω), Dists.TDist(d.ν))
-distproxy(d::StudentT{(:ν, :μ, :σ)}) = Dists.LocationScale(d.μ, d.σ, Dists.TDist(d.ν))
-distproxy(d::StudentT{(:ν, :μ, :ω)}) = Dists.LocationScale(d.μ, inv(d.ω), Dists.TDist(d.ν))
+proxy(d::StudentT{(:ν,)}) = Dists.TDist(d.ν)
 
 @half StudentT
 
@@ -72,4 +67,5 @@ HalfStudentT(ν, σ) = HalfStudentT((ν = ν, σ = σ))
 
 asparams(::Type{<:StudentT}, ::StaticSymbol{:ν}) = asℝ₊
 
-insupport(::StudentT, x) = static(true)
\ No newline at end of file
+insupport(::StudentT, x) = true
+insupport(::StudentT) = Returns(true)
diff --git a/src/parameterized/uniform.jl b/src/parameterized/uniform.jl
index 8a4040d0..8eaf66a1 100644
--- a/src/parameterized/uniform.jl
+++ b/src/parameterized/uniform.jl
@@ -5,6 +5,7 @@ export Uniform
 @parameterized Uniform()
 
 @kwstruct Uniform()
+@kwstruct Uniform(a,b)
 
 ###############################################################################
 # Standard Uniform
@@ -13,14 +14,11 @@ export Uniform
 insupport(::Uniform{()}) = in𝕀
 insupport(::Uniform{()}, x) = in𝕀(x)
 
-@inline function basemeasure(::Uniform{()})
-    constℓ = static(0.0)
-    varℓ = Returns(static(0.0))
-    base = Lebesgue(ℝ)
-    FactoredBase(constℓ, varℓ, base)
-end
+@inline basemeasure(::Uniform{()}) = Lebesgue(ℝ)
 
-distproxy(::Uniform{()}) = Dists.Uniform()
+proxy(::Uniform{()}) = Dists.Uniform()
+
+density_def(::Uniform{()}, x) = 1.0
 
 logdensity_def(d::Uniform{()}, x) = 0.0
 
@@ -30,3 +28,11 @@ Base.rand(rng::AbstractRNG, T::Type, μ::Uniform{()}) = rand(rng, T)
 
 ###############################################################################
 # Uniform
+
+@inline insupport(d::Uniform{(:a,:b)}, x) = d.a ≤ x ≤ d.b
+
+Uniform(a,b) = Uniform((a=a,b=b))
+
+proxy(d::Uniform{(:a,:b)}) = affine((μ=d.a, σ=d.b - d.a), Uniform())
+@useproxy Uniform{(:a,:b)}
+Base.rand(rng::Random.AbstractRNG, ::Type{T}, μ::Uniform) where {T} = rand(rng, T, proxy(μ))
\ No newline at end of file
diff --git a/src/smart-constructors.jl b/src/smart-constructors.jl
index 24e4674f..347c9d53 100644
--- a/src/smart-constructors.jl
+++ b/src/smart-constructors.jl
@@ -26,4 +26,4 @@ end
 
 function affine(f::AffineTransform{(:μ, :ω)}, parent::Lebesgue{RealNumbers})
     affine(AffineTransform((ω = f.ω,)), parent)
-end
\ No newline at end of file
+end
diff --git a/src/transforms/ordered.jl b/src/transforms/ordered.jl
index 3b9023c8..6b63fe07 100644
--- a/src/transforms/ordered.jl
+++ b/src/transforms/ordered.jl
@@ -37,11 +37,11 @@ function TV.transform_with(flag::TV.LogJacFlag, t::Ordered, x, index::T) where {
 
     x = mappedarray(xj -> xj + OrderedΔx, x)
 
-    @inbounds (y[1], ℓ, _) = TV.transform_with(flag, xform(Real, lo, hi), x, index)
+    @inbounds (y[1], ℓ, _) = TV.transform_with(flag, TV.as(Real, lo, hi), x, index)
     index += 1
 
     @inbounds for i in 2:len
-        (y[i], Δℓ, _) = TV.transform_with(flag, xform(Real, y[i-1], hi), x, index)
+        (y[i], Δℓ, _) = TV.transform_with(flag, as(Real, y[i-1], hi), x, index)
         ℓ = addlogjac(ℓ, Δℓ)
         index += 1
     end
diff --git a/test/runtests.jl b/test/runtests.jl
index 69dc5887..1f1b15f4 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -26,7 +26,7 @@ function test_measure(μ)
     logdensity_def(μ, testvalue(μ)) isa AbstractFloat
 end
 
-test_measures = [
+test_measures = Any[
     # Chain(x -> Normal(μ=x), Normal(μ=0.0))
     For(3) do j Normal(σ=j) end
    For(2,3) do i,j Normal(i,j) end
@@ -52,11 +52,10 @@ test_measures = Any[
     Dirac(0.0) + Normal()
 ]
 
-testbroken_measures = [
+testbroken_measures = Any[
     Pushforward(as𝕀, Normal())
     # InverseGamma(2) # Not defined yet
     # MvNormal(I(3)) # Entirely broken for now
-    Likelihood
     TrivialMeasure()
 ]
 
@@ -535,6 +534,7 @@ end
     d = ∫exp(x -> -x^2, Lebesgue(ℝ))
 
     μ = randn(3)
+    # σ = LowerTriangular(randn(3, 3))
     σ = let x = randn(10,3)
         cholesky(x' * x).L
     end
@@ -558,3 +558,10 @@ end
     @test logdensityof(b, x) ≈ logdensityof(d, inverse(b.f)(x)[1])
     @test logdensityof(b, b.f(y)) ≈ logdensityof(d^1, y)
 end
+
+@testset "IfElseMeasure" begin
+    p = rand()
+    x = randn()
+    @test logdensityof(MeasureTheory.ifelse(Bernoulli(p), Normal(), Normal()), x) ≈ logdensityof(Normal(), x)
+    @test logdensityof(MeasureTheory.ifelse(Bernoulli(p), Normal(2, 3), Normal()), x) ≈ logdensityof(p * Normal(2, 3) + (1 - p) * Normal(), x)
+end
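
The new IfElseMeasure is easiest to see end to end. A small usage sketch, illustrative only and mirroring the new testset above: sampling first draws the Bernoulli condition and then samples the chosen branch, while the log-density is that of the p-weighted mixture, since logdensity_def is defined through p * d.t + (1 - p) * d.f.

    using MeasureTheory

    p = 0.3
    m = ifelse(Bernoulli(p), Normal(2, 3), Normal())   # an IfElseMeasure; ifelse is exported by this patch

    x = rand(m)            # draws the Bernoulli condition, then samples the chosen branch
    logdensityof(m, x)     # matches the mixture p * Normal(2, 3) + (1 - p) * Normal()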
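
The other new user-facing constructor is Uniform(a, b), built as an affine pushforward of the standard uniform via proxy and @useproxy rather than with a hand-written density. A hedged sketch of the resulting behavior, assuming the delegation works as the uniform.jl diff suggests; the numeric comment is the expected value, not a quoted result:

    using MeasureTheory

    u = Uniform(2.0, 5.0)     # proxy(u) is affine((μ = 2.0, σ = 3.0), Uniform())
    insupport(u, 3.0)         # true, since 2.0 ≤ 3.0 ≤ 5.0
    rand(u)                   # a draw in [2.0, 5.0], taken through the affine proxy
    logdensityof(u, 3.0)      # expected ≈ -log(3), the log of 1/(b - a)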