-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add a test that runs everything (#12)
- Loading branch information
Showing
6 changed files
with
99 additions
and
28 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -25,12 +25,16 @@ dependencies = [ | |
"dm-haiku>=0.0.9", | ||
"flax>=0.6.3", | ||
"optax>=0.1.3", | ||
"surjectors@git+https://[email protected]/dirmeier/[email protected]", | ||
] | ||
dynamic = ["version"] | ||
|
||
[project.urls] | ||
homepage = "https://github.com/dirmeier/sbijax" | ||
|
||
[tool.hatch.metadata] | ||
allow-direct-references = true | ||
|
||
[tool.hatch.version] | ||
path = "sbijax/__init__.py" | ||
|
||
|
@@ -50,7 +54,7 @@ dependencies = [ | |
|
||
[tool.hatch.envs.test.scripts] | ||
lint = 'pylint sbijax' | ||
test = 'pytest -v --doctest-modules --cov=./sbi --cov-report=xml sbijax' | ||
test = 'pytest -v --doctest-modules --cov=./sbijax --cov-report=xml sbijax' | ||
|
||
|
||
[tool.black] | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,6 +1,80 @@ | ||
# pylint: skip-file | ||
import chex | ||
|
||
import distrax | ||
import haiku as hk | ||
import optax | ||
from jax import numpy as jnp | ||
from surjectors import Chain, MaskedCoupling, TransformedDistribution | ||
from surjectors.conditioners import mlp_conditioner | ||
from surjectors.util import make_alternating_binary_mask | ||
|
||
from sbijax import SNL | ||
|
||
|
||
def prior_model_fns():
    """Return the (sampler, log-density) pair for the uniform prior.

    The prior is Uniform(-3, 3) over each of the two parameter
    dimensions, wrapped in Independent so log_prob sums over the
    event dimension.
    """
    prior = distrax.Independent(
        distrax.Uniform(jnp.full(2, -3.0), jnp.full(2, 3.0)), 1
    )
    return prior.sample, prior.log_prob
|
||
|
||
def simulator_fn(seed, theta):
    """Simulate an observation: a Gaussian perturbation of ``theta``.

    Draws from N(theta, 0.1 * I) using ``seed`` as the PRNG key.
    """
    likelihood = distrax.MultivariateNormalDiag(
        theta, 0.1 * jnp.ones_like(theta)
    )
    return likelihood.sample(seed=seed)
|
||
|
||
def log_density_fn(theta, y):
    """Evaluate the unnormalized joint log-density.

    Computes log p(theta) + log p(y | theta) where the prior is
    elementwise Uniform(-3, 3) and the likelihood is N(theta, 0.1 * I);
    both terms are summed over their components.
    """
    prior_lp = distrax.Uniform(
        jnp.full(2, -3.0), jnp.full(2, 3.0)
    ).log_prob(theta)
    likelihood_lp = distrax.MultivariateNormalDiag(
        theta, 0.1 * jnp.ones_like(theta)
    ).log_prob(y)
    return jnp.sum(prior_lp) + jnp.sum(likelihood_lp)
|
||
|
||
def make_model(dim):
    """Build a Haiku-transformed masked-coupling normalizing flow.

    The flow has two MaskedCoupling layers with alternating binary
    masks over ``dim`` dimensions, an affine (shift/scale) bijector
    driven by a small MLP conditioner, and a standard-normal base
    distribution. Returns the transformed (RNG-free) Haiku object.
    """

    def _bijector_fn(params):
        # The conditioner emits 2*dim values: split into shift and
        # log-scale halves along the last axis.
        shift, log_scale = jnp.split(params, 2, -1)
        return distrax.ScalarAffine(shift, jnp.exp(log_scale))

    def _flow(method, **kwargs):
        coupling_layers = [
            MaskedCoupling(
                mask=make_alternating_binary_mask(dim, idx % 2 == 0),
                bijector=_bijector_fn,
                conditioner=mlp_conditioner([8, 8, dim * 2]),
            )
            for idx in range(2)
        ]
        base_distribution = distrax.Independent(
            distrax.Normal(jnp.zeros(dim), jnp.ones(dim)),
            1,
        )
        flow = TransformedDistribution(base_distribution, Chain(coupling_layers))
        return flow(method, **kwargs)

    return hk.without_apply_rng(hk.transform(_flow))
|
||
|
||
def test_snl():
    """Smoke-test the full SNL pipeline end to end.

    Fits a single round of sequential neural likelihood on a toy
    Gaussian problem with a slice sampler, then draws posterior
    samples. Passes if no exception is raised.
    """
    # NOTE(review): removed `chex.assert_equal(1, 1)` — a placeholder
    # assertion that can never fail and verifies nothing.
    rng_seq = hk.PRNGSequence(0)
    y_observed = jnp.array([-1.0, 1.0])

    prior_simulator_fn, prior_logdensity_fn = prior_model_fns()
    fns = (prior_simulator_fn, prior_logdensity_fn), simulator_fn

    snl = SNL(fns, make_model(2))
    params, _ = snl.fit(
        next(rng_seq),
        y_observed,
        n_rounds=1,
        optimizer=optax.adam(1e-4),
        sampler="slice",
    )
    _ = snl.sample_posterior(params, 2, 100, 50, sampler="slice")