% poster.bib — BibTeX references for the poster.
% (GitHub page chrome and rendered line-number gutter removed from scraped copy.)
@article{breiman:2001,
  author  = {Breiman, Leo},
  title   = {Random Forests},
  journal = {Machine Learning},
  volume  = {45},
  number  = {1},
  pages   = {5--32},
  year    = {2001},
  doi     = {10.1023/A:1010933404324}
}
@book{breiman:etal:1984,
  author    = {Breiman, Leo and Friedman, Jerome H. and Olshen, Richard A. and Stone, Charles J.},
  title     = {Classification and Regression Trees},
  series    = {The Wadsworth and Brooks-Cole statistics-probability series},
  publisher = {Taylor \& Francis},
  year      = {1984}
}
@article{marin2012approximate,
  author    = {Marin, Jean-Michel and Pudlo, Pierre and Robert, Christian P. and Ryder, Robin J.},
  title     = {Approximate {Bayesian} computational methods},
  journal   = {Statistics and Computing},
  volume    = {22},
  number    = {6},
  pages     = {1167--1180},
  year      = {2012},
  publisher = {Springer},
  doi       = {10.1007/s11222-011-9288-2}
}
@article{pudlo2015reliable,
  author    = {Pudlo, Pierre and Marin, Jean-Michel and Estoup, Arnaud and Cornuet, Jean-Marie and Gautier, Mathieu and Robert, Christian P.},
  title     = {Reliable {ABC} model choice via random forests},
  journal   = {Bioinformatics},
  volume    = {32},
  number    = {6},
  pages     = {859--866},
  year      = {2016},
  publisher = {Oxford University Press},
  doi       = {10.1093/bioinformatics/btv684},
  internal-note = {Printed issue is 32(6), March 2016; advance access November 2015 (hence the key).}
}
@article{raynal2016abc,
  author   = {Raynal, Louis and Marin, Jean-Michel and Pudlo, Pierre and Ribatet, Mathieu and Robert, Christian P. and Estoup, Arnaud},
  title    = {{ABC} random forests for {Bayesian} parameter inference},
  journal  = {Bioinformatics},
  volume   = {35},
  number   = {10},
  pages    = {1720--1728},
  year     = {2018},
  month    = oct,
  abstract = {Approximate Bayesian computation (ABC) has grown into a standard methodology that manages Bayesian inference for models associated with intractable likelihood functions. Most ABC implementations require the preliminary selection of a vector of informative statistics summarizing raw data. Furthermore, in almost all existing implementations, the tolerance level that separates acceptance from rejection of simulated parameter values needs to be calibrated.We propose to conduct likelihood-free Bayesian inferences about parameters with no prior selection of the relevant components of the summary statistics and bypassing the derivation of the associated tolerance level. The approach relies on the random forest (RF) methodology of Breiman (2001) applied in a (non-parametric) regression setting. We advocate the derivation of a new RF for each component of the parameter vector of interest. When compared with earlier ABC solutions, this method offers significant gains in terms of robustness to the choice of the summary statistics, does not depend on any type of tolerance level, and is a good trade-off in term of quality of point estimator precision and credible interval estimations for a given computing time. We illustrate the performance of our methodological proposal and compare it with earlier ABC methods on a Normal toy example and a population genetics example dealing with human population evolution.All methods designed here have been incorporated in the R package abcrf (version 1.7.1) available on CRAN.Supplementary data are available at Bioinformatics online.},
  issn     = {1367-4803},
  doi      = {10.1093/bioinformatics/bty867}
}
@misc{wright2015ranger,
  author        = {Wright, Marvin N. and Ziegler, Andreas},
  title         = {{Ranger}: a fast implementation of random forests for high dimensional data in {C++} and {R}},
  year          = {2015},
  eprint        = {1508.04409},
  archiveprefix = {arXiv},
  primaryclass  = {stat.ML}
}