@article{ghahramani_probabilistic_2015,
  title    = {Probabilistic machine learning and artificial intelligence},
  volume   = {521},
  issn     = {0028-0836, 1476-4687},
  url      = {http://www.nature.com/articles/nature14541},
  doi      = {10.1038/nature14541},
  language = {en},
  number   = {7553},
  urldate  = {2019-11-28},
  journal  = {Nature},
  author   = {Ghahramani, Zoubin},
  month    = may,
  year     = {2015},
  keywords = {Bayesian inference, Classical ML, Machine learning, Probabilistic programming},
  pages    = {452--459}
}
@article{scibior_functional_2018,
  title    = {Functional programming for modular {Bayesian} inference},
  volume   = {2},
  issn     = {2475-1421},
  url      = {https://dl.acm.org/doi/10.1145/3236778},
  doi      = {10.1145/3236778},
  language = {en},
  number   = {ICFP},
  urldate  = {2019-11-27},
  journal  = {Proceedings of the ACM on Programming Languages},
  author   = {Ścibior, Adam and Kammar, Ohad and Ghahramani, Zoubin},
  month    = jul,
  year     = {2018},
  keywords = {Bayesian inference, Implementation, Probabilistic programming},
  pages    = {1--29}
}
@inproceedings{scibior_practical_2015,
  address   = {New York, NY, USA},
  series    = {Haskell '15},
  title     = {Practical {Probabilistic} {Programming} with {Monads}},
  isbn      = {978-1-4503-3808-0},
  url       = {http://doi.acm.org/10.1145/2804302.2804317},
  doi       = {10.1145/2804302.2804317},
  abstract  = {The machine learning community has recently shown a lot of interest in practical probabilistic programming systems that target the problem of Bayesian inference. Such systems come in different forms, but they all express probabilistic models as computational processes using syntax resembling programming languages. In the functional programming community monads are known to offer a convenient and elegant abstraction for programming with probability distributions, but their use is often limited to very simple inference problems. We show that it is possible to use the monad abstraction to construct probabilistic models for machine learning, while still offering good performance of inference in challenging models. We use a GADT as an underlying representation of a probability distribution and apply Sequential Monte Carlo-based methods to achieve efficient inference. We define a formal semantics via measure theory. We demonstrate a clean and elegant implementation that achieves performance comparable with Anglican, a state-of-the-art probabilistic programming system.},
  urldate   = {2019-11-26},
  booktitle = {Proceedings of the 2015 {ACM} {SIGPLAN} {Symposium} on {Haskell}},
  publisher = {ACM},
  author    = {Ścibior, Adam and Ghahramani, Zoubin and Gordon, Andrew D.},
  year      = {2015},
  note      = {Event place: Vancouver, BC, Canada},
  keywords  = {Bayesian inference, Implementation, Probabilistic programming, Programming language theory},
  pages     = {165--176}
}
@article{scibior_denotational_2017,
  title         = {Denotational validation of higher-order {Bayesian} inference},
  volume        = {2},
  issn          = {2475-1421},
  url           = {http://arxiv.org/abs/1711.03219},
  doi           = {10.1145/3158148},
  abstract      = {We present a modular semantic account of Bayesian inference algorithms for probabilistic programming languages, as used in data science and machine learning. Sophisticated inference algorithms are often explained in terms of composition of smaller parts. However, neither their theoretical justification nor their implementation reflects this modularity. We show how to conceptualise and analyse such inference algorithms as manipulating intermediate representations of probabilistic programs using higher-order functions and inductive types, and their denotational semantics. Semantic accounts of continuous distributions use measurable spaces. However, our use of higher-order functions presents a substantial technical difficulty: it is impossible to define a measurable space structure over the collection of measurable functions between arbitrary measurable spaces that is compatible with standard operations on those functions, such as function application. We overcome this difficulty using quasi-Borel spaces, a recently proposed mathematical structure that supports both function spaces and continuous distributions. We define a class of semantic structures for representing probabilistic programs, and semantic validity criteria for transformations of these representations in terms of distribution preservation. We develop a collection of building blocks for composing representations. We use these building blocks to validate common inference algorithms such as Sequential Monte Carlo and Markov Chain Monte Carlo. To emphasize the connection between the semantic manipulation and its traditional measure theoretic origins, we use Kock's synthetic measure theory. We demonstrate its usefulness by proving a quasi-Borel counterpart to the Metropolis-Hastings-Green theorem.},
  number        = {POPL},
  urldate       = {2019-10-10},
  journal       = {Proceedings of the ACM on Programming Languages},
  author        = {Ścibior, Adam and Kammar, Ohad and Vákár, Matthijs and Staton, Sam and Yang, Hongseok and Cai, Yufei and Ostermann, Klaus and Moss, Sean K. and Heunen, Chris and Ghahramani, Zoubin},
  month         = dec,
  year          = {2017},
  eprint        = {1711.03219},
  archiveprefix = {arXiv},
  pages         = {1--29}
}