@article{scibior_denotational_2017,
  title         = {Denotational validation of higher-order {Bayesian} inference},
  volume        = {2},
  issn          = {2475-1421},
  url           = {http://arxiv.org/abs/1711.03219},
  doi           = {10.1145/3158148},
  abstract      = {We present a modular semantic account of Bayesian inference algorithms for probabilistic programming languages, as used in data science and machine learning. Sophisticated inference algorithms are often explained in terms of composition of smaller parts. However, neither their theoretical justification nor their implementation reflects this modularity. We show how to conceptualise and analyse such inference algorithms as manipulating intermediate representations of probabilistic programs using higher-order functions and inductive types, and their denotational semantics. Semantic accounts of continuous distributions use measurable spaces. However, our use of higher-order functions presents a substantial technical difficulty: it is impossible to define a measurable space structure over the collection of measurable functions between arbitrary measurable spaces that is compatible with standard operations on those functions, such as function application. We overcome this difficulty using quasi-Borel spaces, a recently proposed mathematical structure that supports both function spaces and continuous distributions. We define a class of semantic structures for representing probabilistic programs, and semantic validity criteria for transformations of these representations in terms of distribution preservation. We develop a collection of building blocks for composing representations. We use these building blocks to validate common inference algorithms such as Sequential Monte Carlo and Markov Chain Monte Carlo. To emphasize the connection between the semantic manipulation and its traditional measure theoretic origins, we use Kock's synthetic measure theory. We demonstrate its usefulness by proving a quasi-Borel counterpart to the Metropolis-Hastings-Green theorem.},
  number        = {POPL},
  urldate       = {2019-10-10},
  journal       = {Proceedings of the ACM on Programming Languages},
  author        = {{\'S}cibior, Adam and Kammar, Ohad and V{\'a}k{\'a}r, Matthijs and Staton, Sam and Yang, Hongseok and Cai, Yufei and Ostermann, Klaus and Moss, Sean K. and Heunen, Chris and Ghahramani, Zoubin},
  month         = dec,
  year          = {2017},
  eprint        = {1711.03219},
  archiveprefix = {arXiv},
  pages         = {1--29},
}
@article{jacobs_formal_2017,
  title         = {A {Formal} {Semantics} of {Influence} in {Bayesian} {Reasoning}},
  url           = {http://drops.dagstuhl.de/opus/volltexte/2017/8089/},
  doi           = {10/ggdgbc},
  abstract      = {This paper proposes a formal definition of influence in Bayesian reasoning, based on the notions of state (as probability distribution), predicate, validity and conditioning. Our approach highlights how conditioning a joint entwined/entangled state with a predicate on one of its components has `crossover' influence on the other components. We use the total variation metric on probability distributions to quantitatively measure such influence. These insights are applied to give a rigorous explanation of the fundamental concept of d-separation in Bayesian networks.},
  language      = {en},
  urldate       = {2019-11-24},
  publisher     = {Schloss Dagstuhl - Leibniz-Zentrum fuer Informatik GmbH, Wadern/Saarbruecken, Germany},
  author        = {Jacobs, Bart and Zanasi, Fabio},
  year          = {2017},
  keywords      = {Bayesianism, Categorical probability theory, Programming language theory, Semantics},
  internal-note = {Review: the publisher had been mis-exported into the journal field and was moved to publisher. The drops.dagstuhl.de URL suggests this is a Dagstuhl/LIPIcs proceedings paper -- confirm the venue and convert to @inproceedings with booktitle/series/volume. Also confirm the shortDOI (10/ggdgbc) and replace it with the full registered DOI. Removed auto-export junk note (ZSCC citation count).},
}
@article{jacobs_predicate/state_2016,
  series        = {The {Thirty}-second {Conference} on the {Mathematical} {Foundations} of {Programming} {Semantics} ({MFPS} {XXXII})},
  title         = {A {Predicate}/{State} {Transformer} {Semantics} for {Bayesian} {Learning}},
  volume        = {325},
  issn          = {1571-0661},
  url           = {http://www.sciencedirect.com/science/article/pii/S1571066116300883},
  doi           = {10/ggdgbb},
  abstract      = {This paper establishes a link between Bayesian inference (learning) and predicate and state transformer operations from programming semantics and logic. Specifically, a very general definition of backward inference is given via first applying a predicate transformer and then conditioning. Analogously, forward inference involves first conditioning and then applying a state transformer. These definitions are illustrated in many examples in discrete and continuous probability theory and also in quantum theory.},
  language      = {en},
  urldate       = {2019-11-24},
  journal       = {Electronic Notes in Theoretical Computer Science},
  author        = {Jacobs, Bart and Zanasi, Fabio},
  month         = oct,
  year          = {2016},
  keywords      = {Bayesianism, Categorical ML, Categorical probability theory, Effectus theory, Programming language theory, Semantics},
  pages         = {185--200},
  internal-note = {Review: removed auto-export junk note (ZSCC citation count). Confirm the shortDOI (10/ggdgbb) and replace it with the publisher-registered full DOI. The '/' in the citation key is accepted by BibTeX/Biber but trips up some tooling -- consider renaming if practical.},
}