@misc{borchert_amzn/milan_2019,
title = {amzn/milan},
copyright = {Apache-2.0},
url = {https://github.com/amzn/milan},
abstract = {Milan is a Scala API and runtime infrastructure for building data-oriented systems, built on top of Apache Flink.},
urldate = {2019-11-27},
publisher = {Amazon},
author = {Borchert, Tom},
month = nov,
year = {2019},
note = {ZSCC: NoCitationData[s0]
original-date: 2019-11-06T22:37:51Z},
keywords = {Implementation, Machine learning, Probabilistic programming}
}
@misc{murfet_dmurfet/2simplicialtransformer_2019,
title = {dmurfet/2simplicialtransformer},
url = {https://github.com/dmurfet/2simplicialtransformer},
abstract = {Code for the 2-simplicial Transformer paper. Contribute to dmurfet/2simplicialtransformer development by creating an account on GitHub.},
urldate = {2019-11-22},
author = {Murfet, Daniel},
month = oct,
year = {2019},
note = {ZSCC: NoCitationData[s0]
original-date: 2019-08-29T13:26:13Z},
keywords = {Abstract machines, Algebra, Implementation, Machine learning, Semantics}
}
@article{hamrick_analogues_2019,
series = {{SI}: 29: {Artificial} {Intelligence} (2019)},
title = {Analogues of mental simulation and imagination in deep learning},
volume = {29},
issn = {2352-1546},
url = {http://www.sciencedirect.com/science/article/pii/S2352154618301670},
doi = {10.1016/j.cobeha.2018.12.011},
abstract = {Mental simulation—the capacity to imagine what will or what could be—is a salient feature of human cognition, playing a key role in a wide range of cognitive abilities. In artificial intelligence, the last few years have seen the development of methods which are analogous to mental models and mental simulation. This paper outlines recent methods in deep learning for constructing such models from data and learning to use them via reinforcement learning, and compares such approaches to human mental simulation. Model-based methods in deep learning can serve as powerful tools for building and scaling cognitive models. However, a number of challenges remain in matching the capacity of human mental simulation for efficiency, compositionality, generalization, and creativity.},
urldate = {2019-10-10},
journal = {Current Opinion in Behavioral Sciences},
author = {Hamrick, Jessica B.},
month = oct,
year = {2019},
pages = {8--16}
}
@misc{vytiniotis_differentiable_2019,
title = {The {Differentiable} {Curry}},
url = {https://openreview.net/forum?id=ryxuz9SzDB},
abstract = {We revisit the automatic differentiation (AD) of programs that contain higher-order functions, in a statically typed setting. Our presentation builds on a recent formulation of AD based on...},
urldate = {2019-11-22},
author = {Vytiniotis, Dimitrios and Belov, Dan and Wei, Richard and Plotkin, Gordon and Abadi, Martin},
month = sep,
year = {2019},
note = {ZSCC: 0000000},
keywords = {Automatic differentiation, Differentiation, Programming language theory}
}
@article{murfet_logic_2019,
title = {Logic and the {$2$}-{Simplicial} {Transformer}},
url = {http://arxiv.org/abs/1909.00668},
abstract = {We introduce the {$2$}-simplicial Transformer, an extension of the Transformer which includes a form of higher-dimensional attention generalising the dot-product attention, and uses this attention to update entity representations with tensor products of value vectors. We show that this architecture is a useful inductive bias for logical reasoning in the context of deep reinforcement learning.},
urldate = {2019-11-21},
journal = {arXiv:1909.00668 [cs, stat]},
author = {Murfet, Daniel and Clift, James and Doryn, Dmitry and Wallbridge, James},
month = sep,
year = {2019},
note = {ZSCC: 0000000
arXiv: 1909.00668
version: 1},
keywords = {Abstract machines, Algebra, Machine learning, Semantics}
}
@article{baudart_reactive_2019,
title = {Reactive {Probabilistic} {Programming}},
url = {http://arxiv.org/abs/1908.07563},
abstract = {Synchronous reactive languages were introduced for designing and implementing real-time control software. These domain-specific languages allow for writing a modular and mathematically precise specification of the system, enabling a user to simulate, test, verify, and, finally, compile the system into executable code. However, to date these languages have had limited modern support for modeling uncertainty -- probabilistic aspects of the software's environment or behavior -- even though modeling uncertainty is a primary activity when designing a control system. In this paper we extend Z{\'e}lus, a synchronous programming language, to deliver ProbZ{\'e}lus, the first synchronous probabilistic programming language. ProbZ{\'e}lus is a probabilistic programming language in that it provides facilities for probabilistic models and inference: inferring latent model parameters from data. We present ProbZ{\'e}lus's measure-theoretic semantics in the setting of probabilistic, stateful stream functions. We then demonstrate a semantics-preserving compilation strategy to a first-order functional core calculus that lends itself to a simple semantic presentation of ProbZ{\'e}lus's inference algorithms. We also redesign the delayed sampling inference algorithm to provide bounded and streaming delayed sampling inference for ProbZ{\'e}lus models. Together with our evaluation on several reactive programs, our results demonstrate that ProbZ{\'e}lus provides efficient, bounded memory probabilistic inference.},
urldate = {2019-11-28},
journal = {arXiv:1908.07563 [cs]},
author = {Baudart, Guillaume and Mandel, Louis and Atkinson, Eric and Sherman, Benjamin and Pouzet, Marc and Carbin, Michael},
month = aug,
year = {2019},
note = {ZSCC: 0000001
arXiv: 1908.07563},
keywords = {Bayesian inference, Denotational semantics, Implementation, Probabilistic programming, Programming language theory}
}
@misc{wilkinson_write_2019,
title = {Write your own general-purpose monadic probabilistic programming language from scratch in 50 lines of ({Scala}) code},
url = {https://darrenjw.wordpress.com/2019/08/07/write-your-own-general-purpose-monadic-probabilistic-programming-language-from-scratch-in-50-lines-of-scala-code/},
abstract = {Background In May I attended a great workshop on advances and challenges in machine learning languages at the CMS in Cambridge. There was a good mix of people from different disciplines, and a b…},
language = {en},
urldate = {2019-11-27},
journal = {Darren Wilkinson's research blog},
author = {Wilkinson, Darren},
month = aug,
year = {2019},
note = {ZSCC: NoCitationData[s0]},
keywords = {Bayesian inference, Implementation, Probabilistic programming}
}
@article{law_functional_2019,
title = {Functional probabilistic programming for scalable {Bayesian} modelling},
url = {http://arxiv.org/abs/1908.02062},
abstract = {Bayesian inference involves the specification of a statistical model by a statistician or practitioner, with careful thought about what each parameter represents. This results in particularly interpretable models which can be used to explain relationships present in the observed data. Bayesian models are useful when an experiment has only a small number of observations and in applications where transparency of data driven decisions is important. Traditionally, parameter inference in Bayesian statistics has involved constructing bespoke MCMC (Markov chain Monte Carlo) schemes for each newly proposed statistical model. This results in plausible models not being considered since efficient inference schemes are challenging to develop or implement. Probabilistic programming aims to reduce the barrier to performing Bayesian inference by developing a domain specific language (DSL) for model specification which is decoupled from the parameter inference algorithms. This paper introduces functional programming principles which can be used to develop an embedded probabilistic programming language. Model inference can be carried out using any generic inference algorithm. In this paper Hamiltonian Monte Carlo (HMC) is used, an efficient MCMC method requiring the gradient of the un-normalised log-posterior, calculated using automatic differentiation. The concepts are illustrated using the Scala programming language.},
urldate = {2019-11-27},
journal = {arXiv:1908.02062 [stat]},
author = {Law, Jonathan and Wilkinson, Darren},
month = aug,
year = {2019},
note = {ZSCC: 0000000
arXiv: 1908.02062},
keywords = {Automatic differentiation, Bayesian inference, Differentiation, Implementation, Probabilistic programming, Programming language theory}
}
@article{jacobs_causal_2019,
title = {Causal {Inference} by {String} {Diagram} {Surgery}},
url = {http://arxiv.org/abs/1811.08338},
abstract = {Extracting causal relationships from observed correlations is a growing area in probabilistic reasoning, originating with the seminal work of Pearl and others from the early 1990s. This paper develops a new, categorically oriented view based on a clear distinction between syntax (string diagrams) and semantics (stochastic matrices), connected via interpretations as structure-preserving functors. A key notion in the identification of causal effects is that of an intervention, whereby a variable is forcefully set to a particular value independent of any prior propensities. We represent the effect of such an intervention as an endofunctor which performs `string diagram surgery' within the syntactic category of string diagrams. This diagram surgery in turn yields a new, interventional distribution via the interpretation functor. While in general there is no way to compute interventional distributions purely from observed data, we show that this is possible in certain special cases using a calculational tool called comb disintegration. We demonstrate the use of this technique on a well-known toy example, where we predict the causal effect of smoking on cancer in the presence of a confounding common cause. After developing this specific example, we show this technique provides simple sufficient conditions for computing interventions which apply to a wide variety of situations considered in the causal inference literature.},
urldate = {2019-11-21},
journal = {arXiv:1811.08338 [cs, math]},
author = {Jacobs, Bart and Kissinger, Aleks and Zanasi, Fabio},
month = jul,
year = {2019},
note = {ZSCC: 0000003
arXiv: 1811.08338},
keywords = {Bayesianism, Categorical probability theory, Implementation}
}
@article{blute_differential_2019,
title = {Differential {Categories} {Revisited}},
issn = {1572-9095},
url = {https://doi.org/10.1007/s10485-019-09572-y},
doi = {10/ggdm44},
abstract = {Differential categories were introduced to provide a minimal categorical doctrine for differential linear logic. Here we revisit the formalism and, in particular, examine the two different approaches to defining differentiation which were introduced. The basic approach used a deriving transformation, while a more refined approach, in the presence of a bialgebra modality, used a codereliction. The latter approach is particularly relevant to linear logic settings, where the coalgebra modality is monoidal and the Seely isomorphisms give rise to a bialgebra modality. Here, we prove that these apparently distinct notions of differentiation, in the presence of a monoidal coalgebra modality, are completely equivalent. Thus, for linear logic settings, there is only one notion of differentiation. This paper also presents a number of separating examples for coalgebra modalities including examples which are and are not monoidal, as well as examples which do and do not support differential structure. Of particular interest is the observation that—somewhat counter-intuitively—differential algebras never induce a differential category although they provide a monoidal coalgebra modality. On the other hand, Rota–Baxter algebras—which are usually associated with integration—provide an example of a differential category which has a non-monoidal coalgebra modality.},
language = {en},
urldate = {2019-11-28},
journal = {Applied Categorical Structures},
author = {Blute, R. F. and Cockett, J. R. B. and Lemay, J.-S. P. and Seely, R. A. G.},
month = jul,
year = {2019},
note = {ZSCC: NoCitationData[s1]},
keywords = {Differential Linear Logic, Differentiation, Linear logic}
}
@inproceedings{dal_lago_geometry_2019,
title = {The {Geometry} of {Bayesian} {Programming}},
doi = {10/ggdk85},
booktitle = {2019 34th {Annual} {ACM}/{IEEE} {Symposium} on {Logic} in {Computer} {Science} ({LICS})},
publisher = {IEEE},
author = {Dal Lago, Ugo and Hoshino, Naohiko},
month = jun,
year = {2019},
note = {ZSCC: 0000000},
keywords = {Bayesian inference, Denotational semantics, Linear logic, Probabilistic programming, Programming language theory, Rewriting theory, Transition systems},
pages = {1--13}
}
@book{winn_model-based_2019,
title = {Model-{Based} {Machine} {Learning}},
isbn = {978-1-4987-5681-5},
abstract = {This book is unusual for a machine learning text book in that the authors do not review dozens of different algorithms. Instead they introduce all of the key ideas through a series of case studies involving real-world applications. Case studies play a central role because it is only in the context of applications that it makes sense to discuss modelling assumptions. Each chapter therefore introduces one case study which is drawn from a real-world application that has been solved using a model-based approach.},
language = {en},
publisher = {Taylor \& Francis Incorporated},
author = {Winn, John Michael},
month = jun,
year = {2019},
note = {ZSCC: NoCitationData[s1]
Google-Books-ID: 84KRtgEACAAJ},
keywords = {Bayesian inference, Classical ML, Implementation}
}
@article{harris_characterizing_2019,
title = {Characterizing the invariances of learning algorithms using category theory},
url = {http://arxiv.org/abs/1905.02072},
abstract = {Many learning algorithms have invariances: when their training data is transformed in certain ways, the function they learn transforms in a predictable manner. Here we formalize this notion using concepts from the mathematical field of category theory. The invariances that a supervised learning algorithm possesses are formalized by categories of predictor and target spaces, whose morphisms represent the algorithm's invariances, and an index category whose morphisms represent permutations of the training examples. An invariant learning algorithm is a natural transformation between two functors from the product of these categories to the category of sets, representing training datasets and learned functions respectively. We illustrate the framework by characterizing and contrasting the invariances of linear regression and ridge regression.},
urldate = {2019-10-10},
journal = {arXiv:1905.02072 [cs, math, stat]},
author = {Harris, Kenneth D.},
month = may,
year = {2019},
note = {arXiv: 1905.02072}
}
@article{fong_backprop_2019,
title = {Backprop as {Functor}: {A} compositional perspective on supervised learning},
shorttitle = {Backprop as {Functor}},
url = {http://arxiv.org/abs/1711.10455},
abstract = {A supervised learning algorithm searches over a set of functions {$A \to B$} parametrised by a space {$P$} to find the best approximation to some ideal function {$f\colon A \to B$}. It does this by taking examples {$(a,f(a)) \in A\times B$}, and updating the parameter according to some rule. We define a category where these update rules may be composed, and show that gradient descent---with respect to a fixed step size and an error function satisfying a certain property---defines a monoidal functor from a category of parametrised functions to this category of update rules. This provides a structural perspective on backpropagation, as well as a broad generalisation of neural networks.},
urldate = {2019-11-23},
journal = {arXiv:1711.10455 [cs, math]},
author = {Fong, Brendan and Spivak, David I. and Tuyéras, Rémy},
month = may,
year = {2019},
note = {ZSCC: 0000015
arXiv: 1711.10455},
keywords = {Categorical ML, Machine learning, Purely theoretical}
}
@article{dong_neural_2019,
title = {Neural {Logic} {Machines}},
url = {http://arxiv.org/abs/1904.11694},
abstract = {We propose the Neural Logic Machine (NLM), a neural-symbolic architecture for both inductive learning and logic reasoning. NLMs exploit the power of both neural networks---as function approximators, and logic programming---as a symbolic processor for objects with properties, relations, logic connectives, and quantifiers. After being trained on small-scale tasks (such as sorting short arrays), NLMs can recover lifted rules, and generalize to large-scale tasks (such as sorting longer arrays). In our experiments, NLMs achieve perfect generalization in a number of tasks, from relational reasoning tasks on the family tree and general graphs, to decision making tasks including sorting arrays, finding shortest paths, and playing the blocks world. Most of these tasks are hard to accomplish for neural networks or inductive logic programming alone.},
urldate = {2019-11-24},
journal = {arXiv:1904.11694 [cs, stat]},
author = {Dong, Honghua and Mao, Jiayuan and Lin, Tian and Wang, Chong and Li, Lihong and Zhou, Denny},
month = apr,
year = {2019},
note = {ZSCC: 0000008
arXiv: 1904.11694},
keywords = {Abstract machines, Machine learning, Symbolic logic}
}
@article{jacobs_differential_2019,
title = {The differential calculus of causal functions},
url = {http://arxiv.org/abs/1904.10611},
abstract = {Causal functions of sequences occur throughout computer science, from theory to hardware to machine learning. Mealy machines, synchronous digital circuits, signal flow graphs, and recurrent neural networks all have behaviour that can be described by causal functions. In this work, we examine a differential calculus of causal functions which includes many of the familiar properties of standard multivariable differential calculus. These causal functions operate on infinite sequences, but this work gives a different notion of an infinite-dimensional derivative than either the Fr{\'e}chet or Gateaux derivative used in functional analysis. In addition to showing many standard properties of differentiation, we show causal differentiation obeys a unique recurrence rule. We use this recurrence rule to compute the derivative of a simple recurrent neural network called an Elman network by hand and describe how the computed derivative can be used to train the network.},
urldate = {2019-11-21},
journal = {arXiv:1904.10611 [cs]},
author = {Jacobs, Bart and Sprunger, David},
month = apr,
year = {2019},
note = {ZSCC: 0000001
arXiv: 1904.10611},
keywords = {Automatic differentiation, Differentiation}
}
@article{fritz_probability_2019,
title = {A {Probability} {Monad} as the {Colimit} of {Spaces} of {Finite} {Samples}},
url = {http://arxiv.org/abs/1712.05363},
abstract = {We define and study a probability monad on the category of complete metric spaces and short maps. It assigns to each space the space of Radon probability measures on it with finite first moment, equipped with the Kantorovich-Wasserstein distance. This monad is analogous to the Giry monad on the category of Polish spaces, and it extends a construction due to van Breugel for compact and for 1-bounded complete metric spaces. We prove that this Kantorovich monad arises from a colimit construction on finite power-like constructions, which formalizes the intuition that probability measures are limits of finite samples. The proof relies on a criterion for when an ordinary left Kan extension of lax monoidal functors is a monoidal Kan extension. The colimit characterization allows the development of integration theory and the treatment of measures on spaces of measures, without measure theory. We also show that the category of algebras of the Kantorovich monad is equivalent to the category of closed convex subsets of Banach spaces with short affine maps as morphisms.},
urldate = {2019-11-28},
journal = {arXiv:1712.05363 [cs, math]},
author = {Fritz, Tobias and Perrone, Paolo},
month = mar,
year = {2019},
note = {ZSCC: NoCitationData[s1]
arXiv: 1712.05363},
keywords = {Categorical probability theory, Purely theoretical}
}
@article{ehrhard_differentials_2019,
title = {Differentials and distances in probabilistic coherence spaces},
url = {http://arxiv.org/abs/1902.04836},
abstract = {In probabilistic coherence spaces, a denotational model of probabilistic functional languages, mor-phisms are analytic and therefore smooth. We explore two related applications of the corresponding derivatives. First we show how derivatives allow to compute the expectation of execution time in the weak head reduction of probabilistic PCF (pPCF). Next we apply a general notion of "local" differential of morphisms to the proof of a Lipschitz property of these morphisms allowing in turn to relate the observational distance on pPCF terms to a distance the model is naturally equipped with. This suggests that extending probabilistic programming languages with derivatives, in the spirit of the differential lambda-calculus, could be quite meaningful.},
urldate = {2019-11-28},
journal = {arXiv:1902.04836 [cs]},
author = {Ehrhard, Thomas},
month = feb,
year = {2019},
note = {ZSCC: 0000000
arXiv: 1902.04836},
keywords = {Coherence spaces, Denotational semantics, Differential Linear Logic, Differentiation, Linear logic, Probabilistic programming, Programming language theory}
}
@article{murfet_derivatives_2019,
title = {Derivatives of {Turing} machines in {Linear} {Logic}},
url = {http://arxiv.org/abs/1805.11813},
abstract = {We calculate denotations under the Sweedler semantics of the Ehrhard-Regnier derivatives of various encodings of Turing machines into linear logic. We show that these derivatives calculate the rate of change of probabilities naturally arising in the Sweedler semantics of linear logic proofs. The resulting theory is applied to the problem of synthesising Turing machines by gradient descent.},
urldate = {2019-11-21},
journal = {arXiv:1805.11813 [math]},
author = {Murfet, Daniel and Clift, James},
month = jan,
year = {2019},
note = {ZSCC: NoCitationData[s0]
arXiv: 1805.11813},
keywords = {Abstract machines, Categorical ML, Differentiation, Linear logic, Machine learning}
}
@misc{wilkinson_compositional_2019,
title = {A compositional approach to scalable {Bayesian} computation and probabilistic programming},
language = {en},
author = {Wilkinson, Darren},
year = {2019},
note = {ZSCC: 0000000[s0]},
keywords = {Bayesian inference, Implementation, Probabilistic programming, Programming language theory}
}
@inproceedings{sprunger_differentiable_2019,
address = {Vancouver, BC, Canada},
title = {Differentiable {Causal} {Computations} via {Delayed} {Trace}},
isbn = {978-1-72813-608-0},
url = {https://ieeexplore.ieee.org/document/8785670/},
doi = {10/ggdf98},
abstract = {We investigate causal computations taking sequences of inputs to sequences of outputs where the nth output depends on the first n inputs only. We model these in category theory via a construction taking a Cartesian category C to another category St(C) with a novel trace-like operation called “delayed trace”, which misses yanking and dinaturality axioms of the usual trace. The delayed trace operation provides a feedback mechanism in St(C) with an implicit guardedness guarantee.},
language = {en},
urldate = {2019-11-23},
booktitle = {2019 34th {Annual} {ACM}/{IEEE} {Symposium} on {Logic} in {Computer} {Science} ({LICS})},
publisher = {IEEE},
author = {Sprunger, David and Katsumata, Shin-ya},
month = jun,
year = {2019},
note = {ZSCC: 0000002},
keywords = {Categorical ML, Differentiation},
pages = {1--12}
}
@inproceedings{kerjean_higher-order_2019,
address = {Cham},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Higher-{Order} {Distributions} for {Differential} {Linear} {Logic}},
isbn = {978-3-030-17127-8},
doi = {10/ggdmrj},
abstract = {Linear Logic was introduced as the computational counterpart of the algebraic notion of linearity. Differential Linear Logic refines Linear Logic with a proof-theoretical interpretation of the geometrical process of differentiation. In this article, we construct a polarized model of Differential Linear Logic satisfying computational constraints such as an interpretation for higher-order functions, as well as constraints inherited from physics such as a continuous interpretation for spaces. This extends what was done previously by Kerjean for first order Differential Linear Logic without promotion. Concretely, we follow the previous idea of interpreting the exponential of Differential Linear Logic as a space of higher-order distributions with compact-support, which is constructed as an inductive limit of spaces of distributions on Euclidean spaces. We prove that this exponential is endowed with a co-monadic like structure, with the notable exception that it is functorial only on isomorphisms. Interestingly, as previously argued by Ehrhard, this still allows the interpretation of differential linear logic without promotion.},
language = {en},
booktitle = {Foundations of {Software} {Science} and {Computation} {Structures}},
publisher = {Springer International Publishing},
author = {Kerjean, Marie and Lemay, Jean-Simon Pacaud},
editor = {Bojańczyk, Mikołaj and Simpson, Alex},
year = {2019},
note = {ZSCC: NoCitationData[s1]},
keywords = {Denotational semantics, Differential Linear Logic, Differentiation, Linear logic},
pages = {330--347}
}
@article{jacobs_disintegration_2019,
title = {Disintegration and {Bayesian} {Inversion} via {String} {Diagrams}},
volume = {29},
issn = {0960-1295, 1469-8072},
url = {http://arxiv.org/abs/1709.00322},
doi = {10/ggdf9v},
abstract = {The notions of disintegration and Bayesian inversion are fundamental in conditional probability theory. They produce channels, as conditional probabilities, from a joint state, or from an already given channel (in opposite direction). These notions exist in the literature, in concrete situations, but are presented here in abstract graphical formulations. The resulting abstract descriptions are used for proving basic results in conditional probability theory. The existence of disintegration and Bayesian inversion is discussed for discrete probability, and also for measure-theoretic probability --- via standard Borel spaces and via likelihoods. Finally, the usefulness of disintegration and Bayesian inversion is illustrated in several examples.},
number = {7},
urldate = {2019-11-21},
journal = {Mathematical Structures in Computer Science},
author = {Jacobs, Bart and Cho, Kenta},
month = aug,
year = {2019},
note = {ZSCC: 0000007
arXiv: 1709.00322},
keywords = {Bayesianism, Categorical probability theory},
pages = {938--971}
}
@article{heller_homunculus_2019,
title = {Homunculus' {Brain} and {Categorical} {Logic}},
volume = {abs/1903.03424},
abstract = {The interaction between syntax (formal language) and its semantics (meanings of language) is well studied in categorical logic. Results of this study are employed to understand how the brain could create meanings. To emphasize the toy character of the proposed model, we prefer to speak on homunculus' brain rather than just on the brain. Homunculus' brain consists of neurons, each of which is modeled by a category, and axons between neurons, which are modeled by functors between the corresponding neuron-categories. Each neuron (category) has its own program enabling its working, i.e. a "theory" of this neuron. In analogy with what is known from categorical logic, we postulate the existence of the pair of adjoint functors, called Lang and Syn, from a category, now called BRAIN, of categories, to a category, now called MIND, of theories. Our homunculus is a kind of "mathematical robot", the neuronal architecture of which is not important. Its only aim is to provide us with the opportunity to study how such a simple brain-like structure could "create meanings" out of its purely syntactic program. The pair of adjoint functors Lang and Syn models mutual dependencies between the syntactical structure of a given theory of MIND and the internal logic of its semantics given by a category of BRAIN. In this way, a formal language (syntax) and its meanings (semantics) are interwoven with each other in a manner corresponding to the adjointness of the functors Lang and Syn. Categories BRAIN and MIND interact with each other with their entire structures and, at the same time, these very structures are shaped by this interaction.},
journal = {arXiv:1903.03424},
author = {Heller, Michael},
year = {2019},
note = {ZSCC: 0000000
arXiv: 1903.03424},
keywords = {Emergence, Sketchy}
}
@article{tuyeras_category_2018,
title = {Category theory for genetics {I}: mutations and sequence alignments},
shorttitle = {Category theory for genetics {I}},
url = {http://arxiv.org/abs/1805.07002},
abstract = {The present article is the first of a series whose goal is to define a logical formalism in which it is possible to reason about genetics. In this paper, we introduce the main concepts of our language whose domain of discourse consists of a class of limit-sketches and their associated models. While our program will aim to show that different phenomena of genetics can be modeled by changing the category in which the models take their values, in this paper, we study models in the category of sets to capture mutation mechanisms such as insertions, deletions, substitutions, duplications and inversions. We show how the proposed formalism can be used for constructing multiple sequence alignments with an emphasis on mutation mechanisms.},
urldate = {2019-11-28},
journal = {arXiv:1805.07002 [math]},
author = {Tuyéras, Rémy},
month = dec,
year = {2018},
note = {ZSCC: 0000001
arXiv: 1805.07002},
keywords = {Biology}
}
@article{paquet_continuous_2018,
series = {Proceedings of the {Thirty}-{Fourth} {Conference} on the {Mathematical} {Foundations} of {Programming} {Semantics} ({MFPS} {XXXIV})},
title = {Continuous {Probability} {Distributions} in {Concurrent} {Games}},
volume = {341},
issn = {1571-0661},
url = {http://www.sciencedirect.com/science/article/pii/S1571066118300975},
doi = {10/ggdmwv},
abstract = {We present a model of concurrent games in which strategies are probabilistic and support both discrete and continuous distributions. This is a generalisation of the probabilistic concurrent strategies of Winskel, based on event structures. We first introduce measurable event structures, discrete fibrations of event structures in which each fibre is turned into a measurable space. We then construct a bicategory of measurable games and measurable strategies based on measurable event structures, and add probability to measurable strategies using standard techniques of measure theory. We illustrate the model by giving semantics to an affine, higher-order, probabilistic language with a type of real numbers and continuous distributions.},
language = {en},
urldate = {2019-11-28},
journal = {Electronic Notes in Theoretical Computer Science},
author = {Paquet, Hugo and Winskel, Glynn},
month = dec,
year = {2018},
note = {ZSCC: 0000002},
keywords = {Game semantics, Interactive semantics, Probabilistic programming, Programming language theory},
pages = {321--344}
}
@article{vakar_domain_2018,
title = {A {Domain} {Theory} for {Statistical} {Probabilistic} {Programming}},
url = {http://arxiv.org/abs/1811.04196},
abstract = {We give an adequate denotational semantics for languages with recursive higher-order types, continuous probability distributions, and soft constraints. These are expressive languages for building Bayesian models of the kinds used in computational statistics and machine learning. Among them are untyped languages, similar to Church and WebPPL, because our semantics allows recursive mixed-variance datatypes. Our semantics justifies important program equivalences including commutativity. Our new semantic model is based on `quasi-Borel predomains'. These are a mixture of chain-complete partial orders (cpos) and quasi-Borel spaces. Quasi-Borel spaces are a recent model of probability theory that focuses on sets of admissible random elements. Probability is traditionally treated in cpo models using probabilistic powerdomains, but these are not known to be commutative on any class of cpos with higher order functions. By contrast, quasi-Borel predomains do support both a commutative probabilistic powerdomain and higher-order functions. As we show, quasi-Borel predomains form both a model of Fiore's axiomatic domain theory and a model of Kock's synthetic measure theory.},
urldate = {2019-10-10},
journal = {arXiv:1811.04196 [cs]},
author = {Vákár, Matthijs and Kammar, Ohad and Staton, Sam},
month = nov,
year = {2018},
note = {arXiv: 1811.04196}
}
@article{jacobs_categorical_2018,
title = {Categorical {Aspects} of {Parameter} {Learning}},
url = {http://arxiv.org/abs/1810.05814},
abstract = {Parameter learning is the technique for obtaining the probabilistic parameters in conditional probability tables in Bayesian networks from tables with (observed) data --- where it is assumed that the underlying graphical structure is known. There are basically two ways of doing so, referred to as maximal likelihood estimation (MLE) and as Bayesian learning. This paper provides a categorical analysis of these two techniques and describes them in terms of basic properties of the multiset monad M, the distribution monad D and the Giry monad G. In essence, learning is about the relationships between multisets (used for counting) on the one hand and probability distributions on the other. These relationships will be described as suitable natural transformations.},
urldate = {2019-11-21},
journal = {arXiv:1810.05814 [cs]},
author = {Jacobs, Bart},
month = oct,
year = {2018},
note = {ZSCC: 0000001
arXiv: 1810.05814},
keywords = {Bayesianism, Categorical ML, Categorical probability theory, Machine learning}
}
@article{elliott_simple_2018,
title = {The simple essence of automatic differentiation},
url = {http://arxiv.org/abs/1804.00746},
abstract = {Automatic differentiation (AD) in reverse mode (RAD) is a central component of deep learning and other uses of large-scale optimization. Commonly used RAD algorithms such as backpropagation, however, are complex and stateful, hindering deep understanding, improvement, and parallel execution. This paper develops a simple, generalized AD algorithm calculated from a simple, natural specification. The general algorithm is then specialized by varying the representation of derivatives. In particular, applying well-known constructions to a naive representation yields two RAD algorithms that are far simpler than previously known. In contrast to commonly used RAD implementations, the algorithms defined here involve no graphs, tapes, variables, partial derivatives, or mutation. They are inherently parallel-friendly, correct by construction, and usable directly from an existing programming language with no need for new data types or programming style, thanks to use of an AD-agnostic compiler plugin.},
urldate = {2019-11-22},
journal = {arXiv:1804.00746 [cs]},
author = {Elliott, Conal},
month = oct,
year = {2018},
note = {ZSCC: 0000019
arXiv: 1804.00746},
keywords = {Automatic differentiation, Differentiation, Implementation}
}
@article{tuyeras_category_2018,
title = {Category theory for genetics {II}: genotype, phenotype and haplotype},
shorttitle = {Category theory for genetics {II}},
url = {http://arxiv.org/abs/1805.07004},
abstract = {In this paper, we use the language of pedigrads, introduced in previous work, to formalize the relationship between genotypes, phenotypes and haplotypes. We show how this formalism can help us localize the variations in the genotype that cause a given phenotype. We then use the concept of haplotype to formalize the process of predicting a certain phenotype for a given set of genotypes.},
urldate = {2019-11-28},
journal = {arXiv:1805.07004 [math]},
author = {Tuyéras, Rémy},
month = aug,
year = {2018},
note = {ZSCC: 0000001
arXiv: 1805.07004},
keywords = {Biology}
}
@article{scibior_functional_2018,
title = {Functional programming for modular {Bayesian} inference},
volume = {2},
issn = {24751421},
url = {http://dl.acm.org/citation.cfm?doid=3243631.3236778},
doi = {10/gft39x},
language = {en},
number = {ICFP},
urldate = {2019-11-27},
journal = {Proceedings of the ACM on Programming Languages},
author = {Ścibior, Adam and Kammar, Ohad and Ghahramani, Zoubin},
month = jul,
year = {2018},
note = {ZSCC: 0000005},
keywords = {Bayesian inference, Implementation, Probabilistic programming},
pages = {1--29}
}
@misc{murfet_dmurfet/deeplinearlogic_2018,
title = {dmurfet/deeplinearlogic},
url = {https://github.com/dmurfet/deeplinearlogic},
abstract = {Deep learning and linear logic. Contribute to dmurfet/deeplinearlogic development by creating an account on GitHub.},
urldate = {2019-11-22},
author = {Murfet, Daniel},
month = jul,
year = {2018},
note = {ZSCC: NoCitationData[s0]
original-date: 2016-11-05T09:17:10Z},
keywords = {Categorical ML, Implementation, Linear logic, Machine learning, Semantics}
}
@article{boutillier_kappa_2018,
title = {The {Kappa} platform for rule-based modeling},
volume = {34},
issn = {1367-4803},
url = {https://academic.oup.com/bioinformatics/article/34/13/i583/5045802},
doi = {10/gdrhw6},
abstract = {Motivation: We present an overview of the Kappa platform, an integrated suite of analysis and visualization techniques for building and interactively exploring rule-based models.},
language = {en},
number = {13},
urldate = {2019-11-23},
journal = {Bioinformatics},
author = {Boutillier, Pierre and Maasha, Mutaamba and Li, Xing and Medina-Abarca, Héctor F. and Krivine, Jean and Feret, Jérôme and Cristescu, Ioana and Forbes, Angus G. and Fontana, Walter},
month = jul,
year = {2018},
note = {ZSCC: 0000017},
keywords = {Biology, Implementation, Rewriting theory, Systems biology},
pages = {i583--i592}
}
@article{fages_influence_2018,
title = {Influence {Networks} {Compared} with {Reaction} {Networks}: {Semantics}, {Expressivity} and {Attractors}},
volume = {15},
issn = {1545-5963},
shorttitle = {Influence {Networks} {Compared} with {Reaction} {Networks}},
url = {https://doi.org/10.1109/TCBB.2018.2805686},
doi = {10.1109/TCBB.2018.2805686},
abstract = {Biochemical reaction networks are one of the most widely used formalisms in systems biology to describe the molecular mechanisms of high-level cell processes. However, modellers also reason with influence diagrams to represent the positive and negative influences between molecular species and may find an influence network useful in the process of building a reaction network. In this paper, we introduce a formalism of influence networks with forces, and equip it with a hierarchy of Boolean, Petri net, stochastic and differential semantics, similarly to reaction networks with rates. We show that the expressive power of influence networks is the same as that of reaction networks under the differential semantics, but weaker under the discrete semantics. Furthermore, the hierarchy of semantics leads us to consider a positive Boolean semantics that cannot test the absence of a species, that we compare with the negative Boolean semantics with test for absence of a species in gene regulatory networks à la Thomas. We study the monotonicity properties of the positive semantics and derive from them an algorithm to compute attractors in both the positive and negative Boolean semantics. We illustrate our results on models of the literature about the p53/Mdm2 DNA damage repair system, the circadian clock, and the influence of MAPK signaling on cell-fate decision in urinary bladder cancer.},
number = {4},
urldate = {2019-11-23},
journal = {IEEE/ACM Trans. Comput. Biol. Bioinformatics},
author = {Fages, François and Martinez, Thierry and Rosenblueth, David A. and Soliman, Sylvain},
month = jul,
year = {2018},
note = {ZSCC: 0000002},
keywords = {Biology, Rewriting theory, Symbolic logic, Systems biology},
pages = {1138--1151}
}
@article{battaglia_relational_2018,
title = {Relational inductive biases, deep learning, and graph networks},
url = {http://arxiv.org/abs/1806.01261},
abstract = {Artificial intelligence (AI) has undergone a renaissance recently, making major progress in key domains such as vision, language, control, and decision-making. This has been due, in part, to cheap data and cheap compute resources, which have fit the natural strengths of deep learning. However, many defining characteristics of human intelligence, which developed under much different pressures, remain out of reach for current approaches. In particular, generalizing beyond one's experiences--a hallmark of human intelligence from infancy--remains a formidable challenge for modern AI. The following is part position paper, part review, and part unification. We argue that combinatorial generalization must be a top priority for AI to achieve human-like abilities, and that structured representations and computations are key to realizing this objective. Just as biology uses nature and nurture cooperatively, we reject the false choice between "hand-engineering" and "end-to-end" learning, and instead advocate for an approach which benefits from their complementary strengths. We explore how using relational inductive biases within deep learning architectures can facilitate learning about entities, relations, and rules for composing them. We present a new building block for the AI toolkit with a strong relational inductive bias--the graph network--which generalizes and extends various approaches for neural networks that operate on graphs, and provides a straightforward interface for manipulating structured knowledge and producing structured behaviors. We discuss how graph networks can support relational reasoning and combinatorial generalization, laying the foundation for more sophisticated, interpretable, and flexible patterns of reasoning. As a companion to this paper, we have released an open-source software library for building graph networks, with demonstrations of how to use them in practice.},
urldate = {2019-10-10},
journal = {arXiv:1806.01261 [cs, stat]},
author = {Battaglia, Peter W. and Hamrick, Jessica B. and Bapst, Victor and Sanchez-Gonzalez, Alvaro and Zambaldi, Vinicius and Malinowski, Mateusz and Tacchetti, Andrea and Raposo, David and Santoro, Adam and Faulkner, Ryan and Gulcehre, Caglar and Song, Francis and Ballard, Andrew and Gilmer, Justin and Dahl, George and Vaswani, Ashish and Allen, Kelsey and Nash, Charles and Langston, Victoria and Dyer, Chris and Heess, Nicolas and Wierstra, Daan and Kohli, Pushmeet and Botvinick, Matt and Vinyals, Oriol and Li, Yujia and Pascanu, Razvan},
month = jun,
year = {2018},
note = {arXiv: 1806.01261}
}
@article{tuyeras_category_2018-1,
title = {Category {Theory} for {Genetics}},
url = {http://arxiv.org/abs/1708.05255},
abstract = {We introduce a categorical language in which it is possible to talk about DNA sequencing, alignment methods, CRISPR, homologous recombination, haplotypes, and genetic linkage. This language takes the form of a class of limit-sketches whose categories of models can model different concepts of Biology depending on what their categories of values are. We discuss examples of models in the category of sets and in the category of modules over the Boolean semi-ring \${\textbackslash}\{0,1{\textbackslash}\}\$. We identify a subclass of models in sets that models the genetic material of living beings and another subclass of models in modules that models haplotypes. We show how the two classes are related via a universal property.},
urldate = {2019-11-28},
journal = {arXiv:1708.05255 [math]},
author = {Tuyéras, Rémy},
month = may,
year = {2018},
note = {ZSCC: 0000001
arXiv: 1708.05255},
keywords = {Biology}
}
@article{brown_adversarial_2018,
title = {Adversarial {Patch}},
url = {http://arxiv.org/abs/1712.09665},
abstract = {We present a method to create universal, robust, targeted adversarial image patches in the real world. The patches are universal because they can be used to attack any scene, robust because they work under a wide variety of transformations, and targeted because they can cause a classifier to output any target class. These adversarial patches can be printed, added to any scene, photographed, and presented to image classifiers; even when the patches are small, they cause the classifiers to ignore the other items in the scene and report a chosen target class. To reproduce the results from the paper, our code is available at https://github.com/tensorflow/cleverhans/tree/master/examples/adversarial\_patch},
urldate = {2019-11-23},
journal = {arXiv:1712.09665 [cs]},
author = {Brown, Tom B. and Mané, Dandelion and Roy, Aurko and Abadi, Martín and Gilmer, Justin},
month = may,
year = {2018},
note = {ZSCC: NoCitationData[s0]
arXiv: 1712.09665},
keywords = {Adversarial attacks, Classical ML, Machine learning}
}
@misc{murfet_dmurfet/polysemantics_2018,
title = {dmurfet/polysemantics},
url = {https://github.com/dmurfet/polysemantics},
abstract = {Polynomial semantics of linear logic. Contribute to dmurfet/polysemantics development by creating an account on GitHub.},
urldate = {2019-11-22},
author = {Murfet, Daniel},
month = apr,
year = {2018},
note = {ZSCC: NoCitationData[s0]
original-date: 2016-02-23T03:29:42Z},
keywords = {Categorical ML, Implementation, Linear logic, Machine learning, Semantics}
}
@article{jacobs_logical_2018,
title = {The {Logical} {Essentials} of {Bayesian} {Reasoning}},
url = {http://arxiv.org/abs/1804.01193},
abstract = {This chapter offers an accessible introduction to the channel-based approach to Bayesian probability theory. This framework rests on algebraic and logical foundations, inspired by the methodologies of programming language semantics. It offers a uniform, structured and expressive language for describing Bayesian phenomena in terms of familiar programming concepts, like channel, predicate transformation and state transformation. The introduction also covers inference in Bayesian networks, which will be modelled by a suitable calculus of string diagrams.},
urldate = {2019-11-21},
journal = {arXiv:1804.01193 [cs]},
author = {Jacobs, Bart and Zanasi, Fabio},
month = apr,
year = {2018},
note = {ZSCC: 0000016
arXiv: 1804.01193},
keywords = {Bayesianism, Categorical probability theory}
}
@article{eykholt_robust_2018,
title = {Robust {Physical}-{World} {Attacks} on {Deep} {Learning} {Models}},
url = {http://arxiv.org/abs/1707.08945},
abstract = {Recent studies show that the state-of-the-art deep neural networks (DNNs) are vulnerable to adversarial examples, resulting from small-magnitude perturbations added to the input. Given that emerging physical systems are using DNNs in safety-critical situations, adversarial examples could mislead these systems and cause dangerous situations. Therefore, understanding adversarial examples in the physical world is an important step towards developing resilient learning algorithms. We propose a general attack algorithm, Robust Physical Perturbations (RP2), to generate robust visual adversarial perturbations under different physical conditions. Using the real-world case of road sign classification, we show that adversarial examples generated using RP2 achieve high targeted misclassification rates against standard-architecture road sign classifiers in the physical world under various environmental conditions, including viewpoints. Due to the current lack of a standardized testing method, we propose a two-stage evaluation methodology for robust physical adversarial examples consisting of lab and field tests. Using this methodology, we evaluate the efficacy of physical adversarial manipulations on real objects. With a perturbation in the form of only black and white stickers, we attack a real stop sign, causing targeted misclassification in 100\% of the images obtained in lab settings, and in 84.8\% of the captured video frames obtained on a moving vehicle (field test) for the target classifier.},
urldate = {2019-11-23},
journal = {arXiv:1707.08945 [cs]},
author = {Eykholt, Kevin and Evtimov, Ivan and Fernandes, Earlence and Li, Bo and Rahmati, Amir and Xiao, Chaowei and Prakash, Atul and Kohno, Tadayoshi and Song, Dawn},
month = apr,
year = {2018},
note = {ZSCC: 0000005
arXiv: 1707.08945},
keywords = {Adversarial attacks, Classical ML, Machine learning}
}
@article{jacobs_neural_2018,
title = {Neural {Nets} via {Forward} {State} {Transformation} and {Backward} {Loss} {Transformation}},
url = {http://arxiv.org/abs/1803.09356},
abstract = {This article studies (multilayer perceptron) neural networks with an emphasis on the transformations involved --- both forward and backward --- in order to develop a semantical/logical perspective that is in line with standard program semantics. The common two-pass neural network training algorithms make this viewpoint particularly fitting. In the forward direction, neural networks act as state transformers. In the reverse direction, however, neural networks change losses of outputs to losses of inputs, thereby acting like a (real-valued) predicate transformer. In this way, backpropagation is functorial by construction, as shown earlier in recent other work. We illustrate this perspective by training a simple instance of a neural network.},
urldate = {2019-11-21},
journal = {arXiv:1803.09356 [cs]},
author = {Jacobs, Bart and Sprunger, David},
month = mar,
year = {2018},
note = {ZSCC: 0000001
arXiv: 1803.09356},
keywords = {Categorical ML, Effectus theory, Machine learning}
}
@article{martin-maroto_algebraic_2018,
title = {Algebraic {Machine} {Learning}},
url = {http://arxiv.org/abs/1803.05252},
abstract = {Machine learning algorithms use error function minimization to fit a large set of parameters in a preexisting model. However, error minimization eventually leads to a memorization of the training dataset, losing the ability to generalize to other datasets. To achieve generalization something else is needed, for example a regularization method or stopping the training when error in a validation dataset is minimal. Here we propose a different approach to learning and generalization that is parameter-free, fully discrete and that does not use function minimization. We use the training data to find an algebraic representation with minimal size and maximal freedom, explicitly expressed as a product of irreducible components. This algebraic representation is shown to directly generalize, giving high accuracy in test data, more so the smaller the representation. We prove that the number of generalizing representations can be very large and the algebra only needs to find one. We also derive and test a relationship between compression and error rate. We give results for a simple problem solved step by step, hand-written character recognition, and the Queens Completion problem as an example of unsupervised learning. As an alternative to statistical learning, algebraic learning may offer advantages in combining bottom-up and top-down information, formal concept derivation from data and large-scale parallelization.},
urldate = {2019-10-10},
journal = {arXiv:1803.05252 [cs, math]},
author = {Martin-Maroto, Fernando and de Polavieja, Gonzalo G.},
month = mar,
year = {2018},
note = {arXiv: 1803.05252}
}
@article{baydin_automatic_2018,
title = {Automatic differentiation in machine learning: a survey},
shorttitle = {Automatic differentiation in machine learning},
url = {http://arxiv.org/abs/1502.05767},
abstract = {Derivatives, mostly in the form of gradients and Hessians, are ubiquitous in machine learning. Automatic differentiation (AD), also called algorithmic differentiation or simply "autodiff", is a family of techniques similar to but more general than backpropagation for efficiently and accurately evaluating derivatives of numeric functions expressed as computer programs. AD is a small but established field with applications in areas including computational fluid dynamics, atmospheric sciences, and engineering design optimization. Until very recently, the fields of machine learning and AD have largely been unaware of each other and, in some cases, have independently discovered each other's results. Despite its relevance, general-purpose AD has been missing from the machine learning toolbox, a situation slowly changing with its ongoing adoption under the names "dynamic computational graphs" and "differentiable programming". We survey the intersection of AD and machine learning, cover applications where AD has direct relevance, and address the main implementation techniques. By precisely defining the main differentiation techniques and their interrelationships, we aim to bring clarity to the usage of the terms "autodiff", "automatic differentiation", and "symbolic differentiation" as these are encountered more and more in machine learning settings.},
urldate = {2019-11-22},
journal = {arXiv:1502.05767 [cs, stat]},
author = {Baydin, Atilim Gunes and Pearlmutter, Barak A. and Radul, Alexey Andreyevich and Siskind, Jeffrey Mark},
month = feb,
year = {2018},
note = {ZSCC: 0000318
arXiv: 1502.05767},
keywords = {Automatic differentiation, Classical ML, Differentiation, Machine learning}
}
@misc{kammar_diffeological_2018,
title = {Diffeological {Spaces} and {Denotational} {Semantics} for {Differential} {Programming}},
language = {en},
author = {Kammar, Ohad and Staton, Sam and Vákár, Matthijs},
year = {2018},
note = {ZSCC: NoCitationData[s1]},
keywords = {Automatic differentiation, Differentiation, Programming language theory}
}
@article{jacobs_probability_2018,
title = {From probability monads to commutative effectuses},
volume = {94},
issn = {23522208},
url = {https://linkinghub.elsevier.com/retrieve/pii/S2352220816301122},
doi = {10/gct2wr},
abstract = {Eﬀectuses have recently been introduced as categorical models for quantum computation, with probabilistic and Boolean (classical) computation as special cases. These ‘probabilistic’ models are called commutative eﬀectuses, and are the focus of attention here. The paper describes the main known ‘probability’ monads: the monad of discrete probability measures, the Giry monad, the expectation monad, the probabilistic power domain monad, the Radon monad, and the Kantorovich monad. It also introduces successive properties that a monad should satisfy so that its Kleisli category is a commutative eﬀectus. The main properties are: partial additivity, strong aﬃneness, and commutativity. It is shown that the resulting commutative eﬀectus provides a categorical model of probability theory, including a logic using eﬀect modules with parallel and sequential conjunction, predicate- and state-transformers, normalisation and conditioning of states.},
language = {en},
urldate = {2019-11-28},
journal = {Journal of Logical and Algebraic Methods in Programming},
author = {Jacobs, Bart},
month = jan,
year = {2018},
note = {ZSCC: 0000028},
keywords = {Categorical probability theory, Effectus theory},
pages = {200--237}
}
@article{ehrhard_probabilistic_2018,
title = {Probabilistic call by push value},
url = {http://arxiv.org/abs/1607.04690},
doi = {10/ggdk8z},
abstract = {We introduce a probabilistic extension of Levy's Call-By-Push-Value. This extension consists simply in adding a " flipping coin " boolean closed atomic expression. This language can be understood as a major generalization of Scott's PCF encompassing both call-by-name and call-by-value and featuring recursive (possibly lazy) data types. We interpret the language in the previously introduced denotational model of probabilistic coherence spaces, a categorical model of full classical Linear Logic, interpreting data types as coalgebras for the resource comonad. We prove adequacy and full abstraction, generalizing earlier results to a much more realistic and powerful programming language.},
urldate = {2019-11-27},
journal = {arXiv:1607.04690 [cs]},
author = {Ehrhard, Thomas and Tasson, Christine},
year = {2018},
note = {ZSCC: 0000013[s0]
arXiv: 1607.04690},
keywords = {Denotational semantics, Linear logic, Probabilistic programming, Programming language theory}
}
@inproceedings{ehresmann_applications_2018,
title = {Applications of {Categories} to {Biology} and {Cognition}},
doi = {10/ggdf93},
author = {Ehresmann, Andrée C.},
year = {2018},
note = {ZSCC: NoCitationData[s0]},
keywords = {Biology, Emergence, Neuroscience}
}
@inproceedings{castellan_concurrent_2018,
address = {Oxford, United Kingdom},
title = {The concurrent game semantics of {Probabilistic} {PCF}},
isbn = {978-1-4503-5583-4},
url = {http://dl.acm.org/citation.cfm?doid=3209108.3209187},
doi = {10/ggdjfz},
abstract = {We define a new games model of Probabilistic PCF (PPCF) by enriching thin concurrent games with symmetry, recently introduced by Castellan et al, with probability. This model supports two interpretations of PPCF, one sequential and one parallel. We make the case for this model by exploiting the causal structure of probabilistic concurrent strategies. First, we show that the strategies obtained from PPCF programs have a deadlock-free interaction, and therefore deduce that there is an interpretation-preserving functor from our games to the probabilistic relational model recently proved fully abstract by Ehrhard et al. It follows that our model is intensionally fully abstract. Finally, we propose a definition of probabilistic innocence and prove a finite definability result, leading to a second (independent) proof of full abstraction.},
language = {en},
urldate = {2019-11-26},
booktitle = {Proceedings of the 33rd {Annual} {ACM}/{IEEE} {Symposium} on {Logic} in {Computer} {Science} - {LICS} '18},
publisher = {ACM Press},
author = {Castellan, Simon and Clairambault, Pierre and Paquet, Hugo and Winskel, Glynn},
year = {2018},
note = {ZSCC: 0000018},
keywords = {Denotational semantics, Game semantics, Interactive semantics, Probabilistic programming, Programming language theory},
pages = {215--224}
}
@article{scibior_denotational_2017,
title = {Denotational validation of higher-order {Bayesian} inference},
volume = {2},
issn = {24751421},
url = {http://arxiv.org/abs/1711.03219},
doi = {10.1145/3158148},
abstract = {We present a modular semantic account of Bayesian inference algorithms for probabilistic programming languages, as used in data science and machine learning. Sophisticated inference algorithms are often explained in terms of composition of smaller parts. However, neither their theoretical justification nor their implementation reflects this modularity. We show how to conceptualise and analyse such inference algorithms as manipulating intermediate representations of probabilistic programs using higher-order functions and inductive types, and their denotational semantics. Semantic accounts of continuous distributions use measurable spaces. However, our use of higher-order functions presents a substantial technical difficulty: it is impossible to define a measurable space structure over the collection of measurable functions between arbitrary measurable spaces that is compatible with standard operations on those functions, such as function application. We overcome this difficulty using quasi-Borel spaces, a recently proposed mathematical structure that supports both function spaces and continuous distributions. We define a class of semantic structures for representing probabilistic programs, and semantic validity criteria for transformations of these representations in terms of distribution preservation. We develop a collection of building blocks for composing representations. We use these building blocks to validate common inference algorithms such as Sequential Monte Carlo and Markov Chain Monte Carlo. To emphasize the connection between the semantic manipulation and its traditional measure theoretic origins, we use Kock's synthetic measure theory. We demonstrate its usefulness by proving a quasi-Borel counterpart to the Metropolis-Hastings-Green theorem.},
number = {POPL},
urldate = {2019-10-10},
journal = {Proceedings of the ACM on Programming Languages},
author = {Ścibior, Adam and Kammar, Ohad and Vákár, Matthijs and Staton, Sam and Yang, Hongseok and Cai, Yufei and Ostermann, Klaus and Moss, Sean K. and Heunen, Chris and Ghahramani, Zoubin},
month = dec,
year = {2017},
note = {arXiv: 1711.03219},
pages = {1--29}
}
@article{ehrhard_measurable_2017,
title = {Measurable {Cones} and {Stable}, {Measurable} {Functions}},
volume = {2},
issn = {24751421},
url = {http://arxiv.org/abs/1711.09640},
doi = {10/ggdjf8},
abstract = {We define a notion of stable and measurable map between cones endowed with measurability tests and show that it forms a cpo-enriched cartesian closed category. This category gives a denotational model of an extension of PCF supporting the main primitives of probabilistic functional programming, like continuous and discrete probabilistic distributions, sampling, conditioning and full recursion. We prove the soundness and adequacy of this model with respect to a call-by-name operational semantics and give some examples of its denotations.},
number = {POPL},
urldate = {2019-11-26},
journal = {Proceedings of the ACM on Programming Languages},
author = {Ehrhard, Thomas and Pagani, Michele and Tasson, Christine},
month = dec,
year = {2017},
note = {ZSCC: 0000021
arXiv: 1711.09640},
keywords = {Denotational semantics, Probabilistic programming, Programming language theory},
pages = {1--28}
}
@article{jacobs_quantum_2017,
title = {Quantum effect logic in cognition},
volume = {81},
issn = {0022-2496},
url = {http://www.sciencedirect.com/science/article/pii/S0022249617300378},
doi = {10/gcnkcj},
abstract = {This paper illustrates applications of a new, modern version of quantum logic in quantum cognition. The new logic uses ‘effects’ as predicates, instead of the more restricted interpretation of predicates as projections — which is used so far in this area. Effect logic involves states and predicates, validity and conditioning, and also state and predicate transformation via channels. The main aim of this paper is to demonstrate the usefulness of this effect logic in quantum cognition, via many high-level reformulations of standard examples. The usefulness of the logic is greatly increased by its implementation in the programming language Python.},
language = {en},
urldate = {2019-11-22},
journal = {Journal of Mathematical Psychology},
author = {Jacobs, Bart},
month = dec,
year = {2017},
note = {ZSCC: 0000002},
keywords = {Categorical probability theory, Effectus theory, Psychology},
pages = {1--10}
}
@inproceedings{mascari_symetries_2017,
title = {Symetries and asymetries of the immune system response: {A} categorification approach},
shorttitle = {Symetries and asymetries of the immune system response},
doi = {10/ggdnd3},
abstract = {A new modeling approach and conceptual framework to the immune system response and its dual role with respect to cancer is proposed based on Applied Category Theory. States of cells and pathogenes are structured as mathematical structures (categories), the interactions, at a given phase, between cells of the immune system and pathogenes, correspond to a pair of adjunctions (adjoint functors), the interaction process consisting of the sequential composition of an identification phase, a preparation phase and an activation phase is modeled by the composition of maps of adjunctions: the approach is illustrated by considering the Cancer-Immunity Cycle. A third dimension is needed to model Cancer Immunoediting. The categorical foundations of our approach is based on Marco Grandis and Rober Paré theory of Intercategories.},
booktitle = {2017 {IEEE} {International} {Conference} on {Bioinformatics} and {Biomedicine} ({BIBM})},
author = {Mascari, Jean-François and Giacchero, Damien and Sfakianakis, Nikolaos},
month = nov,
year = {2017},
note = {ZSCC: 0000000},
keywords = {Biology, Sketchy},
pages = {1451--1454}
}
@article{hess_topological_2017,
title = {Topological analysis of the connectome of digital reconstructions of neural microcircuits},
volume = {11},
issn = {1662-5188},
url = {http://arxiv.org/abs/1601.01580},
doi = {10/gdjbfn},
abstract = {A recent publication provides the network graph for a neocortical microcircuit comprising 8 million connections between 31,000 neurons (H. Markram, et al., Reconstruction and simulation of neocortical microcircuitry, Cell, 163 (2015) no. 2, 456-492). Since traditional graph-theoretical methods may not be sufficient to understand the immense complexity of such a biological network, we explored whether methods from algebraic topology could provide a new perspective on its structural and functional organization. Structural topological analysis revealed that directed graphs representing connectivity among neurons in the microcircuit deviated significantly from different varieties of randomized graph. In particular, the directed graphs contained in the order of \$10{\textasciicircum}7\$ simplices \{{\textbackslash}DH\} groups of neurons with all-to-all directed connectivity. Some of these simplices contained up to 8 neurons, making them the most extreme neuronal clustering motif ever reported. Functional topological analysis of simulated neuronal activity in the microcircuit revealed novel spatio-temporal metrics that provide an effective classification of functional responses to qualitatively different stimuli. This study represents the first algebraic topological analysis of structural connectomics and connectomics-based spatio-temporal activity in a biologically realistic neural microcircuit. The methods used in the study show promise for more general applications in network science.},
urldate = {2019-11-22},
journal = {Frontiers in Computational Neuroscience},
author = {Hess, Kathryn and Dłotko, Paweł and Levi, Ran and Nolte, Max and Reimann, Michael and Scolamiero, Martina and Turner, Katharine and Muller, Eilif and Markram, Henry},
month = jun,
year = {2017},
note = {ZSCC: NoCitationData[s0]
arXiv: 1601.01580},
keywords = {Neuroscience, Topological data analysis},
pages = {48}
}
@article{tran_deep_2017,
title = {Deep {Probabilistic} {Programming}},
url = {http://arxiv.org/abs/1701.03757},
abstract = {We propose Edward, a Turing-complete probabilistic programming language. Edward defines two compositional representations---random variables and inference. By treating inference as a first class citizen, on a par with modeling, we show that probabilistic programming can be as flexible and computationally efficient as traditional deep learning. For flexibility, Edward makes it easy to fit the same model using a variety of composable inference methods, ranging from point estimation to variational inference to MCMC. In addition, Edward can reuse the modeling representation as part of inference, facilitating the design of rich variational models and generative adversarial networks. For efficiency, Edward is integrated into TensorFlow, providing significant speedups over existing probabilistic systems. For example, we show on a benchmark logistic regression task that Edward is at least 35x faster than Stan and 6x faster than PyMC3. Further, Edward incurs no runtime overhead: it is as fast as handwritten TensorFlow.},
urldate = {2019-11-27},
journal = {arXiv:1701.03757 [cs, stat]},
author = {Tran, Dustin and Hoffman, Matthew D. and Saurous, Rif A. and Brevdo, Eugene and Murphy, Kevin and Blei, David M.},
month = mar,
year = {2017},
note = {ZSCC: 0000108
arXiv: 1701.03757},
keywords = {Bayesian inference, Implementation, Machine learning, Probabilistic programming}
}
@article{zhang_understanding_2017,
title = {Understanding deep learning requires rethinking generalization},
url = {http://arxiv.org/abs/1611.03530},
abstract = {Despite their massive size, successful deep artificial neural networks can exhibit a remarkably small difference between training and test performance. Conventional wisdom attributes small generalization error either to properties of the model family, or to the regularization techniques used during training. Through extensive systematic experiments, we show how these traditional approaches fail to explain why large neural networks generalize well in practice. Specifically, our experiments establish that state-of-the-art convolutional networks for image classification trained with stochastic gradient methods easily fit a random labeling of the training data. This phenomenon is qualitatively unaffected by explicit regularization, and occurs even if we replace the true images by completely unstructured random noise. We corroborate these experimental findings with a theoretical construction showing that simple depth two neural networks already have perfect finite sample expressivity as soon as the number of parameters exceeds the number of data points as it usually does in practice. We interpret our experimental findings by comparison with traditional models.},
urldate = {2019-11-22},
journal = {arXiv:1611.03530 [cs]},
author = {Zhang, Chiyuan and Bengio, Samy and Hardt, Moritz and Recht, Benjamin and Vinyals, Oriol},
month = feb,
year = {2017},
note = {ZSCC: NoCitationData[s0]
arXiv: 1611.03530},
keywords = {Classical ML, Machine learning}
}
@article{kurakin_adversarial_2017,
title = {Adversarial examples in the physical world},
url = {http://arxiv.org/abs/1607.02533},
abstract = {Most existing machine learning classifiers are highly vulnerable to adversarial examples. An adversarial example is a sample of input data which has been modified very slightly in a way that is intended to cause a machine learning classifier to misclassify it. In many cases, these modifications can be so subtle that a human observer does not even notice the modification at all, yet the classifier still makes a mistake. Adversarial examples pose security concerns because they could be used to perform an attack on machine learning systems, even if the adversary has no access to the underlying model. Up to now, all previous work have assumed a threat model in which the adversary can feed data directly into the machine learning classifier. This is not always the case for systems operating in the physical world, for example those which are using signals from cameras and other sensors as an input. This paper shows that even in such physical world scenarios, machine learning systems are vulnerable to adversarial examples. We demonstrate this by feeding adversarial images obtained from cell-phone camera to an ImageNet Inception classifier and measuring the classification accuracy of the system. We find that a large fraction of adversarial examples are classified incorrectly even when perceived through the camera.},
urldate = {2019-11-23},
journal = {arXiv:1607.02533 [cs, stat]},
author = {Kurakin, Alexey and Goodfellow, Ian and Bengio, Samy},
month = feb,
year = {2017},
note = {ZSCC: NoCitationData[s0]
arXiv: 1607.02533},
keywords = {Adversarial attacks, Classical ML, Machine learning}
}
@article{borgstrom_lambda-calculus_2017,
title = {A {Lambda}-{Calculus} {Foundation} for {Universal} {Probabilistic} {Programming}},
url = {http://arxiv.org/abs/1512.08990},
abstract = {We develop the operational semantics of an untyped probabilistic lambda-calculus with continuous distributions, as a foundation for universal probabilistic programming languages such as Church, Anglican, and Venture. Our first contribution is to adapt the classic operational semantics of lambda-calculus to a continuous setting via creating a measure space on terms and defining step-indexed approximations. We prove equivalence of big-step and small-step formulations of this distribution-based semantics. To move closer to inference techniques, we also define the sampling-based semantics of a term as a function from a trace of random samples to a value. We show that the distribution induced by integrating over all traces equals the distribution-based semantics. Our second contribution is to formalize the implementation technique of trace Markov chain Monte Carlo (MCMC) for our calculus and to show its correctness. A key step is defining sufficient conditions for the distribution induced by trace MCMC to converge to the distribution-based semantics. To the best of our knowledge, this is the first rigorous correctness proof for trace MCMC for a higher-order functional language.},
urldate = {2019-11-26},
journal = {arXiv:1512.08990 [cs]},
author = {Borgström, Johannes and Dal Lago, Ugo and Gordon, Andrew D. and Szymczak, Marcin},
month = jan,
year = {2017},
note = {ZSCC: NoCitationData[s1]
arXiv: 1512.08990},
keywords = {Probabilistic programming, Programming language theory}
}
@article{heunen_convenient_2017,
title = {A {Convenient} {Category} for {Higher}-{Order} {Probability} {Theory}},
url = {http://arxiv.org/abs/1701.02547},
abstract = {Higher-order probabilistic programming languages allow programmers to write sophisticated models in machine learning and statistics in a succinct and structured way, but step outside the standard measure-theoretic formalization of probability theory. Programs may use both higher-order functions and continuous distributions, or even define a probability distribution on functions. But standard probability theory does not handle higher-order functions well: the category of measurable spaces is not cartesian closed. Here we introduce quasi-Borel spaces. We show that these spaces: form a new formalization of probability theory replacing measurable spaces; form a cartesian closed category and so support higher-order functions; form a well-pointed category and so support good proof principles for equational reasoning; and support continuous probability distributions. We demonstrate the use of quasi-Borel spaces for higher-order functions and probability by: showing that a well-known construction of probability theory involving random functions gains a cleaner expression; and generalizing de Finetti's theorem, that is a crucial theorem in probability theory, to quasi-Borel spaces.},
urldate = {2019-10-10},
journal = {arXiv:1701.02547 [cs, math]},
author = {Heunen, Chris and Kammar, Ohad and Staton, Sam and Yang, Hongseok},
month = jan,
year = {2017},
note = {arXiv: 1701.02547}
}
@unpublished{clerc_pointless_2017,
title = {Pointless learning (long version)},
url = {https://hal.archives-ouvertes.fr/hal-01429663},
abstract = {Bayesian inversion is at the heart of probabilistic programming and more generally machine learning. Understanding inversion is made difficult by the pointful (kernel-centric) point of view usually taken in the literature. We develop a pointless (kernel-free) approach to inversion. While doing so, we revisit some foundational objects of probability theory, unravel their category-theoretical underpinnings and show how pointless Bayesian inversion sits naturally at the centre of this construction .},
urldate = {2019-11-24},
author = {Clerc, Florence and Danos, Vincent and Dahlqvist, Fredrik and Garnier, Ilias},
month = jan,
year = {2017},
note = {ZSCC: 0000000},
keywords = {Bayesianism, Categorical probability theory, Purely theoretical}
}
@incollection{yang_commutative_2017,
address = {Berlin, Heidelberg},
title = {Commutative {Semantics} for {Probabilistic} {Programming}},
volume = {10201},
isbn = {978-3-662-54433-4 978-3-662-54434-1},
url = {http://link.springer.com/10.1007/978-3-662-54434-1_32},
abstract = {We show that a measure-based denotational semantics for probabilistic programming is commutative. The idea underlying probabilistic programming languages (Anglican, Church, Hakaru, ...) is that programs express statistical models as a combination of prior distributions and likelihood of observations. The product of prior and likelihood is an unnormalized posterior distribution, and the inference problem is to find the normalizing constant. One common semantic perspective is thus that a probabilistic program is understood as an unnormalized posterior measure, in the sense of measure theory, and the normalizing constant is the measure of the entire semantic domain.},
language = {en},
urldate = {2019-11-23},
booktitle = {Programming {Languages} and {Systems}},
publisher = {Springer Berlin Heidelberg},
author = {Staton, Sam},
editor = {Yang, Hongseok},
year = {2017},
doi = {10.1007/978-3-662-54434-1_32},
note = {ZSCC: NoCitationData[s0] },
keywords = {Bayesianism, Probabilistic programming, Programming language theory, Semantics},
pages = {855--879}
}
@article{keimel_mixed_2017,
title = {Mixed powerdomains for probability and nondeterminism},
url = {http://arxiv.org/abs/1612.01005},
doi = {10/ggdmrp},
abstract = {We consider mixed powerdomains combining ordinary nondeterminism and probabilistic nondeterminism. We characterise them as free algebras for suitable (in)equation-al theories; we establish functional representation theorems; and we show equivalencies between state transformers and appropriately healthy predicate transformers. The extended nonnegative reals serve as `truth-values'. As usual with powerdomains, everything comes in three flavours: lower, upper, and order-convex. The powerdomains are suitable convex sets of subprobability valuations, corresponding to resolving nondeterministic choice before probabilistic choice. Algebraically this corresponds to the probabilistic choice operator distributing over the nondeterministic choice operator. (An alternative approach to combining the two forms of nondeterminism would be to resolve probabilistic choice first, arriving at a domain-theoretic version of random sets. However, as we also show, the algebraic approach then runs into difficulties.) Rather than working directly with valuations, we take a domain-theoretic functional-analytic approach, employing domain-theoretic abstract convex sets called Kegelspitzen; these are equivalent to the abstract probabilistic algebras of Graham and Jones, but are more convenient to work with. So we define power Kegelspitzen, and consider free algebras, functional representations, and predicate transformers. To do so we make use of previous work on domain-theoretic cones (d-cones), with the bridge between the two of them being provided by a free d-cone construction on Kegelspitzen.},
urldate = {2019-11-28},
journal = {arXiv:1612.01005 [cs]},
author = {Keimel, Klaus and Plotkin, Gordon D.},
year = {2017},
note = {ZSCC: 0000001
arXiv: 1612.01005},
keywords = {Denotational semantics, Powerdomains, Probabilistic programming, Programming language theory}
}
@article{jacobs_formal_2017,
title = {A {Formal} {Semantics} of {Influence} in {Bayesian} {Reasoning}},
url = {http://drops.dagstuhl.de/opus/volltexte/2017/8089/},
doi = {10/ggdgbc},
abstract = {This paper proposes a formal definition of influence in Bayesian reasoning, based on the notions of state (as probability distribution), predicate, validity and conditioning. Our approach highlights how conditioning a joint entwined/entangled state with a predicate on one of its components has ‘crossover’ influence on the other components. We use the total variation metric on probability distributions to quantitatively measure such influence. These insights are applied to give a rigorous explanation of the fundamental concept of d-separation in Bayesian networks.},
language = {en},
urldate = {2019-11-24},
journal = {Schloss Dagstuhl - Leibniz-Zentrum fuer Informatik GmbH, Wadern/Saarbruecken, Germany},
author = {Jacobs, Bart and Zanasi, Fabio},
year = {2017},
note = {ZSCC: 0000012},
keywords = {Bayesianism, Categorical probability theory, Programming language theory, Semantics}
}
@article{hess_cliques_2017,
title = {Cliques of {Neurons} {Bound} into {Cavities} {Provide} a {Missing} {Link} between {Structure} and {Function}},
volume = {11},
issn = {1662-5188},
url = {https://www.frontiersin.org/articles/10.3389/fncom.2017.00048/full},
doi = {10/gdjbfn},
abstract = {The lack of a formal link between neural network structure and its emergent function has hampered our understanding of how the brain processes information. We have now come closer to describing such a link by taking the direction of synaptic transmission into account, constructing graphs of a network that reflect the direction of information flow, and analyzing these directed graphs using algebraic topology. Applying this approach to a local network of neurons in the neocortex revealed a remarkably intricate and previously unseen topology of synaptic connectivity. The synaptic network contains an abundance of cliques of neurons bound into cavities that guide the emergence of correlated activity. In response to stimuli, correlated activity binds synaptically connected neurons into functional cliques and cavities that evolve in a stereotypical sequence towards peak complexity. We propose that the brain processes stimuli by forming increasingly complex functional cliques and cavities.},
language = {English},
urldate = {2019-11-22},
journal = {Frontiers in Computational Neuroscience},
author = {Hess, Kathryn and Reimann, Michael W. and Nolte, Max and Scolamiero, Martina and Turner, Katharine and Perin, Rodrigo and Chindemi, Giuseppe and Dłotko, Paweł and Levi, Ran and Markram, Henry},
year = {2017},
note = {ZSCC: 0000130},
keywords = {Neuroscience, Topological data analysis}
}
@article{sizemore_cliques_2016,
title = {Cliques and {Cavities} in the {Human} {Connectome}},
url = {http://arxiv.org/abs/1608.03520},
abstract = {Encoding brain regions and their connections as a network of nodes and edges captures many of the possible paths along which information can be transmitted as humans process and perform complex behaviors. Because cognitive processes involve large and distributed networks of brain areas, examinations of multi-node routes within larger connection patterns can offer fundamental insights into the complexities of brain function. Here, we investigate both densely connected groups of nodes that could perform local computations as well as larger patterns of interactions that would allow for parallel processing. Finding such structures necessitates we move from considering pairwise interactions to capturing higher order relations, concepts naturally expressed in the language of algebraic topology. These tools can be used to study mesoscale structures arising from the arrangement of densely connected substructures called cliques in otherwise sparsely connected brain networks. We detect cliques (all-to-all connected sets of brain regions) in the average structural connectomes of 8 healthy adults and discover the presence of more large cliques than expected in null networks constructed via wiring minimization, providing architecture through which brain network can perform rapid, local processing. We then locate topological cavities of different dimensions, around which information may flow in either diverging or converging patterns. These cavities exist consistently across subjects, differ from those observed in null model networks, and link regions of early and late evolutionary origin in long loops, underscoring their unique role in controlling brain function. These results offer a first demonstration that techniques from algebraic topology offer a novel perspective on structural connectomics, highlighting loop-like paths as crucial features in the human brain's structural architecture.},
urldate = {2019-11-22},
journal = {arXiv:1608.03520 [math, q-bio]},
author = {Sizemore, Ann and Giusti, Chad and Kahn, Ari and Betzel, Richard F. and Bassett, Danielle S.},
month = dec,
year = {2016},
note = {ZSCC: NoCitationData[s0]
arXiv: 1608.03520},
keywords = {Neuroscience, Topological data analysis}
}
@article{jacobs_predicate/state_2016,
series = {The {Thirty}-second {Conference} on the {Mathematical} {Foundations} of {Programming} {Semantics} ({MFPS} {XXXII})},
title = {A {Predicate}/{State} {Transformer} {Semantics} for {Bayesian} {Learning}},
volume = {325},
issn = {1571-0661},
url = {http://www.sciencedirect.com/science/article/pii/S1571066116300883},
doi = {10/ggdgbb},
abstract = {This paper establishes a link between Bayesian inference (learning) and predicate and state transformer operations from programming semantics and logic. Specifically, a very general definition of backward inference is given via first applying a predicate transformer and then conditioning. Analogously, forward inference involves first conditioning and then applying a state transformer. These definitions are illustrated in many examples in discrete and continuous probability theory and also in quantum theory.},
language = {en},
urldate = {2019-11-24},
journal = {Electronic Notes in Theoretical Computer Science},
author = {Jacobs, Bart and Zanasi, Fabio},
month = oct,
year = {2016},
note = {ZSCC: 0000030},
keywords = {Bayesianism, Categorical ML, Categorical probability theory, Effectus theory, Programming language theory, Semantics},
pages = {185--200}
}
@article{olah_attention_2016,
title = {Attention and {Augmented} {Recurrent} {Neural} {Networks}},
volume = {1},
issn = {2476-0757},
url = {http://distill.pub/2016/augmented-rnns},
doi = {10/gf33sg},
abstract = {A visual overview of neural attention, and the powerful extensions of neural networks being built on top of it.},
language = {en},
number = {9},
urldate = {2019-11-22},
journal = {Distill},
author = {Olah, Chris and Carter, Shan},
month = sep,
year = {2016},
note = {ZSCC: 0000042},
keywords = {Classical ML, Machine learning},
pages = {e1}
}
@article{serafini_logic_2016,
title = {Logic {Tensor} {Networks}: {Deep} {Learning} and {Logical} {Reasoning} from {Data} and {Knowledge}},
shorttitle = {Logic {Tensor} {Networks}},
url = {http://arxiv.org/abs/1606.04422},
abstract = {We propose Logic Tensor Networks: a uniform framework for integrating automatic learning and reasoning. A logic formalism called Real Logic is defined on a first-order language whereby formulas have truth-value in the interval [0,1] and semantics defined concretely on the domain of real numbers. Logical constants are interpreted as feature vectors of real numbers. Real Logic promotes a well-founded integration of deductive reasoning on a knowledge-base and efficient data-driven relational machine learning. We show how Real Logic can be implemented in deep Tensor Neural Networks with the use of Google's tensorflow primitives. The paper concludes with experiments applying Logic Tensor Networks on a simple but representative example of knowledge completion.},
urldate = {2019-11-24},
journal = {arXiv:1606.04422 [cs]},
author = {Serafini, Luciano and d'Avila Garcez, Artur},
month = jul,
year = {2016},
note = {ZSCC: 0000057
arXiv: 1606.04422},
keywords = {Abstract machines, Machine learning, Symbolic logic}
}
@article{ehrhard_introduction_2016,
title = {An introduction to {Differential} {Linear} {Logic}: proof-nets, models and antiderivatives},
shorttitle = {An introduction to {Differential} {Linear} {Logic}},
url = {http://arxiv.org/abs/1606.01642},
abstract = {Differential Linear Logic enriches Linear Logic with additional logical rules for the exponential connectives, dual to the usual rules of dereliction, weakening and contraction. We present a proof-net syntax for Differential Linear Logic and a categorical axiomatization of its denotational models. We also introduce a simple categorical condition on these models under which a general antiderivative operation becomes available. Last we briefly describe the model of sets and relations and give a more detailed account of the model of finiteness spaces and linear and continuous functions.},
urldate = {2019-11-28},
journal = {arXiv:1606.01642 [cs]},
author = {Ehrhard, Thomas},
month = jun,
year = {2016},
note = {ZSCC: 0000002
arXiv: 1606.01642},
keywords = {Denotational semantics, Differential Linear Logic, Differentiation, Linear logic}
}
@article{tsuchiya_using_2016,
title = {Using category theory to assess the relationship between consciousness and integrated information theory},
volume = {107},
issn = {0168-0102},
url = {http://www.sciencedirect.com/science/article/pii/S0168010215002989},
doi = {10/ggdf95},
abstract = {One of the most mysterious phenomena in science is the nature of conscious experience. Due to its subjective nature, a reductionist approach is having a hard time in addressing some fundamental questions about consciousness. These questions are squarely and quantitatively tackled by a recently developed theoretical framework, called integrated information theory (IIT) of consciousness. In particular, IIT proposes that a maximally irreducible conceptual structure (MICS) is identical to conscious experience. However, there has been no principled way to assess the claimed identity. Here, we propose to apply a mathematical formalism, category theory, to assess the proposed identity and suggest that it is important to consider if there exists a proper translation between the domain of conscious experience and that of the MICS. If such translation exists, we postulate that questions in one domain can be answered in the other domain; very difficult questions in the domain of consciousness can be resolved in the domain of mathematics. We claim that it is possible to empirically test if such a functor exists, by using a combination of neuroscientific and computational approaches. Our general, principled and empirical framework allows us to assess the relationship between the domain of consciousness and the domain of mathematical structures, including those suggested by IIT.},
language = {en},
urldate = {2019-11-22},
journal = {Neuroscience Research},
author = {Tsuchiya, Naotsugu and Taguchi, Shigeru and Saigo, Hayato},
month = jun,
year = {2016},
note = {ZSCC: 0000029},
keywords = {Psychology, Sketchy},
pages = {1--7}
}
@article{hess_quantifying_2016,
title = {Quantifying topological invariants of neuronal morphologies},
url = {http://arxiv.org/abs/1603.08432},
abstract = {Nervous systems are characterized by neurons displaying a diversity of morphological shapes. Traditionally, different shapes have been qualitatively described based on visual inspection and quantitatively described based on morphometric parameters. Neither process provides a solid foundation for categorizing the various morphologies, a problem that is important in many fields. We propose a stable topological measure as a standardized descriptor for any tree-like morphology, which encodes its skeletal branching anatomy. More specifically it is a barcode of the branching tree as determined by a spherical filtration centered at the root or neuronal soma. This Topological Morphology Descriptor (TMD) allows for the discrimination of groups of random and neuronal trees at linear computational cost.},
urldate = {2019-11-22},
journal = {arXiv:1603.08432 [cs, math, q-bio]},
author = {Hess, Kathryn and Kanari, Lida and Dłotko, Paweł and Scolamiero, Martina and Levi, Ran and Shillcock, Julian and Markram, Henry},
month = mar,
year = {2016},
note = {ZSCC: 0000005
arXiv: 1603.08432},
keywords = {Neuroscience, Topological data analysis}
}
@article{staton_semantics_2016,
title = {Semantics for probabilistic programming: higher-order functions, continuous distributions, and soft constraints},
shorttitle = {Semantics for probabilistic programming},
url = {http://arxiv.org/abs/1601.04943},
doi = {10/ggdf97},
abstract = {We study the semantic foundation of expressive probabilistic programming languages, that support higher-order functions, continuous distributions, and soft constraints (such as Anglican, Church, and Venture). We define a metalanguage (an idealised version of Anglican) for probabilistic computation with the above features, develop both operational and denotational semantics, and prove soundness, adequacy, and termination. They involve measure theory, stochastic labelled transition systems, and functor categories, but admit intuitive computational readings, one of which views sampled random variables as dynamically allocated read-only variables. We apply our semantics to validate nontrivial equations underlying the correctness of certain compiler optimisations and inference algorithms such as sequential Monte Carlo simulation. The language enables defining probability distributions on higher-order functions, and we study their properties.},
urldate = {2019-11-23},
journal = {Proceedings of the 31st Annual ACM/IEEE Symposium on Logic in Computer Science - LICS '16},
author = {Staton, Sam and Yang, Hongseok and Heunen, Chris and Kammar, Ohad and Wood, Frank},
year = {2016},
note = {ZSCC: 0000071
arXiv: 1601.04943},
keywords = {Bayesianism, Probabilistic programming, Programming language theory, Semantics},
pages = {525--534}
}
@article{cho_introduction_2015,
title = {An {Introduction} to {Effectus} {Theory}},
url = {http://arxiv.org/abs/1512.05813},
abstract = {Effectus theory is a new branch of categorical logic that aims to capture the essentials of quantum logic, with probabilistic and Boolean logic as special cases. Predicates in effectus theory are not subobjects having a Heyting algebra structure, like in topos theory, but `characteristic' functions, forming effect algebras. Such effect algebras are algebraic models of quantitative logic, in which double negation holds. Effects in quantum theory and fuzzy predicates in probability theory form examples of effect algebras. This text is an account of the basics of effectus theory. It includes the fundamental duality between states and effects, with the associated Born rule for validity of an effect (predicate) in a particular state. A basic result says that effectuses can be described equivalently in both `total' and `partial' form. So-called `commutative' and `Boolean' effectuses are distinguished, for probabilistic and classical models. It is shown how these Boolean effectuses are essentially extensive categories. A large part of the theory is devoted to the logical notions of comprehension and quotient, which are described abstractly as right adjoint to truth, and as left adjoint to falisity, respectively. It is illustrated how comprehension and quotients are closely related to measurement. The paper closes with a section on `non-commutative' effectus theory, where the appropriate formalisation is not entirely clear yet.},
urldate = {2019-11-23},
journal = {arXiv:1512.05813 [quant-ph]},
author = {Cho, Kenta and Jacobs, Bart and Westerbaan, Bas and Westerbaan, Abraham},
month = dec,
year = {2015},
note = {ZSCC: 0000039
arXiv: 1512.05813},
keywords = {Effectus theory}
}
@article{ehresmann_conciliating_2015,
series = {Integral {Biomathics}: {Life} {Sciences}, {Mathematics}, and {Phenomenological} {Philosophy}},
title = {Conciliating neuroscience and phenomenology via category theory},
volume = {119},
issn = {0079-6107},
url = {http://www.sciencedirect.com/science/article/pii/S0079610715001005},
doi = {10/f75jzr},
abstract = {The paper discusses how neural and mental processes correlate for developing cognitive abilities like memory or spatial representation and allowing the emergence of higher cognitive processes up to embodied cognition, consciousness and creativity. It is done via the presentation of MENS (for Memory Evolutive Neural System), a mathematical methodology, based on category theory, which encompasses the neural and mental systems and analyzes their dynamics in the process of ‘becoming’. Using the categorical notion of a colimit, it describes the generation of mental objects through the iterative binding of distributed synchronous assemblies of neurons, and presents a new rationale of spatial representation in the hippocampus (Gómez-Ramirez and Sanz, 2011). An important result is that the degeneracy of the neural code (Edelman, 1989) is the property allowing for the formation of mental objects and cognitive processes of increasing complexity order, with multiple neuronal realizabilities; it is essential “to explain certain empirical phenomena like productivity and systematicity of thought and thinking (Aydede 2010)”. Rather than restricting the discourse to linguistics or philosophy of mind, the formal methods used in MENS lead to precise notions of Compositionality, Productivity and Systematicity, which overcome the dichotomic debate of classicism vs. connectionism and their multiple facets. It also allows developing the naturalized phenomenology approach asked for by Varela (1996) which “seeks articulations by mutual constraints between phenomena present in experience and the correlative field of phenomena established by the cognitive sciences”, while avoiding their pitfalls.},
language = {en},
number = {3},
urldate = {2019-11-28},
journal = {Progress in Biophysics and Molecular Biology},
author = {Ehresmann, Andrée C. and Gomez-Ramirez, Jaime},
month = dec,
year = {2015},
note = {ZSCC: 0000018},
keywords = {Biology, Emergence, Neuroscience, Psychology, Sketchy},
pages = {347--359}
}
@article{jacobs_type_2015,
title = {A {Type} {Theory} for {Probabilistic} and {Bayesian} {Reasoning}},
url = {http://arxiv.org/abs/1511.09230},
abstract = {This paper introduces a novel type theory and logic for probabilistic reasoning. Its logic is quantitative, with fuzzy predicates. It includes normalisation and conditioning of states. This conditioning uses a key aspect that distinguishes our probabilistic type theory from quantum type theory, namely the bijective correspondence between predicates and side-effect free actions (called instrument, or assert, maps). The paper shows how suitable computation rules can be derived from this predicate-action correspondence, and uses these rules for calculating conditional probabilities in two well-known examples of Bayesian reasoning in (graphical) models. Our type theory may thus form the basis for a mechanisation of Bayesian inference.},
urldate = {2019-11-21},
journal = {arXiv:1511.09230 [cs, math]},
author = {Jacobs, Bart and Adams, Robin},
month = nov,
year = {2015},
note = {ZSCC: 0000013
arXiv: 1511.09230},
keywords = {Bayesianism, Effectus theory, Type theory}
}
@article{jacobs_towards_2015,
title = {Towards a {Categorical} {Account} of {Conditional} {Probability}},
volume = {195},
issn = {2075-2180},
url = {http://arxiv.org/abs/1306.0831},
doi = {10/ggdf9w},
abstract = {This paper presents a categorical account of conditional probability, covering both the classical and the quantum case. Classical conditional probabilities are expressed as a certain "triangle-fill-in" condition, connecting marginal and joint probabilities, in the Kleisli category of the distribution monad. The conditional probabilities are induced by a map together with a predicate (the condition). The latter is a predicate in the logic of effect modules on this Kleisli category. This same approach can be transferred to the category of C*-algebras (with positive unital maps), whose predicate logic is also expressed in terms of effect modules. Conditional probabilities can again be expressed via a triangle-fill-in property. In the literature, there are several proposals for what quantum conditional probability should be, and also there are extra difficulties not present in the classical case. At this stage, we only describe quantum systems with classical parametrization.},
urldate = {2019-11-21},
journal = {Electronic Proceedings in Theoretical Computer Science},
author = {Jacobs, Bart and Furber, Robert},
month = nov,
year = {2015},
note = {ZSCC: NoCitationData[s0]
arXiv: 1306.0831},
keywords = {Categorical probability theory, Effectus theory},
pages = {179--195}
}
@article{jacobs_new_2015,
title = {New {Directions} in {Categorical} {Logic}, for {Classical}, {Probabilistic} and {Quantum} {Logic}},
volume = {11},
issn = {18605974},
url = {http://arxiv.org/abs/1205.3940},
doi = {10/ggdf99},
abstract = {Intuitionistic logic, in which the double negation law not-not-P = P fails, is dominant in categorical logic, notably in topos theory. This paper follows a different direction in which double negation does hold. The algebraic notions of effect algebra/module that emerged in theoretical physics form the cornerstone. It is shown that under mild conditions on a category, its maps of the form X -{\textgreater} 1+1 carry such effect module structure, and can be used as predicates. Predicates are identified in many different situations, and capture for instance ordinary subsets, fuzzy predicates in a probabilistic setting, idempotents in a ring, and effects (positive elements below the unit) in a C*-algebra or Hilbert space. In quantum foundations the duality between states and effects plays an important role. It appears here in the form of an adjunction, where we use maps 1 -{\textgreater} X as states. For such a state s and a predicate p, the validity probability s {\textbar}= p is defined, as an abstract Born rule. It captures many forms of (Boolean or probabilistic) validity known from the literature. Measurement from quantum mechanics is formalised categorically in terms of `instruments', using L{\textbackslash}"uders rule in the quantum case. These instruments are special maps associated with predicates (more generally, with tests), which perform the act of measurement and may have a side-effect that disturbs the system under observation. This abstract description of side-effects is one of the main achievements of the current approach. It is shown that in the special case of C*-algebras, side-effect appear exclusively in the non-commutative case. Also, these instruments are used for test operators in a dynamic logic that can be used for reasoning about quantum programs/protocols. The paper describes four successive assumptions, towards a categorical axiomatisation of quantitative logic for probabilistic and quantum systems.},
number = {3},
urldate = {2019-11-23},
journal = {Logical Methods in Computer Science},
author = {Jacobs, Bart},
month = oct,
year = {2015},
note = {ZSCC: NoCitationData[s0]
arXiv: 1205.3940},
keywords = {Effectus theory},
pages = {24}
}
@article{goodfellow_explaining_2015,
title = {Explaining and {Harnessing} {Adversarial} {Examples}},
url = {http://arxiv.org/abs/1412.6572},
abstract = {Several machine learning models, including neural networks, consistently misclassify adversarial examples---inputs formed by applying small but intentionally worst-case perturbations to examples from the dataset, such that the perturbed input results in the model outputting an incorrect answer with high confidence. Early attempts at explaining this phenomenon focused on nonlinearity and overfitting. We argue instead that the primary cause of neural networks' vulnerability to adversarial perturbation is their linear nature. This explanation is supported by new quantitative results while giving the first explanation of the most intriguing fact about them: their generalization across architectures and training sets. Moreover, this view yields a simple and fast method of generating adversarial examples. Using this approach to provide examples for adversarial training, we reduce the test set error of a maxout network on the MNIST dataset.},
urldate = {2019-11-23},
journal = {arXiv:1412.6572 [cs, stat]},
author = {Goodfellow, Ian J. and Shlens, Jonathon and Szegedy, Christian},
month = mar,
year = {2015},
note = {ZSCC: 0000015
arXiv: 1412.6572},
keywords = {Adversarial attacks, Classical ML, Machine learning}
}
@article{paul_why_2015,
title = {Why does {Deep} {Learning} work? - {A} perspective from {Group} {Theory}},
shorttitle = {Why does {Deep} {Learning} work?},
url = {http://arxiv.org/abs/1412.6621},
abstract = {Why does Deep Learning work? What representations does it capture? How do higher-order representations emerge? We study these questions from the perspective of group theory, thereby opening a new approach towards a theory of Deep learning. One factor behind the recent resurgence of the subject is a key algorithmic step called pre-training: first search for a good generative model for the input samples, and repeat the process one layer at a time. We show deeper implications of this simple principle, by establishing a connection with the interplay of orbits and stabilizers of group actions. Although the neural networks themselves may not form groups, we show the existence of \{{\textbackslash}em shadow\} groups whose elements serve as close approximations. Over the shadow groups, the pre-training step, originally introduced as a mechanism to better initialize a network, becomes equivalent to a search for features with minimal orbits. Intuitively, these features are in a way the \{{\textbackslash}em simplest\}. Which explains why a deep learning network learns simple features first. Next, we show how the same principle, when repeated in the deeper layers, can capture higher order representations, and why representation complexity increases as the layers get deeper.},
urldate = {2019-11-22},
journal = {arXiv:1412.6621 [cs, stat]},
author = {Paul, Arnab and Venkatasubramanian, Suresh},
month = feb,
year = {2015},
note = {ZSCC: NoCitationData[s0]
arXiv: 1412.6621},
keywords = {Classical ML, Machine learning}
}
@inproceedings{scibior_practical_2015,
address = {New York, NY, USA},
series = {Haskell '15},
title = {Practical {Probabilistic} {Programming} with {Monads}},
isbn = {978-1-4503-3808-0},
url = {http://doi.acm.org/10.1145/2804302.2804317},
doi = {10/gft39z},
abstract = {The machine learning community has recently shown a lot of interest in practical probabilistic programming systems that target the problem of Bayesian inference. Such systems come in different forms, but they all express probabilistic models as computational processes using syntax resembling programming languages. In the functional programming community monads are known to offer a convenient and elegant abstraction for programming with probability distributions, but their use is often limited to very simple inference problems. We show that it is possible to use the monad abstraction to construct probabilistic models for machine learning, while still offering good performance of inference in challenging models. We use a GADT as an underlying representation of a probability distribution and apply Sequential Monte Carlo-based methods to achieve efficient inference. We define a formal semantics via measure theory. We demonstrate a clean and elegant implementation that achieves performance comparable with Anglican, a state-of-the-art probabilistic programming system.},
urldate = {2019-11-26},
booktitle = {Proceedings of the 2015 {ACM} {SIGPLAN} {Symposium} on {Haskell}},
publisher = {ACM},
author = {Ścibior, Adam and Ghahramani, Zoubin and Gordon, Andrew D.},
year = {2015},
note = {ZSCC: 0000048
event-place: Vancouver, BC, Canada},
keywords = {Bayesian inference, Implementation, Probabilistic programming, Programming language theory},
pages = {165--176}
}
@article{hur_provably_2015,
title = {A {Provably} {Correct} {Sampler} for {Probabilistic} {Programs}},
abstract = {We consider the problem of inferring the implicit distribution speciﬁed by a probabilistic program. A popular inference technique for probabilistic programs called Markov Chain Monte Carlo or MCMC sampling involves running the program repeatedly and generating sample values by perturbing values produced in “previous runs”. This simulates a Markov chain whose stationary distribution is the distribution speciﬁed by the probabilistic program.},
language = {en},
author = {Hur, Chung-Kil and Nori, Aditya V. and Rajamani, Sriram K.},
year = {2015},
note = {ZSCC: 0000017},
keywords = {Bayesian inference, Implementation, Probabilistic programming, Programming language theory},
pages = {21}
}
@article{ghahramani_probabilistic_2015,
title = {Probabilistic machine learning and artificial intelligence},
volume = {521},
issn = {0028-0836, 1476-4687},
url = {http://www.nature.com/articles/nature14541},
doi = {10/gdxwhq},
language = {en},
number = {7553},
urldate = {2019-11-28},
journal = {Nature},
author = {Ghahramani, Zoubin},
month = may,
year = {2015},
note = {ZSCC: 0000611},
keywords = {Bayesian inference, Classical ML, Machine learning, Probabilistic programming},
pages = {452--459}
}
@article{baez_operads_2015,
title = {Operads and {Phylogenetic} {Trees}},
abstract = {We construct an operad \${\textbackslash}mathrm\{Phyl\}\$ whose operations are the edge-labelled trees used in phylogenetics. This operad is the coproduct of \${\textbackslash}mathrm\{Com\}\$, the operad for commutative semigroups, and \$[0,{\textbackslash}infty)\$, the operad with unary operations corresponding to nonnegative real numbers, where composition is addition. We show that there is a homeomorphism between the space of \$n\$-ary operations of \${\textbackslash}mathrm\{Phyl\}\$ and \${\textbackslash}mathcal\{T\}\_n{\textbackslash}times [0,{\textbackslash}infty){\textasciicircum}\{n+1\}\$, where \${\textbackslash}mathcal\{T\}\_n\$ is the space of metric \$n\$-trees introduced by Billera, Holmes and Vogtmann. Furthermore, we show that the Markov models used to reconstruct phylogenetic trees from genome data give coalgebras of \${\textbackslash}mathrm\{Phyl\}\$. These always extend to coalgebras of the larger operad \${\textbackslash}mathrm\{Com\} + [0,{\textbackslash}infty]\$, since Markov processes on finite sets converge to an equilibrium as time approaches infinity. We show that for any operad \$O\$, its coproduct with \$[0,{\textbackslash}infty]\$ contains the operad \$W(O)\$ constructed by Boardman and Vogt. To prove these results, we explicitly describe the coproduct of operads in terms of labelled trees.},
journal = {arXiv:1512.03337 [math]},
author = {Baez, John C. and Otter, Nina},
year = {2015},
note = {ZSCC: NoCitationData[s1]
arXiv: 1512.03337},
keywords = {Biology, Coalgebras}
}
@article{graves_neural_2014,
title = {Neural {Turing} {Machines}},
url = {http://arxiv.org/abs/1410.5401},
abstract = {We extend the capabilities of neural networks by coupling them to external memory resources, which they can interact with by attentional processes. The combined system is analogous to a Turing Machine or Von Neumann architecture but is differentiable end-to-end, allowing it to be efficiently trained with gradient descent. Preliminary results demonstrate that Neural Turing Machines can infer simple algorithms such as copying, sorting, and associative recall from input and output examples.},
urldate = {2019-11-21},
journal = {arXiv:1410.5401 [cs]},
author = {Graves, Alex and Wayne, Greg and Danihelka, Ivo},
month = dec,
year = {2014},
note = {ZSCC: 0001222
arXiv: 1410.5401},
keywords = {Abstract machines, Classical ML, Machine learning}
}
@article{goodfellow_generative_2014,
title = {Generative {Adversarial} {Networks}},
url = {http://arxiv.org/abs/1406.2661},
abstract = {We propose a new framework for estimating generative models via an adversarial process, in which we simultaneously train two models: a generative model G that captures the data distribution, and a discriminative model D that estimates the probability that a sample came from the training data rather than G. The training procedure for G is to maximize the probability of D making a mistake. This framework corresponds to a minimax two-player game. In the space of arbitrary functions G and D, a unique solution exists, with G recovering the training data distribution and D equal to 1/2 everywhere. In the case where G and D are defined by multilayer perceptrons, the entire system can be trained with backpropagation. There is no need for any Markov chains or unrolled approximate inference networks during either training or generation of samples. Experiments demonstrate the potential of the framework through qualitative and quantitative evaluation of the generated samples.},
urldate = {2019-11-28},
journal = {arXiv:1406.2661 [cs, stat]},
author = {Goodfellow, Ian J. and Pouget-Abadie, Jean and Mirza, Mehdi and Xu, Bing and Warde-Farley, David and Ozair, Sherjil and Courville, Aaron and Bengio, Yoshua},
month = jun,
year = {2014},
note = {ZSCC: 0000010
arXiv: 1406.2661},
keywords = {Adversarial attacks, Classical ML, Implementation, Machine learning}
}
@inproceedings{fages_cells_2014,
address = {Cham},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Cells as {Machines}: {Towards} {Deciphering} {Biochemical} {Programs} in the {Cell}},
isbn = {978-3-319-04483-5},
shorttitle = {Cells as {Machines}},
doi = {10/ggdf96},
abstract = {Systems biology aims at understanding complex biological processes in terms of their basic mechanisms at the molecular level in cells. The bet of applying theoretical computer science concepts and software engineering methods to the analysis of distributed biochemical reaction systems in the cell, designed by natural evolution, has led to interesting challenges in computer science, and new model-based insights in biology. In this paper, we review the development over the last decade of the biochemical abstract machine (Biocham) software environment for modeling cell biology molecular reaction systems, reasoning about them at different levels of abstraction, formalizing biological behaviors in temporal logic with numerical constraints, and using them to infer non-measurable kinetic parameter values, evaluate robustness, decipher natural biochemical processes and implement new programs in synthetic biology.},
language = {en},
booktitle = {Distributed {Computing} and {Internet} {Technology}},
publisher = {Springer International Publishing},
author = {Fages, François},
editor = {Natarajan, Raja},
year = {2014},
note = {ZSCC: NoCitationData[s0]},
keywords = {Biology, Rewriting theory, Symbolic logic, Systems biology},
pages = {50--67}
}
@inproceedings{ehrhard_probabilistic_2014,
address = {San Diego, California, USA},
title = {Probabilistic coherence spaces are fully abstract for probabilistic {PCF}},
isbn = {978-1-4503-2544-8},
url = {http://dl.acm.org/citation.cfm?doid=2535838.2535865},
doi = {10/ggdf9x},
abstract = {Probabilistic coherence spaces (PCoh) yield a semantics of higherorder probabilistic computation, interpreting types as convex sets and programs as power series. We prove that the equality of interpretations in PCoh characterizes the operational indistinguishability of programs in PCF with a random primitive.},
language = {en},
urldate = {2019-11-22},
booktitle = {Proceedings of the 41st {ACM} {SIGPLAN}-{SIGACT} {Symposium} on {Principles} of {Programming} {Languages} - {POPL} '14},
publisher = {ACM Press},
author = {Ehrhard, Thomas and Tasson, Christine and Pagani, Michele},
year = {2014},
note = {ZSCC: 0000060},
keywords = {Coherence spaces, Probabilistic programming, Programming language theory, Semantics},
pages = {309--320}
}
@article{culbertson_bayesian_2013,
title = {Bayesian machine learning via category theory},
url = {http://arxiv.org/abs/1312.1445},
abstract = {From the Bayesian perspective, the category of conditional probabilities (a variant of the Kleisli category of the Giry monad, whose objects are measurable spaces and arrows are Markov kernels) gives a nice framework for conceptualization and analysis of many aspects of machine learning. Using categorical methods, we construct models for parametric and nonparametric Bayesian reasoning on function spaces, thus providing a basis for the supervised learning problem. In particular, stochastic processes are arrows to these function spaces which serve as prior probabilities. The resulting inference maps can often be analytically constructed in this symmetric monoidal weakly closed category. We also show how to view general stochastic processes using functor categories and demonstrate the Kalman filter as an archetype for the hidden Markov model.},
urldate = {2019-11-22},
journal = {arXiv:1312.1445 [math]},
author = {Culbertson, Jared and Sturtz, Kirk},
month = dec,
year = {2013},
note = {ZSCC: 0000006
arXiv: 1312.1445},
keywords = {Bayesianism, Categorical ML, Categorical probability theory, Purely theoretical}
}
@article{fong_causal_2013,
title = {Causal {Theories}: {A} {Categorical} {Perspective} on {Bayesian} {Networks}},
shorttitle = {Causal {Theories}},
url = {http://arxiv.org/abs/1301.6201},
abstract = {In this dissertation we develop a new formal graphical framework for causal reasoning. Starting with a review of monoidal categories and their associated graphical languages, we then revisit probability theory from a categorical perspective and introduce Bayesian networks, an existing structure for describing causal relationships. Motivated by these, we propose a new algebraic structure, which we term a causal theory. These take the form of a symmetric monoidal category, with the objects representing variables and morphisms ways of deducing information about one variable from another. A major advantage of reasoning with these structures is that the resulting graphical representations of morphisms match well with intuitions for flows of information between these variables. These categories can then be modelled in other categories, providing concrete interpretations for the variables and morphisms. In particular, we shall see that models in the category of measurable spaces and stochastic maps provide a slight generalisation of Bayesian networks, and naturally form a category themselves. We conclude with a discussion of this category, classifying the morphisms and discussing some basic universal constructions. ERRATA: (i) Pages 41-42: Objects of a causal theory are words, not collections, in \$V\$, and we include swaps as generating morphisms, subject to the identities defining a symmetric monoidal category. (ii) Page 46: A causal model is a strong symmetric monoidal functor.},
urldate = {2019-10-10},
journal = {arXiv:1301.6201 [math]},
author = {Fong, Brendan},
month = jan,
year = {2013},
note = {arXiv: 1301.6201}
}
@misc{poggio_tomaso_2013,
title = {Tomaso {A}. {Poggio} autobiography},
url = {http://poggio-lab.mit.edu/sites/default/files/cv/tomasopoggio.pdf},
author = {Poggio, Tomaso},
year = {2013},
note = {ZSCC: NoCitationData[s0]},
keywords = {Classical ML, Compendium, Machine learning},
pages = {54}
}
@inproceedings{izbicki_algebraic_2013,
title = {Algebraic classifiers: a generic approach to fast cross-validation, online training, and parallel training},
shorttitle = {Algebraic classifiers},
abstract = {We use abstract algebra to derive new algorithms for fast cross-validation, online learning, and parallel learning. To use these algorithms on a classification model, we must show that the model has appropriate algebraic structure. It is easy to give algebraic structure to some models, and we do this explicitly for Bayesian classifiers and a novel variation of decision stumps called HomStumps. But not all classifiers have an obvious structure, so we introduce the Free HomTrainer. This can be used to give a "generic" algebraic structure to any classifier. We use the Free HomTrainer to give algebraic structure to bagging and boosting. In so doing, we derive novel online and parallel algorithms, and present the first fast cross-validation schemes for these classifiers.},
booktitle = {{ICML}},
author = {Izbicki, Michael},
year = {2013},
note = {ZSCC: 0000013},
keywords = {Algebra, Categorical ML, Machine learning}
}
@inproceedings{andreatta_towards_2013,
address = {Berlin, Heidelberg},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Towards a {Categorical} {Theory} of {Creativity} for {Music}, {Discourse}, and {Cognition}},
isbn = {978-3-642-39357-0},
doi = {10/ggdndz},
abstract = {This article presents a first attempt at establishing a category-theoretical model of creative processes. The model, which is applied to musical creativity, discourse theory, and cognition, suggests the relevance of the notion of “colimit” as a unifying construction in the three domains as well as the central role played by the Yoneda Lemma in the categorical formalization of creative processes.},
language = {en},
booktitle = {Mathematics and {Computation} in {Music}},
publisher = {Springer},
author = {Andreatta, Moreno and Ehresmann, Andrée and Guitart, René and Mazzola, Guerino},
editor = {Yust, Jason and Wild, Jonathan and Burgoyne, John Ashley},
year = {2013},
note = {ZSCC: NoCitationData[s1]},
keywords = {Emergence, Neuroscience, Psychology, Sketchy},
pages = {19--37}
}
@article{manzyuk_simply_2012,
series = {Proceedings of the 28th {Conference} on the {Mathematical} {Foundations} of {Programming} {Semantics} ({MFPS} {XXVIII})},
title = {A {Simply} {Typed} λ-{Calculus} of {Forward} {Automatic} {Differentiation}},
volume = {286},
issn = {1571-0661},
url = {http://www.sciencedirect.com/science/article/pii/S1571066112000473},
doi = {10/ggdm57},
abstract = {We present an extension of the simply typed λ-calculus with pushforward operators. This extension is motivated by the desire to incorporate forward automatic differentiation, which is an important technique in numeric computing, into functional programming. Our calculus is similar to Ehrhard and Regnierʼs differential λ-calculus, but is based on the differential geometric idea of pushforward rather than derivative. We prove that, like the differential λ-calculus, our calculus can be soundly interpreted in differential λ-categories.},
language = {en},
urldate = {2019-11-28},
journal = {Electronic Notes in Theoretical Computer Science},
author = {Manzyuk, Oleksandr},
month = sep,
year = {2012},
note = {ZSCC: 0000013},
keywords = {Automatic differentiation, Differentiation, Programming language theory},
pages = {257--272}
}
@article{ehresmann_mens_2012,
title = {{MENS}, an {Info}-{Computational} {Model} for ({Neuro}-)cognitive {Systems} {Capable} of {Creativity}},
volume = {14},
doi = {10/ggdf9t},
abstract = {MENS is a bio-inspired model for higher level cognitive systems; it is an application of the Memory Evolutive Systems developed with Vanbremeersch to model complex multi-scale, multi-agent self-organized systems, such as biological or social systems. Its development resorts to an info-computationalism: first we characterize the properties of the human brain/mind at the origin of higher order cognitive processes up to consciousness and creativity, then we ‘abstract’ them in a MENS mathematical model for natural or artificial cognitive systems. The model, based on a ‘dynamic’ Category Theory incorporating Time, emphasizes the computability problems which are raised.},
journal = {Entropy},
author = {Ehresmann, Andrée C.},
year = {2012},
note = {ZSCC: 0000017},
keywords = {Emergence, Neuroscience},
pages = {1703--1716}
}
@article{sokolova_probabilistic_2011,
title = {Probabilistic systems coalgebraically: {A} survey},
volume = {412},
issn = {0304-3975},
shorttitle = {Probabilistic systems coalgebraically},
url = {https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3185909/},
doi = {10/frbx24},
abstract = {We survey the work on both discrete and continuous-space probabilistic systems as coalgebras, starting with how probabilistic systems are modeled as coalgebras and followed by a discussion of their bisimilarity and behavioral equivalence, mentioning results that follow from the coalgebraic treatment of probabilistic systems. It is interesting to note that, for different reasons, for both discrete and continuous probabilistic systems it may be more convenient to work with behavioral equivalence than with bisimilarity.},
number = {38},
urldate = {2019-11-26},
journal = {Theoretical Computer Science},
author = {Sokolova, Ana},
month = sep,
year = {2011},
pmid = {21998490},
pmcid = {PMC3185909},
note = {ZSCC: 0000057 },
keywords = {Coalgebras, Probabilistic transition systems, Transition systems},
pages = {5095--5110}
}
@article{ehrhard_probabilistic_2011,
title = {Probabilistic coherence spaces as a model of higher-order probabilistic computation},
volume = {209},
issn = {0890-5401},
url = {http://www.sciencedirect.com/science/article/pii/S0890540111000411},
doi = {10/ctfch6},
abstract = {We study a probabilistic version of coherence spaces and show that these objects provide a model of linear logic. We build a model of the pure lambda-calculus in this setting and show how to interpret a probabilistic version of the functional language PCF. We give a probabilistic interpretation of the semantics of probabilistic PCF closed terms of ground type. Last we suggest a generalization of this approach, using Banach spaces.},
language = {en},
number = {6},
urldate = {2019-11-22},
journal = {Information and Computation},
author = {Ehrhard, Thomas and Danos, Vincent},
month = jun,
year = {2011},
note = {ZSCC: 0000065},
keywords = {Coherence spaces, Probabilistic programming, Programming language theory, Semantics},
pages = {966--991}
}
@inproceedings{ehrhard_computational_2011,
address = {Toronto, ON, Canada},
title = {The {Computational} {Meaning} of {Probabilistic} {Coherence} {Spaces}},
isbn = {978-1-4577-0451-2},
url = {http://ieeexplore.ieee.org/document/5970206/},
doi = {10/cpv52n},
abstract = {We study the probabilistic coherent spaces — a denotational semantics interpreting programs by power series with non negative real coefﬁcients. We prove that this semantics is adequate for a probabilistic extension of the untyped λ-calculus: the probability that a term reduces to a head normal form is equal to its denotation computed on a suitable set of values. The result gives, in a probabilistic setting, a quantitative reﬁnement to the adequacy of Scott’s model for untyped λ-calculus.},
language = {en},
urldate = {2019-11-26},
booktitle = {2011 {IEEE} 26th {Annual} {Symposium} on {Logic} in {Computer} {Science}},
publisher = {IEEE},
author = {Ehrhard, Thomas and Pagani, Michele and Tasson, Christine},
month = jun,
year = {2011},
note = {ZSCC: 0000036},
keywords = {Coherence spaces, Denotational semantics, Probabilistic programming, Programming language theory},
pages = {87--96}
}
@article{blute_convenient_2010,
title = {A convenient differential category},
url = {http://arxiv.org/abs/1006.3140},
abstract = {In this paper, we show that the category of Mackey-complete, separated, topological convex bornological vector spaces and bornological linear maps is a differential category. Such spaces were introduced by Fr{\"o}licher and Kriegl, where they were called convenient vector spaces. While much of the structure necessary to demonstrate this observation is already contained in Fr{\"o}licher and Kriegl's book, we here give a new interpretation of the category of convenient vector spaces as a model of the differential linear logic of Ehrhard and Regnier. Rather than base our proof on the abstract categorical structure presented by Fr{\"o}licher and Kriegl, we prefer to focus on the bornological structure of convenient vector spaces. We believe bornological structures will ultimately yield a wide variety of models of differential logics.},
urldate = {2019-11-28},
journal = {arXiv:1006.3140 [cs, math]},
author = {Blute, Richard and Ehrhard, Thomas and Tasson, Christine},
month = jun,
year = {2010},
note = {ZSCC: 0000054
arXiv: 1006.3140},
keywords = {Differential Linear Logic, Differentiation, Linear logic}
}
@book{watanabe_algebraic_2009,
title = {Algebraic {Geometry} and {Statistical} {Learning} {Theory}},
url = {https://www.cambridge.org/core/books/algebraic-geometry-and-statistical-learning-theory/9C8FD1BDC817E2FC79117C7F41544A3A},
abstract = {Cambridge Core - Pattern Recognition and Machine Learning - Algebraic Geometry and Statistical Learning Theory - by Sumio Watanabe},
language = {en},
urldate = {2019-11-22},
publisher = {Cambridge University Press},
author = {Watanabe, Sumio},
month = aug,
year = {2009},
doi = {10.1017/CBO9780511800474},
note = {ZSCC: 0000276 },
keywords = {Algebra, Bayesianism, Purely theoretical, Statistical learning theory}
}
@article{keimel_predicate_2009,
title = {Predicate {Transformers} for {Extended} {Probability} and {Non}-determinism},
volume = {19},
issn = {0960-1295},
url = {http://dx.doi.org/10.1017/S0960129509007555},
doi = {10/bkvgqc},
abstract = {We investigate laws for predicate transformers for the combination of non-deterministic choice and (extended) probabilistic choice, where predicates are taken to be functions to the extended non-negative reals, or to closed intervals of such reals. These predicate transformers correspond to state transformers, which are functions to conical powerdomains, which are the appropriate powerdomains for the combined forms of non-determinism. As with standard powerdomains for non-deterministic choice, these come in three flavours – lower, upper and (order-)convex – so there are also three kinds of predicate transformers. In order to make the connection, the powerdomains are first characterised in terms of relevant classes of functionals. Much of the development is carried out at an abstract level, a kind of domain-theoretic functional analysis: one considers d-cones, which are dcpos equipped with a module structure over the non-negative extended reals, in place of topological vector spaces. Such a development still needs to be carried out for probabilistic choice per se; it would presumably be necessary to work with a notion of convex space rather than a cone.},
number = {3},
urldate = {2019-11-26},
journal = {Mathematical Structures in Computer Science},
author = {Keimel, Klaus and Plotkin, Gordon D.},
month = jun,
year = {2009},
note = {ZSCC: 0000015},
keywords = {Denotational semantics, Powerdomains, Programming language theory},
pages = {501--539}
}
@article{tix_semantic_2009,
title = {Semantic {Domains} for {Combining} {Probability} and {Non}-{Determinism}},
volume = {222},
issn = {1571-0661},
url = {https://linkinghub.elsevier.com/retrieve/pii/S1571066109000036},
doi = {10/d9hwq7},
abstract = {We present domain-theoretic models that support both probabilistic and nondeterministic choice. In [36], Morgan and McIver developed an ad hoc semantics for a simple imperative language with both probabilistic and nondeterministic choice operators over a discrete state space, using domain-theoretic tools. We present a model also using domain theory in the sense of D.S. Scott (see e.g. [15]), but built over quite general continuous domains instead of discrete state spaces.},
language = {en},
urldate = {2019-11-26},
journal = {Electronic Notes in Theoretical Computer Science},
author = {Tix, Regina and Keimel, Klaus and Plotkin, Gordon},
month = feb,
year = {2009},
note = {ZSCC: 0000055},
keywords = {Denotational semantics, Powerdomains, Probabilistic programming, Programming language theory},
pages = {3--99}
}
@book{panangaden_labelled_2009,
address = {London, UK},
title = {Labelled {Markov} {Processes}},
isbn = {978-1-84816-287-7},
abstract = {Labelled Markov processes are probabilistic versions of labelled transition systems with continuous state spaces. This book covers basic probability and measure theory on continuous state spaces and then develops the theory of LMPs. The main topics covered are bisimulation, the logical characterization of bisimulation, metrics and approximation theory. An unusual feature of the book is the connection made with categorical and domain theoretic concepts.},
publisher = {Imperial College Press},
author = {Panangaden, Prakash},
year = {2009},
note = {ZSCC: 0000184},
keywords = {Probabilistic transition systems, Transition systems}
}
@inproceedings{gomez_modeling_2009,
title = {Modeling cognitive systems with {Category} {Theory}: {Towards} rigor in cognitive sciences},
abstract = {Traditionally, mathematical formalisms in cognitive sciences have been confined to toy model world descriptions. (5)},
author = {Gómez, Jaime},
year = {2009}
}
@article{brown_category_2008,
title = {Category {Theory} and {Higher} {Dimensional} {Algebra}: potential descriptive tools in neuroscience},
shorttitle = {Category {Theory} and {Higher} {Dimensional} {Algebra}},
url = {http://arxiv.org/abs/math/0306223},
abstract = {We explain the notion of colimit in category theory as a potential tool for describing structures and their communication, and the notion of higher dimensional algebra as potential yoga for dealing with processes and processes of processes.},
language = {en},
urldate = {2019-11-22},
journal = {arXiv:math/0306223},
author = {Brown, R. and Porter, T.},
month = feb,
year = {2008},
note = {ZSCC: NoCitationData[s0]
arXiv: math/0306223},
keywords = {Emergence, Neuroscience, Rewriting theory, Sketchy}
}
@article{fiore_cartesian_2008,
title = {The cartesian closed bicategory of generalised species of structures},
volume = {77},
issn = {0024-6107},
url = {http://doi.wiley.com/10.1112/jlms/jdm096},
doi = {10/bd2mr9},
abstract = {The concept of generalised species of structures between small categories and, correspondingly, that of generalised analytic functor between presheaf categories are introduced. An operation of substitution for generalised species, which is the counterpart to the composition of generalised analytic functors, is also put forward. These definitions encompass most notions of combinatorial species considered in the literature—including of course Joyal’s original notion—together with their associated substitution operation. Our first main result exhibits the substitution calculus of generalised species as arising from a Kleisli bicategory for a pseudo-comonad on profunctors. Our second main result establishes that the bicategory of generalised species of structures is cartesian closed.},
language = {en},
number = {1},
urldate = {2019-11-28},
journal = {Journal of the London Mathematical Society},
author = {Fiore, M. and Gambino, N. and Hyland, M. and Winskel, G.},
month = feb,
year = {2008},
note = {ZSCC: 0000067},
keywords = {Denotational semantics, Differential Linear Logic, Differentiation, Linear logic},
pages = {203--220}
}
@incollection{horimoto_neural_2008,
address = {Berlin, Heidelberg},
title = {Neural {Algebra} and {Consciousness}: {A} {Theory} of {Structural} {Functionality} in {Neural} {Nets}},
volume = {5147},
isbn = {978-3-540-85100-4 978-3-540-85101-1},
shorttitle = {Neural {Algebra} and {Consciousness}},
url = {http://link.springer.com/10.1007/978-3-540-85101-1_8},
abstract = {Thoughts are spatio-temporal patterns of coalitions of firing neurons and their interconnections. Neural algebras represent these patterns as formal algebraic objects, and a suitable composition operation reflects their interaction. Thus, a neural algebra is associated with any neural net. The present paper presents this formalization and develops the basic algebraic tools for formulating and solving the problem of finding the neural correlates of concepts such as reflection, association, coordination, etc. The main application is to the notion of consciousness, whose structural and functional basis is made explicit as the emergence of a set of solutions to a fixpoint equation.},
language = {en},
urldate = {2019-11-22},
booktitle = {Algebraic {Biology}},
publisher = {Springer Berlin Heidelberg},
author = {Engeler, Erwin},
editor = {Horimoto, Katsuhisa and Regensburger, Georg and Rosenkranz, Markus and Yoshida, Hiroshi},
year = {2008},
doi = {10.1007/978-3-540-85101-1_8},
note = {ZSCC: NoCitationData[s0] },
keywords = {Emergence, Neuroscience, Sketchy},
pages = {96--109}
}
@inproceedings{danos_rule-based_2008,
address = {Berlin, Heidelberg},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Rule-{Based} {Modelling}, {Symmetries}, {Refinements}},
isbn = {978-3-540-68413-8},
doi = {10/dc5k68},
abstract = {Rule-based modelling is particularly effective for handling the highly combinatorial aspects of cellular signalling. The dynamics is described in terms of interactions between partial complexes, and the ability to write rules with such partial complexes -i.e., not to have to specify all the traits of the entities partaking in a reaction but just those that matter- is the key to obtaining compact descriptions of what otherwise could be nearly infinite dimensional dynamical systems. This also makes these descriptions easier to read, write and modify. In the course of modelling a particular signalling system it will often happen that more traits matter in a given interaction than previously thought, and one will need to strengthen the conditions under which that interaction may happen. This is a process that we call rule refinement and which we set out in this paper to study. Specifically we present a method to refine rule sets in a way that preserves the implied stochastic semantics. This stochastic semantics is dictated by the number of different ways in which a given rule can be applied to a system (obeying the mass action principle). The refinement formula we obtain explains how to refine rules and which choice of refined rates will lead to a neutral refinement, i.e., one that has the same global activity as the original rule had (and therefore leaves the dynamics unchanged). It has a pleasing mathematical simplicity, and is reusable with little modification across many variants of stochastic graph rewriting. A particular case of the above is the derivation of a maximal refinement which is equivalent to a (possibly infinite) Petri net and can be useful to get a quick approximation of the dynamics and to calibrate models. As we show with examples, refinement is also useful to understand how different subpopulations contribute to the activity of a rule, and to modulate differentially their impact on that activity.},
language = {en},
booktitle = {Formal {Methods} in {Systems} {Biology}},
publisher = {Springer},
author = {Danos, Vincent and Feret, Jérôme and Fontana, Walter and Harmer, Russell and Krivine, Jean},
editor = {Fisher, Jasmin},
year = {2008},
note = {ZSCC: NoCitationData[s0]},
keywords = {Biology, Rewriting theory, Systems biology},
pages = {103--122}
}
@inproceedings{fiore_differential_2007,
address = {Berlin, Heidelberg},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Differential {Structure} in {Models} of {Multiplicative} {Biadditive} {Intuitionistic} {Linear} {Logic}},
isbn = {978-3-540-73228-0},
doi = {10/c8vgx8},
abstract = {In the first part of the paper I investigate categorical models of multiplicative biadditive intuitionistic linear logic, and note that in them some surprising coherence laws arise. The thesis for the second part of the paper is that these models provide the right framework for investigating differential structure in the context of linear logic. Consequently, within this setting, I introduce a notion of creation operator (as considered by physicists for bosonic Fock space in the context of quantum field theory), provide an equivalent description of creation operators in terms of creation maps, and show that they induce a differential operator satisfying all the basic laws of differentiation (the product and chain rules, the commutation relations, etc.).},
language = {en},
booktitle = {Typed {Lambda} {Calculi} and {Applications}},
publisher = {Springer},
author = {Fiore, Marcelo P.},
editor = {Ronchi Della Rocca, Simona},
year = {2007},
note = {ZSCC: NoCitationData[s1]},
keywords = {Differential Linear Logic, Differentiation, Linear logic},
pages = {163--177}
}
@article{ehrhard_differential_2006,
series = {Logic, {Language}, {Information} and {Computation}},
title = {Differential interaction nets},
volume = {364},
issn = {0304-3975},
url = {http://www.sciencedirect.com/science/article/pii/S0304397506005299},
doi = {10/bg5g4b},
abstract = {We introduce interaction nets for a fragment of the differential lambda-calculus and exhibit in this framework a new symmetry between the of course and the why not modalities of linear logic, which is completely similar to the symmetry between the tensor and par connectives of linear logic. We use algebraic intuitions for introducing these nets and their reduction rules, and then we develop two correctness criteria (weak typability and acyclicity) and show that they guarantee strong normalization. Finally, we outline the correspondence between this interaction nets formalism and the resource lambda-calculus.},
language = {en},
number = {2},
urldate = {2019-11-28},
journal = {Theoretical Computer Science},
author = {Ehrhard, Thomas and Regnier, Laurent},
month = nov,
year = {2006},
note = {ZSCC: 0000146},
keywords = {Differential Linear Logic, Differentiation, Linear logic},
pages = {166--195}
}
@article{fages_biocham:_2006,
title = {{BIOCHAM}: an environment for modeling biological systems and formalizing experimental knowledge},
volume = {22},
issn = {1367-4803, 1460-2059},
shorttitle = {{BIOCHAM}},
url = {https://academic.oup.com/bioinformatics/article-lookup/doi/10.1093/bioinformatics/btl172},
doi = {10.1093/bioinformatics/btl172},
language = {en},
number = {14},
urldate = {2019-11-23},
journal = {Bioinformatics},
author = {Fages, François and Calzone, Laurence and Soliman, Sylvain},
month = jul,
year = {2006},
note = {ZSCC: 0000264},
keywords = {Abstract machines, Biology, Implementation, Rewriting theory, Symbolic logic, Systems biology},
pages = {1805--1807}
}
@book{wilkinson_stochastic_2006,
title = {Stochastic {Modelling} for {Systems} {Biology}},
isbn = {978-1-58488-540-5},
abstract = {Although stochastic kinetic models are increasingly accepted as the best way to represent and simulate genetic and biochemical networks, most researchers in the field have limited knowledge of stochastic process theory. The stochastic processes formalism provides a beautiful, elegant, and coherent foundation for chemical kinetics and there is a wealth of associated theory every bit as powerful and elegant as that for conventional continuous deterministic models. The time is right for an introductory text written from this perspective. Stochastic Modelling for Systems Biology presents an accessible introduction to stochastic modelling using examples that are familiar to systems biology researchers. Focusing on computer simulation, the author examines the use of stochastic processes for modelling biological systems. He provides a comprehensive understanding of stochastic kinetic modelling of biological networks in the systems biology context. The text covers the latest simulation techniques and research material, such as parameter inference, and includes many examples and figures as well as software code in R for various applications.While emphasizing the necessary probabilistic and stochastic methods, the author takes a practical approach, rooting his theoretical development in discussions of the intended application. Written with self-study in mind, the book includes technical chapters that deal with the difficult problems of inference for stochastic kinetic models from experimental data. Providing enough background information to make the subject accessible to the non-specialist, the book integrates a fairly diverse literature into a single convenient and notationally consistent source.},
language = {en},
publisher = {CRC Press},
author = {Wilkinson, Darren J.},
month = apr,
year = {2006},
note = {ZSCC: NoCitationData[s1]
Google-Books-ID: roHTk4m8JGAC},
keywords = {Bayesian inference, Biology, Implementation, Probabilistic programming, Rewriting theory, Systems biology, Transition systems}
}
@article{varacca_distributing_2006,
title = {Distributing probability over non-determinism},
volume = {16},
issn = {0960-1295, 1469-8072},
url = {http://www.journals.cambridge.org/abstract_S0960129505005074},
doi = {10/czs9sx},
language = {en},
number = {1},
urldate = {2019-11-26},
journal = {Mathematical Structures in Computer Science},
author = {Varacca, Daniele and Winskel, Glynn},
month = feb,
year = {2006},
note = {ZSCC: 0000108},
keywords = {Categorical probability theory, Denotational semantics, Programming language theory},
pages = {87--113}
}
@inproceedings{fages_machine_2006,
address = {Berlin, Heidelberg},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Machine {Learning} {Biochemical} {Networks} from {Temporal} {Logic} {Properties}},
isbn = {978-3-540-46236-1},
doi = {10/dd8},
abstract = {One central issue in systems biology is the definition of formal languages for describing complex biochemical systems and their behavior at different levels. The biochemical abstract machine BIOCHAM is based on two formal languages, one rule-based language used for modeling biochemical networks, at three abstraction levels corresponding to three semantics: boolean, concentration and population; and one temporal logic language used for formalizing the biological properties of the system. In this paper, we show how the temporal logic language can be turned into a specification language. We describe two algorithms for inferring reaction rules and kinetic parameter values from a temporal specification formalizing the biological data. Then, with an example of the cell cycle control, we illustrate how these machine learning techniques may be useful to the modeler.},
language = {en},
booktitle = {Transactions on {Computational} {Systems} {Biology} {VI}},
publisher = {Springer},
author = {Fages, François and Calzone, Laurence and Chabrier-Rivier, Nathalie and Soliman, Sylvain},
editor = {Priami, Corrado and Plotkin, Gordon},
year = {2006},
note = {ZSCC: NoCitationData[s0]},
keywords = {Abstract machines, Biology, Classical ML, Machine learning, Symbolic logic, Systems biology},
pages = {68--94}
}
@article{van_breugel_domain_2005,
series = {Foundations of {Software} {Science} and {Computation} {Structures}},
title = {Domain theory, testing and simulation for labelled {Markov} processes},
volume = {333},
issn = {0304-3975},
url = {http://www.sciencedirect.com/science/article/pii/S030439750400711X},
doi = {10/ft9vc5},
abstract = {This paper presents a fundamental study of similarity and bisimilarity for labelled Markov processes (LMPs). The main results characterize similarity as a testing preorder and bisimilarity as a testing equivalence. In general, LMPs are not required to satisfy a finite-branching condition—indeed the state space may be a continuum, with the transitions given by arbitrary probability measures. Nevertheless we show that to characterize bisimilarity it suffices to use finitely-branching labelled trees as tests. Our results involve an interaction between domain theory and measure theory. One of the main technical contributions is to show that a final object in a suitable category of LMPs can be constructed by solving a domain equation D≅V(D)Act, where V is the probabilistic powerdomain. Given an LMP whose state space is an analytic space, bisimilarity arises as the kernel of the unique map to the final LMP. We also show that the metric for approximate bisimilarity introduced by Desharnais, Gupta, Jagadeesan and Panangaden generates the Lawson topology on the domain D.},
language = {en},
number = {1},
urldate = {2019-11-26},
journal = {Theoretical Computer Science},
author = {van Breugel, Franck and Mislove, Michael and Ouaknine, Joël and Worrell, James},
month = mar,
year = {2005},
note = {ZSCC: 0000058},
keywords = {Coalgebras, Denotational semantics, Probabilistic transition systems, Transition systems},
pages = {171--197}
}
@article{lack_adhesive_2005,
title = {Adhesive and quasiadhesive categories},
volume = {39},
url = {http://www.numdam.org/item/ITA_2005__39_3_511_0/},
doi = {10/fntwvv},
language = {en},
number = {3},
urldate = {2019-11-22},
journal = {RAIRO - Theoretical Informatics and Applications - Informatique Théorique et Applications},
author = {Lack, Stephen and Sobociński, Paweł},
year = {2005},
note = {ZSCC: 0000185},
keywords = {Purely theoretical, Rewriting theory},
pages = {511--545}
}
@incollection{goos_probabilistic_2004,
address = {Berlin, Heidelberg},
title = {Probabilistic {Automata}: {System} {Types}, {Parallel} {Composition} and {Comparison}},
volume = {2925},
isbn = {978-3-540-22265-1 978-3-540-24611-4},
shorttitle = {Probabilistic {Automata}},
url = {http://link.springer.com/10.1007/978-3-540-24611-4_1},
abstract = {We survey various notions of probabilistic automata and probabilistic bisimulation, accumulating in an expressiveness hierarchy of probabilistic system types. The aim of this paper is twofold: On the one hand it provides an overview of existing types of probabilistic systems and, on the other hand, it explains the relationship between these models. We overview probabilistic systems with discrete probabilities only. The expressiveness order used to build the hierarchy is defined via the existence of mappings between the corresponding system types that preserve and reflect bisimilarity. Additionally, we discuss parallel composition for the presented types of systems, augmenting the map of probabilistic automata with closedness under this compositional operator.},
language = {en},
urldate = {2019-11-28},
booktitle = {Validation of {Stochastic} {Systems}},
publisher = {Springer Berlin Heidelberg},
author = {Sokolova, Ana and de Vink, Erik P.},
editor = {Goos, Gerhard and Hartmanis, Juris and van Leeuwen, Jan and Baier, Christel and Haverkort, Boudewijn R. and Hermanns, Holger and Katoen, Joost-Pieter and Siegle, Markus},
year = {2004},
doi = {10.1007/978-3-540-24611-4_1},
note = {ZSCC: NoCitationData[s1] },
keywords = {Coalgebras, Probabilistic transition systems, Transition systems},
pages = {1--43}
}
@inproceedings{healy_neural_2004,
title = {Neural {Networks}, {Knowledge} and {Cognition}: {A} {Mathematical} {Semantic} {Model} {Based} upon {Category} {Theory}},
shorttitle = {Neural {Networks}, {Knowledge} and {Cognition}},
abstract = {Category theory can be applied to mathematically model the semantics of cognitive neural systems. We discuss semantics as a hierarchy of concepts, or symbolic descriptions of items sensed and represented in the connection weights distributed throughout a neural network. The hierarchy expresses subconcept relationships, and in a neural network it becomes represented incrementally through a Hebbian-like learning process. The categorical semantic model described here explains the learning process as the derivation of colimits and limits in a concept category. It explains the representation of the concept hierarchy in a neural network at each stage of learning as a system of functors and natural transformations, expressing knowledge coherence across the regions of a multi-regional network equipped with multiple sensors. The model yields design principles that constrain neural network designs capable of the most important aspects of cognitive behavior.},
author = {Healy, Michael J. and Caudell, Thomas P.},
year = {2004}
}
@article{ehrhard_differential_2003,
title = {The differential lambda-calculus},
volume = {309},
issn = {0304-3975},
url = {http://www.sciencedirect.com/science/article/pii/S030439750300392X},
doi = {10/bf3b8v},
abstract = {We present an extension of the lambda-calculus with differential constructions. We state and prove some basic results (confluence, strong normalization in the typed case), and also a theorem relating the usual Taylor series of analysis to the linear head reduction of lambda-calculus.},
language = {en},
number = {1},
urldate = {2019-11-24},
journal = {Theoretical Computer Science},
author = {Ehrhard, Thomas and Regnier, Laurent},
month = dec,
year = {2003},
note = {ZSCC: 0000307},
keywords = {Differentiation, Linear logic, Programming language theory},
pages = {1--41}
}
@article{philipona_is_2003,
title = {Is {There} {Something} {Out} {There}? {Inferring} {Space} from {Sensorimotor} {Dependencies}},
volume = {15},
shorttitle = {Is {There} {Something} {Out} {There}?},
doi = {10/frg7gs},
abstract = {This letter suggests that in biological organisms, the perceived structure of reality, in particular the notions of body, environment, space, object, and attribute, could be a consequence of an effort on the part of brains to account for the dependency between their inputs and their outputs in terms of a small number of parameters. To validate this idea, a procedure is demonstrated whereby the brain of a (simulated) organism with arbitrary input and output connectivity can deduce the dimensionality of the rigid group of the space underlying its input-output relationship, that is, the dimension of what the organism will call physical space.},
journal = {Neural Computation},
author = {Philipona, David and O’Regan, J. and Nadal, Jean-Pierre},
month = oct,
year = {2003},
note = {ZSCC: 0000225},
keywords = {Algebra, Neuroscience},
pages = {2029--2049}
}
@article{bartels_hierarchy_2003,
series = {{CMCS}'03, {Coalgebraic} {Methods} in {Computer} {Science} ({Satellite} {Event} for {ETAPS} 2003)},
title = {A hierarchy of probabilistic system types},
volume = {82},
issn = {1571-0661},
url = {http://www.sciencedirect.com/science/article/pii/S1571066104806327},
doi = {10/d7kq38},
abstract = {We arrange various types of probabilistic transition systems studied in the literature in an expressiveness hierarchy. The expressiveness criterion is the existence of an embedding of systems of the one class into those of the other. An embedding here is a system transformation which preserves and reflects bisimilarity. To facilitate the task, we define the classes of systems and the corresponding notion of bisimilarity coalgebraically and use the new technical result that an embedding arises from a natural transformation with injective components between the two coalgebra functors under consideration. Moreover, we argue that coalgebraic bisimilarity, on which we base our results, coincides with the concrete notions proposed in the literature for the different system classes, exemplified by a detailed proof for the case of general Segala-type systems.},
language = {en},
number = {1},
urldate = {2019-11-26},
journal = {Electronic Notes in Theoretical Computer Science},
author = {Bartels, Falk and Sokolova, Ana and de Vink, Erik},
month = jul,
year = {2003},
note = {ZSCC: 0000049},
keywords = {Coalgebras, Probabilistic transition systems, Transition systems},
pages = {57--75}
}
@article{desharnais_bisimulation_2002,
title = {Bisimulation for {Labelled} {Markov} {Processes}},
volume = {179},
issn = {0890-5401},
url = {http://www.sciencedirect.com/science/article/pii/S0890540101929621},
doi = {10/fmp9vd},
abstract = {In this paper we introduce a new class of labelled transition systems—labelled Markov processes— and define bisimulation for them. Labelled Markov processes are probabilistic labelled transition systems where the state space is not necessarily discrete. We assume that the state space is a certain type of common metric space called an analytic space. We show that our definition of probabilistic bisimulation generalizes the Larsen–Skou definition given for discrete systems. The formalism and mathematics is substantially different from the usual treatment of probabilistic process algebra. The main technical contribution of the paper is a logical characterization of probabilistic bisimulation. This study revealed some unexpected results, even for discrete probabilistic systems. •Bisimulation can be characterized by a very weak modal logic. The most striking feature is that one has no negation or any kind of negative proposition.•We do not need any finite branching assumption, yet there is no need of infinitary conjunction. We also show how to construct the maximal autobisimulation on a system. In the finite state case, this is just a state minimization construction. The proofs that we give are of an entirely different character than the typical proofs of these results. They use quite subtle facts about analytic spaces and appear, at first sight, to be entirely nonconstructive. Yet one can give an algorithm for deciding bisimilarity of finite state systems which constructs a formula that witnesses the failure of bisimulation.},
language = {en},
number = {2},
urldate = {2019-11-26},
journal = {Information and Computation},
author = {Desharnais, Josée and Edalat, Abbas and Panangaden, Prakash},
month = dec,
year = {2002},
note = {ZSCC: 0000297},
keywords = {Coalgebras, Denotational semantics, Probabilistic transition systems, Symbolic logic, Transition systems},
pages = {163--193}
}
@article{abramsky_geometry_2002,
title = {Geometry of {Interaction} and {Linear} {Combinatory} {Algebras}},
volume = {12},
issn = {0960-1295},
url = {http://dx.doi.org/10.1017/S0960129502003730},
doi = {10/fcsmhm},
abstract = {We present an axiomatic framework for Girard's Geometry of Interaction based on the notion of linear combinatory algebra. We give a general construction on traced monoidal categories, with certain additional structure, that is sufficient to capture the exponentials of Linear Logic, which produces such algebras (and hence also ordinary combinatory algebras). We illustrate the construction on six standard examples, representing both the ‘particle-style’ as well as the ‘wave-style’ Geometry of Interaction.},
number = {5},
urldate = {2019-11-28},
journal = {Mathematical Structures in Computer Science},
author = {Abramsky, Samson and Haghverdi, Esfandiar and Scott, Philip},
month = oct,
year = {2002},
note = {ZSCC: 0000162},
keywords = {Interactive semantics, Linear logic},
pages = {625--665}
}
@article{mccullagh_what_2002,
title = {What is a statistical model?},
volume = {30},
url = {http://projecteuclid.org/euclid.aos/1035844977},
doi = {10/bkts3m},
language = {en},
number = {5},
urldate = {2019-11-22},
journal = {The Annals of Statistics},
author = {McCullagh, Peter},
month = oct,
year = {2002},
note = {ZSCC: 0000230},
keywords = {Bayesianism, Categorical ML, Categorical probability theory, Compendium, Purely theoretical, Statistical learning theory},
pages = {1225--1310}
}
@book{mazzola_topos_2002,
title = {The {Topos} of {Music}: {Geometric} {Logic} of {Concepts}, {Theory}, and {Performance}},
isbn = {978-3-7643-5731-3},
shorttitle = {The {Topos} of {Music}},
url = {https://www.springer.com/gp/book/9783764357313},
abstract = {Topos of Music is an extensive and elaborate body of mathematical investigations into music and involves several and ontologically different levels of musical description. Albeit the author Guerino Mazzola lists 17 contributors and 2 collaborators, the book should be characterized as a monograph. Large portions of the content represent original research of Mazzola himself, and the material from other work is exposed from Mazzola's point of view and is well referenced. The preface preintimates an intended double meaning of the term topos in the title. On the one hand, it provides a mathematical anchor, which is programmatic for the entire approach: the concept of a cartesian closed category with a subobject classifier. (...)Zentralblatt MATH},
language = {en},
urldate = {2019-11-28},
publisher = {Birkhäuser Basel},
author = {Mazzola, Guerino},
year = {2002},
doi = {10.1007/978-3-0348-8141-8},
note = {ZSCC: NoCitationData[s1] },
keywords = {Emergence, Psychology, Sketchy}
}
@inproceedings{kato_category_2002,
title = {Category {Theory} and {Consciousness}},
doi = {10/ggdf92},
abstract = {We propose a new research program which provides an approach to consciousness and the problem of deep reality. Our model is based on category theory. The need to relate local behavior to global behavior has convinced us early on that a good model for conscious entities had to be found in the notion of sheaf. With this formulation, every presheaf represents a brain (or conscious entity). An important aspect of the theory we will develop is the notion of sheafification of a presheaf which will allow us to include complementarity as part of the description of the universe. Our model is intended to describe a notion of consciousness which is pervasive throughout the universe and not localized in individual conscious entities. At the same time, we will provide a way of describing how consciousness can arise from non-consciousness; it has always been a major problem to understand how it is possible that the fundamental components of a brain are in fact non-conscious.},
author = {Kato, Goro C. and Struppa, Daniele C.},
year = {2002},
note = {ZSCC: 0000016},
keywords = {Emergence, Sketchy}
}
@incollection{wermuth_graphical_2001,
address = {Oxford},
title = {Graphical {Models}: {Overview}},
isbn = {978-0-08-043076-8},
shorttitle = {Graphical {Models}},
url = {http://www.sciencedirect.com/science/article/pii/B008043076700440X},
abstract = {Graphical Markov models provide a method of representing possibly complicated multivariate dependencies in such a way that the general qualitative features can be understood, that statistical independencies are highlighted, and that some properties can be derived directly. Variables are represented by the nodes of a graph. Pairs of nodes may be joined by an edge. Edges are directed if one variable is a response to the other variable considered as explanatory, but are undirected if the variables are on an equal footing. Absence of an edge typically implies statistical independence, conditional, or marginal depending on the kind of graph. The need for a number of types of graph arises because it is helpful to represent a number of different kinds of dependence structures. Of special importance are chain graphs in which variables are arranged in a sequence or chain of blocks, the variables in any one block being on an equal footing, some being possibly joint responses to variables in the past and some being jointly explanatory to variables in the future of the block considered. Some main properties of such systems are outlined, and recent research results are sketched. Suggestions for further reading are given. As an illustrative example, some analysis of data on the treatment of chronic pain is presented.},
language = {en},
urldate = {2019-11-22},
booktitle = {International {Encyclopedia} of the {Social} \& {Behavioral} {Sciences}},
publisher = {Pergamon},
author = {Wermuth, N. and Cox, D. R.},
editor = {Smelser, Neil J. and Baltes, Paul B.},
month = jan,
year = {2001},
doi = {10.1016/B0-08-043076-7/00440-X},
note = {ZSCC: NoCitationData[s0] },
keywords = {Bayesianism, Classical ML, Machine learning},
pages = {6379--6386}
}
@article{rutten_universal_2000,
series = {Modern {Algebra}},
title = {Universal coalgebra: a theory of systems},
volume = {249},
issn = {0304-3975},
shorttitle = {Universal coalgebra},
url = {http://www.sciencedirect.com/science/article/pii/S0304397500000566},
doi = {10.1016/S0304-3975(00)00056-6},
abstract = {In the semantics of programming, finite data types such as finite lists, have traditionally been modelled by initial algebras. Later final coalgebras were used in order to deal with infinite data types. Coalgebras, which are the dual of algebras, turned out to be suited, moreover, as models for certain types of automata and more generally, for (transition and dynamical) systems. An important property of initial algebras is that they satisfy the familiar principle of induction. Such a principle was missing for coalgebras until the work of Aczel (Non-Well-Founded sets, CSLI Leethre Notes, Vol. 14, center for the study of Languages and information, Stanford, 1988) on a theory of non-wellfounded sets, in which he introduced a proof principle nowadays called coinduction. It was formulated in terms of bisimulation, a notion originally stemming from the world of concurrent programming languages. Using the notion of coalgebra homomorphism, the definition of bisimulation on coalgebras can be shown to be formally dual to that of congruence on algebras. Thus, the three basic notions of universal algebra: algebra, homomorphism of algebras, and congruence, turn out to correspond to coalgebra, homomorphism of coalgebras, and bisimulation, respectively. In this paper, the latter are taken as the basic ingredients of a theory called universal coalgebra. Some standard results from universal algebra are reformulated (using the aforementioned correspondence) and proved for a large class of coalgebras, leading to a series of results on, e.g., the lattices of subcoalgebras and bisimulations, simple coalgebras and coinduction, and a covariety theorem for coalgebras similar to Birkhoff's variety theorem.},
language = {en},
number = {1},
urldate = {2019-11-26},
journal = {Theoretical Computer Science},
author = {Rutten, J. J. M. M.},
month = oct,
year = {2000},
note = {ZSCC: 0001448},
keywords = {Coalgebras, Transition systems},
pages = {3--80}
}
@inproceedings{danos_probabilistic_2000,
title = {Probabilistic game semantics},
volume = {3},
isbn = {978-0-7695-0725-5},
doi = {10/b6k43s},
abstract = {A category of HO/N-style games and probabilistic strategies is developed where the possible choices of a strategy are quantified so as to give a measure of the likelihood of seeing a given play. A 2-sided die is shown to be universal in this category, in the sense that any strategy breaks down into a composition between some deterministic strategy and that die. The interpretative power of the category is then demonstrated by delineating a Cartesian closed subcategory which provides a fully abstract model of a probabilistic extension of Idealized Algol},
author = {Danos, Vincent and Harmer, Russell},
month = feb,
year = {2000},
note = {ZSCC: NoCitationData[s1]},
keywords = {Denotational semantics, Game semantics, Interactive semantics, Probabilistic programming, Programming language theory},
pages = {204--213}
}
@inproceedings{healy_category_2000,
address = {Como, Italy},
title = {Category theory applied to neural modeling and graphical representations},
isbn = {978-0-7695-0619-7},
url = {http://ieeexplore.ieee.org/document/861277/},
doi = {10/dr29pc},
abstract = {Category theory can be applied to mathematically model the semantics of cognitive neural systems. Here, we employ colimits, functors and natural transformations to model the implementation of concept hierarchies in neural networks equipped with multiple sensors.},
language = {en},
urldate = {2019-11-22},
booktitle = {Proceedings of the {IEEE}-{INNS}-{ENNS} {International} {Joint} {Conference} on {Neural} {Networks}. {IJCNN} 2000. {Neural} {Computing}: {New} {Challenges} and {Perspectives} for the {New} {Millennium}},
publisher = {IEEE},
author = {Healy, M. J.},
year = {2000},
note = {ZSCC: 0000032},
keywords = {Emergence, Neuroscience, Sketchy},
volume = {3},
pages = {35--40}
}
@inproceedings{de_vink_bisimulation_1997,
address = {Berlin, Heidelberg},
series = {Lecture {Notes} in {Computer} {Science}},
title = {Bisimulation for probabilistic transition systems: {A} coalgebraic approach},
isbn = {978-3-540-69194-5},
shorttitle = {Bisimulation for probabilistic transition systems},
doi = {10/fcqzmk},
abstract = {The notion of bisimulation as proposed by Larsen and Skou for discrete probabilistic transition systems is shown to coincide with a coalgebraic definition in the sense of Aczel and Mendier in terms of a set functor. This coalgebraic formulation makes it possible to generalize the concepts to a continuous setting involving Borel probability measures. Under reasonable conditions, generalized probabilistic bisimilarity can be characterized categorically. Application of the final coalgebra paradigm then yields an internally fully abstract semantical domain with respect to probabilistic bisimulation.},
language = {en},
booktitle = {Automata, {Languages} and {Programming}},
publisher = {Springer},
author = {de Vink, E. P. and Rutten, J. J. M. M.},
editor = {Degano, Pierpaolo and Gorrieri, Roberto and Marchetti-Spaccamela, Alberto},
year = {1997},
note = {ZSCC: NoCitationData[s1]},
keywords = {Categorical probability theory, Coalgebras, Denotational semantics, Probabilistic transition systems, Transition systems},
pages = {460--470}
}
@article{baas_emergence_1997,
title = {On {Emergence} and {Explanation}},
volume = {25},
issn = {0769-4113},
url = {https://www.persee.fr/doc/intel_0769-4113_1997_num_25_2_1558},
doi = {10/ggdf9z},
abstract = {Emergence is a universal phenomenon that can be defined mathematically in a very general way. This is useful for the study of scientifically legitimate explanations of complex systems, here defined as hyperstructures. A requirement is that observation mechanisms are considered within the general framework. Two notions of emergence are defined, and specific examples of these are discussed.},
language = {en},
number = {2},
urldate = {2019-11-22},
journal = {Intellectica. Revue de l'Association pour la Recherche Cognitive},
author = {Baas, Nils Andreas and Emmeche, Claus},
year = {1997},
note = {ZSCC: 0000201},
keywords = {Biology, Emergence},
pages = {67--83}
}
@article{heckerman_tutorial_1995,
title = {A {Tutorial} on {Learning} {With} {Bayesian} {Networks}},
url = {https://www.microsoft.com/en-us/research/publication/a-tutorial-on-learning-with-bayesian-networks/},
abstract = {A Bayesian network is a graphical model that encodes probabilistic relationships among variables of interest. When used in conjunction with statistical techniques, the graphical model has several advantages for data analysis. One, because the model encodes dependencies among all variables, it readily handles situations where some data entries are missing. Two, a Bayesian network can …},
language = {en-US},
urldate = {2019-11-22},
author = {Heckerman, David},
month = mar,
year = {1995},
note = {ZSCC: 0000058},
keywords = {Bayesianism, Classical ML, Machine learning}
}
@article{siegelmann_computational_1995,
title = {On the {Computational} {Power} of {Neural} {Nets}},
volume = {50},
issn = {0022-0000},
url = {http://www.sciencedirect.com/science/article/pii/S0022000085710136},
doi = {10/dvwtc3},
abstract = {This paper deals with finite size networks which consist of interconnections of synchronously evolving processors. Each processor updates its state by applying a "sigmoidal" function to a linear combination of the previous states of all units. We prove that one may simulate all Turing machines by such nets. In particular, one can simulate any multi-stack Turing machine in real time, and there is a net made up of 886 processors which computes a universal partial-recursive function. Products (high order nets) are not required, contrary to what had been stated in the literature. Non-deterministic Turing machines can be simulated by non-deterministic rational nets, also in real time. The simulation result has many consequences regarding the decidability, or more generally the complexity, of questions about recursive nets.},
language = {en},
number = {1},
urldate = {2019-11-28},
journal = {Journal of Computer and System Sciences},
author = {Siegelmann, H. T. and Sontag, E. D.},
month = feb,
year = {1995},
note = {ZSCC: 0000002},
keywords = {Classical ML, Machine learning},
pages = {132--150}
}
@inproceedings{girard_geometry_1995,
address = {Berlin, Heidelberg},
series = {{NATO} {ASI} {Series}},
title = {On {Geometry} of {Interaction}},
isbn = {978-3-642-79361-5},
doi = {10/fr557p},
abstract = {The paper expounds geometry of interaction, for the first time in the full case, i.e. for all connectives of linear logic, including additives and constants. The interpretation is done within a C*-algebra which is induced by the rule of resolution of logic programming, and therefore the execution formula can be presented as a simple logic programming loop. Part of the data is public (shared channels) but part of it can be viewed as private dialect (defined up to isomorphism) that cannot be shared during interaction, thus illustrating the theme of communication without understanding. One can prove a nilpotency (i.e. termination) theorem for this semantics, and also its soundness w.r.t. a slight modification of familiar sequent calculus in the case of exponential-free conclusions.},
language = {en},
booktitle = {Proof and {Computation}},
publisher = {Springer},
author = {Girard, Jean-Yves},
editor = {Schwichtenberg, Helmut},
year = {1995},
note = {ZSCC: NoCitationData[s0]},
keywords = {Interactive semantics, Linear logic},
pages = {145--191}
}
@book{engeler_combinatory_1995,
series = {Progress in {Theoretical} {Computer} {Science}},
title = {The {Combinatory} {Programme}},
isbn = {978-0-8176-3801-6},
url = {https://www.springer.com/gb/book/9780817638016},
abstract = {Combinatory logic started as a programme in the foundation of mathematics and in an historical context at a time when such endeavours attracted the most gifted among the mathematicians. This small volume arose under quite differ ent circumstances, namely within the context of reworking the mathematical foundations of computer science. I have been very lucky in finding gifted students who agreed to work with me and chose, for their Ph. D. theses, subjects that arose from my own attempts 1 to create a coherent mathematical view of these foundations. The result of this collaborative work is presented here in the hope that it does justice to the individual contributor and that the reader has a chance of judging the work as a whole. E. Engeler ETH Zurich, April 1994 lCollected in Chapter III, An Algebraization of Algorithmics, in Algorithmic Properties of Structures, Selected Papers of Erwin Engeler, World Scientific PubJ. Co. , Singapore, 1993, pp. 183-257. I Historical and Philosophical Background Erwin Engeler In the fall of 1928 a young American turned up at the Mathematical Institute of Gottingen, a mecca of mathematicians at the time; he was a young man with a dream and his name was H. B. Curry. He felt that he had the tools in hand with which to solve the problem of foundations of mathematics mice and for all. His was an approach that came to be called "formalist" and embodied that later became known as Combinatory Logic.},
language = {en},
urldate = {2019-11-26},
publisher = {Birkhäuser Basel},
author = {Engeler, Erwin},
year = {1995},
doi = {10.1007/978-1-4612-4268-0},
note = {ZSCC: NoCitationData[s1] },
keywords = {Algebra, Programming language theory, Purely theoretical}
}
@incollection{lawvere_tools_1994,
title = {Tools for the {Advancement} of {Objective} {Logic}: {Closed} {Categories} and {Toposes}},
shorttitle = {Tools for the {Advancement} of {Objective} {Logic}},
booktitle = {The {Logical} {Foundations} of {Cognition}},
publisher = {Oxford University Press USA},
author = {Lawvere, F. William},
editor = {Macnamara, John and Reyes, Gonzalo E.},
year = {1994},
note = {ZSCC: NoCitationData[s0]},
keywords = {Compendium, Emergence, Psychology, Sketchy},
pages = {43--56}
}
@article{jones_probabilistic_1989,
title = {Probabilistic {Non}-determinism},
language = {en},
author = {Jones, Claire},
year = {1989},
note = {ZSCC: 0000000},
keywords = {Denotational semantics, Probabilistic programming, Programming language theory},
pages = {198}
}
@inproceedings{jones_powerdomain_1989,
address = {Piscataway, NJ, USA},
title = {A {Probabilistic} {Powerdomain} of {Evaluations}},
isbn = {978-0-8186-1954-0},
url = {http://dl.acm.org/citation.cfm?id=77350.77370},
urldate = {2019-11-26},
booktitle = {Proceedings of the {Fourth} {Annual} {Symposium} on {Logic} in {Computer} {Science}},
publisher = {IEEE Press},
author = {Jones, C. and Plotkin, G.},
year = {1989},
note = {ZSCC: 0000389
event-place: Pacific Grove, California, USA},
keywords = {Denotational semantics, Powerdomains, Probabilistic programming, Programming language theory},
pages = {186--195}
}
@article{girard_linear_1987,
title = {Linear logic},
volume = {50},
issn = {0304-3975},
url = {http://www.sciencedirect.com/science/article/pii/0304397587900454},
doi = {10.1016/0304-3975(87)90045-4},
abstract = {The familiar connective of negation is broken into two operations: linear negation which is the purely negative part of negation and the modality “of course” which has the meaning of a reaffirmation. Following this basic discovery, a completely new approach to the whole area between constructive logics and programmation is initiated.},
language = {en},
number = {1},
urldate = {2019-11-26},
journal = {Theoretical Computer Science},
author = {Girard, Jean-Yves},
month = jan,
year = {1987},
note = {ZSCC: 0005505},
keywords = {Denotational semantics, Linear logic, Type theory},
pages = {1--101}
}
@inproceedings{giry_categorical_1982,
address = {Berlin, Heidelberg},
series = {Lecture {Notes} in {Mathematics}},
title = {A categorical approach to probability theory},
isbn = {978-3-540-39041-1},
doi = {10/dtx5t5},
language = {en},
booktitle = {Categorical {Aspects} of {Topology} and {Analysis}},
publisher = {Springer},
author = {Giry, Michèle},
editor = {Banaschewski, B.},
year = {1982},
note = {ZSCC: NoCitationData[s1]},
keywords = {Categorical probability theory},
pages = {68--85}
}
@article{plotkin_lcf_1977,
title = {{LCF} considered as a programming language},
volume = {5},
issn = {0304-3975},
url = {http://www.sciencedirect.com/science/article/pii/0304397577900445},
doi = {10.1016/0304-3975(77)90044-5},
abstract = {The paper studies connections between denotational and operational semantics for a simple programming language based on LCF. It begins with the connection between the behaviour of a program and its denotation. It turns out that a program denotes ⊥ in any of several possible semantics if it does not terminate. From this it follows that if two terms have the same denotation in one of these semantics, they have the same behaviour in all contexts. The converse fails for all the semantics. If, however, the language is extended to allow certain parallel facilities behavioural equivalence does coincide with denotational equivalence in one of the semantics considered, which may therefore be called “fully abstract”. Next a connection is given which actually determines the semantics up to isomorphism from the behaviour alone. Conversely, by allowing further parallel facilities, every r.e. element of the fully abstract semantics becomes definable, thus characterising the programming language, up to interdefinability, from the set of r.e. elements of the domains of the semantics.},
language = {en},
number = {3},
urldate = {2019-11-26},
journal = {Theoretical Computer Science},
author = {Plotkin, G. D.},
month = dec,
year = {1977},
note = {ZSCC: 0001407},
keywords = {Probabilistic programming, Programming language theory},
pages = {223--255}
}
@article{rosen_representation_1958,
title = {The representation of biological systems from the standpoint of the theory of categories},
volume = {20},
issn = {0007-4985, 1522-9602},
url = {http://link.springer.com/10.1007/BF02477890},
doi = {10.1007/BF02477890},
language = {en},
number = {4},
urldate = {2019-11-22},
journal = {The Bulletin of Mathematical Biophysics},
author = {Rosen, Robert},
month = dec,
year = {1958},
note = {ZSCC: 0000262},
keywords = {Biology, Sketchy},
pages = {317--341}
}
@misc{murfet_linear_nodate,
title = {Linear logic and deep learning},
language = {en},
author = {Murfet, Daniel and Hu, Huiyi},
note = {ZSCC: NoCitationData[s0]},
keywords = {Categorical ML, Linear logic, Machine learning, Semantics}
}
@misc{murfet_mathematics_nodate,
title = {Mathematics of {AlphaGo}},
author = {Murfet, Daniel},
note = {ZSCC: NoCitationData[s0]},
keywords = {Classical ML, Machine learning}
}
@misc{murfet_algebra_nodate,
title = {Algebra and {Artificial} {Intelligence}},
language = {en},
author = {Murfet, Daniel},
note = {ZSCC: NoCitationData[s0]},
keywords = {Algebra, Classical ML, Machine learning, Sketchy}
}
@article{gromov_structures_nodate,
title = {Structures, {Learning} and {Ergosystems}: {Chapters}},
abstract = {We introduce a concept of an ergosystem which functions by building its ``internal structure'' out of the ``raw structures'' in the incoming flows of signals.},
language = {en},
author = {Gromov, Misha},
note = {ZSCC: 0000010},
keywords = {Biology, Compendium, Emergence, Neuroscience, Sketchy},
pages = {159}
}
@article{boutillier_kappa_nodate,
title = {The {Kappa} {Language} and {Kappa} {Tools}},
language = {en},
author = {Boutillier, Pierre and Feret, Jérôme and Krivine, Jean and Fontana, Walter},
note = {ZSCC: NoCitationData[s0]},
keywords = {Biology, Implementation, Systems biology},
pages = {52}
}