diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 00000000..b211418d
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,18 @@
+name: ci
+on:
+ push:
+ branches:
+ - newdocs
+ - master
+permissions:
+ contents: write
+jobs:
+ deploy:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
+ with:
+ python-version: 3.x
+ - run: pip install mkdocs-material
+ # - run: mkdocs gh-deploy --force
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index bbc7fd51..965f6b87 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
src/data
+docs/site
venv
_cache
docs/build
diff --git a/.netlify/state.json b/.netlify/state.json
deleted file mode 100644
index 42007608..00000000
--- a/.netlify/state.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "siteId": "12c0eb5b-a921-45cf-af85-b903b65b801c"
-}
\ No newline at end of file
diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index b3cccaa1..1ae80ed4 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -51,8 +51,4 @@ A **new major GHC version** has been released. Here's what you need to do:
## Documentation
-The docs are built with Sphinx. Once installed, cd to the `docs` directory, then run `make html` to build locally. CI does this automatically, so to update the docs, just update the markdown (e.g. docs/source/usage.md), and push.
-
-## Website
-
-The website is also hosted in the repo (`/monad-bayes-site`), and is built with `hakyll`. Do `stack exec site build` to build. CI **does not** automatically build the site, so to update, you will need to run this command, and only then push to github.
\ No newline at end of file
+The docs are built with MkDocs. To preview them locally, run `mkdocs serve` from the `docs` directory. The site is deployed online by Netlify.
\ No newline at end of file
diff --git a/README.md b/README.md
index e89b9b99..18788c12 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# [Monad-Bayes](https://monad-bayes-site.netlify.app/_site/about.html)
+# [Monad-Bayes](https://monad-bayes.netlify.app/)
A library for probabilistic programming in Haskell.
@@ -7,7 +7,7 @@ A library for probabilistic programming in Haskell.
[![Hackage Deps](https://img.shields.io/hackage-deps/v/monad-bayes.svg)](http://packdeps.haskellers.com/reverse/monad-bayes)
[![Build status](https://badge.buildkite.com/147af088063e8619fcf52ecf93fa7dd3353a2e8a252ef8e6ad.svg?branch=master)](https://buildkite.com/tweag-1/monad-bayes) -->
-[See the website](https://monad-bayes-site.netlify.app/_site/about.html) for an overview of the documentation, library, tutorials, examples (and a link to this very source code).
+[See the docs](https://monad-bayes.netlify.app/) for a user guide, notebook-style tutorials, an example gallery, and a detailed account of the implementation.
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index d0c3cbf1..00000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,20 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line, and also
-# from the environment for the first two.
-SPHINXOPTS ?=
-SPHINXBUILD ?= sphinx-build
-SOURCEDIR = source
-BUILDDIR = build
-
-# Put it first so that "make" without argument is like "make help".
-help:
- @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-.PHONY: help Makefile
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile
- @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/docs/examples.md b/docs/docs/examples.md
new file mode 100644
index 00000000..5be12b02
--- /dev/null
+++ b/docs/docs/examples.md
@@ -0,0 +1,18 @@
+---
+title: Example Gallery
+---
+
+## [Histograms](/notebooks/Histogram.html)
+
+## [JSON (with `lens`)](/notebooks/Lenses.html)
+
+## [Diagrams](/notebooks/Diagrams.html)
+
+## [Probabilistic Parsing](/notebooks/Parsing.html)
+
+## [Streams (with `pipes`)](/notebooks/Streaming.html)
+
+## [Ising models](/notebooks/Ising.html)
+
+## [Physics](/notebooks/ClassicalPhysics.html)
+
diff --git a/monad-bayes-site/_site/images/code_example.png b/docs/docs/images/code_example.png
similarity index 100%
rename from monad-bayes-site/_site/images/code_example.png
rename to docs/docs/images/code_example.png
diff --git a/monad-bayes-site/_site/images/haskell-logo.png b/docs/docs/images/haskell-logo.png
similarity index 100%
rename from monad-bayes-site/_site/images/haskell-logo.png
rename to docs/docs/images/haskell-logo.png
diff --git a/monad-bayes-site/_site/images/plot.png b/docs/docs/images/plot.png
similarity index 100%
rename from monad-bayes-site/_site/images/plot.png
rename to docs/docs/images/plot.png
diff --git a/docs/source/_static/priorpred.png b/docs/docs/images/priorpred.png
similarity index 100%
rename from docs/source/_static/priorpred.png
rename to docs/docs/images/priorpred.png
diff --git a/monad-bayes-site/_site/images/randomwalk.png b/docs/docs/images/randomwalk.png
similarity index 100%
rename from monad-bayes-site/_site/images/randomwalk.png
rename to docs/docs/images/randomwalk.png
diff --git a/docs/source/_static/regress.png b/docs/docs/images/regress.png
similarity index 100%
rename from docs/source/_static/regress.png
rename to docs/docs/images/regress.png
diff --git a/docs/docs/index.md b/docs/docs/index.md
new file mode 100644
index 00000000..c02f090b
--- /dev/null
+++ b/docs/docs/index.md
@@ -0,0 +1,32 @@
+# Welcome to Monad-Bayes
+
+Monad-Bayes is a library for **probabilistic programming** written in **Haskell**.
+
+**Define distributions** [as programs](/notebooks/Introduction.html)
+
+**Perform inference** [with a variety of standard methods](tutorials.md) [defined compositionally](http://approximateinference.org/accepted/ScibiorGhahramani2016.pdf)
+
+**Integrate with Haskell code** [like this](examples.md) because Monad-Bayes is just a library, not a separate language
+
+## Example
+
+```haskell
+model :: Distribution Double
+model = do
+ x <- bernoulli 0.5
+ normal (if x then (-3) else 3) 1
+
+image :: Distribution Plot
+image = fmap (plot . histogram 200) (replicateM 100000 model)
+
+sampler image
+```
+
+The program `model` is a mixture of Gaussians. Its type `Distribution Double` represents a distribution over reals.
+`image` is a program too: as its type shows, it is a distribution over plots; specifically, plots that arise from forming a 200-bin histogram of 100,000 independent, identically distributed (iid) draws from `model`.
+To sample from `image`, we simply write `sampler image`, with the result shown below:
+
+
+
\ No newline at end of file
diff --git a/docs/docs/javascripts/mathjax.js b/docs/docs/javascripts/mathjax.js
new file mode 100644
index 00000000..0f4b6e69
--- /dev/null
+++ b/docs/docs/javascripts/mathjax.js
@@ -0,0 +1,16 @@
+window.MathJax = {
+ tex: {
+ inlineMath: [["\\(", "\\)"]],
+ displayMath: [["\\[", "\\]"]],
+ processEscapes: true,
+ processEnvironments: true
+ },
+ options: {
+ ignoreHtmlClass: ".*|",
+ processHtmlClass: "arithmatex"
+ }
+ };
+
+ document$.subscribe(() => {
+ MathJax.typesetPromise()
+ })
\ No newline at end of file
diff --git a/monad-bayes-site/AdvancedSampling.html b/docs/docs/notebooks/AdvancedSampling.html
similarity index 100%
rename from monad-bayes-site/AdvancedSampling.html
rename to docs/docs/notebooks/AdvancedSampling.html
diff --git a/monad-bayes-site/Bayesian.html b/docs/docs/notebooks/Bayesian.html
similarity index 100%
rename from monad-bayes-site/Bayesian.html
rename to docs/docs/notebooks/Bayesian.html
diff --git a/monad-bayes-site/ClassicalPhysics.html b/docs/docs/notebooks/ClassicalPhysics.html
similarity index 100%
rename from monad-bayes-site/ClassicalPhysics.html
rename to docs/docs/notebooks/ClassicalPhysics.html
diff --git a/monad-bayes-site/Diagrams.html b/docs/docs/notebooks/Diagrams.html
similarity index 100%
rename from monad-bayes-site/Diagrams.html
rename to docs/docs/notebooks/Diagrams.html
diff --git a/monad-bayes-site/Functional_PPLs.html b/docs/docs/notebooks/Functional_PPLs.html
similarity index 100%
rename from monad-bayes-site/Functional_PPLs.html
rename to docs/docs/notebooks/Functional_PPLs.html
diff --git a/monad-bayes-site/Histogram.html b/docs/docs/notebooks/Histogram.html
similarity index 100%
rename from monad-bayes-site/Histogram.html
rename to docs/docs/notebooks/Histogram.html
diff --git a/monad-bayes-site/Introduction.html b/docs/docs/notebooks/Introduction.html
similarity index 99%
rename from monad-bayes-site/Introduction.html
rename to docs/docs/notebooks/Introduction.html
index c8d7b9e6..d6b0d71a 100644
--- a/monad-bayes-site/Introduction.html
+++ b/docs/docs/notebooks/Introduction.html
@@ -14614,7 +14614,7 @@
This serves as an interactive alternative to the user guide. This isn't intended as a tutorial to Haskell, but if you're familiar with probabilistic programming, the general flow of the code should look familiar.
To get a sense of how probabilistic programming with monad-bayes works, consider the following:
diff --git a/monad-bayes-site/Ising.html b/docs/docs/notebooks/Ising.html
similarity index 100%
rename from monad-bayes-site/Ising.html
rename to docs/docs/notebooks/Ising.html
diff --git a/monad-bayes-site/Lazy.html b/docs/docs/notebooks/Lazy.html
similarity index 100%
rename from monad-bayes-site/Lazy.html
rename to docs/docs/notebooks/Lazy.html
diff --git a/monad-bayes-site/Lenses.html b/docs/docs/notebooks/Lenses.html
similarity index 100%
rename from monad-bayes-site/Lenses.html
rename to docs/docs/notebooks/Lenses.html
diff --git a/monad-bayes-site/MCMC.html b/docs/docs/notebooks/MCMC.html
similarity index 100%
rename from monad-bayes-site/MCMC.html
rename to docs/docs/notebooks/MCMC.html
diff --git a/monad-bayes-site/Parsing.html b/docs/docs/notebooks/Parsing.html
similarity index 100%
rename from monad-bayes-site/Parsing.html
rename to docs/docs/notebooks/Parsing.html
diff --git a/monad-bayes-site/RealTimeInference.html b/docs/docs/notebooks/RealTimeInference.html
similarity index 100%
rename from monad-bayes-site/RealTimeInference.html
rename to docs/docs/notebooks/RealTimeInference.html
diff --git a/monad-bayes-site/SMC.html b/docs/docs/notebooks/SMC.html
similarity index 100%
rename from monad-bayes-site/SMC.html
rename to docs/docs/notebooks/SMC.html
diff --git a/monad-bayes-site/Sampling.html b/docs/docs/notebooks/Sampling.html
similarity index 100%
rename from monad-bayes-site/Sampling.html
rename to docs/docs/notebooks/Sampling.html
diff --git a/monad-bayes-site/Streaming.html b/docs/docs/notebooks/Streaming.html
similarity index 100%
rename from monad-bayes-site/Streaming.html
rename to docs/docs/notebooks/Streaming.html
diff --git a/docs/source/probprog.md b/docs/docs/probprog.md
similarity index 89%
rename from docs/source/probprog.md
rename to docs/docs/probprog.md
index aae13016..76b4fc98 100644
--- a/docs/source/probprog.md
+++ b/docs/docs/probprog.md
@@ -1,19 +1,27 @@
-# Quickstart
+# User Guide
Probabilistic programming is all about being able to write probabilistic models as programs. For instance, here is a Bayesian linear regression model, which we would write equationally as:
-```{math}
+$$
\beta \sim \operatorname{normal}(0, 2)
+$$
+$$
\alpha \sim \operatorname{normal}(0, 2)
+$$
+$$
\sigma^2 \sim \operatorname{gamma}(4, 4)
+$$
+$$
\epsilon_{n} \sim \operatorname{normal}(0, \sigma)
+$$
+$$
y_{n}=\alpha+\beta x_{n}+\epsilon_{n}
-```
+$$
but in code as:
@@ -36,7 +44,7 @@ regression xsys = do
This is the *model*. To perform *inference* , suppose we have a data set `xsys` like:
-![](_static/priorpred.png)
+![](images/priorpred.png)
To run the model
@@ -51,7 +59,7 @@ mhRunsRegression = sampler
This yields 1000 samples from an MCMC walk using an MH kernel. `mh n` produces a distribution over chains of length `n`, along with the probability of that chain. Sampling a chain and plotting its final state gives:
-![](_static/regress.png)
+![](images/regress.png)
Monad-bayes provides a variety of MCMC and SMC methods, and methods arising from the composition of the two.
@@ -65,9 +73,9 @@ Monad-bayes provides a variety of MCMC and SMC methods, and methods arising from
Other probabilistic programming languages with fairly similar APIs include WebPPL and Gen. This cognitive-science oriented introduction to [WebPPL](https://probmods.org/) is an excellent resource for learning about probabilistic programming. The [tutorials for Gen](https://www.gen.dev/tutorials/) are also very good, particularly for learning about traces.
-# Specifying distributions
+## Specifying distributions
-A distribution in monad-bayes over a set {math}`X`, is of type:
+A distribution in monad-bayes over a set $X$ is of type:
```haskell
MonadMeasure m => m X
@@ -81,7 +89,7 @@ Monad-bayes provides standard distributions, such as
random :: Distribution Double
```
-which is distributed uniformly over {math}`[0,1]`.
+which is distributed uniformly over $[0,1]$.
The full set is listed at https://hackage.haskell.org/package/monad-bayes-0.1.1.0/docs/Control-Monad-Bayes-Class.html
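+For instance, here are a couple of the standard primitives, written in the same style as `random` above (a sketch; check the Hackage page for the authoritative signatures):
+
+```haskell
+coin :: Distribution Bool      -- a fair coin
+coin = bernoulli 0.5
+
+noise :: Distribution Double   -- one draw from a standard normal
+noise = normal 0 1
+```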
@@ -100,9 +108,9 @@ monad-bayes also lets us construct new distributions out of these. `MonadMeasure
fmap (> 0.5) random :: MonadMeasure m => m Bool
```
-This is the uniform distribution over {math}`(0.5, 1]`.
+This is the distribution over `Bool` that is `True` exactly when the underlying uniform draw lands in $(0.5, 1]$, i.e. a fair coin flip.
-As an important special case, if `x :: MonadMeasure m => m (a,b)` is a joint distribution over two variables, then `fmap fst a :: MonadMeasure m => m a` **marginalizes** out the second variable. That is to say, `fmap fst a` is the distribution {math}`p(a)`, where {math}`p(a) = \int_b p(a,b)`.
+As an important special case, if `x :: MonadMeasure m => m (a,b)` is a joint distribution over two variables, then `fmap fst x :: MonadMeasure m => m a` **marginalizes** out the second variable. That is to say, `fmap fst x` is the distribution $p(a)$, where $p(a) = \int_b p(a,b)$.
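+For example (a minimal sketch, with `xy` standing for any joint distribution over pairs):
+
+```haskell
+-- `fmap fst` integrates out the second component of a joint distribution.
+marginalX :: MonadMeasure m => m (Double, Bool) -> m Double
+marginalX xy = fmap fst xy
+```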
The above examples use only the functor instance for `m`, but we also have the monad instance, as used in:
@@ -113,13 +121,12 @@ example = bernoulli 0.5 >>= (\x -> if x then random else normal 0 1)
It's easiest to understand this distribution as a probabilistic program: it's the distribution you get by first sampling from `bernoulli 0.5`, then checking the result. If the result is `True`, then sample from `random`, else from `normal 0 1`. As a distribution, this has a PDF:
-```{math}
- f(x) = 1[0\leq x \leq 1]*0.5 + \mathcal{N}(0,1)(x)*0.5
- ```
+$$
+f(x) = \mathbf{1}[0\leq x \leq 1]\cdot 0.5 + \mathcal{N}(0,1)(x)\cdot 0.5
+$$
-
Equivalently, we could write this in do-notation as:
@@ -138,6 +145,7 @@ That said, it is often useful to think of probabilistic programs as specifying d
monad-bayes provides a function `score :: MonadMeasure m => Log Double -> m ()`. (**Note**: `Log Double` is a wrapper for `Double` which stores doubles as their logarithm, and does multiplication by addition of logarithms.)
+
```haskell
example :: MonadMeasure m => m Double
example = do
@@ -167,42 +175,7 @@ example = do
This describes a Poisson distribution in which all even values of the random variable are marginalized out.
-
-
-
-
-
-
-
-
-
-
-
## Inference methods
@@ -267,7 +240,7 @@ which gives
[([1,2,3,4],0.5),([2,3,4,5],0.5)]
```
-### Near exact inference for continuous distributions
+## Near exact inference for continuous distributions
Monad-Bayes does not currently support exact inference (via symbolic solving) for continuous distributions. However, it *does* support numerical integration. For example, for the distribution defined by
@@ -278,7 +251,7 @@ model = do
normal 0 (sqrt var)
```
-you may run `probability (0, 1000) model` to obtain the probability in the range `(0,1000)`. As expected, this should be roughly {math}`0.5`, since the PDF of `model` is symmetric around {math}`0`.
+you may run `probability (0, 1000) model` to obtain the probability that the result lies in the range `(0,1000)`. As expected, this should be roughly $0.5$, since the PDF of `model` is symmetric around $0$.
You can also try `expectation model`, `variance model`, `momentGeneratingFunction model n` or `cdf model n`.
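+For example (a usage sketch; the exact types of these functions are an assumption, and `model` is the gamma/normal program above):
+
+```haskell
+massAbove0 :: Double
+massAbove0 = probability (0, 1000) model   -- roughly 0.5, by symmetry of the PDF around 0
+
+meanOfModel :: Double
+meanOfModel = expectation model            -- roughly 0, for the same reason
+```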
@@ -305,7 +278,7 @@ example = do
if x then normal 0 1 else normal 1 2
```
-`sampler example` will produce a sample from a Bernoulli distribution with {math}`p=0.5`, and if it is {math}`True`, return a sample from a standard normal, else from a normal with mean 1 and std 2.
+`sampler example` will produce a sample from a Bernoulli distribution with $p=0.5$, and if it is `True`, return a sample from a standard normal, else from a normal with mean 1 and standard deviation 2.
`(replicateM n . sampler) example` will produce a list of `n` independent samples. However, it is recommended to instead do `(sampler . replicateM n) example`, which will create a new model (`replicateM n example`) consisting of `n` independent draws from `example`.
@@ -395,7 +368,7 @@ run = (sampler . mcmc (MCMCConfig {
proposal = SingleSiteMH})) example
```
-produces {math}`5` unbiased samples from the posterior, by using single-site trace MCMC with the Metropolis-Hastings (MH) method. This means that the random walk is over execution traces of the probabilistic program, and the proposal distribution modifies a single random variable as a time, and then uses MH for the accept-reject criterion. For example, from the above you'd get:
+produces $5$ unbiased samples from the posterior, by using single-site trace MCMC with the Metropolis-Hastings (MH) method. This means that the random walk is over execution traces of the probabilistic program, and the proposal distribution modifies a single random variable at a time, and then uses MH for the accept-reject criterion. For example, from the above you'd get:
```
[True,True,True,True,True]
@@ -403,7 +376,7 @@ produces {math}`5` unbiased samples from the posterior, by using single-site tra
The final element of the chain is the head of the list, so you can drop samples from the end of the list for burn-in.
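+For instance (a small sketch, with a hypothetical helper name):
+
+```haskell
+-- The newest samples sit at the head of the list, so burn-in is discarded from the end.
+dropBurnIn :: Int -> [a] -> [a]
+dropBurnIn burn xs = take (length xs - burn) xs
+```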
-### Piped MCMC
+## Streaming MCMC
You can also run `MCMC` using `mcmcP`. This creates an infinite chain, expressed as a stream or using the corresponding type from the `pipes` library, a `Producer`. This is a very natural representation of a random walk in Haskell.
@@ -623,7 +596,7 @@ mixture1 point = do
return cluster
```
-is a piece of code to infer whether an observed point was generated from a Gaussian of mean {math}`1` or {math}`5`. That is, `mixture1` is a conditional Bernoulli distribution over the mean given an observation. You're not going to be able to do much with `mixture1` though. Exact inference is impossible because of the sample from the normal, and as for sampling, there is zero probability of sampling the normal to exactly match the observed point, which is what the `condition` requires.
+is a piece of code to infer whether an observed point was generated from a Gaussian of mean $1$ or $5$. That is, `mixture1` is a conditional Bernoulli distribution over the mean given an observation. You're not going to be able to do much with `mixture1` though. Exact inference is impossible because of the sample from the normal, and as for sampling, there is zero probability of sampling the normal to exactly match the observed point, which is what the `condition` requires.
However, the same conditional distribution is represented by
diff --git a/docs/docs/tutorials.md b/docs/docs/tutorials.md
new file mode 100644
index 00000000..98ea7942
--- /dev/null
+++ b/docs/docs/tutorials.md
@@ -0,0 +1,21 @@
+---
+title: Tutorials
+---
+
+## [Introduction to Monad-Bayes](/notebooks/Introduction.html)
+
+## [Sampling from a distribution](/notebooks/Sampling.html)
+
+## [Bayesian models](/notebooks/Bayesian.html)
+
+## [Markov Chain Monte Carlo](/notebooks/MCMC.html)
+
+## [Sequential Monte Carlo](/notebooks/SMC.html)
+
+## [Lazy Sampling](/notebooks/Lazy.html)
+
+## [Advanced Inference Methods](/notebooks/AdvancedSampling.html)
+
+
\ No newline at end of file
diff --git a/docs/source/usage.md b/docs/docs/usage.md
similarity index 93%
rename from docs/source/usage.md
rename to docs/docs/usage.md
index 557529db..52bc96dd 100644
--- a/docs/source/usage.md
+++ b/docs/docs/usage.md
@@ -1,4 +1,4 @@
-# The implementation of Monad-Bayes
+# Implementation guide
This document assumes the reader is familiar with the basics of Bayesian probability theory, basic Haskell (the syntax, the type system, do-notation, monad transformers), and how to specify distributions in monad-bayes (see the [docs](probprog.md))
@@ -6,7 +6,7 @@ That's enough to understand the core ideas, but for the more advanced content, y
## References
-monad-bayes is the codebase accompanying the theory of probabilistic programming described in [this paper](https://arxiv.org/pdf/1711.03219.pdf).
+Monad-Bayes is the codebase accompanying the theory of probabilistic programming described in [this paper](https://arxiv.org/pdf/1711.03219.pdf).
## The core typeclasses
@@ -27,7 +27,7 @@ class Monad m => MonadDistribution m where
random :: m Double
```
-This one method, `random`, represents a uniform distribution over {math}`[0,1]`. (`MonadDistribution` actually has a few other distributions, but that's not essential.)
+This one method, `random`, represents a uniform distribution over $[0,1]$. (`MonadDistribution` actually has a few other distributions, but that's not essential.)
What comes next is clever: you can define any other distribution you like in terms of `random`. As an example:
@@ -44,7 +44,7 @@ normal m s = fmap (quantile (normalDistr m s)) random
`normalDistr` comes from a separate library `Statistics.Distribution.Normal` and `quantile (normalDistr m s) :: Double -> Double` is the inverse CDF of the normal, a deterministic function.
-Again, to emphasize: **all of our randomness can be reduced to draws from a uniform distribution over the interval {math}`[0,1]`**.
+Again, to emphasize: **all of our randomness can be reduced to draws from a uniform distribution over the interval $[0,1]$**.
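+The same trick gives other distributions too; for instance (a sketch, with `exponential'` a hypothetical helper rather than the library's own definition):
+
+```haskell
+-- Inverse-CDF transform: -log(1 - u) / rate is Exponential(rate) when u is uniform on [0,1].
+exponential' :: MonadDistribution m => Double -> m Double
+exponential' rate = fmap (\u -> negate (log (1 - u)) / rate) random
+```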
So we now have a way of constructing distributions in a monadic fashion. As a simple example:
@@ -56,7 +56,7 @@ example = do
return (x + y > 1.5)
```
-Think of this as the procedure of first sampling uniformly from {math}`[0,1]`, then from {math}`[0,x]`, and then returning the Boolean {math}`x + y > 1.5`. More precisely, this is the **marginal** probability of {math}`x + y > 1.5`.
+Think of this as the procedure of first sampling uniformly from $[0,1]$, then from $[0,x]$, and then returning the Boolean $x + y > 1.5$. More precisely, this is the **marginal** probability of $x + y > 1.5$.
**Technical note**: `MonadDistribution` actually contains a number of other distributions beyond `random`, which by default are defined in terms of `random`, but allow for different definitions when desired. For example, `Sampler` (an instance of `MonadDistribution` in Control.Monad.Sampler) defines `normal` and other distributions independently of `random`.
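+The pattern looks roughly like this (a simplified sketch, not the real class definition):
+
+```haskell
+class Monad m => MonadDistributionSketch m where
+  randomSketch :: m Double
+  bernoulliSketch :: Double -> m Bool
+  -- default definition in terms of randomSketch; an instance like Sampler may override it
+  bernoulliSketch p = fmap (< p) randomSketch
+```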
@@ -603,11 +603,11 @@ newtype Integrator a = Integrator {getCont :: Cont Double a}
This `MonadDistribution` instance interprets a probabilistic program as a numerical integrator. For a nice explanation, see [this blog post](https://jtobin.io/giry-monad-implementation).
`Integrator a` is isomorphic to `(a -> Double) -> Double`.
-A program `model` of type `Integrator a` will take a function `f` and calculate {math}`E_{p}[f] = \int f(x)*p(x)` where {math}`p` is the density of `model`.
+A program `model` of type `Integrator a` will take a function `f` and calculate $E_{p}[f] = \int f(x)\,p(x)\,dx$ where $p$ is the density of `model`.
-The integral for the expectation is performed by quadrature, using the tanh-sinh approach. For example, `random :: Integrator Double` is the program which takes a function `f` and integrates `f` over the {math}`(0,1)` range.
+The integral for the expectation is performed by quadrature, using the tanh-sinh approach. For example, `random :: Integrator Double` is the program which takes a function `f` and integrates `f` over the $(0,1)$ range.
-We can calculate the probability for an interval {math}`(a,b)` of any model of type `Integrator Double` by setting `f` to be the function that returns {math}`1` for that range, else {math}`0`. Similarly for the CDF, MGF and so on.
+We can calculate the probability of an interval $(a,b)$ for any model of type `Integrator Double` by setting `f` to be the function that returns $1$ on that range, else $0$. Similarly for the CDF, MGF and so on.
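+A sketch of that indicator-function idea (assuming an `expectation :: Integrator Double -> Double`, as used in the user guide):
+
+```haskell
+probabilityOf :: (Double, Double) -> Integrator Double -> Double
+probabilityOf (a, b) model = expectation (fmap indicator model)
+  where indicator x = if a <= x && x <= b then 1 else 0
+```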
## Inference methods under the hood
@@ -624,7 +624,7 @@ example = replicateM 100 $ do
return x
```
-Doing `enumerator example` will create a list of {math}`2^{100}` entries, all but one of which have {math}`0` mass. (See below for a way to perform this inference efficiently).
+Doing `enumerator example` will create a list of $2^{100}$ entries, all but one of which have $0$ mass. (See below for a way to perform this inference efficiently).
The main purpose of `Enumerator` is didactic, as a way to understand simple discrete distributions in full. In addition, you can use it in concert with transformers like `Weighted`, to get a sense of how they work. For example, consider:
@@ -654,23 +654,6 @@ model = do
is really an unnormalized measure, rather than a probability distribution. `normalize` views it as of type `Weighted Integrator Double`, which is isomorphic to `(Double -> (Double, Log Double) -> Double)`. This can be used to compute the normalization constant, and divide the integrator's output by it, all within `Integrator`.
-### Quadrature
-
-As described on the section on `Integrator`, we can interpret our probabilistic program of type `MonadDistribution m => m a` as having concrete type `Integrator a`. This views our program as an integrator, allowing us to calculate expectations, probabilities and so on via quadrature (i.e. numerical approximation of an integral).
-
-This can also handle programs of type `MonadMeasure m => m a`, that is, programs with `factor` statements. For these cases, a function `normalize :: Weighted Integrator a -> Integrator a` is employed. For example,
-
-```haskell
-model :: MonadMeasure m => m Double
-model = do
- var <- gamma 1 1
- n <- normal 0 (sqrt var)
- condition (n > 0)
- return var
-```
-
-is really an unnormalized measure, rather than a probability distribution. `normalize` views it as of type `Weighted Integrator Double`, which is isomorphic to `((Double -> (Double, Log Double)) -> (Double, Log Double))`. This can be used to compute the normalization constant, and divide the integrator's output by it, all within `Integrator`.
-
### Independent forward sampling
For any program of type `p = MonadDistribution m => m a`, we may do `sampler p` or `runST $ sampleSTfixed p`. Note that if there are any calls to `factor` in the program, then it cannot have type `MonadDistribution m`.
@@ -753,9 +736,9 @@ example = replicateM 100 $ do
return x
```
-Naive enumeration, as in `enumerator example` is enormously and needlessly inefficient, because it will create a {math}`2^{100}` size list of possible values. What we'd like to do is to throw away values of `x` that are `False` at each condition statement, rather than carrying them along forever.
+Naive enumeration, as in `enumerator example`, is enormously and needlessly inefficient, because it will create a list of $2^{100}$ possible values. What we'd like to do is to throw away values of `x` that are `False` at each condition statement, rather than carrying them along forever.
-Suppose we have a function `removeZeros :: Enumerator a -> Enumerator a`, which removes values of the distribution with {math}`0` mass from `Enumerator`. We can then write `enumerator $ sequentially removeZeros 100 $ model` to run `removeZeros` at each of the 100 `condition` statements, making the algorithm run quickly.
+Suppose we have a function `removeZeros :: Enumerator a -> Enumerator a`, which removes values of the distribution with $0$ mass from `Enumerator`. We can then write `enumerator $ sequentially removeZeros 100 $ model` to run `removeZeros` at each of the 100 `condition` statements, making the algorithm run quickly.
### Sequential Monte Carlo
@@ -860,7 +843,7 @@ This means that an `S (T (P m)) a` is a program "interpreted as a population of
So the algorithm works by creating `n` particles, and at each of the first `k` calls to `factor`, first resampling the population and then for each particle in the population, doing an MH-MCMC walk for `t` steps to update it.
-### Sequential Monte Carlo squared ({math}`SMC^2`)
+### Sequential Monte Carlo Squared
This combines RMSMC and PMMH. That is, it is RMSMC, but for the MCMC rejuvenation procedure, PMMH is used instead of MH.
diff --git a/docs/make.bat b/docs/make.bat
deleted file mode 100644
index 6fcf05b4..00000000
--- a/docs/make.bat
+++ /dev/null
@@ -1,35 +0,0 @@
-@ECHO OFF
-
-pushd %~dp0
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
-)
-set SOURCEDIR=source
-set BUILDDIR=build
-
-if "%1" == "" goto help
-
-%SPHINXBUILD% >NUL 2>NUL
-if errorlevel 9009 (
- echo.
- echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
- echo.installed, then set the SPHINXBUILD environment variable to point
- echo.to the full path of the 'sphinx-build' executable. Alternatively you
- echo.may add the Sphinx directory to PATH.
- echo.
- echo.If you don't have Sphinx installed, grab it from
- echo.https://www.sphinx-doc.org/
- exit /b 1
-)
-
-%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-goto end
-
-:help
-%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-
-:end
-popd
diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml
new file mode 100644
index 00000000..acacf977
--- /dev/null
+++ b/docs/mkdocs.yml
@@ -0,0 +1,62 @@
+site_name: Probabilistic Programming in Haskell
+theme:
+ name: material
+ features:
+ - content.code.annotate
+ - content.tooltips
+ - navigation.sections
+ - navigation.top
+ - navigation.tracking
+ - search.highlight
+ - search.share
+ - search.suggest
+ - toc.follow
+ palette:
+ - scheme: default
+ primary: indigo
+ accent: indigo
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ - scheme: slate
+ primary: indigo
+ accent: indigo
+ toggle:
+ icon: material/brightness-4
+ name: Switch to light mode
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/tweag/monad-bayes
+
+markdown_extensions:
+ - pymdownx.arithmatex:
+ generic: true
+ - admonition
+ - attr_list
+ - pymdownx.details
+ - pymdownx.emoji:
+ emoji_index: !!python/name:materialx.emoji.twemoji
+ emoji_generator: !!python/name:materialx.emoji.to_svg
+ - pymdownx.highlight:
+ anchor_linenums: true
+ - pymdownx.inlinehilite
+ - pymdownx.snippets
+ - pymdownx.superfences
+ - pymdownx.tabbed:
+ alternate_style: true
+ - toc:
+ permalink: true
+extra_javascript:
+ - javascripts/mathjax.js
+ - https://polyfill.io/v3/polyfill.min.js?features=es6
+ - https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js
+
+
+nav:
+ - 'index.md'
+ - 'probprog.md'
+ - 'tutorials.md'
+ - 'examples.md'
+ - 'usage.md'
\ No newline at end of file
diff --git a/docs/netlify.toml b/docs/netlify.toml
new file mode 100644
index 00000000..b94d3e20
--- /dev/null
+++ b/docs/netlify.toml
@@ -0,0 +1,3 @@
+[build]
+ command = "mkdocs build"
+ publish = "site"
\ No newline at end of file
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 04a1a78e..51b61b75 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,32 +1,3 @@
-alabaster==0.7.12
-Babel==2.10.3
-certifi==2022.6.15
-charset-normalizer==2.0.12
-docutils==0.18.1
-idna==3.3
-imagesize==1.3.0
-importlib-metadata==4.11.4
-Jinja2==3.1.2
-markdown-it-py==2.1.0
-MarkupSafe==2.1.1
-mdit-py-plugins==0.3.0
-mdurl==0.1.1
-myst-parser==0.18.0
-packaging==21.3
-Pygments==2.12.0
-pyparsing==3.0.9
-pytz==2022.1
-PyYAML==6.0
-requests==2.28.0
-snowballstemmer==2.2.0
-Sphinx==5.0.2
-sphinx-theme==1.0
-sphinxcontrib-applehelp==1.0.2
-sphinxcontrib-devhelp==1.0.2
-sphinxcontrib-htmlhelp==2.0.0
-sphinxcontrib-jsmath==1.0.1
-sphinxcontrib-qthelp==1.0.3
-sphinxcontrib-serializinghtml==1.1.5
-typing_extensions==4.2.0
-urllib3==1.26.9
-zipp==3.8.0
+mkdocs
+mkdocs-material
+pymdown-extensions
\ No newline at end of file
diff --git a/docs/runtime.txt b/docs/runtime.txt
new file mode 100644
index 00000000..98fccd6d
--- /dev/null
+++ b/docs/runtime.txt
@@ -0,0 +1 @@
+3.8
\ No newline at end of file
diff --git a/docs/source/conf.py b/docs/source/conf.py
deleted file mode 100644
index 1f6e3558..00000000
--- a/docs/source/conf.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Configuration file for the Sphinx documentation builder.
-#
-# This file only contains a selection of the most common options. For a full
-# list see the documentation:
-# https://www.sphinx-doc.org/en/master/usage/configuration.html
-
-import sphinx_theme
-
-
-# -- Path setup --------------------------------------------------------------
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#
-# import os
-# import sys
-# sys.path.insert(0, os.path.abspath('.'))
-
-
-# -- Project information -----------------------------------------------------
-
-project = 'monad-bayes'
-copyright = '2021, Adam Scibior'
-author = 'Adam Scibior, Reuben Cohn-Gordon'
-
-
-# -- General configuration ---------------------------------------------------
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = ['myst_parser', 'sphinx.ext.todo', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig']
-
-# mathjax_path = 'http://cdn.mathjax.org/mathjax/latest/MathJax.js'
-# mathjax_path="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-# This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = []
-
-
-# -- Options for HTML output -------------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-#
-html_theme = 'stanford_theme'
-# html_theme = "sphinx_rtd_theme"
-html_theme_path = [sphinx_theme.get_html_theme_path('stanford-theme')]
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
\ No newline at end of file
diff --git a/docs/source/index.rst b/docs/source/index.rst
deleted file mode 100644
index a9864a9f..00000000
--- a/docs/source/index.rst
+++ /dev/null
@@ -1,25 +0,0 @@
-.. monad-bayes documentation master file, created by
- sphinx-quickstart on Fri Dec 17 18:38:02 2021.
- You can adapt this file completely to your liking, but it should at least
- contain the root `toctree` directive.
-
-Documentation for Monad-Bayes
-=======================================
-
-Monad-bayes is a library for doing probabilistic programming in Haskell.
-You can specify your model independent of the inference method as in other
-probabilistic programming languages.
-A unique feature is the modular construction of inference algorithms from simple parts. See this short paper for details: http://approximateinference.org/accepted/ScibiorGhahramani2016.pdf
-
-In addition, Monad-Bayes is a normal Haskell library, not a separate language implemented using Haskell,
-so full interoperation with arbitrary Haskell code is totally straightforward.
-
-.. The *user guide* shows how to write models in Monad-Bayes and perform inference.
-.. The *developer guide* shows how the library works under the hood.
-
-
-.. toctree::
- probprog
- usage
- :maxdepth: 2
- :caption: Contents:
\ No newline at end of file
diff --git a/monad-bayes-site/_cache/-14469358728363703 b/monad-bayes-site/_cache/-14469358728363703
deleted file mode 100644
index ef444f92..00000000
Binary files a/monad-bayes-site/_cache/-14469358728363703 and /dev/null differ
diff --git a/monad-bayes-site/_cache/-1749146891218283582 b/monad-bayes-site/_cache/-1749146891218283582
deleted file mode 100644
index a3de3498..00000000
Binary files a/monad-bayes-site/_cache/-1749146891218283582 and /dev/null differ
diff --git a/monad-bayes-site/_cache/-2095085398174420213 b/monad-bayes-site/_cache/-2095085398174420213
deleted file mode 100644
index 98b71a6d..00000000
Binary files a/monad-bayes-site/_cache/-2095085398174420213 and /dev/null differ
diff --git a/monad-bayes-site/_cache/-2375272998066041756 b/monad-bayes-site/_cache/-2375272998066041756
deleted file mode 100644
index 00f186ed..00000000
Binary files a/monad-bayes-site/_cache/-2375272998066041756 and /dev/null differ
diff --git a/monad-bayes-site/_cache/-418326561550140877 b/monad-bayes-site/_cache/-418326561550140877
deleted file mode 100644
index 3f937bb9..00000000
Binary files a/monad-bayes-site/_cache/-418326561550140877 and /dev/null differ
diff --git a/monad-bayes-site/_cache/-467837984207213569 b/monad-bayes-site/_cache/-467837984207213569
deleted file mode 100644
index 05580600..00000000
Binary files a/monad-bayes-site/_cache/-467837984207213569 and /dev/null differ
diff --git a/monad-bayes-site/_cache/-486650535700094795 b/monad-bayes-site/_cache/-486650535700094795
deleted file mode 100644
index f76dd238..00000000
Binary files a/monad-bayes-site/_cache/-486650535700094795 and /dev/null differ
diff --git a/monad-bayes-site/_cache/-5172228707638515792 b/monad-bayes-site/_cache/-5172228707638515792
deleted file mode 100644
index 8f37f346..00000000
Binary files a/monad-bayes-site/_cache/-5172228707638515792 and /dev/null differ
diff --git a/monad-bayes-site/_cache/-7115516761682178897 b/monad-bayes-site/_cache/-7115516761682178897
deleted file mode 100644
index af36317b..00000000
Binary files a/monad-bayes-site/_cache/-7115516761682178897 and /dev/null differ
diff --git a/monad-bayes-site/_cache/-7705901527387215267 b/monad-bayes-site/_cache/-7705901527387215267
deleted file mode 100644
index bc8840b2..00000000
Binary files a/monad-bayes-site/_cache/-7705901527387215267 and /dev/null differ
diff --git a/monad-bayes-site/_cache/-8213771197072494705 b/monad-bayes-site/_cache/-8213771197072494705
deleted file mode 100644
index bc8840b2..00000000
Binary files a/monad-bayes-site/_cache/-8213771197072494705 and /dev/null differ
diff --git a/monad-bayes-site/_cache/-9013735153427741298 b/monad-bayes-site/_cache/-9013735153427741298
deleted file mode 100644
index 8f0c6a61..00000000
Binary files a/monad-bayes-site/_cache/-9013735153427741298 and /dev/null differ
diff --git a/monad-bayes-site/_cache/1368425861625336465 b/monad-bayes-site/_cache/1368425861625336465
deleted file mode 100644
index 057620e3..00000000
Binary files a/monad-bayes-site/_cache/1368425861625336465 and /dev/null differ
diff --git a/monad-bayes-site/_cache/1574220054724235017 b/monad-bayes-site/_cache/1574220054724235017
deleted file mode 100644
index fe8b725d..00000000
Binary files a/monad-bayes-site/_cache/1574220054724235017 and /dev/null differ
diff --git a/monad-bayes-site/_cache/1699790891411764839 b/monad-bayes-site/_cache/1699790891411764839
deleted file mode 100644
index ce1a063f..00000000
Binary files a/monad-bayes-site/_cache/1699790891411764839 and /dev/null differ
diff --git a/monad-bayes-site/_cache/2373810728545156078 b/monad-bayes-site/_cache/2373810728545156078
deleted file mode 100644
index 2fdd705e..00000000
Binary files a/monad-bayes-site/_cache/2373810728545156078 and /dev/null differ
diff --git a/monad-bayes-site/_cache/2449001432756035941 b/monad-bayes-site/_cache/2449001432756035941
deleted file mode 100644
index 652344dc..00000000
Binary files a/monad-bayes-site/_cache/2449001432756035941 and /dev/null differ
diff --git a/monad-bayes-site/_cache/4058882883785258270 b/monad-bayes-site/_cache/4058882883785258270
deleted file mode 100644
index f76dd238..00000000
Binary files a/monad-bayes-site/_cache/4058882883785258270 and /dev/null differ
diff --git a/monad-bayes-site/_cache/414693611730391602 b/monad-bayes-site/_cache/414693611730391602
deleted file mode 100644
index f76dd238..00000000
Binary files a/monad-bayes-site/_cache/414693611730391602 and /dev/null differ
diff --git a/monad-bayes-site/_cache/4301022619885502842 b/monad-bayes-site/_cache/4301022619885502842
deleted file mode 100644
index aa71383a..00000000
Binary files a/monad-bayes-site/_cache/4301022619885502842 and /dev/null differ
diff --git a/monad-bayes-site/_cache/4760906592812334848 b/monad-bayes-site/_cache/4760906592812334848
deleted file mode 100644
index bc8840b2..00000000
Binary files a/monad-bayes-site/_cache/4760906592812334848 and /dev/null differ
diff --git a/monad-bayes-site/_cache/5479724175313719771 b/monad-bayes-site/_cache/5479724175313719771
deleted file mode 100644
index 97fd5318..00000000
Binary files a/monad-bayes-site/_cache/5479724175313719771 and /dev/null differ
diff --git a/monad-bayes-site/_cache/6071968468844630503 b/monad-bayes-site/_cache/6071968468844630503
deleted file mode 100644
index bc8840b2..00000000
Binary files a/monad-bayes-site/_cache/6071968468844630503 and /dev/null differ
diff --git a/monad-bayes-site/_cache/612309881219222752 b/monad-bayes-site/_cache/612309881219222752
deleted file mode 100644
index db7b5fa8..00000000
Binary files a/monad-bayes-site/_cache/612309881219222752 and /dev/null differ
diff --git a/monad-bayes-site/_cache/61508901266306704 b/monad-bayes-site/_cache/61508901266306704
deleted file mode 100644
index dc05d987..00000000
Binary files a/monad-bayes-site/_cache/61508901266306704 and /dev/null differ
diff --git a/monad-bayes-site/_cache/6529001706401220769 b/monad-bayes-site/_cache/6529001706401220769
deleted file mode 100644
index 26697240..00000000
Binary files a/monad-bayes-site/_cache/6529001706401220769 and /dev/null differ
diff --git a/monad-bayes-site/_cache/6532272550738173416 b/monad-bayes-site/_cache/6532272550738173416
deleted file mode 100644
index c8bced3c..00000000
Binary files a/monad-bayes-site/_cache/6532272550738173416 and /dev/null differ
diff --git a/monad-bayes-site/_cache/7620556062931942817 b/monad-bayes-site/_cache/7620556062931942817
deleted file mode 100644
index e02a62d2..00000000
Binary files a/monad-bayes-site/_cache/7620556062931942817 and /dev/null differ
diff --git a/monad-bayes-site/_cache/7706377979175549665 b/monad-bayes-site/_cache/7706377979175549665
deleted file mode 100644
index 8abb38fe..00000000
Binary files a/monad-bayes-site/_cache/7706377979175549665 and /dev/null differ
diff --git a/monad-bayes-site/_cache/7816614600408871852 b/monad-bayes-site/_cache/7816614600408871852
deleted file mode 100644
index f76dd238..00000000
Binary files a/monad-bayes-site/_cache/7816614600408871852 and /dev/null differ
diff --git a/monad-bayes-site/_cache/7955284607178663634 b/monad-bayes-site/_cache/7955284607178663634
deleted file mode 100644
index 14ad7459..00000000
Binary files a/monad-bayes-site/_cache/7955284607178663634 and /dev/null differ
diff --git a/monad-bayes-site/_cache/8422607591983755365 b/monad-bayes-site/_cache/8422607591983755365
deleted file mode 100644
index 421daaca..00000000
Binary files a/monad-bayes-site/_cache/8422607591983755365 and /dev/null differ
diff --git a/monad-bayes-site/_site/about.html b/monad-bayes-site/_site/about.html
deleted file mode 100644
index ec6e2c31..00000000
--- a/monad-bayes-site/_site/about.html
+++ /dev/null
@@ -1,78 +0,0 @@
-[deleted HTML: the old Hakyll-generated landing page "Monad-Bayes", whose content is duplicated by the new `docs/docs/index.md`]