http://offtopia.net/infergo-talk/
```
var breastCancer = flip(0.01)
var benignCyst = flip(0.2)
var positiveMammogram = (breastCancer && flip(0.8))
                     || (benignCyst && flip(0.5))
condition(positiveMammogram)
return breastCancer
```
In the example: breast cancer probability given
positive mammogram.
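For this small discrete model the answer can be checked by exhaustively enumerating the four coin flips. The sketch below is plain Go (not Infergo code); it prints roughly 0.076, so a positive mammogram still leaves the probability of cancer under 8%.

```go
package main

import "fmt"

func main() {
    // Probabilities of the four independent flips in the model:
    // breastCancer, benignCyst, flip(0.8), flip(0.5).
    probs := []float64{0.01, 0.2, 0.8, 0.5}
    var pPositive, pCancerAndPositive float64
    for mask := 0; mask < 1<<len(probs); mask++ {
        w := 1.0
        b := make([]bool, len(probs))
        for i, p := range probs {
            if mask&(1<<i) != 0 {
                b[i] = true
                w *= p
            } else {
                w *= 1 - p
            }
        }
        cancer, cyst, f80, f50 := b[0], b[1], b[2], b[3]
        positive := (cancer && f80) || (cyst && f50)
        if positive { // condition(positiveMammogram)
            pPositive += w
            if cancer {
                pCancerAndPositive += w
            }
        }
    }
    fmt.Printf("P(cancer | positive) = %.4f\n", pCancerAndPositive/pPositive)
}
```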
Two polar views:

I mean: [figure]
I write: [figure]
| Framework (download size) | What a fresh install pulls in |
|---|---|
| PyStan (70Mb) | Stan, C++ compiler, Cython, OCaml? |
| Pyro (>600Mb) | Successfully installed contextlib2-0.5.5 decorator-4.3.0 graphviz-0.10.1 networkx-2.2 opt-einsum-2.3.2 pyro-ppl-0.3.0 six-1.12.0 torch-1.0.0 tqdm-4.29.1 |
| Turing.jl (50Mb) | Installed NNlib v0.4.3, Colors v0.9.5, Arpack v0.3.0, BinaryProvider v0.5.3, ForwardDiff v0.10.1, ColorTypes v0.7.5, ProgressMeter v0.9.0, ZipFile v0.8.0, StaticArrays v0.10.2, Juno v0.5.3, ... (~40 packages) |
Go is
```go
type Model struct {
    Data []float64
}

// x[0] is the mean, x[1] is the
// log stddev of the distribution
func (m *Model) Observe(x []float64) float64 {
    // Our prior is a unit normal ...
    ll := Normal.Logps(0, 1, x...)
    // ... but the posterior is based on data observations.
    ll += Normal.Logps(x[0], math.Exp(x[1]), m.Data...)
    return ll
}
```
```go
m := &Model{[]float64{
    -0.854, 1.067, -1.220, 0.818, -0.749,
    0.805, 1.443, 1.069, 1.426, 0.308}}
```
```go
x := []float64{0, 0}

opt := &infer.Momentum{
    Rate:  0.01,
    Decay: 0.998,
}
for iter := 0; iter != 1000; iter++ {
    opt.Step(m, x)
}
mean, logs := x[0], x[1]
```
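The model parameterizes the standard deviation on the log scale, so the estimate on the original scale is recovered by exponentiation. A small follow-up to the snippet above (using the standard fmt and math packages):

```go
stddev := math.Exp(logs) // undo the log-parameterization
fmt.Printf("MLE estimate: mean=%.3f stddev=%.3f\n", mean, stddev)
```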
```go
x := []float64{0, 0}

hmc := &infer.HMC{
    Eps: 0.1,
}
samples := make(chan []float64)
hmc.Sample(m, x, samples)
for i := 0; i != 1000; i++ {
    x = <-samples
}
hmc.Stop()
```
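The consumption loop above keeps only the last draw. To actually summarize the posterior one would accumulate the draws instead; a sketch of an alternative loop over the same samples channel (burn-in handling omitted):

```go
var sumMu, sumLogSigma float64
const n = 1000
for i := 0; i != n; i++ {
    draw := <-samples
    sumMu += draw[0]
    sumLogSigma += draw[1]
}
hmc.Stop()
fmt.Printf("posterior means: mu=%.3f, log-sigma=%.3f\n",
    sumMu/n, sumLogSigma/n)
```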
```go
type Model struct {
    Data chan float64 // data is a channel
    N    int          // batch size
}

func (m *Model) Observe(x []float64) float64 {
    ll := Normal.Logps(0, 1, x...)
    // observe a batch of data from the channel
    for i := 0; i != m.N; i++ {
        ll += Normal.Logp(x[0], math.Exp(x[1]), <-m.Data)
    }
    return ll
}
```
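Something has to keep the channel filled while inference runs; one way is a producer goroutine. A sketch, recycling the same ten observations purely for illustration:

```go
m := &Model{Data: make(chan float64), N: 10}
go func() {
    for {
        for _, y := range []float64{
            -0.854, 1.067, -1.220, 0.818, -0.749,
            0.805, 1.443, 1.069, 1.426, 0.308,
        } {
            m.Data <- y // blocks until Observe reads the value
        }
    }
}()
```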
```go
type Model interface {
    Observe(parameters []float64) float64
}
```
```go
type Expon float64

// Observe returns the (unnormalized) log-density of an
// exponential distribution with rate m at x[0].
func (m Expon) Observe(x []float64) float64 {
    return -x[0] * float64(m)
}
```
```go
var Normal normal

// Observe implements the Model interface.
func (dist normal) Observe(x []float64) float64 {
    mu, sigma, y := x[0], x[1], x[2:]
    return dist.Logps(mu, sigma, y...)
}

// Logp computes the log pdf of a single observation.
func (_ normal) Logp(mu, sigma float64, y float64) float64 {
    ...
}

// Logps computes the log pdf of a vector of observations.
func (_ normal) Logps(mu, sigma float64, y ...float64) float64 {
    ...
}
```
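For reference, the quantity Logp computes is the normal log-density log N(y; mu, sigma). A standalone sketch of that formula using the standard math package (not the library's actual implementation):

```go
// normalLogp is an illustrative stand-in for Logp.
func normalLogp(mu, sigma, y float64) float64 {
    z := (y - mu) / sigma
    return -0.5*z*z - math.Log(sigma) - 0.5*math.Log(2*math.Pi)
}
```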
Only model methods returning a single float64 or nothing are differentiated. Inside those methods, the following is differentiated:

* assignments to float64;
* returns of float64.
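As a hypothetical illustration of these rules, a helper like logPrior below would be rewritten by the model compiler, while a method that returns neither float64 nor nothing is left as-is:

```go
// Differentiated: a model method returning a single float64.
func (m *Model) logPrior(x []float64) float64 {
    return Normal.Logps(0, 1, x...)
}

// Not differentiated: returns an int rather than a float64.
func (m *Model) size() int {
    return len(m.Data)
}
```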
```go
func (m *Model) Observe(x []float64) float64 {
    var ll float64
    ad.Assignment(&ll, ad.Call(func(_ []float64) {
        Normal.Logps(0, 0, x...)
    }, 2, ad.Value(0), ad.Value(1)))
    ad.Assignment(&ll,
        ad.Arithmetic(ad.OpAdd, &ll,
            ad.Call(func(_ []float64) {
                Normal.Logps(0, 0, m.Data...)
            }, 2, &x[0], ad.Elemental(math.Exp, &x[1]))))
    return ad.Return(&ll)
}
```
```go
type adTape struct {
    records    []record    // recorded instructions
    places     []*float64  // variable places
    values     []float64   // stored values
    elementals []elemental // gradients of elementals
    cstack     []counters  // counter stack (see below)
}
```
```go
func Arithmetic(op int, px ...*float64) *float64 {
    tape := tapes.get()
    // Register
    p := Value(0)
    r := record{
        typ: typArithmetic,
        op:  op,
        p:   len(tape.places),
    }
    tape.places = append(tape.places, p)
    tape.places = append(tape.places, px...)
    tape.records = append(tape.records, r)
    // Run
    switch op {
    case OpNeg:
        *p = -*px[0]
        // ...
    }
    return p
}
```
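The backward pass is not shown on the slide; the idea behind a tape like this is that, once Observe has run forward, the recorded instructions are replayed in reverse and adjoints are accumulated by the chain rule. A deliberately simplified, self-contained toy (not Infergo's actual code) that captures just that mechanism:

```go
package main

import "fmt"

// A toy tape: each record remembers an operation and the places
// (pointers) of its result and operands.
type rec struct {
    op        string
    res, a, b *float64
}

type tape struct {
    records  []rec
    adjoints map[*float64]float64
}

func (t *tape) add(a, b *float64) *float64 {
    res := new(float64)
    *res = *a + *b
    t.records = append(t.records, rec{"add", res, a, b})
    return res
}

func (t *tape) mul(a, b *float64) *float64 {
    res := new(float64)
    *res = *a * *b
    t.records = append(t.records, rec{"mul", res, a, b})
    return res
}

// backward replays the tape in reverse, accumulating adjoints
// (partial derivatives of the output) by the chain rule.
func (t *tape) backward(out *float64) {
    t.adjoints = map[*float64]float64{out: 1}
    for i := len(t.records) - 1; i >= 0; i-- {
        r := t.records[i]
        d := t.adjoints[r.res]
        switch r.op {
        case "add": // d(a+b)/da = 1, d(a+b)/db = 1
            t.adjoints[r.a] += d
            t.adjoints[r.b] += d
        case "mul": // d(a*b)/da = b, d(a*b)/db = a
            t.adjoints[r.a] += d * *r.b
            t.adjoints[r.b] += d * *r.a
        }
    }
}

func main() {
    t := &tape{}
    x, y := new(float64), new(float64)
    *x, *y = 3, 4
    z := t.add(t.mul(x, x), y) // z = x*x + y
    t.backward(z)
    fmt.Println(t.adjoints[x], t.adjoints[y]) // 6 1
}
```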
For use with third-party optimization and inference algorithms, a model can also be wrapped into a plain objective returning float64 together with its gradient:
```go
func FuncGrad(m Model) (
    Func func(x []float64) float64,
    Grad func(grad []float64, x []float64))
```
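A usage sketch, assuming (as the earlier snippets suggest) that FuncGrad is exported by the infer package: evaluate the objective and its gradient at a point, in exactly the shape gradient-based optimizers expect.

```go
Func, Grad := infer.FuncGrad(m)
x := []float64{0, 0}
ll := Func(x) // log-likelihood at x
grad := make([]float64, len(x))
Grad(grad, x) // gradient written into grad
fmt.Println(ll, grad)
```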
| Stan | Infergo |
|---|---|
| [figure] | [figure] |
| Infergo (0.5s \| 8.9s) | Stan (54s \| 3.7s) |
|---|---|
| [figure] | [figure] |