Skip to content

Commit

Permalink
Add LogExpM1 transformation (#2601)
Browse files Browse the repository at this point in the history
* Add softplus transformation

Softplus transformation (from non-negative to reals) might be more numerically stable (see Fig. 9 in Kucukelbir et al. 2017).

* add test

* Change name and implement a more numerically stable logexpm1

See https://github.com/tensorflow/tensorflow/blob/0b0d3c12ace80381f4a44365d30275a9a262609b/tensorflow/python/ops/distributions/util.py#L1009 for the derivation

* change default transformation for PositiveContinuous

* Revert "change default transformation for PositiveContinuous"

This reverts commit 8bc036c.

* name change
  • Loading branch information
Junpeng Lao authored and ColCarroll committed Oct 9, 2017
1 parent 0bb0ad1 commit 55da81d
Show file tree
Hide file tree
Showing 2 changed files with 33 additions and 4 deletions.
27 changes: 23 additions & 4 deletions pymc3/distributions/transforms.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from .distribution import draw_values
import numpy as np

__all__ = ['transform', 'stick_breaking', 'logodds', 'interval',
__all__ = ['transform', 'stick_breaking', 'logodds', 'interval', 'log_exp_m1',
'lowerbound', 'upperbound', 'log', 'sum_to_1', 't_stick_breaking']


Expand Down Expand Up @@ -105,12 +105,31 @@ def jacobian_det(self, x):
log = Log()


class LogExpM1(ElemwiseTransform):
    """Transform a positive-valued variable to the whole real line.

    The backward map is softplus, so the unconstrained value y
    corresponds to the constrained value softplus(y) = log(1 + exp(y)).
    """
    name = "log_exp_m1"

    def backward(self, x):
        # Unconstrained -> positive: softplus(x) = log(1 + exp(x)).
        return tt.nnet.softplus(x)

    def forward(self, x):
        """Inverse operation of softplus
        y = Log(Exp(x) - 1)
          = Log(1 - Exp(-x)) + x
        """
        # The rearranged form avoids overflow in exp(x) for large x.
        return x + tt.log(1.-tt.exp(-x))

    def forward_val(self, x, point=None):
        # Reuse the symbolic forward expression for concrete values;
        # `point` is accepted for interface compatibility but unused.
        return self.forward(x)

    def jacobian_det(self, x):
        # d softplus(x)/dx = sigmoid(x), and log sigmoid(x) = -softplus(-x).
        return -tt.nnet.softplus(-x)

log_exp_m1 = LogExpM1()


class LogOdds(ElemwiseTransform):
name = "logodds"

def __init__(self):
pass

def backward(self, x):
return invlogit(x, 0.0)

Expand Down
10 changes: 10 additions & 0 deletions pymc3/tests/test_transforms.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,16 @@ def test_log():
close_to_logical(vals > 0, True, tol)


def test_log_exp_m1():
    # Round-trip identity and Jacobian-determinant checks on R+.
    check_transform_identity(tr.log_exp_m1, Rplusbig)
    check_jacobian_det(tr.log_exp_m1, Rplusbig, elemwise=True)
    check_jacobian_det(
        tr.log_exp_m1, Vector(Rplusbig, 2), tt.dvector, [0, 0], elemwise=True)

    # Values mapped back through the transform must stay positive.
    vals = get_values(tr.log_exp_m1)
    close_to_logical(vals > 0, True, tol)


def test_logodds():
check_transform_identity(tr.logodds, Unit)
check_jacobian_det(tr.logodds, Unit, elemwise=True)
Expand Down

0 comments on commit 55da81d

Please sign in to comment.