Merged
44 changes: 44 additions & 0 deletions petab/v1/distributions.py
@@ -36,6 +36,7 @@
"Normal",
"Rayleigh",
"Uniform",
"LogUniform",
]


@@ -382,6 +383,10 @@ class Uniform(Distribution):
If ``False``, no transformation is applied.
If a transformation is applied, the lower and upper bounds are the
lower and upper bounds of the underlying uniform distribution.
Note that this differs from the usual definition of a log-uniform
distribution, where the logarithm of the variable is uniformly
distributed between the logarithms of the bounds (see also
:class:`LogUniform`).
Comment on lines 383 to +389
Member
Not sure how to write this most clearly... fine as is.

Suggested change
If a transformation is applied, the lower and upper bounds are the
lower and upper bounds of the underlying uniform distribution.
Note that this differs from the usual definition of a log-uniform
distribution, where the logarithm of the variable is uniformly
distributed between the logarithms of the bounds (see also
:class:`LogUniform`).
The bounds are implemented after any transformation, i.e. the
bounds are ``[low, high]`` on parameter scale. Note that this
differs from the usual definition of a log-uniform distribution
that implements bounds before any transformation, which results in
the bounds ``[log(low), log(high)]`` (see also :class:`LogUniform`).

Member Author

Thanks, but not sure it's clearer. I'll keep the old one for now.
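As an illustrative aside (not part of the PR), a minimal sketch of the two semantics discussed above, assuming both classes are importable from petab.v1.distributions as in the test added in this PR:

import numpy as np
from petab.v1.distributions import LogUniform, Uniform

# Uniform(1, 2, log=True): the underlying uniform distribution lives on
# [1, 2]; after exponentiation, the support on parameter scale is [e^1, e^2].
u = Uniform(1, 2, log=True)
assert u.pdf(np.exp(1.5)) > 0    # inside [e^1, e^2]
assert u.pdf(1.5) == 0           # 1.5 < e^1, outside the transformed support

# LogUniform(1, 2): the bounds are given on parameter scale, so the support
# is [1, 2] and the density is proportional to 1/x on that interval.
lu = LogUniform(1, 2)
assert lu.pdf(1.5) > 0           # inside [1, 2]
assert lu.pdf(np.exp(1.5)) == 0  # e^1.5 ≈ 4.48 > 2, outside the support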

"""

def __init__(
@@ -411,6 +416,45 @@ def _ppf_untransformed_untruncated(self, q) -> np.ndarray | float:
return uniform.ppf(q, loc=self._low, scale=self._high - self._low)


class LogUniform(Distribution):
"""A log-uniform or reciprocal distribution.

A random variable is log-uniformly distributed between ``low`` and ``high``
if its logarithm is uniformly distributed between ``log(low)`` and
``log(high)``.

:param low: The lower bound of the distribution.
:param high: The upper bound of the distribution.
:param trunc: The truncation limits of the distribution.
"""

def __init__(
self,
low: float,
high: float,
trunc: tuple[float, float] | None = None,
):
self._logbase = np.exp(1)
self._low = self._log(low)
self._high = self._log(high)
super().__init__(log=self._logbase, trunc=trunc)

def __repr__(self):
return self._repr({"low": self._low, "high": self._high})

def _sample(self, shape=None) -> np.ndarray | float:
return np.random.uniform(low=self._low, high=self._high, size=shape)

def _pdf_untransformed_untruncated(self, x) -> np.ndarray | float:
return uniform.pdf(x, loc=self._low, scale=self._high - self._low)

def _cdf_untransformed_untruncated(self, x) -> np.ndarray | float:
return uniform.cdf(x, loc=self._low, scale=self._high - self._low)

def _ppf_untransformed_untruncated(self, q) -> np.ndarray | float:
return uniform.ppf(q, loc=self._low, scale=self._high - self._low)
Comment on lines +445 to +455
Member
Not necessarily for this PR, but could support (optional) provision of some rng: np.random.Generator to Distribution.__init__ for custom seed reproducibility.

Member Author
Yes, that should be added. Will do that when refactoring this whole distribution handling.
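
A hypothetical sketch (not part of this PR, and not the current petab API) of how an optional np.random.Generator could be threaded through Distribution.__init__ for reproducible sampling; the class and method names mirror the diff, but the rng handling is invented for illustration:

import numpy as np


class Distribution:
    def __init__(self, *, log=False, trunc=None,
                 rng: np.random.Generator | None = None):
        self._logbase = log
        self._trunc = trunc
        # A default generator preserves the current behavior; an explicit
        # generator makes draws deterministic for a user-chosen seed.
        self._rng = rng if rng is not None else np.random.default_rng()


class LogUniform(Distribution):
    def __init__(self, low, high, trunc=None, rng=None):
        self._low = np.log(low)
        self._high = np.log(high)
        super().__init__(log=np.e, trunc=trunc, rng=rng)

    def _sample(self, shape=None):
        # Draw from the instance's generator instead of the global
        # np.random state.
        return self._rng.uniform(low=self._low, high=self._high, size=shape)


# Two instances seeded identically produce identical samples:
d1 = LogUniform(1, 2, rng=np.random.default_rng(42))
d2 = LogUniform(1, 2, rng=np.random.default_rng(42))
assert np.allclose(d1._sample(5), d2._sample(5))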



class Laplace(Distribution):
"""A (log-)Laplace distribution.

9 changes: 7 additions & 2 deletions petab/v2/core.py
@@ -201,7 +201,7 @@ class PriorDistribution(str, Enum):
PriorDistribution.LAPLACE: Laplace,
PriorDistribution.LOG_LAPLACE: Laplace,
PriorDistribution.LOG_NORMAL: Normal,
PriorDistribution.LOG_UNIFORM: Uniform,
PriorDistribution.LOG_UNIFORM: LogUniform,
PriorDistribution.NORMAL: Normal,
PriorDistribution.RAYLEIGH: Rayleigh,
PriorDistribution.UNIFORM: Uniform,
@@ -1060,7 +1060,12 @@ def prior_dist(self) -> Distribution:
# `Uniform.__init__` does not accept the `trunc` parameter
low = max(self.prior_parameters[0], self.lb)
high = min(self.prior_parameters[1], self.ub)
return cls(low, high, log=log)
return cls(low, high)

if cls == LogUniform:
# Mind the different interpretation of distribution parameters for
# Uniform(..., log=True) and LogUniform!!
return cls(*self.prior_parameters, trunc=[self.lb, self.ub])

return cls(*self.prior_parameters, log=log, trunc=[self.lb, self.ub])

18 changes: 18 additions & 0 deletions tests/v1/test_distributions.py
@@ -1,4 +1,5 @@
import sys
from math import exp

import numpy as np
import pytest
@@ -115,3 +116,20 @@ def cdf(x):
assert_allclose(
distribution.pdf(sample), reference_pdf, rtol=1e-10, atol=1e-14
)


def test_log_uniform():
"""Test Uniform(a, b, log=True) vs LogUniform(a, b)."""
# support between exp(1) and exp(2)
dist = Uniform(1, 2, log=True)
assert dist.pdf(exp(0)) == 0
assert dist.pdf(exp(1)) > 0
assert dist.pdf(exp(2)) > 0
assert dist.pdf(exp(3)) == 0

# support between 1 and 2
dist = LogUniform(1, 2)
assert dist.pdf(0) == 0
assert dist.pdf(1) > 0
assert dist.pdf(2) > 0
assert dist.pdf(3) == 0
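
For completeness, an illustrative cross-check (not part of this PR) of the new class against scipy's log-uniform/reciprocal distribution, assuming the import path below and that pdf accepts arrays as elsewhere in this test module:

import numpy as np
from scipy.stats import loguniform

from petab.v1.distributions import LogUniform  # import path assumed

a, b = 1.0, 2.0
dist = LogUniform(a, b)
# scipy's loguniform has density 1 / (x * (log(b) - log(a))) on [a, b],
# which is what LogUniform(a, b) should reproduce on parameter scale.
x = np.linspace(1.05, 1.95, 7)
assert np.allclose(dist.pdf(x), loguniform.pdf(x, a, b))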