75 changes: 42 additions & 33 deletions petab/v2/C.py
@@ -79,14 +79,10 @@
NOMINAL_VALUE = "nominalValue"
#: Estimate column in the parameter table
ESTIMATE = "estimate"
#: Initialization prior type column in the parameter table
INITIALIZATION_PRIOR_TYPE = "initializationPriorType"
#: Initialization prior parameters column in the parameter table
INITIALIZATION_PRIOR_PARAMETERS = "initializationPriorParameters"
#: Objective prior type column in the parameter table
OBJECTIVE_PRIOR_TYPE = "objectivePriorType"
#: Objective prior parameters column in the parameter table
OBJECTIVE_PRIOR_PARAMETERS = "objectivePriorParameters"
#: Prior distribution type column in the parameter table
PRIOR_DISTRIBUTION = "priorDistribution"
#: Prior parameters column in the parameter table
PRIOR_PARAMETERS = "priorParameters"

#: Mandatory columns of parameter table
PARAMETER_DF_REQUIRED_COLS = [
@@ -101,10 +97,8 @@
PARAMETER_DF_OPTIONAL_COLS = [
PARAMETER_NAME,
NOMINAL_VALUE,
INITIALIZATION_PRIOR_TYPE,
INITIALIZATION_PRIOR_PARAMETERS,
OBJECTIVE_PRIOR_TYPE,
OBJECTIVE_PRIOR_PARAMETERS,
PRIOR_DISTRIBUTION,
PRIOR_PARAMETERS,
]

#: Parameter table columns
@@ -193,35 +187,50 @@

# NOISE MODELS

#: Uniform distribution
UNIFORM = "uniform"
#: Uniform distribution on the parameter scale
PARAMETER_SCALE_UNIFORM = "parameterScaleUniform"
#: Normal distribution
NORMAL = "normal"
#: Normal distribution on the parameter scale
PARAMETER_SCALE_NORMAL = "parameterScaleNormal"

#: Cauchy distribution.
CAUCHY = "cauchy"
#: Chi-squared distribution.
# FIXME: "chisquare" in PEtab and sbml-distrib, but usually "chi-squared"
CHI_SQUARED = "chisquare"
#: Exponential distribution.
EXPONENTIAL = "exponential"
#: Gamma distribution.
GAMMA = "gamma"
#: Laplace distribution
LAPLACE = "laplace"
#: Laplace distribution on the parameter scale
PARAMETER_SCALE_LAPLACE = "parameterScaleLaplace"
#: Log-normal distribution
LOG_NORMAL = "logNormal"
#: Log10-normal distribution.
LOG10_NORMAL = "log10-normal"
#: Log-Laplace distribution
LOG_LAPLACE = "logLaplace"
LOG_LAPLACE = "log-laplace"
#: Log-normal distribution
LOG_NORMAL = "log-normal"
#: Log-uniform distribution.
LOG_UNIFORM = "log-uniform"
Member:

Not really in scope for this PR, but is log10-uniform missing? I guess this is how we would convert parameterScale=log10 into PEtab v2, or is that just no longer possible?

Member Author:

It's not included in PEtab-dev/PEtab#595, so it's not available in PEtab v2 so far. Right, I guess we'd need it to upconvert v1 priors. Also log10-laplace. Best to be discussed at https://github.com/PEtab-dev/PEtab.
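
For context, here is a purely illustrative sketch of the v1-to-v2 prior upconversion this thread alludes to. The mapping table and function name are assumptions, not part of this PR or of PEtab itself; the commented-out entry marks exactly the gap raised above.

# Hypothetical upconversion of PEtab v1 parameter-scale priors to the new
# v2 distribution names. Mapping and function are illustrative assumptions.
_V1_PRIOR_TO_V2 = {
    ("parameterScaleNormal", "lin"): "normal",
    ("parameterScaleNormal", "log10"): "log10-normal",
    ("parameterScaleUniform", "lin"): "uniform",
    # ("parameterScaleUniform", "log10"): "log10-uniform",  # missing in v2 so far
}

def upconvert_prior_type(v1_prior_type: str, parameter_scale: str) -> str:
    try:
        return _V1_PRIOR_TO_V2[(v1_prior_type, parameter_scale)]
    except KeyError as e:
        raise NotImplementedError(
            f"No PEtab v2 prior for {v1_prior_type} on scale {parameter_scale}"
        ) from e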

#: Normal distribution
NORMAL = "normal"
#: Rayleigh distribution.
RAYLEIGH = "rayleigh"
#: Uniform distribution
UNIFORM = "uniform"

#: Supported prior types
PRIOR_TYPES = [
UNIFORM,
NORMAL,
#: Supported prior distribution types
PRIOR_DISTRIBUTIONS = [
CAUCHY,
CHI_SQUARED,
EXPONENTIAL,
GAMMA,
LAPLACE,
LOG_NORMAL,
LOG10_NORMAL,
LOG_LAPLACE,
PARAMETER_SCALE_UNIFORM,
PARAMETER_SCALE_NORMAL,
PARAMETER_SCALE_LAPLACE,
LOG_NORMAL,
LOG_UNIFORM,
NORMAL,
RAYLEIGH,
UNIFORM,
]


#: Supported noise distributions
NOISE_MODELS = [NORMAL, LAPLACE]

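To illustrate the consolidated constants above, a minimal validation sketch follows. The example row, its values, and the import path are assumptions; the split on C.PARAMETER_SEPARATOR mirrors Parameter._validate_prior_parameters in core.py below.

from petab.v2 import C

# Hypothetical parameter-table row; values are assumptions for illustration.
row = {C.PRIOR_DISTRIBUTION: "log-normal", C.PRIOR_PARAMETERS: "0;1"}

if row[C.PRIOR_DISTRIBUTION] not in C.PRIOR_DISTRIBUTIONS:
    raise ValueError(f"Unknown prior distribution: {row[C.PRIOR_DISTRIBUTION]}")

# priorParameters are serialized as a separator-delimited string.
prior_pars = [
    float(x) for x in row[C.PRIOR_PARAMETERS].split(C.PARAMETER_SEPARATOR)
]
assert prior_pars == [0.0, 1.0]  # assumes PARAMETER_SEPARATOR == ";"
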
73 changes: 42 additions & 31 deletions petab/v2/core.py
@@ -124,35 +124,41 @@ class NoiseDistribution(str, Enum):
LAPLACE = C.LAPLACE


class PriorType(str, Enum):
class PriorDistribution(str, Enum):
"""Prior types.

Prior types as used in the PEtab parameters table.
"""

#: Normal distribution.
NORMAL = C.NORMAL
#: Cauchy distribution.
CAUCHY = C.CAUCHY
#: Chi-squared distribution.
CHI_SQUARED = C.CHI_SQUARED
#: Exponential distribution.
EXPONENTIAL = C.EXPONENTIAL
#: Gamma distribution.
GAMMA = C.GAMMA
#: Laplace distribution.
LAPLACE = C.LAPLACE
#: Uniform distribution.
UNIFORM = C.UNIFORM
#: Log-normal distribution.
LOG_NORMAL = C.LOG_NORMAL
#: Log10-normal distribution.
LOG10_NORMAL = C.LOG10_NORMAL
#: Log-Laplace distribution
LOG_LAPLACE = C.LOG_LAPLACE
PARAMETER_SCALE_NORMAL = C.PARAMETER_SCALE_NORMAL
PARAMETER_SCALE_LAPLACE = C.PARAMETER_SCALE_LAPLACE
PARAMETER_SCALE_UNIFORM = C.PARAMETER_SCALE_UNIFORM

#: Log-normal distribution.
LOG_NORMAL = C.LOG_NORMAL
#: Log-uniform distribution.
LOG_UNIFORM = C.LOG_UNIFORM
#: Normal distribution.
NORMAL = C.NORMAL
#: Rayleigh distribution.
RAYLEIGH = C.RAYLEIGH
#: Uniform distribution.
UNIFORM = C.UNIFORM

#: Objective prior types as used in the PEtab parameters table.
ObjectivePriorType = PriorType
#: Initialization prior types as used in the PEtab parameters table.
InitializationPriorType = PriorType

assert set(C.PRIOR_TYPES) == {e.value for e in ObjectivePriorType}, (
"ObjectivePriorType enum does not match C.PRIOR_TYPES: "
f"{set(C.PRIOR_TYPES)} vs { {e.value for e in ObjectivePriorType} }"
assert set(C.PRIOR_DISTRIBUTIONS) == {e.value for e in PriorDistribution}, (
"PriorDistribution enum does not match C.PRIOR_DISTRIBUTIONS "
f"{set(C.PRIOR_DISTRIBUTIONS)} vs { {e.value for e in PriorDistribution} }"
)


@@ -849,18 +855,16 @@ class Parameter(BaseModel):
ub: float | None = Field(alias=C.UPPER_BOUND, default=None)
#: Nominal value.
nominal_value: float | None = Field(alias=C.NOMINAL_VALUE, default=None)
#: Parameter scale.
# TODO: keep or remove?
scale: ParameterScale = Field(
alias=C.PARAMETER_SCALE, default=ParameterScale.LIN
)
# TODO: change to bool in PEtab, or serialize as 0/1?
# https://github.com/PEtab-dev/PEtab/discussions/610
#: Is the parameter to be estimated?
estimate: bool = Field(alias=C.ESTIMATE, default=True)

# TODO priors
# pydantic vs. petab.v1.priors.Prior
#: Type of parameter prior distribution.
prior_distribution: PriorDistribution | None = Field(
alias=C.PRIOR_DISTRIBUTION, default=None
)
#: Prior distribution parameters.
prior_parameters: list[float] = Field(
alias=C.PRIOR_PARAMETERS, default_factory=list
)

#: :meta private:
model_config = ConfigDict(
@@ -879,13 +883,22 @@ def _validate_id(cls, v):
raise ValueError(f"Invalid ID: {v}")
return v

@field_validator("prior_parameters", mode="before")
@classmethod
def _validate_prior_parameters(cls, v):
if isinstance(v, str):
v = v.split(C.PARAMETER_SEPARATOR)
elif not isinstance(v, Sequence):
v = [v]

return [float(x) for x in v]

@field_validator("estimate", mode="before")
@classmethod
def _validate_estimate_before(cls, v):
if isinstance(v, bool):
return v

# TODO: clarify whether extra whitespace is allowed
if isinstance(v, str):
v = v.strip().lower()
if v == "true":
Expand Down Expand Up @@ -929,8 +942,6 @@ def _validate(self) -> Self:
):
raise ValueError("Lower bound must be less than upper bound.")

# TODO parameterScale?

# TODO priorType, priorParameters

return self
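A short sketch of how the new prior fields on core.Parameter might be populated. The keyword names follow the column aliases visible in this diff; mixing the id field name with aliases assumes populate_by_name is enabled in the (truncated) model_config, and the concrete values are illustrative.

from petab.v2.core import Parameter, PriorDistribution

p = Parameter(
    id="k1",  # assumes populate_by_name; the PEtab column alias may differ
    lowerBound=1e-3,
    upperBound=1e3,
    estimate=True,
    priorDistribution=PriorDistribution.LOG_NORMAL,
    priorParameters="0;1",  # parsed by _validate_prior_parameters
)
assert p.prior_distribution is PriorDistribution.LOG_NORMAL
assert p.prior_parameters == [0.0, 1.0]
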
49 changes: 18 additions & 31 deletions petab/v2/problem.py
@@ -813,14 +813,13 @@ def n_measurements(self) -> int:
"""Number of measurements."""
return len(self.measurement_table.measurements)

# TODO: update after implementing priors in `Parameter`
@property
def n_priors(self) -> int:
"""Number of priors."""
if OBJECTIVE_PRIOR_PARAMETERS not in self.parameter_df:
return 0

return self.parameter_df[OBJECTIVE_PRIOR_PARAMETERS].notna().sum()
return sum(
p.prior_distribution is not None
for p in self.parameter_table.parameters
)

def validate(
self, validation_tasks: list[ValidationTask] = None
@@ -944,10 +943,8 @@ def add_parameter(
scale: str = None,
lb: Number = None,
ub: Number = None,
init_prior_type: str = None,
init_prior_pars: str | Sequence = None,
obj_prior_type: str = None,
obj_prior_pars: str | Sequence = None,
prior_dist: str = None,
prior_pars: str | Sequence = None,
**kwargs,
):
"""Add a parameter to the problem.
@@ -959,11 +956,8 @@
scale: The parameter scale
lb: The lower bound of the parameter
ub: The upper bound of the parameter
init_prior_type: The type of the initialization prior distribution
init_prior_pars: The parameters of the initialization prior
distribution
obj_prior_type: The type of the objective prior distribution
obj_prior_pars: The parameters of the objective prior distribution
prior_dist: The type of the prior distribution
prior_pars: The parameters of the prior distribution
kwargs: additional columns/values to add to the parameter table
"""
record = {
@@ -979,22 +973,14 @@
record[LOWER_BOUND] = lb
if ub is not None:
record[UPPER_BOUND] = ub
if init_prior_type is not None:
record[INITIALIZATION_PRIOR_TYPE] = init_prior_type
if init_prior_pars is not None:
if not isinstance(init_prior_pars, str):
init_prior_pars = PARAMETER_SEPARATOR.join(
map(str, init_prior_pars)
)
record[INITIALIZATION_PRIOR_PARAMETERS] = init_prior_pars
if obj_prior_type is not None:
record[OBJECTIVE_PRIOR_TYPE] = obj_prior_type
if obj_prior_pars is not None:
if not isinstance(obj_prior_pars, str):
obj_prior_pars = PARAMETER_SEPARATOR.join(
map(str, obj_prior_pars)
)
record[OBJECTIVE_PRIOR_PARAMETERS] = obj_prior_pars
if prior_dist is not None:
record[PRIOR_DISTRIBUTION] = prior_dist
if prior_pars is not None:
if isinstance(prior_pars, Sequence) and not isinstance(
prior_pars, str
):
prior_pars = PARAMETER_SEPARATOR.join(map(str, prior_pars))
record[PRIOR_PARAMETERS] = prior_pars
record.update(kwargs)

self.parameter_table += core.Parameter(**record)
@@ -1132,7 +1118,8 @@ def model_dump(self, **kwargs) -> dict[str, Any]:
'id': 'par',
'lb': 0.0,
'nominal_value': None,
'scale': <ParameterScale.LIN: 'lin'>,
'prior_distribution': None,
'prior_parameters': [],
'ub': 1.0}]}
"""
res = {
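
Finally, a usage sketch for the updated add_parameter signature and the reworked n_priors property. Constructing an empty Problem(), the positional parameter ID, and the parameter values are assumptions for illustration.

from petab.v2.problem import Problem

problem = Problem()  # empty-problem construction assumed for illustration
problem.add_parameter(
    "k1",  # parameter ID (first positional argument assumed)
    estimate=True,
    lb=1e-3,
    ub=1e3,
    prior_dist="normal",
    prior_pars=[0.0, 1.0],  # joined with PARAMETER_SEPARATOR into "0.0;1.0"
)
assert problem.n_priors == 1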