From 9baf981340f85536cdbd9768e5125d0c6a7325d7 Mon Sep 17 00:00:00 2001 From: Daniel Weindl Date: Mon, 9 Dec 2024 14:31:27 +0100 Subject: [PATCH] Functions for adding conditions/observables/parameter to Problem (#328) Add functions for adding individual conditions/observables/parameter/measurements to Problem. This will simplify writing test cases and interactively assembling petab problems. `petab.v2.Problem.add_*` will be added / updated to the new format separately. Related to #220. --- petab/v1/mapping.py | 1 + petab/v1/problem.py | 181 +++++++++++++++++++++++++++- petab/v2/petab1to2.py | 21 +++- petab/v2/problem.py | 198 +++++++++++++++++++++++++++++++ tests/v1/test_petab.py | 105 ++++++++-------- tests/{v1 => v2}/test_mapping.py | 6 +- tests/v2/test_problem.py | 102 +++++++++++----- 7 files changed, 518 insertions(+), 96 deletions(-) rename tests/{v1 => v2}/test_mapping.py (83%) diff --git a/petab/v1/mapping.py b/petab/v1/mapping.py index 6eade50e..bae9d5fb 100644 --- a/petab/v1/mapping.py +++ b/petab/v1/mapping.py @@ -1,4 +1,5 @@ """Functionality related to the PEtab entity mapping table""" +# TODO: Move to petab.v2.mapping from pathlib import Path import pandas as pd diff --git a/petab/v1/problem.py b/petab/v1/problem.py index 6145656f..f4951ce6 100644 --- a/petab/v1/problem.py +++ b/petab/v1/problem.py @@ -3,8 +3,9 @@ import os import tempfile -from collections.abc import Iterable +from collections.abc import Iterable, Sequence from math import nan +from numbers import Number from pathlib import Path, PurePosixPath from typing import TYPE_CHECKING from warnings import warn @@ -1005,3 +1006,181 @@ def n_priors(self) -> int: return 0 return self.parameter_df[OBJECTIVE_PRIOR_PARAMETERS].notna().sum() + + def add_condition(self, id_: str, name: str = None, **kwargs): + """Add a simulation condition to the problem. + + Arguments: + id_: The condition id + name: The condition name + kwargs: Parameter, value pairs to add to the condition table. + """ + record = {CONDITION_ID: [id_], **kwargs} + if name is not None: + record[CONDITION_NAME] = name + tmp_df = pd.DataFrame(record).set_index([CONDITION_ID]) + self.condition_df = ( + pd.concat([self.condition_df, tmp_df]) + if self.condition_df is not None + else tmp_df + ) + + def add_observable( + self, + id_: str, + formula: str | float | int, + noise_formula: str | float | int = None, + noise_distribution: str = None, + transform: str = None, + name: str = None, + **kwargs, + ): + """Add an observable to the problem. 
+ + Arguments: + id_: The observable id + formula: The observable formula + noise_formula: The noise formula + noise_distribution: The noise distribution + transform: The observable transformation + name: The observable name + kwargs: additional columns/values to add to the observable table + + """ + record = { + OBSERVABLE_ID: [id_], + OBSERVABLE_FORMULA: [formula], + } + if name is not None: + record[OBSERVABLE_NAME] = [name] + if noise_formula is not None: + record[NOISE_FORMULA] = [noise_formula] + if noise_distribution is not None: + record[NOISE_DISTRIBUTION] = [noise_distribution] + if transform is not None: + record[OBSERVABLE_TRANSFORMATION] = [transform] + record.update(kwargs) + + tmp_df = pd.DataFrame(record).set_index([OBSERVABLE_ID]) + self.observable_df = ( + pd.concat([self.observable_df, tmp_df]) + if self.observable_df is not None + else tmp_df + ) + + def add_parameter( + self, + id_: str, + estimated: bool | str | int = True, + nominal_value=None, + scale: str = None, + lb: Number = None, + ub: Number = None, + init_prior_type: str = None, + init_prior_pars: str | Sequence = None, + obj_prior_type: str = None, + obj_prior_pars: str | Sequence = None, + **kwargs, + ): + """Add a parameter to the problem. + + Arguments: + id_: The parameter id + estimated: Whether the parameter is estimated + nominal_value: The nominal value of the parameter + scale: The parameter scale + lb: The lower bound of the parameter + ub: The upper bound of the parameter + init_prior_type: The type of the initialization prior distribution + init_prior_pars: The parameters of the initialization prior + distribution + obj_prior_type: The type of the objective prior distribution + obj_prior_pars: The parameters of the objective prior distribution + kwargs: additional columns/values to add to the parameter table + """ + record = { + PARAMETER_ID: [id_], + } + if estimated is not None: + record[ESTIMATE] = [ + int(estimated) + if isinstance(estimated, bool | int) + else estimated + ] + if nominal_value is not None: + record[NOMINAL_VALUE] = [nominal_value] + if scale is not None: + record[PARAMETER_SCALE] = [scale] + if lb is not None: + record[LOWER_BOUND] = [lb] + if ub is not None: + record[UPPER_BOUND] = [ub] + if init_prior_type is not None: + record[INITIALIZATION_PRIOR_TYPE] = [init_prior_type] + if init_prior_pars is not None: + if not isinstance(init_prior_pars, str): + init_prior_pars = PARAMETER_SEPARATOR.join( + map(str, init_prior_pars) + ) + record[INITIALIZATION_PRIOR_PARAMETERS] = [init_prior_pars] + if obj_prior_type is not None: + record[OBJECTIVE_PRIOR_TYPE] = [obj_prior_type] + if obj_prior_pars is not None: + if not isinstance(obj_prior_pars, str): + obj_prior_pars = PARAMETER_SEPARATOR.join( + map(str, obj_prior_pars) + ) + record[OBJECTIVE_PRIOR_PARAMETERS] = [obj_prior_pars] + record.update(kwargs) + + tmp_df = pd.DataFrame(record).set_index([PARAMETER_ID]) + self.parameter_df = ( + pd.concat([self.parameter_df, tmp_df]) + if self.parameter_df is not None + else tmp_df + ) + + def add_measurement( + self, + obs_id: str, + sim_cond_id: str, + time: float, + measurement: float, + observable_parameters: Sequence[str] = None, + noise_parameters: Sequence[str] = None, + preeq_cond_id: str = None, + ): + """Add a measurement to the problem. 
+ + Arguments: + obs_id: The observable ID + sim_cond_id: The simulation condition ID + time: The measurement time + measurement: The measurement value + observable_parameters: The observable parameters + noise_parameters: The noise parameters + preeq_cond_id: The pre-equilibration condition ID + """ + record = { + OBSERVABLE_ID: [obs_id], + SIMULATION_CONDITION_ID: [sim_cond_id], + TIME: [time], + MEASUREMENT: [measurement], + } + if observable_parameters is not None: + record[OBSERVABLE_PARAMETERS] = [ + PARAMETER_SEPARATOR.join(observable_parameters) + ] + if noise_parameters is not None: + record[NOISE_PARAMETERS] = [ + PARAMETER_SEPARATOR.join(noise_parameters) + ] + if preeq_cond_id is not None: + record[PREEQUILIBRATION_CONDITION_ID] = [preeq_cond_id] + + tmp_df = pd.DataFrame(record) + self.measurement_df = ( + pd.concat([self.measurement_df, tmp_df]) + if self.measurement_df is not None + else tmp_df + ) diff --git a/petab/v2/petab1to2.py b/petab/v2/petab1to2.py index 86cbe49c..866414c3 100644 --- a/petab/v2/petab1to2.py +++ b/petab/v2/petab1to2.py @@ -2,6 +2,7 @@ import shutil from itertools import chain from pathlib import Path +from urllib.parse import urlparse from pandas.io.common import get_handle, is_url @@ -76,7 +77,7 @@ def petab1to2(yaml_config: Path | str, output_dir: Path | str = None): # condition tables, observable tables, SBML files, parameter table: # no changes - just copy file = yaml_config[C.PARAMETER_FILE] - _copy_file(get_src_path(file), get_dest_path(file)) + _copy_file(get_src_path(file), Path(get_dest_path(file))) for problem_config in yaml_config[C.PROBLEMS]: for file in chain( @@ -89,7 +90,7 @@ def petab1to2(yaml_config: Path | str, output_dir: Path | str = None): problem_config.get(C.MEASUREMENT_FILES, []), problem_config.get(C.VISUALIZATION_FILES, []), ): - _copy_file(get_src_path(file), get_dest_path(file)) + _copy_file(get_src_path(file), Path(get_dest_path(file))) # TODO: Measurements: preequilibration to experiments/timecourses once # finalized @@ -131,10 +132,14 @@ def _update_yaml(yaml_config: dict) -> dict: return yaml_config -def _copy_file(src: Path | str, dest: Path | str): +def _copy_file(src: Path | str, dest: Path): """Copy file.""" - src = str(src) - dest = str(dest) + # src might be a URL - convert to Path if local + src_url = urlparse(src) + if not src_url.scheme: + src = Path(src) + elif src_url.scheme == "file" and not src_url.netloc: + src = Path(src.removeprefix("file:/")) if is_url(src): with get_handle(src, mode="r") as src_handle: @@ -142,4 +147,8 @@ def _copy_file(src: Path | str, dest: Path | str): dest_handle.write(src_handle.handle.read()) return - shutil.copy(str(src), str(dest)) + try: + if dest.samefile(src): + return + except FileNotFoundError: + shutil.copy(str(src), str(dest)) diff --git a/petab/v2/problem.py b/petab/v2/problem.py index 4c36d791..87a9b6e1 100644 --- a/petab/v2/problem.py +++ b/petab/v2/problem.py @@ -4,7 +4,9 @@ import logging import os import tempfile +from collections.abc import Sequence from math import nan +from numbers import Number from pathlib import Path from typing import TYPE_CHECKING @@ -724,3 +726,199 @@ def validate( break return validation_results + + def add_condition(self, id_: str, name: str = None, **kwargs): + """Add a simulation condition to the problem. + + Arguments: + id_: The condition id + name: The condition name + kwargs: Parameter, value pairs to add to the condition table. 
+ """ + record = {CONDITION_ID: [id_], **kwargs} + if name is not None: + record[CONDITION_NAME] = name + tmp_df = pd.DataFrame(record).set_index([CONDITION_ID]) + self.condition_df = ( + pd.concat([self.condition_df, tmp_df]) + if self.condition_df is not None + else tmp_df + ) + + def add_observable( + self, + id_: str, + formula: str, + noise_formula: str | float | int = None, + noise_distribution: str = None, + transform: str = None, + name: str = None, + **kwargs, + ): + """Add an observable to the problem. + + Arguments: + id_: The observable id + formula: The observable formula + noise_formula: The noise formula + noise_distribution: The noise distribution + transform: The observable transformation + name: The observable name + kwargs: additional columns/values to add to the observable table + + """ + record = { + OBSERVABLE_ID: [id_], + OBSERVABLE_FORMULA: [formula], + } + if name is not None: + record[OBSERVABLE_NAME] = [name] + if noise_formula is not None: + record[NOISE_FORMULA] = [noise_formula] + if noise_distribution is not None: + record[NOISE_DISTRIBUTION] = [noise_distribution] + if transform is not None: + record[OBSERVABLE_TRANSFORMATION] = [transform] + record.update(kwargs) + + tmp_df = pd.DataFrame(record).set_index([OBSERVABLE_ID]) + self.observable_df = ( + pd.concat([self.observable_df, tmp_df]) + if self.observable_df is not None + else tmp_df + ) + + def add_parameter( + self, + id_: str, + estimated: bool | str | int = True, + nominal_value=None, + scale: str = None, + lb: Number = None, + ub: Number = None, + init_prior_type: str = None, + init_prior_pars: str | Sequence = None, + obj_prior_type: str = None, + obj_prior_pars: str | Sequence = None, + **kwargs, + ): + """Add a parameter to the problem. + + Arguments: + id_: The parameter id + estimated: Whether the parameter is estimated + nominal_value: The nominal value of the parameter + scale: The parameter scale + lb: The lower bound of the parameter + ub: The upper bound of the parameter + init_prior_type: The type of the initialization prior distribution + init_prior_pars: The parameters of the initialization prior + distribution + obj_prior_type: The type of the objective prior distribution + obj_prior_pars: The parameters of the objective prior distribution + kwargs: additional columns/values to add to the parameter table + """ + record = { + PARAMETER_ID: [id_], + } + if estimated is not None: + record[ESTIMATE] = [ + int(estimated) + if isinstance(estimated, bool | int) + else estimated + ] + if nominal_value is not None: + record[NOMINAL_VALUE] = [nominal_value] + if scale is not None: + record[PARAMETER_SCALE] = [scale] + if lb is not None: + record[LOWER_BOUND] = [lb] + if ub is not None: + record[UPPER_BOUND] = [ub] + if init_prior_type is not None: + record[INITIALIZATION_PRIOR_TYPE] = [init_prior_type] + if init_prior_pars is not None: + if not isinstance(init_prior_pars, str): + init_prior_pars = PARAMETER_SEPARATOR.join( + map(str, init_prior_pars) + ) + record[INITIALIZATION_PRIOR_PARAMETERS] = [init_prior_pars] + if obj_prior_type is not None: + record[OBJECTIVE_PRIOR_TYPE] = [obj_prior_type] + if obj_prior_pars is not None: + if not isinstance(obj_prior_pars, str): + obj_prior_pars = PARAMETER_SEPARATOR.join( + map(str, obj_prior_pars) + ) + record[OBJECTIVE_PRIOR_PARAMETERS] = [obj_prior_pars] + record.update(kwargs) + + tmp_df = pd.DataFrame(record).set_index([PARAMETER_ID]) + self.parameter_df = ( + pd.concat([self.parameter_df, tmp_df]) + if self.parameter_df is not None + else tmp_df 
+ ) + + def add_measurement( + self, + obs_id: str, + sim_cond_id: str, + time: float, + measurement: float, + observable_parameters: Sequence[str] = None, + noise_parameters: Sequence[str] = None, + preeq_cond_id: str = None, + ): + """Add a measurement to the problem. + + Arguments: + obs_id: The observable ID + sim_cond_id: The simulation condition ID + time: The measurement time + measurement: The measurement value + observable_parameters: The observable parameters + noise_parameters: The noise parameters + preeq_cond_id: The pre-equilibration condition ID + """ + record = { + OBSERVABLE_ID: [obs_id], + SIMULATION_CONDITION_ID: [sim_cond_id], + TIME: [time], + MEASUREMENT: [measurement], + } + if observable_parameters is not None: + record[OBSERVABLE_PARAMETERS] = [ + PARAMETER_SEPARATOR.join(observable_parameters) + ] + if noise_parameters is not None: + record[NOISE_PARAMETERS] = [ + PARAMETER_SEPARATOR.join(noise_parameters) + ] + if preeq_cond_id is not None: + record[PREEQUILIBRATION_CONDITION_ID] = [preeq_cond_id] + + tmp_df = pd.DataFrame(record) + self.measurement_df = ( + pd.concat([self.measurement_df, tmp_df]) + if self.measurement_df is not None + else tmp_df + ) + + def add_mapping(self, petab_id: str, model_id: str): + """Add a mapping table entry to the problem. + + Arguments: + petab_id: The new PEtab-compatible ID mapping to `model_id` + model_id: The ID of some entity in the model + """ + record = { + PETAB_ENTITY_ID: [petab_id], + MODEL_ENTITY_ID: [model_id], + } + tmp_df = pd.DataFrame(record).set_index([PETAB_ENTITY_ID]) + self.mapping_df = ( + pd.concat([self.mapping_df, tmp_df]) + if self.mapping_df is not None + else tmp_df + ) diff --git a/tests/v1/test_petab.py b/tests/v1/test_petab.py index 1a3f3344..eac237d2 100644 --- a/tests/v1/test_petab.py +++ b/tests/v1/test_petab.py @@ -16,6 +16,7 @@ import petab.v1 from petab.C import * from petab.models.sbml_model import SbmlModel +from petab.v1 import Problem @pytest.fixture @@ -44,55 +45,61 @@ def petab_problem(): model.addParameter("fixedParameter1", 0.0) model.addParameter("observable_1", 0.0) - measurement_df = pd.DataFrame( - data={ - OBSERVABLE_ID: ["obs1", "obs2"], - MEASUREMENT: [0.1, 0.2], - OBSERVABLE_PARAMETERS: ["", "p1;p2"], - NOISE_PARAMETERS: ["p3;p4", "p5"], - } + petab_problem = petab.Problem() + petab_problem.add_measurement( + obs_id="obs1", + sim_cond_id="condition1", + time=1.0, + measurement=0.1, + noise_parameters=["p3", "p4"], + ) + petab_problem.add_measurement( + obs_id="obs2", + sim_cond_id="condition2", + time=1.0, + measurement=0.2, + observable_parameters=["p1", "p2"], + noise_parameters=["p5"], ) - condition_df = pd.DataFrame( - data={ - CONDITION_ID: ["condition1", "condition2"], - CONDITION_NAME: ["", "Condition 2"], - "fixedParameter1": [1.0, 2.0], - } - ).set_index(CONDITION_ID) + petab_problem.add_condition("condition1", fixedParameter1=1.0) + petab_problem.add_condition( + "condition2", fixedParameter1=2.0, name="Condition 2" + ) - parameter_df = pd.DataFrame( - data={ - PARAMETER_ID: ["dynamicParameter1", "dynamicParameter2"], - PARAMETER_NAME: ["", "..."], - ESTIMATE: [1, 0], - } - ).set_index(PARAMETER_ID) + petab_problem.add_parameter("dynamicParameter1", estimate=1) + petab_problem.add_parameter("dynamicParameter2", estimate=0, name="...") - observable_df = pd.DataFrame( - data={ - OBSERVABLE_ID: ["obs1"], - OBSERVABLE_NAME: ["julius"], - OBSERVABLE_FORMULA: ["observable_1 * observableParameter1_obs1"], - NOISE_FORMULA: ["0.1 * observable_1 * observableParameter1_obs1"], - } 
- ).set_index(OBSERVABLE_ID) + petab_problem.add_observable( + "obs1", + formula="observable_1 * observableParameter1_obs1", + noise_formula="0.1 * observable_1 * observableParameter1_obs1", + name="julius", + ) with tempfile.TemporaryDirectory() as temp_dir: sbml_file_name = Path(temp_dir, "model.xml") libsbml.writeSBMLToFile(model.document, str(sbml_file_name)) measurement_file_name = Path(temp_dir, "measurements.tsv") - petab.write_measurement_df(measurement_df, measurement_file_name) + petab.write_measurement_df( + petab_problem.measurement_df, measurement_file_name + ) condition_file_name = Path(temp_dir, "conditions.tsv") - petab.write_condition_df(condition_df, condition_file_name) + petab.write_condition_df( + petab_problem.condition_df, condition_file_name + ) parameter_file_name = Path(temp_dir, "parameters.tsv") - petab.write_parameter_df(parameter_df, parameter_file_name) + petab.write_parameter_df( + petab_problem.parameter_df, parameter_file_name + ) observable_file_name = Path(temp_dir, "observables.tsv") - petab.write_observable_df(observable_df, observable_file_name) + petab.write_observable_df( + petab_problem.observable_df, observable_file_name + ) with pytest.deprecated_call(): petab_problem = petab.Problem.from_files( @@ -822,44 +829,26 @@ def test_problem_from_yaml_v1_multiple_files(): observable_files: [observables1.tsv, observables2.tsv] sbml_files: [] """ - with tempfile.TemporaryDirectory() as tmpdir: yaml_path = Path(tmpdir, "problem.yaml") with open(yaml_path, "w") as f: f.write(yaml_config) for i in (1, 2): - condition_df = pd.DataFrame( - { - CONDITION_ID: [f"condition{i}"], - } - ) - condition_df.set_index([CONDITION_ID], inplace=True) + problem = Problem() + problem.add_condition(f"condition{i}") petab.write_condition_df( - condition_df, Path(tmpdir, f"conditions{i}.tsv") + problem.condition_df, Path(tmpdir, f"conditions{i}.tsv") ) - measurement_df = pd.DataFrame( - { - SIMULATION_CONDITION_ID: [f"condition{i}"], - OBSERVABLE_ID: [f"observable{i}"], - TIME: [i], - MEASUREMENT: [1], - } - ) + problem.add_measurement(f"observable{i}", f"condition{i}", 1, 1) petab.write_measurement_df( - measurement_df, Path(tmpdir, f"measurements{i}.tsv") + problem.measurement_df, Path(tmpdir, f"measurements{i}.tsv") ) - observables_df = pd.DataFrame( - { - OBSERVABLE_ID: [f"observable{i}"], - OBSERVABLE_FORMULA: [1], - NOISE_FORMULA: [1], - } - ) + problem.add_observable(f"observable{i}", 1, 1) petab.write_observable_df( - observables_df, Path(tmpdir, f"observables{i}.tsv") + problem.observable_df, Path(tmpdir, f"observables{i}.tsv") ) petab_problem1 = petab.Problem.from_yaml(yaml_path) diff --git a/tests/v1/test_mapping.py b/tests/v2/test_mapping.py similarity index 83% rename from tests/v1/test_mapping.py rename to tests/v2/test_mapping.py index 4eaaaeb2..60ba6b49 100644 --- a/tests/v1/test_mapping.py +++ b/tests/v2/test_mapping.py @@ -1,11 +1,11 @@ -"""Tests related to petab.mapping""" +"""Tests related to petab.v2.mapping""" import tempfile import pandas as pd import pytest -from petab.C import * # noqa: F403 -from petab.mapping import * +from petab.v2 import get_mapping_df, write_mapping_df +from petab.v2.C import * # noqa: F403 def test_get_mapping_df(): diff --git a/tests/v2/test_problem.py b/tests/v2/test_problem.py index 418f7818..9d13e3df 100644 --- a/tests/v2/test_problem.py +++ b/tests/v2/test_problem.py @@ -1,18 +1,24 @@ import tempfile from pathlib import Path +import numpy as np import pandas as pd +from pandas.testing import assert_frame_equal import petab.v2 
as petab from petab.v2 import Problem from petab.v2.C import ( CONDITION_ID, - MEASUREMENT, + ESTIMATE, + LOWER_BOUND, + MODEL_ENTITY_ID, NOISE_FORMULA, + NOMINAL_VALUE, OBSERVABLE_FORMULA, OBSERVABLE_ID, - SIMULATION_CONDITION_ID, - TIME, + PARAMETER_ID, + PETAB_ENTITY_ID, + UPPER_BOUND, ) @@ -55,44 +61,26 @@ def test_problem_from_yaml_multiple_files(): observable_files: [observables1.tsv, observables2.tsv] model_files: """ - with tempfile.TemporaryDirectory() as tmpdir: yaml_path = Path(tmpdir, "problem.yaml") with open(yaml_path, "w") as f: f.write(yaml_config) for i in (1, 2): - condition_df = pd.DataFrame( - { - CONDITION_ID: [f"condition{i}"], - } - ) - condition_df.set_index([CONDITION_ID], inplace=True) + problem = Problem() + problem.add_condition(f"condition{i}") petab.write_condition_df( - condition_df, Path(tmpdir, f"conditions{i}.tsv") + problem.condition_df, Path(tmpdir, f"conditions{i}.tsv") ) - measurement_df = pd.DataFrame( - { - SIMULATION_CONDITION_ID: [f"condition{i}"], - OBSERVABLE_ID: [f"observable{i}"], - TIME: [i], - MEASUREMENT: [1], - } - ) + problem.add_measurement(f"observable{i}", f"condition{i}", 1, 1) petab.write_measurement_df( - measurement_df, Path(tmpdir, f"measurements{i}.tsv") + problem.measurement_df, Path(tmpdir, f"measurements{i}.tsv") ) - observables_df = pd.DataFrame( - { - OBSERVABLE_ID: [f"observable{i}"], - OBSERVABLE_FORMULA: [1], - NOISE_FORMULA: [1], - } - ) + problem.add_observable(f"observable{i}", 1, 1) petab.write_observable_df( - observables_df, Path(tmpdir, f"observables{i}.tsv") + problem.observable_df, Path(tmpdir, f"observables{i}.tsv") ) petab_problem1 = petab.Problem.from_yaml(yaml_path) @@ -105,3 +93,61 @@ def test_problem_from_yaml_multiple_files(): assert petab_problem.measurement_df.shape[0] == 2 assert petab_problem.observable_df.shape[0] == 2 assert petab_problem.condition_df.shape[0] == 2 + + +def test_modify_problem(): + """Test modifying a problem via the API.""" + problem = Problem() + problem.add_condition("condition1", parameter1=1) + problem.add_condition("condition2", parameter2=2) + + exp_condition_df = pd.DataFrame( + data={ + CONDITION_ID: ["condition1", "condition2"], + "parameter1": [1.0, np.nan], + "parameter2": [np.nan, 2.0], + } + ).set_index([CONDITION_ID]) + assert_frame_equal( + problem.condition_df, exp_condition_df, check_dtype=False + ) + + problem.add_observable("observable1", "1") + problem.add_observable("observable2", "2", noise_formula=2.2) + + exp_observable_df = pd.DataFrame( + data={ + OBSERVABLE_ID: ["observable1", "observable2"], + OBSERVABLE_FORMULA: ["1", "2"], + NOISE_FORMULA: [np.nan, 2.2], + } + ).set_index([OBSERVABLE_ID]) + assert_frame_equal( + problem.observable_df, exp_observable_df, check_dtype=False + ) + + problem.add_parameter("parameter1", 1, 0, lb=1, ub=2) + problem.add_parameter("parameter2", False, 2) + + exp_parameter_df = pd.DataFrame( + data={ + PARAMETER_ID: ["parameter1", "parameter2"], + ESTIMATE: [1, 0], + NOMINAL_VALUE: [0.0, 2.0], + LOWER_BOUND: [1.0, np.nan], + UPPER_BOUND: [2.0, np.nan], + } + ).set_index([PARAMETER_ID]) + assert_frame_equal( + problem.parameter_df, exp_parameter_df, check_dtype=False + ) + + problem.add_mapping("new_petab_id", "some_model_entity_id") + + exp_mapping_df = pd.DataFrame( + data={ + PETAB_ENTITY_ID: ["new_petab_id"], + MODEL_ENTITY_ID: ["some_model_entity_id"], + } + ).set_index([PETAB_ENTITY_ID]) + assert_frame_equal(problem.mapping_df, exp_mapping_df, check_dtype=False)
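
Usage sketch (illustrative only, not part of the patch): based on the method signatures introduced above, a PEtab problem could be assembled interactively as follows. The entity IDs, formulas, and values below are made up for illustration; only the `add_*` signatures are taken from this diff.

    from petab.v1 import Problem

    problem = Problem()
    # simulation condition that sets model parameter "fixedParameter1" to 1.0
    problem.add_condition("c0", fixedParameter1=1.0)
    # observable with a constant-noise model
    problem.add_observable("obs_a", formula="A", noise_formula=0.5)
    # estimated parameter with bounds, on log10 scale
    problem.add_parameter("k1", estimated=True, scale="log10", lb=1e-3, ub=1e3)
    # a single measurement of "obs_a" under condition "c0" at t = 1.0
    problem.add_measurement("obs_a", sim_cond_id="c0", time=1.0, measurement=0.7)
    # petab.v2.Problem provides the same helpers plus add_mapping(petab_id, model_id)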