From d1e871fc54415b043361ba704e4ab9d90b504114 Mon Sep 17 00:00:00 2001
From: AdrianSosic
Date: Wed, 22 Jan 2025 14:14:33 +0100
Subject: [PATCH 1/6] Add target minimization test

---
 tests/integration/test_minimization.py | 62 ++++++++++++++++++++++++++
 1 file changed, 62 insertions(+)
 create mode 100644 tests/integration/test_minimization.py

diff --git a/tests/integration/test_minimization.py b/tests/integration/test_minimization.py
new file mode 100644
index 000000000..a818b2298
--- /dev/null
+++ b/tests/integration/test_minimization.py
@@ -0,0 +1,62 @@
+"""Tests for target minimization."""
+
+import numpy as np
+import pandas as pd
+import pytest
+import torch
+from torch.testing import assert_close
+
+from baybe.acquisition.acqfs import qKnowledgeGradient
+from baybe.acquisition.base import AcquisitionFunction
+from baybe.parameters.numerical import NumericalDiscreteParameter
+from baybe.surrogates.gaussian_process.core import GaussianProcessSurrogate
+from baybe.targets.numerical import NumericalTarget
+from baybe.utils.basic import get_subclasses
+from baybe.utils.random import set_random_seed
+
+
+def get_acqf_values(acqf_cls, surrogate, searchspace, objective, df):
+    # TODO: Should be replaced once a proper public interface is available
+    acqf = acqf_cls().to_botorch(surrogate, searchspace, objective, df)
+    return acqf(torch.tensor(searchspace.transform(df).values).unsqueeze(-2))
+
+
+def compute_posterior_and_acqf(acqf_cls, df, searchspace, objective):
+    surrogate_max = GaussianProcessSurrogate()
+    surrogate_max.fit(searchspace, objective, df)
+    with torch.no_grad():
+        posterior = surrogate_max.posterior(df)
+    acqf = get_acqf_values(acqf_cls, surrogate_max, searchspace, objective, df)
+    return posterior, acqf
+
+
+@pytest.mark.parametrize(
+    "acqf_cls",
+    [
+        a
+        for a in get_subclasses(AcquisitionFunction)
+        if not issubclass(a, qKnowledgeGradient)
+    ],
+)
+def test_minimization(acqf_cls):
+    """Maximizing targets is equivalent to minimizing targets with inverted data."""
+    values = np.linspace(10, 20)
+    searchspace = NumericalDiscreteParameter("p", values).to_searchspace()
+
+    # Maximization of plain targets
+    set_random_seed(0)
+    df_max = pd.DataFrame({"p": values, "t": values})
+    obj_max = NumericalTarget("t", "MAX").to_objective()
+    p_min, acqf_max = compute_posterior_and_acqf(acqf_cls, df_max, searchspace, obj_max)
+
+    # Minimization of inverted targets
+    set_random_seed(0)
+    df_min = pd.DataFrame({"p": values, "t": -values})
+    obj_min = NumericalTarget("t", "MIN").to_objective()
+    p_max, acqf_min = compute_posterior_and_acqf(acqf_cls, df_min, searchspace, obj_min)
+
+    # Both must yield identical posterior (modulo the sign) and acquisition values
+    assert torch.equal(p_min.mean, -p_max.mean)
+    assert torch.equal(p_min.mvn.covariance_matrix, p_max.mvn.covariance_matrix)
+    # TODO: https://github.com/pytorch/botorch/issues/2681
+    assert_close(acqf_max, acqf_min, rtol=0.0001, atol=0.1)

From 40029b52459cb34bbb3f215c9b23129d71be6362 Mon Sep 17 00:00:00 2001
From: AdrianSosic
Date: Tue, 21 Jan 2025 10:11:21 +0100
Subject: [PATCH 2/6] Add missing inversion of best_f in minimization mode

---
 CHANGELOG.md              | 2 ++
 baybe/acquisition/base.py | 1 +
 2 files changed, 3 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2df98ae4b..d535a07ea 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -50,6 +50,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
   between constraints and dropped parameters yielding empty parameter sets
 - Minimizing a single `NumericalTarget` with specified bounds/transformation via
   `SingleTargetObjective` no longer erroneously maximizes it
+- Improvement-based Monte Carlo acquisition functions now use the correct
+  reference value in minimization mode
 
 ### Removed
 - `botorch_function_wrapper` utility for creating lookup callables
diff --git a/baybe/acquisition/base.py b/baybe/acquisition/base.py
index a21bb5454..913a8e1a4 100644
--- a/baybe/acquisition/base.py
+++ b/baybe/acquisition/base.py
@@ -115,6 +115,7 @@ def to_botorch(
             if issubclass(acqf_cls, bo_acqf.AnalyticAcquisitionFunction):
                 additional_params["maximize"] = False
             elif issubclass(acqf_cls, bo_acqf.MCAcquisitionFunction):
+                additional_params["best_f"] *= -1.0
                 additional_params["objective"] = LinearMCObjective(
                     torch.tensor([-1.0])
                 )

From fb8853dd430e4c1eb2d6d016e1cf4f0c93d62ef5 Mon Sep 17 00:00:00 2001
From: AdrianSosic
Date: Wed, 22 Jan 2025 14:19:30 +0100
Subject: [PATCH 3/6] Move inversion step to right context

Not all MC acquisition functions use best_f (e.g. qUCB), causing a KeyError in these cases
---
 baybe/acquisition/base.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/baybe/acquisition/base.py b/baybe/acquisition/base.py
index 913a8e1a4..cec7c1d92 100644
--- a/baybe/acquisition/base.py
+++ b/baybe/acquisition/base.py
@@ -111,11 +111,12 @@ def to_botorch(
             additional_params["best_f"] = (
                 bo_surrogate.posterior(train_x).mean.min().item()
             )
+            if self.is_mc:
+                additional_params["best_f"] *= -1.0
 
             if issubclass(acqf_cls, bo_acqf.AnalyticAcquisitionFunction):
                 additional_params["maximize"] = False
-            elif issubclass(acqf_cls, bo_acqf.MCAcquisitionFunction):
-                additional_params["best_f"] *= -1.0
+            elif self.is_mc:
                 additional_params["objective"] = LinearMCObjective(
                     torch.tensor([-1.0])
                 )

From 23445abf6ce5e78de25eebfec79cc8cbcabec7f2 Mon Sep 17 00:00:00 2001
From: AdrianSosic
Date: Wed, 22 Jan 2025 14:22:48 +0100
Subject: [PATCH 4/6] Move objective statement

qNegIntegratedPosteriorVariance does not accept an objective
---
 baybe/acquisition/base.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/baybe/acquisition/base.py b/baybe/acquisition/base.py
index cec7c1d92..2ba4c9c94 100644
--- a/baybe/acquisition/base.py
+++ b/baybe/acquisition/base.py
@@ -116,13 +116,13 @@ def to_botorch(
 
             if issubclass(acqf_cls, bo_acqf.AnalyticAcquisitionFunction):
                 additional_params["maximize"] = False
+            elif issubclass(acqf_cls, bo_acqf.qNegIntegratedPosteriorVariance):
+                # qNIPV is valid but does not require any adjusted params
+                pass
             elif self.is_mc:
                 additional_params["objective"] = LinearMCObjective(
                     torch.tensor([-1.0])
                 )
-            elif issubclass(acqf_cls, bo_acqf.qNegIntegratedPosteriorVariance):
-                # qNIPV is valid but does not require any adjusted params
-                pass
             else:
                 raise ValueError(
                     f"Unsupported acquisition function type: {acqf_cls}."
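[Editor's note on PATCH 2-4] The sign handling is hard to follow from the diffs alone: in minimization mode, the model output is routed through LinearMCObjective(torch.tensor([-1.0])), so the incumbent best_f handed to improvement-based MC acquisition functions must live on that flipped scale as well, i.e. it is the negated minimum of the posterior mean. The standalone sketch below reproduces that reasoning with plain botorch; the toy model, data, and variable names are invented for illustration and this is not BayBE code:

import torch
from botorch.acquisition.monte_carlo import qExpectedImprovement
from botorch.acquisition.objective import LinearMCObjective
from botorch.fit import fit_gpytorch_mll
from botorch.models import SingleTaskGP
from gpytorch.mlls import ExactMarginalLogLikelihood

# Toy data: we want to MINIMIZE y
train_x = torch.linspace(0, 1, 10, dtype=torch.double).unsqueeze(-1)
train_y = (train_x - 0.3).pow(2)

model = SingleTaskGP(train_x, train_y)
fit_gpytorch_mll(ExactMarginalLogLikelihood(model.likelihood, model))

# Minimization is emulated by negating the model output
objective = LinearMCObjective(torch.tensor([-1.0], dtype=torch.double))

# The incumbent on the raw scale is the smallest posterior mean ...
with torch.no_grad():
    best_f = model.posterior(train_x).mean.min().item()

# ... but qEI compares against the *negated* samples, so best_f must be flipped too
acqf = qExpectedImprovement(model, best_f=-best_f, objective=objective)
print(acqf(train_x.unsqueeze(-2)))  # one acquisition value per candidate

Without the extra flip, the improvement would be measured against a reference from the wrong scale, typically zeroing out or distorting the acquisition signal.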
From 1505cd68cd9843094c840bca5879c2509d6a2f73 Mon Sep 17 00:00:00 2001
From: AdrianSosic
Date: Thu, 23 Jan 2025 10:49:15 +0100
Subject: [PATCH 5/6] Avoid use of is_mc due to yet unclear semantics

https://github.com/emdgroup/baybe/issues/467
---
 baybe/acquisition/base.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/baybe/acquisition/base.py b/baybe/acquisition/base.py
index 2ba4c9c94..14deb945f 100644
--- a/baybe/acquisition/base.py
+++ b/baybe/acquisition/base.py
@@ -111,7 +111,7 @@ def to_botorch(
             additional_params["best_f"] = (
                 bo_surrogate.posterior(train_x).mean.min().item()
             )
-            if self.is_mc:
+            if issubclass(acqf_cls, bo_acqf.MCAcquisitionFunction):
                 additional_params["best_f"] *= -1.0
 
             if issubclass(acqf_cls, bo_acqf.AnalyticAcquisitionFunction):
@@ -119,7 +119,7 @@ def to_botorch(
                 additional_params["maximize"] = False
             elif issubclass(acqf_cls, bo_acqf.qNegIntegratedPosteriorVariance):
                 # qNIPV is valid but does not require any adjusted params
                 pass
-            elif self.is_mc:
+            elif issubclass(acqf_cls, bo_acqf.MCAcquisitionFunction):
                 additional_params["objective"] = LinearMCObjective(
                     torch.tensor([-1.0])
                 )

From f29c4b5dea1519b2f3722fa273be56916731095a Mon Sep 17 00:00:00 2001
From: AdrianSosic
Date: Thu, 23 Jan 2025 10:51:14 +0100
Subject: [PATCH 6/6] Add TODO note for qKnowledgeGradient

---
 tests/integration/test_minimization.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/integration/test_minimization.py b/tests/integration/test_minimization.py
index a818b2298..e2412d317 100644
--- a/tests/integration/test_minimization.py
+++ b/tests/integration/test_minimization.py
@@ -35,7 +35,7 @@ def compute_posterior_and_acqf(acqf_cls, df, searchspace, objective):
     [
         a
         for a in get_subclasses(AcquisitionFunction)
-        if not issubclass(a, qKnowledgeGradient)
+        if not issubclass(a, qKnowledgeGradient)  # TODO: not yet clear how to handle
     ],
 )
 def test_minimization(acqf_cls):
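[Editor's note] To exercise the fix interactively outside the test suite, the sketch below runs the same max/min equivalence check for a single acquisition function. It assumes that qExpectedImprovement is exported from baybe.acquisition.acqfs in the same way as qKnowledgeGradient above; everything else reuses exactly the calls from tests/integration/test_minimization.py, so treat it as an illustrative sketch rather than additional test coverage:

import numpy as np
import pandas as pd
import torch

from baybe.acquisition.acqfs import qExpectedImprovement  # assumed export
from baybe.parameters.numerical import NumericalDiscreteParameter
from baybe.surrogates.gaussian_process.core import GaussianProcessSurrogate
from baybe.targets.numerical import NumericalTarget
from baybe.utils.random import set_random_seed

values = np.linspace(10, 20)
searchspace = NumericalDiscreteParameter("p", values).to_searchspace()


def acqf_values(objective, targets):
    """Fit a GP on the given targets and evaluate qEI on all candidates."""
    set_random_seed(0)
    df = pd.DataFrame({"p": values, "t": targets})
    surrogate = GaussianProcessSurrogate()
    surrogate.fit(searchspace, objective, df)
    acqf = qExpectedImprovement().to_botorch(surrogate, searchspace, objective, df)
    return acqf(torch.tensor(searchspace.transform(df).values).unsqueeze(-2))


# Maximizing the raw targets and minimizing their negation should now agree
acqf_max = acqf_values(NumericalTarget("t", "MAX").to_objective(), values)
acqf_min = acqf_values(NumericalTarget("t", "MIN").to_objective(), -values)
print(torch.allclose(acqf_max, acqf_min, rtol=1e-4, atol=0.1))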