Reap deprecated **kwargs argument from optimize_acqf variants (pytorch#2390)

Summary:
## Motivation

This code was deprecated between releases 0.8.0 and 0.9.0; now that we are past 0.11.0, it can be reaped.
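The user-visible effect, sketched below under the assumption of a hypothetical `acq_function` (any botorch `AcquisitionFunction` instance): during the deprecation period, passing an inapplicable keyword argument such as `num_restarts` to `optimize_acqf_discrete` only emitted a `DeprecationWarning`; with the `**kwargs` catch-all removed, Python itself now raises a `TypeError`.

```python
import torch
from botorch.optim.optimize import optimize_acqf_discrete

choices = torch.rand(5, 2)  # a `5 x 2` tensor of discrete candidates

# Before this commit, the call below emitted:
#   DeprecationWarning: `optimize_acqf_discrete` does not support arguments
#   ['num_restarts']. In the future, this will become an error.
# After this commit, Python raises:
#   TypeError: optimize_acqf_discrete() got an unexpected keyword argument 'num_restarts'
candidates, acq_values = optimize_acqf_discrete(
    acq_function=acq_function,  # hypothetical: assumed defined elsewhere
    q=1,
    choices=choices,
    num_restarts=8,  # inapplicable to this function
)
```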

Pull Request resolved: pytorch#2390

Test Plan:
Existing unit tests, including tutorials.

## Related PRs

pytorch#1677

Reviewed By: Balandat

Differential Revision: D58930631

Pulled By: esantorella

fbshipit-source-id: fc6c3900baa53fa8d04299a1e398f23aa1f3aa53
esantorella authored and facebook-github-bot committed Jun 26, 2024
1 parent ef73ea6 commit 1b11aca
Showing 2 changed files with 2 additions and 43 deletions.
31 changes: 0 additions & 31 deletions botorch/optim/optimize.py
@@ -137,25 +137,6 @@ def get_ic_generator(self) -> TGenInitialConditions:
         return gen_batch_initial_conditions


-def _raise_deprecation_warning_if_kwargs(fn_name: str, kwargs: Dict[str, Any]) -> None:
-    """
-    Raise a warning if kwargs are provided.
-
-    Some functions used to support **kwargs. The applicable parameters have now been
-    refactored to be named arguments, so no warning will be raised for users passing
-    the expected arguments. However, if a user had been passing an inapplicable
-    keyword argument, this will now raise a warning whereas in the past it did
-    nothing.
-    """
-    if len(kwargs) > 0:
-        warnings.warn(
-            f"`{fn_name}` does not support arguments {list(kwargs.keys())}. In "
-            "the future, this will become an error.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-
-
 def _optimize_acqf_all_features_fixed(
     *,
     bounds: Tensor,
@@ -866,7 +847,6 @@ def optimize_acqf_mixed(
     batch_initial_conditions: Optional[Tensor] = None,
     ic_generator: Optional[TGenInitialConditions] = None,
     ic_gen_kwargs: Optional[Dict] = None,
-    **kwargs: Any,
 ) -> Tuple[Tensor, Tensor]:
     r"""Optimize over a list of fixed_features and returns the best solution.
@@ -920,8 +900,6 @@ def optimize_acqf_mixed(
             for nonlinear inequality constraints.
         ic_gen_kwargs: Additional keyword arguments passed to function specified by
             `ic_generator`
-        kwargs: kwargs do nothing. This is provided so that the same arguments can
-            be passed to different acquisition functions without raising an error.

     Returns:
         A two-element tuple containing
@@ -939,7 +917,6 @@ def optimize_acqf_mixed(
                 "are currently not supported when `q > 1`. This is needed to "
                 "compute the joint acquisition value."
             )
-    _raise_deprecation_warning_if_kwargs("optimize_acqf_mixed", kwargs)

     ic_gen_kwargs = ic_gen_kwargs or {}

@@ -1016,7 +993,6 @@ def optimize_acqf_discrete(
     choices: Tensor,
     max_batch_size: int = 2048,
     unique: bool = True,
-    **kwargs: Any,
 ) -> Tuple[Tensor, Tensor]:
     r"""Optimize over a discrete set of points using batch evaluation.
@@ -1034,8 +1010,6 @@ def optimize_acqf_discrete(
             a large training set.
         unique: If True return unique choices, o/w choices may be repeated
             (only relevant if `q > 1`).
-        kwargs: kwargs do nothing. This is provided so that the same arguments can
-            be passed to different acquisition functions without raising an error.

     Returns:
         A two-element tuple containing
@@ -1050,7 +1024,6 @@ def optimize_acqf_discrete(
         )
     if choices.numel() == 0:
         raise InputDataError("`choices` must be non-emtpy.")
-    _raise_deprecation_warning_if_kwargs("optimize_acqf_discrete", kwargs)
     choices_batched = choices.unsqueeze(-2)
     if q > 1:
         candidate_list, acq_value_list = [], []
@@ -1168,7 +1141,6 @@ def optimize_acqf_discrete_local_search(
     batch_initial_conditions: Optional[Tensor] = None,
     max_batch_size: int = 2048,
     unique: bool = True,
-    **kwargs: Any,
 ) -> Tuple[Tensor, Tensor]:
     r"""Optimize acquisition function over a lattice.
@@ -1201,16 +1173,13 @@ def optimize_acqf_discrete_local_search(
             a large training set.
         unique: If True return unique choices, o/w choices may be repeated
             (only relevant if `q > 1`).
-        kwargs: kwargs do nothing. This is provided so that the same arguments can
-            be passed to different acquisition functions without raising an error.

     Returns:
         A two-element tuple containing

         - a `q x d`-dim tensor of generated candidates.
         - an associated acquisition value.
     """
-    _raise_deprecation_warning_if_kwargs("optimize_acqf_discrete_local_search", kwargs)
     candidate_list = []
     base_X_pending = acq_function.X_pending if q > 1 else None
     base_X_avoid = X_avoid
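For context, here is a minimal sketch of the general pattern reaped above, using hypothetical function names rather than the real botorch signatures: the deprecation-period variant swallowed unknown keywords and warned at runtime, while the current variant declares named parameters only, so unknown keywords fail fast with Python's own `TypeError`.

```python
import warnings
from typing import Any


def optimize_something_old(*, q: int, **kwargs: Any) -> None:
    # Deprecation-period pattern (removed by this commit): accept anything,
    # then warn about leftovers that no named parameter consumed.
    if len(kwargs) > 0:
        warnings.warn(
            f"`optimize_something_old` does not support arguments "
            f"{list(kwargs.keys())}. In the future, this will become an error.",
            DeprecationWarning,
            stacklevel=2,
        )


def optimize_something_new(*, q: int) -> None:
    # Current pattern: named parameters only; nothing to check by hand.
    pass


optimize_something_old(q=1, num_restarts=8)  # emits a DeprecationWarning
try:
    optimize_something_new(q=1, num_restarts=8)
except TypeError as e:
    print(e)  # ... got an unexpected keyword argument 'num_restarts'
```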
14 changes: 2 additions & 12 deletions test/optim/test_optimize.py
@@ -94,8 +94,8 @@ class SinOneOverXAcqusitionFunction(MockAcquisitionFunction):
     """
     Acquisition function for sin(1/x).

-    This is useful for testing because it behaves pathologically only zero, so
-    optimization is likely to fail when initializing near zero but not
+    This is useful for testing because it behaves pathologically only near zero,
+    so optimization is likely to fail when initializing near zero but not
     elsewhere.
     """

@@ -1565,16 +1565,6 @@ def test_optimize_acqf_discrete(self):

         choices = torch.rand(5, 2, **tkwargs)

-        # warning for unsupported keyword arguments
-        with self.assertWarnsRegex(
-            DeprecationWarning,
-            r"`optimize_acqf_discrete` does not support arguments "
-            r"\['num_restarts'\]. In the future, this will become an error.",
-        ):
-            optimize_acqf_discrete(
-                acq_function=mock_acq_function, q=q, choices=choices, num_restarts=8
-            )
-
         exp_acq_vals = mock_acq_function(choices)

         # test unique
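Callers that relied on the removed catch-all to share a single argument dict across the different `optimize_acqf` variants now need to filter that dict themselves. A hypothetical helper (not part of botorch) that does this with `inspect`:

```python
import inspect
from typing import Any, Callable, Dict


def supported_kwargs(fn: Callable, kwargs: Dict[str, Any]) -> Dict[str, Any]:
    """Return only the entries of `kwargs` that `fn` declares as parameters."""
    params = inspect.signature(fn).parameters
    return {k: v for k, v in kwargs.items() if k in params}
```

For example, `optimize_acqf_discrete(acq_function=acq, **supported_kwargs(optimize_acqf_discrete, shared_args))` drops keys such as `num_restarts` that `optimize_acqf_discrete` does not accept, instead of tripping the new `TypeError`.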
