Skip to content

Commit

Permalink
Revert back to allowing inducing point method to be optional (#486)
Browse files Browse the repository at this point in the history
Summary:
Pull Request resolved: #486

Inducing point methods can now be initialized within models again, since they know their own dimensionality. So we allow the inducing point args to be completely optional again. This also lets us reorder the args into a more sensible order.

Differential Revision: D67226225
  • Loading branch information
JasonKChow authored and facebook-github-bot committed Dec 14, 2024
1 parent 052349c commit 3a630ff
Show file tree
Hide file tree
Showing 5 changed files with 33 additions and 24 deletions.
24 changes: 14 additions & 10 deletions aepsych/models/gp_classification.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,25 +49,26 @@ class GPClassificationModel(AEPsychModelDeviceMixin, ApproximateGP):
def __init__(
self,
dim: int,
inducing_point_method: InducingPointAllocator,
inducing_size: int = 100,
mean_module: Optional[gpytorch.means.Mean] = None,
covar_module: Optional[gpytorch.kernels.Kernel] = None,
likelihood: Optional[Likelihood] = None,
inducing_point_method: Optional[InducingPointAllocator] = None,
inducing_size: int = 100,
max_fit_time: Optional[float] = None,
optimizer_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Initialize the GP Classification model
Args:
dim (int): The number of dimensions in the parameter space.
inducing_point_method (InducingPointAllocator): The method to use for selecting inducing points.
inducing_size (int): Number of inducing points. Defaults to 100.
mean_module (gpytorch.means.Mean, optional): GP mean class. Defaults to a constant with a normal prior.
covar_module (gpytorch.kernels.Kernel, optional): GP covariance kernel class. Defaults to scaled RBF with a
gamma prior.
likelihood (gpytorch.likelihood.Likelihood, optional): The likelihood function to use. If None defaults to
Bernoulli likelihood.
inducing_point_method (InducingPointAllocator, optional): The method to use for selecting inducing points.
If not set, an AutoAllocator is made.
inducing_size (int): Number of inducing points. Defaults to 100.
max_fit_time (float, optional): The maximum amount of time, in seconds, to spend fitting the model. If None,
there is no limit to the fitting time.
optimizer_options (Dict[str, Any], optional): Optimizer options to pass to the SciPy optimizer during
Expand Down Expand Up @@ -98,7 +99,9 @@ def __init__(
dim=self.dim, stimuli_per_trial=self.stimuli_per_trial
)

self.inducing_point_method = inducing_point_method
self.inducing_point_method = inducing_point_method or AutoAllocator(
dim=self.dim
)
inducing_points = self.inducing_point_method.allocate_inducing_points(
num_inducing=self.inducing_size,
covar_module=covar_module or default_covar,
Expand Down Expand Up @@ -339,31 +342,32 @@ class GPBetaRegressionModel(GPClassificationModel):
def __init__(
self,
dim: int,
inducing_point_method: InducingPointAllocator,
inducing_size: int = 100,
mean_module: Optional[gpytorch.means.Mean] = None,
covar_module: Optional[gpytorch.kernels.Kernel] = None,
likelihood: Optional[Likelihood] = None,
inducing_point_method: Optional[InducingPointAllocator] = None,
inducing_size: int = 100,
max_fit_time: Optional[float] = None,
optimizer_options: Optional[Dict[str, Any]] = None,
) -> None:
"""Initialize the GP Beta Regression model
Args:
dim (int): The number of dimensions in the parameter space.
inducing_point_method (InducingPointAllocator): The method to use to select the inducing points.
inducing_size (int, optional): Number of inducing points. Defaults to 100.
mean_module (gpytorch.means.Mean, optional): GP mean class. Defaults to a constant with a normal prior. Defaults to None.
covar_module (gpytorch.kernels.Kernel, optional): GP covariance kernel class. Defaults to scaled RBF with a
gamma prior.
likelihood (gpytorch.likelihood.Likelihood, optional): The likelihood function to use. If None defaults to
Beta likelihood.
inducing_point_method (InducingPointAllocator, optional): The method to use for selecting inducing points.
If not set, an AutoAllocator is made.
inducing_size (int): Number of inducing points. Defaults to 100.
max_fit_time (float, optional): The maximum amount of time, in seconds, to spend fitting the model. If None,
there is no limit to the fitting time. Defaults to None.
"""
if likelihood is None:
likelihood = BetaLikelihood()
self.inducing_point_method = inducing_point_method

super().__init__(
dim=dim,
mean_module=mean_module,
Expand Down
1 change: 0 additions & 1 deletion aepsych/models/gp_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
from aepsych.models.base import AEPsychModelDeviceMixin
from aepsych.utils import get_optimizer_options, promote_0d
from aepsych.utils_logging import getLogger
from botorch.models.utils.inducing_point_allocators import InducingPointAllocator
from gpytorch.likelihoods import GaussianLikelihood, Likelihood
from gpytorch.models import ExactGP

Expand Down
6 changes: 3 additions & 3 deletions aepsych/models/monotonic_projection_gp.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,13 +98,13 @@ def __init__(
lb: torch.Tensor,
ub: torch.Tensor,
dim: int,
inducing_point_method: InducingPointAllocator,
monotonic_dims: List[int],
monotonic_grid_size: int = 20,
min_f_val: Optional[float] = None,
mean_module: Optional[gpytorch.means.Mean] = None,
covar_module: Optional[gpytorch.kernels.Kernel] = None,
likelihood: Optional[Likelihood] = None,
inducing_point_method: Optional[InducingPointAllocator] = None,
inducing_size: int = 100,
max_fit_time: Optional[float] = None,
optimizer_options: Optional[Dict[str, Any]] = None,
Expand All @@ -115,7 +115,6 @@ def __init__(
lb (torch.Tensor): Lower bounds of the parameters.
ub (torch.Tensor): Upper bounds of the parameters.
dim (int, optional): The number of dimensions in the parameter space.
inducing_point_method (InducingPointAllocator): The method for allocating inducing points.
monotonic_dims (List[int]): A list of the dimensions on which monotonicity should
be enforced.
monotonic_grid_size (int): The size of the grid, s, in 1. above. Defaults to 20.
Expand All @@ -125,6 +124,8 @@ def __init__(
gamma prior. Defaults to None.
likelihood (Likelihood, optional): The likelihood function to use. If None defaults to
Gaussian likelihood. Defaults to None.
inducing_point_method (InducingPointAllocator, optional): The method to use for selecting inducing points.
If not set, an AutoAllocator is made.
inducing_size (int): The number of inducing points to use. Defaults to 100.
max_fit_time (float, optional): The maximum amount of time, in seconds, to spend fitting the model. If None,
there is no limit to the fitting time. Defaults to None.
Expand All @@ -133,7 +134,6 @@ def __init__(
self.monotonic_dims = [int(d) for d in monotonic_dims]
self.mon_grid_size = monotonic_grid_size
self.min_f_val = min_f_val
self.inducing_point_method = inducing_point_method
self.lb = lb
self.ub = ub

Expand Down
20 changes: 10 additions & 10 deletions aepsych/models/semi_p.py
Original file line number Diff line number Diff line change
Expand Up @@ -253,29 +253,30 @@ class SemiParametricGPModel(GPClassificationModel):
def __init__(
self,
dim: int,
inducing_point_method: InducingPointAllocator,
inducing_size: int = 100,
stim_dim: int = 0,
mean_module: Optional[gpytorch.means.Mean] = None,
covar_module: Optional[gpytorch.kernels.Kernel] = None,
likelihood: Optional[Any] = None,
slope_mean: float = 2,
inducing_point_method: Optional[InducingPointAllocator] = None,
inducing_size: int = 100,
max_fit_time: Optional[float] = None,
optimizer_options: Optional[Dict[str, Any]] = None,
) -> None:
"""
Initialize SemiParametricGP.
Args:
dim (int, optional): The number of dimensions in the parameter space.
inducing_point_method (InducingPointAllocator): The method to use to select the inducing points.
inducing_size (int): Number of inducing points. Defaults to 100.
stim_dim (int): Index of the intensity (monotonic) dimension. Defaults to 0.
mean_module (gpytorch.means.Mean, optional): GP mean class. Defaults to a constant with a normal prior.
covar_module (gpytorch.kernels.Kernel, optional): GP covariance kernel class. Defaults to scaled RBF with a
gamma prior.
likelihood (gpytorch.likelihood.Likelihood, optional): The likelihood function to use. If None defaults to
linear-Bernoulli likelihood with probit link.
slope_mean (float): The mean of the slope. Defaults to 2.
inducing_point_method (InducingPointAllocator, optional): The method to use for selecting inducing points.
If not set, an AutoAllocator is made.
inducing_size (int): Number of inducing points. Defaults to 100.
max_fit_time (float, optional): The maximum amount of time, in seconds, to spend fitting the model. If None,
there is no limit to the fitting time.
optimizer_options (Dict[str, Any], optional): Optimizer options to pass to the SciPy optimizer during
Expand Down Expand Up @@ -309,7 +310,6 @@ def __init__(
assert isinstance(
likelihood, LinearBernoulliLikelihood
), "SemiP model only supports linear Bernoulli likelihoods!"
self.inducing_point_method = inducing_point_method

super().__init__(
dim=dim,
Expand Down Expand Up @@ -513,37 +513,37 @@ class HadamardSemiPModel(GPClassificationModel):
def __init__(
self,
dim: int,
inducing_point_method: InducingPointAllocator,
inducing_size: int = 100,
stim_dim: int = 0,
slope_mean_module: Optional[gpytorch.means.Mean] = None,
slope_covar_module: Optional[gpytorch.kernels.Kernel] = None,
offset_mean_module: Optional[gpytorch.means.Mean] = None,
offset_covar_module: Optional[gpytorch.kernels.Kernel] = None,
likelihood: Optional[Likelihood] = None,
slope_mean: float = 2,
inducing_point_method: Optional[InducingPointAllocator] = None,
inducing_size: int = 100,
max_fit_time: Optional[float] = None,
optimizer_options: Optional[Dict[str, Any]] = None,
) -> None:
"""
Initialize HadamardSemiPModel.
Args:
dim (int): The number of dimensions in the parameter space.
inducing_point_method (InducingPointAllocator): The method to use to select the inducing points.
inducing_size (int): Number of inducing points. Defaults to 100.
stim_dim (int): Index of the intensity (monotonic) dimension. Defaults to 0.
slope_mean_module (gpytorch.means.Mean, optional): Mean module to use (default: constant mean) for slope.
slope_covar_module (gpytorch.kernels.Kernel, optional): Covariance kernel to use (default: scaled RBF) for slope.
offset_mean_module (gpytorch.means.Mean, optional): Mean module to use (default: constant mean) for offset.
offset_covar_module (gpytorch.kernels.Kernel, optional): Covariance kernel to use (default: scaled RBF) for offset.
likelihood (gpytorch.likelihood.Likelihood, optional): defaults to Bernoulli with logistic input and a floor of .5
slope_mean (float): The mean of the slope. Defaults to 2.
inducing_point_method (InducingPointAllocator, optional): The method to use for selecting inducing points.
If not set, an AutoAllocator is made.
inducing_size (int): Number of inducing points. Defaults to 100.
max_fit_time (float, optional): The maximum amount of time, in seconds, to spend fitting the model. If None,
there is no limit to the fitting time.
optimizer_options (Dict[str, Any], optional): Optimizer options to pass to the SciPy optimizer during
fitting. Assumes we are using L-BFGS-B.
"""
self.inducing_point_method = inducing_point_method
super().__init__(
dim=dim,
inducing_size=inducing_size,
Expand Down
6 changes: 6 additions & 0 deletions tests/test_points_allocators.py
Original file line number Diff line number Diff line change
Expand Up @@ -489,6 +489,12 @@ def test_select_inducing_points(self):
)
self.assertTrue(len(points) <= 20)

def test_model_default_allocator(self):
model = GPClassificationModel(dim=2)

self.assertIsInstance(model.inducing_point_method, AutoAllocator)
self.assertTrue(model.inducing_point_method.dim == 2)


if __name__ == "__main__":
unittest.main()

0 comments on commit 3a630ff

Please sign in to comment.