Revert back to allowing inducing point method to be optional (#486)
Summary:
Pull Request resolved: #486

Inducing point methods can once again be initialized within models, since allocators now know their own dimensionality. This lets the inducing point args be completely optional again, and also lets us reorder the args more sensibly.
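As a rough sketch of the resulting call patterns (the model and allocator names are from this diff; the 2D setup itself is hypothetical):

from aepsych.models.gp_classification import GPClassificationModel
from aepsych.models.inducing_points import GreedyVarianceReduction

# Inducing point args omitted entirely: the model constructs a
# GreedyVarianceReduction allocator that knows its own dimensionality.
model = GPClassificationModel(dim=2)

# An explicit allocator still works, passed via the reordered args.
model = GPClassificationModel(
    dim=2,
    inducing_point_method=GreedyVarianceReduction(dim=2),
    inducing_size=50,
)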

Reviewed By: crasanders

Differential Revision: D67226225

fbshipit-source-id: b23f94d16aa3c52ddf4002157dc73cf78cea9e5c
JasonKChow committed Dec 18, 2024
1 parent 350bf33 commit 04af42f
Showing 23 changed files with 53 additions and 225 deletions.
30 changes: 17 additions & 13 deletions aepsych/models/gp_classification.py
@@ -15,10 +15,10 @@
 from aepsych.config import Config
 from aepsych.factory.default import default_mean_covar_factory
 from aepsych.models.base import AEPsychModelDeviceMixin
-from aepsych.models.inducing_points import AutoAllocator
+from aepsych.models.inducing_points import GreedyVarianceReduction
+from aepsych.models.inducing_points.base import InducingPointAllocator
 from aepsych.utils import get_dims, get_optimizer_options, promote_0d
 from aepsych.utils_logging import getLogger
-from botorch.models.utils.inducing_point_allocators import InducingPointAllocator
 from gpytorch.likelihoods import BernoulliLikelihood, BetaLikelihood, Likelihood
 from gpytorch.models import ApproximateGP
 from gpytorch.variational import CholeskyVariationalDistribution, VariationalStrategy
@@ -49,25 +49,26 @@ class GPClassificationModel(AEPsychModelDeviceMixin, ApproximateGP):
     def __init__(
         self,
         dim: int,
-        inducing_point_method: InducingPointAllocator,
-        inducing_size: int = 100,
         mean_module: Optional[gpytorch.means.Mean] = None,
         covar_module: Optional[gpytorch.kernels.Kernel] = None,
         likelihood: Optional[Likelihood] = None,
+        inducing_point_method: Optional[InducingPointAllocator] = None,
+        inducing_size: int = 100,
         max_fit_time: Optional[float] = None,
         optimizer_options: Optional[Dict[str, Any]] = None,
     ) -> None:
         """Initialize the GP Classification model

         Args:
             dim (int): The number of dimensions in the parameter space.
-            inducing_point_method (InducingPointAllocator): The method to use for selecting inducing points.
-            inducing_size (int): Number of inducing points. Defaults to 100.
             mean_module (gpytorch.means.Mean, optional): GP mean class. Defaults to a constant with a normal prior.
             covar_module (gpytorch.kernels.Kernel, optional): GP covariance kernel class. Defaults to scaled RBF with a
                 gamma prior.
             likelihood (gpytorch.likelihood.Likelihood, optional): The likelihood function to use. If None defaults to
                 Bernoulli likelihood.
+            inducing_point_method (InducingPointAllocator, optional): The method to use for selecting inducing points.
+                If not set, a GreedyVarianceReduction is made.
+            inducing_size (int): Number of inducing points. Defaults to 100.
             max_fit_time (float, optional): The maximum amount of time, in seconds, to spend fitting the model. If None,
                 there is no limit to the fitting time.
             optimizer_options (Dict[str, Any], optional): Optimizer options to pass to the SciPy optimizer during
@@ -98,7 +99,9 @@ def __init__(
             dim=self.dim, stimuli_per_trial=self.stimuli_per_trial
         )

-        self.inducing_point_method = inducing_point_method
+        self.inducing_point_method = inducing_point_method or GreedyVarianceReduction(
+            dim=self.dim
+        )
         inducing_points = self.inducing_point_method.allocate_inducing_points(
             num_inducing=self.inducing_size,
             covar_module=covar_module or default_covar,
@@ -152,7 +155,7 @@ def from_config(cls, config: Config) -> GPClassificationModel:
         max_fit_time = config.getfloat(classname, "max_fit_time", fallback=None)

         inducing_point_method_class = config.getobj(
-            classname, "inducing_point_method", fallback=AutoAllocator
+            classname, "inducing_point_method", fallback=GreedyVarianceReduction
         )
         # Check if allocator class has a `from_config` method
         if hasattr(inducing_point_method_class, "from_config"):
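For reference, the fallback surfaces in AEPsych's INI-style configs roughly as in the hypothetical fragment below (not from the diff; a real config also needs the common parameter setup):

# With inducing_point_method unset, from_config now falls back to
# GreedyVarianceReduction instead of the deleted AutoAllocator.
[GPClassificationModel]
inducing_size = 100
# inducing_point_method = GreedyVarianceReduction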
@@ -339,31 +342,32 @@ class GPBetaRegressionModel(GPClassificationModel):
     def __init__(
         self,
         dim: int,
-        inducing_point_method: InducingPointAllocator,
-        inducing_size: int = 100,
         mean_module: Optional[gpytorch.means.Mean] = None,
         covar_module: Optional[gpytorch.kernels.Kernel] = None,
         likelihood: Optional[Likelihood] = None,
+        inducing_point_method: Optional[InducingPointAllocator] = None,
+        inducing_size: int = 100,
         max_fit_time: Optional[float] = None,
         optimizer_options: Optional[Dict[str, Any]] = None,
     ) -> None:
         """Initialize the GP Beta Regression model

         Args:
             dim (int): The number of dimensions in the parameter space.
-            inducing_point_method (InducingPointAllocator): The method to use to select the inducing points.
-            inducing_size (int, optional): Number of inducing points. Defaults to 100.
             mean_module (gpytorch.means.Mean, optional): GP mean class. Defaults to a constant with a normal prior.
             covar_module (gpytorch.kernels.Kernel, optional): GP covariance kernel class. Defaults to scaled RBF with a
                 gamma prior.
             likelihood (gpytorch.likelihood.Likelihood, optional): The likelihood function to use. If None defaults to
                 Beta likelihood.
+            inducing_point_method (InducingPointAllocator, optional): The method to use for selecting inducing points.
+                If not set, a GreedyVarianceReduction is made.
+            inducing_size (int): Number of inducing points. Defaults to 100.
             max_fit_time (float, optional): The maximum amount of time, in seconds, to spend fitting the model. If None,
                 there is no limit to the fitting time. Defaults to None.
         """
         if likelihood is None:
             likelihood = BetaLikelihood()
-        self.inducing_point_method = inducing_point_method

         super().__init__(
             dim=dim,
             mean_module=mean_module,
1 change: 0 additions & 1 deletion aepsych/models/gp_regression.py
@@ -16,7 +16,6 @@
 from aepsych.models.base import AEPsychModelDeviceMixin
 from aepsych.utils import get_dims, get_optimizer_options, promote_0d
 from aepsych.utils_logging import getLogger
-from botorch.models.utils.inducing_point_allocators import InducingPointAllocator
 from gpytorch.likelihoods import GaussianLikelihood, Likelihood
 from gpytorch.models import ExactGP

2 changes: 0 additions & 2 deletions aepsych/models/inducing_points/__init__.py
@@ -8,14 +8,12 @@
 import sys

 from ...config import Config
-from .auto import AutoAllocator
 from .fixed import FixedAllocator
 from .greedy_variance_reduction import GreedyVarianceReduction
 from .kmeans import KMeansAllocator
 from .sobol import SobolAllocator

 __all__ = [
-    "AutoAllocator",
     "FixedAllocator",
     "GreedyVarianceReduction",
     "KMeansAllocator",
44 changes: 0 additions & 44 deletions aepsych/models/inducing_points/auto.py

This file was deleted.

12 changes: 6 additions & 6 deletions aepsych/models/monotonic_projection_gp.py
@@ -15,9 +15,9 @@
 from aepsych.config import Config
 from aepsych.factory.default import default_mean_covar_factory
 from aepsych.models.gp_classification import GPClassificationModel
-from aepsych.models.inducing_points import AutoAllocator
+from aepsych.models.inducing_points import GreedyVarianceReduction
+from aepsych.models.inducing_points.base import InducingPointAllocator
 from aepsych.utils import get_dims, get_optimizer_options
-from botorch.models.utils.inducing_point_allocators import InducingPointAllocator
 from botorch.posteriors.gpytorch import GPyTorchPosterior
 from gpytorch.likelihoods import Likelihood
 from statsmodels.stats.moment_helpers import corr2cov, cov2corr
@@ -98,13 +98,13 @@ def __init__(
         lb: torch.Tensor,
         ub: torch.Tensor,
         dim: int,
-        inducing_point_method: InducingPointAllocator,
         monotonic_dims: List[int],
         monotonic_grid_size: int = 20,
         min_f_val: Optional[float] = None,
         mean_module: Optional[gpytorch.means.Mean] = None,
         covar_module: Optional[gpytorch.kernels.Kernel] = None,
         likelihood: Optional[Likelihood] = None,
+        inducing_point_method: Optional[InducingPointAllocator] = None,
         inducing_size: int = 100,
         max_fit_time: Optional[float] = None,
         optimizer_options: Optional[Dict[str, Any]] = None,
@@ -115,7 +115,6 @@
             lb (torch.Tensor): Lower bounds of the parameters.
             ub (torch.Tensor): Upper bounds of the parameters.
             dim (int, optional): The number of dimensions in the parameter space.
-            inducing_point_method (InducingPointAllocator): The method for allocating inducing points.
             monotonic_dims (List[int]): A list of the dimensions on which monotonicity should
                 be enforced.
             monotonic_grid_size (int): The size of the grid, s, in 1. above. Defaults to 20.
@@ -125,6 +124,8 @@
                 gamma prior. Defaults to None.
             likelihood (Likelihood, optional): The likelihood function to use. If None defaults to
                 Gaussian likelihood. Defaults to None.
+            inducing_point_method (InducingPointAllocator, optional): The method to use for selecting inducing points.
+                If not set, a GreedyVarianceReduction is made.
             inducing_size (int): The number of inducing points to use. Defaults to 100.
             max_fit_time (float, optional): The maximum amount of time, in seconds, to spend fitting the model. If None,
                 there is no limit to the fitting time. Defaults to None.
@@ -133,7 +134,6 @@
         self.monotonic_dims = [int(d) for d in monotonic_dims]
         self.mon_grid_size = monotonic_grid_size
         self.min_f_val = min_f_val
-        self.inducing_point_method = inducing_point_method
         self.lb = lb
         self.ub = ub

@@ -250,7 +250,7 @@ def from_config(cls, config: Config) -> MonotonicProjectionGP:
         max_fit_time = config.getfloat(classname, "max_fit_time", fallback=None)

         inducing_point_method_class = config.getobj(
-            classname, "inducing_point_method", fallback=AutoAllocator
+            classname, "inducing_point_method", fallback=GreedyVarianceReduction
         )
         # Check if allocator class has a `from_config` method
         if hasattr(inducing_point_method_class, "from_config"):
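A minimal construction sketch under the new signature (the bounds, dimensionality, and monotonic dimension below are hypothetical, not from the diff):

import torch
from aepsych.models.monotonic_projection_gp import MonotonicProjectionGP

# 2D toy problem, monotonic in the first input dimension. With
# inducing_point_method omitted, the model builds its own
# GreedyVarianceReduction allocator.
model = MonotonicProjectionGP(
    lb=torch.tensor([0.0, 0.0]),
    ub=torch.tensor([1.0, 1.0]),
    dim=2,
    monotonic_dims=[0],
)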
8 changes: 4 additions & 4 deletions aepsych/models/monotonic_rejection_gp.py
@@ -18,10 +18,10 @@
 from aepsych.kernels.rbf_partial_grad import RBFKernelPartialObsGrad
 from aepsych.means.constant_partial_grad import ConstantMeanPartialObsGrad
 from aepsych.models.base import AEPsychMixin
-from aepsych.models.inducing_points import AutoAllocator, SobolAllocator
+from aepsych.models.inducing_points import GreedyVarianceReduction, SobolAllocator
+from aepsych.models.inducing_points.base import InducingPointAllocator
 from aepsych.utils import _process_bounds, get_dims, get_optimizer_options, promote_0d
 from botorch.fit import fit_gpytorch_mll
-from botorch.models.utils.inducing_point_allocators import InducingPointAllocator
 from gpytorch.kernels import Kernel
 from gpytorch.likelihoods import BernoulliLikelihood, Likelihood
 from gpytorch.means import Mean
@@ -83,7 +83,7 @@ def __init__(
                 acquisition function evaluation. Defaults to 250.
             num_rejection_samples (int): Number of samples used for rejection sampling. Defaults to 4096.
             inducing_point_method (InducingPointAllocator, optional): Method for selecting inducing points. If not set,
-                an AutoAllocator is created.
+                a GreedyVarianceReduction is created.
             optimizer_options (Dict[str, Any], optional): Optimizer options to pass to the SciPy optimizer during
                 fitting. Assumes we are using L-BFGS-B.
         """
@@ -92,7 +92,7 @@
             likelihood = BernoulliLikelihood()

         self.inducing_size = num_induc
-        self.inducing_point_method = inducing_point_method or AutoAllocator(
+        self.inducing_point_method = inducing_point_method or GreedyVarianceReduction(
             dim=self.dim
         )

0 comments on commit 04af42f