Skip to content

Commit

Permalink
Prepare using schedulers in the yaml config
Browse files Browse the repository at this point in the history
  • Loading branch information
louisPoulain committed Jul 3, 2024
1 parent 8c7f807 commit fdd55c0
Showing 1 changed file with 23 additions and 0 deletions.
23 changes: 23 additions & 0 deletions mlpp_lib/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,12 +115,35 @@ def get_metric(metric: Union[str, dict]) -> Callable:
return metric


def get_scheduler(
    scheduler_config: Union[dict, None]
) -> Union[tf.keras.optimizers.schedules.LearningRateSchedule, None]:
    """Create a learning rate scheduler from a config dictionary.

    Parameters
    ----------
    scheduler_config: dict or None
        A single-key mapping ``{scheduler_name: scheduler_options}``, where
        ``scheduler_name`` must match a class available in
        ``tf.keras.optimizers.schedules`` and ``scheduler_options`` is a
        dict of keyword arguments passed to that class's constructor.
        Any non-dict value (e.g. ``None``) means "no scheduler".

    Returns
    -------
    A keras built-in ``LearningRateSchedule`` instance, or ``None`` when no
    scheduler is configured.

    Raises
    ------
    ValueError
        If the config dict does not contain exactly one scheduler entry.
    KeyError
        If the named scheduler does not exist in
        ``tf.keras.optimizers.schedules``.
    """
    if not isinstance(scheduler_config, dict):
        # Anything that is not a config mapping disables scheduling.
        LOGGER.info("Not using any schedulers.")
        return None

    # Guard against ambiguous configs: the original code silently used an
    # arbitrary key when several were given, and raised a cryptic IndexError
    # on an empty dict.
    if len(scheduler_config) != 1:
        raise ValueError(
            f"Scheduler config must contain exactly one entry, "
            f"got keys: {list(scheduler_config)}"
        )

    scheduler_name, scheduler_options = next(iter(scheduler_config.items()))

    if not hasattr(tf.keras.optimizers.schedules, scheduler_name):
        raise KeyError(f"The scheduler {scheduler_name} is not available.")

    LOGGER.info(f"Using keras built-in learning rate scheduler: {scheduler_name}")
    scheduler_cls = getattr(tf.keras.optimizers.schedules, scheduler_name)
    return scheduler_cls(**scheduler_options)


def get_optimizer(optimizer: Union[str, dict]) -> Callable:
"""Get the optimizer, keras built-in only."""

if isinstance(optimizer, dict):
optimizer_name = list(optimizer.keys())[0]
optimizer_options = optimizer[optimizer_name]
scheduler = get_scheduler(optimizer_options.pop("learning_rate", None))
if scheduler:
optimizer_options["learning_rate"] = scheduler
else:
optimizer_name = optimizer
optimizer_options = {}
Expand Down

0 comments on commit fdd55c0

Please sign in to comment.