
Commit

Update mlflow
dnerini committed Nov 27, 2023
2 parents a9eb155 + 783916b commit 09fc9ca
Showing 4 changed files with 1,523 additions and 351 deletions.
50 changes: 35 additions & 15 deletions mlpp_lib/models.py
@@ -1,21 +1,20 @@
import tensorflow as tf
import logging
from typing import Optional, Union, Any

import numpy as np
import tensorflow as tf
from tensorflow.keras.layers import (
    Add,
    Dense,
    Dropout,
    BatchNormalization,
    Activation,
)

from tensorflow.keras import Model, initializers

from mlpp_lib.physical_layers import *
from mlpp_lib.probabilistic_layers import *

import numpy as np

try:
    import tcn  # type: ignore
except ImportError:
@@ -24,6 +23,10 @@
    TCN_IMPORTED = True


_LOGGER = logging.getLogger(__name__)


@tf.keras.utils.register_keras_serializable()
class MonteCarloDropout(Dropout):
    def call(self, inputs):
        return super().call(inputs, training=True)
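
A minimal sketch (not part of this commit) of how the MonteCarloDropout layer above can be used: because call() forces training=True, dropout stays active at prediction time, so repeated forward passes yield a sample of stochastic outputs. The toy model and data below are hypothetical.

import numpy as np
import tensorflow as tf
from tensorflow.keras.layers import Dense

from mlpp_lib.models import MonteCarloDropout

# toy model with MC dropout between two dense layers (hypothetical example)
inputs = tf.keras.Input(shape=(8,))
x = Dense(16, activation="relu")(inputs)
x = MonteCarloDropout(0.5)(x)
outputs = Dense(1)(x)
model = tf.keras.Model(inputs, outputs)

x_new = np.random.rand(4, 8).astype("float32")
# dropout is applied on every call, so each pass gives a different prediction
samples = np.stack([model(x_new).numpy() for _ in range(20)])
mean, spread = samples.mean(axis=0), samples.std(axis=0)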
@@ -32,15 +35,22 @@ def call(self, inputs):
def _build_fcn_block(
    inputs,
    hidden_layers,
    batchnorm,
    activations,
    dropout,
    mc_dropout,
    skip_connection,
    idx=0,
):
    if mc_dropout and dropout is None:
        _LOGGER.warning("mc_dropout=True is ignored because dropout is None")

    x = inputs
    for i, units in enumerate(hidden_layers):
        x = Dense(units, activation=activations[i], name=f"dense_{idx}_{i}")(x)
        x = Dense(units, name=f"dense_{idx}_{i}")(x)
        if batchnorm:
            x = BatchNormalization()(x)
        x = Activation(activations[i])(x)
        if i < len(dropout) and 0.0 < dropout[i] < 1.0:
            if mc_dropout:
                x = MonteCarloDropout(dropout[i], name=f"mc_dropout_{idx}_{i}")(x)
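The reordering in this hunk moves the activation out of the Dense layer so that, when batchnorm is enabled, the non-linearity is applied after BatchNormalization. A standalone sketch of the resulting block ordering (assumed intent, not code from this commit; dense_block is a hypothetical helper):

import tensorflow as tf
from tensorflow.keras.layers import Dense, BatchNormalization, Activation, Dropout

def dense_block(x, units, activation="relu", batchnorm=True, dropout=0.1):
    x = Dense(units)(x)              # linear projection only
    if batchnorm:
        x = BatchNormalization()(x)  # normalize the pre-activations
    x = Activation(activation)(x)    # non-linearity after the (optional) BN
    if dropout and 0.0 < dropout < 1.0:
        x = Dropout(dropout)(x)
    return x

inputs = tf.keras.Input(shape=(10,))
outputs = dense_block(inputs, 32)
model = tf.keras.Model(inputs, outputs)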
@@ -82,6 +92,7 @@ def fully_connected_network(
    input_shape: tuple[int],
    output_size: int,
    hidden_layers: list,
    batchnorm: bool = False,
    activations: Optional[Union[str, list[str]]] = "relu",
    dropout: Optional[Union[float, list[float]]] = None,
    mc_dropout: bool = False,
@@ -101,6 +112,8 @@ def fully_connected_network(
    hidden_layers: list[int]
        List that is used to define the fully connected block. Each element creates
        a Dense layer with the corresponding units.
    batchnorm: bool
        Use batch normalization. Default is False.
    activations: str or list[str]
        (Optional) Activation function(s) for the Dense layer(s). See https://keras.io/api/layers/activations/#relu-function.
        If a string is passed, the same activation is used for all layers. Default is `relu`.
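A hypothetical call using the new flag (assuming the remaining arguments of fully_connected_network keep their defaults; the values below are illustrative, not taken from this diff):

from mlpp_lib.models import fully_connected_network

model = fully_connected_network(
    input_shape=(24,),
    output_size=2,
    hidden_layers=[64, 32],
    batchnorm=True,        # new option: BatchNormalization after each Dense
    activations="relu",
    dropout=[0.2, 0.1],
    mc_dropout=False,
)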
@@ -150,6 +163,7 @@ def fully_connected_network(
    x = _build_fcn_block(
        inputs,
        hidden_layers,
        batchnorm,
        activations,
        dropout,
        mc_dropout,
@@ -165,6 +179,7 @@ def fully_connected_multibranch_network(
    input_shape: tuple[int],
    output_size: int,
    hidden_layers: list,
    batchnorm: bool = False,
    activations: Optional[Union[str, list[str]]] = "relu",
    dropout: Optional[Union[float, list[float]]] = None,
    mc_dropout: bool = False,
@@ -184,6 +199,8 @@ def fully_connected_multibranch_network(
    hidden_layers: list[int]
        List that is used to define the fully connected block. Each element creates
        a Dense layer with the corresponding units.
    batchnorm: bool
        Use batch normalization. Default is False.
    activations: str or list[str]
        (Optional) Activation function(s) for the Dense layer(s). See https://keras.io/api/layers/activations/#relu-function.
        If a string is passed, the same activation is used for all layers. Default is `relu`.
@@ -242,6 +259,7 @@ def fully_connected_multibranch_network(
    x = _build_fcn_block(
        inputs,
        hidden_layers,
        batchnorm,
        activations,
        dropout,
        mc_dropout,
@@ -263,6 +281,7 @@ def deep_cross_network(
    input_shape: tuple[int],
    output_size: int,
    hidden_layers: list,
    batchnorm: bool = True,
    activations: Optional[Union[str, list[str]]] = "relu",
    dropout: Optional[Union[float, list[float]]] = None,
    mc_dropout: bool = False,
@@ -282,6 +301,8 @@ def deep_cross_network(
    hidden_layers: list[int]
        List that is used to define the fully connected block. Each element creates
        a Dense layer with the corresponding units.
    batchnorm: bool
        Use batch normalization. Default is True.
    activations: str or list[str]
        (Optional) Activation function(s) for the Dense layer(s). See https://keras.io/api/layers/activations/#relu-function.
        If a string is passed, the same activation is used for all layers. Default is `relu`.
@@ -338,16 +359,15 @@ def deep_cross_network(

    # deep part
    deep = inputs
    for i, u in enumerate(hidden_layers):
        deep = Dense(u)(deep)
        deep = BatchNormalization()(deep)
        deep = Activation(activations[i])(deep)
        if i < len(dropout) and 0.0 < dropout[i] < 1.0:
            if mc_dropout:
                deep = MonteCarloDropout(dropout[i])(deep)
            else:
                deep = Dropout(dropout[i])(deep)
    # deep = tf.keras.Model(inputs=inputs, outputs=deep, name="deepblock")
    deep = _build_fcn_block(
        deep,
        hidden_layers,
        batchnorm,
        activations,
        dropout,
        mc_dropout,
        skip_connection=False,
    )

    # merge
    merge = tf.keras.layers.Concatenate()([cross, deep])
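For readers of this hunk, cross is built earlier in deep_cross_network and is not shown in the diff. A generic sketch of the standard Deep & Cross formulation it refers to (cross_layer is a hypothetical helper, not the repository's actual implementation):

import tensorflow as tf
from tensorflow.keras.layers import Dense, Concatenate

def cross_layer(x0, x):
    # x_{l+1} = x0 * (W x_l + b) + x_l  (element-wise feature crossing)
    return x0 * Dense(x0.shape[-1])(x) + x

inputs = tf.keras.Input(shape=(16,))
cross = inputs
for _ in range(3):
    cross = cross_layer(inputs, cross)

deep = Dense(32, activation="relu")(inputs)   # stands in for the deep branch above
merged = Concatenate()([cross, deep])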
