-
Notifications
You must be signed in to change notification settings - Fork 3
/
tsmixer_grid_search_conf.py
117 lines (93 loc) · 5 KB
/
tsmixer_grid_search_conf.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
import itertools
import os
from dataclasses import dataclass, field
from typing import Optional, Tuple, Dict, List, Iterator

from loguru import logger
from mashumaro import DataClassDictMixin

from .tsmixer_conf import TSMixerConf
@dataclass
class TSMixerGridSearch(DataClassDictMixin):
    """Configuration for a hyperparameter grid search over TSMixer training runs.

    Each entry in `param_ranges` describes lists of candidate values; `iterate`
    yields one `TSMixerConf` per point of the Cartesian product of those lists.
    """

    @dataclass
    class ParamRange(DataClassDictMixin):
        """One group of hyperparameter ranges to sweep (Cartesian product)."""

        learning_rates: List[float]
        "Learning rates"

        no_mixer_layers: List[int]
        "Number of mixer layers"

        dropouts: List[float]
        "Dropout"

        input_lengths: List[int]
        "Number of time steps to use as input"

        prediction_lengths: List[int]
        "Number of time steps to predict"

        feat_mixing_hidden_channels: List[Optional[int]] = field(default_factory=lambda: [None])
        "Number of hidden channels in the feature mixing MLP. If None, uses same as input features."

        batch_sizes: List[int] = field(default_factory=lambda: [64])
        "Batch size"

        num_epochs: List[int] = field(default_factory=lambda: [100])
        "Number of epochs to train for"

        optimizers: List[str] = field(default_factory=lambda: ["Adam"])
        "Optimizer to use"

        @property
        def options_str(self) -> str:
            """Deterministic label for this range group (keys sorted alphabetically).

            NOTE(review): this encodes the *whole* list for each parameter, not a
            single combination — every grid point produced from this ParamRange
            maps to the same string. See the note in `iterate`.
            """
            opts = {
                "lr": str(self.learning_rates),
                "nmix": str(self.no_mixer_layers),
                "drop": str(self.dropouts),
                "in": str(self.input_lengths),
                "pred": str(self.prediction_lengths),
                "hidden": str(self.feat_mixing_hidden_channels),
                "batch": str(self.batch_sizes),
                "epochs": str(self.num_epochs),
                "opt": str(self.optimizers),
            }
            # Sort by key so the label is stable regardless of insertion order
            return "_".join(f"{key}{opts[key]}" for key in sorted(opts))

    param_ranges: List[ParamRange]
    "Any number of parameter ranges to try"

    output_dir: str
    "Output directory"

    no_features: int
    "Number of features in the dataset"

    data_src: TSMixerConf.DataSrc = TSMixerConf.DataSrc.CSV_FILE
    "Where to load the dataset from"

    data_src_csv: Optional[str] = None
    "Path to the CSV file to load the dataset from. Only used if data_src is CSV_FILE"

    def iterate(self) -> Iterator[TSMixerConf]:
        """Iterate over all configurations

        Yields:
            Iterator[TSMixerConf]: Configuration for a single run
        """
        for idx, param_range in enumerate(self.param_ranges):
            logger.info("===========================================")
            logger.info(f"Grid search iteration {idx+1}/{len(self.param_ranges)}")
            logger.info("===========================================")

            # Output subdir — invariant across the grid for this param_range,
            # so compute it once here rather than inside the product loop.
            # NOTE(review): options_str names the whole range, not the single
            # combination, so every conf below shares this directory — confirm
            # downstream training runs don't overwrite each other.
            output_dir = os.path.join(self.output_dir, param_range.options_str)

            # itertools.product varies the rightmost factor fastest, which
            # matches the original nested-loop ordering exactly.
            grid = itertools.product(
                param_range.learning_rates,
                param_range.no_mixer_layers,
                param_range.dropouts,
                param_range.feat_mixing_hidden_channels,
                param_range.input_lengths,
                param_range.prediction_lengths,
                param_range.batch_sizes,
                param_range.num_epochs,
                param_range.optimizers,
            )
            for (
                learning_rate,
                no_mixer_layers,
                dropout,
                feat_mixing_hidden_channels,
                input_length,
                prediction_length,
                batch_size,
                num_epochs,
                optimizer,
            ) in grid:
                conf = TSMixerConf(
                    input_length=input_length,
                    prediction_length=prediction_length,
                    no_features=self.no_features,
                    no_mixer_layers=no_mixer_layers,
                    output_dir=output_dir,
                    data_src=self.data_src,
                    data_src_csv=self.data_src_csv,
                    batch_size=batch_size,
                    num_epochs=num_epochs,
                    learning_rate=learning_rate,
                    optimizer=optimizer,
                    dropout=dropout,
                    feat_mixing_hidden_channels=feat_mixing_hidden_channels
                )
                logger.info(f"TSMixer config: {conf}")
                logger.info(f"Output sub-dir: {output_dir}")
                yield conf