seq2seq.ini
[strings]
# Mode : train, test, serve
mode = train
train_enc = data/train.enc
train_dec = data/train.dec
test_enc = data/test.enc
test_dec = data/test.dec
# folder where checkpoints, vocabulary, temporary data will be stored
working_directory = working_dir/
[ints]
# vocabulary size
# 20,000 is a reasonable size
enc_vocab_size = 20000
dec_vocab_size = 20000
# number of LSTM layers : 1/2/3
num_layers = 3
# typical options : 128, 256, 512, 1024
layer_size = 256
# dataset size limit; 0 = no limit (the typical setting)
max_train_data_size = 0
batch_size = 64
# steps per checkpoint
# Note : At each checkpoint, the model's parameters are saved, the model is
# evaluated, and the results are printed
steps_per_checkpoint = 300
[floats]
learning_rate = 0.5
learning_rate_decay_factor = 0.99
max_gradient_norm = 5.0
##############################################################################
# Note : Edit the bucket sizes at line 47 of execute.py (_buckets)
#
# Learn more about the configurations from this link
# https://www.tensorflow.org/versions/r0.9/tutorials/seq2seq/index.html
##############################################################################
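For reference, below is a minimal sketch of how a training script such as execute.py could load this file using Python's standard configparser module, casting each section's values to the matching type. The load_config helper and its behaviour are illustrative assumptions, not the repository's actual loading code.

import configparser

def load_config(path="seq2seq.ini"):
    # Parse the ini file; lines starting with '#' are treated as comments.
    parser = configparser.ConfigParser()
    parser.read(path)
    config = {}
    # [strings] values are kept as plain strings
    config.update(dict(parser.items("strings")))
    # [ints] and [floats] values are cast to the matching numeric type
    config.update({k: int(v) for k, v in parser.items("ints")})
    config.update({k: float(v) for k, v in parser.items("floats")})
    return config

# Example usage with the values above:
#   cfg = load_config()
#   cfg["num_layers"]     -> 3
#   cfg["learning_rate"]  -> 0.5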