-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy pathparams.py
More file actions
70 lines (54 loc) · 1.39 KB
/
params.py
File metadata and controls
70 lines (54 loc) · 1.39 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
# -*- coding: utf-8 -*-
#/usr/bin/python2
# NOTE(review): the line above resembles a shebang but is missing the
# leading '!' and is not on line 1, so it has no effect — confirm intent.
# Filesystem paths used by all config classes below.
data_path_base = './datasets'  # root directory holding the input datasets
logdir_path = './logdir'       # directory for logs / checkpoints
# hyper_params is about the experiment; model classes below subclass and
# override individual values.
class default_hyper_params:
    """Default training hyper-parameters shared by every model config."""
    num_epochs = 1000
    # True ceiling division is -(-a // b).  The previous `a // b + 1`
    # overshoots whenever the division is exact: with num_epochs=1000 it
    # produced an eval interval of 101 (only 9 evals over training, not
    # the intended 10) and a save interval of 501 (1 save, not 2).
    eval_per_epoch = -(-num_epochs // 10)  # evaluate ~10 times over the whole run
    save_per_epoch = -(-num_epochs // 2)   # save ~2 times over the whole run
    batch_size = 32
    keep_probe = 0.5        # dropout keep probability during training
    keep_probe_test = 1.0   # no dropout at evaluation time
    learn_rate = 0.001
# model_params describe model structure; training defaults come from the base.
class MLP_model_params(default_hyper_params):
    """MLP config: overrides dropout, restates batch size / learning rate."""
    # -- data dimensions --
    input_dim = 784    # presumably a flattened 28x28 image — TODO confirm
    output_dim = 10
    # -- architecture --
    hidden_units = [256, 128, 64, 10]  # alias = E
    num_banks = 16
    keep_probe = 0.2   # heavier dropout than the default 0.5
    # -- optimisation --
    batch_size = 32
    learn_rate = 0.001
class Deep_MNIST_model_params(default_hyper_params):
    """Deep-MNIST config: only sets the data dimensions explicitly."""
    # -- data dimensions --
    input_dim = 784    # flattened 28x28 MNIST digit
    output_dim = 10    # ten digit classes
    # -- optimisation (same values as the defaults, restated for clarity) --
    batch_size = 32
    learn_rate = 0.001
class autoencoder_vae_model_params(default_hyper_params):
    """VAE autoencoder config.

    Overrides the learning rate and defines the latent-space geometry.
    """
    # -- data dimensions --
    input_dim = 784        # presumably a flattened 28x28 image — TODO confirm
    # -- optimisation --
    learn_rate = 0.0001    # lower than the default 0.001
    # -- architecture --
    n_hidden = 500         # hidden-layer width
    dim_z = 20             # latent dimension
    # (removed redundant trailing `pass`: a class body with statements
    # does not need one)
class empty_model_params(default_hyper_params):
    """Placeholder config: inherits every default unchanged."""
    pass
class Test1_params:
    """Evaluation settings for the TIMIT test split."""
    # Glob covering every speaker/utterance wav under the TEST split.
    data_path = '{}/timit/TIMIT/TEST/*/*/*.wav'.format(data_path_base)
    # Batch size used during evaluation.
    batch_size = 32
class Convert_params:
    """Settings for the conversion step."""
    # Glob covering the arctic/bdl speaker's wav files.
    data_path = '{}/arctic/bdl/*.wav'.format(data_path_base)
    # Conversion runs with a small batch.
    batch_size = 2
    # Pre-emphasis strength applied to converted audio — TODO confirm units.
    emphasis_magnitude = 1.2