-
Notifications
You must be signed in to change notification settings - Fork 0
/
dbn_evaluation.py
92 lines (63 loc) · 2.61 KB
/
dbn_evaluation.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
import argparse
import pickle
import numpy as np
import torch
import utils.loader as l
from core.dbn import DBN
def get_arguments():
    """Gets arguments from the command line.

    Returns:
        A parser with the input arguments.

    """

    # Creates the ArgumentParser.
    # `description` (not `usage`) is the right keyword here: overriding
    # `usage` would suppress argparse's auto-generated usage line that
    # lists the positional and optional arguments in --help output.
    parser = argparse.ArgumentParser(description='Evaluates a fine-tuned DBN.')

    # Positional arguments
    parser.add_argument('history', help='History file identifier', type=str)
    parser.add_argument('dataset', help='Dataset identifier', choices=['fmnist', 'kmnist', 'mnist'])

    # DBN architecture / training options
    parser.add_argument('-n_visible', help='Number of visible units', type=int, default=784)
    parser.add_argument('-n_layers', help='Number of DBN layers', type=int, default=3)
    parser.add_argument('-batch_size', help='Batch size', type=int, default=128)
    parser.add_argument('-epochs', help='Number of training epochs', nargs='+', type=int, default=[3, 3, 3])
    parser.add_argument('-seed', help='Seed identifier', type=int, default=0)
    parser.add_argument('--use_gpu', help='Usage of GPU', action='store_true')

    return parser.parse_args()
if __name__ == '__main__':
    # Gathers the input arguments
    args = get_arguments()

    # Gathering common variables
    history = args.history
    dataset = args.dataset
    seed = args.seed

    # Gathering DBN-related variables
    n_visible = args.n_visible
    n_layers = args.n_layers
    batch_size = args.batch_size
    epochs = tuple(args.epochs)
    use_gpu = args.use_gpu

    # Loads the data (the middle/validation split is unused here)
    train, _, test = l.load_dataset(name=dataset, seed=seed)

    # Defines the seeds for reproducibility
    torch.manual_seed(seed)
    np.random.seed(seed)

    # Loads the optimization history file.
    # NOTE(review): pickle.load can execute arbitrary code — only open
    # history files produced by this project's own optimization runs.
    with open(history, "rb") as input_file:
        # Loads object from file
        h = pickle.load(input_file)

    # Gathers the best parameters (best agent's position at the last
    # recorded iteration; exact layout depends on the history object —
    # assumed here to be a flat list of 1-element arrays, TODO confirm)
    p = h.best_agent[-1][0]

    # Fine-tuned parameters, decoded from the flat position vector:
    # [0, n) -> hidden units, [n, 2n) -> learning rates,
    # [2n, 3n) -> momenta, [3n, ...) -> weight decays (n = n_layers)
    n_hidden = tuple(int(_p[0]) for _p in p[:n_layers])
    steps = tuple([1] * n_layers)
    lr = tuple(float(_p[0]) for _p in p[n_layers:n_layers * 2])
    momentum = tuple(float(_p[0]) for _p in p[n_layers * 2:n_layers * 3])
    decay = tuple(float(_p[0]) for _p in p[n_layers * 3:])
    temperature = tuple([1] * n_layers)

    # Initializes the model
    model = DBN('bernoulli', n_visible, n_hidden, steps,
                lr, momentum, decay, temperature, use_gpu)

    # Trains the model using the training set
    model.fit(train, batch_size, epochs)

    # Reconstructs over the testing set
    mse, _ = model.reconstruct(test)

    # Outputs the MSE to a file.
    # .cpu() is required before .numpy() when the model ran on GPU
    # (a CUDA tensor cannot be converted directly); it is a no-op for
    # CPU tensors, so this is safe either way.
    with open(history + '.txt', 'w') as output_file:
        output_file.write(str(mse.detach().cpu().numpy().item()))