-
Notifications
You must be signed in to change notification settings - Fork 20
/
DBN.m
137 lines (113 loc) · 4.75 KB
/
DBN.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
classdef DBN < handle
    %DBN Deep belief network built from a stack of RBMs.
    %   The lower layers are BernoulliRBMs trained greedily layer by layer;
    %   the top layer is a SoftmaxRBM that models the joint distribution of
    %   the penultimate-layer features and the target classes.
    properties
        sizes                % layer sizes: [nVisible, hidden1, hidden2, ...]
        rbm = BernoulliRBM;  % array of RBMs, one per layer transition
    end
    methods
        function dbn = DBN(x, y, sizes, opts)
            %DBN Construct the network from training-data dimensions.
            %   x     - training inputs, one example per row
            %   y     - target labels (one row per example, one column per class)
            %   sizes - hidden layer sizes
            %   opts  - training options forwarded to each RBM constructor
            n = size(x, 2);
            m = size(y, 2);
            dbn.sizes = [n, sizes];
            nLayers = numel(dbn.sizes) - 1;
            for i = 1 : nLayers - 1
                dbn.rbm(i) = BernoulliRBM(dbn.sizes(i), dbn.sizes(i + 1), opts);
            end
            % for the final layer of the dbn we add some additional
            % parameters for modelling the joint distribution of the inputs and target classes
            dbn.rbm(nLayers) = SoftmaxRBM(dbn.sizes(nLayers), dbn.sizes(nLayers + 1), opts, m);
        end
        function train(dbn, x, y)
            %TRAIN Greedy layer-wise training.
            %   Each lower RBM is trained on the activations propagated up
            %   from the layer below; the top RBM is trained jointly on the
            %   final feature activations and the labels y.
            n = numel(dbn.rbm);
            for i = 1 : n-1
                train(dbn.rbm(i), x);
                x = rbmup(dbn.rbm(i), x);
            end
            train(dbn.rbm(n), x, y);
        end
        function probs = predict(dbn, x, y)
            %PREDICT Unnormalized class probabilities for each row of x.
            %   Uses the free-energy formulation of the top (joint) RBM:
            %   probs(:, i) is proportional to p(class i | x).
            %   y is used only for its dimensions (rows = examples, cols = classes).
            n = numel(dbn.rbm);
            m = size(y, 1);
            c = size(y, 2);
            % do an upward pass through the network for the test examples
            % to compute the feature activations in the penultimate layer
            for i = 2 : n
                x = rbmup(dbn.rbm(i - 1), x);
            end
            % precompute the label-independent part for efficiency
            precom = repmat(dbn.rbm(n).c', m, 1) + x * dbn.rbm(n).W';
            % allocate like x so this works on the CPU as well as on a GPU
            % (the original hard-coded 'gpuArray', which requires a GPU)
            probs = zeros(m, c, 'like', x);
            % probabilities aren't normalized
            for i = 1:c
                probs(:, i) = exp(dbn.rbm(n).d(i)) * prod(1 + exp(precom + repmat(dbn.rbm(n).U(:, i)', m, 1)), 2);
            end
        end
        function x = generate(dbn, class, c, nGibbSteps)
            %GENERATE Sample an input image for a given class label.
            %   class      - 1-based index of the class to clamp
            %   c          - total number of classes
            %   nGibbSteps - number of Gibbs sampling iterations at the top layer
            %   Returns a square image (transposed to row-major orientation).
            % randomly initialize a single input
            x = rand(1, dbn.sizes(1));
            n = numel(dbn.rbm);
            % clamp softmax to this label
            y = zeros(1, c);
            y(class) = 1;
            % do an upward pass through the network
            % to compute the feature activations in the penultimate layer
            for i = 1 : n - 1
                x = rbmup(dbn.rbm(i), x);
            end
            % do nGibbSteps iterations of gibbs sampling at the top layer;
            % the final iteration below uses mean-field activations (logsig)
            % instead of sampling, to reduce noise in the generated image
            for i = 1:nGibbSteps - 1
                h = RBM.sample(dbn.rbm(n).c' + x * dbn.rbm(n).W' + y * dbn.rbm(n).U');
                x = RBM.sample(dbn.rbm(n).b' + h * dbn.rbm(n).W);
            end
            h = RBM.sample(dbn.rbm(n).c' + x * dbn.rbm(n).W' + y * dbn.rbm(n).U');
            x = logsig(dbn.rbm(n).b' + h * dbn.rbm(n).W);
            % do a downward pass to generate sample
            for i = n-1:-1:1
                x = rbmdown(dbn.rbm(i), x);
            end
            % derive the image side from the input size rather than
            % hard-coding 28x28 (backward compatible for 784-dim MNIST inputs)
            side = round(sqrt(numel(x)));
            x = reshape(x, side, side)';
        end
        function x = generate2(dbn, class, c, nGibbSteps)
            %GENERATE2 Sample visible units by Gibbs sampling at the top layer only.
            %   Unlike GENERATE, the top-layer visible units are initialized
            %   randomly (no upward pass) and the result is returned as a
            %   flat vector without reshaping.
            % randomly initialize the visible units of the jointly trained layer
            x = rand(1, dbn.sizes(end - 1));
            n = numel(dbn.rbm);
            % clamp softmax to this label
            y = zeros(1, c);
            y(class) = 1;
            % do nGibbSteps iterations of gibbs sampling at the top layer
            for i = 1:nGibbSteps
                h = RBM.sample(dbn.rbm(n).c' + x * dbn.rbm(n).W' + y * dbn.rbm(n).U');
                x = RBM.sample(dbn.rbm(n).b' + h * dbn.rbm(n).W);
            end
            % do a downward pass to generate sample
            for i = n-1:-1:1
                x = rbmdown(dbn.rbm(i), x);
            end
        end
        function x = imageseq(dbn, class, c, nGibbSteps)
            %IMAGESEQ Run top-layer Gibbs sampling, saving an image per step.
            %   Writes one PNG per iteration via SAVEIMG so the sampling
            %   chain can be inspected as an image sequence.
            % randomly initialize the visible units of the jointly trained layer
            x = rand(1, dbn.sizes(end - 1));
            n = numel(dbn.rbm);
            % clamp softmax to this label
            y = zeros(1, c);
            y(class) = 1;
            % do nGibbSteps iterations of gibbs sampling at the top layer
            for i = 1:nGibbSteps
                h = RBM.sample(dbn.rbm(n).c' + x * dbn.rbm(n).W' + y * dbn.rbm(n).U');
                x = RBM.sample(dbn.rbm(n).b' + h * dbn.rbm(n).W);
                saveimg(dbn, x, n, class, i);
            end
        end
        function saveimg(dbn, x, n, class, iter)
            %SAVEIMG Project top-layer state x down to the input space and save it.
            %   Writes figures/<class-1>/<iter>.png (directory must exist).
            % do a downward pass to generate sample
            for i = n-1:-1:1
                x = rbmdown(dbn.rbm(i), x);
            end
            % gather in case x lives on the GPU; derive the image side from
            % the vector length instead of hard-coding 28x28
            img = gather(x);
            side = round(sqrt(numel(img)));
            imwrite(reshape(img, side, side)', sprintf('figures/%d/%03d.png', class - 1, iter));
        end
    end
end