% HMRF_VEM.m
%% MAP estimation algorithm (variational EM for a hidden Markov random field
%% with von Mises-Fisher emission distributions)
%---input---------------------------------------------------------
% data: input data, matrix of size XxYxZxP
% p: size of the last dimension of data (colors or time series length)
% k: number of classes (labels)
% beta: Potts model interaction parameter
% mus: von Mises-Fisher mean directions, one row per class
% kappas: von Mises-Fisher concentration parameters, one per class
% lambda: mean-field damping parameter of the Q update (0..1)
% MAP_iter: maximum number of outer (EM) iterations
% INNER_iter: maximum number of inner (mean-field) iterations
% neighbours_count: number of neighbours; supported values
% 2-D: 4, 8, 16
% 3-D: 6, 26
% mask: image mask, matrix of size XxYxZ (optional)
% 1 - the point is included in the computation
% 0 - the point is ignored
% labelCosts: (optional) per-label cost vector; when supplied, a label-cost
% term H is added to the variational update (see inner loop)
%---output--------------------------------------------------------
% sample: final segmentation matrix (argmax of Q, 0 where mask==0)
% beta: beta value
% NOTE(review): beta is returned but never re-estimated in this
% function body - it is passed through unchanged; confirm
% whether estimation was intended.
% kappas: estimated von Mises-Fisher concentration parameters
% mus: estimated von Mises-Fisher mean directions
% sample2: segmentation refined by graph-cut alpha-expansion MAP inference
function [sample, beta, mus, kappas, sample2]=HMRF_VEM(data, p, k, beta, mus, kappas, ...
lambda, MAP_iter, INNER_iter, ...
neighbours_count, mask, labelCosts)
sz = size(data);
% Number of voxels: product of all spatial dimensions (everything but the last).
flatsize = prod(sz(1:end-1));
% Flatten data to (voxels x p) so each row is one observation vector.
flat = reshape(data, [flatsize, p]);
if(nargin < 11)
% Default mask: include every voxel.
mask = ones(sz(1:end-1));
end
% neighbours indexes (flat indices of each voxel's neighbours; contract of
% GetNeighbours is defined elsewhere in the project)
all_neighbours_ind = GetNeighbours(sz(1:end-1), neighbours_count);
for i=1:MAP_iter
fprintf('\tIteration: %d out of %d\n',i,MAP_iter);
% E-step input: per-voxel, per-class log-likelihoods under current vMF params.
[~, logprobs] = CalculateLikelihoodProbabilities(flat, k, kappas, mus, mask);
% Initialize variational posterior Q: all mass on class 1.
Q = zeros(flatsize, k);
Q(:, 1) = ones(flatsize, 1);
% Inner mean-field fixed-point iterations for Q.
for u=1:INNER_iter
if(nargin < 12)
% Unnormalized update: Potts neighbour agreement minus log-likelihood.
% NOTE(review): the sign '- logprobs'' suggests logprobs holds
% negative log-likelihoods; confirm against
% CalculateLikelihoodProbabilities.
Qtilde = exp(beta ...
* squeeze(sum(reshape(Q(all_neighbours_ind, :), [neighbours_count, flatsize, k]), 1)) ...
- logprobs');
else
% Label-cost variant: H(idx, idy) accumulates the expected label-cost
% contribution. invQ(idx, idy) = P(voxel idx does NOT take label idy).
invQ = 1 - Q;
H = double(zeros(size(invQ)));
A = double(zeros(size(invQ)));
for idy=1:k
% A(idx, idy) = labelCosts(idy) * prod of invQ over all voxels
% except idx, built incrementally via the ratio recurrence below
% (O(flatsize) per label instead of O(flatsize^2)).
A(1, idy) = labelCosts(idy) * prod(invQ(2:end, idy), 1);
for idx=2:flatsize
% Shift the excluded voxel from idx-1 to idx.
% NOTE(review): divides by invQ(idx, idy); if Q(idx, idy)==1
% this is a division by zero - verify upstream guarantees.
A(idx, idy) = A(idx-1, idy) * invQ(idx-1, idy) / invQ(idx, idy);
end
end
for idx=1:flatsize
% H(idx, idy) = sum of A(idx, :) over all labels except idy,
% built by a prefix-style recurrence over idy.
H(idx, 1) = sum(A(idx, 2:end), 2);
for idy=2:k
H(idx, idy) = H(idx, idy-1) + A(idx, idy-1) - A(idx, idy);
end
end
Qtilde = exp(beta ...
* squeeze(sum(reshape(Q(all_neighbours_ind, :), [neighbours_count, flatsize, k]), 1)) ...
- logprobs' ...
+ H);
end
% Clamp to avoid Inf after exp, then normalize rows (guarding zero rows).
Qtilde = min(Qtilde, 10^100);
sumQ = sum(Qtilde, 2);
sumQ(sumQ==0) = 1;
% Damped (mean-field) update: convex mix of old Q and normalized Qtilde.
Q = (1-lambda) * Q + lambda * Qtilde ./ sumQ;
% Masked-out voxels carry no posterior mass.
Q(mask==0, :) = 0;
end
% M-step: re-estimate vMF parameters per class from soft assignments Q.
for l=1:k
% Weighted resultant vector of observations assigned to class l.
R = Q(mask~=0, l)' * flat(mask~=0,:);
Rlen = sqrt(sum(R .^ 2));
if Rlen ~= 0
% Mean direction = normalized resultant.
mus(l, :) = R / Rlen;
else
fprintf('WARNING: Rlen is zero\n');
end
% Concentration kappa solves: A_p(kappa) * N_l = Rlen, where A_p is the
% ratio of modified Bessel functions I_{p/2}/I_{p/2-1} (scaled form, the
% third argument 1, keeps the ratio finite for large kappa).
eqn = @(x) besseli(p/2, x, 1)/besseli(p/2-1,x, 1) * sum(Q(mask~=0, l)) - Rlen;
initval = kappas(l);
if isnan(eqn(initval))
fprintf('WARNING: function is NaN at initial value\n');
% Fall back to an arbitrary positive starting point.
initval = 10;
end
opts1 = optimset('display','off');
% Root via nonlinear least squares, constrained to kappa >= 0.
kappas(l) = lsqnonlin(eqn, initval, 0, Inf, opts1);
end
end
% Find MAP: hard labels = per-voxel argmax of the variational posterior.
[~, sample] = max(Q, [], 2);
sample = reshape(sample, sz(1:end-1));
sample(mask==0) = 0;
% NOTE(review): 'sample' is passed here in the position 'mask' occupied above;
% presumably nonzero labels act as the mask - confirm against
% CalculateLikelihoodProbabilities.
[~, logprobs] = CalculateLikelihoodProbabilities(flat, k, kappas, mus, sample);
% Refine the segmentation with graph-cut alpha-expansion MAP inference.
sample2 = MRF_MAP_GraphCutAExpansion(sample, logprobs, beta, k, MAP_iter, neighbours_count);