forked from paparazzi/paparazzi
Showing 3 changed files with 236 additions and 0 deletions.
@@ -0,0 +1,7 @@
function entropy = getEntropy(p_dist)
% Shannon entropy (in bits) of a discrete distribution given as a row vector p_dist.
entropy = 0;
for i = 1:size(p_dist,2)
    if(p_dist(i) > 0)
        entropy = entropy - p_dist(i) * log2(p_dist(i));
    end
end
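A quick usage sketch with hypothetical values of my own: a uniform distribution over n bins should yield log2(n) bits, while a fully peaked distribution yields 0.

p_uniform = ones(1, 8) ./ 8;   % uniform over 8 bins
getEntropy(p_uniform)          % expected: log2(8) = 3 bits
p_peak = [1, zeros(1, 7)];     % all probability mass in one bin
getEntropy(p_peak)             % expected: 0 bits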
@@ -0,0 +1,75 @@
% read the YUV dictionary and visualize the textons as RGB patches
% clear; close all; clc;

% load Dictionary
% YUV_Dict = load('GoodDictionaryCyberZooPinkMat.dat');
YUV_Dict = load('Dictionary_00000.dat');
% YUV_Dict = load('Dictionary_big_flowers.dat');

% params
n_textons = 20;  % number of visual words
patch_size = 6;  % size of one patch

% extract YUV components (the dictionary is stored as UYVY: one U-Y1-V-Y2 group per two pixels)
U_ALL  = YUV_Dict(1:4:end,1);
Y1_ALL = YUV_Dict(2:4:end,1);
V_ALL  = YUV_Dict(3:4:end,1);
Y2_ALL = YUV_Dict(4:4:end,1);

Dictionary = zeros(n_textons, patch_size*patch_size*3);

hh = figure();

for w = 1:n_textons
    subplot(ceil(sqrt(n_textons)), ceil(sqrt(n_textons)), w);

    % extract YUV per texton (18 = patch_size*patch_size/2 UYVY groups per 6x6 patch)
    U  = U_ALL((w-1)*18+1:w*18, 1);
    Y1 = Y1_ALL((w-1)*18+1:w*18, 1);
    V  = V_ALL((w-1)*18+1:w*18, 1);
    Y2 = Y2_ALL((w-1)*18+1:w*18, 1);

    % YUV -> RGB conversion (BT.601-style coefficients)
    R1 = Y1 + 1.4022 .* (V - 128);
    G1 = Y1 - 0.3456 .* (U - 128) - (0.7145 .* (V - 128));
    B1 = Y1 + 1.7710 .* (U - 128);
    R2 = Y2 + 1.4022 .* (V - 128);
    G2 = Y2 - 0.3456 .* (U - 128) - (0.7145 .* (V - 128));
    B2 = Y2 + 1.7710 .* (U - 128);

    % interleave the two luminance samples back into the 6x6 patch
    R = zeros(patch_size, patch_size);
    G = zeros(patch_size, patch_size);
    B = zeros(patch_size, patch_size);
    R(:,1:2:end) = reshape(R1,3,6)';
    R(:,2:2:end) = reshape(R2,3,6)';
    G(:,1:2:end) = reshape(G1,3,6)';
    G(:,2:2:end) = reshape(G2,3,6)';
    B(:,1:2:end) = reshape(B1,3,6)';
    B(:,2:2:end) = reshape(B2,3,6)';

    % clip the values into range [0, 255]
    R = max(0, min(R, 255));
    G = max(0, min(G, 255));
    B = max(0, min(B, 255));
    % R = min(max(R, 0),255);
    % G = min(max(G, 0),255);
    % B = min(max(B, 0),255);

    % form the image; note the channels are written in BGR order, while imshow
    % assumes RGB, so red and blue appear swapped on screen
    R_norm = R./255;
    G_norm = G./255;
    B_norm = B./255;
    rgbimg(:,:,1) = B_norm;
    rgbimg(:,:,2) = G_norm;
    rgbimg(:,:,3) = R_norm;

    Dictionary(w, 1:patch_size*patch_size) = reshape(rgbimg(:,:,1)', 1, 36);
    Dictionary(w, patch_size*patch_size+1:patch_size*patch_size*2) = reshape(rgbimg(:,:,2)', 1, 36);
    Dictionary(w, patch_size*patch_size*2+1:patch_size*patch_size*3) = reshape(rgbimg(:,:,3)', 1, 36);

    imshow(rgbimg);
    title(num2str(w));
end

% save dictionary_visualwords2_rgb.mat Dictionary
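As a hedged sketch (my own check, assuming only the row layout written above), the first texton patch can be rebuilt from its Dictionary row by inverting the reshape:

pp = patch_size * patch_size;
patch = zeros(patch_size, patch_size, 3);
patch(:,:,1) = reshape(Dictionary(1, 1:pp), patch_size, patch_size)';
patch(:,:,2) = reshape(Dictionary(1, pp+1:2*pp), patch_size, patch_size)';
patch(:,:,3) = reshape(Dictionary(1, 2*pp+1:3*pp), patch_size, patch_size)';
figure(); imshow(patch); title('texton 1 rebuilt from its Dictionary row');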
@@ -0,0 +1,154 @@
function train()

MAX_SAMPLES = 25000;
start_sample = 1;

BIAS = true;
PRIOR = true;
TEST_SET = true;
test_ratio = 0.30;

weights = false; % set to true if onboard-learned weights are available for comparison
if(weights)
    w = load('Weights_00000.dat');
end

% structure of A:
% 1) height
% 2) gain
% 3) cov div
% 4-end) textons
A = load('Training_set_00000.dat');
n_samples = size(A,1);
if(TEST_SET)
    n_training = round((1-test_ratio) * n_samples);
    n_test = n_samples - n_training;
    % hold out a random contiguous block as test set (+1 keeps the index positive)
    start_ind = floor(rand(1) * (n_samples - n_test)) + 1;
    A_test = A(start_ind:start_ind+n_test, :);
    A = [A(1:start_ind-1, :); A(start_ind+n_test+1:end, :)];
    % A_test = A(n_training+1:end, :);
    % A = A(1:n_training, :);
end

% **************************************
% learn a mapping from features to gain:
% **************************************

A = A(start_sample:min([MAX_SAMPLES, size(A,1)]), :);
b = A(:,2);
f = A(:,4:end);
if(BIAS)
    AA = [f, ones(size(A,1),1)];
else
    AA = f;
end
if(~PRIOR)
    x = AA \ b;
else
    % ridge regression: L2-regularized least squares with strength alpha
    alpha = 1; %10;
    x = (AA' * AA + alpha * eye(size(AA, 2))) \ (AA' * b);
end

% store the resulting weights:
fid = fopen('Weights_MATLAB.dat', 'w');
for i = 1:length(x)-1
    fprintf(fid, '%f ', x(i));
end
fprintf(fid, '%f', x(end));
fclose(fid);

% evaluate the resulting estimates and compare them with the weights
% learned onboard:
y = AA * x;
height_gain_estimate = y;
fprintf('MAE on training set = %f\n', mean(abs(y-b)));
if(weights)
    Z = AA * w';
end
figure(); plot(y); hold on; plot(b);
if(weights)
    plot(Z);
end
title('Height on training set');
legend({'height gain estimate', 'height gain', 'onboard gain estimate'});

figure(); plot(smooth(y, 20)); hold on; plot(b);
if(weights)
    plot(smooth(Z, 20));
end
title('Smoothed Height');
legend({'height gain estimate', 'height gain', 'onboard gain estimate'});

figure();
bar(x, 'FaceColor', [1 0 0]); % hold on; bar(w);
title('Weights learned in MATLAB');

figure();
plot(A(:,1)); hold on; plot(A(:,2));
legend({'Height', 'Gain'});

% MAE on test set:
f_test = A_test(:,4:end);
if(BIAS)
    AA_test = [f_test, ones(size(A_test,1),1)];
else
    AA_test = f_test;
end
y_test = AA_test * x;
b_test = A_test(:, 2);
fprintf('MAE on test set = %f\n', mean(abs(y_test-b_test)));
if(weights)
    Z_test = AA_test * w';
end
figure(); plot(y_test); hold on; plot(b_test);
if(weights)
    plot(Z_test);
end
title('Height on test set');
legend({'height gain estimate', 'height gain', 'onboard gain estimate'});

% ******************************************************
% now learn a mapping from features to the sonar height:
% ******************************************************

b = A(:,1);
f = A(:,4:end);
if(BIAS)
    AA = [f, ones(size(A,1),1)];
else
    AA = f;
end
x = AA \ b;
y = AA * x;
figure(); plot(smooth(y, 20)); hold on; plot(b);

% also map the gain estimate itself to the sonar height:
b = A(:,1);
f = height_gain_estimate;
AA = [f, ones(size(A,1),1)];
x = AA \ b;
y = AA * x;
plot(smooth(y,20));
title('Sonar height');
legend({'estimate trained with sonar', 'sonar', 'scaled gain estimate'});

figure();
plot(A(:, 3));
title('Cov div');

% entropy of the texton distributions (not of the scalar gain estimate):
f = A(:,4:end);
entr = getEntropies(f);
p_peak = zeros(1, size(f,2));
p_peak(1) = 1;
min_entr = getEntropy(p_peak);
p_uniform = ones(1, size(f,2)) ./ size(f,2);
max_entr = getEntropy(p_uniform);
figure();
histogram(entr, 30);
title(['Entropies: min = ' num2str(min_entr) ', max = ' num2str(max_entr)]);

function entropies = getEntropies(f)
n_el = size(f,1);
entropies = zeros(n_el,1);
for el = 1:n_el
    entropies(el) = getEntropy(f(el, :));
end
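As a rough, self-contained sanity check of the regularized fit used above (synthetic data and variable names of my own choosing, not files from the commit), the same ridge closed form should recover known weights from texton-like features:

n = 1000; n_feat = 20; alpha = 1;
F = rand(n, n_feat);
F = bsxfun(@rdivide, F, sum(F, 2));      % rows behave like texton probability distributions
w_true = randn(n_feat, 1); bias_true = 0.5;
gain = F * w_true + bias_true + 0.01 * randn(n, 1);
AA = [F, ones(n, 1)];
x = (AA' * AA + alpha * eye(size(AA, 2))) \ (AA' * gain);   % same closed form as in train()
fprintf('MAE of synthetic ridge fit = %f\n', mean(abs(AA * x - gain)));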