% This program calls the function MnistConv.m and trains the network max_epoch
% times. Then it provides a test data set to the trained network and displays
% its accuracy. Be advised that this program takes quite some time to run.

clear all
close all
clc

%% Training set:
sample_perfect = load('sample_gato1.txt'); %the image is 31x13.
[lin, col] = size(sample_perfect);

Images_train = zeros(lin, col, 130); %130 training samples.
Images_train(:, :, 1) = sample_perfect;
Labels_train = ones(130, 1); %1 => it is a cat; 0 => not a cat.

%Creating training set:
for k = 2:130
    if k <= 110 %cat images: the template corrupted by a block of noise
        block_of_noise = unifrnd(0, 10, 5, 5); %5x5 noise.
        idx_lin = round(unifrnd(1, 31-5));
        idx_col = round(unifrnd(1, 12-5));
        Images_train(:, :, k) = sample_perfect;
        Images_train(idx_lin:idx_lin+4, idx_col:idx_col+4, k) = block_of_noise;
        Labels_train(k) = 1;
    else %not cat images: pure noise
        Images_train(:, :, k) = unifrnd(0, 10, lin, col);
        Labels_train(k) = 0;
    end
end

%Show some training samples:
my_color_map = [1:-0.1:0; 1:-0.1:0; 1:-0.1:0]'; %grayscale colormap
for i = 1:10
    figure(i)
    image(Images_train(:, :, round(unifrnd(1, 130))));
    colormap(my_color_map);
    title('One training sample');
end

figure(11)
image(Images_train(:, :, 120)); %sample 120 is one of the "not a cat" samples
colormap(my_color_map);
title('One training sample');

rng(1);

%% Learning:
%Each 2x2 convolution filter turns the 31x13 image into a 30x12 feature map
%(2 filters => 2 maps); 2x2 pooling then gives 2 output images of 15x6 from
%the extraction network, i.e. 2*15*6 = 180 inputs to the hidden layer.

% Weights initialization (the sqrt(6)/sqrt(...) factors are Xavier-style
% normalization constants carried over from the original MNIST layer sizes):
W1 = 1e-2*randn([2 2 2]);
W5 = (2*rand(100, 2*15*6) - 1) * sqrt(6) / sqrt(360 + 2000);
Wo = (2*rand(  2,    100) - 1) * sqrt(6) / sqrt( 10 +  100);

X = Images_train; %input images
D = Labels_train; %correct labels for supervised learning

max_epoch = 30;
for epoch = 1:max_epoch
    fprintf('Current epoch: %u.\n', epoch);
    [W1, W5, Wo] = MnistConv(W1, W5, Wo, X, D);
end

% At this point, [W1, W5, Wo] are the adjusted parameters:
W1 %filters
W5 %hidden layer
Wo %output layer

%% Test:
%Creating test set:
Images_test = zeros(lin, col, 20); %20 test samples.
Labels_test = ones(20, 1); %1 => it is a cat; 0 => not a cat.
for k = 1:20
    if k <= 17 %cat images: the template corrupted by a block of noise
        block_of_noise = unifrnd(0, 10, 4, 4); %4x4 noise.
        idx_lin = round(unifrnd(1, 31-4));
        idx_col = round(unifrnd(1, 12-4));
        Images_test(:, :, k) = sample_perfect;
        Images_test(idx_lin:idx_lin+3, idx_col:idx_col+3, k) = block_of_noise;
        Labels_test(k) = 1;
    else %not cat images: pure noise
        Images_test(:, :, k) = unifrnd(0, 10, lin, col);
        Labels_test(k) = 0;
    end
end

X = Images_test; %input images
D = Labels_test; %correct labels for accuracy check

acc = 0; %hit counter
N = length(D);
for k = 1:N
    x  = X(:, :, k);            % Input, 31x13
    y1 = Conv(x, W1);           % Convolution, 30x12x2
    y2 = ReLU(y1);              %
    y3 = Pool(y2);              % Pooling, 15x6x2
    y4 = reshape(y3, [], 1);    % Flatten, 180
    v5 = W5*y4;                 % Hidden (ReLU) layer, 100
    y5 = ReLU(v5);              %
    v  = Wo*y5;                 % Output (softmax) layer, 2
    y  = Softmax(v);            %
    [~, i] = max(y);                %index of the winning output neuron
    predicted_cat = (i == 1);       %assumes output neuron 1 encodes "cat"
    if predicted_cat == (D(k) == 1) %if classification is correct, then increment the hit counter
        acc = acc + 1;
    end
end
acc = acc / N;
fprintf('Accuracy is %.4f%%.\n', 100*acc);

%% Saving results and trained weights.
save('MnistConv.mat');
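
%% Example (sketch): classify a single image with the trained network.
% Illustrative usage only, not part of the original training/testing pipeline.
% It reuses the forward pass and helper functions (Conv, ReLU, Pool, Softmax)
% assumed above, and the same output-index-to-label convention as the test
% loop (output neuron 1 <=> "cat"). In a fresh session the trained weights
% could be recovered first with: load('MnistConv.mat', 'W1', 'W5', 'Wo');
img = load('sample_gato1.txt');                             %any 31x13 image
features = Pool(ReLU(Conv(img, W1)));                       %feature extraction, 15x6x2
scores = Softmax(Wo * ReLU(W5 * reshape(features, [], 1))); %2 class scores
[~, winner] = max(scores);
if winner == 1
    fprintf('The network classifies this image as a cat.\n');
else
    fprintf('The network classifies this image as NOT a cat.\n');
end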
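
% ---------------------------------------------------------------------------
% Dependency note: Conv.m, ReLU.m, Pool.m and Softmax.m are not defined in
% this file; they (and MnistConv.m) must be on the MATLAB path. The sketch
% below shows one possible implementation of each helper, consistent with the
% array sizes used above ('valid' 2x2 convolution and 2x2 mean pooling); the
% actual files shipped with the original example may differ. If they are
% missing, each function could be saved in its own .m file:
%
%   function y = Conv(x, W)              %multi-filter 'valid' convolution
%     [wrow, wcol, numFilters] = size(W);
%     y = zeros(size(x,1)-wrow+1, size(x,2)-wcol+1, numFilters);
%     for k = 1:numFilters
%       y(:, :, k) = conv2(x, rot90(W(:, :, k), 2), 'valid'); %correlation with W(:,:,k)
%     end
%   end
%
%   function y = ReLU(x)                 %rectified linear unit
%     y = max(x, 0);
%   end
%
%   function y = Pool(x)                 %2x2 mean pooling
%     [xrow, xcol, numFilters] = size(x);
%     y = zeros(xrow/2, xcol/2, numFilters);
%     for k = 1:numFilters
%       averaged = conv2(x(:, :, k), ones(2)/4, 'valid');
%       y(:, :, k) = averaged(1:2:end, 1:2:end);
%     end
%   end
%
%   function y = Softmax(x)              %softmax over the output vector
%     ex = exp(x - max(x));              %shift for numerical stability
%     y = ex / sum(ex);
%   end
% ---------------------------------------------------------------------------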