% This script calls the BackpropCE and BackpropXOR functions and trains the
% neural networks max_epoch times each. The squared sum of the output error
% (es1 and es2) is calculated at every epoch for each network, and their
% averages (E1 and E2) are recorded so the two cost functions can be compared.
%
%   W11 and W12: weights adjusted by the BackpropCE.m function.
%   W21 and W22: weights adjusted by the BackpropXOR.m function.

clearvars   % 'clearvars' clears workspace variables without the breakpoint/JIT-cache cost of 'clear all'
close all
clc

% Training inputs (each row is one sample; the third column is the bias input):
X = [0 0 1;
     0 1 1;
     1 0 1;
     1 1 1];

% Desired output for each input row:
D = [0; 0; 1; 1];

Ne = 500;          % number of training epochs
max_epoch = Ne;

% Per-epoch average-error histories and initial random weights in [-1, 1].
% Both networks start from identical weights so the comparison is fair.
E1 = zeros(Ne, 1);
E2 = zeros(Ne, 1);
W11 = 2*rand(4, 3) - 1;  % Cross entropy, hidden-layer weights
W12 = 2*rand(1, 4) - 1;  % Cross entropy, output-layer weights
W21 = W11;               % Sum of squared error, hidden-layer weights
W22 = W12;               % Sum of squared error, output-layer weights

N = size(X, 1);          % number of training samples (loop invariant, hoisted)

% Training process: one weight update per epoch per network, then measure the
% squared output error of each network over the whole training set.
for epoch = 1:max_epoch
    [W11, W12] = BackpropCE(W11, W12, X, D);
    [W21, W22] = BackpropXOR(W21, W22, X, D);

    es1 = 0;
    es2 = 0;
    for k = 1:N
        x = X(k, :)';
        d = D(k);

        % Forward pass through the cross-entropy-trained network.
        v1 = W11*x;
        y1 = Sigmoid(v1);
        v  = W12*y1;
        y  = Sigmoid(v);
        es1 = es1 + (d - y)^2;

        % Forward pass through the SSE-trained network.
        v1 = W21*x;
        y1 = Sigmoid(v1);
        v  = W22*y1;
        y  = Sigmoid(v);
        es2 = es2 + (d - y)^2;
    end

    % Average squared error over the training set for this epoch.
    E1(epoch) = es1/N;
    E2(epoch) = es2/N;
end

% Figure position and size
Figure_config = struct('PaperPositionMode', 'auto', ...
                       'Units', 'centimeters', ...
                       'Position', [5 9 30 20]);
CEvsSSE_figure = figure();
set(CEvsSSE_figure, Figure_config);

% Learning-curve comparison: cross entropy (solid red) vs SSE (dotted blue).
plot(1:max_epoch, E1, 'r', 1:max_epoch, E2, 'b:', 'LineWidth', 2.5)
ax = gca;
ax.YGrid = 'on';
ax.FontSize = 20;
xlabel('Epoch')
ylabel('Average of Training error')
legend('Cross Entropy', 'Sum of Squared Errors')