2017-05-28 06:53:01 +09:00
parent e04668ffb0
commit 3d6575bc2a
5 changed files with 51 additions and 24 deletions

@@ -39,6 +39,21 @@ Theta2_grad = zeros(size(Theta2));
% cost function computation is correct by verifying the cost
% computed in ex4.m
%
% Recode the labels y (integers 1..num_labels) as one-hot row vectors
yy = zeros(m, num_labels);
for i = 1:m
  yy(i, y(i)) = 1;
endfor
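As an aside, Octave (unlike MATLAB) allows indexing a temporary expression, so the same one-hot matrix can be built without the loop; a minimal loop-free sketch:

% Row y(i) of the identity matrix is the one-hot vector for label y(i)
yy = eye(num_labels)(y, :);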
% Feed-forward: add bias units, then propagate through both layers
a1 = [ones(m, 1), X];
z2 = a1 * Theta1';
a2 = [ones(m, 1), sigmoid(z2)];
z3 = a2 * Theta2';
a3 = sigmoid(z3);   % network output, h_theta(x)
hx = a3;
% Unregularized cross-entropy cost, averaged over the m examples
J = sum(sum(-yy .* log(hx) - (1 - yy) .* log(1 - hx))) / m;
% Add L2 regularization over all weights except the bias columns
J += lambda * (sum(sum(Theta1(:, 2:end).^2)) + sum(sum(Theta2(:, 2:end).^2))) / (2*m);
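For reference, these two lines implement the regularized cross-entropy cost

J(\Theta) = \frac{1}{m} \sum_{i=1}^{m} \sum_{k=1}^{K}
  \left[ -y_k^{(i)} \log\big(h_\Theta(x^{(i)})_k\big)
  - \big(1 - y_k^{(i)}\big) \log\big(1 - h_\Theta(x^{(i)})_k\big) \right]
  + \frac{\lambda}{2m} \sum_{l=1}^{2} \sum_{j} \sum_{i \ge 1} \big(\Theta^{(l)}_{j,i}\big)^2

where the regularization sum skips the first (bias) column of each Theta, exactly as the `2:end` slices do.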
% Part 2: Implement the backpropagation algorithm to compute the gradients
% Theta1_grad and Theta2_grad. You should return the partial derivatives of
% the cost function with respect to Theta1 and Theta2 in Theta1_grad and
@@ -54,6 +69,15 @@ Theta2_grad = zeros(size(Theta2));
% over the training examples if you are implementing it for the
% first time.
%
% Output-layer error
delta3 = a3 - yy;
% Propagate the error back through Theta2, scaled by the sigmoid derivative
delta2 = (delta3 * Theta2) .* [ones(m, 1), sigmoidGradient(z2)];
delta2 = delta2(:, 2:end);   % discard the bias-unit component
% Accumulate the (unregularized) gradients, averaged over the m examples
Theta2_grad += (delta3' * a2) / m;
Theta1_grad += (delta2' * a1) / m;
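A cheap way to validate these analytic gradients is a central-difference check against the cost itself, which is what the assignment's checkNNGradients does on a small network. A hedged sketch, where costFn and params are stand-in names (not from this repo) for a J-only wrapper around this function and the unrolled weights:

epsilon = 1e-4;
numgrad = zeros(size(params));
for p = 1:numel(params)
  perturb = zeros(size(params));
  perturb(p) = epsilon;
  % Two-sided difference approximates dJ/dparams(p)
  numgrad(p) = (costFn(params + perturb) - costFn(params - perturb)) / (2 * epsilon);
endfor
% numgrad should agree with [Theta1_grad(:); Theta2_grad(:)] to roughly 1e-9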
% Part 3: Implement regularization with the cost function and gradients.
%
% Hint: You can implement this around the code for
@@ -62,25 +86,11 @@ Theta2_grad = zeros(size(Theta2));
% and Theta2_grad from Part 2.
%
% Add the regularization term to every column except the bias column
Theta2_grad(:, 2:end) += lambda * Theta2(:, 2:end) / m;
Theta1_grad(:, 2:end) += lambda * Theta1(:, 2:end) / m;
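In equation form, the gradient returned for each weight matrix is

\frac{\partial J}{\partial \Theta^{(l)}_{ij}} = \frac{1}{m} \Delta^{(l)}_{ij}
  \quad \text{for } j = 0, \qquad
\frac{\partial J}{\partial \Theta^{(l)}_{ij}} = \frac{1}{m} \Delta^{(l)}_{ij}
  + \frac{\lambda}{m} \Theta^{(l)}_{ij} \quad \text{for } j \ge 1,

where \Delta^{(l)} is the delta'*a accumulator from Part 2 and j = 0 indexes the bias column.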
% -------------------------------------------------------------
% ----------------------------------------------------------
% =========================================================================