diff --git a/README.md b/README.md
index 6c4d5a4..607d47c 100644
--- a/README.md
+++ b/README.md
@@ -58,3 +58,8 @@ __Instructor__: Andrew Ng.
 - [One vs. All Multi Class Classifier](week4/machine-learning-ex3/ex3/oneVsAll.m)
 - [Predict one vs. all Multi Class Classifier](week4/machine-learning-ex3/ex3/predictOneVsAll.m)
 - [Neural Network Prediction Function](week4/machine-learning-ex3/ex3/predict.m)
+
+
+## Week 5
+### Quizzes
+- [Neural Networks: Learning](week5/neural-networks-quiz.md)
\ No newline at end of file
diff --git a/week4/machine-learning-ex3/ex3/predict.m b/week4/machine-learning-ex3/ex3/predict.m
index 670eeb9..7fc6d85 100644
--- a/week4/machine-learning-ex3/ex3/predict.m
+++ b/week4/machine-learning-ex3/ex3/predict.m
@@ -1,43 +1,15 @@
 function p = predict(Theta1, Theta2, X)
-%PREDICT Predict the label of an input given a trained neural network
-%   p = PREDICT(Theta1, Theta2, X) outputs the predicted label of X given the
-%   trained weights of a neural network (Theta1, Theta2)
-
-% Useful values
-m = size(X, 1);
-num_labels = size(Theta2, 1);
-
-% You need to return the following variables correctly
-% p = zeros(m, 1);
-
-% add x0 in x
-a1 = [ones(m, 1) X];
-a2 = sigmoid(a1 * Theta1');
-a2 = [ones(m, 1) a2];
-a3 = sigmoid(a2 * Theta2');
-[maxProbability index] = max(a3, [], 2);
-p = index;
-
-% ====================== YOUR CODE HERE ======================
-% Instructions: Complete the following code to make predictions using
-%               your learned neural network. You should set p to a
-%               vector containing labels between 1 to num_labels.
-%
-% Hint: The max function might come in useful. In particular, the max
-%       function can also return the index of the max element, for more
-%       information see 'help max'. If your examples are in rows, then, you
-%       can use max(A, [], 2) to obtain the max for each row.
-%
-
-
-
-
-
-
-
-
-
-% =========================================================================
-
-
+    %PREDICT Predict the label of an input given a trained neural network
+    %   p = PREDICT(Theta1, Theta2, X) outputs the predicted label of X given the
+    %   trained weights of a neural network (Theta1, Theta2)
+
+    % training data size
+    m = size(X, 1);
+
+    a1 = [ones(m, 1) X];
+    a2 = sigmoid(a1 * Theta1');
+    a2 = [ones(m, 1) a2];
+    a3 = sigmoid(a2 * Theta2');
+    [maxProbability, index] = max(a3, [], 2);
+    p = index;
 end
diff --git a/week4/machine-learning-ex3/ex3/token.mat b/week4/machine-learning-ex3/ex3/token.mat
index c29981c..8e37ee1 100644
--- a/week4/machine-learning-ex3/ex3/token.mat
+++ b/week4/machine-learning-ex3/ex3/token.mat
@@ -1,4 +1,4 @@
-# Created by Octave 5.2.0, Mon Jun 15 03:58:24 2020 GMT
+# Created by Octave 5.2.0, Mon Jun 15 04:01:39 2020 GMT
 # name: email
 # type: sq_string
 # elements: 1
diff --git a/week5/assets/quiz-1.PNG b/week5/assets/quiz-1.PNG
new file mode 100644
index 0000000..5df4724
Binary files /dev/null and b/week5/assets/quiz-1.PNG differ
diff --git a/week5/assets/quiz-2.PNG b/week5/assets/quiz-2.PNG
new file mode 100644
index 0000000..4029e41
Binary files /dev/null and b/week5/assets/quiz-2.PNG differ
diff --git a/week5/assets/quiz-3.PNG b/week5/assets/quiz-3.PNG
new file mode 100644
index 0000000..0a18b40
Binary files /dev/null and b/week5/assets/quiz-3.PNG differ
diff --git a/week5/assets/quiz-4.PNG b/week5/assets/quiz-4.PNG
new file mode 100644
index 0000000..9459756
Binary files /dev/null and b/week5/assets/quiz-4.PNG differ
diff --git a/week5/assets/quiz-5.PNG b/week5/assets/quiz-5.PNG
new file mode 100644
index 0000000..1b77774
Binary files /dev/null and b/week5/assets/quiz-5.PNG differ
diff --git a/week5/neural-networks-quiz.md b/week5/neural-networks-quiz.md
new file mode 100644
index 0000000..b0b02c9
--- /dev/null
+++ b/week5/neural-networks-quiz.md
@@ -0,0 +1,7 @@
+# Neural Networks: Learning
+
+![Question 1](assets/quiz-1.PNG)
+![Question 2](assets/quiz-2.PNG)
+![Question 3](assets/quiz-3.PNG)
+![Question 4](assets/quiz-4.PNG)
+![Question 5](assets/quiz-5.PNG)
diff --git a/week5/week5.m b/week5/week5.m
new file mode 100644
index 0000000..a4712e8
--- /dev/null
+++ b/week5/week5.m
@@ -0,0 +1,23 @@
+clc;
+clear;
+
+% Gradient Approximator (Gradient Checker)
+function gradient = approximateGradient(theta, cost)
+    n = length(theta);
+    gradient = zeros(n, 1);
+    EPSILON = 1e-4;
+    for i = 1:n
+        thetaPlus = theta;
+        thetaPlus(i) += EPSILON;
+        thetaMinus = theta;
+        thetaMinus(i) -= EPSILON;
+        gradient(i) = (cost(thetaPlus) - cost(thetaMinus)) / (2 * EPSILON); % centered difference
+    endfor
+endfunction
+
+function J = costFunction(theta)
+    J = 100 * rand(1, 1); % placeholder cost (random), used only to exercise the checker
+endfunction
+
+hypothesis = [0 ; 1 ; 2];
+disp(approximateGradient(hypothesis, @costFunction));