## Machine Learning Online Class - Exercise 4: Neural Network Learning

# Instructions
# ------------
#
# This file contains code that helps you get started on the
# neural network exercise. You will need to complete the following
# functions in this exercise:
#
#    sigmoidGradient.m
#    randInitializeWeights.m
#    nnCostFunction.m
#
# For this exercise, you will not need to change any code in this file,
# or any other files other than those mentioned above.
#

## Initialization
import numpy as np
import matplotlib.pyplot as plt
import scipy.io as io
import ex3helper as helper3
import ex4helper as helper

## Setup the parameters you will use for this exercise
inputLayerSize = 400   # 20x20 Input Images of Digits
hiddenLayerSize = 25   # 25 hidden units
numLabels = 10         # 10 labels, from 1 to 10
                       # (note that we have mapped "0" to label 10)

## =========== Part 1: Loading and Visualizing Data =============
# We start the exercise by first loading and visualizing the dataset.
# You will be working with a dataset that contains handwritten digits.
#

# Load Training Data
print('Loading and Visualizing Data ...')
mat = io.loadmat('./data/ex4data1.mat')
X = mat['X']
y = np.squeeze(mat['y'])

m = X.shape[0]

# Randomly select 100 data points to display
perm = np.random.permutation(m)
sel = X[perm[0:100], :]

helper3.displayData(sel)

input('\n Part 1 completed. Program paused. Press enter to continue: ')
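
# For reference, a minimal sketch of what a displayData-style helper does:
# tile the selected rows of X into a square grid and render it with
# matplotlib. This is only an illustration; the implementation actually
# used above lives in ex3helper. It assumes each row is a 20x20 image
# flattened column-major (order='F'), as in the original Octave .mat data.
def displayDataSketch(sel, width=20):
    m = sel.shape[0]
    rows = cols = int(np.sqrt(m))  # e.g. 100 examples -> 10x10 grid
    grid = np.zeros((rows * width, cols * width))
    for i in range(m):
        r, c = divmod(i, cols)
        img = sel[i].reshape(width, width, order='F')
        grid[r * width:(r + 1) * width, c * width:(c + 1) * width] = img
    plt.imshow(grid, cmap='gray')
    plt.axis('off')
    plt.show()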

## ================ Part 2: Loading Parameters ================
# In this part of the exercise, we load some pre-initialized
# neural network parameters.

print('\n Loading Saved Neural Network Parameters ...')

mat = io.loadmat('./data/ex4weights.mat')

theta1 = mat['Theta1']
theta2 = mat['Theta2']

# Unroll the weight matrices into a single flat parameter vector.
# (np.array([...]) on two vectors of different lengths would build a
# ragged object array, not an unrolled vector, so use concatenate.)
nnParams = np.concatenate([theta1.flatten(), theta2.flatten()])
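
# Quick sanity check of the unrolling convention assumed here: the flat
# vector splits and reshapes back into the original matrices (Theta1 is
# 25x401 and Theta2 is 10x26 in the standard ex4weights.mat).
assert np.array_equal(nnParams[:theta1.size].reshape(theta1.shape), theta1)
assert np.array_equal(nnParams[theta1.size:].reshape(theta2.shape), theta2)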

## ================ Part 3: Compute Cost (Feedforward) ================
# To train the neural network, you should first start by implementing the
# feedforward part of the neural network that returns the cost only. You
# should complete the code in nnCostFunction.m to return the cost. After
# implementing the feedforward to compute the cost, you can verify that
# your implementation is correct by verifying that you get the same cost
# as us for the fixed debugging parameters.
#
# We suggest implementing the feedforward cost *without* regularization
# first so that it will be easier for you to debug. Later, in part 4, you
# will get to implement the regularized cost.
#
print('Feedforward Using Neural Network ...')

# Weight regularization parameter (we set this to 0 here).
lambdaVal = 0

J = helper.nnCostFunction(nnParams, X, y, lambdaVal)

print('Cost at parameters (loaded from ex4weights): {:.6f}'.format(J))
print('This value should be approx: 0.287629')

input('\n Part 2 & 3 completed. Program paused. Press enter to continue: ')
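
# For reference, a minimal sketch of the unregularized feedforward cost
# computed above. The graded implementation belongs in
# ex4helper.nnCostFunction; this sketch assumes the usual ex4 conventions:
# theta1 is (hiddenLayerSize, inputLayerSize + 1), theta2 is
# (numLabels, hiddenLayerSize + 1), and label 10 encodes the digit "0".
def feedforwardCostSketch(nnParams, X, y, inputLayerSize, hiddenLayerSize, numLabels):
    split = hiddenLayerSize * (inputLayerSize + 1)
    theta1 = nnParams[:split].reshape(hiddenLayerSize, inputLayerSize + 1)
    theta2 = nnParams[split:].reshape(numLabels, hiddenLayerSize + 1)

    m = X.shape[0]

    def sigmoid(z):
        return 1 / (1 + np.exp(-z))

    # Forward pass: prepend a bias column at each layer.
    a1 = np.column_stack([np.ones(m), X])
    a2 = np.column_stack([np.ones(m), sigmoid(a1 @ theta1.T)])
    h = sigmoid(a2 @ theta2.T)  # (m, numLabels) output activations

    # One-hot encode the labels (values 1..10, with 10 meaning "0").
    Y = np.zeros((m, numLabels))
    Y[np.arange(m), y - 1] = 1

    # Average cross-entropy over all examples and output units.
    return -np.sum(Y * np.log(h) + (1 - Y) * np.log(1 - h)) / m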

## =============== Part 4: Implement Regularization ===============
# Once your cost function implementation is correct, you should now
# continue to implement the regularization with the cost.
#

print('\n Checking Cost Function (w/ Regularization) ... ')

# Weight regularization parameter (we set this to 1 here).
lambdaVal = 1

J = helper.nnCostFunction(nnParams, X, y, lambdaVal)

print('Cost at parameters (loaded from ex4weights): {:.6f}'.format(J))
print('This value should be approx: 0.383770')

input('\n Part 4 completed. Program paused. Press enter to continue: ')
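
# For reference, a minimal sketch of the regularization term added to the
# unregularized cost: lambda / (2m) times the sum of squared weights,
# excluding each bias column (the first column of each theta). Same shape
# assumptions as the Part 3 sketch, which it reuses.
def regularizedCostSketch(nnParams, X, y, lambdaVal, inputLayerSize, hiddenLayerSize, numLabels):
    split = hiddenLayerSize * (inputLayerSize + 1)
    theta1 = nnParams[:split].reshape(hiddenLayerSize, inputLayerSize + 1)
    theta2 = nnParams[split:].reshape(numLabels, hiddenLayerSize + 1)
    m = X.shape[0]
    penalty = np.sum(theta1[:, 1:] ** 2) + np.sum(theta2[:, 1:] ** 2)
    J = feedforwardCostSketch(nnParams, X, y, inputLayerSize, hiddenLayerSize, numLabels)
    return J + lambdaVal * penalty / (2 * m)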

## ================ Part 5: Sigmoid Gradient ================
# Before you start implementing the neural network, you will first
# implement the gradient for the sigmoid function. You should complete the
# code in the sigmoidGradient.m file.
#

print('\n Evaluating sigmoid gradient...')

g = helper.sigmoidGradient(np.array([-1, -0.5, 0, 0.5, 1]))
print('Sigmoid gradient evaluated at [-1 -0.5 0 0.5 1]:')
print(g)

input('\n Part 5 completed. Program paused. Press enter to continue: ')
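
# For reference, the sigmoid derivative has the closed form
# g'(z) = sigmoid(z) * (1 - sigmoid(z)); a minimal sketch of what
# sigmoidGradient is expected to compute, elementwise over an array:
def sigmoidGradientSketch(z):
    g = 1 / (1 + np.exp(-z))
    return g * (1 - g)
# At z = 0 this evaluates to 0.25, the maximum of the gradient.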

## ================ Part 6: Initializing Parameters ================
# In this part of the exercise, you will be starting to implement a two
# layer neural network that classifies digits. You will start by
# implementing a function to initialize the weights of the neural network
# (randInitializeWeights.m)

print('\n Initializing Neural Network Parameters ...')

initialTheta1 = np.random.rand(inputLayerSize + 1, hiddenLayerSize)
initialTheta2 = np.random.rand(hiddenLayerSize + 1, numLabels)

# Unroll parameters into a single flat vector, matching Part 2
initialNNParams = np.concatenate([initialTheta1.flatten(), initialTheta2.flatten()])

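# For reference, a minimal sketch of the symmetry-breaking initialization
# that randInitializeWeights is meant to perform: uniform weights in
# [-epsilonInit, epsilonInit]. epsilonInit = 0.12 is the heuristic from the
# course notes; plain np.random.rand (used above) only yields [0, 1).
def randInitializeWeightsSketch(lIn, lOut, epsilonInit=0.12):
    return np.random.rand(lOut, lIn + 1) * 2 * epsilonInit - epsilonInit
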
## =============== Part 7: Implement Backpropagation ===============
# Once your cost matches up with ours, you should proceed to implement the
# backpropagation algorithm for the neural network. You should add to the
# code you've written in nnCostFunction.m to return the partial
# derivatives of the parameters.
#
print('\n Checking Backpropagation... ')

# Check gradients by running checkNNGradients
#helper.checkNNGradients()
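
# For reference, a minimal sketch of the numerical check that a
# checkNNGradients-style routine performs: compare the analytic gradient
# against a centered finite difference of the cost. costFunc is assumed to
# map a flat parameter vector to a scalar cost.
def numericalGradientSketch(costFunc, theta, eps=1e-4):
    numgrad = np.zeros_like(theta)
    for i in range(theta.size):
        step = np.zeros_like(theta)
        step[i] = eps
        # (J(theta + eps*e_i) - J(theta - eps*e_i)) / (2*eps)
        numgrad[i] = (costFunc(theta + step) - costFunc(theta - step)) / (2 * eps)
    return numgrad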

#input('\nPart 6 & 7 completed. Program paused. Press enter to continue: ')
0 commit comments