
Commit 524f5b1

Merge pull request manvillej#2 from manvillej/Ex2LR&Regulariz
Ex2 lr&regulariz
2 parents 6001fc3 + 211fe1e commit 524f5b1

11 files changed: +579 -4 lines

README.md (+1 -1)
@@ -1,7 +1,7 @@
 # Python-AndrewNgML
 Python implementation of Andrew Ng's ML course projects
 - Ex1 (Linear Regression) = Complete
-- Ex2 = Incomplete
+- Ex2 (Logistic Regression & Regularization) = Complete
 - Ex3 = Incomplete
 - Ex4 = Incomplete
 - Ex5 = Incomplete

__pycache__/ex2helper.cpython-36.pyc (3.56 KB)

Binary file not shown.

data/ex1data1.txt

File renamed without changes.

data/ex1data2.txt

File renamed without changes.

data/ex2data1.txt (+100)
@@ -0,0 +1,100 @@
34.62365962451697,78.0246928153624,0
30.28671076822607,43.89499752400101,0
35.84740876993872,72.90219802708364,0
60.18259938620976,86.30855209546826,1
79.0327360507101,75.3443764369103,1
45.08327747668339,56.3163717815305,0
61.10666453684766,96.51142588489624,1
75.02474556738889,46.55401354116538,1
76.09878670226257,87.42056971926803,1
84.43281996120035,43.53339331072109,1
95.86155507093572,38.22527805795094,0
75.01365838958247,30.60326323428011,0
82.30705337399482,76.48196330235604,1
69.36458875970939,97.71869196188608,1
39.53833914367223,76.03681085115882,0
53.9710521485623,89.20735013750205,1
69.07014406283025,52.74046973016765,1
67.94685547711617,46.67857410673128,0
70.66150955499435,92.92713789364831,1
76.97878372747498,47.57596364975532,1
67.37202754570876,42.83843832029179,0
89.67677575072079,65.79936592745237,1
50.534788289883,48.85581152764205,0
34.21206097786789,44.20952859866288,0
77.9240914545704,68.9723599933059,1
62.27101367004632,69.95445795447587,1
80.1901807509566,44.82162893218353,1
93.114388797442,38.80067033713209,0
61.83020602312595,50.25610789244621,0
38.78580379679423,64.99568095539578,0
61.379289447425,72.80788731317097,1
85.40451939411645,57.05198397627122,1
52.10797973193984,63.12762376881715,0
52.04540476831827,69.43286012045222,1
40.23689373545111,71.16774802184875,0
54.63510555424817,52.21388588061123,0
33.91550010906887,98.86943574220611,0
64.17698887494485,80.90806058670817,1
74.78925295941542,41.57341522824434,0
34.1836400264419,75.2377203360134,0
83.90239366249155,56.30804621605327,1
51.54772026906181,46.85629026349976,0
94.44336776917852,65.56892160559052,1
82.36875375713919,40.61825515970618,0
51.04775177128865,45.82270145776001,0
62.22267576120188,52.06099194836679,0
77.19303492601364,70.45820000180959,1
97.77159928000232,86.7278223300282,1
62.07306379667647,96.76882412413983,1
91.56497449807442,88.69629254546599,1
79.94481794066932,74.16311935043758,1
99.2725269292572,60.99903099844988,1
90.54671411399852,43.39060180650027,1
34.52451385320009,60.39634245837173,0
50.2864961189907,49.80453881323059,0
49.58667721632031,59.80895099453265,0
97.64563396007767,68.86157272420604,1
32.57720016809309,95.59854761387875,0
74.24869136721598,69.82457122657193,1
71.79646205863379,78.45356224515052,1
75.3956114656803,85.75993667331619,1
35.28611281526193,47.02051394723416,0
56.25381749711624,39.26147251058019,0
30.05882244669796,49.59297386723685,0
44.66826172480893,66.45008614558913,0
66.56089447242954,41.09209807936973,0
40.45755098375164,97.53518548909936,1
49.07256321908844,51.88321182073966,0
80.27957401466998,92.11606081344084,1
66.74671856944039,60.99139402740988,1
32.72283304060323,43.30717306430063,0
64.0393204150601,78.03168802018232,1
72.34649422579923,96.22759296761404,1
60.45788573918959,73.09499809758037,1
58.84095621726802,75.85844831279042,1
99.82785779692128,72.36925193383885,1
47.26426910848174,88.47586499559782,1
50.45815980285988,75.80985952982456,1
60.45555629271532,42.50840943572217,0
82.22666157785568,42.71987853716458,0
88.9138964166533,69.80378889835472,1
94.83450672430196,45.69430680250754,1
67.31925746917527,66.58935317747915,1
57.23870631569862,59.51428198012956,1
80.36675600171273,90.96014789746954,1
68.46852178591112,85.59430710452014,1
42.0754545384731,78.84478600148043,0
75.47770200533905,90.42453899753964,1
78.63542434898018,96.64742716885644,1
52.34800398794107,60.76950525602592,0
94.09433112516793,77.15910509073893,1
90.44855097096364,87.50879176484702,1
55.48216114069585,35.57070347228866,0
74.49269241843041,84.84513684930135,1
89.84580670720979,45.35828361091658,1
83.48916274498238,48.38028579728175,1
42.2617008099817,87.10385094025457,1
99.31500880510394,68.77540947206617,1
55.34001756003703,64.9319380069486,1
74.77589300092767,89.52981289513276,1

data/ex2data2.txt (+118)
@@ -0,0 +1,118 @@
0.051267,0.69956,1
-0.092742,0.68494,1
-0.21371,0.69225,1
-0.375,0.50219,1
-0.51325,0.46564,1
-0.52477,0.2098,1
-0.39804,0.034357,1
-0.30588,-0.19225,1
0.016705,-0.40424,1
0.13191,-0.51389,1
0.38537,-0.56506,1
0.52938,-0.5212,1
0.63882,-0.24342,1
0.73675,-0.18494,1
0.54666,0.48757,1
0.322,0.5826,1
0.16647,0.53874,1
-0.046659,0.81652,1
-0.17339,0.69956,1
-0.47869,0.63377,1
-0.60541,0.59722,1
-0.62846,0.33406,1
-0.59389,0.005117,1
-0.42108,-0.27266,1
-0.11578,-0.39693,1
0.20104,-0.60161,1
0.46601,-0.53582,1
0.67339,-0.53582,1
-0.13882,0.54605,1
-0.29435,0.77997,1
-0.26555,0.96272,1
-0.16187,0.8019,1
-0.17339,0.64839,1
-0.28283,0.47295,1
-0.36348,0.31213,1
-0.30012,0.027047,1
-0.23675,-0.21418,1
-0.06394,-0.18494,1
0.062788,-0.16301,1
0.22984,-0.41155,1
0.2932,-0.2288,1
0.48329,-0.18494,1
0.64459,-0.14108,1
0.46025,0.012427,1
0.6273,0.15863,1
0.57546,0.26827,1
0.72523,0.44371,1
0.22408,0.52412,1
0.44297,0.67032,1
0.322,0.69225,1
0.13767,0.57529,1
-0.0063364,0.39985,1
-0.092742,0.55336,1
-0.20795,0.35599,1
-0.20795,0.17325,1
-0.43836,0.21711,1
-0.21947,-0.016813,1
-0.13882,-0.27266,1
0.18376,0.93348,0
0.22408,0.77997,0
0.29896,0.61915,0
0.50634,0.75804,0
0.61578,0.7288,0
0.60426,0.59722,0
0.76555,0.50219,0
0.92684,0.3633,0
0.82316,0.27558,0
0.96141,0.085526,0
0.93836,0.012427,0
0.86348,-0.082602,0
0.89804,-0.20687,0
0.85196,-0.36769,0
0.82892,-0.5212,0
0.79435,-0.55775,0
0.59274,-0.7405,0
0.51786,-0.5943,0
0.46601,-0.41886,0
0.35081,-0.57968,0
0.28744,-0.76974,0
0.085829,-0.75512,0
0.14919,-0.57968,0
-0.13306,-0.4481,0
-0.40956,-0.41155,0
-0.39228,-0.25804,0
-0.74366,-0.25804,0
-0.69758,0.041667,0
-0.75518,0.2902,0
-0.69758,0.68494,0
-0.4038,0.70687,0
-0.38076,0.91886,0
-0.50749,0.90424,0
-0.54781,0.70687,0
0.10311,0.77997,0
0.057028,0.91886,0
-0.10426,0.99196,0
-0.081221,1.1089,0
0.28744,1.087,0
0.39689,0.82383,0
0.63882,0.88962,0
0.82316,0.66301,0
0.67339,0.64108,0
1.0709,0.10015,0
-0.046659,-0.57968,0
-0.23675,-0.63816,0
-0.15035,-0.36769,0
-0.49021,-0.3019,0
-0.46717,-0.13377,0
-0.28859,-0.060673,0
-0.61118,-0.067982,0
-0.66302,-0.21418,0
-0.59965,-0.41886,0
-0.72638,-0.082602,0
-0.83007,0.31213,0
-0.72062,0.53874,0
-0.59389,0.49488,0
-0.48445,0.99927,0
-0.0063364,0.99927,0
0.63265,-0.030612,0

ex1.py (+1 -1)
@@ -40,7 +40,7 @@
 
 ## ======================= Part 2: Plotting =======================
 print('Plotting Data ...')
-data = np.genfromtxt('ex1data1.txt', delimiter=',')
+data = np.genfromtxt('./data/ex1data1.txt', delimiter=',')
 
 x=np.array(data[:,0])
 x=np.expand_dims(x,axis=0)

ex1_multi.py (+2 -2)
@@ -36,7 +36,7 @@
 
 
 print('Loading data...')
-data = np.genfromtxt('ex1data2.txt', delimiter=',')
+data = np.genfromtxt('./data/ex1data2.txt', delimiter=',')
 x = np.array(data[:,:2])
 y = np.array(data[:,2])
 m = y.shape[0]
@@ -132,7 +132,7 @@
 #
 
 ## Load Data
-data = np.genfromtxt('ex1data2.txt', delimiter=',')
+data = np.genfromtxt('./data/ex1data2.txt', delimiter=',')
 x = np.array(data[:,:2])
 y = np.array(data[:,2])
 m = y.shape[0]

ex2.py (+127)
@@ -0,0 +1,127 @@
## Machine Learning Online Class - Exercise 2: Logistic Regression
#
# Instructions
# ------------
#
# This file contains code that helps you get started on the logistic
# regression exercise. You will need to complete the following functions
# in this exercise:
#
#   sigmoid - complete
#   costFunction - complete
#   predict - complete
#   costFunctionReg - complete
#
# For this exercise, you will not need to change any code in this file,
# or in any files other than those mentioned above.
#

## Initialization

## Load Data
# The first two columns contain the exam scores and the third column
# contains the label.
import numpy as np
import ex2helper as helper
import matplotlib.pyplot as plt

data = np.genfromtxt('./data/ex2data1.txt', delimiter=',')
y = np.array(data[:, 2])
x = np.array(data[:, 0:2])


## ==================== Part 1: Plotting ====================
# We start the exercise by first plotting the data to understand
# the problem we are working with.
[m, n] = x.shape

# Prepend a column of ones to x for the intercept term
r = x
x = np.ones((m, n + 1))
x[:, 1:] = r

print('\nPlotting data with \'o\' indicating (y = 1) examples and \'x\' indicating (y = 0) examples.')

helper.plotData(x, y, 'Exam Score 1', 'Exam Score 2')

input('\nPart 1 completed. Program paused. Press enter to continue: ')
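
ex2helper appears in this commit only as compiled bytecode (__pycache__/ex2helper.cpython-36.pyc), so plotData's source is not part of the diff. The following is a minimal sketch of what a compatible plotData could look like, assuming, as ex2.py arranges above, that column 0 of x is the intercept term and columns 1 and 2 are the exam scores; the body is an assumption, not the repository's implementation.

import matplotlib.pyplot as plt

def plotData(x, y, xlabel, ylabel):
    # Hypothetical sketch of ex2helper.plotData, not the committed code.
    # 'o' marks y = 1 examples and 'x' marks y = 0 examples, matching the
    # print statement in ex2.py above.
    pos = y == 1
    plt.plot(x[pos, 1], x[pos, 2], 'o', label='y = 1')
    plt.plot(x[~pos, 1], x[~pos, 2], 'x', label='y = 0')
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.legend()
    plt.show()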
## ============ Part 2: Compute Cost and Gradient ============
# In this part of the exercise, you will implement the cost and gradient
# for logistic regression. You need to complete the code in
# ex2helper.costFunction and ex2helper.gradient.
#
# The data matrix was set up in Part 1, with a column of ones added for
# the intercept term.

theta = np.zeros(n + 1)

cost = helper.costFunction(theta, x, y)
grad = helper.gradient(theta, x, y)

print('Cost at initial theta (zeros): {0:.3f}'.format(cost))
print('Expected cost (approx): 0.693')
print('Gradient at initial theta (zeros): ')
print(grad)
print('Expected gradients (approx):\n -0.1000\n -12.0092\n -11.2628')

# Compute and display cost and gradient with non-zero theta
test_theta = np.array([-24, 0.2, 0.2])
cost = helper.costFunction(test_theta, x, y)
grad = helper.gradient(test_theta, x, y)

print('Cost at test theta: {0:.3f}'.format(cost))
print('Expected cost (approx): 0.218')
print('Gradient at test theta: ')
print(grad)
print('Expected gradients (approx):\n 0.043\n 2.566\n 2.647')

input('\nPart 2 completed. Program paused. Press enter to continue: ')

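The calls above assume ex2helper exposes costFunction and gradient, but only the module's .pyc is in the diff. Here is a sketch of vectorized implementations consistent with the call signatures and expected outputs above, using the standard logistic-regression formulas; the bodies are assumptions.

import numpy as np

def sigmoid(z):
    # Elementwise logistic function 1 / (1 + e^(-z)).
    return 1 / (1 + np.exp(-z))

def costFunction(theta, x, y):
    # Cross-entropy cost J(theta) = -(1/m) * sum(y*log(h) + (1-y)*log(1-h)).
    m = y.shape[0]
    h = sigmoid(x @ theta)
    return -(y @ np.log(h) + (1 - y) @ np.log(1 - h)) / m

def gradient(theta, x, y):
    # Gradient of the cost: (1/m) * x.T @ (h - y).
    m = y.shape[0]
    h = sigmoid(x @ theta)
    return x.T @ (h - y) / m

At theta = zeros, h = 0.5 for every example, so the cost is -log(0.5) ≈ 0.693, matching the expected output printed above.
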
## ============= Part 3: Optimizing using scipy.optimize =============
# In this part, you will use a built-in optimizer
# (scipy.optimize.minimize, the Python counterpart of Octave's fminunc)
# to find the optimal parameters theta.

results = helper.optimize(theta, x, y)
theta = results.x
cost = results.fun

# Print theta to screen
print('Cost at theta found by scipy.optimize.minimize with TNC: {0:.3f}'.format(cost))
print('Expected cost (approx): 0.203')
print('theta:')
print(theta)
print('Expected theta (approx):')
print('[ -25.161 0.206 0.201]')

helper.plotDecisionBoundary(theta, x, y, 'Exam Score 1', 'Exam Score 2')

input('\nPart 3 completed. Program paused. Press enter to continue: ')

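helper.optimize is likewise not visible in the diff; the print statement above indicates the result comes from scipy.optimize.minimize with the TNC solver. A plausible sketch under that assumption, reusing the costFunction and gradient sketched after Part 2:

import scipy.optimize as op

def optimize(theta, x, y):
    # Hypothetical sketch of ex2helper.optimize. Returns an OptimizeResult,
    # from which ex2.py reads results.x (theta) and results.fun (final cost).
    return op.minimize(fun=costFunction, x0=theta, args=(x, y),
                       method='TNC', jac=gradient)
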
## ============== Part 4: Predict and Accuracies ==============
# After learning the parameters, you'll likely want to use the model to
# predict outcomes on unseen data. In this part, you will use the logistic
# regression model to predict the probability that a student with a score
# of 45 on exam 1 and a score of 85 on exam 2 will be admitted.
#
# Furthermore, you will compute the training set accuracy of our model.
#
# Your task is to complete the code in ex2helper.predict.

# Predict probability for a student with score 45 on exam 1
# and score 85 on exam 2
prob = helper.sigmoid(np.matmul(np.array([1, 45, 85]), theta))
print('For a student with scores 45 and 85, we predict an admission probability of ', prob)
print('Expected value: 0.775 +/- 0.002')

# Compute accuracy on our training set
p = helper.predict(theta, x)
predictions = np.zeros(p.shape)
predictions[np.where(p == y)] = 1

print('Train Accuracy: ', np.mean(predictions) * 100)
print('Expected accuracy (approx): 89.0\n')
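
helper.predict is also only present as bytecode. A minimal sketch consistent with the accuracy computation above: classify an example as admitted (1) exactly when the predicted probability is at least 0.5, which is equivalent to x @ theta >= 0. The body is an assumption.

def predict(theta, x):
    # Hypothetical sketch of ex2helper.predict: threshold the sigmoid output
    # at 0.5 to produce hard 0/1 labels comparable against y.
    return (sigmoid(x @ theta) >= 0.5).astype(float)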
