Commit 0d86b1e

solves week 3 programming exercise
1 parent c254e48 commit 0d86b1e

28 files changed: +3332 −3 lines

week3/ex2/costFunction.m

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
function [J, grad] = costFunction(theta, X, y)
%COSTFUNCTION Compute cost and gradient for logistic regression
%   J = COSTFUNCTION(theta, X, y) computes the cost of using theta as the
%   parameter for logistic regression and the gradient of the cost
%   w.r.t. the parameters.

  function J = logisticRegressionCost(theta, X, y)
    estimatedResults = sigmoid(X * theta);
    trainingSamples = length(y);
    J = -(1 / trainingSamples) * (
      y' * log(estimatedResults)
      + (1 - y)' * log(1 - estimatedResults)
    );
  endfunction

  function gradient = gradientVector(theta, X, y)
    trainingExamples = length(y);
    gradient = (1 / trainingExamples) * (X' * (sigmoid(X * theta) - y));
  endfunction

  J = logisticRegressionCost(theta, X, y);
  grad = gradientVector(theta, X, y);
end

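The helpers above return both the unregularized cost and its analytic gradient, which is exactly what a gradient-aware optimizer needs. The exercise's driver script is not part of this commit, so the snippet below is only an illustrative sketch of how costFunction is typically minimized with Octave's fminunc, assuming X (already carrying an intercept column of ones) and y are in the workspace:

% Illustrative only, not part of this commit.
options = optimset('GradObj', 'on', 'MaxIter', 400);   % tell fminunc we supply the gradient
initial_theta = zeros(size(X, 2), 1);
[theta, cost] = fminunc(@(t) costFunction(t, X, y), initial_theta, options);
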
week3/ex2/costFunctionReg.m

Lines changed: 34 additions & 0 deletions
@@ -0,0 +1,34 @@
function [J, grad] = costFunctionReg(theta, X, y, lambda)
%COSTFUNCTIONREG Compute cost and gradient for logistic regression with regularization
%   J = COSTFUNCTIONREG(theta, X, y, lambda) computes the cost of using
%   theta as the parameter for regularized logistic regression and the
%   gradient of the cost w.r.t. the parameters.

  function J = logisticRegressionRegularizedCost(theta, X, y)
    estimatedResults = sigmoid(X * theta);
    trainingExamples = length(y);

    J = (- 1 / trainingExamples) * (
      y' * log(estimatedResults)
      + (1 - y)' * log(1 - estimatedResults)
    ) + (lambda / (2 * trainingExamples)) * (
      sum(theta .^ 2) - theta(1) ^ 2
    );
  endfunction

  function gradient = gradientVector(theta, X, y)
    trainingExamples = length(y);
    gradient = (1 / trainingExamples) * (X' * (sigmoid(X * theta) - y));
  endfunction

  function gradient = regularizedGradientVector(theta, X, y)
    trainingExamples = length(y);
    gradient = gradientVector(theta, X, y);
    modifiedHypothesis = (lambda / trainingExamples) * theta;
    modifiedHypothesis(1) = 0;
    gradient += modifiedHypothesis;
  endfunction

  J = logisticRegressionRegularizedCost(theta, X, y);
  grad = regularizedGradientVector(theta, X, y);
end

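costFunctionReg adds the penalty (lambda / (2m)) * sum(theta(2:end) .^ 2) to the cost and (lambda / m) * theta(j) to every gradient component except j = 1, which is why modifiedHypothesis(1) is zeroed. A quick illustrative check, not part of this commit and assuming sigmoid.m from the same exercise is on the path, is to evaluate the function twice on a small made-up dataset and confirm the intercept gradient is unaffected by lambda:

% Hypothetical smoke test; the values below are made up for illustration.
X_t = [ones(3, 1), magic(3) / 10];   % 3 examples: intercept plus 3 features
y_t = [1; 0; 1];
theta_t = [-2; -1; 1; 2];
[J0, g0] = costFunctionReg(theta_t, X_t, y_t, 0);   % no regularization
[J3, g3] = costFunctionReg(theta_t, X_t, y_t, 3);   % lambda = 3
% Expected: g3(1) equals g0(1), since theta(1) is never penalized, while
% J3 > J0 and g3(2:end) - g0(2:end) equals (3 / 3) * theta_t(2:end).
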
week3/ex2/ex2.mlx

155 KB
Binary file not shown.

week3/ex2/ex2_companion.mlx

12.3 KB
Binary file not shown.

week3/ex2/ex2data1.txt

Lines changed: 100 additions & 0 deletions
@@ -0,0 +1,100 @@
34.62365962451697,78.0246928153624,0
30.28671076822607,43.89499752400101,0
35.84740876993872,72.90219802708364,0
60.18259938620976,86.30855209546826,1
79.0327360507101,75.3443764369103,1
45.08327747668339,56.3163717815305,0
61.10666453684766,96.51142588489624,1
75.02474556738889,46.55401354116538,1
76.09878670226257,87.42056971926803,1
84.43281996120035,43.53339331072109,1
95.86155507093572,38.22527805795094,0
75.01365838958247,30.60326323428011,0
82.30705337399482,76.48196330235604,1
69.36458875970939,97.71869196188608,1
39.53833914367223,76.03681085115882,0
53.9710521485623,89.20735013750205,1
69.07014406283025,52.74046973016765,1
67.94685547711617,46.67857410673128,0
70.66150955499435,92.92713789364831,1
76.97878372747498,47.57596364975532,1
67.37202754570876,42.83843832029179,0
89.67677575072079,65.79936592745237,1
50.534788289883,48.85581152764205,0
34.21206097786789,44.20952859866288,0
77.9240914545704,68.9723599933059,1
62.27101367004632,69.95445795447587,1
80.1901807509566,44.82162893218353,1
93.114388797442,38.80067033713209,0
61.83020602312595,50.25610789244621,0
38.78580379679423,64.99568095539578,0
61.379289447425,72.80788731317097,1
85.40451939411645,57.05198397627122,1
52.10797973193984,63.12762376881715,0
52.04540476831827,69.43286012045222,1
40.23689373545111,71.16774802184875,0
54.63510555424817,52.21388588061123,0
33.91550010906887,98.86943574220611,0
64.17698887494485,80.90806058670817,1
74.78925295941542,41.57341522824434,0
34.1836400264419,75.2377203360134,0
83.90239366249155,56.30804621605327,1
51.54772026906181,46.85629026349976,0
94.44336776917852,65.56892160559052,1
82.36875375713919,40.61825515970618,0
51.04775177128865,45.82270145776001,0
62.22267576120188,52.06099194836679,0
77.19303492601364,70.45820000180959,1
97.77159928000232,86.7278223300282,1
62.07306379667647,96.76882412413983,1
91.56497449807442,88.69629254546599,1
79.94481794066932,74.16311935043758,1
99.2725269292572,60.99903099844988,1
90.54671411399852,43.39060180650027,1
34.52451385320009,60.39634245837173,0
50.2864961189907,49.80453881323059,0
49.58667721632031,59.80895099453265,0
97.64563396007767,68.86157272420604,1
32.57720016809309,95.59854761387875,0
74.24869136721598,69.82457122657193,1
71.79646205863379,78.45356224515052,1
75.3956114656803,85.75993667331619,1
35.28611281526193,47.02051394723416,0
56.25381749711624,39.26147251058019,0
30.05882244669796,49.59297386723685,0
44.66826172480893,66.45008614558913,0
66.56089447242954,41.09209807936973,0
40.45755098375164,97.53518548909936,1
49.07256321908844,51.88321182073966,0
80.27957401466998,92.11606081344084,1
66.74671856944039,60.99139402740988,1
32.72283304060323,43.30717306430063,0
64.0393204150601,78.03168802018232,1
72.34649422579923,96.22759296761404,1
60.45788573918959,73.09499809758037,1
58.84095621726802,75.85844831279042,1
99.82785779692128,72.36925193383885,1
47.26426910848174,88.47586499559782,1
50.45815980285988,75.80985952982456,1
60.45555629271532,42.50840943572217,0
82.22666157785568,42.71987853716458,0
88.9138964166533,69.80378889835472,1
94.83450672430196,45.69430680250754,1
67.31925746917527,66.58935317747915,1
57.23870631569862,59.51428198012956,1
80.36675600171273,90.96014789746954,1
68.46852178591112,85.59430710452014,1
42.0754545384731,78.84478600148043,0
75.47770200533905,90.42453899753964,1
78.63542434898018,96.64742716885644,1
52.34800398794107,60.76950525602592,0
94.09433112516793,77.15910509073893,1
90.44855097096364,87.50879176484702,1
55.48216114069585,35.57070347228866,0
74.49269241843041,84.84513684930135,1
89.84580670720979,45.35828361091658,1
83.48916274498238,48.38028579728175,1
42.2617008099817,87.10385094025457,1
99.31500880510394,68.77540947206617,1
55.34001756003703,64.9319380069486,1
74.77589300092767,89.52981289513276,1

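Each row of ex2data1.txt holds two numeric features followed by a 0/1 label, and ex2data2.txt below uses the same comma-separated layout. The loading code is not part of this commit; a minimal illustrative way to read such a file into the X and y expected by costFunction is:

% Illustrative sketch, not part of this commit.
data = load('ex2data1.txt');      % 100-by-3 numeric matrix
X = data(:, 1:2);                 % two feature columns
y = data(:, 3);                   % 0/1 label column
X = [ones(size(X, 1), 1), X];     % prepend the intercept column before calling costFunction
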
week3/ex2/ex2data2.txt

Lines changed: 118 additions & 0 deletions
@@ -0,0 +1,118 @@
0.051267,0.69956,1
-0.092742,0.68494,1
-0.21371,0.69225,1
-0.375,0.50219,1
-0.51325,0.46564,1
-0.52477,0.2098,1
-0.39804,0.034357,1
-0.30588,-0.19225,1
0.016705,-0.40424,1
0.13191,-0.51389,1
0.38537,-0.56506,1
0.52938,-0.5212,1
0.63882,-0.24342,1
0.73675,-0.18494,1
0.54666,0.48757,1
0.322,0.5826,1
0.16647,0.53874,1
-0.046659,0.81652,1
-0.17339,0.69956,1
-0.47869,0.63377,1
-0.60541,0.59722,1
-0.62846,0.33406,1
-0.59389,0.005117,1
-0.42108,-0.27266,1
-0.11578,-0.39693,1
0.20104,-0.60161,1
0.46601,-0.53582,1
0.67339,-0.53582,1
-0.13882,0.54605,1
-0.29435,0.77997,1
-0.26555,0.96272,1
-0.16187,0.8019,1
-0.17339,0.64839,1
-0.28283,0.47295,1
-0.36348,0.31213,1
-0.30012,0.027047,1
-0.23675,-0.21418,1
-0.06394,-0.18494,1
0.062788,-0.16301,1
0.22984,-0.41155,1
0.2932,-0.2288,1
0.48329,-0.18494,1
0.64459,-0.14108,1
0.46025,0.012427,1
0.6273,0.15863,1
0.57546,0.26827,1
0.72523,0.44371,1
0.22408,0.52412,1
0.44297,0.67032,1
0.322,0.69225,1
0.13767,0.57529,1
-0.0063364,0.39985,1
-0.092742,0.55336,1
-0.20795,0.35599,1
-0.20795,0.17325,1
-0.43836,0.21711,1
-0.21947,-0.016813,1
-0.13882,-0.27266,1
0.18376,0.93348,0
0.22408,0.77997,0
0.29896,0.61915,0
0.50634,0.75804,0
0.61578,0.7288,0
0.60426,0.59722,0
0.76555,0.50219,0
0.92684,0.3633,0
0.82316,0.27558,0
0.96141,0.085526,0
0.93836,0.012427,0
0.86348,-0.082602,0
0.89804,-0.20687,0
0.85196,-0.36769,0
0.82892,-0.5212,0
0.79435,-0.55775,0
0.59274,-0.7405,0
0.51786,-0.5943,0
0.46601,-0.41886,0
0.35081,-0.57968,0
0.28744,-0.76974,0
0.085829,-0.75512,0
0.14919,-0.57968,0
-0.13306,-0.4481,0
-0.40956,-0.41155,0
-0.39228,-0.25804,0
-0.74366,-0.25804,0
-0.69758,0.041667,0
-0.75518,0.2902,0
-0.69758,0.68494,0
-0.4038,0.70687,0
-0.38076,0.91886,0
-0.50749,0.90424,0
-0.54781,0.70687,0
0.10311,0.77997,0
0.057028,0.91886,0
-0.10426,0.99196,0
-0.081221,1.1089,0
0.28744,1.087,0
0.39689,0.82383,0
0.63882,0.88962,0
0.82316,0.66301,0
0.67339,0.64108,0
1.0709,0.10015,0
-0.046659,-0.57968,0
-0.23675,-0.63816,0
-0.15035,-0.36769,0
-0.49021,-0.3019,0
-0.46717,-0.13377,0
-0.28859,-0.060673,0
-0.61118,-0.067982,0
-0.66302,-0.21418,0
-0.59965,-0.41886,0
-0.72638,-0.082602,0
-0.83007,0.31213,0
-0.72062,0.53874,0
-0.59389,0.49488,0
-0.48445,0.99927,0
-0.0063364,0.99927,0
0.63265,-0.030612,0

week3/ex2/lib/jsonlab/AUTHORS.txt

Lines changed: 41 additions & 0 deletions
@@ -0,0 +1,41 @@
The author of "jsonlab" toolbox is Qianqian Fang. Qianqian
is currently an Assistant Professor at Massachusetts General Hospital,
Harvard Medical School.

Address: Martinos Center for Biomedical Imaging,
         Massachusetts General Hospital,
         Harvard Medical School
         Bldg 149, 13th St, Charlestown, MA 02129, USA
URL: http://nmr.mgh.harvard.edu/~fangq/
Email: <fangq at nmr.mgh.harvard.edu> or <fangqq at gmail.com>


The script loadjson.m was built upon previous works by

- Nedialko Krouchev: http://www.mathworks.com/matlabcentral/fileexchange/25713
  date: 2009/11/02
- François Glineur: http://www.mathworks.com/matlabcentral/fileexchange/23393
  date: 2009/03/22
- Joel Feenstra: http://www.mathworks.com/matlabcentral/fileexchange/20565
  date: 2008/07/03


This toolbox contains patches submitted by the following contributors:

- Blake Johnson <bjohnso at bbn.com>
  part of revision 341

- Niclas Borlin <Niclas.Borlin at cs.umu.se>
  various fixes in revision 394, including
  - loadjson crashes for all-zero sparse matrix.
  - loadjson crashes for empty sparse matrix.
  - Non-zero size of 0-by-N and N-by-0 empty matrices is lost after savejson/loadjson.
  - loadjson crashes for sparse real column vector.
  - loadjson crashes for sparse complex column vector.
  - Data is corrupted by savejson for sparse real row vector.
  - savejson crashes for sparse complex row vector.

- Yul Kang <yul.kang.on at gmail.com>
  patches for svn revision 415.
  - savejson saves an empty cell array as [] instead of null
  - loadjson differentiates an empty struct from an empty array

week3/ex2/lib/jsonlab/ChangeLog.txt

Lines changed: 74 additions & 0 deletions
@@ -0,0 +1,74 @@
============================================================================

JSONlab - a toolbox to encode/decode JSON/UBJSON files in MATLAB/Octave

----------------------------------------------------------------------------

JSONlab ChangeLog (key features marked by *):

== JSONlab 1.0 (codename: Optimus - Final), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2015/01/02 polish help info for all major functions, update examples, finalize 1.0
2014/12/19 fix a bug to strictly respect NoRowBracket in savejson

== JSONlab 1.0.0-RC2 (codename: Optimus - RC2), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2014/11/22 show progress bar in loadjson ('ShowProgress')
2014/11/17 add Compact option in savejson to output compact JSON format ('Compact')
2014/11/17 add FastArrayParser in loadjson to specify fast parser applicable levels
2014/09/18 start official github mirror: https://github.com/fangq/jsonlab

== JSONlab 1.0.0-RC1 (codename: Optimus - RC1), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2014/09/17 fix several compatibility issues when running on octave versions 3.2-3.8
2014/09/17 support 2D cell and struct arrays in both savejson and saveubjson
2014/08/04 escape special characters in a JSON string
2014/02/16 fix a bug when saving ubjson files

== JSONlab 0.9.9 (codename: Optimus - beta), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2014/01/22 use binary read and write in saveubjson and loadubjson

== JSONlab 0.9.8-1 (codename: Optimus - alpha update 1), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2013/10/07 better round-trip conservation for empty arrays and structs (patch submitted by Yul Kang)

== JSONlab 0.9.8 (codename: Optimus - alpha), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2013/08/23 *universal Binary JSON (UBJSON) support, including both saveubjson and loadubjson

== JSONlab 0.9.1 (codename: Rodimus, update 1), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2012/12/18 *handling of various empty and sparse matrices (fixes submitted by Niclas Borlin)

== JSONlab 0.9.0 (codename: Rodimus), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2012/06/17 *new format for an invalid leading char, unpacking hex code in savejson
2012/06/01 support JSONP in savejson
2012/05/25 fix the empty cell bug (reported by Cyril Davin)
2012/04/05 savejson can save to a file (suggested by Patrick Rapin)

== JSONlab 0.8.1 (codename: Sentiel, Update 1), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2012/02/28 loadjson quotation mark escape bug, see http://bit.ly/yyk1nS
2012/01/25 patch to handle root-less objects, contributed by Blake Johnson

== JSONlab 0.8.0 (codename: Sentiel), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2012/01/13 *speed up loadjson by 20 fold when parsing large data arrays in matlab
2012/01/11 remove row bracket if an array has 1 element, suggested by Mykel Kochenderfer
2011/12/22 *accept sequence of 'param',value input in savejson and loadjson
2011/11/18 fix struct array bug reported by Mykel Kochenderfer

== JSONlab 0.5.1 (codename: Nexus Update 1), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2011/10/21 fix a bug in loadjson, previous code does not use any of the acceleration
2011/10/20 loadjson supports JSON collections - concatenated JSON objects

== JSONlab 0.5.0 (codename: Nexus), FangQ <fangq (at) nmr.mgh.harvard.edu> ==

2011/10/16 package and release jsonlab 0.5.0
2011/10/15 *add json demo and regression test, support cpx numbers, fix double quote bug
2011/10/11 *speed up readjson dramatically, interpret _Array* tags, show data in root level
2011/10/10 create jsonlab project, start jsonlab website, add online documentation
2011/10/07 *speed up savejson by 25x using sprintf instead of mat2str, add options support
2011/10/06 *savejson works for structs, cells and arrays
2011/09/09 derive loadjson from JSON parser from MATLAB Central, draft savejson.m
