Commit ee5b44e

Merge pull request #3 from Computational-Imaging-LAB/main

v2.1

2 parents: acafc6e + d73e646

35 files changed: 665 additions, 22 deletions

IRIS.mlapp — binary file changed (−115 KB); not shown.

Scripts/htmls/giphy.gif — binary image, 296 KB

Scripts/htmls/stat.html

21 additions, 0 deletions (new file)

<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
img {
  display: block;
  margin-left: auto;
  margin-right: auto;
}
</style>
</head>
<body>

<img src='giphy.gif' alt="Paris" style="width:250px;height:120px;">

</body>
</html>

Scripts/htmls/stat2.gif — binary image, 262 KB

Scripts/htmls/stat2.html

21 additions, 0 deletions (new file)

<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
img {
  display: block;
  margin-left: auto;
  margin-right: auto;
}
</style>
</head>
<body>

<img src='stat2.gif' alt="Paris" style="width:600px;height:560px">

</body>
</html>
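stat.html and stat2.html are thin wrapper pages that center a single GIF; presumably they back HTML panels inside IRIS.mlapp. One way such a page can be shown from App Designer code (an assumption, not something this commit shows) is a uihtml component:

% Hypothetical sketch: display one of the wrapper pages in an app window.
fig = uifigure('Name', 'IRIS');
h = uihtml(fig, 'Position', [10 10 620 580]);
h.HTMLSource = fullfile(pwd, 'Scripts', 'htmls', 'stat2.html');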

Scripts/htmls/statistical.html

29 additions, 0 deletions (new file)

<head>
</head>
<body>

<div>
<h2 style='text-align:center'>T-test Statistical Result</h2>
<h3>One-sample t-test results</h3>
<table>
<tr>
<th style='text-align:center; padding:5px; line-height:3.5em'><u>Feature</u></th>
<th style="padding:0 15px 0 15px;">p-val&lt;0.05</th>
<th style="padding:0 15px 0 15px;">Hypothesis</th>
<th style="padding:0 15px 0 15px;">tstat</th>
<th style="padding:0 15px 0 15px;">Degrees of Freedom</th>
<th style="padding:0 15px 0 15px;">Standard Deviation</th>
</tr>

<!-- sample row with placeholder values; the app fills in computed results -->
<tr>
<td style="text-align:center; color:rgb(0,255,0);">10001</td>
<td style="padding:0 15px 0 15px;">Tom</td>
<td style="padding:0 15px 0 15px;">M</td>
<td style="padding:0 15px 0 15px;">30</td>
</tr>

</table>
</div>
</body>
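The column headers line up with the outputs of MATLAB's one-sample ttest. As a hedged illustration (x and featureName are hypothetical stand-ins, not names from this commit), a row of real values could be generated like this:

% Hypothetical sketch: build one result row from a one-sample t-test.
% 'x' is a feature vector and 'featureName' its label; both are stand-ins.
[h, p, ~, stats] = ttest(x);   % h: reject-null flag, p: p-value; stats holds tstat, df, sd
row = sprintf(['<tr><td style="text-align:center;">%s</td><td>%.4g</td>' ...
    '<td>%d</td><td>%.3f</td><td>%d</td><td>%.3f</td></tr>'], ...
    featureName, p, h, stats.tstat, stats.df, stats.sd);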

Scripts/models/BaggedTrees.asv

212 additions, 0 deletions (new file)

function [trainedClassifier, validationAccuracy] = CoarseKNNS(trainingData,response2,Folds,HoldOut,classt,categoricalVal)
% [trainedClassifier, validationAccuracy] = trainClassifier(trainingData)
% Returns a trained classifier and its accuracy. This code recreates the
% classification model trained in the Classification Learner app. Use the
% generated code to automate training the same model with new data, or to
% learn how to programmatically train models.
%
% (Autosaved working copy: the function name and the 'classificationKNN'
% variable are leftovers from a KNN template; the model actually trained
% here is a bagged-tree ensemble.)
%
% Input:
%  trainingData: A matrix with the same number of columns and data type
%  as the matrix imported into the app.
%
% Output:
%  trainedClassifier: A struct containing the trained classifier. The
%  struct contains various fields with information about the trained
%  classifier.
%
%  trainedClassifier.predictFcn: A function to make predictions on new
%  data.
%
%  validationAccuracy: A double containing the accuracy as a fraction of
%  correctly classified observations. In the app, the History list
%  displays this overall accuracy score for each model.
%
% Use the code to train the model with new data. To retrain your
% classifier, call the function from the command line with your original
% data or new data as the input argument trainingData.
%
% For example, to retrain a classifier trained with the original data set
% T, enter:
%  [trainedClassifier, validationAccuracy] = trainClassifier(T)
%
% To make predictions with the returned 'trainedClassifier' on new data T2,
% use
%  yfit = trainedClassifier.predictFcn(T2)
%
% T2 must be a matrix containing only the predictor columns used for
% training. For details, enter:
%  trainedClassifier.HowToPredict

% Auto-generated by MATLAB on 31-May-2020 03:30:11

% Extract predictors and response
% This code processes the data into the right shape for training the
% model.
summary(trainingData)

inputTable=trainingData;
predictorsInd=(1:size(trainingData,2));
predictorsInd(response2)=[];   % drop the response column index
predictorNames = trainingData.Properties.VariableNames(predictorsInd);
predictors = inputTable(:, predictorNames);
response = inputTable.(inputTable.Properties.VariableNames{response2});

isCategoricalPredictor = categoricalVal;
classes=unique(response(~isnan(response)));

% This code specifies all the classifier options and trains the classifier.
template = templateTree(...
    'MaxNumSplits', 64);
classificationKNN = fitcensemble(...
    predictors, ...
    response, ...
    'Method', 'Bag', ...
    'NumLearningCycles', 30, ...
    'Learners', template, ...
    'ClassNames', classes);

% Create the result struct with predict function
predictorExtractionFcn = @(y) y(:, predictorNames);
knnPredictFcn = @(x) predict(classificationKNN, x);
trainedClassifier.predictFcn = @(x) knnPredictFcn(predictorExtractionFcn(x));

% Add additional fields to the result struct
trainedClassifier.Classification = classificationKNN;
trainedClassifier.About = 'This struct is a trained model exported from Classification Learner R2020a.';
trainedClassifier.HowToPredict = sprintf('To make predictions on a new predictor column matrix, X, use: \n yfit = c.predictFcn(X) \nreplacing ''c'' with the name of the variable that is this struct, e.g. ''trainedModel''. \n \nX must contain exactly 13 columns because this model was trained using 13 predictors. \nX must contain only predictor columns in exactly the same order and format as your training \ndata. Do not include the response column or any columns you did not import into the app. \n \nFor more information, see <a href="matlab:helpview(fullfile(docroot, ''stats'', ''stats.map''), ''appclassification_exportmodeltoworkspace'')">How to predict using an exported model</a>.');

% Extract predictors and response again for validation
inputTable=trainingData;
predictorsInd=(1:size(trainingData,2));
predictorsInd(response2)=[];
predictorNames = trainingData.Properties.VariableNames(predictorsInd);
predictors = inputTable(:, predictorNames);
response = inputTable.(string(inputTable.Properties.VariableNames{response2}));
response=response(~isnan(response));   % (assumes no NaNs, so rows stay aligned with predictors)

% Perform cross-validation
if HoldOut>0
    disp("Holdout method is being used with the value '"+num2str(HoldOut)+"'")
    cvp = cvpartition(response, 'Holdout', HoldOut);
    trainingPredictors = predictors(cvp.training, :);
    trainingResponse = response(cvp.training, :);
    trainingIsCategoricalPredictor = isCategoricalPredictor;

    % Retrain on the training partition only
    template = templateTree(...
        'MaxNumSplits', 64);
    classificationKNN = fitcensemble(...
        trainingPredictors, ...
        trainingResponse, ...
        'Method', 'Bag', ...
        'NumLearningCycles', 30, ...
        'Learners', template, ...
        'ClassNames', classes);

    % Create the result struct with predict function
    predictorExtractionFcn = @(y) y(:, predictorNames);
    knnPredictFcn = @(x) predict(classificationKNN, x);
    trainedClassifier.predictFcn = @(x) knnPredictFcn(predictorExtractionFcn(x));

    % Add additional fields to the result struct
    trainedClassifier.Classification = classificationKNN;
    trainedClassifier.About = 'This struct is a trained model exported from Classification Learner R2020a.';
    trainedClassifier.HowToPredict = sprintf('To make predictions on a new predictor column matrix, X, use: \n yfit = c.predictFcn(X) \nreplacing ''c'' with the name of the variable that is this struct, e.g. ''trainedModel''. \n \nX must contain exactly 13 columns because this model was trained using 13 predictors. \nX must contain only predictor columns in exactly the same order and format as your training \ndata. Do not include the response column or any columns you did not import into the app. \n \nFor more information, see <a href="matlab:helpview(fullfile(docroot, ''stats'', ''stats.map''), ''appclassification_exportmodeltoworkspace'')">How to predict using an exported model</a>.');

    % Evaluate on the held-out partition
    inputTable=trainingData;
    predictorsInd=(1:size(trainingData,2));
    predictorsInd(response2)=[];
    predictorNames = trainingData.Properties.VariableNames(predictorsInd);
    predictors = inputTable(:, predictorNames);
    response = inputTable.(string(inputTable.Properties.VariableNames{response2}));
    response=response(~isnan(response));

    predictors = predictors(cvp.test, :);
    response = response(cvp.test, :);
    [validationPredictions, validationScores] = trainedClassifier.predictFcn(predictors);
    correctPredictions = (validationPredictions == response);
    validationAccuracy = sum(correctPredictions)/length(correctPredictions);
else
    disp("K-Fold method is being used with '"+num2str(Folds)+"' folds")
    partitionedModel = crossval(trainedClassifier.Classification, 'KFold', Folds);
    % Compute validation predictions
    [validationPredictions, validationScores] = kfoldPredict(partitionedModel);
    % Compute validation accuracy
    validationAccuracy = 1 - kfoldLoss(partitionedModel, 'LossFun', 'ClassifError');
end

% Per-class confusion-matrix values, sensitivity, and specificity
% (assumes integer class labels starting at 0, hence the i+1 indexing)
ygt=response;
ypr=validationPredictions;
for i=classes'
    TP=sum((ygt==i).*(ypr==i));
    FN=sum((ygt==i).*~(ypr==i));
    FP=sum(~(ygt==i).*(ypr==i));
    TN=sum(~(ygt==i).*~(ypr==i));
    cmVals{i+1}=[TP FN FP TN];
    Sens{i+1}=(TP)/(TP+FN);
    Specificity{i+1}=(TN)/(FP+TN);
end
trainedClassifier.RequiredVariables=predictorNames;
try
    [X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
    trainedClassifier.plots.AUC=AUC;
    trainedClassifier.plots.OPTROCPT=OPTROCPT;
    trainedClassifier.plots.T=T;
    trainedClassifier.plots.SUBY=SUBY;
    trainedClassifier.plots.SUBYNAMES=SUBYNAMES;
    trainedClassifier.plots.X=X;
    trainedClassifier.plots.Y=Y;
    trainedClassifier.plots.Ygt=response;
    trainedClassifier.plots.Ypr=validationPredictions;
    trainedClassifier.plots.sensitivity=Sens;
    trainedClassifier.plots.specificity=Specificity;
    trainedClassifier.plots.cmVals=cmVals;
    trainedClassifier.plots.Accuracy=validationAccuracy;
    trainedClassifier.classes=classt;
end
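A minimal call sketch for the trainer above, with the file name and every argument value assumed rather than taken from the commit: it expects a table of predictors plus one numeric response column, the response column index, a fold count, a holdout fraction (0 selects k-fold), display labels for the classes, and a categorical-predictor mask.

% Hypothetical usage sketch; 'features.csv' and all values below are assumptions.
T = readtable('features.csv');     % predictors plus a numeric 0/1 response column
respCol = width(T);                % assume the response is the last column
catMask = false(1, width(T)-1);    % one flag per predictor; none categorical here
[model, acc] = CoarseKNNS(T, respCol, 5, 0, {'class 0','class 1'}, catMask);
yfit = model.predictFcn(T(:, model.RequiredVariables));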

Scripts/models/BaggedTrees.m

10 additions, 1 deletion

@@ -191,8 +191,17 @@
 
 end
 trainedClassifier.RequiredVariables=predictorNames;
-
+try
 [X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
+catch
+    X=-1;
+    Y=-1;
+    T=-1;
+    AUC=-1;
+    OPTROCPT=-1;
+    SUBY=-1;
+    SUBYNAMES=-1;
+end
 trainedClassifier.plots.AUC=AUC;
 trainedClassifier.plots.OPTROCPT=OPTROCPT;
 trainedClassifier.plots.T=T;
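The same ten-line guard is added to each model script below: perfcurve throws when no ROC curve can be built from the stored labels and predictions (for example when the positive class 1 is absent or the problem is not binary), so the catch branch fills every output with a -1 sentinel and the trainedClassifier.plots fields stay populated. A sketch of how a consumer might test for that sentinel (assumed code, not part of this commit):

% Hypothetical downstream check for the -1 sentinel left by the catch branch.
if isequal(trainedClassifier.plots.AUC, -1)
    warning('ROC metrics unavailable: perfcurve failed for this response.');
end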

Scripts/models/BoostedTrees.m

10 additions, 1 deletion

@@ -189,8 +189,17 @@
 
 end
 trainedClassifier.RequiredVariables=predictorNames;
-
+try
 [X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
+catch
+    X=-1;
+    Y=-1;
+    T=-1;
+    AUC=-1;
+    OPTROCPT=-1;
+    SUBY=-1;
+    SUBYNAMES=-1;
+end
 trainedClassifier.plots.AUC=AUC;
 trainedClassifier.plots.OPTROCPT=OPTROCPT;
 trainedClassifier.plots.T=T;

Scripts/models/CoarseGaussianSVM.m

10 additions, 1 deletion

@@ -191,8 +191,17 @@
 
 end
 trainedClassifier.RequiredVariables=predictorNames;
-
+try
 [X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
+catch
+    X=-1;
+    Y=-1;
+    T=-1;
+    AUC=-1;
+    OPTROCPT=-1;
+    SUBY=-1;
+    SUBYNAMES=-1;
+end
 trainedClassifier.plots.AUC=AUC;
 trainedClassifier.plots.OPTROCPT=OPTROCPT;
 trainedClassifier.plots.T=T;

Scripts/models/CoarseKNNS.m

10 additions, 0 deletions

@@ -187,7 +187,17 @@
 end
 trainedClassifier.RequiredVariables=predictorNames;
 
+try
 [X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
+catch
+    X=-1;
+    Y=-1;
+    T=-1;
+    AUC=-1;
+    OPTROCPT=-1;
+    SUBY=-1;
+    SUBYNAMES=-1;
+end
 trainedClassifier.plots.AUC=AUC;
 trainedClassifier.plots.OPTROCPT=OPTROCPT;
 trainedClassifier.plots.T=T;

Scripts/models/CoarseTreee.m

10 additions, 1 deletion

@@ -191,8 +191,17 @@
 
 end
 trainedClassifier.RequiredVariables=predictorNames;
-
+try
 [X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
+catch
+    X=-1;
+    Y=-1;
+    T=-1;
+    AUC=-1;
+    OPTROCPT=-1;
+    SUBY=-1;
+    SUBYNAMES=-1;
+end
 trainedClassifier.plots.AUC=AUC;
 trainedClassifier.plots.OPTROCPT=OPTROCPT;
 trainedClassifier.plots.T=T;

Scripts/models/CosineKNNS.m

10 additions, 1 deletion

@@ -191,8 +191,17 @@
 
 end
 trainedClassifier.RequiredVariables=predictorNames;
-
+try
 [X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
+catch
+    X=-1;
+    Y=-1;
+    T=-1;
+    AUC=-1;
+    OPTROCPT=-1;
+    SUBY=-1;
+    SUBYNAMES=-1;
+end
 trainedClassifier.plots.AUC=AUC;
 trainedClassifier.plots.OPTROCPT=OPTROCPT;
 trainedClassifier.plots.T=T;
