Skip to content

Commit

Permalink
Merge pull request #3 from Computational-Imaging-LAB/main
Browse files Browse the repository at this point in the history
v2.1
  • Loading branch information
abdullahbas authored Apr 4, 2022
2 parents acafc6e + d73e646 commit ee5b44e
Show file tree
Hide file tree
Showing 35 changed files with 665 additions and 22 deletions.
Binary file modified IRIS.mlapp
Binary file not shown.
Binary file added Scripts/htmls/giphy.gif
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
21 changes: 21 additions & 0 deletions Scripts/htmls/stat.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@


<!DOCTYPE html>
<!-- Centered loading-animation page embedded in the IRIS app UI. -->
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Loading</title>
<style>
/* Center the animation horizontally. */
img {
  display: block;
  margin-left: auto;
  margin-right: auto;
}
</style>
</head>
<body>

<!-- Fixed alt text: it previously said "Paris", a leftover from a tutorial
     snippet; the image is a loading animation (giphy.gif). -->
<img src="giphy.gif" alt="Loading animation" style="width:250px;height:120px;">

</body>
</html>
Binary file added Scripts/htmls/stat2.gif
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
21 changes: 21 additions & 0 deletions Scripts/htmls/stat2.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@


<!DOCTYPE html>
<!-- Centered statistics-animation page embedded in the IRIS app UI. -->
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Statistics</title>
<style>
/* Center the animation horizontally. */
img {
  display: block;
  margin-left: auto;
  margin-right: auto;
}
</style>
</head>
<body>

<!-- Fixed alt text: it previously said "Paris", a leftover from a tutorial
     snippet; the image is stat2.gif. Also terminated the inline style. -->
<img src="stat2.gif" alt="Statistics animation" style="width:600px;height:560px;">

</body>
</html>
29 changes: 29 additions & 0 deletions Scripts/htmls/statistical.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
<!DOCTYPE html>
<!-- One-sample t-test results page rendered inside the IRIS app.
     Structural fixes: the original nested <body> inside <head> and closed
     </head> after </body>; inline CSS used spaces/commas instead of
     semicolons; the literal "<" in "p-val<0.05" was unescaped, which can
     swallow the rest of the header cell during parsing. -->
<html lang="en">
<head>
<meta charset="utf-8">
<title>Ttest Statistical Result</title>
</head>
<body>

<div>
<h2 style="text-align:center">Ttest Statistical Result</h2>
<h3>One-sample ttest results</h3>
<table>
<tr>
<th style="text-align:center; padding:5px; line-height:3.5em"><u>Feature</u></th>
<th style="padding:0 15px 0 15px;">p-val&lt;0.05</th>
<th style="padding:0 15px 0 15px;">Hypothesis</th>
<th style="padding:0 15px 0 15px;">tstat</th>
<th style="padding:0 15px 0 15px;">Degrees of Freedom</th>
<th style="padding:0 15px 0 15px;">Standard Deviation</th>
</tr>

<!-- NOTE(review): header declares 6 columns but this sample row has only 4
     cells, and the values look like template placeholders — presumably the
     rows are rewritten programmatically by the MATLAB app; confirm. -->
<tr>
<td style="text-align:center; color:rgb(0,255,0);">10001</td>
<td style="padding:0 15px 0 15px;">Tom</td>
<td style="padding:0 15px 0 15px;">M</td>
<td style="padding:0 15px 0 15px;">30</td>
</tr>

</table>
</div>
</body>
</html>
212 changes: 212 additions & 0 deletions Scripts/models/BaggedTrees.asv
Original file line number Diff line number Diff line change
@@ -0,0 +1,212 @@

function [trainedClassifier, validationAccuracy] = CoarseKNNS(trainingData,response2,Folds,HoldOut,classt,categoricalVal)
% CoarseKNNS  Train and validate a bagged-trees ensemble classifier.
%
% [trainedClassifier, validationAccuracy] = CoarseKNNS(trainingData, ...
%     response2, Folds, HoldOut, classt, categoricalVal)
%
% Input:
%   trainingData   : table containing the predictor columns and the
%                    response column.
%   response2      : index of the response column inside trainingData.
%   Folds          : number of folds for k-fold cross-validation (used
%                    only when HoldOut <= 0).
%   HoldOut        : holdout fraction; when > 0, holdout validation is
%                    used instead of k-fold.
%   classt         : class labels to store on the output struct.
%   categoricalVal : logical flags marking categorical predictors.
%
% Output:
%   trainedClassifier  : struct with the trained model, a predictFcn
%                        handle, ROC/confusion diagnostics under .plots,
%                        and bookkeeping fields.
%   validationAccuracy : fraction of correctly classified validation rows.
%
% NOTE(review): despite the name, this trains a bagged-trees ensemble
% (fitcensemble, 'Method','Bag'), not a coarse KNN — presumably the file
% was cloned from a KNN template; confirm before renaming.
%
% Originally auto-generated by MATLAB Classification Learner (R2020a).

% --- Extract predictors and response from the input table ---------------
inputTable = trainingData;
predictorsInd = (1:size(trainingData,2));
predictorsInd(response2) = [];
predictorNames = trainingData.Properties.VariableNames(predictorsInd);
predictors = inputTable(:, predictorNames);
response = inputTable.(inputTable.Properties.VariableNames{response2});

isCategoricalPredictor = categoricalVal; %#ok<NASGU>
% Class set is computed from non-NaN labels only.
classes = unique(response(~isnan(response)));

% --- Train the ensemble on the full data set ----------------------------
template = templateTree(...
    'MaxNumSplits', 64);
classificationKNN = fitcensemble(...
    predictors, ...
    response, ...
    'Method', 'Bag', ...
    'NumLearningCycles', 30, ...
    'Learners', template, ...
    'ClassNames', classes);

% Result struct with a self-contained predict function.
predictorExtractionFcn = @(y) y(:, predictorNames);
knnPredictFcn = @(x) predict(classificationKNN, x);
trainedClassifier.predictFcn = @(x) knnPredictFcn(predictorExtractionFcn(x));

trainedClassifier.Classification = classificationKNN;
trainedClassifier.About = 'This struct is a trained model exported from Classification Learner R2020a.';
trainedClassifier.HowToPredict = sprintf('To make predictions on a new predictor column matrix, X, use: \n yfit = c.predictFcn(X) \nreplacing ''c'' with the name of the variable that is this struct, e.g. ''trainedModel''. \n \nX must contain exactly 13 columns because this model was trained using 13 predictors. \nX must contain only predictor columns in exactly the same order and format as your training \ndata. Do not include the response column or any columns you did not import into the app. \n \nFor more information, see <a href="matlab:helpview(fullfile(docroot, ''stats'', ''stats.map''), ''appclassification_exportmodeltoworkspace'')">How to predict using an exported model</a>.');

% --- Re-extract for validation, dropping NaN-labelled rows --------------
% Bug fix: the original filtered NaNs out of the response only, leaving
% predictors with more rows than response, so the cvpartition logical
% masks below mis-indexed predictors whenever NaN labels were present.
responseRaw = inputTable.(string(inputTable.Properties.VariableNames{response2}));
validRows = ~isnan(responseRaw);
predictors = inputTable(validRows, predictorNames);
response = responseRaw(validRows);

% --- Validation ---------------------------------------------------------
if HoldOut>0
    disp("Holdout method is being used with the value of '"+num2str(HoldOut)+"'")
    cvp = cvpartition(response, 'Holdout', HoldOut);
    trainingPredictors = predictors(cvp.training, :);
    trainingResponse = response(cvp.training, :);

    % Retrain on the training partition only, same hyperparameters.
    template = templateTree(...
        'MaxNumSplits', 64);
    classificationKNN = fitcensemble(...
        trainingPredictors, ...
        trainingResponse, ...
        'Method', 'Bag', ...
        'NumLearningCycles', 30, ...
        'Learners', template, ...
        'ClassNames', classes);

    % Refresh the predict function so it wraps the retrained model.
    predictorExtractionFcn = @(y) y(:, predictorNames);
    knnPredictFcn = @(x) predict(classificationKNN, x);
    trainedClassifier.predictFcn = @(x) knnPredictFcn(predictorExtractionFcn(x));

    trainedClassifier.Classification = classificationKNN;
    trainedClassifier.About = 'This struct is a trained model exported from Classification Learner R2020a.';
    trainedClassifier.HowToPredict = sprintf('To make predictions on a new predictor column matrix, X, use: \n yfit = c.predictFcn(X) \nreplacing ''c'' with the name of the variable that is this struct, e.g. ''trainedModel''. \n \nX must contain exactly 13 columns because this model was trained using 13 predictors. \nX must contain only predictor columns in exactly the same order and format as your training \ndata. Do not include the response column or any columns you did not import into the app. \n \nFor more information, see <a href="matlab:helpview(fullfile(docroot, ''stats'', ''stats.map''), ''appclassification_exportmodeltoworkspace'')">How to predict using an exported model</a>.');

    % Evaluate on the held-out partition.
    predictors = predictors(cvp.test, :);
    response = response(cvp.test, :);
    [validationPredictions, validationScores] = trainedClassifier.predictFcn(predictors); %#ok<ASGLU>
    correctPredictions = (validationPredictions == response);
    validationAccuracy = sum(correctPredictions)/length(correctPredictions);
else
    disp("K-Fold method is being used with '"+num2str(Folds)+"' folds")
    partitionedModel = crossval(trainedClassifier.Classification, 'KFold', Folds);
    % Out-of-fold predictions and accuracy.
    [validationPredictions, validationScores] = kfoldPredict(partitionedModel); %#ok<ASGLU>
    validationAccuracy = 1 - kfoldLoss(partitionedModel, 'LossFun', 'ClassifError');
end

% --- Per-class confusion values, sensitivity and specificity ------------
% NOTE(review): the i+1 cell indexing assumes class labels are the
% non-negative integers 0,1,... — confirm against callers.
ygt = response;
ypr = validationPredictions;
for i = classes'
    TP = sum((ygt==i).*(ypr==i));
    FN = sum((ygt==i).*~(ypr==i));
    FP = sum(~(ygt==i).*(ypr==i));
    TN = sum(~(ygt==i).*~(ypr==i));
    cmVals{i+1} = [TP FN FP TN]; %#ok<AGROW>

    Sens{i+1} = (TP)/(TP+FN); %#ok<AGROW>
    Specificity{i+1} = (TN)/(FP+TN); %#ok<AGROW>
end
trainedClassifier.RequiredVariables = predictorNames;

% --- ROC diagnostics ----------------------------------------------------
% perfcurve can fail (e.g. non-binary labels with positive class 1); fall
% back to -1 sentinels like the sibling model scripts so the .plots
% fields are always populated. The original wrapped everything in a
% catch-less try, silently skipping all diagnostics on failure.
try
    [X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
catch
    X = -1;
    Y = -1;
    T = -1;
    AUC = -1;
    OPTROCPT = -1;
    SUBY = -1;
    SUBYNAMES = -1;
end
trainedClassifier.plots.AUC = AUC;
trainedClassifier.plots.OPTROCPT = OPTROCPT;
trainedClassifier.plots.T = T;
trainedClassifier.plots.SUBY = SUBY;
trainedClassifier.plots.SUBYNAMES = SUBYNAMES;
trainedClassifier.plots.X = X;
trainedClassifier.plots.Y = Y;
trainedClassifier.plots.Ygt = response;
trainedClassifier.plots.Ypr = validationPredictions;
trainedClassifier.plots.sensitivity = Sens;
trainedClassifier.plots.specificity = Specificity;
trainedClassifier.plots.cmVals = cmVals;
trainedClassifier.plots.Accuracy = validationAccuracy;
trainedClassifier.classes = classt;
11 changes: 10 additions & 1 deletion Scripts/models/BaggedTrees.m
Original file line number Diff line number Diff line change
Expand Up @@ -191,8 +191,17 @@

end
trainedClassifier.RequiredVariables=predictorNames;

try
[X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
catch
X=-1;
Y=-1;
T=-1;
AUC=-1;
OPTROCPT=-1;
SUBY=-1;
SUBYNAMES=-1;
end
trainedClassifier.plots.AUC=AUC;
trainedClassifier.plots.OPTROCPT=OPTROCPT;
trainedClassifier.plots.T=T;
Expand Down
11 changes: 10 additions & 1 deletion Scripts/models/BoostedTrees.m
Original file line number Diff line number Diff line change
Expand Up @@ -189,8 +189,17 @@

end
trainedClassifier.RequiredVariables=predictorNames;

try
[X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
catch
X=-1;
Y=-1;
T=-1;
AUC=-1;
OPTROCPT=-1;
SUBY=-1;
SUBYNAMES=-1;
end
trainedClassifier.plots.AUC=AUC;
trainedClassifier.plots.OPTROCPT=OPTROCPT;
trainedClassifier.plots.T=T;
Expand Down
11 changes: 10 additions & 1 deletion Scripts/models/CoarseGaussianSVM.m
Original file line number Diff line number Diff line change
Expand Up @@ -191,8 +191,17 @@

end
trainedClassifier.RequiredVariables=predictorNames;

try
[X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
catch
X=-1;
Y=-1;
T=-1;
AUC=-1;
OPTROCPT=-1;
SUBY=-1;
SUBYNAMES=-1;
end
trainedClassifier.plots.AUC=AUC;
trainedClassifier.plots.OPTROCPT=OPTROCPT;
trainedClassifier.plots.T=T;
Expand Down
10 changes: 10 additions & 0 deletions Scripts/models/CoarseKNNS.m
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,17 @@
end
trainedClassifier.RequiredVariables=predictorNames;

try
[X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
catch
X=-1;
Y=-1;
T=-1;
AUC=-1;
OPTROCPT=-1;
SUBY=-1;
SUBYNAMES=-1;
end
trainedClassifier.plots.AUC=AUC;
trainedClassifier.plots.OPTROCPT=OPTROCPT;
trainedClassifier.plots.T=T;
Expand Down
11 changes: 10 additions & 1 deletion Scripts/models/CoarseTreee.m
Original file line number Diff line number Diff line change
Expand Up @@ -191,8 +191,17 @@

end
trainedClassifier.RequiredVariables=predictorNames;

try
[X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
catch
X=-1;
Y=-1;
T=-1;
AUC=-1;
OPTROCPT=-1;
SUBY=-1;
SUBYNAMES=-1;
end
trainedClassifier.plots.AUC=AUC;
trainedClassifier.plots.OPTROCPT=OPTROCPT;
trainedClassifier.plots.T=T;
Expand Down
11 changes: 10 additions & 1 deletion Scripts/models/CosineKNNS.m
Original file line number Diff line number Diff line change
Expand Up @@ -191,8 +191,17 @@

end
trainedClassifier.RequiredVariables=predictorNames;

try
[X,Y,T,AUC,OPTROCPT,SUBY,SUBYNAMES] = perfcurve(response,validationPredictions,1);
catch
X=-1;
Y=-1;
T=-1;
AUC=-1;
OPTROCPT=-1;
SUBY=-1;
SUBYNAMES=-1;
end
trainedClassifier.plots.AUC=AUC;
trainedClassifier.plots.OPTROCPT=OPTROCPT;
trainedClassifier.plots.T=T;
Expand Down
Loading

1 comment on commit ee5b44e

@abdullahbas
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nice!

Please sign in to comment.