% RNFS-A-robust-nature-inspired-feature-selection-for-remote-sensing-image-classification
% evaluation/QSVM.m
%% Quadratic SVM Code for Saving Average Testing Metrics
% This script processes multiple trials of a Quadratic-kernel SVM (ECOC) model,
% computes various testing metrics, and saves the average metrics to Excel files.
% The code performs the following tasks:
% 1. Loads data from multiple .mat files containing features and labels.
% 2. Trains a Quadratic SVM (ECOC) model and performs cross-validation.
% 3. Computes validation and test accuracies as well as other metrics.
% 4. Saves individual models and metrics to separate .mat files.
% 5. Calculates average metrics across all trials.
% 6. Saves the average metrics to Excel files for further analysis.
% This script helps in evaluating model performance across different trials and
% provides a convenient way to store and review the results.

% Housekeeping: start from a clean workspace and command window.
clear;
clc;

% Location and naming scheme of the per-trial feature/label .mat files.
basePath = 'matFiles/';
filePrefix = 'Algorithm_Name_Dataset_Name_Trial_';
fileSuffix = '.mat';
numTrials = 60;

% Build the full path of every trial file up front.
fileNames = cell(1, numTrials);
for trialIdx = 1:numTrials
    fileNames{trialIdx} = fullfile(basePath, sprintf('%s%d%s', filePrefix, trialIdx, fileSuffix));
end

% Pre-allocate one column vector per metric (one entry per trial) in a
% single statement; deal() assigns the same zeros(numTrials, 1) to each.
[validationAccuracies, testAccuracies, ...
 macroPrecisionTesting, microPrecisionTesting, ...
 macroSensitivityTesting, microSensitivityTesting, ...
 macroSpecificityTesting, microSpecificityTesting, ...
 macroAccuracyTesting, microAccuracyTesting, ...
 macroFMeasureTesting, microFMeasureTesting] = deal(zeros(numTrials, 1));

% Snapshot the RNG state so cross-validation partitions can be replayed.
rngState = rng;

% Ensure the output folder exists; save() errors if the folder is missing.
if ~exist('results', 'dir')
    mkdir('results');
end

% Loop through each trial: train, cross-validate, test, and record metrics.
for i = 1:numTrials
    % Load this trial's training/testing features and labels.
    % Expected fields: trFeat, trainLabel, tsFeat, testLabel.
    data = load(fileNames{i});
    predictors = data.trFeat;
    response = categorical(data.trainLabel);
    testPredictors = data.tsFeat;
    testResponse = categorical(data.testLabel);

    % SVM with a quadratic kernel (polynomial of order 2), standardized
    % features, wrapped in a one-vs-one ECOC scheme for multiclass.
    template = templateSVM('KernelFunction', 'polynomial', 'PolynomialOrder', 2, 'KernelScale', 'auto', 'BoxConstraint', 1, 'Standardize', true);
    classificationSVM = fitcecoc(predictors, response, 'Learners', template, 'Coding', 'onevsone', 'ClassNames', unique(response));

    % 5-fold cross-validation on the training set; accuracy = 1 - error.
    partitionedModel = crossval(classificationSVM, 'KFold', 5);
    validationAccuracy = 1 - kfoldLoss(partitionedModel, 'LossFun', 'ClassifError');
    validationAccuracies(i) = validationAccuracy * 100;

    % Held-out test accuracy (percentage of correctly classified samples).
    testPredictions = predict(classificationSVM, testPredictors);
    testAccuracy = sum(testPredictions == testResponse) / numel(testResponse);
    testAccuracies(i) = testAccuracy * 100;

    % Confusion matrix and derived per-class metrics.
    % statsOfMeasure is a project-local helper; rows 5..9 of its
    % macroAVG/microAVG outputs are assumed to be precision, sensitivity,
    % specificity, accuracy, and F-measure — verify against its definition.
    testConfusionMatrix = confusionmat(testResponse, testPredictions);
    statsTest = statsOfMeasure(testConfusionMatrix, 0);

    macroPrecisionTesting(i) = statsTest.macroAVG(5);
    microPrecisionTesting(i) = statsTest.microAVG(5);
    macroSensitivityTesting(i) = statsTest.macroAVG(6);
    microSensitivityTesting(i) = statsTest.microAVG(6);
    macroSpecificityTesting(i) = statsTest.macroAVG(7);
    microSpecificityTesting(i) = statsTest.microAVG(7);
    macroAccuracyTesting(i) = statsTest.macroAVG(8);
    microAccuracyTesting(i) = statsTest.microAVG(8);
    macroFMeasureTesting(i) = statsTest.macroAVG(9);
    microFMeasureTesting(i) = statsTest.microAVG(9);

    % Persist this trial's trained and cross-validated models.
    save(fullfile('results', sprintf('model_data_%d.mat', i)), 'classificationSVM', 'partitionedModel');
end

% Average each metric across all trials into one-row summary tables.
averageMacroMetrics = table(mean(validationAccuracies), mean(macroPrecisionTesting), mean(macroSensitivityTesting), mean(macroSpecificityTesting), mean(macroAccuracyTesting), mean(macroFMeasureTesting), 'VariableNames', {'AvgValAcc', 'AvgMacroPrec', 'AvgMacroSens', 'AvgMacroSpec', 'AvgMacroAcc', 'AvgMacroF1'});
averageMicroMetrics = table(mean(validationAccuracies), mean(microPrecisionTesting), mean(microSensitivityTesting), mean(microSpecificityTesting), mean(microAccuracyTesting), mean(microFMeasureTesting), 'VariableNames', {'AvgValAcc', 'AvgMicroPrec', 'AvgMicroSens', 'AvgMicroSpec', 'AvgMicroAcc', 'AvgMicroF1'});

% Ensure the output folder exists; save()/writetable() error otherwise.
if ~exist('results', 'dir')
    mkdir('results');
end

% Save all per-trial arrays, the RNG snapshot, and the summary tables.
save(fullfile('results', 'Algorithm_Name_Dataset_Name_Quadratic_SVM_trained_models_and_metrics.mat'), ...
    'rngState', 'fileNames', 'validationAccuracies', 'testAccuracies', ...
    'macroPrecisionTesting', 'microPrecisionTesting', 'macroSensitivityTesting', ...
    'microSensitivityTesting', 'macroSpecificityTesting', 'microSpecificityTesting', ...
    'macroAccuracyTesting', 'microAccuracyTesting', 'macroFMeasureTesting', 'microFMeasureTesting', ...
    'averageMacroMetrics', 'averageMicroMetrics');

% Export the one-row summary tables to Excel for convenient review.
writetable(averageMacroMetrics, fullfile('results', 'Algorithm_Name_Dataset_Name_Quadratic_SVM_average_macro_metrics.xlsx'));
writetable(averageMicroMetrics, fullfile('results', 'Algorithm_Name_Dataset_Name_Quadratic_SVM_average_micro_metrics.xlsx'));