% Feature-selection experiment driver: shuffles a dataset, runs wrapper-based
% feature selection with a KNN evaluator over repeated trials, then saves and
% summarizes the results.
clear;
clc;
%% Dataset Paths
% Uncomment exactly ONE dataset block below.
% NOTE(review): the trials loop expects the variables `featAll` (samples x
% features) and `labelAll` (samples x 1) to exist in the workspace. The
% WHU-RS19 block defines pre-split trainFeat/testFeat instead of featAll, so
% as written it will not feed the loop — confirm before using it.
% WHU-RS19 Dataset
% trainData = load('Datasets/feat_trn_fc7_prop_ppr_0.9_RS19.mat');
% trainLabels = load('Datasets/trainLab_0.9_RS19.mat');
% testData = load('Datasets/feat_tst_fc7_prop_ppr_0.9_RS19.mat');
% testLabels = load('Datasets/testLab_0.9_RS19.mat');
% trainFeat = trainData.F_trn;
% trainLabel = trainLabels.trainLab;
% testFeat = testData.F_tst;
% testLabel = testLabels.testLab;
% RSSCN7 Dataset
% Data = load('Datasets/RSSCN.mat');
% featAll = Data.featAll;
% labelAll = Data.labelAll;
% UCM Dataset
% load('E:\Uzma\UC merced\UCMerced_LandUse\UC_Merced_extracted_features_and_labels.mat');
% featAll = [featuresTrain; featuresTest];
% labelAll = [labelsTrain; labelsTest];
%% Parameter Settings
N = 10;          % Population size for the wrapper optimizer (jWOA)
max_Iter = 100;  % Maximum optimizer iterations per trial
trials = 60;     % Number of independent shuffle/split trials
% Ensure the output directory used by the per-trial and summary save() calls
% exists; without this, save() errors on the first trial if the folder is
% missing from the working directory.
if ~exist('matFiles/Dataset_Name', 'dir')
    mkdir('matFiles/Dataset_Name');
end
% Preallocate per-trial result storage
SfList = cell(trials, 1);             % selected feature indices per trial
vaccList = zeros(trials, 1);          % validation accuracy per trial
taccList = zeros(trials, 1);          % test accuracy per trial
NfList = zeros(trials, 1);            % number of selected features per trial
curveList = zeros(trials, max_Iter);  % optimizer convergence curve per trial
stat = cell(trials, 1);               % per-trial test statistics from jKNNTest
%% Trials Loop
% Each trial: reshuffle, resplit 80/20, select features on the training
% portion, then score on validation (hold-out) and test splits.
for t = 1:trials
    fprintf('\n Trial # %g', t);
    % Randomly permute the samples before splitting
    nSamples = size(featAll, 1);
    order = randperm(nSamples);
    X = featAll(order, :);
    y = labelAll(order, :);
    % 80% training / 20% testing split
    nTrain = round(0.8 * nSamples);
    trainFeat = X(1:nTrain, :);
    trainLabel = y(1:nTrain, :);
    testFeat = X(nTrain+1:end, :);
    testLabel = y(nTrain+1:end, :);
    % Hold out 20% of the training portion for validation
    HO = cvpartition(trainLabel, 'HoldOut', 0.20);
    % Wrapper feature selection via the Whale Optimization Algorithm (jWOA)
    [sFeat, Sf, Nf, curve] = jWOA(trainFeat, trainLabel, N, max_Iter, HO);
    % Validation accuracy on the held-out split
    vAcc = jKNN(sFeat, trainLabel, HO);
    % Test accuracy on the untouched 20% split, restricted to selected features
    [tAcc, stat{t}] = jKNNTest(sFeat, trainLabel, testFeat(:, Sf), testLabel);
    % Record this trial's results
    vaccList(t) = vAcc;
    taccList(t) = tAcc;
    SfList{t} = Sf;
    NfList(t) = Nf;
    curveList(t, :) = curve;
    fprintf('\n Validation Accuracy: %g %%', vAcc);
    fprintf('\n Test Accuracy: %g %%', tAcc);
    % Persist the reduced feature matrices and scores for this trial
    filename = sprintf('matFiles/Dataset_Name/Trial_%d.mat', t);
    trFeat = trainFeat(:, Sf);
    tsFeat = testFeat(:, Sf);
    save(filename, 'trFeat', 'trainLabel', 'tsFeat', 'testLabel', 'Nf', 'Sf', 'curve', 'vAcc', 'tAcc');
end
%% Final Results Summary
% Aggregate statistics across all trials.
minvAcc = min(vaccList);
maxvAcc = max(vaccList);
avgAcc = mean(vaccList);
mintAcc = min(taccList);
maxtAcc = max(taccList);
avgtAcc = mean(taccList);  % average test accuracy (was computed for validation only)
avgNfList = mean(NfList);
fprintf('\n\n------------------------------');
fprintf('\n\t\tFinal Results');
fprintf('\n------------------------------');
fprintf('\n\n\tNum of Trials: %g ', trials);
fprintf('\n\tMaximum Iterations: %g ', max_Iter);
fprintf('\n\tAvg Selected Features: %g ', avgNfList);
fprintf('\n\tAvg Validation Accuracy: %g %%', avgAcc);
fprintf('\n\tMin Validation Accuracy: %g ', minvAcc);
fprintf('\n\tMax Validation Accuracy: %g ', maxvAcc);
fprintf('\n\tAvg Test Accuracy: %g %%', avgtAcc);
fprintf('\n\tMin Test Accuracy: %g ', mintAcc);
fprintf('\n\tMax Test Accuracy: %g ', maxtAcc);
fprintf('\n\n------------------------------\n');
% Save summary results, including the test-accuracy aggregates (previously
% only the raw taccList was saved; min/max/avg test accuracy were dropped).
save('matFiles/Dataset_Name/Final_results.mat', 'trials', 'max_Iter', 'N', 'vaccList', 'SfList', 'NfList', 'curveList', 'avgAcc', 'minvAcc', 'maxvAcc', 'avgNfList', 'taccList', 'mintAcc', 'maxtAcc', 'avgtAcc', 'stat');
%% Generate Colors for Plotting
% Use evenly spaced hues instead of rand(): rand colors are nondeterministic
% and can be near-white or nearly identical across trials.
numColors = trials;
colors = hsv(numColors);
%% Plot Optimization Curves
figure;
hold on;
for trial = 1:trials
    plot(1:max_Iter, curveList(trial, :), 'LineWidth', 1, 'Color', colors(trial, :), 'DisplayName', ['Trial ' num2str(trial)]);
end
hold off;
xlabel('Iteration');
ylabel('Best Fitness');
title('Optimization Convergence Curves');
% DisplayName was set on every line but never shown: a legend call is required.
legend('show', 'Location', 'bestoutside');
grid on;