% MAIA / RNACreator.m
clear
clc

% The following numerical values are example settings showing how the software works
population_size = 10;             % Use at least 10 networks for the algorithm to perform properly
Generation = 100; 
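% population_size networks are trained below to seed the initial population;
% Generation is the number of generations later passed to GeneticAlgorithm.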

NeuronLayer1 = 60;
NeuronLayer2 = 30;
NeuronLayer3 = 23;

MinNeuron = 6;
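% NeuronLayer1-3 are the upper bounds and MinNeuron (plus a per-layer offset)
% the lower bound for the randomly drawn width of each hidden layer
% (see the randi calls inside the training loop below).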

numFeatures = 33;
threshold = 0.60;
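% threshold is not used directly in this script; it is forwarded to
% GeneticAlgorithm at the end.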

numClasses = 2;
classes = [0 1];

directory = '';                    % Set the directory where results will be saved
   
data = '.mat';                     % Set the path to the .mat file containing the input variables for training
inputs = (importdata(data)');

Targets = '.mat';                  % Set the path to the .mat file containing the output (target) variables for training
Targets = (importdata(Targets)');
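% The transposes assume the .mat files store each matrix as variables-by-observations,
% so that after transposing every row corresponds to one observation.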

tbl = [inputs Targets]; 
numObservations = size(tbl,1);

[idxTrain,idxval] = trainingPartitions(numObservations,[0.7 0.3]);
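% trainingPartitions is a helper function used in MathWorks deep learning examples
% (not a built-in), so it must be on the MATLAB path; it returns disjoint, randomly
% drawn index vectors covering 70% and 30% of the observations.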

Data.idxTrain = idxTrain;
tblTrain = tbl(idxTrain,:);
XTrain = tblTrain(:,1:numFeatures);  
TTrain = categorical(tblTrain(:,numFeatures+1));

Data.idxVal = idxval;    
tblVal = tbl(idxval,:);
XVal = tblVal(:,1:numFeatures);  
TVal = categorical(tblVal(:,numFeatures+1)); 
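% Data records the partition indices so the exact train/validation split is
% stored alongside each trained network (population{i}.Data below).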

population = cell(1, population_size);   % one entry (a struct) per trained network
net = cell(1, population_size);
info = cell(1, population_size);


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%Start of training
for i=1:population_size  
    fprintf('Network %d\n',i);

    % Draw a random width for each hidden layer between its lower and upper bound
    layer1 = randi([MinNeuron + 20 NeuronLayer1]);
    layer2 = randi([MinNeuron + 10 NeuronLayer2]);
    layer3 = randi([MinNeuron      NeuronLayer3]);

    layers = [
      featureInputLayer(numFeatures, Normalization="zscore")
      fullyConnectedLayer(layer1)
      fullyConnectedLayer(layer2)
      fullyConnectedLayer(layer3)
      fullyConnectedLayer(numClasses)
      sigmoidLayer];
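    % Each candidate network: z-score-normalized feature input, three fully connected
    % hidden layers with the randomly drawn widths, a fully connected output layer with
    % one unit per class, and a sigmoid output activation.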

    options = trainingOptions("adam", ...
        ExecutionEnvironment="cpu", ...
        ValidationData={XVal,TVal}, ...
        Verbose=false);

    net = trainnet(XTrain,TTrain,layers,"crossentropy",options);

    population{i}.network = net;
    population{i}.Layer1 = layer1;
    population{i}.Layer2 = layer2;
    population{i}.Layer3 = layer3;
    population{i}.Data = Data;
          
    %%Accuracy Train
    YTrain = minibatchpredict(net,XTrain);
    YTrain = onehotdecode(YTrain,classes,2);
    Accuracy_Train = mean(YTrain == TTrain);
    disp("Accuracy Train = ")
    disp(Accuracy_Train)
    population{i}.Accuracy_Train = Accuracy_Train;

    %%Accuracy Validation
    YVal = minibatchpredict(net,XVal);
    YVal = onehotdecode(YVal,classes,2);
    Accuracy_Validation = mean(YVal == TVal);
    population{i}.Accuracy_Validation = Accuracy_Validation;
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
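% Rank the initial population by training accuracy (best network first) before
% handing it to the genetic algorithm.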

for i = 1:length(population)
    for j = i+1:length(population)
        if population{i}.Accuracy_Train < population{j}.Accuracy_Train
            temp = population{i};
            population{i} = population{j};
            population{j} = temp;
        end
    end
end

clc

% Evolve the initial population with the genetic algorithm (implemented in a separate function)
[populationFinal] = GeneticAlgorithm(Generation, population, population_size, ...
    idxTrain, idxval, XTrain, TTrain, XVal, TVal, numFeatures, classes, ...
    layers, options, NeuronLayer1, NeuronLayer2, NeuronLayer3, directory, ...
    threshold, numClasses, MinNeuron);