# classificationRice / DNN_code_jjos.txt
# DNN_code_jjos.txt
# Raw
##################Autor Jairo Orozco #################################
import matplotlib.pyplot as plt

import numpy as np
import earthpy.plot as ep
import seaborn as sns
import earthpy.spatial as es
import scipy.io as spio
import plotly.graph_objects as go
import plotly.express as px

from scipy.io import loadmat

import pandas as pd

from sklearn.model_selection import train_test_split
from sklearn.metrics import (accuracy_score,
                             confusion_matrix, classification_report)

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import Input, Dense, Dropout, BatchNormalization
from tensorflow.keras.models import Sequential
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tqdm import tqdm
import earthpy.plot as ep 

# Alternative mosaics (other acquisition keys); uncomment one img/gt pair
# to switch datasets.
#img = spio.loadmat('mosaic_kit_168_vclass.mat')['mosaic_kit_168_vclass']
#gt=spio.loadmat('mosaic_kit_168_vclass_gt.mat')['mosaic_kit_168_vclass_gt']



#img = spio.loadmat('mosaic_kit_180_vclass.mat')['mosaic_kit_180_vclass']
#gt=spio.loadmat('mosaic_kit_180_vclass_gt.mat')['mosaic_kit_180_vclass_gt']

# Active dataset: hyperspectral cube `img` and its ground-truth label map `gt`.
img = spio.loadmat('mosaic_kit_204_vclass.mat')['mosaic_kit_204_vclass']
gt=spio.loadmat('mosaic_kit_204_vclass_gt.mat')['mosaic_kit_204_vclass_gt']

#img = spio.loadmat('mosaic_kit_216_vclass.mat')['mosaic_kit_216_vclass']
#gt=spio.loadmat('mosaic_kit_216_vclass_gt.mat')['mosaic_kit_216_vclass_gt']

#img = spio.loadmat('mosaic_kit_228_vclass.mat')['mosaic_kit_228_vclass']
#gt=spio.loadmat('mosaic_kit_228_vclass_gt.mat')['mosaic_kit_228_vclass_gt']

#img = spio.loadmat('mosaic_kit_240_vclass.mat')['mosaic_kit_240_vclass']
#gt=spio.loadmat('mosaic_kit_240_vclass_gt.mat')['mosaic_kit_240_vclass_gt']

# Disabled label swap (2<->3 via temporaries 7/9); currently a no-op string
# literal, kept for reference.
'''gt[gt==2]=7
gt[gt==3]=9
gt[gt==7]=3
gt[gt==9]=2'''

print(gt.shape)

######################################################ground_truth###################
'''f,ax=plt.subplots(figsize=(12,10))
im=ax.imshow(gt, cmap='nipy_spectral')
ep.draw_legend(im,titles=['background','HDT','HDNT1','HDNT2','HNT','Control'])
plt.tight_layout()

plt.show()
jj'''
###################################################################################

# Flatten the hyperspectral cube to one row per pixel and one column per
# spectral band, then attach the ground-truth label as the final column.
n_pixels = img.shape[0] * img.shape[1]
df = pd.DataFrame(img.reshape(n_pixels, -1))
df.columns = [f'band{b}' for b in range(1, df.shape[-1] + 1)]
df['class'] = gt.ravel()

############################################################ splitting data set #################################################

# Keep only labelled pixels (class != 0): spectral features in X, labels in y_1.
labelled = df[df['class'] != 0]
X = pd.DataFrame(labelled.iloc[:, :-1].values)
y_1 = labelled.iloc[:, -1].values

print(X.shape)
# NOTE(review): despite the name, this holds the array of distinct labels,
# not a count (len() of it would be the class count).
num_classes = np.unique(y_1)

############################################################ start_ t-sne #################################################
import matplotlib.patheffects as PathEffects

import seaborn as sns

# Global seaborn styling for the embedding plots.
sns.set_style('darkgrid')
sns.set_palette('muted')
sns.set_context("notebook", font_scale=1.5,
                rc={"lines.linewidth": 2.5})

# Fixed seed so the PCA+t-SNE layout below is reproducible.
RS = 123

from sklearn.manifold import TSNE

# 2-D t-SNE embedding of the labelled pixels (random init, auto learning rate).
tsne_2d = TSNE(n_components=2, learning_rate='auto', init='random', perplexity=4)
X_embedded = np.array(tsne_2d.fit_transform(X))


def fashion_scatter(x, colors):
    """Scatter-plot a 2-D embedding, coloured and annotated per class label.

    Parameters
    ----------
    x : (n, 2) array of embedding coordinates.
    colors : (n,) array of integer class labels; used both to index the
        palette and as the text drawn at each class median.

    Returns
    -------
    (figure, axes, PathCollection, list of text artists)
    """
    # Fix: iterate over the labels actually present. The original looped
    # range(len(unique)) (0..k-1), but the labels here are 1-based, so it
    # computed the median of an empty set for 0 and skipped the top class.
    labels = np.unique(colors)
    palette = np.array(sns.color_palette("hls", 6))

    # create a scatter plot.
    f = plt.figure(figsize=(8, 8))
    ax = plt.subplot(aspect='equal')
    # Fix: `np.int` was removed in NumPy 1.24 — use the builtin int.
    sc = ax.scatter(x[:,0], x[:,1], lw=0, s=40, c=palette[colors.astype(int)])
    ax.axis('off')
    ax.axis('tight')

    # Draw each label at the median of its points, outlined in white.
    txts = []
    for lab in labels:
        xtext, ytext = np.median(x[colors == lab, :], axis=0)
        txt = ax.text(xtext, ytext, str(int(lab)), fontsize=24)
        txt.set_path_effects([
            PathEffects.Stroke(linewidth=5, foreground="w"),
            PathEffects.Normal()])
        txts.append(txt)

    return f, ax, sc, txts


#fashion_scatter(X_embedded, y_1)




################################################################with pca ##############################################
from sklearn.decomposition import PCA

# Compress to 50 principal components first, then embed with t-SNE.
pca_50 = PCA(n_components=50)
pca_result_50 = pca_50.fit_transform(X)
pca_tsne = TSNE(random_state=RS).fit_transform(pca_result_50)

ax2 = plt.subplot(aspect='equal')
XX, YY = pca_tsne[:, 0], pca_tsne[:, 1]

# One colour per class label; legend suppressed.
ax3 = sns.scatterplot(
    x=XX,
    y=YY,
    hue=y_1,
    palette=sns.color_palette("hls", 5),
    data=X,
    legend=False,
    ax=ax2,
)

plt.xlabel('t-SNE1')
plt.ylabel('t-SNE2')

plt.show()

############################################################ end_t-sne #################################################

# NOTE(review): the original file had a stray bare name (`aaaaa`) here, which
# raised NameError and stopped the script after the t-SNE plots. Removed so
# the DNN training below actually runs.

# One-hot encode the non-background labels. num_classes counts every distinct
# value in gt (including background 0), so column 0 of y is always zero here.
y = tf.keras.utils.to_categorical(df[df['class']!=0].iloc[:, -1].values,
                                  num_classes= np.unique(gt).shape[0],
                                  dtype='float32')

# Stratified 70/30 train/test split over the labelled pixels.
X_train, X_test, y_train, y_test = train_test_split(X, y, train_size = 0.7, stratify = y)

print(f"Train Data: {X_train.shape}\nTest Data: {X_test.shape}")
print(y_train.shape)
############################################################ bulding the model #################################################

# Fully-connected classifier: BatchNorm front-end, then three stacks of four
# ReLU layers (128 -> 64 -> 32 units) with dropout between the stacks, and a
# softmax output sized to the number of one-hot classes.
model = Sequential(name='Indian_pines')

model.add(Input(shape=X_train[0].shape, name='Input_Layer'))
model.add(BatchNormalization(name='BatchNormalization'))

layer_no = 1
for stack, width in enumerate((128, 64, 32), start=1):
    for _ in range(4):
        model.add(Dense(units=width, activation='relu', name=f'Layer{layer_no}'))
        layer_no += 1
    if stack < 3:
        model.add(Dropout(rate=0.2, name=f'Dropout{stack}'))

model.add(Dense(units=y_train.shape[1], activation='softmax', name='Output_Layer'))

model.summary()

#######################################################Training the DNN ################################################

model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

# Callbacks: stop once val_loss has not improved for 15 epochs (restoring the
# best weights) and checkpoint the best model seen so far to disk.
es = EarlyStopping(monitor='val_loss',
                   min_delta=0,
                   patience=15,
                   verbose=1,
                   restore_best_weights=True)

checkpoint = ModelCheckpoint(filepath='rice_Model.h5',
                             monitor='val_loss',
                             mode='min',
                             save_best_only=True,
                             verbose=1)

# Train, validating on the held-out split every epoch.
history = model.fit(x=X_train,
                    y=y_train,
                    validation_data=(X_test, y_test),
                    epochs=200,
                    callbacks=[es, checkpoint])

print(history.history)

#####################################################plot training######################################################
# hist = pd.DataFrame(data= history.history)

# fig = go.Figure()

# fig.add_trace(go.Scatter(x = hist.index, y = hist.loss.values,
#                     mode='lines+markers',
#                     name='Train Loss'))

# fig.add_trace(go.Scatter(x = hist.index, y = hist.accuracy.values,
#                     mode='lines+markers',
#                     name='Train Accuracy'))

# fig.add_trace(go.Scatter(x = hist.index, y = hist.val_loss.values,
#                     mode='lines+markers', name='Test loss'))

# fig.add_trace(go.Scatter(x = hist.index, y = hist.val_accuracy.values,
#                     mode='lines+markers', name='Test Accuracy'))

# fig.show()



#################################################### prediction #######################################################

# Predicted class indices for the held-out split.
pred = np.argmax(model.predict(X_test), axis=1)

# Classification Report.
# Fix: sklearn's signature is classification_report(y_true, y_pred, ...);
# the original passed the predictions first, which swaps the reported
# precision and recall. Also dropped a stray trailing comma that created a
# pointless one-element tuple.
print(classification_report(np.argmax(y_test, 1), pred,
      target_names = ['HDT', 'HNDT1 ', 'HNDT2', 'HNT ', 
                 'Control']))

####################################################### Plot prediction ###############################################
def plot_data(data):
  """Display a 2-D class map with the nipy_spectral colormap."""
  figure, axis = plt.subplots(figsize=(12, 10))
  axis.imshow(data, cmap='nipy_spectral')
  plt.tight_layout()
  plt.show()

# Build the full-scene class map. Background pixels (class 0) stay 0.
# Fix: the original looped over every row of df and issued one
# model.predict() call per pixel, which is extremely slow; all labelled
# pixels are now predicted in a single batched call with identical output.
labels_flat = np.zeros(df.shape[0], dtype='float')
mask = df.iloc[:, -1].values != 0
if mask.any():
    labels_flat[mask] = np.argmax(model.predict(df.iloc[:, :-1].values[mask]), axis=1)

q = labels_flat.reshape(gt.shape)

plot_data(q)




################################################################ plot some bands #################################

# fig = plt.figure(figsize = (20, 12))

# ax = fig.subplots(2, 5)

# for i in range(2):
#   for j in range(5):
#     c = np.random.randint(200)
#     ax[i][j].imshow(img[:, :, c], cmap='gray')
#     ax[i][j].axis('off')
#     ax[i][j].title.set_text(f"Band - {c}")
#     c+=1

# plt.tight_layout()

# plt.show()

#########################################################Imagen de color ##########################################################

# img_color = np.moveaxis(img, -1, 0)

# ep.plot_rgb(img_color, (60, 30, 27), figsize=(12, 10)) 

# plt.show()

########################################################Confusion matrix################################################################