from google.colab import files

# Upload med_nod.csv from the local machine into the Colab runtime.
uploaded = files.upload()

import pandas as pd

# Load the dataset and take a quick look at its contents and shape.
dataset_med_nod = pd.read_csv('med_nod.csv')
dataset_med_nod.head()
dataset_med_nod.shape

# Features are the first seven columns; the label is the eighth column.
x = dataset_med_nod.iloc[:, 0:7].values
y = dataset_med_nod.iloc[:, 7].values
x[1, :]
x.shape, y.shape

# Split the dataset: 70% for training, 30% held out for testing.
from sklearn.model_selection import train_test_split

x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.3, random_state=0)
x_train.shape, y_train.shape, x_test.shape, y_test.shape

from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD, Adam

# A small fully connected network for binary classification:
# three tanh hidden layers and a single sigmoid output unit.
model_new = Sequential()
model_new.add(Dense(10, input_shape=(7,), activation='tanh'))
model_new.add(Dense(8, activation='tanh'))
model_new.add(Dense(6, activation='tanh'))
model_new.add(Dense(1, activation='sigmoid'))

model_new.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model_new.summary()

# Train for 100 epochs, monitoring accuracy on the held-out test split.
hist = model_new.fit(x_train, y_train,
                     validation_data=(x_test, y_test),
                     epochs=100, batch_size=100)

# Commented out IPython magic to ensure Python compatibility.
import matplotlib.pyplot as plt
# %matplotlib inline
import seaborn as sns
sns.set()

# Plot training vs. validation accuracy per epoch.
acc = hist.history['accuracy']
val = hist.history['val_accuracy']
epochs = range(1, len(acc) + 1)

plt.plot(epochs, acc, '-', label='Training accuracy')
plt.plot(epochs, val, ':', label='Validation accuracy')
plt.title('Training and Validation Accuracy')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend(loc='lower right')
plt.show()

# Evaluate on test data.
test_loss, test_acc = model_new.evaluate(x_test, y_test)
print('Test Accuracy: {}'.format(test_acc))

from sklearn.metrics import confusion_matrix

# Threshold the sigmoid outputs at 0.5 to get hard class predictions,
# then visualise the confusion matrix as a heatmap.
y_predicted = (model_new.predict(x_test) > 0.5).astype(int).ravel()
mat = confusion_matrix(y_test, y_predicted)
labels = ['class 0', 'class 1']

sns.heatmap(mat, square=True, annot=True, fmt='d', cbar=False, cmap='Blues',
            xticklabels=labels, yticklabels=labels)
plt.xlabel('Predicted label')
plt.ylabel('Actual label')
plt.show()

# Per-class precision, recall and F1 score.
from sklearn.metrics import classification_report
print(classification_report(y_test, y_predicted))