From 562c929cd8b75d08d8ca368e0400c70188cd35a4 Mon Sep 17 00:00:00 2001 From: Navan Chauhan Date: Sat, 4 Jan 2020 15:33:34 +0530 Subject: Publish deploy 2020-01-04 15:33 --- tutorials/custom-image-classifier-keras-tensorflow/index.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'tutorials/custom-image-classifier-keras-tensorflow') diff --git a/tutorials/custom-image-classifier-keras-tensorflow/index.html b/tutorials/custom-image-classifier-keras-tensorflow/index.html index 9ea32fe..c3518fd 100644 --- a/tutorials/custom-image-classifier-keras-tensorflow/index.html +++ b/tutorials/custom-image-classifier-keras-tensorflow/index.html @@ -1,4 +1,4 @@ -Creating a Custom Image Classifier using Tensorflow 2.x and Keras for Detecting Malaria | Navan Chauhan

Creating a Custom Image Classifier using Tensorflow 2.x and Keras for Detecting Malaria

Done during Google Code-In. Org: Tensorflow.

Imports

%tensorflow_version 2.x #This is for telling Colab that you want to use TF 2.0, ignore if running on local machine
+Creating a Custom Image Classifier using TensorFlow 2.x and Keras for Detecting Malaria | Navan Chauhan

Creating a Custom Image Classifier using TensorFlow 2.x and Keras for Detecting Malaria

Done during Google Code-In. Org: TensorFlow.

Imports

%tensorflow_version 2.x #This is for telling Colab that you want to use TF 2.0, ignore if running on local machine
 
 from PIL import Image # We use the PIL Library to resize images
 import numpy as np
@@ -58,7 +58,7 @@ model.add(layers.Dense(500,activation="relu"))
 model.add(layers.Dropout(0.2))
 model.add(layers.Dense(2,activation="softmax"))#2 represent output layer neurons
 model.summary()
-

Compiling Model

We use the adam optimiser as it is an adaptive learning rate optimization algorithm that's been designed specifically for training deep neural networks, which means it changes its learning rate automaticaly to get the best results

model.compile(optimizer="adam",
+

Compiling Model

We use the Adam optimizer as it is an adaptive learning rate optimization algorithm that's been designed specifically for training deep neural networks, which means it changes its learning rate automatically to get the best results

model.compile(optimizer="adam",
               loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])
 

Training Model

We train the model for 10 epochs on the training data and then validate it using the testing data

history = model.fit(X_train,y_train, epochs=10, validation_data=(X_test,y_test))
-- 
cgit v1.2.3