
Male Female Classification

# %%  Importing Libraries


import numpy as np       
import pandas as pd     
import matplotlib.pyplot as plt
import os

# %%  Importing Keras Libraries

import keras.backend as K 
from keras.models import Sequential
from keras.utils import to_categorical
from keras.layers import BatchNormalization  # keras.layers.normalization was removed in newer Keras versions
from keras.layers import Dense, Convolution2D, Activation, MaxPooling2D, AveragePooling2D, Dropout, Flatten
from keras.preprocessing import image
from keras.layers import LeakyReLU

# %%   Importing sklearn libraries

from sklearn.metrics import confusion_matrix, classification_report
from sklearn.model_selection import train_test_split


# %%  Assigning the directories

men_img_path = 'men'
women_img_path = 'women'
directory = [(0, men_img_path), (1, women_img_path)]  # assign a binary label to each class; a list of (label, path) tuples

# %%  Loading the images

img_size = 300

train_images = []
labels = []

for num, _dir in directory:
    _dir = _dir + '/'

    count = 0

    for file in os.listdir(_dir):
        if count >= 1000:  # cap each class at 1,000 images
            break

        img = image.load_img(_dir + str(file), target_size=(img_size, img_size))
        img = image.img_to_array(img)
        img = img / 255  # scale pixel values to [0, 1]
        train_images.append(img)
        labels.append(num)
        count += 1
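
# %%  Rough memory footprint of the in-memory dataset -- a back-of-the-envelope
# check; the ~2 GB figure assumes both folders supplied the full 1,000 images

n_images = len(train_images)
bytes_total = n_images * img_size * img_size * 3 * 4  # float32 = 4 bytes per value
print('%d images, roughly %.1f GB' % (n_images, bytes_total / 1e9))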

# %%
print(type(train_images))
# print(train_images)
print(train_images[1].shape)
print(len(train_images))

# %%

plt.axis('off')
plt.imshow(train_images[100])    # a sample from the men folder
plt.show()

plt.axis('off')
plt.imshow(train_images[1500])   # a sample from the women folder
plt.show()
# %%  Converting images list to array

x = np.array(train_images)

# %%  Splitting

x_train, x_test, y_train, y_test = train_test_split(x, labels, test_size=0.1, random_state=100)
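
# %%  Quick shape check after the split -- with 2,000 images and test_size=0.1
# this should give 1,800 training and 200 test samples (assuming both folders
# supplied the full 1,000 images)

print(x_train.shape, x_test.shape)
print(len(y_train), len(y_test))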

# %%  Testing the visualization

plt.axis('off')
plt.imshow(x_train[1000])
plt.show()

plt.axis('off')
plt.imshow(x_test[100])   # x_test holds only ~200 samples, so index 500 would be out of range
plt.show()

# %%
plt.figure(figsize=(20, 16))

for i in range(20, 40):
    plt.subplot(4, 5, i - 19)   # a 4x5 grid, filling positions 1-20
    plt.axis('off')
    plt.imshow(x_train[i])
    plt.title(y_train[i])       # xlabel would be hidden with the axis turned off

# %%  Converting labels list to categorical

y_train_labels = to_categorical(y_train)  # one-hot encode only the training labels; y_test stays as plain integers
# to_categorical converts the integer labels into a matrix with as many columns as there are classes, with a single 1 per row.
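
# %%  A quick illustration of what to_categorical produces
# (the tiny label list below is made up for demonstration only)

print(to_categorical([0, 1, 1, 0]))
# [[1. 0.]
#  [0. 1.]
#  [0. 1.]
#  [1. 0.]]  -> one column per class, a single 1 per row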

# %%  Initializing the model along with the input shape

def build(width, height, depth, classes):
    #initialize the model along with the input shape
    model = Sequential()
    input_shape = (height, width, depth)
    chanDim = -1
    
    if K.image_data_format() == 'channels_first':
        input_shape = (depth, height, width)
        chanDim = 1
        
    # layer 1 : CONV -> RELU -> MAXPOOL
    model.add(Convolution2D(64, (3,3), padding='same', input_shape=input_shape))
    model.add(Activation('relu'))
    model.add(BatchNormalization(axis=chanDim))
    model.add(MaxPooling2D(pool_size=(3,3)))
    model.add(Dropout(0.25))
    
    # layer 2-1 : CONV -> LEAKYRELU 
    model.add(Convolution2D(128, (3,3), padding='same'))
    model.add(LeakyReLU(alpha=0.1))
    model.add(BatchNormalization(axis=chanDim))

    # layer 2-2 : CONV -> LEAKYRELU -> AVGPOOL
    model.add(Convolution2D(128, (3,3), padding='same'))
    model.add(LeakyReLU(alpha=0.1))
    model.add(BatchNormalization(axis=chanDim))
    model.add(AveragePooling2D(pool_size=(3,3)))
    model.add(Dropout(0.25))
    
    # layer 3 : CONV -> RELU -> MAXPOOL
    model.add(Convolution2D(256, (3,3), padding='same'))
    model.add(Activation('relu'))
    model.add(BatchNormalization(axis=chanDim))
    model.add(MaxPooling2D(pool_size=(3,3)))
    model.add(Dropout(0.25))
    
    # layer 4 : CONV -> LEAKYRELU -> AVGPOOL
    model.add(Convolution2D(512, (3,3), padding='same'))
    model.add(LeakyReLU(alpha=0.1))
    model.add(BatchNormalization(axis=chanDim))
    model.add(AveragePooling2D(pool_size=(3,3)))
    model.add(Dropout(0.25))
    
    # layer 5 : FLATTEN -> DENSE -> RELU
    model.add(Flatten())
    model.add(Dense(1024))
    model.add(Activation('relu'))
    model.add(BatchNormalization())
    model.add(Dropout(0.25))
    
    # layer 6 : DENSE -> RELU
    model.add(Dense(512))
    model.add(Activation('relu'))
    model.add(BatchNormalization())
    model.add(Dropout(0.25))
    
    # output layer : DENSE -> SIGMOID (softmax would work here too)
    model.add(Dense(classes))
    model.add(Activation('sigmoid'))
    
    return model
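
# %%  Tracing the spatial dimensions through the four pooling layers -- each
# pool uses stride 3 with 'valid' padding, so the size floor-divides by 3:

size = img_size
for pool in range(4):
    size = size // 3
    print('after pool %d: %dx%d' % (pool + 1, size, size))
# 300 -> 100 -> 33 -> 11 -> 3, so Flatten sees 3 * 3 * 512 = 4608 features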

# %%

model = build(img_size, img_size, 3, 2)  # depth=3 (RGB channels), classes=2
# %%

model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])

model.summary()
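
# %%  Note on the output head: with a 2-unit sigmoid layer and one-hot labels,
# binary_crossentropy scores each unit independently. The more conventional
# pairing for one-hot labels would be softmax with categorical_crossentropy --
# a sketch of that alternative (not what this post trains with):
#
#     model.add(Dense(classes))
#     model.add(Activation('softmax'))
#     model.compile(optimizer='rmsprop', loss='categorical_crossentropy',
#                   metrics=['accuracy'])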
# %%

model.fit(
    x_train,
    y_train_labels,
    batch_size=30,
    shuffle=True,
    epochs=50,
    validation_split=0.2
)
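
# %%  Evaluating on the held-out test set -- a minimal sketch that puts the
# sklearn metrics imported at the top to use (run after training finishes;
# y_test was kept as plain integer labels, which is what sklearn expects)

y_prob = model.predict(x_test)      # shape: (n_samples, 2) class scores
y_pred = np.argmax(y_prob, axis=1)  # pick the higher-scoring class

print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred, target_names=['men', 'women']))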

