
Posts

Showing posts from October, 2020

Multiple classification from many directories

# %% Import necessary libraries
import numpy as np
import pandas as pd
import cv2
import matplotlib.pyplot as plt
import os
import glob

# %% Keras / TensorFlow libraries
from keras import layers
from keras.models import Model
from keras.optimizers import RMSprop, Adam, Nadam
from keras.layers import Input, BatchNormalization, Dense, Dropout, Conv2D, Flatten, GlobalAveragePooling2D, LeakyReLU
from keras.preprocessing.image import ImageDataGenerator, img_to_array, load_img

# %% Path
path = r'G:/Machine Learning/Project/Lego Mnifigures Classification/dataset'
open_dir = os....
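
The excerpt cuts off at the directory walk. As a minimal sketch of the same idea, assuming the dataset folder contains one subdirectory per class (the path below is a placeholder and the layer sizes are illustrative, not the post's exact model), Keras' ImageDataGenerator can read the class labels straight from the folder names:

from keras.models import Model
from keras.layers import Input, Conv2D, MaxPooling2D, GlobalAveragePooling2D, Dense
from keras.preprocessing.image import ImageDataGenerator

# Placeholder path: point this at a folder with one subdirectory per class
data_dir = 'dataset'

# Rescale pixels and hold back 20% of the images for validation
datagen = ImageDataGenerator(rescale=1.0 / 255, validation_split=0.2)
train_gen = datagen.flow_from_directory(data_dir, target_size=(96, 96), batch_size=32,
                                        class_mode='categorical', subset='training')
val_gen = datagen.flow_from_directory(data_dir, target_size=(96, 96), batch_size=32,
                                      class_mode='categorical', subset='validation')

# Small functional-API CNN ending in one softmax unit per discovered class
inputs = Input(shape=(96, 96, 3))
x = Conv2D(32, (3, 3), activation='relu')(inputs)
x = MaxPooling2D((2, 2))(x)
x = Conv2D(64, (3, 3), activation='relu')(x)
x = GlobalAveragePooling2D()(x)
outputs = Dense(train_gen.num_classes, activation='softmax')(x)

model = Model(inputs, outputs)
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
model.fit(train_gen, validation_data=val_gen, epochs=10)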

Digit Recognition

You can load the digits dataset directly from the scikit-learn library, where it comes built in, so you don't need to download it from anywhere else. Note: if you use Visual Studio Code, I recommend switching your color theme to Monokai, because it highlights a few extra, important keywords and uses more readable colors than other themes.

# %% Import libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import random

# %% Load dataset
from sklearn.datasets import load_digits
dataset = load_digits()
dataset.keys()

output: dict_keys(['data', 'target', 'target_names', 'images', 'DESCR'])

Check the keys to see what you can print directly; here DESCR is a description of the dataset.

# %% Divide the dataset into inputs and targets
inputs = dataset.data
target = dataset.target
# %% ...
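
To round out the truncated excerpt, here is a minimal end-to-end sketch of the same workflow; logistic regression is my choice of classifier here, not necessarily the model the post trains:

from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score, confusion_matrix

# Load the built-in 8x8 digit images and split them into train and test sets
dataset = load_digits()
inputs, target = dataset.data, dataset.target
x_train, x_test, y_train, y_test = train_test_split(inputs, target, test_size=0.2, random_state=0)

# Fit a simple classifier and evaluate it on the held-out digits
model = LogisticRegression(max_iter=5000)
model.fit(x_train, y_train)
predictions = model.predict(x_test)
print(accuracy_score(y_test, predictions))
print(confusion_matrix(y_test, predictions))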

Male and female classification

# %% Import libraries
import numpy as np
import matplotlib.pyplot as plt
import cv2 as cv
import os
import glob
import random
from tensorflow.keras import backend as K
from tensorflow.keras.models import Sequential
from tensorflow.keras.preprocessing.image import ImageDataGenerator, img_to_array
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.layers import Dense, Conv2D, BatchNormalization, Flatten, MaxPooling2D, Dropout, Activation
from tensorflow.keras.utils import to_categorical, plot_model

# %% Initial parameters
epochs = 100
learning_rate = 1e-3
batch_size = 64
input_dim = (96, 96, 3)

# %% Load image file ...
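
The excerpt stops right after the hyperparameters. Here is a sketch of how a CNN could be built around those same values; the layer stack is an assumption on my part, not the post's architecture:

from tensorflow.keras.models import Sequential
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.layers import Dense, Conv2D, BatchNormalization, Flatten, MaxPooling2D, Dropout

epochs = 100
learning_rate = 1e-3
batch_size = 64
input_dim = (96, 96, 3)

# Illustrative two-block CNN ending in a two-way softmax (male / female)
model = Sequential([
    Conv2D(32, (3, 3), activation='relu', input_shape=input_dim),
    BatchNormalization(),
    MaxPooling2D((2, 2)),
    Conv2D(64, (3, 3), activation='relu'),
    BatchNormalization(),
    MaxPooling2D((2, 2)),
    Flatten(),
    Dense(128, activation='relu'),
    Dropout(0.5),
    Dense(2, activation='softmax'),
])

model.compile(optimizer=Adam(learning_rate=learning_rate),
              loss='categorical_crossentropy',
              metrics=['accuracy'])
model.summary()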

Bank customer survival-or-not classification | churn modeling

# ANN using Stochastic Gradient Descent
# %% Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# %% Importing the dataset
dataset = pd.read_csv('Churn_Modelling.csv')
x = dataset.iloc[:, 3:13].values
y = dataset.iloc[:, 13].values

# %% Encoding categorical data
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
le = LabelEncoder()
x[:, 1] = le.fit_transform(x[:, 1])
x[:, 2] = le.fit_transform(x[:, 2])

# %% Creating a DataFrame
df = pd.DataFrame(x, columns=dataset.columns[3:13])

# %% ColumnTransformer, OneHotEncoding
from sklearn.compose import ColumnTransformer
transformer = C...
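
The preview cuts off at the ColumnTransformer. Here is a sketch of how that step and the rest of the pipeline could continue, assuming Geography sits at column index 1 and Gender at index 2 after slicing, as in the excerpt; the network size and encoder options are my own choices:

import numpy as np
import pandas as pd
from sklearn.compose import ColumnTransformer
from sklearn.preprocessing import LabelEncoder, OneHotEncoder, StandardScaler
from sklearn.model_selection import train_test_split
from keras.models import Sequential
from keras.layers import Dense

dataset = pd.read_csv('Churn_Modelling.csv')
x = dataset.iloc[:, 3:13].values
y = dataset.iloc[:, 13].values

# Label-encode Gender; one-hot encode Geography and drop one dummy to avoid the dummy-variable trap
le = LabelEncoder()
x[:, 2] = le.fit_transform(x[:, 2])
transformer = ColumnTransformer([('geo', OneHotEncoder(drop='first'), [1])],
                                remainder='passthrough')
x = np.array(transformer.fit_transform(x), dtype=float)

# Split the data and scale the features
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0)
sc = StandardScaler()
x_train = sc.fit_transform(x_train)
x_test = sc.transform(x_test)

# Two hidden layers and a sigmoid output for the churn / no-churn decision
model = Sequential([
    Dense(6, activation='relu', input_dim=x_train.shape[1]),
    Dense(6, activation='relu'),
    Dense(1, activation='sigmoid'),
])
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
model.fit(x_train, y_train, batch_size=32, epochs=100, validation_data=(x_test, y_test))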

Cat and dog classification

# %% Importing the libraries
from keras.models import Sequential
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.layers import Flatten
from keras.layers import Dense

# %% Initializing the CNN
classifier = Sequential()

# %% Step 1 - Convolution
classifier.add(Conv2D(32, (3, 3), input_shape=(64, 64, 3), activation='relu'))
# The images that come to us don't all have the same size and shape, so we fix the pixel dimensions and the RGB color channels with the input_shape parameter

# %% Step 2 - Pooling
classifier.add(MaxPooling2D(pool_size=(2,...
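
The preview breaks off mid-pooling. A minimal sketch of how such a binary CNN could be finished and trained with flow_from_directory; the directory names below are placeholders and the later layers are my own guess, not the post's exact network:

from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense
from keras.preprocessing.image import ImageDataGenerator

classifier = Sequential()
classifier.add(Conv2D(32, (3, 3), input_shape=(64, 64, 3), activation='relu'))
classifier.add(MaxPooling2D(pool_size=(2, 2)))
classifier.add(Conv2D(32, (3, 3), activation='relu'))
classifier.add(MaxPooling2D(pool_size=(2, 2)))
classifier.add(Flatten())
classifier.add(Dense(128, activation='relu'))
classifier.add(Dense(1, activation='sigmoid'))  # one sigmoid unit: cat vs. dog

classifier.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

# Placeholder folders: each should contain one subdirectory per class (cats/, dogs/)
train_datagen = ImageDataGenerator(rescale=1.0 / 255, shear_range=0.2,
                                   zoom_range=0.2, horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1.0 / 255)
training_set = train_datagen.flow_from_directory('dataset/training_set', target_size=(64, 64),
                                                 batch_size=32, class_mode='binary')
test_set = test_datagen.flow_from_directory('dataset/test_set', target_size=(64, 64),
                                            batch_size=32, class_mode='binary')

classifier.fit(training_set, validation_data=test_set, epochs=25)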

Multiple Classification using Iris Dataset

Multiple classification using the Iris dataset; this dataset comes built into the sklearn library. Here we have three classes of Iris flower; you can read the full description using dataset['DESCR']. Read the comments for a better understanding, or just copy the code.

# %% Import libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# %% Load dataset
from sklearn.datasets import load_iris
dataset = load_iris()
print(dataset.keys())

# %% Assign the dataset to features and outputs
x = dataset.data
y = dataset.target
print(x.shape, y.shape)

# %% Data splitting
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0...
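
Since the preview cuts off at the split, here is a minimal sketch that finishes the workflow; k-nearest neighbours is my choice of classifier, not necessarily the model the post trains:

from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score, classification_report

# Load the three-class Iris data and split it 80/20
dataset = load_iris()
x, y = dataset.data, dataset.target
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=0)

# Fit a k-nearest-neighbours classifier and report per-class test metrics
model = KNeighborsClassifier(n_neighbors=5)
model.fit(x_train, y_train)
predictions = model.predict(x_test)
print(accuracy_score(y_test, predictions))
print(classification_report(y_test, predictions, target_names=dataset.target_names))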