Intellipaat Back

Explore Courses Blog Tutorials Interview Questions
0 votes
4 views
in Machine Learning by (130 points)

code:

import numpy as np

import matplotlib.pyplot as plt

import cv2

import os

from sklearn.model_selection import train_test_split
DataDir=r'E:\colours_classifier'
CATEGORIES=['Red','Blue','Green']
for i in CATEGORIES:
        path = os.path.join(DataDir,i)
        for img in os.listdir(path):
            image_rgb=cv2.imread(os.path.join(path,img),cv2.IMREAD_COLOR)
            img_array = cv2.cvtColor(image_rgb, cv2.COLOR_BGR2RGB)
            plt.imshow(img_array)
            plt.show()
            break
        break
           
img_size=100
new_array = cv2.resize(img_array,(img_size,img_size))
training_data=[]
def create_training_data():
    for i in CATEGORIES:
        
        path = os.path.join(DataDir,i)
        class_num=CATEGORIES.index(i)
        
        for img in os.listdir(path):
            try:
               img_array=cv2.imread(os.path.join(path,img),cv2.IMREAD_COLOR)
               new_array=cv2.resize(img_array,(img_size,img_size))
               training_data.append([new_array,class_num]) 
            except Exception as e:
                pass

create_training_data()
print(len(training_data)) 

import random

random.shuffle(training_data)

for sample in training_data[:10]:

    print(sample[1])

X = []

y = []

for features,label in training_data:

    X.append(features)

    y.append(label)

X = np.array(X).reshape(-1,img_size,img_size,1)

y = np.array(y)

import pickle

pickle_out = open(r'E:\colours_classifier\X.pickle',"wb")

pickle.dump(X,pickle_out)

pickle_out.close()

pickle_out = open(r'E:\colours_classifier\y.pickle',"wb")

pickle.dump(y,pickle_out)

pickle_out.close()

import tensorflow as tf

from tensorflow.keras.preprocessing.image import ImageDataGenerator

from tensorflow.keras.models import Sequential

from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten

from tensorflow.keras.layers import Conv2D, MaxPooling2D

import pickle

import numpy as np

pickle_in=open(r"E:\colours_classifier\X.pickle","rb")

X=pickle.load(pickle_in)

pickle_in=open(r"E:\colours_classifier\y.pickle","rb")

y=pickle.load(pickle_in)

y = np.array(y)

X=X/255.0

model= Sequential()

model.add(Conv2D(256,(3,3),input_shape=X.shape[1:]))

model.add(Activation('relu'))

model.add(MaxPooling2D(pool_size=(2,2)))

model.add(Conv2D(256,(3,3)))

model.add(Activation('relu'))

model.add(MaxPooling2D(pool_size=(2,2)))

model.add(Flatten())

model.add(Dense(64))

model.add(Dense(1))

model.add(Activation('sigmoid'))

model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])

model.fit(X,y,batch_size=10,epochs=10,validation_split=0.3)

model.save(r"E:\colours_classifier\COLOURS_CNN.model")

///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

ValueError Traceback (most recent call last)

<ipython-input-3-fdf5462edba5> in <module>
     36 model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])
     37 
---> 38 model.fit(X,y,batch_size=10,epochs=10,validation_split=0.3)
     39 
     40 model.save(r"E:\colours_classifier\COLOURS_CNN.model")

~\Anaconda3\envs\tf_gpu\lib\site-packages\tensorflow_core\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_freq, max_queue_size, workers, use_multiprocessing, **kwargs)
    817         max_queue_size=max_queue_size,
    818         workers=workers,
--> 819         use_multiprocessing=use_multiprocessing)
    820 
    821   def evaluate(self,

~\Anaconda3\envs\tf_gpu\lib\site-packages\tensorflow_core\python\keras\engine\training_v2.py in fit(self, model, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_freq, max_queue_size, workers, use_multiprocessing, **kwargs)
    233           max_queue_size=max_queue_size,
    234           workers=workers,
--> 235           use_multiprocessing=use_multiprocessing)
    236 
    237       total_samples = _get_total_number_of_samples(training_data_adapter)

~\Anaconda3\envs\tf_gpu\lib\site-packages\tensorflow_core\python\keras\engine\training_v2.py in _process_training_inputs(model, x, y, batch_size, epochs, sample_weights, class_weights, steps_per_epoch, validation_split, validation_data, validation_steps, shuffle, distribution_strategy, max_queue_size, workers, use_multiprocessing)
    550         batch_size=batch_size,
    551         check_steps=False,
--> 552         steps=steps_per_epoch)
    553     (x, y, sample_weights,
    554      val_x, val_y,

~\Anaconda3\envs\tf_gpu\lib\site-packages\tensorflow_core\python\keras\engine\training.py in _standardize_user_data(self, x, y, sample_weight, class_weight, batch_size, check_steps, steps_name, steps, validation_split, shuffle, extract_tensors_from_dataset)
   2381         is_dataset=is_dataset,
   2382         class_weight=class_weight,
-> 2383         batch_size=batch_size)
   2384 
   2385   def _standardize_tensors(self, x, y, sample_weight, run_eagerly, dict_inputs,

~\Anaconda3\envs\tf_gpu\lib\site-packages\tensorflow_core\python\keras\engine\training.py in _standardize_tensors(self, x, y, sample_weight, run_eagerly, dict_inputs, is_dataset, class_weight, batch_size)
   2483       # Check that all arrays have the same length.
   2484       if not self._distribution_strategy:
-> 2485         training_utils.check_array_lengths(x, y, sample_weights)
   2486         if self._is_graph_network and not run_eagerly:
   2487           # Additional checks to avoid users mistakenly using improper loss fns.

~\Anaconda3\envs\tf_gpu\lib\site-packages\tensorflow_core\python\keras\engine\training_utils.py in check_array_lengths(inputs, targets, weights)
    742                      'the same number of samples as target arrays. '
    743                      'Found ' + str(list(set_x)[0]) + ' input samples '
--> 744                      'and ' + str(list(set_y)[0]) + ' target samples.')
    745   if len(set_w) > 1:
    746     raise ValueError('All sample_weight arrays should have '

ValueError: Input arrays should have the same number of samples as target arrays. Found 1647 input samples and 549 target samples.
Please log in to answer this question.

31k questions

32.8k answers

501 comments

693 users

Browse Categories

...