Memory error when loading 70k images for Conv2D training
My dataset has 70k images that I want to train with a Conv2D network, but a memory error is thrown when I try to load the dataset. I only have 4 GB of RAM. How can I resolve this with an HDF5 matrix, by creating the dataset in HDF5 and then loading it for training? I guess that would take less memory. I followed a tutorial for creating an HDF5 dataset, but that step comes after the point where the error occurs. What am I doing wrong? Please ask if the question is not clear.
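For reference, this is the kind of incremental HDF5 writing I have in mind, so the full array never has to sit in RAM at once (a minimal sketch using h5py; the file name dataset.h5, the "x"/"y" dataset names, and the 100x100 single-channel shape are my own assumptions):

import os
import h5py
import numpy
from PIL import Image

path = "Dataset/One"
classes = os.listdir(path)

# List every (folder, file) pair first so the HDF5 datasets can be
# pre-allocated on disk with the right length.
files = [(fol, img) for fol in classes
         for img in os.listdir(os.path.join(path, fol))]

with h5py.File("dataset.h5", "w") as f:
    dset_x = f.create_dataset("x", shape=(len(files), 100, 100, 1), dtype="uint8")
    dset_y = f.create_dataset("y", shape=(len(files),), dtype=h5py.special_dtype(vlen=str))
    for i, (fol, img) in enumerate(files):
        im = numpy.asarray(Image.open(os.path.join(path, fol, img)))
        dset_x[i] = im.reshape(100, 100, 1)  # only one image in RAM at a time
        dset_y[i] = fol

This writes one image at a time, so peak memory stays at a single 100x100 array instead of all 70k.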
import os
import numpy
from PIL import Image
from keras.preprocessing.image import ImageDataGenerator

datagen = ImageDataGenerator(rotation_range=40,
                             width_shift_range=0.2,
                             height_shift_range=0.2,
                             rescale=1./255,
                             shear_range=0.2,
                             zoom_range=0.2,
                             horizontal_flip=True)
batch_size = 28
num_classes = 37
epochs = 100
os.chdir("E:");
path="Dataset/One";
classes=os.listdir(path)
x=[]#Datapoints
y=[]#labels
for fol in classes:
imgfiles=os.listdir(path+u'\\'+fol);
for img in imgfiles:
im=Image.open(path+u'\\'+fol+u'\\'+img);
im=numpy.asarray(im)/255;
x.append(im)
y.append(fol)
x = numpy.array(x)
y = numpy.array(y)
# memory error occurs here ##########################################
x = x.reshape((-1, 100, 100, 1))
n = x.shape[0]
randomize = numpy.arange(n)
numpy.random.shuffle(randomize)
# fancy indexing makes a full copy, so this also needs the whole array in RAM
x = x[randomize]
y = y[randomize]
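And this is roughly how I was planning to read the file back for training (a sketch assuming Keras's HDF5Matrix; model and y_train stand in for my Conv2D network and encoded labels, which I have left out):

from keras.utils.io_utils import HDF5Matrix

# Normalize on the fly so the data can stay uint8 on disk.
x_train = HDF5Matrix("dataset.h5", "x", normalizer=lambda v: v / 255.0)

# HDF5Matrix slices are read lazily from disk, so only each batch is
# loaded into memory; shuffle="batch" shuffles whole batches because
# HDF5 cannot efficiently be indexed in arbitrary order.
model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, shuffle="batch")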