c***@gmail.com
2015-04-02 13:58:51 UTC
I am trying to run the convolutional neural network with my own data set
and I get the error: "TypeError: Cannot convert Type TensorType(int32, matrix)
(of variable Subtensor{int64:int64:}.0) into Type TensorType(int32, vector).
You can try to manually convert Subtensor{int64:int64:}.0 into a
TensorType(int32, vector)." I am wondering if the problem is the way I created
my dataset. This is the code I used, which is a modification of one from
stackoverflow.com. I have 360 images and I want to classify them into four
classes.
import cPickle  # Python 2 stdlib; use `import pickle` on Python 3
import gzip
import os
import sys
import time
from datetime import datetime
from glob import glob

import numpy as np
import pandas as pd
from sklearn import ensemble, cross_validation, preprocessing
# NOTE(review): the code below also requires `from PIL import Image`
# (third-party Pillow/PIL), which the original snippet never imported.
def dir_to_dataset(glob_files, loc_train_labels=""):
    """Load every image matching *glob_files* into a flat pixel dataset.

    Parameters
    ----------
    glob_files : str
        Glob pattern of the image files to load (e.g. ``"train/*.JPG"``).
        Matches are processed in order of file-name length (original
        behavior, kept so numbered files sort 1, 2, ..., 10 correctly).
    loc_train_labels : str, optional
        Path to a CSV file with a ``"Class"`` column holding one label per
        image.  When empty (the default), only the pixel data is returned.
        BUG FIX: the original default was ``" "`` (a single space), so the
        no-labels branch was unreachable and ``pd.read_csv(" ")`` crashed.

    Returns
    -------
    numpy.ndarray or (numpy.ndarray, numpy.ndarray)
        The pixel matrix (one row per image), plus the label vector when a
        label CSV is given.
    """
    # NOTE(review): requires `from PIL import Image` and `from glob import
    # glob`, neither of which the original snippet imported.
    print("Gonna process:\n\t %s" % glob_files)
    dataset = []
    for file_count, file_name in enumerate(sorted(glob(glob_files), key=len)):
        # Convert to grayscale ("LA" = luminance + alpha) and keep only the
        # luminance value of each pixel.  The original code also opened the
        # file a second time into an unused variable; that has been removed.
        img = Image.open(file_name).convert('LA')
        pixels = [f[0] for f in img.getdata()]
        dataset.append(pixels)
        if file_count % 1000 == 0:
            print("\t %s files processed" % file_count)
    # Cache the raw pixel matrix next to the input pattern for reuse.
    outfile = glob_files + "out"
    np.save(outfile, dataset)
    if loc_train_labels.strip():
        df = pd.read_csv(loc_train_labels)
        return np.array(dataset), np.array(df["Class"])
    else:
        return np.array(dataset)
# Read pixel data and labels for all 360 images.
Data, y = dir_to_dataset("trainMNISTForm\\*.JPG", "labels.csv")
# Divide the dataset into 3 parts: 260 train / 50 validation / 50 test.
# BUG FIX: the original boundaries (Data[:259], Data[260:309], Data[310:360])
# silently dropped samples 259 and 309; the half-open slices below cover all
# 360 indices with no gaps.
train_set_x = Data[:260]
val_set_x = Data[260:310]
test_set_x = Data[310:360]
train_set_y = y[:260]
val_set_y = y[260:310]
test_set_y = y[310:360]
train_set = train_set_x, train_set_y
val_set = val_set_x, val_set_y
# BUG FIX: the test split was paired with the *validation* labels
# (val_set_y) in the original; it must use test_set_y.
test_set = test_set_x, test_set_y
dataset = [train_set, val_set, test_set]
# Pickle with protocol 2 so the gzip archive matches what the standard
# Theano MNIST loaders expect; `with` guarantees the file is closed.
with gzip.open('file.pkl.gz', 'wb') as f:
    cPickle.dump(dataset, f, protocol=2)
Kindly help
and I get the error: "TypeError: Cannot convert Type TensorType(int32, matrix)
(of variable Subtensor{int64:int64:}.0) into Type TensorType(int32, vector).
You can try to manually convert Subtensor{int64:int64:}.0 into a
TensorType(int32, vector)." I am wondering if the problem is the way I created
my dataset. This is the code I used, which is a modification of one from
stackoverflow.com. I have 360 images and I want to classify them into four
classes.
import numpy as np
import pandas as pd
import sys
import time
import os
from datetime import datetime
from sklearn import ensemble, cross_validation, preprocessing
def dir_to_dataset(glob_files, loc_train_labels=""):
    """Load every image matching *glob_files* into a flat pixel dataset.

    Parameters
    ----------
    glob_files : str
        Glob pattern of the image files to load (e.g. ``"train/*.JPG"``).
        Matches are processed in order of file-name length (original
        behavior, kept so numbered files sort 1, 2, ..., 10 correctly).
    loc_train_labels : str, optional
        Path to a CSV file with a ``"Class"`` column holding one label per
        image.  When empty (the default), only the pixel data is returned.
        BUG FIX: the original default was ``" "`` (a single space), so the
        no-labels branch was unreachable and ``pd.read_csv(" ")`` crashed.

    Returns
    -------
    numpy.ndarray or (numpy.ndarray, numpy.ndarray)
        The pixel matrix (one row per image), plus the label vector when a
        label CSV is given.
    """
    # NOTE(review): requires `from PIL import Image` and `from glob import
    # glob`, neither of which the original snippet imported.
    print("Gonna process:\n\t %s" % glob_files)
    dataset = []
    for file_count, file_name in enumerate(sorted(glob(glob_files), key=len)):
        # Convert to grayscale ("LA" = luminance + alpha) and keep only the
        # luminance value of each pixel.  The original code also opened the
        # file a second time into an unused variable; that has been removed.
        img = Image.open(file_name).convert('LA')
        pixels = [f[0] for f in img.getdata()]
        dataset.append(pixels)
        if file_count % 1000 == 0:
            print("\t %s files processed" % file_count)
    # Cache the raw pixel matrix next to the input pattern for reuse.
    outfile = glob_files + "out"
    np.save(outfile, dataset)
    if loc_train_labels.strip():
        df = pd.read_csv(loc_train_labels)
        return np.array(dataset), np.array(df["Class"])
    else:
        return np.array(dataset)
# Read pixel data and labels for all 360 images.
Data, y = dir_to_dataset("trainMNISTForm\\*.JPG", "labels.csv")
# Divide the dataset into 3 parts: 260 train / 50 validation / 50 test.
# BUG FIX: the original boundaries (Data[:259], Data[260:309], Data[310:360])
# silently dropped samples 259 and 309; the half-open slices below cover all
# 360 indices with no gaps.
train_set_x = Data[:260]
val_set_x = Data[260:310]
test_set_x = Data[310:360]
train_set_y = y[:260]
val_set_y = y[260:310]
test_set_y = y[310:360]
train_set = train_set_x, train_set_y
val_set = val_set_x, val_set_y
# BUG FIX: the test split was paired with the *validation* labels
# (val_set_y) in the original; it must use test_set_y.
test_set = test_set_x, test_set_y
dataset = [train_set, val_set, test_set]
# Pickle with protocol 2 so the gzip archive matches what the standard
# Theano MNIST loaders expect; `with` guarantees the file is closed.
with gzip.open('file.pkl.gz', 'wb') as f:
    cPickle.dump(dataset, f, protocol=2)
Kindly help
--
---
You received this message because you are subscribed to the Google Groups "theano-users" group.
To unsubscribe from this group and stop receiving emails from it, send an email to theano-users+***@googlegroups.com.
For more options, visit https://groups.google.com/d/optout.
---
You received this message because you are subscribed to the Google Groups "theano-users" group.
To unsubscribe from this group and stop receiving emails from it, send an email to theano-users+***@googlegroups.com.
For more options, visit https://groups.google.com/d/optout.