Hello Mufei (I'm sure you'll respond to this first),
So,
I keep getting "Target size (torch.Size([20804])) must be the same as input size (torch.Size([15321]))" in one of my batches. There is no graph with 15,321 nodes or 20,804 labels. Any idea where I should be looking? Here is my load_graphs code:
# Load every saved training graph, caching graphs, labels, per-node graph ids
# and node features on the dataset object.
if self.mode == 'train':
    graph_files = 'dglgraph/training'
    self.train_list = [f for f in listdir(graph_files)]
    onlyfiles = self.train_list
    self.train_graphs = []
    self.train_labels = []
    self.graph_id = []
    self.train_features = []
    for i in range(len(onlyfiles)):
        fn = str(onlyfiles[i])
        fn = 'dglgraph/training/' + fn
        # load_graphs returns (list_of_graphs, label_dict); only the first
        # graph in each file is used here.
        g, l = load_graphs(fn)
        graph = g[0]
        num_nodes = graph.number_of_nodes()
        # One graph-id entry per node, so len(self.graph_id) tracks the
        # total node count across all graphs loaded so far.
        for j in range(num_nodes):
            self.graph_id.append(self.counter)
        # NOTE(review): `l` is the *entire* label dict returned by
        # load_graphs, not a single tensor. If that dict holds more than one
        # entry per file, or labels that are per-graph rather than per-node,
        # the collate step that flattens every dict value will produce a
        # label count that disagrees with the batched node count — the
        # likely source of the 20804-vs-15321 size mismatch. Verify what
        # keys/shapes `l` actually contains.
        label = l
        self.train_graphs.append(graph)
        self.train_labels.append(label)
        self.train_features.append(graph.ndata['node_attributes'])
        self.counter += 1
    # assumed to sit after the loop (indentation was lost in the paste) —
    # confirm against the original file
    print('labels: ', len(self.train_labels))
And here is the collate function:
def collate(sample):
    """Collate a list of (graph, label_dict) pairs into a single batch.

    Parameters
    ----------
    sample : list of (DGLGraph, dict)
        Each element pairs one graph with the label dict returned by
        ``load_graphs`` for that file.

    Returns
    -------
    (DGLGraph, torch.Tensor, torch.Tensor)
        The batched graph, the concatenated per-node features, and the
        concatenated labels.
    """
    graphs, label_dicts = map(list, zip(*sample))

    # NOTE(review): this flattens *every* value of *every* label dict. If a
    # dict contains more than one entry, or its tensors are per-graph rather
    # than per-node, len(labels) will not equal the batched node count —
    # which is exactly the "Target size ... must be the same as input size"
    # error reported above. Print the keys/shapes of each dict to confirm.
    labels = []
    for d in label_dicts:
        for k, v in d.items():
            labels.append(v)

    feats = [g.ndata['node_attributes'] for g in graphs]

    graph = dgl.batch(graphs)
    feats = torch.from_numpy(np.concatenate(feats))
    labels = torch.from_numpy(np.concatenate(labels))
    return graph, feats, labels
Thanks so much.