# `dataloader` here is the (GraphBolt) dataloader defined earlier, yielding MiniBatch objects.
for i, mini_batch in enumerate(dataloader):
    print(mini_batch.blocks[0])
    print(mini_batch.blocks[0].srcnodes._graph)
    print(mini_batch.blocks[0].dstnodes._graph)
    print(i, mini_batch.node_features["id"].shape)

# printed output (first iteration):
Block(num_src_nodes=1861, num_dst_nodes=1490, num_edges=14845)
Block(num_src_nodes=1861, num_dst_nodes=1490, num_edges=14845)
Block(num_src_nodes=1861, num_dst_nodes=1490, num_edges=14845)
0 torch.Size([1861, 1])
All three print statements show the same Block, so there seems to be no difference between them.
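
For reference, a minimal sketch of how the two node sets of a block can be compared directly, assuming the standard DGLBlock API (num_src_nodes / num_dst_nodes, the callable srcnodes / dstnodes views, and srcdata / dstdata); the comments mark what is an assumption rather than something shown above:

block = mini_batch.blocks[0]   # inside the loop above

# The node views are callable and return the node ID tensor of each side.
src_ids = block.srcnodes()     # tensor([0, ..., num_src_nodes - 1])
dst_ids = block.dstnodes()     # tensor([0, ..., num_dst_nodes - 1])
print(block.num_src_nodes(), block.num_dst_nodes())   # e.g. 1861 1490

# Features attached to each side live in separate dicts.
print(list(block.srcdata.keys()), list(block.dstdata.keys()))

# Assumption: the private `_graph` attribute of a node view is just a
# back-reference to the owning block, which would explain why all three
# prints above show the same Block repr.
print(block.srcnodes._graph is block)   # expected: True

If that assumption holds, `_graph` does not distinguish source from destination nodes at all; the distinction is carried by the view itself (srcnodes vs dstnodes) and by the separate srcdata / dstdata feature dictionaries.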