Is there any code example for heterograph classification with only 1 node type?

In my case, my heterograph has only 1 node type, while its edges have 2 types. However, when I run the example from the docs, it says that I only have 1 node type and keeps erroring.

Can you provide a code snippet for reproducing your issue?

Of course!
This is what my graph looks like:
Graph(num_nodes={'glycine': 253}, num_edges={('glycine', 'normal_link', 'glycine'): 252, ('glycine', 'skip_link', 'glycine'): 82}, metagraph=[('glycine', 'glycine', 'normal_link'), ('glycine', 'glycine', 'skip_link')])
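
For completeness, a minimal sketch of how a graph with this shape could be constructed; the edge indices and features below are made up for illustration, only the node/edge types and counts match the printout above:

import dgl
import torch

# Made-up edge lists: a chain for 'normal_link' and random pairs for 'skip_link'.
graph = dgl.heterograph({
    ('glycine', 'normal_link', 'glycine'): (torch.arange(252), torch.arange(1, 253)),
    ('glycine', 'skip_link', 'glycine'): (torch.randint(0, 253, (82,)), torch.randint(0, 253, (82,))),
})
# With a single node type, ndata takes a tensor directly; 4-dim features match in_feats=4 below.
graph.ndata['glycine'] = torch.randn(253, 4)
print(graph)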
This is my RGCN model:

import torch
import torch.nn as nn
import torch.nn.functional as F
import dgl
import dgl.nn as dglnn

class RGCN(nn.Module):
    def __init__(self, in_feats, hid_feats, out_feats, rel_names):
        super().__init__()

        self.conv1 = dglnn.HeteroGraphConv({
            rel: dglnn.GraphConv(in_feats, hid_feats)
            for rel in rel_names}, aggregate='sum')
        self.conv2 = dglnn.HeteroGraphConv({
            rel: dglnn.GraphConv(hid_feats, out_feats)
            for rel in rel_names}, aggregate='sum')

    def forward(self, graph, inputs):
        # inputs is a dict mapping node type to node feature tensors
        h = self.conv1(graph, inputs)
        h = {k: F.relu(v) for k, v in h.items()}
        h = self.conv2(graph, h)
        return h
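
Note that HeteroGraphConv takes and returns dicts keyed by node type even when there is only one node type. A quick shape check, assuming the sketch graph above (self-loops are added per relation here only so that GraphConv's zero-in-degree check does not trip on the made-up edges):

g2 = dgl.add_self_loop(graph, etype='normal_link')
g2 = dgl.add_self_loop(g2, etype='skip_link')
rgcn = RGCN(in_feats=4, hid_feats=10, out_feats=10, rel_names=g2.etypes)
out = rgcn(g2, {'glycine': g2.ndata['glycine']})
print(type(out), out['glycine'].shape)  # <class 'dict'> torch.Size([253, 10])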

class HeteroClassifier(nn.Module):
    def __init__(self, in_dim, hidden_dim, n_classes, rel_names):
        super().__init__()

        self.rgcn = RGCN(in_dim, hidden_dim, hidden_dim, rel_names)
        self.classify = nn.Linear(hidden_dim, n_classes)

    def forward(self, g):
        h = {'glycine':g.ndata['glycine']}
        h = self.rgcn(g, h)
        with g.local_scope():
            g.ndata['h'] = h
            # Calculate graph representation by average readout.
            hg = 0
            for ntype in g.ntypes:
                hg = hg + dgl.mean_nodes(g, 'h', ntype=ntype)
            return self.classify(hg)

This is my training loop:


from tqdm import tqdm

etypes = ['normal_link', 'skip_link']  # edge type names, as in the graph printout above
model = HeteroClassifier(4, 10, 1, etypes).to('cuda')
opt = torch.optim.Adam(model.parameters(), lr=0.01, betas=(0.5, 0.999))
losses = []
for epoch in tqdm(range(100)):
    for batched_graph, labels in dataloader:
        # batched_graph = batched_graph.to('cuda')
        # labels = labels.to('cuda')
        logits = model(batched_graph)
        # print(logits.dtype)
        loss = F.mse_loss(logits, labels)
        opt.zero_grad()
        loss.backward()
        opt.step()
        losses.append(loss.item())  # store a float, not the tensor, so the autograd graph is released
    avg_loss = torch.mean(torch.FloatTensor(losses))
    print(avg_loss)

Last is the error message:

AssertionError: The HeteroNodeDataView has only one node type. please pass a tensor directly

Looking forward to your reply, and thanks again!

How did you initialize the dataloader?
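
(For reference, a graph-classification dataloader in DGL is typically set up like the sketch below; the dataset contents and batch size here are assumptions, not necessarily the poster's setup. It reuses the sketch graph from above.)

from dgl.dataloading import GraphDataLoader

# A plain list of (graph, label) pairs works as a map-style dataset; the labels are made up.
dataset = [(graph, torch.tensor([0.0])) for _ in range(12)]
dataloader = GraphDataLoader(dataset, batch_size=4, shuffle=True)
batched_graph, labels = next(iter(dataloader))
print(batched_graph.batch_size, labels.shape)  # 4 torch.Size([4, 1])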

Hi, were you able to solve this? I have the same problem right now. Thanks.
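
For anyone hitting the same assertion: it appears to be raised when a dict is assigned to g.ndata on a graph that has a single node type, where DGL expects a plain tensor (as the message itself says). A minimal sketch of one way to sidestep it, keeping the rest of HeteroClassifier above unchanged and assuming the single node type 'glycine':

    def forward(self, g):
        h = {'glycine': g.ndata['glycine']}  # with one node type, ndata already returns a tensor
        h = self.rgcn(g, h)
        with g.local_scope():
            # Assign the tensor for the single node type directly, not the dict.
            g.ndata['h'] = h['glycine']
            hg = dgl.mean_nodes(g, 'h')  # ntype can be omitted when there is only one node type
            return self.classify(hg)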