When weight=False the model doesn't output the correct output feature size

I am using EvolveGCN as a reference to build a similar model on heterogeneous graphs. When I set the parameter weight=False, the GraphConv class doesn't output the correct feature size/dimensions. When I change it to weight=True, it outputs the correct feature size. Can anyone help me figure out what is going on in my code?

import torch
import torch.nn as nn
from torch.nn import Parameter, init
from dgl.nn.pytorch import GraphConv, HeteroGraphConv

# TopK and MatGRUCell are assumed to be defined as in the EvolveGCN reference code.


class EvolveGCNH(nn.Module):
    def __init__(self, in_feats=166, n_hidden=76, num_layers=2, n_classes=2, classifier_hidden=510):
        # default parameters follow the official config
        super(EvolveGCNH, self).__init__()
        self.num_layers = num_layers
        self.pooling_layers = nn.ModuleList()
        self.recurrent_layers = nn.ModuleList()
        self.gnn_convs = nn.ModuleList()
        self.gcn_weights_list = nn.ParameterList()

        self.pooling_layers.append(TopK(in_feats, n_hidden))
        # similar to EvolveGCNO
        self.recurrent_layers.append(MatGRUCell(in_feats=in_feats, out_feats=n_hidden))
        self.gcn_weights_list.append(Parameter(torch.Tensor(in_feats, n_hidden)))
        # weight=False doesn't work and doesn't output n_hidden feature
        self.gnn_convs.append(
            HeteroGraphConv(
                {
                    "follower": GraphConv(
                        in_feats=in_feats,
                        out_feats=n_hidden,
                        bias=True, activation=nn.RReLU(), weight=True,
                    ),
                    "following": GraphConv(
                        in_feats=in_feats,
                        out_feats=n_hidden,
                        bias=True, activation=nn.RReLU(), weight=True,
                    ),
                },
                aggregate="sum",
            )
        )
        for _ in range(num_layers - 1):
            self.pooling_layers.append(TopK(n_hidden, n_hidden))
            self.recurrent_layers.append(MatGRUCell(in_feats=n_hidden, out_feats=n_hidden))
            self.gcn_weights_list.append(Parameter(torch.Tensor(n_hidden, n_hidden)))
            self.gnn_convs.append(
                HeteroGraphConv(
                    {
                        "follower": GraphConv(
                            in_feats=n_hidden,
                            out_feats=n_hidden,
                            bias=False, activation=nn.RReLU(), weight=False,
                        ),
                        "following": GraphConv(
                            in_feats=n_hidden,
                            out_feats=n_hidden,
                            bias=False, activation=nn.RReLU(), weight=False,
                        ),
                    },
                    aggregate="sum",
                )
            )

        self.mlp = nn.Sequential(nn.Linear(n_hidden, classifier_hidden),
                                 nn.ReLU(),
                                 nn.Linear(classifier_hidden, n_classes))
        self.reset_parameters()

    def reset_parameters(self):
        for gcn_weight in self.gcn_weights_list:
            init.xavier_uniform_(gcn_weight)

    def forward(self, g_list):
        feature_list = []
        for g in g_list:
            feature_list.append(g.ndata['feat'])
        for i in range(self.num_layers):
            W = self.gcn_weights_list[i]
            for j, g in enumerate(g_list):
                X_tilde = self.pooling_layers[i](feature_list[j])
                #print(W.shape)
                W = self.recurrent_layers[i](W, X_tilde)
                #print(W.shape)
                #print(self.gnn_convs[i])
                result = self.gnn_convs[i](g, {'user': feature_list[j]}, {'weight': W})['user']
                #print(result.shape)
                feature_list[j] = result
        return self.mlp(feature_list[-1])

This behavior is expected for GraphConv. From the docs (GraphConv — DGL 0.8.2 documentation):

weight (bool, optional) – If True, apply a linear layer. Otherwise, aggregating the messages without a weight matrix.
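In other words, with weight=False and no external weight supplied at call time, GraphConv only aggregates, so the output keeps the input feature size instead of n_hidden. A minimal sketch of that behavior on a toy homogeneous graph (the graph and sizes below are made up for illustration):

import dgl
import torch
from dgl.nn.pytorch import GraphConv

g = dgl.graph(([0, 1, 2], [1, 2, 0]))                 # toy 3-node cycle
feat = torch.randn(3, 166)

conv = GraphConv(166, 76, weight=False, bias=False)
out1 = conv(g, feat)                                   # no weight anywhere -> shape (3, 166)
out2 = conv(g, feat, weight=torch.randn(166, 76))      # external weight    -> shape (3, 76)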

Could you check if the shape of W is correct? I think setting weight=False during initialization and passing the weight as an argument in forward is correct for EvolveGCN.
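For reference, here is a rough sketch of how the evolved weight W could be routed to each relation's GraphConv in the forward pass (relation and node type names are taken from the snippet above; untested). HeteroGraphConv passes extra per-relation keyword arguments through mod_kwargs, and a GraphConv built with weight=False accepts an external weight at call time:

# hypothetical replacement for the conv call inside forward()
mod_kwargs = {
    'follower': {'weight': W},
    'following': {'weight': W},
}
result = self.gnn_convs[i](g, {'user': feature_list[j]}, mod_kwargs=mod_kwargs)['user']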
