class Net(nn.Module):
    """Two-layer GCN: 1433-dim input features -> 16 hidden units -> 7 class logits.

    The dimensions match the Cora citation dataset: each node has a
    1433-dimensional bag-of-words feature vector and belongs to one of
    7 classes. 16 is simply the chosen hidden-layer width.
    """

    def __init__(self):
        # BUG FIX: the original defined `def init(self)` and called
        # `super(Net, self).init()`. Python constructors must be named
        # `__init__`; with `init`, the method is never invoked on
        # `Net()`, `nn.Module` is never initialized, and the layers are
        # never registered.
        super(Net, self).__init__()
        self.layer1 = GCNLayer(1433, 16)  # in_feats=1433 (input dim), out_feats=16 (hidden dim)
        self.layer2 = GCNLayer(16, 7)     # hidden dim 16 -> 7 output classes

    def forward(self, g, features):
        """Propagate node `features` over graph `g` through both GCN layers.

        Returns the raw (un-normalized) per-node class scores of shape
        (num_nodes, 7).
        """
        x = F.relu(self.layer1(g, features))
        x = self.layer2(g, x)
        return x
# Instantiate the model and print its layer structure (nn.Module.__repr__
# lists the registered sub-modules). NOTE(review): with the buggy `init`
# name above, `Net()` registers no layers, so this prints an empty module.
net = Net()
print(net)
This example code gives layer1 a feature size of (1433, 16). Since 1433 represents the dimensionality of the input features, why is out_feats set to 16?