Can anyone guide me? Here is my model so far (the graph layers are DGL's SAGEConv and GATConv):
import torch.nn as nn
import torch.nn.functional as F
from dgl.nn import SAGEConv, GATConv


class GraphSage_BiLSTM_GAT(nn.Module):
    def __init__(self, nfeat, nhid, nclass, dropout):
        super(GraphSage_BiLSTM_GAT, self).__init__()
        self.dropout = dropout
        self.conv1 = SAGEConv(nfeat, nhid, aggregator_type='mean')
        self.conv2 = SAGEConv(nhid, nhid, aggregator_type='mean')
        self.conv3 = GATConv(nhid, nhid, num_heads=16)
        self.LS_end = nn.LSTM(input_size=nhid, hidden_size=nclass, num_layers=8,
                              dropout=dropout, batch_first=True, bidirectional=True)
        # self.conv3 = SAGEConv(nclass, nclass, aggregator_type='mean')
        self.conv4 = GATConv(nclass, nclass, num_heads=16)

    def forward(self, x, adj):
        x = F.relu(self.conv1(adj, x))  # DGL convs are called as conv(graph, features), unlike a self-defined GCN
        x = F.dropout(x, self.dropout, training=self.training)
        x = self.conv2(adj, x)
        # ... (this is where I want to apply self.conv3, the BiLSTM self.LS_end and self.conv4,
        # ...  but I am not sure how -- see my question below)
        return F.log_softmax(x, dim=1)
How do I call the BiLSTM layer (self.LS_end) that I initialize in the constructor inside the forward function? Later I also want to add an attention layer. I am not sure what input shape the LSTM expects here or what its purpose is between the graph layers. Also, can I change the size of the node embeddings before feeding them into the first GraphSAGE layer? Can someone please explain? My own (probably wrong) attempt is sketched below.
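This is just a rough, untested sketch of what I imagine the whole thing could look like. The extra proj_in layer, changing the last GAT layer's input size to 2 * nclass, averaging the GAT heads with .mean(dim=1), and treating each node embedding as a length-1 sequence via unsqueeze(1) are all my own guesses, not anything I have seen recommended:

import torch.nn as nn
import torch.nn.functional as F
from dgl.nn import SAGEConv, GATConv


class GraphSage_BiLSTM_GAT(nn.Module):
    def __init__(self, nfeat, nhid, nclass, dropout):
        super(GraphSage_BiLSTM_GAT, self).__init__()
        self.dropout = dropout
        # guess: a plain Linear layer to change the node-embedding size
        # before the first GraphSAGE layer
        self.proj_in = nn.Linear(nfeat, nhid)
        self.conv1 = SAGEConv(nhid, nhid, aggregator_type='mean')
        self.conv2 = SAGEConv(nhid, nhid, aggregator_type='mean')
        self.conv3 = GATConv(nhid, nhid, num_heads=16)
        self.LS_end = nn.LSTM(input_size=nhid, hidden_size=nclass, num_layers=8,
                              dropout=dropout, batch_first=True, bidirectional=True)
        # the bidirectional LSTM emits 2 * nclass features per node, so I changed
        # the input size of the last GAT layer from nclass to 2 * nclass
        self.conv4 = GATConv(2 * nclass, nclass, num_heads=16)

    def forward(self, x, adj):                      # 'adj' is the DGL graph object
        x = F.relu(self.proj_in(x))                 # (N, nfeat) -> (N, nhid)
        x = F.relu(self.conv1(adj, x))              # (N, nhid)  -> (N, nhid)
        x = F.dropout(x, self.dropout, training=self.training)
        x = F.relu(self.conv2(adj, x))              # (N, nhid)
        x = self.conv3(adj, x).mean(dim=1)          # (N, 16, nhid) -> average heads -> (N, nhid)
        # treat each node embedding as a sequence of length 1 for the BiLSTM
        x, _ = self.LS_end(x.unsqueeze(1))          # (N, 1, nhid) -> (N, 1, 2 * nclass)
        x = x.squeeze(1)                            # (N, 2 * nclass)
        x = self.conv4(adj, x).mean(dim=1)          # (N, 16, nclass) -> (N, nclass)
        return F.log_softmax(x, dim=1)

With this, model(features, g) with features of shape (N, nfeat) and a DGL graph g would give per-node log-probabilities of shape (N, nclass). Is treating each node as a length-1 sequence the right way to use the BiLSTM here, or should it run over an actual sequence (for example the stacked outputs of the GNN layers)? And is averaging the GAT heads reasonable, or should they be concatenated (which would change the layer sizes again)?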