Weighted edges in GAT for classification

Hi! I’ve recently started with geometric deep learning and am finding my way around DGL; the community is awesome.
I want to use edge weights with GAT; so far I’ve only used node features. Here’s the code:

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

import dgl
import dgl.function as fn
from dgl.nn.pytorch import edge_softmax  # dgl.nn.functional.edge_softmax in newer DGL


class GATLayer(nn.Module):

    def __init__(self,
                 in_dim,
                 out_dim,
                 num_heads,
                 feat_drop=0.,
                 attn_drop=0.,
                 alpha=0.2,
                 agg_activation=F.elu):
        super(GATLayer, self).__init__()
        self.num_heads = num_heads
        self.feat_drop = nn.Dropout(feat_drop)
        self.fc = nn.Linear(in_dim, num_heads * out_dim, bias=False)
        self.attn_l = nn.Parameter(torch.Tensor(size=(num_heads, out_dim, 1)))
        self.attn_r = nn.Parameter(torch.Tensor(size=(num_heads, out_dim, 1)))
        # the tensors above are allocated uninitialised, so give them proper values
        nn.init.xavier_normal_(self.attn_l)
        nn.init.xavier_normal_(self.attn_r)
        self.attn_drop = nn.Dropout(attn_drop)
        self.activation = nn.LeakyReLU(alpha)
        self.softmax = edge_softmax

        self.agg_activation = agg_activation

    def clean_data(self):
        # drop the temporary node/edge fields created in forward()
        ndata_names = ['ft', 'a1', 'a2']
        edata_names = ['a_drop']
        for name in ndata_names:
            self.g.ndata.pop(name)
        for name in edata_names:
            self.g.edata.pop(name)

    def forward(self, bg, features):
        self.g = bg
        # h = self.feat_drop(features)
        h = features
        # project the node features and split them into num_heads chunks
        ft = self.fc(h).reshape((h.shape[0], self.num_heads, -1))
        head_ft = ft.transpose(0, 1)
        # per-head attention terms for source (a1) and destination (a2) nodes
        a1 = torch.bmm(head_ft, self.attn_l).transpose(0, 1)
        a2 = torch.bmm(head_ft, self.attn_r).transpose(0, 1)
        self.g.ndata.update({'ft': ft, 'a1': a1, 'a2': a2})
        # unnormalised attention logits on every edge
        self.g.apply_edges(self.edge_attention)
        # normalise them per destination node
        self.edge_softmax()
        # aggregate: message = src 'ft' * edge 'a_drop', reduce by sum
        # (fn.src_mul_edge is the same as fn.u_mul_e in newer DGL)
        self.g.update_all(fn.src_mul_edge('ft', 'a_drop', 'ft'),
                          fn.sum('ft', 'ft'))

        ret = self.g.ndata['ft']
        ret = ret.flatten(1)  # concatenate the heads

        if self.agg_activation is not None:
            ret = self.agg_activation(ret)

        self.clean_data()

        return ret

    def edge_attention(self, edges):
        a = self.activation(edges.src['a1'] + edges.dst['a2'])
        return {'a': a}

    def edge_softmax(self):
        attention = self.softmax(self.g, self.g.edata.pop('a'))
        self.g.edata['a_drop'] = self.attn_drop(attention)


class GATClassifier(nn.Module):

    def __init__(self, in_dim, hidden_dim, num_heads, n_classes):
        super(GATClassifier, self).__init__()

        self.layers = nn.ModuleList()
        self.layers.append(GATLayer(in_dim, hidden_dim, num_heads))
        self.layers.append(GATLayer(hidden_dim * num_heads, hidden_dim, num_heads))
        self.classify = nn.Linear(hidden_dim * num_heads, n_classes)

    def forward(self, bg, features):
        h = features
        for gnn in self.layers:
            h = gnn(bg, h)
        # graph-level readout: average the node states of each graph in the batch
        bg.ndata['h'] = h
        hg = dgl.mean_nodes(bg, 'h')
        return self.classify(hg)
```

I’ve used weighted edges with GCN using this update function:
`g.update_all(fn.u_mul_e(lhs_field='h', rhs_field='w', out='m'), fn.sum('m', 'h_neigh'))`
Is there a similar way for GATs, or anything else that can help me include edge weights?
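
For reference, this is a minimal self-contained version of that weighted GCN-style update (a sketch, assuming a recent DGL where dgl.graph is available; the tiny graph and the field names 'h', 'w', 'h_neigh' are just placeholders):

```python
import torch
import dgl
import dgl.function as fn

# toy directed graph with 3 nodes and 3 edges
g = dgl.graph(([0, 1, 2], [1, 2, 0]))
g.ndata['h'] = torch.randn(3, 4)   # node features
g.edata['w'] = torch.rand(3, 1)    # one scalar weight per edge

# message = src 'h' * edge 'w', then sum the incoming messages
g.update_all(fn.u_mul_e('h', 'w', 'm'), fn.sum('m', 'h_neigh'))
print(g.ndata['h_neigh'].shape)    # torch.Size([3, 4])
```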

Hi, integrating edge weights into GAT is more complex than in GCN, because GAT already computes a dynamic edge weight of its own (the attention). Could you tell us what kind of math formulation you are looking at, so we can provide more information on how to implement it?
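
For example (just sketches, not something DGL prescribes): one option is to bias the attention logits with the weight, e.g. e_ij = LeakyReLU(a_l^T W h_i + a_r^T W h_j + phi(w_ij)) for some function phi of the weight, and then normalize with the usual softmax; another is to keep the logits unchanged and rescale the normalized attention afterwards, alpha_ij <- alpha_ij * w_ij, optionally renormalizing over the neighborhood. Knowing which of these (or something else) you have in mind would make it easier to point to concrete code.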

Hey!
Thanks for replying. I don’t have a particular formula in mind; the edges in my case are the Euclidean distances between the nodes, so the node features are coordinates in Euclidean space. I just wanted to ask whether there has been similar work before?
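
To make that concrete, this is roughly what I mean by distance-based weights (only a sketch; the toy graph, the field names 'pos', 'dist', 'w' and the Gaussian kernel are placeholders I made up):

```python
import torch
import dgl

# node features are 2-D coordinates; derive an edge weight from the
# Euclidean distance between the two endpoints of each edge
g = dgl.graph(([0, 1, 2], [1, 2, 0]))
g.ndata['pos'] = torch.randn(3, 2)

def edge_distance(edges):
    d = torch.norm(edges.src['pos'] - edges.dst['pos'], dim=1, keepdim=True)
    return {'dist': d}                  # shape (num_edges, 1)

g.apply_edges(edge_distance)
# closer nodes get a larger weight; the Gaussian kernel is just one choice
g.edata['w'] = torch.exp(-g.edata['dist'] ** 2)
```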
Thanks

Hello,
I am new to GNNs, and I want to perform node classification on graphs with weighted edges. Could you give me an explanation, or a simple idea, of how to implement GCN or GAT for this task on weighted graphs (edges)?

GraphConv already supports weighted graphs. You can extend GAT with the same practice.
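
For instance, one way to do it with the GATLayer posted above (only a sketch: it assumes the weights are stored in bg.edata['w'] with shape (num_edges, 1) before calling forward, and rescaling the attention after the softmax is just one possible formulation):

```python
# Reuse the GATLayer from the first post and rescale its normalised attention
# by a static edge weight stored in edata['w'] (shape (num_edges, 1)).
class WeightedGATLayer(GATLayer):
    def edge_softmax(self):
        attention = self.softmax(self.g, self.g.edata.pop('a'))
        # broadcast the (E, 1) weight over the (E, num_heads, 1) attention
        attention = attention * self.g.edata['w'].unsqueeze(1)
        self.g.edata['a_drop'] = self.attn_drop(attention)
```

Since GATLayer.clean_data() only pops the temporary fields, 'w' stays on the graph, so stacked layers (e.g. both layers in GATClassifier) can reuse it.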
