|
| 1 | +import sys |
| 2 | +import torch |
| 3 | +import math |
| 4 | +from torch import nn |
| 5 | +from torch_geometric.nn import TAGConv |
| 6 | +import yaml |
| 7 | + |
class GNNBackBone(nn.Module):
    """
    Multi-layer graph convolutional backbone built from TAGConv layers.

    Stacks ``num_layers`` TAGConv convolutions (each aggregating over
    ``num_hops`` hops) mapping ``input_dim`` features to ``latent_size``.
    A ReLU activation follows every convolution, including the final one.
    Each layer is made TorchScript-compatible via ``.jittable()`` so the
    whole module can be compiled with ``torch.jit.script``.
    """
    def __init__(self, input_dim, num_layers, num_hops, latent_size):
        super().__init__()

        self.input_dim_ = input_dim
        self.num_layers_ = num_layers
        self.num_hops_ = num_hops
        self.latent_size_ = latent_size

        # Per-layer channel widths: input_dim -> latent -> ... -> latent.
        channels = [self.input_dim_] + [self.latent_size_] * self.num_layers_

        # One TAGConv per consecutive (in, out) channel pair.
        self.graph_convs = nn.ModuleList(
            TAGConv(in_channels=c_in, out_channels=c_out, K=self.num_hops_).jittable()
            for c_in, c_out in zip(channels[:-1], channels[1:])
        )

    def forward(self, x, edge_index, edge_weight) -> torch.Tensor:
        # Apply each graph convolution followed by a ReLU non-linearity.
        out = x
        for conv in self.graph_convs:
            out = torch.relu(conv(out, edge_index, edge_weight))
        return out
| 33 | + |
if __name__ == "__main__":
    # Usage: python this_script.py <config.yaml> <output_torchscript_path>
    # Validate arguments up front: without this, a missing argument raises
    # an unhelpful IndexError from sys.argv indexing.
    if len(sys.argv) < 3:
        sys.exit(f"Usage: {sys.argv[0]} <config_yaml> <output_torchscript_path>")
    config_file = sys.argv[1]  # argv entries are already str; no cast needed
    script_file = sys.argv[2]

    # Load the GNNBackBone hyperparameter section from the YAML config.
    with open(config_file, 'r') as stream:
        config = yaml.safe_load(stream)['GNNBackBone']
    print(config)

    # Build the model, compile it with TorchScript, and serialize to disk.
    scripted_model = torch.jit.script(
        GNNBackBone(config['InputDim'], config['NumLayers'],
                    config['NumHops'], config['LatentSize'])
    )
    scripted_model.save(script_file)
0 commit comments