#
# Copyright (C) Stanislaw Adaszewski, 2020
# License: GPLv3
#

import torch
from .dropout import dropout
from .weights import init_glorot
from typing import Callable
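

# GraphConv implements a single graph-convolution step, A · (X · W):
# node features are first projected by a learned weight matrix and then
# aggregated over the graph through the adjacency matrix. Both the input
# features and the adjacency matrix may be sparse or dense.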
class GraphConv(torch.nn.Module):
    def __init__(self, in_channels: int, out_channels: int,
            adjacency_matrix: torch.Tensor, **kwargs) -> None:
        super().__init__(**kwargs)
        self.in_channels = in_channels
        self.out_channels = out_channels
        # Glorot-initialized weight matrix of shape (in_channels, out_channels)
        self.weight = torch.nn.Parameter(init_glorot(in_channels, out_channels))
        self.adjacency_matrix = adjacency_matrix

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Feature projection: X · W, using the sparse matmul when needed
        x = torch.sparse.mm(x, self.weight) \
            if x.is_sparse \
            else torch.mm(x, self.weight)
        # Neighborhood aggregation: A · (X · W), again sparse-aware
        x = torch.sparse.mm(self.adjacency_matrix, x) \
            if self.adjacency_matrix.is_sparse \
            else torch.mm(self.adjacency_matrix, x)
        return x
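

# DropoutGraphConvActivation chains the three standard steps of a graph
# convolution block: dropout on the input features, the GraphConv layer
# above, and a pointwise activation (ReLU by default).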
class DropoutGraphConvActivation(torch.nn.Module):
    def __init__(self, input_dim: int, output_dim: int,
            adjacency_matrix: torch.Tensor, keep_prob: float = 1.,
            activation: Callable[[torch.Tensor], torch.Tensor] = torch.nn.functional.relu,
            **kwargs) -> None:
        super().__init__(**kwargs)
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.adjacency_matrix = adjacency_matrix
        self.keep_prob = keep_prob
        self.activation = activation
        self.graph_conv = GraphConv(input_dim, output_dim, adjacency_matrix)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = dropout(x, self.keep_prob)  # keep each input entry with probability keep_prob
        x = self.graph_conv(x)          # A · (X · W)
        x = self.activation(x)          # pointwise non-linearity
        return x
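
Below is a minimal usage sketch, not part of the original file. It assumes the module above is importable as `convolve` alongside its `.dropout` and `.weights` siblings; that import path is hypothetical, and a small dense identity adjacency matrix stands in for a real (typically normalized, sparse) graph.

    import torch
    from convolve import GraphConv, DropoutGraphConvActivation  # hypothetical import path

    adj = torch.eye(4)     # toy 4-node graph: self-loops only, kept dense for simplicity
    x = torch.randn(4, 8)  # one 8-dimensional feature vector per node

    conv = GraphConv(8, 16, adj)
    out = conv(x)          # A · (X · W), shape (4, 16)

    layer = DropoutGraphConvActivation(8, 16, adj, keep_prob=0.5)
    out = layer(x)         # dropout -> graph conv -> ReLU, shape (4, 16)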