from .fastconv import FastConvLayer
from .bulkdec import BulkDecodeLayer
from .input import OneHotInputLayer
from .trainprep import PreparedData
import torch
import types
from typing import List, \
    Union, \
    Callable


class FastModel(torch.nn.Module):
    def __init__(self, prep_d: PreparedData,
            layer_dimensions: List[int] = [32, 64],
            keep_prob: float = 1.,
            rel_activation: Callable[[torch.Tensor], torch.Tensor] = lambda x: x,
            layer_activation: Callable[[torch.Tensor], torch.Tensor] = torch.nn.functional.relu,
            dec_activation: Callable[[torch.Tensor], torch.Tensor] = lambda x: x,
            **kwargs) -> None:

        super().__init__(**kwargs)

        self._check_params(prep_d, layer_dimensions, rel_activation,
            layer_activation, dec_activation)

        self.prep_d = prep_d
        self.layer_dimensions = layer_dimensions
        self.keep_prob = float(keep_prob)
        self.rel_activation = rel_activation
        self.layer_activation = layer_activation
        self.dec_activation = dec_activation

        self.seq = None
        self.build()

    def build(self):
        # Assemble the model: a one-hot input layer, a stack of graph
        # convolutions (one per entry in layer_dimensions), and a bulk
        # decoding layer, all wrapped in an nn.Sequential.
        in_layer = OneHotInputLayer(self.prep_d)
        last_output_dim = in_layer.output_dim
        seq = [ in_layer ]

        for dim in self.layer_dimensions:
            conv_layer = FastConvLayer(input_dim=last_output_dim,
                output_dim=[dim] * len(self.prep_d.node_types),
                data=self.prep_d,
                keep_prob=self.keep_prob,
                rel_activation=self.rel_activation,
                layer_activation=self.layer_activation)
            last_output_dim = conv_layer.output_dim
            seq.append(conv_layer)

        dec_layer = BulkDecodeLayer(input_dim=last_output_dim,
            data=self.prep_d,
            keep_prob=self.keep_prob,
            activation=self.dec_activation)
        seq.append(dec_layer)

        self.seq = torch.nn.Sequential(*seq)

    def forward(self, _):
        # The argument is ignored; OneHotInputLayer produces the initial
        # node representations itself.
        return self.seq(None)

    def _check_params(self, prep_d, layer_dimensions, rel_activation,
            layer_activation, dec_activation):

        if not isinstance(prep_d, PreparedData):
            raise TypeError('prep_d must be an instance of PreparedData')

        if not isinstance(layer_dimensions, list):
            raise TypeError('layer_dimensions must be a list')

        if not isinstance(rel_activation, types.FunctionType):
            raise TypeError('rel_activation must be a function')

        if not isinstance(layer_activation, types.FunctionType):
            raise TypeError('layer_activation must be a function')

        if not isinstance(dec_activation, types.FunctionType):
            raise TypeError('dec_activation must be a function')