

from icosagon.input import InputLayer, OneHotInputLayer
from icosagon.data import Data
import torch
import pytest


def _some_data():
    # Toy heterogeneous graph: two node types (Gene: 1000 nodes, Drug: 100 nodes)
    # and five relation types, each with a dense random adjacency matrix.
    d = Data()
    d.add_node_type('Gene', 1000)
    d.add_node_type('Drug', 100)
    d.add_relation_type('Target', 1, 0, torch.rand(100, 1000))
    d.add_relation_type('Interaction', 0, 0, torch.rand(1000, 1000))
    d.add_relation_type('Side Effect: Nausea', 1, 1, torch.rand(100, 100))
    d.add_relation_type('Side Effect: Infertility', 1, 1, torch.rand(100, 100))
    d.add_relation_type('Side Effect: Death', 1, 1, torch.rand(100, 100))
    return d


def _some_data_with_interactions():
    # Same graph, but with adjacency matrices rounded to binary (0/1) entries.
    d = Data()
    d.add_node_type('Gene', 1000)
    d.add_node_type('Drug', 100)
    d.add_relation_type('Target', 1, 0,
        torch.rand((100, 1000), dtype=torch.float32).round())
    d.add_relation_type('Interaction', 0, 0,
        torch.rand((1000, 1000), dtype=torch.float32).round())
    d.add_relation_type('Side Effect: Nausea', 1, 1,
        torch.rand((100, 100), dtype=torch.float32).round())
    d.add_relation_type('Side Effect: Infertility', 1, 1,
        torch.rand((100, 100), dtype=torch.float32).round())
    d.add_relation_type('Side Effect: Death', 1, 1,
        torch.rand((100, 100), dtype=torch.float32).round())
    return d


def test_input_layer_01():
    # InputLayer should create one representation per node type,
    # with the requested output dimension.
    d = _some_data()
    for output_dim in [32, 64, 128]:
        layer = InputLayer(d, output_dim)
        assert layer.output_dim[0] == output_dim
        assert len(layer.node_reps) == 2
        assert layer.node_reps[0].shape == (1000, output_dim)
        assert layer.node_reps[1].shape == (100, output_dim)
        assert layer.data == d


def test_input_layer_02():
    # Calling the layer returns its node representations unchanged.
    d = _some_data()
    layer = InputLayer(d, 32)
    res = layer(None)
    assert isinstance(res[0], torch.Tensor)
    assert isinstance(res[1], torch.Tensor)
    assert res[0].shape == (1000, 32)
    assert res[1].shape == (100, 32)
    assert torch.all(res[0] == layer.node_reps[0])
    assert torch.all(res[1] == layer.node_reps[1])


def test_input_layer_03():
    # Moving the layer to a CUDA device should also move its node
    # representations (skipped when no GPU is available).
    if torch.cuda.device_count() == 0:
        pytest.skip('No CUDA devices on this host')
    d = _some_data()
    layer = InputLayer(d, 32)
    device = torch.device('cuda:0')
    layer = layer.to(device)
    print(list(layer.parameters()))
    # assert layer.device.type == 'cuda:0'
    assert layer.node_reps[0].device == device
    assert layer.node_reps[1].device == device


def test_one_hot_input_layer_01():
    # OneHotInputLayer should build sparse one-hot representations
    # sized by the node counts.
    d = _some_data()
    layer = OneHotInputLayer(d)
    assert layer.output_dim == [1000, 100]
    assert len(layer.node_reps) == 2
    assert layer.node_reps[0].shape == (1000, 1000)
    assert layer.node_reps[1].shape == (100, 100)
    assert layer.data == d
    assert layer.is_sparse


def test_one_hot_input_layer_02():
    # Calling the one-hot layer returns its (sparse) node representations unchanged.
    d = _some_data()
    layer = OneHotInputLayer(d)
    res = layer(None)
    assert isinstance(res[0], torch.Tensor)
    assert isinstance(res[1], torch.Tensor)
    assert res[0].shape == (1000, 1000)
    assert res[1].shape == (100, 100)
    assert torch.all(res[0].to_dense() == layer.node_reps[0].to_dense())
    assert torch.all(res[1].to_dense() == layer.node_reps[1].to_dense())


def test_one_hot_input_layer_03():
    # Same device-transfer check as above, for the one-hot layer
    # (skipped when no GPU is available).
    if torch.cuda.device_count() == 0:
        pytest.skip('No CUDA devices on this host')
    d = _some_data()
    layer = OneHotInputLayer(d)
    device = torch.device('cuda:0')
    layer = layer.to(device)
    print(list(layer.parameters()))
    # assert layer.device.type == 'cuda:0'
    assert layer.node_reps[0].device == device
    assert layer.node_reps[1].device == device
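
The same API can also be exercised interactively, outside of pytest. Below is a minimal sketch that reuses the fixture pattern from this file; it assumes icosagon is importable exactly as in the imports above, and the printed shapes correspond to the assertions in test_one_hot_input_layer_01 and test_one_hot_input_layer_02.

import torch

from icosagon.data import Data
from icosagon.input import OneHotInputLayer

# Same toy graph as _some_data() above.
d = Data()
d.add_node_type('Gene', 1000)
d.add_node_type('Drug', 100)
d.add_relation_type('Target', 1, 0, torch.rand(100, 1000))
d.add_relation_type('Interaction', 0, 0, torch.rand(1000, 1000))
d.add_relation_type('Side Effect: Nausea', 1, 1, torch.rand(100, 100))
d.add_relation_type('Side Effect: Infertility', 1, 1, torch.rand(100, 100))
d.add_relation_type('Side Effect: Death', 1, 1, torch.rand(100, 100))

layer = OneHotInputLayer(d)
res = layer(None)

# Per the tests above, one (sparse) representation per node type is expected,
# with shapes (1000, 1000) and (100, 100).
print(res[0].shape, res[1].shape)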