@@ -9,6 +9,8 @@ from icosagon.convlayer import DecagonLayer
 from icosagon.declayer import DecodeLayer
 from icosagon.decode import DEDICOMDecoder
 from icosagon.data import Data
+from icosagon.trainprep import prepare_training, \
+    TrainValTest
 import torch
@@ -17,11 +19,12 @@ def test_decode_layer_01():
     d.add_node_type('Dummy', 100)
     d.add_relation_type('Dummy Relation 1', 0, 0,
         torch.rand((100, 100), dtype=torch.float32).round().to_sparse())
+    prep_d = prepare_training(d, TrainValTest(.8, .1, .1))
     in_layer = OneHotInputLayer(d)
     d_layer = DecagonLayer(in_layer.output_dim, 32, d)
     seq = torch.nn.Sequential(in_layer, d_layer)
     last_layer_repr = seq(None)
-    dec = DecodeLayer(input_dim=d_layer.output_dim, data=d, keep_prob=1.,
+    dec = DecodeLayer(input_dim=d_layer.output_dim, data=prep_d, keep_prob=1.,
         decoder_class=DEDICOMDecoder, activation=lambda x: x)
     pred_adj_matrices = dec(last_layer_repr)
     assert isinstance(pred_adj_matrices, dict)
@@ -35,10 +38,11 @@ def test_decode_layer_02():
     d.add_node_type('Dummy', 100)
     d.add_relation_type('Dummy Relation 1', 0, 0,
         torch.rand((100, 100), dtype=torch.float32).round().to_sparse())
+    prep_d = prepare_training(d, TrainValTest(.8, .1, .1))
 
     in_layer = OneHotInputLayer(d)
     d_layer = DecagonLayer(in_layer.output_dim, 32, d)
-    dec_layer = DecodeLayer(input_dim=d_layer.output_dim, data=d, keep_prob=1.,
+    dec_layer = DecodeLayer(input_dim=d_layer.output_dim, data=prep_d, keep_prob=1.,
         decoder_class=DEDICOMDecoder, activation=lambda x: x)
     seq = torch.nn.Sequential(in_layer, d_layer, dec_layer)
 
@@ -56,10 +60,11 @@ def test_decode_layer_03():
     d.add_node_type('Dummy 2', 100)
     d.add_relation_type('Dummy Relation 1', 0, 1,
         torch.rand((100, 100), dtype=torch.float32).round().to_sparse())
+    prep_d = prepare_training(d, TrainValTest(.8, .1, .1))
 
     in_layer = OneHotInputLayer(d)
     d_layer = DecagonLayer(in_layer.output_dim, 32, d)
-    dec_layer = DecodeLayer(input_dim=d_layer.output_dim, data=d, keep_prob=1.,
+    dec_layer = DecodeLayer(input_dim=d_layer.output_dim, data=prep_d, keep_prob=1.,
         decoder_class={(0, 1): DEDICOMDecoder}, activation=lambda x: x)
     seq = torch.nn.Sequential(in_layer, d_layer, dec_layer)
 
@@ -77,9 +82,11 @@ def test_decode_layer_04():
     d.add_node_type('Dummy', 100)
     assert len(d.relation_types[0, 0]) == 0
 
+    prep_d = prepare_training(d, TrainValTest(.8, .1, .1))
+
     in_layer = OneHotInputLayer(d)
     d_layer = DecagonLayer(in_layer.output_dim, 32, d)
-    dec_layer = DecodeLayer(input_dim=d_layer.output_dim, data=d, keep_prob=1.,
+    dec_layer = DecodeLayer(input_dim=d_layer.output_dim, data=prep_d, keep_prob=1.,
         decoder_class=DEDICOMDecoder, activation=lambda x: x)
     seq = torch.nn.Sequential(in_layer, d_layer, dec_layer)
 
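Taken together, the patched tests exercise the following flow: build a Data object, split it with prepare_training(), and pass the resulting prep_d (rather than the raw Data) to DecodeLayer. Below is a minimal sketch of that flow, assembled only from the calls visible in the hunks above; the OneHotInputLayer import path and the bare Data() constructor are assumptions not confirmed by this diff.

# Minimal sketch of the flow exercised by the patched tests.
# Assumptions: Data() takes no arguments and OneHotInputLayer lives in
# icosagon.input -- neither is confirmed by the hunks above.
from icosagon.input import OneHotInputLayer   # assumed import path
from icosagon.convlayer import DecagonLayer
from icosagon.declayer import DecodeLayer
from icosagon.decode import DEDICOMDecoder
from icosagon.data import Data
from icosagon.trainprep import prepare_training, TrainValTest
import torch

d = Data()                                    # assumed constructor
d.add_node_type('Dummy', 100)
d.add_relation_type('Dummy Relation 1', 0, 0,
    torch.rand((100, 100), dtype=torch.float32).round().to_sparse())

# Split the raw Data into train/val/test folds; DecodeLayer now consumes
# this prepared object instead of the raw Data.
prep_d = prepare_training(d, TrainValTest(.8, .1, .1))

# Input -> convolution produces the node representations to decode.
in_layer = OneHotInputLayer(d)
d_layer = DecagonLayer(in_layer.output_dim, 32, d)
seq = torch.nn.Sequential(in_layer, d_layer)
last_layer_repr = seq(None)

# Decode the representations into predicted adjacency matrices.
dec = DecodeLayer(input_dim=d_layer.output_dim, data=prep_d, keep_prob=1.,
    decoder_class=DEDICOMDecoder, activation=lambda x: x)
pred_adj_matrices = dec(last_layer_repr)
assert isinstance(pred_adj_matrices, dict)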