
Change test in test_unigram_03(): compare aggregated per-class sample counts via Wasserstein distance instead of per-class t-tests.

master
Stanislaw Adaszewski, 4 years ago
parent commit 094813b298
2 changed files with 11 additions and 48 deletions
  1. src/icosagon/convlayer.py (+0, -33)
  2. tests/icosagon/test_sampling.py (+11, -15)

src/icosagon/convlayer.py (+0, -33)

@@ -68,33 +68,6 @@ class DecagonLayer(torch.nn.Module):
         self.next_layer_repr[fam.node_type_row].append(
             Convolutions(fam.node_type_column, convolutions))
 
-    # def build_fam_two_node_types_sym(self, fam) -> None:
-    #     convolutions_row = torch.nn.ModuleList()
-    #     convolutions_column = torch.nn.ModuleList()
-    #
-    #     if self.input_dim[fam.node_type_column] != \
-    #         self.input_dim[fam.node_type_row]:
-    #         raise ValueError('input_dim for row and column must be equal for a symmetric family')
-    #
-    #     if self.output_dim[fam.node_type_column] != \
-    #         self.output_dim[fam.node_type_row]:
-    #         raise ValueError('output_dim for row and column must be equal for a symmetric family')
-    #
-    #     for r in fam.relation_types:
-    #         assert r.adjacency_matrix is not None and \
-    #             r.adjacency_matrix_backward is not None
-    #         conv = DropoutGraphConvActivation(self.input_dim[fam.node_type_column],
-    #             self.output_dim[fam.node_type_row], r.adjacency_matrix,
-    #             self.keep_prob, self.rel_activation)
-    #         convolutions_row.append(conv)
-    #         convolutions_column.append(conv.clone(r.adjacency_matrix_backward))
-    #
-    #     self.next_layer_repr[fam.node_type_row].append(
-    #         Convolutions(fam.node_type_column, convolutions_row))
-    #
-    #     self.next_layer_repr[fam.node_type_column].append(
-    #         Convolutions(fam.node_type_row, convolutions_column))
-
     def build_fam_two_node_types(self, fam) -> None:
         convolutions_row = torch.nn.ModuleList()
         convolutions_column = torch.nn.ModuleList()

@@ -118,12 +91,6 @@ class DecagonLayer(torch.nn.Module):
         self.next_layer_repr[fam.node_type_column].append(
             Convolutions(fam.node_type_row, convolutions_column))
 
-    # def build_fam_two_node_types(self, fam) -> None:
-    #     if fam.is_symmetric:
-    #         self.build_fam_two_node_types_sym(fam)
-    #     else:
-    #         self.build_fam_two_node_types_asym(fam)
-
     def build_family(self, fam) -> None:
         if fam.node_type_row == fam.node_type_column:
             self.build_fam_one_node_type(fam)
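The second hunk cuts off before the end of build_family. A minimal sketch of how the dispatch presumably continues, assuming the remaining branch simply delegates to build_fam_two_node_types; the else branch is an inference from the methods kept in this file, not part of the diff:

# Presumed completion of build_family -- the else branch is an
# assumption inferred from the methods defined above, not shown here.
def build_family(self, fam) -> None:
    if fam.node_type_row == fam.node_type_column:
        # Same node type on both sides: one set of convolutions suffices.
        self.build_fam_one_node_type(fam)
    else:
        # Distinct row/column node types: build forward and backward
        # convolutions (see build_fam_two_node_types above).
        self.build_fam_two_node_types(fam)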


tests/icosagon/test_sampling.py (+11, -15)

@@ -118,7 +118,7 @@ def test_unigram_02():
 def test_unigram_03():
     range_max = 7
     distortion = 0.75
-    batch_size = 25
+    batch_size = 2500
     unigrams = [ 1, 3, 2, 1, 2, 1, 3]
     num_true = 1

@@ -129,8 +129,8 @@ def test_unigram_03():
     true_classes_tf = tf.convert_to_tensor(true_classes)
     true_classes_torch = torch.tensor(true_classes)
 
-    counts_tf = defaultdict(list)
-    counts_torch = defaultdict(list)
+    counts_tf = torch.zeros(range_max)
+    counts_torch = torch.zeros(range_max)
 
     for i in range(10):
         neg_samples, _, _ = tf.nn.fixed_unigram_candidate_sampler(

@@ -142,29 +142,25 @@ def test_unigram_03():
             distortion=distortion,
             unigrams=unigrams)
 
-        counts = defaultdict(int)
+        counts = torch.zeros(range_max)
 
         with tf.Session() as sess:
             neg_samples = neg_samples.eval()
         for x in neg_samples:
             counts[x.item()] += 1
-        for k, v in counts.items():
-            counts_tf[k].append(v)
+        counts_tf += counts
 
         neg_samples = icosagon.sampling.fixed_unigram_candidate_sampler(
             true_classes=true_classes,
             distortion=distortion,
             unigrams=unigrams)
 
-        counts = defaultdict(int)
+        counts = torch.zeros(range_max)
         for x in neg_samples:
             counts[x.item()] += 1
-        for k, v in counts.items():
-            counts_torch[k].append(v)
+        counts_torch += counts
 
-    for i in range(range_max):
-        print('counts_tf[%d]:' % i, counts_tf[i])
-        print('counts_torch[%d]:' % i, counts_torch[i])
+    print('counts_tf:', counts_tf)
+    print('counts_torch:', counts_torch)
 
-    for i in range(range_max):
-        statistic, pvalue = scipy.stats.ttest_ind(counts_tf[i], counts_torch[i])
-        assert pvalue * range_max > .05
+    distance = scipy.stats.wasserstein_distance(counts_tf, counts_torch)
+    assert distance < 2000
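For reference, the shape of the new acceptance check in isolation: the two per-class count vectors are treated as samples and compared with the first Wasserstein (earth mover's) distance. The count values below are invented for illustration; only scipy.stats.wasserstein_distance and the 2000 threshold come from the diff.

import scipy.stats

# Illustrative per-class totals after 10 rounds of 2500 samples each
# (made-up numbers for this sketch, not from an actual test run).
counts_tf    = [2300., 6900., 4600., 2250., 4700., 2280., 6970.]
counts_torch = [2350., 6850., 4550., 2300., 4650., 2330., 6920.]

# wasserstein_distance() treats each vector as a set of sample values and
# computes the first Wasserstein distance between the two empirical
# distributions; near-identical samplers should keep it small.
distance = scipy.stats.wasserstein_distance(counts_tf, counts_torch)
assert distance < 2000

Compared with the previous per-class t-tests and their Bonferroni-style correction (the old pvalue * range_max > .05 check), a single aggregated distance needs no multiple-comparison adjustment and is presumably less noisy than the small per-run counts the old batch_size = 25 produced.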
