# -*- coding: utf-8 -*-
"""
@description: Smoke tests for the SimpleSeq2Seq, Seq2Seq and
AttentionSeq2Seq models (one-epoch fits on random data).
"""

import numpy as np

from keras.utils.test_utils import keras_test

from seq2seq import SimpleSeq2Seq, Seq2Seq, AttentionSeq2Seq
10+
11+ input_length = 5
12+ input_dim = 3
13+
14+ output_length = 3
15+ output_dim = 4
16+
17+ samples = 100
18+ hidden_dim = 24
19+
20+
21+ @keras_test
22+ def test_SimpleSeq2Seq ():
23+ x = np .random .random ((samples , input_length , input_dim ))
24+ y = np .random .random ((samples , output_length , output_dim ))
25+
26+ models = []
27+ print (x )
28+ print (y )
29+ models += [SimpleSeq2Seq (output_dim = output_dim , hidden_dim = hidden_dim , output_length = output_length ,
30+ input_shape = (input_length , input_dim ))]
31+ models += [SimpleSeq2Seq (output_dim = output_dim , hidden_dim = hidden_dim , output_length = output_length ,
32+ input_shape = (input_length , input_dim ), depth = 2 )]
33+
34+ for model in models :
35+ model .compile (loss = 'mse' , optimizer = 'sgd' )
36+ model .fit (x , y , nb_epoch = 1 )
37+
38+
39+ @keras_test
40+ def test_Seq2Seq ():
41+ x = np .random .random ((samples , input_length , input_dim ))
42+ y = np .random .random ((samples , output_length , output_dim ))
43+
44+ models = []
45+ models += [Seq2Seq (output_dim = output_dim , hidden_dim = hidden_dim , output_length = output_length ,
46+ input_shape = (input_length , input_dim ))]
47+ models += [Seq2Seq (output_dim = output_dim , hidden_dim = hidden_dim , output_length = output_length ,
48+ input_shape = (input_length , input_dim ), peek = True )]
49+ models += [Seq2Seq (output_dim = output_dim , hidden_dim = hidden_dim , output_length = output_length ,
50+ input_shape = (input_length , input_dim ), depth = 2 )]
51+ models += [Seq2Seq (output_dim = output_dim , hidden_dim = hidden_dim , output_length = output_length ,
52+ input_shape = (input_length , input_dim ), peek = True , depth = 2 )]
53+
54+ for model in models :
55+ model .compile (loss = 'mse' , optimizer = 'sgd' )
56+ model .fit (x , y , epochs = 1 )
57+
58+ model = Seq2Seq (output_dim = output_dim , hidden_dim = hidden_dim , output_length = output_length ,
59+ input_shape = (input_length , input_dim ), peek = True , depth = 2 , teacher_force = True )
60+ model .compile (loss = 'mse' , optimizer = 'sgd' )
61+ model .fit ([x , y ], y , epochs = 1 )
62+
63+
64+ @keras_test
65+ def test_AttentionSeq2Seq ():
66+ x = np .random .random ((samples , input_length , input_dim ))
67+ y = np .random .random ((samples , output_length , output_dim ))
68+
69+ models = []
70+ models += [AttentionSeq2Seq (output_dim = output_dim , hidden_dim = hidden_dim , output_length = output_length ,
71+ input_shape = (input_length , input_dim ))]
72+ models += [AttentionSeq2Seq (output_dim = output_dim , hidden_dim = hidden_dim , output_length = output_length ,
73+ input_shape = (input_length , input_dim ), depth = 2 )]
74+ models += [AttentionSeq2Seq (output_dim = output_dim , hidden_dim = hidden_dim , output_length = output_length ,
75+ input_shape = (input_length , input_dim ), depth = 3 )]
76+
77+ for model in models :
78+ model .compile (loss = 'mse' , optimizer = 'sgd' )
79+ model .fit (x , y , epochs = 1 )
80+
81+ # test_SimpleSeq2Seq()
82+ # test_Seq2Seq()
83+ # test_AttentionSeq2Seq()
84+ from seq2seq .models import AttentionSeq2Seq
85+
86+ model = AttentionSeq2Seq (input_dim = 5 , input_length = 7 , hidden_dim = 10 , output_length = 8 , output_dim = 20 , depth = 4 )
87+ model .compile (loss = 'mse' , optimizer = 'rmsprop' )
0 commit comments