@@ -793,5 +793,164 @@ def func(x):
793
793
return tf .identity (y [0 ], name = "output" )
794
794
self .run_test_case (func , {"input:0" : x_val }, [], ["output:0" ], rtol = 1e-05 , atol = 1e-06 )
795
795
796
+ @check_tf_min_version ("2.0" )
797
+ @skip_tf_versions ("2.1" , "Bug in TF 2.1" )
798
+ def test_keras_masked_lstm_embedding_unidirectional (self ):
799
+ for go_backwards in [True , False ]:
800
+ timesteps = 4
801
+ # Note: masked LSTM only support post-padded input after conversion
802
+ # test case sequence_lens = [4, 2, 0]
803
+ x_val = np .array ([
804
+ [1 , 2 , 3 , 4 ],
805
+ [5 , 6 , 0 , 0 ],
806
+ [0 , 0 , 0 , 0 ]
807
+ ], dtype = np .int32 )
808
+
809
+ model_in = tf .keras .layers .Input ((timesteps ,), dtype = "int32" )
810
+ x_embedding = tf .keras .layers .Embedding (
811
+ input_dim = 10 ,
812
+ output_dim = 5 ,
813
+ mask_zero = True ,
814
+ embeddings_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 41 ),
815
+ )(model_in )
816
+
817
+ # RNN layer inherits the mask propagated from above embedding layer
818
+ model_out = tf .keras .layers .LSTM (
819
+ units = 5 ,
820
+ go_backwards = go_backwards ,
821
+ return_state = True ,
822
+ kernel_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 42 ),
823
+ bias_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 43 ),
824
+ recurrent_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 44 ),
825
+ )(x_embedding )
826
+ model = tf .keras .models .Model (inputs = model_in , outputs = model_out )
827
+
828
+ def func (x ):
829
+ y = model (x )
830
+ # skiping output Y: https://github.com/microsoft/onnxruntime/issues/12492
831
+ return (tf .identity (y [1 ], name = "output_yh" ),
832
+ tf .identity (y [2 ], name = "output_yc" ))
833
+
834
+ output_list = ["output_yh:0" , "output_yc:0" ]
835
+ self .run_test_case (func , {"input:0" : x_val }, [], output_list , rtol = 1e-05 , atol = 1e-06 )
836
+
837
+ @check_tf_min_version ("2.0" )
838
+ @skip_tf_versions ("2.1" , "Bug in TF 2.1" )
839
+ def test_keras_masked_lstm_embedding_bidirectional (self ):
840
+ timesteps = 4
841
+ # Note: masked LSTM only support post-padded input after conversion
842
+ # test case sequence_lens = [4, 2, 0]
843
+ x_val = np .array ([
844
+ [1 , 2 , 3 , 4 ],
845
+ [5 , 6 , 0 , 0 ],
846
+ [0 , 0 , 0 , 0 ]
847
+ ], dtype = np .int32 )
848
+
849
+ model_in = tf .keras .layers .Input ((timesteps ,), dtype = "int32" )
850
+ x_embedding = tf .keras .layers .Embedding (
851
+ input_dim = 10 ,
852
+ output_dim = 5 ,
853
+ mask_zero = True ,
854
+ embeddings_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 41 ),
855
+ )(model_in )
856
+
857
+ # RNN layer inherits the mask propagated from above embedding layer
858
+ lstm_layer = tf .keras .layers .LSTM (
859
+ units = 5 ,
860
+ go_backwards = False ,
861
+ return_state = True ,
862
+ kernel_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 42 ),
863
+ bias_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 43 ),
864
+ recurrent_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 44 ),
865
+ )
866
+ model_out = tf .keras .layers .Bidirectional (lstm_layer )(x_embedding )
867
+ model = tf .keras .models .Model (inputs = model_in , outputs = model_out )
868
+
869
+ def func (x ):
870
+ y = model (x )
871
+ # skiping output Y: https://github.com/microsoft/onnxruntime/issues/12492
872
+ return (tf .identity (y [1 ], name = "output_yh_f" ),
873
+ tf .identity (y [2 ], name = "output_yc_f" ),
874
+ tf .identity (y [3 ], name = "output_yh_r" ),
875
+ tf .identity (y [4 ], name = "output_yc_r" ))
876
+
877
+ output_list = ["output_yh_f:0" , "output_yc_f:0" , "output_yh_r:0" , "output_yc_r:0" ]
878
+ self .run_test_case (func , {"input:0" : x_val }, [], output_list , rtol = 1e-05 , atol = 1e-06 ,
879
+ require_lstm_count = 2 )
880
+
881
+ @check_tf_min_version ("2.0" )
882
+ @skip_tf_versions ("2.1" , "Bug in TF 2.1" )
883
+ def test_keras_masked_lstm_unidirectional (self ):
884
+ for go_backwards in [True , False ]:
885
+ batch_size , timesteps , feat = 3 , 4 , 5
886
+ in_shape = (timesteps , feat )
887
+ x_val = np .random .uniform (size = [batch_size , timesteps , feat ]).astype (np .float32 )
888
+ # Note: masked LSTM only support post-padded input after conversion
889
+ # test case sequence_lens = [4, 2, 0]
890
+ x_val [1 , 2 :, :] = 0.
891
+ x_val [2 , :, :] = 0.
892
+
893
+ model_in = tf .keras .layers .Input (shape = in_shape , dtype = "float32" )
894
+ x_masked = tf .keras .layers .Masking (mask_value = 0. )(model_in )
895
+
896
+ # RNN layer inherits the mask propagated from above mask layer
897
+ model_out = tf .keras .layers .LSTM (
898
+ units = 5 ,
899
+ go_backwards = go_backwards ,
900
+ return_state = True ,
901
+ kernel_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 42 ),
902
+ bias_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 43 ),
903
+ recurrent_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 44 ),
904
+ )(x_masked )
905
+ model = tf .keras .models .Model (inputs = model_in , outputs = model_out )
906
+
907
+ def func (x ):
908
+ y = model (x )
909
+ # skiping output Y: https://github.com/microsoft/onnxruntime/issues/12492
910
+ return (tf .identity (y [1 ], name = "output_yh" ),
911
+ tf .identity (y [2 ], name = "output_yc" ))
912
+
913
+ output_list = ["output_yh:0" , "output_yc:0" ]
914
+ self .run_test_case (func , {"input:0" : x_val }, [], output_list , rtol = 1e-05 , atol = 1e-06 )
915
+
916
+ @check_tf_min_version ("2.0" )
917
+ @skip_tf_versions ("2.1" , "Bug in TF 2.1" )
918
+ def test_keras_masked_lstm_bidirectional (self ):
919
+ batch_size , timesteps , feat = 3 , 4 , 5
920
+ in_shape = (timesteps , feat )
921
+ x_val = np .random .uniform (size = [batch_size , timesteps , feat ]).astype (np .float32 )
922
+ # Note: masked LSTM only support post-padded input after conversion
923
+ # test case sequence_lens = [4, 2, 0]
924
+ x_val [1 , 2 :, :] = 0.
925
+ x_val [2 , :, :] = 0.
926
+
927
+ model_in = tf .keras .layers .Input (shape = in_shape , dtype = "float32" )
928
+ x_masked = tf .keras .layers .Masking (mask_value = 0. )(model_in )
929
+
930
+ # RNN layer inherits the mask propagated from above mask layer
931
+ lstm_layer = tf .keras .layers .LSTM (
932
+ units = 5 ,
933
+ go_backwards = False ,
934
+ return_state = True ,
935
+ kernel_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 42 ),
936
+ bias_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 43 ),
937
+ recurrent_initializer = tf .random_uniform_initializer (0.0 , 1.0 , seed = 44 ),
938
+ )
939
+ model_out = tf .keras .layers .Bidirectional (lstm_layer )(x_masked )
940
+ model = tf .keras .models .Model (inputs = model_in , outputs = model_out )
941
+
942
+ def func (x ):
943
+ y = model (x )
944
+ # skiping output Y: https://github.com/microsoft/onnxruntime/issues/12492
945
+ return (tf .identity (y [1 ], name = "output_yh_f" ),
946
+ tf .identity (y [2 ], name = "output_yc_f" ),
947
+ tf .identity (y [3 ], name = "output_yh_r" ),
948
+ tf .identity (y [4 ], name = "output_yc_r" ))
949
+
950
+ output_list = ["output_yh_f:0" , "output_yc_f:0" , "output_yh_r:0" , "output_yc_r:0" ]
951
+ self .run_test_case (func , {"input:0" : x_val }, [], output_list , rtol = 1e-05 , atol = 1e-06 ,
952
+ require_lstm_count = 2 )
953
+
954
+
796
955
# Script entry point: run the test suite when executed directly.
if __name__ == '__main__':
    unittest_main()