/aosp12/packages/modules/NeuralNetworks/runtime/test/specs/V1_0/

rnn_state.mod.py
    29: activation_param = Int32Scalar("activation_param", 1)  # Relu   [variable]
    35: activation_param).To([hidden_state_out, output])

svdf_state.mod.py
    30: activation_param = Int32Scalar("activation_param", 0)   [variable]
    35: rank_param, activation_param).To([state_out, output])

rnn.mod.py
    29: activation_param = Int32Scalar("activation_param", 1)  # Relu   [variable]
    35: activation_param).To([hidden_state_out, output])

svdf.mod.py
    32: activation_param = Int32Scalar("activation_param", 0)   [variable]
    37: rank_param, activation_param).To([state_out, output])

svdf2.mod.py
    32: activation_param = Int32Scalar("activation_param", 0)   [variable]
    37: rank_param, activation_param).To([state_out, output])

svdf_bias_present.mod.py
    32: activation_param = Int32Scalar("activation_param", 0)   [variable]
    37: rank_param, activation_param).To([state_out, output])

lstm2_state.mod.py
    54: activation_param = Int32Scalar("activation_param", 4)  # Tanh   [variable]
    91: activation_param,

lstm.mod.py
    54: activation_param = Int32Scalar("activation_param", 4)  # Tanh   [variable]
    91: activation_param,

lstm2.mod.py
    54: activation_param = Int32Scalar("activation_param", 4)  # Tanh   [variable]
    91: activation_param,

lstm2_state2.mod.py
    54: activation_param = Int32Scalar("activation_param", 4)  # Tanh   [variable]
    91: activation_param,

lstm3.mod.py
    54: activation_param = Int32Scalar("activation_param", 4)  # Tanh   [variable]
    91: activation_param,

lstm3_state.mod.py
    54: activation_param = Int32Scalar("activation_param", 4)  # Tanh   [variable]
    91: activation_param,

lstm3_state2.mod.py
    54: activation_param = Int32Scalar("activation_param", 4)  # Tanh   [variable]
    91: activation_param,
/aosp12/packages/modules/NeuralNetworks/runtime/test/specs/V1_1/

rnn_state_relaxed.mod.py
    29: activation_param = Int32Scalar("activation_param", 1)  # Relu   [variable]
    35: activation_param).To([hidden_state_out, output])

svdf_state_relaxed.mod.py
    30: activation_param = Int32Scalar("activation_param", 0)   [variable]
    35: rank_param, activation_param).To([state_out, output])

rnn_relaxed.mod.py
    29: activation_param = Int32Scalar("activation_param", 1)  # Relu   [variable]
    35: activation_param).To([hidden_state_out, output])

svdf2_relaxed.mod.py
    32: activation_param = Int32Scalar("activation_param", 0)   [variable]
    37: rank_param, activation_param).To([state_out, output])

svdf_bias_present_relaxed.mod.py
    32: activation_param = Int32Scalar("activation_param", 0)   [variable]
    37: rank_param, activation_param).To([state_out, output])

svdf_relaxed.mod.py
    32: activation_param = Int32Scalar("activation_param", 0)   [variable]
    37: rank_param, activation_param).To([state_out, output])
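The _relaxed variants declare activation_param and wire it into the operation exactly as their V1_0 counterparts do; the difference is that the model opts into relaxed float32 computation (float32 tensors may be evaluated with reduced, FP16-range precision), which arrived with NN HAL V1_1. A hedged sketch of the SVDF case follows; shapes are illustrative rather than copied from svdf_relaxed.mod.py, and Model/Input/Output/Int32Scalar/RelaxedExecution are assumed to come from the test_generator framework.

# Sketch of a "relaxed" SVDF spec; shapes and the state layout are simplified
# for illustration, not copied from svdf_relaxed.mod.py.
model = Model()

batches, units, input_size, memory_size, rank = 2, 4, 3, 10, 1
features = units * rank

input = Input("input", "TENSOR_FLOAT32", "{%d, %d}" % (batches, input_size))
weights_feature = Input("weights_feature", "TENSOR_FLOAT32", "{%d, %d}" % (features, input_size))
weights_time = Input("weights_time", "TENSOR_FLOAT32", "{%d, %d}" % (features, memory_size))
bias = Input("bias", "TENSOR_FLOAT32", "{%d}" % units)
state_in = Input("state_in", "TENSOR_FLOAT32", "{%d, %d}" % (batches, features * memory_size))

rank_param = Int32Scalar("rank_param", rank)
activation_param = Int32Scalar("activation_param", 0)  # 0 = no fused activation

state_out = Output("state_out", "TENSOR_FLOAT32", "{%d, %d}" % (batches, features * memory_size))
output = Output("output", "TENSOR_FLOAT32", "{%d, %d}" % (batches, units))

model = model.Operation("SVDF", input, weights_feature, weights_time, bias, state_in,
                        rank_param, activation_param).To([state_out, output])

# The intended difference from the V1_0 spec: request relaxed FP32 computation.
model = model.RelaxedExecution(True)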
/aosp12/packages/modules/NeuralNetworks/runtime/test/specs/V1_2/

svdf_state_float16.mod.py
    30: activation_param = Int32Scalar("activation_param", 0)   [variable]
    35: rank_param, activation_param).To([state_out, output])

layer_norm_lstm.mod.py
    73:  activation_param = Int32Scalar("activation_param", 4)  # Tanh   [variable]
    101: cell_state_in, activation_param, cell_clip_param, proj_clip_param,
    245: activation_param = Int32Scalar("activation_param", 4)  # Tanh   [variable]
    273: cell_state_in, activation_param, cell_clip_param, proj_clip_param,

rnn_float16.mod.py
    29: activation_param = Int32Scalar("activation_param", 1)  # Relu   [variable]
    35: activation_param).To([hidden_state_out, output])

svdf_bias_present_float16.mod.py
    32: activation_param = Int32Scalar("activation_param", 0)   [variable]
    37: rank_param, activation_param).To([state_out, output])

svdf_float16.mod.py
    32: activation_param = Int32Scalar("activation_param", 0)   [variable]
    37: rank_param, activation_param).To([state_out, output])

lstm2_state_float16.mod.py
    54: activation_param = Int32Scalar("activation_param", 4)  # Tanh   [variable]
    91: activation_param,
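In the V1_2 specs, the _float16 variants keep activation_param as an Int32Scalar; what changes is that the tensor operands are declared as TENSOR_FLOAT16. A compact, hedged sketch of the RNN float16 case, with illustrative literal shapes and the usual assumption that the test_generator helpers are in scope:

# Sketch of a float16 RNN spec: same structure as the V1_0 sketch earlier,
# with tensor operands declared as TENSOR_FLOAT16 while activation_param
# stays an Int32Scalar. Shapes are illustrative, not copied from rnn_float16.mod.py.
model = Model()

input = Input("input", "TENSOR_FLOAT16", "{2, 3}")                          # [batches, input_size]
weights = Input("weights", "TENSOR_FLOAT16", "{4, 3}")                      # [units, input_size]
recurrent_weights = Input("recurrent_weights", "TENSOR_FLOAT16", "{4, 4}")  # [units, units]
bias = Input("bias", "TENSOR_FLOAT16", "{4}")                               # [units]
hidden_state_in = Input("hidden_state_in", "TENSOR_FLOAT16", "{2, 4}")      # [batches, units]

activation_param = Int32Scalar("activation_param", 1)  # Relu, matching rnn_float16.mod.py line 29

hidden_state_out = Output("hidden_state_out", "TENSOR_FLOAT16", "{2, 4}")
output = Output("output", "TENSOR_FLOAT16", "{2, 4}")

model = model.Operation("RNN", input, weights, recurrent_weights, bias, hidden_state_in,
                        activation_param).To([hidden_state_out, output])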