Home
last modified time | relevance | path

Searched defs:tensor (Results 1 – 10 of 10) sorted by relevance

/packages/modules/NeuralNetworks/runtime/test/specs/V1_3/
bidirectional_sequence_rnn_1_3.mod.py:20 def convert_to_time_major(tensor, tensor_shape): argument
30 def reverse_batch_major(tensor, tensor_shape): argument
33 def split_tensor_in_two(tensor, tensor_shape): argument
bidirectional_sequence_rnn_state_output.mod.py:20 def convert_to_time_major(tensor, tensor_shape): argument
31 def reverse_batch_major(tensor, tensor_shape): argument
35 def split_tensor_in_two(tensor, tensor_shape): argument
unidirectional_sequence_rnn.mod.py:42 def convert_to_time_major(tensor, num_batches, max_time, input_size): argument
/packages/modules/NeuralNetworks/runtime/test/specs/V1_2/
bidirectional_sequence_rnn.mod.py:20 def convert_to_time_major(tensor, tensor_shape): argument
30 def reverse_batch_major(tensor, tensor_shape): argument
33 def split_tensor_in_two(tensor, tensor_shape): argument
unidirectional_sequence_rnn.mod.py:39 def convert_to_time_major(tensor, num_batches, max_time, input_size): argument
/packages/modules/NeuralNetworks/runtime/operation_converters/
SubGraphContext.cpp:54 int SubGraphContext::addTensorFlatbuffer(TensorFlatbuffer tensor, int32_t operandIdx) { in addTensorFlatbuffer()
205 TensorFlatbuffer tensor = tflite::CreateTensorDirect( in createTensorFlatbufferFromOperand() local
/packages/modules/NeuralNetworks/common/cpu_operations/
QuantizedLSTMTest.cpp:226 Result setInputTensor(Execution* execution, int tensor, const std::vector&lt;T&gt;&amp; data) { in setInputTensor()
230 Result setOutputTensor(Execution* execution, int tensor, std::vector<T>* data) { in setOutputTensor()
QLSTM.cpp:36 inline bool hasTensor(IOperationExecutionContext* context, const uint32_t tensor) { in hasTensor()
58 for (const int tensor : requiredTensorInputs) { in prepare() local
UnidirectionalSequenceLSTM.cpp:40 inline bool hasTensor(IOperationExecutionContext* context, const uint32_t tensor) { in hasTensor()
/packages/modules/OnDevicePersonalization/src/com/android/ondevicepersonalization/services/inference/
IsolatedModelServiceImpl.java:124 Tensor tensor = interpreter.getInputTensor(i); in runTfliteInterpreter() local