1 /*
2 * Copyright (C) 2022 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 // Contains all the entry points to the C Neural Networks API.
18 // We do basic validation of the operands and then call the class
19 // that implements the functionality.
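// For example (illustrative sketch only, not an actual NNAPI entry point), most functions below
// follow this shape:
//
//     int ANeuralNetworksFoo_bar(ANeuralNetworksFoo* foo) {
//         if (foo == nullptr) {  // basic validation of the arguments
//             LOG(ERROR) << "ANeuralNetworksFoo_bar passed a nullptr";
//             return ANEURALNETWORKS_UNEXPECTED_NULL;
//         }
//         // Delegate to the class that implements the functionality.
//         return reinterpret_cast<FooBuilder*>(foo)->bar();
//     }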
20
21 #define LOG_TAG "NeuralNetworks"
22
23 #include <ControlFlow.h>
24 #include <LegacyUtils.h>
25 #include <MetaModel.h>
26 #include <Tracing.h>
27 #include <nnapi/Types.h>
28
29 #include <algorithm>
30 #include <cstddef>
#include <cstring>  // for std::memcpy used below
31 #include <memory>
32 #include <utility>
33 #include <vector>
34
35 #include "BurstBuilder.h"
36 #include "CompilationBuilder.h"
37 #include "Event.h"
38 #include "ExecutionBuilder.h"
39 #include "ExecutionCallback.h"
40 #include "FlatbufferModelBuilder.h"
41 #include "Manager.h"
42 #include "Memory.h"
43 #include "NeuralNetworks.h"
44 #include "NeuralNetworksExtensions.h"
45 #include "NeuralNetworksOEM.h"
46 #include "Telemetry.h"
47
48 #pragma clang diagnostic push
49 #pragma clang diagnostic ignored "-Wunused-parameter"
50 #include "tensorflow/lite/interpreter.h"
51 #include "tensorflow/lite/kernels/register.h"
52 #include "tensorflow/lite/model.h"
53 #pragma clang diagnostic pop
54
55 using namespace android::nn;
56
57 // Make sure the constants defined in the header files have not changed values.
58 // IMPORTANT: When adding new values, update kNumberOfDataTypes or kNumberOfDataTypesOEM
59 // in Utils.h.
60 static_assert(ANEURALNETWORKS_FLOAT32 == 0, "ANEURALNETWORKS_FLOAT32 has changed");
61 static_assert(ANEURALNETWORKS_INT32 == 1, "ANEURALNETWORKS_INT32 has changed");
62 static_assert(ANEURALNETWORKS_UINT32 == 2, "ANEURALNETWORKS_UINT32 has changed");
63 static_assert(ANEURALNETWORKS_TENSOR_FLOAT32 == 3, "ANEURALNETWORKS_TENSOR_FLOAT32 has changed");
64 static_assert(ANEURALNETWORKS_TENSOR_INT32 == 4, "ANEURALNETWORKS_TENSOR_INT32 has changed");
65 static_assert(ANEURALNETWORKS_TENSOR_QUANT8_ASYMM == 5,
66 "ANEURALNETWORKS_TENSOR_QUANT8_ASYMM has changed");
67 static_assert(ANEURALNETWORKS_BOOL == 6, "ANEURALNETWORKS_BOOL has changed");
68 static_assert(ANEURALNETWORKS_TENSOR_QUANT16_SYMM == 7,
69 "ANEURALNETWORKS_TENSOR_QUANT16_SYMM has changed");
70 static_assert(ANEURALNETWORKS_TENSOR_FLOAT16 == 8, "ANEURALNETWORKS_TENSOR_FLOAT16 has changed");
71 static_assert(ANEURALNETWORKS_TENSOR_BOOL8 == 9, "ANEURALNETWORKS_TENSOR_BOOL8 has changed");
72 static_assert(ANEURALNETWORKS_FLOAT16 == 10, "ANEURALNETWORKS_FLOAT16 has changed");
73 static_assert(ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL == 11,
74 "ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL has changed");
75 static_assert(ANEURALNETWORKS_TENSOR_QUANT16_ASYMM == 12,
76 "ANEURALNETWORKS_TENSOR_QUANT16_ASYMM has changed");
77 static_assert(ANEURALNETWORKS_TENSOR_QUANT8_SYMM == 13,
78 "ANEURALNETWORKS_TENSOR_QUANT8_SYMM has changed");
79 static_assert(ANEURALNETWORKS_OEM_SCALAR == 10000, "ANEURALNETWORKS_OEM_SCALAR has changed");
80 static_assert(ANEURALNETWORKS_TENSOR_OEM_BYTE == 10001,
81 "ANEURALNETWORKS_TENSOR_OEM_BYTE has changed");
82
83 // IMPORTANT: When adding new values, update kNumberOfOperationTypes or
84 // kNumberOfOperationTypesOEM in Utils.h.
85 static_assert(ANEURALNETWORKS_ADD == 0, "ANEURALNETWORKS_ADD has changed");
86 static_assert(ANEURALNETWORKS_AVERAGE_POOL_2D == 1, "ANEURALNETWORKS_AVERAGE_POOL_2D has changed");
87 static_assert(ANEURALNETWORKS_CONCATENATION == 2, "ANEURALNETWORKS_CONCATENATION has changed");
88 static_assert(ANEURALNETWORKS_CONV_2D == 3, "ANEURALNETWORKS_CONV_2D has changed");
89 static_assert(ANEURALNETWORKS_DEPTHWISE_CONV_2D == 4,
90 "ANEURALNETWORKS_DEPTHWISE_CONV_2D has changed");
91 static_assert(ANEURALNETWORKS_DEPTH_TO_SPACE == 5, "ANEURALNETWORKS_DEPTH_TO_SPACE has changed");
92 static_assert(ANEURALNETWORKS_DEQUANTIZE == 6, "ANEURALNETWORKS_DEQUANTIZE has changed");
93 static_assert(ANEURALNETWORKS_EMBEDDING_LOOKUP == 7,
94 "ANEURALNETWORKS_EMBEDDING_LOOKUP has changed");
95 static_assert(ANEURALNETWORKS_FLOOR == 8, "ANEURALNETWORKS_FLOOR has changed");
96 static_assert(ANEURALNETWORKS_FULLY_CONNECTED == 9, "ANEURALNETWORKS_FULLY_CONNECTED has changed");
97 static_assert(ANEURALNETWORKS_HASHTABLE_LOOKUP == 10,
98 "ANEURALNETWORKS_HASHTABLE_LOOKUP has changed");
99 static_assert(ANEURALNETWORKS_L2_NORMALIZATION == 11,
100 "ANEURALNETWORKS_L2_NORMALIZATION has changed");
101 static_assert(ANEURALNETWORKS_L2_POOL_2D == 12, "ANEURALNETWORKS_L2_POOL_2D has changed");
102 static_assert(ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION == 13,
103 "ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION has changed");
104 static_assert(ANEURALNETWORKS_LOGISTIC == 14, "ANEURALNETWORKS_LOGISTIC has changed");
105 static_assert(ANEURALNETWORKS_LSH_PROJECTION == 15, "ANEURALNETWORKS_LSH_PROJECTION has changed");
106 static_assert(ANEURALNETWORKS_LSTM == 16, "ANEURALNETWORKS_LSTM has changed");
107 static_assert(ANEURALNETWORKS_MAX_POOL_2D == 17, "ANEURALNETWORKS_MAX_POOL_2D has changed");
108 static_assert(ANEURALNETWORKS_MUL == 18, "ANEURALNETWORKS_MUL has changed");
109 static_assert(ANEURALNETWORKS_RELU == 19, "ANEURALNETWORKS_RELU has changed");
110 static_assert(ANEURALNETWORKS_RELU1 == 20, "ANEURALNETWORKS_RELU1 has changed");
111 static_assert(ANEURALNETWORKS_RELU6 == 21, "ANEURALNETWORKS_RELU6 has changed");
112 static_assert(ANEURALNETWORKS_RESHAPE == 22, "ANEURALNETWORKS_RESHAPE has changed");
113 static_assert(ANEURALNETWORKS_RESIZE_BILINEAR == 23, "ANEURALNETWORKS_RESIZE_BILINEAR has changed");
114 static_assert(ANEURALNETWORKS_RNN == 24, "ANEURALNETWORKS_RNN has changed");
115 static_assert(ANEURALNETWORKS_SOFTMAX == 25, "ANEURALNETWORKS_SOFTMAX has changed");
116 static_assert(ANEURALNETWORKS_SPACE_TO_DEPTH == 26, "ANEURALNETWORKS_SPACE_TO_DEPTH has changed");
117 static_assert(ANEURALNETWORKS_SVDF == 27, "ANEURALNETWORKS_SVDF has changed");
118 static_assert(ANEURALNETWORKS_TANH == 28, "ANEURALNETWORKS_TANH has changed");
119
120 static_assert(ANEURALNETWORKS_BATCH_TO_SPACE_ND == 29,
121 "ANEURALNETWORKS_BATCH_TO_SPACE_ND has changed");
122 static_assert(ANEURALNETWORKS_DIV == 30, "ANEURALNETWORKS_DIV has changed");
123 static_assert(ANEURALNETWORKS_MEAN == 31, "ANEURALNETWORKS_MEAN has changed");
124 static_assert(ANEURALNETWORKS_PAD == 32, "ANEURALNETWORKS_PAD has changed");
125 static_assert(ANEURALNETWORKS_SPACE_TO_BATCH_ND == 33,
126 "ANEURALNETWORKS_SPACE_TO_BATCH_ND has changed");
127 static_assert(ANEURALNETWORKS_SQUEEZE == 34, "ANEURALNETWORKS_SQUEEZE has changed");
128 static_assert(ANEURALNETWORKS_STRIDED_SLICE == 35, "ANEURALNETWORKS_STRIDED_SLICE has changed");
129 static_assert(ANEURALNETWORKS_SUB == 36, "ANEURALNETWORKS_SUB has changed");
130 static_assert(ANEURALNETWORKS_TRANSPOSE == 37, "ANEURALNETWORKS_TRANSPOSE has changed");
131
132 static_assert(ANEURALNETWORKS_ABS == 38, "ANEURALNETWORKS_ABS has changed");
133 static_assert(ANEURALNETWORKS_ARGMAX == 39, "ANEURALNETWORKS_ARGMAX has changed");
134 static_assert(ANEURALNETWORKS_ARGMIN == 40, "ANEURALNETWORKS_ARGMIN has changed");
135 static_assert(ANEURALNETWORKS_AXIS_ALIGNED_BBOX_TRANSFORM == 41,
136 "ANEURALNETWORKS_AXIS_ALIGNED_BBOX_TRANSFORM has changed");
137 static_assert(ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_LSTM == 42,
138 "ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_LSTM has changed");
139 static_assert(ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_RNN == 43,
140 "ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_RNN has changed");
141 static_assert(ANEURALNETWORKS_BOX_WITH_NMS_LIMIT == 44,
142 "ANEURALNETWORKS_BOX_WITH_NMS_LIMIT has changed");
143 static_assert(ANEURALNETWORKS_CAST == 45, "ANEURALNETWORKS_CAST has changed");
144 static_assert(ANEURALNETWORKS_CHANNEL_SHUFFLE == 46, "ANEURALNETWORKS_CHANNEL_SHUFFLE has changed");
145 static_assert(ANEURALNETWORKS_DETECTION_POSTPROCESSING == 47,
146 "ANEURALNETWORKS_DETECTION_POSTPROCESSING has changed");
147 static_assert(ANEURALNETWORKS_EQUAL == 48, "ANEURALNETWORKS_EQUAL has changed");
148 static_assert(ANEURALNETWORKS_EXP == 49, "ANEURALNETWORKS_EXP has changed");
149 static_assert(ANEURALNETWORKS_EXPAND_DIMS == 50, "ANEURALNETWORKS_EXPAND_DIMS has changed");
150 static_assert(ANEURALNETWORKS_GATHER == 51, "ANEURALNETWORKS_GATHER has changed");
151 static_assert(ANEURALNETWORKS_GENERATE_PROPOSALS == 52,
152 "ANEURALNETWORKS_GENERATE_PROPOSALS has changed");
153 static_assert(ANEURALNETWORKS_GREATER == 53, "ANEURALNETWORKS_GREATER has changed");
154 static_assert(ANEURALNETWORKS_GREATER_EQUAL == 54, "ANEURALNETWORKS_GREATER_EQUAL has changed");
155 static_assert(ANEURALNETWORKS_GROUPED_CONV_2D == 55, "ANEURALNETWORKS_GROUPED_CONV_2D has changed");
156 static_assert(ANEURALNETWORKS_HEATMAP_MAX_KEYPOINT == 56,
157 "ANEURALNETWORKS_HEATMAP_MAX_KEYPOINT has changed");
158 static_assert(ANEURALNETWORKS_INSTANCE_NORMALIZATION == 57,
159 "ANEURALNETWORKS_INSTANCE_NORMALIZATION has changed");
160 static_assert(ANEURALNETWORKS_LESS == 58, "ANEURALNETWORKS_LESS has changed");
161 static_assert(ANEURALNETWORKS_LESS_EQUAL == 59, "ANEURALNETWORKS_LESS_EQUAL has changed");
162 static_assert(ANEURALNETWORKS_LOG == 60, "ANEURALNETWORKS_LOG has changed");
163 static_assert(ANEURALNETWORKS_LOGICAL_AND == 61, "ANEURALNETWORKS_LOGICAL_AND has changed");
164 static_assert(ANEURALNETWORKS_LOGICAL_NOT == 62, "ANEURALNETWORKS_LOGICAL_NOT has changed");
165 static_assert(ANEURALNETWORKS_LOGICAL_OR == 63, "ANEURALNETWORKS_LOGICAL_OR has changed");
166 static_assert(ANEURALNETWORKS_LOG_SOFTMAX == 64, "ANEURALNETWORKS_LOG_SOFTMAX has changed");
167 static_assert(ANEURALNETWORKS_MAXIMUM == 65, "ANEURALNETWORKS_MAXIMUM has changed");
168 static_assert(ANEURALNETWORKS_MINIMUM == 66, "ANEURALNETWORKS_MINIMUM has changed");
169 static_assert(ANEURALNETWORKS_NEG == 67, "ANEURALNETWORKS_NEG has changed");
170 static_assert(ANEURALNETWORKS_NOT_EQUAL == 68, "ANEURALNETWORKS_NOT_EQUAL has changed");
171 static_assert(ANEURALNETWORKS_PAD_V2 == 69, "ANEURALNETWORKS_PAD_V2 has changed");
172 static_assert(ANEURALNETWORKS_POW == 70, "ANEURALNETWORKS_POW has changed");
173 static_assert(ANEURALNETWORKS_PRELU == 71, "ANEURALNETWORKS_PRELU has changed");
174 static_assert(ANEURALNETWORKS_QUANTIZE == 72, "ANEURALNETWORKS_QUANTIZE has changed");
175 static_assert(ANEURALNETWORKS_QUANTIZED_16BIT_LSTM == 73,
176 "ANEURALNETWORKS_QUANTIZED_16BIT_LSTM has changed");
177 static_assert(ANEURALNETWORKS_RANDOM_MULTINOMIAL == 74,
178 "ANEURALNETWORKS_RANDOM_MULTINOMIAL has changed");
179 static_assert(ANEURALNETWORKS_REDUCE_ALL == 75, "ANEURALNETWORKS_REDUCE_ALL has changed");
180 static_assert(ANEURALNETWORKS_REDUCE_ANY == 76, "ANEURALNETWORKS_REDUCE_ANY has changed");
181 static_assert(ANEURALNETWORKS_REDUCE_MAX == 77, "ANEURALNETWORKS_REDUCE_MAX has changed");
182 static_assert(ANEURALNETWORKS_REDUCE_MIN == 78, "ANEURALNETWORKS_REDUCE_MIN has changed");
183 static_assert(ANEURALNETWORKS_REDUCE_PROD == 79, "ANEURALNETWORKS_REDUCE_PROD has changed");
184 static_assert(ANEURALNETWORKS_REDUCE_SUM == 80, "ANEURALNETWORKS_REDUCE_SUM has changed");
185 static_assert(ANEURALNETWORKS_ROI_ALIGN == 81, "ANEURALNETWORKS_ROI_ALIGN has changed");
186 static_assert(ANEURALNETWORKS_ROI_POOLING == 82, "ANEURALNETWORKS_ROI_POOLING has changed");
187 static_assert(ANEURALNETWORKS_RSQRT == 83, "ANEURALNETWORKS_RSQRT has changed");
188 static_assert(ANEURALNETWORKS_SELECT == 84, "ANEURALNETWORKS_SELECT has changed");
189 static_assert(ANEURALNETWORKS_SIN == 85, "ANEURALNETWORKS_SIN has changed");
190 static_assert(ANEURALNETWORKS_SLICE == 86, "ANEURALNETWORKS_SLICE has changed");
191 static_assert(ANEURALNETWORKS_SPLIT == 87, "ANEURALNETWORKS_SPLIT has changed");
192 static_assert(ANEURALNETWORKS_SQRT == 88, "ANEURALNETWORKS_SQRT has changed");
193 static_assert(ANEURALNETWORKS_TILE == 89, "ANEURALNETWORKS_TILE has changed");
194 static_assert(ANEURALNETWORKS_TOPK_V2 == 90, "ANEURALNETWORKS_TOPK_V2 has changed");
195 static_assert(ANEURALNETWORKS_TRANSPOSE_CONV_2D == 91,
196 "ANEURALNETWORKS_TRANSPOSE_CONV_2D has changed");
197 static_assert(ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_LSTM == 92,
198 "ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_LSTM has changed");
199 static_assert(ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_RNN == 93,
200 "ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_RNN has changed");
201 static_assert(ANEURALNETWORKS_RESIZE_NEAREST_NEIGHBOR == 94,
202 "ANEURALNETWORKS_RESIZE_NEAREST_NEIGHBOR has changed");
203 static_assert(ANEURALNETWORKS_QUANTIZED_LSTM == 95, "ANEURALNETWORKS_QUANTIZED_LSTM has changed");
204 static_assert(ANEURALNETWORKS_IF == 96, "ANEURALNETWORKS_IF has changed");
205 static_assert(ANEURALNETWORKS_WHILE == 97, "ANEURALNETWORKS_WHILE has changed");
206 static_assert(ANEURALNETWORKS_ELU == 98, "ANEURALNETWORKS_ELU has changed");
207 static_assert(ANEURALNETWORKS_HARD_SWISH == 99, "ANEURALNETWORKS_HARD_SWISH has changed");
208 static_assert(ANEURALNETWORKS_FILL == 100, "ANEURALNETWORKS_FILL has changed");
209 static_assert(ANEURALNETWORKS_RANK == 101, "ANEURALNETWORKS_RANK has changed");
210 static_assert(ANEURALNETWORKS_BATCH_MATMUL == 102, "ANEURALNETWORKS_BATCH_MATMUL has changed");
211 static_assert(ANEURALNETWORKS_PACK == 103, "ANEURALNETWORKS_PACK has changed");
212 static_assert(ANEURALNETWORKS_MIRROR_PAD == 104, "ANEURALNETWORKS_MIRROR_PAD has changed");
213 static_assert(ANEURALNETWORKS_REVERSE == 105, "ANEURALNETWORKS_REVERSE has changed");
214 static_assert(ANEURALNETWORKS_OEM_OPERATION == 10000, "ANEURALNETWORKS_OEM_OPERATION has changed");
215
216 static_assert(ANEURALNETWORKS_FUSED_NONE == 0, "ANEURALNETWORKS_FUSED_NONE has changed");
217 static_assert(ANEURALNETWORKS_FUSED_RELU == 1, "ANEURALNETWORKS_FUSED_RELU has changed");
218 static_assert(ANEURALNETWORKS_FUSED_RELU1 == 2, "ANEURALNETWORKS_FUSED_RELU1 has changed");
219 static_assert(ANEURALNETWORKS_FUSED_RELU6 == 3, "ANEURALNETWORKS_FUSED_RELU6 has changed");
220
221 static_assert(ANEURALNETWORKS_PREFER_LOW_POWER == 0,
222 "ANEURALNETWORKS_PREFER_LOW_POWER has changed");
223 static_assert(ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER == 1,
224 "ANEURALNETWORKS_PREFER_FAST_SINGLE_ANSWER has changed");
225 static_assert(ANEURALNETWORKS_PREFER_SUSTAINED_SPEED == 2,
226 "ANEURALNETWORKS_PREFER_SUSTAINED_SPEED has changed");
227
228 static_assert(ANEURALNETWORKS_NO_ERROR == 0, "ANEURALNETWORKS_NO_ERROR has changed");
229 static_assert(ANEURALNETWORKS_OUT_OF_MEMORY == 1, "ANEURALNETWORKS_OUT_OF_MEMORY has changed");
230 static_assert(ANEURALNETWORKS_INCOMPLETE == 2, "ANEURALNETWORKS_INCOMPLETE has changed");
231 static_assert(ANEURALNETWORKS_UNEXPECTED_NULL == 3, "ANEURALNETWORKS_UNEXPECTED_NULL has changed");
232 static_assert(ANEURALNETWORKS_BAD_DATA == 4, "ANEURALNETWORKS_BAD_DATA has changed");
233 static_assert(ANEURALNETWORKS_OP_FAILED == 5, "ANEURALNETWORKS_OP_FAILED has changed");
234 static_assert(ANEURALNETWORKS_BAD_STATE == 6, "ANEURALNETWORKS_BAD_STATE has changed");
235 static_assert(ANEURALNETWORKS_UNMAPPABLE == 7, "ANEURALNETWORKS_UNMAPPABLE has changed");
236 static_assert(ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE == 8,
237 "ANEURALNETWORKS_OUTPUT_INSUFFICIENT_SIZE has changed");
238 static_assert(ANEURALNETWORKS_UNAVAILABLE_DEVICE == 9,
239 "ANEURALNETWORKS_UNAVAILABLE_DEVICE has changed");
240 static_assert(ANEURALNETWORKS_MISSED_DEADLINE_TRANSIENT == 10,
241 "ANEURALNETWORKS_MISSED_DEADLINE_TRANSIENT has changed");
242 static_assert(ANEURALNETWORKS_MISSED_DEADLINE_PERSISTENT == 11,
243 "ANEURALNETWORKS_MISSED_DEADLINE_PERSISTENT has changed");
244 static_assert(ANEURALNETWORKS_RESOURCE_EXHAUSTED_TRANSIENT == 12,
245 "ANEURALNETWORKS_RESOURCE_EXHAUSTED_TRANSIENT has changed");
246 static_assert(ANEURALNETWORKS_RESOURCE_EXHAUSTED_PERSISTENT == 13,
247 "ANEURALNETWORKS_RESOURCE_EXHAUSTED_PERSISTENT has changed");
248 static_assert(ANEURALNETWORKS_DEAD_OBJECT == 14, "ANEURALNETWORKS_DEAD_OBJECT has changed");
249
250 static_assert(ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES == 128,
251 "ANEURALNETWORKS_MAX_SIZE_OF_IMMEDIATELY_COPIED_VALUES has changed");
252
253 static_assert(ANEURALNETWORKS_DEVICE_UNKNOWN == 0, "ANEURALNETWORKS_DEVICE_UNKNOWN has changed");
254 static_assert(ANEURALNETWORKS_DEVICE_OTHER == 1, "ANEURALNETWORKS_DEVICE_OTHER has changed");
255 static_assert(ANEURALNETWORKS_DEVICE_CPU == 2, "ANEURALNETWORKS_DEVICE_CPU has changed");
256 static_assert(ANEURALNETWORKS_DEVICE_GPU == 3, "ANEURALNETWORKS_DEVICE_GPU has changed");
257 static_assert(ANEURALNETWORKS_DEVICE_ACCELERATOR == 4,
258 "ANEURALNETWORKS_DEVICE_ACCELERATOR has changed");
259
260 static_assert(ANEURALNETWORKS_DURATION_ON_HARDWARE == 0,
261 "ANEURALNETWORKS_DURATION_ON_HARDWARE has changed");
262 static_assert(ANEURALNETWORKS_DURATION_IN_DRIVER == 1,
263 "ANEURALNETWORKS_DURATION_IN_DRIVER has changed");
264 static_assert(ANEURALNETWORKS_FENCED_DURATION_ON_HARDWARE == 2,
265 "ANEURALNETWORKS_FENCED_DURATION_ON_HARDWARE has changed");
266 static_assert(ANEURALNETWORKS_FENCED_DURATION_IN_DRIVER == 3,
267 "ANEURALNETWORKS_FENCED_DURATION_IN_DRIVER has changed");
268
269 // Make sure that the constants are compatible with the values defined in
270 // hardware/interfaces/neuralnetworks/1.0/types.hal.
271 static_assert(static_cast<int32_t>(OperandType::OEM) == ANEURALNETWORKS_OEM_SCALAR,
272               "OEM != ANEURALNETWORKS_OEM_SCALAR");
273 static_assert(static_cast<int32_t>(OperandType::FLOAT32) == ANEURALNETWORKS_FLOAT32,
274 "FLOAT32 != ANEURALNETWORKS_FLOAT32");
275 static_assert(static_cast<int32_t>(OperandType::INT32) == ANEURALNETWORKS_INT32,
276 "INT32 != ANEURALNETWORKS_INT32");
277 static_assert(static_cast<int32_t>(OperandType::UINT32) == ANEURALNETWORKS_UINT32,
278 "UINT32 != ANEURALNETWORKS_UINT32");
279 static_assert(static_cast<int32_t>(OperandType::TENSOR_OEM_BYTE) == ANEURALNETWORKS_TENSOR_OEM_BYTE,
280 "TENSOR_OEM_BYTE != ANEURALNETWORKS_TENSOR_OEM_BYTE");
281 static_assert(static_cast<int32_t>(OperandType::TENSOR_FLOAT32) == ANEURALNETWORKS_TENSOR_FLOAT32,
282 "TENSOR_FLOAT32 != ANEURALNETWORKS_TENSOR_FLOAT32");
283 static_assert(static_cast<int32_t>(OperandType::TENSOR_QUANT8_ASYMM) ==
284 ANEURALNETWORKS_TENSOR_QUANT8_ASYMM,
285 "TENSOR_QUANT8_ASYMM != ANEURALNETWORKS_TENSOR_QUANT8_ASYMM");
286
287 static_assert(static_cast<int32_t>(OperationType::ADD) == ANEURALNETWORKS_ADD,
288 "OperationType::ADD != ANEURALNETWORKS_ADD");
289 static_assert(static_cast<int32_t>(OperationType::AVERAGE_POOL_2D) ==
290 ANEURALNETWORKS_AVERAGE_POOL_2D,
291 "OperationType::AVERAGE_POOL_2D != ANEURALNETWORKS_AVERAGE_POOL_2D");
292 static_assert(static_cast<int32_t>(OperationType::CONV_2D) == ANEURALNETWORKS_CONV_2D,
293 "OperationType::CONV_2D != ANEURALNETWORKS_CONV_2D");
294 static_assert(static_cast<int32_t>(OperationType::DEPTHWISE_CONV_2D) ==
295 ANEURALNETWORKS_DEPTHWISE_CONV_2D,
296 "OperationType::DEPTHWISE_CONV_2D != ANEURALNETWORKS_DEPTHWISE_CONV_2D");
297 static_assert(static_cast<int32_t>(OperationType::DEPTH_TO_SPACE) == ANEURALNETWORKS_DEPTH_TO_SPACE,
298 "OperationType::DEPTH_TO_SPACE != ANEURALNETWORKS_DEPTH_TO_SPACE");
299 static_assert(static_cast<int32_t>(OperationType::DEQUANTIZE) == ANEURALNETWORKS_DEQUANTIZE,
300 "OperationType::DEQUANTIZE != ANEURALNETWORKS_DEQUANTIZE");
301 static_assert(static_cast<int32_t>(OperationType::EMBEDDING_LOOKUP) ==
302 ANEURALNETWORKS_EMBEDDING_LOOKUP,
303 "OperationType::EMBEDDING_LOOKUP != ANEURALNETWORKS_EMBEDDING_LOOKUP");
304 static_assert(static_cast<int32_t>(OperationType::FLOOR) == ANEURALNETWORKS_FLOOR,
305 "OperationType::FLOOR != ANEURALNETWORKS_FLOOR");
306 static_assert(static_cast<int32_t>(OperationType::FULLY_CONNECTED) ==
307 ANEURALNETWORKS_FULLY_CONNECTED,
308 "OperationType::FULLY_CONNECTED != ANEURALNETWORKS_FULLY_CONNECTED");
309 static_assert(static_cast<int32_t>(OperationType::HASHTABLE_LOOKUP) ==
310 ANEURALNETWORKS_HASHTABLE_LOOKUP,
311 "OperationType::HASHTABLE_LOOKUP != ANEURALNETWORKS_HASHTABLE_LOOKUP");
312 static_assert(static_cast<int32_t>(OperationType::L2_NORMALIZATION) ==
313 ANEURALNETWORKS_L2_NORMALIZATION,
314 "OperationType::L2_NORMALIZATION != ANEURALNETWORKS_L2_NORMALIZATION");
315 static_assert(static_cast<int32_t>(OperationType::L2_POOL_2D) == ANEURALNETWORKS_L2_POOL_2D,
316 "OperationType::L2_POOL_2D != ANEURALNETWORKS_L2_POOL_2D");
317 static_assert(static_cast<int32_t>(OperationType::LOCAL_RESPONSE_NORMALIZATION) ==
318 ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION,
319 "OperationType::LOCAL_RESPONSE_NORMALIZATION != "
320 "ANEURALNETWORKS_LOCAL_RESPONSE_NORMALIZATION");
321 static_assert(static_cast<int32_t>(OperationType::LOGISTIC) == ANEURALNETWORKS_LOGISTIC,
322 "OperationType::LOGISTIC != ANEURALNETWORKS_LOGISTIC");
323 static_assert(static_cast<int32_t>(OperationType::LSH_PROJECTION) == ANEURALNETWORKS_LSH_PROJECTION,
324 "OperationType::LSH_PROJECTION != ANEURALNETWORKS_LSH_PROJECTION");
325 static_assert(static_cast<int32_t>(OperationType::LSTM) == ANEURALNETWORKS_LSTM,
326 "OperationType::LSTM != ANEURALNETWORKS_LSTM");
327 static_assert(static_cast<int32_t>(OperationType::MAX_POOL_2D) == ANEURALNETWORKS_MAX_POOL_2D,
328 "OperationType::MAX_POOL_2D != ANEURALNETWORKS_MAX_POOL_2D");
329 static_assert(static_cast<int32_t>(OperationType::MUL) == ANEURALNETWORKS_MUL,
330 "OperationType::MUL != ANEURALNETWORKS_MUL");
331 static_assert(static_cast<int32_t>(OperationType::RELU) == ANEURALNETWORKS_RELU,
332 "OperationType::RELU != ANEURALNETWORKS_RELU");
333 static_assert(static_cast<int32_t>(OperationType::RELU1) == ANEURALNETWORKS_RELU1,
334 "OperationType::RELU1 != ANEURALNETWORKS_RELU1");
335 static_assert(static_cast<int32_t>(OperationType::RELU6) == ANEURALNETWORKS_RELU6,
336 "OperationType::RELU6 != ANEURALNETWORKS_RELU6");
337 static_assert(static_cast<int32_t>(OperationType::RESHAPE) == ANEURALNETWORKS_RESHAPE,
338 "OperationType::RESHAPE != ANEURALNETWORKS_RESHAPE");
339 static_assert(static_cast<int32_t>(OperationType::RESIZE_BILINEAR) ==
340 ANEURALNETWORKS_RESIZE_BILINEAR,
341 "OperationType::RESIZE_BILINEAR != ANEURALNETWORKS_RESIZE_BILINEAR");
342 static_assert(static_cast<int32_t>(OperationType::RNN) == ANEURALNETWORKS_RNN,
343 "OperationType::RNN != ANEURALNETWORKS_RNN");
344 static_assert(static_cast<int32_t>(OperationType::SOFTMAX) == ANEURALNETWORKS_SOFTMAX,
345 "OperationType::SOFTMAX != ANEURALNETWORKS_SOFTMAX");
346 static_assert(static_cast<int32_t>(OperationType::SPACE_TO_DEPTH) == ANEURALNETWORKS_SPACE_TO_DEPTH,
347 "OperationType::SPACE_TO_DEPTH != ANEURALNETWORKS_SPACE_TO_DEPTH");
348 static_assert(static_cast<int32_t>(OperationType::SVDF) == ANEURALNETWORKS_SVDF,
349 "OperationType::SVDF != ANEURALNETWORKS_SVDF");
350 static_assert(static_cast<int32_t>(OperationType::TANH) == ANEURALNETWORKS_TANH,
351 "OperationType::TANH != ANEURALNETWORKS_TANH");
352
353 static_assert(static_cast<int32_t>(FusedActivationFunc::NONE) == ANEURALNETWORKS_FUSED_NONE,
354 "FusedActivationFunc::NONE != ANEURALNETWORKS_FUSED_NONE");
355 static_assert(static_cast<int32_t>(FusedActivationFunc::RELU) == ANEURALNETWORKS_FUSED_RELU,
356 "FusedActivationFunc::RELU != ANEURALNETWORKS_FUSED_RELU");
357 static_assert(static_cast<int32_t>(FusedActivationFunc::RELU1) == ANEURALNETWORKS_FUSED_RELU1,
358 "FusedActivationFunc::RELU1 != ANEURALNETWORKS_FUSED_RELU1");
359 static_assert(static_cast<int32_t>(FusedActivationFunc::RELU6) == ANEURALNETWORKS_FUSED_RELU6,
360 "FusedActivationFunc::RELU6 != ANEURALNETWORKS_FUSED_RELU6");
361
362 // Make sure that the constants are compatible with the values defined in
363 // hardware/interfaces/neuralnetworks/1.1/types.hal.
364 static_assert(static_cast<int32_t>(OperationType::BATCH_TO_SPACE_ND) ==
365 ANEURALNETWORKS_BATCH_TO_SPACE_ND,
366 "OperationType::BATCH_TO_SPACE_ND != ANEURALNETWORKS_BATCH_TO_SPACE_ND");
367 static_assert(static_cast<int32_t>(OperationType::DIV) == ANEURALNETWORKS_DIV,
368 "OperationType::DIV != ANEURALNETWORKS_DIV");
369 static_assert(static_cast<int32_t>(OperationType::MEAN) == ANEURALNETWORKS_MEAN,
370 "OperationType::MEAN != ANEURALNETWORKS_MEAN");
371 static_assert(static_cast<int32_t>(OperationType::PAD) == ANEURALNETWORKS_PAD,
372 "OperationType::PAD != ANEURALNETWORKS_PAD");
373 static_assert(static_cast<int32_t>(OperationType::SPACE_TO_BATCH_ND) ==
374 ANEURALNETWORKS_SPACE_TO_BATCH_ND,
375 "OperationType::SPACE_TO_BATCH_ND != ANEURALNETWORKS_SPACE_TO_BATCH_ND");
376 static_assert(static_cast<int32_t>(OperationType::SQUEEZE) == ANEURALNETWORKS_SQUEEZE,
377 "OperationType::SQUEEZE != ANEURALNETWORKS_SQUEEZE");
378 static_assert(static_cast<int32_t>(OperationType::STRIDED_SLICE) == ANEURALNETWORKS_STRIDED_SLICE,
379 "OperationType::STRIDED_SLICE != ANEURALNETWORKS_STRIDED_SLICE");
380 static_assert(static_cast<int32_t>(OperationType::SUB) == ANEURALNETWORKS_SUB,
381 "OperationType::SUB != ANEURALNETWORKS_SUB");
382 static_assert(static_cast<int32_t>(OperationType::TRANSPOSE) == ANEURALNETWORKS_TRANSPOSE,
383 "OperationType::TRANSPOSE != ANEURALNETWORKS_TRANSPOSE");
384
385 // Make sure that the constants are compatible with the values defined in
386 // hardware/interfaces/neuralnetworks/1.2/types.hal.
387 static_assert(static_cast<int32_t>(OperandType::BOOL) == ANEURALNETWORKS_BOOL,
388 "BOOL != ANEURALNETWORKS_BOOL");
389 static_assert(static_cast<int32_t>(OperandType::TENSOR_QUANT16_SYMM) ==
390 ANEURALNETWORKS_TENSOR_QUANT16_SYMM,
391 "TENSOR_QUANT16_SYMM != ANEURALNETWORKS_TENSOR_QUANT16_SYMM");
392 static_assert(static_cast<int32_t>(OperandType::TENSOR_FLOAT16) == ANEURALNETWORKS_TENSOR_FLOAT16,
393 "TENSOR_FLOAT16 != ANEURALNETWORKS_TENSOR_FLOAT16");
394 static_assert(static_cast<int32_t>(OperandType::TENSOR_BOOL8) == ANEURALNETWORKS_TENSOR_BOOL8,
395 "TENSOR_BOOL8 != ANEURALNETWORKS_TENSOR_BOOL8");
396 static_assert(static_cast<int32_t>(OperandType::FLOAT16) == ANEURALNETWORKS_FLOAT16,
397 "FLOAT16 != ANEURALNETWORKS_FLOAT16");
398 static_assert(static_cast<int32_t>(OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL) ==
399 ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL,
400 "TENSOR_QUANT8_SYMM_PER_CHANNEL != ANEURALNETWORKS_TENSOR_QUANT8_SYMM_PER_CHANNEL");
401 static_assert(static_cast<int32_t>(OperandType::TENSOR_QUANT16_ASYMM) ==
402 ANEURALNETWORKS_TENSOR_QUANT16_ASYMM,
403 "TENSOR_QUANT16_ASYMM != ANEURALNETWORKS_TENSOR_QUANT16_ASYMM");
404 static_assert(static_cast<int32_t>(OperandType::TENSOR_QUANT8_SYMM) ==
405 ANEURALNETWORKS_TENSOR_QUANT8_SYMM,
406 "TENSOR_QUANT8_SYMM != ANEURALNETWORKS_TENSOR_QUANT8_SYMM");
407
408 static_assert(static_cast<int32_t>(OperationType::ABS) == ANEURALNETWORKS_ABS,
409 "OperationType::ABS != ANEURALNETWORKS_ABS");
410 static_assert(static_cast<int32_t>(OperationType::ARGMAX) == ANEURALNETWORKS_ARGMAX,
411 "OperationType::ARGMAX != ANEURALNETWORKS_ARGMAX");
412 static_assert(static_cast<int32_t>(OperationType::ARGMIN) == ANEURALNETWORKS_ARGMIN,
413 "OperationType::ARGMIN != ANEURALNETWORKS_ARGMIN");
414 static_assert(static_cast<int32_t>(OperationType::AXIS_ALIGNED_BBOX_TRANSFORM) ==
415 ANEURALNETWORKS_AXIS_ALIGNED_BBOX_TRANSFORM,
416 "OperationType::AXIS_ALIGNED_BBOX_TRANSFORM != "
417 "ANEURALNETWORKS_AXIS_ALIGNED_BBOX_TRANSFORM");
418 static_assert(static_cast<int32_t>(OperationType::BIDIRECTIONAL_SEQUENCE_LSTM) ==
419 ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_LSTM,
420 "OperationType::BIDIRECTIONAL_SEQUENCE_LSTM != "
421 "ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_LSTM");
422 static_assert(
423 static_cast<int32_t>(OperationType::BIDIRECTIONAL_SEQUENCE_RNN) ==
424 ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_RNN,
425 "OperationType::BIDIRECTIONAL_SEQUENCE_RNN != ANEURALNETWORKS_BIDIRECTIONAL_SEQUENCE_RNN");
426 static_assert(static_cast<int32_t>(OperationType::BOX_WITH_NMS_LIMIT) ==
427 ANEURALNETWORKS_BOX_WITH_NMS_LIMIT,
428 "OperationType::BOX_WITH_NMS_LIMIT != ANEURALNETWORKS_BOX_WITH_NMS_LIMIT");
429 static_assert(static_cast<int32_t>(OperationType::CAST) == ANEURALNETWORKS_CAST,
430 "OperationType::CAST != ANEURALNETWORKS_CAST");
431 static_assert(static_cast<int32_t>(OperationType::CHANNEL_SHUFFLE) ==
432 ANEURALNETWORKS_CHANNEL_SHUFFLE,
433 "OperationType::CHANNEL_SHUFFLE != ANEURALNETWORKS_CHANNEL_SHUFFLE");
434 static_assert(
435 static_cast<int32_t>(OperationType::DETECTION_POSTPROCESSING) ==
436 ANEURALNETWORKS_DETECTION_POSTPROCESSING,
437 "OperationType::DETECTION_POSTPROCESSING != ANEURALNETWORKS_DETECTION_POSTPROCESSING");
438 static_assert(static_cast<int32_t>(OperationType::EQUAL) == ANEURALNETWORKS_EQUAL,
439 "OperationType::EQUAL != ANEURALNETWORKS_EQUAL");
440 static_assert(static_cast<int32_t>(OperationType::EXP) == ANEURALNETWORKS_EXP,
441 "OperationType::EXP != ANEURALNETWORKS_EXP");
442 static_assert(static_cast<int32_t>(OperationType::EXPAND_DIMS) == ANEURALNETWORKS_EXPAND_DIMS,
443 "OperationType::EXPAND_DIMS != ANEURALNETWORKS_EXPAND_DIMS");
444 static_assert(static_cast<int32_t>(OperationType::GATHER) == ANEURALNETWORKS_GATHER,
445 "OperationType::GATHER != ANEURALNETWORKS_GATHER");
446 static_assert(static_cast<int32_t>(OperationType::GENERATE_PROPOSALS) ==
447 ANEURALNETWORKS_GENERATE_PROPOSALS,
448 "OperationType::GENERATE_PROPOSALS != ANEURALNETWORKS_GENERATE_PROPOSALS");
449 static_assert(static_cast<int32_t>(OperationType::GREATER) == ANEURALNETWORKS_GREATER,
450 "OperationType::GREATER != ANEURALNETWORKS_GREATER");
451 static_assert(static_cast<int32_t>(OperationType::GREATER_EQUAL) == ANEURALNETWORKS_GREATER_EQUAL,
452 "OperationType::GREATER_EQUAL != ANEURALNETWORKS_GREATER_EQUAL");
453 static_assert(static_cast<int32_t>(OperationType::GROUPED_CONV_2D) ==
454 ANEURALNETWORKS_GROUPED_CONV_2D,
455 "OperationType::GROUPED_CONV_2D != ANEURALNETWORKS_GROUPED_CONV_2D");
456 static_assert(static_cast<int32_t>(OperationType::HEATMAP_MAX_KEYPOINT) ==
457 ANEURALNETWORKS_HEATMAP_MAX_KEYPOINT,
458 "OperationType::HEATMAP_MAX_KEYPOINT != ANEURALNETWORKS_HEATMAP_MAX_KEYPOINT");
459 static_assert(static_cast<int32_t>(OperationType::INSTANCE_NORMALIZATION) ==
460 ANEURALNETWORKS_INSTANCE_NORMALIZATION,
461 "OperationType::INSTANCE_NORMALIZATION != ANEURALNETWORKS_INSTANCE_NORMALIZATION");
462 static_assert(static_cast<int32_t>(OperationType::LESS) == ANEURALNETWORKS_LESS,
463 "OperationType::LESS != ANEURALNETWORKS_LESS");
464 static_assert(static_cast<int32_t>(OperationType::LESS_EQUAL) == ANEURALNETWORKS_LESS_EQUAL,
465 "OperationType::LESS_EQUAL != ANEURALNETWORKS_LESS_EQUAL");
466 static_assert(static_cast<int32_t>(OperationType::LOG) == ANEURALNETWORKS_LOG,
467 "OperationType::LOG != ANEURALNETWORKS_LOG");
468 static_assert(static_cast<int32_t>(OperationType::LOGICAL_AND) == ANEURALNETWORKS_LOGICAL_AND,
469 "OperationType::LOGICAL_AND != ANEURALNETWORKS_LOGICAL_AND");
470 static_assert(static_cast<int32_t>(OperationType::LOGICAL_NOT) == ANEURALNETWORKS_LOGICAL_NOT,
471 "OperationType::LOGICAL_NOT != ANEURALNETWORKS_LOGICAL_NOT");
472 static_assert(static_cast<int32_t>(OperationType::LOGICAL_OR) == ANEURALNETWORKS_LOGICAL_OR,
473 "OperationType::LOGICAL_OR != ANEURALNETWORKS_LOGICAL_OR");
474 static_assert(static_cast<int32_t>(OperationType::LOG_SOFTMAX) == ANEURALNETWORKS_LOG_SOFTMAX,
475 "OperationType::LOG_SOFTMAX != ANEURALNETWORKS_LOG_SOFTMAX");
476 static_assert(static_cast<int32_t>(OperationType::MAXIMUM) == ANEURALNETWORKS_MAXIMUM,
477 "OperationType::MAXIMUM != ANEURALNETWORKS_MAXIMUM");
478 static_assert(static_cast<int32_t>(OperationType::MINIMUM) == ANEURALNETWORKS_MINIMUM,
479 "OperationType::MINIMUM != ANEURALNETWORKS_MINIMUM");
480 static_assert(static_cast<int32_t>(OperationType::NEG) == ANEURALNETWORKS_NEG,
481 "OperationType::NEG != ANEURALNETWORKS_NEG");
482 static_assert(static_cast<int32_t>(OperationType::NOT_EQUAL) == ANEURALNETWORKS_NOT_EQUAL,
483 "OperationType::NOT_EQUAL != ANEURALNETWORKS_NOT_EQUAL");
484 static_assert(static_cast<int32_t>(OperationType::PAD_V2) == ANEURALNETWORKS_PAD_V2,
485 "OperationType::PAD_V2 != ANEURALNETWORKS_PAD_V2");
486 static_assert(static_cast<int32_t>(OperationType::POW) == ANEURALNETWORKS_POW,
487 "OperationType::POW != ANEURALNETWORKS_POW");
488 static_assert(static_cast<int32_t>(OperationType::PRELU) == ANEURALNETWORKS_PRELU,
489 "OperationType::PRELU != ANEURALNETWORKS_PRELU");
490 static_assert(static_cast<int32_t>(OperationType::QUANTIZE) == ANEURALNETWORKS_QUANTIZE,
491 "OperationType::QUANTIZE != ANEURALNETWORKS_QUANTIZE");
492 static_assert(static_cast<int32_t>(OperationType::QUANTIZED_16BIT_LSTM) ==
493 ANEURALNETWORKS_QUANTIZED_16BIT_LSTM,
494 "OperationType::QUANTIZED_16BIT_LSTM != ANEURALNETWORKS_QUANTIZED_16BIT_LSTM");
495 static_assert(static_cast<int32_t>(OperationType::RANDOM_MULTINOMIAL) ==
496 ANEURALNETWORKS_RANDOM_MULTINOMIAL,
497 "OperationType::RANDOM_MULTINOMIAL != ANEURALNETWORKS_RANDOM_MULTINOMIAL");
498 static_assert(static_cast<int32_t>(OperationType::REDUCE_ALL) == ANEURALNETWORKS_REDUCE_ALL,
499 "OperationType::REDUCE_ALL != ANEURALNETWORKS_REDUCE_ALL");
500 static_assert(static_cast<int32_t>(OperationType::REDUCE_ANY) == ANEURALNETWORKS_REDUCE_ANY,
501 "OperationType::REDUCE_ANY != ANEURALNETWORKS_REDUCE_ANY");
502 static_assert(static_cast<int32_t>(OperationType::REDUCE_MAX) == ANEURALNETWORKS_REDUCE_MAX,
503 "OperationType::REDUCE_MAX != ANEURALNETWORKS_REDUCE_MAX");
504 static_assert(static_cast<int32_t>(OperationType::REDUCE_MIN) == ANEURALNETWORKS_REDUCE_MIN,
505 "OperationType::REDUCE_MIN != ANEURALNETWORKS_REDUCE_MIN");
506 static_assert(static_cast<int32_t>(OperationType::REDUCE_PROD) == ANEURALNETWORKS_REDUCE_PROD,
507 "OperationType::REDUCE_PROD != ANEURALNETWORKS_REDUCE_PROD");
508 static_assert(static_cast<int32_t>(OperationType::REDUCE_SUM) == ANEURALNETWORKS_REDUCE_SUM,
509 "OperationType::REDUCE_SUM != ANEURALNETWORKS_REDUCE_SUM");
510 static_assert(static_cast<int32_t>(OperationType::ROI_ALIGN) == ANEURALNETWORKS_ROI_ALIGN,
511 "OperationType::ROI_ALIGN != ANEURALNETWORKS_ROI_ALIGN");
512 static_assert(static_cast<int32_t>(OperationType::ROI_POOLING) == ANEURALNETWORKS_ROI_POOLING,
513 "OperationType::ROI_POOLING != ANEURALNETWORKS_ROI_POOLING");
514 static_assert(static_cast<int32_t>(OperationType::RSQRT) == ANEURALNETWORKS_RSQRT,
515 "OperationType::RSQRT != ANEURALNETWORKS_RSQRT");
516 static_assert(static_cast<int32_t>(OperationType::SELECT) == ANEURALNETWORKS_SELECT,
517 "OperationType::SELECT != ANEURALNETWORKS_SELECT");
518 static_assert(static_cast<int32_t>(OperationType::SIN) == ANEURALNETWORKS_SIN,
519 "OperationType::SIN != ANEURALNETWORKS_SIN");
520 static_assert(static_cast<int32_t>(OperationType::SLICE) == ANEURALNETWORKS_SLICE,
521 "OperationType::SLICE != ANEURALNETWORKS_SLICE");
522 static_assert(static_cast<int32_t>(OperationType::SPLIT) == ANEURALNETWORKS_SPLIT,
523 "OperationType::SPLIT != ANEURALNETWORKS_SPLIT");
524 static_assert(static_cast<int32_t>(OperationType::SQRT) == ANEURALNETWORKS_SQRT,
525 "OperationType::SQRT != ANEURALNETWORKS_SQRT");
526 static_assert(static_cast<int32_t>(OperationType::TILE) == ANEURALNETWORKS_TILE,
527 "OperationType::TILE != ANEURALNETWORKS_TILE");
528 static_assert(static_cast<int32_t>(OperationType::TOPK_V2) == ANEURALNETWORKS_TOPK_V2,
529 "OperationType::TOPK_V2 != ANEURALNETWORKS_TOPK_V2");
530 static_assert(static_cast<int32_t>(OperationType::TRANSPOSE_CONV_2D) ==
531 ANEURALNETWORKS_TRANSPOSE_CONV_2D,
532 "OperationType::TRANSPOSE_CONV_2D != ANEURALNETWORKS_TRANSPOSE_CONV_2D");
533 static_assert(static_cast<int32_t>(OperationType::UNIDIRECTIONAL_SEQUENCE_LSTM) ==
534 ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_LSTM,
535 "OperationType::UNIDIRECTIONAL_SEQUENCE_LSTM != "
536 "ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_LSTM");
537 static_assert(static_cast<int32_t>(OperationType::UNIDIRECTIONAL_SEQUENCE_RNN) ==
538 ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_RNN,
539 "OperationType::UNIDIRECTIONAL_SEQUENCE_RNN != "
540 "ANEURALNETWORKS_UNIDIRECTIONAL_SEQUENCE_RNN");
541 static_assert(static_cast<int32_t>(OperationType::RESIZE_NEAREST_NEIGHBOR) ==
542 ANEURALNETWORKS_RESIZE_NEAREST_NEIGHBOR,
543 "OperationType::RESIZE_NEAREST_NEIGHBOR != ANEURALNETWORKS_RESIZE_NEAREST_NEIGHBOR");
544 static_assert(static_cast<int32_t>(OperationType::QUANTIZED_LSTM) == ANEURALNETWORKS_QUANTIZED_LSTM,
545 "OperationType::QUANTIZED_LSTM != ANEURALNETWORKS_QUANTIZED_LSTM");
546 static_assert(static_cast<int32_t>(OperationType::IF) == ANEURALNETWORKS_IF,
547 "OperationType::IF != ANEURALNETWORKS_IF");
548 static_assert(static_cast<int32_t>(OperationType::WHILE) == ANEURALNETWORKS_WHILE,
549 "OperationType::WHILE != ANEURALNETWORKS_WHILE");
550 static_assert(static_cast<int32_t>(OperationType::ELU) == ANEURALNETWORKS_ELU,
551 "OperationType::ELU != ANEURALNETWORKS_ELU");
552 static_assert(static_cast<int32_t>(OperationType::HARD_SWISH) == ANEURALNETWORKS_HARD_SWISH,
553 "OperationType::HARD_SWISH != ANEURALNETWORKS_HARD_SWISH");
554 static_assert(static_cast<int32_t>(OperationType::FILL) == ANEURALNETWORKS_FILL,
555 "OperationType::FILL != ANEURALNETWORKS_FILL");
556 static_assert(static_cast<int32_t>(OperationType::RANK) == ANEURALNETWORKS_RANK,
557 "OperationType::RANK != ANEURALNETWORKS_RANK");
558 static_assert(static_cast<int32_t>(OperationType::BATCH_MATMUL) == ANEURALNETWORKS_BATCH_MATMUL,
559 "OperationType::BATCH_MATMUL != ANEURALNETWORKS_BATCH_MATMUL");
560 static_assert(static_cast<int32_t>(OperationType::PACK) == ANEURALNETWORKS_PACK,
561 "OperationType::PACK != ANEURALNETWORKS_PACK");
562 static_assert(static_cast<int32_t>(OperationType::MIRROR_PAD) == ANEURALNETWORKS_MIRROR_PAD,
563 "OperationType::MIRROR_PAD != ANEURALNETWORKS_MIRROR_PAD");
564 static_assert(static_cast<int32_t>(OperationType::REVERSE) == ANEURALNETWORKS_REVERSE,
565 "OperationType::REVERSE != ANEURALNETWORKS_REVERSE");
566
567 static_assert(static_cast<int32_t>(DeviceType::OTHER) == ANEURALNETWORKS_DEVICE_OTHER,
568 "DeviceType::OTHER != ANEURALNETWORKS_DEVICE_OTHER");
569 static_assert(static_cast<int32_t>(DeviceType::CPU) == ANEURALNETWORKS_DEVICE_CPU,
570 "DeviceType::CPU != ANEURALNETWORKS_DEVICE_CPU");
571 static_assert(static_cast<int32_t>(DeviceType::GPU) == ANEURALNETWORKS_DEVICE_GPU,
572 "DeviceType::GPU != ANEURALNETWORKS_DEVICE_GPU");
573 static_assert(static_cast<int32_t>(DeviceType::ACCELERATOR) == ANEURALNETWORKS_DEVICE_ACCELERATOR,
574 "DeviceType::ACCELERATOR != ANEURALNETWORKS_DEVICE_ACCELERATOR");
575
576 // Make sure that the constants are compatible with the values defined in
577 // hardware/interfaces/neuralnetworks/1.3/types.hal.
578 static_assert(android::nn::convertToCanonicalPriority(ANEURALNETWORKS_PRIORITY_LOW) ==
579 Priority::LOW,
580 "ANEURALNETWORKS_PRIORITY_LOW does not map to Priority::LOW");
581 static_assert(android::nn::convertToCanonicalPriority(ANEURALNETWORKS_PRIORITY_MEDIUM) ==
582 Priority::MEDIUM,
583 "ANEURALNETWORKS_PRIORITY_MEDIUM does not map to Priority::MEDIUM");
584 static_assert(android::nn::convertToCanonicalPriority(ANEURALNETWORKS_PRIORITY_HIGH) ==
585 Priority::HIGH,
586 "ANEURALNETWORKS_PRIORITY_HIGH does not map to Priority::HIGH");
587
588 // Asserts for ANeuralNetworksOperandType memory layout
589 static_assert(offsetof(ANeuralNetworksOperandType, type) == 0,
590 "ANeuralNetworksOperandType.type offset != 0");
591 static_assert(offsetof(ANeuralNetworksOperandType, dimensionCount) == 4,
592 "ANeuralNetworksOperandType.dimensionCount offset != 4");
593 static_assert(offsetof(ANeuralNetworksOperandType, dimensions) == 8,
594 "ANeuralNetworksOperandType.dimensions offset != 8");
595 static_assert(offsetof(ANeuralNetworksOperandType, scale) == 8 + sizeof(void*),
596 "ANeuralNetworksOperandType.scale offset != 8 + sizeof(void*)");
597 static_assert(offsetof(ANeuralNetworksOperandType, zeroPoint) == 12 + sizeof(void*),
598 "ANeuralNetworksOperandType.zeroPoint offset != 12 + sizeof(void*)");
599 static_assert(sizeof(ANeuralNetworksOperandType) == 16 + sizeof(void*),
600 "ANeuralNetworksOperandType size changed");
601 static_assert(alignof(ANeuralNetworksOperandType) == alignof(void*),
602 "ANeuralNetworksOperandType alignment changed");
603
604 // Asserts for ANeuralNetworksSymmPerChannelQuantParams memory layout
605 static_assert(offsetof(ANeuralNetworksSymmPerChannelQuantParams, channelDim) == 0,
606               "ANeuralNetworksSymmPerChannelQuantParams.channelDim offset != 0");
607 static_assert(offsetof(ANeuralNetworksSymmPerChannelQuantParams, scaleCount) == 4,
608               "ANeuralNetworksSymmPerChannelQuantParams.scaleCount offset != 4");
609 static_assert(offsetof(ANeuralNetworksSymmPerChannelQuantParams, scales) == 8,
610               "ANeuralNetworksSymmPerChannelQuantParams.scales offset != 8");
611 static_assert(sizeof(ANeuralNetworksSymmPerChannelQuantParams) == 8 + sizeof(void*),
612 "ANeuralNetworksSymmPerChannelQuantParams size != 8 + sizeof(void*)");
613 static_assert(alignof(ANeuralNetworksSymmPerChannelQuantParams) == alignof(void*),
614               "ANeuralNetworksSymmPerChannelQuantParams alignment changed");
615
616 // Asserts for compilation caching
617 static_assert(ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN == 32,
618 "ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN has changed");
619 static_assert(ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN == kByteSizeOfCacheToken,
620 "ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN != kByteSizeOfCacheToken");
621
622 // Asserts for compilation priority
623 static_assert(ANEURALNETWORKS_PRIORITY_LOW == 90, "ANEURALNETWORKS_PRIORITY_LOW has changed");
624 static_assert(ANEURALNETWORKS_PRIORITY_MEDIUM == 100,
625 "ANEURALNETWORKS_PRIORITY_MEDIUM has changed");
626 static_assert(ANEURALNETWORKS_PRIORITY_HIGH == 110, "ANEURALNETWORKS_PRIORITY_HIGH has changed");
627 static_assert(ANEURALNETWORKS_PRIORITY_DEFAULT == ANEURALNETWORKS_PRIORITY_MEDIUM,
628 "ANEURALNETWORKS_PRIORITY_DEFAULT has changed");
629
630 // Asserts for feature levels
631 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_1 == 27, "ANEURALNETWORKS_FEATURE_LEVEL_1 has changed");
632 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_2 == 28, "ANEURALNETWORKS_FEATURE_LEVEL_2 has changed");
633 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_3 == 29, "ANEURALNETWORKS_FEATURE_LEVEL_3 has changed");
634 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_4 == 30, "ANEURALNETWORKS_FEATURE_LEVEL_4 has changed");
635 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_5 == 31, "ANEURALNETWORKS_FEATURE_LEVEL_5 has changed");
636 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_6 == 1000006,
637 "ANEURALNETWORKS_FEATURE_LEVEL_6 has changed");
638 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_7 == 1000007,
639 "ANEURALNETWORKS_FEATURE_LEVEL_7 has changed");
640 static_assert(ANEURALNETWORKS_FEATURE_LEVEL_8 == 1000008,
641 "ANEURALNETWORKS_FEATURE_LEVEL_8 has changed");
642
643 int ANeuralNetworks_getDeviceCount(uint32_t* numDevices) {
644 if (numDevices == nullptr) {
645 LOG(ERROR) << "ANeuralNetworks_getDeviceCount passed a nullptr";
646 return ANEURALNETWORKS_UNEXPECTED_NULL;
647 }
648 *numDevices = DeviceManager::get()->getDrivers().size();
649 return ANEURALNETWORKS_NO_ERROR;
650 }
651
652 int ANeuralNetworks_getDevice(uint32_t devIndex, ANeuralNetworksDevice** device) {
653 if (device == nullptr) {
654 LOG(ERROR) << "ANeuralNetworks_getDevice passed a nullptr";
655 return ANEURALNETWORKS_UNEXPECTED_NULL;
656 }
657 const std::vector<std::shared_ptr<Device>>& devices = DeviceManager::get()->getDrivers();
658 if (devIndex >= devices.size()) {
659 LOG(ERROR) << "ANeuralNetworks_getDevice passed an invalid device index";
660 return ANEURALNETWORKS_BAD_DATA;
661 }
662 *device = reinterpret_cast<ANeuralNetworksDevice*>(devices.at(devIndex).get());
663 return ANEURALNETWORKS_NO_ERROR;
664 }
665
666 int ANeuralNetworksDevice_getName(const ANeuralNetworksDevice* device, const char** name) {
667 if (device == nullptr || name == nullptr) {
668 LOG(ERROR) << "ANeuralNetworksDevice_getName passed a nullptr";
669 return ANEURALNETWORKS_UNEXPECTED_NULL;
670 }
671 const Device* d = reinterpret_cast<const Device*>(device);
672 *name = d->getName().c_str();
673 return ANEURALNETWORKS_NO_ERROR;
674 }
675
676 int ANeuralNetworksDevice_getVersion(const ANeuralNetworksDevice* device, const char** version) {
677 if (device == nullptr || version == nullptr) {
678 LOG(ERROR) << "ANeuralNetworksDevice_getVersion passed a nullptr";
679 return ANEURALNETWORKS_UNEXPECTED_NULL;
680 }
681 const Device* d = reinterpret_cast<const Device*>(device);
682 *version = d->getVersionString().c_str();
683 return ANEURALNETWORKS_NO_ERROR;
684 }
685
686 int ANeuralNetworksDevice_getType(const ANeuralNetworksDevice* device, int32_t* type) {
687 if (device == nullptr || type == nullptr) {
688 LOG(ERROR) << "ANeuralNetworksDevice_getType passed a nullptr";
689 return ANEURALNETWORKS_UNEXPECTED_NULL;
690 }
691 const Device* d = reinterpret_cast<const Device*>(device);
692 int32_t dType = d->getType();
693 if (dType < 0) {
694 return ANEURALNETWORKS_OP_FAILED;
695 }
696 *type = d->getType();
697 return ANEURALNETWORKS_NO_ERROR;
698 }
699
700 #ifdef NN_DEBUGGABLE
701 static int64_t sRuntimeFeatureLevel = 0;
702 void forTest_setRuntimeFeatureLevel(int64_t level) {
703 sRuntimeFeatureLevel = level;
704 }
705 #endif
706
707 // ANeuralNetworks_getRuntimeFeatureLevel is new in API level 31, while libneuralnetworks
708 // targets "min_sdk_version: 30", so calls from outside the library should be properly guarded
709 // (e.g. with __builtin_available). Calling it from within this compilation unit, however, is
710 // perfectly fine, and guarding such a call makes no sense: the symbol is available even on a
711 // system where the __builtin_available check evaluates to false.
712 // To keep the compiler happy we introduce getRuntimeFeatureLevelImpl() and call it within the
713 // library.
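// Illustrative sketch of the guard an external caller (e.g. an app with minSdkVersion 30) would
// need; the fallback value here is an assumption, not something prescribed by this file:
//
//     int64_t featureLevel = ANEURALNETWORKS_FEATURE_LEVEL_5;  // assumed fallback
//     if (__builtin_available(android 31, *)) {
//         featureLevel = ANeuralNetworks_getRuntimeFeatureLevel();
//     }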
714 static inline int64_t getRuntimeFeatureLevelImpl() {
715 #ifdef NN_DEBUGGABLE
716 if (sRuntimeFeatureLevel) {
717 return sRuntimeFeatureLevel;
718 }
719 #endif
720 return DeviceManager::get()->getRuntimeFeatureLevel();
721 }
722
723 int ANeuralNetworksDevice_getFeatureLevel(const ANeuralNetworksDevice* device,
724 int64_t* featureLevel) {
725 if (device == nullptr || featureLevel == nullptr) {
726 LOG(ERROR) << "ANeuralNetworksDevice_getFeatureLevel passed a nullptr";
727 return ANEURALNETWORKS_UNEXPECTED_NULL;
728 }
729 Device* d = reinterpret_cast<Device*>(const_cast<ANeuralNetworksDevice*>(device));
730 int64_t dFeatureLevel = DeviceManager::versionToFeatureLevel(d->getFeatureLevel().level);
731 if (dFeatureLevel < 0) {
732 return ANEURALNETWORKS_BAD_STATE;
733 }
734 *featureLevel = std::min(getRuntimeFeatureLevelImpl(), dFeatureLevel);
735 return ANEURALNETWORKS_NO_ERROR;
736 }
737
738 int ANeuralNetworksDevice_wait(const ANeuralNetworksDevice* device) {
739 if (device == nullptr) {
740 LOG(ERROR) << "ANeuralNetworksDevice_wait passed a nullptr";
741 return ANEURALNETWORKS_UNEXPECTED_NULL;
742 }
743 const Device* d = reinterpret_cast<const Device*>(device);
744 return d->wait();
745 }
746
747 int ANeuralNetworksModel_getSupportedOperationsForDevices(
748 const ANeuralNetworksModel* model, const ANeuralNetworksDevice* const* devices,
749 uint32_t numDevices, bool* supportedOps) {
750 NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksModel_getSupportedOperationsForDevices");
751 if (model == nullptr || devices == nullptr || supportedOps == nullptr) {
752 LOG(ERROR) << "ANeuralNetworksModel_getSupportedOperationsForDevices passed a nullptr";
753 return ANEURALNETWORKS_UNEXPECTED_NULL;
754 }
755 if (numDevices == 0) {
756 LOG(ERROR) << "ANeuralNetworksModel_getSupportedOperationsForDevices passed an empty "
757 "device list";
758 return ANEURALNETWORKS_BAD_DATA;
759 }
760 const FlatbufferModelBuilder* m = reinterpret_cast<const FlatbufferModelBuilder*>(model);
761 if (!m->isFinished() || !m->isValid()) {
762 LOG(ERROR) << "ANeuralNetworksModel_getSupportedOperationsForDevices passed an unfinished "
763 "or invalid Model";
764 return ANEURALNETWORKS_BAD_STATE;
765 }
766
767 const Model canonicalModel = m->makeModel();
768 const std::vector<uint32_t>& opMap = m->getSortedOperationMapping();
769 // init the output array to false for all the operations.
770 std::fill(supportedOps, supportedOps + opMap.size(), false);
771 for (uint32_t i = 0; i < numDevices; i++) {
772 if (devices[i] == nullptr) {
773 LOG(ERROR) << "ANeuralNetworksModel_getSupportedOperationsForDevices passed a nullptr "
774 "as a device";
775 return ANEURALNETWORKS_UNEXPECTED_NULL;
776 }
777 for (uint32_t j = i + 1; j < numDevices; j++) {
778 if (devices[i] == devices[j]) {
779 LOG(ERROR) << "ANeuralNetworksModel_getSupportedOperationsForDevices passed "
780 "duplicate devices";
781 return ANEURALNETWORKS_BAD_DATA;
782 }
783 }
784
785 Device* d = reinterpret_cast<Device*>(const_cast<ANeuralNetworksDevice*>(devices[i]));
786 const MetaModel metaModel(canonicalModel, DeviceManager::get()->strictSlicing());
787 const std::vector<bool> supportsByDevice = d->getSupportedOperations(metaModel);
788 for (uint32_t j = 0; j < supportsByDevice.size(); j++) {
789 uint32_t originalIdx = opMap[j];
790 supportedOps[originalIdx] |= supportsByDevice[j];
791 }
792 }
793 return ANEURALNETWORKS_NO_ERROR;
794 }
795
796 int ANeuralNetworksCompilation_createForDevices(ANeuralNetworksModel* /* model */,
797 const ANeuralNetworksDevice* const* /* devices */,
798 uint32_t /* numDevices */,
799 ANeuralNetworksCompilation** /* compilation */) {
800 NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_createForDevices");
801 // Not supported yet in NNAPI v2
802 LOG(ERROR) << "ANeuralNetworksCompilation_createForDevices unimplemented in Neural Networks V2";
803 return ANEURALNETWORKS_OP_FAILED;
804 }
805
806 struct ExecutionContext {
807 // inputs are always copied before execution while outputs may be set by custom allocation
808 std::vector<void*> outputs;
809 std::vector<size_t> outputSizes;
810 std::vector<bool> isOutputSpecifiedAtIndex;
811 std::vector<const void*> inputs;
812 std::vector<size_t> inputSizes;
813
814 std::unique_ptr<tflite::Interpreter> interpreter;
815
816     ExecutionContext(std::unique_ptr<tflite::Interpreter> interpreter)
817 : outputs(interpreter->outputs().size()),
818 outputSizes(interpreter->outputs().size()),
819 isOutputSpecifiedAtIndex(interpreter->outputs().size(), false),
820 inputs(interpreter->inputs().size()),
821 inputSizes(interpreter->inputs().size()),
822 interpreter(std::move(interpreter)) {}
823 };
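// Rough usage sketch for ExecutionContext (this is how the functions below treat the
// ANeuralNetworksExecution* handle; index 0 and the buffer/length names are hypothetical):
//
//     auto* context = reinterpret_cast<ExecutionContext*>(execution);
//     context->inputs[0] = inputBuffer;             // copied into the TFLite input tensor
//     context->inputSizes[0] = inputLength;         // at compute time
//     context->outputs[0] = outputBuffer;           // filled from the TFLite output tensor
//     context->outputSizes[0] = outputLength;       // after Invoke()
//     context->isOutputSpecifiedAtIndex[0] = true;  // compute() requires every output to be set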
824
825 int ANeuralNetworksExecution_compute(ANeuralNetworksExecution* execution) {
826 NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_compute");
827 if (!execution) {
828 LOG(ERROR) << "ANeuralNetworksExecution_compute passed a nullptr";
829 return ANEURALNETWORKS_UNEXPECTED_NULL;
830 }
831
832 auto context = reinterpret_cast<ExecutionContext*>(execution);
833 if (std::any_of(context->isOutputSpecifiedAtIndex.begin(),
834 context->isOutputSpecifiedAtIndex.end(), [](bool isSet) { return !isSet; })) {
835 LOG(ERROR) << "ANeuralNetworksExecution_compute not all output buffers are specified";
836 return ANEURALNETWORKS_BAD_DATA;
837 }
838
839 auto result = context->interpreter->AllocateTensors();
840 if (result != kTfLiteOk) {
841 LOG(ERROR) << "ANeuralNetworksExecution_compute allocate tensors failed";
842 return ANEURALNETWORKS_OP_FAILED;
843 }
844
845 for (uint32_t index = 0; index < context->interpreter->inputs().size(); index++) {
846 const void* buffer = context->inputs[index];
847 if (buffer == nullptr) {
848 LOG(ERROR) << "ANeuralNetworksExecution_compute not all input buffers are specified";
849 return ANEURALNETWORKS_BAD_DATA;
850 }
851 size_t length = context->inputSizes[index];
852 std::memcpy(context->interpreter->input_tensor(index)->data.raw, buffer, length);
853 }
854
855 if (context->interpreter->Invoke() != kTfLiteOk) {
856 return ANEURALNETWORKS_OP_FAILED;
857 }
858
859 for (uint32_t i = 0; i < context->interpreter->outputs().size(); i++) {
860 if (context->outputs[i] == nullptr) {
861 continue;
862 }
863
864 const size_t bufferSize = context->outputSizes[i];
865 std::memcpy(context->outputs[i], context->interpreter->output_tensor(i)->data.raw,
866 bufferSize);
867 }
868 return ANEURALNETWORKS_NO_ERROR;
869 }
870
871 int ANeuralNetworksExecution_setMeasureTiming(ANeuralNetworksExecution* /* execution */,
872 bool /* measure */) {
873 NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_setMeasureTiming");
874 // Not supported yet in NNAPI v2
875 LOG(ERROR) << "ANeuralNetworksExecution_setMeasureTiming unimplemented in Neural Networks V2";
876 return ANEURALNETWORKS_OP_FAILED;
877 }
878
879 int ANeuralNetworksExecution_getDuration(const ANeuralNetworksExecution* /* execution */,
880 int32_t /* durationCode */, uint64_t* /* duration */) {
881 NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_getDuration");
882 // Not supported yet in NNAPI v2
883 LOG(ERROR) << "ANeuralNetworksExecution_getDuration unimplemented in Neural Networks V2";
884 return ANEURALNETWORKS_OP_FAILED;
885 }
886
887 int ANeuralNetworksBurst_create(ANeuralNetworksCompilation* compilation,
888 ANeuralNetworksBurst** burst) {
889 NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksBurst_create");
890 if (!compilation || !burst) {
891 LOG(ERROR) << "ANeuralNetworksBurst_create passed a nullptr";
892 return ANEURALNETWORKS_UNEXPECTED_NULL;
893 }
894
895 CompilationBuilder* c = reinterpret_cast<CompilationBuilder*>(compilation);
896 BurstBuilder* b = nullptr;
897 int result = c->createBurst(&b);
898 *burst = reinterpret_cast<ANeuralNetworksBurst*>(b);
899 return result;
900 }
901
902 void ANeuralNetworksBurst_free(ANeuralNetworksBurst* burst) {
903 NNTRACE_RT(NNTRACE_PHASE_TERMINATION, "ANeuralNetworksBurst_free");
904 // No validation. Free of nullptr is valid.
905 BurstBuilder* b = reinterpret_cast<BurstBuilder*>(burst);
906 delete b;
907 }
908
909 int ANeuralNetworksExecution_burstCompute(ANeuralNetworksExecution* /* execution */,
910 ANeuralNetworksBurst* /* burst */) {
911 NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_burstCompute");
912 // Not supported yet in NNAPI v2
913 LOG(ERROR) << "ANeuralNetworksExecution_burstCompute unimplemented in Neural Networks V2";
914 return ANEURALNETWORKS_OP_FAILED;
915 }
916
917 int ANeuralNetworksMemoryDesc_create(ANeuralNetworksMemoryDesc** desc) {
918 NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemoryDesc_create");
919 if (desc != nullptr) {
920 *desc = nullptr;
921 }
922 if (!desc) {
923 LOG(ERROR) << "ANeuralNetworksMemoryDesc_create passed a nullptr";
924 return ANEURALNETWORKS_UNEXPECTED_NULL;
925 }
926 auto mb = std::make_unique<MemoryBuilder>();
927 *desc = reinterpret_cast<ANeuralNetworksMemoryDesc*>(mb.release());
928 return ANEURALNETWORKS_NO_ERROR;
929 }
930
931 void ANeuralNetworksMemoryDesc_free(ANeuralNetworksMemoryDesc* desc) {
932 NNTRACE_RT(NNTRACE_PHASE_TERMINATION, "ANeuralNetworksMemoryDesc_free");
933 // No validation. Free of nullptr is valid.
934 MemoryBuilder* mb = reinterpret_cast<MemoryBuilder*>(desc);
935 delete mb;
936 }
937
938 int ANeuralNetworksMemoryDesc_addInputRole(ANeuralNetworksMemoryDesc* desc,
939 const ANeuralNetworksCompilation* compilation,
940 uint32_t index, float frequency) {
941 NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemoryDesc_addInputRole");
942 if (!desc || !compilation) {
943 LOG(ERROR) << "ANeuralNetworksMemoryDesc_addInputRole passed a nullptr";
944 return ANEURALNETWORKS_UNEXPECTED_NULL;
945 }
946 MemoryBuilder* mb = reinterpret_cast<MemoryBuilder*>(desc);
947 const CompilationBuilder* c = reinterpret_cast<const CompilationBuilder*>(compilation);
948 return mb->addRole(*c, IOType::INPUT, index, frequency);
949 }
950
ANeuralNetworksMemoryDesc_addOutputRole(ANeuralNetworksMemoryDesc * desc,const ANeuralNetworksCompilation * compilation,uint32_t index,float frequency)951 int ANeuralNetworksMemoryDesc_addOutputRole(ANeuralNetworksMemoryDesc* desc,
952 const ANeuralNetworksCompilation* compilation,
953 uint32_t index, float frequency) {
954 NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemoryDesc_addOutputRole");
955 if (!desc || !compilation) {
956 LOG(ERROR) << "ANeuralNetworksMemoryDesc_addOutputRole passed a nullptr";
957 return ANEURALNETWORKS_UNEXPECTED_NULL;
958 }
959 MemoryBuilder* mb = reinterpret_cast<MemoryBuilder*>(desc);
960 const CompilationBuilder* c = reinterpret_cast<const CompilationBuilder*>(compilation);
961 return mb->addRole(*c, IOType::OUTPUT, index, frequency);
962 }
963
ANeuralNetworksMemoryDesc_setDimensions(ANeuralNetworksMemoryDesc * desc,uint32_t rank,const uint32_t * dimensions)964 int ANeuralNetworksMemoryDesc_setDimensions(ANeuralNetworksMemoryDesc* desc, uint32_t rank,
965 const uint32_t* dimensions) {
966 NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemoryDesc_setDimensions");
967 if (!desc || (!dimensions && rank > 0)) {
968 LOG(ERROR) << "ANeuralNetworksMemoryDesc_setDimensions passed a nullptr";
969 return ANEURALNETWORKS_UNEXPECTED_NULL;
970 }
971 const std::vector<uint32_t> dims(dimensions, dimensions + rank);
972 MemoryBuilder* mb = reinterpret_cast<MemoryBuilder*>(desc);
973 return mb->setDimensions(dims);
974 }
975
ANeuralNetworksMemoryDesc_finish(ANeuralNetworksMemoryDesc * desc)976 int ANeuralNetworksMemoryDesc_finish(ANeuralNetworksMemoryDesc* desc) {
977 NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemoryDesc_finish");
978 if (!desc) {
979 LOG(ERROR) << "ANeuralNetworksMemoryDesc_finish passed a nullptr";
980 return ANEURALNETWORKS_UNEXPECTED_NULL;
981 }
982 MemoryBuilder* mb = reinterpret_cast<MemoryBuilder*>(desc);
983 return mb->finish();
984 }
985
ANeuralNetworksMemory_createFromDesc(const ANeuralNetworksMemoryDesc * desc,ANeuralNetworksMemory ** memory)986 int ANeuralNetworksMemory_createFromDesc(const ANeuralNetworksMemoryDesc* desc,
987 ANeuralNetworksMemory** memory) {
988 NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksMemory_createFromDesc");
989 if (memory != nullptr) {
990 *memory = nullptr;
991 }
992 if (!desc || !memory) {
993 LOG(ERROR) << "ANeuralNetworksMemory_createFromDesc passed a nullptr";
994 return ANEURALNETWORKS_UNEXPECTED_NULL;
995 }
996 const MemoryBuilder* mb = reinterpret_cast<const MemoryBuilder*>(desc);
997 auto [n, m] = mb->allocate();
998 if (n != ANEURALNETWORKS_NO_ERROR) {
999 return n;
1000 }
1001 *memory = reinterpret_cast<ANeuralNetworksMemory*>(m.release());
1002 return ANEURALNETWORKS_NO_ERROR;
1003 }
1004
ANeuralNetworksMemory_copy(const ANeuralNetworksMemory * src,const ANeuralNetworksMemory * dst)1005 int ANeuralNetworksMemory_copy(const ANeuralNetworksMemory* src, const ANeuralNetworksMemory* dst) {
1006 NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksMemory_copy");
1007 if (!src || !dst) {
1008 LOG(ERROR) << "ANeuralNetworksMemory_copy passed a nullptr";
1009 return ANEURALNETWORKS_UNEXPECTED_NULL;
1010 }
1011 const RuntimeMemory* s = reinterpret_cast<const RuntimeMemory*>(src);
1012 const RuntimeMemory* d = reinterpret_cast<const RuntimeMemory*>(dst);
1013 return RuntimeMemory::copy(*s, *d);
1014 }
1015
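// Wraps an existing file descriptor (e.g. from ashmem or a memfd) as an NNAPI memory object. A
// minimal usage sketch, assuming "fd" maps a buffer of "size" bytes (names are illustrative):
//
//     ANeuralNetworksMemory* memory = nullptr;
//     ANeuralNetworksMemory_createFromFd(size, PROT_READ | PROT_WRITE, fd, /*offset=*/0, &memory);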
int ANeuralNetworksMemory_createFromFd(size_t size, int prot, int fd, size_t offset,
        ANeuralNetworksMemory** memory) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksMemory_createFromFd");
    if (memory != nullptr) {
        *memory = nullptr;
    }
    if (!memory) {
        LOG(ERROR) << "ANeuralNetworksMemory_createFromFd passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    int n = ANEURALNETWORKS_NO_ERROR;
    std::unique_ptr<MemoryFd> m;
    std::tie(n, m) = MemoryFd::create(size, prot, fd, offset);
    if (n != ANEURALNETWORKS_NO_ERROR) {
        return n;
    }
    *memory = reinterpret_cast<ANeuralNetworksMemory*>(m.release());
    return ANEURALNETWORKS_NO_ERROR;
}

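// Wraps an AHardwareBuffer as an NNAPI memory object. The underlying buffer is referenced rather
// than copied, so the caller should keep the AHardwareBuffer alive while the returned memory
// object is in use.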
int ANeuralNetworksMemory_createFromAHardwareBuffer(const AHardwareBuffer* ahwb,
        ANeuralNetworksMemory** memory) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksMemory_createFromAHardwareBuffer");
    if (memory != nullptr) {
        *memory = nullptr;
    }
    if (!ahwb || !memory) {
        LOG(ERROR) << "ANeuralNetworksMemory_createFromAHardwareBuffer passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    int n = ANEURALNETWORKS_NO_ERROR;
    std::unique_ptr<MemoryAHWB> m;
    std::tie(n, m) = MemoryAHWB::create(*ahwb);
    if (n != ANEURALNETWORKS_NO_ERROR) {
        return n;
    }
    *memory = reinterpret_cast<ANeuralNetworksMemory*>(m.release());
    return ANEURALNETWORKS_NO_ERROR;
}

void ANeuralNetworksMemory_free(ANeuralNetworksMemory* memory) {
    NNTRACE_RT(NNTRACE_PHASE_TERMINATION, "ANeuralNetworksMemory_free");
    // No validation. Free of nullptr is valid.
    RuntimeMemory* m = reinterpret_cast<RuntimeMemory*>(memory);
    delete m;
}

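// In the V2 runtime, models are accumulated in a FlatbufferModelBuilder so they can later be
// serialized to a TFLite flatbuffer (see ANeuralNetworksCompilation_create). A minimal sketch of
// the model-building sequence (operand types and indices are illustrative only):
//
//     ANeuralNetworksModel* model = nullptr;
//     ANeuralNetworksModel_create(&model);
//     ANeuralNetworksModel_addOperand(model, &operandType);        // repeated per operand
//     ANeuralNetworksModel_setOperandValue(model, 1, data, size);  // constant operands
//     ANeuralNetworksModel_addOperation(model, ANEURALNETWORKS_ADD, 3, inputs, 1, outputs);
//     ANeuralNetworksModel_identifyInputsAndOutputs(model, 1, modelInputs, 1, modelOutputs);
//     ANeuralNetworksModel_finish(model);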
int ANeuralNetworksModel_create(ANeuralNetworksModel** model) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_create");
    initVLogMask();
    if (!model) {
        LOG(ERROR) << "ANeuralNetworksModel_create passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    FlatbufferModelBuilder* m = new (std::nothrow) FlatbufferModelBuilder();
    if (m == nullptr) {
        *model = nullptr;
        return ANEURALNETWORKS_OUT_OF_MEMORY;
    }
    *model = reinterpret_cast<ANeuralNetworksModel*>(m);
    return ANEURALNETWORKS_NO_ERROR;
}

void ANeuralNetworksModel_free(ANeuralNetworksModel* model) {
    NNTRACE_RT(NNTRACE_PHASE_TERMINATION, "ANeuralNetworksModel_free");
    // No validation. Free of nullptr is valid.
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    delete m;
}

int ANeuralNetworksModel_finish(ANeuralNetworksModel* model) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_finish");
    if (!model) {
        LOG(ERROR) << "ANeuralNetworksModel_finish passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->finish();
}

int ANeuralNetworksModel_addOperand(ANeuralNetworksModel* model,
        const ANeuralNetworksOperandType* type) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_addOperand");
    if (!model || !type) {
        LOG(ERROR) << "ANeuralNetworksModel_addOperand passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->addOperand(*type);
}

int ANeuralNetworksModel_setOperandValue(ANeuralNetworksModel* model, int32_t index,
        const void* buffer, size_t length) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_setOperandValue");
    if (!model || (!buffer && length != 0)) {
        LOG(ERROR) << "ANeuralNetworksModel_setOperandValue passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->setOperandValue(index, buffer, length);
}

int ANeuralNetworksModel_setOperandValueFromMemory(ANeuralNetworksModel* model, int32_t index,
        const ANeuralNetworksMemory* memory, size_t offset, size_t length) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_setOperandValueFromMemory");
    if (!model || !memory) {
        LOG(ERROR) << "ANeuralNetworksModel_setOperandValueFromMemory passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    const RuntimeMemory* mem = reinterpret_cast<const RuntimeMemory*>(memory);
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->setOperandValueFromMemory(index, mem, offset, length);
}

int ANeuralNetworksModel_setOperandValueFromModel(ANeuralNetworksModel* model, int32_t index,
        const ANeuralNetworksModel* value) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_setOperandValueFromModel");
    if (!model || !value) {
        LOG(ERROR) << "ANeuralNetworksModel_setOperandValueFromModel passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    const FlatbufferModelBuilder* val = reinterpret_cast<const FlatbufferModelBuilder*>(value);
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->setOperandValueFromModel(index, val);
}

int ANeuralNetworksModel_addOperation(ANeuralNetworksModel* model,
        ANeuralNetworksOperationType type, uint32_t inputCount, const uint32_t* inputs,
        uint32_t outputCount, const uint32_t* outputs) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_addOperation");
    if (!model || !inputs || !outputs) {
        LOG(ERROR) << "ANeuralNetworksModel_addOperation passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->addOperation(type, inputCount, inputs, outputCount, outputs);
}

int ANeuralNetworksModel_setOperandSymmPerChannelQuantParams(
        ANeuralNetworksModel* model, int32_t index,
        const ANeuralNetworksSymmPerChannelQuantParams* channelQuant) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION,
               "ANeuralNetworksModel_setOperandSymmPerChannelQuantParams");
    if (!model || !channelQuant) {
        LOG(ERROR) << "ANeuralNetworksModel_setOperandSymmPerChannelQuantParams passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->setOperandSymmPerChannelQuantParams(index, *channelQuant);
}

int ANeuralNetworksModel_identifyInputsAndOutputs(ANeuralNetworksModel* model, uint32_t inputCount,
        const uint32_t* inputs, uint32_t outputCount, const uint32_t* outputs) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_identifyInputsAndOutputs");
    if (!model || !inputs || !outputs) {
        LOG(ERROR) << "ANeuralNetworksModel_identifyInputsAndOutputs passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->identifyInputsAndOutputs(inputCount, inputs, outputCount, outputs);
}

int ANeuralNetworksModel_relaxComputationFloat32toFloat16(ANeuralNetworksModel* model, bool allow) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_relaxComputationFloat32toFloat16");
    if (!model) {
        LOG(ERROR) << "ANeuralNetworksModel_relaxComputationFloat32toFloat16 passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->relaxComputationFloat32toFloat16(allow);
}

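// Holds the state produced by ANeuralNetworksCompilation_create: the serialized TFLite model
// wrapped in a tflite::FlatBufferModel, plus a flag tracking whether the compilation has been
// finished. In V2 the "compilation" handle exposed through the C API points at this context.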
struct CompilationContext {
    std::unique_ptr<tflite::FlatBufferModel> flatBufferModel;
    bool isFinished;

    CompilationContext(std::unique_ptr<tflite::FlatBufferModel> flatBufferModel)
        : flatBufferModel(std::move(flatBufferModel)), isFinished(false) {}
};

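// "Compilation" in V2 means converting the accumulated FlatbufferModelBuilder contents into a
// TFLite flatbuffer and verifying that TFLite can map it. Caching, preferences, priority, and
// timeouts are not plumbed through yet (see the stubs below).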
int ANeuralNetworksCompilation_create(ANeuralNetworksModel* model,
        ANeuralNetworksCompilation** compilation) {
    NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_create");
    if (!model || !compilation) {
        LOG(ERROR) << "ANeuralNetworksCompilation_create passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }

    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);

    auto tfliteModel = m->createTfliteModel();
    if (!tfliteModel.ok()) {
        LOG(ERROR) << "ANeuralNetworksCompilation_create error: " << tfliteModel.error();
        return ANEURALNETWORKS_OP_FAILED;
    }

    std::unique_ptr<tflite::FlatBufferModel> flatBufferModel =
            tflite::FlatBufferModel::BuildFromModel(tfliteModel.value());
    if (!flatBufferModel) {
        LOG(ERROR) << "ANeuralNetworksCompilation_create error: tflite::BuildFromModel error";
        return ANEURALNETWORKS_OP_FAILED;
    }

    std::unique_ptr<CompilationContext> context =
            std::make_unique<CompilationContext>(std::move(flatBufferModel));
    *compilation = reinterpret_cast<ANeuralNetworksCompilation*>(context.release());
    return ANEURALNETWORKS_NO_ERROR;
}

void ANeuralNetworksCompilation_free(ANeuralNetworksCompilation* compilation) {
    NNTRACE_RT(NNTRACE_PHASE_TERMINATION, "ANeuralNetworksCompilation_free");
    // No validation. Free of nullptr is valid.
    auto c = reinterpret_cast<CompilationContext*>(compilation);
    delete c;
}

int ANeuralNetworksCompilation_setPreference(ANeuralNetworksCompilation* /* compilation */,
        int32_t /* preference */) {
    NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_setPreference");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksCompilation_setPreference unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksCompilation_setCaching(ANeuralNetworksCompilation* /* compilation */,
        const char* /* cacheDir */, const uint8_t* /* token */) {
    NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_setCaching");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksCompilation_setCaching unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksCompilation_finish(ANeuralNetworksCompilation* compilation) {
    NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_finish");
    if (!compilation) {
        LOG(ERROR) << "ANeuralNetworksCompilation_finish passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }

    auto context = reinterpret_cast<CompilationContext*>(compilation);
    if (context->isFinished) {
        LOG(ERROR) << "ANeuralNetworksCompilation_finish has already been called";
        return ANEURALNETWORKS_BAD_STATE;
    }
    context->isFinished = true;

    return ANEURALNETWORKS_NO_ERROR;
}

int ANeuralNetworksCompilation_setPriority(ANeuralNetworksCompilation* /* compilation */,
        int /* priority */) {
    NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_setPriority");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksCompilation_setPriority unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksCompilation_setTimeout(ANeuralNetworksCompilation* /* compilation */,
        uint64_t /* duration */) {
    NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_setTimeout");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksCompilation_setTimeout unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

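// Creates an execution by instantiating a TFLite interpreter over the compiled flatbuffer, using
// the builtin op resolver. The resulting ExecutionContext (defined elsewhere in the runtime)
// records per-execution input/output bindings until compute is invoked.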
int ANeuralNetworksExecution_create(ANeuralNetworksCompilation* compilation,
        ANeuralNetworksExecution** execution) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_create");
    if (!compilation || !execution) {
        LOG(ERROR) << "ANeuralNetworksExecution_create passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    auto c = reinterpret_cast<CompilationContext*>(compilation);

    tflite::ops::builtin::BuiltinOpResolver resolver;
    std::unique_ptr<tflite::Interpreter> interpreter;
    auto status = tflite::InterpreterBuilder(*c->flatBufferModel, resolver)(&interpreter);
    if (status != kTfLiteOk) {
        LOG(ERROR) << "ANeuralNetworksExecution_create error: interpreter build status " << status
                   << " != " << kTfLiteOk;
        return ANEURALNETWORKS_OP_FAILED;
    }

    std::unique_ptr<ExecutionContext> context =
            std::make_unique<ExecutionContext>(std::move(interpreter));
    *execution = reinterpret_cast<ANeuralNetworksExecution*>(context.release());
    return ANEURALNETWORKS_NO_ERROR;
}

void ANeuralNetworksExecution_free(ANeuralNetworksExecution* execution) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_free");
    // Free of nullptr is valid.
    auto r = reinterpret_cast<ExecutionContext*>(execution);
    delete r;
}

int ANeuralNetworksExecution_getOutputOperandRank(ANeuralNetworksExecution* /* execution */,
        int32_t /* index */, uint32_t* /* rank */) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_getOutputOperandRank");
    // Not supported yet in NNAPI v2
    LOG(ERROR)
            << "ANeuralNetworksExecution_getOutputOperandRank unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksExecution_getOutputOperandDimensions(ANeuralNetworksExecution* /* execution */,
        int32_t /* index */, uint32_t* /* dimensions */) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_getOutputOperandDimensions");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksExecution_getOutputOperandDimensions unimplemented in Neural "
                  "Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

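// setInput records the caller's buffer for a model input. Because dynamic shapes are not
// supported in V2, the optional ANeuralNetworksOperandType must be null, and the buffer length
// must exactly match the size of the corresponding TFLite input tensor.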
int ANeuralNetworksExecution_setInput(ANeuralNetworksExecution* execution, int32_t index,
        const ANeuralNetworksOperandType* type, const void* buffer, size_t length) {
    NNTRACE_RT(NNTRACE_PHASE_INPUTS_AND_OUTPUTS, "ANeuralNetworksExecution_setInput");
    // We do not support dynamic shapes
    if (type != nullptr) {
        LOG(ERROR) << "ANeuralNetworksExecution_setInput expected a nullptr for "
                      "ANeuralNetworksOperandType* argument";
        return ANEURALNETWORKS_BAD_DATA;
    }
    if (!execution || (!buffer && length != 0)) {
        LOG(ERROR) << "ANeuralNetworksExecution_setInput passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    auto context = reinterpret_cast<ExecutionContext*>(execution);
    if (index < 0 || index >= static_cast<int32_t>(context->interpreter->inputs().size())) {
        LOG(ERROR) << "ANeuralNetworksExecution_setInput index out of bounds";
        return ANEURALNETWORKS_BAD_DATA;
    }

    if (context->interpreter->input_tensor(index)->bytes != length) {
        LOG(ERROR)
                << "ANeuralNetworksExecution_setInput input bytes is different from buffer length";
        return ANEURALNETWORKS_BAD_DATA;
    }
    context->inputs[index] = buffer;
    context->inputSizes[index] = length;
    return ANEURALNETWORKS_NO_ERROR;
}

int ANeuralNetworksExecution_setInputFromMemory(ANeuralNetworksExecution* /* execution */,
        int32_t /* index */, const ANeuralNetworksOperandType* /* type */,
        const ANeuralNetworksMemory* /* memory */, size_t /* offset */, size_t /* length */) {
    NNTRACE_RT(NNTRACE_PHASE_INPUTS_AND_OUTPUTS, "ANeuralNetworksExecution_setInputFromMemory");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksExecution_setInputFromMemory unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

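// setOutput takes one of two paths depending on buffer alignment: buffers that satisfy TFLite's
// default tensor alignment are registered directly as custom allocations on the output tensor
// (avoiding a copy), while unaligned buffers are recorded in the context, presumably so the V2
// compute path can copy the result out after the interpreter runs.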
int ANeuralNetworksExecution_setOutput(ANeuralNetworksExecution* execution, int32_t index,
        const ANeuralNetworksOperandType* type, void* buffer, size_t length) {
    NNTRACE_RT(NNTRACE_PHASE_INPUTS_AND_OUTPUTS, "ANeuralNetworksExecution_setOutput");
    // We do not support dynamic shapes
    if (type != nullptr) {
        LOG(ERROR) << "ANeuralNetworksExecution_setOutput expected a nullptr for "
                      "ANeuralNetworksOperandType* argument";
        return ANEURALNETWORKS_BAD_DATA;
    }

    if (!execution || (!buffer && length != 0)) {
        LOG(ERROR) << "ANeuralNetworksExecution_setOutput passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }

    auto context = reinterpret_cast<ExecutionContext*>(execution);
    if (index < 0 || index >= static_cast<int32_t>(context->interpreter->outputs().size())) {
        LOG(ERROR) << "ANeuralNetworksExecution_setOutput index out of bounds";
        return ANEURALNETWORKS_BAD_DATA;
    }

    const size_t bufferSize = std::max<size_t>(length, 1);
    if (bufferSize != context->interpreter->output_tensor(index)->bytes) {
        LOG(ERROR) << "ANeuralNetworksExecution_setOutput length is not equal to the output tensor "
                      "size";
        return ANEURALNETWORKS_BAD_DATA;
    }

    const intptr_t dataPtrValue = reinterpret_cast<intptr_t>(buffer);
    if (dataPtrValue % tflite::kDefaultTensorAlignment != 0) {
        context->outputs[index] = buffer;
        context->outputSizes[index] = bufferSize;
    } else {
        TfLiteCustomAllocation allocation = {.data = buffer, .bytes = bufferSize};
        context->interpreter->SetCustomAllocationForTensor(context->interpreter->outputs()[index],
                                                           allocation,
                                                           kTfLiteCustomAllocationFlagsNone);
    }

    context->isOutputSpecifiedAtIndex[index] = true;
    return ANEURALNETWORKS_NO_ERROR;
}

int ANeuralNetworksExecution_setOutputFromMemory(ANeuralNetworksExecution* /* execution */,
        int32_t /* index */, const ANeuralNetworksOperandType* /* type */,
        const ANeuralNetworksMemory* /* memory */, size_t /* offset */, size_t /* length */) {
    NNTRACE_RT(NNTRACE_PHASE_INPUTS_AND_OUTPUTS, "ANeuralNetworksExecution_setOutputFromMemory");
    // Not supported yet in NNAPI v2
    LOG(ERROR)
            << "ANeuralNetworksExecution_setOutputFromMemory unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksExecution_startCompute(ANeuralNetworksExecution* /* execution */,
        ANeuralNetworksEvent** /* event */) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_startCompute");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksExecution_startCompute unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksExecution_setTimeout(ANeuralNetworksExecution* /* execution */,
        uint64_t /* duration */) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_setTimeout");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksExecution_setTimeout unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksEvent_wait(ANeuralNetworksEvent* event) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksEvent_wait");
    if (event == nullptr) {
        LOG(ERROR) << "ANeuralNetworksEvent_wait passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }

    IEvent* e = reinterpret_cast<IEvent*>(event);
    return convertErrorStatusToResultCode(e->wait());
}

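// Freeing an event first waits for it to signal so that an in-flight computation never outlives
// the resources the event refers to.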
void ANeuralNetworksEvent_free(ANeuralNetworksEvent* event) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksEvent_free");
    // No validation. Free of nullptr is valid.
    if (event) {
        IEvent* e = reinterpret_cast<IEvent*>(event);
        e->wait();
        delete e;
    }
}

int ANeuralNetworksExecution_setLoopTimeout(ANeuralNetworksExecution* /* execution */,
        uint64_t /* duration */) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_setLoopTimeout");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksExecution_setLoopTimeout unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

uint64_t ANeuralNetworks_getDefaultLoopTimeout() {
    return operation_while::kTimeoutNsDefault;
}

uint64_t ANeuralNetworks_getMaximumLoopTimeout() {
    return operation_while::kTimeoutNsMaximum;
}

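// Reports whether a driver advertises a vendor extension by name. A minimal usage sketch
// (assuming "device" was obtained via ANeuralNetworks_getDevice; the extension name is
// illustrative only):
//
//     bool supported = false;
//     ANeuralNetworksDevice_getExtensionSupport(device, "com.example.my_extension", &supported);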
int ANeuralNetworksDevice_getExtensionSupport(const ANeuralNetworksDevice* device,
        const char* extensionName, bool* isExtensionSupported) {
    if (device == nullptr || extensionName == nullptr || isExtensionSupported == nullptr) {
        LOG(ERROR) << "ANeuralNetworksDevice_getExtensionSupport passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }

    const Device* d = reinterpret_cast<const Device*>(device);
    const auto& supportedExtensions = d->getSupportedExtensions();
    *isExtensionSupported = std::any_of(supportedExtensions.begin(), supportedExtensions.end(),
                                        [extensionName](const auto& supportedExtension) {
                                            return supportedExtension.name == extensionName;
                                        });

    return ANEURALNETWORKS_NO_ERROR;
}

int ANeuralNetworksModel_getExtensionOperandType(ANeuralNetworksModel* model,
        const char* extensionName, uint16_t operandCodeWithinExtension, int32_t* type) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_getExtensionOperandType");
    if (!model || !extensionName || !type) {
        LOG(ERROR) << "ANeuralNetworksModel_getExtensionOperandType passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->getExtensionType(extensionName, operandCodeWithinExtension, type);
}

int ANeuralNetworksModel_getExtensionOperationType(ANeuralNetworksModel* model,
        const char* extensionName, uint16_t operationCodeWithinExtension,
        ANeuralNetworksOperationType* type) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_getExtensionOperationType");
    if (!model || !extensionName || !type) {
        LOG(ERROR) << "ANeuralNetworksModel_getExtensionOperationType passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->getExtensionType(extensionName, operationCodeWithinExtension, type);
}

int ANeuralNetworksModel_setOperandExtensionData(ANeuralNetworksModel* model, int32_t index,
        const void* data, size_t length) {
    NNTRACE_RT(NNTRACE_PHASE_PREPARATION, "ANeuralNetworksModel_setOperandExtensionData");
    if (!model || (!data && length != 0)) {
        LOG(ERROR) << "ANeuralNetworksModel_setOperandExtensionData passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    FlatbufferModelBuilder* m = reinterpret_cast<FlatbufferModelBuilder*>(model);
    return m->setOperandExtensionData(index, data, length);
}

int ANeuralNetworksCompilation_addExtensionAttribute(ANeuralNetworksCompilation* /* compilation */,
        const char* /* extensionName */, uint16_t /* attributeCodeWithinExtension */,
        const void* /* data */, size_t /* length */) {
    NNTRACE_RT(NNTRACE_PHASE_COMPILATION, "ANeuralNetworksCompilation_addExtensionAttribute");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksCompilation_addExtensionAttribute unimplemented in Neural "
                  "Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksExecution_addExtensionAttribute(ANeuralNetworksExecution* /* execution */,
        const char* /* extensionName */, uint16_t /* attributeCodeWithinExtension */,
        const void* /* data */, size_t /* length */) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_addExtensionAttribute");
    // Not supported yet in NNAPI v2
    LOG(ERROR)
            << "ANeuralNetworksExecution_addExtensionAttribute unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

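// Sync-fence events wrap an Android sync_fence file descriptor so NNAPI executions can be chained
// with other asynchronous work. Per the NNAPI contract, the event dups the provided fd rather
// than taking ownership of it, so the caller keeps responsibility for its own descriptor; the
// SyncFenceEvent implementation lives elsewhere in the runtime.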
int ANeuralNetworksEvent_createFromSyncFenceFd(int syncFenceFd, ANeuralNetworksEvent** event) {
    if (event == nullptr) {
        LOG(ERROR) << "ANeuralNetworksEvent_createFromSyncFenceFd passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    if (syncFenceFd <= 0) {
        LOG(ERROR) << "ANeuralNetworksEvent_createFromSyncFenceFd passed an invalid fd: "
                   << syncFenceFd;
        *event = nullptr;
        return ANEURALNETWORKS_BAD_DATA;
    }
    std::unique_ptr<SyncFenceEvent> e =
            std::make_unique<SyncFenceEvent>(syncFenceFd, nullptr, nullptr);
    *event = reinterpret_cast<ANeuralNetworksEvent*>(e.release());
    return ANEURALNETWORKS_NO_ERROR;
}

int ANeuralNetworksEvent_getSyncFenceFd(const ANeuralNetworksEvent* event, int* syncFenceFd) {
    if (syncFenceFd == nullptr) {
        LOG(ERROR) << "ANeuralNetworksEvent_getSyncFenceFd passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    *syncFenceFd = -1;
    if (event == nullptr) {
        LOG(ERROR) << "ANeuralNetworksEvent_getSyncFenceFd passed a nullptr";
        return ANEURALNETWORKS_UNEXPECTED_NULL;
    }
    const IEvent* e = reinterpret_cast<const IEvent*>(event);
    // The client owns the dupped fd, and is responsible for closing it.
    *syncFenceFd = e->getSyncFenceFd(/*shouldDup*/ true);
    if (*syncFenceFd <= 0) {
        LOG(ERROR) << "ANeuralNetworksEvent_getSyncFenceFd unable to get valid sync_fence fd";
        *syncFenceFd = -1;
        return ANEURALNETWORKS_BAD_DATA;
    }
    return ANEURALNETWORKS_NO_ERROR;
}

int ANeuralNetworksExecution_startComputeWithDependencies(
        ANeuralNetworksExecution* /* execution */,
        const ANeuralNetworksEvent* const* /* dependencies */, uint32_t /* numOfDependencies */,
        uint64_t /* duration */, ANeuralNetworksEvent** /* event */) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_startComputeWithDependencies");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksExecution_startComputeWithDependencies unimplemented in Neural "
                  "Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int64_t ANeuralNetworks_getRuntimeFeatureLevel() {
    return getRuntimeFeatureLevelImpl();
}

int ANeuralNetworksExecution_enableInputAndOutputPadding(ANeuralNetworksExecution* /* execution */,
        bool /* enable */) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_enableInputAndOutputPadding");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksExecution_enableInputAndOutputPadding unimplemented in Neural "
                  "Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput(
        const ANeuralNetworksCompilation* /* compilation */, uint32_t /* index */,
        uint32_t* /* alignment */) {
    NNTRACE_RT(NNTRACE_PHASE_COMPILATION,
               "ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput unimplemented in "
                  "Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput(
        const ANeuralNetworksCompilation* /* compilation */, uint32_t /* index */,
        uint32_t* /* padding */) {
    NNTRACE_RT(NNTRACE_PHASE_COMPILATION,
               "ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput unimplemented in "
                  "Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput(
        const ANeuralNetworksCompilation* /* compilation */, uint32_t /* index */,
        uint32_t* /* alignment */) {
    NNTRACE_RT(NNTRACE_PHASE_COMPILATION,
               "ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput");
    // Not supported yet in NNAPI v2
    LOG(ERROR)
            << "ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput unimplemented in "
               "Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput(
        const ANeuralNetworksCompilation* /* compilation */, uint32_t /* index */,
        uint32_t* /* padding */) {
    NNTRACE_RT(NNTRACE_PHASE_COMPILATION,
               "ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput unimplemented in "
                  "Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}

int ANeuralNetworksExecution_setReusable(ANeuralNetworksExecution* /* execution */,
        bool /* reusable */) {
    NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "ANeuralNetworksExecution_setReusable");
    // Not supported yet in NNAPI v2
    LOG(ERROR) << "ANeuralNetworksExecution_setReusable unimplemented in Neural Networks V2";
    return ANEURALNETWORKS_OP_FAILED;
}
