/*
 * Copyright (C) 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_HARDWARE_NEURALNETWORKS_AIDL_VTS_HAL_NEURALNETWORKS_H
#define ANDROID_HARDWARE_NEURALNETWORKS_AIDL_VTS_HAL_NEURALNETWORKS_H

#include <gtest/gtest.h>
#include <memory>
#include <string>
#include <vector>

#include <aidl/android/hardware/neuralnetworks/IDevice.h>

#include "Callbacks.h"
#include "Utils.h"

namespace aidl::android::hardware::neuralnetworks::vts::functional {

using NamedDevice = Named<std::shared_ptr<IDevice>>;
using NeuralNetworksAidlTestParam = NamedDevice;

// Minimum AIDL interface version required for NNAPI feature level 8.
constexpr int kMinAidlLevelForFL8 = 4;

// Test fixture parameterized over each available NNAPI device.
class NeuralNetworksAidlTest : public testing::TestWithParam<NeuralNetworksAidlTestParam> {
  protected:
    void SetUp() override;
    const std::shared_ptr<IDevice> kDevice = getData(GetParam());
};

// Returns every registered NNAPI device instance, each paired with its name.
const std::vector<NamedDevice>& getNamedDevices();

// Produces the test-name suffix for a parameterized test from the device entry.
std::string printNeuralNetworksAidlTest(
        const testing::TestParamInfo<NeuralNetworksAidlTestParam>& info);

// Instantiates the given test suite once per device returned by getNamedDevices().
#define INSTANTIATE_DEVICE_TEST(TestSuite)                                                 \
    GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TestSuite);                              \
    INSTANTIATE_TEST_SUITE_P(PerInstance, TestSuite, testing::ValuesIn(getNamedDevices()), \
                             printNeuralNetworksAidlTest)
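//
// Illustrative usage only (a sketch, not part of the original header); the suite
// and test names below are hypothetical:
//
//     class MyAidlTest : public NeuralNetworksAidlTest {};
//     TEST_P(MyAidlTest, DeviceIsAvailable) { ASSERT_NE(kDevice, nullptr); }
//     INSTANTIATE_DEVICE_TEST(MyAidlTest);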

// Creates an IPreparedModel object from the given model. If the model cannot be
// prepared, "*preparedModel" is set to nullptr instead.
void createPreparedModel(const std::shared_ptr<IDevice>& device, const Model& model,
                         std::shared_ptr<IPreparedModel>* preparedModel, bool reportSkipping = true,
                         bool useConfig = false);
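//
// Illustrative usage only (a sketch, not part of the original header); "model" is
// a hypothetical test model built elsewhere:
//
//     std::shared_ptr<IPreparedModel> preparedModel;
//     createPreparedModel(kDevice, model, &preparedModel);
//     if (preparedModel == nullptr) GTEST_SKIP();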

// Execution paths exercised by the tests.
enum class Executor { SYNC, BURST, FENCED };

std::string toString(Executor executor);

}  // namespace aidl::android::hardware::neuralnetworks::vts::functional

#endif  // ANDROID_HARDWARE_NEURALNETWORKS_AIDL_VTS_HAL_NEURALNETWORKS_H