2 * Copyright (C) 2017 The Android Open Source Project
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
17 #define LOG_TAG "neuralnetworks_hidl_hal_test"
19 #include "VtsHalNeuralnetworksV1_0TargetTest.h"
22 #include "TestHarness.h"
24 #include <android-base/logging.h>
25 #include <android/hidl/memory/1.0/IMemory.h>
26 #include <hidlmemory/mapping.h>
30 namespace neuralnetworks {
33 namespace functional {
35 using ::android::hardware::neuralnetworks::V1_0::implementation::Event;
36 using ::generated_tests::MixedTypedExampleType;
37 namespace generated_tests {
38 extern void Execute(const sp<IDevice>&, std::function<Model(void)>, std::function<bool(int)>,
39 const std::vector<MixedTypedExampleType>&);
// A class for test environment setup
// Default constructor; service registration happens later in registerTestServices().
NeuralnetworksHidlEnvironment::NeuralnetworksHidlEnvironment() {}
45 NeuralnetworksHidlEnvironment::~NeuralnetworksHidlEnvironment() {}
47 NeuralnetworksHidlEnvironment* NeuralnetworksHidlEnvironment::getInstance() {
48 // This has to return a "new" object because it is freed inside
49 // ::testing::AddGlobalTestEnvironment when the gtest is being torn down
50 static NeuralnetworksHidlEnvironment* instance = new NeuralnetworksHidlEnvironment();
54 void NeuralnetworksHidlEnvironment::registerTestServices() {
55 registerTestService<IDevice>();
// The main test class for NEURALNETWORK HIDL HAL.
// Trivial destructor; the sp<IDevice> member releases the proxy implicitly.
NeuralnetworksHidlTest::~NeuralnetworksHidlTest() {}
61 void NeuralnetworksHidlTest::SetUp() {
62 device = ::testing::VtsHalHidlTargetTestBase::getService<IDevice>(
63 NeuralnetworksHidlEnvironment::getInstance());
64 ASSERT_NE(nullptr, device.get());
67 void NeuralnetworksHidlTest::TearDown() {}
69 sp<IPreparedModel> NeuralnetworksHidlTest::doPrepareModelShortcut(const Model& model) {
70 sp<IPreparedModel> preparedModel;
71 ErrorStatus prepareStatus;
72 sp<Event> preparationEvent = new Event();
73 if (preparationEvent.get() == nullptr) {
77 Return<void> prepareRet = device->prepareModel(
78 model, preparationEvent, [&](ErrorStatus status, const sp<IPreparedModel>& prepared) {
79 prepareStatus = status;
80 preparedModel = prepared;
83 if (!prepareRet.isOk() || prepareStatus != ErrorStatus::NONE ||
84 preparedModel.get() == nullptr) {
87 Event::Status eventStatus = preparationEvent->wait();
88 if (eventStatus != Event::Status::SUCCESS) {
96 TEST_F(NeuralnetworksHidlTest, CreateDevice) {}
99 TEST_F(NeuralnetworksHidlTest, StatusTest) {
100 Return<DeviceStatus> status = device->getStatus();
101 ASSERT_TRUE(status.isOk());
102 EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast<DeviceStatus>(status));
106 TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) {
108 device->getCapabilities([](ErrorStatus status, const Capabilities& capabilities) {
109 EXPECT_EQ(ErrorStatus::NONE, status);
110 EXPECT_NE(nullptr, capabilities.supportedOperationTuples.data());
111 EXPECT_NE(0ull, capabilities.supportedOperationTuples.size());
112 EXPECT_EQ(0u, static_cast<uint32_t>(capabilities.cachesCompilation) & ~0x1);
113 EXPECT_LT(0.0f, capabilities.float32Performance.execTime);
114 EXPECT_LT(0.0f, capabilities.float32Performance.powerUsage);
115 EXPECT_LT(0.0f, capabilities.quantized8Performance.execTime);
116 EXPECT_LT(0.0f, capabilities.quantized8Performance.powerUsage);
118 EXPECT_TRUE(ret.isOk());
121 // supported operations positive test
122 TEST_F(NeuralnetworksHidlTest, SupportedOperationsPositiveTest) {
123 Model model = createValidTestModel();
124 Return<void> ret = device->getSupportedOperations(
125 model, [&](ErrorStatus status, const hidl_vec<bool>& supported) {
126 EXPECT_EQ(ErrorStatus::NONE, status);
127 EXPECT_EQ(model.operations.size(), supported.size());
129 EXPECT_TRUE(ret.isOk());
132 // supported operations negative test 1
133 TEST_F(NeuralnetworksHidlTest, SupportedOperationsNegativeTest1) {
134 Model model = createInvalidTestModel1();
135 Return<void> ret = device->getSupportedOperations(
136 model, [&](ErrorStatus status, const hidl_vec<bool>& supported) {
137 EXPECT_EQ(ErrorStatus::INVALID_ARGUMENT, status);
140 EXPECT_TRUE(ret.isOk());
143 // supported operations negative test 2
144 TEST_F(NeuralnetworksHidlTest, SupportedOperationsNegativeTest2) {
145 Model model = createInvalidTestModel2();
146 Return<void> ret = device->getSupportedOperations(
147 model, [&](ErrorStatus status, const hidl_vec<bool>& supported) {
148 EXPECT_EQ(ErrorStatus::INVALID_ARGUMENT, status);
151 EXPECT_TRUE(ret.isOk());
154 // prepare simple model positive test
155 TEST_F(NeuralnetworksHidlTest, SimplePrepareModelPositiveTest) {
156 Model model = createValidTestModel();
157 sp<Event> preparationEvent = new Event();
158 ASSERT_NE(nullptr, preparationEvent.get());
159 Return<void> prepareRet = device->prepareModel(
160 model, preparationEvent, [&](ErrorStatus status, const sp<IPreparedModel>& prepared) {
161 EXPECT_EQ(ErrorStatus::NONE, status);
164 ASSERT_TRUE(prepareRet.isOk());
167 // prepare simple model negative test 1
168 TEST_F(NeuralnetworksHidlTest, SimplePrepareModelNegativeTest1) {
169 Model model = createInvalidTestModel1();
170 sp<Event> preparationEvent = new Event();
171 ASSERT_NE(nullptr, preparationEvent.get());
172 Return<void> prepareRet = device->prepareModel(
173 model, preparationEvent, [&](ErrorStatus status, const sp<IPreparedModel>& prepared) {
174 EXPECT_EQ(ErrorStatus::INVALID_ARGUMENT, status);
177 ASSERT_TRUE(prepareRet.isOk());
180 // prepare simple model negative test 2
181 TEST_F(NeuralnetworksHidlTest, SimplePrepareModelNegativeTest2) {
182 Model model = createInvalidTestModel2();
183 sp<Event> preparationEvent = new Event();
184 ASSERT_NE(nullptr, preparationEvent.get());
185 Return<void> prepareRet = device->prepareModel(
186 model, preparationEvent, [&](ErrorStatus status, const sp<IPreparedModel>& prepared) {
187 EXPECT_EQ(ErrorStatus::INVALID_ARGUMENT, status);
190 ASSERT_TRUE(prepareRet.isOk());
193 // execute simple graph positive test
194 TEST_F(NeuralnetworksHidlTest, SimpleExecuteGraphPositiveTest) {
195 Model model = createValidTestModel();
196 sp<IPreparedModel> preparedModel = doPrepareModelShortcut(model);
197 ASSERT_NE(nullptr, preparedModel.get());
198 Request request = createValidTestRequest();
200 sp<Event> executionEvent = new Event();
201 ASSERT_NE(nullptr, executionEvent.get());
202 Return<ErrorStatus> executeStatus = preparedModel->execute(request, executionEvent);
203 ASSERT_TRUE(executeStatus.isOk());
204 EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executeStatus));
205 Event::Status eventStatus = executionEvent->wait();
206 EXPECT_EQ(Event::Status::SUCCESS, eventStatus);
208 std::vector<float> outputData = {-1.0f, -1.0f, -1.0f, -1.0f};
209 std::vector<float> expectedData = {6.0f, 8.0f, 10.0f, 12.0f};
210 const uint32_t OUTPUT = 1;
212 sp<IMemory> outputMemory = mapMemory(request.pools[OUTPUT]);
213 ASSERT_NE(nullptr, outputMemory.get());
214 float* outputPtr = reinterpret_cast<float*>(static_cast<void*>(outputMemory->getPointer()));
215 ASSERT_NE(nullptr, outputPtr);
216 outputMemory->read();
217 std::copy(outputPtr, outputPtr + outputData.size(), outputData.begin());
218 outputMemory->commit();
219 EXPECT_EQ(expectedData, outputData);
222 // execute simple graph negative test 1
223 TEST_F(NeuralnetworksHidlTest, SimpleExecuteGraphNegativeTest1) {
224 Model model = createValidTestModel();
225 sp<IPreparedModel> preparedModel = doPrepareModelShortcut(model);
226 ASSERT_NE(nullptr, preparedModel.get());
227 Request request = createInvalidTestRequest1();
229 sp<Event> executionEvent = new Event();
230 ASSERT_NE(nullptr, executionEvent.get());
231 Return<ErrorStatus> executeStatus = preparedModel->execute(request, executionEvent);
232 ASSERT_TRUE(executeStatus.isOk());
233 EXPECT_EQ(ErrorStatus::INVALID_ARGUMENT, static_cast<ErrorStatus>(executeStatus));
234 executionEvent->wait();
237 // execute simple graph negative test 2
238 TEST_F(NeuralnetworksHidlTest, SimpleExecuteGraphNegativeTest2) {
239 Model model = createValidTestModel();
240 sp<IPreparedModel> preparedModel = doPrepareModelShortcut(model);
241 ASSERT_NE(nullptr, preparedModel.get());
242 Request request = createInvalidTestRequest2();
244 sp<Event> executionEvent = new Event();
245 ASSERT_NE(nullptr, executionEvent.get());
246 Return<ErrorStatus> executeStatus = preparedModel->execute(request, executionEvent);
247 ASSERT_TRUE(executeStatus.isOk());
248 EXPECT_EQ(ErrorStatus::INVALID_ARGUMENT, static_cast<ErrorStatus>(executeStatus));
249 executionEvent->wait();
252 // Mixed-typed examples
253 typedef MixedTypedExampleType MixedTypedExample;
255 // in frameworks/ml/nn/runtime/tests/generated/
256 #include "all_generated_vts_tests.cpp"
258 // TODO: Add tests for execution failure, or wait_for/wait_until timeout.
260 // https://googleplex-android-review.git.corp.google.com/#/c/platform/hardware/interfaces/+/2654636/5/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworksV1_0TargetTest.cpp@222
262 } // namespace functional
265 } // namespace neuralnetworks
266 } // namespace hardware
267 } // namespace android
269 using android::hardware::neuralnetworks::V1_0::vts::functional::NeuralnetworksHidlEnvironment;
271 int main(int argc, char** argv) {
272 ::testing::AddGlobalTestEnvironment(NeuralnetworksHidlEnvironment::getInstance());
273 ::testing::InitGoogleTest(&argc, argv);
274 NeuralnetworksHidlEnvironment::getInstance()->init(&argc, argv);
276 int status = RUN_ALL_TESTS();