/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>

#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "common/log.h"
#include "hdi_prepared_model_v2_1.h"
#include "memory_manager.h"
#include "transform.h"
#include "test/unittest/common/v2_1/mock_idevice.h"
#include "test/unittest/common/file_utils.h"
#include "tensor.h"
#include "nntensor.h"

using namespace testing;
using namespace testing::ext;
using namespace OHOS::NeuralNetworkRuntime;
namespace OHOS {
namespace NeuralNetworkRuntime {
namespace UnitTest {
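// Test fixture for HDIPreparedModelV2_1. The protected helpers map a shared-memory
// buffer, build IOTensor inputs, and drive Run() against a mocked V2_1 prepared model.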
class HDIPreparedModelTest : public testing::Test {
protected:
    void GetBuffer(void*& buffer, size_t length);
    void InitTensor(std::vector<IOTensor>& inputs, void* buffer, size_t length);
    OH_NN_ReturnCode Run(std::vector<IOTensor>& inputs);
    OH_NN_ReturnCode RunFail(std::vector<IOTensor>& inputs);
};

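// gMock stub of the Tensor interface; used to exercise Run() with a tensor that
// provides no tensor description or backing data.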
class MockTensor : public Tensor {
public:
    MOCK_METHOD1(SetTensorDesc, OH_NN_ReturnCode(const TensorDesc*));
    MOCK_METHOD0(CreateData, OH_NN_ReturnCode());
    MOCK_METHOD1(CreateData, OH_NN_ReturnCode(size_t));
    MOCK_METHOD3(CreateData, OH_NN_ReturnCode(int, size_t, size_t));
    MOCK_CONST_METHOD0(GetTensorDesc, TensorDesc*());
    MOCK_CONST_METHOD0(GetData, void*());
    MOCK_CONST_METHOD0(GetFd, int());
    MOCK_CONST_METHOD0(GetSize, size_t());
    MOCK_CONST_METHOD0(GetOffset, size_t());
    MOCK_CONST_METHOD0(GetBackendID, size_t());
};

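// Writes a small data file, opens it, and maps it into memory through MemoryManager.
// The mapped address is returned in buffer; callers release it with UnMapMemory().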
void HDIPreparedModelTest::GetBuffer(void*& buffer, size_t length)
{
    std::string data = "ABCD";
    const size_t dataLength = 100;
    data.resize(dataLength, '-');

    std::string filename = "/data/log/memory-001.dat";
    FileUtils fileUtils(filename);
    fileUtils.WriteFile(data);

    int fd = open(filename.c_str(), O_RDWR);
    EXPECT_NE(-1, fd);

    const auto& memoryManager = MemoryManager::GetInstance();
    buffer = memoryManager->MapMemory(fd, length);
    close(fd);
}

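// Appends a single INT8, NCHW-format IOTensor wrapping buffer of length bytes to inputs.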
void HDIPreparedModelTest::InitTensor(std::vector<IOTensor>& inputs, void* buffer, size_t length)
{
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT8;
    inputTensor.format = OH_NN_FORMAT_NCHW;
    inputTensor.data = buffer;
    inputTensor.length = length;
    inputs.emplace_back(std::move(inputTensor));
}

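// Runs inputs through an HDIPreparedModelV2_1 whose underlying mock reports
// HDF_SUCCESS and fills in the stubbed output dimensions.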
OH_NN_ReturnCode HDIPreparedModelTest::Run(std::vector<IOTensor>& inputs)
{
    const int vvPosition = 2;
    std::vector<IOTensor> outputs;
    std::vector<std::vector<int32_t>> outputsDims {{0}};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V2_1::MockIPreparedModel> sp =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(sp);
    EXPECT_CALL(*sp, Run(::testing::_, ::testing::_, ::testing::_))
        .WillRepeatedly(::testing::DoAll(
            ::testing::SetArgReferee<vvPosition>(outputsDims),
            ::testing::Return(HDF_SUCCESS))
        );

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    return result;
}

/**
 * @tc.name: hidpreparedmodel_constructor_001
 * @tc.desc: Verify that the constructor creates a valid prepared model object.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_constructor_001, TestSize.Level0)
{
    OHOS::sptr<V2_1::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(hdiPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(hdiPreparedModel);
    EXPECT_NE(preparedModel, nullptr);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_001
 * @tc.desc: Verify that the ExportModelCache function returns a memory error.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_001, TestSize.Level0)
{
    std::vector<V2_1::SharedBuffer> bufferVect = {{100, 100, 0, 100}};
    OHOS::sptr<V2_1::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(hdiPreparedModel);
    std::vector<Buffer> modelCache;
    EXPECT_CALL(*((V2_1::MockIPreparedModel*)hdiPreparedModel.GetRefPtr()),
        ExportModelCache(::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<0>(bufferVect),
                ::testing::Return(HDF_SUCCESS)
            )
        );

    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_MEMORY_ERROR, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_002
 * @tc.desc: Verify that the ExportModelCache function returns success.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_002, TestSize.Level0)
{
    std::vector<V2_1::SharedBuffer> bufferVect;
    OHOS::sptr<V2_1::IPreparedModel> mockPreparedModel =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(mockPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(mockPreparedModel);
    std::vector<Buffer> modelCache;
    EXPECT_CALL(*((V2_1::MockIPreparedModel*)mockPreparedModel.GetRefPtr()),
        ExportModelCache(::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<0>(bufferVect),
                ::testing::Return(HDF_SUCCESS)
            )
        );

    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_SUCCESS, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_003
 * @tc.desc: Verify that the ExportModelCache function returns success when no cache is exported.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_003, TestSize.Level0)
{
    OHOS::sptr<V2_1::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(hdiPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(hdiPreparedModel);
    std::vector<Buffer> modelCache;
    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_SUCCESS, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_004
 * @tc.desc: Verify that the ExportModelCache function returns a save cache exception when the device fails.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_004, TestSize.Level0)
{
    std::vector<V2_1::SharedBuffer> bufferVect = {{100, 100, 0, 100}};
    OHOS::sptr<V2_1::IPreparedModel> mockPreparedModel =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(mockPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(mockPreparedModel);
    std::vector<Buffer> modelCache;
    EXPECT_CALL(*((V2_1::MockIPreparedModel*)mockPreparedModel.GetRefPtr()),
        ExportModelCache(::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<0>(bufferVect),
                ::testing::Return(HDF_FAILURE)
            )
        );

    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_SAVE_CACHE_EXCEPTION, result);
}

/**
 * @tc.name: hidpreparedmodel_exportmodelcache_005
 * @tc.desc: Verify that the ExportModelCache function returns an invalid parameter error for a non-empty cache.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_exportmodelcache_005, TestSize.Level0)
{
    LOGE("ExportModelCache hidpreparedmodel_exportmodelcache_005");
    std::vector<V2_1::SharedBuffer> bufferVect = {{100, 100, 0, 100}};
    OHOS::sptr<V2_1::IPreparedModel> mockPreparedModel =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(mockPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(mockPreparedModel);

    std::vector<Buffer> modelCache;
    Buffer buffer;
    modelCache.emplace_back(buffer);
    OH_NN_ReturnCode result = preparedModel->ExportModelCache(modelCache);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_001
 * @tc.desc: Verify that the Run function returns an invalid parameter error.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_001, TestSize.Level0)
{
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT8;

    IOTensor outputTensor;
    outputTensor.dataType = OH_NN_INT8;
    std::vector<IOTensor> inputs;
    inputs.emplace_back(std::move(inputTensor));
    std::vector<IOTensor> outputs;

    std::vector<V2_1::IOTensor> iOutputTensors;
    V2_1::IOTensor iTensor;
    iOutputTensors.emplace_back(iTensor);
    std::vector<std::vector<int32_t>> outputsDims {{0}};
    std::vector<bool> isOutputBufferEnough {};

    std::shared_ptr<V2_1::MockIPreparedModel> sp = std::make_shared<V2_1::MockIPreparedModel>();
    OHOS::sptr<V2_1::IPreparedModel> hdiPreparedModel =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(hdiPreparedModel, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(hdiPreparedModel);
    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_002
 * @tc.desc: Verify that the Run function returns success.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_002, TestSize.Level0)
{
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);

    OH_NN_ReturnCode result = Run(inputs);
    EXPECT_EQ(OH_NN_SUCCESS, result);
    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

/**
 * @tc.name: hidpreparedmodel_run_003
 * @tc.desc: Verify that the Run function returns an unavailable device error when the HDI Run call fails.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_003, TestSize.Level0)
{
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);

    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V2_1::MockIPreparedModel> sp =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(sp);

    EXPECT_CALL(*sp, Run(::testing::_, ::testing::_, ::testing::_))
        .WillRepeatedly(
            ::testing::DoAll(
                ::testing::SetArgReferee<2>(outputsDims),
                ::testing::Return(HDF_FAILURE)
            )
        );

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, result);
    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

/**
 * @tc.name: hidpreparedmodel_run_004
 * @tc.desc: Verify that the Run function returns an invalid parameter error when the input buffer is null.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_004, TestSize.Level0)
{
    std::vector<IOTensor> inputs;
    InitTensor(inputs, nullptr, 0);
    OH_NN_ReturnCode result = Run(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_005
 * @tc.desc: Verify that the Run function returns an invalid parameter error when the output tensor is invalid.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_005, TestSize.Level0)
{
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);
    InitTensor(outputs, nullptr, 0);

    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V2_1::MockIPreparedModel> sp =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(sp);

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

/**
 * @tc.name: hidpreparedmodel_run_006
 * @tc.desc: Verify that the Run function returns success.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_006, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_006");
    const size_t length = 100;
    void* buffer = nullptr;
    GetBuffer(buffer, length);

    std::vector<IOTensor> inputs;
    std::vector<IOTensor> outputs;
    InitTensor(inputs, buffer, length);
    InitTensor(outputs, buffer, length);

    std::vector<std::vector<int32_t>> outputsDims {{0}};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V2_1::MockIPreparedModel> sp =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(sp);
    EXPECT_CALL(*sp, Run(::testing::_, ::testing::_, ::testing::_))
        .WillRepeatedly(::testing::DoAll(
            ::testing::SetArgReferee<2>(outputsDims),
            ::testing::Return(HDF_SUCCESS))
        );

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_SUCCESS, result);

    const auto& memoryManager = MemoryManager::GetInstance();
    memoryManager->UnMapMemory(buffer);
}

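// Negative-path helper: forwards inputs to HDIPreparedModelV2_1::Run() with empty
// outputs and no mock expectations, and returns the resulting error code.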
OH_NN_ReturnCode HDIPreparedModelTest::RunFail(std::vector<IOTensor>& inputs)
{
    std::vector<IOTensor> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    OHOS::sptr<V2_1::MockIPreparedModel> sp =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(sp);

    OH_NN_ReturnCode result = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    return result;
}

/**
 * @tc.name: hidpreparedmodel_run_007
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an OH_NN_BOOL input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_007, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_007");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_BOOL;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_008
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an OH_NN_INT16 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_008, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_008");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT16;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_009
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an OH_NN_INT64 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_009, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_009");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT64;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_010
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an OH_NN_UINT8 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_010, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_010");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT8;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_011
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an OH_NN_UINT16 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_011, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_011");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT16;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_012
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an OH_NN_UINT32 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_012, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_012");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT32;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_013
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an OH_NN_UINT64 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_013, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_013");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UINT64;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_014
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an OH_NN_FLOAT16 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_014, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_014");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_FLOAT16;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_015
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an OH_NN_FLOAT32 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_015, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_015");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_FLOAT32;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_016
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an OH_NN_FLOAT64 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_016, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_016");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_FLOAT64;
    inputTensor.format = OH_NN_FORMAT_NHWC;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_017
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an input with unknown data type.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_017, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_017");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_UNKNOWN;
    inputTensor.format = OH_NN_FORMAT_NONE;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_018
 * @tc.desc: Verify that the Run function returns an invalid parameter error for an OH_NN_INT32 input with no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_018, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_018");
    std::vector<IOTensor> inputs;
    IOTensor inputTensor;
    inputTensor.dataType = OH_NN_INT32;
    inputs.emplace_back(std::move(inputTensor));

    OH_NN_ReturnCode result = RunFail(inputs);
    EXPECT_EQ(OH_NN_INVALID_PARAMETER, result);
}

/**
 * @tc.name: hidpreparedmodel_run_019
 * @tc.desc: Verify that the Run function fails when the input tensor pointer is null.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_019, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_019");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    inputs.emplace_back(nullptr);

    OHOS::sptr<V2_1::MockIPreparedModel> sp =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/**
 * @tc.name: hidpreparedmodel_run_020
 * @tc.desc: Verify that the Run function fails for a mock tensor input with no tensor description or data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_020, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_020");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    MockTensor* tensorImpl = new (std::nothrow) MockTensor();
    NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(tensorImpl);
    inputs.emplace_back(tensor);

    OHOS::sptr<V2_1::MockIPreparedModel> sp =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_FAILED, ret);

    testing::Mock::AllowLeak(tensorImpl);
}

/**
 * @tc.name: hidpreparedmodel_run_021
 * @tc.desc: Verify that the Run function fails when the input tensor has a tensor description but no data.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_021, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_021");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    size_t deviceId = 1;
    NNTensor2_0* tensorImpl = new (std::nothrow) NNTensor2_0(deviceId);
    EXPECT_NE(nullptr, tensorImpl);
    TensorDesc tensorDesc;

    tensorImpl->SetTensorDesc(&tensorDesc);
    NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(tensorImpl);
    inputs.emplace_back(tensor);

    OHOS::sptr<V2_1::MockIPreparedModel> sp =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_FAILED, ret);
}

/**
 * @tc.name: hidpreparedmodel_run_022
 * @tc.desc: Verify that the Run function returns an unavailable device error when the HDI Run call fails.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_run_022, TestSize.Level0)
{
    LOGE("Run hidpreparedmodel_run_022");
    std::vector<NN_Tensor*> inputs;
    std::vector<NN_Tensor*> outputs;
    std::vector<std::vector<int32_t>> outputsDims {};
    std::vector<bool> isOutputBufferEnough {};

    size_t backendId = 1;
    NNTensor2_0* nnTensor = new (std::nothrow) NNTensor2_0(backendId);
    EXPECT_NE(nullptr, nnTensor);

    TensorDesc tensorDesc;
    char name[] = "a";
    tensorDesc.SetName(name);
    tensorDesc.SetDataType(OH_NN_UINT32);
    tensorDesc.SetFormat(OH_NN_FORMAT_NCHW);
    int32_t expectDim[2] = {3, 3};
    int32_t* ptr = expectDim;
    uint32_t dimensionCount = 2;
    tensorDesc.SetShape(ptr, dimensionCount);

    OH_NN_ReturnCode retSetTensorDesc = nnTensor->SetTensorDesc(&tensorDesc);
    EXPECT_EQ(OH_NN_SUCCESS, retSetTensorDesc);

    nnTensor->SetSize(200);
    nnTensor->SetOffset(0);
    float dataArray[9] {0, 1, 2, 3, 4, 5, 6, 7, 8};
    void* buffer = dataArray;
    nnTensor->SetData(buffer);

    NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(nnTensor);
    inputs.emplace_back(tensor);

    OHOS::sptr<V2_1::MockIPreparedModel> sp =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(sp);
    OH_NN_ReturnCode ret = preparedModel->Run(inputs, outputs, outputsDims, isOutputBufferEnough);
    EXPECT_EQ(OH_NN_UNAVAILABLE_DEVICE, ret);
}

/**
 * @tc.name: hidpreparedmodel_getmodelid_001
 * @tc.desc: Verify that the GetModelID function returns success.
 * @tc.type: FUNC
 */
HWTEST_F(HDIPreparedModelTest, hidpreparedmodel_getmodelid_001, TestSize.Level0)
{
    LOGE("GetModelID hidpreparedmodel_getmodelid_001");
    OHOS::sptr<V2_1::MockIPreparedModel> sp =
        OHOS::sptr<V2_1::MockIPreparedModel>(new (std::nothrow) V2_1::MockIPreparedModel());
    EXPECT_NE(sp, nullptr);

    uint32_t index = 0;
    std::unique_ptr<HDIPreparedModelV2_1> preparedModel = std::make_unique<HDIPreparedModelV2_1>(sp);
    OH_NN_ReturnCode ret = preparedModel->GetModelID(index);
    EXPECT_EQ(OH_NN_SUCCESS, ret);
}
} // namespace UnitTest
} // namespace NeuralNetworkRuntime
} // namespace OHOS