1 /*
2 * Copyright (c) 2024 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #include <gtest/gtest.h>
17 #include <gmock/gmock.h>
18
19 #include "nnbackend.h"
20 #include "common/utils.h"
21 #include "neural_network_core_test.h"
22 #include "compilation.h"
23 #include "tensor.h"
24 #include "device.h"
25 #include "backend.h"
26 #include "backend_manager.h"
27 #include "backend_registrar.h"
28 #include "common/log.h"
29 #include "interfaces/kits/c/neural_network_runtime/neural_network_core.h"
30
31 namespace OHOS {
32 namespace NeuralNetworkRuntime {
33 namespace Unittest {
34 const size_t SIZE_ONE = 1;
BuildModel(InnerModel & model)35 OH_NN_ReturnCode NeuralNetworkCoreTest::BuildModel(InnerModel& model)
36 {
37 int32_t inputDims[2] = {3, 4};
38 OH_NN_Tensor input1 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
39 OH_NN_ReturnCode ret = model.AddTensor(input1);
40 if (ret != OH_NN_SUCCESS) {
41 return ret;
42 }
43
44 // 添加Add算子的第二个输入Tensor,类型为float32,张量形状为[3, 4]
45 OH_NN_Tensor input2 = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
46 ret = model.AddTensor(input2);
47 if (ret != OH_NN_SUCCESS) {
48 return ret;
49 }
50
51 // 添加Add算子的参数Tensor,该参数Tensor用于指定激活函数的类型,Tensor的数据类型为int8。
52 int32_t activationDims = 1;
53 int8_t activationValue = OH_NN_FUSED_NONE;
54 OH_NN_Tensor activation = {OH_NN_INT8, 1, &activationDims, nullptr, OH_NN_ADD_ACTIVATIONTYPE};
55 ret = model.AddTensor(activation);
56 if (ret != OH_NN_SUCCESS) {
57 return ret;
58 }
59
60 // 将激活函数类型设置为OH_NN_FUSED_NONE,表示该算子不添加激活函数。
61 uint32_t index = 2;
62 ret = model.SetTensorValue(index, &activationValue, sizeof(int8_t));
63 if (ret != OH_NN_SUCCESS) {
64 return ret;
65 }
66
67 // 设置Add算子的输出,类型为float32,张量形状为[3, 4]
68 OH_NN_Tensor output = {OH_NN_FLOAT32, 2, inputDims, nullptr, OH_NN_TENSOR};
69 ret = model.AddTensor(output);
70 if (ret != OH_NN_SUCCESS) {
71 return ret;
72 }
73
74 // 指定Add算子的输入、参数和输出索引
75 uint32_t inputIndicesValues[2] = {0, 1};
76 uint32_t paramIndicesValues = 2;
77 uint32_t outputIndicesValues = 3;
78 OH_NN_UInt32Array paramIndices = {¶mIndicesValues, 1};
79 OH_NN_UInt32Array inputIndices = {inputIndicesValues, 2};
80 OH_NN_UInt32Array outputIndices = {&outputIndicesValues, 1};
81
82 // 向模型实例添加Add算子
83 ret = model.AddOperation(OH_NN_OPS_ADD, paramIndices, inputIndices, outputIndices);
84 if (ret != OH_NN_SUCCESS) {
85 return ret;
86 }
87
88 // 设置模型实例的输入、输出索引
89 ret = model.SpecifyInputsAndOutputs(inputIndices, outputIndices);
90 if (ret != OH_NN_SUCCESS) {
91 return ret;
92 }
93
94 // 完成模型实例的构建
95 ret = model.Build();
96 if (ret != OH_NN_SUCCESS) {
97 return ret;
98 }
99
100 return ret;
101 }
102
103 class MockIDevice : public Device {
104 public:
105 MOCK_METHOD1(GetDeviceName, OH_NN_ReturnCode(std::string&));
106 MOCK_METHOD1(GetVendorName, OH_NN_ReturnCode(std::string&));
107 MOCK_METHOD1(GetVersion, OH_NN_ReturnCode(std::string&));
108 MOCK_METHOD1(GetDeviceType, OH_NN_ReturnCode(OH_NN_DeviceType&));
109 MOCK_METHOD1(GetDeviceStatus, OH_NN_ReturnCode(DeviceStatus&));
110 MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
111 std::vector<bool>&));
112 MOCK_METHOD1(IsFloat16PrecisionSupported, OH_NN_ReturnCode(bool&));
113 MOCK_METHOD1(IsPerformanceModeSupported, OH_NN_ReturnCode(bool&));
114 MOCK_METHOD1(IsPrioritySupported, OH_NN_ReturnCode(bool&));
115 MOCK_METHOD1(IsDynamicInputSupported, OH_NN_ReturnCode(bool&));
116 MOCK_METHOD1(IsModelCacheSupported, OH_NN_ReturnCode(bool&));
117 MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
118 const ModelConfig&,
119 std::shared_ptr<PreparedModel>&));
120 MOCK_METHOD3(PrepareModel, OH_NN_ReturnCode(const void*,
121 const ModelConfig&,
122 std::shared_ptr<PreparedModel>&));
123 MOCK_METHOD4(PrepareModelFromModelCache, OH_NN_ReturnCode(const std::vector<Buffer>&,
124 const ModelConfig&,
125 std::shared_ptr<PreparedModel>&,
126 bool&));
127 MOCK_METHOD3(PrepareOfflineModel, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
128 const ModelConfig&,
129 std::shared_ptr<PreparedModel>&));
130 MOCK_METHOD1(AllocateBuffer, void*(size_t));
131 MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<TensorDesc>));
132 MOCK_METHOD2(AllocateTensorBuffer, void*(size_t, std::shared_ptr<NNTensor>));
133 MOCK_METHOD1(ReleaseBuffer, OH_NN_ReturnCode(const void*));
134 MOCK_METHOD2(AllocateBuffer, OH_NN_ReturnCode(size_t, int&));
135 MOCK_METHOD2(ReleaseBuffer, OH_NN_ReturnCode(int, size_t));
136 };
137
138 class MockBackend : public Backend {
139 public:
140 MOCK_CONST_METHOD0(GetBackendID, size_t());
141 MOCK_CONST_METHOD1(GetBackendName, OH_NN_ReturnCode(std::string&));
142 MOCK_CONST_METHOD1(GetBackendType, OH_NN_ReturnCode(OH_NN_DeviceType&));
143 MOCK_CONST_METHOD1(GetBackendStatus, OH_NN_ReturnCode(DeviceStatus&));
144 MOCK_METHOD1(CreateCompiler, Compiler*(Compilation*));
145 MOCK_METHOD1(DestroyCompiler, OH_NN_ReturnCode(Compiler*));
146 MOCK_METHOD1(CreateExecutor, Executor*(Compilation*));
147 MOCK_METHOD1(DestroyExecutor, OH_NN_ReturnCode(Executor*));
148 MOCK_METHOD1(CreateTensor, Tensor*(TensorDesc*));
149 MOCK_METHOD1(DestroyTensor, OH_NN_ReturnCode(Tensor*));
150 MOCK_METHOD2(GetSupportedOperation, OH_NN_ReturnCode(std::shared_ptr<const mindspore::lite::LiteGraph>,
151 std::vector<bool>&));
152 };
153
Creator4()154 std::shared_ptr<Backend> Creator4()
155 {
156 size_t backendID = 4;
157 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
158
159 EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceStatus(::testing::_))
160 .WillRepeatedly(::testing::Invoke([](DeviceStatus& status) {
161 // 这里直接修改传入的引用参数
162 status = AVAILABLE;
163 return OH_NN_SUCCESS; // 假设成功的状态码
164 }));
165
166 std::string backendName = "mock";
167 EXPECT_CALL(*((MockIDevice *) device.get()), GetDeviceName(::testing::_))
168 .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
169
170 EXPECT_CALL(*((MockIDevice *) device.get()), GetVendorName(::testing::_))
171 .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
172
173 EXPECT_CALL(*((MockIDevice *) device.get()), GetVersion(::testing::_))
174 .WillRepeatedly(::testing::DoAll(::testing::SetArgReferee<0>(backendName), ::testing::Return(OH_NN_SUCCESS)));
175
176 EXPECT_CALL(*((MockIDevice *) device.get()), AllocateBuffer(::testing::_, ::testing::_))
177 .WillRepeatedly(::testing::Invoke([](size_t length, int& fd) {
178 // 这里直接修改传入的引用参数
179 fd = -1;
180 return OH_NN_SUCCESS; // 假设成功的状态码
181 }));
182
183 std::shared_ptr<Backend> backend = std::make_unique<NNBackend>(device, backendID);
184
185 testing::Mock::AllowLeak(device.get());
186
187 return backend;
188 }
189
190 /*
191 * @tc.name: alldevicesid_001
192 * @tc.desc: Verify the allDeviceIds is nullptr of the OH_NNDevice_GetAllDevicesID function.
193 * @tc.type: FUNC
194 */
195 HWTEST_F(NeuralNetworkCoreTest, alldevicesid_001, testing::ext::TestSize.Level0)
196 {
197 const size_t* allDeviceIds = nullptr;
198 uint32_t count {0};
199 OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(&allDeviceIds, &count);
200 EXPECT_EQ(OH_NN_SUCCESS, ret);
201 }
202
203 /*
204 * @tc.name: alldeviceid_002
205 * @tc.desc: Verify the allDeviceIds is nullptr of the OH_NNDevice_GetAllDevicesID function.
206 * @tc.type: FUNC
207 */
208 HWTEST_F(NeuralNetworkCoreTest, alldeviceid_002, testing::ext::TestSize.Level0)
209 {
210 uint32_t count {0};
211 OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(nullptr, &count);
212 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
213 }
214
215 /*
216 * @tc.name: alldeviceid_003
217 * @tc.desc: Verify the allDeviceIds is nullptr of the OH_NNDevice_GetAllDevicesID function.
218 * @tc.type: FUNC
219 */
220 HWTEST_F(NeuralNetworkCoreTest, alldeviceid_003, testing::ext::TestSize.Level0)
221 {
222 const size_t* allDeviceIds = nullptr;
223 OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(&allDeviceIds, nullptr);
224 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
225 }
226
227 /*
228 * @tc.name: alldeviceid_004
229 * @tc.desc: Verify the allDeviceIds is nullptr of the OH_NNDevice_GetAllDevicesID function.
230 * @tc.type: FUNC
231 */
232 HWTEST_F(NeuralNetworkCoreTest, alldeviceid_004, testing::ext::TestSize.Level0)
233 {
234 const size_t allDeviceIds = 0;
235 const size_t* pAllDeviceIds = &allDeviceIds;
236 uint32_t count {0};
237
238 OH_NN_ReturnCode ret = OH_NNDevice_GetAllDevicesID(&pAllDeviceIds, &count);
239 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
240 }
241
242 /*
243 * @tc.name: device_name_001
244 * @tc.desc: Verify the name is nullptr of the OH_NNDevice_GetName function.
245 * @tc.type: FUNC
246 */
247 HWTEST_F(NeuralNetworkCoreTest, device_name_001, testing::ext::TestSize.Level0)
248 {
249 const size_t deviceId = 12345;
250 const char* name = nullptr;
251 OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceId, &name);
252 EXPECT_EQ(OH_NN_FAILED, ret);
253 }
254
255 /*
256 * @tc.name: device_name_002
257 * @tc.desc: Verify the name is no nullptr of the OH_NNDevice_GetName function.
258 * @tc.type: FUNC
259 */
260 HWTEST_F(NeuralNetworkCoreTest, device_name_002, testing::ext::TestSize.Level0)
261 {
262 const size_t deviceId = 0;
263 const char* name = "name";
264 OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceId, &name);
265 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
266 }
267
268 /*
269 * @tc.name: device_name_003
270 * @tc.desc: Verify the name is nullptr of the OH_NNDevice_GetName function.
271 * @tc.type: FUNC
272 */
273 HWTEST_F(NeuralNetworkCoreTest, device_name_003, testing::ext::TestSize.Level0)
274 {
275 const size_t deviceId = 0;
276 OH_NN_ReturnCode ret = OH_NNDevice_GetName(deviceId, nullptr);
277 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
278 }
279
280 /*
281 * @tc.name: device_get_type_001
282 * @tc.desc: Verify the device is nullptr of the OH_NNDevice_GetType function.
283 * @tc.type: FUNC
284 */
285 HWTEST_F(NeuralNetworkCoreTest, device_get_type_001, testing::ext::TestSize.Level0)
286 {
287 size_t deviceID = 12345;
288 OH_NN_DeviceType deviceType = OH_NN_CPU;
289 OH_NN_DeviceType* pDeviceType = &deviceType;
290 OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
291 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
292 }
293
294 /*
295 * @tc.name: device_get_type_002
296 * @tc.desc: Verify the device is nullptr of the OH_NNDevice_GetType function.
297 * @tc.type: FUNC
298 */
299 HWTEST_F(NeuralNetworkCoreTest, device_get_type_002, testing::ext::TestSize.Level0)
300 {
301 size_t deviceID = 0;
302 OH_NN_DeviceType* pDeviceType = nullptr;
303 BackendManager& backendManager = BackendManager::GetInstance();
304 std::string backendName = "mock";
305 std::function<std::shared_ptr<Backend>()> creator = Creator4;
306
307 backendManager.RegisterBackend(backendName, creator);
308 OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
309 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
310 }
311
312 /*
313 * @tc.name: device_get_type_003
314 * @tc.desc: Verify the device is nullptr of the OH_NNDevice_GetType function.
315 * @tc.type: FUNC
316 */
317 HWTEST_F(NeuralNetworkCoreTest, device_get_type_003, testing::ext::TestSize.Level0)
318 {
319 size_t deviceID = 0;
320 OH_NN_DeviceType deviceType = OH_NN_OTHERS;
321 OH_NN_DeviceType* pDeviceType = &deviceType;
322 BackendManager& backendManager = BackendManager::GetInstance();
323 std::string backendName = "mock";
324 std::function<std::shared_ptr<Backend>()> creator = Creator4;
325
326 backendManager.RegisterBackend(backendName, creator);
327 OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
328 EXPECT_EQ(OH_NN_SUCCESS, ret);
329 }
330
331 /*
332 * @tc.name: device_get_type_004
333 * @tc.desc: Verify the success of the OH_NNDevice_GetType function.
334 * @tc.type: FUNC
335 */
336 HWTEST_F(NeuralNetworkCoreTest, device_get_type_004, testing::ext::TestSize.Level0)
337 {
338 size_t deviceID = 1;
339 OH_NN_DeviceType deviceType = OH_NN_CPU;
340 OH_NN_DeviceType* pDeviceType = &deviceType;
341 OH_NN_ReturnCode ret = OH_NNDevice_GetType(deviceID, pDeviceType);
342 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
343 }
344
345 /*
346 * @tc.name: compilation_construct_001
347 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNCompilation_Construct function.
348 * @tc.type: FUNC
349 */
350 HWTEST_F(NeuralNetworkCoreTest, compilation_construct_001, testing::ext::TestSize.Level0)
351 {
352 const OH_NNModel* model = nullptr;
353 OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
354 EXPECT_EQ(nullptr, ret);
355 }
356
357 /*
358 * @tc.name: compilation_construct_002
359 * @tc.desc: Verify the OH_NNModel is nullptr of the OH_NNCompilation_Construct function.
360 * @tc.type: FUNC
361 */
362 HWTEST_F(NeuralNetworkCoreTest, compilation_construct_002, testing::ext::TestSize.Level0)
363 {
364 const OH_NNModel* model = OH_NNModel_Construct();
365 OH_NNCompilation* ret = OH_NNCompilation_Construct(model);
366 EXPECT_NE(nullptr, ret);
367 }
368
369 /*
370 * @tc.name: compilation_construct_with_off_modelfile_001
371 * @tc.desc: Verify the modelpath is nullptr of the OH_NNCompilation_ConstructWithOfflineModelFile function.
372 * @tc.type: FUNC
373 */
374 HWTEST_F(NeuralNetworkCoreTest, compilation_construct_with_off_modelfile_001, testing::ext::TestSize.Level0)
375 {
376 const char* modelpath = nullptr;
377 OH_NNCompilation* ret = OH_NNCompilation_ConstructWithOfflineModelFile(modelpath);
378 EXPECT_EQ(nullptr, ret);
379 }
380
381 /*
382 * @tc.name: compilation_construct_with_off_modelfile_002
383 * @tc.desc: Verify the modelpath is no nullptr of the OH_NNCompilation_ConstructWithOfflineModelFile function.
384 * @tc.type: FUNC
385 */
386 HWTEST_F(NeuralNetworkCoreTest, compilation_construct_with_off_modelfile_002, testing::ext::TestSize.Level0)
387 {
388 const char* modelpath = "nrrtmodel";
389 OH_NNCompilation* ret = OH_NNCompilation_ConstructWithOfflineModelFile(modelpath);
390 EXPECT_NE(nullptr, ret);
391 }
392
393 /*
394 * @tc.name: compilation_construct_with_off_modelbuffer_001
395 * @tc.desc: Verify the modelbuffer is nullptr of the OH_NNCompilation_ConstructWithOfflineModelBuffer function.
396 * @tc.type: FUNC
397 */
398 HWTEST_F(NeuralNetworkCoreTest, compilation_construct_with_off_modelbuffer_001, testing::ext::TestSize.Level0)
399 {
400 const void* modelbuffer = nullptr;
401 size_t modelsize = 0;
402 OH_NNCompilation* ret = OH_NNCompilation_ConstructWithOfflineModelBuffer(modelbuffer, modelsize);
403 EXPECT_EQ(nullptr, ret);
404 }
405
406 /*
407 * @tc.name: compilation_construct_with_off_modelbuffer_002
408 * @tc.desc: Verify the modelbuffer is no nullptr of the OH_NNCompilation_ConstructWithOfflineModelBuffer function.
409 * @tc.type: FUNC
410 */
411 HWTEST_F(NeuralNetworkCoreTest, compilation_construct_with_off_modelbuffer_002, testing::ext::TestSize.Level0)
412 {
413 char modelbuffer[SIZE_ONE];
414 size_t modelsize = 0;
415 OH_NNCompilation* ret = OH_NNCompilation_ConstructWithOfflineModelBuffer(modelbuffer, modelsize);
416 EXPECT_EQ(nullptr, ret);
417 }
418
419 /*
420 * @tc.name: compilation_construct_with_off_modelbuffer_003
421 * @tc.desc: Verify the modelbuffer is no nullptr of the OH_NNCompilation_ConstructWithOfflineModelBuffer function.
422 * @tc.type: FUNC
423 */
424 HWTEST_F(NeuralNetworkCoreTest, compilation_construct_with_off_modelbuffer_003, testing::ext::TestSize.Level0)
425 {
426 char modelbuffer[SIZE_ONE];
427 size_t modelsize = 1;
428 OH_NNCompilation* ret = OH_NNCompilation_ConstructWithOfflineModelBuffer(modelbuffer, modelsize);
429 EXPECT_NE(nullptr, ret);
430 }
431
432 /*
433 * @tc.name: compilation_constructforcache_001
434 * @tc.desc: Verify the nnCompilation is no nullptr of the OH_NNCompilation_ConstructForCache function.
435 * @tc.type: FUNC
436 */
437 HWTEST_F(NeuralNetworkCoreTest, compilation_constructforcache_001, testing::ext::TestSize.Level0)
438 {
439 OH_NNCompilation* ret = OH_NNCompilation_ConstructForCache();
440 Compilation *compilation = new (std::nothrow) Compilation();
441 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
442 delete compilation;
443 EXPECT_NE(nnCompilation, ret);
444 }
445
446 /*
447 * @tc.name: compilation_exportchachetobuffer_001
448 * @tc.desc: Verify the compilation is nullptr of the OH_NNCompilation_ExportCacheToBuffer function.
449 * @tc.type: FUNC
450 */
451 HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_001, testing::ext::TestSize.Level0)
452 {
453 OH_NNCompilation* compilation = nullptr;
454 const void* buffer = nullptr;
455 size_t length = 0;
456 size_t* modelSize = nullptr;
457 OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(compilation, buffer, length, modelSize);
458 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
459 }
460
461 /*
462 * @tc.name: compilation_exportchachetobuffer_002
463 * @tc.desc: Verify the buffer is nullptr of the OH_NNCompilation_ExportCacheToBuffer function.
464 * @tc.type: FUNC
465 */
466 HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_002, testing::ext::TestSize.Level0)
467 {
468 Compilation *compilation = new (std::nothrow) Compilation();
469 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
470 const void* buffer = nullptr;
471 size_t length = 0;
472 size_t* modelSize = nullptr;
473 OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(nnCompilation, buffer, length, modelSize);
474 delete compilation;
475 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
476 }
477
478 /*
479 * @tc.name: compilation_exportchachetobuffer_003
480 * @tc.desc: Verify the length is 0 of the OH_NNCompilation_ExportCacheToBuffer function.
481 * @tc.type: FUNC
482 */
483 HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_003, testing::ext::TestSize.Level0)
484 {
485 Compilation *compilation = new (std::nothrow) Compilation();
486 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
487 char buffer[SIZE_ONE];
488 size_t length = 0;
489 size_t* modelSize = nullptr;
490 OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(nnCompilation, buffer, length, modelSize);
491 delete compilation;
492 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
493 }
494
495 /*
496 * @tc.name: compilation_exportchachetobuffer_004
497 * @tc.desc: Verify the modelSize is nullptr of the OH_NNCompilation_ExportCacheToBuffer function.
498 * @tc.type: FUNC
499 */
500 HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_004, testing::ext::TestSize.Level0)
501 {
502 Compilation *compilation = new (std::nothrow) Compilation();
503 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
504 char buffer[SIZE_ONE];
505 size_t length = 0;
506 size_t* modelSize = nullptr;
507 OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(nnCompilation, buffer, length, modelSize);
508 delete compilation;
509 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
510 }
511
512 /*
513 * @tc.name: compilation_exportchachetobuffer_005
514 * @tc.desc: Verify the modelSize is nullptr of the OH_NNCompilation_ExportCacheToBuffer function.
515 * @tc.type: FUNC
516 */
517 HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_005, testing::ext::TestSize.Level0)
518 {
519 Compilation *compilation = new (std::nothrow) Compilation();
520 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
521 char buffer[SIZE_ONE];
522 size_t* modelSize = nullptr;
523 OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(nnCompilation, buffer, SIZE_ONE, modelSize);
524 delete compilation;
525 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
526 }
527
528 /*
529 * @tc.name: compilation_exportchachetobuffer_006
530 * @tc.desc: Verify the length is 0 of the OH_NNCompilation_ExportCacheToBuffer function.
531 * @tc.type: FUNC
532 */
533 HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_006, testing::ext::TestSize.Level0)
534 {
535 Compilation *compilation = new (std::nothrow) Compilation();
536 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
537 char buffer[SIZE_ONE];
538 size_t modelSize = 0;
539 OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(nnCompilation, buffer, SIZE_ONE, &modelSize);
540 delete compilation;
541 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
542 }
543
/*
 * @tc.name: compilation_exportchachetobuffer_007
 * @tc.desc: Verify that OH_NNCompilation_ExportCacheToBuffer proceeds past parameter
 *           validation once the compilation carries a compiler.
 * @tc.type: FUNC
 */
HWTEST_F(NeuralNetworkCoreTest, compilation_exportchachetobuffer_007, testing::ext::TestSize.Level0)
{
    Compilation *compilation = new (std::nothrow) Compilation();
    OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
    char buffer[SIZE_ONE];
    size_t modelSize = 0;
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    size_t backid = 1;

    // NOTE(review): `compilation->compiler` is pointed at a stack-allocated
    // NNCompiler before `delete compilation` below — this assumes Compilation's
    // destructor does not take ownership of (delete) `compiler`; confirm against
    // compilation.h.
    NNCompiler nnCompiler(device, backid);
    compilation->compiler = &nnCompiler;
    OH_NN_ReturnCode ret = OH_NNCompilation_ExportCacheToBuffer(nnCompilation, buffer, SIZE_ONE, &modelSize);
    delete compilation;
    // Any status other than INVALID_PARAMETER means validation was passed.
    EXPECT_NE(OH_NN_INVALID_PARAMETER, ret);
    testing::Mock::AllowLeak(device.get());
}
565
566 /*
567 * @tc.name: compilation_importcachefrombuffer_001
568 * @tc.desc: Verify the compilation is nullptr of the OH_NNCompilation_ImportCacheFromBuffer function.
569 * @tc.type: FUNC
570 */
571 HWTEST_F(NeuralNetworkCoreTest, compilation_importcachefrombuffer_001, testing::ext::TestSize.Level0)
572 {
573 OH_NNCompilation* compilation = nullptr;
574 const void* buffer = nullptr;
575 size_t modelsize = 0;
576 OH_NN_ReturnCode ret = OH_NNCompilation_ImportCacheFromBuffer(compilation, buffer, modelsize);
577 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
578 }
579
580 /*
581 * @tc.name: compilation_importcachefrombuffer_002
582 * @tc.desc: Verify the buffer is nullptr of the OH_NNCompilation_ImportCacheFromBuffer function.
583 * @tc.type: FUNC
584 */
585 HWTEST_F(NeuralNetworkCoreTest, compilation_importcachefrombuffer_002, testing::ext::TestSize.Level0)
586 {
587 Compilation *compilation = new (std::nothrow) Compilation();
588 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
589 const void* buffer = nullptr;
590 size_t modelsize = 0;
591 OH_NN_ReturnCode ret = OH_NNCompilation_ImportCacheFromBuffer(nnCompilation, buffer, modelsize);
592 delete compilation;
593 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
594 }
595
596 /*
597 * @tc.name: compilation_importcachefrombuffer_003
598 * @tc.desc: Verify the modelsize is 0 of the OH_NNCompilation_ImportCacheFromBuffer function.
599 * @tc.type: FUNC
600 */
601 HWTEST_F(NeuralNetworkCoreTest, compilation_importcachefrombuffer_003, testing::ext::TestSize.Level0)
602 {
603 Compilation *compilation = new (std::nothrow) Compilation();
604 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
605 char buffer[SIZE_ONE];
606 size_t modelsize = 0;
607 OH_NN_ReturnCode ret = OH_NNCompilation_ImportCacheFromBuffer(nnCompilation, buffer, modelsize);
608 delete compilation;
609 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
610 }
611
612 /*
613 * @tc.name: compilation_importcachefrombuffer_004
614 * @tc.desc: Verify the modelsize is 0 of the OH_NNCompilation_ImportCacheFromBuffer function.
615 * @tc.type: FUNC
616 */
617 HWTEST_F(NeuralNetworkCoreTest, compilation_importcachefrombuffer_004, testing::ext::TestSize.Level0)
618 {
619 Compilation *compilation = new (std::nothrow) Compilation();
620 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
621 char buffer[SIZE_ONE];
622 OH_NN_ReturnCode ret = OH_NNCompilation_ImportCacheFromBuffer(nnCompilation, buffer, SIZE_ONE);
623 delete compilation;
624 EXPECT_EQ(OH_NN_SUCCESS, ret);
625 }
626
627 /*
628 * @tc.name: compilation_addextensionconfig_001
629 * @tc.desc: Verify the compilation is nullptr of the OH_NNCompilation_AddExtensionConfig function.
630 * @tc.type: FUNC
631 */
632 HWTEST_F(NeuralNetworkCoreTest, compilation_addextensionconfig_001, testing::ext::TestSize.Level0)
633 {
634 OH_NNCompilation* compilation = nullptr;
635 const char* configname = nullptr;
636 const void* configvalue = nullptr;
637 const size_t configvaluesize = 0;
638 OH_NN_ReturnCode ret = OH_NNCompilation_AddExtensionConfig(compilation, configname, configvalue, configvaluesize);
639 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
640 }
641
642 /*
643 * @tc.name: compilation_addextensionconfig_002
644 * @tc.desc: Verify the configname is nullptr of the OH_NNCompilation_AddExtensionConfig function.
645 * @tc.type: FUNC
646 */
647 HWTEST_F(NeuralNetworkCoreTest, compilation_addextensionconfig_002, testing::ext::TestSize.Level0)
648 {
649 Compilation *compilation = new (std::nothrow) Compilation();
650 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
651 const char* configname = nullptr;
652 const void* cofigvalue = nullptr;
653 const size_t configvaluesize = 0;
654 OH_NN_ReturnCode ret = OH_NNCompilation_AddExtensionConfig(nnCompilation, configname, cofigvalue, configvaluesize);
655 delete compilation;
656 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
657 }
658
659 /*
660 * @tc.name: compilation_addextensionconfig_003
661 * @tc.desc: Verify the cofigvalue is nullptr of the OH_NNCompilation_AddExtensionConfig function.
662 * @tc.type: FUNC
663 */
664 HWTEST_F(NeuralNetworkCoreTest, compilation_addextensionconfig_003, testing::ext::TestSize.Level0)
665 {
666 Compilation *compilation = new (std::nothrow) Compilation();
667 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
668 const char* configname = "ConfigName";
669 const void* cofigvalue = nullptr;
670 const size_t configvaluesize = 0;
671 OH_NN_ReturnCode ret = OH_NNCompilation_AddExtensionConfig(nnCompilation, configname, cofigvalue, configvaluesize);
672 delete compilation;
673 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
674 }
675
676 /*
677 * @tc.name: compilation_addextensionconfig_004
678 * @tc.desc: Verify the cofigvalue is nullptr of the OH_NNCompilation_AddExtensionConfig function.
679 * @tc.type: FUNC
680 */
681 HWTEST_F(NeuralNetworkCoreTest, compilation_addextensionconfig_004, testing::ext::TestSize.Level0)
682 {
683 Compilation *compilation = new (std::nothrow) Compilation();
684 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
685 const char* configname = "ConfigName";
686 char cofigvalue[SIZE_ONE];
687 const size_t configvaluesize = 0;
688 OH_NN_ReturnCode ret = OH_NNCompilation_AddExtensionConfig(nnCompilation, configname, cofigvalue, configvaluesize);
689 delete compilation;
690 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
691 }
692
693 /*
694 * @tc.name: compilation_addextensionconfig_005
695 * @tc.desc: Verify the cofigvalue is nullptr of the OH_NNCompilation_AddExtensionConfig function.
696 * @tc.type: FUNC
697 */
698 HWTEST_F(NeuralNetworkCoreTest, compilation_addextensionconfig_005, testing::ext::TestSize.Level0)
699 {
700 Compilation *compilation = new (std::nothrow) Compilation();
701 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
702 const char* configname = "ConfigName";
703 char cofigvalue[SIZE_ONE];
704 OH_NN_ReturnCode ret = OH_NNCompilation_AddExtensionConfig(nnCompilation, configname, cofigvalue, SIZE_ONE);
705 delete compilation;
706 EXPECT_EQ(OH_NN_SUCCESS, ret);
707 }
708
709 /*
710 * @tc.name: compilation_set_device_001
711 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetDevice function.
712 * @tc.type: FUNC
713 */
714 HWTEST_F(NeuralNetworkCoreTest, compilation_set_device_001, testing::ext::TestSize.Level0)
715 {
716 OH_NNCompilation* compilation = nullptr;
717 size_t deviceId = 1;
718 OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(compilation, deviceId);
719 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
720 }
721
722 /*
723 * @tc.name: compilation_set_device_002
724 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetDevice function.
725 * @tc.type: FUNC
726 */
727 HWTEST_F(NeuralNetworkCoreTest, compilation_set_device_002, testing::ext::TestSize.Level0)
728 {
729 Compilation *compilation = new (std::nothrow) Compilation();
730 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
731 size_t deviceId = 1;
732 OH_NN_ReturnCode ret = OH_NNCompilation_SetDevice(nnCompilation, deviceId);
733 delete compilation;
734 EXPECT_EQ(OH_NN_SUCCESS, ret);
735 }
736
737 /*
738 * @tc.name: compilation_set_cache_001
739 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
740 * @tc.type: FUNC
741 */
742 HWTEST_F(NeuralNetworkCoreTest, compilation_set_cache_001, testing::ext::TestSize.Level0)
743 {
744 OH_NNCompilation* nnCompilation = nullptr;
745 const char* cacheDir = "../";
746 uint32_t version = 1;
747 OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
748 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
749 }
750
751 /*
752 * @tc.name: compilation_set_cache_002
753 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
754 * @tc.type: FUNC
755 */
756 HWTEST_F(NeuralNetworkCoreTest, compilation_set_cache_002, testing::ext::TestSize.Level0)
757 {
758 Compilation *compilation = new (std::nothrow) Compilation();
759 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
760 const char* cacheDir = nullptr;
761 uint32_t version = 1;
762 OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
763 delete compilation;
764 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
765 }
766
767 /*
768 * @tc.name: compilation_set_cache_003
769 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
770 * @tc.type: FUNC
771 */
772 HWTEST_F(NeuralNetworkCoreTest, compilation_set_cache_003, testing::ext::TestSize.Level0)
773 {
774 Compilation *compilation = new (std::nothrow) Compilation();
775 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
776 const char* cacheDir = "../";
777 uint32_t version = 1;
778 OH_NN_ReturnCode ret = OH_NNCompilation_SetCache(nnCompilation, cacheDir, version);
779 delete compilation;
780 EXPECT_EQ(OH_NN_SUCCESS, ret);
781 }
782
783 /*
784 * @tc.name: compilation_set_performancemode_001
785 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
786 * @tc.type: FUNC
787 */
788 HWTEST_F(NeuralNetworkCoreTest, compilation_set_performancemode_001, testing::ext::TestSize.Level0)
789 {
790 InnerModel innerModel;
791 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
792 OH_NNCompilation* nnCompilation = nullptr;
793 OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;
794
795 OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
796 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
797 }
798
799 /*
800 * @tc.name: compilation_set_performancemode_002
801 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
802 * @tc.type: FUNC
803 */
804 HWTEST_F(NeuralNetworkCoreTest, compilation_set_performancemode_002, testing::ext::TestSize.Level0)
805 {
806 Compilation *compilation = new (std::nothrow) Compilation();
807 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
808 OH_NN_PerformanceMode performanceMode = OH_NN_PERFORMANCE_NONE;
809 OH_NN_ReturnCode ret = OH_NNCompilation_SetPerformanceMode(nnCompilation, performanceMode);
810 delete compilation;
811 EXPECT_EQ(OH_NN_SUCCESS, ret);
812 }
813
814 /*
815 * @tc.name: compilation_set_priority_001
816 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
817 * @tc.type: FUNC
818 */
819 HWTEST_F(NeuralNetworkCoreTest, compilation_set_priority_001, testing::ext::TestSize.Level0)
820 {
821 OH_NNCompilation* nnCompilation = nullptr;
822 OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
823 OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
824 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
825 }
826
827 /*
828 * @tc.name: compilation_set_priority_002
829 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetCache function.
830 * @tc.type: FUNC
831 */
832 HWTEST_F(NeuralNetworkCoreTest, compilation_set_priority_002, testing::ext::TestSize.Level0)
833 {
834 Compilation *compilation = new (std::nothrow) Compilation();
835 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
836 OH_NN_Priority priority = OH_NN_PRIORITY_NONE;
837 OH_NN_ReturnCode ret = OH_NNCompilation_SetPriority(nnCompilation, priority);
838 delete compilation;
839 EXPECT_EQ(OH_NN_SUCCESS, ret);
840 }
841
842 /*
843 * @tc.name: compilation_enablefloat16_001
844 * @tc.desc: Verify the compilation is nullptr of the OH_NNCompilation_SetCache function.
845 * @tc.type: FUNC
846 */
847 HWTEST_F(NeuralNetworkCoreTest, compilation_enablefloat16_001, testing::ext::TestSize.Level0)
848 {
849 OH_NNCompilation* nnCompilation = nullptr;
850 bool enableFloat16 = true;
851 OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
852 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
853 }
854
855 /*
856 * @tc.name: compilation_enablefloat16_002
857 * @tc.desc: Verify the compilation is nullptr of the OH_NNCompilation_SetCache function.
858 * @tc.type: FUNC
859 */
860 HWTEST_F(NeuralNetworkCoreTest, compilation_enablefloat16_002, testing::ext::TestSize.Level0)
861 {
862 Compilation *compilation = new (std::nothrow) Compilation();
863 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
864 bool enableFloat16 = true;
865 OH_NN_ReturnCode ret = OH_NNCompilation_EnableFloat16(nnCompilation, enableFloat16);
866 delete compilation;
867 EXPECT_EQ(OH_NN_SUCCESS, ret);
868 }
869
870 /*
871 * @tc.name: compilation_build_001
872 * @tc.desc: Verify the compilation is nullptr of the OH_NNCompilation_SetCache function.
873 * @tc.type: FUNC
874 */
875 HWTEST_F(NeuralNetworkCoreTest, compilation_build_001, testing::ext::TestSize.Level0)
876 {
877 OH_NNCompilation *nncompilation = nullptr;
878 OH_NN_ReturnCode ret = OH_NNCompilation_Build(nncompilation);
879 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
880 }
881
882 /*
883 * @tc.name: compilation_build_002
884 * @tc.desc: Verify the compilation is nullptr of the OH_NNCompilation_SetCache function.
885 * @tc.type: FUNC
886 */
887 HWTEST_F(NeuralNetworkCoreTest, compilation_build_002, testing::ext::TestSize.Level0)
888 {
889 Compilation *compilation = new (std::nothrow) Compilation();
890 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation*>(compilation);
891 OH_NN_ReturnCode ret = OH_NNCompilation_Build(nnCompilation);
892 delete compilation;
893 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
894 }
895
896 /*
897 * @tc.name: compilation_build_002
898 * @tc.desc: Verify the success of the OH_NNCompilation_Build function.
899 * @tc.type: FUNC
900 */
901 HWTEST_F(NeuralNetworkCoreTest, compilation_build_003, testing::ext::TestSize.Level0)
902 {
903 InnerModel innerModel;
904 EXPECT_EQ(OH_NN_SUCCESS, BuildModel(innerModel));
905
906 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
907 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
908
909 OH_NN_ReturnCode ret = OH_NNCompilation_Build(nnCompilation);
910 EXPECT_EQ(OH_NN_SUCCESS, ret);
911 }
912
913 /*
914 * @tc.name: nnt_tensordesc_destroy_001
915 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_Destroy function.
916 * @tc.type: FUNC
917 */
918 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_destroy_001, testing::ext::TestSize.Level0)
919 {
920 NN_TensorDesc* tensorDesc = nullptr;
921 OH_NN_ReturnCode ret = OH_NNTensorDesc_Destroy(&tensorDesc);
922 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
923 }
924
925 /*
926 * @tc.name: nnt_tensordesc_destroy_002
927 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_Destroy function.
928 * @tc.type: FUNC
929 */
930 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_destroy_002, testing::ext::TestSize.Level0)
931 {
932 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
933 OH_NN_ReturnCode ret = OH_NNTensorDesc_Destroy(&tensorDesc);
934 EXPECT_EQ(OH_NN_SUCCESS, ret);
935 }
936
937 /*
938 * @tc.name: nnt_tensordesc_setname_001
939 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetName function.
940 * @tc.type: FUNC
941 */
942 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setname_001, testing::ext::TestSize.Level0)
943 {
944 NN_TensorDesc* tensorDesc = nullptr;
945 const char* name = nullptr;
946 OH_NN_ReturnCode ret = OH_NNTensorDesc_SetName(tensorDesc, name);
947 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
948 }
949
950 /*
951 * @tc.name: nnt_tensordesc_setname_002
952 * @tc.desc: Verify the name is nullptr of the OH_NNTensorDesc_SetName function.
953 * @tc.type: FUNC
954 */
955 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setname_002, testing::ext::TestSize.Level0)
956 {
957 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
958 const char* name = nullptr;
959 OH_NN_ReturnCode ret = OH_NNTensorDesc_SetName(tensorDesc, name);
960 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
961 }
962
963 /*
964 * @tc.name: nnt_tensordesc_setname_003
965 * @tc.desc: Verify the name is nullptr of the OH_NNTensorDesc_SetName function.
966 * @tc.type: FUNC
967 */
968 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setname_003, testing::ext::TestSize.Level0)
969 {
970 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
971 const char* name = "name";
972 OH_NN_ReturnCode ret = OH_NNTensorDesc_SetName(tensorDesc, name);
973 EXPECT_NE(OH_NN_INVALID_PARAMETER, ret);
974 }
975
976 /*
977 * @tc.name: nnt_tensordesc_getname_001
978 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_GetName function.
979 * @tc.type: FUNC
980 */
981 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getname_001, testing::ext::TestSize.Level0)
982 {
983 NN_TensorDesc* tensorDesc = nullptr;
984 const char* name = nullptr;
985 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetName(tensorDesc, &name);
986 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
987 }
988
989 /*
990 * @tc.name: nnt_tensordesc_getname_002
991 * @tc.desc: Verify the name is nullptr of the OH_NNTensorDesc_GetName function.
992 * @tc.type: FUNC
993 */
994 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getname_002, testing::ext::TestSize.Level0)
995 {
996 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
997 const char* name = nullptr;
998 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetName(tensorDesc, &name);
999 EXPECT_EQ(OH_NN_SUCCESS, ret);
1000 }
1001
1002 /*
1003 * @tc.name: nnt_tensordesc_getname_003
1004 * @tc.desc: Verify the name is nullptr of the OH_NNTensorDesc_GetName function.
1005 * @tc.type: FUNC
1006 */
1007 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getname_003, testing::ext::TestSize.Level0)
1008 {
1009 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1010 const char* name = "name";
1011 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetName(tensorDesc, &name);
1012 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1013 }
1014
1015 /*
1016 * @tc.name: nnt_tensordesc_setdatatype_001
1017 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetDataType function.
1018 * @tc.type: FUNC
1019 */
1020 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setdatatype_001, testing::ext::TestSize.Level0)
1021 {
1022 NN_TensorDesc* tensorDesc = nullptr;
1023 OH_NN_DataType datatype = OH_NN_UNKNOWN;
1024 OH_NN_ReturnCode ret = OH_NNTensorDesc_SetDataType(tensorDesc, datatype);
1025 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1026 }
1027
1028 /*
1029 * @tc.name: nnt_tensordesc_setdatatype_002
1030 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetDataType function.
1031 * @tc.type: FUNC
1032 */
1033 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setdatatype_002, testing::ext::TestSize.Level0)
1034 {
1035 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1036 OH_NN_DataType datatype = OH_NN_UNKNOWN;
1037 OH_NN_ReturnCode ret = OH_NNTensorDesc_SetDataType(tensorDesc, datatype);
1038 EXPECT_NE(OH_NN_INVALID_PARAMETER, ret);
1039 }
1040
1041 /*
1042 * @tc.name: nnt_tensordesc_getdatatype_001
1043 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_GetDataType function.
1044 * @tc.type: FUNC
1045 */
1046 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getdatatype_001, testing::ext::TestSize.Level0)
1047 {
1048 NN_TensorDesc* tensorDesc = nullptr;
1049 OH_NN_DataType* datatype = nullptr;
1050 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetDataType(tensorDesc, datatype);
1051 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1052 }
1053
1054 /*
1055 * @tc.name: nnt_tensordesc_getdatatype_002
1056 * @tc.desc: Verify the OH_NN_DataType is nullptr of the OH_NNTensorDesc_GetDataType function.
1057 * @tc.type: FUNC
1058 */
1059 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getdatatype_002, testing::ext::TestSize.Level0)
1060 {
1061 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1062 OH_NN_DataType datatype = OH_NN_BOOL;
1063 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetDataType(tensorDesc, &datatype);
1064 EXPECT_EQ(OH_NN_SUCCESS, ret);
1065 }
1066
1067 /*
1068 * @tc.name: nnt_tensordesc_getdatatype_003
1069 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_GetDataType function.
1070 * @tc.type: FUNC
1071 */
1072 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getdatatype_003, testing::ext::TestSize.Level0)
1073 {
1074 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1075 OH_NN_DataType* datatype = nullptr;
1076 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetDataType(tensorDesc, datatype);
1077 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1078 }
1079
1080 /*
1081 * @tc.name: nnt_tensordesc_setshape_001
1082 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1083 * @tc.type: FUNC
1084 */
1085 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setshape_001, testing::ext::TestSize.Level0)
1086 {
1087 NN_TensorDesc* tensorDesc = nullptr;
1088 const int32_t* shape = nullptr;
1089 size_t shapeLength = 0;
1090 OH_NN_ReturnCode ret = OH_NNTensorDesc_SetShape(tensorDesc, shape, shapeLength);
1091 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1092 }
1093
1094 /*
1095 * @tc.name: nnt_tensordesc_setshape_002
1096 * @tc.desc: Verify the shape is nullptr of the OH_NNTensorDesc_SetShape function.
1097 * @tc.type: FUNC
1098 */
1099 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setshape_002, testing::ext::TestSize.Level0)
1100 {
1101 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1102 const int32_t* shape = nullptr;
1103 size_t shapeLength = 0;
1104 OH_NN_ReturnCode ret = OH_NNTensorDesc_SetShape(tensorDesc, shape, shapeLength);
1105 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1106 }
1107
1108 /*
1109 * @tc.name: nnt_tensordesc_setshape_003
1110 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1111 * @tc.type: FUNC
1112 */
1113 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setshape_003, testing::ext::TestSize.Level0)
1114 {
1115 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1116 int32_t inputDims[4] = {1, 2, 2, 3};
1117 size_t shapeLength = 0;
1118 OH_NN_ReturnCode ret = OH_NNTensorDesc_SetShape(tensorDesc, inputDims, shapeLength);
1119 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1120 }
1121
1122 /*
1123 * @tc.name: nnt_tensordesc_setshape_004
1124 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1125 * @tc.type: FUNC
1126 */
1127 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setshape_004, testing::ext::TestSize.Level0)
1128 {
1129 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1130 int32_t inputDims[4] = {1, 2, 2, 3};
1131 size_t shapeLength = 1;
1132 OH_NN_ReturnCode ret = OH_NNTensorDesc_SetShape(tensorDesc, inputDims, shapeLength);
1133 EXPECT_EQ(OH_NN_SUCCESS, ret);
1134 }
1135
1136 /*
1137 * @tc.name: nnt_tensordesc_Getshape_001
1138 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1139 * @tc.type: FUNC
1140 */
1141 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_Getshape_001, testing::ext::TestSize.Level0)
1142 {
1143 NN_TensorDesc* tensorDesc = nullptr;
1144 int32_t* shape = nullptr;
1145 size_t* shapeLength = 0;
1146 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetShape(tensorDesc, &shape, shapeLength);
1147 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1148 }
1149
1150 /*
1151 * @tc.name: nnt_tensordesc_Getshape_002
1152 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1153 * @tc.type: FUNC
1154 */
1155 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_Getshape_002, testing::ext::TestSize.Level0)
1156 {
1157 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1158 int32_t* shape = nullptr;
1159 size_t* shapeLength = 0;
1160 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetShape(tensorDesc, &shape, shapeLength);
1161 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1162 }
1163
1164 /*
1165 * @tc.name: nnt_tensordesc_Getshape_003
1166 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1167 * @tc.type: FUNC
1168 */
1169 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_Getshape_003, testing::ext::TestSize.Level0)
1170 {
1171 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1172 int32_t* shape = nullptr;
1173 int lengthValue = 1;
1174 size_t* shapeLength = new size_t(lengthValue);
1175 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetShape(tensorDesc, &shape, shapeLength);
1176 delete shapeLength;
1177 EXPECT_EQ(OH_NN_SUCCESS, ret);
1178 }
1179
1180 /*
1181 * @tc.name: nnt_tensordesc_setformat_001
1182 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1183 * @tc.type: FUNC
1184 */
1185 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setformat_001, testing::ext::TestSize.Level0)
1186 {
1187 NN_TensorDesc* tensorDesc = nullptr;
1188 OH_NN_Format format = static_cast<OH_NN_Format>(OH_NN_FLOAT32);
1189 OH_NN_ReturnCode ret = OH_NNTensorDesc_SetFormat(tensorDesc, format);
1190 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1191 }
1192
1193 /*
1194 * @tc.name: nnt_tensordesc_setformat_002
1195 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1196 * @tc.type: FUNC
1197 */
1198 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_setformat_002, testing::ext::TestSize.Level0)
1199 {
1200 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1201 OH_NN_Format format = static_cast<OH_NN_Format>(OH_NN_FLOAT32);
1202 OH_NN_ReturnCode ret = OH_NNTensorDesc_SetFormat(tensorDesc, format);
1203 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1204 }
1205
1206 /*
1207 * @tc.name: nnt_tensordesc_getformat_001
1208 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1209 * @tc.type: FUNC
1210 */
1211 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getformat_001, testing::ext::TestSize.Level0)
1212 {
1213 NN_TensorDesc* tensorDesc = nullptr;
1214 OH_NN_Format* format = nullptr;
1215 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetFormat(tensorDesc, format);
1216 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1217 }
1218
1219 /*
1220 * @tc.name: nnt_tensordesc_getformat_002
1221 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1222 * @tc.type: FUNC
1223 */
1224 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getformat_002, testing::ext::TestSize.Level0)
1225 {
1226 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1227 OH_NN_Format* format = nullptr;
1228 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetFormat(tensorDesc, format);
1229 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1230 }
1231
1232 /*
1233 * @tc.name: nnt_tensordesc_getformat_003
1234 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1235 * @tc.type: FUNC
1236 */
1237 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getformat_003, testing::ext::TestSize.Level0)
1238 {
1239 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1240 OH_NN_Format format = OH_NN_FORMAT_NONE;
1241 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetFormat(tensorDesc, &format);
1242 EXPECT_EQ(OH_NN_SUCCESS, ret);
1243 }
1244
1245 /*
1246 * @tc.name: nnt_tensordesc_getelementcount_001
1247 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1248 * @tc.type: FUNC
1249 */
1250 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getelementcount_001, testing::ext::TestSize.Level0)
1251 {
1252 NN_TensorDesc* tensorDesc = nullptr;
1253 size_t* elementCount = nullptr;
1254 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetElementCount(tensorDesc, elementCount);
1255 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1256 }
1257
1258 /*
1259 * @tc.name: nnt_tensordesc_getelementcount_002
1260 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1261 * @tc.type: FUNC
1262 */
1263 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getelementcount_002, testing::ext::TestSize.Level0)
1264 {
1265 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1266 size_t* elementCount = nullptr;
1267 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetElementCount(tensorDesc, elementCount);
1268 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1269 }
1270
1271 /*
1272 * @tc.name: nnt_tensordesc_getelementcount_003
1273 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1274 * @tc.type: FUNC
1275 */
1276 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getelementcount_003, testing::ext::TestSize.Level0)
1277 {
1278 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1279 size_t elementCount = 0;
1280 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetElementCount(tensorDesc, &elementCount);
1281 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1282 }
1283
1284 /*
1285 * @tc.name: nnt_tensordesc_getelementcount_001
1286 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1287 * @tc.type: FUNC
1288 */
1289 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getbytesize_001, testing::ext::TestSize.Level0)
1290 {
1291 NN_TensorDesc* tensorDesc = nullptr;
1292 size_t* byteSize = nullptr;
1293 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetByteSize(tensorDesc, byteSize);
1294 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1295 }
1296
1297 /*
1298 * @tc.name: nnt_tensordesc_getelementcount_002
1299 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1300 * @tc.type: FUNC
1301 */
1302 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getbytesize_002, testing::ext::TestSize.Level0)
1303 {
1304 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1305 size_t* byteSize = nullptr;
1306 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetByteSize(tensorDesc, byteSize);
1307 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1308 }
1309
1310 /*
1311 * @tc.name: nnt_tensordesc_getelementcount_003
1312 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1313 * @tc.type: FUNC
1314 */
1315 HWTEST_F(NeuralNetworkCoreTest, nnt_tensordesc_getbytesize_003, testing::ext::TestSize.Level0)
1316 {
1317 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1318 size_t byteSize = 0;
1319 OH_NN_ReturnCode ret = OH_NNTensorDesc_GetByteSize(tensorDesc, &byteSize);
1320 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1321 }
1322
1323 /*
1324 * @tc.name: nnt_nntensor_create_001
1325 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1326 * @tc.type: FUNC
1327 */
1328 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_create_001, testing::ext::TestSize.Level0)
1329 {
1330 NN_TensorDesc* tensorDesc = nullptr;
1331 size_t deviceid = 0;
1332 NN_Tensor* ret = OH_NNTensor_Create(deviceid, tensorDesc);
1333 EXPECT_EQ(nullptr, ret);
1334 }
1335
1336 /*
1337 * @tc.name: nnt_nntensor_create_002
1338 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1339 * @tc.type: FUNC
1340 */
1341 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_create_002, testing::ext::TestSize.Level0)
1342 {
1343 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1344 size_t deviceid = 1;
1345 NN_Tensor* ret = OH_NNTensor_Create(deviceid, tensorDesc);
1346 EXPECT_EQ(nullptr, ret);
1347 }
1348
1349 /*
1350 * @tc.name: nnt_nntensor_create_003
1351 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1352 * @tc.type: FUNC
1353 */
1354 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_create_003, testing::ext::TestSize.Level0)
1355 {
1356 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1357 size_t deviceid = 0;
1358 BackendManager& backendManager = BackendManager::GetInstance();
1359 std::string backendName = "mock";
1360 std::function<std::shared_ptr<Backend>()> creator = Creator4;
1361
1362 backendManager.RegisterBackend(backendName, creator);
1363 NN_Tensor* ret = OH_NNTensor_Create(deviceid, tensorDesc);
1364 EXPECT_EQ(nullptr, ret);
1365 }
1366
1367 /*
1368 * @tc.name: nnt_nntensor_createwithsize_001
1369 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1370 * @tc.type: FUNC
1371 */
1372 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithsize_001, testing::ext::TestSize.Level0)
1373 {
1374 NN_TensorDesc* tensorDesc = nullptr;
1375 size_t deviceid = 0;
1376 size_t size = 0;
1377 NN_Tensor* ret = OH_NNTensor_CreateWithSize(deviceid, tensorDesc, size);
1378 EXPECT_EQ(nullptr, ret);
1379 }
1380
1381 /*
1382 * @tc.name: nnt_nntensor_createwithsize_002
1383 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1384 * @tc.type: FUNC
1385 */
1386 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithsize_002, testing::ext::TestSize.Level0)
1387 {
1388 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1389 size_t deviceid = 1;
1390 size_t size = 0;
1391 NN_Tensor* ret = OH_NNTensor_CreateWithSize(deviceid, tensorDesc, size);
1392 EXPECT_EQ(nullptr, ret);
1393 }
1394
1395 /*
1396 * @tc.name: nnt_nntensor_createwithsize_003
1397 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1398 * @tc.type: FUNC
1399 */
1400 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithsize_003, testing::ext::TestSize.Level0)
1401 {
1402 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1403 size_t deviceid = 0;
1404 size_t size = 0;
1405 BackendManager& backendManager = BackendManager::GetInstance();
1406 std::string backendName = "mock";
1407 std::function<std::shared_ptr<Backend>()> creator = Creator4;
1408
1409 backendManager.RegisterBackend(backendName, creator);
1410 NN_Tensor* ret = OH_NNTensor_CreateWithSize(deviceid, tensorDesc, size);
1411 EXPECT_EQ(nullptr, ret);
1412 }
1413
1414 /*
1415 * @tc.name: nnt_nntensor_createwithsize_001
1416 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1417 * @tc.type: FUNC
1418 */
1419 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithfd_001, testing::ext::TestSize.Level0)
1420 {
1421 NN_TensorDesc* tensorDesc = nullptr;
1422 size_t deviceid = 0;
1423 int fd = 0;
1424 size_t size = 0;
1425 size_t offset = 0;
1426 NN_Tensor* ret = OH_NNTensor_CreateWithFd(deviceid, tensorDesc, fd, size, offset);
1427 EXPECT_EQ(nullptr, ret);
1428 }
1429
1430 /*
1431 * @tc.name: nnt_nntensor_createwithsize_002
1432 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1433 * @tc.type: FUNC
1434 */
1435 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithfd_002, testing::ext::TestSize.Level0)
1436 {
1437 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1438 size_t deviceid = 0;
1439 int fd = -1;
1440 size_t size = 0;
1441 size_t offset = 0;
1442 NN_Tensor* ret = OH_NNTensor_CreateWithFd(deviceid, tensorDesc, fd, size, offset);
1443 EXPECT_EQ(nullptr, ret);
1444 }
1445
1446 /*
1447 * @tc.name: nnt_nntensor_createwithsize_003
1448 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1449 * @tc.type: FUNC
1450 */
1451 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithfd_003, testing::ext::TestSize.Level0)
1452 {
1453 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1454 size_t deviceid = 0;
1455 int fd = 1;
1456 size_t size = 0;
1457 size_t offset = 0;
1458 NN_Tensor* ret = OH_NNTensor_CreateWithFd(deviceid, tensorDesc, fd, size, offset);
1459 EXPECT_EQ(nullptr, ret);
1460 }
1461
1462 /*
1463 * @tc.name: nnt_nntensor_createwithsize_004
1464 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1465 * @tc.type: FUNC
1466 */
1467 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithfd_004, testing::ext::TestSize.Level0)
1468 {
1469 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1470 size_t deviceid = 0;
1471 int fd = 1;
1472 size_t size = 1;
1473 size_t offset = 2;
1474 NN_Tensor* ret = OH_NNTensor_CreateWithFd(deviceid, tensorDesc, fd, size, offset);
1475 EXPECT_EQ(nullptr, ret);
1476 }
1477
1478 /*
1479 * @tc.name: nnt_nntensor_createwithsize_005
1480 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1481 * @tc.type: FUNC
1482 */
1483 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithfd_005, testing::ext::TestSize.Level0)
1484 {
1485 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1486 size_t deviceid = 0;
1487 int fd = 1;
1488 size_t size = 1;
1489 size_t offset = 0;
1490 NN_Tensor* ret = OH_NNTensor_CreateWithFd(deviceid, tensorDesc, fd, size, offset);
1491 EXPECT_EQ(nullptr, ret);
1492 }
1493
1494 /*
1495 * @tc.name: nnt_nntensor_createwithsize_006
1496 * @tc.desc: Verify the NN_TensorDesc is nullptr of the OH_NNTensorDesc_SetShape function.
1497 * @tc.type: FUNC
1498 */
1499 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_createwithsize_006, testing::ext::TestSize.Level0)
1500 {
1501 NN_TensorDesc* tensorDesc = OH_NNTensorDesc_Create();
1502 size_t deviceid = 0;
1503 int fd = 1;
1504 size_t size = 1;
1505 size_t offset = 2;
1506 NN_Tensor* ret = OH_NNTensor_CreateWithFd(deviceid, tensorDesc, fd, size, offset);
1507 EXPECT_EQ(nullptr, ret);
1508 }
1509
1510 /*
1511 * @tc.name: nnt_nntensor_destroy_001
1512 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1513 * @tc.type: FUNC
1514 */
1515 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_destroy_001, testing::ext::TestSize.Level0)
1516 {
1517 NN_Tensor* tensor = nullptr;
1518 OH_NN_ReturnCode ret = OH_NNTensor_Destroy(&tensor);
1519 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1520 }
1521
1522 /*
1523 * @tc.name: nnt_nntensor_destroy_002
1524 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1525 * @tc.type: FUNC
1526 */
1527 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_destroy_002, testing::ext::TestSize.Level0)
1528 {
1529 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1530 std::unique_ptr<NNBackend> hdiDevice = std::make_unique<NNBackend>(device, backendID);
1531 NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(hdiDevice->CreateTensor(tensorDesc));
1532 OH_NN_ReturnCode ret = OH_NNTensor_Destroy(&tensor);
1533 EXPECT_EQ(OH_NN_NULL_PTR, ret);
1534 testing::Mock::AllowLeak(device.get());
1535 }
1536
1537 /*
1538 * @tc.name: nnt_nntensor_gettensordesc_001
1539 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1540 * @tc.type: FUNC
1541 */
1542 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_gettensordesc_001, testing::ext::TestSize.Level0)
1543 {
1544 const NN_Tensor* tensor = nullptr;
1545 NN_TensorDesc* ret = OH_NNTensor_GetTensorDesc(tensor);
1546 EXPECT_EQ(nullptr, ret);
1547 }
1548
1549 /*
1550 * @tc.name: nnt_nntensor_gettensordesc_002
1551 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1552 * @tc.type: FUNC
1553 */
1554 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_gettensordesc_002, testing::ext::TestSize.Level0)
1555 {
1556 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1557 std::unique_ptr<NNBackend> hdiDevice = std::make_unique<NNBackend>(device, backendID);
1558 NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(hdiDevice->CreateTensor(tensorDesc));
1559 NN_TensorDesc* ret = OH_NNTensor_GetTensorDesc(tensor);
1560 EXPECT_NE(nullptr, ret);
1561 testing::Mock::AllowLeak(device.get());
1562 }
1563
1564 /*
1565 * @tc.name: nnt_nntensor_getdatabuffer_001
1566 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1567 * @tc.type: FUNC
1568 */
1569 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getdatabuffer_001, testing::ext::TestSize.Level0)
1570 {
1571 const NN_Tensor* tensor = nullptr;
1572 void* ret = OH_NNTensor_GetDataBuffer(tensor);
1573 EXPECT_EQ(nullptr, ret);
1574 }
1575
1576 /*
1577 * @tc.name: nnt_nntensor_getdatabuffer_002
1578 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1579 * @tc.type: FUNC
1580 */
1581 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getdatabuffer_002, testing::ext::TestSize.Level0)
1582 {
1583 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1584 std::unique_ptr<NNBackend> hdiDevice = std::make_unique<NNBackend>(device, backendID);
1585 NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(hdiDevice->CreateTensor(tensorDesc));
1586 void* ret = OH_NNTensor_GetDataBuffer(tensor);
1587 EXPECT_EQ(nullptr, ret);
1588 testing::Mock::AllowLeak(device.get());
1589 }
1590
1591 /*
1592 * @tc.name: nnt_nntensor_getsize_001
1593 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1594 * @tc.type: FUNC
1595 */
1596 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getsize_001, testing::ext::TestSize.Level0)
1597 {
1598 const NN_Tensor* tensor = nullptr;
1599 size_t* size = nullptr;
1600 OH_NN_ReturnCode ret = OH_NNTensor_GetSize(tensor, size);
1601 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1602 }
1603
1604 /*
1605 * @tc.name: nnt_nntensor_getsize_002
1606 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1607 * @tc.type: FUNC
1608 */
1609 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getsize_002, testing::ext::TestSize.Level0)
1610 {
1611 size_t* size = nullptr;
1612 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1613 std::unique_ptr<NNBackend> hdiDevice = std::make_unique<NNBackend>(device, backendID);
1614 NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(hdiDevice->CreateTensor(tensorDesc));
1615 OH_NN_ReturnCode ret = OH_NNTensor_GetSize(tensor, size);
1616 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1617 testing::Mock::AllowLeak(device.get());
1618 }
1619
1620 /*
1621 * @tc.name: nnt_nntensor_getsize_003
1622 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1623 * @tc.type: FUNC
1624 */
1625 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getsize_003, testing::ext::TestSize.Level0)
1626 {
1627 size_t size = 1;
1628 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1629 std::unique_ptr<NNBackend> hdiDevice = std::make_unique<NNBackend>(device, backendID);
1630 NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(hdiDevice->CreateTensor(tensorDesc));
1631 OH_NN_ReturnCode ret = OH_NNTensor_GetSize(tensor, &size);
1632 EXPECT_EQ(OH_NN_SUCCESS, ret);
1633 testing::Mock::AllowLeak(device.get());
1634 }
1635
1636 /*
1637 * @tc.name: nnt_nntensor_getfd_001
1638 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1639 * @tc.type: FUNC
1640 */
1641 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getfd_001, testing::ext::TestSize.Level0)
1642 {
1643 const NN_Tensor* tensor = nullptr;
1644 int* fd = nullptr;
1645 OH_NN_ReturnCode ret = OH_NNTensor_GetFd(tensor, fd);
1646 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1647 }
1648
1649 /*
1650 * @tc.name: nnt_nntensor_getfd_002
1651 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1652 * @tc.type: FUNC
1653 */
1654 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getfd_002, testing::ext::TestSize.Level0)
1655 {
1656 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1657 std::unique_ptr<NNBackend> hdiDevice = std::make_unique<NNBackend>(device, backendID);
1658 NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(hdiDevice->CreateTensor(tensorDesc));
1659 int* fd = nullptr;
1660 OH_NN_ReturnCode ret = OH_NNTensor_GetFd(tensor, fd);
1661 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1662 testing::Mock::AllowLeak(device.get());
1663 }
1664
1665 /*
 * @tc.name: nnt_nntensor_getfd_003
1667 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1668 * @tc.type: FUNC
1669 */
1670 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getfd_003, testing::ext::TestSize.Level0)
1671 {
1672 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1673 std::unique_ptr<NNBackend> hdiDevice = std::make_unique<NNBackend>(device, backendID);
1674 NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(hdiDevice->CreateTensor(tensorDesc));
1675 int fd = 1;
1676 OH_NN_ReturnCode ret = OH_NNTensor_GetFd(tensor, &fd);
1677 EXPECT_EQ(OH_NN_SUCCESS, ret);
1678 testing::Mock::AllowLeak(device.get());
1679 }
1680
1681 /*
1682 * @tc.name: nnt_nntensor_getoffset_001
1683 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1684 * @tc.type: FUNC
1685 */
1686 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getoffset_001, testing::ext::TestSize.Level0)
1687 {
1688 const NN_Tensor* tensor = nullptr;
1689 size_t* offset = nullptr;
1690 OH_NN_ReturnCode ret = OH_NNTensor_GetOffset(tensor, offset);
1691 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1692 }
1693
1694 /*
1695 * @tc.name: nnt_nntensor_getoffset_002
1696 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1697 * @tc.type: FUNC
1698 */
1699 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getoffset_002, testing::ext::TestSize.Level0)
1700 {
1701 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1702 std::unique_ptr<NNBackend> hdiDevice = std::make_unique<NNBackend>(device, backendID);
1703 NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(hdiDevice->CreateTensor(tensorDesc));
1704 size_t* offset = nullptr;
1705 OH_NN_ReturnCode ret = OH_NNTensor_GetOffset(tensor, offset);
1706 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1707 testing::Mock::AllowLeak(device.get());
1708 }
1709
1710 /*
1711 * @tc.name: nnt_nntensor_getoffset_003
1712 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1713 * @tc.type: FUNC
1714 */
1715 HWTEST_F(NeuralNetworkCoreTest, nnt_nntensor_getoffset_003, testing::ext::TestSize.Level0)
1716 {
1717 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1718 std::unique_ptr<NNBackend> hdiDevice = std::make_unique<NNBackend>(device, backendID);
1719 NN_Tensor* tensor = reinterpret_cast<NN_Tensor*>(hdiDevice->CreateTensor(tensorDesc));
1720 size_t offset = 1;
1721 OH_NN_ReturnCode ret = OH_NNTensor_GetOffset(tensor, &offset);
1722 EXPECT_EQ(OH_NN_SUCCESS, ret);
1723 testing::Mock::AllowLeak(device.get());
1724 }
1725
1726 /*
1727 * @tc.name: nnt_nnexecutor_getputputshape_001
1728 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1729 * @tc.type: FUNC
1730 */
1731 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getputputshape_001, testing::ext::TestSize.Level0)
1732 {
1733 OH_NNExecutor* executor = nullptr;
1734 uint32_t outputIndex = 0;
1735 int32_t* shape = nullptr;
1736 uint32_t* shapeLength = nullptr;
1737 OH_NN_ReturnCode ret = OH_NNExecutor_GetOutputShape(executor, outputIndex, &shape, shapeLength);
1738 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1739 }
1740
1741 /*
1742 * @tc.name: nnt_nnexecutor_getputputshape_002
1743 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1744 * @tc.type: FUNC
1745 */
1746 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getputputshape_002, testing::ext::TestSize.Level0)
1747 {
1748 size_t m_backendID {0};
1749 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
1750 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1751 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1752 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1753 NNExecutor* executor = new (std::nothrow) NNExecutor(
1754 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
1755
1756 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1757 uint32_t outputIndex = 0;
1758 int32_t* shape = nullptr;
1759 uint32_t* shapeLength = nullptr;
1760 OH_NN_ReturnCode ret = OH_NNExecutor_GetOutputShape(nnExecutor, outputIndex, &shape, shapeLength);
1761 delete executor;
1762 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1763 testing::Mock::AllowLeak(device.get());
1764 }
1765
1766 /*
 * @tc.name: nnt_nnexecutor_getinputcount_001
1768 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1769 * @tc.type: FUNC
1770 */
1771 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputcount_001, testing::ext::TestSize.Level0)
1772 {
1773 const OH_NNExecutor* executor = nullptr;
1774 size_t* inputCount = nullptr;
1775 OH_NN_ReturnCode ret = OH_NNExecutor_GetInputCount(executor, inputCount);
1776 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1777 }
1778
1779 /*
1780 * @tc.name: nnt_nnexecutor_getinputcount_002
1781 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1782 * @tc.type: FUNC
1783 */
1784 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputcount_002, testing::ext::TestSize.Level0)
1785 {
1786 size_t m_backendID {0};
1787 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
1788 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1789 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1790 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1791 NNExecutor* executor = new (std::nothrow) NNExecutor(
1792 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
1793
1794 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1795 size_t* inputCount = nullptr;
1796 OH_NN_ReturnCode ret = OH_NNExecutor_GetInputCount(nnExecutor, inputCount);
1797 delete executor;
1798 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1799 testing::Mock::AllowLeak(device.get());
1800 }
1801
1802 /*
1803 * @tc.name: nnt_nnexecutor_getoutputcount_001
1804 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1805 * @tc.type: FUNC
1806 */
1807 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getoutputcount_001, testing::ext::TestSize.Level0)
1808 {
1809 const OH_NNExecutor* executor = nullptr;
1810 size_t* outputCount = nullptr;
1811 OH_NN_ReturnCode ret = OH_NNExecutor_GetOutputCount(executor, outputCount);
1812 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1813 }
1814
1815 /*
1816 * @tc.name: nnt_nnexecutor_getoutputcount_002
1817 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1818 * @tc.type: FUNC
1819 */
1820 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getoutputcount_002, testing::ext::TestSize.Level0)
1821 {
1822 size_t m_backendID {0};
1823 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
1824 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1825 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1826 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1827 NNExecutor* executor = new (std::nothrow) NNExecutor(
1828 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
1829
1830 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1831 size_t* outputCount = nullptr;
1832 OH_NN_ReturnCode ret = OH_NNExecutor_GetOutputCount(nnExecutor, outputCount);
1833 delete executor;
1834 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1835 testing::Mock::AllowLeak(device.get());
1836 }
1837
1838 /*
1839 * @tc.name: nnt_nnexecutor_createinputtensordesc_001
1840 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1841 * @tc.type: FUNC
1842 */
1843 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_createinputtensordesc_001, testing::ext::TestSize.Level0)
1844 {
1845 const OH_NNExecutor* executor = nullptr;
1846 size_t index = 1;
1847 NN_TensorDesc* ret = OH_NNExecutor_CreateInputTensorDesc(executor, index);
1848 EXPECT_EQ(nullptr, ret);
1849 }
1850
1851 /*
 * @tc.name: nnt_nnexecutor_createouttensordesc_001
1853 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1854 * @tc.type: FUNC
1855 */
1856 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_createouttensordesc_001, testing::ext::TestSize.Level0)
1857 {
1858 const OH_NNExecutor* executor = nullptr;
1859 size_t index = 1;
1860 NN_TensorDesc* ret = OH_NNExecutor_CreateOutputTensorDesc(executor, index);
1861 EXPECT_EQ(nullptr, ret);
1862 }
1863
1864 /*
 * @tc.name: nnt_nnexecutor_getinputdimRange_001
1866 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1867 * @tc.type: FUNC
1868 */
1869 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_001, testing::ext::TestSize.Level0)
1870 {
1871 const OH_NNExecutor* executor = nullptr;
1872 size_t index = 1;
1873 size_t* minInputDims = nullptr;
1874 size_t* maxInputDims = nullptr;
1875 size_t* shapeLength = nullptr;
1876 OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(executor, index, &minInputDims, &maxInputDims, shapeLength);
1877 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1878 }
1879
1880 /*
 * @tc.name: nnt_nnexecutor_getinputdimRange_002
1882 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1883 * @tc.type: FUNC
1884 */
1885 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_002, testing::ext::TestSize.Level0)
1886 {
1887 size_t index = 1;
1888 size_t* minInputDims = nullptr;
1889 size_t* maxInputDims = nullptr;
1890 size_t* shapeLength = nullptr;
1891 size_t m_backendID {0};
1892 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
1893 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1894 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1895 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1896 NNExecutor* executor = new (std::nothrow) NNExecutor(
1897 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
1898
1899 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1900 OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index,
1901 &minInputDims, &maxInputDims, shapeLength);
1902 delete executor;
1903 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1904 testing::Mock::AllowLeak(device.get());
1905 }
1906
1907 /*
1908 * @tc.name: nnt_nnexecutor_getinputdimRange_003
1909 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1910 * @tc.type: FUNC
1911 */
1912 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_getinputdimRange_003, testing::ext::TestSize.Level0)
1913 {
1914 size_t index = 1;
1915 size_t mindims = 1;
1916 size_t* minInputDims = &mindims;
1917 size_t* maxInputDims = nullptr;
1918 size_t* shapeLength = nullptr;
1919 size_t m_backendID {0};
1920 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
1921 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1922 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1923 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1924 NNExecutor* executor = new (std::nothrow) NNExecutor(
1925 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
1926
1927 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1928 OH_NN_ReturnCode ret = OH_NNExecutor_GetInputDimRange(nnExecutor, index,
1929 &minInputDims, &maxInputDims, shapeLength);
1930 delete executor;
1931 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1932 testing::Mock::AllowLeak(device.get());
1933 }
1934
1935 /*
1936 * @tc.name: nnt_nnexecutor_setonrundone_001
1937 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1938 * @tc.type: FUNC
1939 */
1940 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_setonrundone_001, testing::ext::TestSize.Level0)
1941 {
1942 OH_NNExecutor* executor = nullptr;
1943 NN_OnRunDone rundone = nullptr;
1944 OH_NN_ReturnCode ret = OH_NNExecutor_SetOnRunDone(executor, rundone);
1945 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1946 }
1947
1948 /*
1949 * @tc.name: nnt_nnexecutor_setonrundone_002
1950 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1951 * @tc.type: FUNC
1952 */
1953 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_setonrundone_002, testing::ext::TestSize.Level0)
1954 {
1955 size_t m_backendID {0};
1956 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
1957 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1958 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1959 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1960 NNExecutor* executor = new (std::nothrow) NNExecutor(
1961 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
1962
1963 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1964 NN_OnRunDone rundone = nullptr;
1965 OH_NN_ReturnCode ret = OH_NNExecutor_SetOnRunDone(nnExecutor, rundone);
1966 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1967 testing::Mock::AllowLeak(device.get());
1968 }
1969
1970 /*
1971 * @tc.name: nnt_nnexecutor_setonservicedied_001
1972 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1973 * @tc.type: FUNC
1974 */
1975 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_setonservicedied_001, testing::ext::TestSize.Level0)
1976 {
1977 OH_NNExecutor* executor = nullptr;
1978 NN_OnServiceDied servicedied = nullptr;
1979 OH_NN_ReturnCode ret = OH_NNExecutor_SetOnServiceDied(executor, servicedied);
1980 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
1981 }
1982
1983 /*
1984 * @tc.name: nnt_nnexecutor_setonservicedied_002
1985 * @tc.desc: Verify the NN_Tensor is nullptr of the OH_NNTensorDesc_SetShape function.
1986 * @tc.type: FUNC
1987 */
1988 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_setonservicedied_002, testing::ext::TestSize.Level0)
1989 {
1990 size_t m_backendID {0};
1991 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
1992 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
1993 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
1994 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
1995 NNExecutor* executor = new (std::nothrow) NNExecutor(
1996 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
1997
1998 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
1999 NN_OnServiceDied servicedied = nullptr;
2000 OH_NN_ReturnCode ret = OH_NNExecutor_SetOnServiceDied(nnExecutor, servicedied);
2001 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2002 testing::Mock::AllowLeak(device.get());
2003 }
2004
2005 /*
2006 * @tc.name: nnt_executor_runsync_001
2007 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2008 * @tc.type: FUNC
2009 */
2010 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runsync_001, testing::ext::TestSize.Level0)
2011 {
2012 OH_NNExecutor* executor = nullptr;
2013 NN_Tensor* inputTensor[] = {nullptr};
2014 size_t inputCount = 0;
2015 NN_Tensor* outputTensor[] = {nullptr};
2016 size_t outputcount = 0;
2017 OH_NN_ReturnCode ret = OH_NNExecutor_RunSync(executor, inputTensor, inputCount, outputTensor, outputcount);
2018 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2019 }
2020
2021 /*
2022 * @tc.name: nnt_executor_runsync_002
2023 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2024 * @tc.type: FUNC
2025 */
2026 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runsync_002, testing::ext::TestSize.Level0)
2027 {
2028 size_t m_backendID {0};
2029 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
2030 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2031 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2032 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
2033 NNExecutor* executor = new (std::nothrow) NNExecutor(
2034 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
2035
2036 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2037 NN_Tensor* inputTensor[] = {nullptr};
2038 size_t inputCount = 0;
2039 NN_Tensor* outputTensor[] = {nullptr};
2040 size_t outputcount = 0;
2041 OH_NN_ReturnCode ret = OH_NNExecutor_RunSync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount);
2042 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2043 testing::Mock::AllowLeak(device.get());
2044 }
2045
2046 /*
2047 * @tc.name: nnt_executor_runsync_003
2048 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2049 * @tc.type: FUNC
2050 */
2051 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runsync_003, testing::ext::TestSize.Level0)
2052 {
2053 size_t m_backendID {0};
2054 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
2055 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2056 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2057 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
2058 NNExecutor* executor = new (std::nothrow) NNExecutor(
2059 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
2060
2061 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2062 NN_Tensor* inputTensor[sizetensor];
2063 size_t inputCount = 0;
2064 NN_Tensor* outputTensor[] = {nullptr};
2065 size_t outputcount = 0;
2066 OH_NN_ReturnCode ret = OH_NNExecutor_RunSync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount);
2067 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2068 testing::Mock::AllowLeak(device.get());
2069 }
2070
2071 /*
2072 * @tc.name: nnt_executor_runsync_004
2073 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2074 * @tc.type: FUNC
2075 */
2076 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runsync_004, testing::ext::TestSize.Level0)
2077 {
2078 size_t m_backendID {0};
2079 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
2080 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2081 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2082 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
2083 NNExecutor* executor = new (std::nothrow) NNExecutor(
2084 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
2085
2086 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2087 NN_Tensor* inputTensor[sizetensor];
2088 size_t inputCount = 1;
2089 NN_Tensor* outputTensor[] = {nullptr};
2090 size_t outputcount = 0;
2091 OH_NN_ReturnCode ret = OH_NNExecutor_RunSync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount);
2092 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2093 testing::Mock::AllowLeak(device.get());
2094 }
2095
2096 /*
2097 * @tc.name: nnt_executor_runsync_005
2098 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2099 * @tc.type: FUNC
2100 */
2101 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runsync_005, testing::ext::TestSize.Level0)
2102 {
2103 size_t m_backendID {0};
2104 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
2105 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2106 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2107 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
2108 NNExecutor* executor = new (std::nothrow) NNExecutor(
2109 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
2110
2111 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2112 NN_Tensor* inputTensor[sizetensor];
2113 size_t inputCount = 1;
2114 NN_Tensor* outputTensor[sizetensor];
2115 size_t outputcount = 0;
2116 OH_NN_ReturnCode ret = OH_NNExecutor_RunSync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount);
2117 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2118 testing::Mock::AllowLeak(device.get());
2119 }
2120
2121 /*
2122 * @tc.name: nnt_executor_runasync_001
2123 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2124 * @tc.type: FUNC
2125 */
2126 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_001, testing::ext::TestSize.Level0)
2127 {
2128 OH_NNExecutor* executor = nullptr;
2129 NN_Tensor* inputTensor[] = {nullptr};
2130 size_t inputCount = 0;
2131 NN_Tensor* outputTensor[] = {nullptr};
2132 size_t outputcount = 0;
2133 int32_t timeout = 1;
2134 void* userdata = nullptr;
2135 OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(executor, inputTensor, inputCount, outputTensor, outputcount,
2136 timeout, userdata);
2137 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2138 }
2139
2140 /*
2141 * @tc.name: nnt_executor_runasync_002
2142 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2143 * @tc.type: FUNC
2144 */
2145 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_002, testing::ext::TestSize.Level0)
2146 {
2147 size_t m_backendID {0};
2148 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
2149 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2150 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2151 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
2152 NNExecutor* executor = new (std::nothrow) NNExecutor(
2153 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
2154
2155 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2156 NN_Tensor* inputTensor[] = {nullptr};
2157 size_t inputCount = 0;
2158 NN_Tensor* outputTensor[] = {nullptr};
2159 size_t outputcount = 0;
2160 int32_t timeout = 1;
2161 void* userdata = nullptr;
2162 OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount,
2163 timeout, userdata);
2164 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2165 testing::Mock::AllowLeak(device.get());
2166 }
2167
2168 /*
2169 * @tc.name: nnt_executor_runasync_003
2170 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2171 * @tc.type: FUNC
2172 */
2173 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_003, testing::ext::TestSize.Level0)
2174 {
2175 size_t m_backendID {0};
2176 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
2177 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2178 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2179 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
2180 NNExecutor* executor = new (std::nothrow) NNExecutor(
2181 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
2182
2183 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2184 NN_Tensor* inputTensor[sizetensor];
2185 size_t inputCount = 0;
2186 NN_Tensor* outputTensor[] = {nullptr};
2187 size_t outputcount = 0;
2188 int32_t timeout = 1;
2189 void* userdata = nullptr;
2190 OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount,
2191 timeout, userdata);
2192 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2193 testing::Mock::AllowLeak(device.get());
2194 }
2195
2196 /*
2197 * @tc.name: nnt_executor_runasync_004
2198 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2199 * @tc.type: FUNC
2200 */
2201 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_004, testing::ext::TestSize.Level0)
2202 {
2203 size_t m_backendID {0};
2204 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
2205 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2206 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2207 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
2208 NNExecutor* executor = new (std::nothrow) NNExecutor(
2209 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
2210
2211 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2212 NN_Tensor* inputTensor[sizetensor];
2213 size_t inputCount = 0;
2214 NN_Tensor* outputTensor[] = {nullptr};
2215 size_t outputcount = 0;
2216 int32_t timeout = 1;
2217 void* userdata = nullptr;
2218 OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount,
2219 timeout, userdata);
2220 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2221 testing::Mock::AllowLeak(device.get());
2222 }
2223
2224 /*
2225 * @tc.name: nnt_executor_runasync_005
2226 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2227 * @tc.type: FUNC
2228 */
2229 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_005, testing::ext::TestSize.Level0)
2230 {
2231 size_t m_backendID {0};
2232 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
2233 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2234 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2235 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
2236 NNExecutor* executor = new (std::nothrow) NNExecutor(
2237 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
2238
2239 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2240 NN_Tensor* inputTensor[sizetensor];
2241 size_t inputCount = 1;
2242 NN_Tensor* outputTensor[] = {nullptr};
2243 size_t outputcount = 0;
2244 int32_t timeout = 1;
2245 void* userdata = nullptr;
2246 OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount,
2247 timeout, userdata);
2248 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2249 testing::Mock::AllowLeak(device.get());
2250 }
2251
2252 /*
2253 * @tc.name: nnt_executor_runasync_006
2254 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2255 * @tc.type: FUNC
2256 */
2257 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_006, testing::ext::TestSize.Level0)
2258 {
2259 size_t m_backendID {0};
2260 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
2261 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2262 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2263 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
2264 NNExecutor* executor = new (std::nothrow) NNExecutor(
2265 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
2266
2267 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2268 NN_Tensor* inputTensor[sizetensor];
2269 size_t inputCount = 1;
2270 NN_Tensor* outputTensor[sizetensor];
2271 size_t outputcount = 0;
2272 int32_t timeout = 1;
2273 void* userdata = nullptr;
2274 OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount,
2275 timeout, userdata);
2276 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2277 testing::Mock::AllowLeak(device.get());
2278 }
2279
2280 /*
2281 * @tc.name: nnt_executor_runasync_007
2282 * @tc.desc: Verify the ExecutorConfig is nullptr of the OH_NNTensorDesc_SetShape function.
2283 * @tc.type: FUNC
2284 */
2285 HWTEST_F(NeuralNetworkCoreTest, nnt_executor_runasync_007, testing::ext::TestSize.Level0)
2286 {
2287 size_t m_backendID {0};
2288 std::shared_ptr<PreparedModel> m_preparedModel {nullptr};
2289 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_inputTensorDescs;
2290 std::vector<std::pair<std::shared_ptr<TensorDesc>, OH_NN_TensorType>> m_outputTensorDescs;
2291 std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
2292 NNExecutor* executor = new (std::nothrow) NNExecutor(
2293 m_backendID, device, m_preparedModel, m_inputTensorDescs, m_outputTensorDescs);
2294
2295 OH_NNExecutor* nnExecutor = reinterpret_cast<OH_NNExecutor*>(&executor);
2296 NN_Tensor* inputTensor[sizetensor];
2297 size_t inputCount = 1;
2298 NN_Tensor* outputTensor[sizetensor];
2299 size_t outputcount = 1;
2300 int32_t timeout = 1;
2301 void* userdata = nullptr;
2302 OH_NN_ReturnCode ret = OH_NNExecutor_RunAsync(nnExecutor, inputTensor, inputCount, outputTensor, outputcount,
2303 timeout, userdata);
2304 EXPECT_EQ(OH_NN_INVALID_PARAMETER, ret);
2305 testing::Mock::AllowLeak(device.get());
2306 }
2307
2308 /*
2309 * @tc.name: nnt_nnexecutor_construct_001
2310 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetDevice function.
2311 * @tc.type: FUNC
2312 */
2313 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_construct_001, testing::ext::TestSize.Level0)
2314 {
2315 OH_NNCompilation* nnCompilation = nullptr;
2316 OH_NNExecutor* ret = OH_NNExecutor_Construct(nnCompilation);
2317 EXPECT_EQ(nullptr, ret);
2318 }
2319
2320 /*
2321 * @tc.name: nnt_nnexecutor_construct_002
2322 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetDevice function.
2323 * @tc.type: FUNC
2324 */
2325 HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_construct_002, testing::ext::TestSize.Level0)
2326 {
2327 Compilation *compilation = new (std::nothrow) Compilation();
2328 OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation *>(compilation);
2329 EXPECT_NE(nnCompilation, nullptr);
2330 BackendManager& backendManager = BackendManager::GetInstance();
2331 std::string backendName = "mock";
2332 std::function<std::shared_ptr<Backend>()> creator = Creator4;
2333
2334 BackendRegistrar backendregistrar(backendName, creator);
2335 backendManager.RemoveBackend(backendName);
2336 backendManager.RegisterBackend(backendName, creator);
2337 OH_NNExecutor* ret = OH_NNExecutor_Construct(nnCompilation);
2338 delete compilation;
2339 EXPECT_EQ(nullptr, ret);
2340 }
2341
2342 /*
2343 * @tc.name: nnt_nnexecutor_construct_003
2344 * @tc.desc: Verify the OH_NNCompilation is nullptr of the OH_NNCompilation_SetDevice function.
2345 * @tc.type: FUNC
2346 */
/*
 * Constructs an executor from a compilation whose compiler is set but whose
 * model was never built; OH_NNExecutor_Construct is expected to fail.
 */
HWTEST_F(NeuralNetworkCoreTest, nnt_nnexecutor_construct_003, testing::ext::TestSize.Level0)
{
    Compilation *compilation = new (std::nothrow) Compilation();
    OH_NNCompilation* nnCompilation = reinterpret_cast<OH_NNCompilation *>(compilation);
    EXPECT_NE(nnCompilation, nullptr);
    BackendManager& backendManager = BackendManager::GetInstance();
    std::string backendName = "mock";
    std::function<std::shared_ptr<Backend>()> creator = Creator4;

    backendManager.RegisterBackend(backendName, creator);
    std::shared_ptr<MockIDevice> device = std::make_shared<MockIDevice>();
    size_t backid = 1;

    // NOTE(review): compilation->compiler points at the stack-allocated
    // nnCompiler below; confirm that `delete compilation` does not attempt to
    // free or otherwise take ownership of that pointer.
    NNCompiler nnCompiler(device, backid);
    compilation->compiler = &nnCompiler;
    OH_NNExecutor* ret = OH_NNExecutor_Construct(nnCompilation);
    delete compilation;
    EXPECT_EQ(nullptr, ret);
}
2366
2367 /*
2368 * @tc.name: compilation_destroy_001
 * @tc.desc: Verify that OH_NNCompilation_Destroy handles a nullptr compilation and leaves the handle nullptr.
2370 * @tc.type: FUNC
2371 */
2372 HWTEST_F(NeuralNetworkCoreTest, compilation_destroy_001, testing::ext::TestSize.Level0)
2373 {
2374 OH_NNCompilation* nncompilation = nullptr;
2375 OH_NNCompilation_Destroy(&nncompilation);
2376 EXPECT_EQ(nullptr, nncompilation);
2377 }
2378
2379 /*
2380 * @tc.name: compilation_destroy_002
 * @tc.desc: Verify that OH_NNCompilation_Destroy destroys a compilation built from a valid model and resets the handle to nullptr.
2382 * @tc.type: FUNC
2383 */
2384 HWTEST_F(NeuralNetworkCoreTest, compilation_destroy_002, testing::ext::TestSize.Level0)
2385 {
2386 InnerModel* innerModel = new InnerModel();
2387 EXPECT_NE(nullptr, innerModel);
2388
2389 OH_NNModel* model = reinterpret_cast<OH_NNModel*>(&innerModel);
2390 OH_NNCompilation* nnCompilation = OH_NNCompilation_Construct(model);
2391 OH_NNCompilation_Destroy(&nnCompilation);
2392 EXPECT_EQ(nullptr, nnCompilation);
2393 }
2394
2395 /*
2396 * @tc.name: executor_destroy_001
 * @tc.desc: Verify that OH_NNExecutor_Destroy handles a nullptr executor and leaves the handle nullptr.
2398 * @tc.type: FUNC
2399 */
2400 HWTEST_F(NeuralNetworkCoreTest, executor_destroy_001, testing::ext::TestSize.Level0)
2401 {
2402 OH_NNExecutor* nnExecutor = nullptr;
2403 OH_NNExecutor_Destroy(&nnExecutor);
2404 EXPECT_EQ(nullptr, nnExecutor);
2405 }
2406 } // Unittest
2407 } // namespace NeuralNetworkRuntime
2408 } // namespace OHOS