1 /*
2  * Copyright (c) 2023 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #ifndef NEURAL_NETWORK_RUNTIME_NNBACKEND_H
17 #define NEURAL_NETWORK_RUNTIME_NNBACKEND_H
18 
19 #include "backend.h"
20 
21 #include "executor.h"
22 #include "tensor.h"
23 #include "tensor_desc.h"
24 #include "device.h"
25 #include "nncompiler.h"
26 
namespace OHOS {
namespace NeuralNetworkRuntime {
// Backend implementation that wraps a single NNRt Device.
// Holds a non-owning-by-value shared reference to the device (m_device) plus a
// numeric identifier (m_backendID); all Backend virtuals are fulfilled by
// delegating to that device (presumably — implementation lives in nn_backend.cpp;
// confirm against the .cpp).
class NNBackend : public Backend {
public:
    // device: the underlying device this backend represents (shared ownership).
    // backendID: stable identifier returned by GetBackendID().
    explicit NNBackend(const std::shared_ptr<Device>& device, size_t backendID);
    ~NNBackend() override;

    // Backend info accessors.
    // Returns the identifier supplied at construction (m_backendID).
    size_t GetBackendID() const override;
    // Out-parameter style: result written into the reference, status via OH_NN_ReturnCode.
    OH_NN_ReturnCode GetBackendName(std::string& backendName) const override;
    OH_NN_ReturnCode GetBackendType(OH_NN_DeviceType& backendType) const override;
    OH_NN_ReturnCode GetBackendStatus(DeviceStatus& status) const override;

    // Create & Destroy compiler.
    // Factory returns a raw pointer; ownership transfers to the caller, who must
    // release it via DestroyCompiler (returns nullptr on failure — verify in .cpp).
    Compiler* CreateCompiler(Compilation* compilation) override;
    OH_NN_ReturnCode DestroyCompiler(Compiler* compiler) override;

    // Create & Destroy executor (same ownership convention as the compiler pair).
    Executor* CreateExecutor(Compilation* compilation) override;
    OH_NN_ReturnCode DestroyExecutor(Executor* executor) override;

    // Create & Destroy tensor (same ownership convention; desc describes the tensor
    // to create).
    Tensor* CreateTensor(TensorDesc* desc) override;
    OH_NN_ReturnCode DestroyTensor(Tensor* tensor) override;

    // External (non-Backend-interface) methods.
    // Returns the wrapped device held in m_device.
    std::shared_ptr<Device> GetDevice() const;
    // Queries which operations of the given model are supported; `ops` receives one
    // flag per operation (presumably indexed in model node order — confirm with the
    // Device::GetSupportedOperation contract).
    OH_NN_ReturnCode GetSupportedOperation(std::shared_ptr<const mindspore::lite::LiteGraph> model,
                                           std::vector<bool>& ops);

protected:
    std::shared_ptr<Device> m_device;  // underlying device; set at construction
    size_t m_backendID;                // identifier reported by GetBackendID()
};
} // namespace NeuralNetworkRuntime
} // namespace OHOS
63 
64 #endif // NEURAL_NETWORK_RUNTIME_NNBACKEND_H
65