/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "flatten_builder.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Ops {
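// Flatten takes exactly one input tensor and produces one output tensor;
// its only operator parameter is the scalar axis.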
static const int INPUT_NUM = 1;
static const int OUTPUT_NUM = 1;
static const int PARAM_MAX_NUM = 1;
static const int SCALAR_LENGTH = 1;
static const std::string OP_NAME = "Flatten";

FlattenBuilder::FlattenBuilder() {}

FlattenBuilder::~FlattenBuilder() {}

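// Extracts the axis parameter: the tensor must hold a single OH_NN_INT64 value with a non-null buffer.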
OH_NN_ReturnCode FlattenBuilder::SetAxis(const std::shared_ptr<NNTensor>& tensor)
{
    if (tensor->GetDataType() != OH_NN_INT64) {
        LOGE("[Flatten] The axis should be type OH_NN_INT64.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (tensor->GetElementCount() != SCALAR_LENGTH) {
        LOGE("[Flatten] The axis should be a scalar.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[Flatten] Tensor buffer is nullptr.");
        return OH_NN_INVALID_PARAMETER;
    }
    m_axis = *(static_cast<const int64_t*>(buffer));

    return OH_NN_SUCCESS;
}

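// Validates the input, output and parameter indices, stores the I/O indices,
// applies the axis parameter, and marks the operator as built.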
OH_NN_ReturnCode FlattenBuilder::Build(const std::vector<uint32_t>& paramsIndex,
                                       const std::vector<uint32_t>& inputsIndex,
                                       const std::vector<uint32_t>& outputsIndex,
                                       const std::vector<std::shared_ptr<NNTensor>>& allTensors)
{
    if (m_isBuild) {
        LOGE("[Flatten] Build failed, the flatten operation has already been built; it cannot be built again.");
        return OH_NN_OPERATION_FORBIDDEN;
    }

    auto ret = CheckIOIndex(inputsIndex, outputsIndex, allTensors, INPUT_NUM, OUTPUT_NUM);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[Flatten] Build failed, passed invalid input or output index.");
        return ret;
    }

    m_inputsIndex = inputsIndex;
    m_outputsIndex = outputsIndex;

    ret = CheckParamIndex(paramsIndex, allTensors, PARAM_MAX_NUM);
    if (ret != OH_NN_SUCCESS) {
        LOGE("[Flatten] Build failed, passed invalid param index.");
        return ret;
    }

    for (uint32_t i : paramsIndex) {
        std::shared_ptr<NNTensor> tensor = allTensors[i];
        tensor->IdentifyOpParameter();
        // Dispatch to the setter registered for this parameter type (SetAxis for the axis parameter).
        if (m_paramMap.find(tensor->GetType()) != m_paramMap.end()) {
            ret = (this->*(m_paramMap[tensor->GetType()]))(tensor);
        } else {
            LOGE("[Flatten] Build failed, param invalid, type=%d", tensor->GetType());
            return OH_NN_INVALID_PARAMETER;
        }

        if (ret != OH_NN_SUCCESS) {
            LOGE("[Flatten] Build failed, passed invalid param.");
            return ret;
        }
    }

    m_name = OP_NAME;
    m_isBuild = true;
    return OH_NN_SUCCESS;
}

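// Wraps the configured axis into a MindIR Flatten primitive; Build() must have succeeded first.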
LiteGraphPrimitvePtr FlattenBuilder::GetPrimitive()
{
    if (!m_isBuild) {
        LOGE("[Flatten] GetPrimitive failed, cannot get primitive before calling Build.");
        return {nullptr, DestroyLiteGraphPrimitive};
    }

    void* primitive = mindspore::lite::MindIR_Flatten_CreatePrimitive(m_axis);
    LiteGraphPrimitvePtr graphPrimitivePtr(primitive, DestroyLiteGraphPrimitive);
    return graphPrimitivePtr;
}

REGISTER_OPS(FlattenBuilder, OH_NN_OPS_FLATTEN);
} // namespace Ops
} // namespace NeuralNetworkRuntime
} // namespace OHOS