/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "layernorm_builder.h"

#include "mindir.h"
#include "ops_registry.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Ops {
static const int INPUT_NUM = 3;
static const int OUTPUT_NUM = 1;
static const int PARAM_MAX_NUM = 3;
static const int INPUT_X = 0;
static const int INPUT_GAMMA = 1;
static const int INPUT_BETA = 2;
static const std::string OP_NAME = "LayerNorm";
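// LayerNorm takes three inputs (x at INPUT_X, gamma at INPUT_GAMMA, beta at INPUT_BETA),
// produces a single output, and accepts at most three parameters, parsed by the
// Set* methods below: beginNormAxis, epsilon, and beginParamsAxis.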

LayerNormBuilder::LayerNormBuilder() {}

LayerNormBuilder::~LayerNormBuilder() {}

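// Parses beginNormAxis from a scalar OH_NN_INT64 tensor. Dimensions
// [beginNormAxis, rank(input)) of the input form the normalized shape.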
OH_NN_ReturnCode LayerNormBuilder::SetBeginNormAxis(const std::shared_ptr<NNTensor>& tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetDataType() != OH_NN_INT64) {
        LOGE("[LayerNormBuilder] SetBeginNormAxis failed. The beginNormAxis should be type OH_NN_INT64.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!tensor->IsScalar()) {
        LOGE("[LayerNormBuilder] SetBeginNormAxis failed. The beginNormAxis should be a scalar value.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[LayerNormBuilder] SetBeginNormAxis failed, the beginNormAxis passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    m_beginNormAxis = *static_cast<int64_t*>(buffer);
    return OH_NN_SUCCESS;
}

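// Parses epsilon from a scalar OH_NN_FLOAT32 tensor; epsilon is the small
// constant added to the variance to avoid division by zero.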
OH_NN_ReturnCode LayerNormBuilder::SetEpsilon(const std::shared_ptr<NNTensor>& tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetDataType() != OH_NN_FLOAT32) {
        LOGE("[LayerNormBuilder] SetEpsilon failed. The epsilon should be type OH_NN_FLOAT32.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!tensor->IsScalar()) {
        LOGE("[LayerNormBuilder] SetEpsilon failed. The epsilon should be a scalar value.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[LayerNormBuilder] SetEpsilon failed, the epsilon passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    m_epsilon = *static_cast<float*>(buffer);
    return OH_NN_SUCCESS;
}

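// Parses beginParamsAxis from a scalar OH_NN_INT64 tensor; it marks the first
// input dimension to which gamma and beta apply.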
OH_NN_ReturnCode LayerNormBuilder::SetBeginParamsAxis(const std::shared_ptr<NNTensor>& tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetDataType() != OH_NN_INT64) {
        LOGE("[LayerNormBuilder] SetBeginParamsAxis failed. The beginParamsAxis should be type OH_NN_INT64.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (!tensor->IsScalar()) {
        LOGE("[LayerNormBuilder] SetBeginParamsAxis failed. The beginParamsAxis should be a scalar value.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[LayerNormBuilder] SetBeginParamsAxis failed, the beginParamsAxis passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    m_beginParamsAxis = *static_cast<int64_t*>(buffer);
    return OH_NN_SUCCESS;
}

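// Validates the operation's tensors and extracts its parameters. The expected
// layout (indices are illustrative and chosen by the caller's model) is:
//   inputsIndex  = {x, gamma, beta}
//   paramsIndex  = {beginNormAxis, epsilon, beginParamsAxis}
//   outputsIndex = {y}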
OH_NN_ReturnCode LayerNormBuilder::Build(const std::vector<uint32_t>& paramsIndex,
                                         const std::vector<uint32_t>& inputsIndex,
                                         const std::vector<uint32_t>& outputsIndex,
                                         const std::vector<std::shared_ptr<NNTensor>>& allTensors)
{
    if (m_isBuild) {
        LOGE("[LayerNormBuilder] Build failed. LayerNorm operation has been built; it cannot be built again.");
        return OH_NN_OPERATION_FORBIDDEN;
    }

    OH_NN_ReturnCode returnCode = CheckIOIndex(inputsIndex, outputsIndex, allTensors, INPUT_NUM, OUTPUT_NUM);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("[LayerNormBuilder] Build failed. Passed invalid input or output index.");
        return returnCode;
    }

    m_inputsIndex = inputsIndex;
    m_outputsIndex = outputsIndex;

    returnCode = CheckParamIndex(paramsIndex, allTensors, PARAM_MAX_NUM);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("[LayerNormBuilder] Build failed. Passed invalid param index.");
        return returnCode;
    }

    for (uint32_t i : paramsIndex) {
        std::shared_ptr<NNTensor> tensor = allTensors[i];
        if (m_paramMap.find(tensor->GetType()) != m_paramMap.end()) {
            returnCode = (this->*(m_paramMap[tensor->GetType()]))(tensor);
        } else {
            LOGE("[LayerNormBuilder] Build failed, param invalid, type=%d", tensor->GetType());
            return OH_NN_INVALID_PARAMETER;
        }

        if (returnCode != OH_NN_SUCCESS) {
            LOGE("[LayerNormBuilder] Build failed. Passed invalid param.");
            return returnCode;
        }
    }

    auto inputShape = allTensors[inputsIndex[INPUT_X]]->GetDimensions();
    int64_t inputShapeSize = static_cast<int64_t>(inputShape.size());
    // beginNormAxis must be in [1, rank(input)), because the normalized shape cannot cover the whole input shape.
    if (m_beginNormAxis >= inputShapeSize || m_beginNormAxis < 1) {
        LOGE("[LayerNormBuilder] Build failed, invalid beginNormAxis value, it should be in [1, rank(input)).");
        return OH_NN_INVALID_PARAMETER;
    }
    // Validate gamma and beta shapes against the normalized dimensions.
    returnCode = ValidateGammaAndBetaShape(inputsIndex, m_beginNormAxis, allTensors);
    if (returnCode != OH_NN_SUCCESS) {
        return returnCode;
    }

    // The quantization type of the first output determines that of the operator.
    SetQuantType(outputsIndex, allTensors);

    m_name = OP_NAME;
    m_isBuild = true;
    return OH_NN_SUCCESS;
}

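// Packs the parsed attributes into a MindIR LayerNormFusion primitive. If Build
// has not been called, a null primitive paired with the destructor is returned.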
LiteGraphPrimitvePtr LayerNormBuilder::GetPrimitive()
{
    if (!m_isBuild) {
        LOGE("[LayerNormBuilder] GetPrimitive failed, cannot get primitive before calling Build.");
        return {nullptr, DestroyLiteGraphPrimitive};
    }

    void* primitive = mindspore::lite::MindIR_LayerNormFusion_CreatePrimitive(m_beginNormAxis,
        m_epsilon, m_elementwiseAffine, m_beginParamsAxis);
    LiteGraphPrimitvePtr graphPrimitivePtr(primitive, DestroyLiteGraphPrimitive);
    return graphPrimitivePtr;
}

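// Checks that gamma and beta both match the normalized suffix of the input
// shape, i.e. dimensions [beginAxis, rank(input)).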
OH_NN_ReturnCode LayerNormBuilder::ValidateGammaAndBetaShape(const std::vector<uint32_t>& inputsIndex,
    int64_t beginAxis, const std::vector<std::shared_ptr<NNTensor>>& allTensors) const
{
    auto inputShape = allTensors[inputsIndex[INPUT_X]]->GetDimensions();
    auto gammaShape = allTensors[inputsIndex[INPUT_GAMMA]]->GetDimensions();
    auto betaShape = allTensors[inputsIndex[INPUT_BETA]]->GetDimensions();
    int64_t inputShapeSize = static_cast<int64_t>(inputShape.size());
    if (gammaShape.size() != static_cast<size_t>(inputShapeSize - beginAxis)) {
        LOGE("[LayerNormBuilder] Invalid gamma rank, the rank of gamma should equal the number of normalized dimensions.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (betaShape.size() != static_cast<size_t>(inputShapeSize - beginAxis)) {
        LOGE("[LayerNormBuilder] Invalid beta rank, the rank of beta should equal the number of normalized dimensions.");
        return OH_NN_INVALID_PARAMETER;
    }

    for (auto i = beginAxis; i < inputShapeSize; i++) {
        if (gammaShape[i - beginAxis] != inputShape[i]) {
            LOGE("[LayerNormBuilder] Invalid gamma shape, the gamma shape should equal the normalized shape.");
            return OH_NN_INVALID_PARAMETER;
        }
        if (betaShape[i - beginAxis] != inputShape[i]) {
            LOGE("[LayerNormBuilder] Invalid beta shape, the beta shape should equal the normalized shape.");
            return OH_NN_INVALID_PARAMETER;
        }
    }

    return OH_NN_SUCCESS;
}

REGISTER_OPS(LayerNormBuilder, OH_NN_OPS_LAYER_NORM);
} // namespace Ops
} // namespace NeuralNetworkRuntime
} // namespace OHOS