/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "matmul_builder.h"

#include "transform.h"
#include "validation.h"
#include "ops_registry.h"

namespace OHOS {
namespace NeuralNetworkRuntime {
namespace Ops {
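// Matmul takes two inputs and produces one output. It accepts at most three
// parameters (transposeA, transposeB and activationType), each of which must be
// a scalar (single-element) tensor.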
static const int INPUT_NUM = 2;
static const int OUTPUT_NUM = 1;
static const int PARAM_MAX_NUM = 3;
static const int SCALE_LENGTH = 1;
static const std::string OP_NAME = "Matmul";

MatmulBuilder::MatmulBuilder() {}

MatmulBuilder::~MatmulBuilder() {}

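// Reads the transposeA parameter: the tensor must be a scalar of type OH_NN_BOOL
// with a non-null buffer; its value is cached in m_transposeA for GetPrimitive().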
OH_NN_ReturnCode MatmulBuilder::SetTransposeA(const std::shared_ptr<NNTensor>& tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetElementCount() != SCALE_LENGTH) {
        LOGE("[Matmul] Matmul SetTransposeA failed. The transposeA should be a scalar.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (tensor->GetDataType() != OH_NN_BOOL) {
        LOGE("[Matmul] Matmul SetTransposeA failed. The transposeA should have type OH_NN_BOOL.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[Matmul] SetTransposeA failed, the transposeA passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    m_transposeA = *static_cast<bool*>(buffer);
    return OH_NN_SUCCESS;
}

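// Reads the transposeB parameter with the same checks as transposeA: scalar shape,
// OH_NN_BOOL data type and a non-null buffer.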
OH_NN_ReturnCode MatmulBuilder::SetTransposeB(const std::shared_ptr<NNTensor>& tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetElementCount() != SCALE_LENGTH) {
        LOGE("[Matmul] Matmul SetTransposeB failed. The transposeB should be a scalar.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (tensor->GetDataType() != OH_NN_BOOL) {
        LOGE("[Matmul] Matmul SetTransposeB failed. The transposeB should have type OH_NN_BOOL.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[Matmul] SetTransposeB failed, the transposeB passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    m_transposeB = *static_cast<bool*>(buffer);
    return OH_NN_SUCCESS;
}

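// Reads the activation parameter: a scalar OH_NN_INT8 value that must name a valid
// OH_NN_FuseType; it is converted to the corresponding MindSpore activation type.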
OH_NN_ReturnCode MatmulBuilder::SetActivationType(const std::shared_ptr<NNTensor>& tensor)
{
    tensor->IdentifyOpParameter();
    if (tensor->GetElementCount() != SCALE_LENGTH) {
        LOGE("[Matmul] Matmul SetActivationType failed. The activation should be a scalar.");
        return OH_NN_INVALID_PARAMETER;
    }

    if (tensor->GetDataType() != OH_NN_INT8) {
        LOGE("[Matmul] Matmul SetActivationType failed. The activation should have type OH_NN_INT8.");
        return OH_NN_INVALID_PARAMETER;
    }

    void* buffer = tensor->GetBuffer();
    if (buffer == nullptr) {
        LOGE("[Matmul] SetActivationType failed, the activationType passed an empty buffer.");
        return OH_NN_INVALID_PARAMETER;
    }

    int8_t* fuseData = static_cast<int8_t*>(buffer);
    if (!OHOS::NeuralNetworkRuntime::Validation::ValidateFuseType(static_cast<OH_NN_FuseType>(*fuseData))) {
        LOGE("[Matmul] Matmul SetActivationType failed. Fuse activation type is invalid.");
        return OH_NN_INVALID_PARAMETER;
    }

    auto fuseType = static_cast<OH_NN_FuseType>(*fuseData);
    m_activationType = NNToMS::TransfromFusionType(fuseType);
    return OH_NN_SUCCESS;
}

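// Validates the input, output and parameter indices, dispatches each parameter tensor
// to its setter through m_paramMap, and takes the operator's quantization type from
// the first output. A builder that has already been built cannot be built again.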
OH_NN_ReturnCode MatmulBuilder::Build(const std::vector<uint32_t>& paramsIndex,
                                      const std::vector<uint32_t>& inputsIndex,
                                      const std::vector<uint32_t>& outputsIndex,
                                      const std::vector<std::shared_ptr<NNTensor>>& allTensors)
{
    if (m_isBuild) {
        LOGE("[Matmul] Matmul Build failed. The operation has been built, cannot build again.");
        return OH_NN_OPERATION_FORBIDDEN;
    }

    OH_NN_ReturnCode returnCode = CheckIOIndex(inputsIndex, outputsIndex, allTensors, INPUT_NUM, OUTPUT_NUM);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("[Matmul] Matmul Build failed. Passed invalid input or output indices.");
        return returnCode;
    }

    m_inputsIndex = inputsIndex;
    m_outputsIndex = outputsIndex;

    returnCode = CheckParamIndex(paramsIndex, allTensors, PARAM_MAX_NUM);
    if (returnCode != OH_NN_SUCCESS) {
        LOGE("[Matmul] Matmul Build failed. Passed invalid param indices.");
        return returnCode;
    }

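    // Dispatch each parameter tensor to its setter according to the tensor type;
    // any type not present in m_paramMap is rejected as an invalid parameter.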
    for (int i : paramsIndex) {
        std::shared_ptr<NNTensor> tensor = allTensors[i];
        if (m_paramMap.find(tensor->GetType()) != m_paramMap.end()) {
            returnCode = (this->*(m_paramMap[tensor->GetType()]))(tensor);
        } else {
            LOGE("[Matmul] Build failed, param invalid, type=%d", tensor->GetType());
            return OH_NN_INVALID_PARAMETER;
        }

        if (returnCode != OH_NN_SUCCESS) {
            LOGE("[Matmul] Matmul Build failed. Passed invalid param.");
            return returnCode;
        }
    }

    // The quantization type of the first output determines that of the operator.
    SetQuantType(outputsIndex, allTensors);

    m_isBuild = true;
    m_name = OP_NAME;
    return OH_NN_SUCCESS;
}

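// Wraps the collected attributes into a MindSpore MatMulFusion primitive. Calling this
// before a successful Build() yields a null primitive pointer.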
LiteGraphPrimitvePtr MatmulBuilder::GetPrimitive()
{
    if (!m_isBuild) {
        LOGE("[Matmul] Matmul GetPrimitive failed. Cannot get primitive before calling Build().");
        return {nullptr, DestroyLiteGraphPrimitive};
    }

    auto primitive = mindspore::lite::MindIR_MatMulFusion_CreatePrimitive(m_transposeA, m_transposeB,
        m_activationType);
    LiteGraphPrimitvePtr graphPrimitivePtr(primitive, DestroyLiteGraphPrimitive);
    return graphPrimitivePtr;
}

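// Registers this builder with the ops registry under the OH_NN_OPS_MATMUL operation type.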
REGISTER_OPS(MatmulBuilder, OH_NN_OPS_MATMUL);
} // namespace Ops
} // namespace NeuralNetworkRuntime
} // namespace OHOS