1 /*
2  * Copyright (c) 2024 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  *     http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "log_softmax_builder.h"
17 
18 #include "mindir.h"
19 #include "ops_registry.h"
20 
21 namespace OHOS {
22 namespace NeuralNetworkRuntime {
23 namespace Ops {
// LogSoftmax takes exactly one input tensor and produces one output tensor.
static const int INPUT_NUM = 1;
static const int OUTPUT_NUM = 1;
// At most one operation parameter (the axis) is accepted.
static const int PARAM_MAX_NUM = 1;
// The axis parameter must be a scalar, i.e. contain a single element.
static const int SCALAR_LENGTH = 1;
// Operation name registered into the graph/runtime.
static const std::string OP_NAME = "LogSoftmax";
29 
// Default-constructs the builder; all state is initialized by the base class / member initializers.
LogSoftmaxBuilder::LogSoftmaxBuilder() {}
31 
// No resources owned directly by this builder; nothing to release.
LogSoftmaxBuilder::~LogSoftmaxBuilder() {}
33 
SetAxis(const std::shared_ptr<NNTensor> & tensor)34 OH_NN_ReturnCode LogSoftmaxBuilder::SetAxis(const std::shared_ptr<NNTensor>& tensor)
35 {
36     if (tensor->GetDataType() != OH_NN_INT64) {
37         LOGE("[LogSoftmax] The axis should be type OH_NN_INT64.");
38         return OH_NN_INVALID_PARAMETER;
39     }
40 
41     if (tensor->GetElementCount() != SCALAR_LENGTH) {
42         LOGE("[LogSoftmax] The axis should be scalar.");
43         return OH_NN_INVALID_PARAMETER;
44     }
45 
46     void* buffer = tensor->GetBuffer();
47     if (buffer == nullptr) {
48         LOGE("[LogSoftmax] Tensor buffer is nullptr.");
49         return OH_NN_INVALID_PARAMETER;
50     }
51     m_axis = *(static_cast<const int64_t*>(buffer));
52 
53     return OH_NN_SUCCESS;
54 }
55 
Build(const std::vector<uint32_t> & paramsIndex,const std::vector<uint32_t> & inputsIndex,const std::vector<uint32_t> & outputsIndex,const std::vector<std::shared_ptr<NNTensor>> & allTensors)56 OH_NN_ReturnCode LogSoftmaxBuilder::Build(const std::vector<uint32_t>& paramsIndex,
57                                           const std::vector<uint32_t>& inputsIndex,
58                                           const std::vector<uint32_t>& outputsIndex,
59                                           const std::vector<std::shared_ptr<NNTensor>>& allTensors)
60 {
61     if (m_isBuild) {
62         LOGE("[LogSoftmax] Build failed, the logSoftmax operation has been build. cannot build again.");
63         return OH_NN_OPERATION_FORBIDDEN;
64     }
65 
66     auto ret = CheckIOIndex(inputsIndex, outputsIndex, allTensors, INPUT_NUM, OUTPUT_NUM);
67     if (ret != OH_NN_SUCCESS) {
68         LOGE("[LogSoftmax] Build failed, passed invalid input or output index.");
69         return ret;
70     }
71 
72     m_inputsIndex = inputsIndex;
73     m_outputsIndex = outputsIndex;
74 
75     ret = CheckParamIndex(paramsIndex, allTensors, PARAM_MAX_NUM);
76     if (ret != OH_NN_SUCCESS) {
77         LOGE("[LogSoftmax] Build failed, passed invalid param index.");
78         return ret;
79     }
80 
81     for (int i : paramsIndex) {
82         std::shared_ptr<NNTensor> tensor = allTensors[i];
83         tensor->IdentifyOpParameter();
84         if (m_paramMap.find(tensor->GetType()) != m_paramMap.end()) {
85             ret = (this->*(m_paramMap[tensor->GetType()]))(tensor);
86         } else {
87             LOGE("[LogSoftmax] Build failed, param invalid, type=%d", tensor->GetType());
88             return OH_NN_INVALID_PARAMETER;
89         }
90 
91         if (ret != OH_NN_SUCCESS) {
92             LOGE("[LogSoftmax] Build failed, passed invalid param.");
93             return ret;
94         }
95     }
96 
97     m_name = OP_NAME;
98     m_isBuild = true;
99     return OH_NN_SUCCESS;
100 }
101 
GetPrimitive()102 LiteGraphPrimitvePtr LogSoftmaxBuilder::GetPrimitive()
103 {
104     if (!m_isBuild) {
105         LOGE("[LogSoftmax] GetPrimitive failed, cannot get primitive before call build.");
106         return {nullptr, DestroyLiteGraphPrimitive};
107     }
108 
109     void* primitive = mindspore::lite::MindIR_LogSoftmax_CreatePrimitive(m_axis);
110     LiteGraphPrimitvePtr graphPrimitivePtr(primitive, DestroyLiteGraphPrimitive) ;
111     return graphPrimitivePtr;
112 }
113 
// Registers this builder with the ops registry under the OH_NN_OPS_LOG_SOFTMAX operation type.
REGISTER_OPS(LogSoftmaxBuilder, OH_NN_OPS_LOG_SOFTMAX);
115 } // namespace Ops
116 } // namespace NeuralNetworkRuntime
117 } // namespace OHOS
118