/*
 * Copyright (C) 2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "buffer_converter.h"
#include <algorithm>
#include <cmath>
#include "avcodec_errors.h"
#include "avcodec_log.h"
#include "meta/meta_key.h"
#include "native_buffer.h"
#include "surface_buffer.h"
#include "surface_type.h"

namespace {
constexpr OHOS::HiviewDFX::HiLogLabel LABEL = {LOG_CORE, LOG_DOMAIN_FRAMEWORK, "BufferConverter"};
using AVCodecRect = OHOS::MediaAVCodec::BufferConverter::AVCodecRect;
using GraphicPixelFormat = OHOS::GraphicPixelFormat;
using VideoPixelFormat = OHOS::MediaAVCodec::VideoPixelFormat;
constexpr int32_t OFFSET_2 = 0x02;
constexpr int32_t OFFSET_3 = 0x03;
constexpr int32_t OFFSET_15 = 0x0F;
constexpr int32_t OFFSET_16 = 0x10;
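
// Maps a graphic (surface) pixel format to the VideoPixelFormat used by the
// converter. The 10-bit P010 layouts are handled by the same semi-planar copy
// path as NV12/NV21; unsupported formats log an error and yield UNKNOWN.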
VideoPixelFormat TranslateSurfaceFormat(GraphicPixelFormat surfaceFormat)
{
    switch (surfaceFormat) {
        case GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_P: {
            return VideoPixelFormat::YUVI420;
        }
        case GraphicPixelFormat::GRAPHIC_PIXEL_FMT_RGBA_8888: {
            return VideoPixelFormat::RGBA;
        }
        case GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_P010:
        case GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCBCR_420_SP: {
            return VideoPixelFormat::NV12;
        }
        case GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCRCB_P010:
        case GraphicPixelFormat::GRAPHIC_PIXEL_FMT_YCRCB_420_SP: {
            return VideoPixelFormat::NV21;
        }
        default:
            AVCODEC_LOGE("Invalid graphic pixel format:%{public}d", static_cast<int32_t>(surfaceFormat));
            return VideoPixelFormat::UNKNOWN;
    }
}

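// Copies a semi-planar 4:2:0 frame (NV12/NV21) row by row from src to dst,
// converting between the source and destination strides. rects[0] is the
// destination stride rect, rects[1] the source stride rect, and rects[2] the
// actual picture rect. Returns the destination frame size in bytes
// (3/2 * wStride * hStride), or 0 if it does not fit into capacity.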
int32_t ConvertYUV420SP(uint8_t *dst, uint8_t *src, AVCodecRect *rects, int32_t capacity)
{
    AVCodecRect &dstRect = rects[0];
    AVCodecRect &srcRect = rects[1];
    AVCodecRect &rect = rects[2]; // 2: index
    int32_t dstSize = (OFFSET_3 * dstRect.wStride * dstRect.hStride) >> 1;
    int32_t ret;
    CHECK_AND_RETURN_RET_LOG(dstSize <= capacity, 0, "No memory. dstSize:%{public}d, capacity:%{public}d", dstSize,
                             capacity);
    // Y
    for (int32_t i = 0; i < rect.hStride; ++i) {
        ret = memcpy_s(dst, dstRect.wStride, src, rect.wStride);
        EXPECT_AND_LOGW(ret != 0, "memcpy failed");
        dst += dstRect.wStride;
        src += srcRect.wStride;
    }
    // padding
    dst += (dstRect.hStride - rect.hStride) * dstRect.wStride;
    src += (srcRect.hStride - rect.hStride) * srcRect.wStride;
    rect.hStride >>= 1;
    // UV
    for (int32_t i = 0; i < rect.hStride; ++i) {
        ret = memcpy_s(dst, dstRect.wStride, src, rect.wStride);
        EXPECT_AND_LOGW(ret != 0, "memcpy failed");
        dst += dstRect.wStride;
        src += srcRect.wStride;
    }
    return dstSize;
}

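// Copies a planar 4:2:0 frame (YUV420P/YUVI420) plane by plane. The Y plane
// uses the full strides; the U and V planes use half of each stride and half
// of the cropped height. Returns the destination frame size in bytes, or 0 if
// it exceeds capacity.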
int32_t ConvertYUV420P(uint8_t *dst, uint8_t *src, AVCodecRect *rects, int32_t capacity)
{
    AVCodecRect &dstRect = rects[0];
    AVCodecRect &srcRect = rects[1];
    AVCodecRect &rect = rects[2]; // 2: index
    int32_t dstSize = (OFFSET_3 * dstRect.wStride * dstRect.hStride) >> 1;
    int32_t ret;
    CHECK_AND_RETURN_RET_LOG(dstSize <= capacity, 0, "No memory. dstSize:%{public}d, capacity:%{public}d", dstSize,
                             capacity);
    // Y
    for (int32_t i = 0; i < rect.hStride; ++i) {
        ret = memcpy_s(dst, dstRect.wStride, src, rect.wStride);
        EXPECT_AND_LOGW(ret != 0, "memcpy failed");
        dst += dstRect.wStride;
        src += srcRect.wStride;
    }
    // padding
    const int32_t dstWidth = dstRect.wStride >> 1;
    const int32_t srcWidth = srcRect.wStride >> 1;
    const int32_t dstPadding = (dstRect.hStride - rect.hStride) * dstRect.wStride;
    const int32_t srcPadding = (srcRect.hStride - rect.hStride) * srcRect.wStride;
    rect.hStride >>= 1;
    rect.wStride >>= 1;
    dst += dstPadding;
    src += srcPadding;
    // U
    for (int32_t i = 0; i < rect.hStride; ++i) {
        ret = memcpy_s(dst, dstWidth, src, rect.wStride);
        EXPECT_AND_LOGW(ret != 0, "memcpy failed");
        dst += dstWidth;
        src += srcWidth;
    }
    // padding
    dst += dstPadding >> OFFSET_2;
    src += srcPadding >> OFFSET_2;
    // V
    for (int32_t i = 0; i < rect.hStride; ++i) {
        ret = memcpy_s(dst, dstWidth, src, rect.wStride);
        EXPECT_AND_LOGW(ret != 0, "memcpy failed");
        dst += dstWidth;
        src += srcWidth;
    }
    return dstSize;
}

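// Copies an RGBA8888 frame row by row. The wStride values here are already
// byte strides (4 bytes per pixel), so the frame size is wStride * hStride.
// Returns the destination frame size in bytes, or 0 if it exceeds capacity.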
int32_t ConverteRGBA8888(uint8_t *dst, uint8_t *src, AVCodecRect *rects, int32_t capacity)
{
    AVCodecRect &dstRect = rects[0];
    AVCodecRect &srcRect = rects[1];
    AVCodecRect &rect = rects[2]; // 2: index
    int32_t dstSize = dstRect.wStride * dstRect.hStride;
    int32_t ret;
    CHECK_AND_RETURN_RET_LOG(dstSize <= capacity, 0, "No memory. dstSize:%{public}d, capacity:%{public}d", dstSize,
                             capacity);
    for (int32_t i = 0; i < rect.hStride; ++i) {
        ret = memcpy_s(dst, dstRect.wStride, src, rect.wStride);
        EXPECT_AND_LOGW(ret != 0, "memcpy failed");
        dst += dstRect.wStride;
        src += srcRect.wStride;
    }
    return dstSize;
}
} // namespace

namespace OHOS {
namespace MediaAVCodec {
using AVBuffer = Media::AVBuffer;
using AVSharedMemory = Media::AVSharedMemory;
using Format = Media::Format;
using MemoryType = Media::MemoryType;
using Tag = Media::Tag;
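
// Creates a converter bound to the codec's data direction: true marks an
// encoder-side converter, false a decoder-side one. Other codec types are not
// supported and yield nullptr.
// Illustrative usage (hypothetical call site):
//   auto converter = BufferConverter::Create(AVCODEC_TYPE_VIDEO_DECODER);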
std::shared_ptr<BufferConverter> BufferConverter::Create(AVCodecType type)
{
    if (type == AVCODEC_TYPE_VIDEO_ENCODER) {
        return std::make_shared<BufferConverter>(true);
    } else if (type == AVCODEC_TYPE_VIDEO_DECODER) {
        return std::make_shared<BufferConverter>(false);
    }
    return nullptr;
}

BufferConverter::BufferConverter(bool isEncoder)
    : func_(ConvertYUV420SP), isEncoder_(isEncoder), isSharedMemory_(false), needResetFormat_(true)
{
}

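// Copies data from the AVBuffer into the user's shared memory. Encoders copy
// the data verbatim; decoders repack the hardware-stride frame into the
// user-stride layout via func_ and update the buffer's size to the user-side
// frame size. Nothing is done in shared-memory mode, for empty buffers, or
// when the AVBuffer carries no memory.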
int32_t BufferConverter::ReadFromBuffer(std::shared_ptr<AVBuffer> &buffer, std::shared_ptr<AVSharedMemory> &memory)
{
    std::shared_lock<std::shared_mutex> lock(mutex_);
    if (isSharedMemory_) {
        return AVCS_ERR_OK;
    }
    CHECK_AND_RETURN_RET_LOG(buffer != nullptr, AVCS_ERR_INVALID_VAL, "buffer is nullptr");
    if (buffer->memory_ == nullptr) {
        return AVCS_ERR_OK;
    }
    CHECK_AND_RETURN_RET_LOG(buffer->memory_->GetAddr() != nullptr, AVCS_ERR_INVALID_VAL, "buffer addr is nullptr");
    CHECK_AND_RETURN_RET_LOG(memory != nullptr && memory->GetBase() != nullptr, AVCS_ERR_INVALID_VAL,
                             "shared memory is nullptr");
    int32_t size = buffer->memory_->GetSize();
    if (size <= 0) {
        return AVCS_ERR_OK;
    }
    if (isEncoder_) {
        int32_t ret = buffer->memory_->Read(memory->GetBase(), size, 0);
        CHECK_AND_RETURN_RET_LOG(ret == size, AVCS_ERR_INVALID_VAL, "Read avbuffer's data failed.");
        return AVCS_ERR_OK;
    }
    AVCodecRect rects[3] = {usrRect_, hwRect_, rect_}; // 1:dstRect, 2:srcRect, 3:rect
    int32_t usrSize = func_(memory->GetBase(), buffer->memory_->GetAddr(), rects, memory->GetSize());
    buffer->memory_->SetSize(usrSize);
    return AVCS_ERR_OK;
}

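// Copies data from the user's shared memory into the AVBuffer: the mirror of
// ReadFromBuffer. Decoders copy the data verbatim; encoders repack the
// user-stride frame into the hardware-stride layout expected by the codec and
// update the buffer size accordingly. Skipped in shared-memory mode and for
// empty buffers.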
int32_t BufferConverter::WriteToBuffer(std::shared_ptr<AVBuffer> &buffer, std::shared_ptr<AVSharedMemory> &memory)
{
    std::shared_lock<std::shared_mutex> lock(mutex_);
    if (isSharedMemory_) {
        return AVCS_ERR_OK;
    }
    CHECK_AND_RETURN_RET_LOG(buffer != nullptr && buffer->memory_ != nullptr && buffer->memory_->GetAddr() != nullptr,
                             AVCS_ERR_INVALID_VAL, "buffer is nullptr");
    CHECK_AND_RETURN_RET_LOG(memory != nullptr && memory->GetBase() != nullptr, AVCS_ERR_INVALID_VAL,
                             "shared memory is nullptr");
    int32_t size = buffer->memory_->GetSize();
    if (size <= 0) {
        return AVCS_ERR_OK;
    }
    if (!isEncoder_) {
        (void)buffer->memory_->Write(memory->GetBase(), size, 0);
        return AVCS_ERR_OK;
    }
    AVCodecRect rects[3] = {hwRect_, usrRect_, rect_}; // 1:dstRect, 2:srcRect, 3:rect
    int32_t hwSize = func_(buffer->memory_->GetAddr(), memory->GetBase(), rects, buffer->memory_->GetCapacity());
    buffer->memory_->SetSize(hwSize);
    return AVCS_ERR_OK;
}

void BufferConverter::NeedToResetFormatOnce()
{
    std::lock_guard<std::shared_mutex> lock(mutex_);
    needResetFormat_ = true;
}

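// Adjusts an outgoing format to describe the user-side (converter) layout:
// stride and slice height are replaced with the user strides, and for decoders
// the reported width/height are replaced as well. Skipped in shared-memory
// mode or while the format still needs to be reset.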
void BufferConverter::GetFormat(Format &format)
{
    std::shared_lock<std::shared_mutex> lock(mutex_);
    if (isSharedMemory_ || needResetFormat_) {
        return;
    }
    if (!isEncoder_ && format.ContainKey(Tag::VIDEO_WIDTH)) {
        format.PutIntValue(Tag::VIDEO_WIDTH, usrRect_.wStride);
    }
    if (!isEncoder_ && format.ContainKey(Tag::VIDEO_HEIGHT)) {
        format.PutIntValue(Tag::VIDEO_HEIGHT, usrRect_.hStride);
    }
    if (format.ContainKey(Tag::VIDEO_STRIDE) || format.ContainKey(Tag::VIDEO_SLICE_HEIGHT)) {
        format.PutIntValue(Tag::VIDEO_STRIDE, usrRect_.wStride);
        format.PutIntValue(Tag::VIDEO_SLICE_HEIGHT, usrRect_.hStride);
    }
}

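// Derives the three stride rects (actual picture, user-side, hardware-side)
// and the copy routine from an incoming format. If width, height, stride or
// pixel format are missing or invalid, needResetFormat_ stays set so the rects
// can be derived later from the first surface buffer instead.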
void BufferConverter::SetFormat(const Format &format)
{
    std::lock_guard<std::shared_mutex> lock(mutex_);
    if (isSharedMemory_) {
        return;
    }
    int32_t width = 0;
    int32_t height = 0;
    int32_t wStride = 0;
    int32_t hStride = 0;
    int32_t pixelFormat = static_cast<int32_t>(VideoPixelFormat::UNKNOWN);
    if (format.GetIntValue(Tag::VIDEO_PIXEL_FORMAT, pixelFormat)) {
        SetPixFormat(static_cast<VideoPixelFormat>(pixelFormat));
    }
    if (format.GetIntValue(Tag::VIDEO_PIC_WIDTH, width) || format.GetIntValue(Tag::VIDEO_WIDTH, width)) {
        SetWidth(width);
    }
    if (format.GetIntValue(Tag::VIDEO_PIC_HEIGHT, height) || format.GetIntValue(Tag::VIDEO_HEIGHT, height)) {
        SetHeight(height);
    }
    if (!format.GetIntValue(Tag::VIDEO_STRIDE, wStride)) {
        SetWidthStride(rect_.wStride);
    } else {
        hwRect_.wStride = wStride;
    }
    if (!format.GetIntValue(Tag::VIDEO_SLICE_HEIGHT, hStride)) {
        SetHeightStride(rect_.hStride);
    } else {
        hwRect_.hStride = hStride;
    }
    // check if the converter needs to reset the format.
    needResetFormat_ = !SetRectValue(width, height, wStride, hStride) ||
                       pixelFormat == static_cast<int32_t>(VideoPixelFormat::UNKNOWN);
    if (needResetFormat_) {
        AVCODEC_LOGW("Invalid format:%{public}s", format.Stringify().c_str());
        return;
    }
    AVCODEC_LOGD(
        "Actual:(%{public}d x %{public}d), Converter:(%{public}d x %{public}d), Hardware:(%{public}d x %{public}d).",
        width, rect_.hStride, usrRect_.wStride, usrRect_.hStride, hwRect_.wStride, hwRect_.hStride);
}

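// The two helpers below lazily derive the stride rects from the first buffer
// that actually flows through the codec: the input buffer for encoders and the
// output buffer for decoders. They only run while needResetFormat_ is set.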
void BufferConverter::SetInputBufferFormat(std::shared_ptr<AVBuffer> &buffer)
{
    if (!isEncoder_) {
        return;
    }
    std::lock_guard<std::shared_mutex> lock(mutex_);
    if (!needResetFormat_) {
        return;
    }
    needResetFormat_ = !SetBufferFormat(buffer);
}

void BufferConverter::SetOutputBufferFormat(std::shared_ptr<AVBuffer> &buffer)
{
    if (isEncoder_) {
        return;
    }
    std::lock_guard<std::shared_mutex> lock(mutex_);
    if (!needResetFormat_) {
        return;
    }
    needResetFormat_ = !SetBufferFormat(buffer);
}

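// Selects the copy routine matching the pixel format: planar 4:2:0,
// semi-planar 4:2:0 or RGBA8888. Unknown formats keep the previous routine and
// only log an error.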
void BufferConverter::SetPixFormat(const VideoPixelFormat pixelFormat)
{
    switch (pixelFormat) {
        case VideoPixelFormat::YUV420P:
        case VideoPixelFormat::YUVI420:
            func_ = ConvertYUV420P;
            break;
        case VideoPixelFormat::NV12:
        case VideoPixelFormat::NV21:
            func_ = ConvertYUV420SP;
            break;
        case VideoPixelFormat::RGBA:
            func_ = ConverteRGBA8888;
            break;
        default:
            AVCODEC_LOGE("Invalid video pix format:%{public}d", static_cast<int32_t>(pixelFormat));
            break;
    }
}

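// SetWidth/SetHeight record the actual picture size and round the user-side
// stride up to the next multiple of 16 (OFFSET_16), so each row of the
// user-side layout is 16-aligned.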
inline void BufferConverter::SetWidth(const int32_t width)
{
    rect_.wStride = width;
    int32_t modVal = width & OFFSET_15;
    if (modVal) {
        usrRect_.wStride = width + OFFSET_16 - modVal;
    } else {
        usrRect_.wStride = width;
    }
}

inline void BufferConverter::SetHeight(const int32_t height)
{
    rect_.hStride = height;
    int32_t modVal = height & OFFSET_15;
    if (modVal) {
        usrRect_.hStride = height + OFFSET_16 - modVal;
    } else {
        usrRect_.hStride = height;
    }
}

inline void BufferConverter::SetWidthStride(const int32_t wStride)
{
    hwRect_.wStride = wStride;
}

inline void BufferConverter::SetHeightStride(const int32_t hStride)
{
    hwRect_.hStride = hStride;
}

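// Derives pixel format, picture size and strides from the surface buffer that
// backs an AVBuffer. Buffers without memory, or backed by shared memory, put
// the converter into pass-through mode (isSharedMemory_ = true). Returns false
// if the surface buffer is missing or reports an unusable geometry.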
bool BufferConverter::SetBufferFormat(std::shared_ptr<AVBuffer> &buffer)
{
    CHECK_AND_RETURN_RET_LOG(buffer != nullptr, false, "buffer is nullptr");
    if (buffer->memory_ == nullptr) {
        isSharedMemory_ = true;
        AVCODEC_LOGW("memory is nullptr");
        return true;
    }
    isSharedMemory_ = buffer->memory_->GetMemoryType() == MemoryType::SHARED_MEMORY;
    if (isSharedMemory_) {
        AVCODEC_LOGW("AVBuffer is shared memory");
        return true;
    }

    auto surfaceBuffer = buffer->memory_->GetSurfaceBuffer();
    CHECK_AND_RETURN_RET_LOG(surfaceBuffer != nullptr, false, "surface buffer is nullptr");
    // pixelFormat
    VideoPixelFormat pixelFormat = TranslateSurfaceFormat(static_cast<GraphicPixelFormat>(surfaceBuffer->GetFormat()));
    SetPixFormat(pixelFormat);
    int32_t width = surfaceBuffer->GetWidth();
    int32_t height = surfaceBuffer->GetHeight();
    int32_t wStride = surfaceBuffer->GetStride();
    int32_t hStride = GetSliceHeightFromSurfaceBuffer(surfaceBuffer);
    bool ret = SetRectValue(width, height, wStride, hStride);
    CHECK_AND_RETURN_RET_LOG(ret, false, "Invalid width, height or stride");
    AVCODEC_LOGI(
        "Actual:(%{public}d x %{public}d), Converter:(%{public}d x %{public}d), Hardware:(%{public}d x %{public}d).",
        width, rect_.hStride, usrRect_.wStride, usrRect_.hStride, hwRect_.wStride, hwRect_.hStride);
    return true;
}

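// Computes the three rects used by the copy routines, enforcing
// rect_ <= usrRect_ <= hwRect_ per dimension. The pixel size inferred from
// wStride / width converts widths into byte strides (e.g. 4 for RGBA8888).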
bool BufferConverter::SetRectValue(const int32_t width, const int32_t height, const int32_t wStride,
                                   const int32_t hStride)
{
    CHECK_AND_RETURN_RET_LOG(wStride > 0, false, "stride <= 0");
    CHECK_AND_RETURN_RET_LOG(width > 0 && height > 0, false, "width/height <= 0");
    int32_t tempPixelSize = wStride / width;
    tempPixelSize = (tempPixelSize <= 0) ? 1 : tempPixelSize;

    // width or height <= calculated stride <= hardware stride
    // rect <= usrRect <= hwRect
    rect_.wStride = width * tempPixelSize;
    rect_.hStride = height;
    hwRect_.wStride = std::max(rect_.wStride, wStride);
    hwRect_.hStride = std::max(rect_.hStride, hStride);
    usrRect_.wStride = std::min(hwRect_.wStride, CalculateUserStride(width) * tempPixelSize);
    usrRect_.hStride = std::min(hwRect_.hStride, CalculateUserStride(height));
    return true;
}

inline int32_t BufferConverter::CalculateUserStride(const int32_t widthHeight)
{
    int32_t modVal = widthHeight & OFFSET_15;
    return modVal ? (widthHeight + OFFSET_16 - modVal) : widthHeight;
}

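// Determines the slice height (vertical stride) of a surface buffer. Encoders
// simply use the buffer height; decoders derive it from the offset of the
// second plane divided by the row stride, falling back to the buffer height
// when the plane info is unavailable.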
int32_t BufferConverter::GetSliceHeightFromSurfaceBuffer(sptr<SurfaceBuffer> &surfaceBuffer) const
{
    int32_t height = surfaceBuffer->GetHeight();
    if (isEncoder_) {
        return height;
    }
    OH_NativeBuffer_Planes *planes = nullptr;
    GSError err = surfaceBuffer->GetPlanesInfo(reinterpret_cast<void **>(&planes));
    if (err != GSERROR_OK || planes == nullptr) {
        AVCODEC_LOGW("get plane info failed, GSError=%{public}d", err);
        return height;
    }
    uint32_t count = planes->planeCount;
    if (count <= 1) {
        AVCODEC_LOGW("planes count is %{public}u", count);
        return height;
    }
    return static_cast<int32_t>(static_cast<int64_t>(planes->planes[1].offset) / surfaceBuffer->GetStride());
}
} // namespace MediaAVCodec
} // namespace OHOS