/*
 * Copyright (C) 2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "pixel_yuv_ext_utils.h"

#include <ios>
#include <istream>
#include <map>
#include <memory>

#include "image_log.h"
#include "image_trace.h"
#include "image_system_properties.h"
#include "media_errors.h"
#include "securec.h"
#if !defined(IOS_PLATFORM) && !defined(ANDROID_PLATFORM)
#include "surface_buffer.h"
#endif

#undef LOG_DOMAIN
#define LOG_DOMAIN LOG_TAG_DOMAIN_ID_IMAGE

#undef LOG_TAG
#define LOG_TAG "PixelYuvExtUtils"

namespace OHOS {
namespace Media {

static const uint8_t NUM_2 = 2;
static const uint8_t NUM_4 = 4;

static const std::map<PixelFormat, AVPixelFormat> FFMPEG_PIXEL_FORMAT_MAP = {
    {PixelFormat::UNKNOWN, AVPixelFormat::AV_PIX_FMT_NONE},
    {PixelFormat::NV21, AVPixelFormat::AV_PIX_FMT_NV21},
    {PixelFormat::NV12, AVPixelFormat::AV_PIX_FMT_NV12},
    {PixelFormat::ARGB_8888, AVPixelFormat::AV_PIX_FMT_ARGB},
    {PixelFormat::BGRA_8888, AVPixelFormat::AV_PIX_FMT_BGRA},
};

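// Number of Y (luma) samples in a YUV420 image of the given dimensions.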
static int32_t GetYSize(int32_t width, int32_t height)
{
    return width * height;
}

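// Offset of the V plane in a planar I420 buffer: the Y plane followed by the U plane,
// with chroma dimensions rounded up for odd widths and heights.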
static int32_t GetVOffset(int32_t width, int32_t height)
{
    return width * height + ((width + 1) / NUM_2) * ((height + 1) / NUM_2);
}

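// Stride (in samples) of a single U or V plane: half the luma width, rounded up.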
static int32_t GetUStride(int32_t width)
{
    return (width + 1) / NUM_2;
}

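// Height of the chroma planes: half the luma height, rounded up.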
static int32_t GetUVHeight(int32_t height)
{
    return (height + 1) / NUM_2;
}

// YUV420SP: the U and V samples are interleaved in a single UV plane.
static int32_t GetUVStride(int32_t width)
{
    return (width + 1) / NUM_2 * NUM_2;
}

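// Total sample count of a YUV420 image: one full-resolution Y plane plus two
// quarter-resolution chroma planes.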
static uint32_t GetImageSize(int32_t width, int32_t height)
{
    return width * height + ((width + 1) / NUM_2) * ((height + 1) / NUM_2) * NUM_2;
}

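// Converts 32-bit BGRA pixels to NV12 or NV21 via the libyuv converter handle.
// Returns true when the underlying conversion reports success.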
bool PixelYuvExtUtils::BGRAToYuv420(const uint8_t *src, uint8_t *dst, int srcW, int srcH,
    PixelFormat pixelFormat, YUVDataInfo &info)
{
    auto converter = ConverterHandle::GetInstance().GetHandle();
    int32_t r = 0;
    if (pixelFormat == PixelFormat::NV12) {
        r = converter.ARGBToNV12(src, srcW * NUM_4,
            dst, info.yStride,
            dst + info.uvOffset,
            info.uvStride, srcW, srcH);
    } else if (pixelFormat == PixelFormat::NV21) {
        r = converter.ARGBToNV21(src, srcW * NUM_4,
            dst, info.yStride,
            dst + info.uvOffset,
            info.uvStride, srcW, srcH);
    }
    return r == 0;
}

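// Converts NV12/NV21 pixels to 32-bit BGRA; the UV stride is first rounded up to an
// even value before the planes are addressed.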
bool PixelYuvExtUtils::Yuv420ToBGRA(const uint8_t *sample, uint8_t *dstArgb,
    Size &size, PixelFormat pixelFormat, YUVDataInfo &info)
{
    info.uvStride = (info.uvStride + 1) & ~1;
    const uint8_t *srcY = sample + info.yOffset;
    const uint8_t *srcUV = sample + info.uvOffset;
    const uint32_t dstStrideARGB = static_cast<uint32_t>(size.width) * NUM_4;
    auto converter = ConverterHandle::GetInstance().GetHandle();
    if (pixelFormat == PixelFormat::NV12) {
        converter.NV12ToARGB(srcY, info.yStride, srcUV, info.uvStride,
            dstArgb, dstStrideARGB, size.width, size.height);
    } else if (pixelFormat == PixelFormat::NV21) {
        converter.NV21ToARGB(srcY, info.yStride, srcUV, info.uvStride,
            dstArgb, dstStrideARGB, size.width, size.height);
    }
    return true;
}

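// Converts NV12/NV21 pixels to ARGB by producing BGRA in a temporary buffer and then
// swapping the channel order.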
bool PixelYuvExtUtils::Yuv420ToARGB(const uint8_t *sample, uint8_t *dstArgb,
    Size &size, PixelFormat pixelFormat, YUVDataInfo &info)
{
    std::unique_ptr<uint8_t[]> temp = std::make_unique<uint8_t[]>(size.width * size.height * NUM_4);
    if (!Yuv420ToBGRA(sample, temp.get(), size, pixelFormat, info)) {
        IMAGE_LOGE("Yuv420ToBGRA failed");
        return false;
    }
    auto converter = ConverterHandle::GetInstance().GetHandle();
    if (converter.ARGBToBGRA(temp.get(), size.width * NUM_4, dstArgb,
        size.width * NUM_4, size.width, size.height) != SUCCESS) {
        IMAGE_LOGE("ARGBToBGRA failed");
        return false;
    }
    return true;
}

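// Rotates NV12 pixels: deinterleave to I420 while rotating, then re-interleave the
// rotated planes into the destination NV12 buffer.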
bool PixelYuvExtUtils::NV12Rotate(uint8_t *src, PixelSize &size, YUVDataInfo &info,
    OpenSourceLibyuv::RotationMode &rotateNum, uint8_t *dst, YUVStrideInfo &dstStrides)
{
    std::unique_ptr<uint8_t[]> tmpPixels = std::make_unique<uint8_t[]>(GetImageSize(size.dstW, size.dstH));
    uint8_t *srcY = src + info.yOffset;
    uint8_t *srcUV = src + info.uvOffset;
    uint8_t *tmpY = tmpPixels.get();
    uint8_t *tmpU = tmpPixels.get() + GetYSize(size.dstW, size.dstH);
    uint8_t *tmpV = tmpPixels.get() + GetVOffset(size.dstW, size.dstH);

    auto converter = ConverterHandle::GetInstance().GetHandle();

    int srcYStride = static_cast<int>(info.yStride);
    int srcUVStride = static_cast<int>(info.uvStride);
    int tmpYStride = size.dstW;
    int tmpUStride = GetUStride(size.dstW);
    int tmpVStride = GetUStride(size.dstW);
    if (converter.NV12ToI420Rotate(srcY, srcYStride, srcUV, srcUVStride,
        tmpY, tmpYStride,
        tmpU, tmpUStride,
        tmpV, tmpVStride,
        size.srcW, size.srcH, rotateNum) == -1) {
        return false;
    }

    int dstYStride = static_cast<int>(dstStrides.yStride);
    int dstUVStride = static_cast<int>(dstStrides.uvStride);
    int dstWidth = size.dstW;
    int dstHeight = size.dstH;
    auto dstY = dst + dstStrides.yOffset;
    auto dstUV = dst + dstStrides.uvOffset;
    if (converter.I420ToNV12(tmpY, tmpYStride, tmpU, tmpUStride, tmpV, tmpVStride,
        dstY, dstYStride, dstUV, dstUVStride, dstWidth, dstHeight) == -1) {
        return false;
    }

    return true;
}

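// Rotates 10-bit P010 pixels: unpack to planar I010, rotate the three planes, then
// pack the result back to P010 using the destination strides.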
static bool NV12P010Rotate(YuvPixels yuvPixels, PixelSize &size, YUVDataInfo &info,
    OpenSourceLibyuv::RotationMode &rotateNum, YUVStrideInfo &dstStrides)
{
    std::unique_ptr<uint16_t[]> dstPixels = std::make_unique<uint16_t[]>(GetImageSize(info.yStride, size.srcH));
    uint16_t *srcbuffer = reinterpret_cast<uint16_t *>(yuvPixels.srcPixels);
    uint16_t *srcY = srcbuffer + info.yOffset;
    uint16_t *srcUV = srcbuffer + info.uvOffset;

    uint16_t *dstY = dstPixels.get();
    uint16_t *dstU = dstPixels.get() + GetYSize(info.yStride, size.srcH);
    uint16_t *dstV = dstPixels.get() + GetVOffset(info.yStride, size.srcH);
    auto converter = ConverterHandle::GetInstance().GetHandle();
    if (converter.P010ToI010(srcY, info.yStride, srcUV, GetUVStride(info.yStride),
        dstY, info.yStride, dstU, GetUStride(info.yStride),
        dstV, GetUStride(info.yStride), size.srcW, size.srcH) == -1) {
        IMAGE_LOGE("NV12P010ToI010 failed");
        return false;
    }

    std::unique_ptr<uint16_t[]> rotatePixels = std::make_unique<uint16_t[]>(GetImageSize(size.srcW, size.srcH));
    uint16_t *rotateY = rotatePixels.get();
    uint16_t *rotateU = rotatePixels.get() + GetYSize(size.dstW, size.dstH);
    uint16_t *rotateV = rotatePixels.get() + GetVOffset(size.dstW, size.dstH);

    if (converter.I010Rotate(dstY, info.yStride, dstU, GetUStride(info.yStride),
        dstV, GetUStride(info.yStride), rotateY, size.dstW, rotateU, GetUStride(size.dstW),
        rotateV, GetUStride(size.dstW), size.srcW, size.srcH, rotateNum) == -1) {
        IMAGE_LOGE("I010Rotate failed");
        return false;
    }

    uint16_t *dstbuffer = reinterpret_cast<uint16_t *>(yuvPixels.dstPixels);
    int32_t dstYStride = static_cast<int32_t>(dstStrides.yStride);
    int32_t dstUVStride = static_cast<int32_t>(dstStrides.uvStride);
    uint16_t *dstbufferY = dstbuffer + dstStrides.yOffset;
    uint16_t *dstbufferUV = dstbuffer + dstStrides.uvOffset;
    if (converter.I010ToP010(rotateY, size.dstW, rotateU, GetUStride(size.dstW), rotateV, GetUStride(size.dstW),
        dstbufferY, dstYStride, dstbufferUV, dstUVStride, size.dstW, size.dstH) == -1) {
        IMAGE_LOGE("I010ToP010 failed");
        return false;
    }
    return true;
}

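// Rotates a YUV image by rotateNum. P010 formats take the 10-bit path above; everything
// else is handled as NV12/NV21.
// Illustrative call for a 90-degree rotation (assumes the caller has already computed
// dstStrides for the rotated size and that OpenSourceLibyuv exposes kRotate90 as libyuv does):
//     OpenSourceLibyuv::RotationMode mode = OpenSourceLibyuv::kRotate90;
//     Size dstSize = {info.imageSize.height, info.imageSize.width};
//     PixelYuvExtUtils::YuvRotate(srcPixels, PixelFormat::NV12, info, dstSize, dstPixels, dstStrides, mode);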
bool PixelYuvExtUtils::YuvRotate(uint8_t *srcPixels, const PixelFormat &format, YUVDataInfo &info,
    Size &dstSize, uint8_t *dstPixels, YUVStrideInfo &dstStrides, OpenSourceLibyuv::RotationMode &rotateNum)
{
    int32_t dstWidth = dstSize.width;
    int32_t dstHeight = dstSize.height;
    PixelSize pixelSize = {info.imageSize.width, info.imageSize.height, dstWidth, dstHeight};
    if (format == PixelFormat::YCBCR_P010 || format == PixelFormat::YCRCB_P010) {
        IMAGE_LOGD("YuvRotate P010Rotate enter");
        YuvPixels yuvPixels = {srcPixels, dstPixels, 0, 0};
        if (!NV12P010Rotate(yuvPixels, pixelSize, info, rotateNum, dstStrides)) {
            IMAGE_LOGE("YuvRotate P010Rotate fail");
            return false;
        }
        return true;
    }
    if (!NV12Rotate(srcPixels, pixelSize, info, rotateNum, dstPixels, dstStrides)) {
        return false;
    }

    return true;
}

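// Maps an AntiAliasingOption onto the corresponding libyuv FilterMode; unrecognized
// options leave filterMode unchanged.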
void PixelYuvExtUtils::ConvertYuvMode(OpenSourceLibyuv::FilterMode &filterMode, const AntiAliasingOption &option)
{
    switch (option) {
        case AntiAliasingOption::NONE:
            filterMode = OpenSourceLibyuv::FilterMode::kFilterNone;
            break;
        case AntiAliasingOption::LOW:
            filterMode = OpenSourceLibyuv::FilterMode::kFilterLinear;
            break;
        case AntiAliasingOption::MEDIUM:
            filterMode = OpenSourceLibyuv::FilterMode::kFilterBilinear;
            break;
        case AntiAliasingOption::HIGH:
            filterMode = OpenSourceLibyuv::FilterMode::kFilterBox;
            break;
        default:
            break;
    }
}

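// Scales the interleaved UV plane of an NV12/NV21 image: split it into separate U and V
// planes, scale each one, then merge them back into the destination UV plane.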
static void ScaleUVPlane(const uint8_t *src, uint8_t *dst, OpenSourceLibyuv::FilterMode filterMode,
    YuvImageInfo &yuvInfo, uint32_t dstYStride, uint32_t dstYHeight, uint32_t dstYWidth)
{
    uint32_t srcUWidth = static_cast<uint32_t>(GetUStride(yuvInfo.width));
    uint32_t srcUHeight = static_cast<uint32_t>(GetUVHeight(yuvInfo.height));
    uint32_t dstUWidth = static_cast<uint32_t>(GetUStride(dstYWidth));
    uint32_t dstUHeight = static_cast<uint32_t>(GetUVHeight(dstYHeight));
    // Split the interleaved UV plane into separate U and V planes
    std::unique_ptr<uint8_t[]> uvData = std::make_unique<uint8_t[]>(NUM_2 * srcUWidth * srcUHeight);
    if (uvData == nullptr) {
        IMAGE_LOGE("ScaleUVPlane make unique ptr for uvData failed.");
        return;
    }
    uint8_t *uData = nullptr;
    uint8_t *vData = nullptr;
    uint32_t dstSplitStride = srcUWidth;
    const uint8_t *srcUV = src + yuvInfo.yuvDataInfo.uvOffset;
    uint32_t uvStride = yuvInfo.yuvDataInfo.uvStride;
    auto converter = ConverterHandle::GetInstance().GetHandle();
    if (yuvInfo.yuvFormat == PixelFormat::NV12) {
        uData = uvData.get();
        vData = uvData.get() + srcUWidth * srcUHeight;
        converter.SplitUVPlane(srcUV, uvStride, uData, dstSplitStride, vData, dstSplitStride, srcUWidth, srcUHeight);
    } else if (yuvInfo.yuvFormat == PixelFormat::NV21) {
        vData = uvData.get();
        uData = uvData.get() + srcUWidth * srcUHeight;
        converter.SplitUVPlane(srcUV, uvStride, vData, dstSplitStride, uData, dstSplitStride, srcUWidth, srcUHeight);
    }
    // Allocate temporary buffers for the scaled U and V planes
    std::unique_ptr<uint8_t[]> tempUVData = std::make_unique<uint8_t[]>(NUM_2 * dstUWidth * dstUHeight);
    if (tempUVData == nullptr) {
        IMAGE_LOGE("ScaleUVPlane make unique ptr for tempUVData failed.");
        return;
    }
    uint8_t *tempUData = nullptr;
    uint8_t *tempVData = nullptr;
    if (yuvInfo.yuvFormat == PixelFormat::NV12) {
        tempUData = tempUVData.get();
        tempVData = tempUVData.get() + dstUWidth * dstUHeight;
    } else if (yuvInfo.yuvFormat == PixelFormat::NV21) {
        tempVData = tempUVData.get();
        tempUData = tempUVData.get() + dstUWidth * dstUHeight;
    }

    // Scale the U and V planes separately
    converter.ScalePlane(uData, dstSplitStride, srcUWidth, srcUHeight,
        tempUData, dstUWidth, dstUWidth, dstUHeight, filterMode);

    converter.ScalePlane(vData, dstSplitStride, srcUWidth, srcUHeight,
        tempVData, dstUWidth, dstUWidth, dstUHeight, filterMode);
    // Merge the scaled U and V planes back into the destination UV plane
    uint8_t *dstUV = dst + GetYSize(dstYStride, dstYHeight);
    int32_t dstUVStride = static_cast<int32_t>(dstUWidth * NUM_2);
    // AllocatorType DMA_ALLOC: the Y stride carries padding, so reuse it for the UV plane
    if (dstYStride != dstYWidth) {
        dstUVStride = static_cast<int32_t>(dstYStride);
    }
    if (yuvInfo.yuvFormat == PixelFormat::NV12) {
        converter.MergeUVPlane(tempUData, dstUWidth, tempVData, dstUWidth, dstUV, dstUVStride, dstUWidth, dstUHeight);
    } else if (yuvInfo.yuvFormat == PixelFormat::NV21) {
        converter.MergeUVPlane(tempVData, dstUWidth, tempUData, dstUWidth, dstUV, dstUVStride, dstUWidth, dstUHeight);
    }

    uData = vData = nullptr;
    tempUData = tempVData = nullptr;
}

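// Scales 10-bit P010 pixels: unpack to planar I010, scale with I420Scale_16, then pack
// the scaled planes back to P010 at the destination strides.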
static void ScaleP010(YuvPixels yuvPixels, OpenSourceLibyuv::ImageYuvConverter &converter,
    OpenSourceLibyuv::FilterMode &filterMode, YuvImageInfo &yuvInfo, YUVStrideInfo &dstStrides)
{
    uint16_t *srcBuffer = reinterpret_cast<uint16_t *>(yuvPixels.srcPixels);
    uint16_t *srcY = srcBuffer + yuvInfo.yuvDataInfo.yOffset;
    int32_t srcYStride = static_cast<int32_t>(yuvInfo.yuvDataInfo.yStride);
    uint16_t *srcUV = srcY + yuvInfo.yuvDataInfo.uvOffset;
    int32_t srcUVStride = static_cast<int32_t>(yuvInfo.yuvDataInfo.uvStride);
    int32_t srcWidth = yuvInfo.width;
    int32_t srcHeight = yuvInfo.height;
    uint16_t *dstBuffer = reinterpret_cast<uint16_t *>(yuvPixels.dstPixels);
    int32_t dst_width = yuvInfo.width * yuvPixels.xAxis;
    int32_t dst_height = yuvInfo.height * yuvPixels.yAxis;
    uint16_t *dstBufferY = dstBuffer + dstStrides.yOffset;
    int32_t dstYStride = static_cast<int32_t>(dstStrides.yStride);
    uint16_t *dstBufferUV = dstBuffer + dstStrides.uvOffset;
    int32_t dstUVStride = static_cast<int32_t>(dstStrides.uvStride);
    std::unique_ptr<uint16_t[]> dstPixels = std::make_unique<uint16_t[]>(GetImageSize(srcYStride, srcHeight));
    uint16_t *dstY = dstPixels.get();
    uint16_t *dstU = dstPixels.get() + GetYSize(srcYStride, srcHeight);
    uint16_t *dstV = dstPixels.get() + GetVOffset(srcYStride, srcHeight);

    if (converter.P010ToI010(srcY, srcYStride, srcUV,
        srcUVStride, dstY, srcYStride, dstU, GetUStride(srcYStride), dstV,
        GetUStride(srcYStride), srcWidth, srcHeight) == -1) {
        IMAGE_LOGE("NV12P010ToI010 failed");
        return;
    }
    std::unique_ptr<uint16_t[]> scalePixels = std::make_unique<uint16_t[]>(GetImageSize(dstYStride, dst_height));
    uint16_t *scaleY = scalePixels.get();
    uint16_t *scaleU = scalePixels.get() + GetYSize(dstYStride, dst_height);
    uint16_t *scaleV = scalePixels.get() + GetVOffset(dstYStride, dst_height);
    if (converter.I420Scale_16(dstY, srcYStride, dstU, GetUStride(srcYStride), dstV,
        GetUStride(srcYStride), srcYStride, srcHeight, scaleY, dstYStride, scaleU,
        GetUStride(dstYStride), scaleV, GetUStride(dstYStride), dst_width, dst_height, filterMode) == -1) {
        IMAGE_LOGE("I420Scale_16 failed");
        return;
    }
    if (converter.I010ToP010(scaleY, dstYStride, scaleU, GetUStride(dstYStride),
        scaleV, GetUStride(dstYStride), dstBufferY, dstYStride, dstBufferUV, dstUVStride,
        dst_width, dst_height) == -1) {
        IMAGE_LOGE("I010ToP010 failed");
        return;
    }
}

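// Scales a YUV420 image by the factors xAxis and yAxis. P010 formats are routed to
// ScaleP010; NV12/NV21 use NV12Scale directly.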
void PixelYuvExtUtils::ScaleYuv420(float xAxis, float yAxis, const AntiAliasingOption &option,
    YuvImageInfo &yuvInfo, uint8_t *src, uint8_t *dst, YUVStrideInfo &dstStrides)
{
    OpenSourceLibyuv::FilterMode filterMode = OpenSourceLibyuv::FilterMode::kFilterLinear;
    ConvertYuvMode(filterMode, option);

    uint8_t *srcY = src + yuvInfo.yuvDataInfo.yOffset;
    int srcYStride = static_cast<int>(yuvInfo.yuvDataInfo.yStride);
    uint8_t *srcUV = srcY + yuvInfo.yuvDataInfo.uvOffset;
    int srcUVStride = static_cast<int>(yuvInfo.yuvDataInfo.uvStride);
    int srcWidth = yuvInfo.width;
    int srcHeight = yuvInfo.height;

    int32_t dst_width = yuvInfo.width * xAxis;
    int32_t dst_height = yuvInfo.height * yAxis;
    uint8_t *dstY = dst + dstStrides.yOffset;
    int dstYStride = static_cast<int>(dstStrides.yStride);
    uint8_t *dstUV = dst + dstStrides.uvOffset;
    int dstUVStride = static_cast<int>(dstStrides.uvStride);
    auto converter = ConverterHandle::GetInstance().GetHandle();
    YuvPixels yuvPixels = {src, dst, xAxis, yAxis};
    if (yuvInfo.yuvFormat == PixelFormat::YCBCR_P010 || yuvInfo.yuvFormat == PixelFormat::YCRCB_P010) {
        ScaleP010(yuvPixels, converter, filterMode, yuvInfo, dstStrides);
    } else {
        converter.NV12Scale(srcY, srcYStride, srcUV, srcUVStride, srcWidth, srcHeight,
            dstY, dstYStride, dstUV, dstUVStride, dst_width, dst_height, filterMode);
    }
}

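// Scales an NV12/NV21 image to an explicit destination size; P010 formats are not
// handled by this overload.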
void PixelYuvExtUtils::ScaleYuv420(int32_t dst_width, int32_t dst_height, const AntiAliasingOption &option,
    YuvImageInfo &yuvInfo, uint8_t *src, uint8_t *dst, YUVStrideInfo &dstStrides)
{
    OpenSourceLibyuv::FilterMode filterMode = OpenSourceLibyuv::FilterMode::kFilterLinear;
    ConvertYuvMode(filterMode, option);

    uint8_t *srcY = src + yuvInfo.yuvDataInfo.yOffset;
    int srcYStride = static_cast<int>(yuvInfo.yuvDataInfo.yStride);
    uint8_t *srcUV = srcY + yuvInfo.yuvDataInfo.uvOffset;
    int srcUVStride = static_cast<int>(yuvInfo.yuvDataInfo.uvStride);
    int srcWidth = yuvInfo.width;
    int srcHeight = yuvInfo.height;

    uint8_t *dstY = dst + dstStrides.yOffset;
    int dstYStride = static_cast<int>(dstStrides.yStride);
    uint8_t *dstUV = dst + dstStrides.uvOffset;
    int dstUVStride = static_cast<int>(dstStrides.uvStride);
    auto converter = ConverterHandle::GetInstance().GetHandle();

    PixelSize pixelSize = {srcWidth, srcHeight, dst_width, dst_height};
    if (yuvInfo.yuvFormat != PixelFormat::YCBCR_P010 && yuvInfo.yuvFormat != PixelFormat::YCRCB_P010) {
        converter.NV12Scale(srcY, srcYStride, srcUV, srcUVStride, srcWidth, srcHeight,
            dstY, dstYStride, dstUV, dstUVStride, dst_width, dst_height, filterMode);
    }
}

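// Flips an NV12/NV21 image vertically by copying with a negated source height.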
bool PixelYuvExtUtils::FlipXaxis(uint8_t *src, uint8_t *dst, Size &size, PixelFormat format,
    YUVDataInfo &info, YUVStrideInfo &dstStrides)
{
    IMAGE_LOGD("PixelYuvExtUtils FlipXaxis");
    uint8_t *srcY = src + info.yOffset;
    uint8_t *srcUV = src + info.uvOffset;
    int srcYStride = static_cast<int>(info.yStride);
    int srcUVStride = static_cast<int>(info.uvStride);
    int32_t width = size.width;
    int32_t height = size.height;

    uint8_t *dstY = dst + dstStrides.yOffset;
    uint8_t *dstUV = dst + dstStrides.uvOffset;
    int dstYStride = static_cast<int>(dstStrides.yStride);
    int dstUVStride = static_cast<int>(dstStrides.uvStride);

    auto converter = ConverterHandle::GetInstance().GetHandle();
    converter.NV12Copy(srcY, srcYStride, srcUV, srcUVStride, dstY, dstYStride, dstUV, dstUVStride, width, -height);
    return true;
}

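// Mirrors an NV12/NV21 image horizontally; when isReversed is true the height is
// negated so the result is flipped vertically as well.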
bool PixelYuvExtUtils::Mirror(uint8_t *src, uint8_t *dst, Size &size, PixelFormat format, YUVDataInfo &info,
    YUVStrideInfo &dstStrides, bool isReversed)
{
    auto converter = ConverterHandle::GetInstance().GetHandle();
    uint8_t *srcY = src + info.yOffset;
    uint8_t *srcUV = src + info.uvOffset;
    int32_t width = size.width;
    int32_t height = size.height;
    int srcYStride = static_cast<int>(info.yStride);
    int srcUVStride = static_cast<int>(info.uvStride);

    uint8_t *dstY = dst + dstStrides.yOffset;
    uint8_t *dstUV = dst + dstStrides.uvOffset;
    int dstYStride = static_cast<int>(dstStrides.yStride);
    int dstUVStride = static_cast<int>(dstStrides.uvStride);
    height = isReversed ? -height : height;

    int iret = converter.NV12Mirror(srcY, srcYStride, srcUV, srcUVStride, dstY, dstYStride,
        dstUV, dstUVStride, width, height);
    if (iret == -1) {
        return false;
    }
    return true;
}
} // namespace Media
} // namespace OHOS