1 /*
2 * Copyright 2018 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "C2SoftAvcEnc"
19 #include <log/log.h>
20 #include <utils/misc.h>
21
22 #include <algorithm>
23
24 #include <media/hardware/VideoAPI.h>
25 #include <media/stagefright/MediaDefs.h>
26 #include <media/stagefright/MediaErrors.h>
27 #include <media/stagefright/MetaData.h>
28 #include <media/stagefright/foundation/AUtils.h>
29
30 #include <C2Debug.h>
31 #include <Codec2Mapper.h>
32 #include <C2PlatformSupport.h>
33 #include <Codec2BufferUtils.h>
34 #include <SimpleC2Interface.h>
35 #include <util/C2InterfaceHelper.h>
36
37 #include "C2SoftAvcEnc.h"
38 #include "ih264e.h"
39 #include "ih264e_error.h"
40
41 namespace android {
42
43 namespace {
44
45 constexpr char COMPONENT_NAME[] = "c2.android.avc.encoder";
46 constexpr uint32_t kMinOutBufferSize = 524288;
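// Derives the sync (IDR) interval, I-frame interval and maximum B-frame run length
// from a flexible GOP layer list: every layer multiplies the intervals by
// (count + 1), I-frame layers are excluded from the I interval, and a (P|B) layer's
// count is taken as the number of consecutive B frames. A layer count of UINT32_MAX
// zeroes the corresponding interval.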
47 void ParseGop(
48 const C2StreamGopTuning::output &gop,
49 uint32_t *syncInterval, uint32_t *iInterval, uint32_t *maxBframes) {
50 uint32_t syncInt = 1;
51 uint32_t iInt = 1;
52 for (size_t i = 0; i < gop.flexCount(); ++i) {
53 const C2GopLayerStruct &layer = gop.m.values[i];
54 if (layer.count == UINT32_MAX) {
55 syncInt = 0;
56 } else if (syncInt <= UINT32_MAX / (layer.count + 1)) {
57 syncInt *= (layer.count + 1);
58 }
59 if ((layer.type_ & I_FRAME) == 0) {
60 if (layer.count == UINT32_MAX) {
61 iInt = 0;
62 } else if (iInt <= UINT32_MAX / (layer.count + 1)) {
63 iInt *= (layer.count + 1);
64 }
65 }
66 if (layer.type_ == C2Config::picture_type_t(P_FRAME | B_FRAME) && maxBframes) {
67 *maxBframes = layer.count;
68 }
69 }
70 if (syncInterval) {
71 *syncInterval = syncInt;
72 }
73 if (iInterval) {
74 *iInterval = iInt;
75 }
76 }
77
78 } // namespace
79
80 class C2SoftAvcEnc::IntfImpl : public SimpleInterface<void>::BaseParams {
81 public:
82     explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
83 : SimpleInterface<void>::BaseParams(
84 helper,
85 COMPONENT_NAME,
86 C2Component::KIND_ENCODER,
87 C2Component::DOMAIN_VIDEO,
88 MEDIA_MIMETYPE_VIDEO_AVC) {
89 noPrivateBuffers(); // TODO: account for our buffers here
90 noInputReferences();
91 noOutputReferences();
92 noTimeStretch();
93 setDerivedInstance(this);
94
95 addParameter(
96 DefineParam(mUsage, C2_PARAMKEY_INPUT_STREAM_USAGE)
97 .withConstValue(new C2StreamUsageTuning::input(
98 0u, (uint64_t)C2MemoryUsage::CPU_READ))
99 .build());
100
101 addParameter(
102 DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
103 .withConstValue(new C2ComponentAttributesSetting(
104 C2Component::ATTRIB_IS_TEMPORAL))
105 .build());
106
107 addParameter(
108 DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
109 .withDefault(new C2StreamPictureSizeInfo::input(0u, 16, 16))
110 .withFields({
111 C2F(mSize, width).inRange(2, 2560, 2),
112 C2F(mSize, height).inRange(2, 2560, 2),
113 })
114 .withSetter(SizeSetter)
115 .build());
116
117 addParameter(
118 DefineParam(mGop, C2_PARAMKEY_GOP)
119 .withDefault(C2StreamGopTuning::output::AllocShared(
120 0 /* flexCount */, 0u /* stream */))
121 .withFields({C2F(mGop, m.values[0].type_).any(),
122 C2F(mGop, m.values[0].count).any()})
123 .withSetter(GopSetter)
124 .build());
125
126 addParameter(
127 DefineParam(mPictureQuantization, C2_PARAMKEY_PICTURE_QUANTIZATION)
128 .withDefault(C2StreamPictureQuantizationTuning::output::AllocShared(
129 0 /* flexCount */, 0u /* stream */))
130 .withFields({C2F(mPictureQuantization, m.values[0].type_).oneOf(
131 {C2Config::picture_type_t(I_FRAME),
132 C2Config::picture_type_t(P_FRAME),
133 C2Config::picture_type_t(B_FRAME)}),
134 C2F(mPictureQuantization, m.values[0].min).any(),
135 C2F(mPictureQuantization, m.values[0].max).any()})
136 .withSetter(PictureQuantizationSetter)
137 .build());
138
139 addParameter(
140 DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY)
141 .withDefault(new C2PortActualDelayTuning::input(DEFAULT_B_FRAMES))
142 .withFields({C2F(mActualInputDelay, value).inRange(0, MAX_B_FRAMES)})
143 .calculatedAs(InputDelaySetter, mGop)
144 .build());
145
146 addParameter(
147 DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
148 .withDefault(new C2StreamFrameRateInfo::output(0u, 1.))
149 // TODO: More restriction?
150 .withFields({C2F(mFrameRate, value).greaterThan(0.)})
151 .withSetter(Setter<decltype(*mFrameRate)>::StrictValueWithNoDeps)
152 .build());
153
154 addParameter(
155 DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
156 .withDefault(new C2StreamBitrateInfo::output(0u, 64000))
157 .withFields({C2F(mBitrate, value).inRange(4096, 12000000)})
158 .withSetter(BitrateSetter)
159 .build());
160
161 addParameter(
162 DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH)
163 .withDefault(new C2StreamIntraRefreshTuning::output(
164 0u, C2Config::INTRA_REFRESH_DISABLED, 0.))
165 .withFields({
166 C2F(mIntraRefresh, mode).oneOf({
167 C2Config::INTRA_REFRESH_DISABLED, C2Config::INTRA_REFRESH_ARBITRARY }),
168 C2F(mIntraRefresh, period).any()
169 })
170 .withSetter(IntraRefreshSetter)
171 .build());
172
173 addParameter(
174 DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL)
175 .withDefault(new C2StreamProfileLevelInfo::output(
176 0u, PROFILE_AVC_CONSTRAINED_BASELINE, LEVEL_AVC_4_1))
177 .withFields({
178 C2F(mProfileLevel, profile).oneOf({
179 PROFILE_AVC_BASELINE,
180 PROFILE_AVC_CONSTRAINED_BASELINE,
181 PROFILE_AVC_MAIN,
182 }),
183 C2F(mProfileLevel, level).oneOf({
184 LEVEL_AVC_1,
185 LEVEL_AVC_1B,
186 LEVEL_AVC_1_1,
187 LEVEL_AVC_1_2,
188 LEVEL_AVC_1_3,
189 LEVEL_AVC_2,
190 LEVEL_AVC_2_1,
191 LEVEL_AVC_2_2,
192 LEVEL_AVC_3,
193 LEVEL_AVC_3_1,
194 LEVEL_AVC_3_2,
195 LEVEL_AVC_4,
196 LEVEL_AVC_4_1,
197 LEVEL_AVC_4_2,
198 LEVEL_AVC_5,
199 }),
200 })
201 .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
202 .build());
203
204 addParameter(
205 DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME)
206 .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE))
207 .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) })
208 .withSetter(Setter<decltype(*mRequestSync)>::NonStrictValueWithNoDeps)
209 .build());
210
211 addParameter(
212 DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL)
213 .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000))
214 .withFields({C2F(mSyncFramePeriod, value).any()})
215 .withSetter(Setter<decltype(*mSyncFramePeriod)>::StrictValueWithNoDeps)
216 .build());
217
218 addParameter(
219 DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS)
220 .withDefault(new C2StreamColorAspectsInfo::input(
221 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED,
222 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
223 .withFields({
224 C2F(mColorAspects, range).inRange(
225 C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
226 C2F(mColorAspects, primaries).inRange(
227 C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
228 C2F(mColorAspects, transfer).inRange(
229 C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
230 C2F(mColorAspects, matrix).inRange(
231 C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
232 })
233 .withSetter(ColorAspectsSetter)
234 .build());
235
236 addParameter(
237 DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS)
238 .withDefault(new C2StreamColorAspectsInfo::output(
239 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED,
240 C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED))
241 .withFields({
242 C2F(mCodedColorAspects, range).inRange(
243 C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER),
244 C2F(mCodedColorAspects, primaries).inRange(
245 C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER),
246 C2F(mCodedColorAspects, transfer).inRange(
247 C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER),
248 C2F(mCodedColorAspects, matrix).inRange(
249 C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER)
250 })
251 .withSetter(CodedColorAspectsSetter, mColorAspects)
252 .build());
253 }
254
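    // The encoder's input delay equals the longest run of consecutive B frames, so
    // it is recalculated from the GOP structure whenever that changes.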
255     static C2R InputDelaySetter(
256 bool mayBlock,
257 C2P<C2PortActualDelayTuning::input> &me,
258 const C2P<C2StreamGopTuning::output> &gop) {
259 (void)mayBlock;
260 uint32_t maxBframes = 0;
261 ParseGop(gop.v, nullptr, nullptr, &maxBframes);
262 me.set().value = maxBframes;
263 return C2R::Ok();
264 }
265
266     static C2R BitrateSetter(bool mayBlock, C2P<C2StreamBitrateInfo::output> &me) {
267 (void)mayBlock;
268 C2R res = C2R::Ok();
269 if (me.v.value <= 4096) {
270 me.set().value = 4096;
271 }
272 return res;
273 }
274
275
276     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
277 C2P<C2StreamPictureSizeInfo::input> &me) {
278 (void)mayBlock;
279 C2R res = C2R::Ok();
280 if (!me.F(me.v.width).supportsAtAll(me.v.width)) {
281 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width)));
282 me.set().width = oldMe.v.width;
283 }
284 if (!me.F(me.v.height).supportsAtAll(me.v.height)) {
285 res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height)));
286 me.set().height = oldMe.v.height;
287 }
288 return res;
289 }
290
291     static C2R ProfileLevelSetter(
292 bool mayBlock,
293 C2P<C2StreamProfileLevelInfo::output> &me,
294 const C2P<C2StreamPictureSizeInfo::input> &size,
295 const C2P<C2StreamFrameRateInfo::output> &frameRate,
296 const C2P<C2StreamBitrateInfo::output> &bitrate) {
297 (void)mayBlock;
298 if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
299 me.set().profile = PROFILE_AVC_CONSTRAINED_BASELINE;
300 }
301
302 struct LevelLimits {
303 C2Config::level_t level;
304 float mbsPerSec;
305 uint64_t mbs;
306 uint32_t bitrate;
307 };
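        // Per-level caps on macroblock rate, frame size (in macroblocks) and bitrate,
        // roughly following the H.264 level limits; used below to raise the level to
        // one that can carry the configured size, frame rate and bitrate.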
308 constexpr LevelLimits kLimits[] = {
309 { LEVEL_AVC_1, 1485, 99, 64000 },
310 // Decoder does not properly handle level 1b.
311 // { LEVEL_AVC_1B, 1485, 99, 128000 },
312 { LEVEL_AVC_1_1, 3000, 396, 192000 },
313 { LEVEL_AVC_1_2, 6000, 396, 384000 },
314 { LEVEL_AVC_1_3, 11880, 396, 768000 },
315 { LEVEL_AVC_2, 11880, 396, 2000000 },
316 { LEVEL_AVC_2_1, 19800, 792, 4000000 },
317 { LEVEL_AVC_2_2, 20250, 1620, 4000000 },
318 { LEVEL_AVC_3, 40500, 1620, 10000000 },
319 { LEVEL_AVC_3_1, 108000, 3600, 14000000 },
320 { LEVEL_AVC_3_2, 216000, 5120, 20000000 },
321 { LEVEL_AVC_4, 245760, 8192, 20000000 },
322 { LEVEL_AVC_4_1, 245760, 8192, 50000000 },
323 { LEVEL_AVC_4_2, 522240, 8704, 50000000 },
324 { LEVEL_AVC_5, 589824, 22080, 135000000 },
325 };
326
327 uint64_t mbs = uint64_t((size.v.width + 15) / 16) * ((size.v.height + 15) / 16);
328 float mbsPerSec = float(mbs) * frameRate.v.value;
329
330 // Check if the supplied level meets the MB / bitrate requirements. If
331 // not, update the level with the lowest level meeting the requirements.
332
333 bool found = false;
334 // By default needsUpdate = false in case the supplied level does meet
335 // the requirements. For Level 1b, we want to update the level anyway,
336 // so we set it to true in that case.
337 bool needsUpdate = (me.v.level == LEVEL_AVC_1B);
338 for (const LevelLimits &limit : kLimits) {
339 if (mbs <= limit.mbs && mbsPerSec <= limit.mbsPerSec &&
340 bitrate.v.value <= limit.bitrate) {
341 // This is the lowest level that meets the requirements, and if
342 // we haven't seen the supplied level yet, that means we don't
343 // need the update.
344 if (needsUpdate) {
345 ALOGD("Given level %x does not cover current configuration: "
346 "adjusting to %x", me.v.level, limit.level);
347 me.set().level = limit.level;
348 }
349 found = true;
350 break;
351 }
352 if (me.v.level == limit.level) {
353 // We break out of the loop when the lowest feasible level is
354 // found. The fact that we're here means that our level doesn't
355 // meet the requirement and needs to be updated.
356 needsUpdate = true;
357 }
358 }
359 if (!found) {
360 // We set to the highest supported level.
361 me.set().level = LEVEL_AVC_5;
362 }
363
364 return C2R::Ok();
365 }
366
367     static C2R IntraRefreshSetter(bool mayBlock, C2P<C2StreamIntraRefreshTuning::output> &me) {
368 (void)mayBlock;
369 C2R res = C2R::Ok();
370 if (me.v.period < 1) {
371 me.set().mode = C2Config::INTRA_REFRESH_DISABLED;
372 me.set().period = 0;
373 } else {
374 // only support arbitrary mode (cyclic in our case)
375 me.set().mode = C2Config::INTRA_REFRESH_ARBITRARY;
376 }
377 return res;
378 }
379
380     static C2R GopSetter(bool mayBlock, C2P<C2StreamGopTuning::output> &me) {
381 (void)mayBlock;
382 for (size_t i = 0; i < me.v.flexCount(); ++i) {
383             const C2GopLayerStruct &layer = me.v.m.values[i];
384 if (layer.type_ == C2Config::picture_type_t(P_FRAME | B_FRAME)
385 && layer.count > MAX_B_FRAMES) {
386 me.set().m.values[i].count = MAX_B_FRAMES;
387 }
388 }
389 return C2R::Ok();
390 }
391
392     static C2R PictureQuantizationSetter(bool mayBlock,
393 C2P<C2StreamPictureQuantizationTuning::output> &me) {
394 (void)mayBlock;
395 (void)me;
396
397 // TODO: refactor with same algorithm in the SetQp()
398 int32_t iMin = DEFAULT_I_QP_MIN, pMin = DEFAULT_P_QP_MIN, bMin = DEFAULT_B_QP_MIN;
399 int32_t iMax = DEFAULT_I_QP_MAX, pMax = DEFAULT_P_QP_MAX, bMax = DEFAULT_B_QP_MAX;
400
401 for (size_t i = 0; i < me.v.flexCount(); ++i) {
402 const C2PictureQuantizationStruct &layer = me.v.m.values[i];
403
404 if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
405 iMax = layer.max;
406 iMin = layer.min;
407 ALOGV("iMin %d iMax %d", iMin, iMax);
408 } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
409 pMax = layer.max;
410 pMin = layer.min;
411 ALOGV("pMin %d pMax %d", pMin, pMax);
412 } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
413 bMax = layer.max;
414 bMin = layer.min;
415 ALOGV("bMin %d bMax %d", bMin, bMax);
416 }
417 }
418
419 ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d b %d-%d",
420 iMin, iMax, pMin, pMax, bMin, bMax);
421
422 // ensure we have legal values
423 iMax = std::clamp(iMax, CODEC_QP_MIN, CODEC_QP_MAX);
424 iMin = std::clamp(iMin, CODEC_QP_MIN, CODEC_QP_MAX);
425 pMax = std::clamp(pMax, CODEC_QP_MIN, CODEC_QP_MAX);
426 pMin = std::clamp(pMin, CODEC_QP_MIN, CODEC_QP_MAX);
427 bMax = std::clamp(bMax, CODEC_QP_MIN, CODEC_QP_MAX);
428 bMin = std::clamp(bMin, CODEC_QP_MIN, CODEC_QP_MAX);
429
430 // put them back into the structure
431 for (size_t i = 0; i < me.v.flexCount(); ++i) {
432 const C2PictureQuantizationStruct &layer = me.v.m.values[i];
433
434 if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
435 me.set().m.values[i].max = iMax;
436 me.set().m.values[i].min = iMin;
437 }
438 if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
439 me.set().m.values[i].max = pMax;
440 me.set().m.values[i].min = pMin;
441 }
442 if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
443 me.set().m.values[i].max = bMax;
444 me.set().m.values[i].min = bMin;
445 }
446 }
447
448 ALOGV("PictureQuantizationSetter(exit): i %d-%d p %d-%d b %d-%d",
449 iMin, iMax, pMin, pMax, bMin, bMax);
450
451 return C2R::Ok();
452 }
453
454     static C2R ColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::input> &me) {
455 (void)mayBlock;
456 if (me.v.range > C2Color::RANGE_OTHER) {
457 me.set().range = C2Color::RANGE_OTHER;
458 }
459 if (me.v.primaries > C2Color::PRIMARIES_OTHER) {
460 me.set().primaries = C2Color::PRIMARIES_OTHER;
461 }
462 if (me.v.transfer > C2Color::TRANSFER_OTHER) {
463 me.set().transfer = C2Color::TRANSFER_OTHER;
464 }
465 if (me.v.matrix > C2Color::MATRIX_OTHER) {
466 me.set().matrix = C2Color::MATRIX_OTHER;
467 }
468 return C2R::Ok();
469 }
470
471     static C2R CodedColorAspectsSetter(bool mayBlock, C2P<C2StreamColorAspectsInfo::output> &me,
472 const C2P<C2StreamColorAspectsInfo::input> &coded) {
473 (void)mayBlock;
474 me.set().range = coded.v.range;
475 me.set().primaries = coded.v.primaries;
476 me.set().transfer = coded.v.transfer;
477 me.set().matrix = coded.v.matrix;
478 return C2R::Ok();
479 }
480
481     IV_PROFILE_T getProfile_l() const {
482 switch (mProfileLevel->profile) {
483 case PROFILE_AVC_CONSTRAINED_BASELINE: [[fallthrough]];
484 case PROFILE_AVC_BASELINE: return IV_PROFILE_BASE;
485 case PROFILE_AVC_MAIN: return IV_PROFILE_MAIN;
486 default:
487 ALOGD("Unrecognized profile: %x", mProfileLevel->profile);
488 return IV_PROFILE_DEFAULT;
489 }
490 }
491
492     UWORD32 getLevel_l() const {
493 struct Level {
494 C2Config::level_t c2Level;
495 UWORD32 avcLevel;
496 };
497 constexpr Level levels[] = {
498 { LEVEL_AVC_1, 10 },
499 { LEVEL_AVC_1B, 9 },
500 { LEVEL_AVC_1_1, 11 },
501 { LEVEL_AVC_1_2, 12 },
502 { LEVEL_AVC_1_3, 13 },
503 { LEVEL_AVC_2, 20 },
504 { LEVEL_AVC_2_1, 21 },
505 { LEVEL_AVC_2_2, 22 },
506 { LEVEL_AVC_3, 30 },
507 { LEVEL_AVC_3_1, 31 },
508 { LEVEL_AVC_3_2, 32 },
509 { LEVEL_AVC_4, 40 },
510 { LEVEL_AVC_4_1, 41 },
511 { LEVEL_AVC_4_2, 42 },
512 { LEVEL_AVC_5, 50 },
513 };
514 for (const Level &level : levels) {
515 if (mProfileLevel->level == level.c2Level) {
516 return level.avcLevel;
517 }
518 }
519 ALOGD("Unrecognized level: %x", mProfileLevel->level);
520 return 41;
521 }
522
523     uint32_t getSyncFramePeriod_l() const {
524 if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) {
525 return 0;
526 }
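        // Convert the sync frame interval from microseconds to a frame count at the
        // current frame rate, rounding to nearest and clamping to at least one frame.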
527 double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value;
528 return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.);
529 }
530
531 // unsafe getters
532     std::shared_ptr<C2StreamPictureSizeInfo::input> getSize_l() const { return mSize; }
533     std::shared_ptr<C2StreamIntraRefreshTuning::output> getIntraRefresh_l() const { return mIntraRefresh; }
534     std::shared_ptr<C2StreamFrameRateInfo::output> getFrameRate_l() const { return mFrameRate; }
535     std::shared_ptr<C2StreamBitrateInfo::output> getBitrate_l() const { return mBitrate; }
536     std::shared_ptr<C2StreamRequestSyncFrameTuning::output> getRequestSync_l() const { return mRequestSync; }
537     std::shared_ptr<C2StreamGopTuning::output> getGop_l() const { return mGop; }
538     std::shared_ptr<C2StreamPictureQuantizationTuning::output> getPictureQuantization_l() const
539 { return mPictureQuantization; }
540     std::shared_ptr<C2StreamColorAspectsInfo::output> getCodedColorAspects_l() const {
541 return mCodedColorAspects;
542 }
543
544 private:
545 std::shared_ptr<C2StreamUsageTuning::input> mUsage;
546 std::shared_ptr<C2StreamPictureSizeInfo::input> mSize;
547 std::shared_ptr<C2StreamFrameRateInfo::output> mFrameRate;
548 std::shared_ptr<C2StreamRequestSyncFrameTuning::output> mRequestSync;
549 std::shared_ptr<C2StreamIntraRefreshTuning::output> mIntraRefresh;
550 std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
551 std::shared_ptr<C2StreamProfileLevelInfo::output> mProfileLevel;
552 std::shared_ptr<C2StreamSyncFrameIntervalTuning::output> mSyncFramePeriod;
553 std::shared_ptr<C2StreamGopTuning::output> mGop;
554 std::shared_ptr<C2StreamPictureQuantizationTuning::output> mPictureQuantization;
555 std::shared_ptr<C2StreamColorAspectsInfo::input> mColorAspects;
556 std::shared_ptr<C2StreamColorAspectsInfo::output> mCodedColorAspects;
557 };
558
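// Route the generic ive_* encoder API calls used below to the H.264 (ih264e) entry point.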
559 #define ive_api_function ih264e_api_function
560
561 namespace {
562
563 // From external/libavc/encoder/ih264e_bitstream.h
564 constexpr uint32_t MIN_STREAM_SIZE = 0x800;
565
566 static size_t GetCPUCoreCount() {
567 long cpuCoreCount = 1;
568 #if defined(_SC_NPROCESSORS_ONLN)
569 cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
570 #else
571 // _SC_NPROC_ONLN must be defined...
572 cpuCoreCount = sysconf(_SC_NPROC_ONLN);
573 #endif
574 CHECK(cpuCoreCount >= 1);
575 ALOGV("Number of CPU cores: %ld", cpuCoreCount);
576 return (size_t)cpuCoreCount;
577 }
578
579 } // namespace
580
581 C2SoftAvcEnc::C2SoftAvcEnc(
582 const char *name, c2_node_id_t id, const std::shared_ptr<IntfImpl> &intfImpl)
583 : SimpleC2Component(std::make_shared<SimpleInterface<IntfImpl>>(name, id, intfImpl)),
584 mIntf(intfImpl),
585 mIvVideoColorFormat(IV_YUV_420P),
586 mAVCEncProfile(IV_PROFILE_BASE),
587 mAVCEncLevel(41),
588 mStarted(false),
589 mSawInputEOS(false),
590 mSignalledError(false),
591 mCodecCtx(nullptr),
592 mOutBlock(nullptr),
593 mOutBufferSize(kMinOutBufferSize) {
594
595     // If dump is enabled, create empty dump files for the input and output streams
596 GENERATE_FILE_NAMES();
597 CREATE_DUMP_FILE(mInFile);
598 CREATE_DUMP_FILE(mOutFile);
599
600 initEncParams();
601 }
602
603 C2SoftAvcEnc::~C2SoftAvcEnc() {
604 onRelease();
605 }
606
607 c2_status_t C2SoftAvcEnc::onInit() {
608 return C2_OK;
609 }
610
611 c2_status_t C2SoftAvcEnc::onStop() {
612 return C2_OK;
613 }
614
615 void C2SoftAvcEnc::onReset() {
616 // TODO: use IVE_CMD_CTL_RESET?
617 releaseEncoder();
618 if (mOutBlock) {
619 mOutBlock.reset();
620 }
621 initEncParams();
622 }
623
624 void C2SoftAvcEnc::onRelease() {
625 releaseEncoder();
626 if (mOutBlock) {
627 mOutBlock.reset();
628 }
629 }
630
631 c2_status_t C2SoftAvcEnc::onFlush_sm() {
632 // TODO: use IVE_CMD_CTL_FLUSH?
633 return C2_OK;
634 }
635
636 void C2SoftAvcEnc::initEncParams() {
637 mCodecCtx = nullptr;
638 mMemRecords = nullptr;
639 mNumMemRecords = DEFAULT_MEM_REC_CNT;
640 mHeaderGenerated = 0;
641 mNumCores = GetCPUCoreCount();
642 mArch = DEFAULT_ARCH;
643 mSliceMode = DEFAULT_SLICE_MODE;
644 mSliceParam = DEFAULT_SLICE_PARAM;
645 mHalfPelEnable = DEFAULT_HPEL;
646 mIInterval = DEFAULT_I_INTERVAL;
647 mIDRInterval = DEFAULT_IDR_INTERVAL;
648 mDisableDeblkLevel = DEFAULT_DISABLE_DEBLK_LEVEL;
649 mEnableFastSad = DEFAULT_ENABLE_FAST_SAD;
650 mEnableAltRef = DEFAULT_ENABLE_ALT_REF;
651 mEncSpeed = DEFAULT_ENC_SPEED;
652 mIntra4x4 = DEFAULT_INTRA4x4;
653 mConstrainedIntraFlag = DEFAULT_CONSTRAINED_INTRA;
654 mPSNREnable = DEFAULT_PSNR_ENABLE;
655 mReconEnable = DEFAULT_RECON_ENABLE;
656 mEntropyMode = DEFAULT_ENTROPY_MODE;
657 mBframes = DEFAULT_B_FRAMES;
658
659 gettimeofday(&mTimeStart, nullptr);
660 gettimeofday(&mTimeEnd, nullptr);
661 }
662
663 c2_status_t C2SoftAvcEnc::setDimensions() {
664 ive_ctl_set_dimensions_ip_t s_dimensions_ip;
665 ive_ctl_set_dimensions_op_t s_dimensions_op;
666 IV_STATUS_T status;
667
668 s_dimensions_ip.e_cmd = IVE_CMD_VIDEO_CTL;
669 s_dimensions_ip.e_sub_cmd = IVE_CMD_CTL_SET_DIMENSIONS;
670 s_dimensions_ip.u4_ht = mSize->height;
671 s_dimensions_ip.u4_wd = mSize->width;
672
673 s_dimensions_ip.u4_timestamp_high = -1;
674 s_dimensions_ip.u4_timestamp_low = -1;
675
676 s_dimensions_ip.u4_size = sizeof(ive_ctl_set_dimensions_ip_t);
677 s_dimensions_op.u4_size = sizeof(ive_ctl_set_dimensions_op_t);
678
679 status = ive_api_function(mCodecCtx, &s_dimensions_ip, &s_dimensions_op);
680 if (status != IV_SUCCESS) {
681 ALOGE("Unable to set frame dimensions = 0x%x\n",
682 s_dimensions_op.u4_error_code);
683 return C2_CORRUPTED;
684 }
685 return C2_OK;
686 }
687
688 c2_status_t C2SoftAvcEnc::setNumCores() {
689 IV_STATUS_T status;
690 ive_ctl_set_num_cores_ip_t s_num_cores_ip;
691 ive_ctl_set_num_cores_op_t s_num_cores_op;
692 s_num_cores_ip.e_cmd = IVE_CMD_VIDEO_CTL;
693 s_num_cores_ip.e_sub_cmd = IVE_CMD_CTL_SET_NUM_CORES;
694 s_num_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_CORES);
695 s_num_cores_ip.u4_timestamp_high = -1;
696 s_num_cores_ip.u4_timestamp_low = -1;
697 s_num_cores_ip.u4_size = sizeof(ive_ctl_set_num_cores_ip_t);
698
699 s_num_cores_op.u4_size = sizeof(ive_ctl_set_num_cores_op_t);
700
701 status = ive_api_function(
702 mCodecCtx, (void *) &s_num_cores_ip, (void *) &s_num_cores_op);
703 if (status != IV_SUCCESS) {
704 ALOGE("Unable to set processor params = 0x%x\n",
705 s_num_cores_op.u4_error_code);
706 return C2_CORRUPTED;
707 }
708 return C2_OK;
709 }
710
711 c2_status_t C2SoftAvcEnc::setFrameRate() {
712 ive_ctl_set_frame_rate_ip_t s_frame_rate_ip;
713 ive_ctl_set_frame_rate_op_t s_frame_rate_op;
714 IV_STATUS_T status;
715
716 s_frame_rate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
717 s_frame_rate_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMERATE;
718
719 s_frame_rate_ip.u4_src_frame_rate = mFrameRate->value + 0.5;
720 s_frame_rate_ip.u4_tgt_frame_rate = mFrameRate->value + 0.5;
721
722 s_frame_rate_ip.u4_timestamp_high = -1;
723 s_frame_rate_ip.u4_timestamp_low = -1;
724
725 s_frame_rate_ip.u4_size = sizeof(ive_ctl_set_frame_rate_ip_t);
726 s_frame_rate_op.u4_size = sizeof(ive_ctl_set_frame_rate_op_t);
727
728 status = ive_api_function(mCodecCtx, &s_frame_rate_ip, &s_frame_rate_op);
729 if (status != IV_SUCCESS) {
730 ALOGE("Unable to set frame rate = 0x%x\n",
731 s_frame_rate_op.u4_error_code);
732 return C2_CORRUPTED;
733 }
734 return C2_OK;
735 }
736
737 c2_status_t C2SoftAvcEnc::setIpeParams() {
738 ive_ctl_set_ipe_params_ip_t s_ipe_params_ip;
739 ive_ctl_set_ipe_params_op_t s_ipe_params_op;
740 IV_STATUS_T status;
741
742 s_ipe_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
743 s_ipe_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_IPE_PARAMS;
744
745 s_ipe_params_ip.u4_enable_intra_4x4 = mIntra4x4;
746 s_ipe_params_ip.u4_enc_speed_preset = mEncSpeed;
747 s_ipe_params_ip.u4_constrained_intra_pred = mConstrainedIntraFlag;
748
749 s_ipe_params_ip.u4_timestamp_high = -1;
750 s_ipe_params_ip.u4_timestamp_low = -1;
751
752 s_ipe_params_ip.u4_size = sizeof(ive_ctl_set_ipe_params_ip_t);
753 s_ipe_params_op.u4_size = sizeof(ive_ctl_set_ipe_params_op_t);
754
755 status = ive_api_function(mCodecCtx, &s_ipe_params_ip, &s_ipe_params_op);
756 if (status != IV_SUCCESS) {
757 ALOGE("Unable to set ipe params = 0x%x\n",
758 s_ipe_params_op.u4_error_code);
759 return C2_CORRUPTED;
760 }
761 return C2_OK;
762 }
763
764 c2_status_t C2SoftAvcEnc::setBitRate() {
765 ive_ctl_set_bitrate_ip_t s_bitrate_ip;
766 ive_ctl_set_bitrate_op_t s_bitrate_op;
767 IV_STATUS_T status;
768
769 s_bitrate_ip.e_cmd = IVE_CMD_VIDEO_CTL;
770 s_bitrate_ip.e_sub_cmd = IVE_CMD_CTL_SET_BITRATE;
771
772 s_bitrate_ip.u4_target_bitrate = mBitrate->value;
773
774 s_bitrate_ip.u4_timestamp_high = -1;
775 s_bitrate_ip.u4_timestamp_low = -1;
776
777 s_bitrate_ip.u4_size = sizeof(ive_ctl_set_bitrate_ip_t);
778 s_bitrate_op.u4_size = sizeof(ive_ctl_set_bitrate_op_t);
779
780 status = ive_api_function(mCodecCtx, &s_bitrate_ip, &s_bitrate_op);
781 if (status != IV_SUCCESS) {
782 ALOGE("Unable to set bit rate = 0x%x\n", s_bitrate_op.u4_error_code);
783 return C2_CORRUPTED;
784 }
785 return C2_OK;
786 }
787
788 c2_status_t C2SoftAvcEnc::setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type) {
789 ive_ctl_set_frame_type_ip_t s_frame_type_ip;
790 ive_ctl_set_frame_type_op_t s_frame_type_op;
791 IV_STATUS_T status;
792 s_frame_type_ip.e_cmd = IVE_CMD_VIDEO_CTL;
793 s_frame_type_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMETYPE;
794
795 s_frame_type_ip.e_frame_type = e_frame_type;
796
797 s_frame_type_ip.u4_timestamp_high = -1;
798 s_frame_type_ip.u4_timestamp_low = -1;
799
800 s_frame_type_ip.u4_size = sizeof(ive_ctl_set_frame_type_ip_t);
801 s_frame_type_op.u4_size = sizeof(ive_ctl_set_frame_type_op_t);
802
803 status = ive_api_function(mCodecCtx, &s_frame_type_ip, &s_frame_type_op);
804 if (status != IV_SUCCESS) {
805 ALOGE("Unable to set frame type = 0x%x\n",
806 s_frame_type_op.u4_error_code);
807 return C2_CORRUPTED;
808 }
809 return C2_OK;
810 }
811
812 c2_status_t C2SoftAvcEnc::setQp() {
813 ive_ctl_set_qp_ip_t s_qp_ip;
814 ive_ctl_set_qp_op_t s_qp_op;
815 IV_STATUS_T status;
816
817 ALOGV("in setQp()");
818
819 // set the defaults
820 s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
821 s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
822
823 // TODO: refactor with same algorithm in the PictureQuantizationSetter()
824 int32_t iMin = DEFAULT_I_QP_MIN, pMin = DEFAULT_P_QP_MIN, bMin = DEFAULT_B_QP_MIN;
825 int32_t iMax = DEFAULT_I_QP_MAX, pMax = DEFAULT_P_QP_MAX, bMax = DEFAULT_B_QP_MAX;
826
827 IntfImpl::Lock lock = mIntf->lock();
828
829 std::shared_ptr<C2StreamPictureQuantizationTuning::output> qp =
830 mIntf->getPictureQuantization_l();
831 for (size_t i = 0; i < qp->flexCount(); ++i) {
832 const C2PictureQuantizationStruct &layer = qp->m.values[i];
833
834 if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
835 iMax = layer.max;
836 iMin = layer.min;
837 ALOGV("iMin %d iMax %d", iMin, iMax);
838 } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
839 pMax = layer.max;
840 pMin = layer.min;
841 ALOGV("pMin %d pMax %d", pMin, pMax);
842 } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
843 bMax = layer.max;
844 bMin = layer.min;
845 ALOGV("bMin %d bMax %d", bMin, bMax);
846 }
847 }
848
849 s_qp_ip.u4_i_qp_max = iMax;
850 s_qp_ip.u4_i_qp_min = iMin;
851 s_qp_ip.u4_p_qp_max = pMax;
852 s_qp_ip.u4_p_qp_min = pMin;
853 s_qp_ip.u4_b_qp_max = bMax;
854 s_qp_ip.u4_b_qp_min = bMin;
855
856 // ensure initial qp values are within our newly configured bounds...
857 s_qp_ip.u4_i_qp = std::clamp(DEFAULT_I_QP, iMin, iMax);
858 s_qp_ip.u4_p_qp = std::clamp(DEFAULT_P_QP, pMin, pMax);
859 s_qp_ip.u4_b_qp = std::clamp(DEFAULT_B_QP, bMin, bMax);
860
861 ALOGV("setQp(): i %d-%d p %d-%d b %d-%d", iMin, iMax, pMin, pMax, bMin, bMax);
862
863
864 s_qp_ip.u4_timestamp_high = -1;
865 s_qp_ip.u4_timestamp_low = -1;
866
867 s_qp_ip.u4_size = sizeof(ive_ctl_set_qp_ip_t);
868 s_qp_op.u4_size = sizeof(ive_ctl_set_qp_op_t);
869
870 status = ive_api_function(mCodecCtx, &s_qp_ip, &s_qp_op);
871 if (status != IV_SUCCESS) {
872 ALOGE("Unable to set qp 0x%x\n", s_qp_op.u4_error_code);
873 return C2_CORRUPTED;
874 }
875 return C2_OK;
876 }
877
878 c2_status_t C2SoftAvcEnc::setEncMode(IVE_ENC_MODE_T e_enc_mode) {
879 IV_STATUS_T status;
880 ive_ctl_set_enc_mode_ip_t s_enc_mode_ip;
881 ive_ctl_set_enc_mode_op_t s_enc_mode_op;
882
883 s_enc_mode_ip.e_cmd = IVE_CMD_VIDEO_CTL;
884 s_enc_mode_ip.e_sub_cmd = IVE_CMD_CTL_SET_ENC_MODE;
885
886 s_enc_mode_ip.e_enc_mode = e_enc_mode;
887
888 s_enc_mode_ip.u4_timestamp_high = -1;
889 s_enc_mode_ip.u4_timestamp_low = -1;
890
891 s_enc_mode_ip.u4_size = sizeof(ive_ctl_set_enc_mode_ip_t);
892 s_enc_mode_op.u4_size = sizeof(ive_ctl_set_enc_mode_op_t);
893
894 status = ive_api_function(mCodecCtx, &s_enc_mode_ip, &s_enc_mode_op);
895 if (status != IV_SUCCESS) {
896 ALOGE("Unable to set in header encode mode = 0x%x\n",
897 s_enc_mode_op.u4_error_code);
898 return C2_CORRUPTED;
899 }
900 return C2_OK;
901 }
902
903 c2_status_t C2SoftAvcEnc::setVbvParams() {
904 ive_ctl_set_vbv_params_ip_t s_vbv_ip;
905 ive_ctl_set_vbv_params_op_t s_vbv_op;
906 IV_STATUS_T status;
907
908 s_vbv_ip.e_cmd = IVE_CMD_VIDEO_CTL;
909 s_vbv_ip.e_sub_cmd = IVE_CMD_CTL_SET_VBV_PARAMS;
910
911 s_vbv_ip.u4_vbv_buf_size = 0;
912 s_vbv_ip.u4_vbv_buffer_delay = 1000;
913
914 s_vbv_ip.u4_timestamp_high = -1;
915 s_vbv_ip.u4_timestamp_low = -1;
916
917 s_vbv_ip.u4_size = sizeof(ive_ctl_set_vbv_params_ip_t);
918 s_vbv_op.u4_size = sizeof(ive_ctl_set_vbv_params_op_t);
919
920 status = ive_api_function(mCodecCtx, &s_vbv_ip, &s_vbv_op);
921 if (status != IV_SUCCESS) {
922 ALOGE("Unable to set VBV params = 0x%x\n", s_vbv_op.u4_error_code);
923 return C2_CORRUPTED;
924 }
925 return C2_OK;
926 }
927
928 c2_status_t C2SoftAvcEnc::setAirParams() {
929 ive_ctl_set_air_params_ip_t s_air_ip;
930 ive_ctl_set_air_params_op_t s_air_op;
931 IV_STATUS_T status;
932
933 s_air_ip.e_cmd = IVE_CMD_VIDEO_CTL;
934 s_air_ip.e_sub_cmd = IVE_CMD_CTL_SET_AIR_PARAMS;
935
936 s_air_ip.e_air_mode =
937 (mIntraRefresh->mode == C2Config::INTRA_REFRESH_DISABLED || mIntraRefresh->period < 1)
938 ? IVE_AIR_MODE_NONE : IVE_AIR_MODE_CYCLIC;
939 s_air_ip.u4_air_refresh_period = mIntraRefresh->period;
940
941 s_air_ip.u4_timestamp_high = -1;
942 s_air_ip.u4_timestamp_low = -1;
943
944 s_air_ip.u4_size = sizeof(ive_ctl_set_air_params_ip_t);
945 s_air_op.u4_size = sizeof(ive_ctl_set_air_params_op_t);
946
947 status = ive_api_function(mCodecCtx, &s_air_ip, &s_air_op);
948 if (status != IV_SUCCESS) {
949 ALOGE("Unable to set air params = 0x%x\n", s_air_op.u4_error_code);
950 return C2_CORRUPTED;
951 }
952 return C2_OK;
953 }
954
955 c2_status_t C2SoftAvcEnc::setMeParams() {
956 IV_STATUS_T status;
957 ive_ctl_set_me_params_ip_t s_me_params_ip;
958 ive_ctl_set_me_params_op_t s_me_params_op;
959
960 s_me_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
961 s_me_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_ME_PARAMS;
962
963 s_me_params_ip.u4_enable_fast_sad = mEnableFastSad;
964 s_me_params_ip.u4_enable_alt_ref = mEnableAltRef;
965
966 s_me_params_ip.u4_enable_hpel = mHalfPelEnable;
967 s_me_params_ip.u4_enable_qpel = DEFAULT_QPEL;
968 s_me_params_ip.u4_me_speed_preset = DEFAULT_ME_SPEED;
969 s_me_params_ip.u4_srch_rng_x = DEFAULT_SRCH_RNG_X;
970 s_me_params_ip.u4_srch_rng_y = DEFAULT_SRCH_RNG_Y;
971
972 s_me_params_ip.u4_timestamp_high = -1;
973 s_me_params_ip.u4_timestamp_low = -1;
974
975 s_me_params_ip.u4_size = sizeof(ive_ctl_set_me_params_ip_t);
976 s_me_params_op.u4_size = sizeof(ive_ctl_set_me_params_op_t);
977
978 status = ive_api_function(mCodecCtx, &s_me_params_ip, &s_me_params_op);
979 if (status != IV_SUCCESS) {
980 ALOGE("Unable to set me params = 0x%x\n", s_me_params_op.u4_error_code);
981 return C2_CORRUPTED;
982 }
983 return C2_OK;
984 }
985
986 c2_status_t C2SoftAvcEnc::setGopParams() {
987 IV_STATUS_T status;
988 ive_ctl_set_gop_params_ip_t s_gop_params_ip;
989 ive_ctl_set_gop_params_op_t s_gop_params_op;
990
991 s_gop_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
992 s_gop_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_GOP_PARAMS;
993
994 s_gop_params_ip.u4_i_frm_interval = mIInterval;
995 s_gop_params_ip.u4_idr_frm_interval = mIDRInterval;
996
997 s_gop_params_ip.u4_timestamp_high = -1;
998 s_gop_params_ip.u4_timestamp_low = -1;
999
1000 s_gop_params_ip.u4_size = sizeof(ive_ctl_set_gop_params_ip_t);
1001 s_gop_params_op.u4_size = sizeof(ive_ctl_set_gop_params_op_t);
1002
1003 status = ive_api_function(mCodecCtx, &s_gop_params_ip, &s_gop_params_op);
1004 if (status != IV_SUCCESS) {
1005 ALOGE("Unable to set GOP params = 0x%x\n",
1006 s_gop_params_op.u4_error_code);
1007 return C2_CORRUPTED;
1008 }
1009 return C2_OK;
1010 }
1011
1012 c2_status_t C2SoftAvcEnc::setProfileParams() {
1013 IntfImpl::Lock lock = mIntf->lock();
1014
1015 IV_STATUS_T status;
1016 ive_ctl_set_profile_params_ip_t s_profile_params_ip;
1017 ive_ctl_set_profile_params_op_t s_profile_params_op;
1018
1019 s_profile_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
1020 s_profile_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_PROFILE_PARAMS;
1021
1022 s_profile_params_ip.e_profile = mIntf->getProfile_l();
1023 if (s_profile_params_ip.e_profile == IV_PROFILE_BASE) {
1024 s_profile_params_ip.u4_entropy_coding_mode = 0;
1025 } else {
1026 s_profile_params_ip.u4_entropy_coding_mode = 1;
1027 }
1028 s_profile_params_ip.u4_timestamp_high = -1;
1029 s_profile_params_ip.u4_timestamp_low = -1;
1030
1031 s_profile_params_ip.u4_size = sizeof(ive_ctl_set_profile_params_ip_t);
1032 s_profile_params_op.u4_size = sizeof(ive_ctl_set_profile_params_op_t);
1033 lock.unlock();
1034
1035 status = ive_api_function(mCodecCtx, &s_profile_params_ip, &s_profile_params_op);
1036 if (status != IV_SUCCESS) {
1037 ALOGE("Unable to set profile params = 0x%x\n",
1038 s_profile_params_op.u4_error_code);
1039 return C2_CORRUPTED;
1040 }
1041 return C2_OK;
1042 }
1043
1044 c2_status_t C2SoftAvcEnc::setDeblockParams() {
1045 IV_STATUS_T status;
1046 ive_ctl_set_deblock_params_ip_t s_deblock_params_ip;
1047 ive_ctl_set_deblock_params_op_t s_deblock_params_op;
1048
1049 s_deblock_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
1050 s_deblock_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_DEBLOCK_PARAMS;
1051
1052 s_deblock_params_ip.u4_disable_deblock_level = mDisableDeblkLevel;
1053
1054 s_deblock_params_ip.u4_timestamp_high = -1;
1055 s_deblock_params_ip.u4_timestamp_low = -1;
1056
1057 s_deblock_params_ip.u4_size = sizeof(ive_ctl_set_deblock_params_ip_t);
1058 s_deblock_params_op.u4_size = sizeof(ive_ctl_set_deblock_params_op_t);
1059
1060 status = ive_api_function(mCodecCtx, &s_deblock_params_ip, &s_deblock_params_op);
1061 if (status != IV_SUCCESS) {
1062 ALOGE("Unable to enable/disable deblock params = 0x%x\n",
1063 s_deblock_params_op.u4_error_code);
1064 return C2_CORRUPTED;
1065 }
1066 return C2_OK;
1067 }
1068
1069 void C2SoftAvcEnc::logVersion() {
1070 ive_ctl_getversioninfo_ip_t s_ctl_ip;
1071 ive_ctl_getversioninfo_op_t s_ctl_op;
1072 UWORD8 au1_buf[512];
1073 IV_STATUS_T status;
1074
1075 s_ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL;
1076 s_ctl_ip.e_sub_cmd = IVE_CMD_CTL_GETVERSION;
1077 s_ctl_ip.u4_size = sizeof(ive_ctl_getversioninfo_ip_t);
1078 s_ctl_op.u4_size = sizeof(ive_ctl_getversioninfo_op_t);
1079 s_ctl_ip.pu1_version = au1_buf;
1080 s_ctl_ip.u4_version_bufsize = sizeof(au1_buf);
1081
1082 status = ive_api_function(mCodecCtx, (void *) &s_ctl_ip, (void *) &s_ctl_op);
1083
1084 if (status != IV_SUCCESS) {
1085 ALOGE("Error in getting version: 0x%x", s_ctl_op.u4_error_code);
1086 } else {
1087 ALOGV("Ittiam encoder version: %s", (char *)s_ctl_ip.pu1_version);
1088 }
1089 return;
1090 }
1091
1092 c2_status_t C2SoftAvcEnc::setVuiParams()
1093 {
1094 ColorAspects sfAspects;
1095 if (!C2Mapper::map(mColorAspects->primaries, &sfAspects.mPrimaries)) {
1096 sfAspects.mPrimaries = android::ColorAspects::PrimariesUnspecified;
1097 }
1098 if (!C2Mapper::map(mColorAspects->range, &sfAspects.mRange)) {
1099 sfAspects.mRange = android::ColorAspects::RangeUnspecified;
1100 }
1101 if (!C2Mapper::map(mColorAspects->matrix, &sfAspects.mMatrixCoeffs)) {
1102 sfAspects.mMatrixCoeffs = android::ColorAspects::MatrixUnspecified;
1103 }
1104 if (!C2Mapper::map(mColorAspects->transfer, &sfAspects.mTransfer)) {
1105 sfAspects.mTransfer = android::ColorAspects::TransferUnspecified;
1106 }
1107 int32_t primaries, transfer, matrixCoeffs;
1108 bool range;
1109 ColorUtils::convertCodecColorAspectsToIsoAspects(sfAspects,
1110 &primaries,
1111 &transfer,
1112 &matrixCoeffs,
1113 &range);
1114 ih264e_vui_ip_t s_vui_params_ip {};
1115 ih264e_vui_op_t s_vui_params_op {};
1116
1117 s_vui_params_ip.e_cmd = IVE_CMD_VIDEO_CTL;
1118 s_vui_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_VUI_PARAMS;
1119
1120 s_vui_params_ip.u1_video_signal_type_present_flag = 1;
1121 s_vui_params_ip.u1_colour_description_present_flag = 1;
1122 s_vui_params_ip.u1_colour_primaries = primaries;
1123 s_vui_params_ip.u1_transfer_characteristics = transfer;
1124 s_vui_params_ip.u1_matrix_coefficients = matrixCoeffs;
1125 s_vui_params_ip.u1_video_full_range_flag = range;
1126
1127 s_vui_params_ip.u4_size = sizeof(ih264e_vui_ip_t);
1128 s_vui_params_op.u4_size = sizeof(ih264e_vui_op_t);
1129
1130 IV_STATUS_T status = ih264e_api_function(mCodecCtx, &s_vui_params_ip,
1131 &s_vui_params_op);
1132 if(status != IV_SUCCESS)
1133 {
1134 ALOGE("Unable to set vui params = 0x%x\n",
1135 s_vui_params_op.u4_error_code);
1136 return C2_CORRUPTED;
1137 }
1138 return C2_OK;
1139 }
1140
1141 c2_status_t C2SoftAvcEnc::initEncoder() {
1142 IV_STATUS_T status;
1143 WORD32 level;
1144
1145 CHECK(!mStarted);
1146
1147 c2_status_t errType = C2_OK;
1148
1149 std::shared_ptr<C2StreamGopTuning::output> gop;
1150 {
1151 IntfImpl::Lock lock = mIntf->lock();
1152 mSize = mIntf->getSize_l();
1153 mBitrate = mIntf->getBitrate_l();
1154 mFrameRate = mIntf->getFrameRate_l();
1155 mIntraRefresh = mIntf->getIntraRefresh_l();
1156 mAVCEncLevel = mIntf->getLevel_l();
1157 mIInterval = mIntf->getSyncFramePeriod_l();
1158 mIDRInterval = mIntf->getSyncFramePeriod_l();
1159 gop = mIntf->getGop_l();
1160 mColorAspects = mIntf->getCodedColorAspects_l();
1161 }
1162 if (gop && gop->flexCount() > 0) {
1163 uint32_t syncInterval = 1;
1164 uint32_t iInterval = 1;
1165 uint32_t maxBframes = 0;
1166 ParseGop(*gop, &syncInterval, &iInterval, &maxBframes);
1167 if (syncInterval > 0) {
1168 ALOGD("Updating IDR interval from GOP: old %u new %u", mIDRInterval, syncInterval);
1169 mIDRInterval = syncInterval;
1170 }
1171 if (iInterval > 0) {
1172 ALOGD("Updating I interval from GOP: old %u new %u", mIInterval, iInterval);
1173 mIInterval = iInterval;
1174 }
1175 if (mBframes != maxBframes) {
1176 ALOGD("Updating max B frames from GOP: old %u new %u", mBframes, maxBframes);
1177 mBframes = maxBframes;
1178 }
1179 }
1180 uint32_t width = mSize->width;
1181 uint32_t height = mSize->height;
1182
1183 mStride = width;
1184
1185     // Assume the worst-case output buffer size equals the uncompressed input size (YUV420: 3/2 bytes per pixel)
1186 mOutBufferSize = std::max(width * height * 3 / 2, kMinOutBufferSize);
1187
1188 // TODO
1189 mIvVideoColorFormat = IV_YUV_420P;
1190
1191 ALOGD("Params width %d height %d level %d colorFormat %d bframes %d", width,
1192 height, mAVCEncLevel, mIvVideoColorFormat, mBframes);
1193
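    /* Bring-up sequence for the Ittiam encoder: query how many memory records it
     * needs, have it fill in their size/alignment requirements, allocate them,
     * create the codec instance on top of them, then apply the control settings. */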
1194 /* Getting Number of MemRecords */
1195 {
1196 iv_num_mem_rec_ip_t s_num_mem_rec_ip;
1197 iv_num_mem_rec_op_t s_num_mem_rec_op;
1198
1199 s_num_mem_rec_ip.u4_size = sizeof(iv_num_mem_rec_ip_t);
1200 s_num_mem_rec_op.u4_size = sizeof(iv_num_mem_rec_op_t);
1201
1202 s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
1203
1204 status = ive_api_function(nullptr, &s_num_mem_rec_ip, &s_num_mem_rec_op);
1205
1206 if (status != IV_SUCCESS) {
1207 ALOGE("Get number of memory records failed = 0x%x\n",
1208 s_num_mem_rec_op.u4_error_code);
1209 return C2_CORRUPTED;
1210 }
1211
1212 mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
1213 }
1214
1215 /* Allocate array to hold memory records */
1216 if (mNumMemRecords > SIZE_MAX / sizeof(iv_mem_rec_t)) {
1217 ALOGE("requested memory size is too big.");
1218 return C2_CORRUPTED;
1219 }
1220 mMemRecords = (iv_mem_rec_t *)malloc(mNumMemRecords * sizeof(iv_mem_rec_t));
1221 if (nullptr == mMemRecords) {
1222 ALOGE("Unable to allocate memory for hold memory records: Size %zu",
1223 mNumMemRecords * sizeof(iv_mem_rec_t));
1224 mSignalledError = true;
1225 return C2_CORRUPTED;
1226 }
1227
1228 {
1229 iv_mem_rec_t *ps_mem_rec;
1230 ps_mem_rec = mMemRecords;
1231 for (size_t i = 0; i < mNumMemRecords; i++) {
1232 ps_mem_rec->u4_size = sizeof(iv_mem_rec_t);
1233 ps_mem_rec->pv_base = nullptr;
1234 ps_mem_rec->u4_mem_size = 0;
1235 ps_mem_rec->u4_mem_alignment = 0;
1236 ps_mem_rec->e_mem_type = IV_NA_MEM_TYPE;
1237
1238 ps_mem_rec++;
1239 }
1240 }
1241
1242 /* Getting MemRecords Attributes */
1243 {
1244 ih264e_fill_mem_rec_ip_t s_ih264e_mem_rec_ip = {};
1245 ih264e_fill_mem_rec_op_t s_ih264e_mem_rec_op = {};
1246 iv_fill_mem_rec_ip_t *ps_fill_mem_rec_ip = &s_ih264e_mem_rec_ip.s_ive_ip;
1247 iv_fill_mem_rec_op_t *ps_fill_mem_rec_op = &s_ih264e_mem_rec_op.s_ive_op;
1248
1249 ps_fill_mem_rec_ip->u4_size = sizeof(ih264e_fill_mem_rec_ip_t);
1250 ps_fill_mem_rec_op->u4_size = sizeof(ih264e_fill_mem_rec_op_t);
1251
1252 ps_fill_mem_rec_ip->e_cmd = IV_CMD_FILL_NUM_MEM_REC;
1253 ps_fill_mem_rec_ip->ps_mem_rec = mMemRecords;
1254 ps_fill_mem_rec_ip->u4_num_mem_rec = mNumMemRecords;
1255 ps_fill_mem_rec_ip->u4_max_wd = width;
1256 ps_fill_mem_rec_ip->u4_max_ht = height;
1257 ps_fill_mem_rec_ip->u4_max_level = mAVCEncLevel;
1258 ps_fill_mem_rec_ip->e_color_format = DEFAULT_INP_COLOR_FORMAT;
1259 ps_fill_mem_rec_ip->u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
1260 ps_fill_mem_rec_ip->u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
1261 ps_fill_mem_rec_ip->u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
1262 ps_fill_mem_rec_ip->u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
1263
1264 status = ive_api_function(nullptr, &s_ih264e_mem_rec_ip, &s_ih264e_mem_rec_op);
1265
1266 if (status != IV_SUCCESS) {
1267 ALOGE("Fill memory records failed = 0x%x\n",
1268 ps_fill_mem_rec_op->u4_error_code);
1269 return C2_CORRUPTED;
1270 }
1271 }
1272
1273 /* Allocating Memory for Mem Records */
1274 {
1275 WORD32 total_size;
1276 iv_mem_rec_t *ps_mem_rec;
1277 total_size = 0;
1278 ps_mem_rec = mMemRecords;
1279
1280 for (size_t i = 0; i < mNumMemRecords; i++) {
1281 ps_mem_rec->pv_base = ive_aligned_malloc(
1282 ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
1283 if (ps_mem_rec->pv_base == nullptr) {
1284 ALOGE("Allocation failure for mem record id %zu size %u\n", i,
1285 ps_mem_rec->u4_mem_size);
1286 return C2_CORRUPTED;
1287
1288 }
1289 total_size += ps_mem_rec->u4_mem_size;
1290
1291 ps_mem_rec++;
1292 }
1293 }
1294
1295 /* Codec Instance Creation */
1296 {
1297 ih264e_init_ip_t s_enc_ip = {};
1298 ih264e_init_op_t s_enc_op = {};
1299
1300 ive_init_ip_t *ps_init_ip = &s_enc_ip.s_ive_ip;
1301 ive_init_op_t *ps_init_op = &s_enc_op.s_ive_op;
1302
1303 mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
1304 mCodecCtx->u4_size = sizeof(iv_obj_t);
1305 mCodecCtx->pv_fxns = (void *)ive_api_function;
1306
1307 ps_init_ip->u4_size = sizeof(ih264e_init_ip_t);
1308 ps_init_op->u4_size = sizeof(ih264e_init_op_t);
1309
1310 ps_init_ip->e_cmd = IV_CMD_INIT;
1311 ps_init_ip->u4_num_mem_rec = mNumMemRecords;
1312 ps_init_ip->ps_mem_rec = mMemRecords;
1313 ps_init_ip->u4_max_wd = width;
1314 ps_init_ip->u4_max_ht = height;
1315 ps_init_ip->u4_max_ref_cnt = DEFAULT_MAX_REF_FRM;
1316 ps_init_ip->u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM;
1317 ps_init_ip->u4_max_level = mAVCEncLevel;
1318 ps_init_ip->e_inp_color_fmt = mIvVideoColorFormat;
1319
1320 if (mReconEnable || mPSNREnable) {
1321 ps_init_ip->u4_enable_recon = 1;
1322 } else {
1323 ps_init_ip->u4_enable_recon = 0;
1324 }
1325 ps_init_ip->e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT;
1326 ps_init_ip->e_rc_mode = DEFAULT_RC_MODE;
1327 ps_init_ip->u4_max_framerate = DEFAULT_MAX_FRAMERATE;
1328 ps_init_ip->u4_max_bitrate = DEFAULT_MAX_BITRATE;
1329 ps_init_ip->u4_num_bframes = mBframes;
1330 ps_init_ip->e_content_type = IV_PROGRESSIVE;
1331 ps_init_ip->u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X;
1332 ps_init_ip->u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y;
1333 ps_init_ip->e_slice_mode = mSliceMode;
1334 ps_init_ip->u4_slice_param = mSliceParam;
1335 ps_init_ip->e_arch = mArch;
1336 ps_init_ip->e_soc = DEFAULT_SOC;
1337
1338 status = ive_api_function(mCodecCtx, &s_enc_ip, &s_enc_op);
1339
1340 if (status != IV_SUCCESS) {
1341 ALOGE("Init encoder failed = 0x%x\n", ps_init_op->u4_error_code);
1342 return C2_CORRUPTED;
1343 }
1344 }
1345
1346 /* Get Codec Version */
1347 logVersion();
1348
1349 /* set processor details */
1350 setNumCores();
1351
1352 /* Video control Set Frame dimensions */
1353 setDimensions();
1354
1355 /* Video control Set Frame rates */
1356 setFrameRate();
1357
1358 /* Video control Set IPE Params */
1359 setIpeParams();
1360
1361 /* Video control Set Bitrate */
1362 setBitRate();
1363
1364 /* Video control Set QP */
1365 setQp();
1366
1367 /* Video control Set AIR params */
1368 setAirParams();
1369
1370 /* Video control Set VBV params */
1371 setVbvParams();
1372
1373 /* Video control Set Motion estimation params */
1374 setMeParams();
1375
1376 /* Video control Set GOP params */
1377 setGopParams();
1378
1379 /* Video control Set Deblock params */
1380 setDeblockParams();
1381
1382 /* Video control Set Profile params */
1383 setProfileParams();
1384
1385 /* Video control Set VUI params */
1386 setVuiParams();
1387
1388 /* Video control Set in Encode header mode */
1389 setEncMode(IVE_ENC_MODE_HEADER);
1390
1391 ALOGV("init_codec successfull");
1392
1393 mSpsPpsHeaderReceived = false;
1394 mStarted = true;
1395
1396 return C2_OK;
1397 }
1398
1399 c2_status_t C2SoftAvcEnc::releaseEncoder() {
1400 IV_STATUS_T status = IV_SUCCESS;
1401 iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip;
1402 iv_retrieve_mem_rec_op_t s_retrieve_mem_op;
1403 iv_mem_rec_t *ps_mem_rec;
1404
1405 if (!mStarted) {
1406 return C2_OK;
1407 }
1408
1409 s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t);
1410 s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t);
1411 s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC;
1412 s_retrieve_mem_ip.ps_mem_rec = mMemRecords;
1413
1414 status = ive_api_function(mCodecCtx, &s_retrieve_mem_ip, &s_retrieve_mem_op);
1415
1416 if (status != IV_SUCCESS) {
1417 ALOGE("Unable to retrieve memory records = 0x%x\n",
1418 s_retrieve_mem_op.u4_error_code);
1419 return C2_CORRUPTED;
1420 }
1421
1422 /* Free memory records */
1423 ps_mem_rec = mMemRecords;
1424 for (size_t i = 0; i < s_retrieve_mem_op.u4_num_mem_rec_filled; i++) {
1425 if (ps_mem_rec) ive_aligned_free(ps_mem_rec->pv_base);
1426 else {
1427 ALOGE("memory record is null.");
1428 return C2_CORRUPTED;
1429 }
1430 ps_mem_rec++;
1431 }
1432
1433 if (mMemRecords) free(mMemRecords);
1434
1435 // clear other pointers into the space being free()d
1436 mCodecCtx = nullptr;
1437
1438 mStarted = false;
1439
1440 return C2_OK;
1441 }
1442
1443 c2_status_t C2SoftAvcEnc::setEncodeArgs(
1444 ive_video_encode_ip_t *ps_encode_ip,
1445 ive_video_encode_op_t *ps_encode_op,
1446 const C2GraphicView *const input,
1447 uint8_t *base,
1448 uint32_t capacity,
1449 uint64_t workIndex) {
1450 iv_raw_buf_t *ps_inp_raw_buf;
1451 memset(ps_encode_ip, 0, sizeof(*ps_encode_ip));
1452 memset(ps_encode_op, 0, sizeof(*ps_encode_op));
1453
1454 ps_inp_raw_buf = &ps_encode_ip->s_inp_buf;
1455 ps_encode_ip->s_out_buf.pv_buf = base;
1456 ps_encode_ip->s_out_buf.u4_bytes = 0;
1457 ps_encode_ip->s_out_buf.u4_bufsize = capacity;
1458 ps_encode_ip->u4_size = sizeof(ive_video_encode_ip_t);
1459 ps_encode_op->u4_size = sizeof(ive_video_encode_op_t);
1460
1461 ps_encode_ip->e_cmd = IVE_CMD_VIDEO_ENCODE;
1462 ps_encode_ip->pv_bufs = nullptr;
1463 ps_encode_ip->pv_mb_info = nullptr;
1464 ps_encode_ip->pv_pic_info = nullptr;
1465 ps_encode_ip->u4_mb_info_type = 0;
1466 ps_encode_ip->u4_pic_info_type = 0;
1467 ps_encode_ip->u4_is_last = 0;
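    // Carry the 64-bit work index through the encoder in the two 32-bit timestamp
    // fields so the output can be matched back to the work item that produced it.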
1468 ps_encode_ip->u4_timestamp_high = workIndex >> 32;
1469 ps_encode_ip->u4_timestamp_low = workIndex & 0xFFFFFFFF;
1470 ps_encode_op->s_out_buf.pv_buf = nullptr;
1471
1472 /* Initialize color formats */
1473 memset(ps_inp_raw_buf, 0, sizeof(iv_raw_buf_t));
1474 ps_inp_raw_buf->u4_size = sizeof(iv_raw_buf_t);
1475 ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat;
1476 if (input == nullptr) {
1477 if (mSawInputEOS) {
1478 ps_encode_ip->u4_is_last = 1;
1479 }
1480 return C2_OK;
1481 }
1482
1483 if (input->width() < mSize->width ||
1484 input->height() < mSize->height) {
1485 /* Expect width height to be configured */
1486 ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", input->width(),
1487 mSize->width, input->height(), mSize->height);
1488 return C2_BAD_VALUE;
1489 }
1490 ALOGV("width = %d, height = %d", input->width(), input->height());
1491 const C2PlanarLayout &layout = input->layout();
1492 uint8_t *yPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_Y]);
1493 uint8_t *uPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_U]);
1494 uint8_t *vPlane = const_cast<uint8_t *>(input->data()[C2PlanarLayout::PLANE_V]);
1495 int32_t yStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
1496 int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
1497 int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
1498
1499 uint32_t width = mSize->width;
1500 uint32_t height = mSize->height;
1501 // width and height are always even (as block size is 16x16)
1502 CHECK_EQ((width & 1u), 0u);
1503 CHECK_EQ((height & 1u), 0u);
1504 size_t yPlaneSize = width * height;
1505
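    // Normalize the input to planar I420: RGB(A) sources are color-converted and
    // non-contiguous YUV420 layouts are copied into a scratch buffer; an already
    // I420-compatible view is used in place.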
1506 switch (layout.type) {
1507 case C2PlanarLayout::TYPE_RGB:
1508 [[fallthrough]];
1509 case C2PlanarLayout::TYPE_RGBA: {
1510 ALOGV("yPlaneSize = %zu", yPlaneSize);
1511 MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
1512 mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
1513 yPlane = conversionBuffer.data();
1514 uPlane = yPlane + yPlaneSize;
1515 vPlane = uPlane + yPlaneSize / 4;
1516 yStride = width;
1517 uStride = vStride = yStride / 2;
1518 ConvertRGBToPlanarYUV(yPlane, yStride, height, conversionBuffer.size(), *input);
1519 break;
1520 }
1521 case C2PlanarLayout::TYPE_YUV: {
1522 if (!IsYUV420(*input)) {
1523 ALOGE("input is not YUV420");
1524 return C2_BAD_VALUE;
1525 }
1526
1527 if (layout.planes[layout.PLANE_Y].colInc == 1
1528 && layout.planes[layout.PLANE_U].colInc == 1
1529 && layout.planes[layout.PLANE_V].colInc == 1
1530 && uStride == vStride
1531 && yStride == 2 * vStride) {
1532 // I420 compatible - already set up above
1533 break;
1534 }
1535
1536 // copy to I420
1537 yStride = width;
1538 uStride = vStride = yStride / 2;
1539 MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2);
1540 mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer);
1541 MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, yStride, height);
1542 status_t err = ImageCopy(conversionBuffer.data(), &img, *input);
1543 if (err != OK) {
1544 ALOGE("Buffer conversion failed: %d", err);
1545 return C2_BAD_VALUE;
1546 }
1547 yPlane = conversionBuffer.data();
1548 uPlane = yPlane + yPlaneSize;
1549 vPlane = uPlane + yPlaneSize / 4;
1550 break;
1551
1552 }
1553
1554 case C2PlanarLayout::TYPE_YUVA:
1555 ALOGE("YUVA plane type is not supported");
1556 return C2_BAD_VALUE;
1557
1558 default:
1559 ALOGE("Unrecognized plane type: %d", layout.type);
1560 return C2_BAD_VALUE;
1561 }
1562
1563 switch (mIvVideoColorFormat) {
1564 case IV_YUV_420P:
1565 {
1566 // input buffer is supposed to be const but Ittiam API wants bare pointer.
1567 ps_inp_raw_buf->apv_bufs[0] = yPlane;
1568 ps_inp_raw_buf->apv_bufs[1] = uPlane;
1569 ps_inp_raw_buf->apv_bufs[2] = vPlane;
1570
1571 ps_inp_raw_buf->au4_wd[0] = mSize->width;
1572 ps_inp_raw_buf->au4_wd[1] = mSize->width / 2;
1573 ps_inp_raw_buf->au4_wd[2] = mSize->width / 2;
1574
1575 ps_inp_raw_buf->au4_ht[0] = mSize->height;
1576 ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
1577 ps_inp_raw_buf->au4_ht[2] = mSize->height / 2;
1578
1579 ps_inp_raw_buf->au4_strd[0] = yStride;
1580 ps_inp_raw_buf->au4_strd[1] = uStride;
1581 ps_inp_raw_buf->au4_strd[2] = vStride;
1582 break;
1583 }
1584
1585 case IV_YUV_422ILE:
1586 {
1587 // TODO
1588 // ps_inp_raw_buf->apv_bufs[0] = pu1_buf;
1589 // ps_inp_raw_buf->au4_wd[0] = mWidth * 2;
1590 // ps_inp_raw_buf->au4_ht[0] = mHeight;
1591 // ps_inp_raw_buf->au4_strd[0] = mStride * 2;
1592 break;
1593 }
1594
1595 case IV_YUV_420SP_UV:
1596 case IV_YUV_420SP_VU:
1597 default:
1598 {
1599 ps_inp_raw_buf->apv_bufs[0] = yPlane;
1600 ps_inp_raw_buf->apv_bufs[1] = uPlane;
1601
1602 ps_inp_raw_buf->au4_wd[0] = mSize->width;
1603 ps_inp_raw_buf->au4_wd[1] = mSize->width;
1604
1605 ps_inp_raw_buf->au4_ht[0] = mSize->height;
1606 ps_inp_raw_buf->au4_ht[1] = mSize->height / 2;
1607
1608 ps_inp_raw_buf->au4_strd[0] = yStride;
1609 ps_inp_raw_buf->au4_strd[1] = uStride;
1610 break;
1611 }
1612 }
1613 return C2_OK;
1614 }
1615
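// Wraps the encoded bytes in a linear C2Buffer, flags IDR output as a sync frame,
// and completes either the given work item or the pending one matching workIndex.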
1616 void C2SoftAvcEnc::finishWork(uint64_t workIndex, const std::unique_ptr<C2Work> &work,
1617 ive_video_encode_op_t *ps_encode_op) {
1618 std::shared_ptr<C2Buffer> buffer =
1619 createLinearBuffer(mOutBlock, 0, ps_encode_op->s_out_buf.u4_bytes);
1620 if (IV_IDR_FRAME == ps_encode_op->u4_encoded_frame_type) {
1621 ALOGV("IDR frame produced");
1622 buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
1623 0u /* stream id */, C2Config::SYNC_FRAME));
1624 }
1625 mOutBlock = nullptr;
1626
1627 auto fillWork = [buffer](const std::unique_ptr<C2Work> &work) {
1628 work->worklets.front()->output.flags = (C2FrameData::flags_t)0;
1629 work->worklets.front()->output.buffers.clear();
1630 work->worklets.front()->output.buffers.push_back(buffer);
1631 work->worklets.front()->output.ordinal = work->input.ordinal;
1632 work->workletsProcessed = 1u;
1633 };
1634 if (work && c2_cntr64_t(workIndex) == work->input.ordinal.frameIndex) {
1635 fillWork(work);
1636 if (mSawInputEOS) {
1637 work->worklets.front()->output.flags = C2FrameData::FLAG_END_OF_STREAM;
1638 }
1639 } else {
1640 finish(workIndex, fillWork);
1641 }
1642 }
1643
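// Per-work entry point: lazily initializes the encoder, emits SPS/PPS as codec-specific
// data on the first call, applies any pending dynamic config, then encodes the input
// frame, retrying with a larger output block on bitstream-buffer overflow.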
1644 void C2SoftAvcEnc::process(
1645 const std::unique_ptr<C2Work> &work,
1646 const std::shared_ptr<C2BlockPool> &pool) {
1647 // Initialize output work
1648 work->result = C2_OK;
1649 work->workletsProcessed = 0u;
1650 work->worklets.front()->output.flags = work->input.flags;
1651
1652 IV_STATUS_T status;
1653 WORD32 timeDelay = 0;
1654 WORD32 timeTaken = 0;
1655 uint64_t workIndex = work->input.ordinal.frameIndex.peekull();
1656
1657 // Initialize encoder if not already initialized
1658 if (mCodecCtx == nullptr) {
1659 if (C2_OK != initEncoder()) {
1660 ALOGE("Failed to initialize encoder");
1661 mSignalledError = true;
1662 work->result = C2_CORRUPTED;
1663 work->workletsProcessed = 1u;
1664 return;
1665 }
1666 }
1667 if (mSignalledError) {
1668 return;
1669 }
1671 c2_status_t error;
1672 ih264e_video_encode_ip_t s_video_encode_ip = {};
1673 ih264e_video_encode_op_t s_video_encode_op = {};
1674 ive_video_encode_ip_t *ps_encode_ip = &s_video_encode_ip.s_ive_ip;
1675 ive_video_encode_op_t *ps_encode_op = &s_video_encode_op.s_ive_op;
1676 memset(ps_encode_op, 0, sizeof(*ps_encode_op));
1677
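// First call only: run the encoder once with no input so it emits the SPS/PPS header,
// then forward those bytes to the framework as C2StreamInitDataInfo (CSD).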
1678 if (!mSpsPpsHeaderReceived) {
1679 constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE;
1680 uint8_t header[kHeaderLength];
1681 error = setEncodeArgs(
1682 ps_encode_ip, ps_encode_op, nullptr, header, kHeaderLength, workIndex);
1683 if (error != C2_OK) {
1684 ALOGE("setEncodeArgs failed: %d", error);
1685 mSignalledError = true;
1686 work->result = C2_CORRUPTED;
1687 work->workletsProcessed = 1u;
1688 return;
1689 }
1690 status = ive_api_function(mCodecCtx, ps_encode_ip, ps_encode_op);
1691
1692 if (IV_SUCCESS != status) {
1693 ALOGE("Encode header failed = 0x%x\n",
1694 ps_encode_op->u4_error_code);
1695 work->workletsProcessed = 1u;
1696 return;
1697 } else {
1698 ALOGV("Bytes Generated in header %d\n",
1699 ps_encode_op->s_out_buf.u4_bytes);
1700 }
1701
1702 mSpsPpsHeaderReceived = true;
1703
1704 std::unique_ptr<C2StreamInitDataInfo::output> csd =
1705 C2StreamInitDataInfo::output::AllocUnique(ps_encode_op->s_out_buf.u4_bytes, 0u);
1706 if (!csd) {
1707 ALOGE("CSD allocation failed");
1708 mSignalledError = true;
1709 work->result = C2_NO_MEMORY;
1710 work->workletsProcessed = 1u;
1711 return;
1712 }
1713 memcpy(csd->m.value, header, ps_encode_op->s_out_buf.u4_bytes);
1714 DUMP_TO_FILE(
1715 mOutFile, csd->m.value, csd->flexCount());
1716 // Push the CSD last: csd is no longer valid once it has been moved into configUpdate.
1717 work->worklets.front()->output.configUpdate.push_back(std::move(csd));
1718 if (work->input.buffers.empty()) {
1719 work->workletsProcessed = 1u;
1720 return;
1721 }
1722 }
1723
1724 // handle dynamic config parameters
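// Snapshot the current settings under the interface lock, release the lock, then push
// any changes (bitrate, intra refresh, forced IDR) to the encoder.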
1725 {
1726 IntfImpl::Lock lock = mIntf->lock();
1727 std::shared_ptr<C2StreamIntraRefreshTuning::output> intraRefresh = mIntf->getIntraRefresh_l();
1728 std::shared_ptr<C2StreamBitrateInfo::output> bitrate = mIntf->getBitrate_l();
1729 std::shared_ptr<C2StreamRequestSyncFrameTuning::output> requestSync = mIntf->getRequestSync_l();
1730 lock.unlock();
1731
1732 if (bitrate != mBitrate) {
1733 mBitrate = bitrate;
1734 setBitRate();
1735 }
1736
1737 if (intraRefresh != mIntraRefresh) {
1738 mIntraRefresh = intraRefresh;
1739 setAirParams();
1740 }
1741
1742 if (requestSync != mRequestSync) {
1743 // we can handle IDR immediately
1744 if (requestSync->value) {
1745 // unset request
1746 C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE);
1747 std::vector<std::unique_ptr<C2SettingResult>> failures;
1748 mIntf->config({ &clearSync }, C2_MAY_BLOCK, &failures);
1749 ALOGV("Got sync request");
1750 setFrameType(IV_IDR_FRAME);
1751 }
1752 mRequestSync = requestSync;
1753 }
1754 }
1755
1756 if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
1757 mSawInputEOS = true;
1758 }
1759
1770 std::shared_ptr<const C2GraphicView> view;
1771 std::shared_ptr<C2Buffer> inputBuffer;
1772 if (!work->input.buffers.empty()) {
1773 inputBuffer = work->input.buffers[0];
1774 view = std::make_shared<const C2GraphicView>(
1775 inputBuffer->data().graphicBlocks().front().map().get());
1776 if (view->error() != C2_OK) {
1777 ALOGE("graphic view map err = %d", view->error());
1778 work->workletsProcessed = 1u;
1779 return;
1780 }
1781 }
1782
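// Encode loop: normally runs once; if the codec reports IH264E_BITSTREAM_BUFFER_OVERFLOW,
// the output block size is doubled and the same frame is submitted again.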
1783 do {
1784 if (mSawInputEOS && work->input.buffers.empty()) break;
1785 if (!mOutBlock) {
1786 C2MemoryUsage usage = {C2MemoryUsage::CPU_READ,
1787 C2MemoryUsage::CPU_WRITE};
1788 // TODO: error handling, proper usage, etc.
1789 c2_status_t err =
1790 pool->fetchLinearBlock(mOutBufferSize, usage, &mOutBlock);
1791 if (err != C2_OK) {
1792 ALOGE("fetch linear block err = %d", err);
1793 work->result = err;
1794 work->workletsProcessed = 1u;
1795 return;
1796 }
1797 }
1798 C2WriteView wView = mOutBlock->map().get();
1799 if (wView.error() != C2_OK) {
1800 ALOGE("write view map err = %d", wView.error());
1801 work->result = wView.error();
1802 work->workletsProcessed = 1u;
1803 return;
1804 }
1805
1806 error = setEncodeArgs(
1807 ps_encode_ip, ps_encode_op, view.get(), wView.base(), wView.capacity(), workIndex);
1808 if (error != C2_OK) {
1809 ALOGE("setEncodeArgs failed : %d", error);
1810 mSignalledError = true;
1811 work->result = error;
1812 work->workletsProcessed = 1u;
1813 return;
1814 }
1815
1816 // DUMP_TO_FILE(
1817 // mInFile, s_encode_ip.s_inp_buf.apv_bufs[0],
1818 // (mHeight * mStride * 3 / 2));
1819
1820 GETTIME(&mTimeStart, nullptr);
1821 /* Compute time elapsed between the end of the previous encode()
1822 * call and the start of the current one */
1823 TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
1824 status = ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
1825
1826 if (IV_SUCCESS != status) {
1827 if ((ps_encode_op->u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) {
1828 // TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size?
1829 mOutBufferSize *= 2;
1830 mOutBlock.reset();
1831 continue;
1832 }
1833 ALOGE("Encode Frame failed = 0x%x\n",
1834 ps_encode_op->u4_error_code);
1835 mSignalledError = true;
1836 work->result = C2_CORRUPTED;
1837 work->workletsProcessed = 1u;
1838 return;
1839 }
1840 } while (IV_SUCCESS != status);
1841
1842 // Hold input buffer reference
1843 if (inputBuffer) {
1844 mBuffers[ps_encode_ip->s_inp_buf.apv_bufs[0]] = inputBuffer;
1845 }
1846
1847 GETTIME(&mTimeEnd, nullptr);
1848 /* Compute time taken by this encode() call */
1849 TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
1850
1851 ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
1852 ps_encode_op->s_out_buf.u4_bytes);
1853
1854 void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
1855 /* If encoder frees up an input buffer, mark it as free */
1856 if (freed != nullptr) {
1857 if (mBuffers.count(freed) == 0u) {
1858 ALOGD("buffer not tracked");
1859 } else {
1860 // Release input buffer reference
1861 mBuffers.erase(freed);
1862 mConversionBuffersInUse.erase(freed);
1863 }
1864 }
1865
1866 if (ps_encode_op->output_present) {
1867 if (!ps_encode_op->s_out_buf.u4_bytes) {
1868 ALOGE("Error: Output present but bytes generated is zero");
1869 mSignalledError = true;
1870 work->result = C2_CORRUPTED;
1871 work->workletsProcessed = 1u;
1872 return;
1873 }
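// The 32-bit timestamp pair echoes back the 64-bit work index that was split across
// u4_timestamp_high/low when the frame was submitted (see setEncodeArgs); reassemble
// it here to locate the matching work item.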
1874 uint64_t workId = ((uint64_t)ps_encode_op->u4_timestamp_high << 32) |
1875 ps_encode_op->u4_timestamp_low;
1876 finishWork(workId, work, ps_encode_op);
1877 }
1878 if (mSawInputEOS) {
1879 drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work);
1880 }
1881 }
1882
1883 c2_status_t C2SoftAvcEnc::drainInternal(
1884 uint32_t drainMode,
1885 const std::shared_ptr<C2BlockPool> &pool,
1886 const std::unique_ptr<C2Work> &work) {
1887
1888 if (drainMode == NO_DRAIN) {
1889 ALOGW("drain with NO_DRAIN: no-op");
1890 return C2_OK;
1891 }
1892 if (drainMode == DRAIN_CHAIN) {
1893 ALOGW("DRAIN_CHAIN not supported");
1894 return C2_OMITTED;
1895 }
1896
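// Keep calling the encoder with no new input so it flushes any frames still queued
// internally (e.g. reordered B-frames); stop once a call produces no output.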
1897 while (true) {
1898 if (!mOutBlock) {
1899 C2MemoryUsage usage = {C2MemoryUsage::CPU_READ,
1900 C2MemoryUsage::CPU_WRITE};
1901 // TODO: error handling, proper usage, etc.
1902 c2_status_t err =
1903 pool->fetchLinearBlock(mOutBufferSize, usage, &mOutBlock);
1904 if (err != C2_OK) {
1905 ALOGE("fetch linear block err = %d", err);
1906 work->result = err;
1907 work->workletsProcessed = 1u;
1908 return err;
1909 }
1910 }
1911 C2WriteView wView = mOutBlock->map().get();
1912 if (wView.error()) {
1913 ALOGE("graphic view map failed %d", wView.error());
1914 return C2_CORRUPTED;
1915 }
1916 ih264e_video_encode_ip_t s_video_encode_ip = {};
1917 ih264e_video_encode_op_t s_video_encode_op = {};
1918 ive_video_encode_ip_t *ps_encode_ip = &s_video_encode_ip.s_ive_ip;
1919 ive_video_encode_op_t *ps_encode_op = &s_video_encode_op.s_ive_op;
1920 if (C2_OK != setEncodeArgs(ps_encode_ip, ps_encode_op, nullptr,
1921 wView.base(), wView.capacity(), 0)) {
1922 ALOGE("setEncodeArgs failed for drainInternal");
1923 mSignalledError = true;
1924 work->result = C2_CORRUPTED;
1925 work->workletsProcessed = 1u;
1926 return C2_CORRUPTED;
1927 }
1928 (void)ive_api_function(mCodecCtx, &s_video_encode_ip, &s_video_encode_op);
1929
1930 void *freed = ps_encode_op->s_inp_buf.apv_bufs[0];
1931 /* If encoder frees up an input buffer, mark it as free */
1932 if (freed != nullptr) {
1933 if (mBuffers.count(freed) == 0u) {
1934 ALOGD("buffer not tracked");
1935 } else {
1936 // Release input buffer reference
1937 mBuffers.erase(freed);
1938 mConversionBuffersInUse.erase(freed);
1939 }
1940 }
1941
1942 if (ps_encode_op->output_present) {
1943 uint64_t workId = ((uint64_t)ps_encode_op->u4_timestamp_high << 32) |
1944 ps_encode_op->u4_timestamp_low;
1945 finishWork(workId, work, ps_encode_op);
1946 } else {
1947 if (work->workletsProcessed != 1u) {
1948 work->worklets.front()->output.flags = work->input.flags;
1949 work->worklets.front()->output.ordinal = work->input.ordinal;
1950 work->worklets.front()->output.buffers.clear();
1951 work->workletsProcessed = 1u;
1952 }
1953 break;
1954 }
1955 }
1956
1957 return C2_OK;
1958 }
1959
1960 c2_status_t C2SoftAvcEnc::drain(
1961 uint32_t drainMode,
1962 const std::shared_ptr<C2BlockPool> &pool) {
1963 return drainInternal(drainMode, pool, nullptr);
1964 }
1965
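// Factory glue: the Codec2 framework loads this component through the exported
// CreateCodec2Factory()/DestroyCodec2Factory() entry points below.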
1966 class C2SoftAvcEncFactory : public C2ComponentFactory {
1967 public:
1968 C2SoftAvcEncFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
1969 GetCodec2PlatformComponentStore()->getParamReflector())) {
1970 }
1971
1972 virtual c2_status_t createComponent(
1973 c2_node_id_t id,
1974 std::shared_ptr<C2Component>* const component,
1975 std::function<void(C2Component*)> deleter) override {
1976 *component = std::shared_ptr<C2Component>(
1977 new C2SoftAvcEnc(COMPONENT_NAME,
1978 id,
1979 std::make_shared<C2SoftAvcEnc::IntfImpl>(mHelper)),
1980 deleter);
1981 return C2_OK;
1982 }
1983
1984 virtual c2_status_t createInterface(
1985 c2_node_id_t id,
1986 std::shared_ptr<C2ComponentInterface>* const interface,
1987 std::function<void(C2ComponentInterface*)> deleter) override {
1988 *interface = std::shared_ptr<C2ComponentInterface>(
1989 new SimpleInterface<C2SoftAvcEnc::IntfImpl>(
1990 COMPONENT_NAME, id, std::make_shared<C2SoftAvcEnc::IntfImpl>(mHelper)),
1991 deleter);
1992 return C2_OK;
1993 }
1994
1995 virtual ~C2SoftAvcEncFactory() override = default;
1996
1997 private:
1998 std::shared_ptr<C2ReflectorHelper> mHelper;
1999 };
2000
2001 } // namespace android
2002
2003 __attribute__((cfi_canonical_jump_table))
2004 extern "C" ::C2ComponentFactory* CreateCodec2Factory() {
2005 ALOGV("in %s", __func__);
2006 return new ::android::C2SoftAvcEncFactory();
2007 }
2008
2009 __attribute__((cfi_canonical_jump_table))
2010 extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) {
2011 ALOGV("in %s", __func__);
2012 delete factory;
2013 }
2014