1 /*
2 * Copyright 2012, The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #include "hidl/HidlSupport.h"
19 #define LOG_TAG "MediaCodec"
20 #include <utils/Log.h>
21
22 #include <set>
23 #include <stdlib.h>
24
25 #include <inttypes.h>
27 #include <dlfcn.h>
28
29 #include <C2Buffer.h>
30
31 #include "include/SoftwareRenderer.h"
32 #include "PlaybackDurationAccumulator.h"
33
34 #include <android/binder_manager.h>
35 #include <android/content/pm/IPackageManagerNative.h>
36 #include <android/hardware/cas/native/1.0/IDescrambler.h>
37 #include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
38
39 #include <aidl/android/media/BnResourceManagerClient.h>
40 #include <aidl/android/media/IResourceManagerService.h>
41 #include <android/binder_ibinder.h>
43 #include <android/dlext.h>
44 #include <binder/IMemory.h>
45 #include <binder/IServiceManager.h>
46 #include <binder/MemoryDealer.h>
47 #include <cutils/properties.h>
48 #include <gui/BufferQueue.h>
49 #include <gui/Surface.h>
50 #include <hidlmemory/FrameworkUtils.h>
51 #include <mediadrm/ICrypto.h>
52 #include <media/IOMX.h>
53 #include <media/MediaCodecBuffer.h>
54 #include <media/MediaCodecInfo.h>
55 #include <media/MediaMetricsItem.h>
56 #include <media/MediaResource.h>
57 #include <media/NdkMediaErrorPriv.h>
58 #include <media/NdkMediaFormat.h>
59 #include <media/NdkMediaFormatPriv.h>
60 #include <media/formatshaper/FormatShaper.h>
61 #include <media/stagefright/foundation/ABuffer.h>
62 #include <media/stagefright/foundation/ADebug.h>
63 #include <media/stagefright/foundation/AMessage.h>
64 #include <media/stagefright/foundation/AString.h>
65 #include <media/stagefright/foundation/AUtils.h>
66 #include <media/stagefright/foundation/avc_utils.h>
67 #include <media/stagefright/foundation/hexdump.h>
68 #include <media/stagefright/ACodec.h>
69 #include <media/stagefright/BatteryChecker.h>
70 #include <media/stagefright/BufferProducerWrapper.h>
71 #include <media/stagefright/CCodec.h>
72 #include <media/stagefright/MediaCodec.h>
73 #include <media/stagefright/MediaCodecConstants.h>
74 #include <media/stagefright/MediaCodecList.h>
76 #include <media/stagefright/MediaDefs.h>
77 #include <media/stagefright/MediaErrors.h>
78 #include <media/stagefright/MediaFilter.h>
79 #include <media/stagefright/OMXClient.h>
80 #include <media/stagefright/PersistentSurface.h>
81 #include <media/stagefright/SurfaceUtils.h>
82 #include <nativeloader/dlext_namespaces.h>
83 #include <private/android_filesystem_config.h>
84 #include <utils/Singleton.h>
85
86 namespace android {
87
88 using Status = ::ndk::ScopedAStatus;
89 using aidl::android::media::BnResourceManagerClient;
90 using aidl::android::media::IResourceManagerClient;
91 using aidl::android::media::IResourceManagerService;
92
93 // key for media statistics
94 static const char *kCodecKeyName = "codec";
95 // attrs for media statistics
96 // NB: these are matched with public Java API constants defined
97 // in frameworks/base/media/java/android/media/MediaCodec.java
98 // These must be kept synchronized with the constants there.
99 static const char *kCodecLogSessionId = "android.media.mediacodec.log-session-id";
100 static const char *kCodecCodec = "android.media.mediacodec.codec"; /* e.g. OMX.google.aac.decoder */
101 static const char *kCodecMime = "android.media.mediacodec.mime"; /* e.g. audio/mime */
102 static const char *kCodecMode = "android.media.mediacodec.mode"; /* audio, video */
103 static const char *kCodecModeVideo = "video"; /* values returned for kCodecMode */
104 static const char *kCodecModeAudio = "audio";
105 static const char *kCodecEncoder = "android.media.mediacodec.encoder"; /* 0,1 */
106 static const char *kCodecSecure = "android.media.mediacodec.secure"; /* 0, 1 */
107 static const char *kCodecWidth = "android.media.mediacodec.width"; /* 0..n */
108 static const char *kCodecHeight = "android.media.mediacodec.height"; /* 0..n */
109 static const char *kCodecRotation = "android.media.mediacodec.rotation-degrees"; /* 0/90/180/270 */
110 static const char *kCodecColorFormat = "android.media.mediacodec.color-format";
111 static const char *kCodecFrameRate = "android.media.mediacodec.frame-rate";
112 static const char *kCodecCaptureRate = "android.media.mediacodec.capture-rate";
113 static const char *kCodecOperatingRate = "android.media.mediacodec.operating-rate";
114 static const char *kCodecPriority = "android.media.mediacodec.priority";
115
116 // Min/Max QP before shaping
117 static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
118 static const char *kCodecOriginalVideoQPIMax = "android.media.mediacodec.original-video-qp-i-max";
119 static const char *kCodecOriginalVideoQPPMin = "android.media.mediacodec.original-video-qp-p-min";
120 static const char *kCodecOriginalVideoQPPMax = "android.media.mediacodec.original-video-qp-p-max";
121 static const char *kCodecOriginalVideoQPBMin = "android.media.mediacodec.original-video-qp-b-min";
122 static const char *kCodecOriginalVideoQPBMax = "android.media.mediacodec.original-video-qp-b-max";
123
124 // Min/Max QP after shaping
125 static const char *kCodecRequestedVideoQPIMin = "android.media.mediacodec.video-qp-i-min";
126 static const char *kCodecRequestedVideoQPIMax = "android.media.mediacodec.video-qp-i-max";
127 static const char *kCodecRequestedVideoQPPMin = "android.media.mediacodec.video-qp-p-min";
128 static const char *kCodecRequestedVideoQPPMax = "android.media.mediacodec.video-qp-p-max";
129 static const char *kCodecRequestedVideoQPBMin = "android.media.mediacodec.video-qp-b-min";
130 static const char *kCodecRequestedVideoQPBMax = "android.media.mediacodec.video-qp-b-max";
131
132 // NB: These are not yet exposed as public Java API constants.
133 static const char *kCodecCrypto = "android.media.mediacodec.crypto"; /* 0,1 */
134 static const char *kCodecProfile = "android.media.mediacodec.profile"; /* 0..n */
135 static const char *kCodecLevel = "android.media.mediacodec.level"; /* 0..n */
136 static const char *kCodecBitrateMode = "android.media.mediacodec.bitrate_mode"; /* CQ/VBR/CBR */
137 static const char *kCodecBitrate = "android.media.mediacodec.bitrate"; /* 0..n */
138 static const char *kCodecOriginalBitrate = "android.media.mediacodec.original.bitrate"; /* 0..n */
139 static const char *kCodecMaxWidth = "android.media.mediacodec.maxwidth"; /* 0..n */
140 static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight"; /* 0..n */
141 static const char *kCodecError = "android.media.mediacodec.errcode";
142 static const char *kCodecLifetimeMs = "android.media.mediacodec.lifetimeMs"; /* 0..n ms*/
143 static const char *kCodecErrorState = "android.media.mediacodec.errstate";
144 static const char *kCodecLatencyMax = "android.media.mediacodec.latency.max"; /* in us */
145 static const char *kCodecLatencyMin = "android.media.mediacodec.latency.min"; /* in us */
146 static const char *kCodecLatencyAvg = "android.media.mediacodec.latency.avg"; /* in us */
147 static const char *kCodecLatencyCount = "android.media.mediacodec.latency.n";
148 static const char *kCodecLatencyHist = "android.media.mediacodec.latency.hist"; /* in us */
149 static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
150 static const char *kCodecQueueSecureInputBufferError = "android.media.mediacodec.queueSecureInputBufferError";
151 static const char *kCodecQueueInputBufferError = "android.media.mediacodec.queueInputBufferError";
152
153 static const char *kCodecNumLowLatencyModeOn = "android.media.mediacodec.low-latency.on"; /* 0..n */
154 static const char *kCodecNumLowLatencyModeOff = "android.media.mediacodec.low-latency.off"; /* 0..n */
155 static const char *kCodecFirstFrameIndexLowLatencyModeOn = "android.media.mediacodec.low-latency.first-frame"; /* 0..n */
156 static const char *kCodecChannelCount = "android.media.mediacodec.channelCount";
157 static const char *kCodecSampleRate = "android.media.mediacodec.sampleRate";
158 static const char *kCodecVideoEncodedBytes = "android.media.mediacodec.vencode.bytes";
159 static const char *kCodecVideoEncodedFrames = "android.media.mediacodec.vencode.frames";
160 static const char *kCodecVideoInputBytes = "android.media.mediacodec.video.input.bytes";
161 static const char *kCodecVideoInputFrames = "android.media.mediacodec.video.input.frames";
162 static const char *kCodecVideoEncodedDurationUs = "android.media.mediacodec.vencode.durationUs";
163
164 // the kCodecRecent* fields appear only in getMetrics() results
165 static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max"; /* in us */
166 static const char *kCodecRecentLatencyMin = "android.media.mediacodec.recent.min"; /* in us */
167 static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg"; /* in us */
168 static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
169 static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist"; /* in us */
170 static const char *kCodecPlaybackDurationSec =
171 "android.media.mediacodec.playback-duration-sec"; /* in sec */
172
173 /* -1: shaper disabled
174 >=0: number of fields changed */
175 static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";
176
177 // XXX suppress until we get our representation right
178 static bool kEmitHistogram = false;
179
180
static int64_t getId(const std::shared_ptr<IResourceManagerClient> &client) {
182 return (int64_t) client.get();
183 }
184
static bool isResourceError(status_t err) {
186 return (err == NO_MEMORY);
187 }
188
189 static const int kMaxRetry = 2;
190 static const int kMaxReclaimWaitTimeInUs = 500000; // 0.5s
191 static const int kNumBuffersAlign = 16;
192
193 static const C2MemoryUsage kDefaultReadWriteUsage{
194 C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
195
196 ////////////////////////////////////////////////////////////////////////////////
197
198 struct ResourceManagerClient : public BnResourceManagerClient {
explicit ResourceManagerClient(MediaCodec* codec) : mMediaCodec(codec) {}
200
Status reclaimResource(bool* _aidl_return) override {
202 sp<MediaCodec> codec = mMediaCodec.promote();
203 if (codec == NULL) {
204 // codec is already gone.
205 *_aidl_return = true;
206 return Status::ok();
207 }
208 status_t err = codec->reclaim();
209 if (err == WOULD_BLOCK) {
210 ALOGD("Wait for the client to release codec.");
211 usleep(kMaxReclaimWaitTimeInUs);
212 ALOGD("Try to reclaim again.");
213 err = codec->reclaim(true /* force */);
214 }
215 if (err != OK) {
216 ALOGW("ResourceManagerClient failed to release codec with err %d", err);
217 }
218 *_aidl_return = (err == OK);
219 return Status::ok();
220 }
221
Status getName(::std::string* _aidl_return) override {
223 _aidl_return->clear();
224 sp<MediaCodec> codec = mMediaCodec.promote();
225 if (codec == NULL) {
226 // codec is already gone.
227 return Status::ok();
228 }
229
230 AString name;
231 if (codec->getName(&name) == OK) {
232 *_aidl_return = name.c_str();
233 }
234 return Status::ok();
235 }
236
virtual ~ResourceManagerClient() {}
238
239 private:
240 wp<MediaCodec> mMediaCodec;
241
242 DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
243 };
244
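// ResourceManagerServiceProxy wraps the connection to the media.resource_manager service:
// it registers and unregisters this codec's resources, forwards reclaim requests, and
// tracks service death through an AIBinder death recipient.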
245 struct MediaCodec::ResourceManagerServiceProxy : public RefBase {
246 ResourceManagerServiceProxy(pid_t pid, uid_t uid,
247 const std::shared_ptr<IResourceManagerClient> &client);
248 virtual ~ResourceManagerServiceProxy();
249
250 void init();
251
252 // implements DeathRecipient
253 static void BinderDiedCallback(void* cookie);
254 void binderDied();
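// Death notifications may race with proxy destruction, so callbacks carry an opaque cookie
// that is only dereferenced while it is still present in the static sCookies set.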
255 static Mutex sLockCookies;
256 static std::set<void*> sCookies;
257 static void addCookie(void* cookie);
258 static void removeCookie(void* cookie);
259
260 void addResource(const MediaResourceParcel &resource);
261 void removeResource(const MediaResourceParcel &resource);
262 void removeClient();
263 void markClientForPendingRemoval();
264 bool reclaimResource(const std::vector<MediaResourceParcel> &resources);
265
266 private:
267 Mutex mLock;
268 pid_t mPid;
269 uid_t mUid;
270 std::shared_ptr<IResourceManagerService> mService;
271 std::shared_ptr<IResourceManagerClient> mClient;
272 ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
273 };
274
MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
276 pid_t pid, uid_t uid, const std::shared_ptr<IResourceManagerClient> &client)
277 : mPid(pid), mUid(uid), mClient(client),
278 mDeathRecipient(AIBinder_DeathRecipient_new(BinderDiedCallback)) {
279 if (mPid == MediaCodec::kNoPid) {
280 mPid = AIBinder_getCallingPid();
281 }
282 }
283
MediaCodec::ResourceManagerServiceProxy::~ResourceManagerServiceProxy() {
285
286 // remove the cookie, so any in-flight death notification will get dropped
287 // by our handler.
288 removeCookie(this);
289
290 Mutex::Autolock _l(mLock);
291 if (mService != nullptr) {
292 AIBinder_unlinkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
293 mService = nullptr;
294 }
295 }
296
void MediaCodec::ResourceManagerServiceProxy::init() {
298 ::ndk::SpAIBinder binder(AServiceManager_getService("media.resource_manager"));
299 mService = IResourceManagerService::fromBinder(binder);
300 if (mService == nullptr) {
301 ALOGE("Failed to get ResourceManagerService");
302 return;
303 }
304
305 // Kill clients pending removal.
306 mService->reclaimResourcesFromClientsPendingRemoval(mPid);
307
308 // so our handler will process the death notifications
309 addCookie(this);
310
311 // after this, require mLock whenever using mService
312 AIBinder_linkToDeath(mService->asBinder().get(), mDeathRecipient.get(), this);
313 }
314
315 //static
316 Mutex MediaCodec::ResourceManagerServiceProxy::sLockCookies;
317 std::set<void*> MediaCodec::ResourceManagerServiceProxy::sCookies;
318
319 //static
void MediaCodec::ResourceManagerServiceProxy::addCookie(void* cookie) {
321 Mutex::Autolock _l(sLockCookies);
322 sCookies.insert(cookie);
323 }
324
325 //static
void MediaCodec::ResourceManagerServiceProxy::removeCookie(void* cookie) {
327 Mutex::Autolock _l(sLockCookies);
328 sCookies.erase(cookie);
329 }
330
331 //static
void MediaCodec::ResourceManagerServiceProxy::BinderDiedCallback(void* cookie) {
333 Mutex::Autolock _l(sLockCookies);
334 if (sCookies.find(cookie) != sCookies.end()) {
335 auto thiz = static_cast<ResourceManagerServiceProxy*>(cookie);
336 thiz->binderDied();
337 }
338 }
339
void MediaCodec::ResourceManagerServiceProxy::binderDied() {
341 ALOGW("ResourceManagerService died.");
342 Mutex::Autolock _l(mLock);
343 mService = nullptr;
344 }
345
void MediaCodec::ResourceManagerServiceProxy::addResource(
347 const MediaResourceParcel &resource) {
348 std::vector<MediaResourceParcel> resources;
349 resources.push_back(resource);
350
351 Mutex::Autolock _l(mLock);
352 if (mService == nullptr) {
353 return;
354 }
355 mService->addResource(mPid, mUid, getId(mClient), mClient, resources);
356 }
357
void MediaCodec::ResourceManagerServiceProxy::removeResource(
359 const MediaResourceParcel &resource) {
360 std::vector<MediaResourceParcel> resources;
361 resources.push_back(resource);
362
363 Mutex::Autolock _l(mLock);
364 if (mService == nullptr) {
365 return;
366 }
367 mService->removeResource(mPid, getId(mClient), resources);
368 }
369
void MediaCodec::ResourceManagerServiceProxy::removeClient() {
371 Mutex::Autolock _l(mLock);
372 if (mService == nullptr) {
373 return;
374 }
375 mService->removeClient(mPid, getId(mClient));
376 }
377
void MediaCodec::ResourceManagerServiceProxy::markClientForPendingRemoval() {
379 Mutex::Autolock _l(mLock);
380 if (mService == nullptr) {
381 return;
382 }
383 mService->markClientForPendingRemoval(mPid, getId(mClient));
384 }
385
bool MediaCodec::ResourceManagerServiceProxy::reclaimResource(
387 const std::vector<MediaResourceParcel> &resources) {
388 Mutex::Autolock _l(mLock);
389 if (mService == NULL) {
390 return false;
391 }
392 bool success;
393 Status status = mService->reclaimResource(mPid, resources, &success);
394 return status.isOk() && success;
395 }
396
397 ////////////////////////////////////////////////////////////////////////////////
398
MediaCodec::BufferInfo::BufferInfo() : mOwnedByClient(false) {}
400
401 ////////////////////////////////////////////////////////////////////////////////
402
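// ReleaseSurface provides a placeholder surface whose consumer immediately acquires and
// releases every incoming buffer, so output can be drained without being rendered anywhere.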
403 class MediaCodec::ReleaseSurface {
404 public:
explicit ReleaseSurface(uint64_t usage) {
406 BufferQueue::createBufferQueue(&mProducer, &mConsumer);
407 mSurface = new Surface(mProducer, false /* controlledByApp */);
408 struct ConsumerListener : public BnConsumerListener {
409 ConsumerListener(const sp<IGraphicBufferConsumer> &consumer) {
410 mConsumer = consumer;
411 }
412 void onFrameAvailable(const BufferItem&) override {
413 BufferItem buffer;
414 // consume buffer
415 sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
416 if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == NO_ERROR) {
417 consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
418 EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
419 }
420 }
421
422 wp<IGraphicBufferConsumer> mConsumer;
423 void onBuffersReleased() override {}
424 void onSidebandStreamChanged() override {}
425 };
426 sp<ConsumerListener> listener{new ConsumerListener(mConsumer)};
427 mConsumer->consumerConnect(listener, false);
428 mConsumer->setConsumerName(String8{"MediaCodec.release"});
429 mConsumer->setConsumerUsageBits(usage);
430 }
431
const sp<Surface> &getSurface() {
433 return mSurface;
434 }
435
436 private:
437 sp<IGraphicBufferProducer> mProducer;
438 sp<IGraphicBufferConsumer> mConsumer;
439 sp<Surface> mSurface;
440 };
441
442 ////////////////////////////////////////////////////////////////////////////////
443
444 namespace {
445
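// Message "what" codes used by BufferCallback/CodecCallback below when forwarding codec
// events to MediaCodec's kWhatCodecNotify handler.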
446 enum {
447 kWhatFillThisBuffer = 'fill',
448 kWhatDrainThisBuffer = 'drai',
449 kWhatEOS = 'eos ',
450 kWhatStartCompleted = 'Scom',
451 kWhatStopCompleted = 'scom',
452 kWhatReleaseCompleted = 'rcom',
453 kWhatFlushCompleted = 'fcom',
454 kWhatError = 'erro',
455 kWhatComponentAllocated = 'cAll',
456 kWhatComponentConfigured = 'cCon',
457 kWhatInputSurfaceCreated = 'isfc',
458 kWhatInputSurfaceAccepted = 'isfa',
459 kWhatSignaledInputEOS = 'seos',
460 kWhatOutputFramesRendered = 'outR',
461 kWhatOutputBuffersChanged = 'outC',
462 kWhatFirstTunnelFrameReady = 'ftfR',
463 };
464
465 class BufferCallback : public CodecBase::BufferCallback {
466 public:
explicit BufferCallback(const sp<AMessage> &notify);
468 virtual ~BufferCallback() = default;
469
470 virtual void onInputBufferAvailable(
471 size_t index, const sp<MediaCodecBuffer> &buffer) override;
472 virtual void onOutputBufferAvailable(
473 size_t index, const sp<MediaCodecBuffer> &buffer) override;
474 private:
475 const sp<AMessage> mNotify;
476 };
477
BufferCallback::BufferCallback(const sp<AMessage> &notify)
479 : mNotify(notify) {}
480
void BufferCallback::onInputBufferAvailable(
482 size_t index, const sp<MediaCodecBuffer> &buffer) {
483 sp<AMessage> notify(mNotify->dup());
484 notify->setInt32("what", kWhatFillThisBuffer);
485 notify->setSize("index", index);
486 notify->setObject("buffer", buffer);
487 notify->post();
488 }
489
void BufferCallback::onOutputBufferAvailable(
491 size_t index, const sp<MediaCodecBuffer> &buffer) {
492 sp<AMessage> notify(mNotify->dup());
493 notify->setInt32("what", kWhatDrainThisBuffer);
494 notify->setSize("index", index);
495 notify->setObject("buffer", buffer);
496 notify->post();
497 }
498
499 class CodecCallback : public CodecBase::CodecCallback {
500 public:
explicit CodecCallback(const sp<AMessage> &notify);
502 virtual ~CodecCallback() = default;
503
504 virtual void onEos(status_t err) override;
505 virtual void onStartCompleted() override;
506 virtual void onStopCompleted() override;
507 virtual void onReleaseCompleted() override;
508 virtual void onFlushCompleted() override;
509 virtual void onError(status_t err, enum ActionCode actionCode) override;
510 virtual void onComponentAllocated(const char *componentName) override;
511 virtual void onComponentConfigured(
512 const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) override;
513 virtual void onInputSurfaceCreated(
514 const sp<AMessage> &inputFormat,
515 const sp<AMessage> &outputFormat,
516 const sp<BufferProducerWrapper> &inputSurface) override;
517 virtual void onInputSurfaceCreationFailed(status_t err) override;
518 virtual void onInputSurfaceAccepted(
519 const sp<AMessage> &inputFormat,
520 const sp<AMessage> &outputFormat) override;
521 virtual void onInputSurfaceDeclined(status_t err) override;
522 virtual void onSignaledInputEOS(status_t err) override;
523 virtual void onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) override;
524 virtual void onOutputBuffersChanged() override;
525 virtual void onFirstTunnelFrameReady() override;
526 private:
527 const sp<AMessage> mNotify;
528 };
529
CodecCallback::CodecCallback(const sp<AMessage> &notify) : mNotify(notify) {}
531
void CodecCallback::onEos(status_t err) {
533 sp<AMessage> notify(mNotify->dup());
534 notify->setInt32("what", kWhatEOS);
535 notify->setInt32("err", err);
536 notify->post();
537 }
538
void CodecCallback::onStartCompleted() {
540 sp<AMessage> notify(mNotify->dup());
541 notify->setInt32("what", kWhatStartCompleted);
542 notify->post();
543 }
544
void CodecCallback::onStopCompleted() {
546 sp<AMessage> notify(mNotify->dup());
547 notify->setInt32("what", kWhatStopCompleted);
548 notify->post();
549 }
550
void CodecCallback::onReleaseCompleted() {
552 sp<AMessage> notify(mNotify->dup());
553 notify->setInt32("what", kWhatReleaseCompleted);
554 notify->post();
555 }
556
void CodecCallback::onFlushCompleted() {
558 sp<AMessage> notify(mNotify->dup());
559 notify->setInt32("what", kWhatFlushCompleted);
560 notify->post();
561 }
562
void CodecCallback::onError(status_t err, enum ActionCode actionCode) {
564 sp<AMessage> notify(mNotify->dup());
565 notify->setInt32("what", kWhatError);
566 notify->setInt32("err", err);
567 notify->setInt32("actionCode", actionCode);
568 notify->post();
569 }
570
void CodecCallback::onComponentAllocated(const char *componentName) {
572 sp<AMessage> notify(mNotify->dup());
573 notify->setInt32("what", kWhatComponentAllocated);
574 notify->setString("componentName", componentName);
575 notify->post();
576 }
577
void CodecCallback::onComponentConfigured(
579 const sp<AMessage> &inputFormat, const sp<AMessage> &outputFormat) {
580 sp<AMessage> notify(mNotify->dup());
581 notify->setInt32("what", kWhatComponentConfigured);
582 notify->setMessage("input-format", inputFormat);
583 notify->setMessage("output-format", outputFormat);
584 notify->post();
585 }
586
void CodecCallback::onInputSurfaceCreated(
588 const sp<AMessage> &inputFormat,
589 const sp<AMessage> &outputFormat,
590 const sp<BufferProducerWrapper> &inputSurface) {
591 sp<AMessage> notify(mNotify->dup());
592 notify->setInt32("what", kWhatInputSurfaceCreated);
593 notify->setMessage("input-format", inputFormat);
594 notify->setMessage("output-format", outputFormat);
595 notify->setObject("input-surface", inputSurface);
596 notify->post();
597 }
598
void CodecCallback::onInputSurfaceCreationFailed(status_t err) {
600 sp<AMessage> notify(mNotify->dup());
601 notify->setInt32("what", kWhatInputSurfaceCreated);
602 notify->setInt32("err", err);
603 notify->post();
604 }
605
void CodecCallback::onInputSurfaceAccepted(
607 const sp<AMessage> &inputFormat,
608 const sp<AMessage> &outputFormat) {
609 sp<AMessage> notify(mNotify->dup());
610 notify->setInt32("what", kWhatInputSurfaceAccepted);
611 notify->setMessage("input-format", inputFormat);
612 notify->setMessage("output-format", outputFormat);
613 notify->post();
614 }
615
void CodecCallback::onInputSurfaceDeclined(status_t err) {
617 sp<AMessage> notify(mNotify->dup());
618 notify->setInt32("what", kWhatInputSurfaceAccepted);
619 notify->setInt32("err", err);
620 notify->post();
621 }
622
void CodecCallback::onSignaledInputEOS(status_t err) {
624 sp<AMessage> notify(mNotify->dup());
625 notify->setInt32("what", kWhatSignaledInputEOS);
626 if (err != OK) {
627 notify->setInt32("err", err);
628 }
629 notify->post();
630 }
631
void CodecCallback::onOutputFramesRendered(const std::list<FrameRenderTracker::Info> &done) {
633 sp<AMessage> notify(mNotify->dup());
634 notify->setInt32("what", kWhatOutputFramesRendered);
635 if (MediaCodec::CreateFramesRenderedMessage(done, notify)) {
636 notify->post();
637 }
638 }
639
void CodecCallback::onOutputBuffersChanged() {
641 sp<AMessage> notify(mNotify->dup());
642 notify->setInt32("what", kWhatOutputBuffersChanged);
643 notify->post();
644 }
645
void CodecCallback::onFirstTunnelFrameReady() {
647 sp<AMessage> notify(mNotify->dup());
648 notify->setInt32("what", kWhatFirstTunnelFrameReady);
649 notify->post();
650 }
651
652 } // namespace
653
654 ////////////////////////////////////////////////////////////////////////////////
655
656 // static
sp<MediaCodec> MediaCodec::CreateByType(
658 const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
659 uid_t uid) {
660 sp<AMessage> format;
661 return CreateByType(looper, mime, encoder, err, pid, uid, format);
662 }
663
sp<MediaCodec> MediaCodec::CreateByType(
665 const sp<ALooper> &looper, const AString &mime, bool encoder, status_t *err, pid_t pid,
666 uid_t uid, sp<AMessage> format) {
667 Vector<AString> matchingCodecs;
668
669 MediaCodecList::findMatchingCodecs(
670 mime.c_str(),
671 encoder,
672 0,
673 format,
674 &matchingCodecs);
675
676 if (err != NULL) {
677 *err = NAME_NOT_FOUND;
678 }
679 for (size_t i = 0; i < matchingCodecs.size(); ++i) {
680 sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
681 AString componentName = matchingCodecs[i];
682 status_t ret = codec->init(componentName);
683 if (err != NULL) {
684 *err = ret;
685 }
686 if (ret == OK) {
687 return codec;
688 }
689 ALOGD("Allocating component '%s' failed (%d), try next one.",
690 componentName.c_str(), ret);
691 }
692 return NULL;
693 }
694
695 // static
sp<MediaCodec> MediaCodec::CreateByComponentName(
697 const sp<ALooper> &looper, const AString &name, status_t *err, pid_t pid, uid_t uid) {
698 sp<MediaCodec> codec = new MediaCodec(looper, pid, uid);
699
700 const status_t ret = codec->init(name);
701 if (err != NULL) {
702 *err = ret;
703 }
704 return ret == OK ? codec : NULL; // NULL deallocates codec.
705 }
706
707 // static
sp<PersistentSurface> MediaCodec::CreatePersistentInputSurface() {
709 sp<PersistentSurface> pluginSurface = CCodec::CreateInputSurface();
710 if (pluginSurface != nullptr) {
711 return pluginSurface;
712 }
713
714 OMXClient client;
715 if (client.connect() != OK) {
716 ALOGE("Failed to connect to OMX to create persistent input surface.");
717 return NULL;
718 }
719
720 sp<IOMX> omx = client.interface();
721
722 sp<IGraphicBufferProducer> bufferProducer;
723 sp<hardware::media::omx::V1_0::IGraphicBufferSource> bufferSource;
724
725 status_t err = omx->createInputSurface(&bufferProducer, &bufferSource);
726
727 if (err != OK) {
728 ALOGE("Failed to create persistent input surface.");
729 return NULL;
730 }
731
732 return new PersistentSurface(bufferProducer, bufferSource);
733 }
734
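// The constructor only sets up bookkeeping, metrics, and the resource-manager client;
// the underlying codec component is created later in init().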
MediaCodec::MediaCodec(
736 const sp<ALooper> &looper, pid_t pid, uid_t uid,
737 std::function<sp<CodecBase>(const AString &, const char *)> getCodecBase,
738 std::function<status_t(const AString &, sp<MediaCodecInfo> *)> getCodecInfo)
739 : mState(UNINITIALIZED),
740 mReleasedByResourceManager(false),
741 mLooper(looper),
742 mCodec(NULL),
743 mReplyID(0),
744 mFlags(0),
745 mStickyError(OK),
746 mSoftRenderer(NULL),
747 mIsVideo(false),
748 mVideoWidth(0),
749 mVideoHeight(0),
750 mRotationDegrees(0),
751 mDequeueInputTimeoutGeneration(0),
752 mDequeueInputReplyID(0),
753 mDequeueOutputTimeoutGeneration(0),
754 mDequeueOutputReplyID(0),
755 mTunneledInputWidth(0),
756 mTunneledInputHeight(0),
757 mTunneled(false),
758 mTunnelPeekState(TunnelPeekState::kEnabledNoBuffer),
759 mHaveInputSurface(false),
760 mHavePendingInputBuffers(false),
761 mCpuBoostRequested(false),
762 mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
763 mIsSurfaceToScreen(false),
764 mLatencyUnknown(0),
765 mBytesEncoded(0),
766 mEarliestEncodedPtsUs(INT64_MAX),
767 mLatestEncodedPtsUs(INT64_MIN),
768 mFramesEncoded(0),
769 mNumLowLatencyEnables(0),
770 mNumLowLatencyDisables(0),
771 mIsLowLatencyModeOn(false),
772 mIndexOfFirstFrameWhenLowLatencyOn(-1),
773 mInputBufferCounter(0),
774 mGetCodecBase(getCodecBase),
775 mGetCodecInfo(getCodecInfo) {
776 if (uid == kNoUid) {
777 mUid = AIBinder_getCallingUid();
778 } else {
779 mUid = uid;
780 }
781 mResourceManagerProxy = new ResourceManagerServiceProxy(pid, mUid,
782 ::ndk::SharedRefBase::make<ResourceManagerClient>(this));
783 if (!mGetCodecBase) {
784 mGetCodecBase = [](const AString &name, const char *owner) {
785 return GetCodecBase(name, owner);
786 };
787 }
788 if (!mGetCodecInfo) {
789 mGetCodecInfo = [](const AString &name, sp<MediaCodecInfo> *info) -> status_t {
790 *info = nullptr;
791 const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
792 if (!mcl) {
793 return NO_INIT; // if called from Java should raise IOException
794 }
795 AString tmp = name;
796 if (tmp.endsWith(".secure")) {
797 tmp.erase(tmp.size() - 7, 7);
798 }
799 for (const AString &codecName : { name, tmp }) {
800 ssize_t codecIdx = mcl->findCodecByName(codecName.c_str());
801 if (codecIdx < 0) {
802 continue;
803 }
804 *info = mcl->getCodecInfo(codecIdx);
805 return OK;
806 }
807 return NAME_NOT_FOUND;
808 };
809 }
810
811 initMediametrics();
812 }
813
MediaCodec::~MediaCodec() {
815 CHECK_EQ(mState, UNINITIALIZED);
816 mResourceManagerProxy->removeClient();
817
818 flushMediametrics();
819 }
820
void MediaCodec::initMediametrics() {
822 if (mMetricsHandle == 0) {
823 mMetricsHandle = mediametrics_create(kCodecKeyName);
824 }
825
826 mLatencyHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
827
828 {
829 Mutex::Autolock al(mRecentLock);
830 for (int i = 0; i<kRecentLatencyFrames; i++) {
831 mRecentSamples[i] = kRecentSampleInvalid;
832 }
833 mRecentHead = 0;
834 }
835
836 {
837 Mutex::Autolock al(mLatencyLock);
838 mBuffersInFlight.clear();
839 mNumLowLatencyEnables = 0;
840 mNumLowLatencyDisables = 0;
841 mIsLowLatencyModeOn = false;
842 mIndexOfFirstFrameWhenLowLatencyOn = -1;
843 mInputBufferCounter = 0;
844 }
845
846 mLifetimeStartNs = systemTime(SYSTEM_TIME_MONOTONIC);
847 }
848
void MediaCodec::updateMediametrics() {
850 ALOGV("MediaCodec::updateMediametrics");
851 if (mMetricsHandle == 0) {
852 return;
853 }
854
855 if (mLatencyHist.getCount() != 0 ) {
856 mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
857 mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
858 mediametrics_setInt64(mMetricsHandle, kCodecLatencyAvg, mLatencyHist.getAvg());
859 mediametrics_setInt64(mMetricsHandle, kCodecLatencyCount, mLatencyHist.getCount());
860
861 if (kEmitHistogram) {
862 // and the histogram itself
863 std::string hist = mLatencyHist.emit();
864 mediametrics_setCString(mMetricsHandle, kCodecLatencyHist, hist.c_str());
865 }
866 }
867 if (mLatencyUnknown > 0) {
868 mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
869 }
870 int64_t playbackDurationSec = mPlaybackDurationAccumulator->getDurationInSeconds();
871 if (playbackDurationSec > 0) {
872 mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
873 }
874 if (mLifetimeStartNs > 0) {
875 nsecs_t lifetime = systemTime(SYSTEM_TIME_MONOTONIC) - mLifetimeStartNs;
876 lifetime = lifetime / (1000 * 1000); // emitted in ms, truncated not rounded
877 mediametrics_setInt64(mMetricsHandle, kCodecLifetimeMs, lifetime);
878 }
879
880 if (mBytesEncoded) {
881 Mutex::Autolock al(mOutputStatsLock);
882
883 mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedBytes, mBytesEncoded);
884 int64_t duration = 0;
885 if (mLatestEncodedPtsUs > mEarliestEncodedPtsUs) {
886 duration = mLatestEncodedPtsUs - mEarliestEncodedPtsUs;
887 }
888 mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedDurationUs, duration);
889 mediametrics_setInt64(mMetricsHandle, kCodecVideoEncodedFrames, mFramesEncoded);
890 mediametrics_setInt64(mMetricsHandle, kCodecVideoInputFrames, mFramesInput);
891 mediametrics_setInt64(mMetricsHandle, kCodecVideoInputBytes, mBytesInput);
892 }
893
894 {
895 Mutex::Autolock al(mLatencyLock);
896 mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOn, mNumLowLatencyEnables);
897 mediametrics_setInt64(mMetricsHandle, kCodecNumLowLatencyModeOff, mNumLowLatencyDisables);
898 mediametrics_setInt64(mMetricsHandle, kCodecFirstFrameIndexLowLatencyModeOn,
899 mIndexOfFirstFrameWhenLowLatencyOn);
900 }
901 #if 0
902 // enable for short term, only while debugging
903 updateEphemeralMediametrics(mMetricsHandle);
904 #endif
905 }
906
void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
908 ALOGD("MediaCodec::updateEphemeralMediametrics()");
909
910 if (item == 0) {
911 return;
912 }
913
914 Histogram recentHist;
915
916 // build an empty histogram
917 recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
918
919 // stuff it with the samples in the ring buffer
920 {
921 Mutex::Autolock al(mRecentLock);
922
923 for (int i=0; i<kRecentLatencyFrames; i++) {
924 if (mRecentSamples[i] != kRecentSampleInvalid) {
925 recentHist.insert(mRecentSamples[i]);
926 }
927 }
928 }
929
930 // spit the data (if any) into the supplied analytics record
931 if (recentHist.getCount()!= 0 ) {
932 mediametrics_setInt64(item, kCodecRecentLatencyMax, recentHist.getMax());
933 mediametrics_setInt64(item, kCodecRecentLatencyMin, recentHist.getMin());
934 mediametrics_setInt64(item, kCodecRecentLatencyAvg, recentHist.getAvg());
935 mediametrics_setInt64(item, kCodecRecentLatencyCount, recentHist.getCount());
936
937 if (kEmitHistogram) {
938 // and the histogram itself
939 std::string hist = recentHist.emit();
940 mediametrics_setCString(item, kCodecRecentLatencyHist, hist.c_str());
941 }
942 }
943 }
944
void MediaCodec::flushMediametrics() {
946 updateMediametrics();
947 if (mMetricsHandle != 0) {
948 if (mediametrics_count(mMetricsHandle) > 0) {
949 mediametrics_selfRecord(mMetricsHandle);
950 }
951 mediametrics_delete(mMetricsHandle);
952 mMetricsHandle = 0;
953 }
954 }
955
void MediaCodec::updateLowLatency(const sp<AMessage> &msg) {
957 int32_t lowLatency = 0;
958 if (msg->findInt32("low-latency", &lowLatency)) {
959 Mutex::Autolock al(mLatencyLock);
960 if (lowLatency > 0) {
961 ++mNumLowLatencyEnables;
962 // This is just an estimate since low latency mode change happens ONLY at key frame
963 mIsLowLatencyModeOn = true;
964 } else if (lowLatency == 0) {
965 ++mNumLowLatencyDisables;
966 // This is just an estimate since low latency mode change happens ONLY at key frame
967 mIsLowLatencyModeOn = false;
968 }
969 }
970 }
971
constexpr const char *MediaCodec::asString(TunnelPeekState state, const char *default_string){
973 switch(state) {
974 case TunnelPeekState::kEnabledNoBuffer:
975 return "EnabledNoBuffer";
976 case TunnelPeekState::kDisabledNoBuffer:
977 return "DisabledNoBuffer";
978 case TunnelPeekState::kBufferDecoded:
979 return "BufferDecoded";
980 case TunnelPeekState::kBufferRendered:
981 return "BufferRendered";
982 case TunnelPeekState::kDisabledQueued:
983 return "DisabledQueued";
984 case TunnelPeekState::kEnabledQueued:
985 return "EnabledQueued";
986 default:
987 return default_string;
988 }
989 }
990
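// Handles the "tunnel-peek" parameter: advances the TunnelPeekState machine and, if a
// buffer has already been decoded, asks the codec to trigger rendering of the peeked frame.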
void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
992 int32_t tunnelPeek = 0;
993 if (!msg->findInt32("tunnel-peek", &tunnelPeek)){
994 return;
995 }
996
997 TunnelPeekState previousState = mTunnelPeekState;
998 if(tunnelPeek == 0){
999 switch (mTunnelPeekState) {
1000 case TunnelPeekState::kEnabledNoBuffer:
1001 mTunnelPeekState = TunnelPeekState::kDisabledNoBuffer;
1002 break;
1003 case TunnelPeekState::kEnabledQueued:
1004 mTunnelPeekState = TunnelPeekState::kDisabledQueued;
1005 break;
1006 default:
1007 ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
1008 return;
1009 }
1010 } else {
1011 switch (mTunnelPeekState) {
1012 case TunnelPeekState::kDisabledNoBuffer:
1013 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
1014 break;
1015 case TunnelPeekState::kDisabledQueued:
1016 mTunnelPeekState = TunnelPeekState::kEnabledQueued;
1017 break;
1018 case TunnelPeekState::kBufferDecoded:
1019 msg->setInt32("android._trigger-tunnel-peek", 1);
1020 mTunnelPeekState = TunnelPeekState::kBufferRendered;
1021 break;
1022 default:
1023 ALOGV("Ignoring tunnel-peek=%d for %s", tunnelPeek, asString(mTunnelPeekState));
1024 return;
1025 }
1026 }
1027
1028 ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
1029 }
1030
void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
1032 int what = 0;
1033 msg->findInt32("what", &what);
1034 if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
1035 static bool logged = false;
1036 if (!logged) {
1037 logged = true;
ALOGE("updatePlaybackDuration: expected kWhatOutputFramesRendered (%d)", msg->what());
1039 }
1040 return;
1041 }
1042 // Playback duration only counts if the buffers are going to the screen.
1043 if (!mIsSurfaceToScreen) {
1044 return;
1045 }
1046 int64_t renderTimeNs;
1047 size_t index = 0;
1048 while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
1049 mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
1050 }
1051 }
1052
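// Histogram keeps fixed-width buckets starting at 'floor', plus below/above overflow
// counters and min/max/sum/count, for the latency statistics reported via mediametrics.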
bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
1054 {
1055 if (nbuckets <= 0 || width <= 0) {
1056 return false;
1057 }
1058
1059 // get histogram buckets
1060 if (nbuckets == mBucketCount && mBuckets != NULL) {
1061 // reuse our existing buffer
1062 memset(mBuckets, 0, sizeof(*mBuckets) * mBucketCount);
1063 } else {
1064 // get a new pre-zeroed buffer
1065 int64_t *newbuckets = (int64_t *)calloc(nbuckets, sizeof (*mBuckets));
1066 if (newbuckets == NULL) {
1067 goto bad;
1068 }
1069 if (mBuckets != NULL)
1070 free(mBuckets);
1071 mBuckets = newbuckets;
1072 }
1073
1074 mWidth = width;
1075 mFloor = floor;
1076 mCeiling = floor + nbuckets * width;
1077 mBucketCount = nbuckets;
1078
1079 mMin = INT64_MAX;
1080 mMax = INT64_MIN;
1081 mSum = 0;
1082 mCount = 0;
1083 mBelow = mAbove = 0;
1084
1085 return true;
1086
1087 bad:
1088 if (mBuckets != NULL) {
1089 free(mBuckets);
1090 mBuckets = NULL;
1091 }
1092
1093 return false;
1094 }
1095
void MediaCodec::Histogram::insert(int64_t sample)
1097 {
1098 // histogram is not set up
1099 if (mBuckets == NULL) {
1100 return;
1101 }
1102
1103 mCount++;
1104 mSum += sample;
1105 if (mMin > sample) mMin = sample;
1106 if (mMax < sample) mMax = sample;
1107
1108 if (sample < mFloor) {
1109 mBelow++;
1110 } else if (sample >= mCeiling) {
1111 mAbove++;
1112 } else {
1113 int64_t slot = (sample - mFloor) / mWidth;
1114 CHECK(slot < mBucketCount);
1115 mBuckets[slot]++;
1116 }
1117 return;
1118 }
1119
std::string MediaCodec::Histogram::emit()
1121 {
1122 std::string value;
1123 char buffer[64];
1124
1125 // emits: width,Below{bucket0,bucket1,...., bucketN}above
1126 // unconfigured will emit: 0,0{}0
1127 // XXX: is this best representation?
1128 snprintf(buffer, sizeof(buffer), "%" PRId64 ",%" PRId64 ",%" PRId64 "{",
1129 mFloor, mWidth, mBelow);
1130 value = buffer;
1131 for (int i = 0; i < mBucketCount; i++) {
1132 if (i != 0) {
1133 value = value + ",";
1134 }
1135 snprintf(buffer, sizeof(buffer), "%" PRId64, mBuckets[i]);
1136 value = value + buffer;
1137 }
1138 snprintf(buffer, sizeof(buffer), "}%" PRId64 , mAbove);
1139 value = value + buffer;
1140 return value;
1141 }
1142
1143 // when we send a buffer to the codec;
void MediaCodec::statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
1145
1146 // only enqueue if we have a legitimate time
1147 if (presentationUs <= 0) {
1148 ALOGV("presentation time: %" PRId64, presentationUs);
1149 return;
1150 }
1151
1152 if (mBatteryChecker != nullptr) {
1153 mBatteryChecker->onCodecActivity([this] () {
1154 mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
1155 });
1156 }
1157
1158 if (mIsVideo && (mFlags & kFlagIsEncoder)) {
1159 mBytesInput += buffer->size();
1160 mFramesInput++;
1161 }
1162
1163 const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
1164 BufferFlightTiming_t startdata = { presentationUs, nowNs };
1165
1166 {
1167 // mutex access to mBuffersInFlight and other stats
1168 Mutex::Autolock al(mLatencyLock);
1169
1170
1171 // XXX: we *could* make sure that the time is later than the end of queue
1172 // as part of a consistency check...
1173 mBuffersInFlight.push_back(startdata);
1174
1175 if (mIsLowLatencyModeOn && mIndexOfFirstFrameWhenLowLatencyOn < 0) {
1176 mIndexOfFirstFrameWhenLowLatencyOn = mInputBufferCounter;
1177 }
1178 ++mInputBufferCounter;
1179 }
1180 }
1181
1182 // when we get a buffer back from the codec
void MediaCodec::statsBufferReceived(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
1184
1185 CHECK_NE(mState, UNINITIALIZED);
1186
1187 if (mIsVideo && (mFlags & kFlagIsEncoder)) {
1188 int32_t flags = 0;
1189 (void) buffer->meta()->findInt32("flags", &flags);
1190
1191 // some of these frames, we don't want to count
1192 // standalone EOS.... has an invalid timestamp
1193 if ((flags & (BUFFER_FLAG_CODECCONFIG|BUFFER_FLAG_EOS)) == 0) {
1194 mBytesEncoded += buffer->size();
1195 mFramesEncoded++;
1196
1197 Mutex::Autolock al(mOutputStatsLock);
1198 int64_t timeUs = 0;
1199 if (buffer->meta()->findInt64("timeUs", &timeUs)) {
1200 if (timeUs > mLatestEncodedPtsUs) {
1201 mLatestEncodedPtsUs = timeUs;
1202 }
1203 // can't chain as an else-if or this never triggers
1204 if (timeUs < mEarliestEncodedPtsUs) {
1205 mEarliestEncodedPtsUs = timeUs;
1206 }
1207 }
1208 }
1209 }
1210
1211 // mutex access to mBuffersInFlight and other stats
1212 Mutex::Autolock al(mLatencyLock);
1213
1214 // how long this buffer took for the round trip through the codec
1215 // NB: pipelining can/will make these times larger. e.g., if each packet
1216 // is always 2 msec and we have 3 in flight at any given time, we're going to
1217 // see "6 msec" as an answer.
1218
1219 // ignore stuff with no presentation time
1220 if (presentationUs <= 0) {
1221 ALOGV("-- returned buffer timestamp %" PRId64 " <= 0, ignore it", presentationUs);
1222 mLatencyUnknown++;
1223 return;
1224 }
1225
1226 if (mBatteryChecker != nullptr) {
1227 mBatteryChecker->onCodecActivity([this] () {
1228 mResourceManagerProxy->addResource(MediaResource::VideoBatteryResource());
1229 });
1230 }
1231
1232 BufferFlightTiming_t startdata;
1233 bool valid = false;
1234 while (mBuffersInFlight.size() > 0) {
1235 startdata = *mBuffersInFlight.begin();
1236 ALOGV("-- Looking at startdata. presentation %" PRId64 ", start %" PRId64,
1237 startdata.presentationUs, startdata.startedNs);
1238 if (startdata.presentationUs == presentationUs) {
1239 // a match
1240 ALOGV("-- match entry for %" PRId64 ", hits our frame of %" PRId64,
1241 startdata.presentationUs, presentationUs);
1242 mBuffersInFlight.pop_front();
1243 valid = true;
1244 break;
1245 } else if (startdata.presentationUs < presentationUs) {
1246 // we must have missed the match for this, drop it and keep looking
1247 ALOGV("-- drop entry for %" PRId64 ", before our frame of %" PRId64,
1248 startdata.presentationUs, presentationUs);
1249 mBuffersInFlight.pop_front();
1250 continue;
1251 } else {
1252 // head is after, so we don't have a frame for ourselves
1253 ALOGV("-- found entry for %" PRId64 ", AFTER our frame of %" PRId64
1254 " we have nothing to pair with",
1255 startdata.presentationUs, presentationUs);
1256 mLatencyUnknown++;
1257 return;
1258 }
1259 }
1260 if (!valid) {
1261 ALOGV("-- empty queue, so ignore that.");
1262 mLatencyUnknown++;
1263 return;
1264 }
1265
1266 // now start our calculations
1267 const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
1268 int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;
1269
1270 mLatencyHist.insert(latencyUs);
1271
1272 // push into the recent samples
1273 {
1274 Mutex::Autolock al(mRecentLock);
1275
1276 if (mRecentHead >= kRecentLatencyFrames) {
1277 mRecentHead = 0;
1278 }
1279 mRecentSamples[mRecentHead++] = latencyUs;
1280 }
1281 }
1282
1283 // static
status_t MediaCodec::PostAndAwaitResponse(
1285 const sp<AMessage> &msg, sp<AMessage> *response) {
1286 status_t err = msg->postAndAwaitResponse(response);
1287
1288 if (err != OK) {
1289 return err;
1290 }
1291
1292 if (!(*response)->findInt32("err", &err)) {
1293 err = OK;
1294 }
1295
1296 return err;
1297 }
1298
void MediaCodec::PostReplyWithError(const sp<AMessage> &msg, int32_t err) {
1300 sp<AReplyToken> replyID;
1301 CHECK(msg->senderAwaitsResponse(&replyID));
1302 PostReplyWithError(replyID, err);
1303 }
1304
void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) {
1306 int32_t finalErr = err;
1307 if (mReleasedByResourceManager) {
1308 // override the err code if MediaCodec has been released by ResourceManager.
1309 finalErr = DEAD_OBJECT;
1310 }
1311
1312 sp<AMessage> response = new AMessage;
1313 response->setInt32("err", finalErr);
1314 response->postReply(replyID);
1315 }
1316
static CodecBase *CreateCCodec() {
1318 return new CCodec;
1319 }
1320
1321 //static
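// Chooses the CodecBase implementation: CCodec (Codec 2.0) for "c2." names or "codec2"
// owners, ACodec (OMX) for "omx." names or the "default" owner, and MediaFilter for
// "android.filter." components.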
sp<CodecBase> MediaCodec::GetCodecBase(const AString &name, const char *owner) {
1323 if (owner) {
1324 if (strcmp(owner, "default") == 0) {
1325 return new ACodec;
1326 } else if (strncmp(owner, "codec2", 6) == 0) {
1327 return CreateCCodec();
1328 }
1329 }
1330
1331 if (name.startsWithIgnoreCase("c2.")) {
1332 return CreateCCodec();
1333 } else if (name.startsWithIgnoreCase("omx.")) {
1334 // at this time only ACodec specifies a mime type.
1335 return new ACodec;
1336 } else if (name.startsWithIgnoreCase("android.filter.")) {
1337 return new MediaFilter;
1338 } else {
1339 return NULL;
1340 }
1341 }
1342
1343 struct CodecListCache {
CodecListCache()
1345 : mCodecInfoMap{[] {
1346 const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
1347 size_t count = mcl->countCodecs();
1348 std::map<std::string, sp<MediaCodecInfo>> codecInfoMap;
1349 for (size_t i = 0; i < count; ++i) {
1350 sp<MediaCodecInfo> info = mcl->getCodecInfo(i);
1351 codecInfoMap.emplace(info->getCodecName(), info);
1352 }
1353 return codecInfoMap;
1354 }()} {
1355 }
1356
1357 const std::map<std::string, sp<MediaCodecInfo>> mCodecInfoMap;
1358 };
1359
static const CodecListCache &GetCodecListCache() {
1361 static CodecListCache sCache{};
1362 return sCache;
1363 }
1364
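// init() resolves the codec info, instantiates the CodecBase (on a dedicated looper for
// video codecs), wires up the codec and buffer callbacks, and posts kWhatInit, retrying
// after reclaiming resources if allocation fails with a resource error.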
status_t MediaCodec::init(const AString &name) {
1366 mResourceManagerProxy->init();
1367
1368 // save init parameters for reset
1369 mInitName = name;
1370
1371 // Current video decoders do not return from OMX_FillThisBuffer
1372 // quickly, violating the OpenMAX specs, until that is remedied
1373 // we need to invest in an extra looper to free the main event
1374 // queue.
1375
1376 mCodecInfo.clear();
1377
1378 bool secureCodec = false;
1379 const char *owner = "";
1380 if (!name.startsWith("android.filter.")) {
1381 status_t err = mGetCodecInfo(name, &mCodecInfo);
1382 if (err != OK) {
1383 mCodec = NULL; // remove the codec.
1384 return err;
1385 }
1386 if (mCodecInfo == nullptr) {
1387 ALOGE("Getting codec info with name '%s' failed", name.c_str());
1388 return NAME_NOT_FOUND;
1389 }
1390 secureCodec = name.endsWith(".secure");
1391 Vector<AString> mediaTypes;
1392 mCodecInfo->getSupportedMediaTypes(&mediaTypes);
1393 for (size_t i = 0; i < mediaTypes.size(); ++i) {
1394 if (mediaTypes[i].startsWith("video/")) {
1395 mIsVideo = true;
1396 break;
1397 }
1398 }
1399 owner = mCodecInfo->getOwnerName();
1400 }
1401
1402 mCodec = mGetCodecBase(name, owner);
1403 if (mCodec == NULL) {
1404 ALOGE("Getting codec base with name '%s' (owner='%s') failed", name.c_str(), owner);
1405 return NAME_NOT_FOUND;
1406 }
1407
1408 if (mIsVideo) {
1409 // video codec needs dedicated looper
1410 if (mCodecLooper == NULL) {
1411 mCodecLooper = new ALooper;
1412 mCodecLooper->setName("CodecLooper");
1413 mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
1414 }
1415
1416 mCodecLooper->registerHandler(mCodec);
1417 } else {
1418 mLooper->registerHandler(mCodec);
1419 }
1420
1421 mLooper->registerHandler(this);
1422
1423 mCodec->setCallback(
1424 std::unique_ptr<CodecBase::CodecCallback>(
1425 new CodecCallback(new AMessage(kWhatCodecNotify, this))));
1426 mBufferChannel = mCodec->getBufferChannel();
1427 mBufferChannel->setCallback(
1428 std::unique_ptr<CodecBase::BufferCallback>(
1429 new BufferCallback(new AMessage(kWhatCodecNotify, this))));
1430
1431 sp<AMessage> msg = new AMessage(kWhatInit, this);
1432 if (mCodecInfo) {
1433 msg->setObject("codecInfo", mCodecInfo);
1434 // name may be different from mCodecInfo->getCodecName() if we stripped
1435 // ".secure"
1436 }
1437 msg->setString("name", name);
1438
1439 if (mMetricsHandle != 0) {
1440 mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
1441 mediametrics_setCString(mMetricsHandle, kCodecMode,
1442 mIsVideo ? kCodecModeVideo : kCodecModeAudio);
1443 }
1444
1445 if (mIsVideo) {
1446 mBatteryChecker = new BatteryChecker(new AMessage(kWhatCheckBatteryStats, this));
1447 }
1448
1449 status_t err;
1450 std::vector<MediaResourceParcel> resources;
1451 resources.push_back(MediaResource::CodecResource(secureCodec, mIsVideo));
1452 for (int i = 0; i <= kMaxRetry; ++i) {
1453 if (i > 0) {
1454 // Don't try to reclaim resource for the first time.
1455 if (!mResourceManagerProxy->reclaimResource(resources)) {
1456 break;
1457 }
1458 }
1459
1460 sp<AMessage> response;
1461 err = PostAndAwaitResponse(msg, &response);
1462 if (!isResourceError(err)) {
1463 break;
1464 }
1465 }
1466 return err;
1467 }
1468
status_t MediaCodec::setCallback(const sp<AMessage> &callback) {
1470 sp<AMessage> msg = new AMessage(kWhatSetCallback, this);
1471 msg->setMessage("callback", callback);
1472
1473 sp<AMessage> response;
1474 return PostAndAwaitResponse(msg, &response);
1475 }
1476
status_t MediaCodec::setOnFrameRenderedNotification(const sp<AMessage> &notify) {
1478 sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
1479 msg->setMessage("on-frame-rendered", notify);
1480 return msg->post();
1481 }
1482
status_t MediaCodec::setOnFirstTunnelFrameReadyNotification(const sp<AMessage> &notify) {
1484 sp<AMessage> msg = new AMessage(kWhatSetNotification, this);
1485 msg->setMessage("first-tunnel-frame-ready", notify);
1486 return msg->post();
1487 }
1488
1489 /*
1490 * MediaFormat Shaping forward declarations
1491 * including the property name we use for control.
1492 */
1493 static int enableMediaFormatShapingDefault = 1;
1494 static const char enableMediaFormatShapingProperty[] = "debug.stagefright.enableshaping";
1495 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
1496 bool reverse);
1497
status_t MediaCodec::configure(
1499 const sp<AMessage> &format,
1500 const sp<Surface> &nativeWindow,
1501 const sp<ICrypto> &crypto,
1502 uint32_t flags) {
1503 return configure(format, nativeWindow, crypto, NULL, flags);
1504 }
1505
status_t MediaCodec::configure(
1507 const sp<AMessage> &format,
1508 const sp<Surface> &surface,
1509 const sp<ICrypto> &crypto,
1510 const sp<IDescrambler> &descrambler,
1511 uint32_t flags) {
1512 sp<AMessage> msg = new AMessage(kWhatConfigure, this);
1513
1514 // TODO: validity check log-session-id: it should be a 32-hex-digit.
1515 format->findString("log-session-id", &mLogSessionId);
1516
1517 if (mMetricsHandle != 0) {
1518 int32_t profile = 0;
1519 if (format->findInt32("profile", &profile)) {
1520 mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
1521 }
1522 int32_t level = 0;
1523 if (format->findInt32("level", &level)) {
1524 mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
1525 }
1526 mediametrics_setInt32(mMetricsHandle, kCodecEncoder,
1527 (flags & CONFIGURE_FLAG_ENCODE) ? 1 : 0);
1528
1529 mediametrics_setCString(mMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
1530 }
1531
1532 if (mIsVideo) {
1533 format->findInt32("width", &mVideoWidth);
1534 format->findInt32("height", &mVideoHeight);
1535 if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
1536 mRotationDegrees = 0;
1537 }
1538
1539 if (mMetricsHandle != 0) {
1540 mediametrics_setInt32(mMetricsHandle, kCodecWidth, mVideoWidth);
1541 mediametrics_setInt32(mMetricsHandle, kCodecHeight, mVideoHeight);
1542 mediametrics_setInt32(mMetricsHandle, kCodecRotation, mRotationDegrees);
1543 int32_t maxWidth = 0;
1544 if (format->findInt32("max-width", &maxWidth)) {
1545 mediametrics_setInt32(mMetricsHandle, kCodecMaxWidth, maxWidth);
1546 }
1547 int32_t maxHeight = 0;
1548 if (format->findInt32("max-height", &maxHeight)) {
1549 mediametrics_setInt32(mMetricsHandle, kCodecMaxHeight, maxHeight);
1550 }
1551 int32_t colorFormat = -1;
1552 if (format->findInt32("color-format", &colorFormat)) {
1553 mediametrics_setInt32(mMetricsHandle, kCodecColorFormat, colorFormat);
1554 }
1555 float frameRate = -1.0;
1556 if (format->findFloat("frame-rate", &frameRate)) {
1557 mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
1558 }
1559 float captureRate = -1.0;
1560 if (format->findFloat("capture-rate", &captureRate)) {
1561 mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
1562 }
1563 float operatingRate = -1.0;
1564 if (format->findFloat("operating-rate", &operatingRate)) {
1565 mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
1566 }
1567 int32_t priority = -1;
1568 if (format->findInt32("priority", &priority)) {
1569 mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
1570 }
1571 }
1572
1573 // Prevent possible integer overflow in downstream code.
1574 if (mVideoWidth < 0 || mVideoHeight < 0 ||
1575 (uint64_t)mVideoWidth * mVideoHeight > (uint64_t)INT32_MAX / 4) {
1576 ALOGE("Invalid size(s), width=%d, height=%d", mVideoWidth, mVideoHeight);
1577 return BAD_VALUE;
1578 }
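        // (One reading of the bound above, stated as an assumption rather than a
        //  guarantee: capping width * height at INT32_MAX / 4 keeps even a
        //  4-bytes-per-pixel size computation within int32_t range downstream;
        //  e.g. 1920 * 1080 = 2,073,600, well under INT32_MAX / 4 = 536,870,911.)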
1579
1580 } else {
1581 if (mMetricsHandle != 0) {
1582 int32_t channelCount;
1583 if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
1584 mediametrics_setInt32(mMetricsHandle, kCodecChannelCount, channelCount);
1585 }
1586 int32_t sampleRate;
1587 if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
1588 mediametrics_setInt32(mMetricsHandle, kCodecSampleRate, sampleRate);
1589 }
1590 }
1591 }
1592
1593 if (flags & CONFIGURE_FLAG_ENCODE) {
1594 int8_t enableShaping = property_get_bool(enableMediaFormatShapingProperty,
1595 enableMediaFormatShapingDefault);
1596 if (!enableShaping) {
1597 ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
1598 if (mMetricsHandle != 0) {
1599 mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, -1);
1600 }
1601 } else {
1602 (void) shapeMediaFormat(format, flags);
1603 // XXX: do we want to do this regardless of shaping enablement?
1604 mapFormat(mComponentName, format, nullptr, false);
1605 }
1606 }
1607
1608 // push min/max QP to MediaMetrics after shaping
1609 if (mIsVideo && mMetricsHandle != 0) {
1610 int32_t qpIMin = -1;
1611 if (format->findInt32("video-qp-i-min", &qpIMin)) {
1612 mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
1613 }
1614 int32_t qpIMax = -1;
1615 if (format->findInt32("video-qp-i-max", &qpIMax)) {
1616 mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
1617 }
1618 int32_t qpPMin = -1;
1619 if (format->findInt32("video-qp-p-min", &qpPMin)) {
1620 mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
1621 }
1622 int32_t qpPMax = -1;
1623 if (format->findInt32("video-qp-p-max", &qpPMax)) {
1624 mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
1625 }
1626 int32_t qpBMin = -1;
1627 if (format->findInt32("video-qp-b-min", &qpBMin)) {
1628 mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
1629 }
1630 int32_t qpBMax = -1;
1631 if (format->findInt32("video-qp-b-max", &qpBMax)) {
1632 mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
1633 }
1634 }
1635
1636 updateLowLatency(format);
1637
1638 msg->setMessage("format", format);
1639 msg->setInt32("flags", flags);
1640 msg->setObject("surface", surface);
1641
1642 if (crypto != NULL || descrambler != NULL) {
1643 if (crypto != NULL) {
1644 msg->setPointer("crypto", crypto.get());
1645 } else {
1646 msg->setPointer("descrambler", descrambler.get());
1647 }
1648 if (mMetricsHandle != 0) {
1649 mediametrics_setInt32(mMetricsHandle, kCodecCrypto, 1);
1650 }
1651 } else if (mFlags & kFlagIsSecure) {
1652 ALOGW("Crypto or descrambler should be given for secure codec");
1653 }
1654
1655 // save msg for reset
1656 mConfigureMsg = msg;
1657
1658 sp<AMessage> callback = mCallback;
1659
1660 status_t err;
1661 std::vector<MediaResourceParcel> resources;
1662 resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
1663 // Don't know the buffer size at this point, but it's fine to use 1 because
1664 // the reclaimResource call doesn't consider the requester's buffer size for now.
1665 resources.push_back(MediaResource::GraphicMemoryResource(1));
1666 for (int i = 0; i <= kMaxRetry; ++i) {
1667 sp<AMessage> response;
1668 err = PostAndAwaitResponse(msg, &response);
1669 if (err != OK && err != INVALID_OPERATION) {
1670 if (isResourceError(err) && !mResourceManagerProxy->reclaimResource(resources)) {
1671 break;
1672 }
1673             // MediaCodec now sets its state to UNINITIALIZED upon any fatal error.
1674             // To maintain backward-compatibility, do a reset() to put the codec
1675             // back into the INITIALIZED state.
1676             // But don't reset if the err is INVALID_OPERATION, which means
1677             // the configure failure is due to a wrong state.
1678
1679 ALOGE("configure failed with err 0x%08x, resetting...", err);
1680 status_t err2 = reset();
1681 if (err2 != OK) {
1682 ALOGE("retrying configure: failed to reset codec (%08x)", err2);
1683 break;
1684 }
1685 if (callback != nullptr) {
1686 err2 = setCallback(callback);
1687 if (err2 != OK) {
1688 ALOGE("retrying configure: failed to set callback (%08x)", err2);
1689 break;
1690 }
1691 }
1692 }
1693 if (!isResourceError(err)) {
1694 break;
1695 }
1696 }
1697
1698 return err;
1699 }
1700
1701 // Media Format Shaping support
1702 //
1703
1704 static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
1705 static bool sIsHandheld = true;
1706
1707 static bool connectFormatShaper() {
1708 static std::once_flag sCheckOnce;
1709
1710 ALOGV("connectFormatShaper...");
1711
1712 std::call_once(sCheckOnce, [&](){
1713
1714 void *libHandle = NULL;
1715 nsecs_t loading_started = systemTime(SYSTEM_TIME_MONOTONIC);
1716
1717 // prefer any copy in the mainline module
1718 //
1719 android_namespace_t *mediaNs = android_get_exported_namespace("com_android_media");
1720 AString libraryName = "libmediaformatshaper.so";
1721
1722 if (mediaNs != NULL) {
1723 static const android_dlextinfo dlextinfo = {
1724 .flags = ANDROID_DLEXT_USE_NAMESPACE,
1725 .library_namespace = mediaNs,
1726 };
1727
1728 AString libraryMainline = "/apex/com.android.media/";
1729 #if __LP64__
1730 libraryMainline.append("lib64/");
1731 #else
1732 libraryMainline.append("lib/");
1733 #endif
1734 libraryMainline.append(libraryName);
1735
1736 libHandle = android_dlopen_ext(libraryMainline.c_str(), RTLD_NOW|RTLD_NODELETE,
1737 &dlextinfo);
1738
1739 if (libHandle != NULL) {
1740 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
1741 dlsym(libHandle, "shaper_ops");
1742 } else {
1743 ALOGW("connectFormatShaper: unable to load mainline formatshaper %s",
1744 libraryMainline.c_str());
1745 }
1746 } else {
1747 ALOGV("connectFormatShaper: couldn't find media namespace.");
1748 }
1749
1750 // fall back to the system partition, if present.
1751 //
1752 if (sShaperOps == NULL) {
1753
1754 libHandle = dlopen(libraryName.c_str(), RTLD_NOW|RTLD_NODELETE);
1755
1756 if (libHandle != NULL) {
1757 sShaperOps = (android::mediaformatshaper::FormatShaperOps_t*)
1758 dlsym(libHandle, "shaper_ops");
1759 } else {
1760 ALOGW("connectFormatShaper: unable to load formatshaper %s", libraryName.c_str());
1761 }
1762 }
1763
1764 if (sShaperOps != nullptr
1765 && sShaperOps->version != android::mediaformatshaper::SHAPER_VERSION_V1) {
1766 ALOGW("connectFormatShaper: unhandled version ShaperOps: %d, DISABLED",
1767 sShaperOps->version);
1768 sShaperOps = nullptr;
1769 }
1770
1771 if (sShaperOps != nullptr) {
1772 ALOGV("connectFormatShaper: connected to library %s", libraryName.c_str());
1773 }
1774
1775 nsecs_t loading_finished = systemTime(SYSTEM_TIME_MONOTONIC);
1776 ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
1777 (loading_finished - loading_started)/1000);
1778
1779
1780 // we also want to know whether this is a handheld device
1781 // start with assumption that the device is handheld.
1782 sIsHandheld = true;
1783 sp<IServiceManager> serviceMgr = defaultServiceManager();
1784 sp<content::pm::IPackageManagerNative> packageMgr;
1785 if (serviceMgr.get() != nullptr) {
1786 sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
1787 packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
1788 }
1789 // if we didn't get serviceMgr, we'll leave packageMgr as default null
1790 if (packageMgr != nullptr) {
1791
1792 // MUST have these
1793 static const String16 featuresNeeded[] = {
1794 String16("android.hardware.touchscreen")
1795 };
1796 // these must be present to be a handheld
1797 for (::android::String16 required : featuresNeeded) {
1798 bool hasFeature = false;
1799 binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
1800 if (!status.isOk()) {
1801 ALOGE("%s: hasSystemFeature failed: %s",
1802 __func__, status.exceptionMessage().c_str());
1803 continue;
1804 }
1805 ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
1806 if (!hasFeature) {
1807 ALOGV("... which means we are not handheld");
1808 sIsHandheld = false;
1809 break;
1810 }
1811 }
1812
1813 // MUST NOT have these
1814 static const String16 featuresDisallowed[] = {
1815 String16("android.hardware.type.automotive"),
1816 String16("android.hardware.type.television"),
1817 String16("android.hardware.type.watch")
1818 };
1819 // any of these present -- we aren't a handheld
1820 for (::android::String16 forbidden : featuresDisallowed) {
1821 bool hasFeature = false;
1822 binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
1823 if (!status.isOk()) {
1824 ALOGE("%s: hasSystemFeature failed: %s",
1825 __func__, status.exceptionMessage().c_str());
1826 continue;
1827 }
1828 ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
1829 if (hasFeature) {
1830 ALOGV("... which means we are not handheld");
1831 sIsHandheld = false;
1832 break;
1833 }
1834 }
1835 }
1836
1837 });
1838
1839 return true;
1840 }
1841
1842
1843 #if 0
1844 // a construct to force the above dlopen() to run very early.
1845 // goal: so the dlopen() doesn't happen on critical path of latency sensitive apps
1846 // failure of this means that cold start of those apps is slower by the time it takes to dlopen()
1847 // TODO(b/183454066): tradeoffs between memory of early loading vs latency of late loading
1848 //
1849 static bool forceEarlyLoadingShaper = connectFormatShaper();
1850 #endif
1851
1852 // parse the codec's properties: mapping, whether it meets min quality, etc
1853 // and pass them into the video quality code
1854 //
1855 static void loadCodecProperties(mediaformatshaper::shaperHandle_t shaperHandle,
1856 sp<MediaCodecInfo> codecInfo, AString mediaType) {
1857
1858 sp<MediaCodecInfo::Capabilities> capabilities =
1859 codecInfo->getCapabilitiesFor(mediaType.c_str());
1860 if (capabilities == nullptr) {
1861 ALOGI("no capabilities as part of the codec?");
1862 } else {
1863 const sp<AMessage> &details = capabilities->getDetails();
1864 AString mapTarget;
1865 int count = details->countEntries();
1866 for(int ix = 0; ix < count; ix++) {
1867 AMessage::Type entryType;
1868 const char *mapSrc = details->getEntryNameAt(ix, &entryType);
1869 // XXX: re-use ix from getEntryAt() to avoid additional findXXX() invocation
1870 //
1871 static const char *featurePrefix = "feature-";
1872 static const int featurePrefixLen = strlen(featurePrefix);
1873 static const char *tuningPrefix = "tuning-";
1874 static const int tuningPrefixLen = strlen(tuningPrefix);
1875 static const char *mappingPrefix = "mapping-";
1876 static const int mappingPrefixLen = strlen(mappingPrefix);
1877
1878 if (mapSrc == NULL) {
1879 continue;
1880 } else if (!strncmp(mapSrc, featurePrefix, featurePrefixLen)) {
1881 int32_t intValue;
1882 if (details->findInt32(mapSrc, &intValue)) {
1883 ALOGV("-- feature '%s' -> %d", mapSrc, intValue);
1884 (void)(sShaperOps->setFeature)(shaperHandle, &mapSrc[featurePrefixLen],
1885 intValue);
1886 }
1887 continue;
1888 } else if (!strncmp(mapSrc, tuningPrefix, tuningPrefixLen)) {
1889 AString value;
1890 if (details->findString(mapSrc, &value)) {
1891 ALOGV("-- tuning '%s' -> '%s'", mapSrc, value.c_str());
1892 (void)(sShaperOps->setTuning)(shaperHandle, &mapSrc[tuningPrefixLen],
1893 value.c_str());
1894 }
1895 continue;
1896 } else if (!strncmp(mapSrc, mappingPrefix, mappingPrefixLen)) {
1897 AString target;
1898 if (details->findString(mapSrc, &target)) {
1899 ALOGV("-- mapping %s: map %s to %s", mapSrc, &mapSrc[mappingPrefixLen],
1900 target.c_str());
1901 // key is really "kind-key"
1902 // separate that, so setMap() sees the triple kind, key, value
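                    // For illustration only (a hypothetical entry, not taken from a
                    // real codec definition): a detail "mapping-vq-foo" with value
                    // "bar" is split below into setMap(shaperHandle, "vq", "foo", "bar").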
1903 const char *kind = &mapSrc[mappingPrefixLen];
1904 const char *sep = strchr(kind, '-');
1905                     if (sep != NULL) {
1906                         const char *key = sep + 1;
1907                         std::string xkind = std::string(kind, sep - kind);
1908 (void)(sShaperOps->setMap)(shaperHandle, xkind.c_str(),
1909 key, target.c_str());
1910 }
1911 }
1912 }
1913 }
1914 }
1915
1916 // we also carry in the codec description whether we are on a handheld device.
1917 // this info is eventually used by both the Codec and the C2 machinery to inform
1918 // the underlying codec whether to do any shaping.
1919 //
1920 if (sIsHandheld) {
1921         // set if we are indeed a handheld device (or, in future, 'any eligible device');
1922         // left unset on devices that aren't eligible for minimum quality enforcement.
1923 (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
1924         // strictly speaking it's a tuning, but tunings are strings while features store ints
1925 (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
1926 }
1927 }
1928
1929 status_t MediaCodec::setupFormatShaper(AString mediaType) {
1930 ALOGV("setupFormatShaper: initializing shaper data for codec %s mediaType %s",
1931 mComponentName.c_str(), mediaType.c_str());
1932
1933 nsecs_t mapping_started = systemTime(SYSTEM_TIME_MONOTONIC);
1934
1935 // someone might have beaten us to it.
1936 mediaformatshaper::shaperHandle_t shaperHandle;
1937 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
1938 if (shaperHandle != nullptr) {
1939 ALOGV("shaperhandle %p -- no initialization needed", shaperHandle);
1940 return OK;
1941 }
1942
1943 // we get to build & register one
1944 shaperHandle = sShaperOps->createShaper(mComponentName.c_str(), mediaType.c_str());
1945 if (shaperHandle == nullptr) {
1946         ALOGW("unable to create a shaper for codec %s mediaType %s",
1947 mComponentName.c_str(), mediaType.c_str());
1948 return OK;
1949 }
1950
1951 (void) loadCodecProperties(shaperHandle, mCodecInfo, mediaType);
1952
1953 shaperHandle = sShaperOps->registerShaper(shaperHandle,
1954 mComponentName.c_str(), mediaType.c_str());
1955
1956 nsecs_t mapping_finished = systemTime(SYSTEM_TIME_MONOTONIC);
1957 ALOGV("setupFormatShaper: populated shaper node for codec %s: %" PRId64 " us",
1958 mComponentName.c_str(), (mapping_finished - mapping_started)/1000);
1959
1960 return OK;
1961 }
1962
1963
1964 // Format Shaping
1965 // Mapping and Manipulation of encoding parameters
1966 //
1967 // All of these decisions are pushed into the shaper instead of here within MediaCodec.
1968 // This includes decisions based on whether the codec implements minimum quality bars
1969 // itself or needs to be shaped outside of the codec.
1970 // This keeps all those decisions in one place.
1971 // It also means that we push some extra decision information (is this a handheld device
1972 // or one that is otherwise eligible for minimum quality manipulation, which generational
1973 // quality target is in force, etc). This allows those values to be cached in the
1974 // per-codec structures that are built once per process instead of once for each
1975 // codec instantiation.
1976 //
1977
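// A rough sketch of the flow described above, using the helpers defined in this
// file (an illustration of the call order, not a contract):
//
//     connectFormatShaper();                              // dlopen/dlsym, once per process
//     shaperHandle = sShaperOps->findShaper(codecName, mediaType);
//     if (shaperHandle == nullptr) {
//         setupFormatShaper(mediaType);                   // createShaper + loadCodecProperties + registerShaper
//         shaperHandle = sShaperOps->findShaper(codecName, mediaType);
//     }
//     (*sShaperOps->shapeFormat)(shaperHandle, ndkFormat, flags);
//     format->extend(updatedFormat->changesFrom(format)); // fold the shaped deltas back in
//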
1978 status_t MediaCodec::shapeMediaFormat(
1979 const sp<AMessage> &format,
1980 uint32_t flags) {
1981 ALOGV("shapeMediaFormat entry");
1982
1983 if (!(flags & CONFIGURE_FLAG_ENCODE)) {
1984 ALOGW("shapeMediaFormat: not encoder");
1985 return OK;
1986 }
1987 if (mCodecInfo == NULL) {
1988 ALOGW("shapeMediaFormat: no codecinfo");
1989 return OK;
1990 }
1991
1992 AString mediaType;
1993 if (!format->findString("mime", &mediaType)) {
1994 ALOGW("shapeMediaFormat: no mediaType information");
1995 return OK;
1996 }
1997
1998 // make sure we have the function entry points for the shaper library
1999 //
2000
2001 connectFormatShaper();
2002 if (sShaperOps == nullptr) {
2003 ALOGW("shapeMediaFormat: no MediaFormatShaper hooks available");
2004 return OK;
2005 }
2006
2007 // find the shaper information for this codec+mediaType pair
2008 //
2009 mediaformatshaper::shaperHandle_t shaperHandle;
2010 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2011 if (shaperHandle == nullptr) {
2012 setupFormatShaper(mediaType);
2013 shaperHandle = sShaperOps->findShaper(mComponentName.c_str(), mediaType.c_str());
2014 }
2015 if (shaperHandle == nullptr) {
2016 ALOGW("shapeMediaFormat: no handler for codec %s mediatype %s",
2017 mComponentName.c_str(), mediaType.c_str());
2018 return OK;
2019 }
2020
2021 // run the shaper
2022 //
2023
2024 ALOGV("Shaping input: %s", format->debugString(0).c_str());
2025
2026 sp<AMessage> updatedFormat = format->dup();
2027 AMediaFormat *updatedNdkFormat = AMediaFormat_fromMsg(&updatedFormat);
2028
2029 int result = (*sShaperOps->shapeFormat)(shaperHandle, updatedNdkFormat, flags);
2030 if (result == 0) {
2031 AMediaFormat_getFormat(updatedNdkFormat, &updatedFormat);
2032
2033 sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
2034 size_t changeCount = deltas->countEntries();
2035 ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
2036 if (mMetricsHandle != 0) {
2037 mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, changeCount);
2038 }
2039 if (changeCount > 0) {
2040 if (mMetricsHandle != 0) {
2041 // save some old properties before we fold in the new ones
2042 int32_t bitrate;
2043 if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
2044 mediametrics_setInt32(mMetricsHandle, kCodecOriginalBitrate, bitrate);
2045 }
2046 int32_t qpIMin = -1;
2047 if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
2048 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
2049 }
2050 int32_t qpIMax = -1;
2051 if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
2052 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
2053 }
2054 int32_t qpPMin = -1;
2055 if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
2056 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
2057 }
2058 int32_t qpPMax = -1;
2059 if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
2060 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
2061 }
2062 int32_t qpBMin = -1;
2063 if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
2064 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
2065 }
2066 int32_t qpBMax = -1;
2067 if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
2068 mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
2069 }
2070 }
2071 // NB: for any field in both format and deltas, the deltas copy wins
2072 format->extend(deltas);
2073 }
2074 }
2075
2076 AMediaFormat_delete(updatedNdkFormat);
2077 return OK;
2078 }
2079
2080 static void mapFormat(AString componentName, const sp<AMessage> &format, const char *kind,
2081 bool reverse) {
2082 AString mediaType;
2083 if (!format->findString("mime", &mediaType)) {
2084 ALOGW("mapFormat: no mediaType information");
2085 return;
2086 }
2087 ALOGV("mapFormat: codec %s mediatype %s kind %s reverse %d", componentName.c_str(),
2088 mediaType.c_str(), kind ? kind : "<all>", reverse);
2089
2090 // make sure we have the function entry points for the shaper library
2091 //
2092
2093 #if 0
2094     // let's play it faster: only do mapping if we've already loaded the library
2095 connectFormatShaper();
2096 #endif
2097 if (sShaperOps == nullptr) {
2098 ALOGV("mapFormat: no MediaFormatShaper hooks available");
2099 return;
2100 }
2101
2102 // find the shaper information for this codec+mediaType pair
2103 //
2104 mediaformatshaper::shaperHandle_t shaperHandle;
2105 shaperHandle = sShaperOps->findShaper(componentName.c_str(), mediaType.c_str());
2106 if (shaperHandle == nullptr) {
2107 ALOGV("mapFormat: no shaper handle");
2108 return;
2109 }
2110
2111 const char **mappings;
2112 if (reverse)
2113 mappings = sShaperOps->getReverseMappings(shaperHandle, kind);
2114 else
2115 mappings = sShaperOps->getMappings(shaperHandle, kind);
2116
2117 if (mappings == nullptr) {
2118 ALOGV("no mappings returned");
2119 return;
2120 }
2121
2122 ALOGV("Pre-mapping: %s", format->debugString(2).c_str());
2123 // do the mapping
2124 //
2125 int entries = format->countEntries();
2126 for (int i = 0; ; i += 2) {
2127 if (mappings[i] == nullptr) {
2128 break;
2129 }
2130
2131 size_t ix = format->findEntryByName(mappings[i]);
2132 if (ix < entries) {
2133 ALOGV("map '%s' to '%s'", mappings[i], mappings[i+1]);
2134 status_t status = format->setEntryNameAt(ix, mappings[i+1]);
2135 if (status != OK) {
2136 ALOGW("Unable to map from '%s' to '%s': status %d",
2137 mappings[i], mappings[i+1], status);
2138 }
2139 }
2140 }
2141 ALOGV("Post-mapping: %s", format->debugString(2).c_str());
2142
2143
2144 // reclaim the mapping memory
2145 for (int i = 0; ; i += 2) {
2146 if (mappings[i] == nullptr) {
2147 break;
2148 }
2149 free((void*)mappings[i]);
2150 free((void*)mappings[i + 1]);
2151 }
2152 free(mappings);
2153 mappings = nullptr;
2154 }
2155
2156 //
2157 // end of Format Shaping hooks within MediaCodec
2158 //
2159
2160 status_t MediaCodec::releaseCrypto()
2161 {
2162 ALOGV("releaseCrypto");
2163
2164 sp<AMessage> msg = new AMessage(kWhatDrmReleaseCrypto, this);
2165
2166 sp<AMessage> response;
2167 status_t status = msg->postAndAwaitResponse(&response);
2168
2169 if (status == OK && response != NULL) {
2170 CHECK(response->findInt32("status", &status));
2171 ALOGV("releaseCrypto ret: %d ", status);
2172 }
2173 else {
2174 ALOGE("releaseCrypto err: %d", status);
2175 }
2176
2177 return status;
2178 }
2179
2180 void MediaCodec::onReleaseCrypto(const sp<AMessage>& msg)
2181 {
2182 status_t status = INVALID_OPERATION;
2183 if (mCrypto != NULL) {
2184 ALOGV("onReleaseCrypto: mCrypto: %p (%d)", mCrypto.get(), mCrypto->getStrongCount());
2185 mBufferChannel->setCrypto(NULL);
2186 // TODO change to ALOGV
2187 ALOGD("onReleaseCrypto: [before clear] mCrypto: %p (%d)",
2188 mCrypto.get(), mCrypto->getStrongCount());
2189 mCrypto.clear();
2190
2191 status = OK;
2192 }
2193 else {
2194 ALOGW("onReleaseCrypto: No mCrypto. err: %d", status);
2195 }
2196
2197 sp<AMessage> response = new AMessage;
2198 response->setInt32("status", status);
2199
2200 sp<AReplyToken> replyID;
2201 CHECK(msg->senderAwaitsResponse(&replyID));
2202 response->postReply(replyID);
2203 }
2204
2205 status_t MediaCodec::setInputSurface(
2206 const sp<PersistentSurface> &surface) {
2207 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
2208 msg->setObject("input-surface", surface.get());
2209
2210 sp<AMessage> response;
2211 return PostAndAwaitResponse(msg, &response);
2212 }
2213
2214 status_t MediaCodec::setSurface(const sp<Surface> &surface) {
2215 sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
2216 msg->setObject("surface", surface);
2217
2218 sp<AMessage> response;
2219 return PostAndAwaitResponse(msg, &response);
2220 }
2221
2222 status_t MediaCodec::createInputSurface(
2223 sp<IGraphicBufferProducer>* bufferProducer) {
2224 sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, this);
2225
2226 sp<AMessage> response;
2227 status_t err = PostAndAwaitResponse(msg, &response);
2228 if (err == NO_ERROR) {
2229 // unwrap the sp<IGraphicBufferProducer>
2230 sp<RefBase> obj;
2231 bool found = response->findObject("input-surface", &obj);
2232 CHECK(found);
2233 sp<BufferProducerWrapper> wrapper(
2234 static_cast<BufferProducerWrapper*>(obj.get()));
2235 *bufferProducer = wrapper->getBufferProducer();
2236 } else {
2237 ALOGW("createInputSurface failed, err=%d", err);
2238 }
2239 return err;
2240 }
2241
2242 uint64_t MediaCodec::getGraphicBufferSize() {
2243 if (!mIsVideo) {
2244 return 0;
2245 }
2246
2247 uint64_t size = 0;
2248 size_t portNum = sizeof(mPortBuffers) / sizeof((mPortBuffers)[0]);
2249 for (size_t i = 0; i < portNum; ++i) {
2250 // TODO: this is just an estimation, we should get the real buffer size from ACodec.
2251 size += mPortBuffers[i].size() * mVideoWidth * mVideoHeight * 3 / 2;
2252 }
2253 return size;
2254 }
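// For a sense of scale under the YUV420-style 3/2 bytes-per-pixel estimate above:
// a single 1920x1080 buffer is counted as 1920 * 1080 * 3 / 2 = 3,110,400 bytes,
// i.e. roughly 3 MB per buffer on each port.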
2255
2256 status_t MediaCodec::start() {
2257 sp<AMessage> msg = new AMessage(kWhatStart, this);
2258
2259 sp<AMessage> callback;
2260
2261 status_t err;
2262 std::vector<MediaResourceParcel> resources;
2263 resources.push_back(MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
2264 // Don't know the buffer size at this point, but it's fine to use 1 because
2265 // the reclaimResource call doesn't consider the requester's buffer size for now.
2266 resources.push_back(MediaResource::GraphicMemoryResource(1));
2267 for (int i = 0; i <= kMaxRetry; ++i) {
2268 if (i > 0) {
2269             // Don't try to reclaim resources on the first attempt.
2270 if (!mResourceManagerProxy->reclaimResource(resources)) {
2271 break;
2272 }
2273 // Recover codec from previous error before retry start.
2274 err = reset();
2275 if (err != OK) {
2276 ALOGE("retrying start: failed to reset codec");
2277 break;
2278 }
2279 sp<AMessage> response;
2280 err = PostAndAwaitResponse(mConfigureMsg, &response);
2281 if (err != OK) {
2282 ALOGE("retrying start: failed to configure codec");
2283 break;
2284 }
2285 if (callback != nullptr) {
2286 err = setCallback(callback);
2287 if (err != OK) {
2288 ALOGE("retrying start: failed to set callback");
2289 break;
2290 }
2291                 ALOGD("succeeded to set callback for reclaim");
2292 }
2293 }
2294
2295 // Keep callback message after the first iteration if necessary.
2296 if (i == 0 && mCallback != nullptr && mFlags & kFlagIsAsync) {
2297 callback = mCallback;
2298 ALOGD("keep callback message for reclaim");
2299 }
2300
2301 sp<AMessage> response;
2302 err = PostAndAwaitResponse(msg, &response);
2303 if (!isResourceError(err)) {
2304 break;
2305 }
2306 }
2307 return err;
2308 }
2309
2310 status_t MediaCodec::stop() {
2311 sp<AMessage> msg = new AMessage(kWhatStop, this);
2312
2313 sp<AMessage> response;
2314 return PostAndAwaitResponse(msg, &response);
2315 }
2316
2317 bool MediaCodec::hasPendingBuffer(int portIndex) {
2318 return std::any_of(
2319 mPortBuffers[portIndex].begin(), mPortBuffers[portIndex].end(),
2320 [](const BufferInfo &info) { return info.mOwnedByClient; });
2321 }
2322
2323 bool MediaCodec::hasPendingBuffer() {
2324 return hasPendingBuffer(kPortIndexInput) || hasPendingBuffer(kPortIndexOutput);
2325 }
2326
2327 status_t MediaCodec::reclaim(bool force) {
2328 ALOGD("MediaCodec::reclaim(%p) %s", this, mInitName.c_str());
2329 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2330 msg->setInt32("reclaimed", 1);
2331 msg->setInt32("force", force ? 1 : 0);
2332
2333 sp<AMessage> response;
2334 status_t ret = PostAndAwaitResponse(msg, &response);
2335 if (ret == -ENOENT) {
2336 ALOGD("MediaCodec looper is gone, skip reclaim");
2337 ret = OK;
2338 }
2339 return ret;
2340 }
2341
2342 status_t MediaCodec::release() {
2343 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2344 sp<AMessage> response;
2345 return PostAndAwaitResponse(msg, &response);
2346 }
2347
2348 status_t MediaCodec::releaseAsync(const sp<AMessage> &notify) {
2349 sp<AMessage> msg = new AMessage(kWhatRelease, this);
2350 msg->setMessage("async", notify);
2351 sp<AMessage> response;
2352 return PostAndAwaitResponse(msg, &response);
2353 }
2354
2355 status_t MediaCodec::reset() {
2356 /* When external-facing MediaCodec object is created,
2357 it is already initialized. Thus, reset is essentially
2358 release() followed by init(), plus clearing the state */
2359
2360 status_t err = release();
2361
2362 // unregister handlers
2363 if (mCodec != NULL) {
2364 if (mCodecLooper != NULL) {
2365 mCodecLooper->unregisterHandler(mCodec->id());
2366 } else {
2367 mLooper->unregisterHandler(mCodec->id());
2368 }
2369 mCodec = NULL;
2370 }
2371 mLooper->unregisterHandler(id());
2372
2373 mFlags = 0; // clear all flags
2374 mStickyError = OK;
2375
2376 // reset state not reset by setState(UNINITIALIZED)
2377 mDequeueInputReplyID = 0;
2378 mDequeueOutputReplyID = 0;
2379 mDequeueInputTimeoutGeneration = 0;
2380 mDequeueOutputTimeoutGeneration = 0;
2381 mHaveInputSurface = false;
2382
2383 if (err == OK) {
2384 err = init(mInitName);
2385 }
2386 return err;
2387 }
2388
2389 status_t MediaCodec::queueInputBuffer(
2390 size_t index,
2391 size_t offset,
2392 size_t size,
2393 int64_t presentationTimeUs,
2394 uint32_t flags,
2395 AString *errorDetailMsg) {
2396 if (errorDetailMsg != NULL) {
2397 errorDetailMsg->clear();
2398 }
2399
2400 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2401 msg->setSize("index", index);
2402 msg->setSize("offset", offset);
2403 msg->setSize("size", size);
2404 msg->setInt64("timeUs", presentationTimeUs);
2405 msg->setInt32("flags", flags);
2406 msg->setPointer("errorDetailMsg", errorDetailMsg);
2407
2408 sp<AMessage> response;
2409 return PostAndAwaitResponse(msg, &response);
2410 }
2411
2412 status_t MediaCodec::queueSecureInputBuffer(
2413 size_t index,
2414 size_t offset,
2415 const CryptoPlugin::SubSample *subSamples,
2416 size_t numSubSamples,
2417 const uint8_t key[16],
2418 const uint8_t iv[16],
2419 CryptoPlugin::Mode mode,
2420 const CryptoPlugin::Pattern &pattern,
2421 int64_t presentationTimeUs,
2422 uint32_t flags,
2423 AString *errorDetailMsg) {
2424 if (errorDetailMsg != NULL) {
2425 errorDetailMsg->clear();
2426 }
2427
2428 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2429 msg->setSize("index", index);
2430 msg->setSize("offset", offset);
2431 msg->setPointer("subSamples", (void *)subSamples);
2432 msg->setSize("numSubSamples", numSubSamples);
2433 msg->setPointer("key", (void *)key);
2434 msg->setPointer("iv", (void *)iv);
2435 msg->setInt32("mode", mode);
2436 msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
2437 msg->setInt32("skipBlocks", pattern.mSkipBlocks);
2438 msg->setInt64("timeUs", presentationTimeUs);
2439 msg->setInt32("flags", flags);
2440 msg->setPointer("errorDetailMsg", errorDetailMsg);
2441
2442 sp<AMessage> response;
2443 status_t err = PostAndAwaitResponse(msg, &response);
2444
2445 return err;
2446 }
2447
2448 status_t MediaCodec::queueBuffer(
2449 size_t index,
2450 const std::shared_ptr<C2Buffer> &buffer,
2451 int64_t presentationTimeUs,
2452 uint32_t flags,
2453 const sp<AMessage> &tunings,
2454 AString *errorDetailMsg) {
2455 if (errorDetailMsg != NULL) {
2456 errorDetailMsg->clear();
2457 }
2458
2459 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2460 msg->setSize("index", index);
2461 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
2462 new WrapperObject<std::shared_ptr<C2Buffer>>{buffer}};
2463 msg->setObject("c2buffer", obj);
2464 msg->setInt64("timeUs", presentationTimeUs);
2465 msg->setInt32("flags", flags);
2466 msg->setMessage("tunings", tunings);
2467 msg->setPointer("errorDetailMsg", errorDetailMsg);
2468
2469 sp<AMessage> response;
2470 status_t err = PostAndAwaitResponse(msg, &response);
2471
2472 return err;
2473 }
2474
2475 status_t MediaCodec::queueEncryptedBuffer(
2476 size_t index,
2477 const sp<hardware::HidlMemory> &buffer,
2478 size_t offset,
2479 const CryptoPlugin::SubSample *subSamples,
2480 size_t numSubSamples,
2481 const uint8_t key[16],
2482 const uint8_t iv[16],
2483 CryptoPlugin::Mode mode,
2484 const CryptoPlugin::Pattern &pattern,
2485 int64_t presentationTimeUs,
2486 uint32_t flags,
2487 const sp<AMessage> &tunings,
2488 AString *errorDetailMsg) {
2489 if (errorDetailMsg != NULL) {
2490 errorDetailMsg->clear();
2491 }
2492
2493 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
2494 msg->setSize("index", index);
2495 sp<WrapperObject<sp<hardware::HidlMemory>>> memory{
2496 new WrapperObject<sp<hardware::HidlMemory>>{buffer}};
2497 msg->setObject("memory", memory);
2498 msg->setSize("offset", offset);
2499 msg->setPointer("subSamples", (void *)subSamples);
2500 msg->setSize("numSubSamples", numSubSamples);
2501 msg->setPointer("key", (void *)key);
2502 msg->setPointer("iv", (void *)iv);
2503 msg->setInt32("mode", mode);
2504 msg->setInt32("encryptBlocks", pattern.mEncryptBlocks);
2505 msg->setInt32("skipBlocks", pattern.mSkipBlocks);
2506 msg->setInt64("timeUs", presentationTimeUs);
2507 msg->setInt32("flags", flags);
2508 msg->setMessage("tunings", tunings);
2509 msg->setPointer("errorDetailMsg", errorDetailMsg);
2510
2511 sp<AMessage> response;
2512 status_t err = PostAndAwaitResponse(msg, &response);
2513
2514 return err;
2515 }
2516
2517 status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
2518 sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this);
2519 msg->setInt64("timeoutUs", timeoutUs);
2520
2521 sp<AMessage> response;
2522 status_t err;
2523 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2524 return err;
2525 }
2526
2527 CHECK(response->findSize("index", index));
2528
2529 return OK;
2530 }
2531
2532 status_t MediaCodec::dequeueOutputBuffer(
2533 size_t *index,
2534 size_t *offset,
2535 size_t *size,
2536 int64_t *presentationTimeUs,
2537 uint32_t *flags,
2538 int64_t timeoutUs) {
2539 sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this);
2540 msg->setInt64("timeoutUs", timeoutUs);
2541
2542 sp<AMessage> response;
2543 status_t err;
2544 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2545 return err;
2546 }
2547
2548 CHECK(response->findSize("index", index));
2549 CHECK(response->findSize("offset", offset));
2550 CHECK(response->findSize("size", size));
2551 CHECK(response->findInt64("timeUs", presentationTimeUs));
2552 CHECK(response->findInt32("flags", (int32_t *)flags));
2553
2554 return OK;
2555 }
2556
2557 status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
2558 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
2559 msg->setSize("index", index);
2560 msg->setInt32("render", true);
2561
2562 sp<AMessage> response;
2563 return PostAndAwaitResponse(msg, &response);
2564 }
2565
2566 status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
2567 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
2568 msg->setSize("index", index);
2569 msg->setInt32("render", true);
2570 msg->setInt64("timestampNs", timestampNs);
2571
2572 sp<AMessage> response;
2573 return PostAndAwaitResponse(msg, &response);
2574 }
2575
2576 status_t MediaCodec::releaseOutputBuffer(size_t index) {
2577 sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this);
2578 msg->setSize("index", index);
2579
2580 sp<AMessage> response;
2581 return PostAndAwaitResponse(msg, &response);
2582 }
2583
2584 status_t MediaCodec::signalEndOfInputStream() {
2585 sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, this);
2586
2587 sp<AMessage> response;
2588 return PostAndAwaitResponse(msg, &response);
2589 }
2590
2591 status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
2592 sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, this);
2593
2594 sp<AMessage> response;
2595 status_t err;
2596 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2597 return err;
2598 }
2599
2600 CHECK(response->findMessage("format", format));
2601
2602 return OK;
2603 }
2604
2605 status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
2606 sp<AMessage> msg = new AMessage(kWhatGetInputFormat, this);
2607
2608 sp<AMessage> response;
2609 status_t err;
2610 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2611 return err;
2612 }
2613
2614 CHECK(response->findMessage("format", format));
2615
2616 return OK;
2617 }
2618
2619 status_t MediaCodec::getName(AString *name) const {
2620 sp<AMessage> msg = new AMessage(kWhatGetName, this);
2621
2622 sp<AMessage> response;
2623 status_t err;
2624 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2625 return err;
2626 }
2627
2628 CHECK(response->findString("name", name));
2629
2630 return OK;
2631 }
2632
2633 status_t MediaCodec::getCodecInfo(sp<MediaCodecInfo> *codecInfo) const {
2634 sp<AMessage> msg = new AMessage(kWhatGetCodecInfo, this);
2635
2636 sp<AMessage> response;
2637 status_t err;
2638 if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
2639 return err;
2640 }
2641
2642 sp<RefBase> obj;
2643 CHECK(response->findObject("codecInfo", &obj));
2644 *codecInfo = static_cast<MediaCodecInfo *>(obj.get());
2645
2646 return OK;
2647 }
2648
2649 status_t MediaCodec::getMetrics(mediametrics_handle_t &reply) {
2650
2651 reply = 0;
2652
2653 // shouldn't happen, but be safe
2654 if (mMetricsHandle == 0) {
2655 return UNKNOWN_ERROR;
2656 }
2657
2658 // update any in-flight data that's not carried within the record
2659 updateMediametrics();
2660
2661 // send it back to the caller.
2662 reply = mediametrics_dup(mMetricsHandle);
2663
2664 updateEphemeralMediametrics(reply);
2665
2666 return OK;
2667 }
2668
2669 status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
2670 sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
2671 msg->setInt32("portIndex", kPortIndexInput);
2672 msg->setPointer("buffers", buffers);
2673
2674 sp<AMessage> response;
2675 return PostAndAwaitResponse(msg, &response);
2676 }
2677
2678 status_t MediaCodec::getOutputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
2679 sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
2680 msg->setInt32("portIndex", kPortIndexOutput);
2681 msg->setPointer("buffers", buffers);
2682
2683 sp<AMessage> response;
2684 return PostAndAwaitResponse(msg, &response);
2685 }
2686
2687 status_t MediaCodec::getOutputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
2688 sp<AMessage> format;
2689 return getBufferAndFormat(kPortIndexOutput, index, buffer, &format);
2690 }
2691
2692 status_t MediaCodec::getOutputFormat(size_t index, sp<AMessage> *format) {
2693 sp<MediaCodecBuffer> buffer;
2694 return getBufferAndFormat(kPortIndexOutput, index, &buffer, format);
2695 }
2696
2697 status_t MediaCodec::getInputBuffer(size_t index, sp<MediaCodecBuffer> *buffer) {
2698 sp<AMessage> format;
2699 return getBufferAndFormat(kPortIndexInput, index, buffer, &format);
2700 }
2701
2702 bool MediaCodec::isExecuting() const {
2703 return mState == STARTED || mState == FLUSHED;
2704 }
2705
2706 status_t MediaCodec::getBufferAndFormat(
2707 size_t portIndex, size_t index,
2708 sp<MediaCodecBuffer> *buffer, sp<AMessage> *format) {
2709 // use mutex instead of a context switch
2710 if (mReleasedByResourceManager) {
2711 ALOGE("getBufferAndFormat - resource already released");
2712 return DEAD_OBJECT;
2713 }
2714
2715 if (buffer == NULL) {
2716 ALOGE("getBufferAndFormat - null MediaCodecBuffer");
2717 return INVALID_OPERATION;
2718 }
2719
2720 if (format == NULL) {
2721 ALOGE("getBufferAndFormat - null AMessage");
2722 return INVALID_OPERATION;
2723 }
2724
2725 buffer->clear();
2726 format->clear();
2727
2728 if (!isExecuting()) {
2729 ALOGE("getBufferAndFormat - not executing");
2730 return INVALID_OPERATION;
2731 }
2732
2733 // we do not want mPortBuffers to change during this section
2734     // we also don't want mOwnedByClient to change during this section
2735 Mutex::Autolock al(mBufferLock);
2736
2737 std::vector<BufferInfo> &buffers = mPortBuffers[portIndex];
2738 if (index >= buffers.size()) {
2739 ALOGE("getBufferAndFormat - trying to get buffer with "
2740 "bad index (index=%zu buffer_size=%zu)", index, buffers.size());
2741 return INVALID_OPERATION;
2742 }
2743
2744 const BufferInfo &info = buffers[index];
2745 if (!info.mOwnedByClient) {
2746 ALOGE("getBufferAndFormat - invalid operation "
2747 "(the index %zu is not owned by client)", index);
2748 return INVALID_OPERATION;
2749 }
2750
2751 *buffer = info.mData;
2752 *format = info.mData->format();
2753
2754 return OK;
2755 }
2756
2757 status_t MediaCodec::flush() {
2758 sp<AMessage> msg = new AMessage(kWhatFlush, this);
2759
2760 sp<AMessage> response;
2761 return PostAndAwaitResponse(msg, &response);
2762 }
2763
2764 status_t MediaCodec::requestIDRFrame() {
2765 (new AMessage(kWhatRequestIDRFrame, this))->post();
2766
2767 return OK;
2768 }
2769
2770 status_t MediaCodec::querySupportedVendorParameters(std::vector<std::string> *names) {
2771 return mCodec->querySupportedParameters(names);
2772 }
2773
2774 status_t MediaCodec::describeParameter(const std::string &name, CodecParameterDescriptor *desc) {
2775 return mCodec->describeParameter(name, desc);
2776 }
2777
2778 status_t MediaCodec::subscribeToVendorParameters(const std::vector<std::string> &names) {
2779 return mCodec->subscribeToParameters(names);
2780 }
2781
2782 status_t MediaCodec::unsubscribeFromVendorParameters(const std::vector<std::string> &names) {
2783 return mCodec->unsubscribeFromParameters(names);
2784 }
2785
2786 void MediaCodec::requestActivityNotification(const sp<AMessage> &notify) {
2787 sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this);
2788 msg->setMessage("notify", notify);
2789 msg->post();
2790 }
2791
2792 void MediaCodec::requestCpuBoostIfNeeded() {
2793 if (mCpuBoostRequested) {
2794 return;
2795 }
2796 int32_t colorFormat;
2797 if (mOutputFormat->contains("hdr-static-info")
2798 && mOutputFormat->findInt32("color-format", &colorFormat)
2799             // check the format for OMX only; for C2 the format is always opaque since
2800             // software rendering doesn't go through the client
2801 && ((mSoftRenderer != NULL && colorFormat == OMX_COLOR_FormatYUV420Planar16)
2802 || mOwnerName.equalsIgnoreCase("codec2::software"))) {
2803 int32_t left, top, right, bottom, width, height;
2804 int64_t totalPixel = 0;
2805 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
2806 totalPixel = (right - left + 1) * (bottom - top + 1);
2807 } else if (mOutputFormat->findInt32("width", &width)
2808 && mOutputFormat->findInt32("height", &height)) {
2809 totalPixel = width * height;
2810 }
2811 if (totalPixel >= 1920 * 1080) {
2812 mResourceManagerProxy->addResource(MediaResource::CpuBoostResource());
2813 mCpuBoostRequested = true;
2814 }
2815 }
2816 }
2817
2818 BatteryChecker::BatteryChecker(const sp<AMessage> &msg, int64_t timeoutUs)
2819 : mTimeoutUs(timeoutUs)
2820 , mLastActivityTimeUs(-1ll)
2821 , mBatteryStatNotified(false)
2822 , mBatteryCheckerGeneration(0)
2823 , mIsExecuting(false)
2824 , mBatteryCheckerMsg(msg) {}
2825
2826 void BatteryChecker::onCodecActivity(std::function<void()> batteryOnCb) {
2827 if (!isExecuting()) {
2828 // ignore if not executing
2829 return;
2830 }
2831 if (!mBatteryStatNotified) {
2832 batteryOnCb();
2833 mBatteryStatNotified = true;
2834 sp<AMessage> msg = mBatteryCheckerMsg->dup();
2835 msg->setInt32("generation", mBatteryCheckerGeneration);
2836
2837 // post checker and clear last activity time
2838 msg->post(mTimeoutUs);
2839 mLastActivityTimeUs = -1ll;
2840 } else {
2841 // update last activity time
2842 mLastActivityTimeUs = ALooper::GetNowUs();
2843 }
2844 }
2845
2846 void BatteryChecker::onCheckBatteryTimer(
2847 const sp<AMessage> &msg, std::function<void()> batteryOffCb) {
2848 // ignore if this checker already expired because the client resource was removed
2849 int32_t generation;
2850 if (!msg->findInt32("generation", &generation)
2851 || generation != mBatteryCheckerGeneration) {
2852 return;
2853 }
2854
2855 if (mLastActivityTimeUs < 0ll) {
2856 // timed out inactive, do not repost checker
2857 batteryOffCb();
2858 mBatteryStatNotified = false;
2859 } else {
2860 // repost checker and clear last activity time
2861 msg->post(mTimeoutUs + mLastActivityTimeUs - ALooper::GetNowUs());
2862 mLastActivityTimeUs = -1ll;
2863 }
2864 }
2865
2866 void BatteryChecker::onClientRemoved() {
2867 mBatteryStatNotified = false;
2868 mBatteryCheckerGeneration++;
2869 }
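// A condensed view of the BatteryChecker protocol implemented above (a sketch of
// how MediaCodec drives it; the lambdas stand in for the real battery-stats calls):
//
//     checker->onCodecActivity([] { /* report "codec on" */ });
//         // first activity: reports "on" and posts the timer message
//         // later activity: just refreshes mLastActivityTimeUs
//     checker->onCheckBatteryTimer(msg, [] { /* report "codec off" */ });
//         // no activity since the last check: reports "off" and stops reposting
//         // otherwise: reposts itself relative to the last activity time
//     checker->onClientRemoved();  // invalidates any pending timer via the generation counter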
2870
2871 ////////////////////////////////////////////////////////////////////////////////
2872
2873 void MediaCodec::cancelPendingDequeueOperations() {
2874 if (mFlags & kFlagDequeueInputPending) {
2875 PostReplyWithError(mDequeueInputReplyID, INVALID_OPERATION);
2876
2877 ++mDequeueInputTimeoutGeneration;
2878 mDequeueInputReplyID = 0;
2879 mFlags &= ~kFlagDequeueInputPending;
2880 }
2881
2882 if (mFlags & kFlagDequeueOutputPending) {
2883 PostReplyWithError(mDequeueOutputReplyID, INVALID_OPERATION);
2884
2885 ++mDequeueOutputTimeoutGeneration;
2886 mDequeueOutputReplyID = 0;
2887 mFlags &= ~kFlagDequeueOutputPending;
2888 }
2889 }
2890
2891 bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
2892 if (!isExecuting() || (mFlags & kFlagIsAsync)
2893 || (newRequest && (mFlags & kFlagDequeueInputPending))) {
2894 PostReplyWithError(replyID, INVALID_OPERATION);
2895 return true;
2896 } else if (mFlags & kFlagStickyError) {
2897 PostReplyWithError(replyID, getStickyError());
2898 return true;
2899 }
2900
2901 ssize_t index = dequeuePortBuffer(kPortIndexInput);
2902
2903 if (index < 0) {
2904 CHECK_EQ(index, -EAGAIN);
2905 return false;
2906 }
2907
2908 sp<AMessage> response = new AMessage;
2909 response->setSize("index", index);
2910 response->postReply(replyID);
2911
2912 return true;
2913 }
2914
2915 bool MediaCodec::handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest) {
2916 if (!isExecuting() || (mFlags & kFlagIsAsync)
2917 || (newRequest && (mFlags & kFlagDequeueOutputPending))) {
2918 PostReplyWithError(replyID, INVALID_OPERATION);
2919 } else if (mFlags & kFlagStickyError) {
2920 PostReplyWithError(replyID, getStickyError());
2921 } else if (mFlags & kFlagOutputBuffersChanged) {
2922 PostReplyWithError(replyID, INFO_OUTPUT_BUFFERS_CHANGED);
2923 mFlags &= ~kFlagOutputBuffersChanged;
2924 } else {
2925 sp<AMessage> response = new AMessage;
2926 BufferInfo *info = peekNextPortBuffer(kPortIndexOutput);
2927 if (!info) {
2928 return false;
2929 }
2930
2931 // In synchronous mode, output format change should be handled
2932         // at dequeue to keep the events in the correct order.
2933
2934 const sp<MediaCodecBuffer> &buffer = info->mData;
2935 handleOutputFormatChangeIfNeeded(buffer);
2936 if (mFlags & kFlagOutputFormatChanged) {
2937 PostReplyWithError(replyID, INFO_FORMAT_CHANGED);
2938 mFlags &= ~kFlagOutputFormatChanged;
2939 return true;
2940 }
2941
2942 ssize_t index = dequeuePortBuffer(kPortIndexOutput);
2943
2944 response->setSize("index", index);
2945 response->setSize("offset", buffer->offset());
2946 response->setSize("size", buffer->size());
2947
2948 int64_t timeUs;
2949 CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
2950
2951 response->setInt64("timeUs", timeUs);
2952
2953 int32_t flags;
2954 CHECK(buffer->meta()->findInt32("flags", &flags));
2955
2956 response->setInt32("flags", flags);
2957
2958 statsBufferReceived(timeUs, buffer);
2959
2960 response->postReply(replyID);
2961 }
2962
2963 return true;
2964 }
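// A hedged sketch of the synchronous-mode client loop that the ordering note above
// protects (error handling trimmed; the INFO_* codes are the ones posted via
// PostReplyWithError above):
//
//     status_t err = codec->dequeueOutputBuffer(&index, &offset, &size, &ptsUs, &flags, timeoutUs);
//     if (err == OK) {
//         // consume the buffer, then hand it back (optionally rendering it)
//         codec->releaseOutputBuffer(index);
//     } else if (err == INFO_FORMAT_CHANGED) {
//         codec->getOutputFormat(&format);      // observed before the buffer that triggered it
//     } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
//         codec->getOutputBuffers(&outputBuffers);
//     }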
2965
2966 void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
2967 switch (msg->what()) {
2968 case kWhatCodecNotify:
2969 {
2970 int32_t what;
2971 CHECK(msg->findInt32("what", &what));
2972
2973 switch (what) {
2974 case kWhatError:
2975 {
2976 int32_t err, actionCode;
2977 CHECK(msg->findInt32("err", &err));
2978 CHECK(msg->findInt32("actionCode", &actionCode));
2979
2980 ALOGE("Codec reported err %#x, actionCode %d, while in state %d/%s",
2981 err, actionCode, mState, stateString(mState).c_str());
2982 if (err == DEAD_OBJECT) {
2983 mFlags |= kFlagSawMediaServerDie;
2984 mFlags &= ~kFlagIsComponentAllocated;
2985 }
2986
2987 bool sendErrorResponse = true;
2988 std::string origin{"kWhatError:"};
2989 origin += stateString(mState);
2990
2991 switch (mState) {
2992 case INITIALIZING:
2993 {
2994 setState(UNINITIALIZED);
2995 break;
2996 }
2997
2998 case CONFIGURING:
2999 {
3000 if (actionCode == ACTION_CODE_FATAL) {
3001 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3002 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3003 stateString(mState).c_str());
3004 flushMediametrics();
3005 initMediametrics();
3006 }
3007 setState(actionCode == ACTION_CODE_FATAL ?
3008 UNINITIALIZED : INITIALIZED);
3009 break;
3010 }
3011
3012 case STARTING:
3013 {
3014 if (actionCode == ACTION_CODE_FATAL) {
3015 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3016 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3017 stateString(mState).c_str());
3018 flushMediametrics();
3019 initMediametrics();
3020 }
3021 setState(actionCode == ACTION_CODE_FATAL ?
3022 UNINITIALIZED : CONFIGURED);
3023 break;
3024 }
3025
3026 case RELEASING:
3027 {
3028 // Ignore the error, assuming we'll still get
3029 // the shutdown complete notification. If we
3030 // don't, we'll timeout and force release.
3031 sendErrorResponse = false;
3032 FALLTHROUGH_INTENDED;
3033 }
3034 case STOPPING:
3035 {
3036 if (mFlags & kFlagSawMediaServerDie) {
3037 bool postPendingReplies = true;
3038 if (mState == RELEASING && !mReplyID) {
3039 ALOGD("Releasing asynchronously, so nothing to reply here.");
3040 postPendingReplies = false;
3041 }
3042 // MediaServer died, there definitely won't
3043 // be a shutdown complete notification after
3044 // all.
3045
3046 // note that we may be directly going from
3047 // STOPPING->UNINITIALIZED, instead of the
3048 // usual STOPPING->INITIALIZED state.
3049 setState(UNINITIALIZED);
3050 if (mState == RELEASING) {
3051 mComponentName.clear();
3052 }
3053 if (postPendingReplies) {
3054 postPendingRepliesAndDeferredMessages(origin + ":dead");
3055 }
3056 sendErrorResponse = false;
3057 } else if (!mReplyID) {
3058 sendErrorResponse = false;
3059 }
3060 break;
3061 }
3062
3063 case FLUSHING:
3064 {
3065 if (actionCode == ACTION_CODE_FATAL) {
3066 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3067 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3068 stateString(mState).c_str());
3069 flushMediametrics();
3070 initMediametrics();
3071
3072 setState(UNINITIALIZED);
3073 } else {
3074 setState(
3075 (mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
3076 }
3077 break;
3078 }
3079
3080 case FLUSHED:
3081 case STARTED:
3082 {
3083 sendErrorResponse = (mReplyID != nullptr);
3084
3085 setStickyError(err);
3086 postActivityNotificationIfPossible();
3087
3088 cancelPendingDequeueOperations();
3089
3090 if (mFlags & kFlagIsAsync) {
3091 onError(err, actionCode);
3092 }
3093 switch (actionCode) {
3094 case ACTION_CODE_TRANSIENT:
3095 break;
3096 case ACTION_CODE_RECOVERABLE:
3097 setState(INITIALIZED);
3098 break;
3099 default:
3100 mediametrics_setInt32(mMetricsHandle, kCodecError, err);
3101 mediametrics_setCString(mMetricsHandle, kCodecErrorState,
3102 stateString(mState).c_str());
3103 flushMediametrics();
3104 initMediametrics();
3105 setState(UNINITIALIZED);
3106 break;
3107 }
3108 break;
3109 }
3110
3111 default:
3112 {
3113 sendErrorResponse = (mReplyID != nullptr);
3114
3115 setStickyError(err);
3116 postActivityNotificationIfPossible();
3117
3118 // actionCode in an uninitialized state is always fatal.
3119 if (mState == UNINITIALIZED) {
3120 actionCode = ACTION_CODE_FATAL;
3121 }
3122 if (mFlags & kFlagIsAsync) {
3123 onError(err, actionCode);
3124 }
3125 switch (actionCode) {
3126 case ACTION_CODE_TRANSIENT:
3127 break;
3128 case ACTION_CODE_RECOVERABLE:
3129 setState(INITIALIZED);
3130 break;
3131 default:
3132 setState(UNINITIALIZED);
3133 break;
3134 }
3135 break;
3136 }
3137 }
3138
3139 if (sendErrorResponse) {
3140 // TRICKY: replicate PostReplyWithError logic for
3141 // err code override
3142 int32_t finalErr = err;
3143 if (mReleasedByResourceManager) {
3144 // override the err code if MediaCodec has been
3145 // released by ResourceManager.
3146 finalErr = DEAD_OBJECT;
3147 }
3148 postPendingRepliesAndDeferredMessages(origin, finalErr);
3149 }
3150 break;
3151 }
3152
3153 case kWhatComponentAllocated:
3154 {
3155 if (mState == RELEASING || mState == UNINITIALIZED) {
3156 // In case a kWhatError or kWhatRelease message came in and replied,
3157 // we log a warning and ignore.
3158 ALOGW("allocate interrupted by error or release, current state %d/%s",
3159 mState, stateString(mState).c_str());
3160 break;
3161 }
3162 CHECK_EQ(mState, INITIALIZING);
3163 setState(INITIALIZED);
3164 mFlags |= kFlagIsComponentAllocated;
3165
3166 CHECK(msg->findString("componentName", &mComponentName));
3167
3168 if (mComponentName.c_str()) {
3169 mediametrics_setCString(mMetricsHandle, kCodecCodec,
3170 mComponentName.c_str());
3171 }
3172
3173 const char *owner = mCodecInfo ? mCodecInfo->getOwnerName() : "";
3174 if (mComponentName.startsWith("OMX.google.")
3175 && strncmp(owner, "default", 8) == 0) {
3176 mFlags |= kFlagUsesSoftwareRenderer;
3177 } else {
3178 mFlags &= ~kFlagUsesSoftwareRenderer;
3179 }
3180 mOwnerName = owner;
3181
3182 if (mComponentName.endsWith(".secure")) {
3183 mFlags |= kFlagIsSecure;
3184 mediametrics_setInt32(mMetricsHandle, kCodecSecure, 1);
3185 } else {
3186 mFlags &= ~kFlagIsSecure;
3187 mediametrics_setInt32(mMetricsHandle, kCodecSecure, 0);
3188 }
3189
3190 if (mIsVideo) {
3191 // audio codec is currently ignored.
3192 mResourceManagerProxy->addResource(
3193 MediaResource::CodecResource(mFlags & kFlagIsSecure, mIsVideo));
3194 }
3195
3196 postPendingRepliesAndDeferredMessages("kWhatComponentAllocated");
3197 break;
3198 }
3199
3200 case kWhatComponentConfigured:
3201 {
3202 if (mState == RELEASING || mState == UNINITIALIZED || mState == INITIALIZED) {
3203 // In case a kWhatError or kWhatRelease message came in and replied,
3204 // we log a warning and ignore.
3205 ALOGW("configure interrupted by error or release, current state %d/%s",
3206 mState, stateString(mState).c_str());
3207 break;
3208 }
3209 CHECK_EQ(mState, CONFIGURING);
3210
3211 // reset input surface flag
3212 mHaveInputSurface = false;
3213
3214 CHECK(msg->findMessage("input-format", &mInputFormat));
3215 CHECK(msg->findMessage("output-format", &mOutputFormat));
3216
3217 // limit to confirming the opt-in behavior to minimize any behavioral change
3218 if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
3219 // signal frame dropping mode in the input format, as this may also be
3220 // meaningful for (and otherwise confusing to) an encoder in a transcoder scenario
3221 mInputFormat->setInt32(KEY_ALLOW_FRAME_DROP, mAllowFrameDroppingBySurface);
3222 }
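// For encoders the interesting metrics (profile/level, bitrate) live in the
// output format; for decoders they are described by the input format.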
3223 sp<AMessage> interestingFormat =
3224 (mFlags & kFlagIsEncoder) ? mOutputFormat : mInputFormat;
3225 ALOGV("[%s] configured as input format: %s, output format: %s",
3226 mComponentName.c_str(),
3227 mInputFormat->debugString(4).c_str(),
3228 mOutputFormat->debugString(4).c_str());
3229 int32_t usingSwRenderer;
3230 if (mOutputFormat->findInt32("using-sw-renderer", &usingSwRenderer)
3231 && usingSwRenderer) {
3232 mFlags |= kFlagUsesSoftwareRenderer;
3233 }
3234 setState(CONFIGURED);
3235 postPendingRepliesAndDeferredMessages("kWhatComponentConfigured");
3236
3237 // augment our media metrics info, now that we know more things
3238 // such as what the codec extracted from any CSD passed in.
3239 if (mMetricsHandle != 0) {
3240 sp<AMessage> format;
3241 if (mConfigureMsg != NULL &&
3242 mConfigureMsg->findMessage("format", &format)) {
3243 // format includes: mime
3244 AString mime;
3245 if (format->findString("mime", &mime)) {
3246 mediametrics_setCString(mMetricsHandle, kCodecMime,
3247 mime.c_str());
3248 }
3249 }
3250 // perhaps video only?
3251 int32_t profile = 0;
3252 if (interestingFormat->findInt32("profile", &profile)) {
3253 mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
3254 }
3255 int32_t level = 0;
3256 if (interestingFormat->findInt32("level", &level)) {
3257 mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
3258 }
3259 // bitrate and bitrate mode, encoder only
3260 if (mFlags & kFlagIsEncoder) {
3261 // encoder specific values
3262 int32_t bitrate_mode = -1;
3263 if (mOutputFormat->findInt32(KEY_BITRATE_MODE, &bitrate_mode)) {
3264 mediametrics_setCString(mMetricsHandle, kCodecBitrateMode,
3265 asString_BitrateMode(bitrate_mode));
3266 }
3267 int32_t bitrate = -1;
3268 if (mOutputFormat->findInt32(KEY_BIT_RATE, &bitrate)) {
3269 mediametrics_setInt32(mMetricsHandle, kCodecBitrate, bitrate);
3270 }
3271 } else {
3272 // decoder specific values
3273 }
3274 }
3275 break;
3276 }
3277
3278 case kWhatInputSurfaceCreated:
3279 {
3280 if (mState != CONFIGURED) {
3281 // state transitioned unexpectedly; we should have replied already.
3282 ALOGD("received kWhatInputSurfaceCreated message in state %s",
3283 stateString(mState).c_str());
3284 break;
3285 }
3286 // response to initiateCreateInputSurface()
3287 status_t err = NO_ERROR;
3288 sp<AMessage> response = new AMessage;
3289 if (!msg->findInt32("err", &err)) {
3290 sp<RefBase> obj;
3291 msg->findObject("input-surface", &obj);
3292 CHECK(msg->findMessage("input-format", &mInputFormat));
3293 CHECK(msg->findMessage("output-format", &mOutputFormat));
3294 ALOGV("[%s] input surface created as input format: %s, output format: %s",
3295 mComponentName.c_str(),
3296 mInputFormat->debugString(4).c_str(),
3297 mOutputFormat->debugString(4).c_str());
3298 CHECK(obj != NULL);
3299 response->setObject("input-surface", obj);
3300 mHaveInputSurface = true;
3301 } else {
3302 response->setInt32("err", err);
3303 }
3304 postPendingRepliesAndDeferredMessages("kWhatInputSurfaceCreated", response);
3305 break;
3306 }
3307
3308 case kWhatInputSurfaceAccepted:
3309 {
3310 if (mState != CONFIGURED) {
3311 // state transitioned unexpectedly; we should have replied already.
3312 ALOGD("received kWhatInputSurfaceAccepted message in state %s",
3313 stateString(mState).c_str());
3314 break;
3315 }
3316 // response to initiateSetInputSurface()
3317 status_t err = NO_ERROR;
3318 sp<AMessage> response = new AMessage();
3319 if (!msg->findInt32("err", &err)) {
3320 CHECK(msg->findMessage("input-format", &mInputFormat));
3321 CHECK(msg->findMessage("output-format", &mOutputFormat));
3322 mHaveInputSurface = true;
3323 } else {
3324 response->setInt32("err", err);
3325 }
3326 postPendingRepliesAndDeferredMessages("kWhatInputSurfaceAccepted", response);
3327 break;
3328 }
3329
3330 case kWhatSignaledInputEOS:
3331 {
3332 if (!isExecuting()) {
3333 // state transitioned unexpectedly; we should have replied already.
3334 ALOGD("received kWhatSignaledInputEOS message in state %s",
3335 stateString(mState).c_str());
3336 break;
3337 }
3338 // response to signalEndOfInputStream()
3339 sp<AMessage> response = new AMessage;
3340 status_t err;
3341 if (msg->findInt32("err", &err)) {
3342 response->setInt32("err", err);
3343 }
3344 postPendingRepliesAndDeferredMessages("kWhatSignaledInputEOS", response);
3345 break;
3346 }
3347
3348 case kWhatStartCompleted:
3349 {
3350 if (mState == RELEASING || mState == UNINITIALIZED) {
3351 // In case a kWhatRelease message came in and replied,
3352 // we log a warning and ignore.
3353 ALOGW("start interrupted by release, current state %d/%s",
3354 mState, stateString(mState).c_str());
3355 break;
3356 }
3357
3358 CHECK_EQ(mState, STARTING);
3359 if (mIsVideo) {
3360 mResourceManagerProxy->addResource(
3361 MediaResource::GraphicMemoryResource(getGraphicBufferSize()));
3362 }
3363 setState(STARTED);
3364 postPendingRepliesAndDeferredMessages("kWhatStartCompleted");
3365 break;
3366 }
3367
3368 case kWhatOutputBuffersChanged:
3369 {
3370 mFlags |= kFlagOutputBuffersChanged;
3371 postActivityNotificationIfPossible();
3372 break;
3373 }
3374
3375 case kWhatOutputFramesRendered:
3376 {
3377 // ignore these in all states except running
3378 if (mState != STARTED) {
3379 break;
3380 }
3381 TunnelPeekState previousState = mTunnelPeekState;
3382 mTunnelPeekState = TunnelPeekState::kBufferRendered;
3383 ALOGV("TunnelPeekState: %s -> %s",
3384 asString(previousState),
3385 asString(TunnelPeekState::kBufferRendered));
3386 updatePlaybackDuration(msg);
3387 // check that we have a notification set
3388 if (mOnFrameRenderedNotification != NULL) {
3389 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
3390 notify->setMessage("data", msg);
3391 notify->post();
3392 }
3393 break;
3394 }
3395
3396 case kWhatFirstTunnelFrameReady:
3397 {
3398 if (mState != STARTED) {
3399 break;
3400 }
3401 TunnelPeekState previousState = mTunnelPeekState;
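// While tunnel-peek is disabled we only note that a frame has been decoded;
// while it is enabled we ask the codec to render ("peek") that first frame now.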
3402 switch(mTunnelPeekState) {
3403 case TunnelPeekState::kDisabledNoBuffer:
3404 case TunnelPeekState::kDisabledQueued:
3405 mTunnelPeekState = TunnelPeekState::kBufferDecoded;
3406 ALOGV("First tunnel frame ready");
3407 ALOGV("TunnelPeekState: %s -> %s",
3408 asString(previousState),
3409 asString(mTunnelPeekState));
3410 break;
3411 case TunnelPeekState::kEnabledNoBuffer:
3412 case TunnelPeekState::kEnabledQueued:
3413 {
3414 sp<AMessage> parameters = new AMessage();
3415 parameters->setInt32("android._trigger-tunnel-peek", 1);
3416 mCodec->signalSetParameters(parameters);
3417 }
3418 mTunnelPeekState = TunnelPeekState::kBufferRendered;
3419 ALOGV("First tunnel frame ready");
3420 ALOGV("TunnelPeekState: %s -> %s",
3421 asString(previousState),
3422 asString(mTunnelPeekState));
3423 break;
3424 default:
3425 ALOGV("Ignoring first tunnel frame ready, TunnelPeekState: %s",
3426 asString(mTunnelPeekState));
3427 break;
3428 }
3429
3430 if (mOnFirstTunnelFrameReadyNotification != nullptr) {
3431 sp<AMessage> notify = mOnFirstTunnelFrameReadyNotification->dup();
3432 notify->setMessage("data", msg);
3433 notify->post();
3434 }
3435 break;
3436 }
3437
3438 case kWhatFillThisBuffer:
3439 {
3440 /* size_t index = */updateBuffers(kPortIndexInput, msg);
3441
3442 if (mState == FLUSHING
3443 || mState == STOPPING
3444 || mState == RELEASING) {
3445 returnBuffersToCodecOnPort(kPortIndexInput);
3446 break;
3447 }
3448
3449 if (!mCSD.empty()) {
3450 ssize_t index = dequeuePortBuffer(kPortIndexInput);
3451 CHECK_GE(index, 0);
3452
3453 // If codec specific data had been specified as
3454 // part of the format in the call to configure and
3455 // if there's more csd left, we submit it here;
3456 // clients only get access to input buffers once
3457 // this data has been exhausted.
3458
3459 status_t err = queueCSDInputBuffer(index);
3460
3461 if (err != OK) {
3462 ALOGE("queueCSDInputBuffer failed w/ error %d",
3463 err);
3464
3465 setStickyError(err);
3466 postActivityNotificationIfPossible();
3467
3468 cancelPendingDequeueOperations();
3469 }
3470 break;
3471 }
3472 if (!mLeftover.empty()) {
3473 ssize_t index = dequeuePortBuffer(kPortIndexInput);
3474 CHECK_GE(index, 0);
3475
3476 status_t err = handleLeftover(index);
3477 if (err != OK) {
3478 setStickyError(err);
3479 postActivityNotificationIfPossible();
3480 cancelPendingDequeueOperations();
3481 }
3482 break;
3483 }
3484
3485 if (mFlags & kFlagIsAsync) {
3486 if (!mHaveInputSurface) {
3487 if (mState == FLUSHED) {
3488 mHavePendingInputBuffers = true;
3489 } else {
3490 onInputBufferAvailable();
3491 }
3492 }
3493 } else if (mFlags & kFlagDequeueInputPending) {
3494 CHECK(handleDequeueInputBuffer(mDequeueInputReplyID));
3495
3496 ++mDequeueInputTimeoutGeneration;
3497 mFlags &= ~kFlagDequeueInputPending;
3498 mDequeueInputReplyID = 0;
3499 } else {
3500 postActivityNotificationIfPossible();
3501 }
3502 break;
3503 }
3504
3505 case kWhatDrainThisBuffer:
3506 {
3507 /* size_t index = */updateBuffers(kPortIndexOutput, msg);
3508
3509 if (mState == FLUSHING
3510 || mState == STOPPING
3511 || mState == RELEASING) {
3512 returnBuffersToCodecOnPort(kPortIndexOutput);
3513 break;
3514 }
3515
3516 if (mFlags & kFlagIsAsync) {
3517 sp<RefBase> obj;
3518 CHECK(msg->findObject("buffer", &obj));
3519 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
3520
3521 // In asynchronous mode, output format change is processed immediately.
3522 handleOutputFormatChangeIfNeeded(buffer);
3523 onOutputBufferAvailable();
3524 } else if (mFlags & kFlagDequeueOutputPending) {
3525 CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
3526
3527 ++mDequeueOutputTimeoutGeneration;
3528 mFlags &= ~kFlagDequeueOutputPending;
3529 mDequeueOutputReplyID = 0;
3530 } else {
3531 postActivityNotificationIfPossible();
3532 }
3533
3534 break;
3535 }
3536
3537 case kWhatEOS:
3538 {
3539 // We already notify the client of this by using the
3540 // corresponding flag in "onOutputBufferReady".
3541 break;
3542 }
3543
3544 case kWhatStopCompleted:
3545 {
3546 if (mState != STOPPING) {
3547 ALOGW("Received kWhatStopCompleted in state %d/%s",
3548 mState, stateString(mState).c_str());
3549 break;
3550 }
3551 setState(INITIALIZED);
3552 if (mReplyID) {
3553 postPendingRepliesAndDeferredMessages("kWhatStopCompleted");
3554 } else {
3555 ALOGW("kWhatStopCompleted: presumably an error occurred earlier, "
3556 "but the operation completed anyway. (last reply origin=%s)",
3557 mLastReplyOrigin.c_str());
3558 }
3559 break;
3560 }
3561
3562 case kWhatReleaseCompleted:
3563 {
3564 if (mState != RELEASING) {
3565 ALOGW("Received kWhatReleaseCompleted in state %d/%s",
3566 mState, stateString(mState).c_str());
3567 break;
3568 }
3569 setState(UNINITIALIZED);
3570 mComponentName.clear();
3571
3572 mFlags &= ~kFlagIsComponentAllocated;
3573
3574 // turn battery stats off since we're removing all resources, including the battery-on resource
3575 if (mBatteryChecker != nullptr) {
3576 mBatteryChecker->onClientRemoved();
3577 }
3578
3579 mResourceManagerProxy->removeClient();
3580 mReleaseSurface.reset();
3581
3582 if (mReplyID != nullptr) {
3583 postPendingRepliesAndDeferredMessages("kWhatReleaseCompleted");
3584 }
3585 if (mAsyncReleaseCompleteNotification != nullptr) {
3586 flushMediametrics();
3587 mAsyncReleaseCompleteNotification->post();
3588 mAsyncReleaseCompleteNotification.clear();
3589 }
3590 break;
3591 }
3592
3593 case kWhatFlushCompleted:
3594 {
3595 if (mState != FLUSHING) {
3596 ALOGW("received FlushCompleted message in state %d/%s",
3597 mState, stateString(mState).c_str());
3598 break;
3599 }
3600
3601 if (mFlags & kFlagIsAsync) {
3602 setState(FLUSHED);
3603 } else {
3604 setState(STARTED);
3605 mCodec->signalResume();
3606 }
3607
3608 postPendingRepliesAndDeferredMessages("kWhatFlushCompleted");
3609 break;
3610 }
3611
3612 default:
3613 TRESPASS();
3614 }
3615 break;
3616 }
3617
3618 case kWhatInit:
3619 {
3620 if (mState != UNINITIALIZED) {
3621 PostReplyWithError(msg, INVALID_OPERATION);
3622 break;
3623 }
3624
3625 if (mReplyID) {
3626 mDeferredMessages.push_back(msg);
3627 break;
3628 }
3629 sp<AReplyToken> replyID;
3630 CHECK(msg->senderAwaitsResponse(&replyID));
3631
3632 mReplyID = replyID;
3633 setState(INITIALIZING);
3634
3635 sp<RefBase> codecInfo;
3636 (void)msg->findObject("codecInfo", &codecInfo);
3637 AString name;
3638 CHECK(msg->findString("name", &name));
3639
3640 sp<AMessage> format = new AMessage;
3641 if (codecInfo) {
3642 format->setObject("codecInfo", codecInfo);
3643 }
3644 format->setString("componentName", name);
3645
3646 mCodec->initiateAllocateComponent(format);
3647 break;
3648 }
3649
3650 case kWhatSetNotification:
3651 {
3652 sp<AMessage> notify;
3653 if (msg->findMessage("on-frame-rendered", &notify)) {
3654 mOnFrameRenderedNotification = notify;
3655 }
3656 if (msg->findMessage("first-tunnel-frame-ready", &notify)) {
3657 mOnFirstTunnelFrameReadyNotification = notify;
3658 }
3659 break;
3660 }
3661
3662 case kWhatSetCallback:
3663 {
3664 sp<AReplyToken> replyID;
3665 CHECK(msg->senderAwaitsResponse(&replyID));
3666
3667 if (mState == UNINITIALIZED
3668 || mState == INITIALIZING
3669 || isExecuting()) {
3670 // callback can't be set after codec is executing,
3671 // or before it's initialized (as the callback
3672 // will be cleared when it goes to INITIALIZED)
3673 PostReplyWithError(replyID, INVALID_OPERATION);
3674 break;
3675 }
3676
3677 sp<AMessage> callback;
3678 CHECK(msg->findMessage("callback", &callback));
3679
3680 mCallback = callback;
3681
3682 if (mCallback != NULL) {
3683 ALOGI("MediaCodec will operate in async mode");
3684 mFlags |= kFlagIsAsync;
3685 } else {
3686 mFlags &= ~kFlagIsAsync;
3687 }
3688
3689 sp<AMessage> response = new AMessage;
3690 response->postReply(replyID);
3691 break;
3692 }
3693
3694 case kWhatConfigure:
3695 {
3696 if (mState != INITIALIZED) {
3697 PostReplyWithError(msg, INVALID_OPERATION);
3698 break;
3699 }
3700
3701 if (mReplyID) {
3702 mDeferredMessages.push_back(msg);
3703 break;
3704 }
3705 sp<AReplyToken> replyID;
3706 CHECK(msg->senderAwaitsResponse(&replyID));
3707
3708 sp<RefBase> obj;
3709 CHECK(msg->findObject("surface", &obj));
3710
3711 sp<AMessage> format;
3712 CHECK(msg->findMessage("format", &format));
3713
3714 int32_t push;
3715 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
3716 mFlags |= kFlagPushBlankBuffersOnShutdown;
3717 }
3718
3719 if (obj != NULL) {
3720 if (!format->findInt32(KEY_ALLOW_FRAME_DROP, &mAllowFrameDroppingBySurface)) {
3721 // allow frame dropping by surface by default
3722 mAllowFrameDroppingBySurface = true;
3723 }
3724
3725 format->setObject("native-window", obj);
3726 status_t err = handleSetSurface(static_cast<Surface *>(obj.get()));
3727 if (err != OK) {
3728 PostReplyWithError(replyID, err);
3729 break;
3730 }
3731 } else {
3732 // we are not using a surface, so this variable is unused; initialize it sensibly anyway
3733 mAllowFrameDroppingBySurface = false;
3734
3735 handleSetSurface(NULL);
3736 }
3737
3738 uint32_t flags;
3739 CHECK(msg->findInt32("flags", (int32_t *)&flags));
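// Block model can only be used together with the asynchronous callback API.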
3740 if (flags & CONFIGURE_FLAG_USE_BLOCK_MODEL) {
3741 if (!(mFlags & kFlagIsAsync)) {
3742 PostReplyWithError(replyID, INVALID_OPERATION);
3743 break;
3744 }
3745 mFlags |= kFlagUseBlockModel;
3746 }
3747 mReplyID = replyID;
3748 setState(CONFIGURING);
3749
3750 void *crypto;
3751 if (!msg->findPointer("crypto", &crypto)) {
3752 crypto = NULL;
3753 }
3754
3755 ALOGV("kWhatConfigure: Old mCrypto: %p (%d)",
3756 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
3757
3758 mCrypto = static_cast<ICrypto *>(crypto);
3759 mBufferChannel->setCrypto(mCrypto);
3760
3761 ALOGV("kWhatConfigure: New mCrypto: %p (%d)",
3762 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
3763
3764 void *descrambler;
3765 if (!msg->findPointer("descrambler", &descrambler)) {
3766 descrambler = NULL;
3767 }
3768
3769 mDescrambler = static_cast<IDescrambler *>(descrambler);
3770 mBufferChannel->setDescrambler(mDescrambler);
3771
3772 format->setInt32("flags", flags);
3773 if (flags & CONFIGURE_FLAG_ENCODE) {
3774 format->setInt32("encoder", true);
3775 mFlags |= kFlagIsEncoder;
3776 }
3777
3778 extractCSD(format);
3779
3780 int32_t tunneled;
3781 if (format->findInt32("feature-tunneled-playback", &tunneled) && tunneled != 0) {
3782 ALOGI("Configuring TUNNELED video playback.");
3783 mTunneled = true;
3784 } else {
3785 mTunneled = false;
3786 }
3787
3788 int32_t background = 0;
3789 if (format->findInt32("android._background-mode", &background) && background) {
3790 androidSetThreadPriority(gettid(), ANDROID_PRIORITY_BACKGROUND);
3791 }
3792
3793 mCodec->initiateConfigureComponent(format);
3794 break;
3795 }
3796
3797 case kWhatSetSurface:
3798 {
3799 sp<AReplyToken> replyID;
3800 CHECK(msg->senderAwaitsResponse(&replyID));
3801
3802 status_t err = OK;
3803
3804 switch (mState) {
3805 case CONFIGURED:
3806 case STARTED:
3807 case FLUSHED:
3808 {
3809 sp<RefBase> obj;
3810 (void)msg->findObject("surface", &obj);
3811 sp<Surface> surface = static_cast<Surface *>(obj.get());
3812 if (mSurface == NULL) {
3813 // do not support setting surface if it was not set
3814 err = INVALID_OPERATION;
3815 } else if (obj == NULL) {
3816 // do not support unsetting surface
3817 err = BAD_VALUE;
3818 } else {
3819 err = connectToSurface(surface);
3820 if (err == ALREADY_EXISTS) {
3821 // reconnecting to same surface
3822 err = OK;
3823 } else {
3824 if (err == OK) {
3825 if (mFlags & kFlagUsesSoftwareRenderer) {
3826 if (mSoftRenderer != NULL
3827 && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
3828 pushBlankBuffersToNativeWindow(mSurface.get());
3829 }
3830 surface->setDequeueTimeout(-1);
3831 mSoftRenderer = new SoftwareRenderer(surface);
3832 // TODO: check if this was successful
3833 } else {
3834 err = mCodec->setSurface(surface);
3835 }
3836 }
3837 if (err == OK) {
3838 (void)disconnectFromSurface();
3839 mSurface = surface;
3840 }
3841 }
3842 }
3843 break;
3844 }
3845
3846 default:
3847 err = INVALID_OPERATION;
3848 break;
3849 }
3850
3851 PostReplyWithError(replyID, err);
3852 break;
3853 }
3854
3855 case kWhatCreateInputSurface:
3856 case kWhatSetInputSurface:
3857 {
3858 // Must be configured, but can't have been started yet.
3859 if (mState != CONFIGURED) {
3860 PostReplyWithError(msg, INVALID_OPERATION);
3861 break;
3862 }
3863
3864 if (mReplyID) {
3865 mDeferredMessages.push_back(msg);
3866 break;
3867 }
3868 sp<AReplyToken> replyID;
3869 CHECK(msg->senderAwaitsResponse(&replyID));
3870
3871 mReplyID = replyID;
3872 if (msg->what() == kWhatCreateInputSurface) {
3873 mCodec->initiateCreateInputSurface();
3874 } else {
3875 sp<RefBase> obj;
3876 CHECK(msg->findObject("input-surface", &obj));
3877
3878 mCodec->initiateSetInputSurface(
3879 static_cast<PersistentSurface *>(obj.get()));
3880 }
3881 break;
3882 }
3883 case kWhatStart:
3884 {
3885 if (mState == FLUSHED) {
3886 setState(STARTED);
3887 if (mHavePendingInputBuffers) {
3888 onInputBufferAvailable();
3889 mHavePendingInputBuffers = false;
3890 }
3891 mCodec->signalResume();
3892 PostReplyWithError(msg, OK);
3893 break;
3894 } else if (mState != CONFIGURED) {
3895 PostReplyWithError(msg, INVALID_OPERATION);
3896 break;
3897 }
3898
3899 if (mReplyID) {
3900 mDeferredMessages.push_back(msg);
3901 break;
3902 }
3903 sp<AReplyToken> replyID;
3904 CHECK(msg->senderAwaitsResponse(&replyID));
3905 TunnelPeekState previousState = mTunnelPeekState;
3906 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
3907 ALOGV("TunnelPeekState: %s -> %s",
3908 asString(previousState),
3909 asString(TunnelPeekState::kEnabledNoBuffer));
3910
3911 mReplyID = replyID;
3912 setState(STARTING);
3913
3914 mCodec->initiateStart();
3915 break;
3916 }
3917
3918 case kWhatStop: {
3919 if (mReplyID) {
3920 mDeferredMessages.push_back(msg);
3921 break;
3922 }
3923 [[fallthrough]];
3924 }
3925 case kWhatRelease:
3926 {
3927 State targetState =
3928 (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
3929
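// If an identical transition is already in flight, defer this request until the
// current stop/release completes.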
3930 if ((mState == RELEASING && targetState == UNINITIALIZED)
3931 || (mState == STOPPING && targetState == INITIALIZED)) {
3932 mDeferredMessages.push_back(msg);
3933 break;
3934 }
3935
3936 sp<AReplyToken> replyID;
3937 CHECK(msg->senderAwaitsResponse(&replyID));
3938
3939 sp<AMessage> asyncNotify;
3940 (void)msg->findMessage("async", &asyncNotify);
3941 // post asyncNotify if going out of scope.
3942 struct AsyncNotifyPost {
3943 AsyncNotifyPost(const sp<AMessage> &asyncNotify) : mAsyncNotify(asyncNotify) {}
3944 ~AsyncNotifyPost() {
3945 if (mAsyncNotify) {
3946 mAsyncNotify->post();
3947 }
3948 }
3949 void clear() { mAsyncNotify.clear(); }
3950 private:
3951 sp<AMessage> mAsyncNotify;
3952 } asyncNotifyPost{asyncNotify};
3953
3954 // already stopped/released
3955 if (mState == UNINITIALIZED && mReleasedByResourceManager) {
3956 sp<AMessage> response = new AMessage;
3957 response->setInt32("err", OK);
3958 response->postReply(replyID);
3959 break;
3960 }
3961
3962 int32_t reclaimed = 0;
3963 msg->findInt32("reclaimed", &reclaimed);
3964 if (reclaimed) {
3965 if (!mReleasedByResourceManager) {
3966 // notify the async client
3967 if (mFlags & kFlagIsAsync) {
3968 onError(DEAD_OBJECT, ACTION_CODE_FATAL);
3969 }
3970 mReleasedByResourceManager = true;
3971 }
3972
3973 int32_t force = 0;
3974 msg->findInt32("force", &force);
3975 if (!force && hasPendingBuffer()) {
3976 ALOGW("Can't reclaim codec right now due to pending buffers.");
3977
3978 // return WOULD_BLOCK to ask resource manager to retry later.
3979 sp<AMessage> response = new AMessage;
3980 response->setInt32("err", WOULD_BLOCK);
3981 response->postReply(replyID);
3982
3983 break;
3984 }
3985 }
3986
3987 bool isReleasingAllocatedComponent =
3988 (mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED;
3989 if (!isReleasingAllocatedComponent // See 1
3990 && mState != INITIALIZED
3991 && mState != CONFIGURED && !isExecuting()) {
3992 // 1) Permit release to shut down the component if allocated.
3993 //
3994 // 2) We may be in "UNINITIALIZED" state already and
3995 // also shutdown the encoder/decoder without the
3996 // client being aware of this if media server died while
3997 // we were being stopped. The client would assume that
3998 // after stop() returned, it would be safe to call release()
3999 // and it should be in this case, no harm to allow a release()
4000 // if we're already uninitialized.
4001 sp<AMessage> response = new AMessage;
4002 // TODO: we shouldn't throw an exception for stop/release. Change this to wait until
4003 // the previous stop/release completes and then reply with OK.
4004 status_t err = mState == targetState ? OK : INVALID_OPERATION;
4005 response->setInt32("err", err);
4006 if (err == OK && targetState == UNINITIALIZED) {
4007 mComponentName.clear();
4008 }
4009 response->postReply(replyID);
4010 break;
4011 }
4012
4013 // If we're flushing, configuring or starting but
4014 // received a release request, post the reply for the pending call
4015 // first, and consider it done. The reply token will be replaced
4016 // after this, and we'll no longer be able to reply.
4017 if (mState == FLUSHING || mState == CONFIGURING || mState == STARTING) {
4018 // mReplyID is always set if we are in one of these states.
4019 postPendingRepliesAndDeferredMessages(
4020 std::string("kWhatRelease:") + stateString(mState));
4021 }
4022 // If we're stopping but received a release request, post the reply
4023 // for the pending call if necessary. Note that the reply may have been
4024 // already posted due to an error.
4025 if (mState == STOPPING && mReplyID) {
4026 postPendingRepliesAndDeferredMessages("kWhatRelease:STOPPING");
4027 }
4028
4029 if (mFlags & kFlagSawMediaServerDie) {
4030 // It's dead, Jim. Don't expect initiateShutdown to yield
4031 // any useful results now...
4032 // Any pending reply would have been handled at kWhatError.
4033 setState(UNINITIALIZED);
4034 if (targetState == UNINITIALIZED) {
4035 mComponentName.clear();
4036 }
4037 (new AMessage)->postReply(replyID);
4038 break;
4039 }
4040
4041 // If we already have an error, the component may not be able to
4042 // complete the shutdown properly. If we're stopping, post the
4043 // reply now with an error to unblock the client; the client can
4044 // release after the failure (instead of hitting an ANR).
4045 if (msg->what() == kWhatStop && (mFlags & kFlagStickyError)) {
4046 // Any pending reply would have been handled at kWhatError.
4047 PostReplyWithError(replyID, getStickyError());
4048 break;
4049 }
4050
4051 if (asyncNotify != nullptr) {
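// For an asynchronous release, switch the codec over to an internal "release"
// surface (created on demand) so that the client's surface is disconnected and
// can be reused without waiting for the shutdown to complete.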
4052 if (mSurface != NULL) {
4053 if (!mReleaseSurface) {
4054 uint64_t usage = 0;
4055 if (mSurface->getConsumerUsage(&usage) != OK) {
4056 usage = 0;
4057 }
4058 mReleaseSurface.reset(new ReleaseSurface(usage));
4059 }
4060 if (mSurface != mReleaseSurface->getSurface()) {
4061 status_t err = connectToSurface(mReleaseSurface->getSurface());
4062 ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
4063 if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
4064 err = mCodec->setSurface(mReleaseSurface->getSurface());
4065 ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
4066 }
4067 if (err == OK) {
4068 (void)disconnectFromSurface();
4069 mSurface = mReleaseSurface->getSurface();
4070 }
4071 }
4072 }
4073 }
4074
4075 if (mReplyID) {
4076 // State transition replies are handled above, so this reply
4077 // would not be related to state transition. As we are
4078 // shutting down the component, just fail the operation.
4079 postPendingRepliesAndDeferredMessages("kWhatRelease:reply", UNKNOWN_ERROR);
4080 }
4081 mReplyID = replyID;
4082 setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
4083
4084 mCodec->initiateShutdown(
4085 msg->what() == kWhatStop /* keepComponentAllocated */);
4086
4087 returnBuffersToCodec(reclaimed);
4088
4089 if (mSoftRenderer != NULL && (mFlags & kFlagPushBlankBuffersOnShutdown)) {
4090 pushBlankBuffersToNativeWindow(mSurface.get());
4091 }
4092
4093 if (asyncNotify != nullptr) {
4094 mResourceManagerProxy->markClientForPendingRemoval();
4095 postPendingRepliesAndDeferredMessages("kWhatRelease:async");
4096 asyncNotifyPost.clear();
4097 mAsyncReleaseCompleteNotification = asyncNotify;
4098 }
4099
4100 break;
4101 }
4102
4103 case kWhatDequeueInputBuffer:
4104 {
4105 sp<AReplyToken> replyID;
4106 CHECK(msg->senderAwaitsResponse(&replyID));
4107
4108 if (mFlags & kFlagIsAsync) {
4109 ALOGE("dequeueInputBuffer can't be used in async mode");
4110 PostReplyWithError(replyID, INVALID_OPERATION);
4111 break;
4112 }
4113
4114 if (mHaveInputSurface) {
4115 ALOGE("dequeueInputBuffer can't be used with input surface");
4116 PostReplyWithError(replyID, INVALID_OPERATION);
4117 break;
4118 }
4119
4120 if (handleDequeueInputBuffer(replyID, true /* new request */)) {
4121 break;
4122 }
4123
4124 int64_t timeoutUs;
4125 CHECK(msg->findInt64("timeoutUs", &timeoutUs));
4126
4127 if (timeoutUs == 0LL) {
4128 PostReplyWithError(replyID, -EAGAIN);
4129 break;
4130 }
4131
4132 mFlags |= kFlagDequeueInputPending;
4133 mDequeueInputReplyID = replyID;
4134
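// Arm a timeout tagged with a generation counter; if the dequeue has already been
// answered by the time the timeout fires, the stale generation makes it a no-op.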
4135 if (timeoutUs > 0LL) {
4136 sp<AMessage> timeoutMsg =
4137 new AMessage(kWhatDequeueInputTimedOut, this);
4138 timeoutMsg->setInt32(
4139 "generation", ++mDequeueInputTimeoutGeneration);
4140 timeoutMsg->post(timeoutUs);
4141 }
4142 break;
4143 }
4144
4145 case kWhatDequeueInputTimedOut:
4146 {
4147 int32_t generation;
4148 CHECK(msg->findInt32("generation", &generation));
4149
4150 if (generation != mDequeueInputTimeoutGeneration) {
4151 // Obsolete
4152 break;
4153 }
4154
4155 CHECK(mFlags & kFlagDequeueInputPending);
4156
4157 PostReplyWithError(mDequeueInputReplyID, -EAGAIN);
4158
4159 mFlags &= ~kFlagDequeueInputPending;
4160 mDequeueInputReplyID = 0;
4161 break;
4162 }
4163
4164 case kWhatQueueInputBuffer:
4165 {
4166 sp<AReplyToken> replyID;
4167 CHECK(msg->senderAwaitsResponse(&replyID));
4168
4169 if (!isExecuting()) {
4170 PostReplyWithError(replyID, INVALID_OPERATION);
4171 break;
4172 } else if (mFlags & kFlagStickyError) {
4173 PostReplyWithError(replyID, getStickyError());
4174 break;
4175 }
4176
4177 status_t err = UNKNOWN_ERROR;
4178 if (!mLeftover.empty()) {
4179 mLeftover.push_back(msg);
4180 size_t index;
4181 msg->findSize("index", &index);
4182 err = handleLeftover(index);
4183 } else {
4184 err = onQueueInputBuffer(msg);
4185 }
4186
4187 PostReplyWithError(replyID, err);
4188 break;
4189 }
4190
4191 case kWhatDequeueOutputBuffer:
4192 {
4193 sp<AReplyToken> replyID;
4194 CHECK(msg->senderAwaitsResponse(&replyID));
4195
4196 if (mFlags & kFlagIsAsync) {
4197 ALOGE("dequeueOutputBuffer can't be used in async mode");
4198 PostReplyWithError(replyID, INVALID_OPERATION);
4199 break;
4200 }
4201
4202 if (handleDequeueOutputBuffer(replyID, true /* new request */)) {
4203 break;
4204 }
4205
4206 int64_t timeoutUs;
4207 CHECK(msg->findInt64("timeoutUs", &timeoutUs));
4208
4209 if (timeoutUs == 0LL) {
4210 PostReplyWithError(replyID, -EAGAIN);
4211 break;
4212 }
4213
4214 mFlags |= kFlagDequeueOutputPending;
4215 mDequeueOutputReplyID = replyID;
4216
4217 if (timeoutUs > 0LL) {
4218 sp<AMessage> timeoutMsg =
4219 new AMessage(kWhatDequeueOutputTimedOut, this);
4220 timeoutMsg->setInt32(
4221 "generation", ++mDequeueOutputTimeoutGeneration);
4222 timeoutMsg->post(timeoutUs);
4223 }
4224 break;
4225 }
4226
4227 case kWhatDequeueOutputTimedOut:
4228 {
4229 int32_t generation;
4230 CHECK(msg->findInt32("generation", &generation));
4231
4232 if (generation != mDequeueOutputTimeoutGeneration) {
4233 // Obsolete
4234 break;
4235 }
4236
4237 CHECK(mFlags & kFlagDequeueOutputPending);
4238
4239 PostReplyWithError(mDequeueOutputReplyID, -EAGAIN);
4240
4241 mFlags &= ~kFlagDequeueOutputPending;
4242 mDequeueOutputReplyID = 0;
4243 break;
4244 }
4245
4246 case kWhatReleaseOutputBuffer:
4247 {
4248 sp<AReplyToken> replyID;
4249 CHECK(msg->senderAwaitsResponse(&replyID));
4250
4251 if (!isExecuting()) {
4252 PostReplyWithError(replyID, INVALID_OPERATION);
4253 break;
4254 } else if (mFlags & kFlagStickyError) {
4255 PostReplyWithError(replyID, getStickyError());
4256 break;
4257 }
4258
4259 status_t err = onReleaseOutputBuffer(msg);
4260
4261 PostReplyWithError(replyID, err);
4262 break;
4263 }
4264
4265 case kWhatSignalEndOfInputStream:
4266 {
4267 if (!isExecuting() || !mHaveInputSurface) {
4268 PostReplyWithError(msg, INVALID_OPERATION);
4269 break;
4270 } else if (mFlags & kFlagStickyError) {
4271 PostReplyWithError(msg, getStickyError());
4272 break;
4273 }
4274
4275 if (mReplyID) {
4276 mDeferredMessages.push_back(msg);
4277 break;
4278 }
4279 sp<AReplyToken> replyID;
4280 CHECK(msg->senderAwaitsResponse(&replyID));
4281
4282 mReplyID = replyID;
4283 mCodec->signalEndOfInputStream();
4284 break;
4285 }
4286
4287 case kWhatGetBuffers:
4288 {
4289 sp<AReplyToken> replyID;
4290 CHECK(msg->senderAwaitsResponse(&replyID));
4291 if (!isExecuting() || (mFlags & kFlagIsAsync)) {
4292 PostReplyWithError(replyID, INVALID_OPERATION);
4293 break;
4294 } else if (mFlags & kFlagStickyError) {
4295 PostReplyWithError(replyID, getStickyError());
4296 break;
4297 }
4298
4299 int32_t portIndex;
4300 CHECK(msg->findInt32("portIndex", &portIndex));
4301
4302 Vector<sp<MediaCodecBuffer> > *dstBuffers;
4303 CHECK(msg->findPointer("buffers", (void **)&dstBuffers));
4304
4305 dstBuffers->clear();
4306 // If we're using input surface (either non-persistent created by
4307 // createInputSurface(), or persistent set by setInputSurface()),
4308 // give the client an empty input buffers array.
4309 if (portIndex != kPortIndexInput || !mHaveInputSurface) {
4310 if (portIndex == kPortIndexInput) {
4311 mBufferChannel->getInputBufferArray(dstBuffers);
4312 } else {
4313 mBufferChannel->getOutputBufferArray(dstBuffers);
4314 }
4315 }
4316
4317 (new AMessage)->postReply(replyID);
4318 break;
4319 }
4320
4321 case kWhatFlush:
4322 {
4323 if (!isExecuting()) {
4324 PostReplyWithError(msg, INVALID_OPERATION);
4325 break;
4326 } else if (mFlags & kFlagStickyError) {
4327 PostReplyWithError(msg, getStickyError());
4328 break;
4329 }
4330
4331 if (mReplyID) {
4332 mDeferredMessages.push_back(msg);
4333 break;
4334 }
4335 sp<AReplyToken> replyID;
4336 CHECK(msg->senderAwaitsResponse(&replyID));
4337
4338 mReplyID = replyID;
4339 // TODO: skip flushing if already FLUSHED
4340 setState(FLUSHING);
4341
4342 mCodec->signalFlush();
4343 returnBuffersToCodec();
4344 TunnelPeekState previousState = mTunnelPeekState;
4345 mTunnelPeekState = TunnelPeekState::kEnabledNoBuffer;
4346 ALOGV("TunnelPeekState: %s -> %s",
4347 asString(previousState),
4348 asString(TunnelPeekState::kEnabledNoBuffer));
4349 break;
4350 }
4351
4352 case kWhatGetInputFormat:
4353 case kWhatGetOutputFormat:
4354 {
4355 sp<AMessage> format =
4356 (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat);
4357
4358 sp<AReplyToken> replyID;
4359 CHECK(msg->senderAwaitsResponse(&replyID));
4360
4361 if ((mState != CONFIGURED && mState != STARTING &&
4362 mState != STARTED && mState != FLUSHING &&
4363 mState != FLUSHED)
4364 || format == NULL) {
4365 PostReplyWithError(replyID, INVALID_OPERATION);
4366 break;
4367 } else if (mFlags & kFlagStickyError) {
4368 PostReplyWithError(replyID, getStickyError());
4369 break;
4370 }
4371
4372 sp<AMessage> response = new AMessage;
4373 response->setMessage("format", format);
4374 response->postReply(replyID);
4375 break;
4376 }
4377
4378 case kWhatRequestIDRFrame:
4379 {
4380 mCodec->signalRequestIDRFrame();
4381 break;
4382 }
4383
4384 case kWhatRequestActivityNotification:
4385 {
4386 CHECK(mActivityNotify == NULL);
4387 CHECK(msg->findMessage("notify", &mActivityNotify));
4388
4389 postActivityNotificationIfPossible();
4390 break;
4391 }
4392
4393 case kWhatGetName:
4394 {
4395 sp<AReplyToken> replyID;
4396 CHECK(msg->senderAwaitsResponse(&replyID));
4397
4398 if (mComponentName.empty()) {
4399 PostReplyWithError(replyID, INVALID_OPERATION);
4400 break;
4401 }
4402
4403 sp<AMessage> response = new AMessage;
4404 response->setString("name", mComponentName.c_str());
4405 response->postReply(replyID);
4406 break;
4407 }
4408
4409 case kWhatGetCodecInfo:
4410 {
4411 sp<AReplyToken> replyID;
4412 CHECK(msg->senderAwaitsResponse(&replyID));
4413
4414 sp<AMessage> response = new AMessage;
4415 response->setObject("codecInfo", mCodecInfo);
4416 response->postReply(replyID);
4417 break;
4418 }
4419
4420 case kWhatSetParameters:
4421 {
4422 sp<AReplyToken> replyID;
4423 CHECK(msg->senderAwaitsResponse(&replyID));
4424
4425 sp<AMessage> params;
4426 CHECK(msg->findMessage("params", &params));
4427
4428 status_t err = onSetParameters(params);
4429
4430 PostReplyWithError(replyID, err);
4431 break;
4432 }
4433
4434 case kWhatDrmReleaseCrypto:
4435 {
4436 onReleaseCrypto(msg);
4437 break;
4438 }
4439
4440 case kWhatCheckBatteryStats:
4441 {
4442 if (mBatteryChecker != nullptr) {
4443 mBatteryChecker->onCheckBatteryTimer(msg, [this] () {
4444 mResourceManagerProxy->removeResource(
4445 MediaResource::VideoBatteryResource());
4446 });
4447 }
4448 break;
4449 }
4450
4451 default:
4452 TRESPASS();
4453 }
4454 }
4455
4456 void MediaCodec::handleOutputFormatChangeIfNeeded(const sp<MediaCodecBuffer> &buffer) {
4457 sp<AMessage> format = buffer->format();
4458 if (mOutputFormat == format) {
4459 return;
4460 }
4461 if (mFlags & kFlagUseBlockModel) {
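// In block model mode, attach the set of keys that differ between the old and new
// output formats (the union of both diff directions) to the buffer's meta so the
// client knows which entries to re-read.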
4462 sp<AMessage> diff1 = mOutputFormat->changesFrom(format);
4463 sp<AMessage> diff2 = format->changesFrom(mOutputFormat);
4464 std::set<std::string> keys;
4465 size_t numEntries = diff1->countEntries();
4466 AMessage::Type type;
4467 for (size_t i = 0; i < numEntries; ++i) {
4468 keys.emplace(diff1->getEntryNameAt(i, &type));
4469 }
4470 numEntries = diff2->countEntries();
4471 for (size_t i = 0; i < numEntries; ++i) {
4472 keys.emplace(diff2->getEntryNameAt(i, &type));
4473 }
4474 sp<WrapperObject<std::set<std::string>>> changedKeys{
4475 new WrapperObject<std::set<std::string>>{std::move(keys)}};
4476 buffer->meta()->setObject("changedKeys", changedKeys);
4477 }
4478 mOutputFormat = format;
4479 mapFormat(mComponentName, format, nullptr, true);
4480 ALOGV("[%s] output format changed to: %s",
4481 mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
4482
4483 if (mSoftRenderer == NULL &&
4484 mSurface != NULL &&
4485 (mFlags & kFlagUsesSoftwareRenderer)) {
4486 AString mime;
4487 CHECK(mOutputFormat->findString("mime", &mime));
4488
4489 // TODO: propagate color aspects to software renderer to allow better
4490 // color conversion to RGB. For now, just mark dataspace for YUV
4491 // rendering.
4492 int32_t dataSpace;
4493 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
4494 ALOGD("[%s] setting dataspace on output surface to #%x",
4495 mComponentName.c_str(), dataSpace);
4496 int err = native_window_set_buffers_data_space(
4497 mSurface.get(), (android_dataspace)dataSpace);
4498 ALOGW_IF(err != 0, "failed to set dataspace on surface (%d)", err);
4499 }
4500 if (mOutputFormat->contains("hdr-static-info")) {
4501 HDRStaticInfo info;
4502 if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
4503 setNativeWindowHdrMetadata(mSurface.get(), &info);
4504 }
4505 }
4506
4507 sp<ABuffer> hdr10PlusInfo;
4508 if (mOutputFormat->findBuffer("hdr10-plus-info", &hdr10PlusInfo)
4509 && hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
4510 native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
4511 hdr10PlusInfo->size(), hdr10PlusInfo->data());
4512 }
4513
4514 if (mime.startsWithIgnoreCase("video/")) {
4515 mSurface->setDequeueTimeout(-1);
4516 mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
4517 }
4518 }
4519
4520 requestCpuBoostIfNeeded();
4521
4522 if (mFlags & kFlagIsEncoder) {
4523 // Before we announce the format change we should
4524 // collect codec specific data and amend the output
4525 // format as necessary.
4526 int32_t flags = 0;
4527 (void) buffer->meta()->findInt32("flags", &flags);
4528 if ((flags & BUFFER_FLAG_CODECCONFIG) && !(mFlags & kFlagIsSecure)
4529 && !mOwnerName.startsWith("codec2::")) {
4530 status_t err =
4531 amendOutputFormatWithCodecSpecificData(buffer);
4532
4533 if (err != OK) {
4534 ALOGE("Codec spit out malformed codec "
4535 "specific data!");
4536 }
4537 }
4538 }
4539 if (mFlags & kFlagIsAsync) {
4540 onOutputFormatChanged();
4541 } else {
4542 mFlags |= kFlagOutputFormatChanged;
4543 postActivityNotificationIfPossible();
4544 }
4545
4546 // Notify mCrypto of video resolution changes
4547 if (mCrypto != NULL) {
4548 int32_t left, top, right, bottom, width, height;
4549 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
4550 mCrypto->notifyResolution(right - left + 1, bottom - top + 1);
4551 } else if (mOutputFormat->findInt32("width", &width)
4552 && mOutputFormat->findInt32("height", &height)) {
4553 mCrypto->notifyResolution(width, height);
4554 }
4555 }
4556 }
4557
4558 void MediaCodec::extractCSD(const sp<AMessage> &format) {
4559 mCSD.clear();
4560
4561 size_t i = 0;
4562 for (;;) {
4563 sp<ABuffer> csd;
4564 if (!format->findBuffer(AStringPrintf("csd-%u", i).c_str(), &csd)) {
4565 break;
4566 }
4567 if (csd->size() == 0) {
4568 ALOGW("csd-%zu size is 0", i);
4569 }
4570
4571 mCSD.push_back(csd);
4572 ++i;
4573 }
4574
4575 ALOGV("Found %zu pieces of codec specific data.", mCSD.size());
4576 }
4577
4578 status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) {
4579 CHECK(!mCSD.empty());
4580
4581 sp<ABuffer> csd = *mCSD.begin();
4582 mCSD.erase(mCSD.begin());
4583 std::shared_ptr<C2Buffer> c2Buffer;
4584 sp<hardware::HidlMemory> memory;
4585 size_t offset = 0;
4586
4587 if (mFlags & kFlagUseBlockModel) {
4588 if (hasCryptoOrDescrambler()) {
4589 constexpr size_t kInitialDealerCapacity = 1048576; // 1MB
4590 thread_local sp<MemoryDealer> sDealer = new MemoryDealer(
4591 kInitialDealerCapacity, "CSD(1MB)");
4592 sp<IMemory> mem = sDealer->allocate(csd->size());
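// If the dealer is too small for this CSD, replace it with one whose capacity is
// doubled until the CSD comfortably fits, then retry the allocation.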
4593 if (mem == nullptr) {
4594 size_t newDealerCapacity = sDealer->getMemoryHeap()->getSize() * 2;
4595 while (csd->size() * 2 > newDealerCapacity) {
4596 newDealerCapacity *= 2;
4597 }
4598 sDealer = new MemoryDealer(
4599 newDealerCapacity,
4600 AStringPrintf("CSD(%dMB)", newDealerCapacity / 1048576).c_str());
4601 mem = sDealer->allocate(csd->size());
4602 }
4603 memcpy(mem->unsecurePointer(), csd->data(), csd->size());
4604 ssize_t heapOffset;
4605 memory = hardware::fromHeap(mem->getMemory(&heapOffset, nullptr));
4606 offset += heapOffset;
4607 } else {
4608 std::shared_ptr<C2LinearBlock> block =
4609 FetchLinearBlock(csd->size(), {std::string{mComponentName.c_str()}});
4610 C2WriteView view{block->map().get()};
4611 if (view.error() != C2_OK) {
4612 return -EINVAL;
4613 }
4614 if (csd->size() > view.capacity()) {
4615 return -EINVAL;
4616 }
4617 memcpy(view.base(), csd->data(), csd->size());
4618 c2Buffer = C2Buffer::CreateLinearBuffer(block->share(0, csd->size(), C2Fence{}));
4619 }
4620 } else {
4621 const BufferInfo &info = mPortBuffers[kPortIndexInput][bufferIndex];
4622 const sp<MediaCodecBuffer> &codecInputData = info.mData;
4623
4624 if (csd->size() > codecInputData->capacity()) {
4625 return -EINVAL;
4626 }
4627 if (codecInputData->data() == NULL) {
4628 ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
4629 return -EINVAL;
4630 }
4631
4632 memcpy(codecInputData->data(), csd->data(), csd->size());
4633 }
4634
4635 AString errorDetailMsg;
4636
4637 sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this);
4638 msg->setSize("index", bufferIndex);
4639 msg->setSize("offset", 0);
4640 msg->setSize("size", csd->size());
4641 msg->setInt64("timeUs", 0LL);
4642 msg->setInt32("flags", BUFFER_FLAG_CODECCONFIG);
4643 msg->setPointer("errorDetailMsg", &errorDetailMsg);
4644 if (c2Buffer) {
4645 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
4646 new WrapperObject<std::shared_ptr<C2Buffer>>{c2Buffer}};
4647 msg->setObject("c2buffer", obj);
4648 msg->setMessage("tunings", new AMessage);
4649 } else if (memory) {
4650 sp<WrapperObject<sp<hardware::HidlMemory>>> obj{
4651 new WrapperObject<sp<hardware::HidlMemory>>{memory}};
4652 msg->setObject("memory", obj);
4653 msg->setMessage("tunings", new AMessage);
4654 }
4655
4656 return onQueueInputBuffer(msg);
4657 }
4658
4659 void MediaCodec::setState(State newState) {
4660 if (newState == INITIALIZED || newState == UNINITIALIZED) {
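// Dropping back to INITIALIZED or UNINITIALIZED discards all per-configuration
// state: the software renderer, crypto/descrambler, surface, formats and mode flags.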
4661 delete mSoftRenderer;
4662 mSoftRenderer = NULL;
4663
4664 if ( mCrypto != NULL ) {
4665 ALOGV("setState: ~mCrypto: %p (%d)",
4666 mCrypto.get(), (mCrypto != NULL ? mCrypto->getStrongCount() : 0));
4667 }
4668 mCrypto.clear();
4669 mDescrambler.clear();
4670 handleSetSurface(NULL);
4671
4672 mInputFormat.clear();
4673 mOutputFormat.clear();
4674 mFlags &= ~kFlagOutputFormatChanged;
4675 mFlags &= ~kFlagOutputBuffersChanged;
4676 mFlags &= ~kFlagStickyError;
4677 mFlags &= ~kFlagIsEncoder;
4678 mFlags &= ~kFlagIsAsync;
4679 mStickyError = OK;
4680
4681 mActivityNotify.clear();
4682 mCallback.clear();
4683 }
4684
4685 if (newState == UNINITIALIZED) {
4686 // return any straggling buffers, e.g. if we got here on an error
4687 returnBuffersToCodec();
4688
4689 // The component is gone; mediaserver is probably back up already,
4690 // but should definitely be back up should we try to instantiate
4691 // another component... and the cycle continues.
4692 mFlags &= ~kFlagSawMediaServerDie;
4693 }
4694
4695 mState = newState;
4696
4697 if (mBatteryChecker != nullptr) {
4698 mBatteryChecker->setExecuting(isExecuting());
4699 }
4700
4701 cancelPendingDequeueOperations();
4702 }
4703
4704 void MediaCodec::returnBuffersToCodec(bool isReclaim) {
4705 returnBuffersToCodecOnPort(kPortIndexInput, isReclaim);
4706 returnBuffersToCodecOnPort(kPortIndexOutput, isReclaim);
4707 }
4708
4709 void MediaCodec::returnBuffersToCodecOnPort(int32_t portIndex, bool isReclaim) {
4710 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
4711 Mutex::Autolock al(mBufferLock);
4712
4713 if (portIndex == kPortIndexInput) {
4714 mLeftover.clear();
4715 }
4716 for (size_t i = 0; i < mPortBuffers[portIndex].size(); ++i) {
4717 BufferInfo *info = &mPortBuffers[portIndex][i];
4718
4719 if (info->mData != nullptr) {
4720 sp<MediaCodecBuffer> buffer = info->mData;
4721 if (isReclaim && info->mOwnedByClient) {
4722 ALOGD("port %d buffer %zu still owned by client when codec is reclaimed",
4723 portIndex, i);
4724 } else {
4725 info->mOwnedByClient = false;
4726 info->mData.clear();
4727 }
4728 mBufferChannel->discardBuffer(buffer);
4729 }
4730 }
4731
4732 mAvailPortBuffers[portIndex].clear();
4733 }
4734
4735 size_t MediaCodec::updateBuffers(
4736 int32_t portIndex, const sp<AMessage> &msg) {
4737 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
4738 size_t index;
4739 CHECK(msg->findSize("index", &index));
4740 sp<RefBase> obj;
4741 CHECK(msg->findObject("buffer", &obj));
4742 sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
4743
4744 {
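// Grow the buffer table in aligned chunks so that registering buffers one at a
// time does not resize the vector on every call.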
4745 Mutex::Autolock al(mBufferLock);
4746 if (mPortBuffers[portIndex].size() <= index) {
4747 mPortBuffers[portIndex].resize(align(index + 1, kNumBuffersAlign));
4748 }
4749 mPortBuffers[portIndex][index].mData = buffer;
4750 }
4751 mAvailPortBuffers[portIndex].push_back(index);
4752
4753 return index;
4754 }
4755
4756 status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
4757 size_t index;
4758 size_t offset;
4759 size_t size;
4760 int64_t timeUs;
4761 uint32_t flags;
4762 CHECK(msg->findSize("index", &index));
4763 CHECK(msg->findInt64("timeUs", &timeUs));
4764 CHECK(msg->findInt32("flags", (int32_t *)&flags));
4765 std::shared_ptr<C2Buffer> c2Buffer;
4766 sp<hardware::HidlMemory> memory;
4767 sp<RefBase> obj;
4768 if (msg->findObject("c2buffer", &obj)) {
4769 CHECK(obj);
4770 c2Buffer = static_cast<WrapperObject<std::shared_ptr<C2Buffer>> *>(obj.get())->value;
4771 } else if (msg->findObject("memory", &obj)) {
4772 CHECK(obj);
4773 memory = static_cast<WrapperObject<sp<hardware::HidlMemory>> *>(obj.get())->value;
4774 CHECK(msg->findSize("offset", &offset));
4775 } else {
4776 CHECK(msg->findSize("offset", &offset));
4777 }
4778 const CryptoPlugin::SubSample *subSamples;
4779 size_t numSubSamples;
4780 const uint8_t *key;
4781 const uint8_t *iv;
4782 CryptoPlugin::Mode mode = CryptoPlugin::kMode_Unencrypted;
4783
4784 // We allow the simpler queueInputBuffer API to be used even in
4785 // secure mode, by fabricating a single unencrypted subSample.
4786 CryptoPlugin::SubSample ss;
4787 CryptoPlugin::Pattern pattern;
4788
4789 if (msg->findSize("size", &size)) {
4790 if (hasCryptoOrDescrambler()) {
4791 ss.mNumBytesOfClearData = size;
4792 ss.mNumBytesOfEncryptedData = 0;
4793
4794 subSamples = &ss;
4795 numSubSamples = 1;
4796 key = NULL;
4797 iv = NULL;
4798 pattern.mEncryptBlocks = 0;
4799 pattern.mSkipBlocks = 0;
4800 }
4801 } else if (!c2Buffer) {
4802 if (!hasCryptoOrDescrambler()) {
4803 ALOGE("[%s] queuing secure buffer without mCrypto or mDescrambler!",
4804 mComponentName.c_str());
4805 return -EINVAL;
4806 }
4807
4808 CHECK(msg->findPointer("subSamples", (void **)&subSamples));
4809 CHECK(msg->findSize("numSubSamples", &numSubSamples));
4810 CHECK(msg->findPointer("key", (void **)&key));
4811 CHECK(msg->findPointer("iv", (void **)&iv));
4812 CHECK(msg->findInt32("encryptBlocks", (int32_t *)&pattern.mEncryptBlocks));
4813 CHECK(msg->findInt32("skipBlocks", (int32_t *)&pattern.mSkipBlocks));
4814
4815 int32_t tmp;
4816 CHECK(msg->findInt32("mode", &tmp));
4817
4818 mode = (CryptoPlugin::Mode)tmp;
4819
4820 size = 0;
4821 for (size_t i = 0; i < numSubSamples; ++i) {
4822 size += subSamples[i].mNumBytesOfClearData;
4823 size += subSamples[i].mNumBytesOfEncryptedData;
4824 }
4825 }
4826
4827 if (index >= mPortBuffers[kPortIndexInput].size()) {
4828 return -ERANGE;
4829 }
4830
4831 BufferInfo *info = &mPortBuffers[kPortIndexInput][index];
4832 sp<MediaCodecBuffer> buffer = info->mData;
4833
4834 if (c2Buffer || memory) {
4835 sp<AMessage> tunings;
4836 CHECK(msg->findMessage("tunings", &tunings));
4837 onSetParameters(tunings);
4838
4839 status_t err = OK;
4840 if (c2Buffer) {
4841 err = mBufferChannel->attachBuffer(c2Buffer, buffer);
4842 } else if (memory) {
4843 err = mBufferChannel->attachEncryptedBuffer(
4844 memory, (mFlags & kFlagIsSecure), key, iv, mode, pattern,
4845 offset, subSamples, numSubSamples, buffer);
4846 } else {
4847 err = UNKNOWN_ERROR;
4848 }
4849
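// If the attached linear C2Buffer holds more data than the codec buffer accepted,
// queue the remainder as leftover input to be submitted later, and defer EOS until
// the leftover is consumed.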
4850 if (err == OK && !buffer->asC2Buffer()
4851 && c2Buffer && c2Buffer->data().type() == C2BufferData::LINEAR) {
4852 C2ConstLinearBlock block{c2Buffer->data().linearBlocks().front()};
4853 if (block.size() > buffer->size()) {
4854 C2ConstLinearBlock leftover = block.subBlock(
4855 block.offset() + buffer->size(), block.size() - buffer->size());
4856 sp<WrapperObject<std::shared_ptr<C2Buffer>>> obj{
4857 new WrapperObject<std::shared_ptr<C2Buffer>>{
4858 C2Buffer::CreateLinearBuffer(leftover)}};
4859 msg->setObject("c2buffer", obj);
4860 mLeftover.push_front(msg);
4861 // Not sending EOS if we have leftovers
4862 flags &= ~BUFFER_FLAG_EOS;
4863 }
4864 }
4865
4866 offset = buffer->offset();
4867 size = buffer->size();
4868 if (err != OK) {
4869 return err;
4870 }
4871 }
4872
4873 if (buffer == nullptr || !info->mOwnedByClient) {
4874 return -EACCES;
4875 }
4876
4877 if (offset + size > buffer->capacity()) {
4878 return -EINVAL;
4879 }
4880
4881 buffer->setRange(offset, size);
4882 buffer->meta()->setInt64("timeUs", timeUs);
4883 if (flags & BUFFER_FLAG_EOS) {
4884 buffer->meta()->setInt32("eos", true);
4885 }
4886
4887 if (flags & BUFFER_FLAG_CODECCONFIG) {
4888 buffer->meta()->setInt32("csd", true);
4889 }
4890
4891 if (mTunneled) {
4892 TunnelPeekState previousState = mTunnelPeekState;
4893 switch(mTunnelPeekState){
4894 case TunnelPeekState::kEnabledNoBuffer:
4895 buffer->meta()->setInt32("tunnel-first-frame", 1);
4896 mTunnelPeekState = TunnelPeekState::kEnabledQueued;
4897 ALOGV("TunnelPeekState: %s -> %s",
4898 asString(previousState),
4899 asString(mTunnelPeekState));
4900 break;
4901 case TunnelPeekState::kDisabledNoBuffer:
4902 buffer->meta()->setInt32("tunnel-first-frame", 1);
4903 mTunnelPeekState = TunnelPeekState::kDisabledQueued;
4904 ALOGV("TunnelPeekState: %s -> %s",
4905 asString(previousState),
4906 asString(mTunnelPeekState));
4907 break;
4908 default:
4909 break;
4910 }
4911 }
4912
4913 status_t err = OK;
4914 if (hasCryptoOrDescrambler() && !c2Buffer && !memory) {
4915 AString *errorDetailMsg;
4916 CHECK(msg->findPointer("errorDetailMsg", (void **)&errorDetailMsg));
4917 // Notify mCrypto of video resolution changes
4918 if (mTunneled && mCrypto != NULL) {
4919 int32_t width, height;
4920 if (mInputFormat->findInt32("width", &width) &&
4921 mInputFormat->findInt32("height", &height) && width > 0 && height > 0) {
4922 if (width != mTunneledInputWidth || height != mTunneledInputHeight) {
4923 mTunneledInputWidth = width;
4924 mTunneledInputHeight = height;
4925 mCrypto->notifyResolution(width, height);
4926 }
4927 }
4928 }
4929 err = mBufferChannel->queueSecureInputBuffer(
4930 buffer,
4931 (mFlags & kFlagIsSecure),
4932 key,
4933 iv,
4934 mode,
4935 pattern,
4936 subSamples,
4937 numSubSamples,
4938 errorDetailMsg);
4939 if (err != OK) {
4940 mediametrics_setInt32(mMetricsHandle, kCodecQueueSecureInputBufferError, err);
4941 ALOGW("Log queueSecureInputBuffer error: %d", err);
4942 }
4943 } else {
4944 err = mBufferChannel->queueInputBuffer(buffer);
4945 if (err != OK) {
4946 mediametrics_setInt32(mMetricsHandle, kCodecQueueInputBufferError, err);
4947 ALOGW("Log queueInputBuffer error: %d", err);
4948 }
4949 }
4950
4951 if (err == OK) {
4952 // synchronization boundary for getBufferAndFormat
4953 Mutex::Autolock al(mBufferLock);
4954 info->mOwnedByClient = false;
4955 info->mData.clear();
4956
4957 statsBufferSent(timeUs, buffer);
4958 }
4959
4960 return err;
4961 }
4962
4963 status_t MediaCodec::handleLeftover(size_t index) {
4964 if (mLeftover.empty()) {
4965 return OK;
4966 }
4967 sp<AMessage> msg = mLeftover.front();
4968 mLeftover.pop_front();
4969 msg->setSize("index", index);
4970 return onQueueInputBuffer(msg);
4971 }
4972
4973 //static
4974 size_t MediaCodec::CreateFramesRenderedMessage(
4975 const std::list<FrameRenderTracker::Info> &done, sp<AMessage> &msg) {
4976 size_t index = 0;
4977
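// Each rendered frame contributes a "<index>-media-time-us" / "<index>-system-nano"
// pair to the message; frames with a negative render time were dropped from tracking
// and are skipped. The return value is the number of frames recorded.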
4978 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
4979 it != done.cend(); ++it) {
4980 if (it->getRenderTimeNs() < 0) {
4981 continue; // dropped frame from tracking
4982 }
4983 msg->setInt64(AStringPrintf("%zu-media-time-us", index).c_str(), it->getMediaTimeUs());
4984 msg->setInt64(AStringPrintf("%zu-system-nano", index).c_str(), it->getRenderTimeNs());
4985 ++index;
4986 }
4987 return index;
4988 }
4989
4990 status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
4991 size_t index;
4992 CHECK(msg->findSize("index", &index));
4993
4994 int32_t render;
4995 if (!msg->findInt32("render", &render)) {
4996 render = 0;
4997 }
4998
4999 if (!isExecuting()) {
5000 return -EINVAL;
5001 }
5002
5003 if (index >= mPortBuffers[kPortIndexOutput].size()) {
5004 return -ERANGE;
5005 }
5006
5007 BufferInfo *info = &mPortBuffers[kPortIndexOutput][index];
5008
5009 if (info->mData == nullptr || !info->mOwnedByClient) {
5010 return -EACCES;
5011 }
5012
5013 // synchronization boundary for getBufferAndFormat
5014 sp<MediaCodecBuffer> buffer;
5015 {
5016 Mutex::Autolock al(mBufferLock);
5017 info->mOwnedByClient = false;
5018 buffer = info->mData;
5019 info->mData.clear();
5020 }
5021
5022 if (render && buffer->size() != 0) {
5023 int64_t mediaTimeUs = -1;
5024 buffer->meta()->findInt64("timeUs", &mediaTimeUs);
5025
5026 int64_t renderTimeNs = 0;
5027 if (!msg->findInt64("timestampNs", &renderTimeNs)) {
5028 // use media timestamp if client did not request a specific render timestamp
5029 ALOGV("using buffer PTS of %lld", (long long)mediaTimeUs);
5030 renderTimeNs = mediaTimeUs * 1000;
5031 }
5032
5033 if (mSoftRenderer != NULL) {
5034 std::list<FrameRenderTracker::Info> doneFrames = mSoftRenderer->render(
5035 buffer->data(), buffer->size(), mediaTimeUs, renderTimeNs,
5036 mPortBuffers[kPortIndexOutput].size(), buffer->format());
5037
5038 // if we are running, notify rendered frames
5039 if (!doneFrames.empty() && mState == STARTED && mOnFrameRenderedNotification != NULL) {
5040 sp<AMessage> notify = mOnFrameRenderedNotification->dup();
5041 sp<AMessage> data = new AMessage;
5042 if (CreateFramesRenderedMessage(doneFrames, data)) {
5043 notify->setMessage("data", data);
5044 notify->post();
5045 }
5046 }
5047 }
5048 status_t err = mBufferChannel->renderOutputBuffer(buffer, renderTimeNs);
5049
5050 if (err == NO_INIT) {
5051 ALOGE("rendering to non-initilized(obsolete) surface");
5052 return err;
5053 }
5054 if (err != OK) {
5055 ALOGI("rendring output error %d", err);
5056 }
5057 } else {
5058 mBufferChannel->discardBuffer(buffer);
5059 }
5060
5061 return OK;
5062 }
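// Illustrative client-side counterpart (assumption): the "render"/"timestampNs" fields
// handled above correspond to the NDK calls below, where `codec` and `index` come from
// the client's own output-buffer loop.
//
//     AMediaCodec_releaseOutputBufferAtTime(codec, index, renderTimeNs);   // render at a time
//     AMediaCodec_releaseOutputBuffer(codec, index, false /* render */);   // discard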
5063
5064 MediaCodec::BufferInfo *MediaCodec::peekNextPortBuffer(int32_t portIndex) {
5065 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
5066
5067 List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
5068
5069 if (availBuffers->empty()) {
5070 return nullptr;
5071 }
5072
5073 return &mPortBuffers[portIndex][*availBuffers->begin()];
5074 }
5075
5076 ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
5077 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
5078
5079 BufferInfo *info = peekNextPortBuffer(portIndex);
5080 if (!info) {
5081 return -EAGAIN;
5082 }
5083
5084 List<size_t> *availBuffers = &mAvailPortBuffers[portIndex];
5085 size_t index = *availBuffers->begin();
5086 CHECK_EQ(info, &mPortBuffers[portIndex][index]);
5087 availBuffers->erase(availBuffers->begin());
5088
5089 CHECK(!info->mOwnedByClient);
5090 {
5091 Mutex::Autolock al(mBufferLock);
5092 info->mOwnedByClient = true;
5093
5094 // set image-data
5095 if (info->mData->format() != NULL) {
5096 sp<ABuffer> imageData;
5097 if (info->mData->format()->findBuffer("image-data", &imageData)) {
5098 info->mData->meta()->setBuffer("image-data", imageData);
5099 }
5100 int32_t left, top, right, bottom;
5101 if (info->mData->format()->findRect("crop", &left, &top, &right, &bottom)) {
5102 info->mData->meta()->setRect("crop-rect", left, top, right, bottom);
5103 }
5104 }
5105 }
5106
5107 return index;
5108 }
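// Illustrative sketch (assumption): a holder of the dequeued buffer can read the crop
// rectangle propagated above back out of the buffer's meta data.
//
//     int32_t left, top, right, bottom;
//     if (info->mData->meta()->findRect("crop-rect", &left, &top, &right, &bottom)) {
//         // only the region [left, top] .. [right, bottom] contains valid pixels
//     }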
5109
5110 status_t MediaCodec::connectToSurface(const sp<Surface> &surface) {
5111 status_t err = OK;
5112 if (surface != NULL) {
5113 uint64_t oldId, newId;
5114 if (mSurface != NULL
5115 && surface->getUniqueId(&newId) == NO_ERROR
5116 && mSurface->getUniqueId(&oldId) == NO_ERROR
5117 && newId == oldId) {
5118 ALOGI("[%s] connecting to the same surface. Nothing to do.", mComponentName.c_str());
5119 return ALREADY_EXISTS;
5120 }
5121
5122 // in case we don't connect, ensure that we don't signal the surface is
5123 // connected to the screen
5124 mIsSurfaceToScreen = false;
5125
5126 err = nativeWindowConnect(surface.get(), "connectToSurface");
5127 if (err == OK) {
5128 // Require a fresh set of buffers after each connect by using a unique generation
5129 // number. Rely on the fact that the maximum process id supported by Linux is 2^22.
5130 // PID is never 0, so we don't have to worry about colliding with the default generation of 0.
5131 // TODO: come up with a unique scheme if other producers also set the generation number.
5132 static uint32_t mSurfaceGeneration = 0;
5133 uint32_t generation = (getpid() << 10) | (++mSurfaceGeneration & ((1 << 10) - 1));
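// Worked example (illustrative): for pid 1234 on its 5th connect in this process,
// generation = (1234 << 10) | 5 = 1263621; values stay distinct until the per-process
// counter wraps after 1024 connects.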
5134 surface->setGenerationNumber(generation);
5135 ALOGI("[%s] setting surface generation to %u", mComponentName.c_str(), generation);
5136
5137 // HACK: clear any free buffers. Remove when connect will automatically do this.
5138 // This is needed as the consumer may be holding onto stale frames that it can reattach
5139 // to this surface after disconnect/connect, and those free frames would inherit the new
5140 // generation number. Disconnecting after setting a unique generation prevents this.
5141 nativeWindowDisconnect(surface.get(), "connectToSurface(reconnect)");
5142 err = nativeWindowConnect(surface.get(), "connectToSurface(reconnect)");
5143 }
5144
5145 if (err != OK) {
5146 ALOGE("nativeWindowConnect returned an error: %s (%d)", strerror(-err), err);
5147 } else {
5148 if (!mAllowFrameDroppingBySurface) {
5149 disableLegacyBufferDropPostQ(surface);
5150 }
5151 // keep track whether or not the buffers of the connected surface go to the screen
5152 int result = 0;
5153 surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
5154 mIsSurfaceToScreen = result != 0;
5155 }
5156 }
5157 // do not return ALREADY_EXISTS unless surfaces are the same
5158 return err == ALREADY_EXISTS ? BAD_VALUE : err;
5159 }
5160
5161 status_t MediaCodec::disconnectFromSurface() {
5162 status_t err = OK;
5163 if (mSurface != NULL) {
5164 // Resetting generation is not technically needed, but there is no need to keep it either
5165 mSurface->setGenerationNumber(0);
5166 err = nativeWindowDisconnect(mSurface.get(), "disconnectFromSurface");
5167 if (err != OK) {
5168 ALOGW("nativeWindowDisconnect returned an error: %s (%d)", strerror(-err), err);
5169 }
5170 // assume disconnected even on error
5171 mSurface.clear();
5172 mIsSurfaceToScreen = false;
5173 }
5174 return err;
5175 }
5176
5177 status_t MediaCodec::handleSetSurface(const sp<Surface> &surface) {
5178 status_t err = OK;
5179 if (mSurface != NULL) {
5180 (void)disconnectFromSurface();
5181 }
5182 if (surface != NULL) {
5183 err = connectToSurface(surface);
5184 if (err == OK) {
5185 mSurface = surface;
5186 }
5187 }
5188 return err;
5189 }
5190
5191 void MediaCodec::onInputBufferAvailable() {
5192 int32_t index;
5193 while ((index = dequeuePortBuffer(kPortIndexInput)) >= 0) {
5194 sp<AMessage> msg = mCallback->dup();
5195 msg->setInt32("callbackID", CB_INPUT_AVAILABLE);
5196 msg->setInt32("index", index);
5197 msg->post();
5198 }
5199 }
5200
5201 void MediaCodec::onOutputBufferAvailable() {
5202 int32_t index;
5203 while ((index = dequeuePortBuffer(kPortIndexOutput)) >= 0) {
5204 const sp<MediaCodecBuffer> &buffer =
5205 mPortBuffers[kPortIndexOutput][index].mData;
5206 sp<AMessage> msg = mCallback->dup();
5207 msg->setInt32("callbackID", CB_OUTPUT_AVAILABLE);
5208 msg->setInt32("index", index);
5209 msg->setSize("offset", buffer->offset());
5210 msg->setSize("size", buffer->size());
5211
5212 int64_t timeUs;
5213 CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
5214
5215 msg->setInt64("timeUs", timeUs);
5216
5217 int32_t flags;
5218 CHECK(buffer->meta()->findInt32("flags", &flags));
5219
5220 msg->setInt32("flags", flags);
5221
5222 statsBufferReceived(timeUs, buffer);
5223
5224 msg->post();
5225 }
5226 }
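// Illustrative sketch (assumption): a CB_OUTPUT_AVAILABLE handler reads back exactly the
// keys set in the loop above.
//
//     size_t index, offset, size;
//     int64_t timeUs;
//     int32_t flags;
//     CHECK(msg->findSize("index", &index));
//     CHECK(msg->findSize("offset", &offset));
//     CHECK(msg->findSize("size", &size));
//     CHECK(msg->findInt64("timeUs", &timeUs));
//     CHECK(msg->findInt32("flags", &flags));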
5227
5228 void MediaCodec::onError(status_t err, int32_t actionCode, const char *detail) {
5229 if (mCallback != NULL) {
5230 sp<AMessage> msg = mCallback->dup();
5231 msg->setInt32("callbackID", CB_ERROR);
5232 msg->setInt32("err", err);
5233 msg->setInt32("actionCode", actionCode);
5234
5235 if (detail != NULL) {
5236 msg->setString("detail", detail);
5237 }
5238
5239 msg->post();
5240 }
5241 }
5242
5243 void MediaCodec::onOutputFormatChanged() {
5244 if (mCallback != NULL) {
5245 sp<AMessage> msg = mCallback->dup();
5246 msg->setInt32("callbackID", CB_OUTPUT_FORMAT_CHANGED);
5247 msg->setMessage("format", mOutputFormat);
5248 msg->post();
5249 }
5250 }
5251
5252 void MediaCodec::postActivityNotificationIfPossible() {
5253 if (mActivityNotify == NULL) {
5254 return;
5255 }
5256
5257 bool isErrorOrOutputChanged =
5258 (mFlags & (kFlagStickyError
5259 | kFlagOutputBuffersChanged
5260 | kFlagOutputFormatChanged));
5261
5262 if (isErrorOrOutputChanged
5263 || !mAvailPortBuffers[kPortIndexInput].empty()
5264 || !mAvailPortBuffers[kPortIndexOutput].empty()) {
5265 mActivityNotify->setInt32("input-buffers",
5266 mAvailPortBuffers[kPortIndexInput].size());
5267
5268 if (isErrorOrOutputChanged) {
5269 // we want consumer to dequeue as many times as it can
5270 mActivityNotify->setInt32("output-buffers", INT32_MAX);
5271 } else {
5272 mActivityNotify->setInt32("output-buffers",
5273 mAvailPortBuffers[kPortIndexOutput].size());
5274 }
5275 mActivityNotify->post();
5276 mActivityNotify.clear();
5277 }
5278 }
5279
5280 status_t MediaCodec::setParameters(const sp<AMessage> &params) {
5281 sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
5282 msg->setMessage("params", params);
5283
5284 sp<AMessage> response;
5285 return PostAndAwaitResponse(msg, &response);
5286 }
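// Illustrative usage (assumption): a runtime encoder bitrate change goes through this
// entry point with the "video-bitrate" key (PARAMETER_KEY_VIDEO_BITRATE).
//
//     sp<AMessage> params = new AMessage;
//     params->setInt32("video-bitrate", 2000000);
//     codec->setParameters(params);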
5287
5288 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
5289 updateLowLatency(params);
5290 mapFormat(mComponentName, params, nullptr, false);
5291 updateTunnelPeek(params);
5292 mCodec->signalSetParameters(params);
5293
5294 return OK;
5295 }
5296
5297 status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
5298 const sp<MediaCodecBuffer> &buffer) {
5299 AString mime;
5300 CHECK(mOutputFormat->findString("mime", &mime));
5301
5302 if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
5303 // Codec specific data should be SPS and PPS in a single buffer,
5304 // each prefixed by a startcode (0x00 0x00 0x00 0x01).
5305 // We separate the two and put them into the output format
5306 // under the keys "csd-0" and "csd-1".
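// Illustrative example (not from a real stream): a codec-config buffer laid out as
//     00 00 00 01 67 ...   (SPS, NAL type 7)
//     00 00 00 01 68 ...   (PPS, NAL type 8)
// ends up below as csd-0 = startcode + SPS and csd-1 = startcode + PPS.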
5307
5308 unsigned csdIndex = 0;
5309
5310 const uint8_t *data = buffer->data();
5311 size_t size = buffer->size();
5312
5313 const uint8_t *nalStart;
5314 size_t nalSize;
5315 while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
5316 sp<ABuffer> csd = new ABuffer(nalSize + 4);
5317 memcpy(csd->data(), "\x00\x00\x00\x01", 4);
5318 memcpy(csd->data() + 4, nalStart, nalSize);
5319
5320 mOutputFormat->setBuffer(
5321 AStringPrintf("csd-%u", csdIndex).c_str(), csd);
5322
5323 ++csdIndex;
5324 }
5325
5326 if (csdIndex != 2) {
5327 return ERROR_MALFORMED;
5328 }
5329 } else {
5330 // For everything else we just stash the codec specific data into
5331 // the output format as a single piece of csd under "csd-0".
5332 sp<ABuffer> csd = new ABuffer(buffer->size());
5333 memcpy(csd->data(), buffer->data(), buffer->size());
5334 csd->setRange(0, buffer->size());
5335 mOutputFormat->setBuffer("csd-0", csd);
5336 }
5337
5338 return OK;
5339 }
5340
5341 void MediaCodec::postPendingRepliesAndDeferredMessages(
5342 std::string origin, status_t err /* = OK */) {
5343 sp<AMessage> response{new AMessage};
5344 if (err != OK) {
5345 response->setInt32("err", err);
5346 }
5347 postPendingRepliesAndDeferredMessages(origin, response);
5348 }
5349
5350 void MediaCodec::postPendingRepliesAndDeferredMessages(
5351 std::string origin, const sp<AMessage> &response) {
5352 LOG_ALWAYS_FATAL_IF(
5353 !mReplyID,
5354 "postPendingRepliesAndDeferredMessages: mReplyID == null, from %s following %s",
5355 origin.c_str(),
5356 mLastReplyOrigin.c_str());
5357 mLastReplyOrigin = origin;
5358 response->postReply(mReplyID);
5359 mReplyID.clear();
5360 ALOGV_IF(!mDeferredMessages.empty(),
5361 "posting %zu deferred messages", mDeferredMessages.size());
5362 for (sp<AMessage> msg : mDeferredMessages) {
5363 msg->post();
5364 }
5365 mDeferredMessages.clear();
5366 }
5367
5368 std::string MediaCodec::stateString(State state) {
5369 const char *rval = NULL;
5370 char rawbuffer[16]; // room for "%d"
5371
5372 switch (state) {
5373 case UNINITIALIZED: rval = "UNINITIALIZED"; break;
5374 case INITIALIZING: rval = "INITIALIZING"; break;
5375 case INITIALIZED: rval = "INITIALIZED"; break;
5376 case CONFIGURING: rval = "CONFIGURING"; break;
5377 case CONFIGURED: rval = "CONFIGURED"; break;
5378 case STARTING: rval = "STARTING"; break;
5379 case STARTED: rval = "STARTED"; break;
5380 case FLUSHING: rval = "FLUSHING"; break;
5381 case FLUSHED: rval = "FLUSHED"; break;
5382 case STOPPING: rval = "STOPPING"; break;
5383 case RELEASING: rval = "RELEASING"; break;
5384 default:
5385 snprintf(rawbuffer, sizeof(rawbuffer), "%d", state);
5386 rval = rawbuffer;
5387 break;
5388 }
5389 return rval;
5390 }
5391
5392 // static
5393 status_t MediaCodec::CanFetchLinearBlock(
5394 const std::vector<std::string> &names, bool *isCompatible) {
5395 *isCompatible = false;
5396 if (names.size() == 0) {
5397 *isCompatible = true;
5398 return OK;
5399 }
5400 const CodecListCache &cache = GetCodecListCache();
5401 for (const std::string &name : names) {
5402 auto it = cache.mCodecInfoMap.find(name);
5403 if (it == cache.mCodecInfoMap.end()) {
5404 return NAME_NOT_FOUND;
5405 }
5406 const char *owner = it->second->getOwnerName();
5407 if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
5408 *isCompatible = false;
5409 return OK;
5410 } else if (strncmp(owner, "codec2::", 8) != 0) {
5411 return NAME_NOT_FOUND;
5412 }
5413 }
5414 return CCodec::CanFetchLinearBlock(names, kDefaultReadWriteUsage, isCompatible);
5415 }
5416
5417 // static
5418 std::shared_ptr<C2LinearBlock> MediaCodec::FetchLinearBlock(
5419 size_t capacity, const std::vector<std::string> &names) {
5420 return CCodec::FetchLinearBlock(capacity, kDefaultReadWriteUsage, names);
5421 }
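// Illustrative usage (assumption; the codec name is only an example): callers are
// expected to gate FetchLinearBlock on CanFetchLinearBlock.
//
//     bool compatible = false;
//     if (MediaCodec::CanFetchLinearBlock({"c2.android.aac.encoder"}, &compatible) == OK
//             && compatible) {
//         std::shared_ptr<C2LinearBlock> block =
//                 MediaCodec::FetchLinearBlock(capacity, {"c2.android.aac.encoder"});
//         // map the block for writing and wrap it in a buffer for queueing
//     }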
5422
5423 // static
5424 status_t MediaCodec::CanFetchGraphicBlock(
5425 const std::vector<std::string> &names, bool *isCompatible) {
5426 *isCompatible = false;
5427 if (names.size() == 0) {
5428 *isCompatible = true;
5429 return OK;
5430 }
5431 const CodecListCache &cache = GetCodecListCache();
5432 for (const std::string &name : names) {
5433 auto it = cache.mCodecInfoMap.find(name);
5434 if (it == cache.mCodecInfoMap.end()) {
5435 return NAME_NOT_FOUND;
5436 }
5437 const char *owner = it->second->getOwnerName();
5438 if (owner == nullptr || strncmp(owner, "default", 8) == 0) {
5439 *isCompatible = false;
5440 return OK;
5441 } else if (strncmp(owner, "codec2.", 7) != 0) {
5442 return NAME_NOT_FOUND;
5443 }
5444 }
5445 return CCodec::CanFetchGraphicBlock(names, isCompatible);
5446 }
5447
5448 // static
5449 std::shared_ptr<C2GraphicBlock> MediaCodec::FetchGraphicBlock(
5450 int32_t width,
5451 int32_t height,
5452 int32_t format,
5453 uint64_t usage,
5454 const std::vector<std::string> &names) {
5455 return CCodec::FetchGraphicBlock(width, height, format, usage, names);
5456 }
5457
5458 } // namespace android
5459