1 /*
2  * Copyright (C) 2009 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include <inttypes.h>
18 
19 //#define LOG_NDEBUG 0
20 #define LOG_TAG "CameraSource"
21 #include <utils/Log.h>
22 
23 #include <OMX_Component.h>
24 #include <binder/IPCThreadState.h>
25 #include <binder/MemoryBase.h>
26 #include <binder/MemoryHeapBase.h>
27 #include <media/hardware/HardwareAPI.h>
28 #include <media/stagefright/foundation/ADebug.h>
29 #include <media/stagefright/CameraSource.h>
30 #include <media/stagefright/MediaDefs.h>
31 #include <media/stagefright/MediaErrors.h>
32 #include <media/stagefright/MetaData.h>
33 #include <camera/Camera.h>
34 #include <camera/CameraParameters.h>
35 #include <gui/Surface.h>
36 #include <utils/String8.h>
37 #include <cutils/properties.h>
38 
39 #if LOG_NDEBUG
40 #define UNUSED_UNLESS_VERBOSE(x) (void)(x)
41 #else
42 #define UNUSED_UNLESS_VERBOSE(x)
43 #endif
44 
45 namespace android {
46 
47 static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
48 
getColorFormat(const char * colorFormat)49 static int32_t getColorFormat(const char* colorFormat) {
50     if (!colorFormat) {
51         ALOGE("Invalid color format");
52         return -1;
53     }
54 
55     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
56        return OMX_COLOR_FormatYUV420Planar;
57     }
58 
59     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
60        return OMX_COLOR_FormatYUV422SemiPlanar;
61     }
62 
63     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
64         return OMX_COLOR_FormatYUV420SemiPlanar;
65     }
66 
67     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
68         return OMX_COLOR_FormatYCbYCr;
69     }
70 
71     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
72        return OMX_COLOR_Format16bitRGB565;
73     }
74 
75     if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
76        return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
77     }
78 
79     if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
80         return OMX_COLOR_FormatAndroidOpaque;
81     }
82 
83     ALOGE("Uknown color format (%s), please add it to "
84          "CameraSource::getColorFormat", colorFormat);
85 
86     CHECK(!"Unknown color format");
87     return -1;
88 }
89 
90 // static
CreateFromCamera(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate,const sp<IGraphicBufferProducer> & surface)91 CameraSource *CameraSource::CreateFromCamera(
92     const sp<hardware::ICamera>& camera,
93     const sp<ICameraRecordingProxy>& proxy,
94     int32_t cameraId,
95     const String16& clientName,
96     uid_t clientUid,
97     pid_t clientPid,
98     Size videoSize,
99     int32_t frameRate,
100     const sp<IGraphicBufferProducer>& surface) {
101 
102     CameraSource *source = new CameraSource(camera, proxy, cameraId,
103             clientName, clientUid, clientPid, videoSize, frameRate, surface);
104     return source;
105 }
106 
// Constructs a CameraSource. All real setup happens in init(); on failure
// mInitCheck records the error and the camera connection is released.
// Callers must check initCheck() before using the instance.
CameraSource::CameraSource(
    const sp<hardware::ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    pid_t clientPid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mEos(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mStopSystemTimeUs(-1),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    // -1 marks the video size as "not configured yet"; init() fills in
    // the dimensions negotiated with the camera.
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid, clientPid,
                    videoSize, frameRate);
    // Release the camera lock/connection if initialization failed.
    if (mInitCheck != OK) releaseCamera();
}
142 
// Returns the result of construction-time initialization: OK on success,
// or the error reported by init(). Callers must verify this before use.
status_t CameraSource::initCheck() const {
    return mInitCheck;
}
146 
// Establish and lock the camera connection used for recording.
// If |camera| is NULL, connect to |cameraId| ourselves ("cold" camera);
// otherwise wrap the application-supplied camera ("hot" camera) and keep
// its recording proxy, watching the proxy binder for death.
// @return OK on success, -EBUSY if the camera cannot be acquired.
status_t CameraSource::isCameraAvailable(
    const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const String16& clientName, uid_t clientUid, pid_t clientPid) {

    if (camera == 0) {
        // Cold camera: we own the connection outright.
        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
                /*targetSdkVersion*/__ANDROID_API_FUTURE__);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
    }

    // Take the camera hardware lock so other clients cannot reconfigure it
    // while we are recording.
    mCamera->lock();

    return OK;
}
174 
175 
176 /*
177  * Check to see whether the requested video width and height is one
178  * of the supported sizes.
179  * @param width the video frame width in pixels
180  * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
182  * @return true if the dimension (width and height) is supported.
183  */
isVideoSizeSupported(int32_t width,int32_t height,const Vector<Size> & supportedSizes)184 static bool isVideoSizeSupported(
185     int32_t width, int32_t height,
186     const Vector<Size>& supportedSizes) {
187 
188     ALOGV("isVideoSizeSupported");
189     for (size_t i = 0; i < supportedSizes.size(); ++i) {
190         if (width  == supportedSizes[i].width &&
191             height == supportedSizes[i].height) {
192             return true;
193         }
194     }
195     return false;
196 }
197 
198 /*
 * If the preview and video output is separate, we only set
 * the video size, and applications should set the preview size
201  * to some proper value, and the recording framework will not
202  * change the preview size; otherwise, if the video and preview
203  * output is the same, we need to set the preview to be the same
204  * as the requested video size.
205  *
206  */
207 /*
208  * Query the camera to retrieve the supported video frame sizes
209  * and also to see whether CameraParameters::setVideoSize()
210  * is supported or not.
211  * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
213  *      CameraParameters::setVideoSize() is supported or not.
214  * @param sizes returns the vector of Size objects for the
215  *      supported video frame sizes advertised by the camera.
216  */
getSupportedVideoSizes(const CameraParameters & params,bool * isSetVideoSizeSupported,Vector<Size> & sizes)217 static void getSupportedVideoSizes(
218     const CameraParameters& params,
219     bool *isSetVideoSizeSupported,
220     Vector<Size>& sizes) {
221 
222     *isSetVideoSizeSupported = true;
223     params.getSupportedVideoSizes(sizes);
224     if (sizes.size() == 0) {
225         ALOGD("Camera does not support setVideoSize()");
226         params.getSupportedPreviewSizes(sizes);
227         *isSetVideoSizeSupported = false;
228     }
229 }
230 
231 /*
232  * Check whether the camera has the supported color format
233  * @param params CameraParameters to retrieve the information
234  * @return OK if no error.
235  */
isCameraColorFormatSupported(const CameraParameters & params)236 status_t CameraSource::isCameraColorFormatSupported(
237         const CameraParameters& params) {
238     mColorFormat = getColorFormat(params.get(
239             CameraParameters::KEY_VIDEO_FRAME_FORMAT));
240     if (mColorFormat == -1) {
241         return BAD_VALUE;
242     }
243     return OK;
244 }
245 
246 /*
247  * Configure the camera to use the requested video size
248  * (width and height) and/or frame rate. If both width and
249  * height are -1, configuration on the video size is skipped.
250  * if frameRate is -1, configuration on the frame rate
251  * is skipped. Skipping the configuration allows one to
252  * use the current camera setting without the need to
253  * actually know the specific values (see Create() method).
254  *
255  * @param params the CameraParameters to be configured
256  * @param width the target video frame width in pixels
257  * @param height the target video frame height in pixels
258  * @param frameRate the target frame rate in frames per second.
259  * @return OK if no error.
260  */
configureCamera(CameraParameters * params,int32_t width,int32_t height,int32_t frameRate)261 status_t CameraSource::configureCamera(
262         CameraParameters* params,
263         int32_t width, int32_t height,
264         int32_t frameRate) {
265     ALOGV("configureCamera");
266     Vector<Size> sizes;
267     bool isSetVideoSizeSupportedByCamera = true;
268     getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
269     bool isCameraParamChanged = false;
270     if (width != -1 && height != -1) {
271         if (!isVideoSizeSupported(width, height, sizes)) {
272             ALOGE("Video dimension (%dx%d) is unsupported", width, height);
273             return BAD_VALUE;
274         }
275         if (isSetVideoSizeSupportedByCamera) {
276             params->setVideoSize(width, height);
277         } else {
278             params->setPreviewSize(width, height);
279         }
280         isCameraParamChanged = true;
281     } else if ((width == -1 && height != -1) ||
282                (width != -1 && height == -1)) {
283         // If one and only one of the width and height is -1
284         // we reject such a request.
285         ALOGE("Requested video size (%dx%d) is not supported", width, height);
286         return BAD_VALUE;
287     } else {  // width == -1 && height == -1
288         // Do not configure the camera.
289         // Use the current width and height value setting from the camera.
290     }
291 
292     if (frameRate != -1) {
293         CHECK(frameRate > 0 && frameRate <= 120);
294         const char* supportedFrameRates =
295                 params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
296         CHECK(supportedFrameRates != NULL);
297         ALOGV("Supported frame rates: %s", supportedFrameRates);
298         char buf[4];
299         snprintf(buf, 4, "%d", frameRate);
300         if (strstr(supportedFrameRates, buf) == NULL) {
301             ALOGE("Requested frame rate (%d) is not supported: %s",
302                 frameRate, supportedFrameRates);
303             return BAD_VALUE;
304         }
305 
306         // The frame rate is supported, set the camera to the requested value.
307         params->setPreviewFrameRate(frameRate);
308         isCameraParamChanged = true;
309     } else {  // frameRate == -1
310         // Do not configure the camera.
311         // Use the current frame rate value setting from the camera
312     }
313 
314     if (isCameraParamChanged) {
315         // Either frame rate or frame size needs to be changed.
316         String8 s = params->flatten();
317         if (OK != mCamera->setParameters(s)) {
318             ALOGE("Could not change settings."
319                  " Someone else is using camera %p?", mCamera.get());
320             return -EBUSY;
321         }
322     }
323     return OK;
324 }
325 
326 /*
327  * Check whether the requested video frame size
328  * has been successfully configured or not. If both width and height
329  * are -1, check on the current width and height value setting
330  * is performed.
331  *
332  * @param params CameraParameters to retrieve the information
333  * @param the target video frame width in pixels to check against
334  * @param the target video frame height in pixels to check against
335  * @return OK if no error
336  */
checkVideoSize(const CameraParameters & params,int32_t width,int32_t height)337 status_t CameraSource::checkVideoSize(
338         const CameraParameters& params,
339         int32_t width, int32_t height) {
340 
341     ALOGV("checkVideoSize");
342     // The actual video size is the same as the preview size
343     // if the camera hal does not support separate video and
344     // preview output. In this case, we retrieve the video
345     // size from preview.
346     int32_t frameWidthActual = -1;
347     int32_t frameHeightActual = -1;
348     Vector<Size> sizes;
349     params.getSupportedVideoSizes(sizes);
350     if (sizes.size() == 0) {
351         // video size is the same as preview size
352         params.getPreviewSize(&frameWidthActual, &frameHeightActual);
353     } else {
354         // video size may not be the same as preview
355         params.getVideoSize(&frameWidthActual, &frameHeightActual);
356     }
357     if (frameWidthActual < 0 || frameHeightActual < 0) {
358         ALOGE("Failed to retrieve video frame size (%dx%d)",
359                 frameWidthActual, frameHeightActual);
360         return UNKNOWN_ERROR;
361     }
362 
363     // Check the actual video frame size against the target/requested
364     // video frame size.
365     if (width != -1 && height != -1) {
366         if (frameWidthActual != width || frameHeightActual != height) {
367             ALOGE("Failed to set video frame size to %dx%d. "
368                     "The actual video size is %dx%d ", width, height,
369                     frameWidthActual, frameHeightActual);
370             return UNKNOWN_ERROR;
371         }
372     }
373 
374     // Good now.
375     mVideoSize.width = frameWidthActual;
376     mVideoSize.height = frameHeightActual;
377     return OK;
378 }
379 
380 /*
381  * Check the requested frame rate has been successfully configured or not.
382  * If the target frameRate is -1, check on the current frame rate value
383  * setting is performed.
384  *
385  * @param params CameraParameters to retrieve the information
386  * @param the target video frame rate to check against
387  * @return OK if no error.
388  */
checkFrameRate(const CameraParameters & params,int32_t frameRate)389 status_t CameraSource::checkFrameRate(
390         const CameraParameters& params,
391         int32_t frameRate) {
392 
393     ALOGV("checkFrameRate");
394     int32_t frameRateActual = params.getPreviewFrameRate();
395     if (frameRateActual < 0) {
396         ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
397         return UNKNOWN_ERROR;
398     }
399 
400     // Check the actual video frame rate against the target/requested
401     // video frame rate.
402     if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
403         ALOGE("Failed to set preview frame rate to %d fps. The actual "
404                 "frame rate is %d", frameRate, frameRateActual);
405         return UNKNOWN_ERROR;
406     }
407 
408     // Good now.
409     mVideoFrameRate = frameRateActual;
410     return OK;
411 }
412 
413 /*
 * Initialize the CameraSource so that it becomes
415  * ready for providing the video input streams as requested.
416  * @param camera the camera object used for the video source
417  * @param cameraId if camera == 0, use camera with this id
418  *      as the video source
419  * @param videoSize the target video frame size. If both
420  *      width and height in videoSize is -1, use the current
 *      width and height settings by the camera
422  * @param frameRate the target frame rate in frames per second.
423  *      if it is -1, use the current camera frame rate setting.
424  * @param storeMetaDataInVideoBuffers request to store meta
425  *      data or real YUV data in video buffers. Request to
426  *      store meta data in video buffers may not be honored
427  *      if the source does not support this feature.
428  *
429  * @return OK if no error.
430  */
init(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate)431 status_t CameraSource::init(
432         const sp<hardware::ICamera>& camera,
433         const sp<ICameraRecordingProxy>& proxy,
434         int32_t cameraId,
435         const String16& clientName,
436         uid_t clientUid,
437         pid_t clientPid,
438         Size videoSize,
439         int32_t frameRate) {
440 
441     ALOGV("init");
442     status_t err = OK;
443     int64_t token = IPCThreadState::self()->clearCallingIdentity();
444     err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
445                                videoSize, frameRate);
446     IPCThreadState::self()->restoreCallingIdentity(token);
447     return err;
448 }
449 
createVideoBufferMemoryHeap(size_t size,uint32_t bufferCount)450 void CameraSource::createVideoBufferMemoryHeap(size_t size, uint32_t bufferCount) {
451     mMemoryHeapBase = new MemoryHeapBase(size * bufferCount, 0,
452             "StageFright-CameraSource-BufferHeap");
453     for (uint32_t i = 0; i < bufferCount; i++) {
454         mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * size, size));
455     }
456 }
457 
// Create the BufferQueue through which the camera delivers video frames,
// configure its consumer side (size, format, data space), hand its
// producer to the camera as the video target, and start a listener
// thread that consumes incoming frames.
//
// @param width/height default buffer dimensions
// @param format HAL pixel format for the buffers
// @param dataSpace data space for the buffers
// @param bufferCount buffers requested by the caller; kConsumerBufferCount
//        extra buffers are added for the consumer side
// @return OK on success; ALREADY_EXISTS if the queue was already created,
//         or the error from the failing configuration step.
status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
        uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
    ALOGV("initBufferQueue");

    if (mVideoBufferConsumer != nullptr || mVideoBufferProducer != nullptr) {
        ALOGE("%s: Buffer queue already exists", __FUNCTION__);
        return ALREADY_EXISTS;
    }

    // Create a buffer queue.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);

    // Opaque (implementation-defined) buffers go straight to the encoder;
    // any explicit format is read by the CPU.
    uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    }

    bufferCount += kConsumerBufferCount;

    mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
    mVideoBufferProducer = producer;

    status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
    if (res != OK) {
        ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferFormat(format);
    if (res != OK) {
        ALOGE("%s: Could not set buffer format %d: %s (%d)", __FUNCTION__, format,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferDataSpace(dataSpace);
    if (res != OK) {
        ALOGE("%s: Could not set data space %d: %s (%d)", __FUNCTION__, dataSpace,
                strerror(-res), res);
        return res;
    }

    // Point the camera's video output at our queue's producer end.
    res = mCamera->setVideoTarget(mVideoBufferProducer);
    if (res != OK) {
        ALOGE("%s: Failed to set video target: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Create memory heap to store buffers as VideoNativeMetadata.
    createVideoBufferMemoryHeap(sizeof(VideoNativeMetadata), bufferCount);

    // Listener thread pulls frames off the queue as they arrive.
    mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
    res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
    if (res != OK) {
        ALOGE("%s: Could not run buffer queue listener thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}
523 
// Body of init(), run with the binder calling identity already cleared.
// Connects/locks the camera, validates the color format, applies and
// verifies the requested video size and frame rate, optionally sets the
// preview surface, switches the camera to buffer-queue delivery, and
// builds the output format metadata (mMeta).
// @return OK on success, or the error from the failing step.
status_t CameraSource::initWithCameraAccess(
        const sp<hardware::ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t frameRate) {
    ALOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId,
            clientName, clientUid, clientPid)) != OK) {
        ALOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate. Note: any nonzero status from configureCamera()
    // is treated as failure here (no explicit "!= OK").
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Check on video frame size and frame rate: re-read the parameters
    // to confirm the camera actually accepted the settings.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // Set the preview display. Skip this if mSurface is null because
    // applications may already set a surface to the camera.
    if (mSurface != NULL) {
        // This CHECK is good, since we just passed the lock/unlock
        // check earlier by calling mCamera->setParameters().
        CHECK_EQ((status_t)OK, mCamera->setPreviewTarget(mSurface));
    }

    // Use buffer queue to receive video buffers from camera
    err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
    if (err != OK) {
        ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_BUFFER_QUEUE failed: "
                "%s (err=%d)", __FUNCTION__, strerror(-err), err);
        return err;
    }

    // A "glitch" is an unusually long gap between frames; never set the
    // threshold below one frame interval.
    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}
597 
// Destructor: tear down recording if still active, otherwise just give
// the camera lock back.
CameraSource::~CameraSource() {
    if (mStarted) {
        // Recording is still running: reset() stops it, drains outstanding
        // frames, and releases the camera.
        reset();
    } else if (mInitCheck == OK) {
        // Camera is initialized but because start() is never called,
        // the lock on Camera is never released(). This makes sure
        // Camera's lock is released in this case.
        releaseCamera();
    }
}
608 
startCameraRecording()609 status_t CameraSource::startCameraRecording() {
610     ALOGV("startCameraRecording");
611     // Reset the identity to the current thread because media server owns the
612     // camera and recording is started by the applications. The applications
613     // will connect to the camera in ICameraRecordingProxy::startRecording.
614     int64_t token = IPCThreadState::self()->clearCallingIdentity();
615     status_t err;
616 
617     // Initialize buffer queue.
618     err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
619             (android_dataspace_t)mEncoderDataSpace,
620             mNumInputBuffers > 0 ? mNumInputBuffers : 1);
621     if (err != OK) {
622         ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
623                 strerror(-err), err);
624         return err;
625     }
626 
627     // Start data flow
628     err = OK;
629     if (mCameraFlags & FLAGS_HOT_CAMERA) {
630         mCamera->unlock();
631         mCamera.clear();
632         if ((err = mCameraRecordingProxy->startRecording()) != OK) {
633             ALOGE("Failed to start recording, received error: %s (%d)",
634                     strerror(-err), err);
635         }
636     } else {
637         mCamera->startRecording();
638         if (!mCamera->recordingEnabled()) {
639             err = -EINVAL;
640             ALOGE("Failed to start recording");
641         }
642     }
643     IPCThreadState::self()->restoreCallingIdentity(token);
644     return err;
645 }
646 
// Start the source. Optional configuration via |meta|:
//   kKeyTime        capture start time in us (stored in mStartTimeUs)
//   kKeyNumBuffers  number of input buffers to request (must be > 0)
//   kKeyPixelFormat encoder pixel format override
//   kKeyColorSpace  encoder data space override (also sets mBufferDataSpace)
// @return OK on success; mInitCheck if initialization failed, or the
//         error from startCameraRecording().
status_t CameraSource::start(MetaData *meta) {
    ALOGV("start");
    CHECK(!mStarted);
    if (mInitCheck != OK) {
        ALOGE("CameraSource is not initialized yet");
        return mInitCheck;
    }

    // Opt-in frame statistics logging (reported in reset()).
    if (property_get_bool("media.stagefright.record-stats", false)) {
        mCollectStats = true;
    }

    // Defaults, possibly overridden by |meta| below.
    mStartTimeUs = 0;
    mNumInputBuffers = 0;
    mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    mEncoderDataSpace = mBufferDataSpace = HAL_DATASPACE_V0_BT709;

    if (meta) {
        int64_t startTimeUs;
        if (meta->findInt64(kKeyTime, &startTimeUs)) {
            mStartTimeUs = startTimeUs;
        }

        int32_t nBuffers;
        if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
            CHECK_GT(nBuffers, 0);
            mNumInputBuffers = nBuffers;
        }

        // apply encoder color format if specified
        if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
            ALOGI("Using encoder format: %#x", mEncoderFormat);
        }
        if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
            ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
            mBufferDataSpace = mEncoderDataSpace;
        }
    }

    status_t err;
    if ((err = startCameraRecording()) == OK) {
        mStarted = true;
    }

    return err;
}
693 
stopCameraRecording()694 void CameraSource::stopCameraRecording() {
695     ALOGV("stopCameraRecording");
696     if (mCameraFlags & FLAGS_HOT_CAMERA) {
697         if (mCameraRecordingProxy != 0) {
698             mCameraRecordingProxy->stopRecording();
699         }
700     } else {
701         if (mCamera != 0) {
702             mCamera->stopRecording();
703         }
704     }
705 }
706 
// Drop all camera references and flags. mLock is held only while swapping
// member references, never across the binder (IPC) calls in the middle,
// which run under our own binder identity.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        // Give the hardware lock back in all cases.
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            // Undo the linkToDeath from isCameraAvailable().
            IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}
739 
// Stop recording and tear down all recording state: drop queued frames,
// wait for the encoder to return outstanding ones, stop the camera,
// shut down the buffer-queue listener, and release the camera.
// @return OK (always).
status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        mEos = false;
        mStopSystemTimeUs = -1;
        // Wake anyone blocked waiting for the next frame.
        mFrameAvailableCondition.signal();

        // Clear the binder calling identity around the camera IPC below,
        // but only if we still hold a camera reference.
        int64_t token;
        bool isTokenValid = false;
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        releaseQueuedFrames();
        // Wait (with timeout) for the encoder to return every frame it
        // still holds; waitRelative releases mLock while blocked.
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must have been either encoded or dropped.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    // Shut down the listener thread outside mLock.
    if (mBufferQueueListener != nullptr) {
        mBufferQueueListener->requestExit();
        mBufferQueueListener->join();
        mBufferQueueListener.clear();
    }

    mVideoBufferConsumer.clear();
    mVideoBufferProducer.clear();
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
796 
// Return a recording frame's graphics buffer to the buffer queue and
// recycle its metadata slot (IMemory) into the free list.
// @param frame an IMemory previously handed out from mMemoryHeapBase,
//        containing a VideoNativeMetadata that names the buffer.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");

    // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
    // Guard against frames that did not come from our heap.
    if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
        ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
                heap->getHeapID(), mMemoryHeapBase->getHeapID());
        return;
    }

    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);

    // Find the corresponding buffer item for the native window buffer.
    ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
    if (index == NAME_NOT_FOUND) {
        ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
        return;
    }

    // Hand the graphics buffer back to the consumer, then recycle the
    // metadata slot and wake any waiter blocked on a free slot.
    BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
    mReceivedBufferItemMap.removeItemsAt(index);
    mVideoBufferConsumer->releaseBuffer(buffer);
    mMemoryBases.push_back(frame);
    mMemoryBaseAvailableCond.signal();
}
826 
releaseQueuedFrames()827 void CameraSource::releaseQueuedFrames() {
828     List<sp<IMemory> >::iterator it;
829     while (!mFramesReceived.empty()) {
830         it = mFramesReceived.begin();
831         releaseRecordingFrame(*it);
832         mFramesReceived.erase(it);
833         ++mNumFramesDropped;
834     }
835 }
836 
// Returns the output format metadata built during initialization.
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
840 
// Thin forwarding wrapper; returns a single frame back to the buffer queue
// via releaseRecordingFrame().
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
844 
signalBufferReturned(MediaBufferBase * buffer)845 void CameraSource::signalBufferReturned(MediaBufferBase *buffer) {
846     ALOGV("signalBufferReturned: %p", buffer->data());
847     Mutex::Autolock autoLock(mLock);
848     for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
849          it != mFramesBeingEncoded.end(); ++it) {
850         if ((*it)->unsecurePointer() ==  buffer->data()) {
851             releaseOneRecordingFrame((*it));
852             mFramesBeingEncoded.erase(it);
853             ++mNumFramesEncoded;
854             buffer->setObserver(0);
855             buffer->release();
856             mFrameCompleteCondition.signal();
857             return;
858         }
859     }
860     CHECK(!"signalBufferReturned: bogus buffer");
861 }
862 
/*
 * Pulls the next recorded frame for the encoder.
 *
 * Blocks (waking up periodically) until a frame is queued on
 * mFramesReceived, recording stops, or EOS is reached. On success, *buffer
 * wraps the frame's metadata memory zero-copy and carries its adjusted
 * timestamp (kKeyTime).
 *
 * Returns OK (with *buffer left NULL if the source was stopped),
 * ERROR_UNSUPPORTED for seek requests, or ERROR_END_OF_STREAM.
 */
status_t CameraSource::read(
        MediaBufferBase **buffer, const ReadOptions *options) {
    ALOGV("read");

    *buffer = NULL;

    // A live camera feed cannot seek.
    int64_t seekTimeUs;
    ReadOptions::SeekMode mode;
    if (options && options->getSeekTo(&seekTimeUs, &mode)) {
        return ERROR_UNSUPPORTED;
    }

    sp<IMemory> frame;
    int64_t frameTime;

    {
        Mutex::Autolock autoLock(mLock);
        // Bounded waits so a dead camera process is noticed instead of
        // blocking here forever.
        while (mStarted && !mEos && mFramesReceived.empty()) {
            if (NO_ERROR !=
                mFrameAvailableCondition.waitRelative(mLock,
                    mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                if (mCameraRecordingProxy != 0 &&
                    !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
                    ALOGW("camera recording proxy is gone");
                    return ERROR_END_OF_STREAM;
                }
                ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
                    mLastFrameTimestampUs);
            }
        }
        if (!mStarted) {
            return OK;
        }
        if (mFramesReceived.empty()) {
            // Woke up with no frames while mEos is set: end of stream.
            return ERROR_END_OF_STREAM;
        }
        // Dequeue the oldest frame together with its timestamp.
        frame = *mFramesReceived.begin();
        mFramesReceived.erase(mFramesReceived.begin());

        frameTime = *mFrameTimes.begin();
        mFrameTimes.erase(mFrameTimes.begin());
        // Track the frame until the encoder returns it through
        // signalBufferReturned().
        mFramesBeingEncoded.push_back(frame);
        // TODO: Using unsecurePointer() has some associated security pitfalls
        //       (see declaration for details).
        //       Either document why it is safe in this case or address the
        //       issue (e.g. by copying).
        *buffer = new MediaBuffer(frame->unsecurePointer(), frame->size());
        (*buffer)->setObserver(this);
        (*buffer)->add_ref();
        (*buffer)->meta_data().setInt64(kKeyTime, frameTime);
        // Propagate a data-space change to the encoder only when it differs
        // from what the encoder last saw.
        if (mBufferDataSpace != mEncoderDataSpace) {
            ALOGD("Data space updated to %x", mBufferDataSpace);
            (*buffer)->meta_data().setInt32(kKeyColorSpace, mBufferDataSpace);
            mEncoderDataSpace = mBufferDataSpace;
        }
    }
    return OK;
}
921 
setStopTimeUs(int64_t stopTimeUs)922 status_t CameraSource::setStopTimeUs(int64_t stopTimeUs) {
923     Mutex::Autolock autoLock(mLock);
924     ALOGV("Set stoptime: %lld us", (long long)stopTimeUs);
925 
926     if (stopTimeUs < -1) {
927         ALOGE("Invalid stop time %lld us", (long long)stopTimeUs);
928         return BAD_VALUE;
929     } else if (stopTimeUs == -1) {
930         ALOGI("reset stopTime to be -1");
931     }
932 
933     mStopSystemTimeUs = stopTimeUs;
934     return OK;
935 }
936 
/*
 * Decides (caller holds mLock) whether the frame captured at timestampUs
 * should be dropped rather than queued for encoding, while maintaining the
 * frame statistics: glitch count, last/first frame timestamps, and the
 * initial start delay folded into mStartTimeUs.
 *
 * Returns true if the frame must be dropped.
 */
bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
    // Drop frames that arrive when stopped, or that were captured before
    // the requested start time.
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        return true;
    }

    // Past the requested stop time: mark EOS and wake any blocked read().
    if (mStopSystemTimeUs != -1 && timestampUs >= mStopSystemTimeUs) {
        ALOGV("Drop Camera frame at %lld  stop time: %lld us",
                (long long)timestampUs, (long long)mStopSystemTimeUs);
        mEos = true;
        mFrameAvailableCondition.signal();
        return true;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        return true;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing.
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                    (long long)timestampUs, (long long)mLastFrameTimestampUs);
            return true;
        }
        // Count unusually long inter-frame gaps for the stats reported at stop.
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                return true;
            }
            // From here on, mStartTimeUs holds the initial delay
            // (capture time minus requested start time).
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }

    return false;
}
984 
BufferQueueListener(const sp<BufferItemConsumer> & consumer,const sp<CameraSource> & cameraSource)985 CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
986         const sp<CameraSource>& cameraSource) {
987     mConsumer = consumer;
988     mConsumer->setFrameAvailableListener(this);
989     mCameraSource = cameraSource;
990 }
991 
onFrameAvailable(const BufferItem &)992 void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
993     ALOGV("%s: onFrameAvailable", __FUNCTION__);
994 
995     Mutex::Autolock l(mLock);
996 
997     if (!mFrameAvailable) {
998         mFrameAvailable = true;
999         mFrameAvailableSignal.signal();
1000     }
1001 }
1002 
threadLoop()1003 bool CameraSource::BufferQueueListener::threadLoop() {
1004     if (mConsumer == nullptr || mCameraSource == nullptr) {
1005         return false;
1006     }
1007 
1008     {
1009         Mutex::Autolock l(mLock);
1010         while (!mFrameAvailable) {
1011             if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
1012                 return true;
1013             }
1014         }
1015         mFrameAvailable = false;
1016     }
1017 
1018     BufferItem buffer;
1019     while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
1020         mCameraSource->processBufferQueueFrame(buffer);
1021     }
1022 
1023     return true;
1024 }
1025 
/*
 * Consumes one BufferItem from the buffer queue: wraps its native window
 * buffer in a VideoNativeMetadata payload stored in a recycled IMemory slot
 * and queues it on mFramesReceived for read().
 *
 * Waits (bounded) when all metadata slots are in flight with the encoder;
 * on timeout the frame is dropped and the buffer returned to the queue.
 */
void CameraSource::processBufferQueueFrame(BufferItem& buffer) {
    Mutex::Autolock autoLock(mLock);

    // BufferItem timestamps are in ns; everything downstream uses us.
    int64_t timestampUs = buffer.mTimestamp / 1000;
    if (shouldSkipFrameLocked(timestampUs)) {
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    // All IMemory slots may be held by frames still at the encoder; wait
    // for releaseRecordingFrame() to recycle one.
    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            mVideoBufferConsumer->releaseBuffer(buffer);
            return;
        }
    }

    ++mNumFramesReceived;

    // Find a available memory slot to store the buffer as VideoNativeMetadata.
    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());
    mBufferDataSpace = buffer.mDataSpace;

    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);
    memset(payload, 0, sizeof(VideoNativeMetadata));
    payload->eType = kMetadataBufferTypeANWBuffer;
    payload->pBuffer = buffer.mGraphicBuffer->getNativeBuffer();
    payload->nFenceFd = -1;

    // Add the mapping so we can find the corresponding buffer item to release to the buffer queue
    // when the encoder returns the native window buffer.
    mReceivedBufferItemMap.add(payload->pBuffer, buffer);

    mFramesReceived.push_back(data);
    // Rebase the timestamp onto the recording timeline (mStartTimeUs holds
    // the initial delay once the first frame has been accepted).
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    // Wake a reader blocked in read().
    mFrameAvailableCondition.signal();
}
1072 
// Buffers handed to the encoder carry VideoNativeMetadata (native window
// buffer references), not raw pixel data.
MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
    ALOGV("metaDataStoredInVideoBuffers");

    return kMetadataBufferTypeANWBuffer;
}
1078 
// Binder death callback for the camera recording proxy; only logs here —
// read() independently detects the dead proxy via isBinderAlive().
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
    ALOGI("Camera recording proxy died");
}
1082 
1083 }  // namespace android
1084