1 /*
2 * Copyright (C) 2013-2018 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera3-OutputStream"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <ctime>
22 #include <fstream>
23
24 #include <android-base/unique_fd.h>
25 #include <ui/GraphicBuffer.h>
26 #include <utils/Log.h>
27 #include <utils/Trace.h>
28
29 #include "api1/client2/JpegProcessor.h"
30 #include "Camera3OutputStream.h"
31 #include "utils/TraceHFR.h"
32
33 #ifndef container_of
34 #define container_of(ptr, type, member) \
35 (type *)((char*)(ptr) - offsetof(type, member))
36 #endif
37
38 namespace android {
39
40 namespace camera3 {
41
42 Camera3OutputStream::Camera3OutputStream(int id,
43 sp<Surface> consumer,
44 uint32_t width, uint32_t height, int format,
45 android_dataspace dataSpace, camera_stream_rotation_t rotation,
46 nsecs_t timestampOffset, const String8& physicalCameraId,
47 const std::unordered_set<int32_t> &sensorPixelModesUsed,
48 int setId, bool isMultiResolution) :
49 Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
50 /*maxSize*/0, format, dataSpace, rotation,
51 physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
52 mConsumer(consumer),
53 mTransform(0),
54 mTraceFirstBuffer(true),
55 mUseBufferManager(false),
56 mTimestampOffset(timestampOffset),
57 mConsumerUsage(0),
58 mDropBuffers(false),
59 mDequeueBufferLatency(kDequeueLatencyBinSize) {
60
61 if (mConsumer == NULL) {
62 ALOGE("%s: Consumer is NULL!", __FUNCTION__);
63 mState = STATE_ERROR;
64 }
65
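// A valid stream set ID indicates this stream may be managed by Camera3BufferManager, which
// relies on onBufferReleased callbacks from the listener below to track free buffers.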
66 bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
67 mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
68 }
69
70 Camera3OutputStream::Camera3OutputStream(int id,
71 sp<Surface> consumer,
72 uint32_t width, uint32_t height, size_t maxSize, int format,
73 android_dataspace dataSpace, camera_stream_rotation_t rotation,
74 nsecs_t timestampOffset, const String8& physicalCameraId,
75 const std::unordered_set<int32_t> &sensorPixelModesUsed,
76 int setId, bool isMultiResolution) :
77 Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
78 format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
79 setId, isMultiResolution),
80 mConsumer(consumer),
81 mTransform(0),
82 mTraceFirstBuffer(true),
83 mUseMonoTimestamp(false),
84 mUseBufferManager(false),
85 mTimestampOffset(timestampOffset),
86 mConsumerUsage(0),
87 mDropBuffers(false),
88 mDequeueBufferLatency(kDequeueLatencyBinSize) {
89
90 if (format != HAL_PIXEL_FORMAT_BLOB && format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
91 ALOGE("%s: Bad format for size-only stream: %d", __FUNCTION__,
92 format);
93 mState = STATE_ERROR;
94 }
95
96 if (mConsumer == NULL) {
97 ALOGE("%s: Consumer is NULL!", __FUNCTION__);
98 mState = STATE_ERROR;
99 }
100
101 bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
102 mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
103 }
104
105 Camera3OutputStream::Camera3OutputStream(int id,
106 uint32_t width, uint32_t height, int format,
107 uint64_t consumerUsage, android_dataspace dataSpace,
108 camera_stream_rotation_t rotation, nsecs_t timestampOffset,
109 const String8& physicalCameraId,
110 const std::unordered_set<int32_t> &sensorPixelModesUsed,
111 int setId, bool isMultiResolution) :
112 Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
113 /*maxSize*/0, format, dataSpace, rotation,
114 physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
115 mConsumer(nullptr),
116 mTransform(0),
117 mTraceFirstBuffer(true),
118 mUseBufferManager(false),
119 mTimestampOffset(timestampOffset),
120 mConsumerUsage(consumerUsage),
121 mDropBuffers(false),
122 mDequeueBufferLatency(kDequeueLatencyBinSize) {
123 // Deferred consumers only support the preview surface format for now.
124 if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
125 ALOGE("%s: Deferred consumer only supports IMPLEMENTATION_DEFINED format now!",
126 __FUNCTION__);
127 mState = STATE_ERROR;
128 }
129
130 // Validation check for the consumer usage flag.
131 if ((consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) == 0 &&
132 (consumerUsage & GraphicBuffer::USAGE_HW_COMPOSER) == 0) {
133 ALOGE("%s: Deferred consumer usage flag is illegal %" PRIu64 "!",
134 __FUNCTION__, consumerUsage);
135 mState = STATE_ERROR;
136 }
137
138 mConsumerName = String8("Deferred");
139 bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
140 mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
141 }
142
143 Camera3OutputStream::Camera3OutputStream(int id, camera_stream_type_t type,
144 uint32_t width, uint32_t height,
145 int format,
146 android_dataspace dataSpace,
147 camera_stream_rotation_t rotation,
148 const String8& physicalCameraId,
149 const std::unordered_set<int32_t> &sensorPixelModesUsed,
150 uint64_t consumerUsage, nsecs_t timestampOffset,
151 int setId, bool isMultiResolution) :
152 Camera3IOStreamBase(id, type, width, height,
153 /*maxSize*/0,
154 format, dataSpace, rotation,
155 physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution),
156 mTransform(0),
157 mTraceFirstBuffer(true),
158 mUseMonoTimestamp(false),
159 mUseBufferManager(false),
160 mTimestampOffset(timestampOffset),
161 mConsumerUsage(consumerUsage),
162 mDropBuffers(false),
163 mDequeueBufferLatency(kDequeueLatencyBinSize) {
164
165 bool needsReleaseNotify = setId > CAMERA3_STREAM_SET_ID_INVALID;
166 mBufferProducerListener = new BufferProducerListener(this, needsReleaseNotify);
167
168 // Subclasses are expected to initialize mConsumer themselves
169 }
170
171
172 Camera3OutputStream::~Camera3OutputStream() {
173 disconnectLocked();
174 }
175
176 status_t Camera3OutputStream::getBufferLocked(camera_stream_buffer *buffer,
177 const std::vector<size_t>&) {
178 ATRACE_HFR_CALL();
179
180 ANativeWindowBuffer* anb;
181 int fenceFd = -1;
182
183 status_t res;
184 res = getBufferLockedCommon(&anb, &fenceFd);
185 if (res != OK) {
186 return res;
187 }
188
189 /**
190 * FenceFD now owned by HAL except in case of error,
191 * in which case we reassign it to acquire_fence
192 */
193 handoutBufferLocked(*buffer, &(anb->handle), /*acquireFence*/fenceFd,
194 /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
195
196 return OK;
197 }
198
199 status_t Camera3OutputStream::getBuffersLocked(std::vector<OutstandingBuffer>* outBuffers) {
200 status_t res;
201
202 if ((res = getBufferPreconditionCheckLocked()) != OK) {
203 return res;
204 }
205
206 if (mUseBufferManager) {
207 ALOGE("%s: stream %d is managed by buffer manager and does not support batch operation",
208 __FUNCTION__, mId);
209 return INVALID_OPERATION;
210 }
211
212 sp<Surface> consumer = mConsumer;
213 /**
214 * Release the lock briefly to avoid a deadlock in the following scenario:
215 * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
216 *     This thread holds the StreamingProcessor lock and tries to take the Camera3Stream lock.
217 * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
218 *     This thread holds the Camera3Stream lock and the bufferQueue lock, and tries to take
219 *     the StreamingProcessor lock.
220 * Thread 3: Camera3Stream::getBuffer(). This thread holds the Camera3Stream lock
221 *     and tries to take the bufferQueue lock.
222 * Together these form a circular locking dependency.
223 */
224 mLock.unlock();
225
226 size_t numBuffersRequested = outBuffers->size();
227 std::vector<Surface::BatchBuffer> buffers(numBuffersRequested);
228
229 nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
230 res = consumer->dequeueBuffers(&buffers);
231 nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
232 mDequeueBufferLatency.add(dequeueStart, dequeueEnd);
233
234 mLock.lock();
235
236 if (res != OK) {
237 if (shouldLogError(res, mState)) {
238 ALOGE("%s: Stream %d: Can't dequeue %zu output buffers: %s (%d)",
239 __FUNCTION__, mId, numBuffersRequested, strerror(-res), res);
240 }
241 checkRetAndSetAbandonedLocked(res);
242 return res;
243 }
244 checkRemovedBuffersLocked();
245
246 /**
247 * FenceFD now owned by HAL except in case of error,
248 * in which case we reassign it to acquire_fence
249 */
250 for (size_t i = 0; i < numBuffersRequested; i++) {
251 handoutBufferLocked(*(outBuffers->at(i).outBuffer),
252 &(buffers[i].buffer->handle), /*acquireFence*/buffers[i].fenceFd,
253 /*releaseFence*/-1, CAMERA_BUFFER_STATUS_OK, /*output*/true);
254 }
255 return OK;
256 }
257
258 status_t Camera3OutputStream::queueBufferToConsumer(sp<ANativeWindow>& consumer,
259 ANativeWindowBuffer* buffer, int anwReleaseFence,
260 const std::vector<size_t>&) {
261 return consumer->queueBuffer(consumer.get(), buffer, anwReleaseFence);
262 }
263
264 status_t Camera3OutputStream::returnBufferLocked(
265 const camera_stream_buffer &buffer,
266 nsecs_t timestamp, int32_t transform, const std::vector<size_t>& surface_ids) {
267 ATRACE_HFR_CALL();
268
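// When the last handed-out buffer is being returned, give back any prefetched (batched)
// buffers so they are not held while no capture is outstanding.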
269 if (mHandoutTotalBufferCount == 1) {
270 returnPrefetchedBuffersLocked();
271 }
272
273 status_t res = returnAnyBufferLocked(buffer, timestamp, /*output*/true, transform, surface_ids);
274
275 if (res != OK) {
276 return res;
277 }
278
279 mLastTimestamp = timestamp;
280 mFrameCount++;
281
282 return OK;
283 }
284
285 status_t Camera3OutputStream::returnBufferCheckedLocked(
286 const camera_stream_buffer &buffer,
287 nsecs_t timestamp,
288 bool output,
289 int32_t transform,
290 const std::vector<size_t>& surface_ids,
291 /*out*/
292 sp<Fence> *releaseFenceOut) {
293
294 (void)output;
295 ALOG_ASSERT(output, "Expected output to be true");
296
297 status_t res;
298
299 // Fence management - always honor release fence from HAL
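// The dup'd fd below is handed to queueBuffer()/cancelBuffer(), while the Fence object itself
// is returned to the caller via releaseFenceOut; the dup is closed explicitly only if queueing fails.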
300 sp<Fence> releaseFence = new Fence(buffer.release_fence);
301 int anwReleaseFence = releaseFence->dup();
302
303 /**
304 * Release the lock briefly to avoid deadlock with
305 * StreamingProcessor::startStream -> Camera3Stream::isConfiguring (this
306 * thread will go into StreamingProcessor::onFrameAvailable) during
307 * queueBuffer
308 */
309 sp<ANativeWindow> currentConsumer = mConsumer;
310 StreamState state = mState;
311 mLock.unlock();
312
313 ANativeWindowBuffer *anwBuffer = container_of(buffer.buffer, ANativeWindowBuffer, handle);
314 /**
315 * Return buffer back to ANativeWindow
316 */
317 if (buffer.status == CAMERA_BUFFER_STATUS_ERROR || mDropBuffers || timestamp == 0) {
318 // Cancel buffer
319 if (mDropBuffers) {
320 ALOGV("%s: Dropping a frame for stream %d.", __FUNCTION__, mId);
321 } else if (buffer.status == CAMERA_BUFFER_STATUS_ERROR) {
322 ALOGV("%s: A frame is dropped for stream %d due to buffer error.", __FUNCTION__, mId);
323 } else {
324 ALOGE("%s: Stream %d: timestamp shouldn't be 0", __FUNCTION__, mId);
325 }
326
327 res = currentConsumer->cancelBuffer(currentConsumer.get(),
328 anwBuffer,
329 anwReleaseFence);
330 if (shouldLogError(res, state)) {
331 ALOGE("%s: Stream %d: Error cancelling buffer to native window:"
332 " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
333 }
334
335 notifyBufferReleased(anwBuffer);
336 if (mUseBufferManager) {
337 // Return this buffer back to buffer manager.
338 mBufferProducerListener->onBufferReleased();
339 }
340 } else {
341 if (mTraceFirstBuffer && (stream_type == CAMERA_STREAM_OUTPUT)) {
342 {
343 char traceLog[48];
344 snprintf(traceLog, sizeof(traceLog), "Stream %d: first full buffer\n", mId);
345 ATRACE_NAME(traceLog);
346 }
347 mTraceFirstBuffer = false;
348 }
349
350 if (transform != -1) {
351 setTransformLocked(transform);
352 }
353
354 /* Certain consumers (such as AudioSource or HardwareComposer) use
355 * MONOTONIC time, causing time misalignment if camera timestamp is
356 * in BOOTTIME. Do the conversion if necessary. */
357 res = native_window_set_buffers_timestamp(mConsumer.get(),
358 mUseMonoTimestamp ? timestamp - mTimestampOffset : timestamp);
359 if (res != OK) {
360 ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
361 __FUNCTION__, mId, strerror(-res), res);
362 return res;
363 }
364 // If this is a JPEG output, and image dump mask is set, save image to
365 // disk.
366 if (getFormat() == HAL_PIXEL_FORMAT_BLOB && getDataSpace() == HAL_DATASPACE_V0_JFIF &&
367 mImageDumpMask) {
368 dumpImageToDisk(timestamp, anwBuffer, anwReleaseFence);
369 }
370
371 res = queueBufferToConsumer(currentConsumer, anwBuffer, anwReleaseFence, surface_ids);
372 if (shouldLogError(res, state)) {
373 ALOGE("%s: Stream %d: Error queueing buffer to native window:"
374 " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
375 }
376 }
377 mLock.lock();
378
379 // Once a valid buffer has been returned to the queue, we can no longer
380 // dequeue all buffers for preallocation.
381 if (buffer.status != CAMERA_BUFFER_STATUS_ERROR) {
382 mStreamUnpreparable = true;
383 }
384
385 if (res != OK) {
386 close(anwReleaseFence);
387 }
388
389 *releaseFenceOut = releaseFence;
390
391 return res;
392 }
393
394 void Camera3OutputStream::dump(int fd, const Vector<String16> &args) const {
395 (void) args;
396 String8 lines;
397 lines.appendFormat(" Stream[%d]: Output\n", mId);
398 lines.appendFormat(" Consumer name: %s\n", mConsumerName.string());
399 write(fd, lines.string(), lines.size());
400
401 Camera3IOStreamBase::dump(fd, args);
402
403 mDequeueBufferLatency.dump(fd,
404 " DequeueBuffer latency histogram:");
405 }
406
407 status_t Camera3OutputStream::setTransform(int transform) {
408 ATRACE_CALL();
409 Mutex::Autolock l(mLock);
410 return setTransformLocked(transform);
411 }
412
413 status_t Camera3OutputStream::setTransformLocked(int transform) {
414 status_t res = OK;
415 if (mState == STATE_ERROR) {
416 ALOGE("%s: Stream in error state", __FUNCTION__);
417 return INVALID_OPERATION;
418 }
419
420 mTransform = transform;
421 if (mState == STATE_CONFIGURED) {
422 res = native_window_set_buffers_transform(mConsumer.get(),
423 transform);
424 if (res != OK) {
425 ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
426 __FUNCTION__, transform, strerror(-res), res);
427 }
428 }
429 return res;
430 }
431
432 status_t Camera3OutputStream::configureQueueLocked() {
433 status_t res;
434
435 mTraceFirstBuffer = true;
436 if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) {
437 return res;
438 }
439
440 if ((res = configureConsumerQueueLocked()) != OK) {
441 return res;
442 }
443
444 // Set the dequeueBuffer/attachBuffer timeout if the consumer is not the hw composer or a hw texture.
445 // We need to skip these cases, as a timeout would disable the non-blocking (async) mode.
446 if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
447 if (mUseBufferManager) {
448 // When the buffer manager is handling buffers, available buffers should already be in
449 // the buffer queue before we call into dequeueBuffer, because the buffer manager tracks
450 // free buffers.
451 // There are, however, consumer-side features (e.g. ImageReader::discardFreeBuffers) that
452 // can discard free buffers without notifying the buffer manager. We want the timeout to
453 // happen immediately here so the buffer manager can update its internal state and
454 // try to allocate a buffer instead of waiting.
455 mConsumer->setDequeueTimeout(0);
456 } else {
457 mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
458 }
459 }
460
461 return OK;
462 }
463
464 status_t Camera3OutputStream::configureConsumerQueueLocked() {
465 status_t res;
466
467 mTraceFirstBuffer = true;
468
469 ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL");
470
471 // Configure consumer-side ANativeWindow interface. The listener may be used
472 // to notify buffer manager (if it is used) of the returned buffers.
473 res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
474 /*reportBufferRemoval*/true,
475 /*listener*/mBufferProducerListener);
476 if (res != OK) {
477 ALOGE("%s: Unable to connect to native window for stream %d",
478 __FUNCTION__, mId);
479 return res;
480 }
481
482 mConsumerName = mConsumer->getConsumerName();
483
484 res = native_window_set_usage(mConsumer.get(), mUsage);
485 if (res != OK) {
486 ALOGE("%s: Unable to configure usage %" PRIu64 " for stream %d",
487 __FUNCTION__, mUsage, mId);
488 return res;
489 }
490
491 res = native_window_set_scaling_mode(mConsumer.get(),
492 NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
493 if (res != OK) {
494 ALOGE("%s: Unable to configure stream scaling: %s (%d)",
495 __FUNCTION__, strerror(-res), res);
496 return res;
497 }
498
499 if (mMaxSize == 0) {
500 // For buffers of known size
501 res = native_window_set_buffers_dimensions(mConsumer.get(),
502 camera_stream::width, camera_stream::height);
503 } else {
504 // For buffers with bounded size
505 res = native_window_set_buffers_dimensions(mConsumer.get(),
506 mMaxSize, 1);
507 }
508 if (res != OK) {
509 ALOGE("%s: Unable to configure stream buffer dimensions"
510 " %d x %d (maxSize %zu) for stream %d",
511 __FUNCTION__, camera_stream::width, camera_stream::height,
512 mMaxSize, mId);
513 return res;
514 }
515 res = native_window_set_buffers_format(mConsumer.get(),
516 camera_stream::format);
517 if (res != OK) {
518 ALOGE("%s: Unable to configure stream buffer format %#x for stream %d",
519 __FUNCTION__, camera_stream::format, mId);
520 return res;
521 }
522
523 res = native_window_set_buffers_data_space(mConsumer.get(),
524 camera_stream::data_space);
525 if (res != OK) {
526 ALOGE("%s: Unable to configure stream dataspace %#x for stream %d",
527 __FUNCTION__, camera_stream::data_space, mId);
528 return res;
529 }
530
531 int maxConsumerBuffers;
532 res = static_cast<ANativeWindow*>(mConsumer.get())->query(
533 mConsumer.get(),
534 NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
535 if (res != OK) {
536 ALOGE("%s: Unable to query consumer undequeued"
537 " buffer count for stream %d", __FUNCTION__, mId);
538 return res;
539 }
540
541 ALOGV("%s: Consumer wants %d buffers, HAL wants %d", __FUNCTION__,
542 maxConsumerBuffers, camera_stream::max_buffers);
543 if (camera_stream::max_buffers == 0) {
544 ALOGE("%s: Camera HAL requested max_buffer count: %d, requires at least 1",
545 __FUNCTION__, camera_stream::max_buffers);
546 return INVALID_OPERATION;
547 }
548
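// Size the buffer pool to cover both sides: buffers the consumer keeps undequeued plus
// buffers the HAL may hold at once.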
549 mTotalBufferCount = maxConsumerBuffers + camera_stream::max_buffers;
550 mHandoutTotalBufferCount = 0;
551 mFrameCount = 0;
552 mLastTimestamp = 0;
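// HW composer and video encoder consumers expect CLOCK_MONOTONIC timestamps; when this flag
// is set, returnBufferCheckedLocked() subtracts mTimestampOffset from the camera timestamp.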
553 mUseMonoTimestamp = (isConsumedByHWComposer() | isVideoStream());
554
555 res = native_window_set_buffer_count(mConsumer.get(),
556 mTotalBufferCount);
557 if (res != OK) {
558 ALOGE("%s: Unable to set buffer count for stream %d",
559 __FUNCTION__, mId);
560 return res;
561 }
562
563 res = native_window_set_buffers_transform(mConsumer.get(),
564 mTransform);
565 if (res != OK) {
566 ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
567 __FUNCTION__, mTransform, strerror(-res), res);
568 return res;
569 }
570
571 /**
572 * The Camera3 buffer manager is only supported by HAL3.3 onwards, as older HALs require
573 * buffers to be statically allocated for internal static buffer registration, while the
574 * buffers provided by the buffer manager are dynamically allocated. Camera3Device only
575 * sets mBufferManager if the device version is > HAL3.2, which guarantees that the buffer
576 * manager setup below is skipped for older HALs. Note that HAL3.2 is also excluded here, as
577 * some HAL3.2 devices may not support dynamic buffer registration.
578 * Also, Camera3BufferManager does not support display/texture streams, as they have their own
579 * buffer management logic.
580 */
581 if (mBufferManager != 0 && mSetId > CAMERA3_STREAM_SET_ID_INVALID &&
582 !(isConsumedByHWComposer() || isConsumedByHWTexture())) {
583 uint64_t consumerUsage = 0;
584 getEndpointUsage(&consumerUsage);
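// For BLOB streams (mMaxSize != 0) the endpoint buffer is a one-dimensional mMaxSize x 1
// allocation, matching the dimensions set in configureConsumerQueueLocked() above.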
585 uint32_t width = (mMaxSize == 0) ? getWidth() : mMaxSize;
586 uint32_t height = (mMaxSize == 0) ? getHeight() : 1;
587 StreamInfo streamInfo(
588 getId(), getStreamSetId(), width, height, getFormat(), getDataSpace(),
589 mUsage | consumerUsage, mTotalBufferCount,
590 /*isConfigured*/true, isMultiResolution());
591 wp<Camera3OutputStream> weakThis(this);
592 res = mBufferManager->registerStream(weakThis,
593 streamInfo);
594 if (res == OK) {
595 // Disable buffer allocation for this BufferQueue; the buffer manager will take over
596 // the buffer allocation responsibility.
597 mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
598 mUseBufferManager = true;
599 } else {
600 ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
601 "(error %d %s), fall back to BufferQueue for buffer management!",
602 __FUNCTION__, mId, res, strerror(-res));
603 }
604 }
605
606 return OK;
607 }
608
609 status_t Camera3OutputStream::getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd) {
610 ATRACE_HFR_CALL();
611 status_t res;
612
613 if ((res = getBufferPreconditionCheckLocked()) != OK) {
614 return res;
615 }
616
617 bool gotBufferFromManager = false;
618
619 if (mUseBufferManager) {
620 sp<GraphicBuffer> gb;
621 res = mBufferManager->getBufferForStream(getId(), getStreamSetId(),
622 isMultiResolution(), &gb, fenceFd);
623 if (res == OK) {
624 // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after a
625 // successful return.
626 *anb = gb.get();
627 res = mConsumer->attachBuffer(*anb);
628 if (shouldLogError(res, mState)) {
629 ALOGE("%s: Stream %d: Can't attach the output buffer to this surface: %s (%d)",
630 __FUNCTION__, mId, strerror(-res), res);
631 }
632 if (res != OK) {
633 checkRetAndSetAbandonedLocked(res);
634 return res;
635 }
636 gotBufferFromManager = true;
637 ALOGV("Stream %d: Attached new buffer", getId());
638 } else if (res == ALREADY_EXISTS) {
639 // There are sufficient free buffers already attached; we can just
640 // dequeue from the buffer queue
641 ALOGV("Stream %d: Reusing attached buffer", getId());
642 gotBufferFromManager = false;
643 } else if (res != OK) {
644 ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager: %s (%d)",
645 __FUNCTION__, mId, strerror(-res), res);
646 return res;
647 }
648 }
649 if (!gotBufferFromManager) {
650 /**
651 * Release the lock briefly to avoid a deadlock in the following scenario:
652 * Thread 1: StreamingProcessor::startStream -> Camera3Stream::isConfiguring().
653 *     This thread holds the StreamingProcessor lock and tries to take the Camera3Stream lock.
654 * Thread 2: Camera3Stream::returnBuffer -> StreamingProcessor::onFrameAvailable().
655 *     This thread holds the Camera3Stream lock and the bufferQueue lock, and tries to take
656 *     the StreamingProcessor lock.
657 * Thread 3: Camera3Stream::getBuffer(). This thread holds the Camera3Stream lock
658 *     and tries to take the bufferQueue lock.
659 * Together these form a circular locking dependency.
660 */
661 sp<Surface> consumer = mConsumer;
662 size_t remainingBuffers = (mState == STATE_PREPARING ? mTotalBufferCount :
663 camera_stream::max_buffers) - mHandoutTotalBufferCount;
664 mLock.unlock();
665
666 nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
667
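// A batch size larger than 1 (set via setBatchSize(), video streams only) dequeues buffers
// in batches and serves subsequent requests from the mBatchedBuffers cache below.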
668 size_t batchSize = mBatchSize.load();
669 if (batchSize == 1) {
670 sp<ANativeWindow> anw = consumer;
671 res = anw->dequeueBuffer(anw.get(), anb, fenceFd);
672 } else {
673 std::unique_lock<std::mutex> batchLock(mBatchLock);
674 res = OK;
675 if (mBatchedBuffers.size() == 0) {
676 if (remainingBuffers == 0) {
677 ALOGE("%s: cannot get buffer while all buffers are handed out", __FUNCTION__);
678 return INVALID_OPERATION;
679 }
680 if (batchSize > remainingBuffers) {
681 batchSize = remainingBuffers;
682 }
683 batchLock.unlock();
684 // Refill batched buffers
685 std::vector<Surface::BatchBuffer> batchedBuffers;
686 batchedBuffers.resize(batchSize);
687 res = consumer->dequeueBuffers(&batchedBuffers);
688 batchLock.lock();
689 if (res != OK) {
690 ALOGE("%s: batch dequeueBuffers call failed! %s (%d)",
691 __FUNCTION__, strerror(-res), res);
692 } else {
693 mBatchedBuffers = std::move(batchedBuffers);
694 }
695 }
696
697 if (res == OK) {
698 // Dispatch batch buffers
699 *anb = mBatchedBuffers.back().buffer;
700 *fenceFd = mBatchedBuffers.back().fenceFd;
701 mBatchedBuffers.pop_back();
702 }
703 }
704
705 nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
706 mDequeueBufferLatency.add(dequeueStart, dequeueEnd);
707
708 mLock.lock();
709
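// With the buffer manager in use the dequeue timeout is set to 0 (see configureQueueLocked()),
// so TIMED_OUT here likely means no free buffer is attached to the queue; ask the buffer
// manager again, this time passing noFreeBuffer, presumably to signal that the consumer side
// has no free buffer available.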
710 if (mUseBufferManager && res == TIMED_OUT) {
711 checkRemovedBuffersLocked();
712
713 sp<GraphicBuffer> gb;
714 res = mBufferManager->getBufferForStream(
715 getId(), getStreamSetId(), isMultiResolution(),
716 &gb, fenceFd, /*noFreeBuffer*/true);
717
718 if (res == OK) {
719 // Attach this buffer to the bufferQueue: the buffer will be in dequeue state after
720 // a successful return.
721 *anb = gb.get();
722 res = mConsumer->attachBuffer(*anb);
723 gotBufferFromManager = true;
724 ALOGV("Stream %d: Attached new buffer", getId());
725
726 if (res != OK) {
727 if (shouldLogError(res, mState)) {
728 ALOGE("%s: Stream %d: Can't attach the output buffer to this surface:"
729 " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
730 }
731 checkRetAndSetAbandonedLocked(res);
732 return res;
733 }
734 } else {
735 ALOGE("%s: Stream %d: Can't get next output buffer from buffer manager:"
736 " %s (%d)", __FUNCTION__, mId, strerror(-res), res);
737 return res;
738 }
739 } else if (res != OK) {
740 if (shouldLogError(res, mState)) {
741 ALOGE("%s: Stream %d: Can't dequeue next output buffer: %s (%d)",
742 __FUNCTION__, mId, strerror(-res), res);
743 }
744 checkRetAndSetAbandonedLocked(res);
745 return res;
746 }
747 }
748
749 if (res == OK) {
750 checkRemovedBuffersLocked();
751 }
752
753 return res;
754 }
755
756 void Camera3OutputStream::checkRemovedBuffersLocked(bool notifyBufferManager) {
757 std::vector<sp<GraphicBuffer>> removedBuffers;
758 status_t res = mConsumer->getAndFlushRemovedBuffers(&removedBuffers);
759 if (res == OK) {
760 onBuffersRemovedLocked(removedBuffers);
761
762 if (notifyBufferManager && mUseBufferManager && removedBuffers.size() > 0) {
763 mBufferManager->onBuffersRemoved(getId(), getStreamSetId(), isMultiResolution(),
764 removedBuffers.size());
765 }
766 }
767 }
768
769 void Camera3OutputStream::checkRetAndSetAbandonedLocked(status_t res) {
770 // Only transition to STATE_ABANDONED from STATE_CONFIGURED. (If it is
771 // STATE_PREPARING, let prepareNextBuffer handle the error.)
772 if ((res == NO_INIT || res == DEAD_OBJECT) && mState == STATE_CONFIGURED) {
773 mState = STATE_ABANDONED;
774 }
775 }
776
777 bool Camera3OutputStream::shouldLogError(status_t res, StreamState state) {
778 if (res == OK) {
779 return false;
780 }
781 if ((res == DEAD_OBJECT || res == NO_INIT) && state == STATE_ABANDONED) {
782 return false;
783 }
784 return true;
785 }
786
787 status_t Camera3OutputStream::disconnectLocked() {
788 status_t res;
789
790 if ((res = Camera3IOStreamBase::disconnectLocked()) != OK) {
791 return res;
792 }
793
794 // Stream configuration was not finished (the stream can only be in STATE_IN_CONFIG or
795 // STATE_CONSTRUCTED state); no need to change the stream state, just return OK.
796 if (mConsumer == nullptr) {
797 return OK;
798 }
799
800 returnPrefetchedBuffersLocked();
801
802 ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());
803
804 res = native_window_api_disconnect(mConsumer.get(),
805 NATIVE_WINDOW_API_CAMERA);
806 /**
807 * This is not an error. If the client's calling process dies, the window will
808 * also die, and all calls to it will return DEAD_OBJECT; thus it's already
809 * "disconnected".
810 */
811 if (res == DEAD_OBJECT) {
812 ALOGW("%s: While disconnecting stream %d from native window, the"
813 " native window died from under us", __FUNCTION__, mId);
814 }
815 else if (res != OK) {
816 ALOGE("%s: Unable to disconnect stream %d from native window "
817 "(error %d %s)",
818 __FUNCTION__, mId, res, strerror(-res));
819 mState = STATE_ERROR;
820 return res;
821 }
822
823 // Since the device is already idle, there are no getBuffer calls to the buffer manager, so
824 // unregistering the stream at this point should be safe.
825 if (mUseBufferManager) {
826 res = mBufferManager->unregisterStream(getId(), getStreamSetId(), isMultiResolution());
827 if (res != OK) {
828 ALOGE("%s: Unable to unregister stream %d from buffer manager "
829 "(error %d %s)", __FUNCTION__, mId, res, strerror(-res));
830 mState = STATE_ERROR;
831 return res;
832 }
833 // Note that, to make the prepare/teardown case work, we must not call mBufferManager.clear(),
834 // as the stream is still in a usable state after this call.
835 mUseBufferManager = false;
836 }
837
838 mState = (mState == STATE_IN_RECONFIG) ? STATE_IN_CONFIG
839 : STATE_CONSTRUCTED;
840
841 mDequeueBufferLatency.log("Stream %d dequeueBuffer latency histogram", mId);
842 mDequeueBufferLatency.reset();
843 return OK;
844 }
845
846 status_t Camera3OutputStream::getEndpointUsage(uint64_t *usage) const {
847
848 status_t res;
849
850 if (mConsumer == nullptr) {
851 // mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
852 *usage = mConsumerUsage;
853 return OK;
854 }
855
856 res = getEndpointUsageForSurface(usage, mConsumer);
857
858 return res;
859 }
860
861 void Camera3OutputStream::applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/) {
862 if (consumerUsage == nullptr) {
863 return;
864 }
865
866 // If an opaque output stream's endpoint is ImageReader, add
867 // GRALLOC_USAGE_HW_CAMERA_ZSL to the usage so HAL knows it will be used
868 // for the ZSL use case.
869 // Assume it's for ImageReader if the consumer usage doesn't have any of these bits set:
870 // 1. GRALLOC_USAGE_HW_TEXTURE
871 // 2. GRALLOC_USAGE_HW_RENDER
872 // 3. GRALLOC_USAGE_HW_COMPOSER
873 // 4. GRALLOC_USAGE_HW_VIDEO_ENCODER
874 if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
875 (*consumerUsage & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
876 GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
877 *consumerUsage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
878 }
879 }
880
881 status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
882 const sp<Surface>& surface) const {
883 status_t res;
884 uint64_t u = 0;
885
886 res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(surface.get()), &u);
887 applyZSLUsageQuirk(camera_stream::format, &u);
888 *usage = u;
889 return res;
890 }
891
892 bool Camera3OutputStream::isVideoStream() const {
893 uint64_t usage = 0;
894 status_t res = getEndpointUsage(&usage);
895 if (res != OK) {
896 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
897 return false;
898 }
899
900 return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
901 }
902
903 status_t Camera3OutputStream::setBufferManager(sp<Camera3BufferManager> bufferManager) {
904 Mutex::Autolock l(mLock);
905 if (mState != STATE_CONSTRUCTED) {
906 ALOGE("%s: this method can only be called when stream in CONSTRUCTED state.",
907 __FUNCTION__);
908 return INVALID_OPERATION;
909 }
910 mBufferManager = bufferManager;
911
912 return OK;
913 }
914
915 status_t Camera3OutputStream::updateStream(const std::vector<sp<Surface>> &/*outputSurfaces*/,
916 const std::vector<OutputStreamInfo> &/*outputInfo*/,
917 const std::vector<size_t> &/*removedSurfaceIds*/,
918 KeyedVector<sp<Surface>, size_t> * /*outputMapo*/) {
919 ALOGE("%s: this method is not supported!", __FUNCTION__);
920 return INVALID_OPERATION;
921 }
922
923 void Camera3OutputStream::BufferProducerListener::onBufferReleased() {
924 sp<Camera3OutputStream> stream = mParent.promote();
925 if (stream == nullptr) {
926 ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
927 return;
928 }
929
930 Mutex::Autolock l(stream->mLock);
931 if (!(stream->mUseBufferManager)) {
932 return;
933 }
934
935 ALOGV("Stream %d: Buffer released", stream->getId());
936 bool shouldFreeBuffer = false;
937 status_t res = stream->mBufferManager->onBufferReleased(
938 stream->getId(), stream->getStreamSetId(), stream->isMultiResolution(),
939 &shouldFreeBuffer);
940 if (res != OK) {
941 ALOGE("%s: signaling buffer release to buffer manager failed: %s (%d).", __FUNCTION__,
942 strerror(-res), res);
943 stream->mState = STATE_ERROR;
944 }
945
946 if (shouldFreeBuffer) {
947 sp<GraphicBuffer> buffer;
948 // Detach and free a buffer (when buffer goes out of scope)
949 stream->detachBufferLocked(&buffer, /*fenceFd*/ nullptr);
950 if (buffer.get() != nullptr) {
951 stream->mBufferManager->notifyBufferRemoved(
952 stream->getId(), stream->getStreamSetId(), stream->isMultiResolution());
953 }
954 }
955 }
956
957 void Camera3OutputStream::BufferProducerListener::onBuffersDiscarded(
958 const std::vector<sp<GraphicBuffer>>& buffers) {
959 sp<Camera3OutputStream> stream = mParent.promote();
960 if (stream == nullptr) {
961 ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
962 return;
963 }
964
965 if (buffers.size() > 0) {
966 Mutex::Autolock l(stream->mLock);
967 stream->onBuffersRemovedLocked(buffers);
968 if (stream->mUseBufferManager) {
969 stream->mBufferManager->onBuffersRemoved(stream->getId(),
970 stream->getStreamSetId(), stream->isMultiResolution(), buffers.size());
971 }
972 ALOGV("Stream %d: %zu Buffers discarded.", stream->getId(), buffers.size());
973 }
974 }
975
976 void Camera3OutputStream::onBuffersRemovedLocked(
977 const std::vector<sp<GraphicBuffer>>& removedBuffers) {
978 sp<Camera3StreamBufferFreedListener> callback = mBufferFreedListener.promote();
979 if (callback != nullptr) {
980 for (const auto& gb : removedBuffers) {
981 callback->onBufferFreed(mId, gb->handle);
982 }
983 }
984 }
985
986 status_t Camera3OutputStream::detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) {
987 Mutex::Autolock l(mLock);
988 return detachBufferLocked(buffer, fenceFd);
989 }
990
991 status_t Camera3OutputStream::detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd) {
992 ALOGV("Stream %d: detachBuffer", getId());
993 if (buffer == nullptr) {
994 return BAD_VALUE;
995 }
996
997 sp<Fence> fence;
998 status_t res = mConsumer->detachNextBuffer(buffer, &fence);
999 if (res == NO_MEMORY) {
1000 // This may rarely happen; it indicates that the released buffer was freed by another
1001 // call (e.g., attachBuffer, dequeueBuffer, etc.) before reaching here. We should notify the
1002 // buffer manager that this buffer has been freed. It's not fatal, but should be avoided,
1003 // so log a warning.
1004 *buffer = 0;
1005 ALOGW("%s: the released buffer has already been freed by the buffer queue!", __FUNCTION__);
1006 } else if (res != OK) {
1007 // Treat other errors as abandonment
1008 if (shouldLogError(res, mState)) {
1009 ALOGE("%s: detach next buffer failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1010 }
1011 mState = STATE_ABANDONED;
1012 return res;
1013 }
1014
1015 if (fenceFd != nullptr) {
1016 if (fence != 0 && fence->isValid()) {
1017 *fenceFd = fence->dup();
1018 } else {
1019 *fenceFd = -1;
1020 }
1021 }
1022
1023 // Here we assume detachBuffer is called by buffer manager so it doesn't need to be notified
1024 checkRemovedBuffersLocked(/*notifyBufferManager*/false);
1025 return res;
1026 }
1027
1028 status_t Camera3OutputStream::dropBuffers(bool dropping) {
1029 Mutex::Autolock l(mLock);
1030 mDropBuffers = dropping;
1031 return OK;
1032 }
1033
1034 const String8& Camera3OutputStream::getPhysicalCameraId() const {
1035 Mutex::Autolock l(mLock);
1036 return physicalCameraId();
1037 }
1038
1039 status_t Camera3OutputStream::notifyBufferReleased(ANativeWindowBuffer* /*anwBuffer*/) {
1040 return OK;
1041 }
1042
1043 bool Camera3OutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
1044 Mutex::Autolock l(mLock);
1045
1046 if (surface_id != 0) {
1047 ALOGE("%s: surface_id %zu for Camera3OutputStream should be 0!", __FUNCTION__, surface_id);
1048 }
1049 return mConsumer == nullptr;
1050 }
1051
1052 status_t Camera3OutputStream::setConsumers(const std::vector<sp<Surface>>& consumers) {
1053 Mutex::Autolock l(mLock);
1054 if (consumers.size() != 1) {
1055 ALOGE("%s: it's illegal to set %zu consumer surfaces!",
1056 __FUNCTION__, consumers.size());
1057 return INVALID_OPERATION;
1058 }
1059 if (consumers[0] == nullptr) {
1060 ALOGE("%s: it's illegal to set null consumer surface!", __FUNCTION__);
1061 return INVALID_OPERATION;
1062 }
1063
1064 if (mConsumer != nullptr) {
1065 ALOGE("%s: consumer surface was already set!", __FUNCTION__);
1066 return INVALID_OPERATION;
1067 }
1068
1069 mConsumer = consumers[0];
1070 return OK;
1071 }
1072
1073 bool Camera3OutputStream::isConsumedByHWComposer() const {
1074 uint64_t usage = 0;
1075 status_t res = getEndpointUsage(&usage);
1076 if (res != OK) {
1077 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1078 return false;
1079 }
1080
1081 return (usage & GRALLOC_USAGE_HW_COMPOSER) != 0;
1082 }
1083
1084 bool Camera3OutputStream::isConsumedByHWTexture() const {
1085 uint64_t usage = 0;
1086 status_t res = getEndpointUsage(&usage);
1087 if (res != OK) {
1088 ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
1089 return false;
1090 }
1091
1092 return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
1093 }
1094
1095 void Camera3OutputStream::dumpImageToDisk(nsecs_t timestamp,
1096 ANativeWindowBuffer* anwBuffer, int fence) {
1097 // Derive the output file name
1098 std::string fileExtension = "jpg";
1099 char imageFileName[64];
1100 time_t now = time(0);
1101 tm *localTime = localtime(&now);
1102 snprintf(imageFileName, sizeof(imageFileName), "IMG_%4d%02d%02d_%02d%02d%02d_%" PRId64 ".%s",
1103 1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday,
1104 localTime->tm_hour, localTime->tm_min, localTime->tm_sec,
1105 timestamp, fileExtension.c_str());
1106
1107 // Lock the image for CPU read
1108 sp<GraphicBuffer> graphicBuffer = GraphicBuffer::from(anwBuffer);
1109 void* mapped = nullptr;
1110 base::unique_fd fenceFd(dup(fence));
1111 status_t res = graphicBuffer->lockAsync(GraphicBuffer::USAGE_SW_READ_OFTEN, &mapped,
1112 fenceFd.get());
1113 if (res != OK) {
1114 ALOGE("%s: Failed to lock the buffer: %s (%d)", __FUNCTION__, strerror(-res), res);
1115 return;
1116 }
1117
1118 // Figure out actual file size
1119 auto actualJpegSize = android::camera2::JpegProcessor::findJpegSize((uint8_t*)mapped, mMaxSize);
1120 if (actualJpegSize == 0) {
1121 actualJpegSize = mMaxSize;
1122 }
1123
1124 // Output image data to file
1125 std::string filePath = "/data/misc/cameraserver/";
1126 filePath += imageFileName;
1127 std::ofstream imageFile(filePath.c_str(), std::ofstream::binary);
1128 if (!imageFile.is_open()) {
1129 ALOGE("%s: Unable to create file %s", __FUNCTION__, filePath.c_str());
1130 graphicBuffer->unlock();
1131 return;
1132 }
1133 imageFile.write((const char*)mapped, actualJpegSize);
1134
1135 graphicBuffer->unlock();
1136 }
1137
1138 status_t Camera3OutputStream::setBatchSize(size_t batchSize) {
1139 Mutex::Autolock l(mLock);
1140 if (batchSize == 0) {
1141 ALOGE("%s: invalid batch size 0", __FUNCTION__);
1142 return BAD_VALUE;
1143 }
1144
1145 if (mUseBufferManager) {
1146 ALOGE("%s: batch operation is not supported with buffer manager", __FUNCTION__);
1147 return INVALID_OPERATION;
1148 }
1149
1150 if (!isVideoStream()) {
1151 ALOGE("%s: batch operation is not supported with non-video stream", __FUNCTION__);
1152 return INVALID_OPERATION;
1153 }
1154
1155 if (camera_stream::max_buffers < batchSize) {
1156 ALOGW("%s: batch size is capped by max_buffers %d", __FUNCTION__,
1157 camera_stream::max_buffers);
1158 batchSize = camera_stream::max_buffers;
1159 }
1160
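// mBatchSize may only be changed once, away from its default of 1; dynamically changing an
// already-configured batch size is not supported (see the compare-and-exchange below).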
1161 size_t defaultBatchSize = 1;
1162 if (!mBatchSize.compare_exchange_strong(defaultBatchSize, batchSize)) {
1163 ALOGE("%s: change batch size from %zu to %zu dynamically is not supported",
1164 __FUNCTION__, defaultBatchSize, batchSize);
1165 return INVALID_OPERATION;
1166 }
1167
1168 return OK;
1169 }
1170
1171 void Camera3OutputStream::returnPrefetchedBuffersLocked() {
1172 std::vector<Surface::BatchBuffer> batchedBuffers;
1173
1174 {
1175 std::lock_guard<std::mutex> batchLock(mBatchLock);
1176 if (mBatchedBuffers.size() != 0) {
1177 ALOGW("%s: %zu extra prefetched buffers detected. Returning",
1178 __FUNCTION__, mBatchedBuffers.size());
1179 batchedBuffers = std::move(mBatchedBuffers);
1180 }
1181 }
1182
1183 if (batchedBuffers.size() > 0) {
1184 mConsumer->cancelBuffers(batchedBuffers);
1185 }
1186 }
1187
1188 }; // namespace camera3
1189
1190 }; // namespace android
1191