/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera3-OutputUtils"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0  // Per-frame verbose logging

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) ((void)0)
#endif

// Convenience macros for transitioning to the error state
#define SET_ERR(fmt, ...) states.setErrIntf.setErrorState(   \
    "%s: " fmt, __FUNCTION__,                         \
    ##__VA_ARGS__)

#include <inttypes.h>

#include <utils/Log.h>
#include <utils/SortedVector.h>
#include <utils/Trace.h>

#include <android/hardware/camera2/ICameraDeviceCallbacks.h>

#include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
#include <android/hardware/camera/device/3.5/ICameraDeviceCallback.h>
#include <android/hardware/camera/device/3.5/ICameraDeviceSession.h>

#include <camera/CameraUtils.h>
#include <camera_metadata_hidden.h>

#include "device3/Camera3OutputUtils.h"

#include "system/camera_metadata.h"

using namespace android::camera3;
using namespace android::hardware::camera;

namespace android {
namespace camera3 {

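// Remove or neutralize color-related result tags for monochrome cameras, so
// the metadata reported to clients is consistent with a single-channel sensor.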
status_t fixupMonochromeTags(
        CaptureOutputStates& states,
        const CameraMetadata& deviceInfo,
        CameraMetadata& resultMetadata) {
    status_t res = OK;
    if (!states.needFixupMonoChrome) {
        return res;
    }

    // Remove tags that are not applicable to monochrome camera.
    int32_t tagsToRemove[] = {
           ANDROID_SENSOR_GREEN_SPLIT,
           ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
           ANDROID_COLOR_CORRECTION_MODE,
           ANDROID_COLOR_CORRECTION_TRANSFORM,
           ANDROID_COLOR_CORRECTION_GAINS,
    };
    for (auto tag : tagsToRemove) {
        res = resultMetadata.erase(tag);
        if (res != OK) {
            ALOGE("%s: Failed to remove tag %d for monochrome camera", __FUNCTION__, tag);
            return res;
        }
    }

    // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
    camera_metadata_entry blEntry = resultMetadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
    for (size_t i = 1; i < blEntry.count; i++) {
        blEntry.data.f[i] = blEntry.data.f[0];
    }

    // ANDROID_SENSOR_NOISE_PROFILE
    camera_metadata_entry npEntry = resultMetadata.find(ANDROID_SENSOR_NOISE_PROFILE);
    if (npEntry.count > 0 && npEntry.count % 2 == 0) {
        double np[] = {npEntry.data.d[0], npEntry.data.d[1]};
        res = resultMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, np, 2);
        if (res != OK) {
            ALOGE("%s: Failed to update SENSOR_NOISE_PROFILE: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }

    // ANDROID_STATISTICS_LENS_SHADING_MAP
    camera_metadata_ro_entry lsSizeEntry = deviceInfo.find(ANDROID_LENS_INFO_SHADING_MAP_SIZE);
    camera_metadata_entry lsEntry = resultMetadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP);
    if (lsSizeEntry.count == 2 && lsEntry.count > 0
            && (int32_t)lsEntry.count == 4 * lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]) {
        for (int32_t i = 0; i < lsSizeEntry.data.i32[0] * lsSizeEntry.data.i32[1]; i++) {
            lsEntry.data.f[4*i+1] = lsEntry.data.f[4*i];
            lsEntry.data.f[4*i+2] = lsEntry.data.f[4*i];
            lsEntry.data.f[4*i+3] = lsEntry.data.f[4*i];
        }
    }

    // ANDROID_TONEMAP_CURVE_BLUE
    // ANDROID_TONEMAP_CURVE_GREEN
    // ANDROID_TONEMAP_CURVE_RED
    camera_metadata_entry tcbEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_BLUE);
    camera_metadata_entry tcgEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_GREEN);
    camera_metadata_entry tcrEntry = resultMetadata.find(ANDROID_TONEMAP_CURVE_RED);
    if (tcbEntry.count > 0
            && tcbEntry.count == tcgEntry.count
            && tcbEntry.count == tcrEntry.count) {
        for (size_t i = 0; i < tcbEntry.count; i++) {
            tcbEntry.data.f[i] = tcrEntry.data.f[i];
            tcgEntry.data.f[i] = tcrEntry.data.f[i];
        }
    }

    return res;
}

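// Add a completed CaptureResult to the output result queue and wake up any
// waiting result consumers. Caller must hold states.outputLock.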
void insertResultLocked(CaptureOutputStates& states, CaptureResult *result, uint32_t frameNumber) {
    if (result == nullptr) return;

    camera_metadata_t *meta = const_cast<camera_metadata_t *>(
            result->mMetadata.getAndLock());
    set_camera_metadata_vendor_id(meta, states.vendorTagId);
    result->mMetadata.unlock(meta);

    if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
            (int32_t*)&frameNumber, 1) != OK) {
        SET_ERR("Failed to set frame number %d in metadata", frameNumber);
        return;
    }

    if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
        SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
        return;
    }

    // Update vendor tag id for physical metadata
    for (auto& physicalMetadata : result->mPhysicalMetadatas) {
        camera_metadata_t *pmeta = const_cast<camera_metadata_t *>(
                physicalMetadata.mPhysicalCameraMetadata.getAndLock());
        set_camera_metadata_vendor_id(pmeta, states.vendorTagId);
        physicalMetadata.mPhysicalCameraMetadata.unlock(pmeta);
    }

    // Valid result, insert into queue
    std::list<CaptureResult>::iterator queuedResult =
            states.resultQueue.insert(states.resultQueue.end(), CaptureResult(*result));
    ALOGV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
           ", burstId = %" PRId32, __FUNCTION__,
           queuedResult->mResultExtras.requestId,
           queuedResult->mResultExtras.frameNumber,
           queuedResult->mResultExtras.burstId);

    states.resultSignal.notify_one();
}


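// Queue a partial (metadata-only) capture result, after applying monochrome
// fixups and dropping keys that are remapped by the distortion, zoom-ratio,
// and rotate-and-crop mappers.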
void sendPartialCaptureResult(CaptureOutputStates& states,
        const camera_metadata_t * partialResult,
        const CaptureResultExtras &resultExtras, uint32_t frameNumber) {
    ATRACE_CALL();
    std::lock_guard<std::mutex> l(states.outputLock);

    CaptureResult captureResult;
    captureResult.mResultExtras = resultExtras;
    captureResult.mMetadata = partialResult;

    // Fix up result metadata for monochrome camera.
    status_t res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
    if (res != OK) {
        SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
        return;
    }

    // Update partial result by removing keys remapped by DistortionCorrection, ZoomRatio,
    // and RotationAndCrop mappers.
    std::set<uint32_t> keysToRemove;

    auto iter = states.distortionMappers.find(states.cameraId.c_str());
    if (iter != states.distortionMappers.end()) {
        const auto& remappedKeys = iter->second.getRemappedKeys();
        keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
    }

    const auto& remappedKeys = states.zoomRatioMappers[states.cameraId.c_str()].getRemappedKeys();
    keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());

    auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
    if (mapper != states.rotateAndCropMappers.end()) {
        const auto& remappedKeys = mapper->second.getRemappedKeys();
        keysToRemove.insert(remappedKeys.begin(), remappedKeys.end());
    }

    for (uint32_t key : keysToRemove) {
        captureResult.mMetadata.erase(key);
    }

    // Send partial result
    if (captureResult.mMetadata.entryCount() > 0) {
        insertResultLocked(states, &captureResult, frameNumber);
    }
}

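// Assemble and queue the final capture result for a frame: verify result
// ordering and timestamps, merge collected partial metadata, and apply the
// distortion, zoom-ratio, rotate-and-crop, and monochrome fixups to both the
// logical and the physical camera metadata.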
void sendCaptureResult(
        CaptureOutputStates& states,
        CameraMetadata &pendingMetadata,
        CaptureResultExtras &resultExtras,
        CameraMetadata &collectedPartialResult,
        uint32_t frameNumber,
        bool reprocess, bool zslStillCapture, bool rotateAndCropAuto,
        const std::set<std::string>& cameraIdsWithZoom,
        const std::vector<PhysicalCaptureResultInfo>& physicalMetadatas) {
    ATRACE_CALL();
    if (pendingMetadata.isEmpty())
        return;

    std::lock_guard<std::mutex> l(states.outputLock);

    // TODO: need to track errors for tighter bounds on expected frame number
    if (reprocess) {
        if (frameNumber < states.nextReprocResultFrameNum) {
            SET_ERR("Out-of-order reprocess capture result metadata submitted! "
                "(got frame number %d, expecting %d)",
                frameNumber, states.nextReprocResultFrameNum);
            return;
        }
        states.nextReprocResultFrameNum = frameNumber + 1;
    } else if (zslStillCapture) {
        if (frameNumber < states.nextZslResultFrameNum) {
            SET_ERR("Out-of-order ZSL still capture result metadata submitted! "
                "(got frame number %d, expecting %d)",
                frameNumber, states.nextZslResultFrameNum);
            return;
        }
        states.nextZslResultFrameNum = frameNumber + 1;
    } else {
        if (frameNumber < states.nextResultFrameNum) {
            SET_ERR("Out-of-order capture result metadata submitted! "
                    "(got frame number %d, expecting %d)",
                    frameNumber, states.nextResultFrameNum);
            return;
        }
        states.nextResultFrameNum = frameNumber + 1;
    }

    CaptureResult captureResult;
    captureResult.mResultExtras = resultExtras;
    captureResult.mMetadata = pendingMetadata;
    captureResult.mPhysicalMetadatas = physicalMetadatas;

    // Append any previous partials to form a complete result
    if (states.usePartialResult && !collectedPartialResult.isEmpty()) {
        captureResult.mMetadata.append(collectedPartialResult);
    }

    captureResult.mMetadata.sort();

    // Check that there's a timestamp in the result metadata
    camera_metadata_entry timestamp = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
    if (timestamp.count == 0) {
        SET_ERR("No timestamp provided by HAL for frame %d!",
                frameNumber);
        return;
    }
    nsecs_t sensorTimestamp = timestamp.data.i64[0];

    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        camera_metadata_entry timestamp =
                physicalMetadata.mPhysicalCameraMetadata.find(ANDROID_SENSOR_TIMESTAMP);
        if (timestamp.count == 0) {
            SET_ERR("No timestamp provided by HAL for physical camera %s frame %d!",
                    String8(physicalMetadata.mPhysicalCameraId).c_str(), frameNumber);
            return;
        }
    }

    // Fix up some result metadata to account for HAL-level distortion correction
    status_t res = OK;
    auto iter = states.distortionMappers.find(states.cameraId.c_str());
    if (iter != states.distortionMappers.end()) {
        res = iter->second.correctCaptureResult(&captureResult.mMetadata);
        if (res != OK) {
            SET_ERR("Unable to correct capture result metadata for frame %d: %s (%d)",
                    frameNumber, strerror(-res), res);
            return;
        }
    }

    // Fix up result metadata to account for zoom ratio availabilities between
    // HAL and app.
    bool zoomRatioIs1 = cameraIdsWithZoom.find(states.cameraId.c_str()) == cameraIdsWithZoom.end();
    res = states.zoomRatioMappers[states.cameraId.c_str()].updateCaptureResult(
            &captureResult.mMetadata, zoomRatioIs1);
    if (res != OK) {
        SET_ERR("Failed to update capture result zoom ratio metadata for frame %d: %s (%d)",
                frameNumber, strerror(-res), res);
        return;
    }

    // Fix up result metadata to account for rotateAndCrop in AUTO mode
    if (rotateAndCropAuto) {
        auto mapper = states.rotateAndCropMappers.find(states.cameraId.c_str());
        if (mapper != states.rotateAndCropMappers.end()) {
            res = mapper->second.updateCaptureResult(
                    &captureResult.mMetadata);
            if (res != OK) {
                SET_ERR("Unable to correct capture result rotate-and-crop for frame %d: %s (%d)",
                        frameNumber, strerror(-res), res);
                return;
            }
        }
    }

    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
        auto mapper = states.distortionMappers.find(cameraId8.c_str());
        if (mapper != states.distortionMappers.end()) {
            res = mapper->second.correctCaptureResult(
                    &physicalMetadata.mPhysicalCameraMetadata);
            if (res != OK) {
                SET_ERR("Unable to correct physical capture result metadata for frame %d: %s (%d)",
                        frameNumber, strerror(-res), res);
                return;
            }
        }

        zoomRatioIs1 = cameraIdsWithZoom.find(cameraId8.c_str()) == cameraIdsWithZoom.end();
        res = states.zoomRatioMappers[cameraId8.c_str()].updateCaptureResult(
                &physicalMetadata.mPhysicalCameraMetadata, zoomRatioIs1);
        if (res != OK) {
            SET_ERR("Failed to update camera %s's physical zoom ratio metadata for "
                    "frame %d: %s(%d)", cameraId8.c_str(), frameNumber, strerror(-res), res);
            return;
        }
    }

    // Fix up result metadata for monochrome camera.
    res = fixupMonochromeTags(states, states.deviceInfo, captureResult.mMetadata);
    if (res != OK) {
        SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
        return;
    }
    for (auto& physicalMetadata : captureResult.mPhysicalMetadatas) {
        String8 cameraId8(physicalMetadata.mPhysicalCameraId);
        res = fixupMonochromeTags(states,
                states.physicalDeviceInfoMap.at(cameraId8.c_str()),
                physicalMetadata.mPhysicalCameraMetadata);
        if (res != OK) {
            SET_ERR("Failed to override result metadata: %s (%d)", strerror(-res), res);
            return;
        }
    }

    std::unordered_map<std::string, CameraMetadata> monitoredPhysicalMetadata;
    for (auto& m : physicalMetadatas) {
        monitoredPhysicalMetadata.emplace(String8(m.mPhysicalCameraId).string(),
                CameraMetadata(m.mPhysicalCameraMetadata));
    }
    states.tagMonitor.monitorMetadata(TagMonitor::RESULT,
            frameNumber, sensorTimestamp, captureResult.mMetadata,
            monitoredPhysicalMetadata);

    insertResultLocked(states, &captureResult, frameNumber);
}

// Read one camera metadata blob, either from the FMQ or directly from the result argument.
// Assumes the FMQ is already protected by a lock.
status_t readOneCameraMetadataLocked(
        std::unique_ptr<ResultMetadataQueue>& fmq,
        uint64_t fmqResultSize,
        hardware::camera::device::V3_2::CameraMetadata& resultMetadata,
        const hardware::camera::device::V3_2::CameraMetadata& result) {
    if (fmqResultSize > 0) {
        resultMetadata.resize(fmqResultSize);
        if (fmq == nullptr) {
            return NO_MEMORY; // logged in initialize()
        }
        if (!fmq->read(resultMetadata.data(), fmqResultSize)) {
            ALOGE("%s: Cannot read camera metadata from fmq, size = %" PRIu64,
                    __FUNCTION__, fmqResultSize);
            return INVALID_OPERATION;
        }
    } else {
        resultMetadata.setToExternal(const_cast<uint8_t *>(result.data()),
                result.size());
    }

    if (resultMetadata.size() != 0) {
        status_t res;
        const camera_metadata_t* metadata =
                reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());
        size_t expected_metadata_size = resultMetadata.size();
        if ((res = validate_camera_metadata_structure(metadata, &expected_metadata_size)) != OK) {
            ALOGE("%s: Invalid camera metadata received by camera service from HAL: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return INVALID_OPERATION;
        }
    }

    return OK;
}

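// Remove one entry from the in-flight map and inform the in-flight interface
// so it can update its expected-duration bookkeeping.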
void removeInFlightMapEntryLocked(CaptureOutputStates& states, int idx) {
    ATRACE_CALL();
    InFlightRequestMap& inflightMap = states.inflightMap;
    nsecs_t duration = inflightMap.valueAt(idx).maxExpectedDuration;
    inflightMap.removeItemsAt(idx, 1);

    states.inflightIntf.onInflightEntryRemovedLocked(duration);
}

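// Remove an in-flight request once all of its buffers, result metadata, and
// shutter notification (or the corresponding error notifications) have
// arrived, returning any pending output buffers in the process.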
void removeInFlightRequestIfReadyLocked(CaptureOutputStates& states, int idx) {
    InFlightRequestMap& inflightMap = states.inflightMap;
    const InFlightRequest &request = inflightMap.valueAt(idx);
    const uint32_t frameNumber = inflightMap.keyAt(idx);
    SessionStatsBuilder& sessionStatsBuilder = states.sessionStatsBuilder;

    nsecs_t sensorTimestamp = request.sensorTimestamp;
    nsecs_t shutterTimestamp = request.shutterTimestamp;

    // Check if it's okay to remove the request from InFlightMap:
    // In the case of a successful request:
    //      all input and output buffers, all result metadata, shutter callback
    //      arrived.
    // In the case of an unsuccessful request:
    //      all input and output buffers, as well as request/result error notifications, arrived.
    if (request.numBuffersLeft == 0 &&
            (request.skipResultMetadata ||
            (request.haveResultMetadata && shutterTimestamp != 0))) {
        if (request.stillCapture) {
            ATRACE_ASYNC_END("still capture", frameNumber);
        }

        ATRACE_ASYNC_END("frame capture", frameNumber);

        // Validation check: for a request with a callback that completed
        // successfully, the sensor timestamp must match the shutter timestamp.
        if (request.hasCallback && request.requestStatus == OK &&
                sensorTimestamp != shutterTimestamp) {
            SET_ERR("sensor timestamp (%" PRId64
                ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
                sensorTimestamp, frameNumber, shutterTimestamp);
        }

        // for an unsuccessful request, it may have pending output buffers to
        // return.
        assert(request.requestStatus != OK ||
               request.pendingOutputBuffers.size() == 0);

        returnOutputBuffers(
            states.useHalBufManager, states.listener,
            request.pendingOutputBuffers.array(),
            request.pendingOutputBuffers.size(), 0,
            /*requested*/true, request.requestTimeNs, states.sessionStatsBuilder,
            /*timestampIncreasing*/true,
            request.outputSurfaces, request.resultExtras,
            request.errorBufStrategy, request.transform);

        // Note down the just completed frame number
        if (request.hasInputBuffer) {
            states.lastCompletedReprocessFrameNumber = frameNumber;
        } else if (request.zslCapture) {
            states.lastCompletedZslFrameNumber = frameNumber;
        } else {
            states.lastCompletedRegularFrameNumber = frameNumber;
        }

        sessionStatsBuilder.incResultCounter(request.skipResultMetadata);

        removeInFlightMapEntryLocked(states, idx);
        ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
    }

    states.inflightIntf.checkInflightMapLengthLocked();
}

// Erase the subset of physicalCameraIds that contains id
bool erasePhysicalCameraIdSet(
        std::set<std::set<String8>>& physicalCameraIds, const String8& id) {
    bool found = false;
    for (auto iter = physicalCameraIds.begin(); iter != physicalCameraIds.end(); iter++) {
        if (iter->count(id) == 1) {
            physicalCameraIds.erase(iter);
            found = true;
            break;
        }
    }
    return found;
}

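// Process a single capture result from the HAL: match it to its in-flight
// request, handle partial and final result metadata, account for returned
// input/output buffers, and remove the in-flight entry once it is complete.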
void processCaptureResult(CaptureOutputStates& states, const camera_capture_result *result) {
    ATRACE_CALL();

    status_t res;

    uint32_t frameNumber = result->frame_number;
    if (result->result == NULL && result->num_output_buffers == 0 &&
            result->input_buffer == NULL) {
        SET_ERR("No result data provided by HAL for frame %d",
                frameNumber);
        return;
    }

    if (!states.usePartialResult &&
            result->result != NULL &&
            result->partial_result != 1) {
        SET_ERR("Result is malformed for frame %d: partial_result %u must be 1"
                " if partial result is not supported",
                frameNumber, result->partial_result);
        return;
    }

    bool isPartialResult = false;
    CameraMetadata collectedPartialResult;
    bool hasInputBufferInRequest = false;

    // Get shutter timestamp and resultExtras from list of in-flight requests,
    // where it was added by the shutter notification for this frame. If the
    // shutter timestamp isn't received yet, append the output buffers to the
    // in-flight request and they will be returned when the shutter timestamp
    // arrives. Update the in-flight status and remove the in-flight entry if
    // all result data and shutter timestamp have been received.
    nsecs_t shutterTimestamp = 0;
    {
        std::lock_guard<std::mutex> l(states.inflightLock);
        ssize_t idx = states.inflightMap.indexOfKey(frameNumber);
        if (idx == NAME_NOT_FOUND) {
            SET_ERR("Unknown frame number for capture result: %d",
                    frameNumber);
            return;
        }
        InFlightRequest &request = states.inflightMap.editValueAt(idx);
        ALOGVV("%s: got InFlightRequest requestId = %" PRId32
                ", frameNumber = %" PRId64 ", burstId = %" PRId32
                ", partialResultCount = %d/%d, hasCallback = %d, num_output_buffers %d"
                ", usePartialResult = %d",
                __FUNCTION__, request.resultExtras.requestId,
                request.resultExtras.frameNumber, request.resultExtras.burstId,
                result->partial_result, states.numPartialResults,
                request.hasCallback, result->num_output_buffers,
                states.usePartialResult);
        // Always update the partial count to the latest one if it's not 0
        // (buffers only). When framework aggregates adjacent partial results
        // into one, the latest partial count will be used.
        if (result->partial_result != 0)
            request.resultExtras.partialResultCount = result->partial_result;

        if ((result->result != nullptr) && !states.legacyClient) {
            camera_metadata_ro_entry entry;
            auto ret = find_camera_metadata_ro_entry(result->result,
                    ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID, &entry);
            if ((ret == OK) && (entry.count > 0)) {
                std::string physicalId(reinterpret_cast<const char *>(entry.data.u8));
                auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
                if (deviceInfo != states.physicalDeviceInfoMap.end()) {
                    auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
                    if (orientation.count > 0) {
                        ret = CameraUtils::getRotationTransform(deviceInfo->second,
                                &request.transform);
                        if (ret != OK) {
                            ALOGE("%s: Failed to calculate current stream transformation: %s (%d)",
                                    __FUNCTION__, strerror(-ret), ret);
                        }
                    } else {
                        ALOGE("%s: Physical device orientation absent!", __FUNCTION__);
                    }
                } else {
                    ALOGE("%s: Physical device not found in device info map!", __FUNCTION__);
                }
            }
        }

        // Check if this result carries only partial metadata
        if (states.usePartialResult && result->result != NULL) {
            if (result->partial_result > states.numPartialResults || result->partial_result < 1) {
                SET_ERR("Result is malformed for frame %d: partial_result %u must be in"
                        " the range of [1, %d] when metadata is included in the result",
                        frameNumber, result->partial_result, states.numPartialResults);
                return;
            }
            isPartialResult = (result->partial_result < states.numPartialResults);
            if (isPartialResult && result->num_physcam_metadata) {
                SET_ERR("Result is malformed for frame %d: partial_result not allowed for"
                        " physical camera result", frameNumber);
                return;
            }
            if (isPartialResult) {
                request.collectedPartialResult.append(result->result);
            }

            if (isPartialResult && request.hasCallback) {
                // Send partial capture result
                sendPartialCaptureResult(states, result->result, request.resultExtras,
                        frameNumber);
            }
        }

        shutterTimestamp = request.shutterTimestamp;
        hasInputBufferInRequest = request.hasInputBuffer;

        // Did we get the (final) result metadata for this capture?
        if (result->result != NULL && !isPartialResult) {
            if (request.physicalCameraIds.size() != result->num_physcam_metadata) {
                SET_ERR("Expected physical camera metadata count %zu not equal to actual count %u",
                        request.physicalCameraIds.size(), result->num_physcam_metadata);
                return;
            }
            if (request.haveResultMetadata) {
                SET_ERR("Called multiple times with metadata for frame %d",
                        frameNumber);
                return;
            }
            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                String8 physicalId(result->physcam_ids[i]);
                bool validPhysicalCameraMetadata =
                        erasePhysicalCameraIdSet(request.physicalCameraIds, physicalId);
                if (!validPhysicalCameraMetadata) {
                    SET_ERR("Unexpected total result for frame %d camera %s",
                            frameNumber, physicalId.c_str());
                    return;
                }
            }
            if (states.usePartialResult &&
                    !request.collectedPartialResult.isEmpty()) {
                collectedPartialResult.acquire(
                    request.collectedPartialResult);
            }
            request.haveResultMetadata = true;
            request.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
        }

        uint32_t numBuffersReturned = result->num_output_buffers;
        if (result->input_buffer != NULL) {
            if (hasInputBufferInRequest) {
                numBuffersReturned += 1;
            } else {
                ALOGW("%s: Input buffer should be NULL if there is no input"
                        " buffer sent in the request",
                        __FUNCTION__);
            }
        }
        request.numBuffersLeft -= numBuffersReturned;
        if (request.numBuffersLeft < 0) {
            SET_ERR("Too many buffers returned for frame %d",
                    frameNumber);
            return;
        }

        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(result->result,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res == OK && entry.count == 1) {
            request.sensorTimestamp = entry.data.i64[0];
        }

        // If shutter event isn't received yet, do not return the pending output
        // buffers.
        request.pendingOutputBuffers.appendArray(result->output_buffers,
                result->num_output_buffers);
        if (shutterTimestamp != 0) {
            returnAndRemovePendingOutputBuffers(
                states.useHalBufManager, states.listener,
                request, states.sessionStatsBuilder);
        }

        if (result->result != NULL && !isPartialResult) {
            for (uint32_t i = 0; i < result->num_physcam_metadata; i++) {
                CameraMetadata physicalMetadata;
                physicalMetadata.append(result->physcam_metadata[i]);
                request.physicalMetadatas.push_back({String16(result->physcam_ids[i]),
                        physicalMetadata});
            }
            if (shutterTimestamp == 0) {
                request.pendingMetadata = result->result;
                request.collectedPartialResult = collectedPartialResult;
            } else if (request.hasCallback) {
                CameraMetadata metadata;
                metadata = result->result;
                sendCaptureResult(states, metadata, request.resultExtras,
                    collectedPartialResult, frameNumber,
                    hasInputBufferInRequest, request.zslCapture && request.stillCapture,
                    request.rotateAndCropAuto, request.cameraIdsWithZoom,
                    request.physicalMetadatas);
            }
        }
        removeInFlightRequestIfReadyLocked(states, idx);
    } // scope for states.inFlightLock

    if (result->input_buffer != NULL) {
        if (hasInputBufferInRequest) {
            Camera3Stream *stream =
                Camera3Stream::cast(result->input_buffer->stream);
            res = stream->returnInputBuffer(*(result->input_buffer));
            // Note: stream may be deallocated at this point, if this buffer was the
            // last reference to it.
            if (res != OK) {
                ALOGE("%s: RequestThread: Can't return input buffer for frame %d to"
                      " its stream: %s (%d)", __FUNCTION__,
                      frameNumber, strerror(-res), res);
            }
        } else {
            ALOGW("%s: Input buffer should be NULL if there is no input"
                    " buffer sent in the request, skipping input buffer return.",
                    __FUNCTION__);
        }
    }
}

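// Translate a HIDL V3_2 capture result, including any physical camera metadata
// delivered over the result metadata queue, into a camera_capture_result and
// hand it to processCaptureResult().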
void processOneCaptureResultLocked(
        CaptureOutputStates& states,
        const hardware::camera::device::V3_2::CaptureResult& result,
        const hardware::hidl_vec<
                hardware::camera::device::V3_4::PhysicalCameraMetadata> physicalCameraMetadata) {
    using hardware::camera::device::V3_2::StreamBuffer;
    using hardware::camera::device::V3_2::BufferStatus;
    std::unique_ptr<ResultMetadataQueue>& fmq = states.fmq;
    BufferRecordsInterface& bufferRecords = states.bufferRecordsIntf;
    camera_capture_result r;
    status_t res;
    r.frame_number = result.frameNumber;

    // Read and validate the result metadata.
    hardware::camera::device::V3_2::CameraMetadata resultMetadata;
    res = readOneCameraMetadataLocked(
            fmq, result.fmqResultSize,
            resultMetadata, result.result);
    if (res != OK) {
        ALOGE("%s: Frame %d: Failed to read capture result metadata",
                __FUNCTION__, result.frameNumber);
        return;
    }
    r.result = reinterpret_cast<const camera_metadata_t*>(resultMetadata.data());

    // Read and validate physical camera metadata
    size_t physResultCount = physicalCameraMetadata.size();
    std::vector<const char*> physCamIds(physResultCount);
    std::vector<const camera_metadata_t *> phyCamMetadatas(physResultCount);
    std::vector<hardware::camera::device::V3_2::CameraMetadata> physResultMetadata;
    physResultMetadata.resize(physResultCount);
    for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
        res = readOneCameraMetadataLocked(fmq, physicalCameraMetadata[i].fmqMetadataSize,
                physResultMetadata[i], physicalCameraMetadata[i].metadata);
        if (res != OK) {
            ALOGE("%s: Frame %d: Failed to read capture result metadata for camera %s",
                    __FUNCTION__, result.frameNumber,
                    physicalCameraMetadata[i].physicalCameraId.c_str());
            return;
        }
        physCamIds[i] = physicalCameraMetadata[i].physicalCameraId.c_str();
        phyCamMetadatas[i] = reinterpret_cast<const camera_metadata_t*>(
                physResultMetadata[i].data());
    }
    r.num_physcam_metadata = physResultCount;
    r.physcam_ids = physCamIds.data();
    r.physcam_metadata = phyCamMetadatas.data();

    std::vector<camera_stream_buffer_t> outputBuffers(result.outputBuffers.size());
    std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
    for (size_t i = 0; i < result.outputBuffers.size(); i++) {
        auto& bDst = outputBuffers[i];
        const StreamBuffer &bSrc = result.outputBuffers[i];

        sp<Camera3StreamInterface> stream = states.outputStreams.get(bSrc.streamId);
        if (stream == nullptr) {
            ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
                    __FUNCTION__, result.frameNumber, i, bSrc.streamId);
            return;
        }
        bDst.stream = stream->asHalStream();

        bool noBufferReturned = false;
        buffer_handle_t *buffer = nullptr;
        if (states.useHalBufManager) {
            // This is suspicious most of the time, but can be correct during flush, where the
            // HAL has to return the capture result before a buffer is requested.
            if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
                if (bSrc.status == BufferStatus::OK) {
                    ALOGE("%s: Frame %d: Buffer %zu: No bufferId for stream %d",
                            __FUNCTION__, result.frameNumber, i, bSrc.streamId);
                    // Still proceeds so other buffers can be returned
                }
                noBufferReturned = true;
            }
            if (noBufferReturned) {
                res = OK;
            } else {
                res = bufferRecords.popInflightRequestBuffer(bSrc.bufferId, &buffer);
            }
        } else {
            res = bufferRecords.popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
        }

        if (res != OK) {
            ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
                    __FUNCTION__, result.frameNumber, i, bSrc.streamId);
            return;
        }

        bDst.buffer = buffer;
        bDst.status = mapHidlBufferStatus(bSrc.status);
        bDst.acquire_fence = -1;
        if (bSrc.releaseFence == nullptr) {
            bDst.release_fence = -1;
        } else if (bSrc.releaseFence->numFds == 1) {
            if (noBufferReturned) {
                ALOGE("%s: got releaseFence without output buffer!", __FUNCTION__);
            }
            bDst.release_fence = dup(bSrc.releaseFence->data[0]);
        } else {
            ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
                    __FUNCTION__, result.frameNumber, i, bSrc.releaseFence->numFds);
            return;
        }
    }
    r.num_output_buffers = outputBuffers.size();
    r.output_buffers = outputBuffers.data();

    camera_stream_buffer_t inputBuffer;
    if (result.inputBuffer.streamId == -1) {
        r.input_buffer = nullptr;
    } else {
        if (states.inputStream->getId() != result.inputBuffer.streamId) {
            ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
                    result.frameNumber, result.inputBuffer.streamId);
            return;
        }
        inputBuffer.stream = states.inputStream->asHalStream();
        buffer_handle_t *buffer;
        res = bufferRecords.popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
                &buffer);
        if (res != OK) {
            ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
                    __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
            return;
        }
        inputBuffer.buffer = buffer;
        inputBuffer.status = mapHidlBufferStatus(result.inputBuffer.status);
        inputBuffer.acquire_fence = -1;
        if (result.inputBuffer.releaseFence == nullptr) {
            inputBuffer.release_fence = -1;
        } else if (result.inputBuffer.releaseFence->numFds == 1) {
            inputBuffer.release_fence = dup(result.inputBuffer.releaseFence->data[0]);
        } else {
            ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
                    __FUNCTION__, result.frameNumber, result.inputBuffer.releaseFence->numFds);
            return;
        }
        r.input_buffer = &inputBuffer;
    }

    r.partial_result = result.partialResult;

    processCaptureResult(states, &r);
}

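// Return a set of output buffers to their streams, issuing ERROR_BUFFER
// notifications and updating session statistics as needed.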
void returnOutputBuffers(
        bool useHalBufManager,
        sp<NotificationListener> listener,
        const camera_stream_buffer_t *outputBuffers, size_t numBuffers,
        nsecs_t timestamp, bool requested, nsecs_t requestTimeNs,
        SessionStatsBuilder& sessionStatsBuilder, bool timestampIncreasing,
        const SurfaceMap& outputSurfaces,
        const CaptureResultExtras &inResultExtras,
        ERROR_BUF_STRATEGY errorBufStrategy, int32_t transform) {

    for (size_t i = 0; i < numBuffers; i++)
    {
        Camera3StreamInterface *stream = Camera3Stream::cast(outputBuffers[i].stream);
        int streamId = stream->getId();

        // Call notify(ERROR_BUFFER) if necessary.
        if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR &&
                errorBufStrategy == ERROR_BUF_RETURN_NOTIFY) {
            if (listener != nullptr) {
                CaptureResultExtras extras = inResultExtras;
                extras.errorStreamId = streamId;
                listener->notifyError(
                        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                        extras);
            }
        }

        if (outputBuffers[i].buffer == nullptr) {
            if (!useHalBufManager) {
                // With the HAL buffer management API, the HAL sometimes has to return buffers
                // that do not have an output buffer handle filled in yet. This is, however,
                // illegal if the HAL buffer management API is not in use.
                ALOGE("%s: cannot return a null buffer!", __FUNCTION__);
            } else {
                if (requested) {
                    sessionStatsBuilder.incCounter(streamId, /*dropped*/true, 0);
                }
            }
            continue;
        }

        const auto& it = outputSurfaces.find(streamId);
        status_t res = OK;

        // Do not return the buffer if the buffer status is error, and the error
        // buffer strategy is CACHE.
        if (outputBuffers[i].status != CAMERA_BUFFER_STATUS_ERROR ||
                errorBufStrategy != ERROR_BUF_CACHE) {
            if (it != outputSurfaces.end()) {
                res = stream->returnBuffer(
                        outputBuffers[i], timestamp, timestampIncreasing, it->second,
                        inResultExtras.frameNumber, transform);
            } else {
                res = stream->returnBuffer(
                        outputBuffers[i], timestamp, timestampIncreasing,
                        std::vector<size_t> (), inResultExtras.frameNumber, transform);
            }
        }
        // Note: stream may be deallocated at this point, if this buffer was
        // the last reference to it.
        bool dropped = false;
        if (res == NO_INIT || res == DEAD_OBJECT) {
            ALOGV("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
            sessionStatsBuilder.stopCounter(streamId);
        } else if (res != OK) {
            ALOGE("Can't return buffer to its stream: %s (%d)", strerror(-res), res);
            dropped = true;
        } else {
            if (outputBuffers[i].status == CAMERA_BUFFER_STATUS_ERROR || timestamp == 0) {
                dropped = true;
            }
        }
        if (requested) {
            nsecs_t bufferTimeNs = systemTime();
            int32_t captureLatencyMs = ns2ms(bufferTimeNs - requestTimeNs);
            sessionStatsBuilder.incCounter(streamId, dropped, captureLatencyMs);
        }

        // Long processing consumers can cause returnBuffer timeout for shared stream
        // If that happens, cancel the buffer and send a buffer error to client
        if (it != outputSurfaces.end() && res == TIMED_OUT &&
                outputBuffers[i].status == CAMERA_BUFFER_STATUS_OK) {
            // cancel the buffer
            camera_stream_buffer_t sb = outputBuffers[i];
            sb.status = CAMERA_BUFFER_STATUS_ERROR;
            stream->returnBuffer(sb, /*timestamp*/0,
                    timestampIncreasing, std::vector<size_t> (),
                    inResultExtras.frameNumber, transform);

            if (listener != nullptr) {
                CaptureResultExtras extras = inResultExtras;
                extras.errorStreamId = streamId;
                listener->notifyError(
                        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER,
                        extras);
            }
        }
    }
}

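// Return the pending output buffers of an in-flight request and drop them from
// its pending list, except for error buffers that the current error-buffer
// strategy caches.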
void returnAndRemovePendingOutputBuffers(bool useHalBufManager,
        sp<NotificationListener> listener, InFlightRequest& request,
        SessionStatsBuilder& sessionStatsBuilder) {
    bool timestampIncreasing = !(request.zslCapture || request.hasInputBuffer);
    returnOutputBuffers(useHalBufManager, listener,
            request.pendingOutputBuffers.array(),
            request.pendingOutputBuffers.size(),
            request.shutterTimestamp, /*requested*/true,
            request.requestTimeNs, sessionStatsBuilder, timestampIncreasing,
            request.outputSurfaces, request.resultExtras,
            request.errorBufStrategy, request.transform);

    // Remove error buffers that are not cached.
    for (auto iter = request.pendingOutputBuffers.begin();
            iter != request.pendingOutputBuffers.end(); ) {
        if (request.errorBufStrategy != ERROR_BUF_CACHE ||
                iter->status != CAMERA_BUFFER_STATUS_ERROR) {
            iter = request.pendingOutputBuffers.erase(iter);
        } else {
            iter++;
        }
    }
}

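// Handle a shutter notification from the HAL: record the shutter timestamp,
// verify shutter ordering, notify the listener, and send any capture result
// and output buffers that were waiting for the timestamp.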
void notifyShutter(CaptureOutputStates& states, const camera_shutter_msg_t &msg) {
    ATRACE_CALL();
    ssize_t idx;

    // Set timestamp for the request in the in-flight tracking
    // and get the request ID to send upstream
    {
        std::lock_guard<std::mutex> l(states.inflightLock);
        InFlightRequestMap& inflightMap = states.inflightMap;
        idx = inflightMap.indexOfKey(msg.frame_number);
        if (idx >= 0) {
            InFlightRequest &r = inflightMap.editValueAt(idx);

            // Verify ordering of shutter notifications
            {
                std::lock_guard<std::mutex> l(states.outputLock);
                // TODO: need to track errors for tighter bounds on expected frame number.
                if (r.hasInputBuffer) {
                    if (msg.frame_number < states.nextReprocShutterFrameNum) {
                        SET_ERR("Reprocess shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextReprocShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextReprocShutterFrameNum = msg.frame_number + 1;
                } else if (r.zslCapture && r.stillCapture) {
                    if (msg.frame_number < states.nextZslShutterFrameNum) {
                        SET_ERR("ZSL still capture shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextZslShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextZslShutterFrameNum = msg.frame_number + 1;
                } else {
                    if (msg.frame_number < states.nextShutterFrameNum) {
                        SET_ERR("Shutter notification out-of-order. Expected "
                                "notification for frame %d, got frame %d",
                                states.nextShutterFrameNum, msg.frame_number);
                        return;
                    }
                    states.nextShutterFrameNum = msg.frame_number + 1;
                }
            }

            r.shutterTimestamp = msg.timestamp;
            if (r.hasCallback) {
                ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
                    states.cameraId.string(), __FUNCTION__,
                    msg.frame_number, r.resultExtras.requestId, msg.timestamp);
                // Call listener, if any
                if (states.listener != nullptr) {
                    r.resultExtras.lastCompletedRegularFrameNumber =
                            states.lastCompletedRegularFrameNumber;
                    r.resultExtras.lastCompletedReprocessFrameNumber =
                            states.lastCompletedReprocessFrameNumber;
                    r.resultExtras.lastCompletedZslFrameNumber =
                            states.lastCompletedZslFrameNumber;
                    states.listener->notifyShutter(r.resultExtras, msg.timestamp);
                }
                // send pending result and buffers
                sendCaptureResult(states,
                    r.pendingMetadata, r.resultExtras,
                    r.collectedPartialResult, msg.frame_number,
                    r.hasInputBuffer, r.zslCapture && r.stillCapture,
                    r.rotateAndCropAuto, r.cameraIdsWithZoom, r.physicalMetadatas);
            }
            returnAndRemovePendingOutputBuffers(
                    states.useHalBufManager, states.listener, r, states.sessionStatsBuilder);

            removeInFlightRequestIfReadyLocked(states, idx);
        }
    }
    if (idx < 0) {
        SET_ERR("Shutter notification for non-existent frame number %d",
                msg.frame_number);
    }
}

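// Handle an error notification from the HAL: map the HAL error code to an
// ICameraDeviceCallbacks error code, update the affected in-flight request,
// and notify the listener.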
void notifyError(CaptureOutputStates& states, const camera_error_msg_t &msg) {
    ATRACE_CALL();
    // Map camera HAL error codes to ICameraDeviceCallback error codes
    // Index into this with the HAL error code
    static const int32_t halErrorMap[CAMERA_MSG_NUM_ERRORS] = {
        // 0 = Unused error code
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR,
        // 1 = CAMERA_MSG_ERROR_DEVICE
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE,
        // 2 = CAMERA_MSG_ERROR_REQUEST
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
        // 3 = CAMERA_MSG_ERROR_RESULT
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT,
        // 4 = CAMERA_MSG_ERROR_BUFFER
        hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER
    };

    int32_t errorCode =
            ((msg.error_code >= 0) &&
                    (msg.error_code < CAMERA_MSG_NUM_ERRORS)) ?
            halErrorMap[msg.error_code] :
            hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_INVALID_ERROR;

    int streamId = 0;
    String16 physicalCameraId;
    if (msg.error_stream != nullptr) {
        Camera3Stream *stream =
                Camera3Stream::cast(msg.error_stream);
        streamId = stream->getId();
        physicalCameraId = String16(stream->physicalCameraId());
    }
    ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
            states.cameraId.string(), __FUNCTION__, msg.frame_number,
            streamId, msg.error_code);

    CaptureResultExtras resultExtras;
    switch (errorCode) {
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE:
            // SET_ERR calls into listener to notify application
            SET_ERR("Camera HAL reported serious device error");
            break;
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST:
        case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT:
            {
                std::lock_guard<std::mutex> l(states.inflightLock);
                ssize_t idx = states.inflightMap.indexOfKey(msg.frame_number);
                if (idx >= 0) {
                    InFlightRequest &r = states.inflightMap.editValueAt(idx);
                    r.requestStatus = msg.error_code;
                    resultExtras = r.resultExtras;
                    bool physicalDeviceResultError = false;
                    if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT ==
                            errorCode) {
                        if (physicalCameraId.size() > 0) {
                            String8 cameraId(physicalCameraId);
                            bool validPhysicalCameraId =
                                    erasePhysicalCameraIdSet(r.physicalCameraIds, cameraId);
                            if (!validPhysicalCameraId) {
                                ALOGE("%s: Reported result failure for physical camera device: %s"
                                        " which is not part of the respective request!",
                                        __FUNCTION__, cameraId.string());
1132                                 break;
1133                             }
1134                             resultExtras.errorPhysicalCameraId = physicalCameraId;
1135                             physicalDeviceResultError = true;
1136                         }
1137                     }
1138 
1139                     if (!physicalDeviceResultError) {
1140                         r.skipResultMetadata = true;
1141                         if (hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_RESULT
1142                                 == errorCode) {
1143                             r.errorBufStrategy = ERROR_BUF_RETURN_NOTIFY;
1144                         } else {
1145                             // errorCode is ERROR_CAMERA_REQUEST
1146                             r.errorBufStrategy = ERROR_BUF_RETURN;
1147                         }
1148 
1149                         // Check whether all buffers for this request have been returned.
1150                         // If so, remove the in-flight request.
1151                         removeInFlightRequestIfReadyLocked(states, idx);
1152                     }
1153                 } else {
1154                     resultExtras.frameNumber = msg.frame_number;
1155                     ALOGE("Camera %s: %s: cannot find in-flight request for "
1156                             "frame %" PRId64 " error notification", states.cameraId.string(),
1157                             __FUNCTION__, resultExtras.frameNumber);
1158                 }
1159             }
1160             resultExtras.errorStreamId = streamId;
1161             if (states.listener != nullptr) {
1162                 states.listener->notifyError(errorCode, resultExtras);
1163             } else {
1164                 ALOGE("Camera %s: %s: no listener available",
1165                         states.cameraId.string(), __FUNCTION__);
1166             }
1167             break;
1168         case hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_BUFFER:
1169             // Do not depend on HAL ERROR_CAMERA_BUFFER to send buffer error
1170             // callback to the app. Rather, use STATUS_ERROR of image buffers.
1171             break;
1172         default:
1173             // SET_ERR calls notifyError
1174             SET_ERR("Unknown error message from HAL: %d", msg.error_code);
1175             break;
1176     }
1177 }
1178 
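// Entry point for HAL notifications in the internal camera_notify_msg format:
// routes error messages to notifyError() and shutter messages to notifyShutter().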
1179 void notify(CaptureOutputStates& states, const camera_notify_msg *msg) {
1180     switch (msg->type) {
1181         case CAMERA_MSG_ERROR: {
1182             notifyError(states, msg->message.error);
1183             break;
1184         }
1185         case CAMERA_MSG_SHUTTER: {
1186             notifyShutter(states, msg->message.shutter);
1187             break;
1188         }
1189         default:
1190             SET_ERR("Unknown notify message from HAL: %d",
1191                     msg->type);
1192     }
1193 }
1194 
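// Translate a HIDL (device@3.2) NotifyMsg into the internal camera_notify_msg
// representation, resolving the error stream id to a HAL stream pointer, and
// forward it to the generic notify() above.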
1195 void notify(CaptureOutputStates& states,
1196         const hardware::camera::device::V3_2::NotifyMsg& msg) {
1197     using android::hardware::camera::device::V3_2::MsgType;
1198     using android::hardware::camera::device::V3_2::ErrorCode;
1199 
1200     ATRACE_CALL();
1201     camera_notify_msg m;
1202     switch (msg.type) {
1203         case MsgType::ERROR:
1204             m.type = CAMERA_MSG_ERROR;
1205             m.message.error.frame_number = msg.msg.error.frameNumber;
1206             if (msg.msg.error.errorStreamId >= 0) {
1207                 sp<Camera3StreamInterface> stream =
1208                         states.outputStreams.get(msg.msg.error.errorStreamId);
1209                 if (stream == nullptr) {
1210                     ALOGE("%s: Frame %d: Invalid error stream id %d", __FUNCTION__,
1211                             m.message.error.frame_number, msg.msg.error.errorStreamId);
1212                     return;
1213                 }
1214                 m.message.error.error_stream = stream->asHalStream();
1215             } else {
1216                 m.message.error.error_stream = nullptr;
1217             }
1218             switch (msg.msg.error.errorCode) {
1219                 case ErrorCode::ERROR_DEVICE:
1220                     m.message.error.error_code = CAMERA_MSG_ERROR_DEVICE;
1221                     break;
1222                 case ErrorCode::ERROR_REQUEST:
1223                     m.message.error.error_code = CAMERA_MSG_ERROR_REQUEST;
1224                     break;
1225                 case ErrorCode::ERROR_RESULT:
1226                     m.message.error.error_code = CAMERA_MSG_ERROR_RESULT;
1227                     break;
1228                 case ErrorCode::ERROR_BUFFER:
1229                     m.message.error.error_code = CAMERA_MSG_ERROR_BUFFER;
1230                     break;
1231             }
1232             break;
1233         case MsgType::SHUTTER:
1234             m.type = CAMERA_MSG_SHUTTER;
1235             m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
1236             m.message.shutter.timestamp = msg.msg.shutter.timestamp;
1237             break;
1238     }
1239     notify(states, &m);
1240 }
1241 
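// device@3.5 requestStreamBuffers callback used when the HAL buffer manager is
// enabled: validates the incoming buffer requests, acquires buffers from each
// requested output stream, registers them as in-flight, and replies with OK,
// FAILED_PARTIAL, or an appropriate failure status.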
1242 void requestStreamBuffers(RequestBufferStates& states,
1243         const hardware::hidl_vec<hardware::camera::device::V3_5::BufferRequest>& bufReqs,
1244         hardware::camera::device::V3_5::ICameraDeviceCallback::requestStreamBuffers_cb _hidl_cb) {
1245     using android::hardware::camera::device::V3_2::BufferStatus;
1246     using android::hardware::camera::device::V3_2::StreamBuffer;
1247     using android::hardware::camera::device::V3_5::BufferRequestStatus;
1248     using android::hardware::camera::device::V3_5::StreamBufferRet;
1249     using android::hardware::camera::device::V3_5::StreamBufferRequestError;
1250 
1251     std::lock_guard<std::mutex> lock(states.reqBufferLock);
1252 
1253     hardware::hidl_vec<StreamBufferRet> bufRets;
1254     if (!states.useHalBufManager) {
1255         ALOGE("%s: Camera %s does not support HAL buffer management",
1256                 __FUNCTION__, states.cameraId.string());
1257         _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
1258         return;
1259     }
1260 
1261     SortedVector<int32_t> streamIds;
1262     ssize_t sz = streamIds.setCapacity(bufReqs.size());
1263     if (sz < 0 || static_cast<size_t>(sz) != bufReqs.size()) {
1264         ALOGE("%s: failed to allocate memory for %zu buffer requests",
1265                 __FUNCTION__, bufReqs.size());
1266         _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
1267         return;
1268     }
1269 
1270     if (bufReqs.size() > states.outputStreams.size()) {
1271         ALOGE("%s: too many buffer requests (%zu > # of output streams %zu)",
1272                 __FUNCTION__, bufReqs.size(), states.outputStreams.size());
1273         _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
1274         return;
1275     }
1276 
1277     // Check for repeated streamId
1278     for (const auto& bufReq : bufReqs) {
1279         if (streamIds.indexOf(bufReq.streamId) != NAME_NOT_FOUND) {
1280             ALOGE("%s: Stream %d appears multiple times in buffer requests",
1281                     __FUNCTION__, bufReq.streamId);
1282             _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, bufRets);
1283             return;
1284         }
1285         streamIds.add(bufReq.streamId);
1286     }
1287 
1288     if (!states.reqBufferIntf.startRequestBuffer()) {
1289         ALOGE("%s: buffer requests are disallowed while the camera service is configuring",
1290                 __FUNCTION__);
1291         _hidl_cb(BufferRequestStatus::FAILED_CONFIGURING, bufRets);
1292         return;
1293     }
1294 
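    // All argument checks passed; size the reply vector and try to fulfill each
    // stream's request independently so one failing stream does not block others.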
1295     bufRets.resize(bufReqs.size());
1296 
1297     bool allReqsSucceeds = true;
1298     bool oneReqSucceeds = false;
1299     for (size_t i = 0; i < bufReqs.size(); i++) {
1300         const auto& bufReq = bufReqs[i];
1301         auto& bufRet = bufRets[i];
1302         int32_t streamId = bufReq.streamId;
1303         sp<Camera3OutputStreamInterface> outputStream = states.outputStreams.get(streamId);
1304         if (outputStream == nullptr) {
1305             ALOGE("%s: Output stream id %d not found!", __FUNCTION__, streamId);
1306             hardware::hidl_vec<StreamBufferRet> emptyBufRets;
1307             _hidl_cb(BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS, emptyBufRets);
1308             states.reqBufferIntf.endRequestBuffer();
1309             return;
1310         }
1311 
1312         bufRet.streamId = streamId;
1313         if (outputStream->isAbandoned()) {
1314             bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
1315             allReqsSucceeds = false;
1316             continue;
1317         }
1318 
1319         size_t handOutBufferCount = outputStream->getOutstandingBuffersCount();
1320         uint32_t numBuffersRequested = bufReq.numBuffersRequested;
1321         size_t totalHandout = handOutBufferCount + numBuffersRequested;
1322         uint32_t maxBuffers = outputStream->asHalStream()->max_buffers;
1323         if (totalHandout > maxBuffers) {
1324             // Not able to allocate enough buffers. Exit early for this stream.
1325             ALOGE("%s: requested too many buffers for stream %d: at HAL: %zu + requesting: %u"
1326                     " > max: %u", __FUNCTION__, streamId, handOutBufferCount,
1327                     numBuffersRequested, maxBuffers);
1328             bufRet.val.error(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
1329             allReqsSucceeds = false;
1330             continue;
1331         }
1332 
1333         hardware::hidl_vec<StreamBuffer> tmpRetBuffers(numBuffersRequested);
1334         bool currentReqSucceeds = true;
1335         std::vector<camera_stream_buffer_t> streamBuffers(numBuffersRequested);
1336         std::vector<buffer_handle_t> newBuffers;
1337         size_t numAllocatedBuffers = 0;
1338         size_t numPushedInflightBuffers = 0;
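        // Acquire the requested number of buffers one at a time; any failure
        // aborts this stream's request and triggers the rollback path below.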
1339         for (size_t b = 0; b < numBuffersRequested; b++) {
1340             camera_stream_buffer_t& sb = streamBuffers[b];
1341             // Since this method can run concurrently with the request thread,
1342             // we need to update the wait duration every time we call getBuffer.
1343             nsecs_t waitDuration =  states.reqBufferIntf.getWaitDuration();
1344             status_t res = outputStream->getBuffer(&sb, waitDuration);
1345             if (res != OK) {
1346                 if (res == NO_INIT || res == DEAD_OBJECT) {
1347                     ALOGV("%s: Can't get output buffer for stream %d: %s (%d)",
1348                             __FUNCTION__, streamId, strerror(-res), res);
1349                     bufRet.val.error(StreamBufferRequestError::STREAM_DISCONNECTED);
1350                     states.sessionStatsBuilder.stopCounter(streamId);
1351                 } else {
1352                     ALOGE("%s: Can't get output buffer for stream %d: %s (%d)",
1353                             __FUNCTION__, streamId, strerror(-res), res);
1354                     if (res == TIMED_OUT || res == NO_MEMORY) {
1355                         bufRet.val.error(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
1356                     } else {
1357                         bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
1358                     }
1359                 }
1360                 currentReqSucceeds = false;
1361                 break;
1362             }
1363             numAllocatedBuffers++;
1364 
1365             buffer_handle_t *buffer = sb.buffer;
1366             auto pair = states.bufferRecordsIntf.getBufferId(*buffer, streamId);
1367             bool isNewBuffer = pair.first;
1368             uint64_t bufferId = pair.second;
1369             StreamBuffer& hBuf = tmpRetBuffers[b];
1370 
1371             hBuf.streamId = streamId;
1372             hBuf.bufferId = bufferId;
1373             hBuf.buffer = (isNewBuffer) ? *buffer : nullptr;
1374             hBuf.status = BufferStatus::OK;
1375             hBuf.releaseFence = nullptr;
1376             if (isNewBuffer) {
1377                 newBuffers.push_back(*buffer);
1378             }
1379 
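            // Wrap the acquire fence fd, if any, in a native_handle owned by the
            // returned HIDL StreamBuffer.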
1380             native_handle_t *acquireFence = nullptr;
1381             if (sb.acquire_fence != -1) {
1382                 acquireFence = native_handle_create(1,0);
1383                 acquireFence->data[0] = sb.acquire_fence;
1384             }
1385             hBuf.acquireFence.setTo(acquireFence, /*shouldOwn*/true);
1386             hBuf.releaseFence = nullptr;
1387 
1388             res = states.bufferRecordsIntf.pushInflightRequestBuffer(bufferId, buffer, streamId);
1389             if (res != OK) {
1390                 ALOGE("%s: Can't register request buffer for stream %d: %s (%d)",
1391                         __FUNCTION__, streamId, strerror(-res), res);
1392                 bufRet.val.error(StreamBufferRequestError::UNKNOWN_ERROR);
1393                 currentReqSucceeds = false;
1394                 break;
1395             }
1396             numPushedInflightBuffers++;
1397         }
1398         if (currentReqSucceeds) {
1399             bufRet.val.buffers(std::move(tmpRetBuffers));
1400             oneReqSucceeds = true;
1401         } else {
1402             allReqsSucceeds = false;
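            // Roll back this stream's partial work: drop the in-flight records for
            // buffers already pushed, return all acquired buffers with error status,
            // and remove cache entries created for newly seen buffer handles.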
1403             for (size_t b = 0; b < numPushedInflightBuffers; b++) {
1404                 StreamBuffer& hBuf = tmpRetBuffers[b];
1405                 buffer_handle_t* buffer;
1406                 status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
1407                         hBuf.bufferId, &buffer);
1408                 if (res != OK) {
1409                     SET_ERR("popInflightRequestBuffer failed for stream %d: %s (%d)",
1410                             streamId, strerror(-res), res);
1411                 }
1412             }
1413             for (size_t b = 0; b < numAllocatedBuffers; b++) {
1414                 camera_stream_buffer_t& sb = streamBuffers[b];
1415                 sb.acquire_fence = -1;
1416                 sb.status = CAMERA_BUFFER_STATUS_ERROR;
1417             }
1418             returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
1419                     streamBuffers.data(), numAllocatedBuffers, /*timestamp*/0, /*requested*/false,
1420                     /*requestTimeNs*/0, states.sessionStatsBuilder);
1421             for (auto buf : newBuffers) {
1422                 states.bufferRecordsIntf.removeOneBufferCache(streamId, buf);
1423             }
1424         }
1425     }
1426 
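    // Overall status: OK if every request succeeded, FAILED_PARTIAL if at least
    // one did, FAILED_UNKNOWN if none did.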
1427     _hidl_cb(allReqsSucceeds ? BufferRequestStatus::OK :
1428             oneReqSucceeds ? BufferRequestStatus::FAILED_PARTIAL :
1429                              BufferRequestStatus::FAILED_UNKNOWN,
1430             bufRets);
1431     states.reqBufferIntf.endRequestBuffer();
1432 }
1433 
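// device@3.5 returnStreamBuffers callback: the HAL hands back buffers it no longer
// needs; each one is popped from the in-flight request-buffer records and returned
// to its output stream with error status.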
1434 void returnStreamBuffers(ReturnBufferStates& states,
1435         const hardware::hidl_vec<hardware::camera::device::V3_2::StreamBuffer>& buffers) {
1436     if (!states.useHalBufManager) {
1437         ALOGE("%s: Camera %s does not support HAL buffer management",
1438                 __FUNCTION__, states.cameraId.string());
1439         return;
1440     }
1441 
1442     for (const auto& buf : buffers) {
1443         if (buf.bufferId == BUFFER_ID_NO_BUFFER) {
1444             ALOGE("%s: cannot return a buffer without bufferId", __FUNCTION__);
1445             continue;
1446         }
1447 
1448         buffer_handle_t* buffer;
1449         status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(buf.bufferId, &buffer);
1450 
1451         if (res != OK) {
1452             ALOGE("%s: cannot find in-flight buffer %" PRIu64 " for stream %d",
1453                     __FUNCTION__, buf.bufferId, buf.streamId);
1454             continue;
1455         }
1456 
1457         camera_stream_buffer_t streamBuffer;
1458         streamBuffer.buffer = buffer;
1459         streamBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
1460         streamBuffer.acquire_fence = -1;
1461         streamBuffer.release_fence = -1;
1462 
1463         if (buf.releaseFence == nullptr) {
1464             streamBuffer.release_fence = -1;
1465         } else if (buf.releaseFence->numFds == 1) {
1466             streamBuffer.release_fence = dup(buf.releaseFence->data[0]);
1467         } else {
1468             ALOGE("%s: Invalid release fence, fd count is %d, not 1",
1469                     __FUNCTION__, buf.releaseFence->numFds);
1470             continue;
1471         }
1472 
1473         sp<Camera3StreamInterface> stream = states.outputStreams.get(buf.streamId);
1474         if (stream == nullptr) {
1475             ALOGE("%s: Output stream id %d not found!", __FUNCTION__, buf.streamId);
1476             continue;
1477         }
1478         streamBuffer.stream = stream->asHalStream();
1479         returnOutputBuffers(states.useHalBufManager, /*listener*/nullptr,
1480                 &streamBuffer, /*size*/1, /*timestamp*/ 0, /*requested*/false,
1481                 /*requestTimeNs*/0, states.sessionStatsBuilder);
1482     }
1483 }
1484 
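// On flush, return every buffer the camera service or HAL still holds: first the
// buffers cached in the in-flight map, then the outstanding per-frame buffers and
// HAL-buffer-manager buffers that the HAL has not returned yet.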
1485 void flushInflightRequests(FlushInflightReqStates& states) {
1486     ATRACE_CALL();
1487     { // First return buffers cached in inFlightMap
1488         std::lock_guard<std::mutex> l(states.inflightLock);
1489         for (size_t idx = 0; idx < states.inflightMap.size(); idx++) {
1490             const InFlightRequest &request = states.inflightMap.valueAt(idx);
1491             returnOutputBuffers(
1492                 states.useHalBufManager, states.listener,
1493                 request.pendingOutputBuffers.array(),
1494                 request.pendingOutputBuffers.size(), 0, /*requested*/true,
1495                 request.requestTimeNs, states.sessionStatsBuilder, /*timestampIncreasing*/true,
1496                 request.outputSurfaces, request.resultExtras, request.errorBufStrategy);
1497             ALOGW("%s: Frame %d | Timestamp: %" PRId64 ", metadata"
1498                     " arrived: %s, buffers left: %d", __FUNCTION__,
1499                     states.inflightMap.keyAt(idx), request.shutterTimestamp,
1500                     request.haveResultMetadata ? "true" : "false",
1501                     request.numBuffersLeft);
1502         }
1503 
1504         states.inflightMap.clear();
1505         states.inflightIntf.onInflightMapFlushedLocked();
1506     }
1507 
1508     // Then return all inflight buffers not returned by HAL
1509     std::vector<std::pair<int32_t, int32_t>> inflightKeys;
1510     states.flushBufferIntf.getInflightBufferKeys(&inflightKeys);
1511 
1512     // Inflight buffers for HAL buffer manager
1513     std::vector<uint64_t> inflightRequestBufferKeys;
1514     states.flushBufferIntf.getInflightRequestBufferKeys(&inflightRequestBufferKeys);
1515 
1516     // (streamId, frameNumber, buffer_handle_t*) tuple for all inflight buffers.
1517     // frameNumber will be -1 for buffers from HAL buffer manager
1518     std::vector<std::tuple<int32_t, int32_t, buffer_handle_t*>> inflightBuffers;
1519     inflightBuffers.reserve(inflightKeys.size() + inflightRequestBufferKeys.size());
1520 
1521     for (auto& pair : inflightKeys) {
1522         int32_t frameNumber = pair.first;
1523         int32_t streamId = pair.second;
1524         buffer_handle_t* buffer;
1525         status_t res = states.bufferRecordsIntf.popInflightBuffer(frameNumber, streamId, &buffer);
1526         if (res != OK) {
1527             ALOGE("%s: Frame %d: No in-flight buffer for stream %d",
1528                     __FUNCTION__, frameNumber, streamId);
1529             continue;
1530         }
1531         inflightBuffers.push_back(std::make_tuple(streamId, frameNumber, buffer));
1532     }
1533 
1534     for (auto& bufferId : inflightRequestBufferKeys) {
1535         int32_t streamId = -1;
1536         buffer_handle_t* buffer = nullptr;
1537         status_t res = states.bufferRecordsIntf.popInflightRequestBuffer(
1538                 bufferId, &buffer, &streamId);
1539         if (res != OK) {
1540             ALOGE("%s: cannot find in-flight buffer %" PRIu64, __FUNCTION__, bufferId);
1541             continue;
1542         }
1543         inflightBuffers.push_back(std::make_tuple(streamId, /*frameNumber*/-1, buffer));
1544     }
1545 
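    // Match each outstanding buffer to its stream (including streams that have
    // since been deleted) and return it with error status.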
1546     std::vector<sp<Camera3StreamInterface>> streams = states.flushBufferIntf.getAllStreams();
1547 
1548     for (auto& tuple : inflightBuffers) {
1549         status_t res = OK;
1550         int32_t streamId = std::get<0>(tuple);
1551         int32_t frameNumber = std::get<1>(tuple);
1552         buffer_handle_t* buffer = std::get<2>(tuple);
1553 
1554         camera_stream_buffer_t streamBuffer;
1555         streamBuffer.buffer = buffer;
1556         streamBuffer.status = CAMERA_BUFFER_STATUS_ERROR;
1557         streamBuffer.acquire_fence = -1;
1558         streamBuffer.release_fence = -1;
1559 
1560         for (auto& stream : streams) {
1561             if (streamId == stream->getId()) {
1562                 // Return the buffer to its stream; the stream may already have been deleted.
1563                 camera_stream* halStream = stream->asHalStream();
1564                 streamBuffer.stream = halStream;
1565                 switch (halStream->stream_type) {
1566                     case CAMERA_STREAM_OUTPUT:
1567                         res = stream->returnBuffer(streamBuffer, /*timestamp*/ 0,
1568                                 /*timestampIncreasing*/true,
1569                                 std::vector<size_t> (), frameNumber);
1570                         if (res != OK) {
1571                             ALOGE("%s: Can't return output buffer for frame %d to"
1572                                   " stream %d: %s (%d)",  __FUNCTION__,
1573                                   frameNumber, streamId, strerror(-res), res);
1574                         }
1575                         break;
1576                     case CAMERA_STREAM_INPUT:
1577                         res = stream->returnInputBuffer(streamBuffer);
1578                         if (res != OK) {
1579                             ALOGE("%s: Can't return input buffer for frame %d to"
1580                                   " stream %d: %s (%d)",  __FUNCTION__,
1581                                   frameNumber, streamId, strerror(-res), res);
1582                         }
1583                         break;
1584                     default: // Bi-directional stream is deprecated
1585                         ALOGE("%s: stream %d has unknown stream type %d",
1586                                 __FUNCTION__, streamId, halStream->stream_type);
1587                         break;
1588                 }
1589                 break;
1590             }
1591         }
1592     }
1593 }
1594 
1595 } // namespace camera3
1596 } // namespace android
1597