1 /*
2 * Copyright (C) 2012-2018 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2-JpegProcessor"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <netinet/in.h>
22
23 #include <binder/MemoryBase.h>
24 #include <binder/MemoryHeapBase.h>
25 #include <utils/Log.h>
26 #include <utils/Trace.h>
27 #include <gui/Surface.h>
28
29 #include "common/CameraDeviceBase.h"
30 #include "api1/Camera2Client.h"
31 #include "api1/client2/Camera2Heap.h"
32 #include "api1/client2/CaptureSequencer.h"
33 #include "api1/client2/JpegProcessor.h"
34
35 namespace android {
36 namespace camera2 {
37
38 using android::camera3::CAMERA_STREAM_ROTATION_0;
39
JpegProcessor(sp<Camera2Client> client,wp<CaptureSequencer> sequencer)40 JpegProcessor::JpegProcessor(
41 sp<Camera2Client> client,
42 wp<CaptureSequencer> sequencer):
43 Thread(false),
44 mDevice(client->getCameraDevice()),
45 mSequencer(sequencer),
46 mId(client->getCameraId()),
47 mCaptureDone(false),
48 mCaptureSuccess(false),
49 mCaptureStreamId(NO_STREAM) {
50 }
51
// Destructor: tears down the capture stream and releases the consumer,
// window, and heap via deleteStream() (which is a no-op if no stream exists).
JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}
56
onFrameAvailable(const BufferItem &)57 void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
58 Mutex::Autolock l(mInputMutex);
59 ALOGV("%s", __FUNCTION__);
60 if (!mCaptureDone) {
61 mCaptureDone = true;
62 mCaptureSuccess = true;
63 mCaptureDoneSignal.signal();
64 }
65 }
66
updateStream(const Parameters & params)67 status_t JpegProcessor::updateStream(const Parameters ¶ms) {
68 ATRACE_CALL();
69 ALOGV("%s", __FUNCTION__);
70 status_t res;
71
72 Mutex::Autolock l(mInputMutex);
73
74 sp<CameraDeviceBase> device = mDevice.promote();
75 if (device == 0) {
76 ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
77 return INVALID_OPERATION;
78 }
79
80 // Find out buffer size for JPEG
81 ssize_t maxJpegSize = device->getJpegBufferSize(device->infoPhysical(String8("")),
82 params.pictureWidth, params.pictureHeight);
83 if (maxJpegSize <= 0) {
84 ALOGE("%s: Camera %d: Jpeg buffer size (%zu) is invalid ",
85 __FUNCTION__, mId, maxJpegSize);
86 return INVALID_OPERATION;
87 }
88
89 if (mCaptureConsumer == 0) {
90 // Create CPU buffer queue endpoint
91 sp<IGraphicBufferProducer> producer;
92 sp<IGraphicBufferConsumer> consumer;
93 BufferQueue::createBufferQueue(&producer, &consumer);
94 mCaptureConsumer = new CpuConsumer(consumer, 1);
95 mCaptureConsumer->setFrameAvailableListener(this);
96 mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
97 mCaptureWindow = new Surface(producer);
98 }
99
100 // Since ashmem heaps are rounded up to page size, don't reallocate if
101 // the capture heap isn't exactly the same size as the required JPEG buffer
102 const size_t HEAP_SLACK_FACTOR = 2;
103 if (mCaptureHeap == 0 ||
104 (mCaptureHeap->getSize() < static_cast<size_t>(maxJpegSize)) ||
105 (mCaptureHeap->getSize() >
106 static_cast<size_t>(maxJpegSize) * HEAP_SLACK_FACTOR) ) {
107 // Create memory for API consumption
108 mCaptureHeap.clear();
109 mCaptureHeap =
110 new MemoryHeapBase(maxJpegSize, 0, "Camera2Client::CaptureHeap");
111 if (mCaptureHeap->getSize() == 0) {
112 ALOGE("%s: Camera %d: Unable to allocate memory for capture",
113 __FUNCTION__, mId);
114 return NO_MEMORY;
115 }
116 }
117 ALOGV("%s: Camera %d: JPEG capture heap now %zu bytes; requested %zd bytes",
118 __FUNCTION__, mId, mCaptureHeap->getSize(), maxJpegSize);
119
120 if (mCaptureStreamId != NO_STREAM) {
121 // Check if stream parameters have to change
122 CameraDeviceBase::StreamInfo streamInfo;
123 res = device->getStreamInfo(mCaptureStreamId, &streamInfo);
124 if (res != OK) {
125 ALOGE("%s: Camera %d: Error querying capture output stream info: "
126 "%s (%d)", __FUNCTION__,
127 mId, strerror(-res), res);
128 return res;
129 }
130 if (streamInfo.width != (uint32_t)params.pictureWidth ||
131 streamInfo.height != (uint32_t)params.pictureHeight) {
132 ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
133 __FUNCTION__, mId, mCaptureStreamId);
134 res = device->deleteStream(mCaptureStreamId);
135 if (res == -EBUSY) {
136 ALOGV("%s: Camera %d: Device is busy, call updateStream again "
137 " after it becomes idle", __FUNCTION__, mId);
138 return res;
139 } else if (res != OK) {
140 ALOGE("%s: Camera %d: Unable to delete old output stream "
141 "for capture: %s (%d)", __FUNCTION__,
142 mId, strerror(-res), res);
143 return res;
144 }
145 mCaptureStreamId = NO_STREAM;
146 }
147 }
148
149 if (mCaptureStreamId == NO_STREAM) {
150 // Create stream for HAL production
151 res = device->createStream(mCaptureWindow,
152 params.pictureWidth, params.pictureHeight,
153 HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
154 CAMERA_STREAM_ROTATION_0, &mCaptureStreamId,
155 String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
156 if (res != OK) {
157 ALOGE("%s: Camera %d: Can't create output stream for capture: "
158 "%s (%d)", __FUNCTION__, mId,
159 strerror(-res), res);
160 return res;
161 }
162 }
163 return OK;
164 }
165
deleteStream()166 status_t JpegProcessor::deleteStream() {
167 ATRACE_CALL();
168
169 Mutex::Autolock l(mInputMutex);
170
171 if (mCaptureStreamId != NO_STREAM) {
172 sp<CameraDeviceBase> device = mDevice.promote();
173 if (device == 0) {
174 ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
175 return INVALID_OPERATION;
176 }
177
178 status_t res = device->deleteStream(mCaptureStreamId);
179 if (res != OK) {
180 ALOGE("%s: delete stream %d failed!", __FUNCTION__, mCaptureStreamId);
181 return res;
182 }
183
184 mCaptureHeap.clear();
185 mCaptureWindow.clear();
186 mCaptureConsumer.clear();
187
188 mCaptureStreamId = NO_STREAM;
189 }
190 return OK;
191 }
192
// Returns the current capture stream id, or NO_STREAM if none is configured.
// mCaptureStreamId is written under mInputMutex in updateStream() /
// deleteStream(), so the lock is taken here for a consistent read.
int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}
197
// Debug dump hook; intentionally empty — the JPEG processor currently
// exposes no state via dumpsys.
void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}
200
threadLoop()201 bool JpegProcessor::threadLoop() {
202 status_t res;
203
204 bool captureSuccess = false;
205 {
206 Mutex::Autolock l(mInputMutex);
207
208 while (!mCaptureDone) {
209 res = mCaptureDoneSignal.waitRelative(mInputMutex,
210 kWaitDuration);
211 if (res == TIMED_OUT) return true;
212 }
213
214 captureSuccess = mCaptureSuccess;
215 mCaptureDone = false;
216 }
217
218 res = processNewCapture(captureSuccess);
219
220 return true;
221 }
222
/**
 * Consume the next still-capture buffer (when captureSuccess is true), copy
 * the JPEG data into the API-visible capture heap, and notify the capture
 * sequencer. On a failed capture the sequencer is still notified, with a
 * null buffer and the error flag set.
 *
 * Returns OK, INVALID_OPERATION if no stream is configured, or the error
 * from lockNextBuffer().
 */
status_t JpegProcessor::processNewCapture(bool captureSuccess) {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> captureHeap;   // NOTE(review): unused in this function
    sp<MemoryBase> captureBuffer;  // stays null unless a JPEG is copied below

    CpuConsumer::LockedBuffer imgBuffer;

    if (captureSuccess) {
        // Hold mInputMutex only while touching consumer/heap state; the
        // sequencer callback at the bottom runs without the lock.
        Mutex::Autolock l(mInputMutex);
        if (mCaptureStreamId == NO_STREAM) {
            ALOGW("%s: Camera %d: No stream is available", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
        if (res != OK) {
            // BAD_VALUE (no buffer pending) is expected occasionally and not
            // logged as an error.
            if (res != BAD_VALUE) {
                ALOGE("%s: Camera %d: Error receiving still image buffer: "
                        "%s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
            }
            return res;
        }

        ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
                mId);

        // The capture stream is created as HAL_PIXEL_FORMAT_BLOB; anything
        // else indicates a misbehaving producer — drop the buffer.
        if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
            ALOGE("%s: Camera %d: Unexpected format for still image: "
                    "%x, expected %x", __FUNCTION__, mId,
                    imgBuffer.format,
                    HAL_PIXEL_FORMAT_BLOB);
            mCaptureConsumer->unlockBuffer(imgBuffer);
            return OK;
        }

        // Find size of JPEG image. imgBuffer.width serves as the buffer's
        // byte capacity here (it is also the fallback size below) — see
        // findJpegSize()'s maxSize parameter.
        size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
        if (jpegSize == 0) { // failed to find size, default to whole buffer
            jpegSize = imgBuffer.width;
        }
        // Clamp to the heap allocated in updateStream() to avoid overrunning
        // the memcpy destination.
        size_t heapSize = mCaptureHeap->getSize();
        if (jpegSize > heapSize) {
            ALOGW("%s: JPEG image is larger than expected, truncating "
                    "(got %zu, expected at most %zu bytes)",
                    __FUNCTION__, jpegSize, heapSize);
            jpegSize = heapSize;
        }

        // TODO: Optimize this to avoid memcopy
        captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
        void* captureMemory = mCaptureHeap->getBase();
        memcpy(captureMemory, imgBuffer.data, jpegSize);

        mCaptureConsumer->unlockBuffer(imgBuffer);
    }

    // imgBuffer.timestamp is a plain field of the local struct, so reading it
    // after unlockBuffer() is safe (unlike imgBuffer.data, which is invalid
    // once unlocked). NOTE(review): on the failure path the timestamp is
    // whatever LockedBuffer's default construction leaves it as — confirm it
    // zero-initializes.
    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer, !captureSuccess);
    }

    return OK;
}
288
289 /*
290 * JPEG FILE FORMAT OVERVIEW.
291 * http://www.jpeg.org/public/jfif.pdf
292 * (JPEG is the image compression algorithm, actual file format is called JFIF)
293 *
294 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files. The
295 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
296 * (inclusive). Because every marker begins with the same byte, they are
297 * referred to by the second byte's value.
298 *
299 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
300 * Following it, "segment" sections begin with other markers, followed by a
301 * 2-byte length (in network byte order), then the segment data.
302 *
303 * For our purposes we will ignore the data, and just use the length to skip to
304 * the next segment. This is necessary because the data inside segments are
305 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
306 * naievely scanning until the end.
307 *
308 * After all the segments are processed, the jpeg compressed image stream begins.
309 * This can be considered an opaque format with one requirement: all 0xFF bytes
310 * in this stream must be followed with a 0x00 byte. This prevents any of the
311 * image data to be interpreted as a segment. The only exception to this is at
312 * the end of the image stream there is an End of Image (EOI) marker, which is
313 * 0xFF followed by a non-zero (0xD9) byte.
314 */
315
const uint8_t MARK = 0xFF;       // First byte of every JFIF marker
const uint8_t SOI = 0xD8;        // Start of Image marker type
const uint8_t EOI = 0xD9;        // End of Image marker type
const size_t MARKER_LENGTH = 2;  // A marker is always exactly two bytes

#pragma pack(push)
#pragma pack(1)
// On-wire layout of a JFIF segment header: two marker bytes followed by a
// big-endian 16-bit length. Packed so sizeof matches the byte stream.
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// Does buf point at a Start of Image marker (0xFF 0xD8)?
bool checkJpegStart(uint8_t* buf) {
    if (buf[0] != MARK) return false;
    return buf[1] == SOI;
}
// Does buf point at an End of Image marker (0xFF 0xD9)?
bool checkJpegEnd(uint8_t *buf) {
    if (buf[0] != MARK) return false;
    return buf[1] == EOI;
}
// Return the marker type (second byte) when buf points at any valid marker,
// 0 otherwise. 0x00 and 0xFF are not valid marker types, so 0 is an
// unambiguous "no marker" result.
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] != MARK) return 0;
    const uint8_t type = buf[1];
    return (type > 0 && type < 0xFF) ? type : 0;
}
347
348 // Return the size of the JPEG, 0 indicates failure
/**
 * Return the size in bytes of the JPEG image inside jpegBuffer (which holds
 * at most maxSize bytes), or 0 on failure.
 *
 * Strategy:
 *  1. Trust the camera2 JPEG transport header at the tail of the buffer,
 *     if present and self-consistent (valid size, SOI at start, EOI at end).
 *  2. Otherwise parse the JFIF structure: verify SOI, hop over the segment
 *     headers using their embedded lengths, then linearly scan the
 *     entropy-coded data for the EOI marker (see the format overview above).
 */
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        // Reject sizes that would overlap the transport header itself.
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = MARKER_LENGTH; //jump SOI;
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        // An EOI here (before any image data) or a segment header that would
        // read past the buffer means the stream is malformed.
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        // The 2-byte segment length is network byte order and counts itself
        // but not the marker bytes, hence the extra MARKER_LENGTH below
        // (see the format overview comment above).
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Find End of Image
    // Scan JPEG buffer until End of Image (EOI)
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    // Defensive clamp; size can only exceed maxSize if the loops above
    // advanced past the end before terminating.
    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}
418
419 }; // namespace camera2
420 }; // namespace android
421