1 /*
2 * Copyright (C) 2012-2018 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2-JpegProcessor"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <netinet/in.h>
22
23 #include <aidl/android/hardware/camera/device/CameraBlob.h>
24 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
25
26 #include <binder/MemoryBase.h>
27 #include <binder/MemoryHeapBase.h>
28 #include <utils/Log.h>
29 #include <utils/Trace.h>
30 #include <gui/Surface.h>
31
32 #include "common/CameraDeviceBase.h"
33 #include "api1/Camera2Client.h"
34 #include "api1/client2/Camera2Heap.h"
35 #include "api1/client2/CaptureSequencer.h"
36 #include "api1/client2/JpegProcessor.h"
37
38 namespace android {
39 namespace camera2 {
40
41 using android::camera3::CAMERA_STREAM_ROTATION_0;
42 using aidl::android::hardware::camera::device::CameraBlob;
43 using aidl::android::hardware::camera::device::CameraBlobId;
44
JpegProcessor(sp<Camera2Client> client,wp<CaptureSequencer> sequencer)45 JpegProcessor::JpegProcessor(
46 sp<Camera2Client> client,
47 wp<CaptureSequencer> sequencer):
48 Thread(false),
49 mDevice(client->getCameraDevice()),
50 mSequencer(sequencer),
51 mId(client->getCameraId()),
52 mCaptureDone(false),
53 mCaptureSuccess(false),
54 mCaptureStreamId(NO_STREAM) {
55 }
56
~JpegProcessor()57 JpegProcessor::~JpegProcessor() {
58 ALOGV("%s: Exit", __FUNCTION__);
59 deleteStream();
60 }
61
onFrameAvailable(const BufferItem &)62 void JpegProcessor::onFrameAvailable(const BufferItem& /*item*/) {
63 Mutex::Autolock l(mInputMutex);
64 ALOGV("%s", __FUNCTION__);
65 if (!mCaptureDone) {
66 mCaptureDone = true;
67 mCaptureSuccess = true;
68 mCaptureDoneSignal.signal();
69 }
70 }
71
updateStream(const Parameters & params)72 status_t JpegProcessor::updateStream(const Parameters ¶ms) {
73 ATRACE_CALL();
74 ALOGV("%s", __FUNCTION__);
75 status_t res;
76
77 Mutex::Autolock l(mInputMutex);
78
79 sp<CameraDeviceBase> device = mDevice.promote();
80 if (device == 0) {
81 ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
82 return INVALID_OPERATION;
83 }
84
85 // Find out buffer size for JPEG
86 ssize_t maxJpegSize = device->getJpegBufferSize(device->infoPhysical(""),
87 params.pictureWidth, params.pictureHeight);
88 if (maxJpegSize <= 0) {
89 ALOGE("%s: Camera %d: Jpeg buffer size (%zu) is invalid ",
90 __FUNCTION__, mId, maxJpegSize);
91 return INVALID_OPERATION;
92 }
93
94 if (mCaptureConsumer == 0) {
95 // Create CPU buffer queue endpoint
96 sp<IGraphicBufferProducer> producer;
97 sp<IGraphicBufferConsumer> consumer;
98 BufferQueue::createBufferQueue(&producer, &consumer);
99 mCaptureConsumer = new CpuConsumer(consumer, 1);
100 mCaptureConsumer->setFrameAvailableListener(this);
101 mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
102 mCaptureWindow = new Surface(producer);
103 }
104
105 // Since ashmem heaps are rounded up to page size, don't reallocate if
106 // the capture heap isn't exactly the same size as the required JPEG buffer
107 const size_t HEAP_SLACK_FACTOR = 2;
108 if (mCaptureHeap == 0 ||
109 (mCaptureHeap->getSize() < static_cast<size_t>(maxJpegSize)) ||
110 (mCaptureHeap->getSize() >
111 static_cast<size_t>(maxJpegSize) * HEAP_SLACK_FACTOR) ) {
112 // Create memory for API consumption
113 mCaptureHeap.clear();
114 mCaptureHeap =
115 new MemoryHeapBase(maxJpegSize, 0, "Camera2Client::CaptureHeap");
116 if (mCaptureHeap->getSize() == 0) {
117 ALOGE("%s: Camera %d: Unable to allocate memory for capture",
118 __FUNCTION__, mId);
119 return NO_MEMORY;
120 }
121 }
122 ALOGV("%s: Camera %d: JPEG capture heap now %zu bytes; requested %zd bytes",
123 __FUNCTION__, mId, mCaptureHeap->getSize(), maxJpegSize);
124
125 if (mCaptureStreamId != NO_STREAM) {
126 // Check if stream parameters have to change
127 CameraDeviceBase::StreamInfo streamInfo;
128 res = device->getStreamInfo(mCaptureStreamId, &streamInfo);
129 if (res != OK) {
130 ALOGE("%s: Camera %d: Error querying capture output stream info: "
131 "%s (%d)", __FUNCTION__,
132 mId, strerror(-res), res);
133 return res;
134 }
135 if (streamInfo.width != (uint32_t)params.pictureWidth ||
136 streamInfo.height != (uint32_t)params.pictureHeight) {
137 ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
138 __FUNCTION__, mId, mCaptureStreamId);
139 res = device->deleteStream(mCaptureStreamId);
140 if (res == -EBUSY) {
141 ALOGV("%s: Camera %d: Device is busy, call updateStream again "
142 " after it becomes idle", __FUNCTION__, mId);
143 return res;
144 } else if (res != OK) {
145 ALOGE("%s: Camera %d: Unable to delete old output stream "
146 "for capture: %s (%d)", __FUNCTION__,
147 mId, strerror(-res), res);
148 return res;
149 }
150 mCaptureStreamId = NO_STREAM;
151 }
152 }
153
154 if (mCaptureStreamId == NO_STREAM) {
155 // Create stream for HAL production
156 res = device->createStream(mCaptureWindow,
157 params.pictureWidth, params.pictureHeight,
158 HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_V0_JFIF,
159 CAMERA_STREAM_ROTATION_0, &mCaptureStreamId,
160 std::string(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
161 if (res != OK) {
162 ALOGE("%s: Camera %d: Can't create output stream for capture: "
163 "%s (%d)", __FUNCTION__, mId,
164 strerror(-res), res);
165 return res;
166 }
167 }
168 return OK;
169 }
170
deleteStream()171 status_t JpegProcessor::deleteStream() {
172 ATRACE_CALL();
173
174 Mutex::Autolock l(mInputMutex);
175
176 if (mCaptureStreamId != NO_STREAM) {
177 sp<CameraDeviceBase> device = mDevice.promote();
178 if (device == 0) {
179 ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
180 return INVALID_OPERATION;
181 }
182
183 status_t res = device->deleteStream(mCaptureStreamId);
184 if (res != OK) {
185 ALOGE("%s: delete stream %d failed!", __FUNCTION__, mCaptureStreamId);
186 return res;
187 }
188
189 mCaptureHeap.clear();
190 mCaptureWindow.clear();
191 mCaptureConsumer.clear();
192
193 mCaptureStreamId = NO_STREAM;
194 }
195 return OK;
196 }
197
getStreamId() const198 int JpegProcessor::getStreamId() const {
199 Mutex::Autolock l(mInputMutex);
200 return mCaptureStreamId;
201 }
202
dump(int,const Vector<String16> &) const203 void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
204 }
205
threadLoop()206 bool JpegProcessor::threadLoop() {
207 status_t res;
208
209 bool captureSuccess = false;
210 {
211 Mutex::Autolock l(mInputMutex);
212
213 while (!mCaptureDone) {
214 res = mCaptureDoneSignal.waitRelative(mInputMutex,
215 kWaitDuration);
216 if (res == TIMED_OUT) return true;
217 }
218
219 captureSuccess = mCaptureSuccess;
220 mCaptureDone = false;
221 }
222
223 res = processNewCapture(captureSuccess);
224
225 return true;
226 }
227
processNewCapture(bool captureSuccess)228 status_t JpegProcessor::processNewCapture(bool captureSuccess) {
229 ATRACE_CALL();
230 status_t res;
231 sp<Camera2Heap> captureHeap;
232 sp<MemoryBase> captureBuffer;
233
234 CpuConsumer::LockedBuffer imgBuffer;
235
236 if (captureSuccess) {
237 Mutex::Autolock l(mInputMutex);
238 if (mCaptureStreamId == NO_STREAM) {
239 ALOGW("%s: Camera %d: No stream is available", __FUNCTION__, mId);
240 return INVALID_OPERATION;
241 }
242
243 res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
244 if (res != OK) {
245 if (res != BAD_VALUE) {
246 ALOGE("%s: Camera %d: Error receiving still image buffer: "
247 "%s (%d)", __FUNCTION__,
248 mId, strerror(-res), res);
249 }
250 return res;
251 }
252
253 ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
254 mId);
255
256 if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
257 ALOGE("%s: Camera %d: Unexpected format for still image: "
258 "%x, expected %x", __FUNCTION__, mId,
259 imgBuffer.format,
260 HAL_PIXEL_FORMAT_BLOB);
261 mCaptureConsumer->unlockBuffer(imgBuffer);
262 return OK;
263 }
264
265 // Find size of JPEG image
266 size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
267 if (jpegSize == 0) { // failed to find size, default to whole buffer
268 jpegSize = imgBuffer.width;
269 }
270 size_t heapSize = mCaptureHeap->getSize();
271 if (jpegSize > heapSize) {
272 ALOGW("%s: JPEG image is larger than expected, truncating "
273 "(got %zu, expected at most %zu bytes)",
274 __FUNCTION__, jpegSize, heapSize);
275 jpegSize = heapSize;
276 }
277
278 // TODO: Optimize this to avoid memcopy
279 captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
280 void* captureMemory = mCaptureHeap->getBase();
281 memcpy(captureMemory, imgBuffer.data, jpegSize);
282
283 mCaptureConsumer->unlockBuffer(imgBuffer);
284 }
285
286 sp<CaptureSequencer> sequencer = mSequencer.promote();
287 if (sequencer != 0) {
288 sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer, !captureSuccess);
289 }
290
291 return OK;
292 }
293
294 /*
295 * JPEG FILE FORMAT OVERVIEW.
296 * http://www.jpeg.org/public/jfif.pdf
297 * (JPEG is the image compression algorithm, actual file format is called JFIF)
298 *
299 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files. The
300 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
301 * (inclusive). Because every marker begins with the same byte, they are
302 * referred to by the second byte's value.
303 *
304 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
305 * Following it, "segment" sections begin with other markers, followed by a
306 * 2-byte length (in network byte order), then the segment data.
307 *
308 * For our purposes we will ignore the data, and just use the length to skip to
309 * the next segment. This is necessary because the data inside segments are
310 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
311 * naievely scanning until the end.
312 *
313 * After all the segments are processed, the jpeg compressed image stream begins.
314 * This can be considered an opaque format with one requirement: all 0xFF bytes
315 * in this stream must be followed with a 0x00 byte. This prevents any of the
316 * image data to be interpreted as a segment. The only exception to this is at
317 * the end of the image stream there is an End of Image (EOI) marker, which is
318 * 0xFF followed by a non-zero (0xD9) byte.
319 */
320
const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

// On-wire JFIF segment header: two marker bytes followed by a big-endian
// 16-bit length.  Packed so it can be overlaid directly on the buffer.
#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// True iff buf points at a Start of Image marker (0xFF 0xD8).
bool checkJpegStart(uint8_t* buf) {
    return (buf[0] == MARK) && (buf[1] == SOI);
}
// True iff buf points at an End of Image marker (0xFF 0xD9).
bool checkJpegEnd(uint8_t *buf) {
    return (buf[0] == MARK) && (buf[1] == EOI);
}
// Returns the marker type (second byte) if buf points at a valid marker,
// 0 otherwise.  0x00 and 0xFF are not valid marker types.
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] != MARK) {
        return 0;
    }
    const uint8_t type = buf[1];
    return (type > 0 && type < 0xFF) ? type : 0;
}
352
353 // Return the size of the JPEG, 0 indicates failure
/*
 * Determine the actual compressed size of the JPEG held in a BLOB buffer
 * of maxSize bytes.  Returns 0 on failure.
 *
 * Strategy:
 *  1. Check for the HAL's CameraBlob transport header at the very end of
 *     the buffer; if present with a plausible size and valid SOI/EOI
 *     markers, trust it.
 *  2. Otherwise parse the JFIF structure directly: verify SOI, skip
 *     segments via their embedded lengths, then linearly scan the
 *     entropy-coded stream for the EOI marker (see the format overview
 *     comment above for why segments can't simply be scanned for EOI).
 */
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
    // NOTE(review): reinterprets raw trailing bytes as the AIDL CameraBlob
    // struct — assumes the HAL wrote it with a matching layout; confirm
    // against the BLOB stream contract.
    uint8_t *header = jpegBuffer + (maxSize - sizeof(CameraBlob));
    CameraBlob *blob = (CameraBlob*)(header);
    if (blob->blobId == CameraBlobId::JPEG) {
        size = blob->blobSizeBytes;
        if (size > 0 && size <= maxSize - sizeof(CameraBlob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = MARKER_LENGTH; //jump SOI;
    // Loop bound guarantees both marker bytes are readable; the extra
    // check against sizeof(segment_t) below guards the 2 length bytes.
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        // Segment length counts its own 2 length bytes but not the marker,
        // so the next marker sits at size + MARKER_LENGTH + length.
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Find End of Image
    // Scan JPEG buffer until End of Image (EOI)
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}
423
424 }; // namespace camera2
425 }; // namespace android
426