1 /*
2 * Copyright (C) 2009 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include <inttypes.h>
18
19 //#define LOG_NDEBUG 0
20 #define LOG_TAG "CameraSource"
21 #include <utils/Log.h>
22
23 #include <OMX_Component.h>
24 #include <binder/IPCThreadState.h>
25 #include <binder/MemoryBase.h>
26 #include <binder/MemoryHeapBase.h>
27 #include <media/hardware/HardwareAPI.h>
28 #include <media/stagefright/foundation/ADebug.h>
29 #include <media/stagefright/CameraSource.h>
30 #include <media/stagefright/MediaDefs.h>
31 #include <media/stagefright/MediaErrors.h>
32 #include <media/stagefright/MetaData.h>
33 #include <camera/Camera.h>
34 #include <camera/CameraParameters.h>
35 #include <camera/StringUtils.h>
36 #include <gui/Surface.h>
37 #include <utils/String8.h>
38 #include <cutils/properties.h>
39
40 #if LOG_NDEBUG
41 #define UNUSED_UNLESS_VERBOSE(x) (void)(x)
42 #else
43 #define UNUSED_UNLESS_VERBOSE(x)
44 #endif
45
46 namespace android {
47
48 static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
49
getColorFormat(const char * colorFormat)50 static int32_t getColorFormat(const char* colorFormat) {
51 if (!colorFormat) {
52 ALOGE("Invalid color format");
53 return -1;
54 }
55
56 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
57 return OMX_COLOR_FormatYUV420Planar;
58 }
59
60 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
61 return OMX_COLOR_FormatYUV422SemiPlanar;
62 }
63
64 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
65 return OMX_COLOR_FormatYUV420SemiPlanar;
66 }
67
68 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
69 return OMX_COLOR_FormatYCbYCr;
70 }
71
72 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_RGB565)) {
73 return OMX_COLOR_Format16bitRGB565;
74 }
75
76 if (!strcmp(colorFormat, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar")) {
77 return OMX_TI_COLOR_FormatYUV420PackedSemiPlanar;
78 }
79
80 if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_ANDROID_OPAQUE)) {
81 return OMX_COLOR_FormatAndroidOpaque;
82 }
83
84 ALOGE("Uknown color format (%s), please add it to "
85 "CameraSource::getColorFormat", colorFormat);
86
87 CHECK(!"Unknown color format");
88 return -1;
89 }
90
91 // static
CreateFromCamera(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate,const sp<IGraphicBufferProducer> & surface)92 CameraSource *CameraSource::CreateFromCamera(
93 const sp<hardware::ICamera>& camera,
94 const sp<ICameraRecordingProxy>& proxy,
95 int32_t cameraId,
96 const String16& clientName,
97 uid_t clientUid,
98 pid_t clientPid,
99 Size videoSize,
100 int32_t frameRate,
101 const sp<IGraphicBufferProducer>& surface) {
102
103 CameraSource *source = new CameraSource(camera, proxy, cameraId,
104 clientName, clientUid, clientPid, videoSize, frameRate, surface);
105 return source;
106 }
107
// Constructor: records the requested configuration, then performs the
// actual camera setup via init(). On failure the camera is released
// immediately; callers must consult initCheck() for the result.
CameraSource::CameraSource(
    const sp<hardware::ICamera>& camera,
    const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId,
    const String16& clientName,
    uid_t clientUid,
    pid_t clientPid,
    Size videoSize,
    int32_t frameRate,
    const sp<IGraphicBufferProducer>& surface)
    : mCameraFlags(0),
      mNumInputBuffers(0),
      mVideoFrameRate(-1),
      mCamera(0),
      mSurface(surface),
      mNumFramesReceived(0),
      mLastFrameTimestampUs(0),
      mStarted(false),
      mEos(false),
      mNumFramesEncoded(0),
      mTimeBetweenFrameCaptureUs(0),
      mFirstFrameTimeUs(0),
      mStopSystemTimeUs(-1),
      mNumFramesDropped(0),
      mNumGlitches(0),
      // Default glitch threshold: 200 ms between frames; may be raised
      // later in initWithCameraAccess() based on the actual frame rate.
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false) {
    // -1 means "not configured yet"; filled in by checkVideoSize().
    mVideoSize.width  = -1;
    mVideoSize.height = -1;

    mInitCheck = init(camera, proxy, cameraId,
                    clientName, clientUid, clientPid,
                    videoSize, frameRate);
    // If initialization failed, drop the camera lock/connection now so the
    // camera isn't held hostage by a dead source.
    if (mInitCheck != OK) releaseCamera();
}
143
initCheck() const144 status_t CameraSource::initCheck() const {
145 return mInitCheck;
146 }
147
// Obtain a usable Camera object and take its lock.
//
// Two paths:
//  - camera == 0: we open the camera ourselves by id ("cold" camera);
//    FLAGS_HOT_CAMERA is cleared and no recording proxy is kept.
//  - camera != 0: the application already owns the camera ("hot" camera);
//    we wrap the passed ICamera, keep the app's recording proxy, and watch
//    its binder for death.
//
// Returns OK on success, -EBUSY if the camera could not be acquired.
status_t CameraSource::isCameraAvailable(
    const sp<hardware::ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
    int32_t cameraId, const std::string& clientName, uid_t clientUid, pid_t clientPid) {

    if (camera == 0) {
        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
                /*forceSlowJpegMode*/false);
        if (mCamera == 0) return -EBUSY;
        mCameraFlags &= ~FLAGS_HOT_CAMERA;
    } else {
        // We get the proxy from Camera, not ICamera. We need to get the proxy
        // to the remote Camera owned by the application. Here mCamera is a
        // local Camera object created by us. We cannot use the proxy from
        // mCamera here.
        mCamera = Camera::create(camera);
        if (mCamera == 0) return -EBUSY;
        mCameraRecordingProxy = proxy;
        mCameraFlags |= FLAGS_HOT_CAMERA;
        mDeathNotifier = new DeathNotifier();
        // isBinderAlive needs linkToDeath to work.
        IInterface::asBinder(mCameraRecordingProxy)->linkToDeath(mDeathNotifier);
    }

    // Take exclusive ownership of camera parameter changes.
    mCamera->lock();

    return OK;
}
177
178
179 /*
180 * Check to see whether the requested video width and height is one
181 * of the supported sizes.
182 * @param width the video frame width in pixels
183 * @param height the video frame height in pixels
 * @param supportedSizes the vector of sizes that we check against
185 * @return true if the dimension (width and height) is supported.
186 */
isVideoSizeSupported(int32_t width,int32_t height,const Vector<Size> & supportedSizes)187 static bool isVideoSizeSupported(
188 int32_t width, int32_t height,
189 const Vector<Size>& supportedSizes) {
190
191 ALOGV("isVideoSizeSupported");
192 for (size_t i = 0; i < supportedSizes.size(); ++i) {
193 if (width == supportedSizes[i].width &&
194 height == supportedSizes[i].height) {
195 return true;
196 }
197 }
198 return false;
199 }
200
201 /*
202 * If the preview and video output is separate, we only set the
203 * the video size, and applications should set the preview size
204 * to some proper value, and the recording framework will not
205 * change the preview size; otherwise, if the video and preview
206 * output is the same, we need to set the preview to be the same
207 * as the requested video size.
208 *
209 */
210 /*
211 * Query the camera to retrieve the supported video frame sizes
212 * and also to see whether CameraParameters::setVideoSize()
213 * is supported or not.
214 * @param params CameraParameters to retrieve the information
 * @param isSetVideoSizeSupported returns whether method
 *        CameraParameters::setVideoSize() is supported or not.
217 * @param sizes returns the vector of Size objects for the
218 * supported video frame sizes advertised by the camera.
219 */
getSupportedVideoSizes(const CameraParameters & params,bool * isSetVideoSizeSupported,Vector<Size> & sizes)220 static void getSupportedVideoSizes(
221 const CameraParameters& params,
222 bool *isSetVideoSizeSupported,
223 Vector<Size>& sizes) {
224
225 *isSetVideoSizeSupported = true;
226 params.getSupportedVideoSizes(sizes);
227 if (sizes.size() == 0) {
228 ALOGD("Camera does not support setVideoSize()");
229 params.getSupportedPreviewSizes(sizes);
230 *isSetVideoSizeSupported = false;
231 }
232 }
233
234 /*
235 * Check whether the camera has the supported color format
236 * @param params CameraParameters to retrieve the information
237 * @return OK if no error.
238 */
isCameraColorFormatSupported(const CameraParameters & params)239 status_t CameraSource::isCameraColorFormatSupported(
240 const CameraParameters& params) {
241 mColorFormat = getColorFormat(params.get(
242 CameraParameters::KEY_VIDEO_FRAME_FORMAT));
243 if (mColorFormat == -1) {
244 return BAD_VALUE;
245 }
246 return OK;
247 }
248
249 /*
250 * Configure the camera to use the requested video size
251 * (width and height) and/or frame rate. If both width and
252 * height are -1, configuration on the video size is skipped.
253 * if frameRate is -1, configuration on the frame rate
254 * is skipped. Skipping the configuration allows one to
255 * use the current camera setting without the need to
256 * actually know the specific values (see Create() method).
257 *
258 * @param params the CameraParameters to be configured
259 * @param width the target video frame width in pixels
260 * @param height the target video frame height in pixels
261 * @param frameRate the target frame rate in frames per second.
262 * @return OK if no error.
263 */
configureCamera(CameraParameters * params,int32_t width,int32_t height,int32_t frameRate)264 status_t CameraSource::configureCamera(
265 CameraParameters* params,
266 int32_t width, int32_t height,
267 int32_t frameRate) {
268 ALOGV("configureCamera");
269 Vector<Size> sizes;
270 bool isSetVideoSizeSupportedByCamera = true;
271 getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
272 bool isCameraParamChanged = false;
273 if (width != -1 && height != -1) {
274 if (!isVideoSizeSupported(width, height, sizes)) {
275 ALOGE("Video dimension (%dx%d) is unsupported", width, height);
276 return BAD_VALUE;
277 }
278 if (isSetVideoSizeSupportedByCamera) {
279 params->setVideoSize(width, height);
280 } else {
281 params->setPreviewSize(width, height);
282 }
283 isCameraParamChanged = true;
284 } else if ((width == -1 && height != -1) ||
285 (width != -1 && height == -1)) {
286 // If one and only one of the width and height is -1
287 // we reject such a request.
288 ALOGE("Requested video size (%dx%d) is not supported", width, height);
289 return BAD_VALUE;
290 } else { // width == -1 && height == -1
291 // Do not configure the camera.
292 // Use the current width and height value setting from the camera.
293 }
294
295 if (frameRate != -1) {
296 CHECK(frameRate > 0 && frameRate <= 120);
297 const char* supportedFrameRates =
298 params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
299 CHECK(supportedFrameRates != NULL);
300 ALOGV("Supported frame rates: %s", supportedFrameRates);
301 char buf[4];
302 snprintf(buf, 4, "%d", frameRate);
303 if (strstr(supportedFrameRates, buf) == NULL) {
304 ALOGE("Requested frame rate (%d) is not supported: %s",
305 frameRate, supportedFrameRates);
306 return BAD_VALUE;
307 }
308
309 // The frame rate is supported, set the camera to the requested value.
310 params->setPreviewFrameRate(frameRate);
311 isCameraParamChanged = true;
312 } else { // frameRate == -1
313 // Do not configure the camera.
314 // Use the current frame rate value setting from the camera
315 }
316
317 if (isCameraParamChanged) {
318 // Either frame rate or frame size needs to be changed.
319 String8 s = params->flatten();
320 if (OK != mCamera->setParameters(s)) {
321 ALOGE("Could not change settings."
322 " Someone else is using camera %p?", mCamera.get());
323 return -EBUSY;
324 }
325 }
326 return OK;
327 }
328
329 /*
330 * Check whether the requested video frame size
331 * has been successfully configured or not. If both width and height
332 * are -1, check on the current width and height value setting
333 * is performed.
334 *
335 * @param params CameraParameters to retrieve the information
336 * @param the target video frame width in pixels to check against
337 * @param the target video frame height in pixels to check against
338 * @return OK if no error
339 */
/*
 * Check whether the requested video frame size
 * has been successfully configured or not. If both width and height
 * are -1, check on the current width and height value setting
 * is performed. On success, the verified dimensions are cached in
 * mVideoSize.
 *
 * @param params CameraParameters to retrieve the information
 * @param width the target video frame width in pixels to check against
 * @param height the target video frame height in pixels to check against
 * @return OK if no error
 */
status_t CameraSource::checkVideoSize(
        const CameraParameters& params,
        int32_t width, int32_t height) {

    ALOGV("checkVideoSize");
    // The actual video size is the same as the preview size
    // if the camera hal does not support separate video and
    // preview output. In this case, we retrieve the video
    // size from preview.
    int32_t frameWidthActual = -1;
    int32_t frameHeightActual = -1;
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (sizes.size() == 0) {
        // video size is the same as preview size
        params.getPreviewSize(&frameWidthActual, &frameHeightActual);
    } else {
        // video size may not be the same as preview
        params.getVideoSize(&frameWidthActual, &frameHeightActual);
    }
    // Negative values mean the HAL never reported a size at all.
    if (frameWidthActual < 0 || frameHeightActual < 0) {
        ALOGE("Failed to retrieve video frame size (%dx%d)",
                frameWidthActual, frameHeightActual);
        return UNKNOWN_ERROR;
    }

    // Check the actual video frame size against the target/requested
    // video frame size.
    if (width != -1 && height != -1) {
        if (frameWidthActual != width || frameHeightActual != height) {
            ALOGE("Failed to set video frame size to %dx%d. "
                    "The actual video size is %dx%d ", width, height,
                    frameWidthActual, frameHeightActual);
            return UNKNOWN_ERROR;
        }
    }

    // Good now.
    mVideoSize.width = frameWidthActual;
    mVideoSize.height = frameHeightActual;
    return OK;
}
382
383 /*
384 * Check the requested frame rate has been successfully configured or not.
385 * If the target frameRate is -1, check on the current frame rate value
386 * setting is performed.
387 *
388 * @param params CameraParameters to retrieve the information
389 * @param the target video frame rate to check against
390 * @return OK if no error.
391 */
checkFrameRate(const CameraParameters & params,int32_t frameRate)392 status_t CameraSource::checkFrameRate(
393 const CameraParameters& params,
394 int32_t frameRate) {
395
396 ALOGV("checkFrameRate");
397 int32_t frameRateActual = params.getPreviewFrameRate();
398 if (frameRateActual < 0) {
399 ALOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
400 return UNKNOWN_ERROR;
401 }
402
403 // Check the actual video frame rate against the target/requested
404 // video frame rate.
405 if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
406 ALOGE("Failed to set preview frame rate to %d fps. The actual "
407 "frame rate is %d", frameRate, frameRateActual);
408 return UNKNOWN_ERROR;
409 }
410
411 // Good now.
412 mVideoFrameRate = frameRateActual;
413 return OK;
414 }
415
416 /*
 * Initialize the CameraSource so that it becomes
418 * ready for providing the video input streams as requested.
419 * @param camera the camera object used for the video source
420 * @param cameraId if camera == 0, use camera with this id
421 * as the video source
422 * @param videoSize the target video frame size. If both
423 * width and height in videoSize is -1, use the current
 * width and height settings by the camera
425 * @param frameRate the target frame rate in frames per second.
426 * if it is -1, use the current camera frame rate setting.
427 * @param storeMetaDataInVideoBuffers request to store meta
428 * data or real YUV data in video buffers. Request to
429 * store meta data in video buffers may not be honored
430 * if the source does not support this feature.
431 *
432 * @return OK if no error.
433 */
init(const sp<hardware::ICamera> & camera,const sp<ICameraRecordingProxy> & proxy,int32_t cameraId,const String16 & clientName,uid_t clientUid,pid_t clientPid,Size videoSize,int32_t frameRate)434 status_t CameraSource::init(
435 const sp<hardware::ICamera>& camera,
436 const sp<ICameraRecordingProxy>& proxy,
437 int32_t cameraId,
438 const String16& clientName,
439 uid_t clientUid,
440 pid_t clientPid,
441 Size videoSize,
442 int32_t frameRate) {
443
444 ALOGV("init");
445 status_t err = OK;
446 int64_t token = IPCThreadState::self()->clearCallingIdentity();
447 err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid, clientPid,
448 videoSize, frameRate);
449 IPCThreadState::self()->restoreCallingIdentity(token);
450 return err;
451 }
452
createVideoBufferMemoryHeap(size_t size,uint32_t bufferCount)453 void CameraSource::createVideoBufferMemoryHeap(size_t size, uint32_t bufferCount) {
454 mMemoryHeapBase = new MemoryHeapBase(size * bufferCount, 0,
455 "StageFright-CameraSource-BufferHeap");
456 for (uint32_t i = 0; i < bufferCount; i++) {
457 mMemoryBases.push_back(new MemoryBase(mMemoryHeapBase, i * size, size));
458 }
459 }
460
// Create and wire up the BufferQueue through which the camera delivers
// video frames:
//   producer side  -> handed to the camera as its video target
//   consumer side  -> wrapped in a BufferItemConsumer we read from
// Also allocates the VideoNativeMetadata heap used to hand frames to the
// encoder, and starts the listener thread that drains the queue.
// Must only be called once; returns ALREADY_EXISTS otherwise.
status_t CameraSource::initBufferQueue(uint32_t width, uint32_t height,
        uint32_t format, android_dataspace dataSpace, uint32_t bufferCount) {
    ALOGV("initBufferQueue");

    if (mVideoBufferConsumer != nullptr || mVideoBufferProducer != nullptr) {
        ALOGE("%s: Buffer queue already exists", __FUNCTION__);
        return ALREADY_EXISTS;
    }

    // Create a buffer queue.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);

    // CPU-readable buffers by default; opaque implementation-defined
    // buffers go straight to the hardware encoder instead.
    uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
    if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
        usage = GRALLOC_USAGE_HW_VIDEO_ENCODER;
    }

    // Reserve extra buffers for the consumer side on top of what the
    // encoder requested.
    bufferCount += kConsumerBufferCount;

    mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
    mVideoBufferProducer = producer;

    status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
    if (res != OK) {
        ALOGE("%s: Could not set buffer dimensions %dx%d: %s (%d)", __FUNCTION__, width, height,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferFormat(format);
    if (res != OK) {
        ALOGE("%s: Could not set buffer format %d: %s (%d)", __FUNCTION__, format,
                strerror(-res), res);
        return res;
    }

    res = mVideoBufferConsumer->setDefaultBufferDataSpace(dataSpace);
    if (res != OK) {
        ALOGE("%s: Could not set data space %d: %s (%d)", __FUNCTION__, dataSpace,
                strerror(-res), res);
        return res;
    }

    // Point the camera's video output at our producer.
    res = mCamera->setVideoTarget(mVideoBufferProducer);
    if (res != OK) {
        ALOGE("%s: Failed to set video target: %s (%d)", __FUNCTION__, strerror(-res), res);
        return res;
    }

    // Create memory heap to store buffers as VideoNativeMetadata.
    createVideoBufferMemoryHeap(sizeof(VideoNativeMetadata), bufferCount);

    mBufferQueueListener = new BufferQueueListener(mVideoBufferConsumer, this);
    res = mBufferQueueListener->run("CameraSource-BufferQueueListener");
    if (res != OK) {
        ALOGE("%s: Could not run buffer queue listener thread: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}
526
// Body of init(), executed with the binder calling identity already
// cleared. Connects/locks the camera, validates the color format, applies
// and re-verifies the requested size/frame rate, attaches the optional
// preview surface, switches the camera into buffer-queue video mode, and
// fills in mMeta for downstream consumers.
status_t CameraSource::initWithCameraAccess(
        const sp<hardware::ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t frameRate) {
    ALOGV("initWithCameraAccess");
    status_t err = OK;

    if ((err = isCameraAvailable(camera, proxy, cameraId,
            toStdString(clientName), clientUid, clientPid)) != OK) {
        ALOGE("Camera connection could not be established.");
        return err;
    }
    CameraParameters params(mCamera->getParameters());
    if ((err = isCameraColorFormatSupported(params)) != OK) {
        return err;
    }

    // Set the camera to use the requested video frame size
    // and/or frame rate.
    if ((err = configureCamera(&params,
                    videoSize.width, videoSize.height,
                    frameRate))) {
        return err;
    }

    // Check on video frame size and frame rate: re-read the parameters
    // because the HAL may have adjusted them.
    CameraParameters newCameraParams(mCamera->getParameters());
    if ((err = checkVideoSize(newCameraParams,
                videoSize.width, videoSize.height)) != OK) {
        return err;
    }
    if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
        return err;
    }

    // Set the preview display. Skip this if mSurface is null because
    // applications may already set a surface to the camera.
    if (mSurface != NULL) {
        // Surface may be set incorrectly or could already be used even if we just
        // passed the lock/unlock check earlier by calling mCamera->setParameters().
        if ((err = mCamera->setPreviewTarget(mSurface)) != OK) {
            return err;
        }
    }

    // Use buffer queue to receive video buffers from camera
    err = mCamera->setVideoBufferMode(hardware::ICamera::VIDEO_BUFFER_MODE_BUFFER_QUEUE);
    if (err != OK) {
        ALOGE("%s: Setting video buffer mode to VIDEO_BUFFER_MODE_BUFFER_QUEUE failed: "
                "%s (err=%d)", __FUNCTION__, strerror(-err), err);
        return err;
    }

    // A gap longer than one frame interval counts as a glitch, but never
    // lower the threshold below the 200 ms default.
    int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    // XXX: query camera for the stride and slice height
    // when the capability becomes available.
    mMeta = new MetaData;
    mMeta->setCString(kKeyMIMEType,  MEDIA_MIMETYPE_VIDEO_RAW);
    mMeta->setInt32(kKeyColorFormat, mColorFormat);
    mMeta->setInt32(kKeyWidth,       mVideoSize.width);
    mMeta->setInt32(kKeyHeight,      mVideoSize.height);
    mMeta->setInt32(kKeyStride,      mVideoSize.width);
    mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
    mMeta->setInt32(kKeyFrameRate,   mVideoFrameRate);
    return OK;
}
602
~CameraSource()603 CameraSource::~CameraSource() {
604 if (mStarted) {
605 reset();
606 } else if (mInitCheck == OK) {
607 // Camera is initialized but because start() is never called,
608 // the lock on Camera is never released(). This makes sure
609 // Camera's lock is released in this case.
610 releaseCamera();
611 }
612 }
613
// Kick off the camera's recording stream: set up the buffer queue, then
// start recording either through the app's recording proxy (hot camera)
// or directly on our own Camera object (cold camera).
status_t CameraSource::startCameraRecording() {
    ALOGV("startCameraRecording");
    // Reset the identity to the current thread because media server owns the
    // camera and recording is started by the applications. The applications
    // will connect to the camera in ICameraRecordingProxy::startRecording.
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    status_t err;

    // Initialize buffer queue. At least one input buffer is always
    // requested even if the encoder asked for none.
    err = initBufferQueue(mVideoSize.width, mVideoSize.height, mEncoderFormat,
            (android_dataspace_t)mEncoderDataSpace,
            mNumInputBuffers > 0 ? mNumInputBuffers : 1);
    if (err != OK) {
        ALOGE("%s: Failed to initialize buffer queue: %s (err=%d)", __FUNCTION__,
                strerror(-err), err);
        return err;
    }

    // Start data flow
    err = OK;
    if (mCameraFlags & FLAGS_HOT_CAMERA) {
        // Hand the camera back to the application before asking it (via the
        // proxy) to start recording; we drop our local reference.
        mCamera->unlock();
        mCamera.clear();
        if ((err = mCameraRecordingProxy->startRecording()) != OK) {
            ALOGE("Failed to start recording, received error: %s (%d)",
                    strerror(-err), err);
        }
    } else {
        mCamera->startRecording();
        if (!mCamera->recordingEnabled()) {
            err = -EINVAL;
            ALOGE("Failed to start recording");
        }
    }
    IPCThreadState::self()->restoreCallingIdentity(token);
    return err;
}
651
start(MetaData * meta)652 status_t CameraSource::start(MetaData *meta) {
653 ALOGV("start");
654 CHECK(!mStarted);
655 if (mInitCheck != OK) {
656 ALOGE("CameraSource is not initialized yet");
657 return mInitCheck;
658 }
659
660 if (property_get_bool("media.stagefright.record-stats", false)) {
661 mCollectStats = true;
662 }
663
664 mStartTimeUs = 0;
665 mNumInputBuffers = 0;
666 mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
667 mEncoderDataSpace = mBufferDataSpace = HAL_DATASPACE_V0_BT709;
668
669 if (meta) {
670 int64_t startTimeUs;
671 if (meta->findInt64(kKeyTime, &startTimeUs)) {
672 mStartTimeUs = startTimeUs;
673 }
674
675 int32_t nBuffers;
676 if (meta->findInt32(kKeyNumBuffers, &nBuffers)) {
677 CHECK_GT(nBuffers, 0);
678 mNumInputBuffers = nBuffers;
679 }
680
681 // apply encoder color format if specified
682 if (meta->findInt32(kKeyPixelFormat, &mEncoderFormat)) {
683 ALOGI("Using encoder format: %#x", mEncoderFormat);
684 }
685 if (meta->findInt32(kKeyColorSpace, &mEncoderDataSpace)) {
686 ALOGI("Using encoder data space: %#x", mEncoderDataSpace);
687 mBufferDataSpace = mEncoderDataSpace;
688 }
689 }
690
691 status_t err;
692 if ((err = startCameraRecording()) == OK) {
693 mStarted = true;
694 }
695
696 return err;
697 }
698
stopCameraRecording()699 void CameraSource::stopCameraRecording() {
700 ALOGV("stopCameraRecording");
701 if (mCameraFlags & FLAGS_HOT_CAMERA) {
702 if (mCameraRecordingProxy != 0) {
703 mCameraRecordingProxy->stopRecording();
704 }
705 } else {
706 if (mCamera != 0) {
707 mCamera->stopRecording();
708 }
709 }
710 }
711
// Drop all camera references. The binder calls (stopPreview/disconnect/
// unlock) are deliberately made outside mLock, on a local reference, to
// avoid holding our lock across IPC.
void CameraSource::releaseCamera() {
    ALOGV("releaseCamera");
    sp<Camera> camera;
    bool coldCamera = false;
    {
        Mutex::Autolock autoLock(mLock);
        // get a local ref and clear ref to mCamera now
        camera = mCamera;
        mCamera.clear();
        coldCamera = (mCameraFlags & FLAGS_HOT_CAMERA) == 0;
    }

    if (camera != 0) {
        // Talk to the camera service as ourselves, not as the binder caller.
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        if (coldCamera) {
            // We opened this camera ourselves, so we also shut it down.
            ALOGV("Camera was cold when we started, stopping preview");
            camera->stopPreview();
            camera->disconnect();
        }
        camera->unlock();
        IPCThreadState::self()->restoreCallingIdentity(token);
    }

    {
        Mutex::Autolock autoLock(mLock);
        if (mCameraRecordingProxy != 0) {
            // Stop watching the (now released) proxy for binder death.
            IInterface::asBinder(mCameraRecordingProxy)->unlinkToDeath(mDeathNotifier);
            mCameraRecordingProxy.clear();
        }
        mCameraFlags = 0;
    }
}
744
// Full teardown: stop accepting frames, drain queued and in-flight
// frames, stop camera recording, shut down the buffer-queue listener
// thread, and finally release the camera. Safe to call from the
// destructor of a started source.
status_t CameraSource::reset() {
    ALOGD("reset: E");

    {
        Mutex::Autolock autoLock(mLock);
        mStarted = false;
        mEos = false;
        mStopSystemTimeUs = -1;
        // Wake any reader blocked in read() so it can observe !mStarted.
        mFrameAvailableCondition.signal();

        int64_t token;
        bool isTokenValid = false;
        // Only clear the calling identity if we actually still talk to a
        // camera below.
        if (mCamera != 0) {
            token = IPCThreadState::self()->clearCallingIdentity();
            isTokenValid = true;
        }
        releaseQueuedFrames();
        // Wait (bounded) for the encoder to hand back every frame it is
        // still holding; signalBufferReturned() signals this condition.
        while (!mFramesBeingEncoded.empty()) {
            if (NO_ERROR !=
                mFrameCompleteCondition.waitRelative(mLock,
                        mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
                ALOGW("Timed out waiting for outstanding frames being encoded: %zu",
                    mFramesBeingEncoded.size());
            }
        }
        stopCameraRecording();
        if (isTokenValid) {
            IPCThreadState::self()->restoreCallingIdentity(token);
        }

        if (mCollectStats) {
            ALOGI("Frames received/encoded/dropped: %d/%d/%d in %" PRId64 " us",
                    mNumFramesReceived, mNumFramesEncoded, mNumFramesDropped,
                    mLastFrameTimestampUs - mFirstFrameTimeUs);
        }

        if (mNumGlitches > 0) {
            ALOGW("%d long delays between neighboring video frames", mNumGlitches);
        }

        // Every received frame must be accounted for as encoded or dropped.
        CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped);
    }

    if (mBufferQueueListener != nullptr) {
        mBufferQueueListener->requestExit();
        mBufferQueueListener->join();
        mBufferQueueListener.clear();
    }

    mVideoBufferConsumer.clear();
    mVideoBufferProducer.clear();
    releaseCamera();

    ALOGD("reset: X");
    return OK;
}
801
// Return a frame (handed out earlier as VideoNativeMetadata in shared
// memory) back to the buffer queue, and recycle its IMemory slot so a
// new frame can be received.
void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
    ALOGV("releaseRecordingFrame");

    // Return the buffer to buffer queue in VIDEO_BUFFER_MODE_BUFFER_QUEUE mode.
    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = frame->getMemory(&offset, &size);
    // Sanity check: the IMemory must come from our own heap.
    if (heap->getHeapID() != mMemoryHeapBase->getHeapID()) {
        ALOGE("%s: Mismatched heap ID, ignoring release (got %x, expected %x)", __FUNCTION__,
                heap->getHeapID(), mMemoryHeapBase->getHeapID());
        return;
    }

    // The slot holds a VideoNativeMetadata describing the native buffer.
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);

    // Find the corresponding buffer item for the native window buffer.
    ssize_t index = mReceivedBufferItemMap.indexOfKey(payload->pBuffer);
    if (index == NAME_NOT_FOUND) {
        ALOGE("%s: Couldn't find buffer item for %p", __FUNCTION__, payload->pBuffer);
        return;
    }

    BufferItem buffer = mReceivedBufferItemMap.valueAt(index);
    mReceivedBufferItemMap.removeItemsAt(index);
    mVideoBufferConsumer->releaseBuffer(buffer);
    // The memory slot is free again; wake anyone waiting for one.
    mMemoryBases.push_back(frame);
    mMemoryBaseAvailableCond.signal();
}
831
releaseQueuedFrames()832 void CameraSource::releaseQueuedFrames() {
833 List<sp<IMemory> >::iterator it;
834 while (!mFramesReceived.empty()) {
835 it = mFramesReceived.begin();
836 releaseRecordingFrame(*it);
837 mFramesReceived.erase(it);
838 ++mNumFramesDropped;
839 }
840 }
841
// Format metadata (mime/size/stride/color format/frame rate) built in
// initWithCameraAccess().
sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}
845
// Thin wrapper so subclasses / call sites have a single release hook.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    releaseRecordingFrame(frame);
}
849
// MediaBufferObserver callback: the encoder is done with |buffer|. Find
// the matching in-flight frame by data pointer, recycle it, and wake
// reset() which may be waiting for all in-flight frames to come back.
// A buffer we never handed out is a fatal logic error.
void CameraSource::signalBufferReturned(MediaBufferBase *buffer) {
    ALOGV("signalBufferReturned: %p", buffer->data());
    Mutex::Autolock autoLock(mLock);
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        // Match on the underlying data pointer set up in read().
        if ((*it)->unsecurePointer() ==  buffer->data()) {
            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->release();
            mFrameCompleteCondition.signal();
            return;
        }
    }
    CHECK(!"signalBufferReturned: bogus buffer");
}
867
read(MediaBufferBase ** buffer,const ReadOptions * options)868 status_t CameraSource::read(
869 MediaBufferBase **buffer, const ReadOptions *options) {
870 ALOGV("read");
871
872 *buffer = NULL;
873
874 int64_t seekTimeUs;
875 ReadOptions::SeekMode mode;
876 if (options && options->getSeekTo(&seekTimeUs, &mode)) {
877 return ERROR_UNSUPPORTED;
878 }
879
880 sp<IMemory> frame;
881 int64_t frameTime;
882
883 {
884 Mutex::Autolock autoLock(mLock);
885 while (mStarted && !mEos && mFramesReceived.empty()) {
886 if (NO_ERROR !=
887 mFrameAvailableCondition.waitRelative(mLock,
888 mTimeBetweenFrameCaptureUs * 1000LL + CAMERA_SOURCE_TIMEOUT_NS)) {
889 if (mCameraRecordingProxy != 0 &&
890 !IInterface::asBinder(mCameraRecordingProxy)->isBinderAlive()) {
891 ALOGW("camera recording proxy is gone");
892 return ERROR_END_OF_STREAM;
893 }
894 ALOGW("Timed out waiting for incoming camera video frames: %" PRId64 " us",
895 mLastFrameTimestampUs);
896 }
897 }
898 if (!mStarted) {
899 return OK;
900 }
901 if (mFramesReceived.empty()) {
902 return ERROR_END_OF_STREAM;
903 }
904 frame = *mFramesReceived.begin();
905 mFramesReceived.erase(mFramesReceived.begin());
906
907 frameTime = *mFrameTimes.begin();
908 mFrameTimes.erase(mFrameTimes.begin());
909 mFramesBeingEncoded.push_back(frame);
910 // TODO: Using unsecurePointer() has some associated security pitfalls
911 // (see declaration for details).
912 // Either document why it is safe in this case or address the
913 // issue (e.g. by copying).
914 *buffer = new MediaBuffer(frame->unsecurePointer(), frame->size());
915 (*buffer)->setObserver(this);
916 (*buffer)->add_ref();
917 (*buffer)->meta_data().setInt64(kKeyTime, frameTime);
918 if (mBufferDataSpace != mEncoderDataSpace) {
919 ALOGD("Data space updated to %x", mBufferDataSpace);
920 (*buffer)->meta_data().setInt32(kKeyColorSpace, mBufferDataSpace);
921 mEncoderDataSpace = mBufferDataSpace;
922 }
923 }
924 return OK;
925 }
926
setStopTimeUs(int64_t stopTimeUs)927 status_t CameraSource::setStopTimeUs(int64_t stopTimeUs) {
928 Mutex::Autolock autoLock(mLock);
929 ALOGV("Set stoptime: %lld us", (long long)stopTimeUs);
930
931 if (stopTimeUs < -1) {
932 ALOGE("Invalid stop time %lld us", (long long)stopTimeUs);
933 return BAD_VALUE;
934 } else if (stopTimeUs == -1) {
935 ALOGI("reset stopTime to be -1");
936 }
937
938 mStopSystemTimeUs = stopTimeUs;
939 return OK;
940 }
941
// Decides, with mLock held, whether the frame timestamped timestampUs
// (system/capture time, us) should be dropped instead of being queued for
// encoding. When the frame is accepted this also updates bookkeeping:
// mLastFrameTimestampUs, glitch statistics, and — for the very first
// frame — mFirstFrameTimeUs plus the repurposing of mStartTimeUs into the
// initial delay.
bool CameraSource::shouldSkipFrameLocked(int64_t timestampUs) {
    // Drop frames once stopped, or frames captured before the requested
    // start time when nothing has been accepted yet.
    if (!mStarted || (mNumFramesReceived == 0 && timestampUs < mStartTimeUs)) {
        ALOGV("Drop frame at %lld/%lld us", (long long)timestampUs, (long long)mStartTimeUs);
        return true;
    }

    // Past the requested stop time: flag end-of-stream and wake read(),
    // which may be blocked waiting for a new frame.
    if (mStopSystemTimeUs != -1 && timestampUs >= mStopSystemTimeUs) {
        ALOGV("Drop Camera frame at %lld stop time: %lld us",
            (long long)timestampUs, (long long)mStopSystemTimeUs);
        mEos = true;
        mFrameAvailableCondition.signal();
        return true;
    }

    // May need to skip frame or modify timestamp. Currently implemented
    // by the subclass CameraSourceTimeLapse.
    if (skipCurrentFrame(timestampUs)) {
        return true;
    }

    if (mNumFramesReceived > 0) {
        // Timestamps must be strictly increasing; discard out-of-order frames.
        if (timestampUs <= mLastFrameTimestampUs) {
            ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
                (long long)timestampUs, (long long)mLastFrameTimestampUs);
            return true;
        }
        // An unusually long gap between consecutive frames is counted as a
        // capture glitch (frame is still accepted).
        if (timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
            ++mNumGlitches;
        }
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                return true;
            }
            // From here on, mStartTimeUs holds the initial delay: the gap
            // between the requested start time and the first frame.
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }

    return false;
}
989
BufferQueueListener(const sp<BufferItemConsumer> & consumer,const sp<CameraSource> & cameraSource)990 CameraSource::BufferQueueListener::BufferQueueListener(const sp<BufferItemConsumer>& consumer,
991 const sp<CameraSource>& cameraSource) {
992 mConsumer = consumer;
993 mConsumer->setFrameAvailableListener(this);
994 mCameraSource = cameraSource;
995 }
996
onFrameAvailable(const BufferItem &)997 void CameraSource::BufferQueueListener::onFrameAvailable(const BufferItem& /*item*/) {
998 ALOGV("%s: onFrameAvailable", __FUNCTION__);
999
1000 Mutex::Autolock l(mLock);
1001
1002 if (!mFrameAvailable) {
1003 mFrameAvailable = true;
1004 mFrameAvailableSignal.signal();
1005 }
1006 }
1007
threadLoop()1008 bool CameraSource::BufferQueueListener::threadLoop() {
1009 if (mConsumer == nullptr || mCameraSource == nullptr) {
1010 return false;
1011 }
1012
1013 {
1014 Mutex::Autolock l(mLock);
1015 while (!mFrameAvailable) {
1016 if (mFrameAvailableSignal.waitRelative(mLock, kFrameAvailableTimeout) == TIMED_OUT) {
1017 return true;
1018 }
1019 }
1020 mFrameAvailable = false;
1021 }
1022
1023 BufferItem buffer;
1024 while (mConsumer->acquireBuffer(&buffer, 0) == OK) {
1025 mCameraSource->processBufferQueueFrame(buffer);
1026 }
1027
1028 return true;
1029 }
1030
// Processes one buffer acquired from the BufferQueue: either drops it
// (releasing it straight back to the consumer) or wraps it as a
// VideoNativeMetadata descriptor in one of the preallocated mMemoryBases
// slots and queues it on mFramesReceived for read() to hand out.
void CameraSource::processBufferQueueFrame(BufferItem& buffer) {
    Mutex::Autolock autoLock(mLock);

    // BufferItem timestamps are in ns; the rest of the pipeline uses us.
    int64_t timestampUs = buffer.mTimestamp / 1000;
    if (shouldSkipFrameLocked(timestampUs)) {
        mVideoBufferConsumer->releaseBuffer(buffer);
        return;
    }

    // All metadata slots are currently in flight with the encoder: wait
    // for one to be returned, and drop the frame if none comes back before
    // the timeout.
    while (mMemoryBases.empty()) {
        if (mMemoryBaseAvailableCond.waitRelative(mLock, kMemoryBaseAvailableTimeoutNs) ==
                TIMED_OUT) {
            ALOGW("Waiting on an available memory base timed out. Dropping a recording frame.");
            mVideoBufferConsumer->releaseBuffer(buffer);
            return;
        }
    }

    ++mNumFramesReceived;

    // Find a available memory slot to store the buffer as VideoNativeMetadata.
    sp<IMemory> data = *mMemoryBases.begin();
    mMemoryBases.erase(mMemoryBases.begin());
    mBufferDataSpace = buffer.mDataSpace;

    // Fill the slot with an ANWBuffer metadata descriptor referencing the
    // graphic buffer — no pixel data is copied.
    ssize_t offset;
    size_t size;
    sp<IMemoryHeap> heap = data->getMemory(&offset, &size);
    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>(
        (uint8_t*)heap->getBase() + offset);
    memset(payload, 0, sizeof(VideoNativeMetadata));
    payload->eType = kMetadataBufferTypeANWBuffer;
    payload->pBuffer = buffer.mGraphicBuffer->getNativeBuffer();
    payload->nFenceFd = -1;

    // Add the mapping so we can find the corresponding buffer item to release to the buffer queue
    // when the encoder returns the native window buffer.
    mReceivedBufferItemMap.add(payload->pBuffer, buffer);

    mFramesReceived.push_back(data);
    // Rebase the capture timestamp so media time starts at the initial
    // delay (mStartTimeUs, as adjusted in shouldSkipFrameLocked()).
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    ALOGV("initial delay: %" PRId64 ", current time stamp: %" PRId64,
        mStartTimeUs, timeUs);
    // Wake read(), which may be blocked waiting for a frame.
    mFrameAvailableCondition.signal();
}
1077
// Reports that this source emits metadata buffers — VideoNativeMetadata
// payloads referencing ANativeWindowBuffer pointers, as filled in by
// processBufferQueueFrame() — rather than raw pixel data.
MetadataBufferType CameraSource::metaDataStoredInVideoBuffers() const {
    ALOGV("metaDataStoredInVideoBuffers");

    return kMetadataBufferTypeANWBuffer;
}
1083
// Binder death callback for the camera recording proxy's host process.
// Only logs the event here: read() independently detects the dead binder
// via isBinderAlive() and returns ERROR_END_OF_STREAM to its caller.
void CameraSource::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
    ALOGI("Camera recording proxy died");
}
1087
1088 } // namespace android
1089