/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <inttypes.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "CameraSourceTimeLapse"

#include <media/hardware/HardwareAPI.h>
#include <binder/IPCThreadState.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/CameraSource.h>
#include <media/stagefright/CameraSourceTimeLapse.h>
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <utils/String8.h>
#include <utils/Vector.h>

namespace android {

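// A rough usage sketch (caller-side variable names below are hypothetical; the
// actual setup lives in the recorder framework). This captures one frame per
// second and plays it back at 30 fps:
//
//   CameraSourceTimeLapse *source = CameraSourceTimeLapse::CreateFromCamera(
//           camera, proxy, cameraId, clientName, clientUid, clientPid,
//           videoSize, 30 /* videoFrameRate */, surface,
//           1000000LL /* timeBetweenFrameCaptureUs */);
//   if (source == NULL) {
//       // initialization or video size negotiation failed
//   }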
// static
CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
        const sp<hardware::ICamera> &camera,
        const sp<ICameraRecordingProxy> &proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs) {

    CameraSourceTimeLapse *source = new
            CameraSourceTimeLapse(camera, proxy, cameraId,
                clientName, clientUid, clientPid,
                videoSize, videoFrameRate, surface,
                timeBetweenFrameCaptureUs);

    if (source != NULL) {
        if (source->initCheck() != OK) {
            delete source;
            return NULL;
        }
    }
    return source;
}

CameraSourceTimeLapse::CameraSourceTimeLapse(
        const sp<hardware::ICamera>& camera,
        const sp<ICameraRecordingProxy>& proxy,
        int32_t cameraId,
        const String16& clientName,
        uid_t clientUid,
        pid_t clientPid,
        Size videoSize,
        int32_t videoFrameRate,
        const sp<IGraphicBufferProducer>& surface,
        int64_t timeBetweenFrameCaptureUs)
      : CameraSource(camera, proxy, cameraId, clientName, clientUid, clientPid,
                videoSize, videoFrameRate, surface),
      mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
      mLastTimeLapseFrameRealTimestampUs(0),
      mSkipCurrentFrame(false) {

    mTimeBetweenFrameCaptureUs = timeBetweenFrameCaptureUs;
    ALOGD("starting time lapse mode: %" PRId64 " us",
        mTimeBetweenFrameCaptureUs);

    mVideoWidth = videoSize.width;
    mVideoHeight = videoSize.height;

    if (OK == mInitCheck && !trySettingVideoSize(videoSize.width, videoSize.height)) {
        releaseCamera();
        mInitCheck = NO_INIT;
    }

    // Initialize quick stop variables.
    mQuickStop = false;
    mForceRead = false;
    mLastReadBufferCopy = NULL;
    mStopWaitingForIdleCamera = false;
}

CameraSourceTimeLapse::~CameraSourceTimeLapse() {
    if (mLastReadBufferCopy) {
        mLastReadBufferCopy->release();
        mLastReadBufferCopy = NULL;
    }
}

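// Quick-stop support: once the client asks for a quick stop, read() no longer
// waits for new time-lapse frames from the camera. Instead it keeps handing
// out references to a copy of the last frame it returned, and mForceRead
// ensures the very next camera frame is accepted (not skipped) so that copy
// exists.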
void CameraSourceTimeLapse::startQuickReadReturns() {
    ALOGV("startQuickReadReturns");
    Mutex::Autolock autoLock(mQuickStopLock);

    // Enable quick stop mode.
    mQuickStop = true;

    // Force dataCallbackTimestamp() coming from the video camera to
    // not skip the next frame as we want read() to get a frame
    // right away.
    mForceRead = true;
}

bool CameraSourceTimeLapse::trySettingVideoSize(
        int32_t width, int32_t height) {

    ALOGV("trySettingVideoSize");
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    String8 s = mCamera->getParameters();

    CameraParameters params(s);
    Vector<Size> supportedSizes;
    params.getSupportedVideoSizes(supportedSizes);
    bool videoOutputSupported = false;
    if (supportedSizes.size() == 0) {
        params.getSupportedPreviewSizes(supportedSizes);
    } else {
        videoOutputSupported = true;
    }

    bool videoSizeSupported = false;
    for (size_t i = 0; i < supportedSizes.size(); ++i) {
        int32_t pictureWidth = supportedSizes[i].width;
        int32_t pictureHeight = supportedSizes[i].height;

        if ((pictureWidth == width) && (pictureHeight == height)) {
            videoSizeSupported = true;
        }
    }

    bool isSuccessful = false;
    if (videoSizeSupported) {
        ALOGV("Video size (%d, %d) is supported", width, height);
        if (videoOutputSupported) {
            params.setVideoSize(width, height);
        } else {
            params.setPreviewSize(width, height);
        }
        if (mCamera->setParameters(params.flatten()) == OK) {
            isSuccessful = true;
        } else {
            ALOGE("Failed to set preview size to %dx%d", width, height);
            isSuccessful = false;
        }
    }

    IPCThreadState::self()->restoreCallingIdentity(token);
    return isSuccessful;
}

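// Buffers released by the downstream consumer come back through here. If
// quick-stop is active and this is the last-read copy handed out by read(),
// drop our reference to it and arm mForceRead; everything else goes through
// CameraSource's normal buffer recycling.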
void CameraSourceTimeLapse::signalBufferReturned(MediaBufferBase* buffer) {
    ALOGV("signalBufferReturned");
    Mutex::Autolock autoLock(mQuickStopLock);
    if (mQuickStop && (buffer == mLastReadBufferCopy)) {
        buffer->setObserver(NULL);
        buffer->release();
        mLastReadBufferCopy = NULL;
        mForceRead = true;
    } else {
        return CameraSource::signalBufferReturned(buffer);
    }
}

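// Allocates a new MediaBuffer of the same size as sourceBuffer, copies the
// payload, and stamps it with frameTime. The caller owns the returned buffer.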
void createMediaBufferCopy(
        const MediaBufferBase& sourceBuffer,
        int64_t frameTime,
        MediaBufferBase **newBuffer) {

    ALOGV("createMediaBufferCopy");
    size_t sourceSize = sourceBuffer.size();
    void* sourcePointer = sourceBuffer.data();

    (*newBuffer) = new MediaBuffer(sourceSize);
    memcpy((*newBuffer)->data(), sourcePointer, sourceSize);

    (*newBuffer)->meta_data().setInt64(kKeyTime, frameTime);
}

void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBufferBase& sourceBuffer) {
    ALOGV("fillLastReadBufferCopy");
    int64_t frameTime;
    CHECK(sourceBuffer.meta_data().findInt64(kKeyTime, &frameTime));
    createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
    mLastReadBufferCopy->add_ref();
    mLastReadBufferCopy->setObserver(this);
}

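// Once quick-stop has latched a copy of the last frame, read() hands out
// additional references to that same copy instead of blocking on the camera,
// so the encoder can drain and stop promptly.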
status_t CameraSourceTimeLapse::read(
        MediaBufferBase **buffer, const ReadOptions *options) {
    ALOGV("read");
    if (mLastReadBufferCopy == NULL) {
        mLastReadStatus = CameraSource::read(buffer, options);

        // mQuickStop may have turned to true while read was blocked.
        // Make a copy of the buffer in that case.
        Mutex::Autolock autoLock(mQuickStopLock);
        if (mQuickStop && *buffer) {
            fillLastReadBufferCopy(**buffer);
        }
        return mLastReadStatus;
    } else {
        (*buffer) = mLastReadBufferCopy;
        (*buffer)->add_ref();
        return mLastReadStatus;
    }
}

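// Returns (and then clears) the one-shot skip flag computed while processing
// the current frame; the base class consults this to decide whether to drop
// the frame.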
bool CameraSourceTimeLapse::skipCurrentFrame(int64_t /* timestampUs */) {
    ALOGV("skipCurrentFrame");
    if (mSkipCurrentFrame) {
        mSkipCurrentFrame = false;
        return true;
    } else {
        return false;
    }
}

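// Decides whether the frame that arrived at real time *timestampUs should be
// dropped. Frames arriving before mTimeBetweenFrameCaptureUs has elapsed since
// the last kept frame are skipped; a kept frame has its timestamp rewritten to
// one video-frame interval (mTimeBetweenTimeLapseVideoFramesUs) after the last
// encoded frame, which is what compresses wall-clock time into normal-rate
// playback.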
bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    ALOGV("skipFrameAndModifyTimeStamp");
    if (mLastTimeLapseFrameRealTimestampUs == 0) {
        // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
        // to current time (timestampUs) and save frame data.
        ALOGV("dataCallbackTimestamp timelapse: initial frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        return false;
    }

    {
        Mutex::Autolock autoLock(mQuickStopLock);

        // mForceRead may be set to true by startQuickReadReturns(). In that
        // case don't skip this frame.
        if (mForceRead) {
            ALOGV("dataCallbackTimestamp timelapse: forced read");
            mForceRead = false;
            *timestampUs =
                mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;

            // Really make sure that this video recording frame will not be dropped.
            if (*timestampUs < mStartTimeUs) {
                ALOGI("set timestampUs to start time stamp %" PRId64 " us", mStartTimeUs);
                *timestampUs = mStartTimeUs;
            }
            return false;
        }
    }

    // Workaround to bypass the first 2 input frames for skipping.
    // The first 2 output frames from the encoder are: decoder specific info and
    // the compressed video frame data for the first input video frame.
    if (mNumFramesEncoded >= 1 && *timestampUs <
        (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenFrameCaptureUs)) {
        // Skip all frames from last encoded frame until
        // sufficient time (mTimeBetweenFrameCaptureUs) has passed.
        // Tell the camera to release its recording frame and return.
        ALOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
        return true;
    } else {
        // Desired frame has arrived after mTimeBetweenFrameCaptureUs time:
        // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
        // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
        // of the last encoded frame's time stamp.
        ALOGV("dataCallbackTimestamp timelapse: got timelapse frame");

        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
        return false;
    }
    return false;
}

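// BufferItem timestamps are in nanoseconds; convert to microseconds for the
// skip/rewrite decision, write the (possibly rewritten) value back, then let
// CameraSource handle the frame normally.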
void CameraSourceTimeLapse::processBufferQueueFrame(BufferItem& buffer) {
    ALOGV("processBufferQueueFrame");
    int64_t timestampUs = buffer.mTimestamp / 1000;
    mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
    buffer.mTimestamp = timestampUs * 1000;
    CameraSource::processBufferQueueFrame(buffer);
}

}  // namespace android