1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "WebmFrameThread"
19
20 #include "WebmConstants.h"
21 #include "WebmFrameThread.h"
22
23 #include <media/stagefright/MetaData.h>
24 #include <media/stagefright/foundation/ADebug.h>
25
26 #include <utils/Log.h>
27 #include <inttypes.h>
28
29 using namespace webm;
30
31 namespace android {
32
wrap(void * arg)33 void *WebmFrameThread::wrap(void *arg) {
34 WebmFrameThread *worker = reinterpret_cast<WebmFrameThread*>(arg);
35 worker->run();
36 return NULL;
37 }
38
start()39 status_t WebmFrameThread::start() {
40 status_t err = OK;
41 pthread_attr_t attr;
42 pthread_attr_init(&attr);
43 pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
44 if ((err = pthread_create(&mThread, &attr, WebmFrameThread::wrap, this))) {
45 mThread = 0;
46 }
47 pthread_attr_destroy(&attr);
48 return err;
49 }
50
stop()51 status_t WebmFrameThread::stop() {
52 void *status = nullptr;
53 if (mThread) {
54 pthread_join(mThread, &status);
55 mThread = 0;
56 }
57 return (status_t)(intptr_t)status;
58 }
59
60 //=================================================================================================
61
// Constructs a frame-source thread that produces frames of the given
// type (audio or video) into the shared sink queue.
//
// type:
//   track type of the frames this source emits.
// sink:
//   blocking queue, shared with the sink thread, that receives frames.
WebmFrameSourceThread::WebmFrameSourceThread(
        int type,
        LinkedBlockingQueue<const sp<WebmFrame> >& sink)
    : mType(type), mSink(sink) {
}
67
68 //=================================================================================================
69
// Constructs the sink (muxer) thread directly from the two source
// threads, borrowing each one's frame queue.
//
// fd:
//   output file descriptor the webm clusters are written to.
// off:
//   byte offset where segment data starts; cue points are recorded
//   relative to this offset.
// cues:
//   list that collects the cue points generated while flushing.
WebmFrameSinkThread::WebmFrameSinkThread(
        const int& fd,
        const uint64_t& off,
        sp<WebmFrameSourceThread> videoThread,
        sp<WebmFrameSourceThread> audioThread,
        List<sp<WebmElement> >& cues)
    : mFd(fd),
      mSegmentDataStart(off),
      mVideoFrames(videoThread->mSink),
      mAudioFrames(audioThread->mSink),
      mCues(cues),
      mStartOffsetTimecode(UINT64_MAX),  // sentinel: first timecode not yet seen
      mDone(true) {  // not running until start() flips this
}
84
// Alternate constructor taking the audio/video frame queues directly
// instead of the source threads that own them.
//
// fd:
//   output file descriptor the webm clusters are written to.
// off:
//   byte offset where segment data starts; cue points are recorded
//   relative to this offset.
// cues:
//   list that collects the cue points generated while flushing.
WebmFrameSinkThread::WebmFrameSinkThread(
        const int& fd,
        const uint64_t& off,
        LinkedBlockingQueue<const sp<WebmFrame> >& videoSource,
        LinkedBlockingQueue<const sp<WebmFrame> >& audioSource,
        List<sp<WebmElement> >& cues)
    : mFd(fd),
      mSegmentDataStart(off),
      mVideoFrames(videoSource),
      mAudioFrames(audioSource),
      mCues(cues),
      mStartOffsetTimecode(UINT64_MAX),  // sentinel: first timecode not yet seen
      mDone(true) {  // not running until start() flips this
}
99
100 // Initializes a webm cluster with its starting timecode.
101 //
102 // frames:
103 // sequence of input audio/video frames received from the source.
104 //
105 // clusterTimecodeL:
106 // the starting timecode of the cluster; this is the timecode of the first
107 // frame since frames are ordered by timestamp.
108 //
109 // children:
110 // list to hold child elements in a webm cluster (start timecode and
111 // simple blocks).
112 //
113 // static
// static
void WebmFrameSinkThread::initCluster(
    List<const sp<WebmFrame> >& frames,
    uint64_t& clusterTimecodeL,
    List<sp<WebmElement> >& children) {
    // Callers hand in pending frames and an empty child list.
    CHECK(!frames.empty() && children.empty());

    // The cluster timecode is the absolute timecode of the first frame
    // (frames are kept in timestamp order).
    const sp<WebmFrame> f = *(frames.begin());
    clusterTimecodeL = f->mAbsTimecode;
    WebmUnsigned *clusterTimecode = new WebmUnsigned(kMkvTimecode, clusterTimecodeL);
    children.clear();
    children.push_back(clusterTimecode);
}
126
writeCluster(List<sp<WebmElement>> & children)127 void WebmFrameSinkThread::writeCluster(List<sp<WebmElement> >& children) {
128 // children must contain at least one simpleblock and its timecode
129 CHECK_GE(children.size(), 2u);
130
131 uint64_t size;
132 sp<WebmElement> cluster = new WebmMaster(kMkvCluster, children);
133 cluster->write(mFd, size);
134 children.clear();
135 }
136
137 // Write out (possibly multiple) webm cluster(s) from frames split on video key frames.
138 //
139 // last:
140 // current flush is triggered by EOS instead of a second outstanding video key frame.
void WebmFrameSinkThread::flushFrames(List<const sp<WebmFrame> >& frames, bool last) {
    if (frames.empty()) {
        return;
    }

    // Seed the first cluster from the head frame's timecode.
    uint64_t clusterTimecodeL;
    List<sp<WebmElement> > children;
    initCluster(frames, clusterTimecodeL, children);

    // The cue point references the file position of the first cluster
    // flushed here, tagged with the latest video key frame's timecode.
    uint64_t cueTime = clusterTimecodeL;
    off_t fpos = ::lseek(mFd, 0, SEEK_CUR);
    size_t n = frames.size();
    if (!last) {
        // If we are not flushing the last sequence of outstanding frames, flushFrames
        // must have been called right after we have pushed a second outstanding video key
        // frame (the last frame), which belongs to the next cluster; also hold back on
        // flushing the second to last frame before we check its type. A audio frame
        // should precede the aforementioned video key frame in the next sequence, a video
        // frame should be the last frame in the current (to-be-flushed) sequence.
        CHECK_GE(n, 2u);
        n -= 2;
    }

    for (size_t i = 0; i < n; i++) {
        const sp<WebmFrame> f = *(frames.begin());
        if (f->mType == kVideoType && f->mKey) {
            cueTime = f->mAbsTimecode;
        }

        // Simple-block timecodes are signed 16-bit offsets relative to the
        // cluster timecode, so start a fresh cluster once the delta would
        // overflow INT16_MAX.
        if (f->mAbsTimecode - clusterTimecodeL > INT16_MAX) {
            writeCluster(children);
            initCluster(frames, clusterTimecodeL, children);
        }

        frames.erase(frames.begin());
        children.push_back(f->SimpleBlock(clusterTimecodeL));
    }

    // equivalent to last==false
    if (!frames.empty()) {
        // decide whether to write out the second to last frame.
        const sp<WebmFrame> secondLastFrame = *(frames.begin());
        if (secondLastFrame->mType == kVideoType) {
            frames.erase(frames.begin());
            children.push_back(secondLastFrame->SimpleBlock(clusterTimecodeL));
        }
    }

    // Flush whatever remains and record one cue point (track 1) pointing
    // at the cluster's offset relative to the start of segment data.
    writeCluster(children);
    sp<WebmElement> cuePoint = WebmElement::CuePointEntry(cueTime, 1, fpos - mSegmentDataStart);
    mCues.push_back(cuePoint);
}
193
// Marks the sink as active and spawns the muxer thread (run()).
status_t WebmFrameSinkThread::start() {
    mDone = false;
    return WebmFrameThread::start();
}
198
// Unblocks run()'s blocking peek() calls by queueing an EOS marker on
// both frame queues, then joins the thread.
status_t WebmFrameSinkThread::stop() {
    mVideoFrames.push(WebmFrame::EOS);
    mAudioFrames.push(WebmFrame::EOS);
    return WebmFrameThread::stop();
}
204
// Sink thread main loop: merges the audio and video queues in timestamp
// order, rebases timecodes against the earliest frame seen, and flushes
// accumulated frames to disk each time a second video key frame arrives
// (so every flushed sequence starts on a key frame).
void WebmFrameSinkThread::run() {
    int numVideoKeyFrames = 0;
    List<const sp<WebmFrame> > outstandingFrames;
    while (!mDone) {
        // Peek (without consuming) the head of each queue; only the
        // earlier of the two heads is taken below.
        ALOGV("wait v frame");
        const sp<WebmFrame> videoFrame = mVideoFrames.peek();
        ALOGV("v frame: %p", videoFrame.get());

        ALOGV("wait a frame");
        const sp<WebmFrame> audioFrame = mAudioFrames.peek();
        ALOGV("a frame: %p", audioFrame.get());

        // Latch the earliest timecode on either track once, so the
        // output timeline starts at zero.
        if (mStartOffsetTimecode == UINT64_MAX) {
            mStartOffsetTimecode =
                std::min(audioFrame->getAbsTimecode(), videoFrame->getAbsTimecode());
        }

        // Both tracks exhausted: fall through to the final flush.
        if (videoFrame->mEos && audioFrame->mEos) {
            break;
        }

        if (*audioFrame < *videoFrame) {
            ALOGV("take a frame");
            mAudioFrames.take();
            audioFrame->updateAbsTimecode(audioFrame->getAbsTimecode() - mStartOffsetTimecode);
            outstandingFrames.push_back(audioFrame);
        } else {
            ALOGV("take v frame");
            mVideoFrames.take();
            videoFrame->updateAbsTimecode(videoFrame->getAbsTimecode() - mStartOffsetTimecode);
            outstandingFrames.push_back(videoFrame);
            if (videoFrame->mKey)
                numVideoKeyFrames++;
        }

        // A second key frame marks the start of the next cluster group;
        // flush everything before it (flushFrames holds that frame back).
        if (numVideoKeyFrames == 2) {
            flushFrames(outstandingFrames, /* last = */ false);
            numVideoKeyFrames--;
        }
    }
    ALOGV("flushing last cluster (size %zu)", outstandingFrames.size());
    flushFrames(outstandingFrames, /* last = */ true);
    mDone = true;
}
249
250 //=================================================================================================
251
252 static const int64_t kInitialDelayTimeUs = 700000LL;
253
clearFlags()254 void WebmFrameMediaSourceThread::clearFlags() {
255 mDone = false;
256 mPaused = false;
257 mResumed = false;
258 mStarted = false;
259 mReachedEOS = false;
260 }
261
// Constructs a source thread that pulls encoded buffers from a
// MediaSource and feeds them into the sink queue as WebmFrames.
//
// timeCodeScale:
//   container timecode scale used to convert microsecond timestamps.
// startTimeRealUs:
//   session start time; a start-up delay may be added below for
//   real-time multi-track recordings.
// startTimeOffsetMs:
//   client-requested start offset; negative means "not set".
WebmFrameMediaSourceThread::WebmFrameMediaSourceThread(
        const sp<MediaSource>& source,
        int type,
        LinkedBlockingQueue<const sp<WebmFrame> >& sink,
        uint64_t timeCodeScale,
        int64_t startTimeRealUs,
        int32_t startTimeOffsetMs,
        int numTracks,
        bool realTimeRecording)
    : WebmFrameSourceThread(type, sink),
      mSource(source),
      mTimeCodeScale(timeCodeScale),
      mTrackDurationUs(0) {
    clearFlags();
    mStartTimeUs = startTimeRealUs;
    if (realTimeRecording && numTracks > 1) {
        /*
         * Copied from MPEG4Writer
         *
         * This extra delay of accepting incoming audio/video signals
         * helps to align a/v start time at the beginning of a recording
         * session, and it also helps eliminate the "recording" sound for
         * camcorder applications.
         *
         * If client does not set the start time offset, we fall back to
         * use the default initial delay value.
         */
        int64_t startTimeOffsetUs = startTimeOffsetMs * 1000LL;
        if (startTimeOffsetUs < 0) { // Start time offset was not set
            startTimeOffsetUs = kInitialDelayTimeUs;
        }
        mStartTimeUs += startTimeOffsetUs;
        ALOGI("Start time offset: %" PRId64 " us", startTimeOffsetUs);
    }
}
297
start()298 status_t WebmFrameMediaSourceThread::start() {
299 sp<MetaData> meta = new MetaData;
300 meta->setInt64(kKeyTime, mStartTimeUs);
301 status_t err = mSource->start(meta.get());
302 if (err != OK) {
303 mDone = true;
304 mReachedEOS = true;
305 return err;
306 } else {
307 mStarted = true;
308 return WebmFrameThread::start();
309 }
310 }
311
resume()312 status_t WebmFrameMediaSourceThread::resume() {
313 if (!mDone && mPaused) {
314 mPaused = false;
315 mResumed = true;
316 }
317 return OK;
318 }
319
pause()320 status_t WebmFrameMediaSourceThread::pause() {
321 if (mStarted) {
322 mPaused = true;
323 mResumed = false;
324 }
325 return OK;
326 }
327
stop()328 status_t WebmFrameMediaSourceThread::stop() {
329 if (mStarted) {
330 mStarted = false;
331 mDone = true;
332 mSource->stop();
333 return WebmFrameThread::stop();
334 }
335 return OK;
336 }
337
run()338 void WebmFrameMediaSourceThread::run() {
339 int64_t timestampUs = 0xdeadbeef;
340 int64_t lastTimestampUs = 0; // Previous sample time stamp
341 int64_t lastDurationUs = 0; // Previous sample duration
342 int64_t previousPausedDurationUs = 0;
343
344 const uint64_t kUninitialized = 0xffffffffffffffffL;
345 mStartTimeUs = kUninitialized;
346
347 status_t err = OK;
348 MediaBufferBase *buffer;
349 while (!mDone && (err = mSource->read(&buffer, NULL)) == OK) {
350 if (buffer->range_length() == 0) {
351 buffer->release();
352 buffer = NULL;
353 continue;
354 }
355
356 MetaDataBase &md = buffer->meta_data();
357
358 if (mType == kVideoType) {
359 int32_t isCodecConfig = 0;
360 if (md.findInt32(kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig) {
361 ALOGI("ignoring CSD for video track");
362 buffer->release();
363 buffer = NULL;
364 continue;
365 }
366 }
367
368 CHECK(md.findInt64(kKeyTime, ×tampUs));
369 if (mStartTimeUs == kUninitialized) {
370 mStartTimeUs = timestampUs;
371 }
372
373 if (mPaused && !mResumed) {
374 lastDurationUs = timestampUs - lastTimestampUs;
375 lastTimestampUs = timestampUs;
376 buffer->release();
377 buffer = NULL;
378 continue;
379 }
380
381 // adjust time-stamps after pause/resume
382 if (mResumed) {
383 int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
384 CHECK_GE(durExcludingEarlierPausesUs, 0LL);
385 int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
386 CHECK_GE(pausedDurationUs, lastDurationUs);
387 previousPausedDurationUs += pausedDurationUs - lastDurationUs;
388 mResumed = false;
389 }
390 timestampUs -= previousPausedDurationUs;
391 CHECK_GE(timestampUs, 0LL);
392
393 int32_t isSync = false;
394 md.findInt32(kKeyIsSyncFrame, &isSync);
395 const sp<WebmFrame> f = new WebmFrame(
396 mType,
397 isSync,
398 timestampUs * 1000 / mTimeCodeScale,
399 buffer);
400 mSink.push(f);
401
402 ALOGV(
403 "%s %s frame at %" PRId64 " size %zu\n",
404 mType == kVideoType ? "video" : "audio",
405 isSync ? "I" : "P",
406 timestampUs * 1000 / mTimeCodeScale,
407 buffer->range_length());
408
409 buffer->release();
410 buffer = NULL;
411
412 if (timestampUs > mTrackDurationUs) {
413 mTrackDurationUs = timestampUs;
414 }
415 lastDurationUs = timestampUs - lastTimestampUs;
416 lastTimestampUs = timestampUs;
417 }
418
419 mTrackDurationUs += lastDurationUs;
420 mSink.push(WebmFrame::EOS);
421 }
422 }
423