/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "RTPSource"
#include <utils/Log.h>

#include "RTPSource.h"




#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <string.h>

namespace android {

const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
static int32_t kMaxAllowedStaleAccessUnits = 20;

NuPlayer::RTPSource::RTPSource(
        const sp<AMessage> &notify,
        const String8& rtpParams)
    : Source(notify),
      mRTPParams(rtpParams),
      mFlags(0),
      mState(DISCONNECTED),
      mFinalResult(OK),
      mBuffering(false),
      mInPreparationPhase(true),
      mRTPConn(new ARTPConnection(ARTPConnection::kViLTEConnection)),
      mEOSTimeoutAudio(0),
      mEOSTimeoutVideo(0),
      mFirstAccessUnit(true),
      mAllTracksHaveTime(false),
      mNTPAnchorUs(-1),
      mMediaAnchorUs(-1),
      mLastMediaTimeUs(-1),
      mNumAccessUnitsReceived(0),
      mLastCVOUpdated(-1),
      mReceivedFirstRTCPPacket(false),
      mReceivedFirstRTPPacket(false),
      mPausing(false),
      mPauseGeneration(0) {
    ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.c_str());
}

NuPlayer::RTPSource::~RTPSource() {
    if (mLooper != NULL) {
        mLooper->unregisterHandler(id());
        mLooper->unregisterHandler(mRTPConn->id());
        mLooper->stop();
    }
}

status_t NuPlayer::RTPSource::getBufferingSettings(
        BufferingSettings* buffering /* nonnull */) {
    Mutex::Autolock _l(mBufferingSettingsLock);
    *buffering = mBufferingSettings;
    return OK;
}

status_t NuPlayer::RTPSource::setBufferingSettings(const BufferingSettings& buffering) {
    Mutex::Autolock _l(mBufferingSettingsLock);
    mBufferingSettings = buffering;
    return OK;
}

void NuPlayer::RTPSource::prepareAsync() {
    if (mLooper == NULL) {
        mLooper = new ALooper;
        mLooper->setName("rtp");
        mLooper->start();

        mLooper->registerHandler(this);
        mLooper->registerHandler(mRTPConn);
    }

    CHECK_EQ(mState, (int)DISCONNECTED);
    mState = CONNECTING;

    setParameters(mRTPParams);

    TrackInfo *info = NULL;
    unsigned i;
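    // For each track parsed out of rtpParams, synthesize a local SDP, create an
    // RTP/RTCP socket pair, register the stream with ARTPConnection, and set up
    // the packet source that dequeueAccessUnit() will read from.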
    for (i = 0; i < mTracks.size(); i++) {
        info = &mTracks.editItemAt(i);

        if (info == NULL)
            break;

        AString sdp;
        ASessionDescription::SDPStringFactory(sdp, info->mLocalIp,
                info->mIsAudio, info->mLocalPort, info->mPayloadType, info->mAS, info->mCodecName,
                NULL, info->mWidth, info->mHeight, info->mCVOExtMap);
        ALOGD("RTPSource SDP =>\n%s", sdp.c_str());

        sp<ASessionDescription> desc = new ASessionDescription;
        bool isValidSdp = desc->setTo(sdp.c_str(), sdp.size());
        ALOGV("RTPSource isValidSdp => %d", isValidSdp);

        int sockRtp, sockRtcp;
        ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
                info->mLocalPort, info->mRemotePort, info->mSocketNetwork, info->mRtpSockOptEcn);

        sp<AMessage> notify = new AMessage('accu', this);

        ALOGV("RTPSource addStream. track-index=%d", i);
        notify->setSize("trackIndex", i);
        // Stream index starts from 1; 0 is reserved for [root].
        mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
        mRTPConn->setSelfID(info->mSelfID);
        mRTPConn->setStaticJitterTimeMs(info->mJbTimeMs);
        mRTPConn->setRtpSockOptEcn(info->mRtpSockOptEcn);
        mRTPConn->setIsIPv6(info->mLocalIp);

        unsigned long PT;
        AString formatDesc, formatParams;
        // Stream index starts from 1; 0 is reserved for [root].
        desc->getFormatType(i + 1, &PT, &formatDesc, &formatParams);

        int32_t clockRate, numChannels;
        ASessionDescription::ParseFormatDesc(formatDesc.c_str(), &clockRate, &numChannels);
        info->mTimeScale = clockRate;

        info->mRTPSocket = sockRtp;
        info->mRTCPSocket = sockRtcp;
        info->mFirstSeqNumInSegment = 0;
        info->mNewSegment = true;
        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
        info->mRTPAnchor = 0;
        info->mNTPAnchorUs = -1;
        info->mNormalPlayTimeRTP = 0;
        info->mNormalPlayTimeUs = 0ll;

        // Stream index starts from 1; 0 is reserved for [root].
        info->mPacketSource = new APacketSource(desc, i + 1);

        int32_t timeScale;
        sp<MetaData> format = getTrackFormat(i, &timeScale);
        sp<AnotherPacketSource> source = new AnotherPacketSource(format);

        if (info->mIsAudio) {
            mAudioTrack = source;
        } else {
            mVideoTrack = source;
        }

        info->mSource = source;
        info->mRTPTime = 0;
        info->mNormalPlaytimeUs = 0;
        info->mNPTMappingValid = false;
    }

    if (mInPreparationPhase) {
        mInPreparationPhase = false;
        notifyPrepared();
    }
}

void NuPlayer::RTPSource::start() {
}

void NuPlayer::RTPSource::pause() {
    mState = PAUSED;
}

void NuPlayer::RTPSource::resume() {
    mState = CONNECTING;
}

void NuPlayer::RTPSource::stop() {
    if (mLooper == NULL) {
        return;
    }
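    // Tear down on the looper thread: post kWhatDisconnect and block until the
    // handler has replied, so tracks and sockets are released before returning.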
    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);

    sp<AMessage> dummy;
    msg->postAndAwaitResponse(&dummy);
}

status_t NuPlayer::RTPSource::feedMoreTSData() {
    Mutex::Autolock _l(mBufferingLock);
    return mFinalResult;
}

sp<MetaData> NuPlayer::RTPSource::getFormatMeta(bool audio) {
    sp<AnotherPacketSource> source = getSource(audio);

    if (source == NULL) {
        return NULL;
    }

    return source->getFormat();
}

bool NuPlayer::RTPSource::haveSufficientDataOnAllTracks() {
    // We're going to buffer at least 2 secs worth of data on all tracks before
    // starting playback (both at startup and after a seek).

    static const int64_t kMinDurationUs = 2000000ll;

    int64_t mediaDurationUs = 0;
    getDuration(&mediaDurationUs);
    if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs))
            || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) {
        return true;
    }

    status_t err;
    int64_t durationUs;
    if (mAudioTrack != NULL
            && (durationUs = mAudioTrack->getBufferedDurationUs(&err))
                    < kMinDurationUs
            && err == OK) {
        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
                durationUs / 1E6);
        return false;
    }

    if (mVideoTrack != NULL
            && (durationUs = mVideoTrack->getBufferedDurationUs(&err))
                    < kMinDurationUs
            && err == OK) {
        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
                durationUs / 1E6);
        return false;
    }

    return true;
}

status_t NuPlayer::RTPSource::dequeueAccessUnit(
        bool audio, sp<ABuffer> *accessUnit) {

    sp<AnotherPacketSource> source = getSource(audio);

    if (mState == PAUSED) {
        ALOGV("-EWOULDBLOCK");
        return -EWOULDBLOCK;
    }

    status_t finalResult;
    if (!source->hasBufferAvailable(&finalResult)) {
        if (finalResult == OK) {
            int64_t mediaDurationUs = 0;
            getDuration(&mediaDurationUs);
            sp<AnotherPacketSource> otherSource = getSource(!audio);
            status_t otherFinalResult;

            // If the other source already signaled EOS, this source should also signal EOS.
            if (otherSource != NULL &&
                    !otherSource->hasBufferAvailable(&otherFinalResult) &&
                    otherFinalResult == ERROR_END_OF_STREAM) {
                source->signalEOS(ERROR_END_OF_STREAM);
                return ERROR_END_OF_STREAM;
            }

            // If this source has detected near end, give it some time to retrieve more
            // data before signaling EOS.
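            // The grace period is kNearEOSTimeoutUs (2 s): the first empty dequeue
            // records a timestamp, and EOS is signaled only if no data has arrived
            // once that timeout has elapsed.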
            if (source->isFinished(mediaDurationUs)) {
                int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
                if (eosTimeout == 0) {
                    setEOSTimeout(audio, ALooper::GetNowUs());
                } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
                    setEOSTimeout(audio, 0);
                    source->signalEOS(ERROR_END_OF_STREAM);
                    return ERROR_END_OF_STREAM;
                }
                return -EWOULDBLOCK;
            }

            if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
                // We should not enter buffering mode
                // if any of the sources already have detected EOS.
                // TODO: check whether the line below is needed or not.
                // startBufferingIfNecessary();
            }

            return -EWOULDBLOCK;
        }
        return finalResult;
    }

    setEOSTimeout(audio, 0);

    finalResult = source->dequeueAccessUnit(accessUnit);
    if (finalResult != OK) {
        return finalResult;
    }

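    // If the access unit carries a CVO (video orientation) value that differs from
    // the last one reported, forward it to the player via a kWhatIMSRxNotice notice.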
    int32_t cvo;
    if ((*accessUnit) != NULL && (*accessUnit)->meta()->findInt32("cvo", &cvo) &&
            cvo != mLastCVOUpdated) {
        sp<AMessage> msg = new AMessage();
        msg->setInt32("payload-type", ARTPSource::RTP_CVO);
        msg->setInt32("cvo", cvo);

        sp<AMessage> notify = dupNotify();
        notify->setInt32("what", kWhatIMSRxNotice);
        notify->setMessage("message", msg);
        notify->post();

        ALOGV("notify cvo updated (%d)->(%d) to upper layer", mLastCVOUpdated, cvo);
        mLastCVOUpdated = cvo;
    }

    return finalResult;
}

sp<AnotherPacketSource> NuPlayer::RTPSource::getSource(bool audio) {
    return audio ? mAudioTrack : mVideoTrack;
}

void NuPlayer::RTPSource::setEOSTimeout(bool audio, int64_t timeout) {
    if (audio) {
        mEOSTimeoutAudio = timeout;
    } else {
        mEOSTimeoutVideo = timeout;
    }
}

status_t NuPlayer::RTPSource::getDuration(int64_t *durationUs) {
    *durationUs = 0ll;

    int64_t audioDurationUs;
    if (mAudioTrack != NULL && mAudioTrack->getFormat() != NULL
            && mAudioTrack->getFormat()->findInt64(
                kKeyDuration, &audioDurationUs)
            && audioDurationUs > *durationUs) {
        *durationUs = audioDurationUs;
    }

    int64_t videoDurationUs;
    if (mVideoTrack != NULL && mVideoTrack->getFormat() != NULL
            && mVideoTrack->getFormat()->findInt64(
                kKeyDuration, &videoDurationUs)
            && videoDurationUs > *durationUs) {
        *durationUs = videoDurationUs;
    }

    return OK;
}

status_t NuPlayer::RTPSource::seekTo(int64_t seekTimeUs, MediaPlayerSeekMode mode) {
    ALOGV("RTPSource::seekTo=%d, mode=%d", (int)seekTimeUs, mode);
    return OK;
}

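// Buffering polling is effectively a no-op for this source: onPollBuffering()
// merely re-posts kWhatPollBuffering, which the message handler ignores.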
void NuPlayer::RTPSource::schedulePollBuffering() {
    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
    msg->post(kBufferingPollIntervalUs); // 1 second intervals
}

void NuPlayer::RTPSource::onPollBuffering() {
    schedulePollBuffering();
}

bool NuPlayer::RTPSource::isRealTime() const {
    ALOGD("RTPSource::isRealTime=%d", true);
    return true;
}

void NuPlayer::RTPSource::onMessageReceived(const sp<AMessage> &msg) {
    ALOGV("onMessageReceived =%d", msg->what());

    switch (msg->what()) {
        case kWhatAccessUnitComplete:
        {
            if (mState == CONNECTING) {
                mState = CONNECTED;
            }

            int32_t timeUpdate;
            // "time-update" is raised from ARTPConnection::parseSR()
            if (msg->findInt32("time-update", &timeUpdate) && timeUpdate) {
                size_t trackIndex;
                CHECK(msg->findSize("trackIndex", &trackIndex));

                uint32_t rtpTime;
                uint64_t ntpTime;
                CHECK(msg->findInt32("rtp-time", (int32_t *)&rtpTime));
                CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));

                onTimeUpdate(trackIndex, rtpTime, ntpTime);
            }

            int32_t IMSRxNotice;
            if (msg->findInt32("rtcp-event", &IMSRxNotice)) {
                int32_t payloadType = 0, feedbackType = 0;
                CHECK(msg->findInt32("payload-type", &payloadType));
                msg->findInt32("feedback-type", &feedbackType);

                sp<AMessage> notify = dupNotify();
                notify->setInt32("what", kWhatIMSRxNotice);
                notify->setMessage("message", msg);
                notify->post();

                ALOGV("IMSRxNotice \t\t payload : %d feedback : %d",
                        payloadType, feedbackType);
                break;
            }

            size_t trackIndex;
            CHECK(msg->findSize("trackIndex", &trackIndex));

            sp<ABuffer> accessUnit;
            if (msg->findBuffer("access-unit", &accessUnit) == false) {
                break;
            }

            int32_t damaged;
            if (accessUnit->meta()->findInt32("damaged", &damaged)
                    && damaged) {
                ALOGD("dropping damaged access unit.");
                break;
            }

            // Implicitly assert on valid trackIndex here, which we ensure by
            // never removing tracks.
            TrackInfo *info = &mTracks.editItemAt(trackIndex);

            sp<AnotherPacketSource> source = info->mSource;
            if (source != NULL) {
                uint32_t rtpTime;
                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));

                /* AnotherPacketSource asserts if no ntp time is provided;
                   RTPSource should always provide ntpUs, so the NPT mapping
                   below remains disabled.
                if (!info->mNPTMappingValid) {
                    // This is a live stream, we didn't receive any normal
                    // playtime mapping. We won't map to npt time.
                    source->queueAccessUnit(accessUnit);
                    break;
                }

                int64_t nptUs =
                    ((double)rtpTime - (double)info->mRTPTime)
                        / info->mTimeScale
                        * 1000000ll
                        + info->mNormalPlaytimeUs;

                */
                accessUnit->meta()->setInt64("timeUs", ALooper::GetNowUs());

                source->queueAccessUnit(accessUnit);
            }

            break;
        }
        case kWhatDisconnect:
        {
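            // Tear down every track: signal EOS on its packet source, unregister the
            // stream from ARTPConnection, close its sockets, then reset session state.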
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            for (size_t i = 0; i < mTracks.size(); ++i) {
                TrackInfo *info = &mTracks.editItemAt(i);

                if (info->mIsAudio) {
                    mAudioTrack->signalEOS(ERROR_END_OF_STREAM);
                    mAudioTrack = NULL;
                    ALOGV("mAudioTrack disconnected");
                } else {
                    mVideoTrack->signalEOS(ERROR_END_OF_STREAM);
                    mVideoTrack = NULL;
                    ALOGV("mVideoTrack disconnected");
                }

                mRTPConn->removeStream(info->mRTPSocket, info->mRTCPSocket);
                close(info->mRTPSocket);
                close(info->mRTCPSocket);
            }

            mTracks.clear();
            mFirstAccessUnit = true;
            mAllTracksHaveTime = false;
            mNTPAnchorUs = -1;
            mMediaAnchorUs = -1;
            mLastMediaTimeUs = -1;
            mNumAccessUnitsReceived = 0;
            mReceivedFirstRTCPPacket = false;
            mReceivedFirstRTPPacket = false;
            mPausing = false;
            mPauseGeneration = 0;

            (new AMessage)->postReply(replyID);

            break;
        }
        case kWhatPollBuffering:
            break;
        default:
            TRESPASS();
    }
}

void NuPlayer::RTPSource::setTargetBitrate(int32_t bitrate) {
    mRTPConn->setTargetBitrate(bitrate);
}

void NuPlayer::RTPSource::onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
    ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = %#016llx",
            trackIndex, rtpTime, (long long)ntpTime);
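    // An RTCP sender report anchors this track's RTP timestamp to the sender's
    // NTP wall clock. Once every track has an anchor (and has received data),
    // queued access units can be stamped with media times and released below.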

    // Convert ntpTime in Q32 seconds to microseconds. Note: this will not lose precision
    // because ntpTimeUs is at most 52 bits (double holds 53 bits)
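    // e.g. ntpTime = 0x0000000180000000 (1.5 s in Q32 fixed point) maps to 1500000 us.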
    int64_t ntpTimeUs = (int64_t)(ntpTime * 1E6 / (1ll << 32));

    TrackInfo *track = &mTracks.editItemAt(trackIndex);

    track->mRTPAnchor = rtpTime;
    track->mNTPAnchorUs = ntpTimeUs;

    if (mNTPAnchorUs < 0) {
        mNTPAnchorUs = ntpTimeUs;
        mMediaAnchorUs = mLastMediaTimeUs;
    }

    if (!mAllTracksHaveTime) {
        bool allTracksHaveTime = (mTracks.size() > 0);
        for (size_t i = 0; i < mTracks.size(); ++i) {
            TrackInfo *track = &mTracks.editItemAt(i);
            if (track->mNTPAnchorUs < 0) {
                allTracksHaveTime = false;
                break;
            }
        }
        if (allTracksHaveTime) {
            mAllTracksHaveTime = true;
            ALOGI("Time now established for all tracks.");
        }
    }
    if (mAllTracksHaveTime && dataReceivedOnAllChannels()) {
        // Time is now established, let's start timestamping immediately.
        for (size_t i = 0; i < mTracks.size(); ++i) {
            TrackInfo *trackInfo = &mTracks.editItemAt(i);
            while (!trackInfo->mPackets.empty()) {
                sp<ABuffer> accessUnit = *trackInfo->mPackets.begin();
                trackInfo->mPackets.erase(trackInfo->mPackets.begin());

                if (addMediaTimestamp(i, trackInfo, accessUnit)) {
                    postQueueAccessUnit(i, accessUnit);
                }
            }
        }
    }
}

bool NuPlayer::RTPSource::addMediaTimestamp(
        int32_t trackIndex, const TrackInfo *track,
        const sp<ABuffer> &accessUnit) {

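    // Map the unit's RTP timestamp to a media time: the offset from the track's
    // RTP anchor (in RTP ticks) is scaled by the track's clock rate to microseconds,
    // then shifted from the NTP time base onto the session's media time base.
    // E.g. with a 90 kHz video clock, an offset of 90000 ticks is 1000000 us.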
    uint32_t rtpTime;
    CHECK(accessUnit->meta()->findInt32(
            "rtp-time", (int32_t *)&rtpTime));

    int64_t relRtpTimeUs =
        (((int64_t)rtpTime - (int64_t)track->mRTPAnchor) * 1000000ll)
            / track->mTimeScale;

    int64_t ntpTimeUs = track->mNTPAnchorUs + relRtpTimeUs;

    int64_t mediaTimeUs = mMediaAnchorUs + ntpTimeUs - mNTPAnchorUs;

    if (mediaTimeUs > mLastMediaTimeUs) {
        mLastMediaTimeUs = mediaTimeUs;
    }

    if (mediaTimeUs < 0) {
        ALOGV("dropping early accessUnit.");
        return false;
    }

    ALOGV("track %d rtpTime=%u mediaTimeUs = %lld us (%.2f secs)",
            trackIndex, rtpTime, (long long)mediaTimeUs, mediaTimeUs / 1E6);

    accessUnit->meta()->setInt64("timeUs", mediaTimeUs);

    return true;
}

bool NuPlayer::RTPSource::dataReceivedOnAllChannels() {
    TrackInfo *track;
    for (size_t i = 0; i < mTracks.size(); ++i) {
        track = &mTracks.editItemAt(i);
        if (track->mPackets.empty()) {
            return false;
        }
    }
    return true;
}

void NuPlayer::RTPSource::postQueueAccessUnit(
        size_t trackIndex, const sp<ABuffer> &accessUnit) {
    sp<AMessage> msg = new AMessage(kWhatAccessUnit, this);
    msg->setInt32("what", kWhatAccessUnit);
    msg->setSize("trackIndex", trackIndex);
    msg->setBuffer("accessUnit", accessUnit);
    msg->post();
}

void NuPlayer::RTPSource::postQueueEOS(size_t trackIndex, status_t finalResult) {
    sp<AMessage> msg = new AMessage(kWhatEOS, this);
    msg->setInt32("what", kWhatEOS);
    msg->setSize("trackIndex", trackIndex);
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

sp<MetaData> NuPlayer::RTPSource::getTrackFormat(size_t index, int32_t *timeScale) {
    CHECK_GE(index, 0u);
    CHECK_LT(index, mTracks.size());

    const TrackInfo &info = mTracks.itemAt(index);

    *timeScale = info.mTimeScale;

    return info.mPacketSource->getFormat();
}

void NuPlayer::RTPSource::onConnected() {
    ALOGV("onConnected");
    mState = CONNECTED;
}

void NuPlayer::RTPSource::onDisconnected(const sp<AMessage> &msg) {
    if (mState == DISCONNECTED) {
        return;
    }

    status_t err;
    CHECK(msg->findInt32("result", &err));
    CHECK_NE(err, (status_t)OK);

    // mLooper->unregisterHandler(mHandler->id());
    // mHandler.clear();

    if (mState == CONNECTING) {
        // We're still in the preparation phase, signal that it
        // failed.
        notifyPrepared(err);
    }

    mState = DISCONNECTED;
    // setError(err);

}

status_t NuPlayer::RTPSource::setParameter(const String8 &key, const String8 &value) {
    ALOGV("setParameter: key (%s) => value (%s)", key.c_str(), value.c_str());

    bool isAudioKey = key.contains("audio");
    TrackInfo *info = NULL;
    for (unsigned i = 0; i < mTracks.size(); ++i) {
        info = &mTracks.editItemAt(i);
        if (info != NULL && info->mIsAudio == isAudioKey) {
            ALOGV("setParameter: %s track (%d) found", isAudioKey ? "audio" : "video", i);
            break;
        }
    }

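    // Reuse the existing track of the matching media type (audio vs. video);
    // otherwise create a new one with the default static jitter-buffer time.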
    if (info == NULL) {
        TrackInfo newTrackInfo;
        newTrackInfo.mIsAudio = isAudioKey;
        mTracks.push(newTrackInfo);
        info = &mTracks.editTop();
        info->mJbTimeMs = kStaticJitterTimeMs;
    }

    if (key == "rtp-param-mime-type") {
        info->mMimeType = value;

        const char *mime = value.c_str();
        const char *delimiter = strchr(mime, '/');
        info->mCodecName = delimiter ? (delimiter + 1) : "<none>";

        ALOGV("rtp-param-mime-type: mMimeType (%s) => mCodecName (%s)",
                info->mMimeType.c_str(), info->mCodecName.c_str());
    } else if (key == "video-param-decoder-profile") {
        info->mCodecProfile = atoi(value);
    } else if (key == "video-param-decoder-level") {
        info->mCodecLevel = atoi(value);
    } else if (key == "video-param-width") {
        info->mWidth = atoi(value);
    } else if (key == "video-param-height") {
        info->mHeight = atoi(value);
    } else if (key == "rtp-param-local-ip") {
        info->mLocalIp = value;
    } else if (key == "rtp-param-local-port") {
        info->mLocalPort = atoi(value);
    } else if (key == "rtp-param-remote-ip") {
        info->mRemoteIp = value;
    } else if (key == "rtp-param-remote-port") {
        info->mRemotePort = atoi(value);
    } else if (key == "rtp-param-payload-type") {
        info->mPayloadType = atoi(value);
    } else if (key == "rtp-param-as") {
        // AS is the guaranteed bitrate negotiated from the SDP.
        info->mAS = atoi(value);
    } else if (key == "rtp-param-rtp-timeout") {
    } else if (key == "rtp-param-rtcp-timeout") {
    } else if (key == "rtp-param-time-scale") {
    } else if (key == "rtp-param-self-id") {
        info->mSelfID = atoi(value);
    } else if (key == "rtp-param-ext-cvo-extmap") {
        info->mCVOExtMap = atoi(value);
    } else if (key == "rtp-param-set-socket-network") {
        int64_t networkHandle = atoll(value);
        setSocketNetwork(networkHandle);
    } else if (key == "rtp-param-set-socket-ecn") {
        info->mRtpSockOptEcn = atoi(value);
    } else if (key == "rtp-param-jitter-buffer-time") {
        // Clamp to [40, 3000] ms.
        info->mJbTimeMs = std::min(std::max(40, atoi(value)), 3000);
    }

    return OK;
}

status_t NuPlayer::RTPSource::setParameters(const String8 &params) {
    ALOGV("setParameters: %s", params.c_str());
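    // params is a semicolon-separated list of key=value pairs handled by
    // setParameter(), e.g. (hypothetical values)
    // "rtp-param-mime-type=video/avc;rtp-param-local-port=50000;rtp-param-remote-ip=10.0.0.2".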
    const char *cparams = params.c_str();
    const char *key_start = cparams;
    for (;;) {
        const char *equal_pos = strchr(key_start, '=');
        if (equal_pos == NULL) {
            ALOGE("Parameters %s miss a value", cparams);
            return BAD_VALUE;
        }
        String8 key(key_start, equal_pos - key_start);
        TrimString(&key);
        if (key.length() == 0) {
            ALOGE("Parameters %s contains an empty key", cparams);
            return BAD_VALUE;
        }
        const char *value_start = equal_pos + 1;
        const char *semicolon_pos = strchr(value_start, ';');
        String8 value;
        if (semicolon_pos == NULL) {
            value = value_start;
        } else {
            value = String8(value_start, semicolon_pos - value_start);
        }
        if (setParameter(key, value) != OK) {
            return BAD_VALUE;
        }
        if (semicolon_pos == NULL) {
            break; // Reaches the end
        }
        key_start = semicolon_pos + 1;
    }
    return OK;
}

void NuPlayer::RTPSource::setSocketNetwork(int64_t networkHandle) {
    ALOGV("setSocketNetwork: %llu", (unsigned long long)networkHandle);

    TrackInfo *info = NULL;
    for (size_t i = 0; i < mTracks.size(); ++i) {
        info = &mTracks.editItemAt(i);

        if (info == NULL)
            break;

        info->mSocketNetwork = networkHandle;
    }
}

// Trim both leading and trailing whitespace from the given string.
// static
void NuPlayer::RTPSource::TrimString(String8 *s) {
    size_t num_bytes = s->bytes();
    const char *data = s->c_str();

    size_t leading_space = 0;
    while (leading_space < num_bytes && isspace(data[leading_space])) {
        ++leading_space;
    }

    size_t i = num_bytes;
    while (i > leading_space && isspace(data[i - 1])) {
        --i;
    }

    *s = String8(&data[leading_space], i - leading_space);
}

} // namespace android