/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "RTPSource"
#include <utils/Log.h>

#include "RTPSource.h"

#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <string.h>

namespace android {

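// Grace period a near-finished track is given to produce more data before we
// signal EOS, and the number of stale (pre-segment) access units each track
// tolerates. The stale-unit budget is only handed to the per-track state here;
// the actual counting is assumed to happen downstream in the RTP stack.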
const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
static int32_t kMaxAllowedStaleAccessUnits = 20;

NuPlayer::RTPSource::RTPSource(
        const sp<AMessage> &notify,
        const String8& rtpParams)
    : Source(notify),
      mRTPParams(rtpParams),
      mFlags(0),
      mState(DISCONNECTED),
      mFinalResult(OK),
      mBuffering(false),
      mInPreparationPhase(true),
      mRTPConn(new ARTPConnection),
      mEOSTimeoutAudio(0),
      mEOSTimeoutVideo(0) {
    ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.string());
}

NuPlayer::RTPSource::~RTPSource() {
    if (mLooper != NULL) {
        mLooper->unregisterHandler(id());
        mLooper->unregisterHandler(mRTPConn->id());
        mLooper->stop();
    }
}

status_t NuPlayer::RTPSource::getBufferingSettings(
            BufferingSettings* buffering /* nonnull */) {
    Mutex::Autolock _l(mBufferingSettingsLock);
    *buffering = mBufferingSettings;
    return OK;
}

status_t NuPlayer::RTPSource::setBufferingSettings(const BufferingSettings& buffering) {
    Mutex::Autolock _l(mBufferingSettingsLock);
    mBufferingSettings = buffering;
    return OK;
}

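// Parses mRTPParams into per-track TrackInfo entries, then builds an SDP
// description, an RTP/RTCP socket pair and an ARTPConnection stream for each
// track before reporting the source as prepared.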
void NuPlayer::RTPSource::prepareAsync() {
    if (mLooper == NULL) {
        mLooper = new ALooper;
        mLooper->setName("rtp");
        mLooper->start();

        mLooper->registerHandler(this);
        mLooper->registerHandler(mRTPConn);
    }

    setParameters(mRTPParams);

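    // For every track parsed from the RTP parameters:
    //  1. Generate a minimal SDP block from the negotiated values and parse it
    //     back into an ASessionDescription.
    //  2. Create the local RTP/RTCP socket pair and register the stream with
    //     ARTPConnection ('accu' messages deliver access units back to this
    //     handler).
    //  3. Reset the per-track timing/segment state and create the packet
    //     sources that NuPlayer dequeues from.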
    TrackInfo *info = NULL;
    unsigned i;
    for (i = 0; i < mTracks.size(); i++) {
        info = &mTracks.editItemAt(i);

        if (info == NULL)
            break;

        AString sdp;
        ASessionDescription::SDPStringFactory(sdp, info->mLocalIp,
                info->mIsAudio, info->mLocalPort, info->mPayloadType, info->mAS, info->mCodecName,
                NULL, info->mWidth, info->mHeight);
        ALOGD("RTPSource SDP =>\n%s", sdp.c_str());

        sp<ASessionDescription> desc = new ASessionDescription;
        bool isValidSdp = desc->setTo(sdp.c_str(), sdp.size());
        ALOGV("RTPSource isValidSdp => %d", isValidSdp);

        int sockRtp, sockRtcp;
        ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
                info->mLocalPort, info->mRemotePort);

        sp<AMessage> notify = new AMessage('accu', this);

        ALOGV("RTPSource addStream. track-index=%d", i);
        notify->setSize("trackIndex", i);
        // index (i) should start from 1; 0 is reserved for [root].
        mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
        mRTPConn->setSelfID(info->mSelfID);
        mRTPConn->setMinMaxBitrate(videoMinBitrate, 512000);

        info->mRTPSocket = sockRtp;
        info->mRTCPSocket = sockRtcp;
        info->mFirstSeqNumInSegment = 0;
        info->mNewSegment = true;
        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
        info->mRTPAnchor = 0;
        info->mNTPAnchorUs = -1;
        info->mNormalPlayTimeRTP = 0;
        info->mNormalPlayTimeUs = 0ll;

        // index (i) should start from 1; 0 is reserved for [root].
        info->mPacketSource = new APacketSource(desc, i + 1);

        int32_t timeScale;
        sp<MetaData> format = getTrackFormat(i, &timeScale);
        sp<AnotherPacketSource> source = new AnotherPacketSource(format);

        if (info->mIsAudio) {
            mAudioTrack = source;
        } else {
            mVideoTrack = source;
        }

        info->mSource = source;
    }

    CHECK_EQ(mState, (int)DISCONNECTED);
    mState = CONNECTING;

    if (mInPreparationPhase) {
        mInPreparationPhase = false;
        notifyPrepared();
    }
}

void NuPlayer::RTPSource::start() {
}

void NuPlayer::RTPSource::pause() {
    mState = PAUSED;
}

void NuPlayer::RTPSource::resume() {
    mState = CONNECTING;
}

void NuPlayer::RTPSource::stop() {
    if (mLooper == NULL) {
        return;
    }
    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);

    sp<AMessage> dummy;
    msg->postAndAwaitResponse(&dummy);
}

status_t NuPlayer::RTPSource::feedMoreTSData() {
    Mutex::Autolock _l(mBufferingLock);
    return mFinalResult;
}

sp<MetaData> NuPlayer::RTPSource::getFormatMeta(bool audio) {
    sp<AnotherPacketSource> source = getSource(audio);

    if (source == NULL) {
        return NULL;
    }

    return source->getFormat();
}

bool NuPlayer::RTPSource::haveSufficientDataOnAllTracks() {
    // We're going to buffer at least 2 secs worth of data on all tracks before
    // starting playback (both at startup and after a seek).

    static const int64_t kMinDurationUs = 2000000ll;

    int64_t mediaDurationUs = 0;
    getDuration(&mediaDurationUs);
    if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs))
            || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) {
        return true;
    }

    status_t err;
    int64_t durationUs;
    if (mAudioTrack != NULL
            && (durationUs = mAudioTrack->getBufferedDurationUs(&err))
                    < kMinDurationUs
            && err == OK) {
        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
              durationUs / 1E6);
        return false;
    }

    if (mVideoTrack != NULL
            && (durationUs = mVideoTrack->getBufferedDurationUs(&err))
                    < kMinDurationUs
            && err == OK) {
        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
              durationUs / 1E6);
        return false;
    }

    return true;
}

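// Hands the next access unit for the requested track to NuPlayer. While paused,
// or while the track has no buffered data yet, this returns -EWOULDBLOCK. A
// track that looks finished is given kNearEOSTimeoutUs to produce more data
// before EOS is signaled, and EOS on one track is propagated to the other.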
status_t NuPlayer::RTPSource::dequeueAccessUnit(
        bool audio, sp<ABuffer> *accessUnit) {

    sp<AnotherPacketSource> source = getSource(audio);

    if (mState == PAUSED) {
        ALOGV("-EWOULDBLOCK");
        return -EWOULDBLOCK;
    }

    status_t finalResult;
    if (!source->hasBufferAvailable(&finalResult)) {
        if (finalResult == OK) {
            int64_t mediaDurationUs = 0;
            getDuration(&mediaDurationUs);
            sp<AnotherPacketSource> otherSource = getSource(!audio);
            status_t otherFinalResult;

            // If the other source has already signaled EOS, this source should also signal EOS.
            if (otherSource != NULL &&
                    !otherSource->hasBufferAvailable(&otherFinalResult) &&
                    otherFinalResult == ERROR_END_OF_STREAM) {
                source->signalEOS(ERROR_END_OF_STREAM);
                return ERROR_END_OF_STREAM;
            }

            // If this source has detected near end, give it some time to retrieve more
            // data before signaling EOS.
            if (source->isFinished(mediaDurationUs)) {
                int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
                if (eosTimeout == 0) {
                    setEOSTimeout(audio, ALooper::GetNowUs());
                } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
                    setEOSTimeout(audio, 0);
                    source->signalEOS(ERROR_END_OF_STREAM);
                    return ERROR_END_OF_STREAM;
                }
                return -EWOULDBLOCK;
            }

            if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
                // We should not enter buffering mode
                // if any of the sources already have detected EOS.
                // TODO: Check whether the line below is needed.
                // startBufferingIfNecessary();
            }

            return -EWOULDBLOCK;
        }
        return finalResult;
    }

    setEOSTimeout(audio, 0);

    return source->dequeueAccessUnit(accessUnit);
}

sp<AnotherPacketSource> NuPlayer::RTPSource::getSource(bool audio) {
    return audio ? mAudioTrack : mVideoTrack;
}

void NuPlayer::RTPSource::setEOSTimeout(bool audio, int64_t timeout) {
    if (audio) {
        mEOSTimeoutAudio = timeout;
    } else {
        mEOSTimeoutVideo = timeout;
    }
}

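// Reports the longer of the audio and video track durations; the result stays
// 0 when neither track format carries a duration key (e.g. a live RTP stream).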
status_t NuPlayer::RTPSource::getDuration(int64_t *durationUs) {
    *durationUs = 0ll;

    int64_t audioDurationUs;
    if (mAudioTrack != NULL
            && mAudioTrack->getFormat()->findInt64(
                kKeyDuration, &audioDurationUs)
            && audioDurationUs > *durationUs) {
        *durationUs = audioDurationUs;
    }

    int64_t videoDurationUs;
    if (mVideoTrack != NULL
            && mVideoTrack->getFormat()->findInt64(
                kKeyDuration, &videoDurationUs)
            && videoDurationUs > *durationUs) {
        *durationUs = videoDurationUs;
    }

    return OK;
}

status_t NuPlayer::RTPSource::seekTo(int64_t seekTimeUs, MediaPlayerSeekMode mode) {
    ALOGV("RTPSource::seekTo=%d, mode=%d", (int)seekTimeUs, mode);
    return OK;
}

void NuPlayer::RTPSource::schedulePollBuffering() {
    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
    msg->post(1000000ll); // 1 second intervals
}

void NuPlayer::RTPSource::onPollBuffering() {
    schedulePollBuffering();
}

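// Messages arrive from two directions: 'accu' notifications posted by
// ARTPConnection (sender-report time updates, first-RTCP/IMS notices and
// actual access units) and kWhatDisconnect posted from stop(). Access units
// are stamped with a normal-play-time derived from their RTP timestamp before
// being queued to the matching AnotherPacketSource.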
void NuPlayer::RTPSource::onMessageReceived(const sp<AMessage> &msg) {
    ALOGV("onMessageReceived =%d", msg->what());

    switch (msg->what()) {
        case kWhatAccessUnitComplete:
        {
            if (mState == CONNECTING) {
                mState = CONNECTED;
            }

            int32_t timeUpdate;
            // "time-update" is raised from ARTPConnection::parseSR().
            if (msg->findInt32("time-update", &timeUpdate) && timeUpdate) {
                size_t trackIndex;
                CHECK(msg->findSize("trackIndex", &trackIndex));

                uint32_t rtpTime;
                uint64_t ntpTime;
                CHECK(msg->findInt32("rtp-time", (int32_t *)&rtpTime));
                CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));

                onTimeUpdate(trackIndex, rtpTime, ntpTime);
                break;
            }

            int32_t firstRTCP;
            if (msg->findInt32("first-rtcp", &firstRTCP)) {
                // There won't be an access unit here, it's just a notification
                // that the data communication worked since we got the first
                // rtcp packet.
                ALOGV("first-rtcp");
                break;
            }

            int32_t IMSRxNotice;
            if (msg->findInt32("IMS-Rx-notice", &IMSRxNotice)) {
                int32_t payloadType, feedbackType;
                CHECK(msg->findInt32("payload-type", &payloadType));
                CHECK(msg->findInt32("feedback-type", &feedbackType));

                sp<AMessage> notify = dupNotify();
                notify->setInt32("what", kWhatIMSRxNotice);
                notify->setMessage("message", msg);
                notify->post();

                ALOGV("IMSRxNotice \t\t payload : %d feedback : %d",
                      payloadType, feedbackType);
                break;
            }

            size_t trackIndex;
            CHECK(msg->findSize("trackIndex", &trackIndex));

            sp<ABuffer> accessUnit;
            if (msg->findBuffer("access-unit", &accessUnit) == false) {
                break;
            }

            int32_t damaged;
            if (accessUnit->meta()->findInt32("damaged", &damaged)
                    && damaged) {
                ALOGD("dropping damaged access unit.");
                break;
            }

            TrackInfo *info = &mTracks.editItemAt(trackIndex);

            sp<AnotherPacketSource> source = info->mSource;
            if (source != NULL) {
                uint32_t rtpTime;
                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));

                /* AnotherPacketSource asserts if no NTP time is provided;
                   RTPSource should always provide ntpUs.
                if (!info->mNPTMappingValid) {
                    // This is a live stream, we didn't receive any normal
                    // playtime mapping. We won't map to npt time.
                    source->queueAccessUnit(accessUnit);
                    break;
                }
                */

                int64_t nptUs =
                    ((double)rtpTime - (double)info->mRTPTime)
                        / info->mTimeScale
                            * 1000000ll
                            + info->mNormalPlaytimeUs;

                accessUnit->meta()->setInt64("timeUs", nptUs);

                source->queueAccessUnit(accessUnit);
            }

            break;
        }
        case kWhatDisconnect:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            for (size_t i = 0; i < mTracks.size(); ++i) {
                TrackInfo *info = &mTracks.editItemAt(i);

                if (info->mIsAudio) {
                    mAudioTrack->signalEOS(ERROR_END_OF_STREAM);
                    mAudioTrack = NULL;
                    ALOGV("mAudioTrack disconnected");
                } else {
                    mVideoTrack->signalEOS(ERROR_END_OF_STREAM);
                    mVideoTrack = NULL;
                    ALOGV("mVideoTrack disconnected");
                }

                mRTPConn->removeStream(info->mRTPSocket, info->mRTCPSocket);
                close(info->mRTPSocket);
                close(info->mRTCPSocket);
            }

            mTracks.clear();
            mFirstAccessUnit = true;
            mAllTracksHaveTime = false;
            mNTPAnchorUs = -1;
            mMediaAnchorUs = -1;
            mLastMediaTimeUs = -1;
            mNumAccessUnitsReceived = 0;
            mReceivedFirstRTCPPacket = false;
            mReceivedFirstRTPPacket = false;
            mPausing = false;
            mPauseGeneration = 0;

            (new AMessage)->postReply(replyID);

            break;
        }
        case kWhatPollBuffering:
            break;
        default:
            TRESPASS();
    }
}

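// Handles an RTCP sender report for one track: the (rtpTime, ntpTime) pair
// anchors that track's RTP clock to wall-clock time. NTP timestamps are 32.32
// fixed point, so dividing by 2^32 converts them to seconds (scaled to
// microseconds here). Once every track has an anchor and data has been seen on
// all channels, buffered packets are timestamped and queued.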
void NuPlayer::RTPSource::onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
    ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = %#016llx",
            trackIndex, rtpTime, (long long)ntpTime);

    int64_t ntpTimeUs = (int64_t)(ntpTime * 1E6 / (1ll << 32));

    TrackInfo *track = &mTracks.editItemAt(trackIndex);

    track->mRTPAnchor = rtpTime;
    track->mNTPAnchorUs = ntpTimeUs;

    if (mNTPAnchorUs < 0) {
        mNTPAnchorUs = ntpTimeUs;
        mMediaAnchorUs = mLastMediaTimeUs;
    }

    if (!mAllTracksHaveTime) {
        bool allTracksHaveTime = (mTracks.size() > 0);
        for (size_t i = 0; i < mTracks.size(); ++i) {
            TrackInfo *track = &mTracks.editItemAt(i);
            if (track->mNTPAnchorUs < 0) {
                allTracksHaveTime = false;
                break;
            }
        }
        if (allTracksHaveTime) {
            mAllTracksHaveTime = true;
            ALOGI("Time now established for all tracks.");
        }
    }
    if (mAllTracksHaveTime && dataReceivedOnAllChannels()) {
        // Time is now established, let's start timestamping immediately.
        for (size_t i = 0; i < mTracks.size(); ++i) {
            TrackInfo *trackInfo = &mTracks.editItemAt(i);
            while (!trackInfo->mPackets.empty()) {
                sp<ABuffer> accessUnit = *trackInfo->mPackets.begin();
                trackInfo->mPackets.erase(trackInfo->mPackets.begin());

                if (addMediaTimestamp(i, trackInfo, accessUnit)) {
                    postQueueAccessUnit(i, accessUnit);
                }
            }
        }
    }
}

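// Converts an access unit's RTP timestamp into a media time: the offset from
// the track's RTP anchor, scaled by the track's timescale, is applied to the
// shared NTP/media anchors. Units that map to a negative media time are
// dropped.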
bool NuPlayer::RTPSource::addMediaTimestamp(
        int32_t trackIndex, const TrackInfo *track,
        const sp<ABuffer> &accessUnit) {

    uint32_t rtpTime;
    CHECK(accessUnit->meta()->findInt32(
                "rtp-time", (int32_t *)&rtpTime));

    int64_t relRtpTimeUs =
        (((int64_t)rtpTime - (int64_t)track->mRTPAnchor) * 1000000ll)
            / track->mTimeScale;

    int64_t ntpTimeUs = track->mNTPAnchorUs + relRtpTimeUs;

    int64_t mediaTimeUs = mMediaAnchorUs + ntpTimeUs - mNTPAnchorUs;

    if (mediaTimeUs > mLastMediaTimeUs) {
        mLastMediaTimeUs = mediaTimeUs;
    }

    if (mediaTimeUs < 0) {
        ALOGV("dropping early accessUnit.");
        return false;
    }

    ALOGV("track %d rtpTime=%u mediaTimeUs = %lld us (%.2f secs)",
            trackIndex, rtpTime, (long long)mediaTimeUs, mediaTimeUs / 1E6);

    accessUnit->meta()->setInt64("timeUs", mediaTimeUs);

    return true;
}

bool NuPlayer::RTPSource::dataReceivedOnAllChannels() {
    TrackInfo *track;
    for (size_t i = 0; i < mTracks.size(); ++i) {
        track = &mTracks.editItemAt(i);
        if (track->mPackets.empty()) {
            return false;
        }
    }
    return true;
}

void NuPlayer::RTPSource::postQueueAccessUnit(
        size_t trackIndex, const sp<ABuffer> &accessUnit) {
    sp<AMessage> msg = new AMessage(kWhatAccessUnit, this);
    msg->setInt32("what", kWhatAccessUnit);
    msg->setSize("trackIndex", trackIndex);
    msg->setBuffer("accessUnit", accessUnit);
    msg->post();
}

void NuPlayer::RTPSource::postQueueEOS(size_t trackIndex, status_t finalResult) {
    sp<AMessage> msg = new AMessage(kWhatEOS, this);
    msg->setInt32("what", kWhatEOS);
    msg->setSize("trackIndex", trackIndex);
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

sp<MetaData> NuPlayer::RTPSource::getTrackFormat(size_t index, int32_t *timeScale) {
    CHECK_GE(index, 0u);
    CHECK_LT(index, mTracks.size());

    const TrackInfo &info = mTracks.itemAt(index);

    *timeScale = info.mTimeScale;

    return info.mPacketSource->getFormat();
}

void NuPlayer::RTPSource::onConnected() {
    ALOGV("onConnected");
    mState = CONNECTED;
}

void NuPlayer::RTPSource::onDisconnected(const sp<AMessage> &msg) {
    if (mState == DISCONNECTED) {
        return;
    }

    status_t err;
    CHECK(msg->findInt32("result", &err));
    CHECK_NE(err, (status_t)OK);

//    mLooper->unregisterHandler(mHandler->id());
//    mHandler.clear();

    if (mState == CONNECTING) {
        // We're still in the preparation phase, signal that it
        // failed.
        notifyPrepared(err);
    }

    mState = DISCONNECTED;
//    setError(err);

}

status_t NuPlayer::RTPSource::setParameter(const String8 &key, const String8 &value) {
    ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());

    bool isAudioKey = key.contains("audio");
    TrackInfo *info = NULL;
    for (unsigned i = 0; i < mTracks.size(); ++i) {
        info = &mTracks.editItemAt(i);
        if (info != NULL && info->mIsAudio == isAudioKey) {
            ALOGV("setParameter: %s track (%d) found", isAudioKey ? "audio" : "video", i);
            break;
        }
    }

    if (info == NULL) {
        TrackInfo newTrackInfo;
        newTrackInfo.mIsAudio = isAudioKey;
        mTracks.push(newTrackInfo);
        info = &mTracks.editTop();
    }

    if (key == "rtp-param-mime-type") {
        info->mMimeType = value;

        const char *mime = value.string();
        const char *delimiter = strchr(mime, '/');
        // Guard against a mime type without a '/' so we don't read past the end.
        info->mCodecName = delimiter ? (delimiter + 1) : "<none>";

        ALOGV("rtp-param-mime-type: mMimeType (%s) => mCodecName (%s)",
                info->mMimeType.string(), info->mCodecName.string());
    } else if (key == "video-param-decoder-profile") {
        info->mCodecProfile = atoi(value);
    } else if (key == "video-param-decoder-level") {
        info->mCodecLevel = atoi(value);
    } else if (key == "video-param-width") {
        info->mWidth = atoi(value);
    } else if (key == "video-param-height") {
        info->mHeight = atoi(value);
    } else if (key == "rtp-param-local-ip") {
        info->mLocalIp = value;
    } else if (key == "rtp-param-local-port") {
        info->mLocalPort = atoi(value);
    } else if (key == "rtp-param-remote-ip") {
        info->mRemoteIp = value;
    } else if (key == "rtp-param-remote-port") {
        info->mRemotePort = atoi(value);
    } else if (key == "rtp-param-payload-type") {
        info->mPayloadType = atoi(value);
    } else if (key == "rtp-param-as") {
        // AS is the guaranteed bitrate negotiated from the SDP.
        info->mAS = atoi(value);
    } else if (key == "rtp-param-rtp-timeout") {
    } else if (key == "rtp-param-rtcp-timeout") {
    } else if (key == "rtp-param-time-scale") {
    } else if (key == "rtp-param-self-id") {
        info->mSelfID = atoi(value);
    }

    return OK;
}

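// Splits a ";"-separated list of "key=value" pairs and feeds each pair to
// setParameter(). An illustrative parameter string (values are made up for the
// example) could look like:
//   rtp-param-mime-type=video/avc;video-param-width=640;video-param-height=480;
//   rtp-param-local-ip=127.0.0.1;rtp-param-local-port=5000;
//   rtp-param-remote-ip=127.0.0.1;rtp-param-remote-port=5002;
//   rtp-param-payload-type=96;rtp-param-as=512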
status_t NuPlayer::RTPSource::setParameters(const String8 &params) {
    ALOGV("setParameters: %s", params.string());
    const char *cparams = params.string();
    const char *key_start = cparams;
    for (;;) {
        const char *equal_pos = strchr(key_start, '=');
        if (equal_pos == NULL) {
            ALOGE("Parameters %s miss a value", cparams);
            return BAD_VALUE;
        }
        String8 key(key_start, equal_pos - key_start);
        TrimString(&key);
        if (key.length() == 0) {
            ALOGE("Parameters %s contains an empty key", cparams);
            return BAD_VALUE;
        }
        const char *value_start = equal_pos + 1;
        const char *semicolon_pos = strchr(value_start, ';');
        String8 value;
        if (semicolon_pos == NULL) {
            value.setTo(value_start);
        } else {
            value.setTo(value_start, semicolon_pos - value_start);
        }
        if (setParameter(key, value) != OK) {
            return BAD_VALUE;
        }
        if (semicolon_pos == NULL) {
            break;  // Reaches the end
        }
        key_start = semicolon_pos + 1;
    }
    return OK;
}

// Trim both leading and trailing whitespace from the given string.
// static
void NuPlayer::RTPSource::TrimString(String8 *s) {
    size_t num_bytes = s->bytes();
    const char *data = s->string();

    size_t leading_space = 0;
    while (leading_space < num_bytes && isspace(data[leading_space])) {
        ++leading_space;
    }

    size_t i = num_bytes;
    while (i > leading_space && isspace(data[i - 1])) {
        --i;
    }

    s->setTo(String8(&data[leading_space], i - leading_space));
}

}  // namespace android