/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "RTPSource"
#include <utils/Log.h>

#include "RTPSource.h"

#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
#include <string.h>

namespace android {

const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
static int32_t kMaxAllowedStaleAccessUnits = 20;

NuPlayer::RTPSource::RTPSource(
        const sp<AMessage> &notify,
        const String8& rtpParams)
    : Source(notify),
      mRTPParams(rtpParams),
      mFlags(0),
      mState(DISCONNECTED),
      mFinalResult(OK),
      mBuffering(false),
      mInPreparationPhase(true),
      mRTPConn(new ARTPConnection),
      mEOSTimeoutAudio(0),
      mEOSTimeoutVideo(0) {
    ALOGD("RTPSource initialized with rtpParams=%s", rtpParams.string());
}

NuPlayer::RTPSource::~RTPSource() {
    if (mLooper != NULL) {
        mLooper->unregisterHandler(id());
        mLooper->unregisterHandler(mRTPConn->id());
        mLooper->stop();
    }
}

status_t NuPlayer::RTPSource::getBufferingSettings(
        BufferingSettings* buffering /* nonnull */) {
    Mutex::Autolock _l(mBufferingSettingsLock);
    *buffering = mBufferingSettings;
    return OK;
}

status_t NuPlayer::RTPSource::setBufferingSettings(const BufferingSettings& buffering) {
    Mutex::Autolock _l(mBufferingSettingsLock);
    mBufferingSettings = buffering;
    return OK;
}

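// Sets up one RTP/RTCP session per track configured via mRTPParams: builds an
// SDP description with ASessionDescription::SDPStringFactory, opens the RTP/RTCP
// socket pair, registers the stream with ARTPConnection (stream indices start at
// 1; 0 is reserved for [root]), and creates the APacketSource/AnotherPacketSource
// pair that dequeueAccessUnit() later reads from.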
void NuPlayer::RTPSource::prepareAsync() {
    if (mLooper == NULL) {
        mLooper = new ALooper;
        mLooper->setName("rtp");
        mLooper->start();

        mLooper->registerHandler(this);
        mLooper->registerHandler(mRTPConn);
    }

    setParameters(mRTPParams);

    TrackInfo *info = NULL;
    unsigned i;
    for (i = 0; i < mTracks.size(); i++) {
        info = &mTracks.editItemAt(i);

        if (info == NULL)
            break;

        AString sdp;
        ASessionDescription::SDPStringFactory(sdp, info->mLocalIp,
                info->mIsAudio, info->mLocalPort, info->mPayloadType, info->mAS, info->mCodecName,
                NULL, info->mWidth, info->mHeight);
        ALOGD("RTPSource SDP =>\n%s", sdp.c_str());

        sp<ASessionDescription> desc = new ASessionDescription;
        bool isValidSdp = desc->setTo(sdp.c_str(), sdp.size());
        ALOGV("RTPSource isValidSdp => %d", isValidSdp);

        int sockRtp, sockRtcp;
        ARTPConnection::MakeRTPSocketPair(&sockRtp, &sockRtcp, info->mLocalIp, info->mRemoteIp,
                info->mLocalPort, info->mRemotePort);

        sp<AMessage> notify = new AMessage('accu', this);

        ALOGV("RTPSource addStream. track-index=%d", i);
        notify->setSize("trackIndex", i);
        // The stream index starts from 1; 0 is reserved for [root].
        mRTPConn->addStream(sockRtp, sockRtcp, desc, i + 1, notify, false);
        mRTPConn->setSelfID(info->mSelfID);

        info->mRTPSocket = sockRtp;
        info->mRTCPSocket = sockRtcp;
        info->mFirstSeqNumInSegment = 0;
        info->mNewSegment = true;
        info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
        info->mRTPAnchor = 0;
        info->mNTPAnchorUs = -1;
        info->mNormalPlayTimeRTP = 0;
        info->mNormalPlayTimeUs = 0ll;

        // The packet-source index starts from 1; 0 is reserved for [root].
        info->mPacketSource = new APacketSource(desc, i + 1);

        int32_t timeScale;
        sp<MetaData> format = getTrackFormat(i, &timeScale);
        sp<AnotherPacketSource> source = new AnotherPacketSource(format);

        if (info->mIsAudio) {
            mAudioTrack = source;
        } else {
            mVideoTrack = source;
        }

        info->mSource = source;
    }

    CHECK_EQ(mState, (int)DISCONNECTED);
    mState = CONNECTING;

    if (mInPreparationPhase) {
        mInPreparationPhase = false;
        notifyPrepared();
    }
}

void NuPlayer::RTPSource::start() {
}

void NuPlayer::RTPSource::pause() {
    mState = PAUSED;
}

void NuPlayer::RTPSource::resume() {
    mState = CONNECTING;
}

void NuPlayer::RTPSource::stop() {
    if (mLooper == NULL) {
        return;
    }
    sp<AMessage> msg = new AMessage(kWhatDisconnect, this);

    sp<AMessage> dummy;
    msg->postAndAwaitResponse(&dummy);
}

status_t NuPlayer::RTPSource::feedMoreTSData() {
    Mutex::Autolock _l(mBufferingLock);
    return mFinalResult;
}

sp<MetaData> NuPlayer::RTPSource::getFormatMeta(bool audio) {
    sp<AnotherPacketSource> source = getSource(audio);

    if (source == NULL) {
        return NULL;
    }

    return source->getFormat();
}

bool NuPlayer::RTPSource::haveSufficientDataOnAllTracks() {
    // We're going to buffer at least 2 secs' worth of data on all tracks before
    // starting playback (both at startup and after a seek).

    static const int64_t kMinDurationUs = 2000000ll;

    int64_t mediaDurationUs = 0;
    getDuration(&mediaDurationUs);
    if ((mAudioTrack != NULL && mAudioTrack->isFinished(mediaDurationUs))
            || (mVideoTrack != NULL && mVideoTrack->isFinished(mediaDurationUs))) {
        return true;
    }

    status_t err;
    int64_t durationUs;
    if (mAudioTrack != NULL
            && (durationUs = mAudioTrack->getBufferedDurationUs(&err))
                    < kMinDurationUs
            && err == OK) {
        ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
                durationUs / 1E6);
        return false;
    }

    if (mVideoTrack != NULL
            && (durationUs = mVideoTrack->getBufferedDurationUs(&err))
                    < kMinDurationUs
            && err == OK) {
        ALOGV("video track doesn't have enough data yet. (%.2f secs buffered)",
                durationUs / 1E6);
        return false;
    }

    return true;
}

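// Returns -EWOULDBLOCK while paused or while more data is still expected. If the
// other track has already signaled EOS, this track follows immediately; a track
// that reports itself finished is given kNearEOSTimeoutUs to produce more data
// before EOS is signaled.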
status_t NuPlayer::RTPSource::dequeueAccessUnit(
        bool audio, sp<ABuffer> *accessUnit) {

    sp<AnotherPacketSource> source = getSource(audio);

    if (mState == PAUSED) {
        ALOGV("-EWOULDBLOCK");
        return -EWOULDBLOCK;
    }

    status_t finalResult;
    if (!source->hasBufferAvailable(&finalResult)) {
        if (finalResult == OK) {
            int64_t mediaDurationUs = 0;
            getDuration(&mediaDurationUs);
            sp<AnotherPacketSource> otherSource = getSource(!audio);
            status_t otherFinalResult;

            // If the other source has already signaled EOS, this source should signal EOS as well.
            if (otherSource != NULL &&
                    !otherSource->hasBufferAvailable(&otherFinalResult) &&
                    otherFinalResult == ERROR_END_OF_STREAM) {
                source->signalEOS(ERROR_END_OF_STREAM);
                return ERROR_END_OF_STREAM;
            }

            // If this source has detected near end, give it some time to retrieve more
            // data before signaling EOS.
            if (source->isFinished(mediaDurationUs)) {
                int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
                if (eosTimeout == 0) {
                    setEOSTimeout(audio, ALooper::GetNowUs());
                } else if ((ALooper::GetNowUs() - eosTimeout) > kNearEOSTimeoutUs) {
                    setEOSTimeout(audio, 0);
                    source->signalEOS(ERROR_END_OF_STREAM);
                    return ERROR_END_OF_STREAM;
                }
                return -EWOULDBLOCK;
            }

            if (!(otherSource != NULL && otherSource->isFinished(mediaDurationUs))) {
                // We should not enter buffering mode
                // if any of the sources already have detected EOS.
                // TODO: check whether the line below is needed.
                // startBufferingIfNecessary();
            }

            return -EWOULDBLOCK;
        }
        return finalResult;
    }

    setEOSTimeout(audio, 0);

    return source->dequeueAccessUnit(accessUnit);
}

sp<AnotherPacketSource> NuPlayer::RTPSource::getSource(bool audio) {
    return audio ? mAudioTrack : mVideoTrack;
}

void NuPlayer::RTPSource::setEOSTimeout(bool audio, int64_t timeout) {
    if (audio) {
        mEOSTimeoutAudio = timeout;
    } else {
        mEOSTimeoutVideo = timeout;
    }
}

status_t NuPlayer::RTPSource::getDuration(int64_t *durationUs) {
    *durationUs = 0ll;

    int64_t audioDurationUs;
    if (mAudioTrack != NULL
            && mAudioTrack->getFormat()->findInt64(
                    kKeyDuration, &audioDurationUs)
            && audioDurationUs > *durationUs) {
        *durationUs = audioDurationUs;
    }

    int64_t videoDurationUs;
    if (mVideoTrack != NULL
            && mVideoTrack->getFormat()->findInt64(
                    kKeyDuration, &videoDurationUs)
            && videoDurationUs > *durationUs) {
        *durationUs = videoDurationUs;
    }

    return OK;
}

status_t NuPlayer::RTPSource::seekTo(int64_t seekTimeUs, MediaPlayerSeekMode mode) {
    ALOGV("RTPSource::seekTo=%d, mode=%d", (int)seekTimeUs, mode);
    return OK;
}

void NuPlayer::RTPSource::schedulePollBuffering() {
    sp<AMessage> msg = new AMessage(kWhatPollBuffering, this);
    msg->post(1000000ll); // 1 second intervals
}

void NuPlayer::RTPSource::onPollBuffering() {
    schedulePollBuffering();
}

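// Dispatches messages posted by ARTPConnection: an access-unit notification may
// carry an RTCP time update, a first-RTCP notice, an IMS Rx notice (forwarded to
// the player), or an actual access unit, which is timestamped and queued to the
// matching track's source. kWhatDisconnect tears down every stream and resets
// the session state.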
void NuPlayer::RTPSource::onMessageReceived(const sp<AMessage> &msg) {
    ALOGV("onMessageReceived =%d", msg->what());

    switch (msg->what()) {
        case kWhatAccessUnitComplete:
        {
            if (mState == CONNECTING) {
                mState = CONNECTED;
            }

            int32_t timeUpdate;
            // "time-update" is raised from ARTPConnection::parseSR().
            if (msg->findInt32("time-update", &timeUpdate) && timeUpdate) {
                size_t trackIndex;
                CHECK(msg->findSize("trackIndex", &trackIndex));

                uint32_t rtpTime;
                uint64_t ntpTime;
                CHECK(msg->findInt32("rtp-time", (int32_t *)&rtpTime));
                CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));

                onTimeUpdate(trackIndex, rtpTime, ntpTime);
                break;
            }

            int32_t firstRTCP;
            if (msg->findInt32("first-rtcp", &firstRTCP)) {
                // There won't be an access unit here; it's just a notification
                // that data communication worked, since we got the first
                // RTCP packet.
                ALOGV("first-rtcp");
                break;
            }

            int32_t IMSRxNotice;
            if (msg->findInt32("IMS-Rx-notice", &IMSRxNotice)) {
                int32_t payloadType, feedbackType;
                CHECK(msg->findInt32("payload-type", &payloadType));
                CHECK(msg->findInt32("feedback-type", &feedbackType));

                sp<AMessage> notify = dupNotify();
                notify->setInt32("what", kWhatIMSRxNotice);
                notify->setMessage("message", msg);
                notify->post();

                ALOGV("IMSRxNotice \t\t payload : %d feedback : %d",
                        payloadType, feedbackType);
                break;
            }

            size_t trackIndex;
            CHECK(msg->findSize("trackIndex", &trackIndex));

            sp<ABuffer> accessUnit;
            if (msg->findBuffer("access-unit", &accessUnit) == false) {
                break;
            }

            int32_t damaged;
            if (accessUnit->meta()->findInt32("damaged", &damaged)
                    && damaged) {
                ALOGD("dropping damaged access unit.");
                break;
            }

            TrackInfo *info = &mTracks.editItemAt(trackIndex);

            sp<AnotherPacketSource> source = info->mSource;
            if (source != NULL) {
                uint32_t rtpTime;
                CHECK(accessUnit->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));

                /* AnotherPacketSource asserts if no NTP time is provided;
                   RTPSource should always provide ntpUs.
                if (!info->mNPTMappingValid) {
                    // This is a live stream, we didn't receive any normal
                    // playtime mapping. We won't map to npt time.
                    source->queueAccessUnit(accessUnit);
                    break;
                }
                */

                int64_t nptUs =
                        ((double)rtpTime - (double)info->mRTPTime)
                                / info->mTimeScale
                                * 1000000ll
                        + info->mNormalPlaytimeUs;

                accessUnit->meta()->setInt64("timeUs", nptUs);

                source->queueAccessUnit(accessUnit);
            }

            break;
        }
        case kWhatDisconnect:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            for (size_t i = 0; i < mTracks.size(); ++i) {
                TrackInfo *info = &mTracks.editItemAt(i);

                if (info->mIsAudio) {
                    mAudioTrack->signalEOS(ERROR_END_OF_STREAM);
                    mAudioTrack = NULL;
                    ALOGV("mAudioTrack disconnected");
                } else {
                    mVideoTrack->signalEOS(ERROR_END_OF_STREAM);
                    mVideoTrack = NULL;
                    ALOGV("mVideoTrack disconnected");
                }

                mRTPConn->removeStream(info->mRTPSocket, info->mRTCPSocket);
                close(info->mRTPSocket);
                close(info->mRTCPSocket);
            }

            mTracks.clear();
            mFirstAccessUnit = true;
            mAllTracksHaveTime = false;
            mNTPAnchorUs = -1;
            mMediaAnchorUs = -1;
            mLastMediaTimeUs = -1;
            mNumAccessUnitsReceived = 0;
            mReceivedFirstRTCPPacket = false;
            mReceivedFirstRTPPacket = false;
            mPausing = false;
            mPauseGeneration = 0;

            (new AMessage)->postReply(replyID);

            break;
        }
        case kWhatPollBuffering:
            break;
        default:
            TRESPASS();
    }
}

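// Handles an RTCP sender-report time update: converts the 64-bit NTP timestamp
// (32.32 fixed point) to microseconds, anchors this track's RTP/NTP mapping, and
// once every track has an anchor and data has arrived on all channels, timestamps
// and forwards any packets buffered in the meantime.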
void NuPlayer::RTPSource::onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
    ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = %#016llx",
            trackIndex, rtpTime, (long long)ntpTime);

    int64_t ntpTimeUs = (int64_t)(ntpTime * 1E6 / (1ll << 32));

    TrackInfo *track = &mTracks.editItemAt(trackIndex);

    track->mRTPAnchor = rtpTime;
    track->mNTPAnchorUs = ntpTimeUs;

    if (mNTPAnchorUs < 0) {
        mNTPAnchorUs = ntpTimeUs;
        mMediaAnchorUs = mLastMediaTimeUs;
    }

    if (!mAllTracksHaveTime) {
        bool allTracksHaveTime = (mTracks.size() > 0);
        for (size_t i = 0; i < mTracks.size(); ++i) {
            TrackInfo *track = &mTracks.editItemAt(i);
            if (track->mNTPAnchorUs < 0) {
                allTracksHaveTime = false;
                break;
            }
        }
        if (allTracksHaveTime) {
            mAllTracksHaveTime = true;
            ALOGI("Time now established for all tracks.");
        }
    }
    if (mAllTracksHaveTime && dataReceivedOnAllChannels()) {
        // Time is now established; let's start timestamping immediately.
        for (size_t i = 0; i < mTracks.size(); ++i) {
            TrackInfo *trackInfo = &mTracks.editItemAt(i);
            while (!trackInfo->mPackets.empty()) {
                sp<ABuffer> accessUnit = *trackInfo->mPackets.begin();
                trackInfo->mPackets.erase(trackInfo->mPackets.begin());

                if (addMediaTimestamp(i, trackInfo, accessUnit)) {
                    postQueueAccessUnit(i, accessUnit);
                }
            }
        }
    }
}

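// Maps an access unit's RTP timestamp to a media time using the track's RTP/NTP
// anchor and the shared NTP/media anchors; access units that map to a negative
// media time are dropped.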
bool NuPlayer::RTPSource::addMediaTimestamp(
        int32_t trackIndex, const TrackInfo *track,
        const sp<ABuffer> &accessUnit) {

    uint32_t rtpTime;
    CHECK(accessUnit->meta()->findInt32(
            "rtp-time", (int32_t *)&rtpTime));

    int64_t relRtpTimeUs =
            (((int64_t)rtpTime - (int64_t)track->mRTPAnchor) * 1000000ll)
                    / track->mTimeScale;

    int64_t ntpTimeUs = track->mNTPAnchorUs + relRtpTimeUs;

    int64_t mediaTimeUs = mMediaAnchorUs + ntpTimeUs - mNTPAnchorUs;

    if (mediaTimeUs > mLastMediaTimeUs) {
        mLastMediaTimeUs = mediaTimeUs;
    }

    if (mediaTimeUs < 0) {
        ALOGV("dropping early accessUnit.");
        return false;
    }

    ALOGV("track %d rtpTime=%u mediaTimeUs = %lld us (%.2f secs)",
            trackIndex, rtpTime, (long long)mediaTimeUs, mediaTimeUs / 1E6);

    accessUnit->meta()->setInt64("timeUs", mediaTimeUs);

    return true;
}

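// Returns true only once every track has at least one packet buffered in mPackets.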
bool NuPlayer::RTPSource::dataReceivedOnAllChannels() {
    TrackInfo *track;
    for (size_t i = 0; i < mTracks.size(); ++i) {
        track = &mTracks.editItemAt(i);
        if (track->mPackets.empty()) {
            return false;
        }
    }
    return true;
}

void NuPlayer::RTPSource::postQueueAccessUnit(
        size_t trackIndex, const sp<ABuffer> &accessUnit) {
    sp<AMessage> msg = new AMessage(kWhatAccessUnit, this);
    msg->setInt32("what", kWhatAccessUnit);
    msg->setSize("trackIndex", trackIndex);
    msg->setBuffer("accessUnit", accessUnit);
    msg->post();
}

void NuPlayer::RTPSource::postQueueEOS(size_t trackIndex, status_t finalResult) {
    sp<AMessage> msg = new AMessage(kWhatEOS, this);
    msg->setInt32("what", kWhatEOS);
    msg->setSize("trackIndex", trackIndex);
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

sp<MetaData> NuPlayer::RTPSource::getTrackFormat(size_t index, int32_t *timeScale) {
    CHECK_GE(index, 0u);
    CHECK_LT(index, mTracks.size());

    const TrackInfo &info = mTracks.itemAt(index);

    *timeScale = info.mTimeScale;

    return info.mPacketSource->getFormat();
}

void NuPlayer::RTPSource::onConnected() {
    ALOGV("onConnected");
    mState = CONNECTED;
}

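// Handles a disconnect notification that carries an error result: if it arrives
// while still preparing (CONNECTING), the failure is reported via notifyPrepared();
// the source then moves to DISCONNECTED.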
void NuPlayer::RTPSource::onDisconnected(const sp<AMessage> &msg) {
    if (mState == DISCONNECTED) {
        return;
    }

    status_t err;
    CHECK(msg->findInt32("result", &err));
    CHECK_NE(err, (status_t)OK);

//  mLooper->unregisterHandler(mHandler->id());
//  mHandler.clear();

    if (mState == CONNECTING) {
        // We're still in the preparation phase, signal that it
        // failed.
        notifyPrepared(err);
    }

    mState = DISCONNECTED;
//  setError(err);

}

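// Stores a single key/value pair on the audio or video TrackInfo (selected by
// whether the key contains "audio"), adding a new TrackInfo when the track list
// is still empty.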
status_t NuPlayer::RTPSource::setParameter(const String8 &key, const String8 &value) {
    ALOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());

    bool isAudioKey = key.contains("audio");
    TrackInfo *info = NULL;
    for (unsigned i = 0; i < mTracks.size(); ++i) {
        info = &mTracks.editItemAt(i);
        if (info != NULL && info->mIsAudio == isAudioKey) {
            ALOGV("setParameter: %s track (%d) found", isAudioKey ? "audio" : "video" , i);
            break;
        }
    }

    if (info == NULL) {
        TrackInfo newTrackInfo;
        newTrackInfo.mIsAudio = isAudioKey;
        mTracks.push(newTrackInfo);
        info = &mTracks.editTop();
    }

    if (key == "rtp-param-mime-type") {
        info->mMimeType = value;

        const char *mime = value.string();
        const char *delimiter = strchr(mime, '/');
        info->mCodecName = (delimiter + 1);

        ALOGV("rtp-param-mime-type: mMimeType (%s) => mCodecName (%s)",
                info->mMimeType.string(), info->mCodecName.string());
    } else if (key == "video-param-decoder-profile") {
        info->mCodecProfile = atoi(value);
    } else if (key == "video-param-decoder-level") {
        info->mCodecLevel = atoi(value);
    } else if (key == "video-param-width") {
        info->mWidth = atoi(value);
    } else if (key == "video-param-height") {
        info->mHeight = atoi(value);
    } else if (key == "rtp-param-local-ip") {
        info->mLocalIp = value;
    } else if (key == "rtp-param-local-port") {
        info->mLocalPort = atoi(value);
    } else if (key == "rtp-param-remote-ip") {
        info->mRemoteIp = value;
    } else if (key == "rtp-param-remote-port") {
        info->mRemotePort = atoi(value);
    } else if (key == "rtp-param-payload-type") {
        info->mPayloadType = atoi(value);
    } else if (key == "rtp-param-as") {
        // AS is the guaranteed bitrate negotiated via the SDP.
        info->mAS = atoi(value);
    } else if (key == "rtp-param-rtp-timeout") {
    } else if (key == "rtp-param-rtcp-timeout") {
    } else if (key == "rtp-param-time-scale") {
    } else if (key == "rtp-param-self-id") {
        info->mSelfID = atoi(value);
    }

    return OK;
}

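// Parses the semicolon-separated "key=value" parameter string supplied through
// rtpParams and applies each pair via setParameter().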
status_t NuPlayer::RTPSource::setParameters(const String8 &params) {
    ALOGV("setParameters: %s", params.string());
    const char *cparams = params.string();
    const char *key_start = cparams;
    for (;;) {
        const char *equal_pos = strchr(key_start, '=');
        if (equal_pos == NULL) {
            ALOGE("Parameters %s miss a value", cparams);
            return BAD_VALUE;
        }
        String8 key(key_start, equal_pos - key_start);
        TrimString(&key);
        if (key.length() == 0) {
            ALOGE("Parameters %s contains an empty key", cparams);
            return BAD_VALUE;
        }
        const char *value_start = equal_pos + 1;
        const char *semicolon_pos = strchr(value_start, ';');
        String8 value;
        if (semicolon_pos == NULL) {
            value.setTo(value_start);
        } else {
            value.setTo(value_start, semicolon_pos - value_start);
        }
        if (setParameter(key, value) != OK) {
            return BAD_VALUE;
        }
        if (semicolon_pos == NULL) {
            break; // Reaches the end
        }
        key_start = semicolon_pos + 1;
    }
    return OK;
}

// Trim both leading and trailing whitespace from the given string.
// static
void NuPlayer::RTPSource::TrimString(String8 *s) {
    size_t num_bytes = s->bytes();
    const char *data = s->string();

    size_t leading_space = 0;
    while (leading_space < num_bytes && isspace(data[leading_space])) {
        ++leading_space;
    }

    size_t i = num_bytes;
    while (i > leading_space && isspace(data[i - 1])) {
        --i;
    }

    s->setTo(String8(&data[leading_space], i - leading_space));
}

}  // namespace android