
Android MediaServer - MediaPlayer - start (video)


1.      MediaPlayerService::Client::start()

Frameworks/av/media/libmediaplayerservice/MediaPlayerService.h

Frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp

 

status_t MediaPlayerService::Client::start()
{
    ALOGV("[%d] start", mConnId);

    // p is the underlying player implementation, a StagefrightPlayer in this case
    sp<MediaPlayerBase> p = getPlayer();
    if (p == 0) return UNKNOWN_ERROR;
    p->setLooping(mLoop);

    // Forward the call to StagefrightPlayer::start()
    return p->start();
}
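For context on how the call arrives here: the application-side MediaPlayer sends a Binder transaction to the media server, and the BnMediaPlayer stub dispatches it to Client::start(). The sketch below is paraphrased from frameworks/av/media/libmedia/IMediaPlayer.cpp purely to illustrate that dispatch; the transaction constants and macros are AOSP details, not something quoted from this article.

status_t BnMediaPlayer::onTransact(
        uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) {
    switch (code) {
        case START: {
            CHECK_INTERFACE(IMediaPlayer, data, reply);
            // start() is virtual; for this client it resolves to
            // MediaPlayerService::Client::start() shown above.
            reply->writeInt32(start());
            return NO_ERROR;
        }
        // ... other transactions elided ...
        default:
            return BBinder::onTransact(code, data, reply, flags);
    }
}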

 

2.      StagefrightPlayer::start ()

Frameworks/av/media/libmediaplayerservice/StagefrightPlayer.cpp

status_t StagefrightPlayer::start() {
    ALOGV("start");

    // Forward to AwesomePlayer::play()
    return mPlayer->play();
}
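StagefrightPlayer is essentially a thin adapter: it implements the MediaPlayerInterface that MediaPlayerService expects and forwards nearly every call to an owned AwesomePlayer. A simplified sketch of that relationship (member and method names other than mPlayer and start() are shown only as an illustration):

class StagefrightPlayer : public MediaPlayerInterface {
    // ...
    virtual status_t start();   // forwards to mPlayer->play()
    virtual status_t pause();   // forwards to mPlayer->pause()

private:
    AwesomePlayer *mPlayer;     // the actual playback engine
};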

 

3.      AwesomePlayer::play ()

Frameworks/av/media/libstagefright/AwesomePlayer.cpp

status_t AwesomePlayer::play() {
    ATRACE_CALL();

    Mutex::Autolock autoLock(mLock);

    modifyFlags(CACHE_UNDERRUN, CLEAR);

    return play_l();
}
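Note the pattern used throughout AwesomePlayer: the public entry point takes mLock with an RAII Mutex::Autolock and then delegates to a private *_l() variant that assumes the lock is already held. A minimal, self-contained illustration of the same convention in standard C++ (class and member names here are made up for the example):

#include <mutex>

class Engine {
public:
    int play() {
        std::lock_guard<std::mutex> autoLock(mLock);  // plays the role of Mutex::Autolock
        return play_l();                              // "_l" suffix: lock already held
    }

private:
    int play_l() {
        // Safe to touch shared state here; every caller holds mLock.
        return 0;
    }

    std::mutex mLock;
};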

 

status_t AwesomePlayer::play_l() {
    modifyFlags(SEEK_PREVIEW, CLEAR);

    if (mFlags & PLAYING) {
        return OK;
    }

    if (!(mFlags & PREPARED)) {
        status_t err = prepare_l();

        if (err != OK) {
            return err;
        }
    }

    modifyFlags(PLAYING, SET);
    modifyFlags(FIRST_FRAME, SET);

    if (mDecryptHandle != NULL) {
        int64_t position;
        getPosition(&position);
        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
                Playback::START, position / 1000);
    }

    if (mAudioSource != NULL) {
        if (mAudioPlayer == NULL) {
            if (mAudioSink != NULL) {
                bool allowDeepBuffering;
                int64_t cachedDurationUs;
                bool eos;
                if (mVideoSource == NULL
                        && (mDurationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US ||
                        (getCachedDuration_l(&cachedDurationUs, &eos) &&
                        cachedDurationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US))) {
                    allowDeepBuffering = true;
                } else {
                    allowDeepBuffering = false;
                }

                mAudioPlayer = new AudioPlayer(mAudioSink, allowDeepBuffering, this);
                mAudioPlayer->setSource(mAudioSource);

                mTimeSource = mAudioPlayer;

                // If there was a seek request before we ever started,
                // honor the request now.
                // Make sure to do this before starting the audio player
                // to avoid a race condition.
                seekAudioIfNecessary_l();
            }
        }

        CHECK(!(mFlags & AUDIO_RUNNING));

        if (mVideoSource == NULL) {
            // We don't want to post an error notification at this point,
            // the error returned from MediaPlayer::start() will suffice.

            status_t err = startAudioPlayer_l(
                    false /* sendErrorNotification */);

            if (err != OK) {
                delete mAudioPlayer;
                mAudioPlayer = NULL;

                modifyFlags((PLAYING | FIRST_FRAME), CLEAR);

                if (mDecryptHandle != NULL) {
                    mDrmManagerClient->setPlaybackStatus(
                            mDecryptHandle, Playback::STOP, 0);
                }

                return err;
            }
        }
    }

    if (mTimeSource == NULL && mAudioPlayer == NULL) {
        mTimeSource = &mSystemTimeSource;
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();

        if (mAudioSource != NULL && mVideoSource != NULL) {
            postVideoLagEvent_l();
        }
    }

    if (mFlags & AT_EOS) {
        // Legacy behaviour, if a stream finishes playing and then
        // is started again, we play from the start...
        seekTo_l(0);
    }

    uint32_t params = IMediaPlayerService::kBatteryDataCodecStarted
        | IMediaPlayerService::kBatteryDataTrackDecoder;
    if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
        params |= IMediaPlayerService::kBatteryDataTrackAudio;
    }
    if (mVideoSource != NULL) {
        params |= IMediaPlayerService::kBatteryDataTrackVideo;
    }
    addBatteryData(params);

    return OK;
}
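One detail worth restating from the listing above is the deep-buffer decision: deep buffering is only allowed for audio-only playback whose total (or cached) duration exceeds AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US. A standalone sketch of that predicate follows; the 5-second threshold below is an assumption used only for illustration, the real constant lives in the framework headers.

#include <cstdint>

static const int64_t kMinDeepBufferDurationUs = 5000000ll;  // assumed threshold

// Deep buffering trades latency for fewer, larger audio-sink writes, so it is only
// worthwhile for long, audio-only content.
bool allowDeepBuffering(bool hasVideo, int64_t durationUs,
                        bool cachedDurationKnown, int64_t cachedDurationUs) {
    if (hasVideo) {
        return false;
    }
    return durationUs > kMinDeepBufferDurationUs
            || (cachedDurationKnown && cachedDurationUs > kMinDeepBufferDurationUs);
}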

 

4.      AwesomePlayer::postVideoEvent_l()

Frameworks/av/media/libstagefright/AwesomePlayer.cpp

void AwesomePlayer::postVideoEvent_l(int64_t delayUs) {
    ATRACE_CALL();

    if (mVideoEventPending) {
        return;
    }

    mVideoEventPending = true;
    mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
}
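Here mQueue is AwesomePlayer's TimedEventQueue and mVideoEvent is created in the constructor as a callback event bound to onVideoEvent(), so the postEventWithDelay() call above causes onVideoEvent() to run on the event-queue thread roughly delayUs microseconds later (10 ms by default). Paraphrased from AwesomePlayer's constructor:

// AwesomeEvent is a small TimedEventQueue::Event subclass defined in AwesomePlayer.cpp
// that simply invokes the given member function when the event fires.
mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent);
mVideoEventPending = false;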

5.      AwesomePlayer::onVideoEvent()

Frameworks/av/media/libstagefright/AwesomePlayer.cpp

void AwesomePlayer::onVideoEvent() {
    ATRACE_CALL();
    Mutex::Autolock autoLock(mLock);
    if (!mVideoEventPending) {
        // The event has been cancelled in reset_l() but had already
        // been scheduled for execution at that time.
        return;
    }
    mVideoEventPending = false;

    if (mSeeking != NO_SEEK) {
        if (mVideoBuffer) {
            mVideoBuffer->release();
            mVideoBuffer = NULL;
        }

        if (mSeeking == SEEK && isStreamingHTTP() && mAudioSource != NULL
                && !(mFlags & SEEK_PREVIEW)) {
            // We're going to seek the video source first, followed by
            // the audio source.
            // In order to avoid jumps in the DataSource offset caused by
            // the audio codec prefetching data from the old locations
            // while the video codec is already reading data from the new
            // locations, we'll "pause" the audio source, causing it to
            // stop reading input data until a subsequent seek.

            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
                mAudioPlayer->pause();

                modifyFlags(AUDIO_RUNNING, CLEAR);
            }
            mAudioSource->pause();
        }
    }

    if (!mVideoBuffer) {
        MediaSource::ReadOptions options;
        if (mSeeking != NO_SEEK) {
            ALOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);

            options.setSeekTo(
                    mSeekTimeUs,
                    mSeeking == SEEK_VIDEO_ONLY
                        ? MediaSource::ReadOptions::SEEK_NEXT_SYNC
                        : MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
        }
        for (;;) {
            status_t err = mVideoSource->read(&mVideoBuffer, &options);
            options.clearSeekTo();

            if (err != OK) {
                CHECK(mVideoBuffer == NULL);

                if (err == INFO_FORMAT_CHANGED) {
                    ALOGV("VideoSource signalled format change.");

                    notifyVideoSize_l();

                    if (mVideoRenderer != NULL) {
                        mVideoRendererIsPreview = false;
                        initRenderer_l();
                    }
                    continue;
                }

                // So video playback is complete, but we may still have
                // a seek request pending that needs to be applied
                // to the audio track.
                if (mSeeking != NO_SEEK) {
                    ALOGV("video stream ended while seeking!");
                }
                finishSeekIfNecessary(-1);

                if (mAudioPlayer != NULL
                        && !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
                    startAudioPlayer_l();
                }

                modifyFlags(VIDEO_AT_EOS, SET);
                postStreamDoneEvent_l(err);
                return;
            }

            if (mVideoBuffer->range_length() == 0) {
                // Some decoders, notably the PV AVC software decoder
                // return spurious empty buffers that we just want to ignore.

                mVideoBuffer->release();
                mVideoBuffer = NULL;
                continue;
            }

            break;
        }

        {
            Mutex::Autolock autoLock(mStatsLock);
            ++mStats.mNumVideoFramesDecoded;
        }
    }

    int64_t timeUs;
    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));

    mLastVideoTimeUs = timeUs;

    if (mSeeking == SEEK_VIDEO_ONLY) {
        if (mSeekTimeUs > timeUs) {
            ALOGI("XXX mSeekTimeUs = %lld us, timeUs = %lld us",
                 mSeekTimeUs, timeUs);
        }
    }

    {
        Mutex::Autolock autoLock(mMiscStateLock);
        mVideoTimeUs = timeUs;
    }

    SeekType wasSeeking = mSeeking;
    finishSeekIfNecessary(timeUs);

    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
        status_t err = startAudioPlayer_l();
        if (err != OK) {
            ALOGE("Starting the audio player failed w/ err %d", err);
            return;
        }
    }

    if ((mFlags & TEXTPLAYER_INITIALIZED)
            && !(mFlags & (TEXT_RUNNING | SEEK_PREVIEW))) {
        mTextDriver->start();
        modifyFlags(TEXT_RUNNING, SET);
    }

    TimeSource *ts =
        ((mFlags & AUDIO_AT_EOS) || !(mFlags & AUDIOPLAYER_STARTED))
            ? &mSystemTimeSource : mTimeSource;

    if (mFlags & FIRST_FRAME) {
        modifyFlags(FIRST_FRAME, CLEAR);
        mSinceLastDropped = 0;
        mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
    }

    int64_t realTimeUs, mediaTimeUs;
    if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
        && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
        mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
    }

    if (wasSeeking == SEEK_VIDEO_ONLY) {
        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

        int64_t latenessUs = nowUs - timeUs;

        ATRACE_INT("Video Lateness (ms)", latenessUs / 1E3);

        if (latenessUs > 0) {
            ALOGI("after SEEK_VIDEO_ONLY we're late by %.2f secs", latenessUs / 1E6);
        }
    }

    if (wasSeeking == NO_SEEK) {
        // Let's display the first frame after seeking right away.

        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;

        int64_t latenessUs = nowUs - timeUs;

        ATRACE_INT("Video Lateness (ms)", latenessUs / 1E3);

        if (latenessUs > 500000ll
                && mAudioPlayer != NULL
                && mAudioPlayer->getMediaTimeMapping(
                    &realTimeUs, &mediaTimeUs)) {
            if (mWVMExtractor == NULL) {
                ALOGI("we're much too late (%.2f secs), video skipping ahead",
                     latenessUs / 1E6);

                mVideoBuffer->release();
                mVideoBuffer = NULL;

                mSeeking = SEEK_VIDEO_ONLY;
                mSeekTimeUs = mediaTimeUs;

                postVideoEvent_l();
                return;
            } else {
                // The widevine extractor doesn't deal well with seeking
                // audio and video independently. We'll just have to wait
                // until the decoder catches up, which won't be long at all.
                ALOGI("we're very late (%.2f secs)", latenessUs / 1E6);
            }
        }

        if (latenessUs > 40000) {
            // We're more than 40ms late.
            ALOGV("we're late by %lld us (%.2f secs)",
                 latenessUs, latenessUs / 1E6);

            if (!(mFlags & SLOW_DECODER_HACK)
                    || mSinceLastDropped > FRAME_DROP_FREQ)
            {
                ALOGV("we're late by %lld us (%.2f secs) dropping "
                     "one after %d frames",
                     latenessUs, latenessUs / 1E6, mSinceLastDropped);

                mSinceLastDropped = 0;
                mVideoBuffer->release();
                mVideoBuffer = NULL;

                {
                    Mutex::Autolock autoLock(mStatsLock);
                    ++mStats.mNumVideoFramesDropped;
                }

                postVideoEvent_l();
                return;
            }
        }

        if (latenessUs < -10000) {
            // We're more than 10ms early.
            postVideoEvent_l(10000);
            return;
        }
    }

    if ((mNativeWindow != NULL)
            && (mVideoRendererIsPreview || mVideoRenderer == NULL)) {
        mVideoRendererIsPreview = false;

        initRenderer_l();
    }

    if (mVideoRenderer != NULL) {
        mSinceLastDropped++;
        mVideoRenderer->render(mVideoBuffer);
        if (!mVideoRenderingStarted) {
            mVideoRenderingStarted = true;
            notifyListener_l(MEDIA_INFO, MEDIA_INFO_RENDERING_START);
        }
    }

    mVideoBuffer->release();
    mVideoBuffer = NULL;

    if (wasSeeking != NO_SEEK && (mFlags & SEEK_PREVIEW)) {
        modifyFlags(SEEK_PREVIEW, CLEAR);
        return;
    }

    postVideoEvent_l();
}
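The heart of onVideoEvent() is the A/V sync decision: compute the current media time from the chosen TimeSource, compare it against the frame's timestamp, then render, drop, or wait. The self-contained sketch below condenses just that decision; it omits the 500 ms "skip ahead" path, SLOW_DECODER_HACK, and the Widevine special case, and all names are illustrative.

#include <cstdint>
#include <cstdio>

enum class FrameAction { Render, Drop, WaitAndRetry };

// realTimeUs:        clock reading from the active TimeSource (audio clock or system clock)
// timeSourceDeltaUs: offset mapping that clock onto media time (mTimeSourceDeltaUs above)
// frameTimeUs:       presentation timestamp of the decoded frame (kKeyTime)
FrameAction decideFrameAction(int64_t realTimeUs, int64_t timeSourceDeltaUs,
                              int64_t frameTimeUs) {
    const int64_t nowUs = realTimeUs - timeSourceDeltaUs;  // current media time
    const int64_t latenessUs = nowUs - frameTimeUs;        // > 0 means the frame is late

    if (latenessUs > 40000) {
        return FrameAction::Drop;          // more than 40 ms late: drop to catch up
    }
    if (latenessUs < -10000) {
        return FrameAction::WaitAndRetry;  // more than 10 ms early: re-post the event
    }
    return FrameAction::Render;            // close enough: hand the buffer to the renderer
}

int main() {
    // Frame stamped at 1.000 s, media clock currently at 1.055 s -> 55 ms late -> Drop.
    FrameAction a = decideFrameAction(/*realTimeUs=*/2055000,
                                      /*timeSourceDeltaUs=*/1000000,
                                      /*frameTimeUs=*/1000000);
    std::printf("action = %d\n", static_cast<int>(a));
    return 0;
}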

 

 

