OSDN Git Service

Separate preview player from AwesomePlayer and also video editor audio player from...
author    James Dong <jdong@google.com>  Sun, 1 May 2011 19:36:22 +0000 (12:36 -0700)
committer James Dong <jdong@google.com>  Sun, 1 May 2011 20:52:00 +0000 (13:52 -0700)
Change-Id: I436ac541df3e34ad16be95a50c96d7af67fa15e4

libvideoeditor/lvpp/Android.mk
libvideoeditor/lvpp/AudioPlayerBase.cpp [new file with mode: 0644]
libvideoeditor/lvpp/AudioPlayerBase.h [new file with mode: 0644]
libvideoeditor/lvpp/PreviewPlayer.cpp
libvideoeditor/lvpp/PreviewPlayer.h
libvideoeditor/lvpp/PreviewPlayerBase.cpp [new file with mode: 0644]
libvideoeditor/lvpp/PreviewPlayerBase.h [new file with mode: 0644]
libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp
libvideoeditor/lvpp/VideoEditorAudioPlayer.h
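Taken together, these files fork AwesomePlayer and its audio player into video-editor-local base classes. A rough sketch of the resulting hierarchy, inferred from the hunks below (illustration only; VideoEditorAudioPlayer's declaration is not part of this excerpt, so its relationship to AudioPlayerBase is an assumption based on the friend declaration in AudioPlayerBase.h):

    // Illustration only -- not code from this commit.
    namespace android {

    class PreviewPlayerBase {};     // local fork of the AwesomePlayer logic
    class AudioPlayerBase {};       // local fork of the stagefright audio player

    struct PreviewPlayer : public PreviewPlayerBase {};           // was: AwesomePlayer
    class VideoEditorAudioPlayer : public AudioPlayerBase {};     // assumed

    }  // namespace android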

diff --git a/libvideoeditor/lvpp/Android.mk b/libvideoeditor/lvpp/Android.mk
index 091258d..8b5a6f5 100755 (executable)
@@ -35,6 +35,8 @@ LOCAL_SRC_FILES:=          \
     DummyAudioSource.cpp \
     DummyVideoSource.cpp \
     VideoEditorBGAudioProcessing.cpp \
+    AudioPlayerBase.cpp \
+    PreviewPlayerBase.cpp \
     PreviewRenderer.cpp

 LOCAL_MODULE_TAGS := optional
@@ -50,6 +52,7 @@ LOCAL_SHARED_LIBRARIES := \
     libutils           \
     libcutils          \
     libmedia           \
+    libdrmframework    \
     libstagefright  \
     libstagefright_omx  \
     libstagefright_foundation \
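The new libdrmframework dependency lines up with the DRM hooks that PreviewPlayerBase.cpp pulls in further down: dataSource->getDrmInfo() in setDataSource_l() and the mDrmManagerClient->setPlaybackStatus() calls in reset_l(), play_l(), pause_l() and seekAudioIfNecessary_l().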
diff --git a/libvideoeditor/lvpp/AudioPlayerBase.cpp b/libvideoeditor/lvpp/AudioPlayerBase.cpp
new file mode 100644 (file)
index 0000000..26c6a63
--- /dev/null
@@ -0,0 +1,510 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AudioPlayerBase"
+#include <utils/Log.h>
+
+#include <binder/IPCThreadState.h>
+#include <media/AudioTrack.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+
+#include "AudioPlayerBase.h"
+#include "PreviewPlayerBase.h"
+
+namespace android {
+
+AudioPlayerBase::AudioPlayerBase(
+        const sp<MediaPlayerBase::AudioSink> &audioSink,
+        PreviewPlayerBase *observer)
+    : mAudioTrack(NULL),
+      mInputBuffer(NULL),
+      mSampleRate(0),
+      mLatencyUs(0),
+      mFrameSize(0),
+      mNumFramesPlayed(0),
+      mPositionTimeMediaUs(-1),
+      mPositionTimeRealUs(-1),
+      mSeeking(false),
+      mReachedEOS(false),
+      mFinalStatus(OK),
+      mStarted(false),
+      mIsFirstBuffer(false),
+      mFirstBufferResult(OK),
+      mFirstBuffer(NULL),
+      mAudioSink(audioSink),
+      mObserver(observer) {
+}
+
+AudioPlayerBase::~AudioPlayerBase() {
+    if (mStarted) {
+        reset();
+    }
+}
+
+void AudioPlayerBase::setSource(const sp<MediaSource> &source) {
+    CHECK_EQ(mSource, NULL);
+    mSource = source;
+}
+
+status_t AudioPlayerBase::start(bool sourceAlreadyStarted) {
+    CHECK(!mStarted);
+    CHECK(mSource != NULL);
+
+    status_t err;
+    if (!sourceAlreadyStarted) {
+        err = mSource->start();
+
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    // We allow an optional INFO_FORMAT_CHANGED at the very beginning
+    // of playback. If there is one, getFormat below will retrieve the
+    // updated format; if there isn't, we'll stash away the valid buffer
+    // of data to be used on the first audio callback.
+
+    CHECK(mFirstBuffer == NULL);
+
+    mFirstBufferResult = mSource->read(&mFirstBuffer);
+    if (mFirstBufferResult == INFO_FORMAT_CHANGED) {
+        LOGV("INFO_FORMAT_CHANGED!!!");
+
+        CHECK(mFirstBuffer == NULL);
+        mFirstBufferResult = OK;
+        mIsFirstBuffer = false;
+    } else {
+        mIsFirstBuffer = true;
+    }
+
+    sp<MetaData> format = mSource->getFormat();
+    const char *mime;
+    bool success = format->findCString(kKeyMIMEType, &mime);
+    CHECK(success);
+    CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));
+
+    success = format->findInt32(kKeySampleRate, &mSampleRate);
+    CHECK(success);
+
+    int32_t numChannels;
+    success = format->findInt32(kKeyChannelCount, &numChannels);
+    CHECK(success);
+
+    if (mAudioSink.get() != NULL) {
+        status_t err = mAudioSink->open(
+                mSampleRate, numChannels, AUDIO_FORMAT_PCM_16_BIT,
+                DEFAULT_AUDIOSINK_BUFFERCOUNT,
+                &AudioPlayerBase::AudioSinkCallback, this);
+        if (err != OK) {
+            if (mFirstBuffer != NULL) {
+                mFirstBuffer->release();
+                mFirstBuffer = NULL;
+            }
+
+            if (!sourceAlreadyStarted) {
+                mSource->stop();
+            }
+
+            return err;
+        }
+
+        mLatencyUs = (int64_t)mAudioSink->latency() * 1000;
+        mFrameSize = mAudioSink->frameSize();
+
+        mAudioSink->start();
+    } else {
+        mAudioTrack = new AudioTrack(
+                AUDIO_STREAM_MUSIC, mSampleRate, AUDIO_FORMAT_PCM_16_BIT,
+                (numChannels == 2)
+                    ? AUDIO_CHANNEL_OUT_STEREO
+                    : AUDIO_CHANNEL_OUT_MONO,
+                0, 0, &AudioCallback, this, 0);
+
+        if ((err = mAudioTrack->initCheck()) != OK) {
+            delete mAudioTrack;
+            mAudioTrack = NULL;
+
+            if (mFirstBuffer != NULL) {
+                mFirstBuffer->release();
+                mFirstBuffer = NULL;
+            }
+
+            if (!sourceAlreadyStarted) {
+                mSource->stop();
+            }
+
+            return err;
+        }
+
+        mLatencyUs = (int64_t)mAudioTrack->latency() * 1000;
+        mFrameSize = mAudioTrack->frameSize();
+
+        mAudioTrack->start();
+    }
+
+    mStarted = true;
+
+    return OK;
+}
+
+void AudioPlayerBase::pause(bool playPendingSamples) {
+    CHECK(mStarted);
+
+    if (playPendingSamples) {
+        if (mAudioSink.get() != NULL) {
+            mAudioSink->stop();
+        } else {
+            mAudioTrack->stop();
+        }
+    } else {
+        if (mAudioSink.get() != NULL) {
+            mAudioSink->pause();
+        } else {
+            mAudioTrack->pause();
+        }
+    }
+}
+
+void AudioPlayerBase::resume() {
+    CHECK(mStarted);
+
+    if (mAudioSink.get() != NULL) {
+        mAudioSink->start();
+    } else {
+        mAudioTrack->start();
+    }
+}
+
+void AudioPlayerBase::reset() {
+    CHECK(mStarted);
+
+    if (mAudioSink.get() != NULL) {
+        mAudioSink->stop();
+        mAudioSink->close();
+    } else {
+        mAudioTrack->stop();
+
+        delete mAudioTrack;
+        mAudioTrack = NULL;
+    }
+
+    // Make sure to release any buffer we hold onto so that the
+    // source is able to stop().
+
+    if (mFirstBuffer != NULL) {
+        mFirstBuffer->release();
+        mFirstBuffer = NULL;
+    }
+
+    if (mInputBuffer != NULL) {
+        LOGV("AudioPlayerBase releasing input buffer.");
+
+        mInputBuffer->release();
+        mInputBuffer = NULL;
+    }
+
+    mSource->stop();
+
+    // The following hack is necessary to ensure that the OMX
+    // component is completely released by the time we may try
+    // to instantiate it again.
+    wp<MediaSource> tmp = mSource;
+    mSource.clear();
+    while (tmp.promote() != NULL) {
+        usleep(1000);
+    }
+    IPCThreadState::self()->flushCommands();
+
+    mNumFramesPlayed = 0;
+    mPositionTimeMediaUs = -1;
+    mPositionTimeRealUs = -1;
+    mSeeking = false;
+    mReachedEOS = false;
+    mFinalStatus = OK;
+    mStarted = false;
+}
+
+// static
+void AudioPlayerBase::AudioCallback(int event, void *user, void *info) {
+    static_cast<AudioPlayerBase *>(user)->AudioCallback(event, info);
+}
+
+bool AudioPlayerBase::isSeeking() {
+    Mutex::Autolock autoLock(mLock);
+    return mSeeking;
+}
+
+bool AudioPlayerBase::reachedEOS(status_t *finalStatus) {
+    *finalStatus = OK;
+
+    Mutex::Autolock autoLock(mLock);
+    *finalStatus = mFinalStatus;
+    return mReachedEOS;
+}
+
+// static
+size_t AudioPlayerBase::AudioSinkCallback(
+        MediaPlayerBase::AudioSink *audioSink,
+        void *buffer, size_t size, void *cookie) {
+    AudioPlayerBase *me = (AudioPlayerBase *)cookie;
+
+    return me->fillBuffer(buffer, size);
+}
+
+void AudioPlayerBase::AudioCallback(int event, void *info) {
+    if (event != AudioTrack::EVENT_MORE_DATA) {
+        return;
+    }
+
+    AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
+    size_t numBytesWritten = fillBuffer(buffer->raw, buffer->size);
+
+    buffer->size = numBytesWritten;
+}
+
+uint32_t AudioPlayerBase::getNumFramesPendingPlayout() const {
+    uint32_t numFramesPlayedOut;
+    status_t err;
+
+    if (mAudioSink != NULL) {
+        err = mAudioSink->getPosition(&numFramesPlayedOut);
+    } else {
+        err = mAudioTrack->getPosition(&numFramesPlayedOut);
+    }
+
+    if (err != OK || mNumFramesPlayed < numFramesPlayedOut) {
+        return 0;
+    }
+
+    // mNumFramesPlayed is the number of frames submitted
+    // to the audio sink for playback, but not all of them
+    // may have played out by now.
+    return mNumFramesPlayed - numFramesPlayedOut;
+}
+
+size_t AudioPlayerBase::fillBuffer(void *data, size_t size) {
+    if (mNumFramesPlayed == 0) {
+        LOGV("AudioCallback");
+    }
+
+    if (mReachedEOS) {
+        return 0;
+    }
+
+    bool postSeekComplete = false;
+    bool postEOS = false;
+    int64_t postEOSDelayUs = 0;
+
+    size_t size_done = 0;
+    size_t size_remaining = size;
+    while (size_remaining > 0) {
+        MediaSource::ReadOptions options;
+
+        {
+            Mutex::Autolock autoLock(mLock);
+
+            if (mSeeking) {
+                if (mIsFirstBuffer) {
+                    if (mFirstBuffer != NULL) {
+                        mFirstBuffer->release();
+                        mFirstBuffer = NULL;
+                    }
+                    mIsFirstBuffer = false;
+                }
+
+                options.setSeekTo(mSeekTimeUs);
+
+                if (mInputBuffer != NULL) {
+                    mInputBuffer->release();
+                    mInputBuffer = NULL;
+                }
+
+                mSeeking = false;
+                if (mObserver) {
+                    postSeekComplete = true;
+                }
+            }
+        }
+
+        if (mInputBuffer == NULL) {
+            status_t err;
+
+            if (mIsFirstBuffer) {
+                mInputBuffer = mFirstBuffer;
+                mFirstBuffer = NULL;
+                err = mFirstBufferResult;
+
+                mIsFirstBuffer = false;
+            } else {
+                err = mSource->read(&mInputBuffer, &options);
+            }
+
+            CHECK((err == OK && mInputBuffer != NULL)
+                   || (err != OK && mInputBuffer == NULL));
+
+            Mutex::Autolock autoLock(mLock);
+
+            if (err != OK) {
+                if (mObserver && !mReachedEOS) {
+                    // We don't want to post EOS right away but only
+                    // after all frames have actually been played out.
+
+                    // This is the number of frames submitted to the
+                    // AudioTrack that haven't been heard yet.
+                    uint32_t numFramesPendingPlayout =
+                        getNumFramesPendingPlayout();
+
+                    // This is the number of frames we're going to
+                    // submit to the AudioTrack by returning from this
+                    // callback.
+                    uint32_t numAdditionalFrames = size_done / mFrameSize;
+
+                    numFramesPendingPlayout += numAdditionalFrames;
+
+                    int64_t timeToCompletionUs =
+                        (1000000ll * numFramesPendingPlayout) / mSampleRate;
+
+                    LOGV("total number of frames played: %lld (%lld us)",
+                            (mNumFramesPlayed + numAdditionalFrames),
+                            1000000ll * (mNumFramesPlayed + numAdditionalFrames)
+                                / mSampleRate);
+
+                    LOGV("%d frames left to play, %lld us (%.2f secs)",
+                         numFramesPendingPlayout,
+                         timeToCompletionUs, timeToCompletionUs / 1E6);
+
+                    postEOS = true;
+                    postEOSDelayUs = timeToCompletionUs + mLatencyUs;
+                }
+
+                mReachedEOS = true;
+                mFinalStatus = err;
+                break;
+            }
+
+            CHECK(mInputBuffer->meta_data()->findInt64(
+                        kKeyTime, &mPositionTimeMediaUs));
+
+            mPositionTimeRealUs =
+                ((mNumFramesPlayed + size_done / mFrameSize) * 1000000)
+                    / mSampleRate;
+
+            LOGV("buffer->size() = %d, "
+                 "mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f",
+                 mInputBuffer->range_length(),
+                 mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6);
+        }
+
+        if (mInputBuffer->range_length() == 0) {
+            mInputBuffer->release();
+            mInputBuffer = NULL;
+
+            continue;
+        }
+
+        size_t copy = size_remaining;
+        if (copy > mInputBuffer->range_length()) {
+            copy = mInputBuffer->range_length();
+        }
+
+        memcpy((char *)data + size_done,
+               (const char *)mInputBuffer->data() + mInputBuffer->range_offset(),
+               copy);
+
+        mInputBuffer->set_range(mInputBuffer->range_offset() + copy,
+                                mInputBuffer->range_length() - copy);
+
+        size_done += copy;
+        size_remaining -= copy;
+    }
+
+    {
+        Mutex::Autolock autoLock(mLock);
+        mNumFramesPlayed += size_done / mFrameSize;
+    }
+
+    if (postEOS) {
+        mObserver->postAudioEOS(postEOSDelayUs);
+    }
+
+    if (postSeekComplete) {
+        mObserver->postAudioSeekComplete();
+    }
+
+    return size_done;
+}
+
+int64_t AudioPlayerBase::getRealTimeUs() {
+    Mutex::Autolock autoLock(mLock);
+    return getRealTimeUsLocked();
+}
+
+int64_t AudioPlayerBase::getRealTimeUsLocked() const {
+    return -mLatencyUs + (mNumFramesPlayed * 1000000) / mSampleRate;
+}
+
+int64_t AudioPlayerBase::getMediaTimeUs() {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mPositionTimeMediaUs < 0 || mPositionTimeRealUs < 0) {
+        if (mSeeking) {
+            return mSeekTimeUs;
+        }
+
+        return 0;
+    }
+
+    int64_t realTimeOffset = getRealTimeUsLocked() - mPositionTimeRealUs;
+    if (realTimeOffset < 0) {
+        realTimeOffset = 0;
+    }
+
+    return mPositionTimeMediaUs + realTimeOffset;
+}
+
+bool AudioPlayerBase::getMediaTimeMapping(
+        int64_t *realtime_us, int64_t *mediatime_us) {
+    Mutex::Autolock autoLock(mLock);
+
+    *realtime_us = mPositionTimeRealUs;
+    *mediatime_us = mPositionTimeMediaUs;
+
+    return mPositionTimeRealUs != -1 && mPositionTimeMediaUs != -1;
+}
+
+status_t AudioPlayerBase::seekTo(int64_t time_us) {
+    Mutex::Autolock autoLock(mLock);
+
+    mSeeking = true;
+    mPositionTimeRealUs = mPositionTimeMediaUs = -1;
+    mReachedEOS = false;
+    mSeekTimeUs = time_us;
+
+    if (mAudioSink != NULL) {
+        mAudioSink->flush();
+    } else {
+        mAudioTrack->flush();
+    }
+
+    return OK;
+}
+
+}  // namespace android
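A worked example of the end-of-stream bookkeeping in fillBuffer() above, with made-up numbers: at mSampleRate = 44100 and 8820 frames still pending playout (including the frames returned from this callback), timeToCompletionUs = 1000000 * 8820 / 44100 = 200000 us; with a 40 ms sink latency (mLatencyUs = 40000), postEOSDelayUs = 240000 us, so the observer is asked to defer the end-of-stream notification by roughly a quarter of a second after the source first returns an error.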
diff --git a/libvideoeditor/lvpp/AudioPlayerBase.h b/libvideoeditor/lvpp/AudioPlayerBase.h
new file mode 100644 (file)
index 0000000..31b9fa2
--- /dev/null
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef AUDIO_PLAYER_BASE_H_
+
+#define AUDIO_PLAYER_BASE_H_
+
+#include <media/MediaPlayerInterface.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/TimeSource.h>
+#include <utils/threads.h>
+
+namespace android {
+
+class MediaSource;
+class AudioTrack;
+class PreviewPlayerBase;
+
+class AudioPlayerBase : public TimeSource {
+public:
+    enum {
+        REACHED_EOS,
+        SEEK_COMPLETE
+    };
+
+    AudioPlayerBase(const sp<MediaPlayerBase::AudioSink> &audioSink,
+                PreviewPlayerBase *audioObserver = NULL);
+
+    virtual ~AudioPlayerBase();
+
+    // Caller retains ownership of "source".
+    void setSource(const sp<MediaSource> &source);
+
+    // Return time in us.
+    virtual int64_t getRealTimeUs();
+
+    status_t start(bool sourceAlreadyStarted = false);
+
+    void pause(bool playPendingSamples = false);
+    void resume();
+
+    // Returns the timestamp of the last buffer played (in us).
+    int64_t getMediaTimeUs();
+
+    // Returns true iff a mapping is established, i.e. the AudioPlayerBase
+    // has played at least one frame of audio.
+    bool getMediaTimeMapping(int64_t *realtime_us, int64_t *mediatime_us);
+
+    status_t seekTo(int64_t time_us);
+
+    bool isSeeking();
+    bool reachedEOS(status_t *finalStatus);
+
+private:
+    friend class VideoEditorAudioPlayer;
+    sp<MediaSource> mSource;
+    AudioTrack *mAudioTrack;
+
+    MediaBuffer *mInputBuffer;
+
+    int mSampleRate;
+    int64_t mLatencyUs;
+    size_t mFrameSize;
+
+    Mutex mLock;
+    int64_t mNumFramesPlayed;
+
+    int64_t mPositionTimeMediaUs;
+    int64_t mPositionTimeRealUs;
+
+    bool mSeeking;
+    bool mReachedEOS;
+    status_t mFinalStatus;
+    int64_t mSeekTimeUs;
+
+    bool mStarted;
+
+    bool mIsFirstBuffer;
+    status_t mFirstBufferResult;
+    MediaBuffer *mFirstBuffer;
+
+    sp<MediaPlayerBase::AudioSink> mAudioSink;
+    PreviewPlayerBase *mObserver;
+
+    static void AudioCallback(int event, void *user, void *info);
+    void AudioCallback(int event, void *info);
+
+    static size_t AudioSinkCallback(
+            MediaPlayerBase::AudioSink *audioSink,
+            void *data, size_t size, void *me);
+
+    size_t fillBuffer(void *data, size_t size);
+
+    int64_t getRealTimeUsLocked() const;
+
+    void reset();
+
+    uint32_t getNumFramesPendingPlayout() const;
+
+    AudioPlayerBase(const AudioPlayerBase &);
+    AudioPlayerBase &operator=(const AudioPlayerBase &);
+};
+
+}  // namespace android
+
+#endif  // AUDIO_PLAYER_BASE_H_
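A minimal usage sketch of the API declared above (illustration only: playRawAudioTrack and its parameters are hypothetical, and the source must deliver raw PCM because start() checks for MEDIA_MIMETYPE_AUDIO_RAW). It mirrors how PreviewPlayerBase::play_l() and startAudioPlayer_l() drive the class later in this change:

    #include "AudioPlayerBase.h"

    namespace android {

    // Hypothetical helper -- not part of the commit.
    status_t playRawAudioTrack(const sp<MediaPlayerBase::AudioSink> &sink,
                               const sp<MediaSource> &rawPcmSource,
                               PreviewPlayerBase *observer) {
        AudioPlayerBase *player = new AudioPlayerBase(sink, observer);
        player->setSource(rawPcmSource);

        status_t err = player->start(false /* sourceAlreadyStarted */);
        if (err != OK) {
            delete player;
            return err;
        }

        // Data is pulled on the AudioSink/AudioTrack callback thread via
        // fillBuffer(); the observer hears about completion through
        // postAudioEOS() and about seeks through postAudioSeekComplete().
        player->pause(true /* playPendingSamples */);
        delete player;   // ~AudioPlayerBase() calls reset(), which stops the source
        return OK;
    }

    }  // namespace android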
diff --git a/libvideoeditor/lvpp/PreviewPlayer.cpp b/libvideoeditor/lvpp/PreviewPlayer.cpp
index 552e1cd..8c9ef10 100755 (executable)
@@ -170,7 +170,7 @@ int PreviewLocalRenderer::init(
 }
 
 PreviewPlayer::PreviewPlayer()
-    : AwesomePlayer(),
+    : PreviewPlayerBase(),
       mCurrFramingEffectIndex(0)   ,
       mReportedWidth(0),
       mReportedHeight(0),
@@ -202,7 +202,7 @@ PreviewPlayer::PreviewPlayer()
     mStreamDoneEventPending = false;
 
     mCheckAudioStatusEvent = new PreviewPlayerEvent(
-        this, &AwesomePlayer::onCheckAudioStatus);
+        this, &PreviewPlayerBase::onCheckAudioStatus);
 
     mAudioStatusEventPending = false;
 
@@ -493,7 +493,7 @@ status_t PreviewPlayer::startAudioPlayer_l() {
     return OK;
 }
 
-status_t PreviewPlayer::setAudioPlayer(AudioPlayer *audioPlayer) {
+status_t PreviewPlayer::setAudioPlayer(AudioPlayerBase *audioPlayer) {
     Mutex::Autolock autoLock(mLock);
     CHECK(!(mFlags & PLAYING));
     mAudioPlayer = audioPlayer;
@@ -1513,13 +1513,13 @@ status_t PreviewPlayer::resume() {
 
     status_t err;
     if (state->mFileSource != NULL) {
-        err = AwesomePlayer::setDataSource_l(state->mFileSource);
+        err = PreviewPlayerBase::setDataSource_l(state->mFileSource);
 
         if (err == OK) {
             mFileSource = state->mFileSource;
         }
     } else {
-        err = AwesomePlayer::setDataSource_l(state->mUri, &state->mUriHeaders);
+        err = PreviewPlayerBase::setDataSource_l(state->mUri, &state->mUriHeaders);
     }
 
     if (err != OK) {
diff --git a/libvideoeditor/lvpp/PreviewPlayer.h b/libvideoeditor/lvpp/PreviewPlayer.h
index ad34d6c..b793639 100755 (executable)
 #include <media/stagefright/OMXClient.h>
 #include <media/stagefright/TimeSource.h>
 #include <utils/threads.h>
-#include <AwesomePlayer.h>
+#include "PreviewPlayerBase.h"
 #include "VideoEditorPreviewController.h"
 
 namespace android {
 
-struct AudioPlayer;
+struct AudioPlayerBase;
 struct DataSource;
 struct MediaBuffer;
 struct MediaExtractor;
@@ -51,7 +51,7 @@ private:
     PreviewPlayerRenderer &operator=(const PreviewPlayerRenderer &);
 };
 
-struct PreviewPlayer : public AwesomePlayer {
+struct PreviewPlayer : public PreviewPlayerBase {
     PreviewPlayer();
     ~PreviewPlayer();
 
@@ -93,7 +93,7 @@ struct PreviewPlayer : public AwesomePlayer {
     status_t setImageClipProperties(uint32_t width, uint32_t height);
     status_t readFirstVideoFrame();
     status_t getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs);
-    status_t setAudioPlayer(AudioPlayer *audioPlayer);
+    status_t setAudioPlayer(AudioPlayerBase *audioPlayer);
 
 private:
     friend struct PreviewPlayerEvent;
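Since the hunks above only swap the base-class name, PreviewPlayerBase has to expose the same members to its subclass that AwesomePlayer did. A partial sketch of that surface, inferred from the calls above and from PreviewPlayerBase.cpp below (the real PreviewPlayerBase.h is added by this commit but not excerpted here; the access level is an assumption):

    // Inferred, partial sketch -- illustration only.
    class PreviewPlayerBase {
    protected:
        status_t setDataSource_l(const char *uri,
                                 const KeyedVector<String8, String8> *headers);
        status_t setDataSource_l(const sp<DataSource> &dataSource);
        void onCheckAudioStatus();      // bound to mCheckAudioStatusEvent
        Mutex mLock;                    // taken in PreviewPlayer::setAudioPlayer()
        uint32_t mFlags;                // PLAYING and friends; exact type assumed
        AudioPlayerBase *mAudioPlayer;
        // ...plus the rest of the AwesomePlayer-derived state PreviewPlayer uses.
    };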
diff --git a/libvideoeditor/lvpp/PreviewPlayerBase.cpp b/libvideoeditor/lvpp/PreviewPlayerBase.cpp
new file mode 100644 (file)
index 0000000..62b8a72
--- /dev/null
@@ -0,0 +1,1915 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#undef DEBUG_HDCP
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "PreviewPlayerBase"
+#include <utils/Log.h>
+
+#include <dlfcn.h>
+
+#include "include/ARTSPController.h"
+#include "PreviewPlayerBase.h"
+#include "AudioPlayerBase.h"
+#include "include/SoftwareRenderer.h"
+#include "include/NuCachedSource2.h"
+#include "include/ThrottledSource.h"
+#include "include/MPEG2TSExtractor.h"
+
+#include <binder/IPCThreadState.h>
+#include <binder/IServiceManager.h>
+#include <media/IMediaPlayerService.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/OMXCodec.h>
+
+#include <surfaceflinger/Surface.h>
+#include <gui/ISurfaceTexture.h>
+#include <gui/SurfaceTextureClient.h>
+#include <surfaceflinger/ISurfaceComposer.h>
+
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <cutils/properties.h>
+
+#define USE_SURFACE_ALLOC 1
+
+namespace android {
+
+static int64_t kLowWaterMarkUs = 2000000ll;  // 2secs
+static int64_t kHighWaterMarkUs = 10000000ll;  // 10secs
+static int64_t kHighWaterMarkRTSPUs = 4000000ll;  // 4secs
+static const size_t kLowWaterMarkBytes = 40000;
+static const size_t kHighWaterMarkBytes = 200000;
+
+struct AwesomeEvent : public TimedEventQueue::Event {
+    AwesomeEvent(
+            PreviewPlayerBase *player,
+            void (PreviewPlayerBase::*method)())
+        : mPlayer(player),
+          mMethod(method) {
+    }
+
+protected:
+    virtual ~AwesomeEvent() {}
+
+    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
+        (mPlayer->*mMethod)();
+    }
+
+private:
+    PreviewPlayerBase *mPlayer;
+    void (PreviewPlayerBase::*mMethod)();
+
+    AwesomeEvent(const AwesomeEvent &);
+    AwesomeEvent &operator=(const AwesomeEvent &);
+};
+
+struct AwesomeLocalRenderer : public AwesomeRenderer {
+    AwesomeLocalRenderer(
+            const sp<ANativeWindow> &nativeWindow, const sp<MetaData> &meta)
+        : mTarget(new SoftwareRenderer(nativeWindow, meta)) {
+    }
+
+    virtual void render(MediaBuffer *buffer) {
+        render((const uint8_t *)buffer->data() + buffer->range_offset(),
+               buffer->range_length());
+    }
+
+    void render(const void *data, size_t size) {
+        mTarget->render(data, size, NULL);
+    }
+
+protected:
+    virtual ~AwesomeLocalRenderer() {
+        delete mTarget;
+        mTarget = NULL;
+    }
+
+private:
+    SoftwareRenderer *mTarget;
+
+    AwesomeLocalRenderer(const AwesomeLocalRenderer &);
+    AwesomeLocalRenderer &operator=(const AwesomeLocalRenderer &);
+};
+
+struct AwesomeNativeWindowRenderer : public AwesomeRenderer {
+    AwesomeNativeWindowRenderer(
+            const sp<ANativeWindow> &nativeWindow,
+            int32_t rotationDegrees)
+        : mNativeWindow(nativeWindow) {
+        applyRotation(rotationDegrees);
+    }
+
+    virtual void render(MediaBuffer *buffer) {
+        status_t err = mNativeWindow->queueBuffer(
+                mNativeWindow.get(), buffer->graphicBuffer().get());
+        if (err != 0) {
+            LOGE("queueBuffer failed with error %s (%d)", strerror(-err),
+                    -err);
+            return;
+        }
+
+        sp<MetaData> metaData = buffer->meta_data();
+        metaData->setInt32(kKeyRendered, 1);
+    }
+
+protected:
+    virtual ~AwesomeNativeWindowRenderer() {}
+
+private:
+    sp<ANativeWindow> mNativeWindow;
+
+    void applyRotation(int32_t rotationDegrees) {
+        uint32_t transform;
+        switch (rotationDegrees) {
+            case 0: transform = 0; break;
+            case 90: transform = HAL_TRANSFORM_ROT_90; break;
+            case 180: transform = HAL_TRANSFORM_ROT_180; break;
+            case 270: transform = HAL_TRANSFORM_ROT_270; break;
+            default: transform = 0; break;
+        }
+
+        if (transform) {
+            CHECK_EQ(0, native_window_set_buffers_transform(
+                        mNativeWindow.get(), transform));
+        }
+    }
+
+    AwesomeNativeWindowRenderer(const AwesomeNativeWindowRenderer &);
+    AwesomeNativeWindowRenderer &operator=(
+            const AwesomeNativeWindowRenderer &);
+};
+
+// To collect the decoder usage
+void addBatteryData(uint32_t params) {
+    sp<IBinder> binder =
+        defaultServiceManager()->getService(String16("media.player"));
+    sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+    CHECK(service.get() != NULL);
+
+    service->addBatteryData(params);
+}
+
+////////////////////////////////////////////////////////////////////////////////
+PreviewPlayerBase::PreviewPlayerBase()
+    : mQueueStarted(false),
+      mTimeSource(NULL),
+      mVideoRendererIsPreview(false),
+      mAudioPlayer(NULL),
+      mDisplayWidth(0),
+      mDisplayHeight(0),
+      mFlags(0),
+      mExtractorFlags(0),
+      mVideoBuffer(NULL),
+      mDecryptHandle(NULL),
+      mLastVideoTimeUs(-1) {
+    CHECK_EQ(mClient.connect(), (status_t)OK);
+
+    DataSource::RegisterDefaultSniffers();
+
+    mVideoEvent = new AwesomeEvent(this, &PreviewPlayerBase::onVideoEvent);
+    mVideoEventPending = false;
+    mStreamDoneEvent = new AwesomeEvent(this, &PreviewPlayerBase::onStreamDone);
+    mStreamDoneEventPending = false;
+    mBufferingEvent = new AwesomeEvent(this, &PreviewPlayerBase::onBufferingUpdate);
+    mBufferingEventPending = false;
+    mVideoLagEvent = new AwesomeEvent(this, &PreviewPlayerBase::onVideoLagUpdate);
+    mVideoEventPending = false;
+
+    mCheckAudioStatusEvent = new AwesomeEvent(
+            this, &PreviewPlayerBase::onCheckAudioStatus);
+
+    mAudioStatusEventPending = false;
+
+    reset();
+}
+
+PreviewPlayerBase::~PreviewPlayerBase() {
+    if (mQueueStarted) {
+        mQueue.stop();
+    }
+
+    reset();
+
+    mClient.disconnect();
+}
+
+void PreviewPlayerBase::cancelPlayerEvents(bool keepBufferingGoing) {
+    mQueue.cancelEvent(mVideoEvent->eventID());
+    mVideoEventPending = false;
+    mQueue.cancelEvent(mStreamDoneEvent->eventID());
+    mStreamDoneEventPending = false;
+    mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
+    mAudioStatusEventPending = false;
+    mQueue.cancelEvent(mVideoLagEvent->eventID());
+    mVideoLagEventPending = false;
+
+    if (!keepBufferingGoing) {
+        mQueue.cancelEvent(mBufferingEvent->eventID());
+        mBufferingEventPending = false;
+    }
+}
+
+void PreviewPlayerBase::setListener(const wp<MediaPlayerBase> &listener) {
+    Mutex::Autolock autoLock(mLock);
+    mListener = listener;
+}
+
+status_t PreviewPlayerBase::setDataSource(
+        const char *uri, const KeyedVector<String8, String8> *headers) {
+    Mutex::Autolock autoLock(mLock);
+    return setDataSource_l(uri, headers);
+}
+
+status_t PreviewPlayerBase::setDataSource_l(
+        const char *uri, const KeyedVector<String8, String8> *headers) {
+    reset_l();
+
+    mUri = uri;
+
+    if (headers) {
+        mUriHeaders = *headers;
+
+        ssize_t index = mUriHeaders.indexOfKey(String8("x-hide-urls-from-log"));
+        if (index >= 0) {
+            // Browser is in "incognito" mode, suppress logging URLs.
+
+            // This isn't something that should be passed to the server.
+            mUriHeaders.removeItemsAt(index);
+
+            mFlags |= INCOGNITO;
+        }
+    }
+
+    if (!(mFlags & INCOGNITO)) {
+        LOGI("setDataSource_l('%s')", mUri.string());
+    } else {
+        LOGI("setDataSource_l(URL suppressed)");
+    }
+
+    // The actual work will be done during preparation in the call to
+    // ::finishSetDataSource_l to avoid blocking the calling thread in
+    // setDataSource for any significant time.
+
+    return OK;
+}
+
+status_t PreviewPlayerBase::setDataSource(
+        int fd, int64_t offset, int64_t length) {
+    Mutex::Autolock autoLock(mLock);
+
+    reset_l();
+
+    sp<DataSource> dataSource = new FileSource(fd, offset, length);
+
+    status_t err = dataSource->initCheck();
+
+    if (err != OK) {
+        return err;
+    }
+
+    mFileSource = dataSource;
+
+    return setDataSource_l(dataSource);
+}
+
+status_t PreviewPlayerBase::setDataSource(const sp<IStreamSource> &source) {
+    return INVALID_OPERATION;
+}
+
+status_t PreviewPlayerBase::setDataSource_l(
+        const sp<DataSource> &dataSource) {
+    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
+
+    if (extractor == NULL) {
+        return UNKNOWN_ERROR;
+    }
+
+    dataSource->getDrmInfo(mDecryptHandle, &mDrmManagerClient);
+    if (mDecryptHandle != NULL) {
+        CHECK(mDrmManagerClient);
+        if (RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
+            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_DRM_NO_LICENSE);
+        }
+    }
+
+    return setDataSource_l(extractor);
+}
+
+status_t PreviewPlayerBase::setDataSource_l(const sp<MediaExtractor> &extractor) {
+    // Attempt to approximate the overall stream bitrate by summing all
+    // tracks' individual bitrates; if not all of them advertise a bitrate,
+    // we have to fail.
+
+    int64_t totalBitRate = 0;
+
+    for (size_t i = 0; i < extractor->countTracks(); ++i) {
+        sp<MetaData> meta = extractor->getTrackMetaData(i);
+
+        int32_t bitrate;
+        if (!meta->findInt32(kKeyBitRate, &bitrate)) {
+            totalBitRate = -1;
+            break;
+        }
+
+        totalBitRate += bitrate;
+    }
+
+    mBitrate = totalBitRate;
+
+    LOGV("mBitrate = %lld bits/sec", mBitrate);
+
+    bool haveAudio = false;
+    bool haveVideo = false;
+    for (size_t i = 0; i < extractor->countTracks(); ++i) {
+        sp<MetaData> meta = extractor->getTrackMetaData(i);
+
+        const char *mime;
+        CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+        if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
+            setVideoSource(extractor->getTrack(i));
+            haveVideo = true;
+
+            // Set the presentation/display size
+            int32_t displayWidth, displayHeight;
+            bool success = meta->findInt32(kKeyDisplayWidth, &displayWidth);
+            if (success) {
+                success = meta->findInt32(kKeyDisplayHeight, &displayHeight);
+            }
+            if (success) {
+                mDisplayWidth = displayWidth;
+                mDisplayHeight = displayHeight;
+            }
+
+        } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
+            setAudioSource(extractor->getTrack(i));
+            haveAudio = true;
+
+            if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+                // Only do this for vorbis audio, none of the other audio
+                // formats even support this ringtone specific hack and
+                // retrieving the metadata on some extractors may turn out
+                // to be very expensive.
+                sp<MetaData> fileMeta = extractor->getMetaData();
+                int32_t loop;
+                if (fileMeta != NULL
+                        && fileMeta->findInt32(kKeyAutoLoop, &loop) && loop != 0) {
+                    mFlags |= AUTO_LOOPING;
+                }
+            }
+        }
+
+        if (haveAudio && haveVideo) {
+            break;
+        }
+    }
+
+    if (!haveAudio && !haveVideo) {
+        return UNKNOWN_ERROR;
+    }
+
+    mExtractorFlags = extractor->flags();
+
+    return OK;
+}
+
+void PreviewPlayerBase::reset() {
+    Mutex::Autolock autoLock(mLock);
+    reset_l();
+}
+
+void PreviewPlayerBase::reset_l() {
+    mDisplayWidth = 0;
+    mDisplayHeight = 0;
+
+    if (mDecryptHandle != NULL) {
+            mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+                    Playback::STOP, 0);
+            mDecryptHandle = NULL;
+            mDrmManagerClient = NULL;
+    }
+
+    if (mFlags & PLAYING) {
+        uint32_t params = IMediaPlayerService::kBatteryDataTrackDecoder;
+        if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
+            params |= IMediaPlayerService::kBatteryDataTrackAudio;
+        }
+        if (mVideoSource != NULL) {
+            params |= IMediaPlayerService::kBatteryDataTrackVideo;
+        }
+        addBatteryData(params);
+    }
+
+    if (mFlags & PREPARING) {
+        mFlags |= PREPARE_CANCELLED;
+        if (mConnectingDataSource != NULL) {
+            LOGI("interrupting the connection process");
+            mConnectingDataSource->disconnect();
+        } else if (mConnectingRTSPController != NULL) {
+            LOGI("interrupting the connection process");
+            mConnectingRTSPController->disconnect();
+        }
+
+        if (mFlags & PREPARING_CONNECTED) {
+            // We are basically done preparing, we're just buffering
+            // enough data to start playback, we can safely interrupt that.
+            finishAsyncPrepare_l();
+        }
+    }
+
+    while (mFlags & PREPARING) {
+        mPreparedCondition.wait(mLock);
+    }
+
+    cancelPlayerEvents();
+
+    mCachedSource.clear();
+    mAudioTrack.clear();
+    mVideoTrack.clear();
+
+    // Shut down audio first, so that the response to the reset request
+    // appears to happen instantaneously as far as the user is concerned.
+    // If we did this later, audio would continue playing while we
+    // shut down the video-related resources and the player would appear
+    // less responsive to a reset request.
+    if (mAudioPlayer == NULL && mAudioSource != NULL) {
+        // If we had an audio player, it would have effectively
+        // taken possession of the audio source and stopped it when
+        // _it_ is stopped. Otherwise this is still our responsibility.
+        mAudioSource->stop();
+    }
+    mAudioSource.clear();
+
+    mTimeSource = NULL;
+
+    delete mAudioPlayer;
+    mAudioPlayer = NULL;
+
+    mVideoRenderer.clear();
+
+    if (mRTSPController != NULL) {
+        mRTSPController->disconnect();
+        mRTSPController.clear();
+    }
+
+    if (mVideoSource != NULL) {
+        shutdownVideoDecoder_l();
+    }
+
+    mDurationUs = -1;
+    mFlags = 0;
+    mExtractorFlags = 0;
+    mTimeSourceDeltaUs = 0;
+    mVideoTimeUs = 0;
+
+    mSeeking = NO_SEEK;
+    mSeekNotificationSent = false;
+    mSeekTimeUs = 0;
+
+    mUri.setTo("");
+    mUriHeaders.clear();
+
+    mFileSource.clear();
+
+    mBitrate = -1;
+    mLastVideoTimeUs = -1;
+}
+
+void PreviewPlayerBase::notifyListener_l(int msg, int ext1, int ext2) {
+    if (mListener != NULL) {
+        sp<MediaPlayerBase> listener = mListener.promote();
+
+        if (listener != NULL) {
+            listener->sendEvent(msg, ext1, ext2);
+        }
+    }
+}
+
+bool PreviewPlayerBase::getBitrate(int64_t *bitrate) {
+    off64_t size;
+    if (mDurationUs >= 0 && mCachedSource != NULL
+            && mCachedSource->getSize(&size) == OK) {
+        *bitrate = size * 8000000ll / mDurationUs;  // in bits/sec
+        return true;
+    }
+
+    if (mBitrate >= 0) {
+        *bitrate = mBitrate;
+        return true;
+    }
+
+    *bitrate = 0;
+
+    return false;
+}
+
+// Returns true iff cached duration is available/applicable.
+bool PreviewPlayerBase::getCachedDuration_l(int64_t *durationUs, bool *eos) {
+    int64_t bitrate;
+
+    if (mRTSPController != NULL) {
+        *durationUs = mRTSPController->getQueueDurationUs(eos);
+        return true;
+    } else if (mCachedSource != NULL && getBitrate(&bitrate)) {
+        status_t finalStatus;
+        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
+        *durationUs = cachedDataRemaining * 8000000ll / bitrate;
+        *eos = (finalStatus != OK);
+        return true;
+    }
+
+    return false;
+}
+
+void PreviewPlayerBase::ensureCacheIsFetching_l() {
+    if (mCachedSource != NULL) {
+        mCachedSource->resumeFetchingIfNecessary();
+    }
+}
+
+void PreviewPlayerBase::onVideoLagUpdate() {
+    Mutex::Autolock autoLock(mLock);
+    if (!mVideoLagEventPending) {
+        return;
+    }
+    mVideoLagEventPending = false;
+
+    int64_t audioTimeUs = mAudioPlayer->getMediaTimeUs();
+    int64_t videoLateByUs = audioTimeUs - mVideoTimeUs;
+
+    if (!(mFlags & VIDEO_AT_EOS) && videoLateByUs > 300000ll) {
+        LOGV("video late by %lld ms.", videoLateByUs / 1000ll);
+
+        notifyListener_l(
+                MEDIA_INFO,
+                MEDIA_INFO_VIDEO_TRACK_LAGGING,
+                videoLateByUs / 1000ll);
+    }
+
+    postVideoLagEvent_l();
+}
+
+void PreviewPlayerBase::onBufferingUpdate() {
+    Mutex::Autolock autoLock(mLock);
+    if (!mBufferingEventPending) {
+        return;
+    }
+    mBufferingEventPending = false;
+
+    if (mCachedSource != NULL) {
+        status_t finalStatus;
+        size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus);
+        bool eos = (finalStatus != OK);
+
+        if (eos) {
+            if (finalStatus == ERROR_END_OF_STREAM) {
+                notifyListener_l(MEDIA_BUFFERING_UPDATE, 100);
+            }
+            if (mFlags & PREPARING) {
+                LOGV("cache has reached EOS, prepare is done.");
+                finishAsyncPrepare_l();
+            }
+        } else {
+            int64_t bitrate;
+            if (getBitrate(&bitrate)) {
+                size_t cachedSize = mCachedSource->cachedSize();
+                int64_t cachedDurationUs = cachedSize * 8000000ll / bitrate;
+
+                int percentage = 100.0 * (double)cachedDurationUs / mDurationUs;
+                if (percentage > 100) {
+                    percentage = 100;
+                }
+
+                notifyListener_l(MEDIA_BUFFERING_UPDATE, percentage);
+            } else {
+                // We don't know the bitrate of the stream, use absolute size
+                // limits to maintain the cache.
+
+                if ((mFlags & PLAYING) && !eos
+                        && (cachedDataRemaining < kLowWaterMarkBytes)) {
+                    LOGI("cache is running low (< %d), pausing.",
+                         kLowWaterMarkBytes);
+                    mFlags |= CACHE_UNDERRUN;
+                    pause_l();
+                    ensureCacheIsFetching_l();
+                    notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
+                } else if (eos || cachedDataRemaining > kHighWaterMarkBytes) {
+                    if (mFlags & CACHE_UNDERRUN) {
+                        LOGI("cache has filled up (> %d), resuming.",
+                             kHighWaterMarkBytes);
+                        mFlags &= ~CACHE_UNDERRUN;
+                        play_l();
+                        notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
+                    } else if (mFlags & PREPARING) {
+                        LOGV("cache has filled up (> %d), prepare is done",
+                             kHighWaterMarkBytes);
+                        finishAsyncPrepare_l();
+                    }
+                }
+            }
+        }
+    }
+
+    int64_t cachedDurationUs;
+    bool eos;
+    if (getCachedDuration_l(&cachedDurationUs, &eos)) {
+        LOGV("cachedDurationUs = %.2f secs, eos=%d",
+             cachedDurationUs / 1E6, eos);
+
+        int64_t highWaterMarkUs =
+            (mRTSPController != NULL) ? kHighWaterMarkRTSPUs : kHighWaterMarkUs;
+
+        if ((mFlags & PLAYING) && !eos
+                && (cachedDurationUs < kLowWaterMarkUs)) {
+            LOGI("cache is running low (%.2f secs), pausing.",
+                 cachedDurationUs / 1E6);
+            mFlags |= CACHE_UNDERRUN;
+            pause_l();
+            ensureCacheIsFetching_l();
+            notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
+        } else if (eos || cachedDurationUs > highWaterMarkUs) {
+            if (mFlags & CACHE_UNDERRUN) {
+                LOGI("cache has filled up (%.2f secs), resuming.",
+                     cachedDurationUs / 1E6);
+                mFlags &= ~CACHE_UNDERRUN;
+                play_l();
+                notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_END);
+            } else if (mFlags & PREPARING) {
+                LOGV("cache has filled up (%.2f secs), prepare is done",
+                     cachedDurationUs / 1E6);
+                finishAsyncPrepare_l();
+            }
+        }
+    }
+
+    postBufferingEvent_l();
+}
+
+void PreviewPlayerBase::onStreamDone() {
+    // Posted whenever any stream finishes playing.
+
+    Mutex::Autolock autoLock(mLock);
+    if (!mStreamDoneEventPending) {
+        return;
+    }
+    mStreamDoneEventPending = false;
+
+    if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
+        LOGV("MEDIA_ERROR %d", mStreamDoneStatus);
+
+        notifyListener_l(
+                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);
+
+        pause_l(true /* at eos */);
+
+        mFlags |= AT_EOS;
+        return;
+    }
+
+    const bool allDone =
+        (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
+            && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));
+
+    if (!allDone) {
+        return;
+    }
+
+    if (mFlags & (LOOPING | AUTO_LOOPING)) {
+        seekTo_l(0);
+
+        if (mVideoSource != NULL) {
+            postVideoEvent_l();
+        }
+    } else {
+        LOGV("MEDIA_PLAYBACK_COMPLETE");
+        notifyListener_l(MEDIA_PLAYBACK_COMPLETE);
+
+        pause_l(true /* at eos */);
+
+        mFlags |= AT_EOS;
+    }
+}
+
+status_t PreviewPlayerBase::play() {
+    Mutex::Autolock autoLock(mLock);
+
+    mFlags &= ~CACHE_UNDERRUN;
+
+    return play_l();
+}
+
+status_t PreviewPlayerBase::play_l() {
+    mFlags &= ~SEEK_PREVIEW;
+
+    if (mFlags & PLAYING) {
+        return OK;
+    }
+
+    if (!(mFlags & PREPARED)) {
+        status_t err = prepare_l();
+
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    mFlags |= PLAYING;
+    mFlags |= FIRST_FRAME;
+
+    if (mDecryptHandle != NULL) {
+        int64_t position;
+        getPosition(&position);
+        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+                Playback::START, position / 1000);
+    }
+
+    if (mAudioSource != NULL) {
+        if (mAudioPlayer == NULL) {
+            if (mAudioSink != NULL) {
+                mAudioPlayer = new AudioPlayerBase(mAudioSink, this);
+                mAudioPlayer->setSource(mAudioSource);
+
+                mTimeSource = mAudioPlayer;
+
+                // If there was a seek request before we ever started,
+                // honor the request now.
+                // Make sure to do this before starting the audio player
+                // to avoid a race condition.
+                seekAudioIfNecessary_l();
+            }
+        }
+
+        CHECK(!(mFlags & AUDIO_RUNNING));
+
+        if (mVideoSource == NULL) {
+            status_t err = startAudioPlayer_l();
+
+            if (err != OK) {
+                delete mAudioPlayer;
+                mAudioPlayer = NULL;
+
+                mFlags &= ~(PLAYING | FIRST_FRAME);
+
+                if (mDecryptHandle != NULL) {
+                    mDrmManagerClient->setPlaybackStatus(
+                            mDecryptHandle, Playback::STOP, 0);
+                }
+
+                return err;
+            }
+        }
+    }
+
+    if (mTimeSource == NULL && mAudioPlayer == NULL) {
+        mTimeSource = &mSystemTimeSource;
+    }
+
+    if (mVideoSource != NULL) {
+        // Kick off video playback
+        postVideoEvent_l();
+
+        if (mAudioSource != NULL && mVideoSource != NULL) {
+            postVideoLagEvent_l();
+        }
+    }
+
+    if (mFlags & AT_EOS) {
+        // Legacy behaviour, if a stream finishes playing and then
+        // is started again, we play from the start...
+        seekTo_l(0);
+    }
+
+    uint32_t params = IMediaPlayerService::kBatteryDataCodecStarted
+        | IMediaPlayerService::kBatteryDataTrackDecoder;
+    if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
+        params |= IMediaPlayerService::kBatteryDataTrackAudio;
+    }
+    if (mVideoSource != NULL) {
+        params |= IMediaPlayerService::kBatteryDataTrackVideo;
+    }
+    addBatteryData(params);
+
+    return OK;
+}
+
+status_t PreviewPlayerBase::startAudioPlayer_l() {
+    CHECK(!(mFlags & AUDIO_RUNNING));
+
+    if (mAudioSource == NULL || mAudioPlayer == NULL) {
+        return OK;
+    }
+
+    if (!(mFlags & AUDIOPLAYER_STARTED)) {
+        mFlags |= AUDIOPLAYER_STARTED;
+
+        // We've already started the MediaSource in order to enable
+        // the prefetcher to read its data.
+        status_t err = mAudioPlayer->start(
+                true /* sourceAlreadyStarted */);
+
+        if (err != OK) {
+            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
+            return err;
+        }
+    } else {
+        mAudioPlayer->resume();
+    }
+
+    mFlags |= AUDIO_RUNNING;
+
+    mWatchForAudioEOS = true;
+
+    return OK;
+}
+
+void PreviewPlayerBase::notifyVideoSize_l() {
+    sp<MetaData> meta = mVideoSource->getFormat();
+
+    int32_t cropLeft, cropTop, cropRight, cropBottom;
+    if (!meta->findRect(
+                kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) {
+        int32_t width, height;
+        CHECK(meta->findInt32(kKeyWidth, &width));
+        CHECK(meta->findInt32(kKeyHeight, &height));
+
+        cropLeft = cropTop = 0;
+        cropRight = width - 1;
+        cropBottom = height - 1;
+
+        LOGV("got dimensions only %d x %d", width, height);
+    } else {
+        LOGV("got crop rect %d, %d, %d, %d",
+             cropLeft, cropTop, cropRight, cropBottom);
+    }
+
+    int32_t displayWidth;
+    if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
+        LOGV("Display width changed (%d=>%d)", mDisplayWidth, displayWidth);
+        mDisplayWidth = displayWidth;
+    }
+    int32_t displayHeight;
+    if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
+        LOGV("Display height changed (%d=>%d)", mDisplayHeight, displayHeight);
+        mDisplayHeight = displayHeight;
+    }
+
+    int32_t usableWidth = cropRight - cropLeft + 1;
+    int32_t usableHeight = cropBottom - cropTop + 1;
+    if (mDisplayWidth != 0) {
+        usableWidth = mDisplayWidth;
+    }
+    if (mDisplayHeight != 0) {
+        usableHeight = mDisplayHeight;
+    }
+
+    int32_t rotationDegrees;
+    if (!mVideoTrack->getFormat()->findInt32(
+                kKeyRotation, &rotationDegrees)) {
+        rotationDegrees = 0;
+    }
+
+    if (rotationDegrees == 90 || rotationDegrees == 270) {
+        notifyListener_l(
+                MEDIA_SET_VIDEO_SIZE, usableHeight, usableWidth);
+    } else {
+        notifyListener_l(
+                MEDIA_SET_VIDEO_SIZE, usableWidth, usableHeight);
+    }
+}
+
+void PreviewPlayerBase::initRenderer_l() {
+    if (mNativeWindow == NULL) {
+        return;
+    }
+
+    sp<MetaData> meta = mVideoSource->getFormat();
+
+    int32_t format;
+    const char *component;
+    int32_t decodedWidth, decodedHeight;
+    CHECK(meta->findInt32(kKeyColorFormat, &format));
+    CHECK(meta->findCString(kKeyDecoderComponent, &component));
+    CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
+    CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
+
+    int32_t rotationDegrees;
+    if (!mVideoTrack->getFormat()->findInt32(
+                kKeyRotation, &rotationDegrees)) {
+        rotationDegrees = 0;
+    }
+
+    mVideoRenderer.clear();
+
+    // Must ensure that mVideoRenderer's destructor is actually executed
+    // before creating a new one.
+    IPCThreadState::self()->flushCommands();
+
+    if (USE_SURFACE_ALLOC && strncmp(component, "OMX.", 4) == 0) {
+        // Hardware decoders avoid the CPU color conversion by decoding
+        // directly to ANativeBuffers, so we must use a renderer that
+        // just pushes those buffers to the ANativeWindow.
+        mVideoRenderer =
+            new AwesomeNativeWindowRenderer(mNativeWindow, rotationDegrees);
+    } else {
+        // Other decoders are instantiated locally and as a consequence
+        // allocate their buffers in local address space.  This renderer
+        // then performs a color conversion and copy to get the data
+        // into the ANativeBuffer.
+        mVideoRenderer = new AwesomeLocalRenderer(mNativeWindow, meta);
+    }
+}
+
+status_t PreviewPlayerBase::pause() {
+    Mutex::Autolock autoLock(mLock);
+
+    mFlags &= ~CACHE_UNDERRUN;
+
+    return pause_l();
+}
+
+status_t PreviewPlayerBase::pause_l(bool at_eos) {
+    if (!(mFlags & PLAYING)) {
+        return OK;
+    }
+
+    cancelPlayerEvents(true /* keepBufferingGoing */);
+
+    if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
+        if (at_eos) {
+            // If we played the audio stream to completion we
+            // want to make sure that all samples remaining in the audio
+            // track's queue are played out.
+            mAudioPlayer->pause(true /* playPendingSamples */);
+        } else {
+            mAudioPlayer->pause();
+        }
+
+        mFlags &= ~AUDIO_RUNNING;
+    }
+
+    mFlags &= ~PLAYING;
+
+    if (mDecryptHandle != NULL) {
+        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+                Playback::PAUSE, 0);
+    }
+
+    uint32_t params = IMediaPlayerService::kBatteryDataTrackDecoder;
+    if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
+        params |= IMediaPlayerService::kBatteryDataTrackAudio;
+    }
+    if (mVideoSource != NULL) {
+        params |= IMediaPlayerService::kBatteryDataTrackVideo;
+    }
+
+    addBatteryData(params);
+
+    return OK;
+}
+
+bool PreviewPlayerBase::isPlaying() const {
+    return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
+}
+
+void PreviewPlayerBase::setSurface(const sp<Surface> &surface) {
+    Mutex::Autolock autoLock(mLock);
+
+    mSurface = surface;
+    setNativeWindow_l(surface);
+}
+
+void PreviewPlayerBase::setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
+    Mutex::Autolock autoLock(mLock);
+
+    mSurface.clear();
+    if (surfaceTexture != NULL) {
+        setNativeWindow_l(new SurfaceTextureClient(surfaceTexture));
+    }
+}
+
+void PreviewPlayerBase::shutdownVideoDecoder_l() {
+    if (mVideoBuffer) {
+        mVideoBuffer->release();
+        mVideoBuffer = NULL;
+    }
+
+    mVideoSource->stop();
+
+    // The following hack is necessary to ensure that the OMX
+    // component is completely released by the time we may try
+    // to instantiate it again.
+    wp<MediaSource> tmp = mVideoSource;
+    mVideoSource.clear();
+    while (tmp.promote() != NULL) {
+        usleep(1000);
+    }
+    IPCThreadState::self()->flushCommands();
+}
+
+void PreviewPlayerBase::setNativeWindow_l(const sp<ANativeWindow> &native) {
+    mNativeWindow = native;
+
+    if (mVideoSource == NULL) {
+        return;
+    }
+
+    LOGI("attempting to reconfigure to use new surface");
+
+    bool wasPlaying = (mFlags & PLAYING) != 0;
+
+    pause_l();
+    mVideoRenderer.clear();
+
+    shutdownVideoDecoder_l();
+
+    CHECK_EQ(initVideoDecoder(), (status_t)OK);
+
+    if (mLastVideoTimeUs >= 0) {
+        mSeeking = SEEK;
+        mSeekNotificationSent = true;
+        mSeekTimeUs = mLastVideoTimeUs;
+        mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);
+    }
+
+    if (wasPlaying) {
+        play_l();
+    }
+}
+
+void PreviewPlayerBase::setAudioSink(
+        const sp<MediaPlayerBase::AudioSink> &audioSink) {
+    Mutex::Autolock autoLock(mLock);
+
+    mAudioSink = audioSink;
+}
+
+status_t PreviewPlayerBase::setLooping(bool shouldLoop) {
+    Mutex::Autolock autoLock(mLock);
+
+    mFlags = mFlags & ~LOOPING;
+
+    if (shouldLoop) {
+        mFlags |= LOOPING;
+    }
+
+    return OK;
+}
+
+status_t PreviewPlayerBase::getDuration(int64_t *durationUs) {
+    Mutex::Autolock autoLock(mMiscStateLock);
+
+    if (mDurationUs < 0) {
+        return UNKNOWN_ERROR;
+    }
+
+    *durationUs = mDurationUs;
+
+    return OK;
+}
+
+status_t PreviewPlayerBase::getPosition(int64_t *positionUs) {
+    if (mRTSPController != NULL) {
+        *positionUs = mRTSPController->getNormalPlayTimeUs();
+    }
+    else if (mSeeking != NO_SEEK) {
+        *positionUs = mSeekTimeUs;
+    } else if (mVideoSource != NULL
+            && (mAudioPlayer == NULL || !(mFlags & VIDEO_AT_EOS))) {
+        Mutex::Autolock autoLock(mMiscStateLock);
+        *positionUs = mVideoTimeUs;
+    } else if (mAudioPlayer != NULL) {
+        *positionUs = mAudioPlayer->getMediaTimeUs();
+    } else {
+        *positionUs = 0;
+    }
+
+    return OK;
+}
+
+status_t PreviewPlayerBase::seekTo(int64_t timeUs) {
+    if (mExtractorFlags & MediaExtractor::CAN_SEEK) {
+        Mutex::Autolock autoLock(mLock);
+        return seekTo_l(timeUs);
+    }
+
+    return OK;
+}
+
+// static
+void PreviewPlayerBase::OnRTSPSeekDoneWrapper(void *cookie) {
+    static_cast<PreviewPlayerBase *>(cookie)->onRTSPSeekDone();
+}
+
+void PreviewPlayerBase::onRTSPSeekDone() {
+    notifyListener_l(MEDIA_SEEK_COMPLETE);
+    mSeekNotificationSent = true;
+}
+
+status_t PreviewPlayerBase::seekTo_l(int64_t timeUs) {
+    if (mRTSPController != NULL) {
+        mRTSPController->seekAsync(timeUs, OnRTSPSeekDoneWrapper, this);
+        return OK;
+    }
+
+    if (mFlags & CACHE_UNDERRUN) {
+        mFlags &= ~CACHE_UNDERRUN;
+        play_l();
+    }
+
+    if ((mFlags & PLAYING) && mVideoSource != NULL && (mFlags & VIDEO_AT_EOS)) {
+        // Video playback completed before, there's no pending
+        // video event right now. In order for this new seek
+        // to be honored, we need to post one.
+
+        postVideoEvent_l();
+    }
+
+    mSeeking = SEEK;
+    mSeekNotificationSent = false;
+    mSeekTimeUs = timeUs;
+    mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);
+
+    seekAudioIfNecessary_l();
+
+    if (!(mFlags & PLAYING)) {
+        LOGV("seeking while paused, sending SEEK_COMPLETE notification"
+             " immediately.");
+
+        notifyListener_l(MEDIA_SEEK_COMPLETE);
+        mSeekNotificationSent = true;
+
+        if ((mFlags & PREPARED) && mVideoSource != NULL) {
+            mFlags |= SEEK_PREVIEW;
+            postVideoEvent_l();
+        }
+    }
+
+    return OK;
+}
+
+void PreviewPlayerBase::seekAudioIfNecessary_l() {
+    if (mSeeking != NO_SEEK && mVideoSource == NULL && mAudioPlayer != NULL) {
+        mAudioPlayer->seekTo(mSeekTimeUs);
+
+        mWatchForAudioSeekComplete = true;
+        mWatchForAudioEOS = true;
+
+        if (mDecryptHandle != NULL) {
+            mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+                    Playback::PAUSE, 0);
+            mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+                    Playback::START, mSeekTimeUs / 1000);
+        }
+    }
+}
+
+void PreviewPlayerBase::setAudioSource(sp<MediaSource> source) {
+    CHECK(source != NULL);
+
+    mAudioTrack = source;
+}
+
+status_t PreviewPlayerBase::initAudioDecoder() {
+    sp<MetaData> meta = mAudioTrack->getFormat();
+
+    const char *mime;
+    CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+    if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
+        mAudioSource = mAudioTrack;
+    } else {
+        mAudioSource = OMXCodec::Create(
+                mClient.interface(), mAudioTrack->getFormat(),
+                false, // createEncoder
+                mAudioTrack);
+    }
+
+    if (mAudioSource != NULL) {
+        int64_t durationUs;
+        if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
+            Mutex::Autolock autoLock(mMiscStateLock);
+            if (mDurationUs < 0 || durationUs > mDurationUs) {
+                mDurationUs = durationUs;
+            }
+        }
+
+        status_t err = mAudioSource->start();
+
+        if (err != OK) {
+            mAudioSource.clear();
+            return err;
+        }
+    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
+        // For legacy reasons we're simply going to ignore the absence
+        // of an audio decoder for QCELP instead of aborting playback
+        // altogether.
+        return OK;
+    }
+
+    return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
+}
+
+void PreviewPlayerBase::setVideoSource(sp<MediaSource> source) {
+    CHECK(source != NULL);
+
+    mVideoTrack = source;
+}
+
+status_t PreviewPlayerBase::initVideoDecoder(uint32_t flags) {
+
+    // Either the application or the DRM system can independently say
+    // that there must be a hardware-protected path to an external video sink.
+    // For now we always require a hardware-protected path to an external
+    // video sink if content is DRMed, but eventually this could be optional
+    // per DRM agent.
+    // When the application wants protection, then
+    //   (USE_SURFACE_ALLOC && (mSurface != 0) &&
+    //   (mSurface->getFlags() & ISurfaceComposer::eProtectedByApp))
+    // will be true, but that part is already handled by SurfaceFlinger.
+
+#ifdef DEBUG_HDCP
+    // For debugging, we allow a system property to control the protected usage.
+    // In case of an uninitialized or unexpected property, we default to "DRM only".
+    bool setProtectionBit = false;
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("persist.sys.hdcp_checking", value, NULL)) {
+        if (!strcmp(value, "never")) {
+            // nop
+        } else if (!strcmp(value, "always")) {
+            setProtectionBit = true;
+        } else if (!strcmp(value, "drm-only")) {
+            if (mDecryptHandle != NULL) {
+                setProtectionBit = true;
+            }
+        // property value is empty, or unexpected value
+        } else {
+            if (mDecryptHandle != NULL) {
+                setProtectionBit = true;
+            }
+        }
+    // can't read property value
+    } else {
+        if (mDecryptHandle != NULL) {
+            setProtectionBit = true;
+        }
+    }
+    // note that the usage bit is already cleared, so no need to clear it in the "else" case
+    if (setProtectionBit) {
+        flags |= OMXCodec::kEnableGrallocUsageProtected;
+    }
+#else
+    if (mDecryptHandle != NULL) {
+        flags |= OMXCodec::kEnableGrallocUsageProtected;
+    }
+#endif
+    LOGV("initVideoDecoder flags=0x%x", flags);
+    mVideoSource = OMXCodec::Create(
+            mClient.interface(), mVideoTrack->getFormat(),
+            false, // createEncoder
+            mVideoTrack,
+            NULL, flags, USE_SURFACE_ALLOC ? mNativeWindow : NULL);
+
+    if (mVideoSource != NULL) {
+        int64_t durationUs;
+        if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
+            Mutex::Autolock autoLock(mMiscStateLock);
+            if (mDurationUs < 0 || durationUs > mDurationUs) {
+                mDurationUs = durationUs;
+            }
+        }
+
+        status_t err = mVideoSource->start();
+
+        if (err != OK) {
+            mVideoSource.clear();
+            return err;
+        }
+    }
+
+    return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
+}
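
Stripped of the property plumbing, the DEBUG_HDCP branch above is a three-way decision: "never" clears the protection bit, "always" sets it, and everything else (including "drm-only", an empty value, or an unreadable property) sets it only when a DRM decrypt handle exists, which is also exactly what the non-debug #else branch does. A standalone sketch of that decision (needsProtectedOutput is an illustrative name, not part of this change):

#include <cstdio>
#include <cstring>

// propertyValue may be NULL when persist.sys.hdcp_checking cannot be read,
// mirroring the property_get() failure path above.
bool needsProtectedOutput(const char *propertyValue, bool contentIsDrmProtected) {
    if (propertyValue != NULL) {
        if (!strcmp(propertyValue, "never")) {
            return false;
        }
        if (!strcmp(propertyValue, "always")) {
            return true;
        }
        // "drm-only", empty, or unexpected: fall through to the DRM check.
    }
    return contentIsDrmProtected;
}

int main() {
    printf("%d %d %d\n",
           needsProtectedOutput("never", true),     // 0
           needsProtectedOutput("always", false),   // 1
           needsProtectedOutput("drm-only", true)); // 1
    return 0;
}
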
+
+void PreviewPlayerBase::finishSeekIfNecessary(int64_t videoTimeUs) {
+    if (mSeeking == SEEK_VIDEO_ONLY) {
+        mSeeking = NO_SEEK;
+        return;
+    }
+
+    if (mSeeking == NO_SEEK || (mFlags & SEEK_PREVIEW)) {
+        return;
+    }
+
+    if (mAudioPlayer != NULL) {
+        LOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
+
+        // If we don't have a video time, seek audio to the originally
+        // requested seek time instead.
+
+        mAudioPlayer->seekTo(videoTimeUs < 0 ? mSeekTimeUs : videoTimeUs);
+        mWatchForAudioSeekComplete = true;
+        mWatchForAudioEOS = true;
+    } else if (!mSeekNotificationSent) {
+        // If we're playing video only, report seek complete now,
+        // otherwise audio player will notify us later.
+        notifyListener_l(MEDIA_SEEK_COMPLETE);
+        mSeekNotificationSent = true;
+    }
+
+    mFlags |= FIRST_FRAME;
+    mSeeking = NO_SEEK;
+
+    if (mDecryptHandle != NULL) {
+        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+                Playback::PAUSE, 0);
+        mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+                Playback::START, videoTimeUs / 1000);
+    }
+}
+
+void PreviewPlayerBase::onVideoEvent() {
+    Mutex::Autolock autoLock(mLock);
+    if (!mVideoEventPending) {
+        // The event has been cancelled in reset_l() but had already
+        // been scheduled for execution at that time.
+        return;
+    }
+    mVideoEventPending = false;
+
+    if (mSeeking != NO_SEEK) {
+        if (mVideoBuffer) {
+            mVideoBuffer->release();
+            mVideoBuffer = NULL;
+        }
+
+        if (mSeeking == SEEK && mCachedSource != NULL && mAudioSource != NULL
+                && !(mFlags & SEEK_PREVIEW)) {
+            // We're going to seek the video source first, followed by
+            // the audio source.
+            // In order to avoid jumps in the DataSource offset caused by
+            // the audio codec prefetching data from the old locations
+            // while the video codec is already reading data from the new
+            // locations, we'll "pause" the audio source, causing it to
+            // stop reading input data until a subsequent seek.
+
+            if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
+                mAudioPlayer->pause();
+
+                mFlags &= ~AUDIO_RUNNING;
+            }
+            mAudioSource->pause();
+        }
+    }
+
+    if (!mVideoBuffer) {
+        MediaSource::ReadOptions options;
+        if (mSeeking != NO_SEEK) {
+            LOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);
+
+            options.setSeekTo(
+                    mSeekTimeUs,
+                    mSeeking == SEEK_VIDEO_ONLY
+                        ? MediaSource::ReadOptions::SEEK_NEXT_SYNC
+                        : MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
+        }
+        for (;;) {
+            status_t err = mVideoSource->read(&mVideoBuffer, &options);
+            options.clearSeekTo();
+
+            if (err != OK) {
+                CHECK(mVideoBuffer == NULL);
+
+                if (err == INFO_FORMAT_CHANGED) {
+                    LOGV("VideoSource signalled format change.");
+
+                    notifyVideoSize_l();
+
+                    if (mVideoRenderer != NULL) {
+                        mVideoRendererIsPreview = false;
+                        initRenderer_l();
+                    }
+                    continue;
+                }
+
+                // So video playback is complete, but we may still have
+                // a seek request pending that needs to be applied
+                // to the audio track.
+                if (mSeeking != NO_SEEK) {
+                    LOGV("video stream ended while seeking!");
+                }
+                finishSeekIfNecessary(-1);
+
+                if (mAudioPlayer != NULL
+                        && !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
+                    startAudioPlayer_l();
+                }
+
+                mFlags |= VIDEO_AT_EOS;
+                postStreamDoneEvent_l(err);
+                return;
+            }
+
+            if (mVideoBuffer->range_length() == 0) {
+                // Some decoders, notably the PV AVC software decoder,
+                // return spurious empty buffers that we just want to ignore.
+
+                mVideoBuffer->release();
+                mVideoBuffer = NULL;
+                continue;
+            }
+
+            break;
+        }
+    }
+
+    int64_t timeUs;
+    CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
+
+    mLastVideoTimeUs = timeUs;
+
+    if (mSeeking == SEEK_VIDEO_ONLY) {
+        if (mSeekTimeUs > timeUs) {
+            LOGI("XXX mSeekTimeUs = %lld us, timeUs = %lld us",
+                 mSeekTimeUs, timeUs);
+        }
+    }
+
+    {
+        Mutex::Autolock autoLock(mMiscStateLock);
+        mVideoTimeUs = timeUs;
+    }
+
+    SeekType wasSeeking = mSeeking;
+    finishSeekIfNecessary(timeUs);
+
+    if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING | SEEK_PREVIEW))) {
+        status_t err = startAudioPlayer_l();
+        if (err != OK) {
+            LOGE("Startung the audio player failed w/ err %d", err);
+            return;
+        }
+    }
+
+    TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
+
+    if (mFlags & FIRST_FRAME) {
+        mFlags &= ~FIRST_FRAME;
+        mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
+    }
+
+    int64_t realTimeUs, mediaTimeUs;
+    if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
+        && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
+        mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
+    }
+
+    if (wasSeeking == SEEK_VIDEO_ONLY) {
+        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
+
+        int64_t latenessUs = nowUs - timeUs;
+
+        if (latenessUs > 0) {
+            LOGI("after SEEK_VIDEO_ONLY we're late by %.2f secs", latenessUs / 1E6);
+        }
+    }
+
+    if (wasSeeking == NO_SEEK) {
+        // Let's display the first frame after seeking right away.
+
+        int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
+
+        int64_t latenessUs = nowUs - timeUs;
+
+        if (latenessUs > 500000ll
+                && mRTSPController == NULL
+                && mAudioPlayer != NULL
+                && mAudioPlayer->getMediaTimeMapping(
+                    &realTimeUs, &mediaTimeUs)) {
+            LOGI("we're much too late (%.2f secs), video skipping ahead",
+                 latenessUs / 1E6);
+
+            mVideoBuffer->release();
+            mVideoBuffer = NULL;
+
+            mSeeking = SEEK_VIDEO_ONLY;
+            mSeekTimeUs = mediaTimeUs;
+
+            postVideoEvent_l();
+            return;
+        }
+
+        if (latenessUs > 40000) {
+            // We're more than 40ms late.
+            LOGV("we're late by %lld us (%.2f secs), dropping frame",
+                 latenessUs, latenessUs / 1E6);
+            mVideoBuffer->release();
+            mVideoBuffer = NULL;
+
+            postVideoEvent_l();
+            return;
+        }
+
+        if (latenessUs < -10000) {
+            // We're more than 10ms early.
+
+            postVideoEvent_l(10000);
+            return;
+        }
+    }
+
+    if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
+        mVideoRendererIsPreview = false;
+
+        initRenderer_l();
+    }
+
+    if (mVideoRenderer != NULL) {
+        mVideoRenderer->render(mVideoBuffer);
+    }
+
+    mVideoBuffer->release();
+    mVideoBuffer = NULL;
+
+    if (wasSeeking != NO_SEEK && (mFlags & SEEK_PREVIEW)) {
+        mFlags &= ~SEEK_PREVIEW;
+        return;
+    }
+
+    postVideoEvent_l();
+}
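
When no seek was in flight, the tail of onVideoEvent() applies a simple catch-up policy to latenessUs = nowUs - timeUs: more than 500 ms late with an audio clock available (and no RTSP session) triggers a video-only seek to the audio position, more than 40 ms late drops the frame, more than 10 ms early waits another 10 ms, and anything in between is rendered immediately. A compact standalone sketch of that policy (FrameAction and classifyLateness are illustrative names, not part of this change):

#include <cstdint>
#include <cstdio>

enum FrameAction {
    SEEK_VIDEO_TO_AUDIO,  // much too late: video-only seek to the audio clock
    DROP_FRAME,           // late: release the buffer and read the next one
    WAIT_10MS,            // early: repost the video event with a 10 ms delay
    RENDER_NOW
};

FrameAction classifyLateness(int64_t latenessUs, bool audioClockAvailable) {
    if (latenessUs > 500000ll && audioClockAvailable) {
        return SEEK_VIDEO_TO_AUDIO;
    }
    if (latenessUs > 40000ll) {
        return DROP_FRAME;
    }
    if (latenessUs < -10000ll) {
        return WAIT_10MS;
    }
    return RENDER_NOW;
}

int main() {
    printf("%d\n", classifyLateness(600000ll, true));   // SEEK_VIDEO_TO_AUDIO
    printf("%d\n", classifyLateness(50000ll, false));   // DROP_FRAME
    printf("%d\n", classifyLateness(-20000ll, false));  // WAIT_10MS
    printf("%d\n", classifyLateness(0ll, false));       // RENDER_NOW
    return 0;
}
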
+
+void PreviewPlayerBase::postVideoEvent_l(int64_t delayUs) {
+    if (mVideoEventPending) {
+        return;
+    }
+
+    mVideoEventPending = true;
+    mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
+}
+
+void PreviewPlayerBase::postStreamDoneEvent_l(status_t status) {
+    if (mStreamDoneEventPending) {
+        return;
+    }
+    mStreamDoneEventPending = true;
+
+    mStreamDoneStatus = status;
+    mQueue.postEvent(mStreamDoneEvent);
+}
+
+void PreviewPlayerBase::postBufferingEvent_l() {
+    if (mBufferingEventPending) {
+        return;
+    }
+    mBufferingEventPending = true;
+    mQueue.postEventWithDelay(mBufferingEvent, 1000000ll);
+}
+
+void PreviewPlayerBase::postVideoLagEvent_l() {
+    if (mVideoLagEventPending) {
+        return;
+    }
+    mVideoLagEventPending = true;
+    mQueue.postEventWithDelay(mVideoLagEvent, 1000000ll);
+}
+
+void PreviewPlayerBase::postCheckAudioStatusEvent_l(int64_t delayUs) {
+    if (mAudioStatusEventPending) {
+        return;
+    }
+    mAudioStatusEventPending = true;
+    mQueue.postEventWithDelay(mCheckAudioStatusEvent, delayUs);
+}
+
+void PreviewPlayerBase::onCheckAudioStatus() {
+    Mutex::Autolock autoLock(mLock);
+    if (!mAudioStatusEventPending) {
+        // The event was dispatched, but it has already been cancelled
+        // while we were blocking on the mutex.
+        return;
+    }
+
+    mAudioStatusEventPending = false;
+
+    if (mWatchForAudioSeekComplete && !mAudioPlayer->isSeeking()) {
+        mWatchForAudioSeekComplete = false;
+
+        if (!mSeekNotificationSent) {
+            notifyListener_l(MEDIA_SEEK_COMPLETE);
+            mSeekNotificationSent = true;
+        }
+
+        mSeeking = NO_SEEK;
+    }
+
+    status_t finalStatus;
+    if (mWatchForAudioEOS && mAudioPlayer->reachedEOS(&finalStatus)) {
+        mWatchForAudioEOS = false;
+        mFlags |= AUDIO_AT_EOS;
+        mFlags |= FIRST_FRAME;
+        postStreamDoneEvent_l(finalStatus);
+    }
+}
+
+status_t PreviewPlayerBase::prepare() {
+    Mutex::Autolock autoLock(mLock);
+    return prepare_l();
+}
+
+status_t PreviewPlayerBase::prepare_l() {
+    if (mFlags & PREPARED) {
+        return OK;
+    }
+
+    if (mFlags & PREPARING) {
+        return UNKNOWN_ERROR;
+    }
+
+    mIsAsyncPrepare = false;
+    status_t err = prepareAsync_l();
+
+    if (err != OK) {
+        return err;
+    }
+
+    while (mFlags & PREPARING) {
+        mPreparedCondition.wait(mLock);
+    }
+
+    return mPrepareResult;
+}
+
+status_t PreviewPlayerBase::prepareAsync() {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mFlags & PREPARING) {
+        return UNKNOWN_ERROR;  // async prepare already pending
+    }
+
+    mIsAsyncPrepare = true;
+    return prepareAsync_l();
+}
+
+status_t PreviewPlayerBase::prepareAsync_l() {
+    if (mFlags & PREPARING) {
+        return UNKNOWN_ERROR;  // async prepare already pending
+    }
+
+    if (!mQueueStarted) {
+        mQueue.start();
+        mQueueStarted = true;
+    }
+
+    mFlags |= PREPARING;
+    mAsyncPrepareEvent = new AwesomeEvent(
+            this, &PreviewPlayerBase::onPrepareAsyncEvent);
+
+    mQueue.postEvent(mAsyncPrepareEvent);
+
+    return OK;
+}
+
+status_t PreviewPlayerBase::finishSetDataSource_l() {
+    sp<DataSource> dataSource;
+
+    if (!strncasecmp("http://", mUri.string(), 7)
+            || !strncasecmp("https://", mUri.string(), 8)) {
+        mConnectingDataSource = HTTPBase::Create(
+                (mFlags & INCOGNITO)
+                    ? HTTPBase::kFlagIncognito
+                    : 0);
+
+        mLock.unlock();
+        status_t err = mConnectingDataSource->connect(mUri, &mUriHeaders);
+        mLock.lock();
+
+        if (err != OK) {
+            mConnectingDataSource.clear();
+
+            LOGI("mConnectingDataSource->connect() returned %d", err);
+            return err;
+        }
+
+#if 0
+        mCachedSource = new NuCachedSource2(
+                new ThrottledSource(
+                    mConnectingDataSource, 50 * 1024 /* bytes/sec */));
+#else
+        mCachedSource = new NuCachedSource2(mConnectingDataSource);
+#endif
+        mConnectingDataSource.clear();
+
+        dataSource = mCachedSource;
+
+        String8 contentType = dataSource->getMIMEType();
+
+        if (strncasecmp(contentType.string(), "audio/", 6)) {
+            // We skip the cache prefill below for streams that appear to be
+            // audio-only, so that even low bandwidth streams start playing
+            // back fairly instantly.
+
+            // We're going to prefill the cache before trying to instantiate
+            // the extractor below, as the latter is an operation that otherwise
+            // could block on the datasource for a significant amount of time.
+            // During that time we'd be unable to abort the preparation phase
+            // without this prefill.
+
+            mLock.unlock();
+
+            for (;;) {
+                status_t finalStatus;
+                size_t cachedDataRemaining =
+                    mCachedSource->approxDataRemaining(&finalStatus);
+
+                if (finalStatus != OK || cachedDataRemaining >= kHighWaterMarkBytes
+                        || (mFlags & PREPARE_CANCELLED)) {
+                    break;
+                }
+
+                usleep(200000);
+            }
+
+            mLock.lock();
+        }
+
+        if (mFlags & PREPARE_CANCELLED) {
+            LOGI("Prepare cancelled while waiting for initial cache fill.");
+            return UNKNOWN_ERROR;
+        }
+    } else if (!strncasecmp("rtsp://", mUri.string(), 7)) {
+        if (mLooper == NULL) {
+            mLooper = new ALooper;
+            mLooper->setName("rtsp");
+            mLooper->start();
+        }
+        mRTSPController = new ARTSPController(mLooper);
+        mConnectingRTSPController = mRTSPController;
+
+        mLock.unlock();
+        status_t err = mRTSPController->connect(mUri.string());
+        mLock.lock();
+
+        mConnectingRTSPController.clear();
+
+        LOGI("ARTSPController::connect returned %d", err);
+
+        if (err != OK) {
+            mRTSPController.clear();
+            return err;
+        }
+
+        sp<MediaExtractor> extractor = mRTSPController.get();
+        return setDataSource_l(extractor);
+    } else {
+        dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
+    }
+
+    if (dataSource == NULL) {
+        return UNKNOWN_ERROR;
+    }
+
+    sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
+
+    if (extractor == NULL) {
+        return UNKNOWN_ERROR;
+    }
+
+    dataSource->getDrmInfo(mDecryptHandle, &mDrmManagerClient);
+
+    if (mDecryptHandle != NULL) {
+        CHECK(mDrmManagerClient);
+        if (RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
+            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_DRM_NO_LICENSE);
+        }
+    }
+
+    return setDataSource_l(extractor);
+}
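
finishSetDataSource_l() dispatches purely on the URI scheme: http/https connects through HTTPBase and wraps the connection in NuCachedSource2 (prefilling the cache up to kHighWaterMarkBytes for anything that does not look audio-only), rtsp hands the URI to ARTSPController, and every other URI falls back to DataSource::CreateFromURI(). A small sketch of that prefix dispatch (SourceKind and classifyUri are illustrative names, not part of this change):

#include <cstdio>
#include <strings.h>

enum SourceKind {
    SOURCE_HTTP_CACHED,  // HTTPBase + NuCachedSource2
    SOURCE_RTSP,         // ARTSPController acts as the extractor
    SOURCE_GENERIC       // DataSource::CreateFromURI()
};

SourceKind classifyUri(const char *uri) {
    if (!strncasecmp("http://", uri, 7) || !strncasecmp("https://", uri, 8)) {
        return SOURCE_HTTP_CACHED;
    }
    if (!strncasecmp("rtsp://", uri, 7)) {
        return SOURCE_RTSP;
    }
    return SOURCE_GENERIC;
}

int main() {
    printf("%d %d %d\n",
           classifyUri("https://example.com/clip.mp4"),  // SOURCE_HTTP_CACHED
           classifyUri("rtsp://example.com/stream"),     // SOURCE_RTSP
           classifyUri("/sdcard/clip.mp4"));             // SOURCE_GENERIC
    return 0;
}
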
+
+void PreviewPlayerBase::abortPrepare(status_t err) {
+    CHECK(err != OK);
+
+    if (mIsAsyncPrepare) {
+        notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
+    }
+
+    mPrepareResult = err;
+    mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
+    mAsyncPrepareEvent = NULL;
+    mPreparedCondition.broadcast();
+}
+
+// static
+bool PreviewPlayerBase::ContinuePreparation(void *cookie) {
+    PreviewPlayerBase *me = static_cast<PreviewPlayerBase *>(cookie);
+
+    return (me->mFlags & PREPARE_CANCELLED) == 0;
+}
+
+void PreviewPlayerBase::onPrepareAsyncEvent() {
+    Mutex::Autolock autoLock(mLock);
+
+    if (mFlags & PREPARE_CANCELLED) {
+        LOGI("prepare was cancelled before doing anything");
+        abortPrepare(UNKNOWN_ERROR);
+        return;
+    }
+
+    if (mUri.size() > 0) {
+        status_t err = finishSetDataSource_l();
+
+        if (err != OK) {
+            abortPrepare(err);
+            return;
+        }
+    }
+
+    if (mVideoTrack != NULL && mVideoSource == NULL) {
+        status_t err = initVideoDecoder();
+
+        if (err != OK) {
+            abortPrepare(err);
+            return;
+        }
+    }
+
+    if (mAudioTrack != NULL && mAudioSource == NULL) {
+        status_t err = initAudioDecoder();
+
+        if (err != OK) {
+            abortPrepare(err);
+            return;
+        }
+    }
+
+    mFlags |= PREPARING_CONNECTED;
+
+    if (mCachedSource != NULL || mRTSPController != NULL) {
+        postBufferingEvent_l();
+    } else {
+        finishAsyncPrepare_l();
+    }
+}
+
+void PreviewPlayerBase::finishAsyncPrepare_l() {
+    if (mIsAsyncPrepare) {
+        if (mVideoSource == NULL) {
+            notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
+        } else {
+            notifyVideoSize_l();
+        }
+
+        notifyListener_l(MEDIA_PREPARED);
+    }
+
+    mPrepareResult = OK;
+    mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
+    mFlags |= PREPARED;
+    mAsyncPrepareEvent = NULL;
+    mPreparedCondition.broadcast();
+}
+
+uint32_t PreviewPlayerBase::flags() const {
+    return mExtractorFlags;
+}
+
+void PreviewPlayerBase::postAudioEOS(int64_t delayUs) {
+    Mutex::Autolock autoLock(mLock);
+    postCheckAudioStatusEvent_l(delayUs);
+}
+
+void PreviewPlayerBase::postAudioSeekComplete() {
+    Mutex::Autolock autoLock(mLock);
+    postCheckAudioStatusEvent_l(0 /* delayUs */);
+}
+
+status_t PreviewPlayerBase::setParameter(int key, const Parcel &request) {
+    return OK;
+}
+
+status_t PreviewPlayerBase::getParameter(int key, Parcel *reply) {
+    return OK;
+}
+}  // namespace android
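
Taken together, PreviewPlayerBase exposes the same lifecycle AwesomePlayer did: set a data source, prepare, play, seek, pause, reset. A rough usage sketch against the public interface declared in PreviewPlayerBase.h below; it assumes the libvideoeditor include paths and an AudioSink supplied by the surrounding player service, and is a sketch rather than code from this change:

#include "PreviewPlayerBase.h"

using namespace android;

status_t playClip(PreviewPlayerBase *player,
                  const sp<MediaPlayerBase::AudioSink> &sink,
                  const char *uri) {
    player->setAudioSink(sink);

    status_t err = player->setDataSource(uri);
    if (err != OK) {
        return err;
    }

    // prepare() blocks until the async prepare finishes (PREPARED) or fails.
    err = player->prepare();
    if (err != OK) {
        return err;
    }

    err = player->play();
    if (err != OK) {
        return err;
    }

    player->seekTo(5000000ll);         // jump to the 5 second mark

    int64_t positionUs = 0;
    player->getPosition(&positionUs);  // reports the seek target while seeking

    player->pause();
    player->reset();
    return OK;
}
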
diff --git a/libvideoeditor/lvpp/PreviewPlayerBase.h b/libvideoeditor/lvpp/PreviewPlayerBase.h
new file mode 100644 (file)
index 0000000..a68d53c
--- /dev/null
@@ -0,0 +1,285 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PREVIEW_PLAYER_BASE_H_
+
+#define PREVIEW_PLAYER_BASE_H_
+
+#include "HTTPBase.h"
+#include "TimedEventQueue.h"
+
+#include <media/MediaPlayerInterface.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/TimeSource.h>
+#include <utils/threads.h>
+#include <drm/DrmManagerClient.h>
+
+namespace android {
+
+struct AudioPlayerBase;
+struct DataSource;
+struct MediaBuffer;
+struct MediaExtractor;
+struct MediaSource;
+struct NuCachedSource2;
+struct ISurfaceTexture;
+
+struct ALooper;
+struct ARTSPController;
+
+class DrmManagerClient;
+class DecryptHandle;
+
+struct AwesomeRenderer : public RefBase {
+    AwesomeRenderer() {}
+
+    virtual void render(MediaBuffer *buffer) = 0;
+
+private:
+    AwesomeRenderer(const AwesomeRenderer &);
+    AwesomeRenderer &operator=(const AwesomeRenderer &);
+};
+
+struct PreviewPlayerBase {
+    PreviewPlayerBase();
+    ~PreviewPlayerBase();
+
+    void setListener(const wp<MediaPlayerBase> &listener);
+
+    status_t setDataSource(
+            const char *uri,
+            const KeyedVector<String8, String8> *headers = NULL);
+
+    status_t setDataSource(int fd, int64_t offset, int64_t length);
+
+    status_t setDataSource(const sp<IStreamSource> &source);
+
+    void reset();
+
+    status_t prepare();
+    status_t prepare_l();
+    status_t prepareAsync();
+    status_t prepareAsync_l();
+
+    status_t play();
+    status_t pause();
+
+    bool isPlaying() const;
+
+    void setSurface(const sp<Surface> &surface);
+    void setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
+    void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
+    status_t setLooping(bool shouldLoop);
+
+    status_t getDuration(int64_t *durationUs);
+    status_t getPosition(int64_t *positionUs);
+
+    status_t setParameter(int key, const Parcel &request);
+    status_t getParameter(int key, Parcel *reply);
+
+    status_t seekTo(int64_t timeUs);
+
+    // This is a mask of MediaExtractor::Flags.
+    uint32_t flags() const;
+
+    void postAudioEOS(int64_t delayUs = 0ll);
+    void postAudioSeekComplete();
+
+private:
+    friend struct AwesomeEvent;
+    friend struct PreviewPlayer;
+
+    enum {
+        PLAYING             = 1,
+        LOOPING             = 2,
+        FIRST_FRAME         = 4,
+        PREPARING           = 8,
+        PREPARED            = 16,
+        AT_EOS              = 32,
+        PREPARE_CANCELLED   = 64,
+        CACHE_UNDERRUN      = 128,
+        AUDIO_AT_EOS        = 256,
+        VIDEO_AT_EOS        = 512,
+        AUTO_LOOPING        = 1024,
+
+        // We are basically done preparing but are currently buffering
+        // sufficient data to begin playback and finish the preparation phase
+        // for good.
+        PREPARING_CONNECTED = 2048,
+
+        // We're triggering a single video event to display the first frame
+        // after the seekpoint.
+        SEEK_PREVIEW        = 4096,
+
+        AUDIO_RUNNING       = 8192,
+        AUDIOPLAYER_STARTED = 16384,
+
+        INCOGNITO           = 32768,
+    };
+
+    mutable Mutex mLock;
+    Mutex mMiscStateLock;
+
+    OMXClient mClient;
+    TimedEventQueue mQueue;
+    bool mQueueStarted;
+    wp<MediaPlayerBase> mListener;
+
+    sp<Surface> mSurface;
+    sp<ANativeWindow> mNativeWindow;
+    sp<MediaPlayerBase::AudioSink> mAudioSink;
+
+    SystemTimeSource mSystemTimeSource;
+    TimeSource *mTimeSource;
+
+    String8 mUri;
+    KeyedVector<String8, String8> mUriHeaders;
+
+    sp<DataSource> mFileSource;
+
+    sp<MediaSource> mVideoTrack;
+    sp<MediaSource> mVideoSource;
+    sp<AwesomeRenderer> mVideoRenderer;
+    bool mVideoRendererIsPreview;
+
+    sp<MediaSource> mAudioTrack;
+    sp<MediaSource> mAudioSource;
+    AudioPlayerBase *mAudioPlayer;
+    int64_t mDurationUs;
+
+    int32_t mDisplayWidth;
+    int32_t mDisplayHeight;
+
+    uint32_t mFlags;
+    uint32_t mExtractorFlags;
+
+    int64_t mTimeSourceDeltaUs;
+    int64_t mVideoTimeUs;
+
+    enum SeekType {
+        NO_SEEK,
+        SEEK,
+        SEEK_VIDEO_ONLY
+    };
+    SeekType mSeeking;
+
+    bool mSeekNotificationSent;
+    int64_t mSeekTimeUs;
+
+    int64_t mBitrate;  // total bitrate of the file (in bps) or -1 if unknown.
+
+    bool mWatchForAudioSeekComplete;
+    bool mWatchForAudioEOS;
+
+    sp<TimedEventQueue::Event> mVideoEvent;
+    bool mVideoEventPending;
+    sp<TimedEventQueue::Event> mStreamDoneEvent;
+    bool mStreamDoneEventPending;
+    sp<TimedEventQueue::Event> mBufferingEvent;
+    bool mBufferingEventPending;
+    sp<TimedEventQueue::Event> mCheckAudioStatusEvent;
+    bool mAudioStatusEventPending;
+    sp<TimedEventQueue::Event> mVideoLagEvent;
+    bool mVideoLagEventPending;
+
+    sp<TimedEventQueue::Event> mAsyncPrepareEvent;
+    Condition mPreparedCondition;
+    bool mIsAsyncPrepare;
+    status_t mPrepareResult;
+    status_t mStreamDoneStatus;
+
+    void postVideoEvent_l(int64_t delayUs = -1);
+    void postBufferingEvent_l();
+    void postStreamDoneEvent_l(status_t status);
+    void postCheckAudioStatusEvent_l(int64_t delayUs);
+    void postVideoLagEvent_l();
+    status_t play_l();
+
+    MediaBuffer *mVideoBuffer;
+
+    sp<HTTPBase> mConnectingDataSource;
+    sp<NuCachedSource2> mCachedSource;
+
+    sp<ALooper> mLooper;
+    sp<ARTSPController> mRTSPController;
+    sp<ARTSPController> mConnectingRTSPController;
+
+    DrmManagerClient *mDrmManagerClient;
+    sp<DecryptHandle> mDecryptHandle;
+
+    int64_t mLastVideoTimeUs;
+
+    status_t setDataSource_l(
+            const char *uri,
+            const KeyedVector<String8, String8> *headers = NULL);
+
+    status_t setDataSource_l(const sp<DataSource> &dataSource);
+    status_t setDataSource_l(const sp<MediaExtractor> &extractor);
+    void reset_l();
+    status_t seekTo_l(int64_t timeUs);
+    status_t pause_l(bool at_eos = false);
+    void initRenderer_l();
+    void notifyVideoSize_l();
+    void seekAudioIfNecessary_l();
+
+    void cancelPlayerEvents(bool keepBufferingGoing = false);
+
+    void setAudioSource(sp<MediaSource> source);
+    status_t initAudioDecoder();
+
+    void setVideoSource(sp<MediaSource> source);
+    status_t initVideoDecoder(uint32_t flags = 0);
+
+    void onStreamDone();
+
+    void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0);
+
+    void onVideoEvent();
+    void onBufferingUpdate();
+    void onCheckAudioStatus();
+    void onPrepareAsyncEvent();
+    void abortPrepare(status_t err);
+    void finishAsyncPrepare_l();
+    void onVideoLagUpdate();
+
+    bool getCachedDuration_l(int64_t *durationUs, bool *eos);
+
+    status_t finishSetDataSource_l();
+
+    static bool ContinuePreparation(void *cookie);
+
+    static void OnRTSPSeekDoneWrapper(void *cookie);
+    void onRTSPSeekDone();
+
+    bool getBitrate(int64_t *bitrate);
+
+    void finishSeekIfNecessary(int64_t videoTimeUs);
+    void ensureCacheIsFetching_l();
+
+    status_t startAudioPlayer_l();
+
+    void shutdownVideoDecoder_l();
+    void setNativeWindow_l(const sp<ANativeWindow> &native);
+
+    PreviewPlayerBase(const PreviewPlayerBase &);
+    PreviewPlayerBase &operator=(const PreviewPlayerBase &);
+};
+
+}  // namespace android
+
+#endif  // PREVIEW_PLAYER_BASE_H_
+
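
Every piece of player state above is packed into the mFlags bitmask, and the implementation only ever touches it with three idioms: set with |=, clear with &= ~, and test with &. A tiny standalone illustration using the same power-of-two values (the enum is copied here only for the example):

#include <cstdint>
#include <cstdio>

enum {
    PLAYING      = 1,
    AT_EOS       = 32,
    AUDIO_AT_EOS = 256,
    VIDEO_AT_EOS = 512,
};

int main() {
    uint32_t flags = 0;

    flags |= PLAYING;                                  // set, as in play_l()
    flags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);  // clear, as in seekTo_l()

    bool wasPlaying = (flags & PLAYING) != 0;          // test, as in setNativeWindow_l()
    printf("playing: %d\n", wasPlaying);
    return 0;
}
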
index 1628d33..c49328a 100755 (executable)
@@ -35,8 +35,8 @@ namespace android {
 
 VideoEditorAudioPlayer::VideoEditorAudioPlayer(
         const sp<MediaPlayerBase::AudioSink> &audioSink,
-        AwesomePlayer *observer)
-    : AudioPlayer(audioSink, observer) {
+        PreviewPlayerBase *observer)
+    : AudioPlayerBase(audioSink, observer) {
 
     LOGV("VideoEditorAudioPlayer");
     mBGAudioPCMFileHandle = NULL;
@@ -97,7 +97,7 @@ sp<MediaSource> VideoEditorAudioPlayer::getSource() {
     return mSource;
 }
 
-void VideoEditorAudioPlayer::setObserver(AwesomePlayer *observer) {
+void VideoEditorAudioPlayer::setObserver(PreviewPlayerBase *observer) {
     LOGV("setObserver");
     //CHECK(!mStarted);
     mObserver = observer;
@@ -380,13 +380,13 @@ void VideoEditorAudioPlayer::resume() {
     mAudioProcess->veSetAudioProcessingParams(audioMixSettings);
 
     //Call the base class
-    AudioPlayer::resume();
+    AudioPlayerBase::resume();
 }
 
 void VideoEditorAudioPlayer::reset() {
 
     LOGV("reset");
-    AudioPlayer::reset();
+    AudioPlayerBase::reset();
 
     // Capture the current seek point
     mBGAudioPCMFileSeekPoint = 0;
index f5232cf..73aea65 100755 (executable)
@@ -27,7 +27,8 @@
 #include "VideoEditorMain.h"\r
 #include "M4OSA_FileReader.h"\r
 #include "VideoEditorBGAudioProcessing.h"\r
-#include <media/stagefright/AudioPlayer.h>\r
+#include "AudioPlayerBase.h"\r
+#include "PreviewPlayerBase.h"\r
 \r
 namespace android {\r
 \r
@@ -36,7 +37,7 @@ class AudioTrack;
 class PreviewPlayer;\r
 \r
 \r
-class VideoEditorAudioPlayer : public AudioPlayer {\r
+class VideoEditorAudioPlayer : public AudioPlayerBase {\r
 public:\r
     enum {\r
         REACHED_EOS,\r
@@ -44,7 +45,7 @@ public:
     };\r
 \r
     VideoEditorAudioPlayer(const sp<MediaPlayerBase::AudioSink> &audioSink,\r
-        AwesomePlayer *audioObserver = NULL);\r
+        PreviewPlayerBase *audioObserver = NULL);\r
 \r
     virtual ~VideoEditorAudioPlayer();\r
 \r
@@ -58,7 +59,7 @@ public:
         M4OSA_UInt32 pBGAudioCurrentMediaBeginCutTS,\r
         M4OSA_UInt32 pBGAudioCurrentMediaVolumeVal);\r
 \r
-    void setObserver(AwesomePlayer *observer);\r
+    void setObserver(PreviewPlayerBase *observer);\r
     void setSource(const sp<MediaSource> &source);\r
     sp<MediaSource> getSource();\r
 \r
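
With this change VideoEditorAudioPlayer derives from AudioPlayerBase and observes a PreviewPlayerBase instead of an AwesomePlayer, so wiring it up now looks roughly like the sketch below; it assumes the libvideoeditor include paths, and sink/audioDecoder stand in for objects the preview engine already owns (attachAudioPlayer is an illustrative name, not part of this change):

#include "PreviewPlayerBase.h"
#include "VideoEditorAudioPlayer.h"

using namespace android;

void attachAudioPlayer(PreviewPlayerBase *previewPlayer,
                       const sp<MediaPlayerBase::AudioSink> &sink,
                       const sp<MediaSource> &audioDecoder) {
    // The observer is now the preview player itself, matching the new
    // constructor signature introduced by this change.
    VideoEditorAudioPlayer *audioPlayer =
            new VideoEditorAudioPlayer(sink, previewPlayer);

    audioPlayer->setSource(audioDecoder);

    // In the real code ownership passes to the preview player, which then
    // drives start()/pause()/resume()/reset() on the audio player.
}
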