
Squashed commit of the following:
author     Andreas Huber <andih@google.com>   Tue, 29 Jan 2013 17:22:16 +0000 (09:22 -0800)
committer  Andreas Huber <andih@google.com>   Tue, 29 Jan 2013 18:30:47 +0000 (10:30 -0800)
commit f2c38e5cf8cee3b597c744c9d6a9c0969ac8599a
Author: Andreas Huber <andih@google.com>
Date:   Mon Jan 28 16:33:07 2013 -0800

    Proper support for video format selection/negotiation.

    Change-Id: I7db86cef939d63b8064be1c74de9ad78e85d45d9

commit 488023b7bad086692ffe942114fa3cc0e59a16c0
Author: Andreas Huber <andih@google.com>
Date:   Mon Jan 28 11:21:23 2013 -0800

    Sink now notifies clients once it is disconnected.

    Change-Id: I2f0a458ef1ec30dda1272ad5a013fee4ee70edc9

commit 783932e40dd904aa531c263ad51280d9ca814dcb
Author: Andreas Huber <andih@google.com>
Date:   Tue Dec 18 15:03:40 2012 -0800

    Alternative DirectRenderer implementation.

    Change-Id: I307beb913d7a61cb938bcb02696cc2e82d2b8b07

commit 1935cc9a87824aea71fc8ebe2162f62ec634ce5a
Author: Andreas Huber <andih@google.com>
Date:   Tue Dec 18 10:24:27 2012 -0800

    Experimenting with wifi sink timing.

    Change-Id: I059bae9762cf11777666988a8b4ab2012b5807be

commit a859ee1eadd6a1d6a080667917e8b102c3770d61
Author: Andreas Huber <andih@google.com>
Date:   Thu Nov 15 11:16:30 2012 -0800

    wfd sink update.

    Change-Id: I026dfc580be92aa40dbbe7c1bc061fadf3b08be8

Change-Id: I191d3d7015869ca99254d813d074328fb5b2f479

17 files changed:
media/libstagefright/mpeg2ts/ATSParser.cpp
media/libstagefright/wifi-display/Android.mk
media/libstagefright/wifi-display/VideoFormats.cpp [new file with mode: 0644]
media/libstagefright/wifi-display/VideoFormats.h [new file with mode: 0644]
media/libstagefright/wifi-display/sink/DirectRenderer.cpp [new file with mode: 0644]
media/libstagefright/wifi-display/sink/DirectRenderer.h [new file with mode: 0644]
media/libstagefright/wifi-display/sink/RTPSink.cpp
media/libstagefright/wifi-display/sink/RTPSink.h
media/libstagefright/wifi-display/sink/TunnelRenderer.cpp
media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
media/libstagefright/wifi-display/sink/WifiDisplaySink.h
media/libstagefright/wifi-display/source/PlaybackSession.cpp
media/libstagefright/wifi-display/source/PlaybackSession.h
media/libstagefright/wifi-display/source/Sender.cpp
media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
media/libstagefright/wifi-display/source/WifiDisplaySource.h
media/libstagefright/wifi-display/wfd.cpp

diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index 4f6c4b2..a167b5a 100644 (file)
@@ -534,6 +534,16 @@ status_t ATSParser::Stream::parse(
         mBuffer->setRange(0, 0);
         mExpectedContinuityCounter = -1;
 
+#if 0
+        // Uncomment this if you'd rather see no corruption whatsoever on
+        // screen and suspend updates until we come across another IDR frame.
+
+        if (mStreamType == STREAMTYPE_H264) {
+            ALOGI("clearing video queue");
+            mQueue->clear(true /* clearFormat */);
+        }
+#endif
+
         return OK;
     }
 
diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk
index 75098f1..5095e82 100644 (file)
@@ -6,6 +6,7 @@ LOCAL_SRC_FILES:= \
         ANetworkSession.cpp             \
         Parameters.cpp                  \
         ParsedMessage.cpp               \
+        sink/DirectRenderer.cpp         \
         sink/LinearRegression.cpp       \
         sink/RTPSink.cpp                \
         sink/TunnelRenderer.cpp         \
@@ -18,6 +19,7 @@ LOCAL_SRC_FILES:= \
         source/TSPacketizer.cpp         \
         source/WifiDisplaySource.cpp    \
         TimeSeries.cpp                  \
+        VideoFormats.cpp                \
 
 LOCAL_C_INCLUDES:= \
         $(TOP)/frameworks/av/media/libstagefright \
diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp
new file mode 100644 (file)
index 0000000..9ad8c3c
--- /dev/null
@@ -0,0 +1,370 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoFormats"
+#include <utils/Log.h>
+
+#include "VideoFormats.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+VideoFormats::VideoFormats() {
+    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+        mResolutionEnabled[i] = 0;
+    }
+
+    setNativeResolution(RESOLUTION_CEA, 0);  // default to 640x480 p60
+}
+
+void VideoFormats::setNativeResolution(ResolutionType type, size_t index) {
+    CHECK_LT(type, kNumResolutionTypes);
+    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+    mNativeType = type;
+    mNativeIndex = index;
+
+    setResolutionEnabled(type, index);
+}
+
+void VideoFormats::getNativeResolution(
+        ResolutionType *type, size_t *index) const {
+    *type = mNativeType;
+    *index = mNativeIndex;
+}
+
+void VideoFormats::disableAll() {
+    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+        mResolutionEnabled[i] = 0;
+    }
+}
+
+void VideoFormats::enableAll() {
+    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+        mResolutionEnabled[i] = 0xffffffff;
+    }
+}
+
+void VideoFormats::setResolutionEnabled(
+        ResolutionType type, size_t index, bool enabled) {
+    CHECK_LT(type, kNumResolutionTypes);
+    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+    if (enabled) {
+        mResolutionEnabled[type] |= (1ul << index);
+    } else {
+        mResolutionEnabled[type] &= ~(1ul << index);
+    }
+}
+
+bool VideoFormats::isResolutionEnabled(
+        ResolutionType type, size_t index) const {
+    CHECK_LT(type, kNumResolutionTypes);
+    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+    return mResolutionEnabled[type] & (1ul << index);
+}
+
+// static
+bool VideoFormats::GetConfiguration(
+        ResolutionType type,
+        size_t index,
+        size_t *width, size_t *height, size_t *framesPerSecond,
+        bool *interlaced) {
+    CHECK_LT(type, kNumResolutionTypes);
+
+    if (index >= 32) {
+        return false;
+    }
+
+    static const struct config_t {
+        size_t width, height, framesPerSecond;
+        bool interlaced;
+    } kConfigs[kNumResolutionTypes][32] = {
+        {
+            // CEA Resolutions
+            { 640, 480, 60, false },
+            { 720, 480, 60, false },
+            { 720, 480, 60, true },
+            { 720, 576, 50, false },
+            { 720, 576, 50, true },
+            { 1280, 720, 30, false },
+            { 1280, 720, 60, false },
+            { 1920, 1080, 30, false },
+            { 1920, 1080, 60, false },
+            { 1920, 1080, 60, true },
+            { 1280, 720, 25, false },
+            { 1280, 720, 50, false },
+            { 1920, 1080, 25, false },
+            { 1920, 1080, 50, false },
+            { 1920, 1080, 50, true },
+            { 1280, 720, 24, false },
+            { 1920, 1080, 24, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+        },
+        {
+            // VESA Resolutions
+            { 800, 600, 30, false },
+            { 800, 600, 60, false },
+            { 1024, 768, 30, false },
+            { 1024, 768, 60, false },
+            { 1152, 864, 30, false },
+            { 1152, 864, 60, false },
+            { 1280, 768, 30, false },
+            { 1280, 768, 60, false },
+            { 1280, 800, 30, false },
+            { 1280, 800, 60, false },
+            { 1360, 768, 30, false },
+            { 1360, 768, 60, false },
+            { 1366, 768, 30, false },
+            { 1366, 768, 60, false },
+            { 1280, 1024, 30, false },
+            { 1280, 1024, 60, false },
+            { 1400, 1050, 30, false },
+            { 1400, 1050, 60, false },
+            { 1440, 900, 30, false },
+            { 1440, 900, 60, false },
+            { 1600, 900, 30, false },
+            { 1600, 900, 60, false },
+            { 1600, 1200, 30, false },
+            { 1600, 1200, 60, false },
+            { 1680, 1024, 30, false },
+            { 1680, 1024, 60, false },
+            { 1680, 1050, 30, false },
+            { 1680, 1050, 60, false },
+            { 1920, 1200, 30, false },
+            { 1920, 1200, 60, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+        },
+        {
+            // HH Resolutions
+            { 800, 480, 30, false },
+            { 800, 480, 60, false },
+            { 854, 480, 30, false },
+            { 854, 480, 60, false },
+            { 864, 480, 30, false },
+            { 864, 480, 60, false },
+            { 640, 360, 30, false },
+            { 640, 360, 60, false },
+            { 960, 540, 30, false },
+            { 960, 540, 60, false },
+            { 848, 480, 30, false },
+            { 848, 480, 60, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+            { 0, 0, 0, false },
+        }
+    };
+
+    const config_t *config = &kConfigs[type][index];
+
+    if (config->width == 0) {
+        return false;
+    }
+
+    if (width) {
+        *width = config->width;
+    }
+
+    if (height) {
+        *height = config->height;
+    }
+
+    if (framesPerSecond) {
+        *framesPerSecond = config->framesPerSecond;
+    }
+
+    if (interlaced) {
+        *interlaced = config->interlaced;
+    }
+
+    return true;
+}
+
+bool VideoFormats::parseFormatSpec(const char *spec) {
+    CHECK_EQ(kNumResolutionTypes, 3);
+
+    unsigned native, dummy;
+
+    if (sscanf(
+            spec,
+            "%02x %02x %02x %02x %08X %08X %08X",
+            &native,
+            &dummy,
+            &dummy,
+            &dummy,
+            &mResolutionEnabled[0],
+            &mResolutionEnabled[1],
+            &mResolutionEnabled[2]) != 7) {
+        return false;
+    }
+
+    mNativeIndex = native >> 3;
+    mNativeType = (ResolutionType)(native & 7);
+
+    if (mNativeType >= kNumResolutionTypes) {
+        return false;
+    }
+
+    return GetConfiguration(mNativeType, mNativeIndex, NULL, NULL, NULL, NULL);
+}
+
+AString VideoFormats::getFormatSpec() const {
+    CHECK_EQ(kNumResolutionTypes, 3);
+
+    // wfd_video_formats:
+    // 1 byte "native"
+    // 1 byte "preferred-display-mode-supported" 0 or 1
+    // one or more avc codec structures
+    //   1 byte profile
+    //   1 byte level
+    //   4 byte CEA mask
+    //   4 byte VESA mask
+    //   4 byte HH mask
+    //   1 byte latency
+    //   2 byte min-slice-size
+    //   2 byte slice-enc-params
+    //   1 byte framerate-control-support
+    //   max-hres (none or 2 byte)
+    //   max-vres (none or 2 byte)
+
+    return StringPrintf(
+            "%02x 00 02 02 %08x %08x %08x 00 0000 0000 00 none none",
+            (mNativeIndex << 3) | mNativeType,
+            mResolutionEnabled[0],
+            mResolutionEnabled[1],
+            mResolutionEnabled[2]);
+}
+
+// static
+bool VideoFormats::PickBestFormat(
+        const VideoFormats &sinkSupported,
+        const VideoFormats &sourceSupported,
+        ResolutionType *chosenType,
+        size_t *chosenIndex) {
+    ResolutionType nativeType;
+    size_t nativeIndex;
+    sinkSupported.getNativeResolution(&nativeType, &nativeIndex);
+    if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+        if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+            ALOGI("Choosing sink's native resolution");
+            *chosenType = nativeType;
+            *chosenIndex = nativeIndex;
+            return true;
+        }
+    } else {
+        ALOGW("Sink advertised native resolution that it doesn't "
+              "actually support... ignoring");
+    }
+
+    sourceSupported.getNativeResolution(&nativeType, &nativeIndex);
+    if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+        if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+            ALOGI("Choosing source's native resolution");
+            *chosenType = nativeType;
+            *chosenIndex = nativeIndex;
+            return true;
+        }
+    } else {
+        ALOGW("Source advertised native resolution that it doesn't "
+              "actually support... ignoring");
+    }
+
+    bool first = true;
+    uint32_t bestScore = 0;
+    size_t bestType = 0;
+    size_t bestIndex = 0;
+    for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+        for (size_t j = 0; j < 32; ++j) {
+            size_t width, height, framesPerSecond;
+            bool interlaced;
+            if (!GetConfiguration(
+                        (ResolutionType)i,
+                        j,
+                        &width, &height, &framesPerSecond, &interlaced)) {
+                break;
+            }
+
+            if (!sinkSupported.isResolutionEnabled((ResolutionType)i, j)
+                    || !sourceSupported.isResolutionEnabled(
+                        (ResolutionType)i, j)) {
+                continue;
+            }
+
+            ALOGV("type %u, index %u, %u x %u %c%u supported",
+                  i, j, width, height, interlaced ? 'i' : 'p', framesPerSecond);
+
+            uint32_t score = width * height * framesPerSecond;
+            if (!interlaced) {
+                score *= 2;
+            }
+
+            if (first || score > bestScore) {
+                bestScore = score;
+                bestType = i;
+                bestIndex = j;
+
+                first = false;
+            }
+        }
+    }
+
+    if (first) {
+        return false;
+    }
+
+    *chosenType = (ResolutionType)bestType;
+    *chosenIndex = bestIndex;
+
+    return true;
+}
+
+}  // namespace android
+
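The comment block above documents the wfd_video_formats layout that getFormatSpec() and parseFormatSpec() emit and consume. A minimal round-trip sketch, illustrative only (the helper name is not part of this change), assuming the API introduced in this file:

    // Illustrative sketch: a sink that prefers 1280x720 p30 (CEA index 5)
    // and enables every table entry produces a spec string the source can
    // parse back with the same class.
    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AString.h>

    #include "VideoFormats.h"

    using namespace android;

    static void formatSpecRoundTrip() {  // hypothetical helper
        VideoFormats sink;
        sink.setNativeResolution(VideoFormats::RESOLUTION_CEA, 5);  // 1280x720 p30
        sink.enableAll();

        // Yields "28 00 02 02 ffffffff ffffffff ffffffff 00 0000 0000 00 none none":
        // native byte 0x28 = (index 5 << 3) | RESOLUTION_CEA, the profile/level
        // stubs, then the CEA/VESA/HH enable masks.
        AString spec = sink.getFormatSpec();

        VideoFormats parsed;
        CHECK(parsed.parseFormatSpec(spec.c_str()));

        VideoFormats::ResolutionType type;
        size_t index;
        parsed.getNativeResolution(&type, &index);
        CHECK_EQ(type, VideoFormats::RESOLUTION_CEA);
        CHECK_EQ(index, 5u);
    }
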
diff --git a/media/libstagefright/wifi-display/VideoFormats.h b/media/libstagefright/wifi-display/VideoFormats.h
new file mode 100644 (file)
index 0000000..a84407a
--- /dev/null
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_FORMATS_H_
+
+#define VIDEO_FORMATS_H_
+
+#include <media/stagefright/foundation/ABase.h>
+
+#include <stdint.h>
+
+namespace android {
+
+struct AString;
+
+// This class encapsulates the video resolution capabilities of a wfd source
+// or sink as outlined in the wfd specs. Currently three sets of resolutions
+// are specified, each of which supports up to 32 resolutions.
+// In addition to its capabilities each sink/source also publishes its
+// "native" resolution, presumably one that is preferred among all others
+// because it wouldn't require any scaling and directly corresponds to the
+// display capabilities/pixels.
+struct VideoFormats {
+    VideoFormats();
+
+    enum ResolutionType {
+        RESOLUTION_CEA,
+        RESOLUTION_VESA,
+        RESOLUTION_HH,
+        kNumResolutionTypes,
+    };
+
+    void setNativeResolution(ResolutionType type, size_t index);
+    void getNativeResolution(ResolutionType *type, size_t *index) const;
+
+    void disableAll();
+    void enableAll();
+
+    void setResolutionEnabled(
+            ResolutionType type, size_t index, bool enabled = true);
+
+    bool isResolutionEnabled(ResolutionType type, size_t index) const;
+
+    static bool GetConfiguration(
+            ResolutionType type, size_t index,
+            size_t *width, size_t *height, size_t *framesPerSecond,
+            bool *interlaced);
+
+    bool parseFormatSpec(const char *spec);
+    AString getFormatSpec() const;
+
+    static bool PickBestFormat(
+            const VideoFormats &sinkSupported,
+            const VideoFormats &sourceSupported,
+            ResolutionType *chosenType,
+            size_t *chosenIndex);
+
+private:
+    ResolutionType mNativeType;
+    size_t mNativeIndex;
+
+    uint32_t mResolutionEnabled[kNumResolutionTypes];
+
+    DISALLOW_EVIL_CONSTRUCTORS(VideoFormats);
+};
+
+}  // namespace android
+
+#endif  // VIDEO_FORMATS_H_
+
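PickBestFormat() above prefers the sink's native mode, then the source's, and otherwise scores every mutually enabled mode as width x height x framesPerSecond, doubled for progressive modes. A standalone sketch of that metric with a few CEA entries plugged in, illustrative only:

    // Reproduces the scoring rule VideoFormats::PickBestFormat applies when
    // neither side's native mode is mutually supported.
    #include <cstdint>
    #include <cstdio>

    static uint32_t formatScore(
            uint32_t w, uint32_t h, uint32_t fps, bool interlaced) {
        uint32_t score = w * h * fps;
        return interlaced ? score : score * 2;  // progressive counts double
    }

    int main() {
        printf("720p60:  %u\n", formatScore(1280, 720, 60, false));   // 110592000
        printf("1080p30: %u\n", formatScore(1920, 1080, 30, false));  // 124416000
        printf("1080i60: %u\n", formatScore(1920, 1080, 60, true));   // 124416000
        printf("1080p60: %u\n", formatScore(1920, 1080, 60, false));  // 248832000
        // 1080p30 and 1080i60 tie; since the comparison in PickBestFormat is
        // strict (score > bestScore), the earlier table entry (1080p30) wins.
        return 0;
    }
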
diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.cpp b/media/libstagefright/wifi-display/sink/DirectRenderer.cpp
new file mode 100644 (file)
index 0000000..8120634
--- /dev/null
@@ -0,0 +1,428 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DirectRenderer"
+#include <utils/Log.h>
+
+#include "DirectRenderer.h"
+
+#include "AnotherPacketSource.h"
+#include "ATSParser.h"
+
+#include <gui/SurfaceComposerClient.h>
+#include <media/ICrypto.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+// static
+const int64_t DirectRenderer::kPacketLostDelayUs = 80000ll;
+
+// static
+const int64_t DirectRenderer::kPacketLateDelayUs = 60000ll;
+
+DirectRenderer::DirectRenderer(
+        const sp<AMessage> &notifyLost,
+        const sp<IGraphicBufferProducer> &bufferProducer)
+    : mNotifyLost(notifyLost),
+      mSurfaceTex(bufferProducer),
+      mTSParser(new ATSParser(ATSParser::ALIGNED_VIDEO_DATA)),
+      mVideoDecoderNotificationPending(false),
+      mAwaitingExtSeqNo(-1),
+      mRequestedRetransmission(false),
+      mPacketLostGeneration(0) {
+}
+
+DirectRenderer::~DirectRenderer() {
+    if (mVideoDecoder != NULL) {
+        mVideoDecoder->release();
+        mVideoDecoder.clear();
+
+        mVideoDecoderLooper->stop();
+        mVideoDecoderLooper.clear();
+    }
+}
+
+void DirectRenderer::onMessageReceived(const sp<AMessage> &msg) {
+    switch (msg->what()) {
+        case kWhatQueueBuffer:
+        {
+            sp<ABuffer> buffer;
+            CHECK(msg->findBuffer("buffer", &buffer));
+
+            onQueueBuffer(buffer);
+
+            dequeueMore();
+            break;
+        }
+
+        case kWhatPacketLate:
+        case kWhatPacketLost:
+        {
+            int32_t generation;
+            CHECK(msg->findInt32("generation", &generation));
+
+            if (generation != mPacketLostGeneration) {
+                // stale.
+                break;
+            }
+
+            if (msg->what() == kWhatPacketLate) {
+                CHECK(!mRequestedRetransmission);
+                CHECK_GE(mAwaitingExtSeqNo, 0);
+
+                ALOGV("packet extSeqNo %d is late, requesting retransmission.",
+                      mAwaitingExtSeqNo);
+
+                sp<AMessage> notify = mNotifyLost->dup();
+                notify->setInt32("seqNo", (mAwaitingExtSeqNo & 0xffff));
+                notify->post();
+
+                mRequestedRetransmission = true;
+                break;
+            }
+
+            ALOGW("lost packet extSeqNo %d", mAwaitingExtSeqNo);
+
+            sp<AMessage> extra;
+            mTSParser->signalDiscontinuity(
+                    ATSParser::DISCONTINUITY_TIME, extra);
+
+            mAwaitingExtSeqNo = -1;
+            mRequestedRetransmission = false;
+            dequeueMore();
+            break;
+        }
+
+        case kWhatVideoDecoderNotify:
+        {
+            onVideoDecoderNotify();
+            break;
+        }
+
+        default:
+            TRESPASS();
+    }
+}
+
+void DirectRenderer::onQueueBuffer(const sp<ABuffer> &buffer) {
+    int32_t newExtendedSeqNo = buffer->int32Data();
+
+    if (mPackets.empty()) {
+        mPackets.push_back(buffer);
+        return;
+    }
+
+    if (mAwaitingExtSeqNo > 0 && newExtendedSeqNo < mAwaitingExtSeqNo) {
+        // We're no longer interested in these. They're old.
+        return;
+    }
+
+    List<sp<ABuffer> >::iterator firstIt = mPackets.begin();
+    List<sp<ABuffer> >::iterator it = --mPackets.end();
+    for (;;) {
+        int32_t extendedSeqNo = (*it)->int32Data();
+
+        if (extendedSeqNo == newExtendedSeqNo) {
+            // Duplicate packet.
+            return;
+        }
+
+        if (extendedSeqNo < newExtendedSeqNo) {
+            // Insert new packet after the one at "it".
+            mPackets.insert(++it, buffer);
+            return;
+        }
+
+        if (it == firstIt) {
+            // Insert new packet before the first existing one.
+            mPackets.insert(it, buffer);
+            return;
+        }
+
+        --it;
+    }
+}
+
+void DirectRenderer::dequeueMore() {
+    if (mAwaitingExtSeqNo >= 0) {
+        // Remove all packets before the one we're looking for, they had
+        // their chance.
+        while (!mPackets.empty()
+                && (*mPackets.begin())->int32Data() < mAwaitingExtSeqNo) {
+            ALOGV("dropping late packet extSeqNo %d",
+                  (*mPackets.begin())->int32Data());
+
+            mPackets.erase(mPackets.begin());
+        }
+    }
+
+    bool packetLostScheduled = (mAwaitingExtSeqNo >= 0);
+
+    while (!mPackets.empty()) {
+        sp<ABuffer> buffer = *mPackets.begin();
+        int32_t extSeqNo = buffer->int32Data();
+
+        if (mAwaitingExtSeqNo >= 0 && extSeqNo != mAwaitingExtSeqNo) {
+            break;
+        }
+
+        mPackets.erase(mPackets.begin());
+
+        if (packetLostScheduled) {
+            packetLostScheduled = false;
+            cancelPacketLost();
+        }
+
+        if (mRequestedRetransmission) {
+            ALOGV("recovered after requesting retransmission of extSeqNo %d",
+                  mAwaitingExtSeqNo);
+        }
+
+        CHECK_EQ(buffer->size() % 188, 0u);
+
+        for (size_t offset = 0; offset < buffer->size(); offset += 188) {
+            status_t err = mTSParser->feedTSPacket(
+                    buffer->data() + offset, 188);
+
+            CHECK_EQ(err, (status_t)OK);
+        }
+
+        mAwaitingExtSeqNo = extSeqNo + 1;
+        mRequestedRetransmission = false;
+    }
+
+    if (!packetLostScheduled && mAwaitingExtSeqNo >= 0) {
+        schedulePacketLost();
+    }
+
+    dequeueAccessUnits();
+}
+
+void DirectRenderer::dequeueAccessUnits() {
+    sp<AnotherPacketSource> audioSource =
+        static_cast<AnotherPacketSource *>(
+                mTSParser->getSource(ATSParser::AUDIO).get());
+
+    if (audioSource != NULL) {
+        status_t finalResult;
+        size_t n = 0;
+        while (audioSource->hasBufferAvailable(&finalResult)) {
+            sp<ABuffer> accessUnit;
+            status_t err = audioSource->dequeueAccessUnit(&accessUnit);
+            if (err == OK) {
+                ++n;
+            }
+        }
+
+        if (n > 0) {
+            ALOGV("dequeued %d audio access units.", n);
+        }
+    }
+
+    sp<AnotherPacketSource> videoSource =
+        static_cast<AnotherPacketSource *>(
+                mTSParser->getSource(ATSParser::VIDEO).get());
+
+    if (videoSource != NULL) {
+        if (mVideoDecoder == NULL) {
+            sp<MetaData> meta = videoSource->getFormat();
+            if (meta != NULL) {
+                sp<AMessage> videoFormat;
+                status_t err = convertMetaDataToMessage(meta, &videoFormat);
+                CHECK_EQ(err, (status_t)OK);
+
+                AString mime;
+                CHECK(videoFormat->findString("mime", &mime));
+
+                mVideoDecoderLooper = new ALooper;
+                mVideoDecoderLooper->setName("video codec looper");
+
+                mVideoDecoderLooper->start(
+                        false /* runOnCallingThread */,
+                        false /* canCallJava */,
+                        PRIORITY_DEFAULT);
+
+                mVideoDecoder = MediaCodec::CreateByType(
+                        mVideoDecoderLooper, mime.c_str(), false /* encoder */);
+
+                CHECK(mVideoDecoder != NULL);
+
+                err = mVideoDecoder->configure(
+                        videoFormat,
+                        new SurfaceTextureClient(mSurfaceTex),
+                        NULL /* crypto */,
+                        0 /* flags */);
+
+                CHECK_EQ(err, (status_t)OK);
+
+                err = mVideoDecoder->start();
+                CHECK_EQ(err, (status_t)OK);
+
+                err = mVideoDecoder->getInputBuffers(
+                        &mVideoDecoderInputBuffers);
+                CHECK_EQ(err, (status_t)OK);
+
+                scheduleVideoDecoderNotification();
+            }
+        }
+
+        status_t finalResult;
+        size_t n = 0;
+        while (videoSource->hasBufferAvailable(&finalResult)) {
+            sp<ABuffer> accessUnit;
+            status_t err = videoSource->dequeueAccessUnit(&accessUnit);
+            if (err == OK) {
+                mVideoAccessUnits.push_back(accessUnit);
+                ++n;
+            }
+        }
+
+        if (n > 0) {
+            ALOGV("dequeued %d video access units.", n);
+            queueVideoDecoderInputBuffers();
+        }
+    }
+}
+
+void DirectRenderer::schedulePacketLost() {
+    sp<AMessage> msg;
+
+#if 1
+    msg = new AMessage(kWhatPacketLate, id());
+    msg->setInt32("generation", mPacketLostGeneration);
+    msg->post(kPacketLateDelayUs);
+#endif
+
+    msg = new AMessage(kWhatPacketLost, id());
+    msg->setInt32("generation", mPacketLostGeneration);
+    msg->post(kPacketLostDelayUs);
+}
+
+void DirectRenderer::cancelPacketLost() {
+    ++mPacketLostGeneration;
+}
+
+void DirectRenderer::queueVideoDecoderInputBuffers() {
+    if (mVideoDecoder == NULL) {
+        return;
+    }
+
+    bool submittedMore = false;
+
+    while (!mVideoAccessUnits.empty()
+            && !mVideoDecoderInputBuffersAvailable.empty()) {
+        size_t index = *mVideoDecoderInputBuffersAvailable.begin();
+
+        mVideoDecoderInputBuffersAvailable.erase(
+                mVideoDecoderInputBuffersAvailable.begin());
+
+        sp<ABuffer> srcBuffer = *mVideoAccessUnits.begin();
+        mVideoAccessUnits.erase(mVideoAccessUnits.begin());
+
+        const sp<ABuffer> &dstBuffer =
+            mVideoDecoderInputBuffers.itemAt(index);
+
+        memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size());
+
+        int64_t timeUs;
+        CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs));
+
+        status_t err = mVideoDecoder->queueInputBuffer(
+                index,
+                0 /* offset */,
+                srcBuffer->size(),
+                timeUs,
+                0 /* flags */);
+        CHECK_EQ(err, (status_t)OK);
+
+        submittedMore = true;
+    }
+
+    if (submittedMore) {
+        scheduleVideoDecoderNotification();
+    }
+}
+
+void DirectRenderer::onVideoDecoderNotify() {
+    mVideoDecoderNotificationPending = false;
+
+    for (;;) {
+        size_t index;
+        status_t err = mVideoDecoder->dequeueInputBuffer(&index);
+
+        if (err == OK) {
+            mVideoDecoderInputBuffersAvailable.push_back(index);
+        } else if (err == -EAGAIN) {
+            break;
+        } else {
+            TRESPASS();
+        }
+    }
+
+    queueVideoDecoderInputBuffers();
+
+    for (;;) {
+        size_t index;
+        size_t offset;
+        size_t size;
+        int64_t timeUs;
+        uint32_t flags;
+        status_t err = mVideoDecoder->dequeueOutputBuffer(
+                &index,
+                &offset,
+                &size,
+                &timeUs,
+                &flags);
+
+        if (err == OK) {
+            err = mVideoDecoder->renderOutputBufferAndRelease(index);
+            CHECK_EQ(err, (status_t)OK);
+        } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+            // We don't care.
+        } else if (err == INFO_FORMAT_CHANGED) {
+            // We don't care.
+        } else if (err == -EAGAIN) {
+            break;
+        } else {
+            TRESPASS();
+        }
+    }
+
+    scheduleVideoDecoderNotification();
+}
+
+void DirectRenderer::scheduleVideoDecoderNotification() {
+    if (mVideoDecoderNotificationPending) {
+        return;
+    }
+
+    sp<AMessage> notify =
+        new AMessage(kWhatVideoDecoderNotify, id());
+
+    mVideoDecoder->requestActivityNotification(notify);
+    mVideoDecoderNotificationPending = true;
+}
+
+}  // namespace android
+
diff --git a/media/libstagefright/wifi-display/sink/DirectRenderer.h b/media/libstagefright/wifi-display/sink/DirectRenderer.h
new file mode 100644 (file)
index 0000000..2babcb8
--- /dev/null
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2012, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DIRECT_RENDERER_H_
+
+#define DIRECT_RENDERER_H_
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct ATSParser;
+struct IGraphicBufferProducer;
+struct MediaCodec;
+
+// An experimental renderer that only supports video and decodes video data
+// as soon as it arrives using a MediaCodec instance, rendering it without
+// delay. Primarily meant to finetune packet loss discovery and minimize
+// latency.
+struct DirectRenderer : public AHandler {
+    DirectRenderer(
+            const sp<AMessage> &notifyLost,
+            const sp<IGraphicBufferProducer> &bufferProducer);
+
+    enum {
+        kWhatQueueBuffer = 'queB',
+    };
+
+protected:
+    virtual void onMessageReceived(const sp<AMessage> &msg);
+    virtual ~DirectRenderer();
+
+private:
+    enum {
+        kWhatPacketLate,
+        kWhatPacketLost,
+        kWhatVideoDecoderNotify,
+    };
+
+    static const int64_t kPacketLateDelayUs;
+    static const int64_t kPacketLostDelayUs;
+
+    sp<AMessage> mNotifyLost;
+    sp<IGraphicBufferProducer> mSurfaceTex;
+
+    // Ordered by extended seq number.
+    List<sp<ABuffer> > mPackets;
+
+    sp<ATSParser> mTSParser;
+
+    sp<ALooper> mVideoDecoderLooper;
+    sp<MediaCodec> mVideoDecoder;
+    Vector<sp<ABuffer> > mVideoDecoderInputBuffers;
+    List<size_t> mVideoDecoderInputBuffersAvailable;
+    bool mVideoDecoderNotificationPending;
+
+    List<sp<ABuffer> > mVideoAccessUnits;
+
+    int32_t mAwaitingExtSeqNo;
+    bool mRequestedRetransmission;
+    int32_t mPacketLostGeneration;
+
+    void onQueueBuffer(const sp<ABuffer> &buffer);
+    void onVideoDecoderNotify();
+
+    void dequeueMore();
+    void dequeueAccessUnits();
+
+    void schedulePacketLost();
+    void cancelPacketLost();
+
+    void queueVideoDecoderInputBuffers();
+    void scheduleVideoDecoderNotification();
+
+    DISALLOW_EVIL_CONSTRUCTORS(DirectRenderer);
+};
+
+}  // namespace android
+
+#endif  // DIRECT_RENDERER_H_
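DirectRenderer schedules two timers per awaited packet, kWhatPacketLate at 60 ms and kWhatPacketLost at 80 ms, and cancels them by bumping mPacketLostGeneration instead of unposting the messages. A distilled sketch of that generation-counter idiom; the handler name and constant are illustrative, not part of this change:

    // Posted AMessages cannot be withdrawn, so stale timeouts are filtered
    // by a generation counter (as in schedulePacketLost/cancelPacketLost).
    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/AMessage.h>

    namespace android {

    struct TimeoutHandler : public AHandler {  // hypothetical example class
        enum { kWhatTimeout = 'tmot' };

        TimeoutHandler() : mGeneration(0) {}

        void scheduleTimeout(int64_t delayUs) {
            sp<AMessage> msg = new AMessage(kWhatTimeout, id());
            msg->setInt32("generation", mGeneration);
            msg->post(delayUs);
        }

        void cancelTimeout() {
            ++mGeneration;  // anything already posted is now stale
        }

    protected:
        virtual void onMessageReceived(const sp<AMessage> &msg) {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mGeneration) {
                return;  // cancelled after this message was posted
            }
            // The timeout actually fired; react here.
        }

    private:
        int32_t mGeneration;
    };

    }  // namespace android
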
diff --git a/media/libstagefright/wifi-display/sink/RTPSink.cpp b/media/libstagefright/wifi-display/sink/RTPSink.cpp
index 640e055..ad75373 100644 (file)
 #include "RTPSink.h"
 
 #include "ANetworkSession.h"
+
+#if USE_TUNNEL_RENDERER
 #include "TunnelRenderer.h"
+#define RENDERER_CLASS TunnelRenderer
+#else
+#include "DirectRenderer.h"
+#define RENDERER_CLASS DirectRenderer
+#endif
 
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
@@ -238,9 +245,11 @@ void RTPSink::Source::addReportBlock(
 
 RTPSink::RTPSink(
         const sp<ANetworkSession> &netSession,
-        const sp<IGraphicBufferProducer> &bufferProducer)
+        const sp<IGraphicBufferProducer> &bufferProducer,
+        const sp<AMessage> &notify)
     : mNetSession(netSession),
       mSurfaceTex(bufferProducer),
+      mNotify(notify),
       mRTPPort(0),
       mRTPSessionID(0),
       mRTCPSessionID(0),
@@ -470,6 +479,7 @@ status_t RTPSink::parseRTP(const sp<ABuffer> &buffer) {
     uint32_t rtpTime = U32_AT(&data[4]);
     uint16_t seqNo = U16_AT(&data[2]);
 
+#if 0
     int64_t arrivalTimeUs;
     CHECK(buffer->meta()->findInt64("arrivalTimeUs", &arrivalTimeUs));
 
@@ -500,6 +510,7 @@ status_t RTPSink::parseRTP(const sp<ABuffer> &buffer) {
             ALOGI("packet was %.2f ms late", latenessMs);
         }
     }
+#endif
 
     sp<AMessage> meta = buffer->meta();
     meta->setInt32("ssrc", srcId);
@@ -515,12 +526,12 @@ status_t RTPSink::parseRTP(const sp<ABuffer> &buffer) {
             sp<AMessage> notifyLost = new AMessage(kWhatPacketLost, id());
             notifyLost->setInt32("ssrc", srcId);
 
-            mRenderer = new TunnelRenderer(notifyLost, mSurfaceTex);
+            mRenderer = new RENDERER_CLASS(notifyLost, mSurfaceTex);
             looper()->registerHandler(mRenderer);
         }
 
         sp<AMessage> queueBufferMsg =
-            new AMessage(TunnelRenderer::kWhatQueueBuffer, mRenderer->id());
+            new AMessage(RENDERER_CLASS::kWhatQueueBuffer, mRenderer->id());
 
         sp<Source> source = new Source(seqNo, buffer, queueBufferMsg);
         mSources.add(srcId, source);
diff --git a/media/libstagefright/wifi-display/sink/RTPSink.h b/media/libstagefright/wifi-display/sink/RTPSink.h
index 2183fd6..6e40185 100644 (file)
 
 #include <gui/Surface.h>
 
+#define USE_TUNNEL_RENDERER     0
+
 namespace android {
 
 struct ABuffer;
 struct ANetworkSession;
+
+#if USE_TUNNEL_RENDERER
 struct TunnelRenderer;
+#else
+struct DirectRenderer;
+#endif
 
 // Creates a pair of sockets for RTP/RTCP traffic, instantiates a renderer
 // for incoming transport stream data and occasionally sends statistics over
 // the RTCP channel.
 struct RTPSink : public AHandler {
     RTPSink(const sp<ANetworkSession> &netSession,
-            const sp<IGraphicBufferProducer> &bufferProducer);
+            const sp<IGraphicBufferProducer> &bufferProducer,
+            const sp<AMessage> &notify);
 
     // If TCP interleaving is used, no UDP sockets are created, instead
     // incoming RTP/RTCP packets (arriving on the RTSP control connection)
@@ -67,6 +75,7 @@ private:
 
     sp<ANetworkSession> mNetSession;
     sp<IGraphicBufferProducer> mSurfaceTex;
+    sp<AMessage> mNotify;
     KeyedVector<uint32_t, sp<Source> > mSources;
 
     int32_t mRTPPort;
@@ -78,7 +87,11 @@ private:
     LinearRegression mRegression;
     int64_t mMaxDelayMs;
 
+#if USE_TUNNEL_RENDERER
     sp<TunnelRenderer> mRenderer;
+#else
+    sp<DirectRenderer> mRenderer;
+#endif
 
     status_t parseRTP(const sp<ABuffer> &buffer);
     status_t parseRTCP(const sp<ABuffer> &buffer);
diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp
index f3f4536..04dbd7b 100644 (file)
@@ -264,16 +264,17 @@ sp<ABuffer> TunnelRenderer::dequeueBuffer() {
     if (mFirstFailedAttemptUs < 0ll) {
         mFirstFailedAttemptUs = ALooper::GetNowUs();
 
-        ALOGI("failed to get the correct packet the first time.");
+        ALOGV("failed to get the correct packet the first time.");
         return NULL;
     }
 
     if (mFirstFailedAttemptUs + 50000ll > ALooper::GetNowUs()) {
         // We're willing to wait a little while to get the right packet.
 
-#if 0
+#if 1
         if (!mRequestedRetransmission) {
-            ALOGI("requesting retransmission of seqNo %d",
+            ALOGI("requesting retransmission of extSeqNo %d (seqNo %d)",
+                  mLastDequeuedExtSeqNo + 1,
                   (mLastDequeuedExtSeqNo + 1) & 0xffff);
 
             sp<AMessage> notify = mNotifyLost->dup();
@@ -284,7 +285,7 @@ sp<ABuffer> TunnelRenderer::dequeueBuffer() {
         } else
 #endif
         {
-            ALOGI("still waiting for the correct packet to arrive.");
+            ALOGV("still waiting for the correct packet to arrive.");
         }
 
         return NULL;
diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
index 0f0caf1..46c40c7 100644 (file)
@@ -31,12 +31,27 @@ namespace android {
 
 WifiDisplaySink::WifiDisplaySink(
         const sp<ANetworkSession> &netSession,
-        const sp<IGraphicBufferProducer> &bufferProducer)
+        const sp<IGraphicBufferProducer> &bufferProducer,
+        const sp<AMessage> &notify)
     : mState(UNDEFINED),
       mNetSession(netSession),
       mSurfaceTex(bufferProducer),
+      mNotify(notify),
       mSessionID(0),
       mNextCSeq(1) {
+#if 1
+    // We support any and all resolutions, but prefer 720p30
+    mSinkSupportedVideoFormats.setNativeResolution(
+            VideoFormats::RESOLUTION_CEA, 5);  // 1280 x 720 p30
+
+    mSinkSupportedVideoFormats.enableAll();
+#else
+    // We only support 800 x 600 p60.
+    mSinkSupportedVideoFormats.disableAll();
+
+    mSinkSupportedVideoFormats.setNativeResolution(
+            VideoFormats::RESOLUTION_VESA, 1);  // 800 x 600 p60
+#endif
 }
 
 WifiDisplaySink::~WifiDisplaySink() {
@@ -123,6 +138,8 @@ void WifiDisplaySink::onMessageReceived(const sp<AMessage> &msg) {
     switch (msg->what()) {
         case kWhatStart:
         {
+            sleep(2);  // XXX
+
             int32_t sourcePort;
 
             if (msg->findString("setupURI", &mSetupURI)) {
@@ -176,7 +193,13 @@ void WifiDisplaySink::onMessageReceived(const sp<AMessage> &msg) {
                         mNetSession->destroySession(mSessionID);
                         mSessionID = 0;
 
-                        looper()->stop();
+                        if (mNotify == NULL) {
+                            looper()->stop();
+                        } else {
+                            sp<AMessage> notify = mNotify->dup();
+                            notify->setInt32("what", kWhatDisconnected);
+                            notify->post();
+                        }
                     }
                     break;
                 }
@@ -227,6 +250,18 @@ void WifiDisplaySink::onMessageReceived(const sp<AMessage> &msg) {
             break;
         }
 
+        case kWhatRequestIDRFrame:
+        {
+            ALOGI("requesting IDR frame");
+            sendIDRFrameRequest(mSessionID);
+            break;
+        }
+
+        case kWhatRTPSinkNotify:
+        {
+            break;
+        }
+
         default:
             TRESPASS();
     }
@@ -392,6 +427,11 @@ status_t WifiDisplaySink::onReceivePlayResponse(
     return OK;
 }
 
+status_t WifiDisplaySink::onReceiveIDRFrameRequestResponse(
+        int32_t sessionID, const sp<ParsedMessage> &msg) {
+    return OK;
+}
+
 void WifiDisplaySink::onReceiveClientData(const sp<AMessage> &msg) {
     int32_t sessionID;
     CHECK(msg->findInt32("sessionID", &sessionID));
@@ -474,11 +514,11 @@ void WifiDisplaySink::onGetParameterRequest(
         int32_t sessionID,
         int32_t cseq,
         const sp<ParsedMessage> &data) {
-    AString body =
-        "wfd_video_formats: "
-        "28 00 02 02 FFFFFFFF 0000000 00000000 00 0000 0000 00 none none\r\n"
-        "wfd_audio_codecs: AAC 0000000F 00\r\n"
-        "wfd_client_rtp_ports: RTP/AVP/UDP;unicast 19000 0 mode=play\r\n";
+    AString body = "wfd_video_formats: ";
+    body.append(mSinkSupportedVideoFormats.getFormatSpec());
+    body.append(
+            "\r\nwfd_audio_codecs: AAC 0000000F 00\r\n"
+            "wfd_client_rtp_ports: RTP/AVP/UDP;unicast 19000 0 mode=play\r\n");
 
     AString response = "RTSP/1.0 200 OK\r\n";
     AppendCommonResponse(&response, cseq);
@@ -517,7 +557,9 @@ status_t WifiDisplaySink::sendDescribe(int32_t sessionID, const char *uri) {
 }
 
 status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) {
-    mRTPSink = new RTPSink(mNetSession, mSurfaceTex);
+    sp<AMessage> notify = new AMessage(kWhatRTPSinkNotify, id());
+
+    mRTPSink = new RTPSink(mNetSession, mSurfaceTex, notify);
     looper()->registerHandler(mRTPSink);
 
     status_t err = mRTPSink->init(sUseTCPInterleaving);
@@ -584,6 +626,35 @@ status_t WifiDisplaySink::sendPlay(int32_t sessionID, const char *uri) {
     return OK;
 }
 
+status_t WifiDisplaySink::sendIDRFrameRequest(int32_t sessionID) {
+    AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
+
+    AppendCommonResponse(&request, mNextCSeq);
+
+    AString content = "wfd_idr_request\r\n";
+
+    request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str()));
+    request.append(StringPrintf("Content-Length: %d\r\n", content.size()));
+    request.append("\r\n");
+    request.append(content);
+
+    status_t err =
+        mNetSession->sendRequest(sessionID, request.c_str(), request.size());
+
+    if (err != OK) {
+        return err;
+    }
+
+    registerResponseHandler(
+            sessionID,
+            mNextCSeq,
+            &WifiDisplaySink::onReceiveIDRFrameRequestResponse);
+
+    ++mNextCSeq;
+
+    return OK;
+}
+
 void WifiDisplaySink::onSetParameterRequest(
         int32_t sessionID,
         int32_t cseq,
diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h
index a508839..5f86519 100644 (file)
 
 #include "ANetworkSession.h"
 
+#include "VideoFormats.h"
+
 #include <gui/Surface.h>
 #include <media/stagefright/foundation/AHandler.h>
 
 namespace android {
 
+struct AMessage;
 struct ParsedMessage;
 struct RTPSink;
 
@@ -32,9 +35,18 @@ struct RTPSink;
 // Connects to a wifi display source and renders the incoming
 // transport stream using a MediaPlayer instance.
 struct WifiDisplaySink : public AHandler {
+    enum {
+        kWhatDisconnected,
+    };
+
+    // If no notification message is specified (notify == NULL)
+    // the sink will stop its looper() once the session ends,
+    // otherwise it will post an appropriate notification but leave
+    // the looper() running.
     WifiDisplaySink(
             const sp<ANetworkSession> &netSession,
-            const sp<IGraphicBufferProducer> &bufferProducer = NULL);
+            const sp<IGraphicBufferProducer> &bufferProducer = NULL,
+            const sp<AMessage> &notify = NULL);
 
     void start(const char *sourceHost, int32_t sourcePort);
     void start(const char *uri);
@@ -56,6 +68,8 @@ private:
         kWhatStart,
         kWhatRTSPNotify,
         kWhatStop,
+        kWhatRequestIDRFrame,
+        kWhatRTPSinkNotify,
     };
 
     struct ResponseID {
@@ -75,8 +89,10 @@ private:
     static const bool sUseTCPInterleaving = false;
 
     State mState;
+    VideoFormats mSinkSupportedVideoFormats;
     sp<ANetworkSession> mNetSession;
     sp<IGraphicBufferProducer> mSurfaceTex;
+    sp<AMessage> mNotify;
     AString mSetupURI;
     AString mRTSPHost;
     int32_t mSessionID;
@@ -93,6 +109,7 @@ private:
     status_t sendDescribe(int32_t sessionID, const char *uri);
     status_t sendSetup(int32_t sessionID, const char *uri);
     status_t sendPlay(int32_t sessionID, const char *uri);
+    status_t sendIDRFrameRequest(int32_t sessionID);
 
     status_t onReceiveM2Response(
             int32_t sessionID, const sp<ParsedMessage> &msg);
@@ -108,6 +125,9 @@ private:
     status_t onReceivePlayResponse(
             int32_t sessionID, const sp<ParsedMessage> &msg);
 
+    status_t onReceiveIDRFrameRequestResponse(
+            int32_t sessionID, const sp<ParsedMessage> &msg);
+
     void registerResponseHandler(
             int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func);
 
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
index d6b87a7..91dc1fa 100644 (file)
@@ -346,8 +346,17 @@ WifiDisplaySource::PlaybackSession::PlaybackSession(
 status_t WifiDisplaySource::PlaybackSession::init(
         const char *clientIP, int32_t clientRtp, int32_t clientRtcp,
         Sender::TransportMode transportMode,
-        bool usePCMAudio) {
-    status_t err = setupPacketizer(usePCMAudio);
+        bool enableAudio,
+        bool usePCMAudio,
+        bool enableVideo,
+        VideoFormats::ResolutionType videoResolutionType,
+        size_t videoResolutionIndex) {
+    status_t err = setupPacketizer(
+            enableAudio,
+            usePCMAudio,
+            enableVideo,
+            videoResolutionType,
+            videoResolutionIndex);
 
     if (err != OK) {
         return err;
@@ -639,13 +648,27 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived(
     }
 }
 
-status_t WifiDisplaySource::PlaybackSession::setupPacketizer(bool usePCMAudio) {
+status_t WifiDisplaySource::PlaybackSession::setupPacketizer(
+        bool enableAudio,
+        bool usePCMAudio,
+        bool enableVideo,
+        VideoFormats::ResolutionType videoResolutionType,
+        size_t videoResolutionIndex) {
+    CHECK(enableAudio || enableVideo);
+
     mPacketizer = new TSPacketizer;
 
-    status_t err = addVideoSource();
+    if (enableVideo) {
+        status_t err = addVideoSource(
+                videoResolutionType, videoResolutionIndex);
 
-    if (err != OK) {
-        return err;
+        if (err != OK) {
+            return err;
+        }
+    }
+
+    if (!enableAudio) {
+        return OK;
     }
 
     return addAudioSource(usePCMAudio);
@@ -735,27 +758,30 @@ status_t WifiDisplaySource::PlaybackSession::addSource(
     return OK;
 }
 
-status_t WifiDisplaySource::PlaybackSession::addVideoSource() {
-    sp<SurfaceMediaSource> source = new SurfaceMediaSource(width(), height());
+status_t WifiDisplaySource::PlaybackSession::addVideoSource(
+        VideoFormats::ResolutionType videoResolutionType,
+        size_t videoResolutionIndex) {
+    size_t width, height, framesPerSecond;
+    bool interlaced;
+    CHECK(VideoFormats::GetConfiguration(
+                videoResolutionType,
+                videoResolutionIndex,
+                &width,
+                &height,
+                &framesPerSecond,
+                &interlaced));
+
+    sp<SurfaceMediaSource> source = new SurfaceMediaSource(width, height);
 
     source->setUseAbsoluteTimestamps();
 
-#if 1
     sp<RepeaterSource> videoSource =
-        new RepeaterSource(source, 30.0 /* rateHz */);
-#endif
+        new RepeaterSource(source, framesPerSecond);
 
-#if 1
     size_t numInputBuffers;
     status_t err = addSource(
             true /* isVideo */, videoSource, true /* isRepeaterSource */,
             false /* usePCMAudio */, &numInputBuffers);
-#else
-    size_t numInputBuffers;
-    status_t err = addSource(
-            true /* isVideo */, source, false /* isRepeaterSource */,
-            false /* usePCMAudio */, &numInputBuffers);
-#endif
 
     if (err != OK) {
         return err;
@@ -790,22 +816,6 @@ sp<IGraphicBufferProducer> WifiDisplaySource::PlaybackSession::getSurfaceTexture
     return mBufferQueue;
 }
 
-int32_t WifiDisplaySource::PlaybackSession::width() const {
-#if USE_1080P
-    return 1920;
-#else
-    return 1280;
-#endif
-}
-
-int32_t WifiDisplaySource::PlaybackSession::height() const {
-#if USE_1080P
-    return 1080;
-#else
-    return 720;
-#endif
-}
-
 void WifiDisplaySource::PlaybackSession::requestIDRFrame() {
     for (size_t i = 0; i < mTracks.size(); ++i) {
         const sp<Track> &track = mTracks.valueAt(i);
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h
index 281548d..7365c78 100644 (file)
@@ -19,6 +19,7 @@
 #define PLAYBACK_SESSION_H_
 
 #include "Sender.h"
+#include "VideoFormats.h"
 #include "WifiDisplaySource.h"
 
 namespace android {
@@ -43,7 +44,11 @@ struct WifiDisplaySource::PlaybackSession : public AHandler {
     status_t init(
             const char *clientIP, int32_t clientRtp, int32_t clientRtcp,
             Sender::TransportMode transportMode,
-            bool usePCMAudio);
+            bool enableAudio,
+            bool usePCMAudio,
+            bool enableVideo,
+            VideoFormats::ResolutionType videoResolutionType,
+            size_t videoResolutionIndex);
 
     void destroyAsync();
 
@@ -57,8 +62,6 @@ struct WifiDisplaySource::PlaybackSession : public AHandler {
     status_t pause();
 
     sp<IGraphicBufferProducer> getSurfaceTexture();
-    int32_t width() const;
-    int32_t height() const;
 
     void requestIDRFrame();
 
@@ -109,7 +112,12 @@ private:
 
     bool mAllTracksHavePacketizerIndex;
 
-    status_t setupPacketizer(bool usePCMAudio);
+    status_t setupPacketizer(
+            bool enableAudio,
+            bool usePCMAudio,
+            bool enableVideo,
+            VideoFormats::ResolutionType videoResolutionType,
+            size_t videoResolutionIndex);
 
     status_t addSource(
             bool isVideo,
@@ -118,7 +126,10 @@ private:
             bool usePCMAudio,
             size_t *numInputBuffers);
 
-    status_t addVideoSource();
+    status_t addVideoSource(
+            VideoFormats::ResolutionType videoResolutionType,
+            size_t videoResolutionIndex);
+
     status_t addAudioSource(bool usePCMAudio);
 
     ssize_t appendTSData(
diff --git a/media/libstagefright/wifi-display/source/Sender.cpp b/media/libstagefright/wifi-display/source/Sender.cpp
index 9048691..8b7d93f 100644 (file)
@@ -685,7 +685,15 @@ status_t Sender::parseTSFB(
 
         if (!foundSeqNo || blp != 0) {
             ALOGI("Some sequence numbers were no longer available for "
-                  "retransmission");
+                  "retransmission (seqNo = %d, foundSeqNo = %d, blp = 0x%04x)",
+                  seqNo, foundSeqNo, blp);
+
+            if (!mHistory.empty()) {
+                int32_t earliest = (*mHistory.begin())->int32Data() & 0xffff;
+                int32_t latest = (*--mHistory.end())->int32Data() & 0xffff;
+
+                ALOGI("have seq numbers from %d - %d", earliest, latest);
+            }
         }
     }
 
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
index 9ec1064..0fed19b 100644 (file)
@@ -58,8 +58,19 @@ WifiDisplaySource::WifiDisplaySource(
       mIsHDCP2_0(false),
       mHDCPPort(0),
       mHDCPInitializationComplete(false),
-      mSetupTriggerDeferred(false)
-{
+      mSetupTriggerDeferred(false) {
+    mSupportedSourceVideoFormats.enableAll();
+
+    mSupportedSourceVideoFormats.setNativeResolution(
+            VideoFormats::RESOLUTION_CEA, 5);  // 1280x720 p30
+
+    // Disable resolutions above 1080p since the encoder won't be able to
+    // handle them.
+    mSupportedSourceVideoFormats.setResolutionEnabled(
+            VideoFormats::RESOLUTION_VESA, 28, false);  // 1920x1200 p30
+
+    mSupportedSourceVideoFormats.setResolutionEnabled(
+            VideoFormats::RESOLUTION_VESA, 29, false);  // 1920x1200 p60
 }
 
 WifiDisplaySource::~WifiDisplaySource() {
@@ -375,13 +386,33 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
                         IRemoteDisplayClient::kDisplayErrorUnknown);
             } else if (what == PlaybackSession::kWhatSessionEstablished) {
                 if (mClient != NULL) {
-                    mClient->onDisplayConnected(
-                            mClientInfo.mPlaybackSession->getSurfaceTexture(),
-                            mClientInfo.mPlaybackSession->width(),
-                            mClientInfo.mPlaybackSession->height(),
-                            mUsingHDCP
-                                ? IRemoteDisplayClient::kDisplayFlagSecure
-                                : 0);
+                    if (!mSinkSupportsVideo) {
+                        mClient->onDisplayConnected(
+                                NULL,  // SurfaceTexture
+                                0, // width,
+                                0, // height,
+                                mUsingHDCP
+                                    ? IRemoteDisplayClient::kDisplayFlagSecure
+                                    : 0);
+                    } else {
+                        size_t width, height;
+
+                        CHECK(VideoFormats::GetConfiguration(
+                                    mChosenVideoResolutionType,
+                                    mChosenVideoResolutionIndex,
+                                    &width,
+                                    &height,
+                                    NULL /* framesPerSecond */,
+                                    NULL /* interlaced */));
+
+                        mClient->onDisplayConnected(
+                                mClientInfo.mPlaybackSession->getSurfaceTexture(),
+                                width,
+                                height,
+                                mUsingHDCP
+                                    ? IRemoteDisplayClient::kDisplayFlagSecure
+                                    : 0);
+                    }
                 }
 
                 if (mState == ABOUT_TO_PLAY) {
@@ -564,22 +595,6 @@ status_t WifiDisplaySource::sendM3(int32_t sessionID) {
 }
 
 status_t WifiDisplaySource::sendM4(int32_t sessionID) {
-    // wfd_video_formats:
-    // 1 byte "native"
-    // 1 byte "preferred-display-mode-supported" 0 or 1
-    // one or more avc codec structures
-    //   1 byte profile
-    //   1 byte level
-    //   4 byte CEA mask
-    //   4 byte VESA mask
-    //   4 byte HH mask
-    //   1 byte latency
-    //   2 byte min-slice-slice
-    //   2 byte slice-enc-params
-    //   1 byte framerate-control-support
-    //   max-hres (none or 2 byte)
-    //   max-vres (none or 2 byte)
-
     CHECK_EQ(sessionID, mClientSessionID);
 
     AString transportString = "UDP";
@@ -591,28 +606,35 @@ status_t WifiDisplaySource::sendM4(int32_t sessionID) {
         transportString = "TCP";
     }
 
-    // For 720p60:
-    //   use "30 00 02 02 00000040 00000000 00000000 00 0000 0000 00 none none\r\n"
-    // For 720p30:
-    //   use "28 00 02 02 00000020 00000000 00000000 00 0000 0000 00 none none\r\n"
-    // For 720p24:
-    //   use "78 00 02 02 00008000 00000000 00000000 00 0000 0000 00 none none\r\n"
-    // For 1080p30:
-    //   use "38 00 02 02 00000080 00000000 00000000 00 0000 0000 00 none none\r\n"
-    AString body = StringPrintf(
-        "wfd_video_formats: "
-#if USE_1080P
-        "38 00 02 02 00000080 00000000 00000000 00 0000 0000 00 none none\r\n"
-#else
-        "28 00 02 02 00000020 00000000 00000000 00 0000 0000 00 none none\r\n"
-#endif
-        "wfd_audio_codecs: %s\r\n"
-        "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n"
-        "wfd_client_rtp_ports: RTP/AVP/%s;unicast %d 0 mode=play\r\n",
-        (mUsingPCMAudio
-            ? "LPCM 00000002 00" // 2 ch PCM 48kHz
-            : "AAC 00000001 00"),  // 2 ch AAC 48kHz
-        mClientInfo.mLocalIP.c_str(), transportString.c_str(), mChosenRTPPort);
+    AString body;
+
+    if (mSinkSupportsVideo) {
+        body.append("wfd_video_formats: ");
+
+        VideoFormats chosenVideoFormat;
+        chosenVideoFormat.disableAll();
+        chosenVideoFormat.setNativeResolution(
+                mChosenVideoResolutionType, mChosenVideoResolutionIndex);
+
+        body.append(chosenVideoFormat.getFormatSpec());
+        body.append("\r\n");
+    }
+
+    if (mSinkSupportsAudio) {
+        body.append(
+                StringPrintf("wfd_audio_codecs: %s\r\n",
+                             (mUsingPCMAudio
+                                ? "LPCM 00000002 00" // 2 ch PCM 48kHz
+                                : "AAC 00000001 00")));  // 2 ch AAC 48kHz
+    }
+
+    body.append(
+            StringPrintf(
+                "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n"
+                "wfd_client_rtp_ports: RTP/AVP/%s;unicast %d 0 mode=play\r\n",
+                mClientInfo.mLocalIP.c_str(),
+                transportString.c_str(),
+                mChosenRTPPort));
 
     AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
     AppendCommonResponse(&request, mNextCSeq);
@@ -789,39 +811,90 @@ status_t WifiDisplaySource::onReceiveM3Response(
 
     mChosenRTPPort = port0;
 
+    if (!params->findParameter("wfd_video_formats", &value)) {
+        ALOGE("Sink doesn't report its choice of wfd_video_formats.");
+        return ERROR_MALFORMED;
+    }
+
+    mSinkSupportsVideo = false;
+
+    if  (!(value == "none")) {
+        mSinkSupportsVideo = true;
+        if (!mSupportedSinkVideoFormats.parseFormatSpec(value.c_str())) {
+            ALOGE("Failed to parse sink provided wfd_video_formats (%s)",
+                  value.c_str());
+
+            return ERROR_MALFORMED;
+        }
+
+        if (!VideoFormats::PickBestFormat(
+                    mSupportedSinkVideoFormats,
+                    mSupportedSourceVideoFormats,
+                    &mChosenVideoResolutionType,
+                    &mChosenVideoResolutionIndex)) {
+            ALOGE("Sink and source share no commonly supported video "
+                  "formats.");
+
+            return ERROR_UNSUPPORTED;
+        }
+
+        size_t width, height, framesPerSecond;
+        bool interlaced;
+        CHECK(VideoFormats::GetConfiguration(
+                    mChosenVideoResolutionType,
+                    mChosenVideoResolutionIndex,
+                    &width,
+                    &height,
+                    &framesPerSecond,
+                    &interlaced));
+
+        ALOGI("Picked video resolution %u x %u %c%u",
+              width, height, interlaced ? 'i' : 'p', framesPerSecond);
+    } else {
+        ALOGI("Sink doesn't support video at all.");
+    }
+
     if (!params->findParameter("wfd_audio_codecs", &value)) {
         ALOGE("Sink doesn't report its choice of wfd_audio_codecs.");
         return ERROR_MALFORMED;
     }
 
-    if  (value == "none") {
-        ALOGE("Sink doesn't support audio at all.");
-        return ERROR_UNSUPPORTED;
-    }
+    mSinkSupportsAudio = false;
 
-    uint32_t modes;
-    GetAudioModes(value.c_str(), "AAC", &modes);
+    if  (!(value == "none")) {
+        mSinkSupportsAudio = true;
 
-    bool supportsAAC = (modes & 1) != 0;  // AAC 2ch 48kHz
+        uint32_t modes;
+        GetAudioModes(value.c_str(), "AAC", &modes);
 
-    GetAudioModes(value.c_str(), "LPCM", &modes);
+        bool supportsAAC = (modes & 1) != 0;  // AAC 2ch 48kHz
 
-    bool supportsPCM = (modes & 2) != 0;  // LPCM 2ch 48kHz
+        GetAudioModes(value.c_str(), "LPCM", &modes);
 
-    char val[PROPERTY_VALUE_MAX];
-    if (supportsPCM
-            && property_get("media.wfd.use-pcm-audio", val, NULL)
-            && (!strcasecmp("true", val) || !strcmp("1", val))) {
-        ALOGI("Using PCM audio.");
-        mUsingPCMAudio = true;
-    } else if (supportsAAC) {
-        ALOGI("Using AAC audio.");
-        mUsingPCMAudio = false;
-    } else if (supportsPCM) {
-        ALOGI("Using PCM audio.");
-        mUsingPCMAudio = true;
+        bool supportsPCM = (modes & 2) != 0;  // LPCM 2ch 48kHz
+
+        char val[PROPERTY_VALUE_MAX];
+        if (supportsPCM
+                && property_get("media.wfd.use-pcm-audio", val, NULL)
+                && (!strcasecmp("true", val) || !strcmp("1", val))) {
+            ALOGI("Using PCM audio.");
+            mUsingPCMAudio = true;
+        } else if (supportsAAC) {
+            ALOGI("Using AAC audio.");
+            mUsingPCMAudio = false;
+        } else if (supportsPCM) {
+            ALOGI("Using PCM audio.");
+            mUsingPCMAudio = true;
+        } else {
+            ALOGI("Sink doesn't support an audio format we do.");
+            return ERROR_UNSUPPORTED;
+        }
     } else {
-        ALOGI("Sink doesn't support an audio format we do.");
+        ALOGI("Sink doesn't support audio at all.");
+    }
+
+    if (!mSinkSupportsVideo && !mSinkSupportsAudio) {
+        ALOGE("Sink supports neither video nor audio...");
         return ERROR_UNSUPPORTED;
     }
 
@@ -1160,7 +1233,11 @@ status_t WifiDisplaySource::onSetupRequest(
             clientRtp,
             clientRtcp,
             transportMode,
-            mUsingPCMAudio);
+            mSinkSupportsAudio,
+            mUsingPCMAudio,
+            mSinkSupportsVideo,
+            mChosenVideoResolutionType,
+            mChosenVideoResolutionIndex);
 
     if (err != OK) {
         looper()->unregisterHandler(playbackSession->id());
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
index 974e070..fec2c6d 100644 (file)
@@ -19,6 +19,7 @@
 #define WIFI_DISPLAY_SOURCE_H_
 
 #include "ANetworkSession.h"
+#include "VideoFormats.h"
 
 #include <media/stagefright/foundation/AHandler.h>
 
@@ -26,8 +27,6 @@
 
 namespace android {
 
-#define USE_1080P       0
-
 struct IHDCP;
 struct IRemoteDisplayClient;
 struct ParsedMessage;
@@ -112,6 +111,7 @@ private:
         kPlaybackSessionTimeoutSecs * 1000000ll;
 
     State mState;
+    VideoFormats mSupportedSourceVideoFormats;
     sp<ANetworkSession> mNetSession;
     sp<IRemoteDisplayClient> mClient;
     struct in_addr mInterfaceAddr;
@@ -121,6 +121,14 @@ private:
 
     int32_t mChosenRTPPort;  // extracted from "wfd_client_rtp_ports"
 
+    bool mSinkSupportsVideo;
+    VideoFormats mSupportedSinkVideoFormats;
+
+    VideoFormats::ResolutionType mChosenVideoResolutionType;
+    size_t mChosenVideoResolutionIndex;
+
+    bool mSinkSupportsAudio;
+
     bool mUsingPCMAudio;
     int32_t mClientSessionID;
 
diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp
index 2ec9b4f..be9e35e 100644 (file)
@@ -30,6 +30,7 @@
 #include <media/IRemoteDisplayClient.h>
 #include <media/stagefright/DataSource.h>
 #include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
 
 namespace android {