
wifi-display: add support for metadata mode on encoder output
author    Chong Zhang <chz@google.com>    Sat, 4 May 2013 04:54:17 +0000 (21:54 -0700)
committer Chong Zhang <chz@google.com>    Fri, 17 May 2013 23:18:28 +0000 (16:18 -0700)
pass buffer_handle_t from encoder output to HDCP encryptor input

Bug: 8968123

Change-Id: Iea8007ce568641e213fd2e3cf6947a6f7a95746c
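
In metadata mode the encoder's output port no longer carries the bitstream by value: each OMX output buffer holds a small metadata record, a 4-byte buffer-type field followed by the buffer_handle_t of the gralloc buffer that actually contains the encoded frame, and that handle is what gets handed to the HDCP module's new encryptNative(). A minimal sketch of unpacking such a buffer, assuming only what the patch itself does (the 4-byte skip mirrors ACodec::BaseState::onOMXFillBufferDone() below; the helper name is illustrative, not part of the patch):

    // Sketch only: unpacking a metadata-mode encoder output buffer.
    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>
    #include <cutils/native_handle.h>

    // Hypothetical helper, not part of the patch.
    static native_handle_t *extractOutputHandle(const uint8_t *data, size_t size) {
        native_handle_t *handle = NULL;
        if (size >= 4 + sizeof(handle)) {
            // First 4 bytes: OMX metadata buffer type; the handle follows.
            memcpy(&handle, data + 4, sizeof(handle));
        }
        return handle;
    }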

16 files changed:
include/media/IHDCP.h
include/media/stagefright/ACodec.h
media/libmedia/IHDCP.cpp
media/libmediaplayerservice/HDCP.cpp
media/libmediaplayerservice/HDCP.h
media/libstagefright/ACodec.cpp
media/libstagefright/wifi-display/MediaSender.cpp
media/libstagefright/wifi-display/VideoFormats.cpp
media/libstagefright/wifi-display/VideoFormats.h
media/libstagefright/wifi-display/source/Converter.cpp
media/libstagefright/wifi-display/source/Converter.h
media/libstagefright/wifi-display/source/PlaybackSession.cpp
media/libstagefright/wifi-display/source/PlaybackSession.h
media/libstagefright/wifi-display/source/TSPacketizer.cpp
media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
media/libstagefright/wifi-display/source/WifiDisplaySource.h

diff --git a/include/media/IHDCP.h b/include/media/IHDCP.h
index 6d27b18..54fefa3 100644
@@ -17,6 +17,7 @@
 #include <binder/IInterface.h>
 #include <media/hardware/HDCPAPI.h>
 #include <media/stagefright/foundation/ABase.h>
+#include <ui/GraphicBuffer.h>
 
 namespace android {
 
@@ -59,6 +60,20 @@ struct IHDCP : public IInterface {
             const void *inData, size_t size, uint32_t streamCTR,
             uint64_t *outInputCTR, void *outData) = 0;
 
+    // Encrypt data according to the HDCP spec. "size" bytes of data starting
+    // at location "offset" are available in "buffer" (buffer handle). "size"
+    // may not be a multiple of 128 bits (16 bytes). An equal number of
+    // encrypted bytes should be written to the buffer at "outData" (virtual
+    // address). This operation is to be synchronous, i.e. this call does not
+    // return until outData contains size bytes of encrypted data.
+    // streamCTR will be assigned by the caller (to 0 for the first PES stream,
+    // 1 for the second and so on)
+    // inputCTR _will_be_maintained_by_the_callee_ for each PES stream.
+    virtual status_t encryptNative(
+            const sp<GraphicBuffer> &graphicBuffer,
+            size_t offset, size_t size, uint32_t streamCTR,
+            uint64_t *outInputCTR, void *outData) = 0;
+
     // DECRYPTION only:
     // Decrypt data according to the HDCP spec.
     // "size" bytes of encrypted data are available at "inData"
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index 34bae29..391c089 100644
@@ -182,7 +182,7 @@ private:
 
     bool mSentFormat;
     bool mIsEncoder;
-
+    bool mUseMetadataOnEncoderOutput;
     bool mShutdownInProgress;
 
     // If "mKeepComponentAllocated" we only transition back to Loaded state
diff --git a/media/libmedia/IHDCP.cpp b/media/libmedia/IHDCP.cpp
index f13addc..a46ff91 100644
@@ -31,6 +31,7 @@ enum {
     HDCP_INIT_ASYNC,
     HDCP_SHUTDOWN_ASYNC,
     HDCP_ENCRYPT,
+    HDCP_ENCRYPT_NATIVE,
     HDCP_DECRYPT,
 };
 
@@ -108,6 +109,31 @@ struct BpHDCP : public BpInterface<IHDCP> {
         return err;
     }
 
+    virtual status_t encryptNative(
+            const sp<GraphicBuffer> &graphicBuffer,
+            size_t offset, size_t size, uint32_t streamCTR,
+            uint64_t *outInputCTR, void *outData) {
+        Parcel data, reply;
+        data.writeInterfaceToken(IHDCP::getInterfaceDescriptor());
+        data.write(*graphicBuffer);
+        data.writeInt32(offset);
+        data.writeInt32(size);
+        data.writeInt32(streamCTR);
+        remote()->transact(HDCP_ENCRYPT_NATIVE, data, &reply);
+
+        status_t err = reply.readInt32();
+
+        if (err != OK) {
+            *outInputCTR = 0;
+            return err;
+        }
+
+        *outInputCTR = reply.readInt64();
+        reply.read(outData, size);
+
+        return err;
+    }
+
     virtual status_t decrypt(
             const void *inData, size_t size,
             uint32_t streamCTR, uint64_t inputCTR,
@@ -222,6 +248,34 @@ status_t BnHDCP::onTransact(
             return OK;
         }
 
+        case HDCP_ENCRYPT_NATIVE:
+        {
+            CHECK_INTERFACE(IHDCP, data, reply);
+
+            sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
+            data.read(*graphicBuffer);
+            size_t offset = data.readInt32();
+            size_t size = data.readInt32();
+            uint32_t streamCTR = data.readInt32();
+            void *outData = malloc(size);
+            uint64_t inputCTR;
+
+            status_t err = encryptNative(graphicBuffer, offset, size,
+                                         streamCTR, &inputCTR, outData);
+
+            reply->writeInt32(err);
+
+            if (err == OK) {
+                reply->writeInt64(inputCTR);
+                reply->write(outData, size);
+            }
+
+            free(outData);
+            outData = NULL;
+
+            return OK;
+        }
+
         case HDCP_DECRYPT:
         {
             size_t size = data.readInt32();
diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp
index 469a02e..8a3188c 100644
@@ -116,6 +116,24 @@ status_t HDCP::encrypt(
     return mHDCPModule->encrypt(inData, size, streamCTR, outInputCTR, outData);
 }
 
+status_t HDCP::encryptNative(
+        const sp<GraphicBuffer> &graphicBuffer,
+        size_t offset, size_t size, uint32_t streamCTR,
+        uint64_t *outInputCTR, void *outData) {
+    Mutex::Autolock autoLock(mLock);
+
+    CHECK(mIsEncryptionModule);
+
+    if (mHDCPModule == NULL) {
+        *outInputCTR = 0;
+
+        return NO_INIT;
+    }
+
+    return mHDCPModule->encryptNative(graphicBuffer->handle,
+                    offset, size, streamCTR, outInputCTR, outData);
+}
+
 status_t HDCP::decrypt(
         const void *inData, size_t size,
         uint32_t streamCTR, uint64_t outInputCTR, void *outData) {
diff --git a/media/libmediaplayerservice/HDCP.h b/media/libmediaplayerservice/HDCP.h
index 42e6467..c60c2e0 100644
@@ -35,6 +35,11 @@ struct HDCP : public BnHDCP {
             const void *inData, size_t size, uint32_t streamCTR,
             uint64_t *outInputCTR, void *outData);
 
+    virtual status_t encryptNative(
+            const sp<GraphicBuffer> &graphicBuffer,
+            size_t offset, size_t size, uint32_t streamCTR,
+            uint64_t *outInputCTR, void *outData);
+
     virtual status_t decrypt(
             const void *inData, size_t size,
             uint32_t streamCTR, uint64_t outInputCTR, void *outData);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 64e3885..d3ac734 100644
@@ -359,6 +359,7 @@ ACodec::ACodec()
       mNode(NULL),
       mSentFormat(false),
       mIsEncoder(false),
+      mUseMetadataOnEncoderOutput(false),
       mShutdownInProgress(false),
       mEncoderDelay(0),
       mEncoderPadding(0),
@@ -483,7 +484,8 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
                         ? OMXCodec::kRequiresAllocateBufferOnInputPorts
                         : OMXCodec::kRequiresAllocateBufferOnOutputPorts;
 
-                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
+                if ((portIndex == kPortIndexInput && (mFlags & kFlagIsSecure))
+                        || mUseMetadataOnEncoderOutput) {
                     mem.clear();
 
                     void *ptr;
@@ -491,7 +493,10 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
                             mNode, portIndex, def.nBufferSize, &info.mBufferID,
                             &ptr);
 
-                    info.mData = new ABuffer(ptr, def.nBufferSize);
+                    int32_t bufSize = mUseMetadataOnEncoderOutput ?
+                            (4 + sizeof(buffer_handle_t)) : def.nBufferSize;
+
+                    info.mData = new ABuffer(ptr, bufSize);
                 } else if (mQuirks & requiresAllocateBufferBit) {
                     err = mOMX->allocateBufferWithBackup(
                             mNode, portIndex, mem, &info.mBufferID);
@@ -912,14 +917,14 @@ status_t ACodec::configureCodec(
         err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE);
 
         if (err != OK) {
-            ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
-                  mComponentName.c_str(), err);
+              ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
+                    mComponentName.c_str(), err);
 
-            return err;
-        }
-    }
+              return err;
+          }
+      }
 
-    int32_t prependSPSPPS;
+    int32_t prependSPSPPS = 0;
     if (encoder
             && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS)
             && prependSPSPPS != 0) {
@@ -946,7 +951,27 @@ status_t ACodec::configureCodec(
         }
     }
 
-    if (!strncasecmp(mime, "video/", 6)) {
+    // Only enable metadata mode on encoder output if encoder can prepend
+    // sps/pps to idr frames, since in metadata mode the bitstream is in an
+    // opaque handle, to which we don't have access.
+    int32_t video = !strncasecmp(mime, "video/", 6);
+    if (encoder && video) {
+        OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS
+            && msg->findInt32("store-metadata-in-buffers-output", &storeMeta)
+            && storeMeta != 0);
+
+        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable);
+
+        if (err != OK) {
+            ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d",
+                mComponentName.c_str(), err);
+            mUseMetadataOnEncoderOutput = 0;
+        } else {
+            mUseMetadataOnEncoderOutput = enable;
+        }
+    }
+
+    if (video) {
         if (encoder) {
             err = setupVideoEncoder(mime, msg);
         } else {
@@ -3061,7 +3086,15 @@ bool ACodec::BaseState::onOMXFillBufferDone(
                 mCodec->sendFormatChange();
             }
 
-            info->mData->setRange(rangeOffset, rangeLength);
+            if (mCodec->mUseMetadataOnEncoderOutput) {
+                native_handle_t* handle =
+                        *(native_handle_t**)(info->mData->data() + 4);
+                info->mData->meta()->setPointer("handle", handle);
+                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
+                info->mData->meta()->setInt32("rangeLength", rangeLength);
+            } else {
+                info->mData->setRange(rangeOffset, rangeLength);
+            }
 #if 0
             if (mCodec->mNativeWindow == NULL) {
                 if (IsIDR(info->mData)) {
@@ -3215,6 +3248,7 @@ void ACodec::UninitializedState::stateEntered() {
     mCodec->mOMX.clear();
     mCodec->mQuirks = 0;
     mCodec->mFlags = 0;
+    mCodec->mUseMetadataOnEncoderOutput = 0;
     mCodec->mComponentName.clear();
 }
 
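
With mUseMetadataOnEncoderOutput set, ACodec no longer exposes bitstream bytes through the output ABuffer's data range; instead it stores the native_handle_t plus the real offset and length on the buffer's meta message. Downstream code distinguishes the two cases exactly the way Converter and MediaSender do further down; a small sketch under that assumption (the helper name is illustrative):

    // Sketch: reading the keys ACodec attaches in onOMXFillBufferDone().
    #include <cutils/native_handle.h>
    #include <media/stagefright/foundation/ABuffer.h>
    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    static bool getEncodedRange(
            const sp<ABuffer> &buffer, native_handle_t **handle,
            int32_t *offset, int32_t *length) {
        if (!buffer->meta()->findPointer("handle", (void **)handle)
                || *handle == NULL) {
            return false;  // regular output buffer; use data()/size() instead
        }
        CHECK(buffer->meta()->findInt32("rangeOffset", offset));
        CHECK(buffer->meta()->findInt32("rangeLength", length));
        return true;
    }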
diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp
index 33af66d..a230cd8 100644
 #include "include/avc_utils.h"
 
 #include <media/IHDCP.h>
+#include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <ui/GraphicBuffer.h>
 
 namespace android {
 
@@ -408,11 +410,36 @@ status_t MediaSender::packetizeAccessUnit(
                     info.mPacketizerTrackIndex, accessUnit);
         }
 
-        status_t err = mHDCP->encrypt(
-                accessUnit->data(), accessUnit->size(),
-                trackIndex  /* streamCTR */,
-                &inputCTR,
-                accessUnit->data());
+        status_t err;
+        native_handle_t* handle;
+        if (accessUnit->meta()->findPointer("handle", (void**)&handle)
+                && handle != NULL) {
+            int32_t rangeLength, rangeOffset;
+            sp<AMessage> notify;
+            CHECK(accessUnit->meta()->findInt32("rangeOffset", &rangeOffset));
+            CHECK(accessUnit->meta()->findInt32("rangeLength", &rangeLength));
+            CHECK(accessUnit->meta()->findMessage("notify", &notify)
+                    && notify != NULL);
+            CHECK_GE(accessUnit->size(), rangeLength);
+
+            sp<GraphicBuffer> grbuf(new GraphicBuffer(
+                    rangeOffset + rangeLength, 1, HAL_PIXEL_FORMAT_Y8,
+                    GRALLOC_USAGE_HW_VIDEO_ENCODER, rangeOffset + rangeLength,
+                    handle, false));
+
+            err = mHDCP->encryptNative(
+                    grbuf, rangeOffset, rangeLength,
+                    trackIndex  /* streamCTR */,
+                    &inputCTR,
+                    accessUnit->data());
+            notify->post();
+        } else {
+            err = mHDCP->encrypt(
+                    accessUnit->data(), accessUnit->size(),
+                    trackIndex  /* streamCTR */,
+                    &inputCTR,
+                    accessUnit->data());
+        }
 
         if (err != OK) {
             ALOGE("Failed to HDCP-encrypt media data (err %d)",
diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp
index 458b163..c368c38 100644
@@ -24,7 +24,8 @@
 
 namespace android {
 
-VideoFormats::config_t VideoFormats::mConfigs[][32] = {
+// static
+const VideoFormats::config_t VideoFormats::mResolutionTable[][32] = {
     {
         // CEA Resolutions
         { 640, 480, 60, false, 0, 0},
@@ -133,6 +134,8 @@ VideoFormats::config_t VideoFormats::mConfigs[][32] = {
 };
 
 VideoFormats::VideoFormats() {
+    memcpy(mConfigs, mResolutionTable, sizeof(mConfigs));
+
     for (size_t i = 0; i < kNumResolutionTypes; ++i) {
         mResolutionEnabled[i] = 0;
     }
@@ -182,11 +185,56 @@ void VideoFormats::setResolutionEnabled(
 
     if (enabled) {
         mResolutionEnabled[type] |= (1ul << index);
+        mConfigs[type][index].profile = (1ul << PROFILE_CBP);
+        mConfigs[type][index].level = (1ul << LEVEL_31);
     } else {
         mResolutionEnabled[type] &= ~(1ul << index);
+        mConfigs[type][index].profile = 0;
+        mConfigs[type][index].level = 0;
     }
 }
 
+void VideoFormats::setProfileLevel(
+        ResolutionType type, size_t index,
+        ProfileType profile, LevelType level) {
+    CHECK_LT(type, kNumResolutionTypes);
+    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+    mConfigs[type][index].profile = (1ul << profile);
+    mConfigs[type][index].level = (1ul << level);
+}
+
+void VideoFormats::getProfileLevel(
+        ResolutionType type, size_t index,
+        ProfileType *profile, LevelType *level) const{
+    CHECK_LT(type, kNumResolutionTypes);
+    CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+    int i, bestProfile = -1, bestLevel = -1;
+
+    for (i = 0; i < kNumProfileTypes; ++i) {
+        if (mConfigs[type][index].profile & (1ul << i)) {
+            bestProfile = i;
+        }
+    }
+
+    for (i = 0; i < kNumLevelTypes; ++i) {
+        if (mConfigs[type][index].level & (1ul << i)) {
+            bestLevel = i;
+        }
+    }
+
+    if (bestProfile == -1 || bestLevel == -1) {
+        ALOGE("Profile or level not set for resolution type %d, index %d",
+              type, index);
+        bestProfile = PROFILE_CBP;
+        bestLevel = LEVEL_31;
+    }
+
+    *profile = (ProfileType) bestProfile;
+    *level = (LevelType) bestLevel;
+}
+
 bool VideoFormats::isResolutionEnabled(
         ResolutionType type, size_t index) const {
     CHECK_LT(type, kNumResolutionTypes);
@@ -207,7 +255,7 @@ bool VideoFormats::GetConfiguration(
         return false;
     }
 
-    const config_t *config = &mConfigs[type][index];
+    const config_t *config = &mResolutionTable[type][index];
 
     if (config->width == 0) {
         return false;
@@ -251,9 +299,12 @@ bool VideoFormats::parseH264Codec(const char *spec) {
             if (res[i] & (1ul << j)){
                 mResolutionEnabled[i] |= (1ul << j);
                 if (profile > mConfigs[i][j].profile) {
+                    // prefer higher profile (even if level is lower)
                     mConfigs[i][j].profile = profile;
-                    if (level > mConfigs[i][j].level)
-                        mConfigs[i][j].level = level;
+                    mConfigs[i][j].level = level;
+                } else if (profile == mConfigs[i][j].profile &&
+                           level > mConfigs[i][j].level) {
+                    mConfigs[i][j].level = level;
                 }
             }
         }
@@ -262,9 +313,51 @@ bool VideoFormats::parseH264Codec(const char *spec) {
     return true;
 }
 
+// static
+bool VideoFormats::GetProfileLevel(
+        ProfileType profile, LevelType level, unsigned *profileIdc,
+        unsigned *levelIdc, unsigned *constraintSet) {
+    CHECK_LT(profile, kNumProfileTypes);
+    CHECK_LT(level, kNumLevelTypes);
+
+    static const unsigned kProfileIDC[kNumProfileTypes] = {
+        66,     // PROFILE_CBP
+        100,    // PROFILE_CHP
+    };
+
+    static const unsigned kLevelIDC[kNumLevelTypes] = {
+        31,     // LEVEL_31
+        32,     // LEVEL_32
+        40,     // LEVEL_40
+        41,     // LEVEL_41
+        42,     // LEVEL_42
+    };
+
+    static const unsigned kConstraintSet[kNumProfileTypes] = {
+        0xc0,   // PROFILE_CBP
+        0x0c,   // PROFILE_CHP
+    };
+
+    if (profileIdc) {
+        *profileIdc = kProfileIDC[profile];
+    }
+
+    if (levelIdc) {
+        *levelIdc = kLevelIDC[level];
+    }
+
+    if (constraintSet) {
+        *constraintSet = kConstraintSet[profile];
+    }
+
+    return true;
+}
+
 bool VideoFormats::parseFormatSpec(const char *spec) {
     CHECK_EQ(kNumResolutionTypes, 3);
 
+    disableAll();
+
     unsigned native, dummy;
     unsigned res[3];
     size_t size = strlen(spec);
@@ -320,8 +413,10 @@ AString VideoFormats::getFormatSpec(bool forM4Message) const {
     //   max-vres (none or 2 byte)
 
     return StringPrintf(
-            "%02x 00 02 02 %08x %08x %08x 00 0000 0000 00 none none",
+            "%02x 00 %02x %02x %08x %08x %08x 00 0000 0000 00 none none",
             forM4Message ? 0x00 : ((mNativeIndex << 3) | mNativeType),
+            mConfigs[mNativeType][mNativeIndex].profile,
+            mConfigs[mNativeType][mNativeIndex].level,
             mResolutionEnabled[0],
             mResolutionEnabled[1],
             mResolutionEnabled[2]);
@@ -332,7 +427,9 @@ bool VideoFormats::PickBestFormat(
         const VideoFormats &sinkSupported,
         const VideoFormats &sourceSupported,
         ResolutionType *chosenType,
-        size_t *chosenIndex) {
+        size_t *chosenIndex,
+        ProfileType *chosenProfile,
+        LevelType *chosenLevel) {
 #if 0
     // Support for the native format is a great idea, the spec includes
     // these features, but nobody supports it and the tests don't validate it.
@@ -412,6 +509,18 @@ bool VideoFormats::PickBestFormat(
     *chosenType = (ResolutionType)bestType;
     *chosenIndex = bestIndex;
 
+    // Pick the best profile/level supported by both sink and source.
+    ProfileType srcProfile, sinkProfile;
+    LevelType srcLevel, sinkLevel;
+    sourceSupported.getProfileLevel(
+                        (ResolutionType)bestType, bestIndex,
+                        &srcProfile, &srcLevel);
+    sinkSupported.getProfileLevel(
+                        (ResolutionType)bestType, bestIndex,
+                        &sinkProfile, &sinkLevel);
+    *chosenProfile = srcProfile < sinkProfile ? srcProfile : sinkProfile;
+    *chosenLevel = srcLevel < sinkLevel ? srcLevel : sinkLevel;
+
     return true;
 }
 
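
VideoFormats now tracks a per-resolution profile/level bitmask alongside the enabled-resolution bits, and PickBestFormat() additionally reports the best profile and level supported by both ends (the lower of the source's and the sink's choice). A hedged sketch of how a caller turns that negotiation into H.264 syntax values via the new static GetProfileLevel(), mirroring what WifiDisplaySource and PlaybackSession do further below (the wrapper name is illustrative):

    // Sketch: negotiate resolution plus AVC profile/level in one pass.
    #include <media/stagefright/foundation/ADebug.h>
    #include "VideoFormats.h"

    using namespace android;

    static bool negotiate(
            const VideoFormats &sinkSupported,
            const VideoFormats &sourceSupported,
            unsigned *profileIdc, unsigned *levelIdc, unsigned *constraintSet) {
        VideoFormats::ResolutionType type;
        size_t index;
        VideoFormats::ProfileType profile;
        VideoFormats::LevelType level;

        if (!VideoFormats::PickBestFormat(
                sinkSupported, sourceSupported,
                &type, &index, &profile, &level)) {
            return false;  // no commonly supported format
        }

        // Map the abstract enums to profile_idc / level_idc / constraint_set
        // bytes, e.g. PROFILE_CHP / LEVEL_32 become 100 / 32 / 0x0c.
        return VideoFormats::GetProfileLevel(
                profile, level, profileIdc, levelIdc, constraintSet);
    }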
diff --git a/media/libstagefright/wifi-display/VideoFormats.h b/media/libstagefright/wifi-display/VideoFormats.h
index 01de246..b918652 100644
@@ -75,11 +75,24 @@ struct VideoFormats {
 
     bool isResolutionEnabled(ResolutionType type, size_t index) const;
 
+    void setProfileLevel(
+            ResolutionType type, size_t index,
+            ProfileType profile, LevelType level);
+
+    void getProfileLevel(
+            ResolutionType type, size_t index,
+            ProfileType *profile, LevelType *level) const;
+
     static bool GetConfiguration(
             ResolutionType type, size_t index,
             size_t *width, size_t *height, size_t *framesPerSecond,
             bool *interlaced);
 
+    static bool GetProfileLevel(
+            ProfileType profile, LevelType level,
+            unsigned *profileIdc, unsigned *levelIdc,
+            unsigned *constraintSet);
+
     bool parseFormatSpec(const char *spec);
     AString getFormatSpec(bool forM4Message = false) const;
 
@@ -87,7 +100,9 @@ struct VideoFormats {
             const VideoFormats &sinkSupported,
             const VideoFormats &sourceSupported,
             ResolutionType *chosenType,
-            size_t *chosenIndex);
+            size_t *chosenIndex,
+            ProfileType *chosenProfile,
+            LevelType *chosenLevel);
 
 private:
     bool parseH264Codec(const char *spec);
@@ -95,7 +110,8 @@ private:
     size_t mNativeIndex;
 
     uint32_t mResolutionEnabled[kNumResolutionTypes];
-    static config_t mConfigs[kNumResolutionTypes][32];
+    static const config_t mResolutionTable[kNumResolutionTypes][32];
+    config_t mConfigs[kNumResolutionTypes][32];
 
     DISALLOW_EVIL_CONSTRUCTORS(VideoFormats);
 };
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
index 5344623..e62505d 100644
@@ -438,6 +438,17 @@ void Converter::onMessageReceived(const sp<AMessage> &msg) {
             break;
         }
 
+        case kWhatReleaseOutputBuffer:
+        {
+            if (mEncoder != NULL) {
+                size_t bufferIndex;
+                CHECK(msg->findInt32("bufferIndex", (int32_t*)&bufferIndex));
+                CHECK(bufferIndex < mEncoderOutputBuffers.size());
+                mEncoder->releaseOutputBuffer(bufferIndex);
+            }
+            break;
+        }
+
         default:
             TRESPASS();
     }
@@ -645,6 +656,7 @@ status_t Converter::doMoreWork() {
         size_t size;
         int64_t timeUs;
         uint32_t flags;
+        native_handle_t* handle = NULL;
         err = mEncoder->dequeueOutputBuffer(
                 &bufferIndex, &offset, &size, &timeUs, &flags);
 
@@ -667,18 +679,43 @@ status_t Converter::doMoreWork() {
             notify->setInt32("what", kWhatEOS);
             notify->post();
         } else {
-            sp<ABuffer> buffer = new ABuffer(size);
+            sp<ABuffer> buffer;
+            sp<ABuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex);
+
+            if (outbuf->meta()->findPointer("handle", (void**)&handle) &&
+                    handle != NULL) {
+                int32_t rangeLength, rangeOffset;
+                CHECK(outbuf->meta()->findInt32("rangeOffset", &rangeOffset));
+                CHECK(outbuf->meta()->findInt32("rangeLength", &rangeLength));
+                outbuf->meta()->setPointer("handle", NULL);
+
+                // MediaSender will post the following message when HDCP
+                // is done, to release the output buffer back to encoder.
+                sp<AMessage> notify(new AMessage(
+                        kWhatReleaseOutputBuffer, id()));
+                notify->setInt32("bufferIndex", bufferIndex);
+
+                buffer = new ABuffer(
+                        rangeLength > (int32_t)size ? rangeLength : size);
+                buffer->meta()->setPointer("handle", handle);
+                buffer->meta()->setInt32("rangeOffset", rangeOffset);
+                buffer->meta()->setInt32("rangeLength", rangeLength);
+                buffer->meta()->setMessage("notify", notify);
+            } else {
+                buffer = new ABuffer(size);
+            }
+
             buffer->meta()->setInt64("timeUs", timeUs);
 
             ALOGV("[%s] time %lld us (%.2f secs)",
                   mIsVideo ? "video" : "audio", timeUs, timeUs / 1E6);
 
-            memcpy(buffer->data(),
-                   mEncoderOutputBuffers.itemAt(bufferIndex)->base() + offset,
-                   size);
+            memcpy(buffer->data(), outbuf->base() + offset, size);
 
             if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
-                mOutputFormat->setBuffer("csd-0", buffer);
+                if (!handle) {
+                    mOutputFormat->setBuffer("csd-0", buffer);
+                }
             } else {
                 sp<AMessage> notify = mNotify->dup();
                 notify->setInt32("what", kWhatAccessUnit);
@@ -687,7 +724,9 @@ status_t Converter::doMoreWork() {
             }
         }
 
-        mEncoder->releaseOutputBuffer(bufferIndex);
+        if (!handle) {
+            mEncoder->releaseOutputBuffer(bufferIndex);
+        }
 
         if (flags & MediaCodec::BUFFER_FLAG_EOS) {
             break;
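
In metadata mode the gralloc buffer still belongs to the encoder, so the Converter must not call releaseOutputBuffer() until HDCP encryption has finished reading from it; the kWhatReleaseOutputBuffer message created above carries the buffer index back for exactly that purpose. A sketch of the consumer side, which simply posts the message attached to the access unit (the helper name is illustrative):

    // Sketch: deferred release of a metadata-mode encoder output buffer.
    #include <media/stagefright/foundation/ABuffer.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    static void releaseWhenDone(const sp<ABuffer> &accessUnit) {
        sp<AMessage> notify;
        if (accessUnit->meta()->findMessage("notify", &notify)
                && notify != NULL) {
            // Lands in Converter's kWhatReleaseOutputBuffer case above,
            // which calls mEncoder->releaseOutputBuffer(bufferIndex).
            notify->post();
        }
    }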
diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h
index ba297c4..fceef55 100644
@@ -66,6 +66,7 @@ struct Converter : public AHandler {
         kWhatMediaPullerNotify,
         kWhatEncoderActivity,
         kWhatDropAFrame,
+        kWhatReleaseOutputBuffer,
     };
 
     void shutdownAsync();
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
index cacfcca..7f0ba96 100644
@@ -378,7 +378,9 @@ status_t WifiDisplaySource::PlaybackSession::init(
         bool usePCMAudio,
         bool enableVideo,
         VideoFormats::ResolutionType videoResolutionType,
-        size_t videoResolutionIndex) {
+        size_t videoResolutionIndex,
+        VideoFormats::ProfileType videoProfileType,
+        VideoFormats::LevelType videoLevelType) {
     sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, id());
     mMediaSender = new MediaSender(mNetSession, notify);
     looper()->registerHandler(mMediaSender);
@@ -390,7 +392,9 @@ status_t WifiDisplaySource::PlaybackSession::init(
             usePCMAudio,
             enableVideo,
             videoResolutionType,
-            videoResolutionIndex);
+            videoResolutionIndex,
+            videoProfileType,
+            videoLevelType);
 
     if (err == OK) {
         err = mMediaSender->initAsync(
@@ -870,7 +874,9 @@ status_t WifiDisplaySource::PlaybackSession::setupPacketizer(
         bool usePCMAudio,
         bool enableVideo,
         VideoFormats::ResolutionType videoResolutionType,
-        size_t videoResolutionIndex) {
+        size_t videoResolutionIndex,
+        VideoFormats::ProfileType videoProfileType,
+        VideoFormats::LevelType videoLevelType) {
     CHECK(enableAudio || enableVideo);
 
     if (!mMediaPath.empty()) {
@@ -879,7 +885,8 @@ status_t WifiDisplaySource::PlaybackSession::setupPacketizer(
 
     if (enableVideo) {
         status_t err = addVideoSource(
-                videoResolutionType, videoResolutionIndex);
+                videoResolutionType, videoResolutionIndex, videoProfileType,
+                videoLevelType);
 
         if (err != OK) {
             return err;
@@ -895,9 +902,13 @@ status_t WifiDisplaySource::PlaybackSession::setupPacketizer(
 
 status_t WifiDisplaySource::PlaybackSession::addSource(
         bool isVideo, const sp<MediaSource> &source, bool isRepeaterSource,
-        bool usePCMAudio, size_t *numInputBuffers) {
+        bool usePCMAudio, unsigned profileIdc, unsigned levelIdc,
+        unsigned constraintSet, size_t *numInputBuffers) {
     CHECK(!usePCMAudio || !isVideo);
     CHECK(!isRepeaterSource || isVideo);
+    CHECK(!profileIdc || isVideo);
+    CHECK(!levelIdc || isVideo);
+    CHECK(!constraintSet || isVideo);
 
     sp<ALooper> pullLooper = new ALooper;
     pullLooper->setName("pull_looper");
@@ -927,9 +938,12 @@ status_t WifiDisplaySource::PlaybackSession::addSource(
 
     if (isVideo) {
         format->setInt32("store-metadata-in-buffers", true);
-
+        format->setInt32("store-metadata-in-buffers-output", (mHDCP != NULL));
         format->setInt32(
                 "color-format", OMX_COLOR_FormatAndroidOpaque);
+        format->setInt32("profile-idc", profileIdc);
+        format->setInt32("level-idc", levelIdc);
+        format->setInt32("constraint-set", constraintSet);
     }
 
     notify = new AMessage(kWhatConverterNotify, id());
@@ -990,7 +1004,9 @@ status_t WifiDisplaySource::PlaybackSession::addSource(
 
 status_t WifiDisplaySource::PlaybackSession::addVideoSource(
         VideoFormats::ResolutionType videoResolutionType,
-        size_t videoResolutionIndex) {
+        size_t videoResolutionIndex,
+        VideoFormats::ProfileType videoProfileType,
+        VideoFormats::LevelType videoLevelType) {
     size_t width, height, framesPerSecond;
     bool interlaced;
     CHECK(VideoFormats::GetConfiguration(
@@ -1001,6 +1017,14 @@ status_t WifiDisplaySource::PlaybackSession::addVideoSource(
                 &framesPerSecond,
                 &interlaced));
 
+    unsigned profileIdc, levelIdc, constraintSet;
+    CHECK(VideoFormats::GetProfileLevel(
+                videoProfileType,
+                videoLevelType,
+                &profileIdc,
+                &levelIdc,
+                &constraintSet));
+
     sp<SurfaceMediaSource> source = new SurfaceMediaSource(width, height);
 
     source->setUseAbsoluteTimestamps();
@@ -1011,7 +1035,8 @@ status_t WifiDisplaySource::PlaybackSession::addVideoSource(
     size_t numInputBuffers;
     status_t err = addSource(
             true /* isVideo */, videoSource, true /* isRepeaterSource */,
-            false /* usePCMAudio */, &numInputBuffers);
+            false /* usePCMAudio */, profileIdc, levelIdc, constraintSet,
+            &numInputBuffers);
 
     if (err != OK) {
         return err;
@@ -1034,7 +1059,8 @@ status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) {
     if (audioSource->initCheck() == OK) {
         return addSource(
                 false /* isVideo */, audioSource, false /* isRepeaterSource */,
-                usePCMAudio, NULL /* numInputBuffers */);
+                usePCMAudio, 0 /* profileIdc */, 0 /* levelIdc */,
+                0 /* constraintSet */, NULL /* numInputBuffers */);
     }
 
     ALOGW("Unable to instantiate audio source");
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h
index 39086a1..5c8ee94 100644
@@ -53,7 +53,9 @@ struct WifiDisplaySource::PlaybackSession : public AHandler {
             bool usePCMAudio,
             bool enableVideo,
             VideoFormats::ResolutionType videoResolutionType,
-            size_t videoResolutionIndex);
+            size_t videoResolutionIndex,
+            VideoFormats::ProfileType videoProfileType,
+            VideoFormats::LevelType videoLevelType);
 
     void destroyAsync();
 
@@ -130,18 +132,25 @@ private:
             bool usePCMAudio,
             bool enableVideo,
             VideoFormats::ResolutionType videoResolutionType,
-            size_t videoResolutionIndex);
+            size_t videoResolutionIndex,
+            VideoFormats::ProfileType videoProfileType,
+            VideoFormats::LevelType videoLevelType);
 
     status_t addSource(
             bool isVideo,
             const sp<MediaSource> &source,
             bool isRepeaterSource,
             bool usePCMAudio,
+            unsigned profileIdc,
+            unsigned levelIdc,
+            unsigned constraintSet,
             size_t *numInputBuffers);
 
     status_t addVideoSource(
             VideoFormats::ResolutionType videoResolutionType,
-            size_t videoResolutionIndex);
+            size_t videoResolutionIndex,
+            VideoFormats::ProfileType videoProfileType,
+            VideoFormats::LevelType videoLevelType);
 
     status_t addAudioSource(bool usePCMAudio);
 
diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
index 2c4a373..c674700 100644
@@ -261,12 +261,24 @@ void TSPacketizer::Track::finalize() {
             data[0] = 40;  // descriptor_tag
             data[1] = 4;  // descriptor_length
 
-            CHECK_GE(mCSD.size(), 1u);
-            const sp<ABuffer> &sps = mCSD.itemAt(0);
-            CHECK(!memcmp("\x00\x00\x00\x01", sps->data(), 4));
-            CHECK_GE(sps->size(), 7u);
-            // profile_idc, constraint_set*, level_idc
-            memcpy(&data[2], sps->data() + 4, 3);
+            if (mCSD.size() > 0) {
+                CHECK_GE(mCSD.size(), 1u);
+                const sp<ABuffer> &sps = mCSD.itemAt(0);
+                CHECK(!memcmp("\x00\x00\x00\x01", sps->data(), 4));
+                CHECK_GE(sps->size(), 7u);
+                // profile_idc, constraint_set*, level_idc
+                memcpy(&data[2], sps->data() + 4, 3);
+            } else {
+                int32_t profileIdc, levelIdc, constraintSet;
+                CHECK(mFormat->findInt32("profile-idc", &profileIdc));
+                CHECK(mFormat->findInt32("level-idc", &levelIdc));
+                CHECK(mFormat->findInt32("constraint-set", &constraintSet));
+                CHECK_GE(profileIdc, 0u);
+                CHECK_GE(levelIdc, 0u);
+                data[2] = profileIdc;    // profile_idc
+                data[3] = constraintSet; // constraint_set*
+                data[4] = levelIdc;      // level_idc
+            }
 
             // AVC_still_present=0, AVC_24_hour_picture_flag=0, reserved
             data[5] = 0x3f;
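
With no SPS available in metadata mode (the csd-0 buffer is only captured when the bitstream is visible to the Converter), the AVC video descriptor in the PMT is filled from the format's "profile-idc", "constraint-set" and "level-idc" keys instead of copying three bytes out of the SPS. A small sketch of just that descriptor payload (the function name is illustrative):

    // Sketch: AVC video descriptor (tag 40, length 4) from negotiated values.
    #include <stdint.h>

    static void fillAvcDescriptor(
            uint8_t *data, uint8_t profileIdc, uint8_t constraintSet,
            uint8_t levelIdc) {
        data[0] = 40;             // descriptor_tag
        data[1] = 4;              // descriptor_length
        data[2] = profileIdc;     // profile_idc
        data[3] = constraintSet;  // constraint_set* flags
        data[4] = levelIdc;       // level_idc
        data[5] = 0x3f;           // AVC_still_present=0, 24_hour_picture=0, reserved
    }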
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
index b2cc66c..0b714f0 100644
@@ -74,6 +74,11 @@ WifiDisplaySource::WifiDisplaySource(
 
     mSupportedSourceVideoFormats.setNativeResolution(
             VideoFormats::RESOLUTION_CEA, 5);  // 1280x720 p30
+
+    mSupportedSourceVideoFormats.setProfileLevel(
+            VideoFormats::RESOLUTION_CEA, 5,
+            VideoFormats::PROFILE_CHP,  // Constrained High Profile
+            VideoFormats::LEVEL_32);    // Level 3.2
 }
 
 WifiDisplaySource::~WifiDisplaySource() {
@@ -631,6 +636,9 @@ status_t WifiDisplaySource::sendM4(int32_t sessionID) {
         chosenVideoFormat.disableAll();
         chosenVideoFormat.setNativeResolution(
                 mChosenVideoResolutionType, mChosenVideoResolutionIndex);
+        chosenVideoFormat.setProfileLevel(
+                mChosenVideoResolutionType, mChosenVideoResolutionIndex,
+                mChosenVideoProfile, mChosenVideoLevel);
 
         body.append(chosenVideoFormat.getFormatSpec(true /* forM4Message */));
         body.append("\r\n");
@@ -859,7 +867,9 @@ status_t WifiDisplaySource::onReceiveM3Response(
                     mSupportedSinkVideoFormats,
                     mSupportedSourceVideoFormats,
                     &mChosenVideoResolutionType,
-                    &mChosenVideoResolutionIndex)) {
+                    &mChosenVideoResolutionIndex,
+                    &mChosenVideoProfile,
+                    &mChosenVideoLevel)) {
             ALOGE("Sink and source share no commonly supported video "
                   "formats.");
 
@@ -878,6 +888,9 @@ status_t WifiDisplaySource::onReceiveM3Response(
 
         ALOGI("Picked video resolution %u x %u %c%u",
               width, height, interlaced ? 'i' : 'p', framesPerSecond);
+
+        ALOGI("Picked AVC profile %d, level %d",
+              mChosenVideoProfile, mChosenVideoLevel);
     } else {
         ALOGI("Sink doesn't support video at all.");
     }
@@ -1271,7 +1284,9 @@ status_t WifiDisplaySource::onSetupRequest(
             mUsingPCMAudio,
             mSinkSupportsVideo,
             mChosenVideoResolutionType,
-            mChosenVideoResolutionIndex);
+            mChosenVideoResolutionIndex,
+            mChosenVideoProfile,
+            mChosenVideoLevel);
 
     if (err != OK) {
         looper()->unregisterHandler(playbackSession->id());
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
index 3efa0b4..64186fc 100644
@@ -134,6 +134,8 @@ private:
 
     VideoFormats::ResolutionType mChosenVideoResolutionType;
     size_t mChosenVideoResolutionIndex;
+    VideoFormats::ProfileType mChosenVideoProfile;
+    VideoFormats::LevelType mChosenVideoLevel;
 
     bool mSinkSupportsAudio;