fix [2143798] Need to figure out how to do video on Passion w/ GPU
author     Mathias Agopian <mathias@google.com>  Fri, 6 Nov 2009 07:08:00 +0000 (23:08 -0800)
committer  Mathias Agopian <mathias@google.com>  Thu, 12 Nov 2009 00:44:43 +0000 (16:08 -0800)
This builds on the EGLImage solution. We simply use copybit to convert the
YUV frame into an EGLImage created for that purpose, and then proceed with the
regular EGLImage code.
We need to do this because "regular" GL doesn't support YUV textures.

We could improve upon this by detecting exactly what the GL supports and
bypassing this extra step when it isn't required, but we'll do that later if
needed.
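
In outline, the new path works roughly as follows (an illustrative sketch
assembled from the code in the diff below; error handling and the surrounding
SurfaceFlinger plumbing are omitted):

    // Open the copybit blit engine once (done in LayerBuffer::onFirstRef).
    copybit_device_t* blitEngine = 0;
    hw_module_t const* module;
    if (hw_get_module(COPYBIT_HARDWARE_MODULE_ID, &module) == 0) {
        copybit_open(module, &blitEngine);
    }

    // Allocate an RGBX buffer that copybit can write to and GL can texture
    // from, and wrap it in an EGLImageKHR via LayerBase::initializeEglImage.
    sp<GraphicBuffer> tmp = new GraphicBuffer(w, h, HAL_PIXEL_FORMAT_RGBX_8888,
            GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_HW_2D);

    // Each frame: stretch-blit the YUV source buffer (src) into the RGBX
    // destination (dst); the EGLImage-backed texture is then drawn as usual.
    region_iterator clip(Region(Rect(dst.crop.r, dst.crop.b)));
    blitEngine->set_parameter(blitEngine, COPYBIT_TRANSFORM, 0);
    blitEngine->set_parameter(blitEngine, COPYBIT_PLANE_ALPHA, 0xFF);
    blitEngine->set_parameter(blitEngine, COPYBIT_DITHER, COPYBIT_ENABLE);
    blitEngine->stretch(blitEngine, &dst.img, &src.img,
            &dst.crop, &src.crop, &clip);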

libs/surfaceflinger/LayerBuffer.cpp
libs/surfaceflinger/LayerBuffer.h

diff --git a/libs/surfaceflinger/LayerBuffer.cpp b/libs/surfaceflinger/LayerBuffer.cpp
index a36304c..28d7c48 100644
@@ -26,6 +26,8 @@
 #include <ui/GraphicBuffer.h>
 #include <ui/PixelFormat.h>
 #include <ui/FramebufferNativeWindow.h>
+#include <ui/Rect.h>
+#include <ui/Region.h>
 
 #include <hardware/copybit.h>
 
@@ -46,12 +48,15 @@ gralloc_module_t const* LayerBuffer::sGrallocModule = 0;
 LayerBuffer::LayerBuffer(SurfaceFlinger* flinger, DisplayID display,
         const sp<Client>& client, int32_t i)
     : LayerBaseClient(flinger, display, client, i),
-      mNeedsBlending(false)
+      mNeedsBlending(false), mBlitEngine(0)
 {
 }
 
 LayerBuffer::~LayerBuffer()
 {
+    if (mBlitEngine) {
+        copybit_close(mBlitEngine);
+    }
 }
 
 void LayerBuffer::onFirstRef()
@@ -69,6 +74,10 @@ void LayerBuffer::onFirstRef()
             sGrallocModule = (gralloc_module_t const *)module;
         }
     }
+
+    if (hw_get_module(COPYBIT_HARDWARE_MODULE_ID, &module) == 0) {
+        copybit_open(module, &mBlitEngine);
+    }
 }
 
 sp<LayerBaseClient::Surface> LayerBuffer::createSurface() const
@@ -350,6 +359,35 @@ LayerBuffer::BufferSource::BufferSource(LayerBuffer& layer,
         return;
     }
 
+    if (mLayer.mBlitEngine) {
+        // create our temporary buffer and corresponding EGLImageKHR.
+        // note that the size of this buffer doesn't really matter,
+        // the final image will always be drawn with proper aspect ratio.
+
+        int w = buffers.w;
+        int h = buffers.h;
+        mTempGraphicBuffer.clear();
+        mTempGraphicBuffer = new GraphicBuffer(
+                w, h, HAL_PIXEL_FORMAT_RGBX_8888,
+                GraphicBuffer::USAGE_HW_TEXTURE |
+                GraphicBuffer::USAGE_HW_2D);
+
+        if (mTempGraphicBuffer->initCheck() == NO_ERROR) {
+            NativeBuffer& dst(mTempBuffer);
+            dst.img.w = mTempGraphicBuffer->getStride();
+            dst.img.h = mTempGraphicBuffer->getHeight();
+            dst.img.format = mTempGraphicBuffer->getPixelFormat();
+            dst.img.handle = (native_handle_t *)mTempGraphicBuffer->handle;
+            dst.img.base = 0;
+            dst.crop.l = 0;
+            dst.crop.t = 0;
+            dst.crop.r = mTempGraphicBuffer->getWidth();
+            dst.crop.b = mTempGraphicBuffer->getHeight();
+        } else {
+            mTempGraphicBuffer.clear();
+        }
+    }
+
     mBufferHeap = buffers;
     mLayer.setNeedsBlending((info.h_alpha - info.l_alpha) > 0);    
     mBufferSize = info.getScanlineSize(buffers.hor_stride)*buffers.ver_stride;
@@ -438,15 +476,35 @@ void LayerBuffer::BufferSource::onDraw(const Region& clip) const
 
 #if defined(EGL_ANDROID_image_native_buffer)
     if (mLayer.mFlags & DisplayHardware::DIRECT_TEXTURE) {
-         // NOTE: Assume the buffer is  allocated with the proper USAGE flags
-        sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
-                src.crop.r, src.crop.b, src.img.format, 
-                GraphicBuffer::USAGE_HW_TEXTURE,
-                src.img.w, src.img.handle, false);
+        copybit_device_t* copybit = mLayer.mBlitEngine;
+        if (copybit) {
+            // create our EGLImageKHR the first time
+            if (mTexture.image == EGL_NO_IMAGE_KHR) {
+                err = NO_MEMORY;
+                if (mTempGraphicBuffer!=0) {
+                    err = mLayer.initializeEglImage(
+                            mTempGraphicBuffer, &mTexture);
+                    // once the EGLImage has been created (whether it fails
+                    // or not) we don't need the graphic buffer reference
+                    // anymore.
+                    mTempGraphicBuffer.clear();
+                }
+            }
 
-        graphicBuffer->setVerticalStride(src.img.h);
+            if (err == NO_ERROR) {
+                // NOTE: Assume the buffer is allocated with the proper USAGE flags
+                const NativeBuffer& dst(mTempBuffer);
+                region_iterator clip(Region(Rect(dst.crop.r, dst.crop.b)));
+                copybit->set_parameter(copybit, COPYBIT_TRANSFORM, 0);
+                copybit->set_parameter(copybit, COPYBIT_PLANE_ALPHA, 0xFF);
+                copybit->set_parameter(copybit, COPYBIT_DITHER, COPYBIT_ENABLE);
+                err = copybit->stretch(copybit, &dst.img, &src.img,
+                        &dst.crop, &src.crop, &clip);
 
-        err = mLayer.initializeEglImage(graphicBuffer, &mTexture);
+            }
+        } else {
+            err = INVALID_OPERATION;
+        }
     }
 #endif
     else {
diff --git a/libs/surfaceflinger/LayerBuffer.h b/libs/surfaceflinger/LayerBuffer.h
index 47482f4..1abb103 100644
@@ -135,8 +135,9 @@ private:
         status_t                        mStatus;
         ISurface::BufferHeap            mBufferHeap;
         size_t                          mBufferSize;
-        mutable sp<GraphicBuffer>       mTempBitmap;
         mutable LayerBase::Texture      mTexture;
+        NativeBuffer                    mTempBuffer;
+        mutable sp<GraphicBuffer>       mTempGraphicBuffer;
     };
     
     class OverlaySource : public Source {
@@ -205,6 +206,7 @@ private:
     sp<Surface>     mSurface;
     bool            mInvalidate;
     bool            mNeedsBlending;
+    copybit_device_t* mBlitEngine;
 };
 
 // ---------------------------------------------------------------------------