#endif // DEBUG
+// Limits the number of ANativeWindows that can be allocated for video playback.
+// The limit is currently set to 2 as that is the current max number of
+// simultaneous HW decodes that our OMX implementation allows. This forces the
+// media producer to use their own SW decoders for subsequent video streams.
+#define MAX_WINDOW_COUNT 2
+
namespace WebCore {
-MediaTexture::MediaTexture(jobject weakWebViewRef) : android::LightRefBase<MediaTexture>()
+MediaTexture::MediaTexture(jobject webViewRef) : android::LightRefBase<MediaTexture>()
{
- m_weakWebViewRef = weakWebViewRef;
- m_textureId = 0;
- m_dimensions.setEmpty();
+ if (webViewRef) {
+ JNIEnv* env = JSC::Bindings::getJNIEnv();
+ m_weakWebViewRef = env->NewWeakGlobalRef(webViewRef);
+ } else {
+ m_weakWebViewRef = 0;
+ }
+
+ m_contentTexture = 0;
+ m_isContentInverted = false;
m_newWindowRequest = false;
- m_newWindowReady = false;
- m_mediaListener = new MediaListener(m_weakWebViewRef);
}
MediaTexture::~MediaTexture()
{
    // Tear down the content texture (if it was ever created) and every
    // remaining video texture. Pass force == true so the GL texture ids
    // are deleted immediately rather than queued on m_unusedTextures.
    if (m_contentTexture)
        deleteTexture(m_contentTexture, true);
    for (unsigned int index = 0; index < m_videoTextures.size(); index++)
        deleteTexture(m_videoTextures[index], true);

    // Release the weak reference to the Java WebView, if we hold one.
    if (m_weakWebViewRef)
        JSC::Bindings::getJNIEnv()->DeleteWeakGlobalRef(m_weakWebViewRef);
}
+bool MediaTexture::isContentInverted()
+{
+ android::Mutex::Autolock lock(m_mediaLock);
+ return m_isContentInverted;
+}
+void MediaTexture::invertContents(bool invertContent)
+{
+ android::Mutex::Autolock lock(m_mediaLock);
+ m_isContentInverted = invertContent;
+}
+
void MediaTexture::initNativeWindowIfNeeded()
{
{
android::Mutex::Autolock lock(m_mediaLock);
- if(!m_newWindowRequest)
- return;
+ // check to see if there are any unused textures to delete
+ if (m_unusedTextures.size() != 0) {
+ for (unsigned int i = 0; i < m_unusedTextures.size(); i++) {
+ glDeleteTextures(1, &m_unusedTextures[i]);
+ }
+ m_unusedTextures.clear();
+ }
- // reuse an existing texture if possible
- if (!m_textureId)
- glGenTextures(1, &m_textureId);
+ // create a content texture if none exists
+ if (!m_contentTexture) {
+ m_contentTexture = createTexture();
+
+ // send a message to the WebKit thread to notify the plugin that it can draw
+ if (m_weakWebViewRef) {
+ JNIEnv* env = JSC::Bindings::getJNIEnv();
+ jobject localWebViewRef = env->NewLocalRef(m_weakWebViewRef);
+ if (localWebViewRef) {
+ jclass wvClass = env->GetObjectClass(localWebViewRef);
+ jmethodID sendPluginDrawMsg =
+ env->GetMethodID(wvClass, "sendPluginDrawMsg", "()V");
+ env->CallVoidMethod(localWebViewRef, sendPluginDrawMsg);
+ env->DeleteLocalRef(wvClass);
+ env->DeleteLocalRef(localWebViewRef);
+ }
+ checkException(env);
+ }
+ }
- m_surfaceTexture = new android::SurfaceTexture(m_textureId);
- m_surfaceTextureClient = new android::SurfaceTextureClient(m_surfaceTexture);
+ // finally create a video texture if needed
+ if (!m_newWindowRequest)
+ return;
- //setup callback
- m_mediaListener->resetFrameAvailable();
- m_surfaceTexture->setFrameAvailableListener(m_mediaListener);
+ // add the texture and add it to the list
+ TextureWrapper* videoTexture = createTexture();
+ m_videoTextures.append(videoTexture);
+ // setup the state variables to signal the other thread
m_newWindowRequest = false;
- m_newWindowReady = true;
+ m_newWindow = videoTexture->nativeWindow;
}
+
+ // signal the WebKit thread in case it is waiting
m_newMediaRequestCond.signal();
}
-void MediaTexture::drawContent(const TransformationMatrix& matrix)
+void MediaTexture::draw(const TransformationMatrix& contentMatrix,
+ const TransformationMatrix& videoMatrix,
+ const SkRect& mediaBounds)
{
android::Mutex::Autolock lock(m_mediaLock);
- if(!m_surfaceTexture.get() || m_dimensions.isEmpty()
- || !m_mediaListener->isFrameAvailable())
+ if (mediaBounds.isEmpty())
return;
- m_surfaceTexture->updateTexImage();
+ // draw all the video textures first
+ for (unsigned int i = 0; i < m_videoTextures.size(); i++) {
- bool forceBlending = ANativeWindow_getFormat(m_surfaceTextureClient.get()) == WINDOW_FORMAT_RGB_565;
- TilesManager::instance()->shader()->drawLayerQuad(matrix, m_dimensions,
- m_textureId, 1.0f,
- forceBlending, GL_TEXTURE_EXTERNAL_OES);
-}
-
-void MediaTexture::drawVideo(const TransformationMatrix& matrix, const SkRect& parentBounds)
-{
- android::Mutex::Autolock lock(m_mediaLock);
+ TextureWrapper* video = m_videoTextures[i];
- if(!m_surfaceTexture.get() || m_dimensions.isEmpty()
- || !m_mediaListener->isFrameAvailable())
- return;
+ if (!video->surfaceTexture.get() || video->dimensions.isEmpty()
+ || !video->mediaListener->isFrameAvailable())
+ continue;
- m_surfaceTexture->updateTexImage();
+ video->surfaceTexture->updateTexImage();
- float surfaceMatrix[16];
- m_surfaceTexture->getTransformMatrix(surfaceMatrix);
+ float surfaceMatrix[16];
+ video->surfaceTexture->getTransformMatrix(surfaceMatrix);
- SkRect dimensions = m_dimensions;
- dimensions.offset(parentBounds.fLeft, parentBounds.fTop);
+ SkRect dimensions = video->dimensions;
+ dimensions.offset(mediaBounds.fLeft, mediaBounds.fTop);
#ifdef DEBUG
- if (!parentBounds.contains(dimensions)) {
- XLOG("The video exceeds is parent's bounds.");
- }
+ if (!mediaBounds.contains(dimensions)) {
+ XLOG("The video exceeds is parent's bounds.");
+ }
#endif // DEBUG
- TilesManager::instance()->shader()->drawVideoLayerQuad(matrix, surfaceMatrix,
- dimensions, m_textureId);
+ TilesManager::instance()->shader()->drawVideoLayerQuad(videoMatrix,
+ surfaceMatrix, dimensions, video->textureId);
+ }
+
+ if (!m_contentTexture->mediaListener->isFrameAvailable())
+ return;
+
+ m_contentTexture->surfaceTexture->updateTexImage();
+
+ sp<GraphicBuffer> buf = m_contentTexture->surfaceTexture->getCurrentBuffer();
+
+ PixelFormat f = buf->getPixelFormat();
+ // only attempt to use alpha blending if alpha channel exists
+ bool forceAlphaBlending = !(
+ PIXEL_FORMAT_RGBX_8888 == f ||
+ PIXEL_FORMAT_RGB_888 == f ||
+ PIXEL_FORMAT_RGB_565 == f ||
+ PIXEL_FORMAT_RGB_332 == f);
+
+ TilesManager::instance()->shader()->drawLayerQuad(contentMatrix,
+ mediaBounds,
+ m_contentTexture->textureId,
+ 1.0f, forceAlphaBlending,
+ GL_TEXTURE_EXTERNAL_OES);
}
-ANativeWindow* MediaTexture::requestNewWindow()
+ANativeWindow* MediaTexture::requestNativeWindowForVideo()
{
android::Mutex::Autolock lock(m_mediaLock);
// the window was not ready before the timeout so return it this time
- if (m_newWindowReady) {
- m_newWindowReady = false;
- return m_surfaceTextureClient.get();
+ if (ANativeWindow* window = m_newWindow.get()) {
+ m_newWindow.clear();
+ return window;
}
- // we only allow for one texture, so if one already exists return null
- else if (m_surfaceTextureClient.get()) {
+
+ // we only allow for so many textures, so return NULL if we exceed that limit
+ else if (m_videoTextures.size() >= MAX_WINDOW_COUNT) {
return 0;
}
timedOut = ret == TIMED_OUT;
}
- if (m_surfaceTextureClient.get())
- m_newWindowReady = false;
+ // if the window is ready then return it otherwise return NULL
+ if (ANativeWindow* window = m_newWindow.get()) {
+ m_newWindow.clear();
+ return window;
+ }
+ return 0;
+}
- return m_surfaceTextureClient.get();
+ANativeWindow* MediaTexture::getNativeWindowForContent()
+{
+ android::Mutex::Autolock lock(m_mediaLock);
+ if (m_contentTexture)
+ return m_contentTexture->nativeWindow.get();
+ else
+ return 0;
}
-ANativeWindow* MediaTexture::getNativeWindow()
+void MediaTexture::releaseNativeWindow(const ANativeWindow* window)
{
android::Mutex::Autolock lock(m_mediaLock);
- return m_surfaceTextureClient.get();
+ for (unsigned int i = 0; i < m_videoTextures.size(); i++) {
+ if (m_videoTextures[i]->nativeWindow.get() == window) {
+ deleteTexture(m_videoTextures[i]);
+ m_videoTextures.remove(i);
+ break;
+ }
+ }
}
-void MediaTexture::releaseNativeWindow()
+void MediaTexture::setDimensions(const ANativeWindow* window,
+ const SkRect& dimensions)
{
android::Mutex::Autolock lock(m_mediaLock);
- m_dimensions.setEmpty();
+ for (unsigned int i = 0; i < m_videoTextures.size(); i++) {
+ if (m_videoTextures[i]->nativeWindow.get() == window) {
+ m_videoTextures[i]->dimensions = dimensions;
+ break;
+ }
+ }
+}
- if (m_surfaceTexture.get())
- m_surfaceTexture->setFrameAvailableListener(0);
+void MediaTexture::setFramerateCallback(const ANativeWindow* window,
+ FramerateCallbackProc callback)
+{
+ XLOG("Release ANW %p (%p):(%p)", this, m_surfaceTexture.get(), m_surfaceTextureClient.get());
+ android::Mutex::Autolock lock(m_mediaLock);
+ for (unsigned int i = 0; i < m_videoTextures.size(); i++) {
+ if (m_videoTextures[i]->nativeWindow.get() == window) {
+ m_videoTextures[i]->mediaListener->setFramerateCallback(callback);
+ break;
+ }
+ }
+}
- // clear the strong pointer references
- m_surfaceTextureClient.clear();
- m_surfaceTexture.clear();
+MediaTexture::TextureWrapper* MediaTexture::createTexture()
+{
+ TextureWrapper* wrapper = new TextureWrapper();
+
+ // populate the wrapper
+ glGenTextures(1, &wrapper->textureId);
+ wrapper->surfaceTexture = new android::SurfaceTexture(wrapper->textureId);
+ wrapper->nativeWindow = new android::SurfaceTextureClient(wrapper->surfaceTexture);
+ wrapper->dimensions.setEmpty();
+
+ // setup callback
+ wrapper->mediaListener = new MediaListener(m_weakWebViewRef,
+ wrapper->surfaceTexture,
+ wrapper->nativeWindow);
+ wrapper->surfaceTexture->setFrameAvailableListener(wrapper->mediaListener);
+
+ return wrapper;
}
-void MediaTexture::setDimensions(const SkRect& dimensions)
+void MediaTexture::deleteTexture(TextureWrapper* texture, bool force)
{
- android::Mutex::Autolock lock(m_mediaLock);
- m_dimensions = dimensions;
+ if (texture->surfaceTexture.get())
+ texture->surfaceTexture->setFrameAvailableListener(0);
+
+ if (force)
+ glDeleteTextures(1, &texture->textureId);
+ else
+ m_unusedTextures.append(texture->textureId);
+
+ // clear the strong pointer references
+ texture->mediaListener.clear();
+ texture->nativeWindow.clear();
+ texture->surfaceTexture.clear();
+
+ delete texture;
}
} // namespace WebCore