import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.opengl.Matrix;
import android.os.ConditionVariable;
import android.util.Log;

import java.io.FileDescriptor;
import java.io.IOException;
 * provide width and height information for the SurfaceTextureSource, which it
 * should obtain from wherever the SurfaceTexture data is coming from to avoid
 * unnecessary resampling.</p>
 *
 * @hide
 */
public class SurfaceTextureSource extends Filter {
@GenerateFieldPort(name = "waitTimeout", hasDefault = true)
private int mWaitTimeout = 1000;
+ /** Whether a timeout is an exception-causing failure, or just causes the
+ * filter to close.
+ */
+ @GenerateFieldPort(name = "closeOnTimeout", hasDefault = true)
+ private boolean mCloseOnTimeout = false;
+
// Variables for input->output conversion
private GLFrame mMediaFrame;
private ShaderProgram mFrameExtractor;
private SurfaceTexture mSurfaceTexture;
private MutableFrameFormat mOutputFormat;
private ConditionVariable mNewFrameAvailable;
- private float[] mFrameTransform;
private boolean mFirstFrame;
+ private float[] mFrameTransform;
+ private float[] mMappedCoords;
+ // These default source coordinates perform the necessary flip
+ // for converting from MFF/Bitmap origin to OpenGL origin.
+ private static final float[] mSourceCoords = { 0, 1, 0, 1,
+ 1, 1, 0, 1,
+ 0, 0, 0, 1,
+ 1, 0, 0, 1 };
// Shader for output
private final String mRenderShader =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
- "uniform mat4 frame_transform;\n" +
"uniform samplerExternalOES tex_sampler_0;\n" +
"varying vec2 v_texcoord;\n" +
"void main() {\n" +
- " vec2 transformed_texcoord = (frame_transform * vec4(v_texcoord, 0., 1.) ).xy;" +
- " gl_FragColor = texture2D(tex_sampler_0, transformed_texcoord);\n" +
+ " gl_FragColor = texture2D(tex_sampler_0, v_texcoord);\n" +
"}\n";
// Variables for logging
- private static final boolean LOGV = true;
- private static final boolean LOGVV = false;
private static final String TAG = "SurfaceTextureSource";
+ private static final boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
/**
 * Creates the filter with the given instance name and pre-allocates the
 * synchronization primitive and transform scratch arrays used per frame.
 *
 * @param name filter instance name, passed through to {@link Filter}.
 */
public SurfaceTextureSource(String name) {
    super(name);
    mNewFrameAvailable = new ConditionVariable();
    mFrameTransform = new float[16];
    mMappedCoords = new float[16];
}
@Override
@Override
protected void prepare(FilterContext context) {
- if (LOGV) Log.v(TAG, "Preparing SurfaceTextureSource");
+ if (mLogVerbose) Log.v(TAG, "Preparing SurfaceTextureSource");
createFormats();
0);
// Prepare output
- mFrameExtractor = new ShaderProgram(mRenderShader);
- // SurfaceTexture defines (0,0) to be bottom-left. The filter framework
- // defines (0,0) as top-left, so do the flip here.
- mFrameExtractor.setSourceRect(0, 1, 1, -1);
+ mFrameExtractor = new ShaderProgram(context, mRenderShader);
}
@Override
public void open(FilterContext context) {
- if (LOGV) Log.v(TAG, "Opening SurfaceTextureSource");
+ if (mLogVerbose) Log.v(TAG, "Opening SurfaceTextureSource");
// Create SurfaceTexture anew each time - it can use substantial memory.
mSurfaceTexture = new SurfaceTexture(mMediaFrame.getTextureId());
- // Connect SurfaceTexture to source
- mSourceListener.onSurfaceTextureSourceReady(mSurfaceTexture);
// Connect SurfaceTexture to callback
mSurfaceTexture.setOnFrameAvailableListener(onFrameAvailableListener);
+ // Connect SurfaceTexture to source
+ mSourceListener.onSurfaceTextureSourceReady(mSurfaceTexture);
mFirstFrame = true;
}
@Override
public void process(FilterContext context) {
- if (LOGVV) Log.v(TAG, "Processing new frame");
+ if (mLogVerbose) Log.v(TAG, "Processing new frame");
// First, get new frame if available
if (mWaitForNewFrame || mFirstFrame) {
if (mWaitTimeout != 0) {
gotNewFrame = mNewFrameAvailable.block(mWaitTimeout);
if (!gotNewFrame) {
- throw new RuntimeException("Timeout waiting for new frame");
+ if (!mCloseOnTimeout) {
+ throw new RuntimeException("Timeout waiting for new frame");
+ } else {
+ if (mLogVerbose) Log.v(TAG, "Timeout waiting for a new frame. Closing.");
+ closeOutputPort("video");
+ return;
+ }
}
} else {
mNewFrameAvailable.block();
mSurfaceTexture.updateTexImage();
mSurfaceTexture.getTransformMatrix(mFrameTransform);
-
+ Matrix.multiplyMM(mMappedCoords, 0,
+ mFrameTransform, 0,
+ mSourceCoords, 0);
+ mFrameExtractor.setSourceRegion(mMappedCoords[0], mMappedCoords[1],
+ mMappedCoords[4], mMappedCoords[5],
+ mMappedCoords[8], mMappedCoords[9],
+ mMappedCoords[12], mMappedCoords[13]);
// Next, render to output
- mFrameExtractor.setHostValue("frame_transform", mFrameTransform);
-
Frame output = context.getFrameManager().newFrame(mOutputFormat);
mFrameExtractor.process(mMediaFrame, output);
+ output.setTimestamp(mSurfaceTexture.getTimestamp());
+
pushOutput("video", output);
output.release();
}
@Override
public void close(FilterContext context) {
- if (LOGV) Log.v(TAG, "SurfaceTextureSource closed");
+ if (mLogVerbose) Log.v(TAG, "SurfaceTextureSource closed");
mSourceListener.onSurfaceTextureSourceReady(null);
+ mSurfaceTexture.release();
mSurfaceTexture = null;
}
private SurfaceTexture.OnFrameAvailableListener onFrameAvailableListener =
new SurfaceTexture.OnFrameAvailableListener() {
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
- if (LOGVV) Log.v(TAG, "New frame from SurfaceTextureSource");
+ if (mLogVerbose) Log.v(TAG, "New frame from SurfaceTextureSource");
mNewFrameAvailable.open();
}
};