2 * Copyright (C) 2011 The Android Open Source Project
4 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
5 * use this file except in compliance with the License. You may obtain a copy of
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13 * License for the specific language governing permissions and limitations under
17 package com.android.camera;
19 import android.annotation.TargetApi;
20 import android.content.Context;
21 import android.graphics.SurfaceTexture;
22 import android.hardware.Camera;
23 import android.media.CamcorderProfile;
24 import android.media.MediaRecorder;
25 import android.os.Handler;
26 import android.os.Looper;
27 import android.util.Log;
29 import com.android.gallery3d.common.ApiHelper;
31 import java.io.FileDescriptor;
32 import java.io.IOException;
33 import java.io.Serializable;
34 import java.lang.reflect.Constructor;
35 import java.lang.reflect.InvocationHandler;
36 import java.lang.reflect.Method;
37 import java.lang.reflect.Proxy;
41 * Encapsulates the mobile filter framework components needed to record video
42 * with effects applied. Modeled after MediaRecorder.
44 @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) // uses SurfaceTexture
45 public class EffectsRecorder {
46 private static final String TAG = "EffectsRecorder";
// NOTE(review): this file's embedded line numbering jumps repeatedly (46->48,
// 103->107, ...), so source lines appear to be missing throughout this chunk;
// verify every block against the upstream AOSP EffectsRecorder.java.
// Reflection handles into the hidden android.filterfw ("Mobile Filter
// Framework") classes. Resolved once per process; see sReflectionInited and
// the constructor below.
48 private static Class<?> sClassFilter;
49 private static Method sFilterIsAvailable;
// Tracks the most recently constructed recorder instance (assigned in the
// constructor) — presumably so proxy callbacks can be routed to the live
// instance; TODO confirm against SerializableInvocationHandler (not visible here).
50 private static EffectsRecorder sEffectsRecorder;
51 // The index of the current effects recorder.
52 private static int sEffectsRecorderIndex;
54 private static boolean sReflectionInited = false;
// Listener/callback interface classes looked up by name in the constructor.
56 private static Class<?> sClsLearningDoneListener;
57 private static Class<?> sClsOnRunnerDoneListener;
58 private static Class<?> sClsOnRecordingDoneListener;
59 private static Class<?> sClsSurfaceTextureSourceListener;
61 private static Method sFilterSetInputValue;
// Constructors for android.filterfw.geometry.Point / Quad, used to build the
// recording region in setRecordingOrientation().
63 private static Constructor<?> sCtPoint;
64 private static Constructor<?> sCtQuad;
66 private static Method sLearningDoneListenerOnLearningDone;
// Object.equals/toString, matched against proxy invocations (see the
// invokeObjectEquals / invokeObjectToString helpers below).
68 private static Method sObjectEquals;
69 private static Method sObjectToString;
// android.filterfw.core.GraphRunner methods.
71 private static Class<?> sClsGraphRunner;
72 private static Method sGraphRunnerGetGraph;
73 private static Method sGraphRunnerSetDoneCallback;
74 private static Method sGraphRunnerRun;
75 private static Method sGraphRunnerGetError;
76 private static Method sGraphRunnerStop;
78 private static Method sFilterGraphGetFilter;
79 private static Method sFilterGraphTearDown;
81 private static Method sOnRunnerDoneListenerOnRunnerDone;
// android.filterfw.GraphEnvironment: creates the GL environment, loads the
// effect graphs from raw resources, and hands out GraphRunners.
83 private static Class<?> sClsGraphEnvironment;
84 private static Constructor<?> sCtGraphEnvironment;
85 private static Method sGraphEnvironmentCreateGLEnvironment;
86 private static Method sGraphEnvironmentGetRunner;
87 private static Method sGraphEnvironmentAddReferences;
88 private static Method sGraphEnvironmentLoadGraph;
89 private static Method sGraphEnvironmentGetContext;
91 private static Method sFilterContextGetGLEnvironment;
92 private static Method sGLEnvironmentIsActive;
93 private static Method sGLEnvironmentActivate;
94 private static Method sGLEnvironmentDeactivate;
95 private static Method sSurfaceTextureTargetDisconnect;
96 private static Method sOnRecordingDoneListenerOnRecordingDone;
97 private static Method sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady;
// Dynamic proxies (created in the constructor via Proxy.newProxyInstance)
// implementing the filter-framework listener interfaces above. Typed Object
// because the interfaces are only available via reflection.
99 private Object mLearningDoneListener;
100 private Object mRunnerDoneCallback;
101 private Object mSourceReadyCallback;
102 // A callback to finalize the media after the recording is done.
103 private Object mRecordingDoneListener;
// NOTE(review): numbering jumps 103 -> 107: the enclosing "static {" and
// "try {" lines of this static initializer appear to be missing here, as are
// the getMethod() argument list (line 109) and the closing braces (114-115).
107 sClassFilter = Class.forName("android.filterfw.core.Filter");
108 sFilterIsAvailable = sClassFilter.getMethod("isAvailable",
110 } catch (ClassNotFoundException ex) {
111 Log.v(TAG, "Can't find the class android.filterfw.core.Filter");
112 } catch (NoSuchMethodException e) {
113 Log.v(TAG, "Can't find the method Filter.isAvailable");
// Effect IDs accepted by setEffect().
117 public static final int EFFECT_NONE = 0;
118 public static final int EFFECT_GOOFY_FACE = 1;
119 public static final int EFFECT_BACKDROPPER = 2;
// Sub-effect parameter values for EFFECT_GOOFY_FACE (passed as the
// effectParameter of setEffect and forwarded to the "goofyrenderer" filter).
121 public static final int EFFECT_GF_SQUEEZE = 0;
122 public static final int EFFECT_GF_BIG_EYES = 1;
123 public static final int EFFECT_GF_BIG_MOUTH = 2;
124 public static final int EFFECT_GF_SMALL_MOUTH = 3;
125 public static final int EFFECT_GF_BIG_NOSE = 4;
126 public static final int EFFECT_GF_SMALL_EYES = 5;
127 public static final int NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;
// Messages delivered to EffectsListener.onEffectsUpdate() via sendMessage().
129 public static final int EFFECT_MSG_STARTED_LEARNING = 0;
130 public static final int EFFECT_MSG_DONE_LEARNING = 1;
131 public static final int EFFECT_MSG_SWITCHING_EFFECT = 2;
132 public static final int EFFECT_MSG_EFFECTS_STOPPED = 3;
133 public static final int EFFECT_MSG_RECORDING_DONE = 4;
134 public static final int EFFECT_MSG_PREVIEW_RUNNING = 5;
// Per-instance recording configuration, mirroring MediaRecorder's setters.
136 private Context mContext;
137 private Handler mHandler;
139 private CameraManager.CameraProxy mCameraDevice;
140 private CamcorderProfile mProfile;
// Time-lapse capture rate in fps; <= 0 means normal (non-time-lapse) recording.
141 private double mCaptureRate = 0;
142 private SurfaceTexture mPreviewSurfaceTexture;
143 private int mPreviewWidth;
144 private int mPreviewHeight;
145 private MediaRecorder.OnInfoListener mInfoListener;
146 private MediaRecorder.OnErrorListener mErrorListener;
// Exactly one of mOutputFile / mFd is expected to be set before
// startRecording() (see the null check there).
148 private String mOutputFile;
149 private FileDescriptor mFd;
150 private int mOrientationHint = 0;
151 private long mMaxFileSize = 0;
152 private int mMaxDurationMs = 0;
153 private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
154 private int mCameraDisplayOrientation;
// mEffect is the requested effect; mCurrentEffect is the one the running
// graph was built for (they differ briefly while switching effects).
156 private int mEffect = EFFECT_NONE;
157 private int mCurrentEffect = EFFECT_NONE;
158 private EffectsListener mEffectsListener;
160 private Object mEffectParameter;
162 private Object mGraphEnv;
163 private int mGraphId;
// Current and previous graph runners; the old runner is torn down in the
// runner-done callback after a switch or shutdown.
164 private Object mRunner = null;
165 private Object mOldRunner = null;
167 private SurfaceTexture mTextureSource;
// Lifecycle state machine; transitions happen under "synchronized" methods.
169 private static final int STATE_CONFIGURE = 0;
170 private static final int STATE_WAITING_FOR_SURFACE = 1;
171 private static final int STATE_STARTING_PREVIEW = 2;
172 private static final int STATE_PREVIEW = 3;
173 private static final int STATE_RECORD = 4;
174 private static final int STATE_RELEASED = 5;
175 private int mState = STATE_CONFIGURE;
177 private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
178 private SoundClips.Player mSoundPlayer;
180 /** Determine if a given effect is supported at runtime
181 * Some effects require libraries not available on all devices
// NOTE(review): numbering jumps 184 -> 188 and 193 -> 197: the opening
// "try { switch (effectId) {" lines and the default branch / closing braces
// of this method appear to be missing from this chunk.
183 public static boolean isEffectSupported(int effectId) {
// Reflection lookup failed at class-load time; report all effects unsupported.
184 if (sFilterIsAvailable == null) return false;
// Each effect is supported iff its backing filter class is available on
// this device (checked via the static Filter.isAvailable(String) method).
188 case EFFECT_GOOFY_FACE:
189 return (Boolean) sFilterIsAvailable.invoke(null,
190 "com.google.android.filterpacks.facedetect.GoofyRenderFilter");
191 case EFFECT_BACKDROPPER:
192 return (Boolean) sFilterIsAvailable.invoke(null,
193 "android.filterpacks.videoproc.BackDropperFilter");
197 } catch (Exception ex) {
198 Log.e(TAG, "Fail to check filter", ex);
// Constructor: one-time (per process) reflection resolution of all hidden
// filter-framework classes/methods, then per-instance proxy listeners,
// main-looper handler, and sound player.
// NOTE(review): interior numbering gaps (e.g. 204->206, 212->215, 287->290)
// indicate missing lines — including the "try {" that pairs with the catch at
// line 286, the float.class argument continuation of sCtPoint, and
// "mContext = context" — so this block is not compilable as shown.
203 public EffectsRecorder(Context context) {
204 if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
206 if (!sReflectionInited) {
208 sFilterSetInputValue = sClassFilter.getMethod("setInputValue",
209 new Class[] {String.class, Object.class});
211 Class<?> clsPoint = Class.forName("android.filterfw.geometry.Point");
212 sCtPoint = clsPoint.getConstructor(new Class[] {float.class,
215 Class<?> clsQuad = Class.forName("android.filterfw.geometry.Quad");
216 sCtQuad = clsQuad.getConstructor(new Class[] {clsPoint, clsPoint,
217 clsPoint, clsPoint});
219 Class<?> clsBackDropperFilter = Class.forName(
220 "android.filterpacks.videoproc.BackDropperFilter");
221 sClsLearningDoneListener = Class.forName(
222 "android.filterpacks.videoproc.BackDropperFilter$LearningDoneListener");
223 sLearningDoneListenerOnLearningDone = sClsLearningDoneListener
224 .getMethod("onLearningDone", new Class[] {clsBackDropperFilter});
226 sObjectEquals = Object.class.getMethod("equals", new Class[] {Object.class});
227 sObjectToString = Object.class.getMethod("toString");
229 sClsOnRunnerDoneListener = Class.forName(
230 "android.filterfw.core.GraphRunner$OnRunnerDoneListener");
231 sOnRunnerDoneListenerOnRunnerDone = sClsOnRunnerDoneListener.getMethod(
232 "onRunnerDone", new Class[] {int.class});
234 sClsGraphRunner = Class.forName("android.filterfw.core.GraphRunner");
235 sGraphRunnerGetGraph = sClsGraphRunner.getMethod("getGraph");
236 sGraphRunnerSetDoneCallback = sClsGraphRunner.getMethod(
237 "setDoneCallback", new Class[] {sClsOnRunnerDoneListener});
238 sGraphRunnerRun = sClsGraphRunner.getMethod("run");
239 sGraphRunnerGetError = sClsGraphRunner.getMethod("getError");
240 sGraphRunnerStop = sClsGraphRunner.getMethod("stop");
242 Class<?> clsFilterContext = Class.forName("android.filterfw.core.FilterContext");
243 sFilterContextGetGLEnvironment = clsFilterContext.getMethod(
246 Class<?> clsFilterGraph = Class.forName("android.filterfw.core.FilterGraph");
247 sFilterGraphGetFilter = clsFilterGraph.getMethod("getFilter",
248 new Class[] {String.class});
249 sFilterGraphTearDown = clsFilterGraph.getMethod("tearDown",
250 new Class[] {clsFilterContext});
252 sClsGraphEnvironment = Class.forName("android.filterfw.GraphEnvironment");
253 sCtGraphEnvironment = sClsGraphEnvironment.getConstructor();
254 sGraphEnvironmentCreateGLEnvironment = sClsGraphEnvironment.getMethod(
255 "createGLEnvironment");
256 sGraphEnvironmentGetRunner = sClsGraphEnvironment.getMethod(
257 "getRunner", new Class[] {int.class, int.class});
258 sGraphEnvironmentAddReferences = sClsGraphEnvironment.getMethod(
259 "addReferences", new Class[] {Object[].class});
260 sGraphEnvironmentLoadGraph = sClsGraphEnvironment.getMethod(
261 "loadGraph", new Class[] {Context.class, int.class});
262 sGraphEnvironmentGetContext = sClsGraphEnvironment.getMethod(
265 Class<?> clsGLEnvironment = Class.forName("android.filterfw.core.GLEnvironment");
266 sGLEnvironmentIsActive = clsGLEnvironment.getMethod("isActive");
267 sGLEnvironmentActivate = clsGLEnvironment.getMethod("activate");
268 sGLEnvironmentDeactivate = clsGLEnvironment.getMethod("deactivate");
270 Class<?> clsSurfaceTextureTarget = Class.forName(
271 "android.filterpacks.videosrc.SurfaceTextureTarget");
272 sSurfaceTextureTargetDisconnect = clsSurfaceTextureTarget.getMethod(
273 "disconnect", new Class[] {clsFilterContext});
275 sClsOnRecordingDoneListener = Class.forName(
276 "android.filterpacks.videosink.MediaEncoderFilter$OnRecordingDoneListener");
277 sOnRecordingDoneListenerOnRecordingDone =
278 sClsOnRecordingDoneListener.getMethod("onRecordingDone");
280 sClsSurfaceTextureSourceListener = Class.forName(
281 "android.filterpacks.videosrc.SurfaceTextureSource$SurfaceTextureSourceListener");
282 sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady =
283 sClsSurfaceTextureSourceListener.getMethod(
284 "onSurfaceTextureSourceReady",
285 new Class[] {SurfaceTexture.class});
// Reflection failure here is fatal (unlike the soft isAvailable probe in the
// static initializer): without these handles the recorder cannot work at all.
286 } catch (Exception ex) {
287 throw new RuntimeException(ex);
290 sReflectionInited = true;
// Each new instance gets a fresh index; the proxies below carry that index
// so stale callbacks from a previous instance can be recognized — TODO
// confirm in SerializableInvocationHandler (not visible in this chunk).
293 sEffectsRecorderIndex++;
294 Log.v(TAG, "Current effects recorder index is " + sEffectsRecorderIndex);
295 sEffectsRecorder = this;
296 SerializableInvocationHandler sih = new SerializableInvocationHandler(
297 sEffectsRecorderIndex);
// One dynamic proxy per listener interface, all sharing the same handler.
298 mLearningDoneListener = Proxy.newProxyInstance(
299 sClsLearningDoneListener.getClassLoader(),
300 new Class[] {sClsLearningDoneListener}, sih);
301 mRunnerDoneCallback = Proxy.newProxyInstance(
302 sClsOnRunnerDoneListener.getClassLoader(),
303 new Class[] {sClsOnRunnerDoneListener}, sih);
304 mSourceReadyCallback = Proxy.newProxyInstance(
305 sClsSurfaceTextureSourceListener.getClassLoader(),
306 new Class[] {sClsSurfaceTextureSourceListener}, sih);
307 mRecordingDoneListener = Proxy.newProxyInstance(
308 sClsOnRecordingDoneListener.getClassLoader(),
309 new Class[] {sClsOnRecordingDoneListener}, sih);
// Messages to the app are posted on the main looper.
312 mHandler = new Handler(Looper.getMainLooper());
313 mSoundPlayer = SoundClips.getPlayer(context);
// NOTE(review): in every setter below the numbering gaps (e.g. 316->319,
// 331->334) show that the enclosing "switch (mState) { case ... :" lines and
// their closing braces are missing from this chunk; only the throw statements
// and final assignments survive. Verify against upstream before editing.
// Sets the camera over which effects preview/recording runs. Only legal
// outside of preview/recording and before release.
316 public synchronized void setCamera(CameraManager.CameraProxy cameraDevice) {
319 throw new RuntimeException("setCamera cannot be called while previewing!");
321 throw new RuntimeException("setCamera cannot be called while recording!");
323 throw new RuntimeException("setCamera called on an already released recorder!");
328 mCameraDevice = cameraDevice;
// Sets the CamcorderProfile used for recording dimensions/encoding.
// NOTE(review): the "mProfile = profile" assignment (lines 337-341) is not
// visible in this chunk.
331 public void setProfile(CamcorderProfile profile) {
334 throw new RuntimeException("setProfile cannot be called while recording!");
336 throw new RuntimeException("setProfile called on an already released recorder!");
// Output by path. startRecording() requires either this or the FD variant.
343 public void setOutputFile(String outputFile) {
346 throw new RuntimeException("setOutputFile cannot be called while recording!");
348 throw new RuntimeException("setOutputFile called on an already released recorder!");
353 mOutputFile = outputFile;
// Output by file descriptor; the "mFd = fd" assignment is not visible here.
357 public void setOutputFile(FileDescriptor fd) {
360 throw new RuntimeException("setOutputFile cannot be called while recording!");
362 throw new RuntimeException("setOutputFile called on an already released recorder!");
372 * Sets the maximum filesize (in bytes) of the recording session.
373 * This will be passed on to the MediaEncoderFilter and then to the
374 * MediaRecorder ultimately. If zero or negative, the MediaRecorder will
377 public synchronized void setMaxFileSize(long maxFileSize) {
380 throw new RuntimeException("setMaxFileSize cannot be called while recording!");
382 throw new RuntimeException(
383 "setMaxFileSize called on an already released recorder!");
387 mMaxFileSize = maxFileSize;
391 * Sets the maximum recording duration (in ms) for the next recording session
392 * Setting it to zero (the default) disables the limit.
394 public synchronized void setMaxDuration(int maxDurationMs) {
397 throw new RuntimeException("setMaxDuration cannot be called while recording!");
399 throw new RuntimeException(
400 "setMaxDuration called on an already released recorder!");
404 mMaxDurationMs = maxDurationMs;
// Time-lapse capture rate in fps; the "mCaptureRate = fps" assignment is not
// visible in this chunk (presumably at line ~420).
408 public void setCaptureRate(double fps) {
411 throw new RuntimeException("setCaptureRate cannot be called while recording!");
413 throw new RuntimeException(
414 "setCaptureRate called on an already released recorder!");
419 if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
// Supplies the display SurfaceTexture the effect graph renders preview into.
// If we were waiting for a surface, this resumes preview startup; if preview
// is starting, the effect graph is re-initialized with the new surface.
423 public void setPreviewSurfaceTexture(SurfaceTexture previewSurfaceTexture,
426 if (mLogVerbose) Log.v(TAG, "setPreviewSurfaceTexture(" + this + ")");
429 throw new RuntimeException(
430 "setPreviewSurfaceTexture cannot be called while recording!");
432 throw new RuntimeException(
433 "setPreviewSurfaceTexture called on an already released recorder!");
438 mPreviewSurfaceTexture = previewSurfaceTexture;
439 mPreviewWidth = previewWidth;
440 mPreviewHeight = previewHeight;
443 case STATE_WAITING_FOR_SURFACE:
446 case STATE_STARTING_PREVIEW:
448 initializeEffect(true);
// Selects the effect (and its parameter) to apply; takes effect immediately
// when preview is running or starting.
// NOTE(review): dereferences effectParameter.toString() before any null
// check — a null parameter would NPE here; the null guard lives in
// startPreview() instead.
453 public void setEffect(int effect, Object effectParameter) {
454 if (mLogVerbose) Log.v(TAG,
455 "setEffect: effect ID " + effect +
456 ", parameter " + effectParameter.toString());
459 throw new RuntimeException("setEffect cannot be called while recording!");
461 throw new RuntimeException("setEffect called on an already released recorder!");
467 mEffectParameter = effectParameter;
469 if (mState == STATE_PREVIEW ||
470 mState == STATE_STARTING_PREVIEW) {
471 initializeEffect(false);
// Callback interface for effect lifecycle messages (EFFECT_MSG_*) and errors.
475 public interface EffectsListener {
476 public void onEffectsUpdate(int effectId, int effectMsg);
477 public void onEffectsError(Exception exception, String filePath);
480 public void setEffectsListener(EffectsListener listener) {
481 mEffectsListener = listener;
// Pushes the orientation hint into the goofy-face graph's rotate filters so
// face detection sees upright frames; no-op for other effects.
484 private void setFaceDetectOrientation() {
485 if (mCurrentEffect == EFFECT_GOOFY_FACE) {
486 Object rotateFilter = getGraphFilter(mRunner, "rotate");
487 Object metaRotateFilter = getGraphFilter(mRunner, "metarotate");
488 setInputValue(rotateFilter, "rotation", mOrientationHint);
// Metadata rotates the opposite way from the image.
489 int reverseDegrees = (360 - mOrientationHint) % 360;
490 setInputValue(metaRotateFilter, "rotation", reverseDegrees);
// Computes the quad region the recorder filter samples from, mirroring it for
// front-facing cameras (whose preview is mirrored). Skipped mid-recording.
494 private void setRecordingOrientation() {
495 if (mState != STATE_RECORD && mRunner != null) {
// Unit-square corners: bottom-left, bottom-right, top-left, top-right.
496 Object bl = newInstance(sCtPoint, new Object[] {0, 0});
497 Object br = newInstance(sCtPoint, new Object[] {1, 0});
498 Object tl = newInstance(sCtPoint, new Object[] {0, 1});
499 Object tr = newInstance(sCtPoint, new Object[] {1, 1});
500 Object recordingRegion;
501 if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
502 // The back camera is not mirrored, so use a identity transform
503 recordingRegion = newInstance(sCtQuad, new Object[] {bl, br, tl, tr});
505 // Recording region needs to be tweaked for front cameras, since they
506 // mirror their preview
507 if (mOrientationHint == 0 || mOrientationHint == 180) {
508 // Horizontal flip in landscape
509 recordingRegion = newInstance(sCtQuad, new Object[] {br, bl, tr, tl});
511 // Horizontal flip in portrait
512 recordingRegion = newInstance(sCtQuad, new Object[] {tl, tr, bl, br});
515 Object recorder = getGraphFilter(mRunner, "recorder");
516 setInputValue(recorder, "inputRegion", recordingRegion);
// Sets the video orientation hint and propagates it to the running graph.
// NOTE(review): the gap 519->522 hides the state check guarding this throw.
519 public void setOrientationHint(int degrees) {
522 throw new RuntimeException(
523 "setOrientationHint called on an already released recorder!");
527 if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
528 mOrientationHint = degrees;
529 setFaceDetectOrientation();
530 setRecordingOrientation();
// Display orientation is baked into the graph at initializeFilterFramework()
// time (it swaps recording width/height), so it may only change pre-config.
533 public void setCameraDisplayOrientation(int orientation) {
534 if (mState != STATE_CONFIGURE) {
535 throw new RuntimeException(
536 "setCameraDisplayOrientation called after configuration!");
538 mCameraDisplayOrientation = orientation;
541 public void setCameraFacing(int facing) {
544 throw new RuntimeException(
// NOTE(review): typo "alrady" in this user-visible exception message; fix
// upstream ("already") rather than in this garbled copy.
545 "setCameraFacing called on alrady released recorder!");
549 mCameraFacing = facing;
550 setRecordingOrientation();
// Listener plumbing mirroring MediaRecorder.setOnInfoListener/OnErrorListener;
// both are forwarded to the "recorder" filter in startRecording().
553 public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
556 throw new RuntimeException("setInfoListener cannot be called while recording!");
558 throw new RuntimeException(
559 "setInfoListener called on an already released recorder!");
563 mInfoListener = infoListener;
566 public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
569 throw new RuntimeException("setErrorListener cannot be called while recording!");
571 throw new RuntimeException(
572 "setErrorListener called on an already released recorder!");
576 mErrorListener = errorListener;
// Builds the GraphEnvironment + GL environment and registers the references
// (callbacks, recording size/profile) that the effect graphs bind by name.
579 private void initializeFilterFramework() {
580 mGraphEnv = newInstance(sCtGraphEnvironment);
581 invoke(mGraphEnv, sGraphEnvironmentCreateGLEnvironment);
583 int videoFrameWidth = mProfile.videoFrameWidth;
584 int videoFrameHeight = mProfile.videoFrameHeight;
// The camera sensor is rotated 90/270 relative to the display in portrait;
// swap recording dimensions so the encoded video matches the display aspect.
585 if (mCameraDisplayOrientation == 90 || mCameraDisplayOrientation == 270) {
586 int tmp = videoFrameWidth;
587 videoFrameWidth = videoFrameHeight;
588 videoFrameHeight = tmp;
// These names ("textureSourceCallback", "recordingProfile", ...) must match
// the @external references declared in the graph files (R.raw.goofy_face /
// R.raw.backdropper) — TODO confirm, graphs not visible here.
591 invoke(mGraphEnv, sGraphEnvironmentAddReferences,
592 new Object[] {new Object[] {
593 "textureSourceCallback", mSourceReadyCallback,
594 "recordingWidth", videoFrameWidth,
595 "recordingHeight", videoFrameHeight,
596 "recordingProfile", mProfile,
597 "learningDoneListener", mLearningDoneListener,
598 "recordingDoneListener", mRecordingDoneListener}});
// No graph loaded yet; initializeEffect() selects and loads one.
601 mCurrentEffect = EFFECT_NONE;
// (Re)loads the effect graph when forced, when the requested effect changed,
// or always for BACKDROPPER (which must re-learn its background). Then wires
// a new GraphRunner and configures the effect-specific filters.
// NOTE(review): numbering gaps (607->609, 618->622, 653->657) hide the "if"
// body braces, the "switch (mEffect) {" opener, "break;" lines, and the
// default-case text around 632-634 — the structure here is not complete.
604 private synchronized void initializeEffect(boolean forceReset) {
606 mCurrentEffect != mEffect ||
607 mCurrentEffect == EFFECT_BACKDROPPER) {
// Re-register preview-surface references; these can change between calls
// (e.g. after setPreviewSurfaceTexture or an orientation change).
609 invoke(mGraphEnv, sGraphEnvironmentAddReferences,
610 new Object[] {new Object[] {
611 "previewSurfaceTexture", mPreviewSurfaceTexture,
612 "previewWidth", mPreviewWidth,
613 "previewHeight", mPreviewHeight,
614 "orientation", mOrientationHint}});
615 if (mState == STATE_PREVIEW ||
616 mState == STATE_STARTING_PREVIEW) {
617 // Switching effects while running. Inform video camera.
618 sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT)
622 case EFFECT_GOOFY_FACE:
623 mGraphId = (Integer) invoke(mGraphEnv,
624 sGraphEnvironmentLoadGraph,
625 new Object[] {mContext, R.raw.goofy_face});
627 case EFFECT_BACKDROPPER:
// Backdropper starts by learning the background; tell the UI now.
628 sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
629 mGraphId = (Integer) invoke(mGraphEnv,
630 sGraphEnvironmentLoadGraph,
631 new Object[] {mContext, R.raw.backdropper});
634 throw new RuntimeException("Unknown effect ID" + mEffect + "!");
636 mCurrentEffect = mEffect;
// Keep the previous runner so the done-callback can tear it down.
638 mOldRunner = mRunner;
639 mRunner = invoke(mGraphEnv, sGraphEnvironmentGetRunner,
640 new Object[] {mGraphId,
641 getConstant(sClsGraphEnvironment, "MODE_ASYNCHRONOUS")});
642 invoke(mRunner, sGraphRunnerSetDoneCallback, new Object[] {mRunnerDoneCallback});
644 Log.v(TAG, "New runner: " + mRunner
645 + ". Old runner: " + mOldRunner);
647 if (mState == STATE_PREVIEW ||
648 mState == STATE_STARTING_PREVIEW) {
649 // Switching effects while running. Stop existing runner.
650 // The stop callback will take care of starting new runner.
651 mCameraDevice.stopPreview();
652 mCameraDevice.setPreviewTextureAsync(null);
653 invoke(mOldRunner, sGraphRunnerStop);
// Per-effect filter configuration on the (possibly new) runner's graph.
657 switch (mCurrentEffect) {
658 case EFFECT_GOOFY_FACE:
659 tryEnableVideoStabilization(true);
660 Object goofyFilter = getGraphFilter(mRunner, "goofyrenderer");
661 setInputValue(goofyFilter, "currentEffect",
662 ((Integer) mEffectParameter).intValue());
664 case EFFECT_BACKDROPPER:
665 tryEnableVideoStabilization(false);
666 Object backgroundSrc = getGraphFilter(mRunner, "background");
667 if (ApiHelper.HAS_EFFECTS_RECORDING_CONTEXT_INPUT) {
668 // Set the context first before setting sourceUrl to
669 // guarantee the content URI get resolved properly.
670 setInputValue(backgroundSrc, "context", mContext);
672 setInputValue(backgroundSrc, "sourceUrl", mEffectParameter);
673 // For front camera, the background video needs to be mirrored in the
674 // backdropper filter
675 if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
676 Object replacer = getGraphFilter(mRunner, "replacer");
677 setInputValue(replacer, "mirrorBg", true);
678 if (mLogVerbose) Log.v(TAG, "Setting the background to be mirrored");
684 setFaceDetectOrientation();
685 setRecordingOrientation();
// Starts effect preview: validates configuration, builds the filter
// framework, loads the effect graph, and kicks off the async runner. The
// remainder of startup completes in the surface-ready callback.
// NOTE(review): the "switch (mState)" opener and several case labels/braces
// are missing here (gaps 689->692, 700->705); also the RELEASED-state message
// at line 700 says "setEffect" where upstream presumably means startPreview —
// verify against AOSP.
688 public synchronized void startPreview() {
689 if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");
692 case STATE_STARTING_PREVIEW:
694 // Already running preview
695 Log.w(TAG, "startPreview called when already running preview");
698 throw new RuntimeException("Cannot start preview when already recording!");
700 throw new RuntimeException("setEffect called on an already released recorder!");
705 if (mEffect == EFFECT_NONE) {
706 throw new RuntimeException("No effect selected!");
708 if (mEffectParameter == null) {
709 throw new RuntimeException("No effect parameter provided!");
711 if (mProfile == null) {
712 throw new RuntimeException("No recording profile provided!");
// No display surface yet: park in WAITING_FOR_SURFACE;
// setPreviewSurfaceTexture() will resume startup.
714 if (mPreviewSurfaceTexture == null) {
715 if (mLogVerbose) Log.v(TAG, "Passed a null surface; waiting for valid one")
716 mState = STATE_WAITING_FOR_SURFACE;
719 if (mCameraDevice == null) {
720 throw new RuntimeException("No camera to record from!");
723 if (mLogVerbose) Log.v(TAG, "Initializing filter framework and running the graph.");
724 initializeFilterFramework();
726 initializeEffect(true);
728 mState = STATE_STARTING_PREVIEW;
729 invoke(mRunner, sGraphRunnerRun);
730 // Rest of preview startup handled in mSourceReadyCallback
// Identity-based equals for the dynamic proxies (invoked by the shared
// InvocationHandler when Object.equals is called on a proxy).
733 private Object invokeObjectEquals(Object proxy, Object[] args) {
734 return Boolean.valueOf(proxy == args[0]);
737 private Object invokeObjectToString() {
738 return "Proxy-" + toString();
// BackDropperFilter learning-done: forward to the UI listener.
// NOTE(review): gap 743->745 hides the condition (presumably a state check)
// wrapping this sendMessage call.
741 private void invokeOnLearningDone() {
742 if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
743 // Called in a processing thread, so have to post message back to UI
745 sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
// GraphRunner done-callback: reports errors, tears down the old graph, then
// either restarts the (new) runner for an effect switch or signals shutdown.
// Runs on a filter-framework thread; all work is under the instance lock.
// NOTE(review): several lines are missing (752-753 log guard, 760/762
// braces, 767-769 raiseError call, 807, 814) — the visible text is a
// fragment of the upstream method.
749 private void invokeOnRunnerDone(Object[] args) {
750 int runnerDoneResult = (Integer) args[0];
751 synchronized (EffectsRecorder.this) {
754 "Graph runner done (" + EffectsRecorder.this
755 + ", mRunner " + mRunner
756 + ", mOldRunner " + mOldRunner + ")");
758 if (runnerDoneResult ==
759 (Integer) getConstant(sClsGraphRunner, "RESULT_ERROR")) {
761 Log.e(TAG, "Error running filter graph!");
763 if (mRunner != null) {
764 e = (Exception) invoke(mRunner, sGraphRunnerGetError);
765 } else if (mOldRunner != null) {
766 e = (Exception) invoke(mOldRunner, sGraphRunnerGetError);
770 if (mOldRunner != null) {
771 // Tear down old graph if available
772 if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
// Tear-down must run with the GL environment active; activate it if it is
// not, and deactivate afterwards to restore the prior state.
773 Object glEnv = getContextGLEnvironment(mGraphEnv);
774 if (glEnv != null && !(Boolean) invoke(glEnv, sGLEnvironmentIsActive)) {
775 invoke(glEnv, sGLEnvironmentActivate);
777 getGraphTearDown(mOldRunner,
778 invoke(mGraphEnv, sGraphEnvironmentGetContext));
779 if (glEnv != null && (Boolean) invoke(glEnv, sGLEnvironmentIsActive)) {
780 invoke(glEnv, sGLEnvironmentDeactivate);
784 if (mState == STATE_PREVIEW ||
785 mState == STATE_STARTING_PREVIEW) {
786 // Switching effects, start up the new runner
788 Log.v(TAG, "Previous effect halted. Running graph again. state: "
791 tryEnable3ALocks(false);
792 // In case of an error, the graph restarts from beginning and in case
793 // of the BACKDROPPER effect, the learner re-learns the background.
794 // Hence, we need to show the learning dialogue to the user
795 // to avoid recording before the learning is done. Else, the user
796 // could start recording before the learning is done and the new
797 // background comes up later leading to an end result video
798 // with a heterogeneous background.
799 // For BACKDROPPER effect, this path is also executed sometimes at
800 // the end of a normal recording session. In such a case, the graph
801 // does not restart and hence the learner does not re-learn. So we
802 // do not want to show the learning dialogue then.
803 if (runnerDoneResult == (Integer) getConstant(
804 sClsGraphRunner, "RESULT_ERROR")
805 && mCurrentEffect == EFFECT_BACKDROPPER) {
806 sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
808 invoke(mRunner, sGraphRunnerRun);
809 } else if (mState != STATE_RELEASED) {
810 // Shutting down effects
811 if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
812 tryEnable3ALocks(false);
813 sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
815 // STATE_RELEASED - camera will be/has been released as well, do nothing.
// SurfaceTextureSource ready-callback: receives the graph's input
// SurfaceTexture, connects the camera preview to it, and completes the
// transition to STATE_PREVIEW. Runs on a filter-framework thread.
// NOTE(review): multiple lines missing (825, 831-832 "return;", 835-838,
// 852-858, 865, 867, 870) — e.g. the early "return;" statements after each
// skip-log and the else-branch around the null-source case are not visible.
820 private void invokeOnSurfaceTextureSourceReady(Object[] args) {
821 SurfaceTexture source = (SurfaceTexture) args[0];
822 if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
823 synchronized (EffectsRecorder.this) {
824 mTextureSource = source;
826 if (mState == STATE_CONFIGURE) {
827 // Stop preview happened while the runner was doing startup tasks
828 // Since we haven't started anything up, don't do anything
829 // Rest of cleanup will happen in onRunnerDone
830 if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping.");
833 if (mState == STATE_RELEASED) {
834 // EffectsRecorder has been released, so don't touch the camera device
836 if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping.");
839 if (source == null) {
841 Log.v(TAG, "Ready callback: source null! Looks like graph was closed!");
843 if (mState == STATE_PREVIEW ||
844 mState == STATE_STARTING_PREVIEW ||
845 mState == STATE_RECORD) {
846 // A null source here means the graph is shutting down
847 // unexpectedly, so we need to turn off preview before
848 // the surface texture goes away.
850 Log.v(TAG, "Ready callback: State: " + mState
851 + ". stopCameraPreview");
859 // Lock AE/AWB to reduce transition flicker
860 tryEnable3ALocks(true);
862 mCameraDevice.stopPreview();
863 if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
// Route camera frames into the graph's SurfaceTexture instead of the display.
864 mCameraDevice.setPreviewTextureAsync(mTextureSource);
866 mCameraDevice.startPreviewAsync();
868 // Unlock AE/AWB after preview started
869 tryEnable3ALocks(false);
871 mState = STATE_PREVIEW;
873 if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");
875 // Sending a message to listener that preview is complete
876 sendMessage(mCurrentEffect, EFFECT_MSG_PREVIEW_RUNNING);
// MediaEncoderFilter recording-done callback: forward to the listener.
880 private void invokeOnRecordingDone() {
881 // Forward the callback to the VideoModule object (as an asynchronous event).
882 if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
883 sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
// Begins recording by configuring the graph's "recorder" filter (output
// target, audio source, profile, orientation, time-lapse interval, limits,
// listeners) and flipping its "recording" input to true.
// NOTE(review): missing lines include the state-switch opener (888-890), the
// "throw ... Cannot start recording before preview is active" path around
// 903-905, the if/else around the FD-vs-file choice (908/910), and closing
// braces (927-931) — verify against upstream before editing.
886 public synchronized void startRecording() {
887 if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");
891 throw new RuntimeException("Already recording, cannot begin anew!");
893 throw new RuntimeException(
894 "startRecording called on an already released recorder!");
899 if ((mOutputFile == null) && (mFd == null)) {
900 throw new RuntimeException("No output file name or descriptor provided!");
903 if (mState == STATE_CONFIGURE) {
907 Object recorder = getGraphFilter(mRunner, "recorder");
// FD output takes precedence over a path when both are set — TODO confirm;
// the surrounding if/else (lines 908/910) is not visible here.
909 setInputValue(recorder, "outputFileDescriptor", mFd);
911 setInputValue(recorder, "outputFile", mOutputFile);
913 // It is ok to set the audiosource without checking for timelapse here
914 // since that check will be done in the MediaEncoderFilter itself
915 setInputValue(recorder, "audioSource", MediaRecorder.AudioSource.CAMCORDER);
916 setInputValue(recorder, "recordingProfile", mProfile);
917 setInputValue(recorder, "orientationHint", mOrientationHint);
918 // Important to set the timelapseinterval to 0 if the capture rate is not >0
919 // since the recorder does not get created every time the recording starts.
920 // The recorder infers whether the capture is timelapsed based on the value of
922 boolean captureTimeLapse = mCaptureRate > 0;
923 if (captureTimeLapse) {
// Interval between captured frames, converted from fps to microseconds.
924 double timeBetweenFrameCapture = 1 / mCaptureRate;
925 setInputValue(recorder, "timelapseRecordingIntervalUs",
926 (long) (1000000 * timeBetweenFrameCapture));
929 setInputValue(recorder, "timelapseRecordingIntervalUs", 0L);
932 if (mInfoListener != null) {
933 setInputValue(recorder, "infoListener", mInfoListener);
935 if (mErrorListener != null) {
936 setInputValue(recorder, "errorListener", mErrorListener);
938 setInputValue(recorder, "maxFileSize", mMaxFileSize);
939 setInputValue(recorder, "maxDurationMs", mMaxDurationMs);
// Flipping "recording" to true is what actually starts the encoder.
940 setInputValue(recorder, "recording", true);
941 mSoundPlayer.play(SoundClips.START_VIDEO_RECORDING);
942 mState = STATE_RECORD;
// Stops recording by flipping the recorder filter's "recording" input back
// to false; preview keeps running (state returns to PREVIEW).
// NOTE(review): the switch opener (947-948), "return;" after the warning
// (953), and case/brace lines (954, 956-958) are missing here.
945 public synchronized void stopRecording() {
946 if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");
949 case STATE_CONFIGURE:
950 case STATE_STARTING_PREVIEW:
952 Log.w(TAG, "StopRecording called when recording not active!");
955 throw new RuntimeException("stopRecording called on released EffectsRecorder!");
959 Object recorder = getGraphFilter(mRunner, "recorder");
960 setInputValue(recorder, "recording", false);
961 mSoundPlayer.play(SoundClips.STOP_VIDEO_RECORDING);
962 mState = STATE_PREVIEW;
965 // Called to tell the filter graph that the display surfacetexture is not valid anymore.
966 // So the filter graph should not hold any reference to the surface created with that.
967 public synchronized void disconnectDisplay() {
968 if (mLogVerbose) Log.v(TAG, "Disconnecting the graph from the " +
// Disconnect the "display" SurfaceTextureTarget filter from its surface,
// passing the graph's FilterContext as required by disconnect(FilterContext).
970 Object display = getGraphFilter(mRunner, "display");
971 invoke(display, sSurfaceTextureTargetDisconnect, new Object[] {
972 invoke(mGraphEnv, sGraphEnvironmentGetContext)});
975 // The VideoModule will call this to notify that the camera is being
976 // released to the outside world. This call should happen after the
977 // stopRecording call. Else, the effects may throw an exception.
978 // With the recording stopped, the stopPreview call will not try to
979 // release the camera again.
980 // This must be called in onPause() if the effects are ON.
// NOTE(review): gap 982->984 hides the guard (presumably a STATE_RECORD
// check with stopCameraPreview()) that precedes nulling mCameraDevice.
981 public synchronized void disconnectCamera() {
982 if (mLogVerbose) Log.v(TAG, "Disconnecting the effects from Camera");
984 mCameraDevice = null;
987 // In a normal case, when the disconnect is not called, we should not
988 // set the camera device to null, since on return callback, we try to
989 // enable 3A locks, which need the cameradevice.
990 public synchronized void stopCameraPreview() {
991 if (mLogVerbose) Log.v(TAG, "Stopping camera preview.");
992 if (mCameraDevice == null) {
993 Log.d(TAG, "Camera already null. Nothing to disconnect");
// NOTE(review): the "return;" after this log (line ~994) is not visible;
// without it the calls below would NPE — the gap confirms missing lines.
996 mCameraDevice.stopPreview();
997 mCameraDevice.setPreviewTextureAsync(null);
1000 // Stop and release effect resources
// Stops the effect pipeline: stops any active recording, detaches the
// camera, and asks the runner to stop; final teardown happens in
// mRunnerDoneCallback (invokeOnRunnerDone).
1001 public synchronized void stopPreview() {
1002 if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");
1004 case STATE_CONFIGURE:
1005 Log.w(TAG, "StopPreview called when preview not active!");
1007 case STATE_RELEASED:
1008 throw new RuntimeException("stopPreview called on released EffectsRecorder!");
// Stop recording first (line 1014, not visible) before halting the graph.
1013 if (mState == STATE_RECORD) {
1017 mCurrentEffect = EFFECT_NONE;
1019 // This will not do anything if the camera has already been disconnected.
1020 stopCameraPreview();
1022 mState = STATE_CONFIGURE;
1023 mOldRunner = mRunner;
1024 invoke(mRunner, sGraphRunnerStop);
1026 // Rest of stop and release handled in mRunnerDoneCallback
1029 // Try to enable/disable video stabilization if supported; otherwise return false
1030 // It is called from a synchronized block.
1031 boolean tryEnableVideoStabilization(boolean toggle) {
1032 if (mLogVerbose) Log.v(TAG, "tryEnableVideoStabilization.");
1033 if (mCameraDevice == null) {
1034 Log.d(TAG, "Camera already null. Not enabling video stabilization.");
1037 Camera.Parameters params = mCameraDevice.getParameters();
1039 String vstabSupported = params.get("video-stabilization-supported");
1040 if ("true".equals(vstabSupported)) {
1041 if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
1042 params.set("video-stabilization", toggle ? "true" : "false");
1043 mCameraDevice.setParameters(params);
1046 if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
1050 // Try to enable/disable 3A locks if supported; otherwise return false
1051 @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
1052 synchronized boolean tryEnable3ALocks(boolean toggle) {
1053 if (mLogVerbose) Log.v(TAG, "tryEnable3ALocks");
1054 if (mCameraDevice == null) {
1055 Log.d(TAG, "Camera already null. Not tryenabling 3A locks.");
1058 Camera.Parameters params = mCameraDevice.getParameters();
1059 if (Util.isAutoExposureLockSupported(params) &&
1060 Util.isAutoWhiteBalanceLockSupported(params)) {
1061 params.setAutoExposureLock(toggle);
1062 params.setAutoWhiteBalanceLock(toggle);
1063 mCameraDevice.setParameters(params);
1069 // Try to enable/disable 3A locks if supported; otherwise, throw error
1070 // Use this when locks are essential to success
1071 synchronized void enable3ALocks(boolean toggle) {
1072 if (mLogVerbose) Log.v(TAG, "Enable3ALocks");
1073 if (mCameraDevice == null) {
1074 Log.d(TAG, "Camera already null. Not enabling 3A locks.");
1077 Camera.Parameters params = mCameraDevice.getParameters();
1078 if (!tryEnable3ALocks(toggle)) {
1079 throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
1083 static class SerializableInvocationHandler
1084 implements InvocationHandler, Serializable {
1085 private final int mEffectsRecorderIndex;
1086 public SerializableInvocationHandler(int index) {
1087 mEffectsRecorderIndex = index;
1091 public Object invoke(Object proxy, Method method, Object[] args)
1093 if (sEffectsRecorder == null) return null;
1094 if (mEffectsRecorderIndex != sEffectsRecorderIndex) {
1095 Log.v(TAG, "Ignore old callback " + mEffectsRecorderIndex);
1098 if (method.equals(sObjectEquals)) {
1099 return sEffectsRecorder.invokeObjectEquals(proxy, args);
1100 } else if (method.equals(sObjectToString)) {
1101 return sEffectsRecorder.invokeObjectToString();
1102 } else if (method.equals(sLearningDoneListenerOnLearningDone)) {
1103 sEffectsRecorder.invokeOnLearningDone();
1104 } else if (method.equals(sOnRunnerDoneListenerOnRunnerDone)) {
1105 sEffectsRecorder.invokeOnRunnerDone(args);
1106 } else if (method.equals(
1107 sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady)) {
1108 sEffectsRecorder.invokeOnSurfaceTextureSourceReady(args);
1109 } else if (method.equals(sOnRecordingDoneListenerOnRecordingDone)) {
1110 sEffectsRecorder.invokeOnRecordingDone();
1116 // Indicates that all camera/recording activity needs to halt
1117 public synchronized void release() {
1118 if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");
1122 case STATE_STARTING_PREVIEW:
1127 if (mSoundPlayer != null) {
1128 mSoundPlayer.release();
1129 mSoundPlayer = null;
1131 mState = STATE_RELEASED;
1134 sEffectsRecorder = null;
1137 private void sendMessage(final int effect, final int msg) {
1138 if (mEffectsListener != null) {
1139 mHandler.post(new Runnable() {
1142 mEffectsListener.onEffectsUpdate(effect, msg);
1148 private void raiseError(final Exception exception) {
1149 if (mEffectsListener != null) {
1150 mHandler.post(new Runnable() {
1154 mEffectsListener.onEffectsError(exception, null);
1156 mEffectsListener.onEffectsError(exception, mOutputFile);
1163 // invoke method on receiver with no arguments
1164 private Object invoke(Object receiver, Method method) {
1166 return method.invoke(receiver);
1167 } catch (Exception ex) {
1168 throw new RuntimeException(ex);
1172 // invoke method on receiver with arguments
1173 private Object invoke(Object receiver, Method method, Object[] args) {
1175 return method.invoke(receiver, args);
1176 } catch (Exception ex) {
1177 throw new RuntimeException(ex);
1181 private void setInputValue(Object receiver, String key, Object value) {
1183 sFilterSetInputValue.invoke(receiver, new Object[] {key, value});
1184 } catch (Exception ex) {
1185 throw new RuntimeException(ex);
1189 private Object newInstance(Constructor<?> ct, Object[] initArgs) {
1191 return ct.newInstance(initArgs);
1192 } catch (Exception ex) {
1193 throw new RuntimeException(ex);
1197 private Object newInstance(Constructor<?> ct) {
1199 return ct.newInstance();
1200 } catch (Exception ex) {
1201 throw new RuntimeException(ex);
1205 private Object getGraphFilter(Object receiver, String name) {
1207 return sFilterGraphGetFilter.invoke(sGraphRunnerGetGraph
1208 .invoke(receiver), new Object[] {name});
1209 } catch (Exception ex) {
1210 throw new RuntimeException(ex);
1214 private Object getContextGLEnvironment(Object receiver) {
1216 return sFilterContextGetGLEnvironment
1217 .invoke(sGraphEnvironmentGetContext.invoke(receiver));
1218 } catch (Exception ex) {
1219 throw new RuntimeException(ex);
1223 private void getGraphTearDown(Object receiver, Object filterContext) {
1225 sFilterGraphTearDown.invoke(sGraphRunnerGetGraph.invoke(receiver),
1226 new Object[]{filterContext});
1227 } catch (Exception ex) {
1228 throw new RuntimeException(ex);
1232 private Object getConstant(Class<?> cls, String name) {
1234 return cls.getDeclaredField(name).get(null);
1235 } catch (Exception ex) {
1236 throw new RuntimeException(ex);