2 * Copyright (C) 2014 The Android Open Source Project
4 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
5 * in compliance with the License. You may obtain a copy of the License at
7 * http://www.apache.org/licenses/LICENSE-2.0
9 * Unless required by applicable law or agreed to in writing, software distributed under the License
10 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
11 * or implied. See the License for the specific language governing permissions and limitations under
15 package com.android.camera.one.v2;
17 import android.annotation.TargetApi;
18 import android.content.Context;
19 import android.graphics.ImageFormat;
20 import android.graphics.Rect;
21 import android.hardware.camera2.CameraAccessException;
22 import android.hardware.camera2.CameraCaptureSession;
23 import android.hardware.camera2.CameraCharacteristics;
24 import android.hardware.camera2.CameraDevice;
25 import android.hardware.camera2.CameraMetadata;
26 import android.hardware.camera2.CaptureRequest;
27 import android.hardware.camera2.CaptureResult;
28 import android.hardware.camera2.CaptureResult.Key;
29 import android.hardware.camera2.TotalCaptureResult;
30 import android.hardware.camera2.params.MeteringRectangle;
31 import android.hardware.camera2.params.StreamConfigurationMap;
32 import android.location.Location;
33 import android.media.CameraProfile;
34 import android.media.Image;
35 import android.media.ImageReader;
36 import android.media.MediaActionSound;
37 import android.net.Uri;
38 import android.os.Build;
39 import android.os.Handler;
40 import android.os.HandlerThread;
41 import android.os.SystemClock;
42 import androidx.core.util.Pools;
43 import android.view.Surface;
45 import com.android.camera.CaptureModuleUtil;
46 import com.android.camera.debug.Log;
47 import com.android.camera.debug.Log.Tag;
48 import com.android.camera.exif.ExifInterface;
49 import com.android.camera.exif.ExifTag;
50 import com.android.camera.exif.Rational;
51 import com.android.camera.one.AbstractOneCamera;
52 import com.android.camera.one.CameraDirectionProvider;
53 import com.android.camera.one.OneCamera;
54 import com.android.camera.one.OneCamera.PhotoCaptureParameters.Flash;
55 import com.android.camera.one.Settings3A;
56 import com.android.camera.one.v2.ImageCaptureManager.ImageCaptureListener;
57 import com.android.camera.one.v2.ImageCaptureManager.MetadataChangeListener;
58 import com.android.camera.one.v2.camera2proxy.AndroidCaptureResultProxy;
59 import com.android.camera.one.v2.camera2proxy.AndroidImageProxy;
60 import com.android.camera.one.v2.camera2proxy.CaptureResultProxy;
61 import com.android.camera.processing.imagebackend.TaskImageContainer;
62 import com.android.camera.session.CaptureSession;
63 import com.android.camera.ui.focus.LensRangeCalculator;
64 import com.android.camera.ui.motion.LinearScale;
65 import com.android.camera.util.CameraUtil;
66 import com.android.camera.util.ExifUtil;
67 import com.android.camera.util.JpegUtilNative;
68 import com.android.camera.util.ListenerCombiner;
69 import com.android.camera.util.Size;
70 import com.google.common.base.Optional;
71 import com.google.common.util.concurrent.FutureCallback;
72 import com.google.common.util.concurrent.Futures;
73 import com.google.common.util.concurrent.ListenableFuture;
75 import java.nio.ByteBuffer;
76 import java.security.InvalidParameterException;
77 import java.util.ArrayList;
78 import java.util.Collections;
79 import java.util.HashSet;
80 import java.util.List;
82 import java.util.concurrent.LinkedBlockingQueue;
83 import java.util.concurrent.ThreadPoolExecutor;
84 import java.util.concurrent.TimeUnit;
87 * {@link OneCamera} implementation directly on top of the Camera2 API with zero
89 * TODO: Determine what the maximum number of full YUV capture frames is.
91 @TargetApi(Build.VERSION_CODES.LOLLIPOP)
93 public class OneCameraZslImpl extends AbstractOneCamera {
94 private static final Tag TAG = new Tag("OneCameraZslImpl2");
96 /** Default JPEG encoding quality. */
97 private static final int JPEG_QUALITY =
98 CameraProfile.getJpegEncodingQualityParameter(CameraProfile.QUALITY_HIGH);
100 * The maximum number of images to store in the full-size ZSL ring buffer.
102 * TODO: Determine this number dynamically based on available memory and the
105 private static final int MAX_CAPTURE_IMAGES = 12;
107 * True if zero-shutter-lag images should be captured. Some devices produce
108 * lower-quality images for the high-frequency stream, so we may wish to
109 * disable ZSL in that case.
111 private static final boolean ZSL_ENABLED = true;
114 * Tags which may be used in CaptureRequests.
116 private static enum RequestTag {
118 * Indicates that the request was explicitly sent for a single
119 * high-quality still capture. Unlike other requests, such as the
120 * repeating (ZSL) stream and AF/AE triggers, requests with this tag
121 * should always be saved.
127 * Set to ImageFormat.JPEG to use the hardware encoder, or
128 * ImageFormat.YUV_420_888 to use the software encoder. No other image
129 * formats are supported.
131 private static final int sCaptureImageFormat = ImageFormat.YUV_420_888;
133 * Token for callbacks posted to {@link #mCameraHandler} to resume
136 private static final String FOCUS_RESUME_CALLBACK_TOKEN = "RESUME_CONTINUOUS_AF";
138 /** Zero weight 3A region, to reset regions per API. */
139 /*package*/ MeteringRectangle[] ZERO_WEIGHT_3A_REGION = AutoFocusHelper.getZeroWeightRegion();
142 * Thread on which high-priority camera operations, such as grabbing preview
143 * frames for the viewfinder, are running.
145 private final HandlerThread mCameraThread;
146 /** Handler of the {@link #mCameraThread}. */
147 private final Handler mCameraHandler;
149 /** Thread on which low-priority camera listeners are running. */
150 private final HandlerThread mCameraListenerThread;
151 private final Handler mCameraListenerHandler;
153 /** The characteristics of this camera. */
154 private final CameraCharacteristics mCharacteristics;
155 /** Converts focus distance units into ratio values */
156 private final LinearScale mLensRange;
157 /** The underlying Camera2 API camera device. */
158 private final CameraDevice mDevice;
159 private final CameraDirectionProvider mDirection;
162 * The aspect ratio (width/height) of the full resolution for this camera.
163 * Usually the native aspect ratio of this camera.
165 private final float mFullSizeAspectRatio;
166 /** The Camera2 API capture session currently active. */
167 private CameraCaptureSession mCaptureSession;
168 /** The surface onto which to render the preview. */
169 private Surface mPreviewSurface;
170 /** Whether closing of this device has been requested. */
171 private volatile boolean mIsClosed = false;
173 /** Receives the normal captured images. */
174 private final ImageReader mCaptureImageReader;
177 * Maintains a buffer of images and their associated {@link CaptureResult}s.
179 private ImageCaptureManager mCaptureManager;
182 * The sensor timestamps (which may not be relative to the system time) of
183 * the most recently captured images.
185 private final Set<Long> mCapturedImageTimestamps = Collections.synchronizedSet(
186 new HashSet<Long>());
188 /** Thread pool for performing slow jpeg encoding and saving tasks. */
189 private final ThreadPoolExecutor mImageSaverThreadPool;
191 /** Pool of native byte buffers on which to store jpeg-encoded images. */
192 private final Pools.SynchronizedPool<ByteBuffer> mJpegByteBufferPool =
193 new Pools.SynchronizedPool<ByteBuffer>(64);
195 /** Current zoom value. 1.0 is no zoom. */
196 private float mZoomValue = 1f;
197 /** Current crop region: set from mZoomValue. */
198 private Rect mCropRegion;
199 /** Current AE, AF, and AWB regions */
200 private MeteringRectangle[] mAFRegions = ZERO_WEIGHT_3A_REGION;
201 private MeteringRectangle[] mAERegions = ZERO_WEIGHT_3A_REGION;
203 private MediaActionSound mMediaActionSound = new MediaActionSound();
206 * Ready state (typically displayed by the UI shutter-button) depends on two
209 * <li>{@link #mCaptureManager} must be ready.</li>
210 * <li>We must not be in the process of capturing a single, high-quality,
213 * See {@link ListenerCombiner} and {@link #mReadyStateManager} for
214 * details of how this is managed.
216 private static enum ReadyStateRequirement {
217 CAPTURE_MANAGER_READY, CAPTURE_NOT_IN_PROGRESS
221 * Handles the thread-safe logic of dispatching whenever the logical AND of
222 * these constraints changes.
224 private final ListenerCombiner<ReadyStateRequirement>
225 mReadyStateManager = new ListenerCombiner<ReadyStateRequirement>(
226 ReadyStateRequirement.class, new ListenerCombiner.StateChangeListener() {
228 public void onStateChange(boolean state) {
229 broadcastReadyState(state);
234 * An {@link ImageCaptureListener} which will compress and save an image to
// disk once it is delivered from the ZSL ring buffer: it dedupes frames by
// sensor timestamp, prunes old timestamps, re-enables the shutter, then
// compresses and saves the image and notifies the capture callback.
//
// NOTE(review): this listing appears truncated (gaps in the embedded line
// numbers) — e.g. the constructor's mSession assignment and several closing
// braces are not visible here; confirm against the full file.
237 private class ImageCaptureTask implements ImageCaptureListener {
238 private final PhotoCaptureParameters mParams;
239 private final CaptureSession mSession;
241 public ImageCaptureTask(PhotoCaptureParameters parameters, CaptureSession session) {
242 mParams = parameters;
// Called when a frame satisfying the capture constraints is available.
247 public void onImageCaptured(Image image, TotalCaptureResult captureResult) {
248 long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
250 // We should only capture the image if it hasn't been captured
251 // before. Synchronization is necessary since
252 // mCapturedImageTimestamps is read & modified elsewhere.
253 synchronized (mCapturedImageTimestamps) {
254 if (!mCapturedImageTimestamps.contains(timestamp)) {
255 mCapturedImageTimestamps.add(timestamp);
257 // There was a more recent (or identical) image which has
258 // begun being saved, so abort.
262 // Clear out old timestamps from the set.
263 // We must keep old timestamps in the set a little longer (a
264 // factor of 2 seems adequate) to ensure they are cleared out of
265 // the ring buffer before their timestamp is removed from the
267 long maxTimestamps = MAX_CAPTURE_IMAGES * 2;
268 if (mCapturedImageTimestamps.size() > maxTimestamps) {
// Drop the oldest timestamps first until back under the cap.
269 ArrayList<Long> timestamps = new ArrayList<Long>(mCapturedImageTimestamps);
270 Collections.sort(timestamps);
271 for (int i = 0; i < timestamps.size()
272 && mCapturedImageTimestamps.size() > maxTimestamps; i++) {
273 mCapturedImageTimestamps.remove(timestamps.get(i));
// A frame is committed for saving: the shutter may be re-armed.
278 mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);
280 savePicture(image, mParams, mSession, captureResult);
281 mParams.callback.onPictureTaken(mSession);
282 Log.v(TAG, "Image saved. Frame number = " + captureResult.getFrameNumber());
287 * Instantiates a new camera based on Camera 2 API.
289 * @param device The underlying Camera 2 device.
290 * @param characteristics The device's characteristics.
291 * @param pictureSize the size of the final image to be taken.
// Constructor: wires up the camera/listener threads, the JPEG-saving thread
// pool, the ZSL ImageCaptureManager, the AF-state metadata listener, and the
// full-size capture ImageReader.
// NOTE(review): the assignment of mDevice from the 'device' parameter is not
// visible in this truncated listing — confirm against the full file.
293 OneCameraZslImpl(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
294 Log.v(TAG, "Creating new OneCameraZslImpl");
297 mCharacteristics = characteristics;
298 mLensRange = LensRangeCalculator
299 .getDiopterToRatioCalculator(characteristics);
300 mDirection = new CameraDirectionProvider(mCharacteristics);
301 mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);
// High-priority thread: stalls here directly delay viewfinder frames.
303 mCameraThread = new HandlerThread("OneCamera2");
304 // If this thread stalls, it will delay viewfinder frames.
305 mCameraThread.setPriority(Thread.MAX_PRIORITY);
306 mCameraThread.start();
307 mCameraHandler = new Handler(mCameraThread.getLooper());
// Separate, lower-priority thread for camera listeners.
309 mCameraListenerThread = new HandlerThread("OneCamera2-Listener");
310 mCameraListenerThread.start();
311 mCameraListenerHandler = new Handler(mCameraListenerThread.getLooper());
313 // TODO: Encoding on multiple cores results in preview jank due to
// One encoder thread per CPU core, 10s keep-alive, unbounded queue.
315 int numEncodingCores = CameraUtil.getNumCpuCores();
316 mImageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10,
317 TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
// NOTE(review): the assignment target (mCaptureManager =) for the following
// constructor call is not visible in this truncated listing.
320 new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
321 mImageSaverThreadPool);
322 mCaptureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
324 public void onReadyStateChange(boolean capturePossible) {
325 mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY,
330 // Listen for changes to auto focus state and dispatch to
331 // mFocusStateListener.
332 mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
333 new ImageCaptureManager.MetadataChangeListener() {
335 public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
336 CaptureResult result) {
// Snapshot the listener field to avoid a race with concurrent unset.
337 FocusStateListener listener = mFocusStateListener;
338 if (listener != null) {
339 listener.onFocusStatusUpdate(
340 AutoFocusHelper.stateFromCamera2State(
341 result.get(CaptureResult.CONTROL_AF_STATE)),
342 result.getFrameNumber());
347 // Allocate the image reader to store all images received from the
349 if (pictureSize == null) {
350 // TODO The default should be selected by the caller, and
351 // pictureSize should never be null.
352 pictureSize = getDefaultPictureSize();
354 mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(),
355 pictureSize.getHeight(),
356 sCaptureImageFormat, MAX_CAPTURE_IMAGES);
358 mCaptureImageReader.setOnImageAvailableListener(mCaptureManager, mCameraHandler);
359 mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
// Registers the given focus-distance listener. The camera2 metadata listener
// is installed only once (guarded by the null check on the current field) and
// forwards LENS_FOCUS_DISTANCE changes via mFocusDistanceListener when the
// device either does not report lens state or reports the lens as moving.
363 public void setFocusDistanceListener(FocusDistanceListener focusDistanceListener) {
364 if(mFocusDistanceListener == null) {
365 mCaptureManager.addMetadataChangeListener(CaptureResult.LENS_FOCUS_DISTANCE,
366 new ImageCaptureManager.MetadataChangeListener() {
368 public void onImageMetadataChange(Key<?> key, Object oldValue,
370 CaptureResult result) {
371 Integer state = result.get(CaptureResult.LENS_STATE);
373 // Forward changes if we have a new value and the camera
374 // A) Doesn't support lens state or B) lens state is
375 // reported and it is reported as moving.
376 if (newValue != null &&
377 (state == null || state == CameraMetadata.LENS_STATE_MOVING)) {
378 mFocusDistanceListener.onFocusDistance((float) newValue, mLensRange);
// Store the listener last; the metadata listener reads the field directly.
383 mFocusDistanceListener = focusDistanceListener;
387 * @return The largest supported picture size.
389 public Size getDefaultPictureSize() {
390 StreamConfigurationMap configs =
391 mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
392 android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat);
394 // Find the largest supported size.
395 android.util.Size largestSupportedSize = supportedSizes[0];
396 long largestSupportedSizePixels =
397 largestSupportedSize.getWidth() * largestSupportedSize.getHeight();
398 for (int i = 0; i < supportedSizes.length; i++) {
399 long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight();
400 if (numPixels > largestSupportedSizePixels) {
401 largestSupportedSize = supportedSizes[i];
402 largestSupportedSizePixels = numPixels;
406 return new Size(largestSupportedSize.getWidth(), largestSupportedSize.getHeight());
409 private void onShutterInvokeUI(final PhotoCaptureParameters params) {
410 // Tell CaptureModule shutter has occurred so it can flash the screen.
411 params.callback.onQuickExpose();
412 // Play shutter click sound.
413 mMediaActionSound.play(MediaActionSound.SHUTTER_CLICK);
// Takes a picture, preferring a frame already in the ZSL ring buffer when an
// acceptable one exists (timestamp not yet saved, lens stationary, AE/AF/AWB
// not scanning); otherwise waits for the next good frame, optionally running
// an AE precapture sequence first when flash would be required.
// NOTE(review): several closing braces / else-branches are not visible in
// this truncated listing; confirm control flow against the full file.
420 public void takePicture(final PhotoCaptureParameters params, final CaptureSession session) {
// Disarm the shutter until this capture completes.
421 mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);
423 boolean useZSL = ZSL_ENABLED;
425 // We will only capture images from the zsl ring-buffer which satisfy
427 ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints =
428 new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
429 zslConstraints.add(new ImageCaptureManager.CapturedImageConstraint() {
431 public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
432 Long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
433 Integer lensState = captureResult.get(CaptureResult.LENS_STATE);
434 Integer flashState = captureResult.get(CaptureResult.FLASH_STATE);
435 Integer flashMode = captureResult.get(CaptureResult.FLASH_MODE);
436 Integer aeState = captureResult.get(CaptureResult.CONTROL_AE_STATE);
437 Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
438 Integer awbState = captureResult.get(CaptureResult.CONTROL_AWB_STATE);
// Substitute benign defaults for metadata keys the device omits.
440 if (lensState == null) {
441 lensState = CaptureResult.LENS_STATE_STATIONARY;
443 if (flashState == null) {
444 flashState = CaptureResult.FLASH_STATE_UNAVAILABLE;
446 if (flashMode == null) {
447 flashMode = CaptureResult.FLASH_MODE_OFF;
449 if (aeState == null) {
450 aeState = CaptureResult.CONTROL_AE_STATE_INACTIVE;
452 if (afState == null) {
453 afState = CaptureResult.CONTROL_AF_STATE_INACTIVE;
455 if (awbState == null) {
456 awbState = CaptureResult.CONTROL_AWB_STATE_INACTIVE;
459 synchronized (mCapturedImageTimestamps) {
460 if (mCapturedImageTimestamps.contains(timestamp)) {
461 // Don't save frames which we've already saved.
466 if (lensState == CaptureResult.LENS_STATE_MOVING) {
467 // If we know the lens was moving, don't use this image.
// Reject frames captured while 3A was still converging.
471 if (aeState == CaptureResult.CONTROL_AE_STATE_SEARCHING
472 || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
476 if (afState == CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN
477 || afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN) {
481 if (awbState == CaptureResult.CONTROL_AWB_STATE_SEARCHING) {
488 // This constraint lets us capture images which have been explicitly
489 // requested. See {@link RequestTag.EXPLICIT_CAPTURE}.
490 ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint =
491 new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
492 singleCaptureConstraint.add(new ImageCaptureManager.CapturedImageConstraint() {
494 public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
495 Object tag = captureResult.getRequest().getTag();
496 return tag == RequestTag.EXPLICIT_CAPTURE;
500 // If we can use ZSL, try to save a previously-captured frame, if an
501 // acceptable one exists in the buffer.
503 boolean capturedPreviousFrame = mCaptureManager.tryCaptureExistingImage(
504 new ImageCaptureTask(params, session), zslConstraints);
505 if (capturedPreviousFrame) {
506 Log.v(TAG, "Saving previous frame");
507 onShutterInvokeUI(params);
509 Log.v(TAG, "No good image Available. Capturing next available good image.");
510 // If there was no good frame available in the ring buffer
511 // already, capture the next good image.
512 // TODO Disable the shutter button until this image is captured.
514 Flash flashMode = Flash.OFF;
516 if (flashMode == Flash.ON || flashMode == Flash.AUTO) {
517 // We must issue a request for a single capture using the
518 // flash, including an AE precapture trigger.
520 // The following sets up a sequence of events which will
521 // occur in reverse order to the associated method
523 // 1. Send a request to trigger the Auto Exposure Precapture
524 // 2. Wait for the AE_STATE to leave the PRECAPTURE state,
525 // and then send a request for a single image, with the
526 // appropriate flash settings.
527 // 3. Capture the next appropriate image, which should be
528 // the one we requested in (2).
530 mCaptureManager.captureNextImage(new ImageCaptureTask(params, session),
531 singleCaptureConstraint);
533 mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AE_STATE,
534 new MetadataChangeListener() {
536 public void onImageMetadataChange(Key<?> key, Object oldValue,
538 CaptureResult result) {
539 Log.v(TAG, "AE State Changed");
// Precapture just finished: fire the real still request once.
540 if (oldValue.equals(Integer.valueOf(
541 CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
542 mCaptureManager.removeMetadataChangeListener(key, this);
543 sendSingleRequest(params);
544 // TODO: Delay this until
545 // onCaptureStarted().
546 onShutterInvokeUI(params);
// Kick off step 1 of the sequence described above.
551 sendAutoExposureTriggerRequest(flashMode);
553 // We may get here if, for example, the auto focus is in the
555 // If the flash is off, we should just wait for the next
556 // image that arrives. This will have minimal delay since we
557 // do not need to send a new capture request.
558 mCaptureManager.captureNextImage(new ImageCaptureTask(params, session),
563 // TODO If we can't save a previous frame, create a new capture
564 // request to do what we need (e.g. flash) and call
565 // captureNextImage().
566 throw new UnsupportedOperationException("Non-ZSL capture not yet supported");
571 public void startPreview(Surface previewSurface, CaptureReadyCallback listener) {
572 mPreviewSurface = previewSurface;
573 setupAsync(mPreviewSurface, listener);
// Shuts down this camera: stops the repeating request, quits the camera
// thread, and releases the capture manager and image reader.
577 public void close() {
// NOTE(review): an early-return guard for the already-closed case (setting
// mIsClosed) is implied by the log below but its condition is not visible in
// this truncated listing.
579 Log.w(TAG, "Camera is already closed.");
// Stop the ZSL/preview stream before tearing resources down.
583 mCaptureSession.stopRepeating();
584 } catch (CameraAccessException e) {
585 Log.e(TAG, "Could not abort captures in progress.");
588 mCameraThread.quitSafely();
590 mCaptureManager.close();
591 mCaptureImageReader.close();
594 public Size[] getSupportedPreviewSizes() {
595 StreamConfigurationMap config =
596 mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
597 return Size.convert(config.getOutputSizes(sCaptureImageFormat));
600 public float getFullSizeAspectRatio() {
601 return mFullSizeAspectRatio;
605 public Facing getDirection() {
606 return mDirection.getDirection();
// Compresses the image to JPEG, populates EXIF (pixel dimensions,
// orientation, optional GPS heading, capture-result metadata), then hands the
// bytes to the CaptureSession for saving. The capture callback is notified
// with the saved Uri, or null if saving failed.
// NOTE(review): parts of this method (e.g. the heading >= 0 guard implied by
// the GPS comment, and the tail of the getImageSizeForOrientation call) are
// not visible in this truncated listing.
610 private void savePicture(Image image, final PhotoCaptureParameters captureParams,
611 CaptureSession session, CaptureResult result) {
612 int heading = captureParams.heading;
613 int degrees = CameraUtil.getJpegRotation(captureParams.orientation, mCharacteristics);
615 ExifInterface exif = new ExifInterface();
616 // TODO: Add more exif tags here.
// Final dimensions after rotation (width/height swap for 90/270).
618 Size size = getImageSizeForOrientation(image.getWidth(), image.getHeight(),
621 exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, size.getWidth()));
622 exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, size.getHeight()));
625 exif.buildTag(ExifInterface.TAG_ORIENTATION, ExifInterface.Orientation.TOP_LEFT));
627 // Set GPS heading direction based on sensor, if location is on.
629 ExifTag directionRefTag = exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
630 ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
631 ExifTag directionTag =
632 exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION, new Rational(heading, 1));
633 exif.setTag(directionRefTag);
634 exif.setTag(directionTag);
// Copy capture-result metadata (exposure, ISO, etc.) into the EXIF block.
636 new ExifUtil(exif).populateExif(Optional.<TaskImageContainer.TaskImage>absent(),
637 Optional.of((CaptureResultProxy) new AndroidCaptureResultProxy(result)),
638 Optional.<Location>absent());
639 ListenableFuture<Optional<Uri>> futureUri = session.saveAndFinish(
640 acquireJpegBytes(image, degrees),
641 size.getWidth(), size.getHeight(), 0, exif);
642 Futures.addCallback(futureUri, new FutureCallback<Optional<Uri>>() {
644 public void onSuccess(Optional<Uri> uriOptional) {
645 captureParams.callback.onPictureSaved(uriOptional.orNull());
649 public void onFailure(Throwable throwable) {
// Saving failed: report a null Uri to the callback.
650 captureParams.callback.onPictureSaved(null);
656 * Asynchronously sets up the capture session.
658 * @param previewSurface the surface onto which the preview should be
660 * @param listener called when setup is completed.
// Posts setup() onto the camera thread so session creation never blocks the
// caller's thread.
662 private void setupAsync(final Surface previewSurface, final CaptureReadyCallback listener) {
663 mCameraHandler.post(new Runnable() {
// NOTE(review): the run() override wrapper is not visible in this truncated
// listing; setup() is presumably invoked from Runnable.run().
666 setup(previewSurface, listener);
672 * Configures and attempts to create a capture session.
674 * @param previewSurface the surface onto which the preview should be
676 * @param listener called when the setup is completed.
// Runs on the camera thread: tears down any previous session, then creates a
// new session targeting the preview surface and the ZSL image reader. On
// success it resets 3A/crop state and starts the repeating request.
678 private void setup(Surface previewSurface, final CaptureReadyCallback listener) {
680 if (mCaptureSession != null) {
681 mCaptureSession.abortCaptures();
682 mCaptureSession = null;
// Two outputs: viewfinder preview + full-size ZSL capture stream.
684 List<Surface> outputSurfaces = new ArrayList<Surface>(2);
685 outputSurfaces.add(previewSurface);
686 outputSurfaces.add(mCaptureImageReader.getSurface());
688 mDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
690 public void onConfigureFailed(CameraCaptureSession session) {
691 listener.onSetupFailed();
695 public void onConfigured(CameraCaptureSession session) {
696 mCaptureSession = session;
// Reset metering regions and crop before starting the stream.
697 mAFRegions = ZERO_WEIGHT_3A_REGION;
698 mAERegions = ZERO_WEIGHT_3A_REGION;
700 mCropRegion = cropRegionForZoom(mZoomValue);
701 boolean success = sendRepeatingCaptureRequest();
703 mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS,
705 mReadyStateManager.notifyListeners();
706 listener.onReadyForCapture();
// NOTE(review): this failure branch's enclosing else is not visible in this
// truncated listing.
708 listener.onSetupFailed();
713 public void onClosed(CameraCaptureSession session) {
714 super.onClosed(session);
717 } catch (CameraAccessException ex) {
718 Log.e(TAG, "Could not set up capture session", ex);
719 listener.onSetupFailed();
723 private void addRegionsToCaptureRequestBuilder(CaptureRequest.Builder builder) {
724 builder.set(CaptureRequest.CONTROL_AE_REGIONS, mAERegions);
725 builder.set(CaptureRequest.CONTROL_AF_REGIONS, mAFRegions);
726 builder.set(CaptureRequest.SCALER_CROP_REGION, mCropRegion);
// Applies AE/flash settings to the builder for the given Flash value:
// ON   -> AE_MODE_ON_ALWAYS_FLASH + FLASH_MODE_SINGLE
// OFF  -> AE_MODE_ON + FLASH_MODE_OFF
// AUTO -> AE_MODE_ON_AUTO_FLASH
// NOTE(review): the switch (flashMode) skeleton and case labels are not
// visible in this truncated listing; only the per-case bodies remain.
729 private void addFlashToCaptureRequestBuilder(CaptureRequest.Builder builder, Flash flashMode) {
732 builder.set(CaptureRequest.CONTROL_AE_MODE,
733 CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
734 builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
737 builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
738 builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
741 builder.set(CaptureRequest.CONTROL_AE_MODE,
742 CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
748 * Request a stream of images.
750 * @return true if successful, false if there was an error submitting the
// Starts the repeating request feeding both the viewfinder and (when ZSL is
// enabled) the full-size image reader, with continuous-picture AF, AE on,
// flash off, and the current 3A/crop regions.
// NOTE(review): the ZSL_ENABLED branching and try/return structure are not
// fully visible in this truncated listing.
753 private boolean sendRepeatingCaptureRequest() {
754 Log.v(TAG, "sendRepeatingCaptureRequest()");
756 CaptureRequest.Builder builder;
758 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
760 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
763 builder.addTarget(mPreviewSurface);
766 builder.addTarget(mCaptureImageReader.getSurface());
769 builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
771 builder.set(CaptureRequest.CONTROL_AF_MODE,
772 CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
773 builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
775 builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
776 builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
778 addRegionsToCaptureRequestBuilder(builder);
780 mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
782 } catch (CameraAccessException e) {
784 Log.v(TAG, "Could not execute zero-shutter-lag repeating request.", e);
786 Log.v(TAG, "Could not execute preview request.", e);
793 * Request a single image.
795 * @return true if successful, false if there was an error submitting the
// Submits one explicitly-tagged still-capture request (RequestTag
// EXPLICIT_CAPTURE) targeting both preview and the capture reader, with the
// requested flash mode and current 3A/crop regions.
798 private boolean sendSingleRequest(OneCamera.PhotoCaptureParameters params) {
799 Log.v(TAG, "sendSingleRequest()");
801 CaptureRequest.Builder builder;
802 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
804 builder.addTarget(mPreviewSurface);
806 // Always add this surface for single image capture requests.
807 builder.addTarget(mCaptureImageReader.getSurface());
809 builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
811 Flash flashMode = Flash.OFF;
812 addFlashToCaptureRequestBuilder(builder, flashMode);
813 addRegionsToCaptureRequestBuilder(builder);
815 builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
816 builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
818 // Tag this as a special request which should be saved.
819 builder.setTag(RequestTag.EXPLICIT_CAPTURE);
// Hardware JPEG path: quality and orientation go into the request itself.
821 if (sCaptureImageFormat == ImageFormat.JPEG) {
822 builder.set(CaptureRequest.JPEG_QUALITY, (byte) (JPEG_QUALITY));
823 builder.set(CaptureRequest.JPEG_ORIENTATION,
824 CameraUtil.getJpegRotation(params.orientation, mCharacteristics));
827 mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
829 } catch (CameraAccessException e) {
830 Log.v(TAG, "Could not execute single still capture request.", e);
// Starts a repeating video-snapshot request (continuous-video AF, AE on,
// flash off) targeting both the preview surface and the capture reader.
// @return presumably true on success, false when the request fails —
// the return statements are not visible in this truncated listing.
835 private boolean sendRepeatingBurstCaptureRequest() {
836 Log.v(TAG, "sendRepeatingBurstCaptureRequest()");
838 CaptureRequest.Builder builder;
839 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
840 builder.addTarget(mPreviewSurface);
843 builder.addTarget(mCaptureImageReader.getSurface());
846 builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
847 builder.set(CaptureRequest.CONTROL_AF_MODE,
848 CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
849 builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
851 builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
852 builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
854 addRegionsToCaptureRequestBuilder(builder);
856 mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
858 } catch (CameraAccessException e) {
859 Log.v(TAG, "Could not send repeating burst capture request.", e);
// Fires a one-shot AE precapture trigger (used before a flash capture),
// carrying the given flash mode plus the current 3A/crop regions.
// NOTE(review): the ZSL_ENABLED branching and try/return structure are not
// fully visible in this truncated listing.
864 private boolean sendAutoExposureTriggerRequest(Flash flashMode) {
865 Log.v(TAG, "sendAutoExposureTriggerRequest()");
867 CaptureRequest.Builder builder;
869 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
871 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
874 builder.addTarget(mPreviewSurface);
877 builder.addTarget(mCaptureImageReader.getSurface());
880 builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
882 builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
883 CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
885 addRegionsToCaptureRequestBuilder(builder);
886 addFlashToCaptureRequestBuilder(builder, flashMode);
888 mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
891 } catch (CameraAccessException e) {
892 Log.v(TAG, "Could not execute auto exposure trigger request.", e);
// Fires a one-shot AF trigger (CONTROL_AF_TRIGGER_START) in auto-focus mode
// with the current 3A/crop regions applied.
// NOTE(review): the ZSL_ENABLED branching and try/return structure are not
// fully visible in this truncated listing.
899 private boolean sendAutoFocusTriggerRequest() {
900 Log.v(TAG, "sendAutoFocusTriggerRequest()");
902 CaptureRequest.Builder builder;
904 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
906 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
909 builder.addTarget(mPreviewSurface);
912 builder.addTarget(mCaptureImageReader.getSurface());
915 builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
917 addRegionsToCaptureRequestBuilder(builder);
919 builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
920 builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
922 mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
925 } catch (CameraAccessException e) {
926 Log.v(TAG, "Could not execute auto focus trigger request.", e);
932 * Like {@link #sendRepeatingCaptureRequest()}, but with the focus held
935 * @return true if successful, false if there was an error submitting the
// Repeating request in AF_MODE_AUTO with an idle trigger, so the lens stays
// where the preceding AF trigger left it while streaming continues.
938 private boolean sendAutoFocusHoldRequest() {
939 Log.v(TAG, "sendAutoFocusHoldRequest()");
941 CaptureRequest.Builder builder;
943 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
945 builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
948 builder.addTarget(mPreviewSurface);
951 builder.addTarget(mCaptureImageReader.getSurface());
954 builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
956 builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
957 builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
959 addRegionsToCaptureRequestBuilder(builder);
960 // TODO: This should fire the torch, if appropriate.
962 mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
965 } catch (CameraAccessException e) {
966 Log.v(TAG, "Could not execute auto focus hold request.", e);
972 * Calculate the aspect ratio of the full size capture on this device.
974 * @param characteristics the characteristics of the camera device.
975 * @return The aspect ration, in terms of width/height of the full capture
978 private static float calculateFullSizeAspectRatio(CameraCharacteristics characteristics) {
979 Rect activeArraySize =
980 characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
981 return ((float) activeArraySize.width()) / activeArraySize.height();
985 * @param originalWidth the width of the original image captured from the
987 * @param originalHeight the height of the original image captured from the
989 * @param orientation the rotation to apply, in degrees.
990 * @return The size of the final rotated image
992 private Size getImageSizeForOrientation(int originalWidth, int originalHeight,
994 if (orientation == 0 || orientation == 180) {
995 return new Size(originalWidth, originalHeight);
996 } else if (orientation == 90 || orientation == 270) {
997 return new Size(originalHeight, originalWidth);
999 throw new InvalidParameterException("Orientation not supported.");
/**
 * Given an image reader, extracts the JPEG image bytes and then closes the
 * reader.
 *
 * NOTE(review): branch/brace structure, the early return of the JPEG path,
 * and the buffer rewind/clear bookkeeping are reconstructed around
 * extraction gaps — verify against upstream before relying on them.
 *
 * @param img the image from which to extract jpeg bytes or compress to
 *            jpeg.
 * @param degrees the angle to rotate the image clockwise, in degrees. Rotation is
 *            only applied to YUV images.
 * @return The bytes of the JPEG image. Newly allocated.
 */
private byte[] acquireJpegBytes(Image img, int degrees) {
    ByteBuffer buffer;
    if (img.getFormat() == ImageFormat.JPEG) {
        // Already JPEG-compressed: copy the single plane's bytes out.
        Image.Plane plane0 = img.getPlanes()[0];
        buffer = plane0.getBuffer();

        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);
        // Rewind so the Image's buffer is left readable for any other consumer.
        buffer.rewind();
        return imageBytes;
    } else if (img.getFormat() == ImageFormat.YUV_420_888) {
        // Reuse a pooled direct buffer when available to avoid reallocating
        // a large scratch buffer for every capture.
        buffer = mJpegByteBufferPool.acquire();
        if (buffer == null) {
            // 3 bytes/pixel is an upper bound for the compressed output.
            buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
        }

        // Native JPEG compression; applies the requested clockwise rotation.
        int numBytes = JpegUtilNative.compressJpegFromYUV420Image(
                new AndroidImageProxy(img), buffer, JPEG_QUALITY,
                degrees);

        // Negative byte count signals a native-side compression failure.
        if (numBytes < 0) {
            throw new RuntimeException("Error compressing jpeg.");
        }
        // Restrict the readable window to the bytes actually written.
        buffer.limit(numBytes);

        byte[] imageBytes = new byte[buffer.remaining()];
        buffer.get(imageBytes);

        // Reset the scratch buffer before returning it to the pool.
        buffer.clear();
        mJpegByteBufferPool.release(buffer);

        return imageBytes;
    } else {
        throw new RuntimeException("Unsupported image format.");
    }
}
/**
 * Runs one tap-to-focus AF cycle: cancels any pending cycle, fires a single
 * AF-trigger capture, holds focus via a repeating request, and schedules a
 * return to the normal repeating preview after the configured hold time.
 *
 * NOTE(review): the anonymous Runnable's run() wrapper is reconstructed
 * around extraction gaps — verify against upstream.
 */
private void startAFCycle() {
    // Clean up any existing AF cycle's pending callbacks.
    mCameraHandler.removeCallbacksAndMessages(FOCUS_RESUME_CALLBACK_TOKEN);

    // Send a single CONTROL_AF_TRIGGER_START capture request.
    sendAutoFocusTriggerRequest();

    // Immediately send a request for a regular preview stream, but with
    // CONTROL_AF_MODE_AUTO set so that the focus remains constant after the
    // AF cycle completes.
    sendAutoFocusHoldRequest();

    // Waits Settings3A.getFocusHoldMillis() milliseconds before sending
    // a request for a regular preview stream to resume.
    mCameraHandler.postAtTime(new Runnable() {
            @Override
            public void run() {
                // Zero-weight regions remove the tap point's influence on
                // 3A before resuming the normal repeating request.
                mAERegions = ZERO_WEIGHT_3A_REGION;
                mAFRegions = ZERO_WEIGHT_3A_REGION;
                sendRepeatingCaptureRequest();
            }
        }, FOCUS_RESUME_CALLBACK_TOKEN,
        SystemClock.uptimeMillis() + Settings3A.getFocusHoldMillis());
}
/**
 * Sets the AE and AF metering regions at the given normalized preview
 * coordinate (rotated into sensor orientation) and starts an AF cycle
 * there.
 *
 * NOTE(review): the trailing startAFCycle() call falls in an extraction
 * gap and is reconstructed — verify against upstream.
 *
 * @see com.android.camera.one.OneCamera#triggerFocusAndMeterAtPoint(float,
 *      float)
 */
@Override
public void triggerFocusAndMeterAtPoint(float nx, float ny) {
    // Metering rectangles are specified in sensor coordinates, so the
    // normalized point must be rotated by the sensor orientation.
    int sensorOrientation = mCharacteristics.get(
            CameraCharacteristics.SENSOR_ORIENTATION);
    mAERegions = AutoFocusHelper.aeRegionsForNormalizedCoord(nx, ny, mCropRegion, sensorOrientation);
    mAFRegions = AutoFocusHelper.afRegionsForNormalizedCoord(nx, ny, mCropRegion, sensorOrientation);
    startAFCycle();
}
1092 public Size pickPreviewSize(Size pictureSize, Context context) {
1093 if (pictureSize == null) {
1094 // TODO The default should be selected by the caller, and
1095 // pictureSize should never be null.
1096 pictureSize = getDefaultPictureSize();
1098 float pictureAspectRatio = pictureSize.getWidth() / (float) pictureSize.getHeight();
1099 return CaptureModuleUtil.getOptimalPreviewSize(getSupportedPreviewSizes(),
1100 pictureAspectRatio);
1104 public float getMaxZoom() {
1105 return mCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
/**
 * Applies the given zoom factor by recomputing the sensor crop region and
 * restarting the repeating preview request with it.
 *
 * NOTE(review): there is a one-line extraction gap at the top of this
 * method — the upstream version may also record the zoom value in a field;
 * verify before relying on this listing.
 */
public void setZoom(float zoom) {
    mCropRegion = cropRegionForZoom(zoom);
    sendRepeatingCaptureRequest();
}
1115 private Rect cropRegionForZoom(float zoom) {
1116 return AutoFocusHelper.cropRegionForZoom(mCharacteristics, zoom);