android-x86/packages-apps-Camera2.git: src/com/android/camera/one/v2/OneCameraZslImpl.java

/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package com.android.camera.one.v2;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.CaptureResult.Key;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.location.Location;
import android.media.CameraProfile;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaActionSound;
import android.net.Uri;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.view.Surface;

import androidx.core.util.Pools;

import com.android.camera.CaptureModuleUtil;
import com.android.camera.debug.Log;
import com.android.camera.debug.Log.Tag;
import com.android.camera.exif.ExifInterface;
import com.android.camera.exif.ExifTag;
import com.android.camera.exif.Rational;
import com.android.camera.one.AbstractOneCamera;
import com.android.camera.one.CameraDirectionProvider;
import com.android.camera.one.OneCamera;
import com.android.camera.one.OneCamera.PhotoCaptureParameters.Flash;
import com.android.camera.one.Settings3A;
import com.android.camera.one.v2.ImageCaptureManager.ImageCaptureListener;
import com.android.camera.one.v2.ImageCaptureManager.MetadataChangeListener;
import com.android.camera.one.v2.camera2proxy.AndroidCaptureResultProxy;
import com.android.camera.one.v2.camera2proxy.AndroidImageProxy;
import com.android.camera.one.v2.camera2proxy.CaptureResultProxy;
import com.android.camera.processing.imagebackend.TaskImageContainer;
import com.android.camera.session.CaptureSession;
import com.android.camera.ui.focus.LensRangeCalculator;
import com.android.camera.ui.motion.LinearScale;
import com.android.camera.util.CameraUtil;
import com.android.camera.util.ExifUtil;
import com.android.camera.util.JpegUtilNative;
import com.android.camera.util.ListenerCombiner;
import com.android.camera.util.Size;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;

import java.nio.ByteBuffer;
import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * {@link OneCamera} implementation directly on top of the Camera2 API with zero
 * shutter lag.<br>
 * TODO: Determine what the maximum number of full YUV capture frames is.
 */
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Deprecated
public class OneCameraZslImpl extends AbstractOneCamera {
    private static final Tag TAG = new Tag("OneCameraZslImpl2");

    /** Default JPEG encoding quality. */
    private static final int JPEG_QUALITY =
            CameraProfile.getJpegEncodingQualityParameter(CameraProfile.QUALITY_HIGH);
    /**
     * The maximum number of images to store in the full-size ZSL ring buffer.
     * <br>
     * TODO: Determine this number dynamically based on available memory and the
     * size of frames.
     */
    private static final int MAX_CAPTURE_IMAGES = 12;
    /**
     * True if zero-shutter-lag images should be captured. Some devices produce
     * lower-quality images for the high-frequency stream, so we may wish to
     * disable ZSL in that case.
     */
    private static final boolean ZSL_ENABLED = true;

    /**
     * Tags which may be used in CaptureRequests.
     */
    private static enum RequestTag {
        /**
         * Indicates that the request was explicitly sent for a single
         * high-quality still capture. Unlike other requests, such as the
         * repeating (ZSL) stream and AF/AE triggers, requests with this tag
         * should always be saved.
         */
        EXPLICIT_CAPTURE
    }

    /**
     * Set to ImageFormat.JPEG to use the hardware encoder, or
     * ImageFormat.YUV_420_888 to use the software encoder. No other image
     * formats are supported.
     */
    private static final int sCaptureImageFormat = ImageFormat.YUV_420_888;
    /**
     * Token for callbacks posted to {@link #mCameraHandler} to resume
     * continuous AF.
     */
    private static final String FOCUS_RESUME_CALLBACK_TOKEN = "RESUME_CONTINUOUS_AF";

    /** Zero weight 3A region, to reset regions per API. */
    /*package*/ MeteringRectangle[] ZERO_WEIGHT_3A_REGION = AutoFocusHelper.getZeroWeightRegion();

    /**
     * Thread on which high-priority camera operations, such as grabbing preview
     * frames for the viewfinder, are running.
     */
    private final HandlerThread mCameraThread;
    /** Handler of the {@link #mCameraThread}. */
    private final Handler mCameraHandler;

    /** Thread on which low-priority camera listeners are running. */
    private final HandlerThread mCameraListenerThread;
    private final Handler mCameraListenerHandler;

    /** The characteristics of this camera. */
    private final CameraCharacteristics mCharacteristics;
    /** Converts focus distance units into ratio values */
    private final LinearScale mLensRange;
    /** The underlying Camera2 API camera device. */
    private final CameraDevice mDevice;
    private final CameraDirectionProvider mDirection;

    /**
     * The aspect ratio (width/height) of the full resolution for this camera.
     * Usually the native aspect ratio of this camera.
     */
    private final float mFullSizeAspectRatio;
    /** The Camera2 API capture session currently active. */
    private CameraCaptureSession mCaptureSession;
    /** The surface onto which to render the preview. */
    private Surface mPreviewSurface;
    /** Whether closing of this device has been requested. */
    private volatile boolean mIsClosed = false;

    /** Receives the normal captured images. */
    private final ImageReader mCaptureImageReader;

    /**
     * Maintains a buffer of images and their associated {@link CaptureResult}s.
     */
    private ImageCaptureManager mCaptureManager;

    /**
     * The sensor timestamps (which may not be relative to the system time) of
     * the most recently captured images.
     */
    private final Set<Long> mCapturedImageTimestamps = Collections.synchronizedSet(
            new HashSet<Long>());

    /** Thread pool for performing slow jpeg encoding and saving tasks. */
    private final ThreadPoolExecutor mImageSaverThreadPool;

    /** Pool of native byte buffers on which to store jpeg-encoded images. */
    private final Pools.SynchronizedPool<ByteBuffer> mJpegByteBufferPool =
            new Pools.SynchronizedPool<ByteBuffer>(64);

    /** Current zoom value. 1.0 is no zoom. */
    private float mZoomValue = 1f;
    /** Current crop region: set from mZoomValue. */
    private Rect mCropRegion;
    /** Current AE, AF, and AWB regions */
    private MeteringRectangle[] mAFRegions = ZERO_WEIGHT_3A_REGION;
    private MeteringRectangle[] mAERegions = ZERO_WEIGHT_3A_REGION;

    private MediaActionSound mMediaActionSound = new MediaActionSound();

    /**
     * Ready state (typically displayed by the UI shutter-button) depends on two
     * things:<br>
     * <ol>
     * <li>{@link #mCaptureManager} must be ready.</li>
     * <li>We must not be in the process of capturing a single, high-quality,
     * image.</li>
     * </ol>
     * See {@link ListenerCombiner} and {@link #mReadyStateManager} for
     * details of how this is managed.
     */
    private static enum ReadyStateRequirement {
        CAPTURE_MANAGER_READY, CAPTURE_NOT_IN_PROGRESS
    }

    /**
     * Handles the thread-safe logic of dispatching whenever the logical AND of
     * these constraints changes.
     */
    private final ListenerCombiner<ReadyStateRequirement>
            mReadyStateManager = new ListenerCombiner<ReadyStateRequirement>(
                    ReadyStateRequirement.class, new ListenerCombiner.StateChangeListener() {
                        @Override
                        public void onStateChange(boolean state) {
                            broadcastReadyState(state);
                        }
                    });

    /**
     * An {@link ImageCaptureListener} which will compress and save an image to
     * disk.
     */
    private class ImageCaptureTask implements ImageCaptureListener {
        private final PhotoCaptureParameters mParams;
        private final CaptureSession mSession;

        public ImageCaptureTask(PhotoCaptureParameters parameters, CaptureSession session) {
            mParams = parameters;
            mSession = session;
        }

        @Override
        public void onImageCaptured(Image image, TotalCaptureResult captureResult) {
            long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);

            // We should only capture the image if it hasn't been captured
            // before. Synchronization is necessary since
            // mCapturedImageTimestamps is read & modified elsewhere.
            synchronized (mCapturedImageTimestamps) {
                if (!mCapturedImageTimestamps.contains(timestamp)) {
                    mCapturedImageTimestamps.add(timestamp);
                } else {
                    // There was a more recent (or identical) image which has
                    // begun being saved, so abort.
                    return;
                }

                // Clear out old timestamps from the set.
                // We must keep old timestamps in the set a little longer (a
                // factor of 2 seems adequate) to ensure they are cleared out of
                // the ring buffer before their timestamp is removed from the
                // set.
                long maxTimestamps = MAX_CAPTURE_IMAGES * 2;
                if (mCapturedImageTimestamps.size() > maxTimestamps) {
                    ArrayList<Long> timestamps = new ArrayList<Long>(mCapturedImageTimestamps);
                    Collections.sort(timestamps);
                    for (int i = 0; i < timestamps.size()
                            && mCapturedImageTimestamps.size() > maxTimestamps; i++) {
                        mCapturedImageTimestamps.remove(timestamps.get(i));
                    }
                }
            }

            mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);

            savePicture(image, mParams, mSession, captureResult);
            mParams.callback.onPictureTaken(mSession);
            Log.v(TAG, "Image saved. Frame number = " + captureResult.getFrameNumber());
        }
    }

    /**
     * Instantiates a new camera based on Camera 2 API.
     *
     * @param device The underlying Camera 2 device.
     * @param characteristics The device's characteristics.
     * @param pictureSize the size of the final image to be taken.
     */
    OneCameraZslImpl(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
        Log.v(TAG, "Creating new OneCameraZslImpl");

        mDevice = device;
        mCharacteristics = characteristics;
        mLensRange = LensRangeCalculator
              .getDiopterToRatioCalculator(characteristics);
        mDirection = new CameraDirectionProvider(mCharacteristics);
        mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);

        mCameraThread = new HandlerThread("OneCamera2");
        // If this thread stalls, it will delay viewfinder frames.
        mCameraThread.setPriority(Thread.MAX_PRIORITY);
        mCameraThread.start();
        mCameraHandler = new Handler(mCameraThread.getLooper());

        mCameraListenerThread = new HandlerThread("OneCamera2-Listener");
        mCameraListenerThread.start();
        mCameraListenerHandler = new Handler(mCameraListenerThread.getLooper());

        // TODO: Encoding on multiple cores results in preview jank due to
        // excessive GC.
        int numEncodingCores = CameraUtil.getNumCpuCores();
        mImageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10,
                TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());

        mCaptureManager =
                new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
                        mImageSaverThreadPool);
        mCaptureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
            @Override
            public void onReadyStateChange(boolean capturePossible) {
                mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_MANAGER_READY,
                        capturePossible);
            }
        });

        // Listen for changes to auto focus state and dispatch to
        // mFocusStateListener.
        mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AF_STATE,
                new ImageCaptureManager.MetadataChangeListener() {
                    @Override
                    public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
                            CaptureResult result) {
                        FocusStateListener listener = mFocusStateListener;
                        if (listener != null) {
                            listener.onFocusStatusUpdate(
                                    AutoFocusHelper.stateFromCamera2State(
                                            result.get(CaptureResult.CONTROL_AF_STATE)),
                                    result.getFrameNumber());
                        }
                    }
                });

        // Allocate the image reader to store all images received from the
        // camera.
        if (pictureSize == null) {
            // TODO The default should be selected by the caller, and
            // pictureSize should never be null.
            pictureSize = getDefaultPictureSize();
        }
        mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(),
                pictureSize.getHeight(),
                sCaptureImageFormat, MAX_CAPTURE_IMAGES);

        mCaptureImageReader.setOnImageAvailableListener(mCaptureManager, mCameraHandler);
        mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
    }

    @Override
    public void setFocusDistanceListener(FocusDistanceListener focusDistanceListener) {
        if (mFocusDistanceListener == null) {
            mCaptureManager.addMetadataChangeListener(CaptureResult.LENS_FOCUS_DISTANCE,
                  new ImageCaptureManager.MetadataChangeListener() {
                      @Override
                      public void onImageMetadataChange(Key<?> key, Object oldValue,
                            Object newValue,
                            CaptureResult result) {
                          Integer state = result.get(CaptureResult.LENS_STATE);

                          // Forward changes if we have a new value and the camera
                          // A) Doesn't support lens state or B) lens state is
                          // reported and it is reported as moving.
                          if (newValue != null &&
                                (state == null || state == CameraMetadata.LENS_STATE_MOVING)) {
                              mFocusDistanceListener.onFocusDistance((float) newValue, mLensRange);
                          }
                      }
                  });
        }
        mFocusDistanceListener = focusDistanceListener;
    }

    /**
     * @return The largest supported picture size.
     */
    public Size getDefaultPictureSize() {
        StreamConfigurationMap configs =
                mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat);

        // Find the largest supported size.
        android.util.Size largestSupportedSize = supportedSizes[0];
        long largestSupportedSizePixels =
                largestSupportedSize.getWidth() * largestSupportedSize.getHeight();
        for (int i = 0; i < supportedSizes.length; i++) {
            long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight();
            if (numPixels > largestSupportedSizePixels) {
                largestSupportedSize = supportedSizes[i];
                largestSupportedSizePixels = numPixels;
            }
        }

        return new Size(largestSupportedSize.getWidth(), largestSupportedSize.getHeight());
    }

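    /** Notifies the UI that the shutter has fired and plays the shutter sound. */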
    private void onShutterInvokeUI(final PhotoCaptureParameters params) {
        // Tell CaptureModule shutter has occurred so it can flash the screen.
        params.callback.onQuickExpose();
        // Play shutter click sound.
        mMediaActionSound.play(MediaActionSound.SHUTTER_CLICK);
    }

    /**
     * Take a picture.
     */
    @Override
    public void takePicture(final PhotoCaptureParameters params, final CaptureSession session) {
        mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);

        boolean useZSL = ZSL_ENABLED;

        // We will only capture images from the zsl ring-buffer which satisfy
        // this constraint.
        ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints =
                new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
        zslConstraints.add(new ImageCaptureManager.CapturedImageConstraint() {
            @Override
            public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
                Long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
                Integer lensState = captureResult.get(CaptureResult.LENS_STATE);
                Integer flashState = captureResult.get(CaptureResult.FLASH_STATE);
                Integer flashMode = captureResult.get(CaptureResult.FLASH_MODE);
                Integer aeState = captureResult.get(CaptureResult.CONTROL_AE_STATE);
                Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
                Integer awbState = captureResult.get(CaptureResult.CONTROL_AWB_STATE);

                if (lensState == null) {
                    lensState = CaptureResult.LENS_STATE_STATIONARY;
                }
                if (flashState == null) {
                    flashState = CaptureResult.FLASH_STATE_UNAVAILABLE;
                }
                if (flashMode == null) {
                    flashMode = CaptureResult.FLASH_MODE_OFF;
                }
                if (aeState == null) {
                    aeState = CaptureResult.CONTROL_AE_STATE_INACTIVE;
                }
                if (afState == null) {
                    afState = CaptureResult.CONTROL_AF_STATE_INACTIVE;
                }
                if (awbState == null) {
                    awbState = CaptureResult.CONTROL_AWB_STATE_INACTIVE;
                }

                synchronized (mCapturedImageTimestamps) {
                    if (mCapturedImageTimestamps.contains(timestamp)) {
                        // Don't save frames which we've already saved.
                        return false;
                    }
                }

                if (lensState == CaptureResult.LENS_STATE_MOVING) {
                    // If we know the lens was moving, don't use this image.
                    return false;
                }

                if (aeState == CaptureResult.CONTROL_AE_STATE_SEARCHING
                        || aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                    return false;
                }

                if (afState == CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN
                        || afState == CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN) {
                    return false;
                }

                if (awbState == CaptureResult.CONTROL_AWB_STATE_SEARCHING) {
                    return false;
                }

                return true;
            }
        });
        // This constraint lets us capture images which have been explicitly
        // requested. See {@link RequestTag.EXPLICIT_CAPTURE}.
        ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint =
                new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
        singleCaptureConstraint.add(new ImageCaptureManager.CapturedImageConstraint() {
            @Override
            public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
                Object tag = captureResult.getRequest().getTag();
                return tag == RequestTag.EXPLICIT_CAPTURE;
            }
        });

        // If we can use ZSL, try to save a previously-captured frame, if an
        // acceptable one exists in the buffer.
        if (useZSL) {
            boolean capturedPreviousFrame = mCaptureManager.tryCaptureExistingImage(
                    new ImageCaptureTask(params, session), zslConstraints);
            if (capturedPreviousFrame) {
                Log.v(TAG, "Saving previous frame");
                onShutterInvokeUI(params);
            } else {
                Log.v(TAG, "No good image available. Capturing next available good image.");
                // If there was no good frame available in the ring buffer
                // already, capture the next good image.
                // TODO Disable the shutter button until this image is captured.

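                // NOTE: flash is currently hard-coded to OFF below, so the
                // precapture (flash) branch that follows is not taken.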
                Flash flashMode = Flash.OFF;

                if (flashMode == Flash.ON || flashMode == Flash.AUTO) {
                    // We must issue a request for a single capture using the
                    // flash, including an AE precapture trigger.

                    // The following sets up a sequence of events which will
                    // occur in reverse order to the associated method
                    // calls:
                    // 1. Send a request to trigger the Auto Exposure Precapture
                    // 2. Wait for the AE_STATE to leave the PRECAPTURE state,
                    // and then send a request for a single image, with the
                    // appropriate flash settings.
                    // 3. Capture the next appropriate image, which should be
                    // the one we requested in (2).

                    mCaptureManager.captureNextImage(new ImageCaptureTask(params, session),
                            singleCaptureConstraint);

                    mCaptureManager.addMetadataChangeListener(CaptureResult.CONTROL_AE_STATE,
                            new MetadataChangeListener() {
                                @Override
                                public void onImageMetadataChange(Key<?> key, Object oldValue,
                                        Object newValue,
                                        CaptureResult result) {
                                    Log.v(TAG, "AE State Changed");
                                    if (oldValue.equals(Integer.valueOf(
                                            CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
                                        mCaptureManager.removeMetadataChangeListener(key, this);
                                        sendSingleRequest(params);
                                        // TODO: Delay this until
                                        // onCaptureStarted().
                                        onShutterInvokeUI(params);
                                    }
                                }
                            });

                    sendAutoExposureTriggerRequest(flashMode);
                } else {
                    // We may get here if, for example, the auto focus is in the
                    // middle of a scan.
                    // If the flash is off, we should just wait for the next
                    // image that arrives. This will have minimal delay since we
                    // do not need to send a new capture request.
                    mCaptureManager.captureNextImage(new ImageCaptureTask(params, session),
                            zslConstraints);
                }
            }
        } else {
            // TODO If we can't save a previous frame, create a new capture
            // request to do what we need (e.g. flash) and call
            // captureNextImage().
            throw new UnsupportedOperationException("Non-ZSL capture not yet supported");
        }
    }

    @Override
    public void startPreview(Surface previewSurface, CaptureReadyCallback listener) {
        mPreviewSurface = previewSurface;
        setupAsync(mPreviewSurface, listener);
    }

    @Override
    public void close() {
        if (mIsClosed) {
            Log.w(TAG, "Camera is already closed.");
            return;
        }
        try {
            mCaptureSession.stopRepeating();
        } catch (CameraAccessException e) {
            Log.e(TAG, "Could not stop repeating requests in progress.", e);
        }
        mIsClosed = true;
        mCameraThread.quitSafely();
        mDevice.close();
        mCaptureManager.close();
        mCaptureImageReader.close();
    }

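    /**
     * Returns the output sizes supported for {@link #sCaptureImageFormat},
     * which are used as the candidate preview sizes.
     */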
    public Size[] getSupportedPreviewSizes() {
        StreamConfigurationMap config =
                mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        return Size.convert(config.getOutputSizes(sCaptureImageFormat));
    }

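    /** Returns the aspect ratio (width/height) of the full-size capture. */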
    public float getFullSizeAspectRatio() {
        return mFullSizeAspectRatio;
    }

    @Override
    public Facing getDirection() {
        return mDirection.getDirection();
    }

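    /**
     * Writes EXIF metadata for the captured image and hands the JPEG bytes to
     * the capture session to be saved.
     */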
    private void savePicture(Image image, final PhotoCaptureParameters captureParams,
            CaptureSession session, CaptureResult result) {
        int heading = captureParams.heading;
        int degrees = CameraUtil.getJpegRotation(captureParams.orientation, mCharacteristics);

        ExifInterface exif = new ExifInterface();
        // TODO: Add more exif tags here.

        Size size = getImageSizeForOrientation(image.getWidth(), image.getHeight(),
                degrees);

        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, size.getWidth()));
        exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, size.getHeight()));

        exif.setTag(
                exif.buildTag(ExifInterface.TAG_ORIENTATION, ExifInterface.Orientation.TOP_LEFT));

        // Set GPS heading direction based on sensor, if location is on.
        if (heading >= 0) {
            ExifTag directionRefTag = exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
                    ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
            ExifTag directionTag =
                    exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION, new Rational(heading, 1));
            exif.setTag(directionRefTag);
            exif.setTag(directionTag);
        }
        new ExifUtil(exif).populateExif(Optional.<TaskImageContainer.TaskImage>absent(),
                Optional.of((CaptureResultProxy) new AndroidCaptureResultProxy(result)),
                Optional.<Location>absent());
        ListenableFuture<Optional<Uri>> futureUri = session.saveAndFinish(
                acquireJpegBytes(image, degrees),
                size.getWidth(), size.getHeight(), 0, exif);
        Futures.addCallback(futureUri, new FutureCallback<Optional<Uri>>() {
            @Override
            public void onSuccess(Optional<Uri> uriOptional) {
                captureParams.callback.onPictureSaved(uriOptional.orNull());
            }

            @Override
            public void onFailure(Throwable throwable) {
                captureParams.callback.onPictureSaved(null);
            }
        });
    }

    /**
     * Asynchronously sets up the capture session.
     *
     * @param previewSurface the surface onto which the preview should be
     *            rendered.
     * @param listener called when setup is completed.
     */
    private void setupAsync(final Surface previewSurface, final CaptureReadyCallback listener) {
        mCameraHandler.post(new Runnable() {
            @Override
            public void run() {
                setup(previewSurface, listener);
            }
        });
    }

    /**
     * Configures and attempts to create a capture session.
     *
     * @param previewSurface the surface onto which the preview should be
     *            rendered.
     * @param listener called when the setup is completed.
     */
    private void setup(Surface previewSurface, final CaptureReadyCallback listener) {
        try {
            if (mCaptureSession != null) {
                mCaptureSession.abortCaptures();
                mCaptureSession = null;
            }
            List<Surface> outputSurfaces = new ArrayList<Surface>(2);
            outputSurfaces.add(previewSurface);
            outputSurfaces.add(mCaptureImageReader.getSurface());

            mDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigureFailed(CameraCaptureSession session) {
                    listener.onSetupFailed();
                }

                @Override
                public void onConfigured(CameraCaptureSession session) {
                    mCaptureSession = session;
                    mAFRegions = ZERO_WEIGHT_3A_REGION;
                    mAERegions = ZERO_WEIGHT_3A_REGION;
                    mZoomValue = 1f;
                    mCropRegion = cropRegionForZoom(mZoomValue);
                    boolean success = sendRepeatingCaptureRequest();
                    if (success) {
                        mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS,
                                true);
                        mReadyStateManager.notifyListeners();
                        listener.onReadyForCapture();
                    } else {
                        listener.onSetupFailed();
                    }
                }

                @Override
                public void onClosed(CameraCaptureSession session) {
                    super.onClosed(session);
                }
            }, mCameraHandler);
        } catch (CameraAccessException ex) {
            Log.e(TAG, "Could not set up capture session", ex);
            listener.onSetupFailed();
        }
    }

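    /** Applies the current AE/AF metering regions and zoom crop region to the request. */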
    private void addRegionsToCaptureRequestBuilder(CaptureRequest.Builder builder) {
        builder.set(CaptureRequest.CONTROL_AE_REGIONS, mAERegions);
        builder.set(CaptureRequest.CONTROL_AF_REGIONS, mAFRegions);
        builder.set(CaptureRequest.SCALER_CROP_REGION, mCropRegion);
    }

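    /** Sets the AE mode and flash mode on the request for the given flash setting. */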
    private void addFlashToCaptureRequestBuilder(CaptureRequest.Builder builder, Flash flashMode) {
        switch (flashMode) {
            case ON:
                builder.set(CaptureRequest.CONTROL_AE_MODE,
                        CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
                builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
                break;
            case OFF:
                builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
                builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                break;
            case AUTO:
                builder.set(CaptureRequest.CONTROL_AE_MODE,
                        CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
                break;
        }
    }

    /**
     * Request a stream of images.
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */
    private boolean sendRepeatingCaptureRequest() {
        Log.v(TAG, "sendRepeatingCaptureRequest()");
        try {
            CaptureRequest.Builder builder;
            if (ZSL_ENABLED) {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

            builder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

            builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
            builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);

            addRegionsToCaptureRequestBuilder(builder);

            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
            return true;
        } catch (CameraAccessException e) {
            if (ZSL_ENABLED) {
                Log.v(TAG, "Could not execute zero-shutter-lag repeating request.", e);
            } else {
                Log.v(TAG, "Could not execute preview request.", e);
            }
            return false;
        }
    }

    /**
     * Request a single image.
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */
    private boolean sendSingleRequest(OneCamera.PhotoCaptureParameters params) {
        Log.v(TAG, "sendSingleRequest()");
        try {
            CaptureRequest.Builder builder;
            builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);

            builder.addTarget(mPreviewSurface);

            // Always add this surface for single image capture requests.
            builder.addTarget(mCaptureImageReader.getSurface());

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

            Flash flashMode = Flash.OFF;
            addFlashToCaptureRequestBuilder(builder, flashMode);
            addRegionsToCaptureRequestBuilder(builder);

            builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

            // Tag this as a special request which should be saved.
            builder.setTag(RequestTag.EXPLICIT_CAPTURE);

            if (sCaptureImageFormat == ImageFormat.JPEG) {
                builder.set(CaptureRequest.JPEG_QUALITY, (byte) (JPEG_QUALITY));
                builder.set(CaptureRequest.JPEG_ORIENTATION,
                        CameraUtil.getJpegRotation(params.orientation, mCharacteristics));
            }

            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute single still capture request.", e);
            return false;
        }
    }

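    /**
     * Request a repeating stream of images using the video-snapshot template
     * (e.g. for burst capture).
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */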
    private boolean sendRepeatingBurstCaptureRequest() {
        Log.v(TAG, "sendRepeatingBurstCaptureRequest()");
        try {
            CaptureRequest.Builder builder;
            builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

            builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
            builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);

            addRegionsToCaptureRequestBuilder(builder);

            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not send repeating burst capture request.", e);
            return false;
        }
    }

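    /**
     * Request a single frame which triggers an auto-exposure precapture
     * sequence with the given flash mode.
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */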
    private boolean sendAutoExposureTriggerRequest(Flash flashMode) {
        Log.v(TAG, "sendAutoExposureTriggerRequest()");
        try {
            CaptureRequest.Builder builder;
            if (ZSL_ENABLED) {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

            builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);

            addRegionsToCaptureRequestBuilder(builder);
            addFlashToCaptureRequestBuilder(builder, flashMode);

            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);

            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute auto exposure trigger request.", e);
            return false;
        }
    }

    /**
     * Request a single frame which triggers an auto-focus scan
     * (CONTROL_AF_TRIGGER_START).
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */
    private boolean sendAutoFocusTriggerRequest() {
        Log.v(TAG, "sendAutoFocusTriggerRequest()");
        try {
            CaptureRequest.Builder builder;
            if (ZSL_ENABLED) {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);

            addRegionsToCaptureRequestBuilder(builder);

            builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);

            mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);

            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute auto focus trigger request.", e);
            return false;
        }
    }

    /**
     * Like {@link #sendRepeatingCaptureRequest()}, but with the focus held
     * constant.
     *
     * @return true if successful, false if there was an error submitting the
     *         capture request.
     */
    private boolean sendAutoFocusHoldRequest() {
        Log.v(TAG, "sendAutoFocusHoldRequest()");
        try {
            CaptureRequest.Builder builder;
            if (ZSL_ENABLED) {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
            } else {
                builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            }

            builder.addTarget(mPreviewSurface);

            if (ZSL_ENABLED) {
                builder.addTarget(mCaptureImageReader.getSurface());
            }

            builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);

            builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);

            addRegionsToCaptureRequestBuilder(builder);
            // TODO: This should fire the torch, if appropriate.

            mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);

            return true;
        } catch (CameraAccessException e) {
            Log.v(TAG, "Could not execute auto focus hold request.", e);
            return false;
        }
    }

    /**
     * Calculate the aspect ratio of the full size capture on this device.
     *
     * @param characteristics the characteristics of the camera device.
     * @return The aspect ratio, in terms of width/height of the full capture
     *         size.
     */
    private static float calculateFullSizeAspectRatio(CameraCharacteristics characteristics) {
        Rect activeArraySize =
                characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        return ((float) activeArraySize.width()) / activeArraySize.height();
    }

    /**
     * @param originalWidth the width of the original image captured from the
     *            camera
     * @param originalHeight the height of the original image captured from the
     *            camera
     * @param orientation the rotation to apply, in degrees.
     * @return The size of the final rotated image
     */
    private Size getImageSizeForOrientation(int originalWidth, int originalHeight,
            int orientation) {
        if (orientation == 0 || orientation == 180) {
            return new Size(originalWidth, originalHeight);
        } else if (orientation == 90 || orientation == 270) {
            return new Size(originalHeight, originalWidth);
        } else {
            throw new InvalidParameterException("Orientation not supported.");
        }
    }

    /**
     * Extracts the JPEG bytes from the given image, compressing it first if it
     * is not already JPEG-encoded. The image is not closed by this method.
     *
     * @param img the image from which to extract jpeg bytes or compress to
     *            jpeg.
     * @param degrees the angle to rotate the image clockwise, in degrees. Rotation is
     *            only applied to YUV images.
     * @return The bytes of the JPEG image. Newly allocated.
     */
    private byte[] acquireJpegBytes(Image img, int degrees) {
        ByteBuffer buffer;

        if (img.getFormat() == ImageFormat.JPEG) {
            Image.Plane plane0 = img.getPlanes()[0];
            buffer = plane0.getBuffer();

            byte[] imageBytes = new byte[buffer.remaining()];
            buffer.get(imageBytes);
            buffer.rewind();
            return imageBytes;
        } else if (img.getFormat() == ImageFormat.YUV_420_888) {
            buffer = mJpegByteBufferPool.acquire();
            if (buffer == null) {
                buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
            }

            int numBytes = JpegUtilNative.compressJpegFromYUV420Image(
                    new AndroidImageProxy(img), buffer, JPEG_QUALITY,
                    degrees);

            if (numBytes < 0) {
                throw new RuntimeException("Error compressing jpeg.");
            }

            buffer.limit(numBytes);

            byte[] imageBytes = new byte[buffer.remaining()];
            buffer.get(imageBytes);

            buffer.clear();
            mJpegByteBufferPool.release(buffer);

            return imageBytes;
        } else {
            throw new RuntimeException("Unsupported image format.");
        }
    }

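    /**
     * Runs a tap-to-focus AF cycle: trigger an AF scan, hold the resulting
     * focus, then resume continuous AF after a delay.
     */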
    private void startAFCycle() {
        // Clean up any existing AF cycle's pending callbacks.
        mCameraHandler.removeCallbacksAndMessages(FOCUS_RESUME_CALLBACK_TOKEN);

        // Send a single CONTROL_AF_TRIGGER_START capture request.
        sendAutoFocusTriggerRequest();

        // Immediately send a request for a regular preview stream, but with
        // CONTROL_AF_MODE_AUTO set so that the focus remains constant after the
        // AF cycle completes.
        sendAutoFocusHoldRequest();

        // Waits Settings3A.getFocusHoldMillis() milliseconds before sending
        // a request for a regular preview stream to resume.
        mCameraHandler.postAtTime(new Runnable() {
            @Override
            public void run() {
                mAERegions = ZERO_WEIGHT_3A_REGION;
                mAFRegions = ZERO_WEIGHT_3A_REGION;
                sendRepeatingCaptureRequest();
            }
        }, FOCUS_RESUME_CALLBACK_TOKEN,
                SystemClock.uptimeMillis() + Settings3A.getFocusHoldMillis());
    }

    /**
     * @see com.android.camera.one.OneCamera#triggerFocusAndMeterAtPoint(float,
     *      float)
     */
    @Override
    public void triggerFocusAndMeterAtPoint(float nx, float ny) {
        int sensorOrientation = mCharacteristics.get(
            CameraCharacteristics.SENSOR_ORIENTATION);
        mAERegions = AutoFocusHelper.aeRegionsForNormalizedCoord(nx, ny, mCropRegion, sensorOrientation);
        mAFRegions = AutoFocusHelper.afRegionsForNormalizedCoord(nx, ny, mCropRegion, sensorOrientation);

        startAFCycle();
    }

    @Override
    public Size pickPreviewSize(Size pictureSize, Context context) {
        if (pictureSize == null) {
            // TODO The default should be selected by the caller, and
            // pictureSize should never be null.
            pictureSize = getDefaultPictureSize();
        }
        float pictureAspectRatio = pictureSize.getWidth() / (float) pictureSize.getHeight();
        return CaptureModuleUtil.getOptimalPreviewSize(getSupportedPreviewSizes(),
              pictureAspectRatio);
    }

    @Override
    public float getMaxZoom() {
        return mCharacteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
    }

    @Override
    public void setZoom(float zoom) {
        mZoomValue = zoom;
        mCropRegion = cropRegionForZoom(zoom);
        sendRepeatingCaptureRequest();
    }

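    /** Computes the sensor crop region corresponding to the given zoom value. */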
    private Rect cropRegionForZoom(float zoom) {
        return AutoFocusHelper.cropRegionForZoom(mCharacteristics, zoom);
    }
}