/*
* Copyright (C) 2014 The Android Open Source Project
*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
*
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
*/
package com.android.camera.one.v2;
import com.android.camera.util.Size;
import java.nio.ByteBuffer;
+import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingQueue;
private static final Tag TAG = new Tag("OneCameraZslImpl2");
/** Default JPEG encoding quality. */
- private static final int JPEG_QUALITY = CameraProfile.getJpegEncodingQualityParameter(
- CameraProfile.QUALITY_HIGH);
+ private static final int JPEG_QUALITY =
+ CameraProfile.getJpegEncodingQualityParameter(CameraProfile.QUALITY_HIGH);
/**
- * The maximum number of images to store in the full-size ZSL ring buffer.
+ * The maximum number of images to store in the full-size ZSL ring buffer.
* <br>
* TODO: Determine this number dynamically based on available memory and the
* size of frames.
private final ThreadPoolExecutor mImageSaverThreadPool;
/** Pool of native byte buffers on which to store jpeg-encoded images. */
- private final Pools.SynchronizedPool<ByteBuffer> mJpegByteBufferPool = new
- Pools.SynchronizedPool<ByteBuffer>(64);
+ private final Pools.SynchronizedPool<ByteBuffer> mJpegByteBufferPool =
+ new Pools.SynchronizedPool<ByteBuffer>(64);
/** Current zoom value. 1.0 is no zoom. */
private float mZoomValue = 1f;
* details of how this is managed.
*/
private static enum ReadyStateRequirement {
- CAPTURE_MANAGER_READY,
- CAPTURE_NOT_IN_PROGRESS
+ CAPTURE_MANAGER_READY, CAPTURE_NOT_IN_PROGRESS
}
/**
private final PhotoCaptureParameters mParams;
private final CaptureSession mSession;
- public ImageCaptureTask(PhotoCaptureParameters parameters,
- CaptureSession session) {
+ public ImageCaptureTask(PhotoCaptureParameters parameters, CaptureSession session) {
mParams = parameters;
mSession = session;
}
@Override
- public void onImageCaptured(Image image, TotalCaptureResult
- captureResult) {
+ public void onImageCaptured(Image image, TotalCaptureResult captureResult) {
long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
// We should only capture the image if it's more recent than the
}
}
- mReadyStateManager.setInput(
- ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);
+ mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);
mSession.startEmpty();
savePicture(image, mParams, mSession);
mImageSaverThreadPool = new ThreadPoolExecutor(numEncodingCores, numEncodingCores, 10,
TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>());
- mCaptureManager = new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
- mImageSaverThreadPool);
+ mCaptureManager =
+ new ImageCaptureManager(MAX_CAPTURE_IMAGES, mCameraListenerHandler,
+ mImageSaverThreadPool);
mCaptureManager.setCaptureReadyListener(new ImageCaptureManager.CaptureReadyListener() {
@Override
public void onReadyStateChange(boolean capturePossible) {
@Override
public void onImageMetadataChange(Key<?> key, Object oldValue, Object newValue,
CaptureResult result) {
- mFocusStateListener.onFocusStatusUpdate(
- AutoFocusHelper.stateFromCamera2State(
- result.get(CaptureResult.CONTROL_AF_STATE)));
+ FocusStateListener listener = mFocusStateListener;
+ if (listener != null) {
+ listener.onFocusStatusUpdate(
+ AutoFocusHelper.stateFromCamera2State(
+ result.get(CaptureResult.CONTROL_AF_STATE)));
+ }
}
});
* @return The largest supported picture size.
*/
public Size getDefaultPictureSize() {
- StreamConfigurationMap configs = mCharacteristics.get(
- CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ StreamConfigurationMap configs =
+ mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] supportedSizes = configs.getOutputSizes(sCaptureImageFormat);
// Find the largest supported size.
android.util.Size largestSupportedSize = supportedSizes[0];
- long largestSupportedSizePixels = largestSupportedSize.getWidth()
- * largestSupportedSize.getHeight();
+ long largestSupportedSizePixels =
+ largestSupportedSize.getWidth() * largestSupportedSize.getHeight();
for (int i = 0; i < supportedSizes.length; i++) {
long numPixels = supportedSizes[i].getWidth() * supportedSizes[i].getHeight();
if (numPixels > largestSupportedSizePixels) {
}
}
- return new Size(largestSupportedSize.getWidth(),
- largestSupportedSize.getHeight());
+ return new Size(largestSupportedSize.getWidth(), largestSupportedSize.getHeight());
}
-
private void onShutterInvokeUI(final PhotoCaptureParameters params) {
// Tell CaptureModule shutter has occurred so it can flash the screen.
params.callback.onQuickExpose();
public void takePicture(final PhotoCaptureParameters params, final CaptureSession session) {
params.checkSanity();
- mReadyStateManager.setInput(
- ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);
+ mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, false);
boolean useZSL = ZSL_ENABLED;
// We will only capture images from the zsl ring-buffer which satisfy
// this constraint.
- ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints = new ArrayList<
- ImageCaptureManager.CapturedImageConstraint>();
+ ArrayList<ImageCaptureManager.CapturedImageConstraint> zslConstraints =
+ new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
zslConstraints.add(new ImageCaptureManager.CapturedImageConstraint() {
@Override
public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
});
// This constraint lets us capture images which have been explicitly
// requested. See {@link RequestTag.EXPLICIT_CAPTURE}.
- ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint = new ArrayList<
- ImageCaptureManager.CapturedImageConstraint>();
+ ArrayList<ImageCaptureManager.CapturedImageConstraint> singleCaptureConstraint =
+ new ArrayList<ImageCaptureManager.CapturedImageConstraint>();
singleCaptureConstraint.add(new ImageCaptureManager.CapturedImageConstraint() {
@Override
public boolean satisfiesConstraint(TotalCaptureResult captureResult) {
new MetadataChangeListener() {
@Override
public void onImageMetadataChange(Key<?> key, Object oldValue,
- Object newValue, CaptureResult result) {
+ Object newValue,
+ CaptureResult result) {
Log.v(TAG, "AE State Changed");
- if (oldValue.equals(
- Integer.valueOf(
- CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
+ if (oldValue.equals(Integer.valueOf(
+ CaptureResult.CONTROL_AE_STATE_PRECAPTURE))) {
mCaptureManager.removeMetadataChangeListener(key, this);
sendSingleRequest(params);
- // TODO: Delay this until onCaptureStarted().
+ // TODO: Delay this until
+ // onCaptureStarted().
onShutterInvokeUI(params);
}
}
@Override
public Size[] getSupportedSizes() {
- StreamConfigurationMap config = mCharacteristics
- .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ StreamConfigurationMap config =
+ mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
return Size.convert(config.getOutputSizes(sCaptureImageFormat));
}
CaptureSession session) {
int heading = captureParams.heading;
- int width = image.getWidth();
- int height = image.getHeight();
- int rotation = 0;
+ int degrees = (captureParams.orientation + 270) % 360;
ExifInterface exif = null;
exif = new ExifInterface();
// TODO: Add more exif tags here.
- exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, width));
- exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, height));
+ Size size = getImageSizeForOrientation(image.getWidth(), image.getHeight(),
+ degrees);
- // TODO: Handle rotation correctly.
+ exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_X_DIMENSION, size.getWidth()));
+ exif.setTag(exif.buildTag(ExifInterface.TAG_PIXEL_Y_DIMENSION, size.getHeight()));
+
+ exif.setTag(
+ exif.buildTag(ExifInterface.TAG_ORIENTATION, ExifInterface.Orientation.TOP_LEFT));
// Set GPS heading direction based on sensor, if location is on.
if (heading >= 0) {
- ExifTag directionRefTag = exif.buildTag(
- ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
+ ExifTag directionRefTag = exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
- ExifTag directionTag = exif.buildTag(
- ExifInterface.TAG_GPS_IMG_DIRECTION,
- new Rational(heading, 1));
+ ExifTag directionTag =
+ exif.buildTag(ExifInterface.TAG_GPS_IMG_DIRECTION, new Rational(heading, 1));
exif.setTag(directionRefTag);
exif.setTag(directionTag);
}
-
- session.saveAndFinish(acquireJpegBytes(image), width, height, rotation, exif,
- new OnMediaSavedListener() {
- @Override
+ // TODO Find out why this is off by -90 degrees.
+ session.saveAndFinish(acquireJpegBytes(image, degrees),
+ size.getWidth(), size.getHeight(), 0, exif, new OnMediaSavedListener() {
+ @Override
public void onMediaSaved(Uri uri) {
captureParams.callback.onPictureSaved(uri);
}
builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
break;
case OFF:
- builder.set(CaptureRequest.CONTROL_AE_MODE,
- CaptureRequest.CONTROL_AE_MODE_ON);
+ builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
break;
case AUTO:
try {
CaptureRequest.Builder builder;
if (ZSL_ENABLED) {
- builder = mDevice.
- createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+ builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
} else {
builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
}
addRegionsToCaptureRequestBuilder(builder);
- mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager,
- mCameraHandler);
+ mCaptureSession.setRepeatingRequest(builder.build(), mCaptureManager, mCameraHandler);
return true;
} catch (CameraAccessException e) {
if (ZSL_ENABLED) {
CameraUtil.getJpegRotation(params.orientation, mCharacteristics));
}
- mCaptureSession.capture(builder.build(), mCaptureManager,
- mCameraHandler);
+ mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
return true;
} catch (CameraAccessException e) {
Log.v(TAG, "Could not execute single still capture request.", e);
try {
CaptureRequest.Builder builder;
if (ZSL_ENABLED) {
- builder = mDevice.
- createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+ builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
} else {
builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
}
addRegionsToCaptureRequestBuilder(builder);
addFlashToCaptureRequestBuilder(builder, flashMode);
- mCaptureSession.capture(builder.build(), mCaptureManager,
- mCameraHandler);
+ mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
return true;
} catch (CameraAccessException e) {
try {
CaptureRequest.Builder builder;
if (ZSL_ENABLED) {
- builder = mDevice.
- createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+ builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
} else {
builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
}
builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_START);
- mCaptureSession.capture(builder.build(), mCaptureManager,
- mCameraHandler);
+ mCaptureSession.capture(builder.build(), mCaptureManager, mCameraHandler);
return true;
} catch (CameraAccessException e) {
try {
CaptureRequest.Builder builder;
if (ZSL_ENABLED) {
- builder = mDevice.
- createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+ builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
} else {
builder = mDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
}
* size.
*/
private static double calculateFullSizeAspectRatio(CameraCharacteristics characteristics) {
- Rect activeArraySize =
- characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ Rect activeArraySize = characteristics.get(
+ CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
return (double) activeArraySize.width() / activeArraySize.height();
}
    /**
+     * Computes the dimensions of an image after rotating it by the given
+     * multiple of 90 degrees (width and height swap for 90/270).
+     *
+     * @param originalWidth the width of the original image captured from the
+     *            camera
+     * @param originalHeight the height of the original image captured from the
+     *            camera
+     * @param orientation the rotation to apply, in degrees; must be one of 0,
+     *            90, 180, or 270.
+     * @return The size of the final rotated image
+     * @throws InvalidParameterException if orientation is not a multiple of 90
+     *             in [0, 270].
+     */
+    private Size getImageSizeForOrientation(int originalWidth, int originalHeight,
+            int orientation) {
+        if (orientation == 0 || orientation == 180) {
+            return new Size(originalWidth, originalHeight);
+        } else if (orientation == 90 || orientation == 270) {
+            return new Size(originalHeight, originalWidth);
+        } else {
+            // Only quarter-turn rotations are produced by the capture path.
+            throw new InvalidParameterException("Orientation not supported.");
+        }
+    }
+
+ /**
* Given an image reader, extracts the JPEG image bytes and then closes the
* reader.
*
* @param img the image from which to extract jpeg bytes or compress to
* jpeg.
+ * @param degrees the angle to rotate the image, in degrees. Rotation is
+ * only applied to YUV images.
* @return The bytes of the JPEG image. Newly allocated.
*/
- private byte[] acquireJpegBytes(Image img) {
+ private byte[] acquireJpegBytes(Image img, int degrees) {
ByteBuffer buffer;
if (img.getFormat() == ImageFormat.JPEG) {
buffer = ByteBuffer.allocateDirect(img.getWidth() * img.getHeight() * 3);
}
- int numBytes = JpegUtilNative.compressJpegFromYUV420Image(img, buffer, JPEG_QUALITY);
+ int numBytes = JpegUtilNative.compressJpegFromYUV420Image(img, buffer, JPEG_QUALITY,
+ degrees);
if (numBytes < 0) {
throw new RuntimeException("Error compressing jpeg.");
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
package com.android.camera.util;
+import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.media.Image;
import android.media.Image.Plane;
* Compresses an image from YUV422 format to jpeg.
*
* @param yBuf the buffer containing the Y component of the image
- * @param yPStride the stride between adjacent pixels in the same row in yBuf
+ * @param yPStride the stride between adjacent pixels in the same row in
+ * yBuf
* @param yRStride the stride between adjacent rows in yBuf
* @param cbBuf the buffer containing the Cb component of the image
- * @param cbPStride the stride between adjacent pixels in the same row in cbBuf
+ * @param cbPStride the stride between adjacent pixels in the same row in
+ * cbBuf
* @param cbRStride the stride between adjacent rows in cbBuf
* @param crBuf the buffer containing the Cr component of the image
- * @param crPStride the stride between adjacent pixels in the same row in crBuf
+ * @param crPStride the stride between adjacent pixels in the same row in
+ * crBuf
* @param crRStride the stride between adjacent rows in crBuf
* @param quality the quality level (0 to 100) to use
- * @return The number of bytes written, or a negative value indicating an error
+ * @return The number of bytes written, or a negative value indicating an
+ * error
*/
private static native int compressJpegFromYUV420pNative(
int width, int height,
Object outBuf, int outBufCapacity, int quality);
/**
- * @see JpegUtilNative#compressJpegFromYUV420pNative(int, int, java.lang.Object, int, int,
- * java.lang.Object, int, int, java.lang.Object, int, int, java.lang.Object, int, int)
+ * Copies the Image.Plane specified by planeBuf, pStride, and rStride to the
+ * Bitmap.
+ *
+ * @param width the width of the image
+ * @param height the height of the image
+ * @param planeBuf the native ByteBuffer containing the image plane data
+ * @param pStride the stride between adjacent pixels in the same row of
+ * planeBuf
+     * @param rStride the stride between adjacent rows in planeBuf
+     * @param outBitmap the ALPHA_8 Bitmap that receives the copied plane data
+     * @param rot90 the number of clockwise 90-degree rotations to apply
+     */
+ private static native void copyImagePlaneToBitmap(int width, int height, Object planeBuf,
+ int pStride, int rStride, Object outBitmap, int rot90);
+
+    /**
+     * Copies a single {@link Image.Plane} into the given bitmap, rotating by
+     * rot90 quarter-turns via the native helper.
+     *
+     * @param plane the image plane to copy; presumably its buffer must be
+     *            direct for the native copy — TODO confirm.
+     * @param bitmap the destination bitmap; must use Bitmap.Config.ALPHA_8
+     *            (one byte per pixel) and be sized for the rotated plane.
+     * @param rot90 the number of clockwise 90-degree rotations to apply.
+     */
+    public static void copyImagePlaneToBitmap(Image.Plane plane, Bitmap bitmap, int rot90) {
+        if (bitmap.getConfig() != Bitmap.Config.ALPHA_8) {
+            throw new RuntimeException("Unsupported bitmap format");
+        }
+
+        int width = bitmap.getWidth();
+        int height = bitmap.getHeight();
+
+        copyImagePlaneToBitmap(width, height, plane.getBuffer(), plane.getPixelStride(),
+                plane.getRowStride(), bitmap, rot90);
+    }
+
+ /**
+ * @see JpegUtilNative#compressJpegFromYUV420pNative(int, int,
+ * java.lang.Object, int, int, java.lang.Object, int, int,
+ * java.lang.Object, int, int, java.lang.Object, int, int)
*/
public static int compressJpegFromYUV420p(
int width, int height,
ByteBuffer crBuf, int crPStride, int crRStride,
ByteBuffer outBuf, int quality) {
return compressJpegFromYUV420pNative(width, height, yBuf, yPStride, yRStride, cbBuf,
- cbPStride, cbRStride, crBuf, crPStride, crRStride, outBuf, outBuf.capacity(), quality);
+ cbPStride, cbRStride, crBuf, crPStride, crRStride, outBuf, outBuf.capacity(),
+ quality);
}
/**
- * Compresses the given image to jpeg. Note that only ImageFormat.YUV_420_888 is currently
- * supported. Furthermore, all planes must use direct byte buffers.
+ * Compresses the given image to jpeg. Note that only
+ * ImageFormat.YUV_420_888 is currently supported. Furthermore, all planes
+ * must use direct byte buffers.
*
* @param img the image to compress
* @param outBuf a direct byte buffer to hold the output jpeg.
+ * @param quality the jpeg encoder quality (0 to 100)
* @return The number of bytes written to outBuf
*/
public static int compressJpegFromYUV420Image(Image img, ByteBuffer outBuf, int quality) {
return numBytesWritten;
}
+
+ /**
+ * Compresses the given image to jpeg. Note that only
+ * ImageFormat.YUV_420_888 is currently supported. Furthermore, all planes
+ * must use direct byte buffers.<br>
+ * FIXME TODO OPTIMIZE This method is *incredibly* inefficient.
+ *
+ * @param img the image to compress
+ * @param outBuf a direct byte buffer to hold the output jpeg.
+ * @param quality the jpeg encoder quality (0 to 100)
+ * @param rotation the amount to rotate the image clockwise, in degrees.
+ * @return The number of bytes written to outBuf
+ */
+ public static int compressJpegFromYUV420Image(Image img, ByteBuffer outBuf, int quality,
+ int degrees) {
+ if (degrees != 0 && degrees != 90 && degrees != 180 && degrees != 270) {
+ throw new RuntimeException("Unsupported rotation angle");
+ }
+
+ if (degrees == 0) {
+ return compressJpegFromYUV420Image(img, outBuf, quality);
+ }
+
+ if (img.getFormat() != ImageFormat.YUV_420_888) {
+ throw new RuntimeException("Unsupported Image Format.");
+ }
+
+ final int NUM_PLANES = 3;
+
+ if (img.getPlanes().length != NUM_PLANES) {
+ throw new RuntimeException("Output buffer must be direct.");
+ }
+
+ if (!outBuf.isDirect()) {
+ throw new RuntimeException("Output buffer must be direct.");
+ }
+
+ ByteBuffer[] planeBuf = new ByteBuffer[NUM_PLANES];
+ int[] pixelStride = new int[NUM_PLANES];
+ int[] rowStride = new int[NUM_PLANES];
+
+ for (int i = 0; i < NUM_PLANES; i++) {
+ Plane plane = img.getPlanes()[i];
+
+ if (!plane.getBuffer().isDirect()) {
+ return -1;
+ }
+
+ int width = img.getWidth();
+ int height = img.getHeight();
+
+ if (i > 0) {
+ // The image plane for the Cb and Cr channels is downsampled.
+ width /= 2;
+ height /= 2;
+ }
+
+ if (degrees == 90 || degrees == 270) {
+ int tmp = width;
+ width = height;
+ height = tmp;
+ }
+
+ Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ALPHA_8);
+
+ copyImagePlaneToBitmap(plane, bitmap, degrees / 90);
+
+ Bitmap rotatedBitmap = bitmap;
+
+ ByteBuffer rotatedBitmapBuffer = ByteBuffer.allocateDirect(
+ rotatedBitmap.getWidth() * rotatedBitmap.getHeight());
+
+ rotatedBitmap.copyPixelsToBuffer(rotatedBitmapBuffer);
+
+ planeBuf[i] = rotatedBitmapBuffer;
+ pixelStride[i] = 1;
+ rowStride[i] = rotatedBitmap.getWidth();
+ }
+
+ outBuf.clear();
+
+ int width = img.getWidth();
+ int height = img.getHeight();
+ if (degrees == 90 || degrees == 270) {
+ int tmp = width;
+ width = height;
+ height = tmp;
+ }
+
+ int numBytesWritten = compressJpegFromYUV420p(
+ width, height,
+ planeBuf[0], pixelStride[0], rowStride[0],
+ planeBuf[1], pixelStride[1], rowStride[1],
+ planeBuf[2], pixelStride[2], rowStride[2],
+ outBuf, quality);
+
+ outBuf.limit(numBytesWritten);
+
+ return numBytesWritten;
+ }
}