package com.android.camera.one.v2.photo;
import android.graphics.Bitmap;
+import android.graphics.Matrix;
import android.net.Uri;
import com.android.camera.app.OrientationManager;
final ImageProcessorListener previewListener = new ImageProcessorListener() {
@Override
- public void onStart(TaskImageContainer.TaskInfo task) {
- // Start Animation
- if (task.result.format
- == TaskImageContainer.TaskImage.EXTRA_USER_DEFINED_FORMAT_ARGB_8888) {
- pictureSaverCallback.onThumbnailProcessingBegun();
+ public synchronized void onStart(TaskImageContainer.TaskInfo task) {
+ switch (task.destination) {
+ case FAST_THUMBNAIL:
+ // Start Animation
+ if (task.result.format
+ == TaskImageContainer.TaskImage.EXTRA_USER_DEFINED_FORMAT_ARGB_8888) {
+ pictureSaverCallback.onThumbnailProcessingBegun();
+ }
+ break;
+ case INTERMEDIATE_THUMBNAIL:
+ // Do nothing
+ break;
}
}
@Override
public void onResultUncompressed(TaskImageContainer.TaskInfo task,
TaskImageContainer.UncompressedPayload payload) {
- final Bitmap bitmap = Bitmap.createBitmap(payload.data,
- task.result.width,
- task.result.height, Bitmap.Config.ARGB_8888);
- pictureSaverCallback.onThumbnailAvailable(bitmap, imageRotation.getDegrees());
-
- mExecutor.execute(new Runnable() {
- @Override
- public void run() {
- // TODO: Finalize and I18N string.
- session.startSession(bitmap, "Saving image ...");
- session.setProgress(42);
- }
- });
-
- // Remove yourself from the listener
- listenerProxy.unregisterListener(this);
+ // Load bitmap into CameraAppUI
+ switch (task.destination) {
+ case FAST_THUMBNAIL:
+ final Bitmap bitmap = Bitmap.createBitmap(payload.data,
+ task.result.width,
+ task.result.height, Bitmap.Config.ARGB_8888);
+ pictureSaverCallback.onThumbnailAvailable(bitmap,
+ imageRotation.getDegrees());
+ break;
+ case INTERMEDIATE_THUMBNAIL:
+ final Bitmap bitmapIntermediate = Bitmap.createBitmap(payload.data,
+ task.result.width,
+ task.result.height, Bitmap.Config.ARGB_8888);
+ Matrix matrix = new Matrix();
+ matrix.postRotate(imageRotation.getDegrees());
+ final Bitmap bitmapIntermediateRotated = Bitmap.createBitmap(
+ bitmapIntermediate, 0, 0, bitmapIntermediate.getWidth(),
+ bitmapIntermediate.getHeight(), matrix, true);
+ mExecutor.execute(new Runnable() {
+ @Override
+ public void run() {
+ // TODO: Put proper I18n string message here.
+ session.startSession(bitmapIntermediateRotated,
+ "Saving rotated image ...");
+ session.setProgress(20);
+ }
+ });
+ break;
+ }
+
}
@Override
public void onResultUri(TaskImageContainer.TaskInfo task, Uri uri) {
+ // Remove yourself from the listener after JPEG save.
+ // TODO: This should really be done by the ImageBackend to guarantee
+ // ordering, since technically this could happen out of order.
+ listenerProxy.unregisterListener(this);
}
};
import com.android.camera.debug.Log;
import com.android.camera.one.v2.camera2proxy.ImageProxy;
import com.android.camera.session.CaptureSession;
+import com.android.camera.util.Size;
import java.util.ArrayList;
import java.util.HashMap;
private final static Log.Tag TAG = new Log.Tag("ImageBackend");
+ /**
+ * Approximate viewable size (in pixels) for the fast thumbnail in the
+ * current UX definition of the product. Note that these values will be the
+ * minimum size of FAST_THUMBNAIL target for the
+ * CONVERT_IMAGE_TO_RGB_PREVIEW task.
+ */
+ private final static Size FAST_THUMBNAIL_TARGET_SIZE = new Size(160, 100);
+
+ /**
+ * A standard viewable size (in pixels) for the filmstrip thumbnail in the
+ * current UX definition of the product. Note that this size is the minimum
+ * size for the Preview on the filmstrip associated with
+ * COMPRESS_IMAGE_TO_JPEG task.
+ */
+ private final static Size FILMSTRIP_THUMBNAIL_TARGET_SIZE = new Size(512, 384);
+
// Some invariants to know that we're keeping track of everything
// that reflect the state of mImageSemaphoreMap
private int mOutstandingImageRefs = 0;
if (processingFlags.contains(ImageTaskFlags.COMPRESS_IMAGE_TO_JPEG)
|| processingFlags.contains(ImageTaskFlags.WRITE_IMAGE_TO_DISK)) {
// Add this type of task to the appropriate queue.
- tasksToExecute.add(new TaskCompressImageToJpeg(img, executor, this, session));
            // TaskCompressImageToJpeg was replaced by TaskPreviewChainedJpeg,
            // which also produces an intermediate filmstrip thumbnail.
+ tasksToExecute.add(new TaskPreviewChainedJpeg(img, executor, this, session,
+ FILMSTRIP_THUMBNAIL_TARGET_SIZE));
}
if (processingFlags.contains(ImageTaskFlags.CONVERT_IMAGE_TO_RGB_PREVIEW)) {
// Add this type of task to the appropriate queue.
- tasksToExecute.add(new TaskConvertImageToRGBPreview(img, executor, this, session, 160,
- 100));
+ tasksToExecute.add(new TaskConvertImageToRGBPreview(img, executor,
+ this, TaskImageContainer.ProcessingPriority.FAST, session,
+ FAST_THUMBNAIL_TARGET_SIZE,
+ TaskConvertImageToRGBPreview.ThumbnailShape.SQUARE_ASPECT_CIRCULAR_INSET));
}
if (processingFlags.contains(ImageTaskFlags.WRITE_IMAGE_TO_DISK)) {
*/
public TaskConvertImageToRGBPreview createTaskConvertImageToRGBPreview(
ImageToProcess image, Executor executor, ImageBackend imageBackend,
- CaptureSession session, int targetWidth, int targetHeight) {
- return new TaskConvertImageToRGBPreview(image, executor, imageBackend, session,
- targetWidth,
- targetHeight);
+ CaptureSession session, Size targetSize,
+ TaskConvertImageToRGBPreview.ThumbnailShape thumbnailShape) {
+ return new TaskConvertImageToRGBPreview(image, executor, imageBackend,
+ TaskImageContainer.ProcessingPriority.FAST, session,
+ FAST_THUMBNAIL_TARGET_SIZE, thumbnailShape);
}
public TaskCompressImageToJpeg createTaskCompressImageToJpeg(ImageToProcess image,
import java.util.List;
/**
- * Implements the ability for the object to send events to multiple listeners in a thread-safe
- * manner. Also, listeners can also filter messages based on the a specific image result. TODO:
- * Replace this object with a more generic listener classes.
 * Implements the ability for the object to send events to multiple listeners in
 * a thread-safe manner. Listeners can also filter messages based on a specific
 * image result. TODO: Replace this object with a more generic listener class.
*/
public class ImageProcessorProxyListener implements ImageProcessorListener {
private HashMap<ImageProcessorListener, Long> mImageFilter = null;
/**
 * Wrapper for the log to avoid direct references to Android Log objects that will
 * crash unit tests. Subclasses may override this method for debugging.
 *
 * @param message the text to log; ignored by this default no-op implementation
 */
protected void logWrapper(String message) {
    // Do Nothing
}
+
ImageProcessorProxyListener() {
mRegisteredListeners = new ArrayList<ImageProcessorListener>();
mImageFilter = new HashMap<ImageProcessorListener, Long>();
if (mRegisteredListeners.contains(listener)) {
mRegisteredListeners.remove(listener);
mImageFilter.remove(listener);
- Log.e(TAG, "There are " + mRegisteredListeners.size() + " listeners after removal");
+ logWrapper("There are " + mRegisteredListeners.size() + " listeners after removal");
} else {
- Log.e(TAG, "Couldn't find listener. There are " + mRegisteredListeners.size()
+ logWrapper("Couldn't find listener. There are " + mRegisteredListeners.size()
+ " listeners after removal");
}
}
final TaskImage resultImage = new TaskImage(mImage.rotation, img.proxy.getWidth(),
img.proxy.getHeight(), ImageFormat.JPEG);
- onStart(mId, inputImage, resultImage);
+ onStart(mId, inputImage, resultImage, TaskInfo.Destination.FINAL_IMAGE);
int[] strides = new int[3];
// Do the byte copy
// Image is closed by now. Do NOT reference image directly.
byte[] compressedData = convertNv21toJpeg(chainedDataCopy,
resultImage.height, resultImage.width, chainedStrides);
- onJpegEncodeDone(mId, inputImage, resultImage, compressedData);
+ onJpegEncodeDone(mId, inputImage, resultImage, compressedData,
+ TaskInfo.Destination.FINAL_IMAGE);
logWrapper("Finished off a chained task now that image is released.");
}
};
DeviceOrientation.CLOCKWISE_0, resultSize.getWidth(), resultSize.getHeight(),
ImageFormat.JPEG);
- onStart(mId, inputImage, resultImage);
+ onStart(mId, inputImage, resultImage, TaskInfo.Destination.FINAL_IMAGE);
logWrapper("TIMER_END Full-size YUV buffer available, w=" + img.proxy.getWidth() + " h="
+ img.proxy.getHeight() + " of format " + img.proxy.getFormat()
+ " (35==YUV_420_888)");
// Release the image now that you have a usable copy
mImageTaskManager.releaseSemaphoreReference(img, mExecutor);
- onJpegEncodeDone(mId, inputImage, resultImage, writeOut);
+ onJpegEncodeDone(mId, inputImage, resultImage, writeOut, TaskInfo.Destination.FINAL_IMAGE);
// TODO: the app actually crashes here on a race condition: TaskCompressImageToJpeg might
// complete before TaskConvertImageToRGBPreview.
new MediaSaver.OnMediaSavedListener() {
@Override
public void onMediaSaved(Uri uri) {
- onUriResolved(mId, inputImage, resultImage, uri);
+ onUriResolved(mId, inputImage, resultImage, uri,
+ TaskInfo.Destination.FINAL_IMAGE);
}
});
}
import com.android.camera.debug.Log;
import com.android.camera.one.v2.camera2proxy.ImageProxy;
import com.android.camera.session.CaptureSession;
+import com.android.camera.util.Size;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.Executor;
/**
- * Implements the conversion of a YUV_420_888 image to subsampled image
- * inscribed in a circle.
+ * Implements the conversion of a YUV_420_888 image to subsampled image targeted
+ * toward a given resolution. The task automatically calculates the largest
+ * integer sub-sample factor that is greater than the target resolution. There
+ * are four different thumbnail types:
+ * <ol>
+ * <li>DEBUG_SQUARE_ASPECT_CIRCULAR_INSET: a center-weighted circularly cropped
+ * gradient image</li>
+ * <li>SQUARE_ASPECT_CIRCULAR_INSET: a center-weighted circularly cropped
+ * sub-sampled image</li>
+ * <li>SQUARE_ASPECT_NO_INSET: a center-weighted square cropped sub-sampled
+ * image</li>
+ * <li>MAINTAIN_ASPECT_NO_INSET: a sub-sampled image without cropping (except to
+ * maintain even values of width and height for the image</li>
+ * </ol>
*/
public class TaskConvertImageToRGBPreview extends TaskImageContainer {
/** Output shapes a preview-conversion task can produce. */
public enum ThumbnailShape {
    /** Synthetic gradient with circular crop; debug/testing only. */
    DEBUG_SQUARE_ASPECT_CIRCULAR_INSET,
    /** Center-weighted, circularly cropped sub-sampled image. */
    SQUARE_ASPECT_CIRCULAR_INSET,
    /** Center-weighted, square-cropped sub-sampled image. */
    SQUARE_ASPECT_NO_INSET,
    /** Sub-sampled image keeping the source aspect ratio (no crop). */
    MAINTAIN_ASPECT_NO_INSET,
}
+
// 24 bit-vector to be written for images that are out of bounds.
public final static int OUT_OF_BOUNDS_COLOR = 0x00000000;
/**
* Quick n' Dirty YUV to RGB conversion
* <ol>
- * <li> R = Y + 1.402V' </li>
- * <li> G = Y - 0.344U'- 0.714V' </li>
- * <li> B = Y + 1.770U' </li>
+ * <li>R = Y + 1.402V'</li>
+ * <li>G = Y - 0.344U'- 0.714V'</li>
+ * <li>B = Y + 1.770U'</li>
* </ol>
- * to be calculated at compile time.
+ * to be calculated at compile time.
*/
- public final static int SHIFTED_BIT_APPROXIMATION = 8 ;
- public final static double SHIFTED_BITS_AS_VALUE = (double) (1 << SHIFTED_BIT_APPROXIMATION);
- public final static int V_FACTOR_FOR_R = (int) ( 1.402 * SHIFTED_BITS_AS_VALUE);
- public final static int U_FACTOR_FOR_G = (int) ( -0.344 * SHIFTED_BITS_AS_VALUE);
- public final static int V_FACTOR_FOR_G = (int) ( -0.714 * SHIFTED_BITS_AS_VALUE);
- public final static int U_FACTOR_FOR_B = (int) ( 1.772 * SHIFTED_BITS_AS_VALUE);
+ public final static int SHIFT_APPROXIMATION = 8;
+ public final static double SHIFTED_BITS_AS_VALUE = (double) (1 << SHIFT_APPROXIMATION);
+ public final static int V_FACTOR_FOR_R = (int) (1.402 * SHIFTED_BITS_AS_VALUE);
+ public final static int U_FACTOR_FOR_G = (int) (-0.344 * SHIFTED_BITS_AS_VALUE);
+ public final static int V_FACTOR_FOR_G = (int) (-0.714 * SHIFTED_BITS_AS_VALUE);
+ public final static int U_FACTOR_FOR_B = (int) (1.772 * SHIFTED_BITS_AS_VALUE);
protected final static Log.Tag TAG = new Log.Tag("TaskRGBPreview");
- private int mTargetHeight;
- private int mTargetWidth;
+ protected final ThumbnailShape mThumbnailShape;
+
+ protected Size mTargetSize;
/**
* Constructor
* @param imageTaskManager Image task manager that allows reference counting
* and task spawning
* @param captureSession Capture session that bound to this image
- * @param targetWidth Approximate viewable pixel height of the desired
+ * @param targetSize Approximate viewable pixel dimensions of the desired
* preview Image (Resultant image may NOT be of this width)
- * @param targetHeight Approximate viewable pixel width of the desired
- * preview Image (Resulant image may NOT be of this height)
+ * @param thumbnailShape the desired thumbnail shape for resultant image
+ * artifact
*/
TaskConvertImageToRGBPreview(ImageToProcess image, Executor executor,
        ImageTaskManager imageTaskManager, ProcessingPriority processingPriority,
        CaptureSession captureSession, Size targetSize, ThumbnailShape thumbnailShape) {
    super(image, executor, imageTaskManager, processingPriority, captureSession);
    // Target size is approximate: run() picks the best integer subsample,
    // so the produced image may not match these exact dimensions.
    mTargetSize = targetSize;
    mThumbnailShape = thumbnailShape;
}
public void logWrapper(String message) {
* convert raw images into the lowest resolution raw images in visually
* lossless manner without changing the aspect ratio or creating subsample
* artifacts.
- *
- * @param height height of the input image
- * @param width width of the input image
- * @param targetWidth width of the image as it will be seen on the screen in
- * raw pixels
- * @param targetHeight height of the image as it will be seen on the screen
- * in raw pixels
+ * @param imageSize Dimensions of the original image
+ * @param targetSize Target dimensions of the resultant image
* @return inscribed image as ARGB_8888
*/
- protected int calculateBestSubsampleFactor(int height, int width, int targetWidth,
- int targetHeight) {
- int maxSubsample = Math.min(height / targetHeight, width / targetWidth);
+ protected int calculateBestSubsampleFactor(Size imageSize, Size targetSize) {
+ int maxSubsample = Math.min( imageSize.getWidth()/ targetSize.getWidth(),
+ imageSize.getHeight() / targetSize.getHeight());
if (maxSubsample < 1) {
return 1;
}
// account
// for chroma subsampled images such as YUV
for (int i = maxSubsample; i >= 1; i--) {
- if (((height % (2 * i) == 0) && (width % (2 * i) == 0))) {
+ if (((imageSize.getWidth() % (2 * i) == 0)
+ && (imageSize.getHeight() % (2 * i) == 0))) {
return i;
}
}
protected int[] colorInscribedDataCircleFromYuvImage(ImageProxy img, int subsample) {
final List<ImageProxy.Plane> planeList = img.getPlanes();
if (planeList.size() != 3) {
- throw new IllegalArgumentException("Incorrect number planes ("+planeList.size()+") in YUV Image Object");
+ throw new IllegalArgumentException("Incorrect number planes (" + planeList.size()
+ + ") in YUV Image Object");
}
int w = img.getWidth() / subsample;
int[] colors = new int[len];
int alpha = 255 << 24;
-
- logWrapper("TIMER_BEGIN Starting Native Java YUV420-to-RGB Quick n' Dirty Conversion 4");
+ logWrapper("TIMER_BEGIN Starting Native Java YUV420-to-RGB Circular Conversion");
logWrapper("\t Y-Plane Size=" + w + "x" + h);
logWrapper("\t U-Plane Size=" + planeList.get(1).getRowStride() + " Pixel Stride="
+ planeList.get(1).getPixelStride());
// for all pixels in the 2x2 block
int u = (int) (bufU.get(offsetU) & 255) - 128;
int v = (int) (bufV.get(offsetV) & 255) - 128;
- int redDiff = (v * V_FACTOR_FOR_R) >> SHIFTED_BIT_APPROXIMATION;
- int greenDiff = ((u * U_FACTOR_FOR_G + v * V_FACTOR_FOR_G) >> SHIFTED_BIT_APPROXIMATION);
- int blueDiff = (u * U_FACTOR_FOR_B) >> SHIFTED_BIT_APPROXIMATION;
+ int redDiff = (v * V_FACTOR_FOR_R) >> SHIFT_APPROXIMATION;
+ int greenDiff =
+ ((u * U_FACTOR_FOR_G + v * V_FACTOR_FOR_G) >> SHIFT_APPROXIMATION);
+ int blueDiff = (u * U_FACTOR_FOR_B) >> SHIFT_APPROXIMATION;
if (i > circleMax0 || i < circleMin0) {
colors[offsetColor] = OUT_OF_BOUNDS_COLOR;
}
}
- logWrapper("TIMER_END Starting Native Java YUV420-to-RGB Quick n' Dirty Conversion 4");
+ logWrapper("TIMER_END Starting Native Java YUV420-to-RGB Circular Conversion");
+
+ return colors;
+ }
+
+ /**
+ * Converts an Android Image to a subsampled image of ARGB_8888 data in a
+ * super-optimized loop unroll. Guarantees only one subsampled pass over the
+ * YUV data.
+ *
+ * @param img YUV420_888 Image to convert
+ * @param subsample width/height subsample factor
+ * @param enableSquareInscribe true, output is an cropped square output;
+ * false, output maintains aspect ratio of input image
+ * @return inscribed image as ARGB_8888
+ */
+ protected int[] colorSubSampleFromYuvImage(ImageProxy img, int subsample,
+ boolean enableSquareInscribe) {
+ final List<ImageProxy.Plane> planeList = img.getPlanes();
+ if (planeList.size() != 3) {
+ throw new IllegalArgumentException("Incorrect number planes (" + planeList.size()
+ + ") in YUV Image Object");
+ }
+
+ int outputWidth = img.getWidth() / subsample;
+ int outputHeight = img.getHeight() / subsample;
+
+ // Set up input read boundaries.
+
+ ByteBuffer bufY = planeList.get(0).getBuffer();
+ ByteBuffer bufU = planeList.get(1).getBuffer(); // Downsampled by 2
+ ByteBuffer bufV = planeList.get(2).getBuffer(); // Downsampled by 2
+ int yByteStride = planeList.get(0).getRowStride() * subsample;
+ int uByteStride = planeList.get(1).getRowStride() * subsample;
+ int vByteStride = planeList.get(2).getRowStride() * subsample;
+ int yPixelStride = planeList.get(0).getPixelStride() * subsample;
+ int uPixelStride = planeList.get(1).getPixelStride() * subsample;
+ int vPixelStride = planeList.get(2).getPixelStride() * subsample;
+ int outputPixelStride = outputWidth;
+
+ int len = outputWidth * outputHeight;
+
+ // Set up default input read boundaries.
+ int inscribedXMin = 0;
+ int inscribedXMax = quantizeBy2(outputWidth);
+ int inscribedYMin = 0;
+ int inscribedYMax = quantizeBy2(outputHeight);
+
+ if (enableSquareInscribe) {
+ int r = inscribedCircleRadius(outputWidth, outputHeight);
+
+ if (outputWidth > outputHeight) {
+ // since we're 2x2 blocks we need to quantize these values by 2
+ inscribedXMin = quantizeBy2(outputWidth / 2 - r);
+ inscribedXMax = quantizeBy2(outputWidth / 2 + r);
+ inscribedYMin = 0;
+ inscribedYMax = outputHeight;
+ } else {
+ inscribedXMin = 0;
+ inscribedXMax = outputWidth;
+ // since we're 2x2 blocks we need to quantize these values by 2
+ inscribedYMin = quantizeBy2(outputHeight / 2 - r);
+ inscribedYMax = quantizeBy2(outputHeight / 2 + r);
+ }
+
+ len = r * r * 4;
+ outputPixelStride = r * 2;
+ }
+
+ int[] colors = new int[len];
+ int alpha = 255 << 24;
+
+ logWrapper("TIMER_BEGIN Starting Native Java YUV420-to-RGB Rectangular Conversion");
+ logWrapper("\t Y-Plane Size=" + outputWidth + "x" + outputHeight);
+ logWrapper("\t U-Plane Size=" + planeList.get(1).getRowStride() + " Pixel Stride="
+ + planeList.get(1).getPixelStride());
+ logWrapper("\t V-Plane Size=" + planeList.get(2).getRowStride() + " Pixel Stride="
+ + planeList.get(2).getPixelStride());
+ // Take in vertical lines by factor of two because of the u/v component
+ // subsample
+ for (int j = inscribedYMin; j < inscribedYMax; j += 2) {
+ int offsetY = j * yByteStride + inscribedXMin;
+ int offsetColor = (j - inscribedYMin) * (outputPixelStride);
+ int offsetU = (j / 2) * (uByteStride) + (inscribedXMin);
+ int offsetV = (j / 2) * (vByteStride) + (inscribedXMin);
+
+ // Take in horizontal lines by factor of two because of the u/v
+ // component subsample
+ // and everything as 2x2 blocks.
+ for (int i = inscribedXMin; i < inscribedXMax; i += 2, offsetY += 2 * yPixelStride,
+ offsetColor += 2, offsetU += uPixelStride, offsetV += vPixelStride) {
+ // Note i and j are in terms of pixels of the subsampled image
+ // offsetY, offsetU, and offsetV are in terms of bytes of the
+ // image
+ // offsetColor, output_pixel stride are in terms of the packed
+ // output image
+
+ // calculate the RGB component of the u/v channels and use it
+ // for all pixels in the 2x2 block
+ int u = (int) (bufU.get(offsetU) & 255) - 128;
+ int v = (int) (bufV.get(offsetV) & 255) - 128;
+ int redDiff = (v * V_FACTOR_FOR_R) >> SHIFT_APPROXIMATION;
+ int greenDiff = ((u * U_FACTOR_FOR_G + v * V_FACTOR_FOR_G) >> SHIFT_APPROXIMATION);
+ int blueDiff = (u * U_FACTOR_FOR_B) >> SHIFT_APPROXIMATION;
+
+ // Do a little alpha feathering on the edges
+ int alpha00 = (255 << 24);
+
+ int y00 = (int) (bufY.get(offsetY) & 255);
+
+ int green00 = y00 + greenDiff;
+ int blue00 = y00 + blueDiff;
+ int red00 = y00 + redDiff;
+
+ // Get the railing correct
+ if (green00 < 0) {
+ green00 = 0;
+ }
+ if (red00 < 0) {
+ red00 = 0;
+ }
+ if (blue00 < 0) {
+ blue00 = 0;
+ }
+
+ if (green00 > 255) {
+ green00 = 255;
+ }
+ if (red00 > 255) {
+ red00 = 255;
+ }
+ if (blue00 > 255) {
+ blue00 = 255;
+ }
+
+ colors[offsetColor] = (red00 & 255) << 16 | (green00 & 255) << 8
+ | (blue00 & 255) | alpha00;
+
+ int alpha01 = (255 << 24);
+ int y01 = (int) (bufY.get(offsetY + yPixelStride) & 255);
+ int green01 = y01 + greenDiff;
+ int blue01 = y01 + blueDiff;
+ int red01 = y01 + redDiff;
+
+ // Get the railing correct
+ if (green01 < 0) {
+ green01 = 0;
+ }
+ if (red01 < 0) {
+ red01 = 0;
+ }
+ if (blue01 < 0) {
+ blue01 = 0;
+ }
+
+ if (green01 > 255) {
+ green01 = 255;
+ }
+ if (red01 > 255) {
+ red01 = 255;
+ }
+ if (blue01 > 255) {
+ blue01 = 255;
+ }
+ colors[offsetColor + 1] = (red01 & 255) << 16 | (green01 & 255) << 8
+ | (blue01 & 255) | alpha01;
+
+ int alpha10 = (255 << 24);
+ int y10 = (int) (bufY.get(offsetY + yByteStride) & 255);
+ int green10 = y10 + greenDiff;
+ int blue10 = y10 + blueDiff;
+ int red10 = y10 + redDiff;
+
+ // Get the railing correct
+ if (green10 < 0) {
+ green10 = 0;
+ }
+ if (red10 < 0) {
+ red10 = 0;
+ }
+ if (blue10 < 0) {
+ blue10 = 0;
+ }
+ if (green10 > 255) {
+ green10 = 255;
+ }
+ if (red10 > 255) {
+ red10 = 255;
+ }
+ if (blue10 > 255) {
+ blue10 = 255;
+ }
+
+ colors[offsetColor + outputPixelStride] = (red10 & 255) << 16
+ | (green10 & 255) << 8 | (blue10 & 255) | alpha10;
+
+ int alpha11 = (255 << 24);
+ int y11 = (int) (bufY.get(offsetY + yByteStride + yPixelStride) & 255);
+ int green11 = y11 + greenDiff;
+ int blue11 = y11 + blueDiff;
+ int red11 = y11 + redDiff;
+
+ // Get the railing correct
+ if (green11 < 0) {
+ green11 = 0;
+ }
+ if (red11 < 0) {
+ red11 = 0;
+ }
+ if (blue11 < 0) {
+ blue11 = 0;
+ }
+
+ if (green11 > 255) {
+ green11 = 255;
+ }
+
+ if (red11 > 255) {
+ red11 = 255;
+ }
+ if (blue11 > 255) {
+ blue11 = 255;
+ }
+ colors[offsetColor + outputPixelStride + 1] = (red11 & 255) << 16
+ | (green11 & 255) << 8 | (blue11 & 255) | alpha11;
+ }
+ }
+ logWrapper("TIMER_END Starting Native Java YUV420-to-RGB Rectangular Conversion");
return colors;
}
return colors;
}
- @Override
- public void run() {
- ImageToProcess img = mImage;
-
- final TaskImage inputImage = new TaskImage(img.rotation, img.proxy.getWidth(),
+ /**
+ * Calculates the input Task Image specification an ImageProxy
+ *
+ * @param img Specified ImageToProcess
+ * @return Calculated specification
+ */
+ protected TaskImage calculateInputImage(ImageToProcess img) {
+ return new TaskImage(img.rotation, img.proxy.getWidth(),
img.proxy.getHeight(), img.proxy.getFormat());
- final int subsample = calculateBestSubsampleFactor(inputImage.width, inputImage.height,
- mTargetWidth, mTargetHeight);
+ }
+
+ /**
+ * Calculates the resultant Task Image specification, given the shape
+ * selected at the time of task construction
+ *
+ * @param img Specified image to process
+ * @param subsample Amount of subsampling to be applied
+ * @return Calculated Specification
+ */
+ protected TaskImage calculateResultImage(ImageToProcess img, int subsample) {
+ final TaskImage inputImage = calculateInputImage(img);
+ int resultWidth, resultHeight;
+
final int radius = inscribedCircleRadius(inputImage.width / subsample, inputImage.height
/ subsample);
- final TaskImage resultImage = new TaskImage(img.rotation, radius * 2, radius * 2,
+ if (mThumbnailShape == ThumbnailShape.MAINTAIN_ASPECT_NO_INSET) {
+ resultWidth = inputImage.width / subsample;
+ resultHeight = inputImage.height / subsample;
+ } else {
+ resultWidth = 2 * radius;
+ resultHeight = 2 * radius;
+ }
+
+ return new TaskImage(img.rotation, resultWidth, resultHeight,
TaskImage.EXTRA_USER_DEFINED_FORMAT_ARGB_8888);
- onStart(mId, inputImage, resultImage);
+ }
+
+ /**
+ * Runs the correct image conversion routine, based upon the selected thumbnail
+ * shape.
+ *
+ * @param img Image to be converted
+ * @param subsample Amount of image subsampling
+ * @return an ARGB_888 packed array ready for Bitmap conversion
+ */
+ protected int[] runSelectedConversion(ImageProxy img, int subsample) {
+ switch (mThumbnailShape) {
+ case DEBUG_SQUARE_ASPECT_CIRCULAR_INSET:
+ return dummyColorInscribedDataCircleFromYuvImage(img, subsample);
+ case SQUARE_ASPECT_CIRCULAR_INSET:
+ return colorInscribedDataCircleFromYuvImage(img, subsample);
+ case SQUARE_ASPECT_NO_INSET:
+ return colorSubSampleFromYuvImage(img, subsample, true);
+ case MAINTAIN_ASPECT_NO_INSET:
+ return colorSubSampleFromYuvImage(img, subsample, false);
+ default:
+ return null;
+ }
+ }
+
/**
 * Runnable implementation: converts the captured YUV image into the selected
 * thumbnail shape and notifies the proxy listener with the uncompressed result.
 */
@Override
public void run() {
    ImageToProcess img = mImage;

    final TaskImage inputImage = calculateInputImage(img);
    // Largest integer subsample whose output still meets or exceeds mTargetSize.
    final int subsample = calculateBestSubsampleFactor(
            new Size(inputImage.width, inputImage.height),
            mTargetSize);
    final TaskImage resultImage = calculateResultImage(img, subsample);

    // This task always produces the fast thumbnail artifact.
    onStart(mId, inputImage, resultImage, TaskInfo.Destination.FAST_THUMBNAIL);
    logWrapper("TIMER_END Rendering preview YUV buffer available, w=" + img.proxy.getWidth()
            / subsample + " h=" + img.proxy.getHeight() / subsample + " of subsample "
            + subsample);
    final int[] convertedImage = runSelectedConversion(img.proxy, subsample);
    // Signal backend that reference has been released
    mImageTaskManager.releaseSemaphoreReference(img, mExecutor);
    onPreviewDone(resultImage, inputImage, convertedImage, TaskInfo.Destination.FAST_THUMBNAIL);
}
/**
* @param resultImage Image specification of result image
* @param inputImage Image specification of the input image
* @param colors Uncompressed data buffer
+ * @param destination Specifies the purpose of this image processing
+ * artifact
*/
- public void onPreviewDone(TaskImage resultImage, TaskImage inputImage, int[] colors) {
- TaskInfo job = new TaskInfo(mId, inputImage, resultImage);
+ public void onPreviewDone(TaskImage resultImage, TaskImage inputImage, int[] colors,
+ TaskInfo.Destination destination) {
+ TaskInfo job = new TaskInfo(mId, inputImage, resultImage, destination);
final ImageProcessorListener listener = mImageTaskManager.getProxyListener();
listener.onResultUncompressed(job, new UncompressedPayload(colors));
*/
public abstract class TaskImageContainer implements Runnable {
-
/**
* Simple helper class to encapsulate uncompressed payloads. Could be more complex in
* the future.
* Simple helper class to encapsulate input and resultant image specification.
* TasksImageContainer classes can be uniquely identified by triplet of its content (currently,
* the global timestamp of when the object was taken), the image specification of the input and
- * the desired output image specification.
 * the desired output image specification. A destination field specifies where the
 * image artifact should be routed, since spawned tasks may create multiple
 * un/compressed artifacts of different sizes destined for different components.
*/
static public class TaskInfo {
+
+ /**
+ * A single task graph can often create multiple imaging processing
+ * artifacts and the listener needs to distinguish an uncompressed image
+ * meant for image destinations. The different destinations are as
+ * follows:
+ * <ul>
+ * <li>FAST_THUMBNAIL: Small image required as soon as possible</li>
+ * <li>INTERMEDIATE_THUMBNAIL: Mid-sized image required for filmstrips
+ * at approximately 100-500ms latency</li>
+ * <li>FINAL_IMAGE: Full-resolution image artifact where latency > 500
+ * ms</li>
+ * </ul>
+ */
+ public enum Destination {
+ FAST_THUMBNAIL,
+ INTERMEDIATE_THUMBNAIL,
+ FINAL_IMAGE
+ }
+
+ public final Destination destination;
// The unique Id of the image being processed.
public final long contentId;
public final TaskImage result;
/** Binds a unique image-content id with its input/output specs and routing destination. */
TaskInfo(long aContentId, TaskImage inputSpec, TaskImage outputSpec, Destination aDestination) {
    contentId = aContentId;
    input = inputSpec;
    result = outputSpec;
    destination = aDestination;
}
}
* @param id Id for image content
* @param input Image specification for task input
* @param result Image specification for task result
+ * @param aDestination Purpose of image processing artifact
*/
- public void onStart(long id, TaskImage input, TaskImage result) {
- TaskInfo job = new TaskInfo(id, input, result);
+ public void onStart(long id, TaskImage input, TaskImage result, TaskInfo.Destination aDestination) {
+ TaskInfo job = new TaskInfo(id, input, result, aDestination);
final ImageProcessorListener listener = mImageTaskManager.getProxyListener();
listener.onStart(job);
}
import java.util.concurrent.Executor;
/**
- * TaskJpegEncode are the base class of tasks that wish to do JPEG encoding/decoding.
- * Various helper functions are held in this class.
+ * TaskJpegEncode are the base class of tasks that wish to do JPEG
+ * encoding/decoding. Various helper functions are held in this class.
*/
public abstract class TaskJpegEncode extends TaskImageContainer {
}
/**
 * Constructor to use for initial task definition or complex shared state
 * sharing.
 *
 * @param image Image reference that is required for computation
 * @param executor Executor to avoid thread control leakage
 * @param imageTaskManager Manager used for reference counting and listener access
 * @param preferredLane Preferred processing priority for this task
 * @param captureSession Session associated for UI handling
 */
public TaskJpegEncode(ImageToProcess image, Executor executor,
        ImageTaskManager imageTaskManager,
        TaskImageContainer.ProcessingPriority preferredLane, CaptureSession captureSession) {
    super(image, executor, imageTaskManager, preferredLane, captureSession);
}
/**
- * Converts the YUV420_888 Image into a packed NV21 of a single byte array, suitable for JPEG
- * compression by the method convertNv21toJpeg. This version will allocate its own byte buffer
- * memory.
+ * Converts the YUV420_888 Image into a packed NV21 of a single byte array,
+ * suitable for JPEG compression by the method convertNv21toJpeg. This
+ * version will allocate its own byte buffer memory.
*
* @param img image to be converted
* @return byte array of NV21 packed image
}
/**
- * Converts the YUV420_888 Image into a packed NV21 of a single byte array, suitable for JPEG
- * compression by the method convertNv21toJpeg. Creates a memory block with the y component at
- * the head and interleaves the u,v components following the y component. Caller is responsible
- * to allocate a large enough buffer for results.
+ * Fills the supplied buffer with a synthetic NV21 test pattern: a y plane at
+ * the head followed by interleaved chroma gradient components. No
+ * YUV420_888 Image is read; this is a dummy stand-in for the real
+ * conversion, producing a buffer suitable for convertNv21toJpeg. Caller is
+ * responsible for allocating a large enough buffer (w*h*3/2 bytes) for the
+ * result.
 *
- * @param img image to be converted
- * @param dataCopy buffer to write NV21 packed image
+ * @param dataCopy buffer to write the NV21 packed test pattern into
+ * @param w width of the pattern in pixels (assumed even — TODO confirm)
+ * @param h height of the pattern in pixels (assumed even — TODO confirm)
 */
public void dummyConvertYUV420ImageToPackedNV21(byte[] dataCopy,
final int w, final int h) {
- final int y_size = w*h;
- final int data_offset = w*h;
+ final int y_size = w * h;
+ final int data_offset = w * h;
- for (int i = 0; i < y_size ; i++) {
- dataCopy[i] = (byte)((((i % w)*255)/w) & 255);
+ for (int i = 0; i < y_size; i++) {
+ // NOTE(review): this horizontal gradient is immediately overwritten by
+ // the unconditional zero on the next line, so the y plane is always 0.
+ // Dead store — confirm whether the gradient or the zero fill is the
+ // intended luma before deleting either line.
+ dataCopy[i] = (byte) ((((i % w) * 255) / w) & 255);
dataCopy[i] = 0;
}
+ // Chroma plane: interleaved pairs, each channel ramping 0..255 across its axis.
- for (int i = 0; i < h/2 ; i++) {
- for (int j = 0; j < w/2 ; j++) {
- int offset=data_offset + w*i + j*2;
- dataCopy[offset] = (byte) ((255*i)/(h/2) & 255);
- dataCopy[offset+1] = (byte) ((255*j)/(w/2) & 255);
+ for (int i = 0; i < h / 2; i++) {
+ for (int j = 0; j < w / 2; j++) {
+ int offset = data_offset + w * i + j * 2;
+ dataCopy[offset] = (byte) ((255 * i) / (h / 2) & 255);
+ dataCopy[offset + 1] = (byte) ((255 * j) / (w / 2) & 255);
}
}
}
-
/**
- * Wraps the Android built-in YUV to Jpeg conversion routine. Pass in a valid NV21 image and get
- * back a compressed JPEG buffer. A good default JPEG compression implementation that should be
- * supported on all platforms.
+ * Wraps the Android built-in YUV to Jpeg conversion routine. Pass in a
+ * valid NV21 image and get back a compressed JPEG buffer. A good default
+ * JPEG compression implementation that should be supported on all
+ * platforms.
*
* @param data_copy byte buffer that contains the NV21 image
* @param w width of NV21 image
return postViewBytes.toByteArray();
}
-
/**
 * Wraps the onResultCompressed listener for ease of use.
 *
+ * @param id Id of the task that produced this result
+ * @param input Specification of image input size
 * @param result Specification of resultant input size
- * @param data Container for uncompressed data that represents image
+ * @param data Compressed JPEG data that represents the image
+ * @param destination Specifies the purpose of the image artifact
 */
- public void onJpegEncodeDone(long id, TaskImage input, TaskImage result, byte[] data) {
- TaskInfo job = new TaskInfo(id, input, result);
+ public void onJpegEncodeDone(long id, TaskImage input, TaskImage result, byte[] data,
+ TaskInfo.Destination destination) {
+ TaskInfo job = new TaskInfo(id, input, result, destination);
final ImageProcessorListener listener = mImageTaskManager.getProxyListener();
listener.onResultCompressed(job, new CompressedPayload(data));
}
* @param input Specification of image input size
* @param result Specification of resultant input size
* @param imageUri URI of the saved image.
+ * @param destination Specifies the purpose of the image artifact
*/
- public void onUriResolved(long id, TaskImage input, TaskImage result, final Uri imageUri) {
- final TaskInfo job = new TaskInfo(id, input, result);
+ public void onUriResolved(long id, TaskImage input, TaskImage result, final Uri imageUri,
+ TaskInfo.Destination destination) {
+ final TaskInfo job = new TaskInfo(id, input, result, destination);
+ // Forward the saved image's URI through the task manager's proxy listener.
final ImageProcessorListener listener = mImageTaskManager.getProxyListener();
listener.onResultUri(job, imageUri);
}
--- /dev/null
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.camera.processing.imagebackend;
+
+import com.android.camera.debug.Log;
+import com.android.camera.session.CaptureSession;
+import com.android.camera.util.Size;
+
+import java.util.concurrent.Executor;
+
+/**
+ * Implements the conversion of a YUV_420_888 image to a subsampled,
+ * aspect-preserving preview image, then chains JPEG compression of the
+ * original image.
+ */
+public class TaskPreviewChainedJpeg extends TaskConvertImageToRGBPreview {
+ protected final static Log.Tag TAG = new Log.Tag("TaskPreviewChainedJpeg");
+
+ /**
+ * Constructor
+ *
+ * @param image Image that the computation is dependent on
+ * @param executor Executor to fire off events
+ * @param imageTaskManager Image task manager that allows reference counting
+ * and task spawning
+ * @param captureSession Capture session bound to this image
+ * @param targetSize Approximate viewable pixel dimensions of the desired
+ * preview Image
+ */
+ TaskPreviewChainedJpeg(ImageToProcess image, Executor executor,
+ ImageTaskManager imageTaskManager, CaptureSession captureSession, Size targetSize) {
+ super(image, executor, imageTaskManager, ProcessingPriority.SLOW, captureSession,
+ targetSize, ThumbnailShape.MAINTAIN_ASPECT_NO_INSET);
+ }
+
+ // Thin wrapper so run() logging stays terse and tagged consistently.
+ public void logWrapper(String message) {
+ Log.v(TAG, message);
+ }
+
+ @Override
+ public void run() {
+ ImageToProcess img = mImage;
+
+ final TaskImage inputImage = calculateInputImage(img);
+ final int subsample = calculateBestSubsampleFactor(
+ new Size(inputImage.width, inputImage.height),
+ mTargetSize);
+ final TaskImage resultImage = calculateResultImage(img, subsample);
+
+ onStart(mId, inputImage, resultImage, TaskInfo.Destination.INTERMEDIATE_THUMBNAIL);
+
+ logWrapper("TIMER_END Rendering preview YUV buffer available, w=" + img.proxy.getWidth()
+ / subsample + " h=" + img.proxy.getHeight() / subsample + " of subsample "
+ + subsample);
+
+ final int[] convertedImage = runSelectedConversion(img.proxy, subsample);
+
+ // Chain the JPEG compression task on the same image; appended before the
+ // image reference is released below.
+ TaskImageContainer jpegTask = new TaskCompressImageToJpeg(img, mExecutor,
+ mImageTaskManager, mSession);
+ mImageTaskManager.appendTasks(img, jpegTask);
+
+ // Signal backend that reference has been released
+ mImageTaskManager.releaseSemaphoreReference(img, mExecutor);
+ onPreviewDone(resultImage, inputImage, convertedImage,
+ TaskInfo.Destination.INTERMEDIATE_THUMBNAIL);
+ }
+}