bool CameraHardware::PowerOn()
{
- LOGD("CameraHardware::PowerOn: Power ON camera.");
+ ALOGD("CameraHardware::PowerOn: Power ON camera.");
mCameraPowerFile = new char[PROPERTY_VALUE_MAX];
if (!property_get(CAMERA_POWER_FILE, mCameraPowerFile, "")) {
- LOGD("CameraHardware::PowerOn: no power_file set");
+ ALOGD("CameraHardware::PowerOn: no power_file set");
delete [] mCameraPowerFile;
mCameraPowerFile = 0;
return true;
::write(handle,"1\n",2);
::close(handle);
} else {
- LOGE("Could not open %s for writing.", mCameraPowerFile);
+ ALOGE("Could not open %s for writing.", mCameraPowerFile);
return false;
}
} while (--timeOut > 0);
if (handle >= 0) {
- LOGD("Camera powered on");
+ ALOGD("Camera powered on");
::close(handle);
return true;
} else {
- LOGE("Unable to power camera");
+ ALOGE("Unable to power camera");
}
return false;
bool CameraHardware::PowerOff()
{
- LOGD("CameraHardware::PowerOff: Power OFF camera.");
+ ALOGD("CameraHardware::PowerOff: Power OFF camera.");
if (!mCameraPowerFile)
return true;
::write(handle,"0\n",2);
::close(handle);
} else {
- LOGE("Could not open %s for writing.", mCameraPowerFile);
+ ALOGE("Could not open %s for writing.", mCameraPowerFile);
return false;
}
delete [] mCameraPowerFile;
CameraHardware::~CameraHardware()
{
- LOGD("CameraHardware::destruct");
+ ALOGD("CameraHardware::destruct");
if (mPreviewThread != 0) {
stopPreview();
}
bool CameraHardware::NegotiatePreviewFormat(struct preview_stream_ops* win)
{
- LOGD("CameraHardware::NegotiatePreviewFormat");
+ ALOGD("CameraHardware::NegotiatePreviewFormat");
// Get the preview size... If we are recording, use the recording video size instead of the preview size
int pw, ph;
mParameters.getPreviewSize(&pw, &ph);
}
- LOGD("Trying to set preview window geometry to %dx%d",pw,ph);
+ ALOGD("Trying to set preview window geometry to %dx%d",pw,ph);
mPreviewWinFmt = PIXEL_FORMAT_UNKNOWN;
mPreviewWinWidth = 0;
mPreviewWinHeight = 0;
// Set the buffer geometry of the surface and YV12 as the preview format
if (win->set_buffers_geometry(win,pw,ph,PIXEL_FORMAT_RGBA_8888) != NO_ERROR) {
- LOGE("Unable to set buffer geometry");
+ ALOGE("Unable to set buffer geometry");
return false;
}
status_t CameraHardware::connectCamera(hw_device_t** device)
{
- LOGD("CameraHardware::connectCamera");
+ ALOGD("CameraHardware::connectCamera");
*device = &common;
return NO_ERROR;
status_t CameraHardware::closeCamera()
{
    // Tear down the camera (stops preview and releases resources via
    // releaseCamera()); always reports success to the HAL layer.
    ALOGD("CameraHardware::closeCamera");
    releaseCamera();
    return NO_ERROR;
}
status_t CameraHardware::getCameraInfo(struct camera_info* info, int facing)
{
- LOGD("CameraHardware::getCameraInfo");
+ ALOGD("CameraHardware::getCameraInfo");
info->facing = facing;
info->orientation = 0;
status_t CameraHardware::setPreviewWindow(struct preview_stream_ops* window)
{
- LOGD("CameraHardware::setPreviewWindow: preview_stream_ops: %p", window);
+ ALOGD("CameraHardware::setPreviewWindow: preview_stream_ops: %p", window);
{
Mutex::Autolock lock(mLock);
status_t res = window->set_usage(window, GRALLOC_USAGE_SW_WRITE_OFTEN);
if (res != NO_ERROR) {
res = -res; // set_usage returns a negative errno.
- LOGE("%s: Error setting preview window usage %d -> %s",
+ ALOGE("%s: Error setting preview window usage %d -> %s",
__FUNCTION__, res, strerror(res));
return res;
}
// setup the preview window geometry to be able to use the full preview window
if (mPreviewThread != 0 && mWin != 0) {
- LOGD("CameraHardware::setPreviewWindow - Negotiating preview format");
+ ALOGD("CameraHardware::setPreviewWindow - Negotiating preview format");
NegotiatePreviewFormat(mWin);
}
camera_request_memory get_memory,
void* user)
{
- LOGD("CameraHardware::setCallbacks");
+ ALOGD("CameraHardware::setCallbacks");
{
Mutex::Autolock lock(mLock);
mNotifyCb = notify_cb;
void CameraHardware::enableMsgType(int32_t msgType)
{
- LOGD("CameraHardware::enableMsgType: %d", msgType);
+ ALOGD("CameraHardware::enableMsgType: %d", msgType);
{
Mutex::Autolock lock(mLock);
int32_t old = mMsgEnabled;
void CameraHardware::disableMsgType(int32_t msgType)
{
- LOGD("CameraHardware::disableMsgType: %d", msgType);
+ ALOGD("CameraHardware::disableMsgType: %d", msgType);
{
Mutex::Autolock lock(mLock);
int32_t old = mMsgEnabled;
// All messages queried must be enabled to return true
int enabled = (mMsgEnabled & msgType) == msgType;
- LOGD("CameraHardware::isMsgTypeEnabled(%d): %d", msgType, enabled);
+ ALOGD("CameraHardware::isMsgTypeEnabled(%d): %d", msgType, enabled);
return enabled;
}
status_t CameraHardware::startPreviewLocked()
{
- LOGD("CameraHardware::startPreviewLocked");
+ ALOGD("CameraHardware::startPreviewLocked");
if (mPreviewThread != 0) {
- LOGD("CameraHardware::startPreviewLocked: preview already running");
+ ALOGD("CameraHardware::startPreviewLocked: preview already running");
return NO_ERROR;
}
int fps = mParameters.getPreviewFrameRate();
- LOGD("CameraHardware::startPreviewLocked: Open, %dx%d", width, height);
+ ALOGD("CameraHardware::startPreviewLocked: Open, %dx%d", width, height);
status_t ret = camera.Open(mVideoDevice);
if (ret != NO_ERROR) {
- LOGE("Failed to initialize Camera");
+ ALOGE("Failed to initialize Camera");
return ret;
}
- LOGD("CameraHardware::startPreviewLocked: Init");
+ ALOGD("CameraHardware::startPreviewLocked: Init");
ret = camera.Init(width, height, fps);
if (ret != NO_ERROR) {
- LOGE("Failed to setup streaming");
+ ALOGE("Failed to setup streaming");
return ret;
}
/* Retrieve the real size being used */
camera.getSize(width, height);
- LOGD("CameraHardware::startPreviewLocked: effective size: %dx%d",width, height);
+ ALOGD("CameraHardware::startPreviewLocked: effective size: %dx%d",width, height);
// If we are recording, use the recording video size instead of the preview size
if (mRecordingEnabled && mMsgEnabled & CAMERA_MSG_VIDEO_FRAME) {
/* And reinit the memory heaps to reflect the real used size if needed */
initHeapLocked();
- LOGD("CameraHardware::startPreviewLocked: StartStreaming");
+ ALOGD("CameraHardware::startPreviewLocked: StartStreaming");
ret = camera.StartStreaming();
if (ret != NO_ERROR) {
- LOGE("Failed to start streaming");
+ ALOGE("Failed to start streaming");
return ret;
}
// setup the preview window geometry in order to use it to zoom the image
if (mWin != 0) {
- LOGD("CameraHardware::setPreviewWindow - Negotiating preview format");
+ ALOGD("CameraHardware::setPreviewWindow - Negotiating preview format");
NegotiatePreviewFormat(mWin);
}
- LOGD("CameraHardware::startPreviewLocked: starting PreviewThread");
+ ALOGD("CameraHardware::startPreviewLocked: starting PreviewThread");
mPreviewThread = new PreviewThread(this);
- LOGD("CameraHardware::startPreviewLocked: O - this:0x%p",this);
+ ALOGD("CameraHardware::startPreviewLocked: O - this:0x%p",this);
return NO_ERROR;
}
status_t CameraHardware::startPreview()
{
- LOGD("CameraHardware::startPreview");
+ ALOGD("CameraHardware::startPreview");
Mutex::Autolock lock(mLock);
return startPreviewLocked();
void CameraHardware::stopPreviewLocked()
{
- LOGD("CameraHardware::stopPreviewLocked");
+ ALOGD("CameraHardware::stopPreviewLocked");
if (mPreviewThread != 0) {
- LOGD("CameraHardware::stopPreviewLocked: stopping PreviewThread");
+ ALOGD("CameraHardware::stopPreviewLocked: stopping PreviewThread");
mPreviewThread->requestExitAndWait();
mPreviewThread.clear();
- LOGD("CameraHardware::stopPreviewLocked: Uninit");
+ ALOGD("CameraHardware::stopPreviewLocked: Uninit");
camera.Uninit();
- LOGD("CameraHardware::stopPreviewLocked: StopStreaming");
+ ALOGD("CameraHardware::stopPreviewLocked: StopStreaming");
camera.StopStreaming();
- LOGD("CameraHardware::stopPreviewLocked: Close");
+ ALOGD("CameraHardware::stopPreviewLocked: Close");
camera.Close();
}
- LOGD("CameraHardware::stopPreviewLocked: OK");
+ ALOGD("CameraHardware::stopPreviewLocked: OK");
}
void CameraHardware::stopPreview()
{
- LOGD("CameraHardware::stopPreview");
+ ALOGD("CameraHardware::stopPreview");
Mutex::Autolock lock(mLock);
stopPreviewLocked();
Mutex::Autolock lock(mLock);
enabled = (mPreviewThread != 0);
}
- LOGD("CameraHardware::isPreviewEnabled: %d", enabled);
+ ALOGD("CameraHardware::isPreviewEnabled: %d", enabled);
return enabled;
}
status_t CameraHardware::storeMetaDataInBuffers(int value)
{
- LOGD("CameraHardware::storeMetaDataInBuffers: %d", value);
+ ALOGD("CameraHardware::storeMetaDataInBuffers: %d", value);
// Do not accept to store metadata in buffers - We will always store
// YUV data on video buffers. Metadata, in the case of Nvidia Tegra2
status_t CameraHardware::startRecording()
{
- LOGD("CameraHardware::startRecording");
+ ALOGD("CameraHardware::startRecording");
{
Mutex::Autolock lock(mLock);
if (!mRecordingEnabled) {
void CameraHardware::stopRecording()
{
- LOGD("CameraHardware::stopRecording");
+ ALOGD("CameraHardware::stopRecording");
{
Mutex::Autolock lock(mLock);
if (mRecordingEnabled) {
Mutex::Autolock lock(mLock);
enabled = mRecordingEnabled;
}
- LOGD("CameraHardware::isRecordingEnabled: %d", mRecordingEnabled);
+ ALOGD("CameraHardware::isRecordingEnabled: %d", mRecordingEnabled);
return enabled;
}
void CameraHardware::releaseRecordingFrame(const void* mem)
{
    // Intentionally a no-op apart from the trace log: recording frames are
    // carved out of mRecordingHeap and recycled wholesale, so there is
    // nothing to free per frame. NOTE(review): 'mem' is ignored — confirm
    // against initHeapLocked() that the heap is recycled elsewhere.
    ALOGD("CameraHardware::releaseRecordingFrame");
}
status_t CameraHardware::setAutoFocus()
{
- LOGD("CameraHardware::setAutoFocus");
+ ALOGD("CameraHardware::setAutoFocus");
Mutex::Autolock lock(mLock);
if (createThread(beginAutoFocusThread, this) == false)
return UNKNOWN_ERROR;
status_t CameraHardware::cancelAutoFocus()
{
    // Autofocus on this HAL completes immediately (the focus thread only
    // posts CAMERA_MSG_FOCUS), so there is never an in-flight operation
    // to cancel; succeed unconditionally.
    ALOGD("CameraHardware::cancelAutoFocus");
    return NO_ERROR;
}
status_t CameraHardware::takePicture()
{
- LOGD("CameraHardware::takePicture");
+ ALOGD("CameraHardware::takePicture");
if (createThread(beginPictureThread, this) == false)
return UNKNOWN_ERROR;
status_t CameraHardware::cancelPicture()
{
    // Picture capture is not cancellable in this implementation; report
    // success unconditionally so callers can treat cancel as best-effort.
    ALOGD("CameraHardware::cancelPicture");
    return NO_ERROR;
}
status_t CameraHardware::setParameters(const char* parms)
{
- LOGD("CameraHardware::setParameters");
+ ALOGD("CameraHardware::setParameters");
CameraParameters params;
String8 str8_param(parms);
// If no changes, trivially accept it!
if (params.flatten() == mParameters.flatten()) {
- LOGD("Trivially accept it. No changes detected");
+ ALOGD("Trivially accept it. No changes detected");
return NO_ERROR;
}
strcmp(params.getPreviewFormat(),"yuv422sp") &&
strcmp(params.getPreviewFormat(),"yuv420sp") &&
strcmp(params.getPreviewFormat(),"yuv420p")) {
- LOGE("CameraHardware::setParameters: Unsupported format '%s' for preview",params.getPreviewFormat());
+ ALOGE("CameraHardware::setParameters: Unsupported format '%s' for preview",params.getPreviewFormat());
return BAD_VALUE;
}
if (strcmp(params.getPictureFormat(), CameraParameters::PIXEL_FORMAT_JPEG)) {
- LOGE("CameraHardware::setParameters: Only jpeg still pictures are supported");
+ ALOGE("CameraHardware::setParameters: Only jpeg still pictures are supported");
return BAD_VALUE;
}
strcmp(params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT),"yuv422sp") &&
strcmp(params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT),"yuv420sp") &&
strcmp(params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT),"yuv420p")) {
- LOGE("CameraHardware::setParameters: Unsupported format '%s' for recording",params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT));
+ ALOGE("CameraHardware::setParameters: Unsupported format '%s' for recording",params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT));
return BAD_VALUE;
}
int w, h;
params.getPreviewSize(&w, &h);
- LOGD("CameraHardware::setParameters: PREVIEW: Size %dx%d, %d fps, format: %s", w, h, params.getPreviewFrameRate(), params.getPreviewFormat());
+ ALOGD("CameraHardware::setParameters: PREVIEW: Size %dx%d, %d fps, format: %s", w, h, params.getPreviewFrameRate(), params.getPreviewFormat());
params.getPictureSize(&w, &h);
- LOGD("CameraHardware::setParameters: PICTURE: Size %dx%d, format: %s", w, h, params.getPictureFormat());
+ ALOGD("CameraHardware::setParameters: PICTURE: Size %dx%d, format: %s", w, h, params.getPictureFormat());
params.getVideoSize(&w, &h);
- LOGD("CameraHardware::setParameters: VIDEO: Size %dx%d, format: %s", w, h, params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT));
+ ALOGD("CameraHardware::setParameters: VIDEO: Size %dx%d, format: %s", w, h, params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT));
// Store the new parameters
mParameters = params;
// and also restart the preview so we use the new size if needed
initHeapLocked();
- LOGD("CameraHardware::setParameters: OK");
+ ALOGD("CameraHardware::setParameters: OK");
return NO_ERROR;
}
static char lNoParam = '\0';
char* CameraHardware::getParameters()
{
- LOGD("CameraHardware::getParameters");
+ ALOGD("CameraHardware::getParameters");
String8 params;
{
return ret_str;
}
- LOGE("%s: Unable to allocate string for %s", __FUNCTION__, params.string());
+ ALOGE("%s: Unable to allocate string for %s", __FUNCTION__, params.string());
/* Apparently, we can't return NULL fron this routine. */
return &lNoParam;
}
void CameraHardware::putParameters(char* params)
{
- LOGD("CameraHardware::putParameters");
+ ALOGD("CameraHardware::putParameters");
/* This method simply frees parameters allocated in getParameters(). */
if (params != NULL && params != &lNoParam) {
free(params);
status_t CameraHardware::sendCommand(int32_t command, int32_t arg1, int32_t arg2)
{
    // No vendor-specific commands are implemented: every command and its
    // arguments are accepted and ignored, returning 0 (success) so the
    // framework does not treat unsupported commands as fatal.
    ALOGD("CameraHardware::sendCommand");
    return 0;
}
void CameraHardware::releaseCamera()
{
- LOGD("CameraHardware::releaseCamera");
+ ALOGD("CameraHardware::releaseCamera");
if (mPreviewThread != 0) {
stopPreview();
}
status_t CameraHardware::dumpCamera(int fd)
{
    // State dumping is not implemented; 'fd' is ignored and -EINVAL is
    // returned so dumpsys shows the request as unsupported.
    ALOGD("dump");
    return -EINVAL;
}
void CameraHardware::initDefaultParameters()
{
- LOGD("CameraHardware::initDefaultParameters");
+ ALOGD("CameraHardware::initDefaultParameters");
CameraParameters p;
unsigned int i;
SortedVector<int> avFps;
if (camera.Open(mVideoDevice) != NO_ERROR) {
- LOGE("cannot open device.");
+ ALOGE("cannot open device.");
} else {
// Get the default preview format
}
}
- LOGI("Default preview size: (%d x %d), fps:%d\n",pw,ph,pfps);
- LOGI("All available formats: %s",(const char*)szs);
- LOGI("All available fps: %s",(const char*)fpsranges);
- LOGI("Default picture size: (%d x %d)\n",fw,fh);
+ ALOGI("Default preview size: (%d x %d), fps:%d\n",pw,ph,pfps);
+ ALOGI("All available formats: %s",(const char*)szs);
+ ALOGI("All available fps: %s",(const char*)fpsranges);
+ ALOGI("Default picture size: (%d x %d)\n",fw,fh);
// Now store the data
p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false");
if (setParameters(p.flatten()) != NO_ERROR) {
- LOGE("CameraHardware::initDefaultParameters: Failed to set default parameters.");
+ ALOGE("CameraHardware::initDefaultParameters: Failed to set default parameters.");
}
}
void CameraHardware::initHeapLocked()
{
- LOGD("CameraHardware::initHeapLocked");
+ ALOGD("CameraHardware::initHeapLocked");
int preview_width, preview_height;
int picture_width, picture_height;
int video_width, video_height;
if (!mRequestMemory) {
- LOGE("No memory allocator available");
+ ALOGE("No memory allocator available");
return;
}
mParameters.getPictureSize(&picture_width, &picture_height);
mParameters.getVideoSize(&video_width, &video_height);
- LOGD("CameraHardware::initHeapLocked: preview size=%dx%d", preview_width, preview_height);
- LOGD("CameraHardware::initHeapLocked: picture size=%dx%d", picture_width, picture_height);
- LOGD("CameraHardware::initHeapLocked: video size=%dx%d", video_width, video_height);
+ ALOGD("CameraHardware::initHeapLocked: preview size=%dx%d", preview_width, preview_height);
+ ALOGD("CameraHardware::initHeapLocked: picture size=%dx%d", picture_width, picture_height);
+ ALOGD("CameraHardware::initHeapLocked: video size=%dx%d", video_width, video_height);
int how_raw_preview_big = 0;
if (mPreviewThread != 0) {
restart_preview = true;
stopPreviewLocked();
- LOGD("Stopping preview to allow changes");
+ ALOGD("Stopping preview to allow changes");
}
// Store the new effective size
if (mPreviewThread != 0) {
restart_preview = true;
stopPreviewLocked();
- LOGD("Stopping preview to allow changes");
+ ALOGD("Stopping preview to allow changes");
}
// Store the effective size
if (!restart_preview && mPreviewThread != 0) {
restart_preview = true;
stopPreviewLocked();
- LOGD("Stopping preview to allow changes");
+ ALOGD("Stopping preview to allow changes");
}
mRawPreviewFrameSize = how_raw_preview_big;
if (mRawPreviewHeap) {
mRawPreviewBuffer = mRawPreviewHeap->data;
} else {
- LOGE("Unable to allocate memory for RawPreview");
+ ALOGE("Unable to allocate memory for RawPreview");
}
- LOGD("CameraHardware::initHeapLocked: Raw preview heap allocated");
+ ALOGD("CameraHardware::initHeapLocked: Raw preview heap allocated");
}
int how_preview_big = 0;
if (!restart_preview && mPreviewThread != 0) {
restart_preview = true;
stopPreviewLocked();
- LOGD("Stopping preview to allow changes");
+ ALOGD("Stopping preview to allow changes");
}
mPreviewFrameSize = how_preview_big;
mPreviewBuffer[i] = (char*)mPreviewHeap->data + (i * mPreviewFrameSize);
}
} else {
- LOGE("Unable to allocate memory for Preview");
+ ALOGE("Unable to allocate memory for Preview");
}
- LOGD("CameraHardware::initHeapLocked: preview heap allocated");
+ ALOGD("CameraHardware::initHeapLocked: preview heap allocated");
}
int how_recording_big = 0;
if (!restart_preview && mPreviewThread != 0) {
restart_preview = true;
stopPreviewLocked();
- LOGD("Stopping preview to allow changes");
+ ALOGD("Stopping preview to allow changes");
}
mRecordingFrameSize = how_recording_big;
mRecBuffers[i] = (char*)mRecordingHeap->data + (i * mRecordingFrameSize);
}
} else {
- LOGE("Unable to allocate memory for Recording");
+ ALOGE("Unable to allocate memory for Recording");
}
- LOGD("CameraHardware::initHeapLocked: recording heap allocated");
+ ALOGD("CameraHardware::initHeapLocked: recording heap allocated");
}
int how_picture_big = picture_width * picture_height << 1; // Raw picture heap always in YUYV
if (mRawPictureHeap) {
mRawBuffer = mRawPictureHeap->data;
} else {
- LOGE("Unable to allocate memory for RawPicture");
+ ALOGE("Unable to allocate memory for RawPicture");
}
- LOGD("CameraHardware::initHeapLocked: Raw picture heap allocated");
+ ALOGD("CameraHardware::initHeapLocked: Raw picture heap allocated");
}
int how_jpeg_big = picture_width * picture_height << 1; // jpeg maximum size
}
mJpegPictureHeap = mRequestMemory(-1,how_jpeg_big,1,mCallbackCookie);
if (!mJpegPictureHeap) {
- LOGE("Unable to allocate memory for RawPicture");
+ ALOGE("Unable to allocate memory for RawPicture");
}
- LOGD("CameraHardware::initHeapLocked: Jpeg picture heap allocated");
+ ALOGD("CameraHardware::initHeapLocked: Jpeg picture heap allocated");
}
// Don't forget to restart the preview if it was stopped...
if (restart_preview) {
- LOGD("Restarting preview");
+ ALOGD("Restarting preview");
startPreviewLocked();
}
- LOGD("CameraHardware::initHeapLocked: OK");
+ ALOGD("CameraHardware::initHeapLocked: OK");
}
int CameraHardware::previewThread()
{
- LOGV("CameraHardware::previewThread: this=%p",this);
+ ALOGV("CameraHardware::previewThread: this=%p",this);
int previewFrameRate = mParameters.getPreviewFrameRate();
// If no raw preview buffer, we can't do anything...
if (mRawPreviewBuffer == 0) {
- LOGE("No Raw preview buffer!");
+ ALOGE("No Raw preview buffer!");
mLock.unlock();
return NO_ERROR;
}
// If no preview buffer, we cant do anything...
if (frame == 0) {
- LOGE("No preview buffer!");
+ ALOGE("No preview buffer!");
mLock.unlock();
return NO_ERROR;
}
// If the recording is enabled...
if (mRecordingEnabled && mMsgEnabled & CAMERA_MSG_VIDEO_FRAME) {
- //LOGD("CameraHardware::previewThread: posting video frame...");
+ //ALOGD("CameraHardware::previewThread: posting video frame...");
// Get the video size. We are warrantied here that the current capture
// size IS exacty equal to the video size, as this condition is enforced
}
if (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
- //LOGD("CameraHardware::previewThread: posting preview frame...");
+ //ALOGD("CameraHardware::previewThread: posting preview frame...");
// Here we could eventually have a problem: If we are recording, the recording size
// takes precedence over the preview size. So, the rawBase buffer could be of a
break;
default:
- LOGE("Unhandled pixel format");
+ ALOGE("Unhandled pixel format");
}
mDataCbTimestamp(timestamp, CAMERA_MSG_VIDEO_FRAME, mRecordingHeap, recBufferIdx, mCallbackCookie);
}
- LOGV("previewThread OK");
+ ALOGV("previewThread OK");
// Wait for it...
usleep(delay);
{
// Preview to a preview window...
if (mWin == 0) {
- LOGE("%s: No preview window",__FUNCTION__);
+ ALOGE("%s: No preview window",__FUNCTION__);
return;
}
int stride = 0;
status_t res = mWin->dequeue_buffer(mWin, &buf, &stride);
if (res != NO_ERROR || buf == NULL) {
- LOGE("%s: Unable to dequeue preview window buffer: %d -> %s",
+ ALOGE("%s: Unable to dequeue preview window buffer: %d -> %s",
__FUNCTION__, -res, strerror(-res));
return;
}
/* Let the preview window to lock the buffer. */
res = mWin->lock_buffer(mWin, buf);
if (res != NO_ERROR) {
- LOGE("%s: Unable to lock preview window buffer: %d -> %s",
+ ALOGE("%s: Unable to lock preview window buffer: %d -> %s",
__FUNCTION__, -res, strerror(-res));
mWin->cancel_buffer(mWin, buf);
return;
GraphicBufferMapper& grbuffer_mapper(GraphicBufferMapper::get());
res = grbuffer_mapper.lock(*buf, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &vaddr);
if (res != NO_ERROR || vaddr == NULL) {
- LOGE("%s: grbuffer_mapper.lock failure: %d -> %s",
+ ALOGE("%s: grbuffer_mapper.lock failure: %d -> %s",
__FUNCTION__, res, strerror(res));
mWin->cancel_buffer(mWin, buf);
return;
// Make sure not to overflow the preview surface
if (xStart < 0 || yStart < 0) {
- LOGE("Preview window is smaller than video preview size - Cropping image.");
+ ALOGE("Preview window is smaller than video preview size - Cropping image.");
if (xStart < 0) {
srcWidth += xStart;
bytesPerPixel = 2;
}
- LOGV("ANativeWindow: bits:%p, stride in pixels:%d, w:%d, h: %d, format: %d",vaddr,stride,mPreviewWinWidth,mPreviewWinHeight,mPreviewWinFmt);
+ ALOGV("ANativeWindow: bits:%p, stride in pixels:%d, w:%d, h: %d, format: %d",vaddr,stride,mPreviewWinWidth,mPreviewWinHeight,mPreviewWinFmt);
// Based on the destination pixel type, we must convert from YUYV to it
int dstStride = bytesPerPixel * stride;
break;
default:
- LOGE("Unhandled pixel format");
+ ALOGE("Unhandled pixel format");
}
/* Show it. */
int CameraHardware::beginAutoFocusThread(void *cookie)
{
    // Static thread trampoline: 'cookie' is the CameraHardware instance
    // handed to createThread() by setAutoFocus(); forward to the member
    // routine that performs the actual focus notification.
    ALOGD("CameraHardware::beginAutoFocusThread");
    CameraHardware *c = (CameraHardware *)cookie;
    return c->autoFocusThread();
}
int CameraHardware::autoFocusThread()
{
- LOGD("CameraHardware::autoFocusThread");
+ ALOGD("CameraHardware::autoFocusThread");
if (mMsgEnabled & CAMERA_MSG_FOCUS)
mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie);
return NO_ERROR;
int CameraHardware::beginPictureThread(void *cookie)
{
    // Static thread trampoline: 'cookie' is the CameraHardware instance
    // handed to createThread() by takePicture(); forward to the member
    // routine that performs the capture.
    ALOGD("CameraHardware::beginPictureThread");
    CameraHardware *c = (CameraHardware *)cookie;
    return c->pictureThread();
}
int CameraHardware::pictureThread()
{
- LOGD("CameraHardware::pictureThread");
+ ALOGD("CameraHardware::pictureThread");
bool raw = false;
bool jpeg = false;
int w, h;
mParameters.getPictureSize(&w, &h);
- LOGD("CameraHardware::pictureThread: taking picture of %dx%d", w, h);
+ ALOGD("CameraHardware::pictureThread: taking picture of %dx%d", w, h);
/* Make sure to remember if the shutter must be enabled or not */
if (mMsgEnabled & CAMERA_MSG_SHUTTER) {
stopPreviewLocked();
}
- LOGD("CameraHardware::pictureThread: taking picture (%d x %d)", w, h);
+ ALOGD("CameraHardware::pictureThread: taking picture (%d x %d)", w, h);
if (camera.Open(mVideoDevice) == NO_ERROR) {
camera.Init(w, h, 1);
/* Retrieve the real size being used */
camera.getSize(w,h);
- LOGD("CameraHardware::pictureThread: effective size: %dx%d",w, h);
+ ALOGD("CameraHardware::pictureThread: effective size: %dx%d",w, h);
/* Store it as the picture size to use */
mParameters.setPictureSize(w, h);
camera.StartStreaming();
- LOGD("CameraHardware::pictureThread: waiting until camera picture stabilizes...");
+ ALOGD("CameraHardware::pictureThread: waiting until camera picture stabilizes...");
int maxFramesToWait = 8;
int luminanceStableFor = 0;
maxFramesToWait--;
- LOGD("luminance: %4d, dif: %4d, thresh: %d, stableFor: %d, maxWait: %d", luminance, dif, thresh, luminanceStableFor, maxFramesToWait);
+ ALOGD("luminance: %4d, dif: %4d, thresh: %d, stableFor: %d, maxWait: %d", luminance, dif, thresh, luminanceStableFor, maxFramesToWait);
}
- LOGD("CameraHardware::pictureThread: picture taken");
+ ALOGD("CameraHardware::pictureThread: picture taken");
if (mMsgEnabled & CAMERA_MSG_RAW_IMAGE) {
- LOGD("CameraHardware::pictureThread: took raw picture");
+ ALOGD("CameraHardware::pictureThread: took raw picture");
raw = true;
}
mJpegPictureHeap = mRequestMemory(-1,fileSize,1,mCallbackCookie);
if (mJpegPictureHeap) {
memcpy(mJpegPictureHeap->data,jpegBuff,fileSize);
- LOGD("CameraHardware::pictureThread: took jpeg picture compressed to %d bytes, q=%d", fileSize, quality);
+ ALOGD("CameraHardware::pictureThread: took jpeg picture compressed to %d bytes, q=%d", fileSize, quality);
jpeg = true;
} else {
- LOGE("Unable to allocate memory for RawPicture");
+ ALOGE("Unable to allocate memory for RawPicture");
}
free(jpegBuff);
} else {
- LOGE("Unable to allocate temporary memory for Jpeg compression");
+ ALOGE("Unable to allocate temporary memory for Jpeg compression");
}
}
camera.Close();
} else {
- LOGE("CameraHardware::pictureThread: failed to grab image");
+ ALOGE("CameraHardware::pictureThread: failed to grab image");
}
}
/* All this callbacks can potentially call one of our methods.
Make sure to dispatch them OUTSIDE the lock! */
if (shutter) {
- LOGD("Sending the Shutter message");
+ ALOGD("Sending the Shutter message");
mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie);
}
if (raw) {
- LOGD("Sending the raw message");
+ ALOGD("Sending the raw message");
mDataCb(CAMERA_MSG_RAW_IMAGE, mRawPictureHeap, 0, NULL, mCallbackCookie);
}
if (jpeg) {
- LOGD("Sending the jpeg message");
+ ALOGD("Sending the jpeg message");
mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mJpegPictureHeap, 0, NULL, mCallbackCookie);
}
- LOGD("CameraHardware::pictureThread OK");
+ ALOGD("CameraHardware::pictureThread OK");
return NO_ERROR;
}
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->setPreviewWindow(window);
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return;
}
ec->setCallbacks(notify_cb, data_cb, data_cb_timestamp, get_memory, user);
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return;
}
ec->enableMsgType(msg_type);
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return;
}
ec->disableMsgType(msg_type);
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->isMsgTypeEnabled(msg_type);
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->startPreview();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return;
}
ec->stopPreview();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->isPreviewEnabled();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->storeMetaDataInBuffers(enable);
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->startRecording();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return;
}
ec->stopRecording();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->isRecordingEnabled();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return;
}
ec->releaseRecordingFrame(opaque);
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->setAutoFocus();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->cancelAutoFocus();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->takePicture();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->cancelPicture();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->setParameters(parms);
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return NULL;
}
return ec->getParameters();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return;
}
ec->putParameters(params);
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->sendCommand(cmd, arg1, arg2);
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return;
}
ec->releaseCamera();
{
CameraHardware* ec = reinterpret_cast<CameraHardware*>(dev->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->dumpCamera(fd);
CameraHardware* ec =
reinterpret_cast<CameraHardware*>(reinterpret_cast<struct camera_device*>(device)->priv);
if (ec == NULL) {
- LOGE("%s: Unexpected NULL camera device", __FUNCTION__);
+ ALOGE("%s: Unexpected NULL camera device", __FUNCTION__);
return -EINVAL;
}
return ec->closeCamera();
//#define DEBUG_FRAME 0
#ifdef DEBUG_FRAME
-#define LOG_FRAME LOGD
+#define LOG_FRAME ALOGD
#else
-#define LOG_FRAME LOGV
+#define LOG_FRAME ALOGV
#endif
namespace android {
memset(videoIn, 0, sizeof (struct vdIn));
if ((fd = open(device, O_RDWR)) == -1) {
- LOGE("ERROR opening V4L interface: %s", strerror(errno));
+ ALOGE("ERROR opening V4L interface: %s", strerror(errno));
return -1;
}
ret = ioctl (fd, VIDIOC_QUERYCAP, &videoIn->cap);
if (ret < 0) {
- LOGE("Error opening device: unable to query device.");
+ ALOGE("Error opening device: unable to query device.");
return -1;
}
if ((videoIn->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
- LOGE("Error opening device: video capture not supported.");
+ ALOGE("Error opening device: video capture not supported.");
return -1;
}
if (!(videoIn->cap.capabilities & V4L2_CAP_STREAMING)) {
- LOGE("Capture device does not support streaming i/o");
+ ALOGE("Capture device does not support streaming i/o");
return -1;
}
int V4L2Camera::Init(int width, int height, int fps)
{
- LOGD("V4L2Camera::Init");
+ ALOGD("V4L2Camera::Init");
/* Initialize the capture to the specified width and height */
static const struct {
// If no formats, break here
if (m_AllFmts.isEmpty()) {
- LOGE("No video formats available");
+ ALOGE("No video formats available");
return -1;
}
int difArea = sd.getArea() - area;
int difFps = my_abs(sd.getFps() - fps);
- LOGD("Trying format: (%d x %d), Fps: %d [difArea:%d, difFps:%d, cDifArea:%d, cDifFps:%d]",sd.getWidth(),sd.getHeight(),sd.getFps(), difArea, difFps, closestDArea, closestDFps);
+ ALOGD("Trying format: (%d x %d), Fps: %d [difArea:%d, difFps:%d, cDifArea:%d, cDifFps:%d]",sd.getWidth(),sd.getHeight(),sd.getFps(), difArea, difFps, closestDArea, closestDFps);
if (closestDArea < 0 ||
difArea < closestDArea ||
(difArea == closestDArea && difFps < closestDFps)) {
}
if (closestDArea == -1) {
- LOGE("Size not available: (%d x %d)",width,height);
+ ALOGE("Size not available: (%d x %d)",width,height);
return -1;
}
- LOGD("Selected format: (%d x %d), Fps: %d",closest.getWidth(),closest.getHeight(),closest.getFps());
+ ALOGD("Selected format: (%d x %d), Fps: %d",closest.getWidth(),closest.getHeight(),closest.getFps());
// Check if we will have to crop the captured image
bool crop = width != closest.getWidth() || height != closest.getHeight();
}
}
if (ret < 0) {
- LOGE("Open: VIDIOC_TRY_FMT Failed: %s", strerror(errno));
+ ALOGE("Open: VIDIOC_TRY_FMT Failed: %s", strerror(errno));
return ret;
}
videoIn->format.fmt.pix.pixelformat = pixFmtsOrder[i].fmt;
ret = ioctl(fd, VIDIOC_S_FMT, &videoIn->format);
if (ret < 0) {
- LOGE("Open: VIDIOC_S_FMT Failed: %s", strerror(errno));
+ ALOGE("Open: VIDIOC_S_FMT Failed: %s", strerror(errno));
return ret;
}
videoIn->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = ioctl(fd, VIDIOC_G_FMT, &videoIn->format);
if (ret < 0) {
- LOGE("Open: VIDIOC_G_FMT Failed: %s", strerror(errno));
+ ALOGE("Open: VIDIOC_G_FMT Failed: %s", strerror(errno));
return ret;
}
videoIn->capCropOffset = (startX * videoIn->capBytesPerPixel) +
(videoIn->format.fmt.pix.bytesperline * startY);
- LOGI("Cropping from origin: %dx%d - size: %dx%d (offset:%d)",
+ ALOGI("Cropping from origin: %dx%d - size: %dx%d (offset:%d)",
startX,startY,
videoIn->outWidth,videoIn->outHeight,
videoIn->capCropOffset);
/* Set the framerate. If it fails, it wont be fatal */
if (ioctl(fd,VIDIOC_S_PARM,&videoIn->params) < 0) {
- LOGE("VIDIOC_S_PARM error: Unable to set %d fps", closest.getFps());
+ ALOGE("VIDIOC_S_PARM error: Unable to set %d fps", closest.getFps());
}
/* Gets video device defined frame rate (not real - consider it a maximum value) */
if (ioctl(fd,VIDIOC_G_PARM,&videoIn->params) < 0) {
- LOGE("VIDIOC_G_PARM - Unable to get timeperframe");
+ ALOGE("VIDIOC_G_PARM - Unable to get timeperframe");
}
- LOGI("Actual format: (%d x %d), Fps: %d, pixfmt: '%c%c%c%c', bytesperline: %d",
+ ALOGI("Actual format: (%d x %d), Fps: %d, pixfmt: '%c%c%c%c', bytesperline: %d",
videoIn->format.fmt.pix.width,
videoIn->format.fmt.pix.height,
videoIn->params.parm.capture.timeperframe.denominator,
/* Try to set it */
if(ioctl(fd,VIDIOC_S_JPEGCOMP, &videoIn->jpegcomp) >= 0)
{
- LOGE("VIDIOC_S_COMP:");
+ ALOGE("VIDIOC_S_COMP:");
if(errno == EINVAL)
{
videoIn->jpegcomp.quality = -1; //not supported
- LOGE(" compression control not supported\n");
+ ALOGE(" compression control not supported\n");
}
}
/* gets video stream jpeg compression parameters */
if(ioctl(fd,VIDIOC_G_JPEGCOMP, &videoIn->jpegcomp) >= 0) {
- LOGD("VIDIOC_G_COMP:\n");
- LOGD(" quality: %i\n", videoIn->jpegcomp.quality);
- LOGD(" APPn: %i\n", videoIn->jpegcomp.APPn);
- LOGD(" APP_len: %i\n", videoIn->jpegcomp.APP_len);
- LOGD(" APP_data: %s\n", videoIn->jpegcomp.APP_data);
- LOGD(" COM_len: %i\n", videoIn->jpegcomp.COM_len);
- LOGD(" COM_data: %s\n", videoIn->jpegcomp.COM_data);
- LOGD(" jpeg_markers: 0x%x\n", videoIn->jpegcomp.jpeg_markers);
+ ALOGD("VIDIOC_G_COMP:\n");
+ ALOGD(" quality: %i\n", videoIn->jpegcomp.quality);
+ ALOGD(" APPn: %i\n", videoIn->jpegcomp.APPn);
+ ALOGD(" APP_len: %i\n", videoIn->jpegcomp.APP_len);
+ ALOGD(" APP_data: %s\n", videoIn->jpegcomp.APP_data);
+ ALOGD(" COM_len: %i\n", videoIn->jpegcomp.COM_len);
+ ALOGD(" COM_data: %s\n", videoIn->jpegcomp.COM_data);
+ ALOGD(" jpeg_markers: 0x%x\n", videoIn->jpegcomp.jpeg_markers);
} else {
- LOGE("VIDIOC_G_COMP:");
+ ALOGE("VIDIOC_G_COMP:");
if(errno == EINVAL) {
videoIn->jpegcomp.quality = -1; //not supported
- LOGE(" compression control not supported\n");
+ ALOGE(" compression control not supported\n");
}
}
}
ret = ioctl(fd, VIDIOC_REQBUFS, &videoIn->rb);
if (ret < 0) {
- LOGE("Init: VIDIOC_REQBUFS failed: %s", strerror(errno));
+ ALOGE("Init: VIDIOC_REQBUFS failed: %s", strerror(errno));
return ret;
}
ret = ioctl (fd, VIDIOC_QUERYBUF, &videoIn->buf);
if (ret < 0) {
- LOGE("Init: Unable to query buffer (%s)", strerror(errno));
+ ALOGE("Init: Unable to query buffer (%s)", strerror(errno));
return ret;
}
videoIn->buf.m.offset);
if (videoIn->mem[i] == MAP_FAILED) {
- LOGE("Init: Unable to map buffer (%s)", strerror(errno));
+ ALOGE("Init: Unable to map buffer (%s)", strerror(errno));
return -1;
}
ret = ioctl(fd, VIDIOC_QBUF, &videoIn->buf);
if (ret < 0) {
- LOGE("Init: VIDIOC_QBUF Failed");
+ ALOGE("Init: VIDIOC_QBUF Failed");
return -1;
}
free(videoIn->tmpBuffer);
videoIn->tmpBuffer = (uint8_t*)calloc(1, tmpbuf_size);
if (!videoIn->tmpBuffer) {
- LOGE("couldn't calloc %lu bytes of memory for frame buffer\n",
+ ALOGE("couldn't calloc %lu bytes of memory for frame buffer\n",
(unsigned long) tmpbuf_size);
return -ENOMEM;
}
break;
default:
- LOGE("Should never arrive (1)- exit fatal !!\n");
+ ALOGE("Should never arrive (1)- exit fatal !!\n");
return -1;
}
for (int i = 0; i < DQcount-1; i++) {
ret = ioctl(fd, VIDIOC_DQBUF, &videoIn->buf);
- LOGE_IF(ret < 0, "Uninit: VIDIOC_DQBUF Failed");
+ ALOGE_IF(ret < 0, "Uninit: VIDIOC_DQBUF Failed");
}
nQueued = 0;
nDequeued = 0;
for (int i = 0; i < NB_BUFFER; i++)
if (videoIn->mem[i] != NULL) {
ret = munmap(videoIn->mem[i], videoIn->buf.length);
- LOGE_IF(ret < 0, "Uninit: Unmap failed");
+ ALOGE_IF(ret < 0, "Uninit: Unmap failed");
videoIn->mem[i] = NULL;
}
ret = ioctl (fd, VIDIOC_STREAMON, &type);
if (ret < 0) {
- LOGE("StartStreaming: Unable to start capture: %s", strerror(errno));
+ ALOGE("StartStreaming: Unable to start capture: %s", strerror(errno));
return ret;
}
ret = ioctl (fd, VIDIOC_STREAMOFF, &type);
if (ret < 0) {
- LOGE("StopStreaming: Unable to stop capture: %s", strerror(errno));
+ ALOGE("StopStreaming: Unable to stop capture: %s", strerror(errno));
return ret;
}
videoIn->buf.memory = V4L2_MEMORY_MMAP;
ret = ioctl(fd, VIDIOC_DQBUF, &videoIn->buf);
if (ret < 0) {
- LOGE("GrabPreviewFrame: VIDIOC_DQBUF Failed");
+ ALOGE("GrabPreviewFrame: VIDIOC_DQBUF Failed");
return;
}
/* Avoid crashing! - Make sure there is enough room in the output buffer! */
if (maxSize < videoIn->outFrameSize) {
- LOGE("V4L2Camera::GrabRawFrame: Insufficient space in output buffer: Required: %d, Got %d - DROPPING FRAME",videoIn->outFrameSize,maxSize);
+ ALOGE("V4L2Camera::GrabRawFrame: Insufficient space in output buffer: Required: %d, Got %d - DROPPING FRAME",videoIn->outFrameSize,maxSize);
} else {
case V4L2_PIX_FMT_MJPEG:
if(videoIn->buf.bytesused <= HEADERFRAME1) {
// Prevent crash on empty image
- LOGE("Ignoring empty buffer ...\n");
+ ALOGE("Ignoring empty buffer ...\n");
break;
}
if (jpeg_decode((uint8_t*)frameBuffer, strideOut, src, videoIn->outWidth, videoIn->outHeight) < 0) {
- LOGE("jpeg decode errors\n");
+ ALOGE("jpeg decode errors\n");
break;
}
break;
break;
default:
- LOGE("error grabbing: unknown format: %i\n", videoIn->format.fmt.pix.pixelformat);
+ ALOGE("error grabbing: unknown format: %i\n", videoIn->format.fmt.pix.pixelformat);
break;
}
/* And Queue the buffer again */
ret = ioctl(fd, VIDIOC_QBUF, &videoIn->buf);
if (ret < 0) {
- LOGE("GrabPreviewFrame: VIDIOC_QBUF Failed");
+ ALOGE("GrabPreviewFrame: VIDIOC_QBUF Failed");
return;
}
 * returns 0 if enumeration succeeded or errno otherwise */
bool V4L2Camera::EnumFrameIntervals(int pixfmt, int width, int height)
{
- LOGD("V4L2Camera::EnumFrameIntervals: pixfmt: 0x%08x, w:%d, h:%d",pixfmt,width,height);
+ ALOGD("V4L2Camera::EnumFrameIntervals: pixfmt: 0x%08x, w:%d, h:%d",pixfmt,width,height);
struct v4l2_frmivalenum fival;
int list_fps=0;
fival.width = width;
fival.height = height;
- LOGD("\tTime interval between frame: ");
+ ALOGD("\tTime interval between frame: ");
while (ioctl(fd,VIDIOC_ENUM_FRAMEINTERVALS, &fival) >= 0)
{
fival.index++;
if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
- LOGD("%u/%u", fival.discrete.numerator, fival.discrete.denominator);
+ ALOGD("%u/%u", fival.discrete.numerator, fival.discrete.denominator);
m_AllFmts.add( SurfaceDesc( width, height, fival.discrete.denominator ) );
list_fps++;
} else if (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
- LOGD("{min { %u/%u } .. max { %u/%u } }",
+ ALOGD("{min { %u/%u } .. max { %u/%u } }",
fival.stepwise.min.numerator, fival.stepwise.min.numerator,
fival.stepwise.max.denominator, fival.stepwise.max.denominator);
break;
} else if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
- LOGD("{min { %u/%u } .. max { %u/%u } / "
+ ALOGD("{min { %u/%u } .. max { %u/%u } / "
"stepsize { %u/%u } }",
fival.stepwise.min.numerator, fival.stepwise.min.denominator,
fival.stepwise.max.numerator, fival.stepwise.max.denominator,
 * returns 0 if enumeration succeeded or errno otherwise */
bool V4L2Camera::EnumFrameSizes(int pixfmt)
{
- LOGD("V4L2Camera::EnumFrameSizes: pixfmt: 0x%08x",pixfmt);
+ ALOGD("V4L2Camera::EnumFrameSizes: pixfmt: 0x%08x",pixfmt);
int ret=0;
int fsizeind = 0;
struct v4l2_frmsizeenum fsize;
while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &fsize) >= 0) {
fsize.index++;
if (fsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
- LOGD("{ discrete: width = %u, height = %u }",
+ ALOGD("{ discrete: width = %u, height = %u }",
fsize.discrete.width, fsize.discrete.height);
fsizeind++;
if (!EnumFrameIntervals(pixfmt,fsize.discrete.width, fsize.discrete.height))
- LOGD(" Unable to enumerate frame intervals");
+ ALOGD(" Unable to enumerate frame intervals");
} else if (fsize.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
- LOGD("{ continuous: min { width = %u, height = %u } .. "
+ ALOGD("{ continuous: min { width = %u, height = %u } .. "
"max { width = %u, height = %u } }",
fsize.stepwise.min_width, fsize.stepwise.min_height,
fsize.stepwise.max_width, fsize.stepwise.max_height);
- LOGD(" will not enumerate frame intervals.\n");
+ ALOGD(" will not enumerate frame intervals.\n");
} else if (fsize.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
- LOGD("{ stepwise: min { width = %u, height = %u } .. "
+ ALOGD("{ stepwise: min { width = %u, height = %u } .. "
"max { width = %u, height = %u } / "
"stepsize { width = %u, height = %u } }",
fsize.stepwise.min_width, fsize.stepwise.min_height,
fsize.stepwise.max_width, fsize.stepwise.max_height,
fsize.stepwise.step_width, fsize.stepwise.step_height);
- LOGD(" will not enumerate frame intervals.");
+ ALOGD(" will not enumerate frame intervals.");
} else {
- LOGE(" fsize.type not supported: %d\n", fsize.type);
- LOGE(" (Discrete: %d Continuous: %d Stepwise: %d)",
+ ALOGE(" fsize.type not supported: %d\n", fsize.type);
+ ALOGE(" (Discrete: %d Continuous: %d Stepwise: %d)",
V4L2_FRMSIZE_TYPE_DISCRETE,
V4L2_FRMSIZE_TYPE_CONTINUOUS,
V4L2_FRMSIZE_TYPE_STEPWISE);
fmt.fmt.pix.field = V4L2_FIELD_ANY;
if (ioctl(fd,VIDIOC_TRY_FMT, &fmt) >= 0) {
- LOGD("{ ?GSPCA? : width = %u, height = %u }\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
+ ALOGD("{ ?GSPCA? : width = %u, height = %u }\n", fmt.fmt.pix.width, fmt.fmt.pix.height);
// Add the mode descriptor
m_AllFmts.add( SurfaceDesc( fmt.fmt.pix.width, fmt.fmt.pix.height, 25 ) );
* returns: pointer to LFormats struct containing list of available frame formats */
bool V4L2Camera::EnumFrameFormats()
{
- LOGD("V4L2Camera::EnumFrameFormats");
+ ALOGD("V4L2Camera::EnumFrameFormats");
struct v4l2_fmtdesc fmt;
// Start with no modes
while (ioctl(fd,VIDIOC_ENUM_FMT, &fmt) >= 0) {
fmt.index++;
- LOGD("{ pixelformat = '%c%c%c%c', description = '%s' }",
+ ALOGD("{ pixelformat = '%c%c%c%c', description = '%s' }",
fmt.pixelformat & 0xFF, (fmt.pixelformat >> 8) & 0xFF,
(fmt.pixelformat >> 16) & 0xFF, (fmt.pixelformat >> 24) & 0xFF,
fmt.description);
//enumerate frame sizes for this pixel format
if (!EnumFrameSizes(fmt.pixelformat)) {
- LOGE(" Unable to enumerate frame sizes.");
+ ALOGE(" Unable to enumerate frame sizes.");
}
};
SortedVector<SurfaceSize> V4L2Camera::getAvailableSizes() const
{
- LOGD("V4L2Camera::getAvailableSizes");
+ ALOGD("V4L2Camera::getAvailableSizes");
SortedVector<SurfaceSize> ret;
// Iterate through the list. All duplicated entries will be removed
SortedVector<int> V4L2Camera::getAvailableFps() const
{
- LOGD("V4L2Camera::getAvailableFps");
+ ALOGD("V4L2Camera::getAvailableFps");
SortedVector<int> ret;
// Iterate through the list. All duplicated entries will be removed