package net.osdn.gokigen.a01d.camera.nikon.operation;
+import android.util.Log;
+
import androidx.annotation.NonNull;
import net.osdn.gokigen.a01d.camera.ICaptureControl;
+import net.osdn.gokigen.a01d.camera.ptpip.wrapper.command.IPtpIpCommandCallback;
import net.osdn.gokigen.a01d.camera.ptpip.wrapper.command.PtpIpCommandPublisher;
+import net.osdn.gokigen.a01d.camera.ptpip.wrapper.command.messages.PtpIpCommandGeneric;
import net.osdn.gokigen.a01d.liveview.IAutoFocusFrameDisplay;
-public class NikonCaptureControl implements ICaptureControl
+import static net.osdn.gokigen.a01d.camera.ptpip.wrapper.command.IPtpIpMessages.REQUEST_SHUTTER_ON;
+import static net.osdn.gokigen.a01d.camera.ptpip.wrapper.command.IPtpIpMessages.SEQ_DEVICE_READY;
+import static net.osdn.gokigen.a01d.camera.ptpip.wrapper.command.IPtpIpMessages.SEQ_GET_EVENT;
+
+/**
+ *  Shutter (capture) control for Nikon cameras over PTP/IP.
+ *  Enqueues the shutter sequence on the shared command publisher and
+ *  receives the command replies via IPtpIpCommandCallback.
+ */
+public class NikonCaptureControl implements ICaptureControl, IPtpIpCommandCallback
{
+    private final String TAG = this.toString();
    private final PtpIpCommandPublisher commandPublisher;
-    private final IAutoFocusFrameDisplay frameDisplayer;
+    //private final IAutoFocusFrameDisplay frameDisplayer;
+    private boolean isDumpLog = true;
    public NikonCaptureControl(@NonNull PtpIpCommandPublisher commandPublisher, IAutoFocusFrameDisplay frameDisplayer)
    {
        this.commandPublisher = commandPublisher;
-        this.frameDisplayer = frameDisplayer;
+        //this.frameDisplayer = frameDisplayer;
+    }
+
+    /**
+     *  Fires the shutter: REQUEST_SHUTTER_ON (0x9207), then DeviceReady (0x90c8)
+     *  and GetEvent (0x90c7) to drain the camera's response/event state.
+     *  @param kind capture kind (currently unused by this implementation)
+     */
+    @Override
+    public void doCapture(int kind)
+    {
+        try
+        {
+            // Shutter
+            Log.v(TAG, " doCapture() ");
+            // Release the shutter
+            commandPublisher.enqueueCommand(new PtpIpCommandGeneric(this, REQUEST_SHUTTER_ON, isDumpLog, 0, 0x9207, 8, 0xffffffff, 0x00));
+            commandPublisher.enqueueCommand(new PtpIpCommandGeneric(this, SEQ_DEVICE_READY, isDumpLog, 0, 0x90c8));
+            commandPublisher.enqueueCommand(new PtpIpCommandGeneric(this, SEQ_GET_EVENT, isDumpLog, 0, 0x90c7));
+        }
+        catch (Exception e)
+        {
+            e.printStackTrace();
+        }
    }
+
+    /**
+     *  Reply handler: logs when the response code is not 0x2001 (PTP "OK",
+     *  little-endian bytes 0x01 0x20 at offset 8/9 of the reply packet).
+     */
+    @Override
+    public void receivedMessage(int id, byte[] rx_body)
+    {
+        Log.v(TAG, " NikonCaptureControl::receivedMessage() : ");
+        try
+        {
+            if ((rx_body.length > 10)&&((rx_body[8] != (byte) 0x01)||(rx_body[9] != (byte) 0x20)))
+            {
+                Log.v(TAG, " --- RECEIVED NG REPLY. : " + id);
+            }
+        }
+        catch (Exception e)
+        {
+            e.printStackTrace();
+        }
+    }
    @Override
-    public void doCapture(int kind)
+    public void onReceiveProgress(int currentBytes, int totalBytes, byte[] rx_body)
    {
+        Log.v(TAG, " NikonCaptureControl::onReceiveProgress() : " + currentBytes + "/" + totalBytes);
+    }
+
+    // Capture replies are single-packet; no multi-packet accumulation needed.
+    @Override
+    public boolean isReceiveMulti()
+    {
+        return (false);
    }
+
}
package net.osdn.gokigen.a01d.camera.nikon.operation;
import android.app.Activity;
+import android.content.SharedPreferences;
+import android.graphics.PointF;
+import android.graphics.RectF;
+import android.util.Log;
import android.view.MotionEvent;
import androidx.annotation.NonNull;
+import androidx.preference.PreferenceManager;
import net.osdn.gokigen.a01d.camera.IFocusingControl;
+import net.osdn.gokigen.a01d.camera.ptpip.wrapper.command.IPtpIpCommandCallback;
import net.osdn.gokigen.a01d.camera.ptpip.wrapper.command.PtpIpCommandPublisher;
+import net.osdn.gokigen.a01d.camera.ptpip.wrapper.command.messages.PtpIpCommandGeneric;
import net.osdn.gokigen.a01d.liveview.IAutoFocusFrameDisplay;
import net.osdn.gokigen.a01d.liveview.IIndicatorControl;
+import net.osdn.gokigen.a01d.preference.IPreferencePropertyAccessor;
-public class NikonFocusingControl implements IFocusingControl
+/**
+ *  Touch-driven autofocus control for Nikon cameras over PTP/IP.
+ *  Converts a touch point on the live view into a camera focus-point command
+ *  (opcode 0x9205 lock / 0x9206 unlock) and updates the on-screen focus frame
+ *  from the replies received via IPtpIpCommandCallback.
+ */
+public class NikonFocusingControl implements IFocusingControl, IPtpIpCommandCallback
{
-    private final Activity context;
+    private final String TAG = this.toString();
+    // Request ids used to correlate enqueueCommand() calls with receivedMessage() replies.
+    private static final int FOCUS_LOCK_PRE = 15;
+    private static final int FOCUS_LOCK = 16;
+    private static final int FOCUS_MOVE = 17;
+    private static final int FOCUS_UNLOCK = 18;
+
+    //private final Activity context;
    private final PtpIpCommandPublisher commandPublisher;
    private final IAutoFocusFrameDisplay frameDisplayer;
    private final IIndicatorControl indicator;
+    // Focus coordinate resolution (camera-side grid); read from preferences, fallback 6000x4000.
+    private float maxPointLimitWidth;
+    private float maxPointLimitHeight;
+    // Last provisional focus frame shown while a lock request is in flight; null when none.
+    private RectF preFocusFrameRect = null;
+    private boolean isDumpLog = false;
+
    public NikonFocusingControl(@NonNull Activity context, @NonNull PtpIpCommandPublisher commandPublisher, IAutoFocusFrameDisplay frameDisplayer, IIndicatorControl indicator)
    {
-        this.context = context;
+        //this.context = context;
        this.commandPublisher = commandPublisher;
        this.frameDisplayer = frameDisplayer;
        this.indicator = indicator;
+        try
+        {
+            SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(context);
+            // NOTE(review): this Nikon class reads the CANON_FOCUS_XY preference key — confirm the
+            // shared key is intentional (no Nikon-specific preference exists yet?).
+            String focusPoint = preferences.getString(IPreferencePropertyAccessor.CANON_FOCUS_XY, IPreferencePropertyAccessor.CANON_FOCUS_XY_DEFAULT_VALUE);
+            String[] focus = focusPoint.split(",");
+            if (focus.length == 2)
+            {
+                maxPointLimitWidth = Integer.parseInt(focus[0]);
+                maxPointLimitHeight = Integer.parseInt(focus[1]);
+            }
+            else
+            {
+                maxPointLimitWidth = 6000.0f;
+                maxPointLimitHeight = 4000.0f;
+            }
+            Log.v(TAG, "FOCUS RESOLUTION : " + maxPointLimitWidth + "," + maxPointLimitHeight);
+        }
+        catch (Exception e)
+        {
+            e.printStackTrace();
+            maxPointLimitWidth = 6000.0f;
+            maxPointLimitHeight = 4000.0f;
+        }
    }
+
+    /**
+     *  Handles a touch on the live view: on ACTION_DOWN, shows a provisional focus
+     *  frame at the touched point and (if inside the content area) requests an AF lock.
+     *  Runs on a worker thread. Always returns false (the event is not consumed).
+     */
+    @Override
+    public boolean driveAutoFocus(final MotionEvent motionEvent)
+    {
+        Log.v(TAG, "driveAutoFocus()");
+        if (motionEvent.getAction() != MotionEvent.ACTION_DOWN)
+        {
+            return (false);
+        }
+        Thread thread = new Thread(new Runnable() {
+            @Override
+            public void run() {
+                try
+                {
+                    PointF point = frameDisplayer.getPointWithEvent(motionEvent);
+                    if (point != null)
+                    {
+                        preFocusFrameRect = getPreFocusFrameRect(point);
+                        showFocusFrame(preFocusFrameRect, IAutoFocusFrameDisplay.FocusFrameStatus.Running, 0.0);
+                        if (frameDisplayer.isContainsPoint(point))
+                        {
+                            lockAutoFocus(point);
+                        }
+                    }
+                }
+                catch (Exception e)
+                {
+                    e.printStackTrace();
+                }
+            }
+        });
+        try
+        {
+            thread.start();
+        }
+        catch (Exception e)
+        {
+            e.printStackTrace();
+        }
+        return (false);
+    }
    @Override
-    public boolean driveAutoFocus(MotionEvent motionEvent) {
-        return false;
+    public void unlockAutoFocus()
+    {
+        try
+        {
+            Log.v(TAG, " Unlock AF ");
+            // Opcode 0x9206 : release the AF lock.
+            commandPublisher.enqueueCommand(new PtpIpCommandGeneric(this, FOCUS_UNLOCK, isDumpLog, 0, 0x9206));
+        }
+        catch (Exception e)
+        {
+            e.printStackTrace();
+        }
    }
    @Override
-    public void unlockAutoFocus() {
+    public void halfPressShutter(boolean isPressed)
+    {
+        // Currently mapped to unlock regardless of isPressed; the center-point lock is disabled.
+        unlockAutoFocus();
+        //lockAutoFocus(new PointF(0.5f, 0.5f));
+    }
+
+    // Sends the AF-lock command (opcode 0x9205) for a normalized (0..1) touch point,
+    // scaled to the camera's focus coordinate grid (1-based, masked to 16 bits).
+    private void lockAutoFocus(PointF point)
+    {
+        try
+        {
+            int x = (0x0000ffff & (Math.round(point.x * maxPointLimitWidth) + 1));
+            int y = (0x0000ffff & (Math.round(point.y * maxPointLimitHeight) + 1));
+            Log.v(TAG, "Lock AF: [" + x + ","+ y + "]");
+            // NOTE(review): unlike every other enqueueCommand() call in this file, there is no
+            // leading 0 argument before the opcode (0x9205) — verify this hits the intended
+            // PtpIpCommandGeneric overload and not one that misreads 0x9205 as another parameter.
+            commandPublisher.enqueueCommand(new PtpIpCommandGeneric(this, FOCUS_LOCK, isDumpLog, 0x9205, 8, x, y));
+        }
+        catch (Exception e)
+        {
+            e.printStackTrace();
+        }
+    }
+
+    /**
+     *  Builds a provisional focus frame rectangle (normalized coordinates) centered
+     *  on the touched point, with an aspect-corrected ~0.125 nominal size.
+     */
+    private RectF getPreFocusFrameRect(@NonNull PointF point)
+    {
+        float imageWidth = frameDisplayer.getContentSizeWidth();
+        float imageHeight = frameDisplayer.getContentSizeHeight();
+
+        // Display a provisional focus frame at the touched point.
+        float focusWidth = 0.125f; // 0.125 is rough estimate.
+        float focusHeight = 0.125f;
+        if (imageWidth > imageHeight)
+        {
+            focusHeight *= (imageWidth / imageHeight);
+        }
+        else
+        {
+            focusHeight *= (imageHeight / imageWidth);
+        }
+        return (new RectF(point.x - focusWidth / 2.0f, point.y - focusHeight / 2.0f,
+                point.x + focusWidth / 2.0f, point.y + focusHeight / 2.0f));
+    }
+
+    /**
+     *  Shows the focus frame and updates the AF-lock indicator
+     *  (indicator is "locked" only when status == Focused).
+     */
+    private void showFocusFrame(RectF rect, IAutoFocusFrameDisplay.FocusFrameStatus status, double duration)
+    {
+        frameDisplayer.showFocusFrame(rect, status, duration);
+        indicator.onAfLockUpdate(IAutoFocusFrameDisplay.FocusFrameStatus.Focused == status);
    }
+    /**
+     *  Hides the focus frame and clears the AF-lock indicator.
+     */
+    private void hideFocusFrame()
+    {
+        frameDisplayer.hideFocusFrame();
+        indicator.onAfLockUpdate(false);
+    }
+
+
    @Override
-    public void halfPressShutter(boolean isPressed) {
+    public void receivedMessage(int id, byte[] rx_body)
+    {
+        try
+        {
+            // Reply code other than 0x2001 ("OK": bytes 0x01 0x20 at offset 8/9) -> NG, clear frame.
+            if ((rx_body.length > 10)&&((rx_body[8] != (byte) 0x01)||(rx_body[9] != (byte) 0x20)))
+            {
+                Log.v(TAG, " --- RECEIVED NG REPLY. : FOCUS OPERATION ---");
+                hideFocusFrame();
+                preFocusFrameRect = null;
+                return;
+            }
+
+            if ((id == FOCUS_LOCK)||(id == FOCUS_LOCK_PRE))
+            {
+                Log.v(TAG, "FOCUS LOCKED");
+                if (preFocusFrameRect != null)
+                {
+                    // showFocusFrame(preFocusFrameRect, IAutoFocusFrameDisplay.FocusFrameStatus.Focused, 1.0); // show for only 1 second
+                    showFocusFrame(preFocusFrameRect, IAutoFocusFrameDisplay.FocusFrameStatus.Focused, -1);
+                }
+            }
+            else if (id == FOCUS_MOVE)
+            {
+                Log.v(TAG, "FOCUS MOVED");
+                if (preFocusFrameRect != null)
+                {
+                    hideFocusFrame();
+                }
+            }
+            else // if (id == FOCUS_UNLOCK)
+            {
+                Log.v(TAG, "FOCUS UNLOCKED");
+                hideFocusFrame();
+            }
+            preFocusFrameRect = null;
+        }
+        catch (Exception e)
+        {
+            e.printStackTrace();
+        }
+    }
+
+    @Override
+    public void onReceiveProgress(int currentBytes, int totalBytes, byte[] rx_body)
+    {
+        Log.v(TAG, " " + currentBytes + "/" + totalBytes);
+    }
+
+    // Focus replies are single-packet; no multi-packet accumulation needed.
+    @Override
+    public boolean isReceiveMulti()
+    {
+        return (false);
    }
}
interfaceProvider.getInformationReceiver().updateMessage(context.getString(R.string.canon_connect_connecting5), false, false, 0);
interfaceProvider.getInformationReceiver().updateMessage(context.getString(R.string.connect_connect_finished), false, false, 0);
connectFinished();
- Log.v(TAG, "CHANGED PLAYBACK MODE : DONE.");
+ Log.v(TAG, " CONNECT TO CAMERA : DONE.");
break;
default:
- Log.v(TAG, "RECEIVED UNKNOWN ID : " + id);
+ Log.v(TAG, " RECEIVED UNKNOWN ID : " + id);
onConnectError(context.getString(R.string.connect_receive_unknown_message));
break;
}
Log.v(TAG, "onReceiveBroadcastOfConnection() : CONNECTIVITY_ACTION");
WifiManager wifiManager = (WifiManager) context.getApplicationContext().getSystemService(Context.WIFI_SERVICE);
- if (wifiManager != null) {
+ if (wifiManager != null)
+ {
WifiInfo info = wifiManager.getConnectionInfo();
- if (wifiManager.isWifiEnabled() && info != null) {
- if (info.getNetworkId() != -1) {
+ if (wifiManager.isWifiEnabled() && info != null)
+ {
+ if (info.getNetworkId() != -1)
+ {
Log.v(TAG, "Network ID is -1, there is no currently connected network.");
}
// 自動接続が指示されていた場合は、カメラとの接続処理を行う
if (!commandIssued)
{
commandIssued = true;
- commandIssuer.enqueueCommand(new PtpIpCommandGeneric(imageReceiver, SEQ_GET_VIEWFRAME, 20, false, 0, 0x9203, 0, 0x00, 0x00, 0x00, 0x00));
+ commandIssuer.enqueueCommand(new PtpIpCommandGeneric(imageReceiver, SEQ_GET_VIEWFRAME, 40, true, 0, 0x9203, 0, 0x00, 0x00, 0x00, 0x00));
}
try
{
//Log.v(TAG, " ---+++--- RECEIVED LV IMAGE ---+++--- ");
try
{
- if ((dataReceiver != null)&&(data != null))
+ if ((dataReceiver != null)&&(data != null)&&(data.length > 0))
{
//Log.v(TAG, " ---+++--- RECEIVED LV IMAGE ---+++--- : " + data.length + " bytes.");
+ //SimpleLogDumper.dump_bytes(" [LVLV] " + ": ", Arrays.copyOfRange(data, 0, (0 + 512)));
//dataReceiver.setImageData(data, metadata);
+ int offset = 384;
if (data.length > 8)
{
- dataReceiver.setImageData(Arrays.copyOfRange(data, 8, data.length), metadata); // ヘッダ部分を切り取って送る
+ dataReceiver.setImageData(Arrays.copyOfRange(data, offset, data.length), metadata); // ヘッダ部分を切り取って送る
}
}
}
try
{
// end of receive sequence.
- //byte [] thumbnail = byteStream.toByteArray();
- //Log.v(TAG, " TransferComplete() RECEIVED : " + id + " size : " + target_image_size + " (" + thumbnail.length + ")");
- //SimpleLogDumper.dump_bytes(" [xxxxx]", Arrays.copyOfRange(thumbnail, 0, (64)));
- //SimpleLogDumper.dump_bytes(" [zzzzz]", Arrays.copyOfRange(thumbnail, (thumbnail.length - 64), (thumbnail.length)));
+ byte [] thumbnail = byteStream.toByteArray();
+ //byte [] thumbnail = rx_body;
+ Log.v(TAG, " TransferComplete() RECEIVED id[" + id + "] size : " + target_image_size + " (" + thumbnail.length + ")");
+ //SimpleLogDumper.dump_bytes(" [xxxxx]", Arrays.copyOfRange(thumbnail, 0, (512)));
+ //SimpleLogDumper.dump_bytes(" [zzzzz]", Arrays.copyOfRange(thumbnail, (thumbnail.length - 128), (thumbnail.length)));
callback.onCompleted(byteStream.toByteArray(), null);
+ //callback.onCompleted(rx_body, null);
receivedFirstData = false;
received_remain_bytes = 0;
received_total_bytes = 0;
target_image_size = 0;
-
byteStream.reset();
}
catch (Exception e)
{
return;
}
+ int first_offset = 416;
int length = rx_body.length;
int data_position = 0;
if (!receivedFirstData)
receivedFirstData = true;
data_position = (int) rx_body[0] & (0xff);
Log.v(TAG, " FIRST DATA POS. : " + data_position);
- SimpleLogDumper.dump_bytes(" [sssss]", Arrays.copyOfRange(rx_body, 0, (512)));
+ //SimpleLogDumper.dump_bytes(" [sssXXXsss]", Arrays.copyOfRange(rx_body, first_offset, (first_offset + 64)));
}
else if (received_remain_bytes > 0)
{
}
}
- while (data_position <= (length - 12)) {
- int body_size = (rx_body[data_position] & 0xff) + ((rx_body[data_position + 1] & 0xff) << 8) +
- ((rx_body[data_position + 2] & 0xff) << 16) + ((rx_body[data_position + 3] & 0xff) << 24);
- if (body_size <= 12) {
+ while (data_position <= (length - 12))
+ {
+ int body_size = (rx_body[data_position] & 0xff) + ((rx_body[data_position + 1] & 0xff) << 8) + ((rx_body[data_position + 2] & 0xff) << 16) + ((rx_body[data_position + 3] & 0xff) << 24);
+ if (body_size <= 12)
+ {
Log.v(TAG, " --- BODY SIZE IS SMALL : " + data_position + " (" + body_size + ") [" + received_remain_bytes + "] " + rx_body.length + " (" + target_image_size + ")");
//int startpos = (data_position > 48) ? (data_position - 48) : 0;
//SimpleLogDumper.dump_bytes(" [xxx]", Arrays.copyOfRange(rx_body, startpos, (data_position + 48)));
break;
}
+ int forward_length = data_position;
+
// 受信データ(のヘッダ部分)をダンプする
- //Log.v(TAG, " RX DATA : " + data_position + " (" + body_size + ") [" + received_remain_bytes + "] (" + received_total_bytes + ")");
- //SimpleLogDumper.dump_bytes(" [zzz] " + data_position + ": ", Arrays.copyOfRange(rx_body, data_position, (data_position + 48)));
+ Log.v(TAG, " RX DATA : " + data_position + " (" + body_size + ") [" + received_remain_bytes + "] (" + received_total_bytes + ")");
+ try
+ {
+ while (forward_length < rx_body.length)
+ {
+
+ if ((rx_body[forward_length] == (byte) 0xff)&&(rx_body[forward_length + 1] == (byte) 0xd8))
+ {
+ break;
+ }
+ forward_length++;
+ }
+ if (forward_length >= rx_body.length)
+ {
+ forward_length = data_position + 12;
+ }
+ }
+ catch (Exception e)
+ {
+ e.printStackTrace();
+ }
- if ((data_position + body_size) > length) {
+ if ((data_position + body_size) > length)
+ {
// データがすべてバッファ内になかったときは、バッファすべてコピーして残ったサイズを記憶しておく。
- int copysize = (length - ((data_position + 12)));
- byteStream.write(rx_body, (data_position + 12), copysize);
- received_remain_bytes = body_size - copysize - 12; // マイナス12は、ヘッダ分
+ int copysize = (length - ((data_position + (12))));
+ byteStream.write(rx_body, (data_position + (12)), copysize);
+ received_remain_bytes = body_size - copysize - (12); // マイナス12は、ヘッダ分
received_total_bytes = received_total_bytes + copysize;
- //Log.v(TAG, " ----- copy : " + (data_position + 12) + " " + copysize + " remain : " + received_remain_bytes + " body size : " + body_size);
+ Log.v(TAG, " ----- copy : " + (data_position + (12)) + " " + copysize + " remain : " + received_remain_bytes + " body size : " + body_size);
break;
}
- try {
- byteStream.write(rx_body, (data_position + 12), (body_size - 12));
+ try
+ {
+ byteStream.write(rx_body, (data_position + (12)), (body_size - (12)));
data_position = data_position + body_size;
- received_total_bytes = received_total_bytes + 12;
- //Log.v(TAG, " --- COPY : " + (data_position + 12) + " " + (body_size - 12) + " remain : " + received_remain_bytes);
-
- } catch (Exception e) {
+ received_total_bytes = received_total_bytes + (12);
+ Log.v(TAG, " --- COPY : " + (data_position + 12) + " " + (body_size - (12)) + " remain : " + received_remain_bytes);
+ }
+ catch (Exception e)
+ {
Log.v(TAG, " pos : " + data_position + " size : " + body_size + " length : " + length);
e.printStackTrace();
}
    @Override
    public boolean isReceiveMulti()
    {
-        return (false);
+        // NOTE(review): behavior change — this callback now accumulates multi-packet
+        // responses; confirm the receive loop handles partial-packet reassembly.
+        return (true);
    }
-
}