NyARCode ar_code = new NyARCode(16, 16);\r
ar_code.loadFromARFile(CARCODE_FILE);\r
ar_param = new J3dNyARParam();\r
- ar_param.loadFromARFile(PARAM_FILE);\r
- ar_param.changeSize(320, 240);\r
+ ar_param.loadARParamFromFile(PARAM_FILE);\r
+ ar_param.changeScreenSize(320, 240);\r
\r
//localeの作成とlocateとviewの設定\r
universe = new VirtualUniverse();\r
import java.awt.*;
import jp.nyatla.nyartoolkit.core.*;
+import jp.nyatla.nyartoolkit.core.param.NyARParam;
import jp.nyatla.nyartoolkit.core.transmat.*;
import jp.nyatla.nyartoolkit.detector.NyARSingleDetectMarker;
//NyARToolkitの準備
NyARParam ar_param=new NyARParam();
NyARCode ar_code =new NyARCode(16,16);
- ar_param.loadFromARFile(PARAM_FILE);
- ar_param.changeSize(320,240);
+ ar_param.loadARParamFromFile(PARAM_FILE);
+ ar_param.changeScreenSize(320,240);
nya=new NyARSingleDetectMarker(ar_param,ar_code,80.0);
ar_code.loadFromARFile(CARCODE_FILE);
//キャプチャイメージ用のラスタを準備
//NyARToolkitの準備\r
_ar_param = new GLNyARParam();\r
NyARCode ar_code = new NyARCode(16, 16);\r
- _ar_param.loadFromARFile(PARAM_FILE);\r
- _ar_param.changeSize(SCREEN_X, SCREEN_Y);\r
+ _ar_param.loadARParamFromFile(PARAM_FILE);\r
+ _ar_param.changeScreenSize(SCREEN_X, SCREEN_Y);\r
_nya = new GLNyARSingleDetectMarker(_ar_param, ar_code, 80.0);\r
_nya.setContinueMode(false);//ここをtrueにすると、transMatContinueモード(History計算)になります。\r
ar_code.loadFromARFile(CARCODE_FILE);\r
//NyARToolkit用の支援クラス\r
- _glnya = new NyARGLUtil(_gl, _ar_param);\r
+ _glnya = new NyARGLUtil(_gl);\r
//GL対応のRGBラスタオブジェクト\r
_cap_image = new GLNyARRaster_RGB(_ar_param);\r
//キャプチャ開始\r
//閾値計算(めんどくさいから一時的に自動調整にしとく。)\r
togs.doFilter(_cap_image, gs);\r
th.analyzeRaster(gs);\r
- threshold=th.getThreshold();\r
+ threshold=110;//th.getThreshold();\r
}\r
} catch (Exception e) {\r
e.printStackTrace();\r
_capture.setCaptureListener(this);\r
//NyARToolkitの準備\r
_ar_param = new GLNyARParam();\r
- _ar_param.loadFromARFile(PARAM_FILE);\r
- _ar_param.changeSize(SCREEN_X, SCREEN_Y);\r
+ _ar_param.loadARParamFromFile(PARAM_FILE);\r
+ _ar_param.changeScreenSize(SCREEN_X, SCREEN_Y);\r
\r
//ARコードを2個ロード\r
double[] width = new double[] { 80.0, 80.0 };\r
_nya = new GLNyARDetectMarker(_ar_param, ar_codes, width, 2);\r
_nya.setContinueMode(false);//ここをtrueにすると、transMatContinueモード(History計算)になります。\r
//NyARToolkit用の支援クラス\r
- _glnya = new NyARGLUtil(_gl, _ar_param);\r
+ _glnya = new NyARGLUtil(_gl);\r
//GL対応のRGBラスタオブジェクト\r
_cap_image = new GLNyARRaster_RGB(_ar_param);\r
//キャプチャ開始\r
import jp.nyatla.nyartoolkit.qt.utils.*;
import java.awt.*;
import jp.nyatla.nyartoolkit.core.*;
+import jp.nyatla.nyartoolkit.core.param.NyARParam;
import jp.nyatla.nyartoolkit.core.transmat.NyARTransMatResult;
import jp.nyatla.nyartoolkit.detector.NyARSingleDetectMarker;
//NyARToolkitの準備
NyARParam ar_param=new NyARParam();
NyARCode ar_code =new NyARCode(16,16);
- ar_param.loadFromARFile(PARAM_FILE);
- ar_param.changeSize(320,240);
+ ar_param.loadARParamFromFile(PARAM_FILE);
+ ar_param.changeScreenSize(320,240);
nya=new NyARSingleDetectMarker(ar_param,ar_code,80.0);
ar_code.loadFromARFile(CARCODE_FILE);
//キャプチャイメージ用のラスタを準備
package jp.nyatla.nyartoolkit.java3d.utils;\r
\r
import jp.nyatla.nyartoolkit.core.*;\r
+import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
+\r
import javax.media.j3d.Transform3D;\r
\r
public class J3dNyARParam extends NyARParam\r
{\r
- private double view_distance_min=0.01;//1cm~10.0m\r
- private double view_distance_max=10.0;\r
- private Transform3D m_projection=null;\r
- /**\r
- * 視体積の近い方をメートルで指定\r
- * @param i_new_value\r
- */\r
- public void setViewDistanceMin(double i_new_value)\r
- {\r
- m_projection=null;//キャッシュ済変数初期化\r
- view_distance_min=i_new_value;\r
- }\r
- /**\r
- * 視体積の遠い方をメートルで指定\r
- * @param i_new_value\r
- */\r
- public void setViewDistanceMax(double i_new_value)\r
- {\r
- m_projection=null;//キャッシュ済変数初期化\r
- view_distance_max=i_new_value;\r
- }\r
- /**\r
- * void arglCameraFrustumRH(const ARParam *cparam, const double focalmin, const double focalmax, GLdouble m_projection[16])\r
- * 関数の置き換え\r
- * @param focalmin\r
- * @param focalmax\r
- * @return\r
- */\r
- public Transform3D getCameraTransform()\r
- {\r
- //既に値がキャッシュされていたらそれを使う\r
- if(m_projection!=null){\r
- return m_projection;\r
+ private double view_distance_min = 0.01;//1cm~10.0m\r
+\r
+ private double view_distance_max = 10.0;\r
+\r
+ private Transform3D m_projection = null;\r
+\r
+ /**\r
+ * 視体積の近い方をメートルで指定\r
+ * @param i_new_value\r
+ */\r
+ public void setViewDistanceMin(double i_new_value)\r
+ {\r
+ m_projection = null;//キャッシュ済変数初期化\r
+ view_distance_min = i_new_value;\r
+ }\r
+\r
+ /**\r
+ * 視体積の遠い方をメートルで指定\r
+ * @param i_new_value\r
+ */\r
+ public void setViewDistanceMax(double i_new_value)\r
+ {\r
+ m_projection = null;//キャッシュ済変数初期化\r
+ view_distance_max = i_new_value;\r
+ }\r
+\r
+ /**\r
+ * void arglCameraFrustumRH(const ARParam *cparam, const double focalmin, const double focalmax, GLdouble m_projection[16])\r
+ * 関数の置き換え\r
+ * @param focalmin\r
+ * @param focalmax\r
+ * @return\r
+ */\r
+ public Transform3D getCameraTransform()\r
+ {\r
+ //既に値がキャッシュされていたらそれを使う\r
+ if (m_projection != null) {\r
+ return m_projection;\r
+ }\r
+ //無ければ計算\r
+\r
+ NyARMat trans_mat = new NyARMat(3, 4);\r
+ NyARMat icpara_mat = new NyARMat(3, 4);\r
+ double[][] p = new double[3][3], q = new double[4][4];\r
+ double width, height;\r
+ int i, j;\r
+\r
+ width = this._screen_size.w;\r
+ height = this._screen_size.h;\r
+\r
+ this.getPerspectiveProjectionMatrix().decompMat(icpara_mat, trans_mat);\r
+\r
+ double[][] icpara = icpara_mat.getArray();\r
+ double[][] trans = trans_mat.getArray();\r
+ for (i = 0; i < 4; i++) {\r
+ icpara[1][i] = (height - 1) * (icpara[2][i]) - icpara[1][i];\r
+ }\r
+\r
+ for (i = 0; i < 3; i++) {\r
+ for (j = 0; j < 3; j++) {\r
+ p[i][j] = icpara[i][j] / icpara[2][2];\r
+ }\r
+ }\r
+ //p[0][0],p[1][1]=n\r
+ //p[0][2],p[1][2]=t+b\r
+\r
+ //Projectionの計算\r
+ q[0][0] = (2.0 * p[0][0] / (width - 1));\r
+ q[0][1] = (2.0 * p[0][1] / (width - 1));\r
+ q[0][2] = -((2.0 * p[0][2] / (width - 1)) - 1.0);\r
+ q[0][3] = 0.0;\r
+\r
+ q[1][0] = 0.0;\r
+ q[1][1] = -(2.0 * p[1][1] / (height - 1));\r
+ q[1][2] = -((2.0 * p[1][2] / (height - 1)) - 1.0);\r
+ q[1][3] = 0.0;\r
+\r
+ q[2][0] = 0.0;\r
+ q[2][1] = 0.0;\r
+ q[2][2] = (view_distance_max + view_distance_min) / (view_distance_min - view_distance_max);\r
+ q[2][3] = 2.0 * view_distance_max * view_distance_min / (view_distance_min - view_distance_max);\r
+\r
+ q[3][0] = 0.0;\r
+ q[3][1] = 0.0;\r
+ q[3][2] = -1.0;\r
+ q[3][3] = 0.0;\r
+\r
+ q[2][2] = q[2][2] * -1;\r
+ q[2][3] = q[2][3] * -1;\r
+\r
+ double[] tmp_projection = new double[16];\r
+ for (i = 0; i < 4; i++) { // Row.\r
+ // First 3 columns of the current row.\r
+ for (j = 0; j < 3; j++) { // Column.\r
+ tmp_projection[i + j * 4] = (q[i][0] * trans[0][j] + q[i][1] * trans[1][j] + q[i][2] * trans[2][j]);\r
+ }\r
+ // Fourth column of the current row.\r
+ tmp_projection[i + 3 * 4] = q[i][0] * trans[0][3] + q[i][1] * trans[1][3] + q[i][2] * trans[2][3] + q[i][3];\r
+ }\r
+ m_projection = new Transform3D(tmp_projection);\r
+ m_projection.transpose();\r
+ return m_projection;\r
}\r
- //無ければ計算\r
-\r
- NyARMat trans_mat=new NyARMat(3,4);\r
- NyARMat icpara_mat=new NyARMat(3,4);\r
- double[][] p=new double[3][3], q=new double[4][4];\r
- double width, height;\r
- int i, j;\r
- \r
- width = xsize;\r
- height = ysize;\r
- \r
- decompMat(icpara_mat,trans_mat);\r
-\r
- double[][] icpara=icpara_mat.getArray();\r
- double[][] trans=trans_mat.getArray();\r
- for (i = 0; i < 4; i++) {\r
- icpara[1][i] = (height - 1)*(icpara[2][i]) - icpara[1][i];\r
- }\r
- \r
- for(i = 0; i < 3; i++) {\r
- for(j = 0; j < 3; j++) {\r
- p[i][j] = icpara[i][j] / icpara[2][2];\r
- }\r
- }\r
- //p[0][0],p[1][1]=n\r
- //p[0][2],p[1][2]=t+b\r
-\r
- //Projectionの計算\r
- q[0][0] = (2.0 * p[0][0] / (width - 1));\r
- q[0][1] = (2.0 * p[0][1] / (width - 1));\r
- q[0][2] = -((2.0 * p[0][2] / (width - 1)) - 1.0);\r
- q[0][3] = 0.0;\r
- \r
- q[1][0] = 0.0;\r
- q[1][1] = -(2.0 * p[1][1] / (height - 1));\r
- q[1][2] = -((2.0 * p[1][2] / (height - 1)) - 1.0);\r
- q[1][3] = 0.0;\r
- \r
- q[2][0] = 0.0;\r
- q[2][1] = 0.0;\r
- q[2][2] = (view_distance_max + view_distance_min)/(view_distance_min - view_distance_max);\r
- q[2][3] = 2.0 * view_distance_max * view_distance_min / (view_distance_min - view_distance_max);\r
- \r
- q[3][0] = 0.0;\r
- q[3][1] = 0.0;\r
- q[3][2] = -1.0;\r
- q[3][3] = 0.0;\r
- \r
- q[2][2]=q[2][2]*-1;\r
- q[2][3]=q[2][3]*-1;\r
- \r
- double[] tmp_projection=new double[16];\r
- for (i = 0; i < 4; i++) { // Row.\r
- // First 3 columns of the current row.\r
- for (j = 0; j < 3; j++) { // Column.\r
- tmp_projection[i + j*4] =(\r
- q[i][0] * trans[0][j] +\r
- q[i][1] * trans[1][j] +\r
- q[i][2] * trans[2][j]);\r
- }\r
- // Fourth column of the current row.\r
- tmp_projection[i + 3*4]=\r
- q[i][0] * trans[0][3] +\r
- q[i][1] * trans[1][3] +\r
- q[i][2] * trans[2][3] +\r
- q[i][3];\r
- }\r
- m_projection=new Transform3D(tmp_projection);\r
- m_projection.transpose();\r
- return m_projection;\r
- }\r
}\r
import javax.media.j3d.ImageComponent2D;\r
\r
import jp.nyatla.nyartoolkit.NyARException;\r
-import jp.nyatla.nyartoolkit.core.NyARParam;\r
+import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
import jp.nyatla.nyartoolkit.jmf.utils.*;\r
\r
/**\r
\r
public J3dNyARRaster_RGB(NyARParam i_cparam)\r
{\r
- super(i_cparam.getX(), i_cparam.getY());\r
+ super(i_cparam.getScreenSize());\r
\r
//RGBのラスタを作る。\r
this.bufferd_image = new BufferedImage(this._size.w, this._size.h, BufferedImage.TYPE_3BYTE_BGR);\r
import javax.vecmath.*;\r
\r
import jp.nyatla.nyartoolkit.NyARException;\r
-import jp.nyatla.nyartoolkit.core.NyARParam;\r
import jp.nyatla.nyartoolkit.jmf.utils.JmfCameraCapture;\r
import jp.nyatla.nyartoolkit.jmf.utils.JmfCaptureListener;\r
import jp.nyatla.nyartoolkit.core.*;\r
+import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
import jp.nyatla.nyartoolkit.core.transmat.NyARTransMatResult;\r
import jp.nyatla.nyartoolkit.detector.*;\r
+import jp.nyatla.nyartoolkit.core.types.*;\r
\r
/**\r
* NyARToolkitと連動したBehaviorを返却するクラスです。\r
*/\r
public class NyARSingleMarkerBehaviorHolder implements JmfCaptureListener\r
{\r
- private NyARParam cparam;\r
- private JmfCameraCapture capture;\r
- private J3dNyARRaster_RGB nya_raster;//最大3スレッドで共有されるので、排他制御かけること。\r
- private NyARSingleDetectMarker nya;\r
- //Behaviorホルダ\r
- private NyARBehavior nya_behavior;\r
- public NyARSingleMarkerBehaviorHolder(NyARParam i_cparam,float i_rate,NyARCode i_ar_code,double i_marker_width) throws NyARException\r
- {\r
- nya_behavior=null;\r
- cparam=i_cparam;\r
- capture=new JmfCameraCapture(cparam.getX(),cparam.getY(),i_rate,JmfCameraCapture.PIXEL_FORMAT_RGB);\r
- capture.setCaptureListener(this);\r
- nya_raster=new J3dNyARRaster_RGB(cparam); \r
- nya=new NyARSingleDetectMarker(cparam,i_ar_code,i_marker_width);\r
- nya_behavior=new NyARBehavior(nya,nya_raster,i_rate);\r
- }\r
- public Behavior getBehavior()\r
- {\r
- return nya_behavior;\r
- }\r
- /**\r
- * i_back_groundにキャプチャ画像を転送するようにBehaviorを設定します。\r
- * i_back_groungはALLOW_IMAGE_WRITE属性を持つものである必要があります。\r
- * @param i_back_groung\r
- * @return\r
- */\r
- public void setBackGround(Background i_back_ground)\r
- {\r
- //コール先で排他制御\r
- nya_behavior.setRelatedBackGround(i_back_ground);\r
- }\r
- /**\r
- * i_trgroupの座標系をマーカーにあわせるようにBehaviorを設定します。\r
- *\r
- */\r
- public void setTransformGroup(TransformGroup i_trgroup)\r
- {\r
- //コール先で排他制御\r
- nya_behavior.setRelatedTransformGroup(i_trgroup);\r
- }\r
- /**\r
- * 座標系再計算後に呼び出されるリスナです。\r
- * @param i_listener\r
- */\r
- public void setUpdateListener(NyARSingleMarkerBehaviorListener i_listener)\r
- {\r
- //コール先で排他制御\r
- nya_behavior.setUpdateListener(i_listener);\r
- }\r
- /**\r
- * ラスタを更新 コールバック関数だから呼んじゃらめえ\r
- */\r
- public void onUpdateBuffer(Buffer i_buffer)\r
- {\r
- try{\r
- synchronized(nya_raster){\r
- nya_raster.setBuffer(i_buffer);\r
- }\r
- }catch(Exception e){\r
- e.printStackTrace();\r
+ private NyARParam cparam;\r
+\r
+ private JmfCameraCapture capture;\r
+\r
+ private J3dNyARRaster_RGB nya_raster;//最大3スレッドで共有されるので、排他制御かけること。\r
+\r
+ private NyARSingleDetectMarker nya;\r
+\r
+ //Behaviorホルダ\r
+ private NyARBehavior nya_behavior;\r
+\r
+ public NyARSingleMarkerBehaviorHolder(NyARParam i_cparam, float i_rate, NyARCode i_ar_code, double i_marker_width) throws NyARException\r
+ {\r
+ nya_behavior = null;\r
+ final NyARIntSize scr_size = i_cparam.getScreenSize();\r
+ cparam = i_cparam;\r
+ capture = new JmfCameraCapture(scr_size.w, scr_size.h, i_rate, JmfCameraCapture.PIXEL_FORMAT_RGB);\r
+ capture.setCaptureListener(this);\r
+ nya_raster = new J3dNyARRaster_RGB(cparam);\r
+ nya = new NyARSingleDetectMarker(cparam, i_ar_code, i_marker_width);\r
+ nya_behavior = new NyARBehavior(nya, nya_raster, i_rate);\r
+ }\r
+\r
+ public Behavior getBehavior()\r
+ {\r
+ return nya_behavior;\r
+ }\r
+\r
+ /**\r
+ * i_back_groundにキャプチャ画像を転送するようにBehaviorを設定します。\r
+ * i_back_groungはALLOW_IMAGE_WRITE属性を持つものである必要があります。\r
+ * @param i_back_groung\r
+ * @return\r
+ */\r
+ public void setBackGround(Background i_back_ground)\r
+ {\r
+ //コール先で排他制御\r
+ nya_behavior.setRelatedBackGround(i_back_ground);\r
+ }\r
+\r
+ /**\r
+ * i_trgroupの座標系をマーカーにあわせるようにBehaviorを設定します。\r
+ *\r
+ */\r
+ public void setTransformGroup(TransformGroup i_trgroup)\r
+ {\r
+ //コール先で排他制御\r
+ nya_behavior.setRelatedTransformGroup(i_trgroup);\r
+ }\r
+\r
+ /**\r
+ * 座標系再計算後に呼び出されるリスナです。\r
+ * @param i_listener\r
+ */\r
+ public void setUpdateListener(NyARSingleMarkerBehaviorListener i_listener)\r
+ {\r
+ //コール先で排他制御\r
+ nya_behavior.setUpdateListener(i_listener);\r
+ }\r
+\r
+ /**\r
+ * ラスタを更新 コールバック関数だから呼んじゃらめえ\r
+ */\r
+ public void onUpdateBuffer(Buffer i_buffer)\r
+ {\r
+ try {\r
+ synchronized (nya_raster) {\r
+ nya_raster.setBuffer(i_buffer);\r
+ }\r
+ } catch (Exception e) {\r
+ e.printStackTrace();\r
+ }\r
+ }\r
+\r
+ public void start() throws NyARException\r
+ {\r
+ //開始\r
+ capture.start();\r
}\r
- }\r
- public void start() throws NyARException\r
- {\r
- //開始\r
- capture.start();\r
- }\r
- public void stop()\r
- {\r
- capture.stop();\r
- }\r
-}\r
\r
+ public void stop()\r
+ {\r
+ capture.stop();\r
+ }\r
+}\r
\r
class NyARBehavior extends Behavior\r
{\r
- private NyARTransMatResult trans_mat_result=new NyARTransMatResult(); \r
- private NyARSingleDetectMarker related_nya;\r
- private TransformGroup trgroup;\r
- private Background back_ground;\r
- private J3dNyARRaster_RGB raster;\r
- private WakeupCondition wakeup;\r
- private NyARSingleMarkerBehaviorListener listener;\r
- public void initialize()\r
- {\r
- wakeupOn(wakeup);\r
- }\r
- /**\r
- * i_related_ic2dの内容で定期的にi_back_groundを更新するBehavior\r
- * @param i_back_ground\r
- * @param i_related_ic2d\r
- */\r
- public NyARBehavior(NyARSingleDetectMarker i_related_nya,J3dNyARRaster_RGB i_related_raster,float i_rate)\r
- {\r
- super();\r
- wakeup=new WakeupOnElapsedTime((int)(1000/i_rate));\r
- related_nya=i_related_nya;\r
- trgroup =null;\r
- raster =i_related_raster;\r
- back_ground=null;\r
- listener=null;\r
- this.setSchedulingBounds(new BoundingSphere(new Point3d(), 100.0));\r
- }\r
- public void setRelatedBackGround(Background i_back_ground)\r
- {\r
- synchronized(raster){\r
- back_ground=i_back_ground;\r
+ private NyARTransMatResult trans_mat_result = new NyARTransMatResult();\r
+\r
+ private NyARSingleDetectMarker related_nya;\r
+\r
+ private TransformGroup trgroup;\r
+\r
+ private Background back_ground;\r
+\r
+ private J3dNyARRaster_RGB raster;\r
+\r
+ private WakeupCondition wakeup;\r
+\r
+ private NyARSingleMarkerBehaviorListener listener;\r
+\r
+ public void initialize()\r
+ {\r
+ wakeupOn(wakeup);\r
}\r
- }\r
- public void setRelatedTransformGroup(TransformGroup i_trgroup)\r
- {\r
- synchronized(raster){\r
- trgroup=i_trgroup;\r
- } \r
- }\r
- public void setUpdateListener(NyARSingleMarkerBehaviorListener i_listener)\r
- {\r
- synchronized(raster){\r
- listener=i_listener;\r
+\r
+ /**\r
+ * i_related_ic2dの内容で定期的にi_back_groundを更新するBehavior\r
+ * @param i_back_ground\r
+ * @param i_related_ic2d\r
+ */\r
+ public NyARBehavior(NyARSingleDetectMarker i_related_nya, J3dNyARRaster_RGB i_related_raster, float i_rate)\r
+ {\r
+ super();\r
+ wakeup = new WakeupOnElapsedTime((int) (1000 / i_rate));\r
+ related_nya = i_related_nya;\r
+ trgroup = null;\r
+ raster = i_related_raster;\r
+ back_ground = null;\r
+ listener = null;\r
+ this.setSchedulingBounds(new BoundingSphere(new Point3d(), 100.0));\r
}\r
- }\r
- /**\r
- * いわゆるイベントハンドラ\r
- */\r
- public void processStimulus(Enumeration criteria)\r
- {\r
- try{\r
- synchronized(raster){\r
- Transform3D t3d=null;\r
- boolean is_marker_exist=false;\r
- if(back_ground!=null){\r
- raster.renewImageComponent2D();/*DirectXモードのときの対策*/\r
- back_ground.setImage(raster.getImageComponent2D());\r
- }\r
- if(raster.hasData()){\r
- is_marker_exist=related_nya.detectMarkerLite(raster, 100);\r
- if(is_marker_exist){\r
- related_nya.getTransmationMatrix(this.trans_mat_result);\r
- double[][] src=this.trans_mat_result.getArray();\r
- Matrix4d matrix=new Matrix4d(\r
- src[0][0],-src[1][0],-src[2][0],0,\r
- -src[0][1], src[1][1], src[2][1],0,\r
- -src[0][2], src[1][2], src[2][2],0,\r
- -src[0][3], src[1][3], src[2][3],1);\r
- matrix.transpose();\r
- t3d=new Transform3D(matrix);\r
- if(trgroup!=null){\r
- trgroup.setTransform(t3d);\r
- }\r
- }\r
- }\r
- if(listener!=null)\r
- {\r
- listener.onUpdate(is_marker_exist, t3d);\r
- }\r
- }\r
- wakeupOn(wakeup); \r
- }catch(Exception e){\r
- e.printStackTrace();\r
+\r
+ public void setRelatedBackGround(Background i_back_ground)\r
+ {\r
+ synchronized (raster) {\r
+ back_ground = i_back_ground;\r
+ }\r
+ }\r
+\r
+ public void setRelatedTransformGroup(TransformGroup i_trgroup)\r
+ {\r
+ synchronized (raster) {\r
+ trgroup = i_trgroup;\r
+ }\r
}\r
- }\r
-}\r
\r
+ public void setUpdateListener(NyARSingleMarkerBehaviorListener i_listener)\r
+ {\r
+ synchronized (raster) {\r
+ listener = i_listener;\r
+ }\r
+ }\r
+\r
+ /**\r
+ * いわゆるイベントハンドラ\r
+ */\r
+ public void processStimulus(Enumeration criteria)\r
+ {\r
+ try {\r
+ synchronized (raster) {\r
+ Transform3D t3d = null;\r
+ boolean is_marker_exist = false;\r
+ if (back_ground != null) {\r
+ raster.renewImageComponent2D();/*DirectXモードのときの対策*/\r
+ back_ground.setImage(raster.getImageComponent2D());\r
+ }\r
+ if (raster.hasData()) {\r
+ is_marker_exist = related_nya.detectMarkerLite(raster, 100);\r
+ if (is_marker_exist) {\r
+ related_nya.getTransmationMatrix(this.trans_mat_result);\r
+ double[][] src = this.trans_mat_result.getArray();\r
+ Matrix4d matrix = new Matrix4d(src[0][0], -src[1][0], -src[2][0], 0, -src[0][1], src[1][1], src[2][1], 0, -src[0][2], src[1][2], src[2][2], 0, -src[0][3], src[1][3], src[2][3], 1);\r
+ matrix.transpose();\r
+ t3d = new Transform3D(matrix);\r
+ if (trgroup != null) {\r
+ trgroup.setTransform(t3d);\r
+ }\r
+ }\r
+ }\r
+ if (listener != null) {\r
+ listener.onUpdate(is_marker_exist, t3d);\r
+ }\r
+ }\r
+ wakeupOn(wakeup);\r
+ } catch (Exception e) {\r
+ e.printStackTrace();\r
+ }\r
+ }\r
+}\r
package jp.nyatla.nyartoolkit.java3d.utils;\r
\r
import javax.media.j3d.*;\r
-\r
+/**\r
+ * \r
+ * NyARToolkitのBehaviorのリスナ\r
+ *\r
+ */\r
public interface NyARSingleMarkerBehaviorListener\r
{\r
/**\r
* RGB形式のJMFバッファをラップするオブジェクトをつくります。 生成直後のオブジェクトはデータを持ちません。\r
* メンバ関数はsetBufferを実行後に使用可能になります。\r
*/\r
- public JmfNyARRaster_RGB(int i_width, int i_height)\r
+ public JmfNyARRaster_RGB(NyARIntSize i_size)\r
+ {\r
+ this._size.w = i_size.w;\r
+ this._size.h = i_size.h;\r
+ this._ref_buf = null;\r
+ this._reader = new Reader(this._size);\r
+ }\r
+ public JmfNyARRaster_RGB(int i_width,int i_height)\r
{\r
this._size.w = i_width;\r
this._size.h = i_height;\r
this._ref_buf = null;\r
this._reader = new Reader(this._size);\r
- }\r
-\r
+ } \r
+ \r
/**\r
* フォーマットを解析して、ラスタタイプを返します。\r
* \r
\r
import jp.nyatla.nyartoolkit.NyARException;\r
import jp.nyatla.nyartoolkit.core.NyARCode;\r
-import jp.nyatla.nyartoolkit.core.NyARParam;\r
+import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
import jp.nyatla.nyartoolkit.core.transmat.NyARTransMatResult;\r
import jp.nyatla.nyartoolkit.detector.NyARDetectMarker;\r
\r
package jp.nyatla.nyartoolkit.jogl.utils;\r
\r
import jp.nyatla.nyartoolkit.core.*;\r
+import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
\r
/**\r
* NyARParamにOpenGL向け関数を追加したもの\r
NyARMat trans_mat = new NyARMat(3, 4);\r
NyARMat icpara_mat = new NyARMat(3, 4);\r
double[][] p = new double[3][3], q = new double[4][4];\r
- int width, height;\r
+\r
int i, j;\r
\r
- width = xsize;\r
- height = ysize;\r
+ final int width = this._screen_size.w;\r
+ final int height = this._screen_size.h;\r
\r
- decompMat(icpara_mat, trans_mat);\r
+ this.getPerspectiveProjectionMatrix().decompMat(icpara_mat, trans_mat);\r
\r
double[][] icpara = icpara_mat.getArray();\r
double[][] trans = trans_mat.getArray();\r
import jp.nyatla.nyartoolkit.NyARException;\r
import jp.nyatla.nyartoolkit.jmf.utils.*;\r
import jp.nyatla.nyartoolkit.core.*;\r
+import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
import jp.nyatla.nyartoolkit.core.rasterreader.*;\r
\r
\r
\r
public GLNyARRaster_RGB(NyARParam i_param)\r
{\r
- super(i_param.getX(), i_param.getY());\r
+ super(i_param.getScreenSize());\r
this._gl_flag = GL.GL_RGB;\r
this._gl_buf = new byte[this._size.w * this._size.h * 3];\r
}\r
\r
import jp.nyatla.nyartoolkit.NyARException;\r
import jp.nyatla.nyartoolkit.core.NyARCode;\r
-import jp.nyatla.nyartoolkit.core.NyARParam;\r
+import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
import jp.nyatla.nyartoolkit.core.transmat.NyARTransMatResult;\r
import jp.nyatla.nyartoolkit.detector.NyARSingleDetectMarker;\r
\r
import javax.media.opengl.GL;\r
import javax.media.opengl.glu.GLU;\r
\r
-import jp.nyatla.nyartoolkit.core.*;\r
+import jp.nyatla.nyartoolkit.core.types.*;\r
/**\r
* NyARToolkit用のJOGL支援関数群\r
*/\r
public class NyARGLUtil\r
{\r
- private NyARParam ar_param;\r
+ private javax.media.opengl.GL _gl;\r
\r
- private javax.media.opengl.GL gl;\r
+ private javax.media.opengl.glu.GLU _glu;\r
\r
- private javax.media.opengl.glu.GLU glu;\r
-\r
- public NyARGLUtil(javax.media.opengl.GL i_gl, NyARParam i_camera_param)\r
+ public NyARGLUtil(javax.media.opengl.GL i_gl)\r
{\r
- this.ar_param = i_camera_param;\r
- this.gl = i_gl;\r
- this.glu = new GLU();\r
+ this._gl = i_gl;\r
+ this._glu = new GLU();\r
}\r
\r
/**\r
IntBuffer texEnvModeSave = IntBuffer.allocate(1);\r
boolean lightingSave;\r
boolean depthTestSave;\r
- javax.media.opengl.GL gl_ = this.gl;\r
+ javax.media.opengl.GL gl = this._gl;\r
+ final NyARIntSize rsize=i_raster.getSize();\r
\r
// Prepare an orthographic projection, set camera position for 2D drawing, and save GL state.\r
- gl_.glGetTexEnviv(GL.GL_TEXTURE_ENV, GL.GL_TEXTURE_ENV_MODE, texEnvModeSave); // Save GL texture environment mode.\r
+ gl.glGetTexEnviv(GL.GL_TEXTURE_ENV, GL.GL_TEXTURE_ENV_MODE, texEnvModeSave); // Save GL texture environment mode.\r
if (texEnvModeSave.array()[0] != GL.GL_REPLACE) {\r
- gl_.glTexEnvi(GL.GL_TEXTURE_ENV, GL.GL_TEXTURE_ENV_MODE, GL.GL_REPLACE);\r
+ gl.glTexEnvi(GL.GL_TEXTURE_ENV, GL.GL_TEXTURE_ENV_MODE, GL.GL_REPLACE);\r
}\r
- lightingSave = gl_.glIsEnabled(GL.GL_LIGHTING); // Save enabled state of lighting.\r
+ lightingSave = gl.glIsEnabled(GL.GL_LIGHTING); // Save enabled state of lighting.\r
if (lightingSave == true) {\r
- gl_.glDisable(GL.GL_LIGHTING);\r
+ gl.glDisable(GL.GL_LIGHTING);\r
}\r
- depthTestSave = gl_.glIsEnabled(GL.GL_DEPTH_TEST); // Save enabled state of depth test.\r
+ depthTestSave = gl.glIsEnabled(GL.GL_DEPTH_TEST); // Save enabled state of depth test.\r
if (depthTestSave == true) {\r
- gl_.glDisable(GL.GL_DEPTH_TEST);\r
+ gl.glDisable(GL.GL_DEPTH_TEST);\r
}\r
- gl_.glMatrixMode(GL.GL_PROJECTION);\r
- gl_.glPushMatrix();\r
- gl_.glLoadIdentity();\r
- glu.gluOrtho2D(0.0, ar_param.getX(), 0.0, ar_param.getY());\r
- gl_.glMatrixMode(GL.GL_MODELVIEW);\r
- gl_.glPushMatrix();\r
- gl_.glLoadIdentity();\r
+ gl.glMatrixMode(GL.GL_PROJECTION);\r
+ gl.glPushMatrix();\r
+ gl.glLoadIdentity();\r
+ _glu.gluOrtho2D(0.0,rsize.w, 0.0,rsize.h);\r
+ gl.glMatrixMode(GL.GL_MODELVIEW);\r
+ gl.glPushMatrix();\r
+ gl.glLoadIdentity();\r
arglDispImageStateful(i_raster, i_zoom);\r
\r
// Restore previous projection, camera position, and GL state.\r
- gl_.glMatrixMode(GL.GL_PROJECTION);\r
- gl_.glPopMatrix();\r
- gl_.glMatrixMode(GL.GL_MODELVIEW);\r
- gl_.glPopMatrix();\r
+ gl.glMatrixMode(GL.GL_PROJECTION);\r
+ gl.glPopMatrix();\r
+ gl.glMatrixMode(GL.GL_MODELVIEW);\r
+ gl.glPopMatrix();\r
if (depthTestSave) {\r
- gl_.glEnable(GL.GL_DEPTH_TEST); // Restore enabled state of depth test.\r
+ gl.glEnable(GL.GL_DEPTH_TEST); // Restore enabled state of depth test.\r
}\r
if (lightingSave) {\r
- gl_.glEnable(GL.GL_LIGHTING); // Restore enabled state of lighting.\r
+ gl.glEnable(GL.GL_LIGHTING); // Restore enabled state of lighting.\r
}\r
if (texEnvModeSave.get(0) != GL.GL_REPLACE) {\r
- gl_.glTexEnvi(GL.GL_TEXTURE_ENV, GL.GL_TEXTURE_ENV_MODE, texEnvModeSave.get(0)); // Restore GL texture environment mode.\r
+ gl.glTexEnvi(GL.GL_TEXTURE_ENV, GL.GL_TEXTURE_ENV_MODE, texEnvModeSave.get(0)); // Restore GL texture environment mode.\r
}\r
- gl_.glEnd();\r
+ gl.glEnd();\r
}\r
\r
/**\r
*/\r
private void arglDispImageStateful(GLNyARRaster_RGB i_raster, double zoom)\r
{\r
- javax.media.opengl.GL gl_ = this.gl;\r
- int width = i_raster.getWidth();\r
- int height = i_raster.getHeight();\r
+ javax.media.opengl.GL gl_ = this._gl;\r
+ final NyARIntSize rsize = i_raster.getSize();\r
float zoomf;\r
IntBuffer params = IntBuffer.allocate(4);\r
zoomf = (float) zoom;\r
gl_.glDisable(GL.GL_TEXTURE_2D);\r
gl_.glGetIntegerv(GL.GL_VIEWPORT, params);\r
- gl_.glPixelZoom(zoomf * ((float) (params.get(2)) / (float) width), -zoomf * ((float) (params.get(3)) / (float) height));\r
- gl_.glWindowPos2f(0.0f, (float) height);\r
+ gl_.glPixelZoom(zoomf * ((float) (params.get(2)) / (float) rsize.w), -zoomf * ((float) (params.get(3)) / (float) rsize.h));\r
+ gl_.glWindowPos2f(0.0f, (float) rsize.h);\r
gl_.glPixelStorei(GL.GL_UNPACK_ALIGNMENT, 1);\r
ByteBuffer buf = ByteBuffer.wrap(i_raster.getGLRgbArray());\r
- gl_.glDrawPixels(width, height, i_raster.getGLPixelFlag(), GL.GL_UNSIGNED_BYTE, buf);\r
+ gl_.glDrawPixels(rsize.w,rsize.h, i_raster.getGLPixelFlag(), GL.GL_UNSIGNED_BYTE, buf);\r
}\r
}\r
+++ /dev/null
-/**
- * AbstractVideoCapture 1.02 08/07/15
- *
- * Copyright (c) 2008 arc
- * http://digitalmuseum.jp/
- * All rights reserved.
- */
-package jp.digitalmuseum.capture;
-
-import java.awt.event.ActionEvent;
-import java.awt.event.ActionListener;
-import java.util.ArrayList;
-
-
-/**
- * Webcamによるキャプチャを実行するクラスが拡張すべき抽象クラス。
- *
- * @version 1.01 5 June 2008
- * @author arc
- */
-public abstract class AbstractVideoCapture implements ActionListener {
-
- protected int width; // キャプチャ幅(指定した値から微妙にずれる)
- protected int height; // キャプチャ高さ
- protected float fps = 30; // キャプチャ画像を取得するfps
-
- protected byte[] pixels; // キャプチャ画像の実データを保持するバイト型配列
-
- // リスナ
- public ArrayList<VideoCaptureListener> listeners = new ArrayList<VideoCaptureListener>();
-
- /** リスナを登録する */
- public void addVideoCaptureListener(VideoCaptureListener listener) { listeners.add(listener); }
- /** リスナを削除する */
- public void removeVideoCaptureListener(VideoCaptureListener listener) { listeners.remove(listener); }
-
- /** 指定した幅、高さでインスタンスを初期化するコンストラクタ。 */
- public AbstractVideoCapture(int w, int h) { setSize(w, h); }
-
- /** 指定した幅、高さでのキャプチャを指示する。start()でキャプチャを開始した後は使えない。 */
- public void setSize(int w, int h) { width = w; height = h; }
-
- /**
- * Webcamの設定ダイアログを表示する。
- * 既定のWebcamでは駄目な場合(複数のWebcamが接続されているPCなど)ではこれを実行するとよい。
- */
- public abstract void prepShowDialog() throws Exception;
-
- /**
- * キャプチャに使う入力デバイスを指定する。
- * デバイスの代わりにダミー(デバッグ用)としてFileオブジェクトも渡せる実装にしておくべきである。
- */
- public abstract void prepSetInput(Object device) throws Exception;
- /** キャプチャするフレームレートを指定する。 */
- public abstract boolean prepSetFramerate(float fps);
-
- /** キャプチャを始める */
- public abstract void start() throws Exception;
- /** キャプチャを終わる */
- public abstract void dispose();
-
- /** タイマー処理。キャプチャイメージの更新結果をリスナに伝える。 */
- public void actionPerformed(ActionEvent event) { update(); }
-
- /** リスナにキャプチャ結果を伝える */
- public void update() {
- if (pixels != null)
- for (VideoCaptureListener listener : listeners)
- listener.imageUpdated(pixels);
- }
-
- /** キャプチャしている画像の幅を取得する。 */
- public int getWidth() { return width; }
- /** キャプチャしている画像の高さを取得する。 */
- public int getHeight() { return height; }
-
-}
+++ /dev/null
-/**
- * VideoCaptureDummy 1.00 08/07/15
- *
- * Copyright (c) 2008 arc
- * http://digitalmuseum.jp/
- * All rights reserved.
- */
-package jp.digitalmuseum.capture;
-
-import javax.swing.Timer;
-
-
-/**
- * Webcamによるキャプチャを代替するダミークラス。
- *
- * @version 1.00 12 Sep 2008
- * @author arc
- */
-public class VideoCaptureDummy extends AbstractVideoCapture {
-
- // キャプチャに使うタイマー
- private Timer timer;
-
- /** コンストラクタ。 */
- public VideoCaptureDummy(int w, int h) { super(w, h); }
-
- /**
- * 入力を指定する。
- */
- public void prepSetInput(Object input) { }
-
- /**
- * Webcamの設定ダイアログを表示する。
- */
- public void prepShowDialog() { }
-
- /** キャプチャするフレームレートを指定する。 */
- public boolean prepSetFramerate(float fps_) {
- fps = fps_;
- return true;
- }
-
- /** キャプチャを開始する。 */
- public void start() {
- pixels = new byte[width * height * 3];
- timer = new Timer((int) (1000/fps), this);
- timer.start();
- }
-
- /** キャプチャを終了する。 */
- public void dispose() { timer.stop(); }
-
-}
+++ /dev/null
-/**
- * VideoCaptureJMF 1.00 08/07/15
- *
- * Copyright (c) 2008 arc
- * http://digitalmuseum.jp/
- * All rights reserved.
- */
-package jp.digitalmuseum.capture;
-
-import java.awt.Dimension;
-import java.awt.event.ActionEvent;
-import java.util.List;
-
-import javax.media.Buffer;
-import javax.media.CaptureDeviceInfo;
-import javax.media.CaptureDeviceManager;
-import javax.media.Format;
-import javax.media.Manager;
-import javax.media.MediaLocator;
-import javax.media.Player;
-import javax.media.control.FormatControl;
-import javax.media.control.FrameGrabbingControl;
-import javax.media.format.RGBFormat;
-import javax.media.format.VideoFormat;
-import javax.media.protocol.CaptureDevice;
-import javax.media.protocol.DataSource;
-import javax.swing.Timer;
-
-
-/**
- * JMFでWebcamによるキャプチャを実行するクラス。
- *
- * @version 1.00 15 Sep 2008
- * @author arc
- */
-public class VideoCaptureJMF extends AbstractVideoCapture {
-
- // キャプチャに使うタイマー
- private Timer timer;
-
- // JMF用のあれこれ
- private String locator = null;
- private Player player;
- private FrameGrabbingControl grabber;
-
- /** コンストラクタ。 */
- public VideoCaptureJMF(int w, int h) { super(w, h); }
-
- /**
- * 入力を指定する。
- */
- public void prepSetInput(Object input) {
- locator = input == null ? null : input.toString();
- }
-
- /**
- * Webcamの設定ダイアログを表示する。
- */
- public void prepShowDialog() { }
-
- /** キャプチャするフレームレートを指定する。 */
- public boolean prepSetFramerate(float fps_) {
- fps = fps_;
- return true;
- }
-
- /** 指定されたフォーマットでキャプチャできるデータソースを取得する。 */
- private DataSource createDataSource(Format format) {
- DataSource ds;
- MediaLocator ml;
-
- // デバイス名を指定されていない場合、
- // フォーマットに適したデバイスのメディアロケータを取得する
- if (locator == null) {
- List<CaptureDeviceInfo> devices =
- CaptureDeviceManager.getDeviceList(format);
- if (devices.size() < 1) {
- System.err.println(format+"のフォーマットでキャプチャできるデバイスが見つかりませんでした。");
- return null;
- }
- ml = devices.get(0).getLocator();
-
- // デバイス名を指定されている場合、そのメディアロケータを取得する
- } else ml = new MediaLocator(locator);
-
- // デバイスをデータソースにして、出力フォーマットを合わせる
- try {
- ds = Manager.createDataSource(ml);
- ds.connect();
- if (ds instanceof CaptureDevice)
- setCaptureFormat((CaptureDevice) ds, format);
- } catch (Exception e) {
- System.err.println(e);
- return null;
- }
- return ds;
- }
-
- /** キャプチャデバイスの出力フォーマットを設定する。 */
- private void setCaptureFormat(CaptureDevice cdev, Format format) {
- FormatControl [] fcs = cdev.getFormatControls();
- if (fcs.length < 1) return;
-
- FormatControl fc = fcs[0];
- Format [] formats = fc.getSupportedFormats();
- for (int i = 0; i < formats.length; i++)
- if (formats[i].matches(format)) {
- format = formats[i].intersects(format);
- fc.setFormat(format);
- break;
- }
- }
-
- /** キャプチャを開始する。 */
- public void start() throws Exception {
- player = Manager.createRealizedPlayer(createDataSource(
- new VideoFormat(
- RGBFormat.RGB, // encoding
- new Dimension(width, height), // size
- Format.NOT_SPECIFIED, // maxDataLength
- null, // dataType
- (float) fps))); // frameRate);
- player.start();
- grabber = (FrameGrabbingControl) player.getControl(
- "javax.media.control.FrameGrabbingControl");
- pixels = new byte[width * height * 3];
-
- // キャプチャイメージを定期的に更新するタイマー
- timer = new Timer((int) (1000/fps), this);
- timer.start();
- }
-
- /** タイマー処理。キャプチャイメージの更新結果をリスナに伝える。 */
- public void actionPerformed(ActionEvent event) {
- Buffer buffer = grabber.grabFrame();
- byte[] pixels_reversed = (byte[]) buffer.getData();
- if (pixels_reversed == null)
- return;
-
- // ピクセル値の再配列
- for (int x = 0; x < width; x ++)
- for (int y = 0; y < height; y ++) {
- pixels[(x + y * width) * 3 + 0] = pixels_reversed[(x + (height - y - 1) * width) * 3 + 2];
- pixels[(x + y * width) * 3 + 1] = pixels_reversed[(x + (height - y - 1) * width) * 3 + 1];
- pixels[(x + y * width) * 3 + 2] = pixels_reversed[(x + (height - y - 1) * width) * 3 + 0];
- }
-
- // 各リスナに更新されたバイト列を渡す
- update();
- }
-
- /** キャプチャを終了する。 */
- public void dispose() { timer.stop(); }
-
-}
+++ /dev/null
-/**
- * VideoCaptureListener 1.00 08/05/30
- *
- * Copyright (c) 2008 arc
- * http://digitalmuseum.jp/
- * All rights reserved.
- */
-package jp.digitalmuseum.capture;
-
-
-/**
- * キャプチャ画像を取得したいクラスが実装すべきインターフェース。
- *
- * @version 1.01 5 June 2008
- * @author arc
- */
-public interface VideoCaptureListener {
-
- public void imageUpdated(byte[] pixels);
-
-}
+++ /dev/null
-/**
- * VideoCaptureQT 1.00 08/05/30
- *
- * Copyright (c) 2008 arc
- * http://digitalmuseum.jp/
- * All rights reserved.
- */
-package jp.digitalmuseum.capture;
-
-import java.awt.event.ActionEvent;
-import java.io.File;
-
-import javax.swing.Timer;
-
-import quicktime.QTSession;
-import quicktime.QTException;
-import quicktime.io.QTFile;
-import quicktime.qd.PixMap;
-import quicktime.qd.QDConstants;
-import quicktime.qd.QDGraphics;
-import quicktime.qd.QDRect;
-import quicktime.std.movies.Movie;
-import quicktime.std.movies.media.DataRef;
-import quicktime.std.StdQTConstants;
-import quicktime.std.sg.SequenceGrabber;
-import quicktime.std.sg.SGVideoChannel;
-import quicktime.util.RawEncodedImage;
-
-
-/**
- * QuickTime for JavaでWebcamによるキャプチャを実行するクラス。
- *
- * @version 1.00 30 May 2008
- * @author arc
- */
-public class VideoCaptureQT extends AbstractVideoCapture {
-
- // キャプチャに使うタイマー
- private Timer timer;
-
- // QTJava用のあれこれ
- private QDGraphics graphics;
- private QDRect bounds;
- private SequenceGrabber grabber;
- private SGVideoChannel channel;
- private RawEncodedImage rawEncodedImage;
- private Movie movie;
-
- // ピクセルフォーマット変換用の一時変数
- private int[] pixels_int;
-
- /** コンストラクタ。 */
- public VideoCaptureQT(int w, int h) { super(w, h); }
-
- /**
- * 入力を指定する。
- * 複数のWebcamが接続されていて、使いたいデバイスの名前が分かっている場合はこれを実行するとよい。
- * ただし、使いたいデバイスと同じ名前を他のWebcamが持っていた場合、どれが使われるか分からない。
- * また、名前でなくファイルが指定されていたら、Webcamでなくダミーとしてファイルを入力に使う。
- */
- public void prepSetInput(Object input) throws QTException {
- QTSession.open();
- bounds = new QDRect(width, height);
- graphics = new QDGraphics(
- quicktime.util.EndianOrder.isNativeLittleEndian() ?
- QDConstants.k32BGRAPixelFormat : QDGraphics.kDefaultPixelFormat,
- bounds);
- if (input != null && input.getClass().equals(File.class)) {
- movie = quicktime.std.movies.Movie.fromDataRef(
- new DataRef(new QTFile((File) input)),
- StdQTConstants.newMovieActive
- );
- } else {
- grabber = new SequenceGrabber();
- grabber.setGWorld(graphics, null);
- channel = new SGVideoChannel(grabber);
- channel.setBounds(bounds);
-
- // seqGrabPreview == 2, Processingでmagic numberとしてハードコートされていた…
- channel.setUsage(StdQTConstants.seqGrabPreview);
-
- if (input != null) {
- try {
- channel.setDevice(input.toString());
- } catch (QTException e) {
- e.printStackTrace();
- throw e;
- }
- }
- }
- }
-
- /**
- * Webcamの設定ダイアログを表示する。
- * 既定のWebcamでは駄目な場合(複数のWebcamが接続されているPCなど)ではこれを実行するとよい。
- */
- public void prepShowDialog() throws QTException { channel.settingsDialog(); }
-
- /** キャプチャするフレームレートを指定する。 */
- public boolean prepSetFramerate(float fps_) {
- try {
- fps = fps_;
- channel.setFrameRate(fps);
- } catch (Exception e) {
- e.printStackTrace();
- return false;
- }
- return true;
- }
-
- /** キャプチャを開始する。 */
- public void start() throws QTException {
- try {
- if (movie == null) {
- grabber.prepare(true, false); // あってもなくてもよさそう
- grabber.startPreview();
- } else {
- movie.preroll(0, 1.0f);
- while (movie.maxLoadedTimeInMovie() == 0)
- movie.task(100);
- movie.setRate(1);
- movie.getPict(movie.getTime()).draw(graphics, bounds);
- }
- PixMap pixmap = graphics.getPixMap();
- rawEncodedImage = pixmap.getPixelData();
-
- width = rawEncodedImage.getRowBytes() / 4;
- pixels = new byte[width * height * 3];
- pixels_int = new int[width * height];
- } catch (QTException e) {
- QTSession.close();
- throw e;
- }
-
- // キャプチャイメージを定期的に更新するタイマー
- timer = new Timer((int) (1000/fps), this);
- timer.start();
- }
-
- /** タイマー処理。キャプチャイメージの更新結果をリスナに伝える。 */
- public void actionPerformed(ActionEvent event) {
-
- // 画像をQTJavaのRawEncodedImageとして取得
- try {
- if (movie == null) {
- grabber.idle();
- } else {
- if (movie.isDone()) movie.goToBeginning();
- movie.getPict(movie.getTime()).draw(graphics, bounds);
- }
- } catch (QTException e) {
- QTSession.close();
- e.printStackTrace();
- }
-
- // RawEncodedImageをint列に落とし込む
- rawEncodedImage.copyToArray(0, pixels_int, 0, pixels_int.length);
-
- // バイト列を生成する
- int idx_byte = 0;
- for (int idx = 0; idx < width*height; idx ++) {
- pixels[idx_byte ++] = (byte) (pixels_int[idx] >> 16);
- pixels[idx_byte ++] = (byte) (pixels_int[idx] >> 8 & 0xff);
- pixels[idx_byte ++] = (byte) (pixels_int[idx] & 0xff);
- }
-
- // 各リスナに更新されたバイト列を渡す
- update();
- }
-
- /** キャプチャを終了する。 */
- public void dispose() {
- try {
- if (movie == null) {
- grabber.stop();
- grabber.release();
- grabber.disposeChannel(channel);
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- QTSession.close();
- }
- timer.stop();
- }
-
-}
+++ /dev/null
-/**
- * VideoCaptureQT 1.00 08/05/30
- *
- * Copyright (c) 2008 arc
- * http://digitalmuseum.jp/
- * All rights reserved.
- */
-package jp.digitalmuseum.capture;
-
-import java.awt.event.ActionEvent;
-import java.io.File;
-
-import javax.swing.Timer;
-
-import quicktime.QTSession;
-import quicktime.QTException;
-import quicktime.io.QTFile;
-import quicktime.qd.PixMap;
-import quicktime.qd.QDConstants;
-import quicktime.qd.QDGraphics;
-import quicktime.qd.QDRect;
-import quicktime.std.movies.Movie;
-import quicktime.std.movies.media.DataRef;
-import quicktime.std.StdQTConstants;
-import quicktime.std.sg.SequenceGrabber;
-import quicktime.std.sg.SGVideoChannel;
-import quicktime.util.RawEncodedImage;
-
-
-/**
- * QuickTime for JavaでWebcamによるキャプチャを実行するクラス。
- *
- * MacのQTJavaだとk24RGBPixelFormatの処理系統が壊れているようで、
- * おかしなデータを吐いてしまうのでWindows専用。
- *
- * @version 1.00 30 May 2008
- * @author arc
- */
-public class VideoCaptureQTW extends AbstractVideoCapture {
-
- // キャプチャに使うタイマー
- private Timer timer;
-
- // QTJava用のあれこれ
- private QDGraphics graphics;
- private QDRect bounds;
- private SequenceGrabber grabber;
- private SGVideoChannel channel;
- private RawEncodedImage rawEncodedImage;
- private Movie movie;
-
- /** コンストラクタ。 */
- public VideoCaptureQTW(int w, int h) { super(w, h); }
-
- /**
- * 入力を指定する。
- * 複数のWebcamが接続されていて、使いたいデバイスの名前が分かっている場合はこれを実行するとよい。
- * ただし、使いたいデバイスと同じ名前を他のWebcamが持っていた場合、どれが使われるか分からない。
- * また、名前でなくファイルが指定されていたら、Webcamでなくダミーとしてファイルを入力に使う。
- */
- public void prepSetInput(Object input) throws QTException {
- QTSession.open();
- bounds = new QDRect(width, height);
- graphics = new QDGraphics(QDConstants.k24RGBPixelFormat, bounds);
- if (input != null && input.getClass().equals(File.class)) {
- movie = quicktime.std.movies.Movie.fromDataRef(
- new DataRef(new QTFile((File) input)),
- StdQTConstants.newMovieActive
- );
- } else {
- grabber = new SequenceGrabber();
- grabber.setGWorld(graphics, null);
- channel = new SGVideoChannel(grabber);
- channel.setBounds(bounds);
-
- // seqGrabPreview == 2, Processingでmagic numberとしてハードコートされていた…
- channel.setUsage(StdQTConstants.seqGrabPreview);
-
- if (input != null) {
- try {
- channel.setDevice(input.toString());
- } catch (QTException e) {
- e.printStackTrace();
- throw e;
- }
- }
- }
- }
-
- /**
- * Webcamの設定ダイアログを表示する。
- * 既定のWebcamでは駄目な場合(複数のWebcamが接続されているPCなど)ではこれを実行するとよい。
- */
- public void prepShowDialog() throws QTException { channel.settingsDialog(); }
-
- /** キャプチャするフレームレートを指定する。 */
- public boolean prepSetFramerate(float fps_) {
- try {
- fps = fps_;
- channel.setFrameRate(fps);
- } catch (Exception e) {
- e.printStackTrace();
- return false;
- }
- return true;
- }
-
- /** キャプチャを開始する。 */
- public void start() throws QTException {
- try {
- if (movie == null) {
- grabber.prepare(true, false); // あってもなくてもよさそう
- grabber.startPreview();
- } else {
- movie.preroll(0, 1.0f);
- while (movie.maxLoadedTimeInMovie() == 0)
- movie.task(100);
- movie.setRate(1);
- movie.getPict(movie.getTime()).draw(graphics, bounds);
- }
- PixMap pixmap = graphics.getPixMap();
- rawEncodedImage = pixmap.getPixelData();
-
- width = rawEncodedImage.getRowBytes() / 3;
- pixels = new byte[width * height * 3];
- } catch (QTException e) {
- QTSession.close();
- throw e;
- }
-
- // キャプチャイメージを定期的に更新するタイマー
- timer = new Timer((int) (1000/fps), this);
- timer.start();
- }
-
- /** タイマー処理。キャプチャイメージの更新結果をリスナに伝える。 */
- public void actionPerformed(ActionEvent event) {
-
- // 画像をQTJavaのRawEncodedImageとして取得
- try {
- if (movie == null) {
- grabber.idle();
- } else {
- if (movie.isDone()) movie.goToBeginning();
- movie.getPict(movie.getTime()).draw(graphics, bounds);
- }
- } catch (QTException e) {
- QTSession.close();
- e.printStackTrace();
- }
-
- // RawEncodedImageをバイト列に落とし込む
- rawEncodedImage.copyToArray(0, pixels, 0, pixels.length);
-
- // 各リスナに更新されたバイト列を渡す
- update();
- }
-
- /** キャプチャを終了する。 */
- public void dispose() {
- try {
- if (movie == null) {
- grabber.stop();
- grabber.release();
- grabber.disposeChannel(channel);
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- QTSession.close();
- }
- timer.stop();
- }
-
-}
+++ /dev/null
-package jp.digitalmuseum.capture.sample;\r
-\r
-public class Const {\r
- public static final int CAP_WIDTH = 800;\r
- public static final int CAP_HEIGHT = 600;\r
-}\r
+++ /dev/null
-package jp.digitalmuseum.capture.sample;\r
-\r
-\r
-public class Main {\r
- public static void main(String[] argv) {\r
- MyFrame frame = new MyFrame();\r
- }\r
-}\r
+++ /dev/null
-package jp.digitalmuseum.capture.sample;\r
-\r
-import java.awt.Canvas;\r
-import java.awt.Graphics;\r
-import java.awt.image.BufferedImage;\r
-import java.awt.image.DataBuffer;\r
-import java.awt.image.WritableRaster;\r
-\r
-import javax.swing.JFrame;\r
-\r
-import jp.digitalmuseum.capture.AbstractVideoCapture;\r
-import jp.digitalmuseum.capture.VideoCaptureDummy;\r
-import jp.digitalmuseum.capture.VideoCaptureListener;\r
-import jp.digitalmuseum.capture.VideoCaptureQT;\r
-\r
-\r
-class MyFrame extends JFrame {\r
- private AbstractVideoCapture capture;\r
- private WritableRaster raster;\r
- private BufferedImage image;\r
-\r
- public MyFrame() {\r
- // QuickTimeでキャプチャ\r
- capture = new VideoCaptureQT(Const.CAP_WIDTH, Const.CAP_HEIGHT);\r
- try {\r
- capture.prepSetInput(null);\r
- capture.start();\r
-\r
- // 失敗したらダミーでキャプチャ\r
- } catch (Exception e) {\r
- e.printStackTrace();\r
- capture = new VideoCaptureDummy(Const.CAP_WIDTH, Const.CAP_HEIGHT);\r
- try {\r
- capture.start();\r
-\r
- // ダミーすら初期化できなかったら終了\r
- } catch (Exception e1) {\r
- e1.printStackTrace();\r
- dispose();\r
- }\r
- }\r
-\r
- // キャンバスの初期化\r
- MyCanvas canvas = new MyCanvas();\r
- canvas.setSize(capture.getWidth(), capture.getHeight());\r
- capture.addVideoCaptureListener(canvas);\r
- add(canvas);\r
- pack();\r
-\r
- // メインウィンドウの初期化\r
- setDefaultCloseOperation(DISPOSE_ON_CLOSE);\r
- setTitle("sample");\r
- setResizable(false);\r
- setVisible(true);\r
- }\r
-\r
- /**\r
- * 終了処理\r
- */\r
- public void dispose() {\r
- capture.dispose();\r
- System.exit(0);\r
- }\r
-\r
- class MyCanvas extends Canvas implements VideoCaptureListener {\r
- /**\r
- * キャプチャ画像の描画\r
- */\r
- public void paint(Graphics g) {\r
- if (image != null)\r
- g.drawImage(image, 0, 0, null);\r
- }\r
- public void repaint(Graphics g) { paint(g); }\r
- public void update(Graphics g) { paint(g); }\r
-\r
- /**\r
- * キャプチャ画像のオブジェクト化\r
- */\r
- public void imageUpdated(byte[] pixels) {\r
- // 実データを画像オブジェクトに変換する準備\r
- if (raster == null) {\r
- raster = WritableRaster.createInterleavedRaster(DataBuffer.TYPE_BYTE,\r
- capture.getWidth(), capture.getHeight(),\r
- capture.getWidth()*3, 3,\r
- new int[] { 0, 1, 2 }, null); \r
- image = new BufferedImage(capture.getWidth(), capture.getHeight(),\r
- BufferedImage.TYPE_3BYTE_BGR);\r
- }\r
-\r
- // 実データを画像オブジェクトに変換\r
- raster.setDataElements(0, 0, capture.getWidth(), capture.getHeight(), pixels);\r
- image.setData(raster);\r
- repaint();\r
- }\r
-\r
- }\r
-\r
-}\r
ap_i = ap[i];// wap = ap + i * rowa;\r
\r
work = ap_i[0];\r
- for (j = 0; j < dimen_1; j++) {// for(j = 1, wbp = wcp,\r
- // work = *wap;j < dimen ;\r
- // j++, wap++, wbp++)\r
- ap_i[j] = ap_i[j + 1] - work * ap_n[j];// wap = *(wap +\r
- // 1) - work *\r
- // (*wbp);\r
+ for (j = 0; j < dimen_1; j++) {// for(j = 1, wbp = wcp,work = *wap;j < dimen ;j++, wap++, wbp++)\r
+ ap_i[j] = ap_i[j + 1] - work * ap_n[j];// wap = *(wap +1) - work *(*wbp);\r
}\r
ap_i[j] = -work * ap_n[j];// *wap = -work * (*wbp);\r
}\r
}\r
\r
/* int arMatrixPCA2( ARMat *input, ARMat *evec, ARVec *ev ); */\r
- public static void arMatrixPCA2(NyARMat input, NyARMat evec, NyARVec ev)\r
- throws NyARException\r
+ public static void arMatrixPCA2(NyARMat input, NyARMat evec, NyARVec ev) throws NyARException\r
{\r
NyARException.trap("未チェックのパス");\r
NyARMat work;\r
+++ /dev/null
-/* \r
- * PROJECT: NyARToolkit\r
- * --------------------------------------------------------------------------------\r
- * This work is based on the original ARToolKit developed by\r
- * Hirokazu Kato\r
- * Mark Billinghurst\r
- * HITLab, University of Washington, Seattle\r
- * http://www.hitl.washington.edu/artoolkit/\r
- *\r
- * The NyARToolkit is Java version ARToolkit class library.\r
- * Copyright (C)2008 R.Iizuka\r
- *\r
- * This program is free software; you can redistribute it and/or\r
- * modify it under the terms of the GNU General Public License\r
- * as published by the Free Software Foundation; either version 2\r
- * of the License, or (at your option) any later version.\r
- * \r
- * This program is distributed in the hope that it will be useful,\r
- * but WITHOUT ANY WARRANTY; without even the implied warranty of\r
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\r
- * GNU General Public License for more details.\r
- * \r
- * You should have received a copy of the GNU General Public License\r
- * along with this framework; if not, write to the Free Software\r
- * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA\r
- * \r
- * For further information please contact.\r
- * http://nyatla.jp/nyatoolkit/\r
- * <airmail(at)ebony.plala.or.jp>\r
- * \r
- */\r
-package jp.nyatla.nyartoolkit.core;\r
-\r
-import java.io.*;\r
-import java.nio.*;\r
-\r
-import jp.nyatla.nyartoolkit.core.types.*;\r
-import jp.nyatla.nyartoolkit.NyARException;\r
-import jp.nyatla.utils.DoubleValue;\r
-\r
-/*\r
- * typedef struct { int xsize, ysize; double mat[3][4]; double dist_factor[4]; }\r
- * ARParam;\r
- */\r
-public class NyARParam\r
-{\r
- private static final int SIZE_OF_PARAM_SET = 4 + 4 + (3 * 4 * 8) + (4 * 8);\r
-\r
- private static final int PD_LOOP = 3;\r
-\r
- protected int xsize, ysize;\r
-\r
- private double[] array34 = new double[3 * 4];// Double2dArray mat=new\r
- // Double2dArray(3,4);\r
-\r
- private double[] dist_factor = new double[4];\r
-\r
- public int getX()\r
- {\r
- return xsize;\r
- }\r
-\r
- public int getY()\r
- {\r
- return ysize;\r
- }\r
-\r
- public double[] getDistFactor()\r
- {\r
- return dist_factor;\r
- }\r
-\r
- /**\r
- * パラメタを格納した[4x3]配列を返します。\r
- * \r
- * @return\r
- */\r
- public final double[] get34Array()\r
- {\r
- return array34;\r
- }\r
-\r
- /**\r
- * ARToolKit標準ファイルから1個目の設定をロードする。\r
- * \r
- * @param i_filename\r
- * @throws NyARException\r
- */\r
- public void loadFromARFile(String i_filename) throws NyARException\r
- {\r
- try {\r
- loadFromARFile(new FileInputStream(i_filename));\r
- } catch (Exception e) {\r
- throw new NyARException(e);\r
- }\r
- }\r
-\r
- public void loadFromARFile(InputStream i_stream) throws NyARException\r
- {\r
- try {\r
- NyARParam new_inst[] = arParamLoad(i_stream, 1);\r
- i_stream.close();\r
- xsize = new_inst[0].xsize;\r
- ysize = new_inst[0].ysize;\r
- array34 = new_inst[0].array34;\r
- dist_factor = new_inst[0].dist_factor;\r
- } catch (Exception e) {\r
- throw new NyARException(e);\r
- }\r
- }\r
-\r
- /*\r
- * static double dot( double a1, double a2, double a3,double b1, double b2,\r
- * double b3 )\r
- */\r
- private final static double dot(double a1, double a2, double a3, double b1,\r
- double b2, double b3)\r
- {\r
- return (a1 * b1 + a2 * b2 + a3 * b3);\r
- }\r
-\r
- /* static double norm( double a, double b, double c ) */\r
- private final static double norm(double a, double b, double c)\r
- {\r
- return Math.sqrt(a * a + b * b + c * c);\r
- }\r
-\r
- /**\r
- * int arParamDecompMat( double source[3][4], double cpara[3][4], double\r
- * trans[3][4] ); 関数の置き換え Optimize STEP[754->665]\r
- * \r
- * @param o_cpara\r
- * 戻り引数。3x4のマトリクスを指定すること。\r
- * @param o_trans\r
- * 戻り引数。3x4のマトリクスを指定すること。\r
- * @return\r
- */\r
- public void decompMat(NyARMat o_cpara, NyARMat o_trans)\r
- {\r
- double[] source = array34;\r
- double[] Cpara = new double[3 * 4];// double Cpara[3][4];\r
- double rem1, rem2, rem3;\r
- int i;\r
- if (source[2 * 4 + 3] >= 0) {// if( source[2][3] >= 0 ) {\r
- // <Optimize>\r
- // for(int r = 0; r < 3; r++ ){\r
- // for(int c = 0; c < 4; c++ ){\r
- // Cpara[r][c]=source[r][c];//Cpara[r][c] = source[r][c];\r
- // }\r
- // }\r
- for (i = 0; i < 12; i++) {\r
- Cpara[i] = source[i];// Cpara[r][c] = source[r][c];\r
- }\r
- // </Optimize>\r
- } else {\r
- // <Optimize>\r
- // for(int r = 0; r < 3; r++ ){\r
- // for(int c = 0; c < 4; c++ ){\r
- // Cpara[r][c]=-source[r][c];//Cpara[r][c] = -(source[r][c]);\r
- // }\r
- // }\r
- for (i = 0; i < 12; i++) {\r
- Cpara[i] = source[i];// Cpara[r][c] = source[r][c];\r
- }\r
- // </Optimize>\r
- }\r
-\r
- double[][] cpara = o_cpara.getArray();\r
- double[][] trans = o_trans.getArray();\r
- for (int r = 0; r < 3; r++) {\r
- for (int c = 0; c < 4; c++) {\r
- cpara[r][c] = 0.0;// cpara[r][c] = 0.0;\r
- }\r
- }\r
- cpara[2][2] = norm(Cpara[2 * 4 + 0], Cpara[2 * 4 + 1], Cpara[2 * 4 + 2]);// cpara[2][2] =norm( Cpara[2][0],Cpara[2][1],Cpara[2][2]);\r
- trans[2][0] = Cpara[2 * 4 + 0] / cpara[2][2];// trans[2][0] = Cpara[2][0] /cpara[2][2];\r
- trans[2][1] = Cpara[2 * 4 + 1] / cpara[2][2];// trans[2][1] = Cpara[2][1] / cpara[2][2];\r
- trans[2][2] = Cpara[2 * 4 + 2] / cpara[2][2];// trans[2][2] =Cpara[2][2] /cpara[2][2];\r
- trans[2][3] = Cpara[2 * 4 + 3] / cpara[2][2];// trans[2][3] =Cpara[2][3] /cpara[2][2];\r
-\r
- cpara[1][2] = dot(trans[2][0], trans[2][1], trans[2][2], Cpara[1 * 4 + 0], Cpara[1 * 4 + 1], Cpara[1 * 4 + 2]);// cpara[1][2]=dot(trans[2][0],trans[2][1],trans[2][2],Cpara[1][0],Cpara[1][1],Cpara[1][2]);\r
- rem1 = Cpara[1 * 4 + 0] - cpara[1][2] * trans[2][0];// rem1 =Cpara[1][0] -cpara[1][2] *trans[2][0];\r
- rem2 = Cpara[1 * 4 + 1] - cpara[1][2] * trans[2][1];// rem2 =Cpara[1][1] -cpara[1][2] *trans[2][1];\r
- rem3 = Cpara[1 * 4 + 2] - cpara[1][2] * trans[2][2];// rem3 =Cpara[1][2] -cpara[1][2] *trans[2][2];\r
- cpara[1][1] = norm(rem1, rem2, rem3);// cpara[1][1] = norm( rem1,// rem2, rem3 );\r
- trans[1][0] = rem1 / cpara[1][1];// trans[1][0] = rem1 / cpara[1][1];\r
- trans[1][1] = rem2 / cpara[1][1];// trans[1][1] = rem2 / cpara[1][1];\r
- trans[1][2] = rem3 / cpara[1][1];// trans[1][2] = rem3 / cpara[1][1];\r
-\r
- cpara[0][2] = dot(trans[2][0], trans[2][1], trans[2][2], Cpara[0 * 4 + 0], Cpara[0 * 4 + 1], Cpara[0 * 4 + 2]);// cpara[0][2] =dot(trans[2][0], trans[2][1],trans[2][2],Cpara[0][0],Cpara[0][1],Cpara[0][2]);\r
- cpara[0][1] = dot(trans[1][0], trans[1][1], trans[1][2], Cpara[0 * 4 + 0], Cpara[0 * 4 + 1], Cpara[0 * 4 + 2]);// cpara[0][1]=dot(trans[1][0],trans[1][1],trans[1][2],Cpara[0][0],Cpara[0][1],Cpara[0][2]);\r
- rem1 = Cpara[0 * 4 + 0] - cpara[0][1] * trans[1][0] - cpara[0][2]* trans[2][0];// rem1 = Cpara[0][0] - cpara[0][1]*trans[1][0]- cpara[0][2]*trans[2][0];\r
- rem2 = Cpara[0 * 4 + 1] - cpara[0][1] * trans[1][1] - cpara[0][2]* trans[2][1];// rem2 = Cpara[0][1] - cpara[0][1]*trans[1][1]- cpara[0][2]*trans[2][1];\r
- rem3 = Cpara[0 * 4 + 2] - cpara[0][1] * trans[1][2] - cpara[0][2]* trans[2][2];// rem3 = Cpara[0][2] - cpara[0][1]*trans[1][2] - cpara[0][2]*trans[2][2];\r
- cpara[0][0] = norm(rem1, rem2, rem3);// cpara[0][0] = norm( rem1,rem2, rem3 );\r
- trans[0][0] = rem1 / cpara[0][0];// trans[0][0] = rem1 / cpara[0][0];\r
- trans[0][1] = rem2 / cpara[0][0];// trans[0][1] = rem2 / cpara[0][0];\r
- trans[0][2] = rem3 / cpara[0][0];// trans[0][2] = rem3 / cpara[0][0];\r
-\r
- trans[1][3] = (Cpara[1 * 4 + 3] - cpara[1][2] * trans[2][3])/ cpara[1][1];// trans[1][3] = (Cpara[1][3] -cpara[1][2]*trans[2][3]) / cpara[1][1];\r
- trans[0][3] = (Cpara[0 * 4 + 3] - cpara[0][1] * trans[1][3] - cpara[0][2]* trans[2][3])/ cpara[0][0];// trans[0][3] = (Cpara[0][3] -cpara[0][1]*trans[1][3]-cpara[0][2]*trans[2][3]) / cpara[0][0];\r
-\r
- for (int r = 0; r < 3; r++) {\r
- for (int c = 0; c < 3; c++) {\r
- cpara[r][c] /= cpara[2][2];// cpara[r][c] /= cpara[2][2];\r
- }\r
- }\r
- }\r
-\r
- /* int arParamDisp( ARParam *param ); */\r
- public int paramDisp()\r
- {\r
- System.out.println("--------------------------------------");// printf("--------------------------------------\n");\r
- System.out.print("SIZE = " + xsize + ", " + ysize);// printf("SIZE =%d, %d\n",param->xsize,param->ysize);\r
- System.out.println("Distortion factor = " + dist_factor[0] + " "\r
- + dist_factor[1] + " " + dist_factor[2] + " " + dist_factor[3]);// printf("Distortionfactor= %f%f%f%f\n",param->dist_factor[0],param->dist_factor[1],param->dist_factor[2],param->dist_factor[3]);\r
- for (int j = 0; j < 3; j++) {// for(j = 0; j < 3; j++ ) {\r
- for (int i = 0; i < 4; i++) {\r
- System.out.print(array34[j * 4 + i] + " ");// printf("%7.5f ",param->mat[j][i]);\r
- }\r
- System.out.println();// printf("\n");\r
- }// }\r
- System.out.println("--------------------------------------");// printf("--------------------------------------\n");\r
- return 0;\r
- }\r
-\r
- // /*int arParamDecomp( ARParam *source, ARParam *icpara, double trans[3][4] );*/\r
- // private static int arParamDecomp( NyARParam source, NyARParam icpara,double[][] trans)\r
- // {\r
- // icpara.xsize = source.xsize;//icpara->xsize = source->xsize;\r
- // icpara.ysize = source.ysize;//icpara->ysize = source->ysize;\r
- // icpara.dist_factor[0] = source.dist_factor[0];//icpara->dist_factor[0] =source->dist_factor[0];\r
- // icpara.dist_factor[1] = source.dist_factor[1];// icpara->dist_factor[1] =source->dist_factor[1];\r
- // icpara.dist_factor[2] = source.dist_factor[2];//icpara->dist_factor[2] =source->dist_factor[2];\r
- // icpara.dist_factor[3] = source.dist_factor[3];//icpara->dist_factor[3] =source->dist_factor[3];\r
- // return arParamDecompMat(source.mat, icpara.mat, trans );\r
- // }\r
- /**\r
- * int arParamChangeSize( ARParam *source, int xsize, int ysize, ARParam\r
- * *newparam ); 関数の代替関数 サイズプロパティをi_xsize,i_ysizeに変更します。\r
- * \r
- * @param xsize\r
- * @param ysize\r
- * @param newparam\r
- * @return\r
- * \r
- */\r
- public void changeSize(int i_xsize, int i_ysize)\r
- {\r
- double scale;\r
- scale = (double) i_xsize / (double) (xsize);// scale = (double)xsize / (double)(source->xsize);\r
-\r
- for (int i = 0; i < 4; i++) {\r
- array34[0 * 4 + i] = array34[0 * 4 + i] * scale;// newparam->mat[0][i]=source->mat[0][i]* scale;\r
- array34[1 * 4 + i] = array34[1 * 4 + i] * scale;// newparam->mat[1][i]=source->mat[1][i]* scale;\r
- array34[2 * 4 + i] = array34[2 * 4 + i];// newparam->mat[2][i] = source->mat[2][i];\r
- }\r
-\r
- dist_factor[0] = dist_factor[0] * scale;// newparam->dist_factor[0] =source->dist_factor[0] *scale;\r
- dist_factor[1] = dist_factor[1] * scale;// newparam->dist_factor[1] =source->dist_factor[1] *scale;\r
- dist_factor[2] = dist_factor[2] / (scale * scale);// newparam->dist_factor[2]=source->dist_factor[2]/ (scale*scale);\r
- dist_factor[3] = dist_factor[3];// newparam->dist_factor[3] =source->dist_factor[3];\r
-\r
- xsize = i_xsize;// newparam->xsize = xsize;\r
- ysize = i_ysize;// newparam->ysize = ysize;\r
- }\r
-\r
- /**\r
- * int arParamIdeal2Observ( const double dist_factor[4], const double ix,\r
- * const double iy,double *ox, double *oy ) 関数の代替関数\r
- * \r
- * @param ix\r
- * @param iy\r
- * @param ox\r
- * @param oy\r
- */\r
- public void ideal2Observ(final NyARDoublePoint2d i_in, NyARDoublePoint2d o_out)\r
- {\r
- final double df[] = this.dist_factor;\r
- final double d0 = df[0];\r
- final double d1 = df[1];\r
- final double d3 = df[3];\r
- final double x = (i_in.x - d0) * d3;\r
- final double y = (i_in.y - d1) * d3;\r
- if (x == 0.0 && y == 0.0) {\r
- o_out.x = d0;\r
- o_out.y = d1;\r
- } else {\r
- final double d = 1.0 - df[2] / 100000000.0 * (x * x + y * y);\r
- o_out.x = x * d + d0;\r
- o_out.y = y * d + d1;\r
- }\r
- }\r
-\r
- /**\r
- * ideal2Observをまとめて実行します。\r
- * \r
- * @param i_in\r
- * double[][2]\r
- * @param o_out\r
- * double[][2]\r
- */\r
- public void ideal2ObservBatch(final NyARDoublePoint2d[] i_in, NyARDoublePoint2d[] o_out, int i_size)\r
- {\r
-\r
- double x, y;\r
- final double df[] = this.dist_factor;\r
- final double d0 = df[0];\r
- final double d1 = df[1];\r
- final double d3 = df[3];\r
- final double d2_w = df[2] / 100000000.0;\r
- for (int i = 0; i < i_size; i++) {\r
- x = (i_in[i].x - d0) * d3;\r
- y = (i_in[i].y - d1) * d3;\r
- if (x == 0.0 && y == 0.0) {\r
- o_out[i].x = d0;\r
- o_out[i].y = d1;\r
- } else {\r
- final double d = 1.0 - d2_w * (x * x + y * y);\r
- o_out[i].x = x * d + d0;\r
- o_out[i].y = y * d + d1;\r
- }\r
- }\r
- return;\r
- }\r
-\r
- /**\r
- * int arParamObserv2Ideal( const double dist_factor[4], const double ox,\r
- * const double oy,double *ix, double *iy );\r
- * \r
- * @param ox\r
- * @param oy\r
- * @param ix\r
- * @param iy\r
- * @return\r
- */\r
- public int observ2Ideal(double ox, double oy, DoubleValue ix, DoubleValue iy)\r
- {\r
- double z02, z0, p, q, z, px, py, opttmp_1;\r
- final double d0, d1, d3;\r
- final double df[] = this.dist_factor;\r
- d0 = df[0];\r
- d1 = df[1];\r
-\r
- px = ox - d0;\r
- py = oy - d1;\r
- p = df[2] / 100000000.0;\r
- z02 = px * px + py * py;\r
- q = z0 = Math.sqrt(z02);// Optimize//q = z0 = Math.sqrt(px*px+ py*py);\r
-\r
- for (int i = 1;; i++) {\r
- if (z0 != 0.0) {\r
- // Optimize opttmp_1\r
- opttmp_1 = p * z02;\r
- z = z0 - ((1.0 - opttmp_1) * z0 - q) / (1.0 - 3.0 * opttmp_1);\r
- px = px * z / z0;\r
- py = py * z / z0;\r
- } else {\r
- px = 0.0;\r
- py = 0.0;\r
- break;\r
- }\r
- if (i == PD_LOOP) {\r
- break;\r
- }\r
- z02 = px * px + py * py;\r
- z0 = Math.sqrt(z02);// Optimize//z0 = Math.sqrt(px*px+ py*py);\r
- }\r
- d3 = df[3];\r
- ix.value = px / d3 + d0;\r
- iy.value = py / d3 + d1;\r
- return 0;\r
- }\r
-\r
- /**\r
- * 指定範囲のobserv2Idealをまとめて実行して、結果をo_idealに格納します。\r
- * \r
- * @param i_x_coord\r
- * @param i_y_coord\r
- * @param i_start\r
- * coord開始点\r
- * @param i_num\r
- * 計算数\r
- * @param o_ideal\r
- * 出力バッファ[i_num][2]であること。\r
- */\r
- public void observ2IdealBatch(int[] i_x_coord, int[] i_y_coord,\r
- int i_start, int i_num, double[][] o_ideal)\r
- {\r
- double z02, z0, q, z, px, py, opttmp_1;\r
- final double df[] = this.dist_factor;\r
- final double d0 = df[0];\r
- final double d1 = df[1];\r
- final double d3 = df[3];\r
- final double p = df[2] / 100000000.0;\r
- for (int j = 0; j < i_num; j++) {\r
-\r
- px = i_x_coord[i_start + j] - d0;\r
- py = i_y_coord[i_start + j] - d1;\r
-\r
- z02 = px * px + py * py;\r
- q = z0 = Math.sqrt(z02);// Optimize//q = z0 = Math.sqrt(px*px+py*py);\r
-\r
- for (int i = 1;; i++) {\r
- if (z0 != 0.0) {\r
- // Optimize opttmp_1\r
- opttmp_1 = p * z02;\r
- z = z0 - ((1.0 - opttmp_1) * z0 - q)/ (1.0 - 3.0 * opttmp_1);\r
- px = px * z / z0;\r
- py = py * z / z0;\r
- } else {\r
- px = 0.0;\r
- py = 0.0;\r
- break;\r
- }\r
- if (i == PD_LOOP) {\r
- break;\r
- }\r
- z02 = px * px + py * py;\r
- z0 = Math.sqrt(z02);// Optimize//z0 = Math.sqrt(px*px+ py*py);\r
- }\r
- o_ideal[j][0] = px / d3 + d0;\r
- o_ideal[j][1] = py / d3 + d1;\r
- }\r
- }\r
-\r
- /**\r
- * int arParamLoad( const char *filename, int num, ARParam *param, ...);\r
- * i_streamの入力ストリームからi_num個の設定を読み込み、パラメタを配列にして返します。\r
- * \r
- * @param filename\r
- * @param num\r
- * @param param\r
- * @return 設定を格納した配列を返します。\r
- * @throws Exception\r
- * i_num個の設定が読み出せない場合、JartkExceptionを発生します。\r
- */\r
- private static NyARParam[] arParamLoad(InputStream i_stream, int i_num)throws NyARException\r
- {\r
- try {\r
- int read_size = SIZE_OF_PARAM_SET * i_num;\r
- byte[] buf = new byte[read_size];\r
- i_stream.read(buf);\r
- // 返却配列を確保\r
- NyARParam[] result = new NyARParam[i_num];\r
-\r
- // バッファを加工\r
- ByteBuffer bb = ByteBuffer.wrap(buf);\r
- bb.order(ByteOrder.BIG_ENDIAN);\r
-\r
- // 固定回数パースして配列に格納\r
- for (int i = 0; i < i_num; i++) {\r
- NyARParam new_param = new NyARParam();\r
- ;\r
- new_param.xsize = bb.getInt();\r
- new_param.ysize = bb.getInt();\r
- for (int i2 = 0; i2 < 3; i2++) {\r
- for (int i3 = 0; i3 < 4; i3++) {\r
- new_param.array34[i2 * 4 + i3] = bb.getDouble();\r
- }\r
- }\r
- for (int i2 = 0; i2 < 4; i2++) {\r
- new_param.dist_factor[i2] = bb.getDouble();\r
- }\r
- result[i] = new_param;\r
- }\r
- return result;\r
- } catch (Exception e) {\r
- throw new NyARException(e);\r
- }\r
- }\r
-\r
- public static int arParamSave(String filename, int num, NyARParam param[])\r
- throws Exception\r
- {\r
- NyARException.trap("未チェックの関数");\r
- byte buf[] = new byte[SIZE_OF_PARAM_SET * param.length];\r
- // バッファをラップ\r
- ByteBuffer bb = ByteBuffer.wrap(buf);\r
- bb.order(ByteOrder.BIG_ENDIAN);\r
-\r
- // 書き込み\r
- for (int i = 0; i < param.length; i++) {\r
- bb.putInt(param[i].xsize);\r
- bb.putInt(param[i].ysize);\r
- for (int i2 = 0; i2 < 3; i2++) {\r
- for (int i3 = 0; i3 < 4; i3++) {\r
- bb.putDouble(param[i].array34[i2 * 4 + i3]);\r
- }\r
- }\r
- for (int i2 = 0; i2 < 4; i2++) {\r
- bb.putDouble(param[i].dist_factor[i2]);\r
- }\r
- }\r
- // ファイルに保存\r
- FileOutputStream fs = new FileOutputStream(filename);\r
- fs.write(buf);\r
- fs.close();\r
-\r
- return 0;\r
- }\r
-}\r
import jp.nyatla.nyartoolkit.core.labeling.*;\r
import jp.nyatla.nyartoolkit.core.raster.*;\r
import jp.nyatla.nyartoolkit.core.types.*;\r
+import jp.nyatla.nyartoolkit.core.param.*;\r
\r
\r
\r
-/**\r
- * ラベル同士の重なり(内包関係)を調べるクラスです。 ラベルリストに内包するラベルを蓄積し、それにターゲットのラベルが内包されているか を確認します。\r
- */\r
-class OverlapChecker\r
-{\r
- private NyARLabelingLabel[] _labels = new NyARLabelingLabel[32];\r
-\r
- private int _length;\r
-\r
- /**\r
- * 最大i_max_label個のラベルを蓄積できるようにオブジェクトをリセットする\r
- * \r
- * @param i_max_label\r
- */\r
- public void reset(int i_max_label)\r
- {\r
- if (i_max_label > this._labels.length) {\r
- this._labels = new NyARLabelingLabel[i_max_label];\r
- }\r
- this._length = 0;\r
- }\r
-\r
- /**\r
- * チェック対象のラベルを追加する。\r
- * \r
- * @param i_label_ref\r
- */\r
- public void push(NyARLabelingLabel i_label_ref)\r
- {\r
- this._labels[this._length] = i_label_ref;\r
- this._length++;\r
- }\r
\r
- /**\r
- * 現在リストにあるラベルと重なっているかを返す。\r
- * \r
- * @param i_label\r
- * @return 何れかのラベルの内側にあるならばfalse,独立したラベルである可能性が高ければtrueです.\r
- */\r
- public boolean check(NyARLabelingLabel i_label)\r
- {\r
- // 重なり処理かな?\r
- final NyARLabelingLabel[] label_pt = this._labels;\r
- final int px1 = (int) i_label.pos_x;\r
- final int py1 = (int) i_label.pos_y;\r
- for (int i = this._length - 1; i >= 0; i--) {\r
- final int px2 = (int) label_pt[i].pos_x;\r
- final int py2 = (int) label_pt[i].pos_y;\r
- final int d = (px1 - px2) * (px1 - px2) + (py1 - py2) * (py1 - py2);\r
- if (d < label_pt[i].area / 4) {\r
- // 対象外\r
- return false;\r
- }\r
- }\r
- // 対象\r
- return true;\r
- }\r
-}\r
\r
/**\r
* イメージから正方形候補を検出するクラス。\r
private static final int AR_AREA_MAX = 100000;// #define AR_AREA_MAX 100000\r
\r
private static final int AR_AREA_MIN = 70;// #define AR_AREA_MIN 70\r
-\r
private final int _width;\r
-\r
private final int _height;\r
\r
- private final NyARParam _cparam;\r
-\r
private final NyARLabeling_ARToolKit _labeling;\r
\r
private final NyARLabelingImage _limage;\r
\r
private final OverlapChecker _overlap_checker = new OverlapChecker();\r
+ private final NyARCameraDistortionFactor _dist_factor_ref;\r
\r
/**\r
* 最大i_squre_max個のマーカーを検出するクラスを作成する。\r
* \r
* @param i_param\r
*/\r
- public NyARSquareDetector(NyARParam i_param) throws NyARException\r
+ public NyARSquareDetector(NyARCameraDistortionFactor i_dist_factor_ref,NyARIntSize i_size) throws NyARException\r
{\r
- this._width = i_param.getX();\r
- this._height = i_param.getY();\r
- this._cparam = i_param;\r
+ this._width = i_size.w;\r
+ this._height = i_size.h;\r
+ this._dist_factor_ref = i_dist_factor_ref;\r
this._labeling = new NyARLabeling_ARToolKit();\r
this._limage = new NyARLabelingImage(this._width, this._height);\r
this._labeling.attachDestination(this._limage);\r
o_vertex[2] = v1;\r
o_vertex[3] = wv2.vertex[0];\r
} else if (wv1.number_of_vertex > 1 && wv2.number_of_vertex == 0) {// }else if( wvnum1 > 1 && wvnum2== 0) {\r
- v2 = v1 / 2;\r
- if (!wv1.getVertex(i_x_coord, i_y_coord, 0, v2, thresh)) {\r
+ //頂点位置を、起点から対角点の間の1/2にあると予想して、検索する。\r
+ v2 = (v1-i_vertex1_index)/2+i_vertex1_index;\r
+ if (!wv1.getVertex(i_x_coord, i_y_coord, i_vertex1_index, v2, thresh)) {\r
return false;\r
}\r
if (!wv2.getVertex(i_x_coord, i_y_coord, v2, v1, thresh)) {\r
return false;\r
}\r
} else if (wv1.number_of_vertex == 0 && wv2.number_of_vertex > 1) {\r
- v2 = (v1 + end_of_coord) / 2;\r
+ //v2 = (v1-i_vertex1_index+ end_of_coord-i_vertex1_index) / 2+i_vertex1_index;\r
+ v2 = (v1+ end_of_coord)/2;\r
\r
if (!wv1.getVertex(i_x_coord, i_y_coord, v1, v2, thresh)) {\r
return false;\r
final NyARVec ev = this.__getSquareLine_ev; // matrixPCAの戻り値を受け取る\r
final NyARVec mean = this.__getSquareLine_mean;// matrixPCAの戻り値を受け取る\r
final double[] mean_array = mean.getArray();\r
- final NyARParam cparam = this._cparam;\r
-\r
+ final NyARCameraDistortionFactor dist_factor=this._dist_factor_ref; \r
final NyARMat input = this.__getSquareLine_input;// 次処理で初期化される。\r
final NyARMat evec = this.__getSquareLine_evec;// アウトパラメータを受け取るから初期化不要//new NyARMat(2,2);\r
final double[][] evec_array = evec.getArray();\r
// pcaの準備\r
input.realloc(n, 2);\r
// バッチ取得\r
- cparam.observ2IdealBatch(i_xcoord, i_ycoord, st, n, input.getArray());\r
+ dist_factor.observ2IdealBatch(i_xcoord, i_ycoord, st, n, input.getArray());\r
\r
// 主成分分析\r
input.matrixPCA(evec, ev, mean);\r
return true;\r
}\r
}\r
+\r
+/**
+ * Checks whether labels overlap (i.e. are contained in one another). Candidate
+ * labels are accumulated into an internal list; a target label can then be
+ * tested for containment against every accumulated label.
+ */
+class OverlapChecker
+{
+	// Accumulated candidate labels; grown on demand by reset().
+	private NyARLabelingLabel[] _labels = new NyARLabelingLabel[32];
+
+	// Number of labels currently stored in _labels.
+	private int _length;
+
+	/**
+	 * Resets the object so that up to i_max_label labels can be accumulated.
+	 * 
+	 * @param i_max_label
+	 *            maximum number of labels that will be pushed before the next reset
+	 */
+	public void reset(int i_max_label)
+	{
+		if (i_max_label > this._labels.length) {
+			this._labels = new NyARLabelingLabel[i_max_label];
+		}
+		this._length = 0;
+	}
+
+	/**
+	 * Adds a label to the containment-check list.
+	 * 
+	 * @param i_label_ref
+	 *            label to accumulate (stored by reference, not copied)
+	 */
+	public void push(NyARLabelingLabel i_label_ref)
+	{
+		this._labels[this._length] = i_label_ref;
+		this._length++;
+	}
+
+	/**
+	 * Tests whether the given label overlaps any label currently in the list.
+	 * 
+	 * @param i_label
+	 * @return false if the label lies inside one of the accumulated labels;
+	 *         true if it is likely an independent label.
+	 */
+	public boolean check(NyARLabelingLabel i_label)
+	{
+		// Compare the squared center distance against a quarter of each stored label's area.
+		final NyARLabelingLabel[] label_pt = this._labels;
+		final int px1 = (int) i_label.pos_x;
+		final int py1 = (int) i_label.pos_y;
+		for (int i = this._length - 1; i >= 0; i--) {
+			final int px2 = (int) label_pt[i].pos_x;
+			final int py2 = (int) label_pt[i].pos_y;
+			final int d = (px1 - px2) * (px1 - px2) + (py1 - py2) * (py1 - py2);
+			if (d < label_pt[i].area / 4) {
+				// Contained in an existing label: not an independent candidate.
+				return false;
+			}
+		}
+		// Independent label.
+		return true;
+	}
+}
\ No newline at end of file
public NyARSquare prePush() throws NyARException\r
{\r
return (NyARSquare) super.prePush();\r
- } \r
-\r
+ }\r
}\r
// AR_HEADER_VERSION_MAJOR\r
// 2\r
\r
- private static final int AR_HEADER_VERSION_MINOR = 72;// #define\r
- // AR_HEADER_VERSION_MINOR\r
- // 72\r
+ private static final int AR_HEADER_VERSION_MINOR = 72;// #define AR_HEADER_VERSION_MINOR 72\r
\r
- private static final int AR_HEADER_VERSION_TINY = 0;// #define\r
- // AR_HEADER_VERSION_TINY\r
- // 0\r
+ private static final int AR_HEADER_VERSION_TINY = 0;// #define AR_HEADER_VERSION_TINY 0\r
\r
- private static final int AR_HEADER_VERSION_BUILD = 0;// #define\r
- // AR_HEADER_VERSION_BUILD\r
- // 0\r
+ private static final int AR_HEADER_VERSION_BUILD = 0;// #define AR_HEADER_VERSION_BUILD 0\r
\r
- private static final String AR_HEADER_VERSION_STRING = "2.72.0";// #define\r
- // AR_HEADER_VERSION_STRING\r
- // "2.72.0"\r
+ private static final String AR_HEADER_VERSION_STRING = "2.72.0";// #define AR_HEADER_VERSION_STRING "2.72.0"\r
\r
- public static final boolean AR_HAVE_HEADER_VERSION_2 = true;// #define\r
- // AR_HAVE_HEADER_VERSION_2\r
+ public static final boolean AR_HAVE_HEADER_VERSION_2 = true;// #define AR_HAVE_HEADER_VERSION_2\r
\r
- public static final boolean AR_HAVE_HEADER_VERSION_2_72 = true;// #define\r
- // AR_HAVE_HEADER_VERSION_2_72\r
+ public static final boolean AR_HAVE_HEADER_VERSION_2_72 = true;// #define AR_HAVE_HEADER_VERSION_2_72\r
\r
public static String getARVersion()\r
{\r
* \r
* \r
*/\r
-class NyARWorkHolder\r
+final class NyARWorkHolder\r
{\r
private final static int ARRAY_APPEND_STEP = 256;\r
\r
* pattern_match関数を分解した3種類のパターン検出クラスを定義します。\r
* \r
*/\r
-public interface NyARMatchPatt\r
+public interface INyARMatchPatt\r
{\r
public double getConfidence();\r
\r
* AR_TEMPLATE_MATCHING_BWと同等のルールで マーカーを評価します。\r
* \r
*/\r
-public class NyARMatchPatt_BlackWhite implements NyARMatchPatt\r
+public class NyARMatchPatt_BlackWhite implements INyARMatchPatt\r
{\r
private double datapow;\r
\r
* AR_TEMPLATE_MATCHING_COLORかつAR_MATCHING_WITHOUT_PCAと同等のルールで マーカーを評価します。\r
* \r
*/\r
-public class NyARMatchPatt_Color_WITHOUT_PCA implements NyARMatchPatt\r
+public class NyARMatchPatt_Color_WITHOUT_PCA implements INyARMatchPatt\r
{\r
private int[][][] input = new int[1][1][3];\r
\r
* AR_TEMPLATE_MATCHING_COLORかつAR_MATCHING_WITH_PCAと同等のルールで マーカーを評価します。\r
* \r
*/\r
-public class NyARMatchPatt_Color_WITH_PCA implements NyARMatchPatt\r
+public class NyARMatchPatt_Color_WITH_PCA implements INyARMatchPatt\r
{\r
private final int EVEC_MAX = 10;// #define EVEC_MAX 10\r
\r
\r
private double[][][][] evec;// static double evec[EVEC_MAX][AR_PATT_SIZE_Y*AR_PATT_SIZE_X*3];\r
\r
- private double[][] epat = new double[4][EVEC_MAX];// static double\r
- // epat[AR_PATT_NUM_MAX][4][EVEC_MAX];\r
+ private double[][] epat = new double[4][EVEC_MAX];// static double epat[AR_PATT_NUM_MAX][4][EVEC_MAX];\r
\r
private int ave;\r
\r
int[][][] data = i_target_patt.getPatArray();\r
\r
input = new int[height][width][3];\r
- evec = new double[EVEC_MAX][height][width][3];// static double\r
- // evec[EVEC_MAX][AR_PATT_SIZE_Y*AR_PATT_SIZE_X*3];\r
+ evec = new double[EVEC_MAX][height][width][3];// static double evec[EVEC_MAX][AR_PATT_SIZE_Y*AR_PATT_SIZE_X*3];\r
int sum;\r
\r
sum = ave = 0;\r
- for (int i = 0; i < height; i++) {// for(int\r
- // i=0;i<Config.AR_PATT_SIZE_Y;i++){\r
- for (int i2 = 0; i2 < width; i2++) {// for(int\r
- // i2=0;i2<Config.AR_PATT_SIZE_X;i2++){\r
+ for (int i = 0; i < height; i++) {// for(int i=0;i<Config.AR_PATT_SIZE_Y;i++){\r
+ for (int i2 = 0; i2 < width; i2++) {// for(int i2=0;i2<Config.AR_PATT_SIZE_X;i2++){\r
ave += (255 - data[i][i2][0]) + (255 - data[i][i2][1])\r
+ (255 - data[i][i2][2]);\r
}\r
--- /dev/null
+package jp.nyatla.nyartoolkit.core.param;\r
+\r
+import jp.nyatla.nyartoolkit.core.types.NyARDoublePoint2d;\r
+import jp.nyatla.utils.DoubleValue;\r
+\r
+/**
+ * Stores the camera lens distortion factors and provides the distortion
+ * correction functions. See page 11 of
+ * http://www.hitl.washington.edu/artoolkit/Papers/ART02-Tutorial.pdf
+ * 
+ * Distortion model (ideal -> observed); note the first term uses the same
+ * scale s as the second:
+ * x=s(xi-x0),y=s(yi-y0)
+ * d^2=x^2+y^2
+ * p=(1-fd^2)
+ * xd=px+x0,yd=py+y0
+ */
+final public class NyARCameraDistortionFactor
+{
+	// Number of refinement iterations used by observ2Ideal()/observ2IdealBatch().
+	private static final int PD_LOOP = 3;
+	private double _f0;//x0 (distortion center x)
+	private double _f1;//y0 (distortion center y)
+	private double _f2;//100000000.0*f (distortion coefficient, stored pre-scaled)
+	private double _f3;//s (scale factor)
+	/**
+	 * Sets the factor values from an array.
+	 * @param i_factor
+	 *            Array with 4 or more elements: {x0, y0, 100000000*f, s}.
+	 */
+	public void setValue(double[] i_factor)
+	{
+		this._f0=i_factor[0];
+		this._f1=i_factor[1];
+		this._f2=i_factor[2];
+		this._f3=i_factor[3];
+		return;
+	}
+	/**
+	 * Copies the 4 factor values into o_factor.
+	 * @param o_factor
+	 *            Output array with 4 or more elements.
+	 */
+	public void getValue(double[] o_factor)
+	{
+		o_factor[0]=this._f0;
+		o_factor[1]=this._f1;
+		o_factor[2]=this._f2;
+		o_factor[3]=this._f3;
+		return;
+	}	
+	/**
+	 * Rescales the distortion center and coefficient for a new screen size.
+	 * The center scales linearly; the coefficient scales by 1/scale^2; the
+	 * scale factor s is unchanged.
+	 * @param i_scale
+	 *            Scale factor relative to the current size.
+	 */
+	public void changeScale(double i_scale)
+	{
+		this._f0=this._f0*i_scale;// newparam->dist_factor[0] =source->dist_factor[0] *scale;
+		this._f1=this._f1*i_scale;// newparam->dist_factor[1] =source->dist_factor[1] *scale;
+		this._f2=this._f2/ (i_scale * i_scale);// newparam->dist_factor[2]=source->dist_factor[2]/ (scale*scale);
+		//this.f3=this.f3;// newparam->dist_factor[3] =source->dist_factor[3];
+		return;
+	}
+	/**
+	 * Replacement of int arParamIdeal2Observ( const double dist_factor[4], const double ix,const double iy,double *ox, double *oy ).
+	 * Converts an ideal (undistorted) point into an observed (distorted) point.
+	 * 
+	 * @param i_in
+	 *            Ideal coordinate.
+	 * @param o_out
+	 *            Receives the observed coordinate.
+	 */
+	public void ideal2Observ(final NyARDoublePoint2d i_in, NyARDoublePoint2d o_out)
+	{
+		final double x = (i_in.x - this._f0) * this._f3;
+		final double y = (i_in.y - this._f1) * this._f3;
+		if (x == 0.0 && y == 0.0) {
+			// The distortion center maps onto itself.
+			o_out.x = this._f0;
+			o_out.y = this._f1;
+		} else {
+			// p = 1 - f*d^2 ; _f2 stores 100000000*f, hence the division.
+			final double d = 1.0 - this._f2 / 100000000.0 * (x * x + y * y);
+			o_out.x = x * d + this._f0;
+			o_out.y = y * d + this._f1;
+		}
+		return;
+	}
+
+	/**
+	 * Applies ideal2Observ to the first i_size points of i_in.
+	 * @param i_in
+	 * @param o_out
+	 * @param i_size
+	 *            Number of points to convert.
+	 */
+	public void ideal2ObservBatch(final NyARDoublePoint2d[] i_in, NyARDoublePoint2d[] o_out, int i_size)
+	{
+		double x, y;
+		final double d0 = this._f0;
+		final double d1 = this._f1;
+		final double d3 = this._f3;
+		final double d2_w = this._f2 / 100000000.0;
+		for (int i = 0; i < i_size; i++) {
+			x = (i_in[i].x - d0) * d3;
+			y = (i_in[i].y - d1) * d3;
+			if (x == 0.0 && y == 0.0) {
+				o_out[i].x = d0;
+				o_out[i].y = d1;
+			} else {
+				final double d = 1.0 - d2_w * (x * x + y * y);
+				o_out[i].x = x * d + d0;
+				o_out[i].y = y * d + d1;
+			}
+		}
+		return;
+	}
+
+	/**
+	 * Replacement of int arParamObserv2Ideal( const double dist_factor[4], const double ox,const double oy,double *ix, double *iy );
+	 * Converts an observed (distorted) point back to an ideal (undistorted)
+	 * point by iteratively refining the radius (PD_LOOP iterations).
+	 * 
+	 * @param ox
+	 *            Observed x.
+	 * @param oy
+	 *            Observed y.
+	 * @param ix
+	 *            Receives the ideal x.
+	 * @param iy
+	 *            Receives the ideal y.
+	 */
+	public void observ2Ideal(double ox, double oy, DoubleValue ix, DoubleValue iy)
+	{
+		double z02, z0, p, q, z, px, py, opttmp_1;
+		final double d0 = this._f0;
+		final double d1 = this._f1;
+
+		px = ox - d0;
+		py = oy - d1;
+		p = this._f2 / 100000000.0;
+		z02 = px * px + py * py;
+		q = z0 = Math.sqrt(z02);// Optimize//q = z0 = Math.sqrt(px*px+ py*py);
+
+		for (int i = 1;; i++) {
+			if (z0 != 0.0) {
+				// Optimize opttmp_1
+				opttmp_1 = p * z02;
+				z = z0 - ((1.0 - opttmp_1) * z0 - q) / (1.0 - 3.0 * opttmp_1);
+				px = px * z / z0;
+				py = py * z / z0;
+			} else {
+				// Degenerate case: the point is exactly at the distortion center.
+				px = 0.0;
+				py = 0.0;
+				break;
+			}
+			if (i == PD_LOOP) {
+				break;
+			}
+			z02 = px * px + py * py;
+			z0 = Math.sqrt(z02);// Optimize//z0 = Math.sqrt(px*px+ py*py);
+		}
+		ix.value = px / this._f3 + d0;
+		iy.value = py / this._f3 + d1;
+		return;
+	}
+
+	/**
+	 * Runs observ2Ideal over the i_num coordinates starting at i_start and
+	 * stores the results into o_ideal.
+	 * 
+	 * @param i_x_coord
+	 * @param i_y_coord
+	 * @param i_start
+	 *            Start index into the coordinate arrays.
+	 * @param i_num
+	 *            Number of coordinates to convert.
+	 * @param o_ideal
+	 *            Output buffer; must be at least [i_num][2].
+	 */
+	public void observ2IdealBatch(int[] i_x_coord, int[] i_y_coord,int i_start, int i_num, double[][] o_ideal)
+	{
+		double z02, z0, q, z, px, py, opttmp_1;
+		final double d0 = this._f0;
+		final double d1 = this._f1;
+		final double d3 = this._f3;
+		final double p = this._f2 / 100000000.0;
+		for (int j = 0; j < i_num; j++) {
+
+			px = i_x_coord[i_start + j] - d0;
+			py = i_y_coord[i_start + j] - d1;
+
+			z02 = px * px + py * py;
+			q = z0 = Math.sqrt(z02);// Optimize//q = z0 = Math.sqrt(px*px+py*py);
+
+			for (int i = 1;; i++) {
+				if (z0 != 0.0) {
+					// Optimize opttmp_1
+					opttmp_1 = p * z02;
+					z = z0 - ((1.0 - opttmp_1) * z0 - q)/ (1.0 - 3.0 * opttmp_1);
+					px = px * z / z0;
+					py = py * z / z0;
+				} else {
+					px = 0.0;
+					py = 0.0;
+					break;
+				}
+				if (i == PD_LOOP) {
+					break;
+				}
+				z02 = px * px + py * py;
+				z0 = Math.sqrt(z02);// Optimize//z0 = Math.sqrt(px*px+ py*py);
+			}
+			o_ideal[j][0] = px / d3 + d0;
+			o_ideal[j][1] = py / d3 + d1;
+		}
+		return;
+	}	
+}
--- /dev/null
+/* \r
+ * PROJECT: NyARToolkit\r
+ * --------------------------------------------------------------------------------\r
+ * This work is based on the original ARToolKit developed by\r
+ * Hirokazu Kato\r
+ * Mark Billinghurst\r
+ * HITLab, University of Washington, Seattle\r
+ * http://www.hitl.washington.edu/artoolkit/\r
+ *\r
+ * The NyARToolkit is Java version ARToolkit class library.\r
+ * Copyright (C)2008 R.Iizuka\r
+ *\r
+ * This program is free software; you can redistribute it and/or\r
+ * modify it under the terms of the GNU General Public License\r
+ * as published by the Free Software Foundation; either version 2\r
+ * of the License, or (at your option) any later version.\r
+ * \r
+ * This program is distributed in the hope that it will be useful,\r
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of\r
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\r
+ * GNU General Public License for more details.\r
+ * \r
+ * You should have received a copy of the GNU General Public License\r
+ * along with this framework; if not, write to the Free Software\r
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA\r
+ * \r
+ * For further information please contact.\r
+ * http://nyatla.jp/nyatoolkit/\r
+ * <airmail(at)ebony.plala.or.jp>\r
+ * \r
+ */\r
+package jp.nyatla.nyartoolkit.core.param;\r
+\r
+import java.io.*;\r
+import java.nio.*;\r
+\r
+import jp.nyatla.nyartoolkit.NyARException;\r
+import jp.nyatla.nyartoolkit.core.types.*;\r
+\r
+/**\r
+ * typedef struct { int xsize, ysize; double mat[3][4]; double dist_factor[4]; } ARParam;\r
+ * NyARの動作パラメータを格納するクラス\r
+ *\r
+ */\r
+public class NyARParam\r
+{\r
+ protected NyARIntSize _screen_size=new NyARIntSize();\r
+ private static final int SIZE_OF_PARAM_SET = 4 + 4 + (3 * 4 * 8) + (4 * 8);\r
+ private NyARCameraDistortionFactor _dist=new NyARCameraDistortionFactor();\r
+ private NyARPerspectiveProjectionMatrix _projection_matrix=new NyARPerspectiveProjectionMatrix();\r
+\r
+ public NyARIntSize getScreenSize()\r
+ {\r
+ return this._screen_size;\r
+ }\r
+\r
+ public NyARPerspectiveProjectionMatrix getPerspectiveProjectionMatrix()\r
+ {\r
+ return this._projection_matrix;\r
+ }\r
+ public NyARCameraDistortionFactor getDistortionFactor()\r
+ {\r
+ return this._dist;\r
+ }\r
+\r
+ /**\r
+ * ARToolKit標準ファイルから1個目の設定をロードする。\r
+ * \r
+ * @param i_filename\r
+ * @throws NyARException\r
+ */\r
+ public void loadARParamFromFile(String i_filename) throws NyARException\r
+ {\r
+ try {\r
+ loadARParam(new FileInputStream(i_filename));\r
+ } catch (Exception e) {\r
+ throw new NyARException(e);\r
+ }\r
+ }\r
+\r
+ /**\r
+ * int arParamChangeSize( ARParam *source, int xsize, int ysize, ARParam *newparam );\r
+ * 関数の代替関数 サイズプロパティをi_xsize,i_ysizeに変更します。\r
+ * @param i_xsize\r
+ * @param i_ysize\r
+ * @param newparam\r
+ * @return\r
+ * \r
+ */\r
+ public void changeScreenSize(int i_xsize, int i_ysize)\r
+ {\r
+ final double scale = (double) i_xsize / (double) (this._screen_size.w);// scale = (double)xsize / (double)(source->xsize);\r
+ //スケールを変更\r
+ this._dist.changeScale(scale);\r
+ this._projection_matrix.changeScale(scale);\r
+ //for (int i = 0; i < 4; i++) {\r
+ // array34[0 * 4 + i] = array34[0 * 4 + i] * scale;// newparam->mat[0][i]=source->mat[0][i]* scale;\r
+ // array34[1 * 4 + i] = array34[1 * 4 + i] * scale;// newparam->mat[1][i]=source->mat[1][i]* scale;\r
+ // array34[2 * 4 + i] = array34[2 * 4 + i];// newparam->mat[2][i] = source->mat[2][i];\r
+ //}\r
+\r
+\r
+ this._screen_size.w = i_xsize;// newparam->xsize = xsize;\r
+ this._screen_size.h = i_ysize;// newparam->ysize = ysize;\r
+ return;\r
+ }\r
+\r
+\r
+ /**\r
+ * int arParamLoad( const char *filename, int num, ARParam *param, ...);\r
+ * i_streamの入力ストリームからi_num個の設定を読み込み、パラメタを配列にして返します。\r
+ * \r
+ * @param i_stream\r
+ * @throws Exception\r
+ */\r
+ public void loadARParam(InputStream i_stream)throws NyARException\r
+ {\r
+ try {\r
+ byte[] buf = new byte[SIZE_OF_PARAM_SET];\r
+ i_stream.read(buf);\r
+ double[] tmp=new double[12];\r
+\r
+ // バッファを加工\r
+ ByteBuffer bb = ByteBuffer.wrap(buf);\r
+ bb.order(ByteOrder.BIG_ENDIAN);\r
+ this._screen_size.w = bb.getInt();\r
+ this._screen_size.h = bb.getInt();\r
+ //double値を12個読み込む\r
+ for(int i=0;i<12;i++){\r
+ tmp[i]=bb.getDouble();\r
+ }\r
+ //Projectionオブジェクトにセット\r
+ this._projection_matrix.setValue(tmp);\r
+ //double値を4個読み込む\r
+ for (int i = 0; i < 4; i++) {\r
+ tmp[i]=bb.getDouble();\r
+ }\r
+ //Factorオブジェクトにセット\r
+ this._dist.setValue(tmp);\r
+ } catch (Exception e) {\r
+ throw new NyARException(e);\r
+ }\r
+ return;\r
+ }\r
+\r
+ public void saveARParam(OutputStream i_stream)throws Exception\r
+ {\r
+ NyARException.trap("未チェックの関数");\r
+ byte buf[] = new byte[SIZE_OF_PARAM_SET];\r
+ // バッファをラップ\r
+ ByteBuffer bb = ByteBuffer.wrap(buf);\r
+ bb.order(ByteOrder.BIG_ENDIAN);\r
+\r
+ // 書き込み\r
+ bb.putInt(this._screen_size.w);\r
+ bb.putInt(this._screen_size.h);\r
+ double[] tmp=new double[12];\r
+ //Projectionを読み出し\r
+ this._projection_matrix.getValue(tmp);\r
+ //double値を12個書き込む\r
+ for(int i=0;i<12;i++){\r
+ tmp[i]=bb.getDouble();\r
+ }\r
+ //Factorを読み出し\r
+ this._dist.getValue(tmp);\r
+ //double値を4個書き込む\r
+ for (int i = 0; i < 4; i++) {\r
+ tmp[i]=bb.getDouble();\r
+ }\r
+ i_stream.write(buf);\r
+ return;\r
+ }\r
+}\r
--- /dev/null
+package jp.nyatla.nyartoolkit.core.param;\r
+\r
+import jp.nyatla.nyartoolkit.core.*;\r
+import jp.nyatla.nyartoolkit.core.types.matrix.NyARDoubleMatrix34;\r
+\r
+/**
+ * Stores the perspective projection matrix. See page 7 of
+ * http://www.hitl.washington.edu/artoolkit/Papers/ART02-Tutorial.pdf
+ *
+ */
+final public class NyARPerspectiveProjectionMatrix extends NyARDoubleMatrix34
+{
+	/*
+	 * static double dot( double a1, double a2, double a3,double b1, double b2,double b3 )
+	 */
+	private final static double dot(double a1, double a2, double a3, double b1,double b2, double b3)
+	{
+		return (a1 * b1 + a2 * b2 + a3 * b3);
+	}
+
+	/* static double norm( double a, double b, double c ) */
+	private final static double norm(double a, double b, double c)
+	{
+		return Math.sqrt(a * a + b * b + c * c);
+	}
+
+	/**
+	 * Replacement of int arParamDecompMat( double source[3][4], double cpara[3][4], double trans[3][4] ); Optimize STEP[754->665]
+	 * Decomposes this 3x4 matrix into an intrinsic part (o_cpara) and a
+	 * transform part (o_trans) by normalizing the bottom row and successively
+	 * removing its components from the rows above.
+	 * 
+	 * @param o_cpara
+	 *            Return argument; must be a 3x4 matrix.
+	 * @param o_trans
+	 *            Return argument; must be a 3x4 matrix.
+	 */
+	public void decompMat(NyARMat o_cpara, NyARMat o_trans)
+	{
+		double rem1, rem2, rem3;
+		double c00,c01,c02,c03,c10,c11,c12,c13,c20,c21,c22,c23;
+		// Copy the matrix into locals, negating every element when m23 is
+		// negative so the decomposition works on a consistently-signed matrix.
+		if (this.m23>= 0) {// if( source[2][3] >= 0 ) {
+			// <Optimize>
+			// for(int r = 0; r < 3; r++ ){
+			// for(int c = 0; c < 4; c++ ){
+			// Cpara[r][c]=source[r][c];//Cpara[r][c] = source[r][c];
+			// }
+			// }
+			c00=this.m00;
+			c01=this.m01;
+			c02=this.m02;
+			c03=this.m03;
+			c10=this.m10;
+			c11=this.m11;
+			c12=this.m12;
+			c13=this.m13;
+			c20=this.m20;
+			c21=this.m21;
+			c22=this.m22;
+			c23=this.m23;
+		} else {
+			// <Optimize>
+			// for(int r = 0; r < 3; r++ ){
+			// for(int c = 0; c < 4; c++ ){
+			// Cpara[r][c]=-source[r][c];//Cpara[r][c] = -(source[r][c]);
+			// }
+			// }
+			c00=-this.m00;
+			c01=-this.m01;
+			c02=-this.m02;
+			c03=-this.m03;
+			c10=-this.m10;
+			c11=-this.m11;
+			c12=-this.m12;
+			c13=-this.m13;
+			c20=-this.m20;
+			c21=-this.m21;
+			c22=-this.m22;
+			c23=-this.m23;
+		}
+
+		double[][] cpara = o_cpara.getArray();
+		double[][] trans = o_trans.getArray();
+		for (int r = 0; r < 3; r++) {
+			for (int c = 0; c < 4; c++) {
+				cpara[r][c] = 0.0;// cpara[r][c] = 0.0;
+			}
+		}
+		// Row 2: normalize to unit length; its norm becomes cpara[2][2].
+		cpara[2][2] = norm(c20, c21, c22);// cpara[2][2] =norm( Cpara[2][0],Cpara[2][1],Cpara[2][2]);
+		trans[2][0] = c20 / cpara[2][2];// trans[2][0] = Cpara[2][0] /cpara[2][2];
+		trans[2][1] = c21 / cpara[2][2];// trans[2][1] = Cpara[2][1] / cpara[2][2];
+		trans[2][2] = c22 / cpara[2][2];// trans[2][2] =Cpara[2][2] /cpara[2][2];
+		trans[2][3] = c23 / cpara[2][2];// trans[2][3] =Cpara[2][3] /cpara[2][2];
+
+		// Row 1: remove the row-2 component, then normalize the remainder.
+		cpara[1][2] = dot(trans[2][0], trans[2][1], trans[2][2], c10, c11, c12);// cpara[1][2]=dot(trans[2][0],trans[2][1],trans[2][2],Cpara[1][0],Cpara[1][1],Cpara[1][2]);
+		rem1 = c10 - cpara[1][2] * trans[2][0];// rem1 =Cpara[1][0] -cpara[1][2] *trans[2][0];
+		rem2 = c11 - cpara[1][2] * trans[2][1];// rem2 =Cpara[1][1] -cpara[1][2] *trans[2][1];
+		rem3 = c12 - cpara[1][2] * trans[2][2];// rem3 =Cpara[1][2] -cpara[1][2] *trans[2][2];
+		cpara[1][1] = norm(rem1, rem2, rem3);// cpara[1][1] = norm( rem1,// rem2, rem3 );
+		trans[1][0] = rem1 / cpara[1][1];// trans[1][0] = rem1 / cpara[1][1];
+		trans[1][1] = rem2 / cpara[1][1];// trans[1][1] = rem2 / cpara[1][1];
+		trans[1][2] = rem3 / cpara[1][1];// trans[1][2] = rem3 / cpara[1][1];
+
+		// Row 0: remove the row-2 and row-1 components, then normalize.
+		cpara[0][2] = dot(trans[2][0], trans[2][1], trans[2][2], c00, c01, c02);// cpara[0][2] =dot(trans[2][0], trans[2][1],trans[2][2],Cpara[0][0],Cpara[0][1],Cpara[0][2]);
+		cpara[0][1] = dot(trans[1][0], trans[1][1], trans[1][2], c00, c01, c02);// cpara[0][1]=dot(trans[1][0],trans[1][1],trans[1][2],Cpara[0][0],Cpara[0][1],Cpara[0][2]);
+		rem1 = c00 - cpara[0][1] * trans[1][0] - cpara[0][2]* trans[2][0];// rem1 = Cpara[0][0] - cpara[0][1]*trans[1][0]- cpara[0][2]*trans[2][0];
+		rem2 = c01 - cpara[0][1] * trans[1][1] - cpara[0][2]* trans[2][1];// rem2 = Cpara[0][1] - cpara[0][1]*trans[1][1]- cpara[0][2]*trans[2][1];
+		rem3 = c02 - cpara[0][1] * trans[1][2] - cpara[0][2]* trans[2][2];// rem3 = Cpara[0][2] - cpara[0][1]*trans[1][2] - cpara[0][2]*trans[2][2];
+		cpara[0][0] = norm(rem1, rem2, rem3);// cpara[0][0] = norm( rem1,rem2, rem3 );
+		trans[0][0] = rem1 / cpara[0][0];// trans[0][0] = rem1 / cpara[0][0];
+		trans[0][1] = rem2 / cpara[0][0];// trans[0][1] = rem2 / cpara[0][0];
+		trans[0][2] = rem3 / cpara[0][0];// trans[0][2] = rem3 / cpara[0][0];
+
+		// Translation column.
+		trans[1][3] = (c13 - cpara[1][2] * trans[2][3])/ cpara[1][1];// trans[1][3] = (Cpara[1][3] -cpara[1][2]*trans[2][3]) / cpara[1][1];
+		trans[0][3] = (c03 - cpara[0][1] * trans[1][3] - cpara[0][2]* trans[2][3])/ cpara[0][0];// trans[0][3] = (Cpara[0][3] -cpara[0][1]*trans[1][3]-cpara[0][2]*trans[2][3]) / cpara[0][0];
+
+		// Normalize the intrinsic matrix so cpara[2][2] becomes 1.
+		for (int r = 0; r < 3; r++) {
+			for (int c = 0; c < 3; c++) {
+				cpara[r][c] /= cpara[2][2];// cpara[r][c] /= cpara[2][2];
+			}
+		}
+		return;
+	}
+	/**
+	 * Replacement of part of int arParamChangeSize( ARParam *source, int xsize, int ysize, ARParam *newparam );
+	 * Scales rows 0 and 1 of the matrix by i_scale; row 2 is left unchanged.
+	 * @param i_scale
+	 *            Scale factor relative to the current size.
+	 */
+	public void changeScale(double i_scale)
+	{
+		this.m00=this.m00*i_scale;
+		this.m10=this.m10*i_scale;
+		this.m01=this.m01*i_scale;
+		this.m11=this.m11*i_scale;
+		this.m02=this.m02*i_scale;
+		this.m12=this.m12*i_scale;
+		this.m03=this.m03*i_scale;
+		this.m13=this.m13*i_scale;
+		//for (int i = 0; i < 4; i++) {
+		//	array34[0 * 4 + i] = array34[0 * 4 + i] * scale;// newparam->mat[0][i]=source->mat[0][i]* scale;
+		//	array34[1 * 4 + i] = array34[1 * 4 + i] * scale;// newparam->mat[1][i]=source->mat[1][i]* scale;
+		//	array34[2 * 4 + i] = array34[2 * 4 + i];// newparam->mat[2][i] = source->mat[2][i];
+		//}
+		return;
+	}
+
+	
+	
+	
+}
yw = 102.5 + 5.0 * (j + 0.5) * ydiv2_reciprocal;\r
for (i = 0; i < xdiv2; i++) {\r
xw = 102.5 + 5.0 * (i + 0.5) * xdiv2_reciprocal;\r
- d = para[2 * 3 + 0] * xw + para[2 * 3 + 1] * yw\r
- + para[2 * 3 + 2];\r
+ d = para[2 * 3 + 0] * xw + para[2 * 3 + 1] * yw+ para[2 * 3 + 2];\r
if (d == 0) {\r
throw new NyARException();\r
}\r
}\r
}\r
}\r
- // short[][][] ext_pat=new\r
- // short[Config.AR_PATT_SIZE_Y][Config.AR_PATT_SIZE_X][3];//ARUint32\r
+ // short[][][] ext_pat=new short[Config.AR_PATT_SIZE_Y][Config.AR_PATT_SIZE_X][3];//ARUint32\r
// ext_pat2[AR_PATT_SIZE_Y][AR_PATT_SIZE_X][3];\r
/* <Optimize> */\r
int xdiv_x_ydiv = xdiv * ydiv;\r
extpat_j_i[1] = (ext_pat2_j_i[1] / xdiv_x_ydiv);// ext_pat[j][i][1]=(byte)(ext_pat2[j][i][1]/(xdiv*ydiv));\r
extpat_j_i[2] = (ext_pat2_j_i[2] / xdiv_x_ydiv);// ext_pat[j][i][2]=(byte)(ext_pat2[j][i][2]/(xdiv*ydiv));\r
}\r
- }/*\r
- * int xdiv_mul_ydiv=xdiv*ydiv; short [][] extpat_pt_2; short[]\r
- * extpat_pt_1; int[][] ext_pat2_pt_2; int[] ext_pat2_pt_1; for(int\r
- * j=this.height-1; j>=0; j--){//for(int j = 0; j < this.height; j++ ){\r
- * extpat_pt_2=extpat[j]; ext_pat2_pt_2=ext_pat2[j]; for(int i =\r
- * this.width-1; i>=0; i--){//for(int i = 0; i < this.width; i++ ){ //\r
- * PRL 2006-06-08. extpat_pt_1=extpat_pt_2[i];\r
- * ext_pat2_pt_1=ext_pat2_pt_2[i];\r
- * extpat_pt_1[0]=(short)(ext_pat2_pt_1[0] /\r
- * xdiv_mul_ydiv);//ext_pat[j][i][0] = (byte)(ext_pat2[j][i][0] /\r
- * (xdiv*ydiv)); extpat_pt_1[1]=(short)(ext_pat2_pt_1[1] /\r
- * xdiv_mul_ydiv);//ext_pat[j][i][1] = (byte)(ext_pat2[j][i][1] /\r
- * (xdiv*ydiv)); extpat_pt_1[2]=(short)(ext_pat2_pt_1[2] /\r
- * xdiv_mul_ydiv);//ext_pat[j][i][2] = (byte)(ext_pat2[j][i][2] /\r
- * (xdiv*ydiv)); } } /*</Optimize>\r
- */\r
+ }\r
return true;\r
}\r
}
\ No newline at end of file
package jp.nyatla.nyartoolkit.core.transmat;\r
\r
import jp.nyatla.nyartoolkit.NyARException;\r
-import jp.nyatla.nyartoolkit.core.NyARParam;\r
import jp.nyatla.nyartoolkit.core.NyARSquare;\r
+import jp.nyatla.nyartoolkit.core.param.*;\r
import jp.nyatla.nyartoolkit.core.transmat.fitveccalc.NyARFitVecCalculator;\r
import jp.nyatla.nyartoolkit.core.transmat.rotmatrix.NyARRotMatrix;\r
import jp.nyatla.nyartoolkit.core.transmat.rottransopt.NyARRotTransOptimize;\r
import jp.nyatla.nyartoolkit.core.types.*;\r
-import java.util.*;\r
+\r
+\r
/**\r
* This class calculates ARMatrix from square information and holds it. --\r
* 変換行列を計算して、結果を保持するクラス。\r
* \r
*/\r
-public class NyARTransMat_O2 implements INyARTransMat\r
+public class NyARTransMat implements INyARTransMat\r
{\r
private final static double AR_GET_TRANS_CONT_MAT_MAX_FIT_ERROR = 1.0;\r
\r
private final NyARDoublePoint2d _center=new NyARDoublePoint2d(0,0);\r
private final NyARFitVecCalculator _calculator;\r
private final NyARTransOffset _offset=new NyARTransOffset();\r
- private final NyARRotTransOptimize __mat_optimize;\r
+ private final NyARRotTransOptimize _mat_optimize;\r
\r
\r
- public NyARTransMat_O2(NyARParam i_param) throws NyARException\r
+ public NyARTransMat(NyARParam i_param) throws NyARException\r
{\r
- this._calculator=new NyARFitVecCalculator(i_param);\r
- this._rotmatrix = new NyARRotMatrix(i_param);\r
- this.__mat_optimize=new NyARRotTransOptimize(i_param);\r
+ final NyARCameraDistortionFactor dist=i_param.getDistortionFactor();\r
+ final NyARPerspectiveProjectionMatrix pmat=i_param.getPerspectiveProjectionMatrix();\r
+ this._calculator=new NyARFitVecCalculator(pmat,dist);\r
+ this._rotmatrix = new NyARRotMatrix(pmat);\r
+ this._mat_optimize=new NyARRotTransOptimize(pmat);\r
}\r
\r
public void setCenter(double i_x, double i_y)\r
return;\r
}\r
\r
-// private NyARDoublePoint3d[] __transMat_marker_vertex3d;\r
\r
private final NyARDoublePoint2d[] __transMat_sqvertex_ref = new NyARDoublePoint2d[4];\r
private final NyARLinear[] __transMat_linear_ref=new NyARLinear[4];\r
-\r
final NyARDoublePoint3d __transMat_trans=new NyARDoublePoint3d();\r
/**\r
* double arGetTransMat( ARMarkerInfo *marker_info,double center[2], double width, double conv[3][4] )\r
this._calculator.calculateTransfer(this._rotmatrix,trans);\r
\r
//計算結果の最適化(this._rotmatrix,trans)\r
- this.__mat_optimize.optimize(this._rotmatrix,trans,this._calculator);\r
+ this._mat_optimize.optimize(this._rotmatrix,trans,this._calculator);\r
+ \r
// マトリクスの保存\r
o_result_conv.updateMatrixValue(this._rotmatrix, this._offset.point, trans);\r
return;\r
this._calculator.calculateTransfer(this._rotmatrix,trans);\r
\r
//計算結果の最適化(this._rotmatrix,trans)\r
- final double err=this.__mat_optimize.optimize(this._rotmatrix,trans,this._calculator);\r
+ final double err=this._mat_optimize.optimize(this._rotmatrix,trans,this._calculator);\r
\r
//計算結果を保存\r
io_result_conv.updateMatrixValue(this._rotmatrix, this._offset.point, trans);\r
//回転行列の平行移動量の計算\r
this._calculator.calculateTransfer(this._rotmatrix,trans);\r
//計算結果の最適化(this._rotmatrix,trans)\r
- final double err2=this.__mat_optimize.optimize(this._rotmatrix,trans,this._calculator);\r
+ final double err2=this._mat_optimize.optimize(this._rotmatrix,trans,this._calculator);\r
//エラー値が低かったら値を差換え\r
if (err2 < err) {\r
// 良い値が取れたら、差換え\r
{\r
public NyARDoublePoint3d[] vertex=NyARDoublePoint3d.createArray(4);\r
public NyARDoublePoint3d point=new NyARDoublePoint3d(); \r
- \r
+ /**\r
+ * 中心位置と辺長から、オフセット情報を作成して設定する。\r
+ * @param i_width\r
+ * @param i_center\r
+ */\r
public void setSquare(double i_width,NyARDoublePoint2d i_center)\r
{\r
final double w_2 = i_width / 2.0;\r
import jp.nyatla.nyartoolkit.core.transmat.rotmatrix.NyARRotMatrix;\r
import jp.nyatla.nyartoolkit.core.types.*;\r
import jp.nyatla.nyartoolkit.*;\r
+import jp.nyatla.nyartoolkit.core.param.*;\r
/**\r
* 平行移動量を計算するクラス\r
+ * \r
+ * NyARPerspectiveProjectionMatrixに直接アクセスしてる場所があるけど、\r
+ * この辺の計算はNyARPerspectiveProjectionMatrixクラスの関数にして押し込む予定。\r
*\r
*/\r
public class NyARFitVecCalculator\r
private final NyARMat _mat_b = new NyARMat(3,8);//3,NUMBER_OF_VERTEX*2\r
private final NyARMat _mat_a = new NyARMat(8,3);/*NUMBER_OF_VERTEX,3*/\r
private final NyARMat _mat_d = new NyARMat(3,3);\r
- private final NyARParam _cparam;\r
+ private final NyARPerspectiveProjectionMatrix _projection_mat;\r
+ private final NyARCameraDistortionFactor _distortionfactor;\r
\r
\r
// private NyARDoublePoint2d[] _vertex_2d_ref;\r
- public NyARFitVecCalculator(final NyARParam i_param)\r
+ public NyARFitVecCalculator(final NyARPerspectiveProjectionMatrix i_projection_mat_ref,final NyARCameraDistortionFactor i_distortion_ref)\r
{\r
// 変換マトリクスdとbの準備(arGetTransMatSubの一部)\r
- final double cpara[] = i_param.get34Array();\r
final double[][] a_array = this._mat_a.getArray();\r
final double[][] b_array = this._mat_b.getArray();\r
\r
//変換用行列のcpara固定値の部分を先に初期化してしまう。\r
for (int i = 0; i < 4; i++) {\r
final int x2 = i * 2;\r
- a_array[x2][0] = b_array[0][x2] = cpara[0 * 4 + 0];// mat_a->m[j*6+0]=mat_b->m[num*0+j*2] =cpara[0][0];\r
- a_array[x2][1] = b_array[1][x2] = cpara[0 * 4 + 1];// mat_a->m[j*6+1]=mat_b->m[num*2+j*2]=cpara[0][1];\r
+ a_array[x2][0] = b_array[0][x2] = i_projection_mat_ref.m00;// mat_a->m[j*6+0]=mat_b->m[num*0+j*2] =cpara[0][0];\r
+ a_array[x2][1] = b_array[1][x2] = i_projection_mat_ref.m01;// mat_a->m[j*6+1]=mat_b->m[num*2+j*2]=cpara[0][1];\r
//a_array[x2][2] = b_array[2][x2] = cpara[0 * 4 + 2] - o_marker_vertex_2d[i].x;// mat_a->m[j*6+2]=mat_b->m[num*4+j*2]=cpara[0][2]-pos2d[j][0];\r
a_array[x2 + 1][0] = b_array[0][x2 + 1] = 0.0;// mat_a->m[j*6+3] =mat_b->m[num*0+j*2+1]= 0.0;\r
- a_array[x2 + 1][1] = b_array[1][x2 + 1] = cpara[1 * 4 + 1];// mat_a->m[j*6+4] =mat_b->m[num*2+j*2+1]= cpara[1][1];\r
+ a_array[x2 + 1][1] = b_array[1][x2 + 1] = i_projection_mat_ref.m11;// mat_a->m[j*6+4] =mat_b->m[num*2+j*2+1]= cpara[1][1];\r
//a_array[x2 + 1][2] = b_array[2][x2 + 1] = cpara[1 * 4 + 2] - o_marker_vertex_2d[i].y;// mat_a->m[j*6+5]=mat_b->m[num*4+j*2+1]=cpara[1][2]-pos2d[j][1];\r
}\r
- this._cparam=i_param;\r
- this._fitsquare_vertex=NyARDoublePoint2d.createArray(4);\r
+ this._projection_mat=i_projection_mat_ref;\r
+ this._distortionfactor=i_distortion_ref;\r
return;\r
}\r
- private final NyARDoublePoint2d[] _fitsquare_vertex;\r
+ private final NyARDoublePoint2d[] _fitsquare_vertex=NyARDoublePoint2d.createArray(4);;\r
private NyARTransOffset _offset_square;\r
public void setOffsetSquare(NyARTransOffset i_offset)\r
{\r
// int i;\r
// if (arFittingMode == AR_FITTING_TO_INPUT) {\r
// // arParamIdeal2Observをバッチ処理\r
- this._cparam.ideal2ObservBatch(i_square_vertex, vertex,4);\r
+ this._distortionfactor.ideal2ObservBatch(i_square_vertex, vertex,4);\r
// } else {\r
// for (i = 0; i < NUMBER_OF_VERTEX; i++) {\r
// o_marker_vertex_2d[i].x = i_square_vertex[i].x;\r
// } \r
\r
\r
- \r
+ final double cpara02=this._projection_mat.m02;\r
+ final double cpara12=this._projection_mat.m12; \r
final NyARMat mat_d=_mat_d;\r
final NyARMat mat_a=this._mat_a;\r
final NyARMat mat_b=this._mat_b;\r
- final double[] cparam_array=this._cparam.get34Array();\r
final double[][] a_array = mat_a.getArray();\r
final double[][] b_array = mat_b.getArray();\r
- final double cpara02=cparam_array[0*4+2];\r
- final double cpara12=cparam_array[1*4+2];\r
for (int i = 0; i < 4; i++) {\r
final int x2 = i * 2; \r
a_array[x2][2] = b_array[2][x2] = cpara02 - vertex[i].x;// mat_a->m[j*6+2]=mat_b->m[num*4+j*2]=cpara[0][2]-pos2d[j][0];\r
mat_d.matrixSelfInv(); \r
return;\r
}\r
- private final NyARMat __calculateTransferVec_mat_c = new NyARMat(8, 1);//NUMBER_OF_VERTEX * 2, 1\r
private final NyARMat _mat_e = new NyARMat(3, 1);\r
private final NyARMat _mat_f = new NyARMat(3, 1);\r
+ private final NyARMat __calculateTransferVec_mat_c = new NyARMat(8, 1);//NUMBER_OF_VERTEX * 2, 1\r
private final NyARDoublePoint3d[] __calculateTransfer_point3d=NyARDoublePoint3d.createArray(4); \r
\r
/**\r
final public void calculateTransfer(NyARRotMatrix i_rotation,NyARDoublePoint3d o_transfer) throws NyARException\r
{\r
assert(this._offset_square!=null);\r
- final double[] cparam_array=this._cparam.get34Array();\r
- final double cpara00=cparam_array[0*4+0];\r
- final double cpara01=cparam_array[0*4+1];\r
- final double cpara02=cparam_array[0*4+2];\r
- final double cpara11=cparam_array[1*4+1];\r
- final double cpara12=cparam_array[1*4+2];\r
+ final double cpara00=this._projection_mat.m00;\r
+ final double cpara01=this._projection_mat.m01;\r
+ final double cpara02=this._projection_mat.m02;\r
+ final double cpara11=this._projection_mat.m11;\r
+ final double cpara12=this._projection_mat.m12;\r
\r
final NyARDoublePoint3d[] point3d=this.__calculateTransfer_point3d;\r
final NyARDoublePoint3d[] vertex3d=this._offset_square.vertex; \r
import jp.nyatla.nyartoolkit.NyARException;\r
import jp.nyatla.nyartoolkit.core.transmat.NyARTransMatResult;\r
import jp.nyatla.nyartoolkit.core.types.*;\r
-import jp.nyatla.nyartoolkit.core.*;\r
+import jp.nyatla.nyartoolkit.core.types.matrix.NyARDoubleMatrix33;\r
+import jp.nyatla.nyartoolkit.core.param.*;\r
/**\r
* 回転行列計算用の、3x3行列\r
*\r
*/\r
-public class NyARRotMatrix\r
-{\r
- public double m00;\r
- public double m01;\r
- public double m02;\r
- public double m10;\r
- public double m11;\r
- public double m12;\r
- public double m20;\r
- public double m21;\r
- public double m22;\r
- \r
+public class NyARRotMatrix extends NyARDoubleMatrix33\r
+{ \r
/**\r
* インスタンスを準備します。\r
* \r
* @param i_param\r
*/\r
- public NyARRotMatrix(NyARParam i_param) throws NyARException\r
+ public NyARRotMatrix(NyARPerspectiveProjectionMatrix i_matrix) throws NyARException\r
{\r
- this.__initRot_vec1=new NyARRotVector(i_param);\r
- this.__initRot_vec2=new NyARRotVector(i_param);\r
+ this.__initRot_vec1=new NyARRotVector(i_matrix);\r
+ this.__initRot_vec2=new NyARRotVector(i_matrix);\r
return;\r
}\r
final private NyARRotVector __initRot_vec1;\r
\r
//軸2\r
vec2.exteriorProductFromLinear(i_linear[1], i_linear[3]);\r
- vec2.checkVectorByVertex(i_sqvertex[3], i_sqvertex[1]);\r
+ vec2.checkVectorByVertex(i_sqvertex[3], i_sqvertex[0]);\r
\r
//回転の最適化?\r
NyARRotVector.checkRotation(vec1,vec2);\r
this.m21 =vec2.v3;\r
\r
//最後の軸を計算\r
- this.m02 = vec1.v2 * vec2.v3 - vec1.v3 * vec2.v2;\r
- this.m12 = vec1.v3 * vec2.v1 - vec1.v1 * vec2.v3;\r
- this.m22 = vec1.v1 * vec2.v2 - vec1.v2 * vec2.v1;\r
- final double w = Math.sqrt(this.m02 * this.m02 + this.m12 * this.m12 + this.m22 * this.m22);\r
- this.m02 /= w;\r
- this.m12 /= w;\r
- this.m22 /= w;\r
+ final double w02 = vec1.v2 * vec2.v3 - vec1.v3 * vec2.v2;\r
+ final double w12 = vec1.v3 * vec2.v1 - vec1.v1 * vec2.v3;\r
+ final double w22 = vec1.v1 * vec2.v2 - vec1.v2 * vec2.v1;\r
+ final double w = Math.sqrt(w02 * w02 + w12 * w12 + w22 * w22);\r
+ this.m02 = w02/w;\r
+ this.m12 = w12/w;\r
+ this.m22 = w22/w;\r
return;\r
}\r
\r
a = -a;\r
}\r
// <Optimize>\r
- // sinc = (rot[2][1]*rot[0][2]-rot[2][0]*rot[1][2])/\r
- // (rot[0][2]*rot[0][2]+rot[1][2]*rot[1][2]);\r
- // cosc = -(rot[0][2]*rot[2][0]+rot[1][2]*rot[2][1])/\r
- // (rot[0][2]*rot[0][2]+rot[1][2]*rot[1][2]);\r
+ // sinc = (rot[2][1]*rot[0][2]-rot[2][0]*rot[1][2])/(rot[0][2]*rot[0][2]+rot[1][2]*rot[1][2]);\r
+ // cosc = -(rot[0][2]*rot[2][0]+rot[1][2]*rot[2][1])/(rot[0][2]*rot[0][2]+rot[1][2]*rot[1][2]);\r
final double tmp = (rot02 * rot02 + rot12 * rot12);\r
sinc = (this.m21 * rot02 - this.m20 * rot12) / tmp;\r
cosc = -(rot02 * this.m20 + rot12 * this.m21) / tmp;\r
\r
import jp.nyatla.nyartoolkit.NyARException;\r
import jp.nyatla.nyartoolkit.core.NyARMat;\r
-import jp.nyatla.nyartoolkit.core.NyARParam;\r
import jp.nyatla.nyartoolkit.core.types.NyARDoublePoint2d;\r
import jp.nyatla.nyartoolkit.core.types.NyARLinear;\r
+import jp.nyatla.nyartoolkit.core.param.*;\r
\r
public class NyARRotVector\r
{\r
\r
//privateメンバ達\r
\r
- private double[] _i_cpara_array_ref;\r
+ private NyARPerspectiveProjectionMatrix _projection_mat_ref;\r
\r
private double[][] _inv_cpara_array_ref;\r
\r
- public NyARRotVector(NyARParam i_cpara) throws NyARException\r
+ public NyARRotVector(NyARPerspectiveProjectionMatrix i_cmat) throws NyARException\r
{\r
- final double[] cpara = i_cpara.get34Array();\r
NyARMat mat_a = new NyARMat(3, 3);\r
double[][] a_array = mat_a.getArray();\r
- int i, j;\r
- for (j = 0; j < 3; j++) {\r
- for (i = 0; i < 3; i++) {\r
- a_array[j][i] = cpara[j * 4 + i];// m[j*3+i] = cpara[j][i];\r
- }\r
- }\r
+ \r
+ a_array[0][0] =i_cmat.m00;\r
+ a_array[0][1] =i_cmat.m01;\r
+ a_array[0][2] =i_cmat.m02;\r
+ a_array[1][0] =i_cmat.m10;\r
+ a_array[1][1] =i_cmat.m11;\r
+ a_array[1][2] =i_cmat.m12;\r
+ a_array[2][0] =i_cmat.m20;\r
+ a_array[2][1] =i_cmat.m21;\r
+ a_array[2][2] =i_cmat.m22;\r
+ \r
mat_a.matrixSelfInv();\r
- this._i_cpara_array_ref = cpara;\r
+ this._projection_mat_ref = i_cmat;\r
this._inv_cpara_array_ref = mat_a.getArray();\r
//GCない言語のときは、ここで配列の所有権委譲してね!\r
}\r
\r
/**\r
- * 2直線に直交するベクトルを計算する。\r
+	 * 2直線に直交するベクトルを計算する・・・だと思う。\r
* @param i_linear1\r
* @param i_linear2\r
*/\r
public void exteriorProductFromLinear(NyARLinear i_linear1, NyARLinear i_linear2)\r
{\r
//1行目\r
- final double[] cpara = this._i_cpara_array_ref;\r
+ final NyARPerspectiveProjectionMatrix cmat= this._projection_mat_ref;\r
final double w1 = i_linear1.run * i_linear2.rise - i_linear2.run * i_linear1.rise;\r
final double w2 = i_linear1.rise * i_linear2.intercept - i_linear2.rise * i_linear1.intercept;\r
final double w3 = i_linear1.intercept * i_linear2.run - i_linear2.intercept * i_linear1.run;\r
\r
- final double m0 = w1 * (cpara[0 * 4 + 1] * cpara[1 * 4 + 2] - cpara[0 * 4 + 2] * cpara[1 * 4 + 1]) + w2 * cpara[1 * 4 + 1] - w3 * cpara[0 * 4 + 1];\r
- final double m1 = -w1 * cpara[0 * 4 + 0] * cpara[1 * 4 + 2] + w3 * cpara[0 * 4 + 0];\r
- final double m2 = w1 * cpara[0 * 4 + 0] * cpara[1 * 4 + 1];\r
+ final double m0 = w1 * (cmat.m01 * cmat.m12 - cmat.m02 * cmat.m11) + w2 * cmat.m11 - w3 * cmat.m01;//w1 * (cpara[0 * 4 + 1] * cpara[1 * 4 + 2] - cpara[0 * 4 + 2] * cpara[1 * 4 + 1]) + w2 * cpara[1 * 4 + 1] - w3 * cpara[0 * 4 + 1];\r
+ final double m1 = -w1 * cmat.m00 * cmat.m12 + w3 * cmat.m00;//-w1 * cpara[0 * 4 + 0] * cpara[1 * 4 + 2] + w3 * cpara[0 * 4 + 0];\r
+ final double m2 = w1 * cmat.m00 * cmat.m11;//w1 * cpara[0 * 4 + 0] * cpara[1 * 4 + 1];\r
final double w = Math.sqrt(m0 * m0 + m1 * m1 + m2 * m2);\r
this.v1 = m0 / w;\r
this.v2 = m1 / w;\r
public void checkVectorByVertex(final NyARDoublePoint2d i_start_vertex, final NyARDoublePoint2d i_end_vertex) throws NyARException\r
{\r
double h;\r
- final double[] cpara = this._i_cpara_array_ref;\r
final double[][] inv_cpara = this._inv_cpara_array_ref;\r
//final double[] world = __checkVectorByVertex_world;// [2][3];\r
final double world0 = inv_cpara[0][0] * i_start_vertex.x * 10.0 + inv_cpara[0][1] * i_start_vertex.y * 10.0 + inv_cpara[0][2] * 10.0;// mat_a->m[0]*st[0]*10.0+\r
// </Optimize>\r
\r
//final double[] camera = __checkVectorByVertex_camera;// [2][2];\r
-\r
- h = cpara[2 * 4 + 0] * world0 + cpara[2 * 4 + 1] * world1 + cpara[2 * 4 + 2] * world2;\r
+ final NyARPerspectiveProjectionMatrix cmat= this._projection_mat_ref;\r
+ //h = cpara[2 * 4 + 0] * world0 + cpara[2 * 4 + 1] * world1 + cpara[2 * 4 + 2] * world2;\r
+ h = cmat.m20 * world0 + cmat.m21 * world1 + cmat.m22 * world2;\r
if (h == 0.0) {\r
throw new NyARException();\r
}\r
- final double camera0 = (cpara[0 * 4 + 0] * world0 + cpara[0 * 4 + 1] * world1 + cpara[0 * 4 + 2] * world2) / h;\r
- final double camera1 = (cpara[1 * 4 + 0] * world0 + cpara[1 * 4 + 1] * world1 + cpara[1 * 4 + 2] * world2) / h;\r
+ //final double camera0 = (cpara[0 * 4 + 0] * world0 + cpara[0 * 4 + 1] * world1 + cpara[0 * 4 + 2] * world2) / h;\r
+ //final double camera1 = (cpara[1 * 4 + 0] * world0 + cpara[1 * 4 + 1] * world1 + cpara[1 * 4 + 2] * world2) / h;\r
+ final double camera0 = (cmat.m00 * world0 + cmat.m01 * world1 + cmat.m02 * world2) / h;\r
+ final double camera1 = (cmat.m10 * world0 + cmat.m11 * world1 + cmat.m12 * world2) / h;\r
\r
- h = cpara[2 * 4 + 0] * world3 + cpara[2 * 4 + 1] * world4 + cpara[2 * 4 + 2] * world5;\r
+ //h = cpara[2 * 4 + 0] * world3 + cpara[2 * 4 + 1] * world4 + cpara[2 * 4 + 2] * world5;\r
+ h = cmat.m20 * world3 + cmat.m21 * world4 + cmat.m22 * world5;\r
if (h == 0.0) {\r
throw new NyARException();\r
}\r
- final double camera2 = (cpara[0 * 4 + 0] * world3 + cpara[0 * 4 + 1] * world4 + cpara[0 * 4 + 2] * world5) / h;\r
- final double camera3 = (cpara[1 * 4 + 0] * world3 + cpara[1 * 4 + 1] * world4 + cpara[1 * 4 + 2] * world5) / h;\r
+ //final double camera2 = (cpara[0 * 4 + 0] * world3 + cpara[0 * 4 + 1] * world4 + cpara[0 * 4 + 2] * world5) / h;\r
+ //final double camera3 = (cpara[1 * 4 + 0] * world3 + cpara[1 * 4 + 1] * world4 + cpara[1 * 4 + 2] * world5) / h;\r
+ final double camera2 = (cmat.m00 * world3 + cmat.m01 * world4 + cmat.m02 * world5) / h;\r
+ final double camera3 = (cmat.m10 * world3 + cmat.m11 * world4 + cmat.m12 * world5) / h;\r
\r
final double v = (i_end_vertex.x - i_start_vertex.x) * (camera2 - camera0) + (i_end_vertex.y - i_start_vertex.y) * (camera3 - camera1);\r
if (v < 0) {\r
package jp.nyatla.nyartoolkit.core.transmat.rottransopt;\r
\r
-import java.util.Date;\r
\r
import jp.nyatla.nyartoolkit.NyARException;\r
-import jp.nyatla.nyartoolkit.core.*;\r
+import jp.nyatla.nyartoolkit.core.param.*;\r
import jp.nyatla.nyartoolkit.core.transmat.fitveccalc.NyARFitVecCalculator;\r
import jp.nyatla.nyartoolkit.core.transmat.rotmatrix.NyARRotMatrix;\r
import jp.nyatla.nyartoolkit.core.types.NyARDoublePoint2d;\r
{\r
private final static int AR_GET_TRANS_MAT_MAX_LOOP_COUNT = 5;// #define AR_GET_TRANS_MAT_MAX_LOOP_COUNT 5\r
private final static double AR_GET_TRANS_MAT_MAX_FIT_ERROR = 1.0;// #define AR_GET_TRANS_MAT_MAX_FIT_ERROR 1.0\r
- private final NyARParam _param;\r
- public NyARRotTransOptimize(NyARParam i_param)\r
+ private final NyARPerspectiveProjectionMatrix _projection_mat_ref;\r
+ public NyARRotTransOptimize(NyARPerspectiveProjectionMatrix i_projection_mat_ref)\r
{\r
- this._param=i_param;\r
+ this._projection_mat_ref=i_projection_mat_ref;\r
+ return;\r
}\r
\r
final public double optimize(NyARRotMatrix io_rotmat,NyARDoublePoint3d io_transvec,NyARFitVecCalculator i_calculator) throws NyARException\r
{\r
- \r
- \r
- \r
- \r
final NyARDoublePoint2d[] fit_vertex=i_calculator.getFitSquare();\r
final NyARDoublePoint3d[] offset_square=i_calculator.getOffsetVertex().vertex;\r
\r
d_pt2 = i_vertex2d[3];\r
P2D30 = d_pt2.x;\r
P2D31 = d_pt2.y;\r
- final double cpara[] = this._param.get34Array();\r
- final double CP0, CP1, CP2, CP3, CP4, CP5, CP6, CP7, CP8, CP9, CP10;\r
- CP0 = cpara[0];\r
- CP1 = cpara[1];\r
- CP2 = cpara[2];\r
- CP3 = cpara[3];\r
- CP4 = cpara[4];\r
- CP5 = cpara[5];\r
- CP6 = cpara[6];\r
- CP7 = cpara[7];\r
- CP8 = cpara[8];\r
- CP9 = cpara[9];\r
- CP10 = cpara[10];\r
- combo03 = CP0 * trans.x + CP1 * trans.y + CP2 * trans.z + CP3;\r
- combo13 = CP4 * trans.x + CP5 * trans.y + CP6 * trans.z + CP7;\r
- combo23 = CP8 * trans.x + CP9 * trans.y + CP10 * trans.z + cpara[11];\r
+ final NyARPerspectiveProjectionMatrix prjmat = this._projection_mat_ref;\r
+ final double CP0, CP1, CP2, CP4, CP5, CP6, CP8, CP9, CP10;\r
+ CP0 = prjmat.m00;\r
+ CP1 = prjmat.m01;\r
+ CP2 = prjmat.m02;\r
+ CP4 = prjmat.m10;\r
+ CP5 = prjmat.m11;\r
+ CP6 = prjmat.m12;\r
+ CP8 = prjmat.m20;\r
+ CP9 = prjmat.m21;\r
+ CP10 = prjmat.m22;\r
+ combo03 = CP0 * trans.x + CP1 * trans.y + CP2 * trans.z + prjmat.m03;\r
+ combo13 = CP4 * trans.x + CP5 * trans.y + CP6 * trans.z + prjmat.m13;\r
+ combo23 = CP8 * trans.x + CP9 * trans.y + CP10 * trans.z + prjmat.m23;\r
double CACA, SASA, SACA, CA, SA;\r
double CACACB, SACACB, SASACB, CASB, SASB;\r
double SACASC, SACACBSC, SACACBCC, SACACC;\r
--- /dev/null
+package jp.nyatla.nyartoolkit.core.types.matrix;\r
+\r
+/**\r
+ * Common interface for double-precision matrix types that exchange their\r
+ * contents with a flat row-major double array.\r
+ */\r
+public interface INyARDoubleMatrix\r
+{\r
+	/**\r
+	 * Sets the matrix contents from a row-major array.\r
+	 * Slow; avoid calling this frequently.\r
+	 * @param i_value\r
+	 * source array holding the matrix elements in row-major order\r
+	 */\r
+	public void setValue(double[] i_value);\r
+	/**\r
+	 * Copies the matrix contents into a row-major array.\r
+	 * Slow; avoid calling this frequently.\r
+	 * @param o_value\r
+	 * destination array that receives the matrix elements in row-major order\r
+	 */\r
+	public void getValue(double[] o_value);\r
+\r
+}\r
--- /dev/null
+package jp.nyatla.nyartoolkit.core.types.matrix;\r
+\r
+/**\r
+ * A 3x3 matrix of doubles with public element fields (m00..m22).\r
+ * Elements are exposed directly for fast access; the array-based\r
+ * accessors below exist only for bulk conversion.\r
+ */\r
+public class NyARDoubleMatrix33 implements INyARDoubleMatrix\r
+{\r
+	public double m00;\r
+	public double m01;\r
+	public double m02;\r
+	public double m10;\r
+	public double m11;\r
+	public double m12;\r
+	public double m20;\r
+	public double m21;\r
+	public double m22;\r
+	/**\r
+	 * Sets all 9 elements from a row-major array.\r
+	 * Slow; avoid calling this frequently.\r
+	 * @param i_value\r
+	 * source array; indices 0..8 are read, so it must hold at least 9 elements\r
+	 */\r
+	public void setValue(double[] i_value)\r
+	{\r
+		this.m00=i_value[0];\r
+		this.m01=i_value[1];\r
+		this.m02=i_value[2];\r
+		this.m10=i_value[3];\r
+		this.m11=i_value[4];\r
+		this.m12=i_value[5];\r
+		this.m20=i_value[6];\r
+		this.m21=i_value[7];\r
+		this.m22=i_value[8];\r
+		return;\r
+	}\r
+	/**\r
+	 * Copies all 9 elements into a row-major array.\r
+	 * Slow; avoid calling this frequently.\r
+	 * @param o_value\r
+	 * destination array; indices 0..8 are written, so it must hold at least 9 elements\r
+	 */\r
+	public void getValue(double[] o_value)\r
+	{\r
+		o_value[0]=this.m00;\r
+		o_value[1]=this.m01;\r
+		o_value[2]=this.m02;\r
+		o_value[3]=this.m10;\r
+		o_value[4]=this.m11;\r
+		o_value[5]=this.m12;\r
+		o_value[6]=this.m20;\r
+		o_value[7]=this.m21;\r
+		o_value[8]=this.m22;\r
+		return;\r
+	}\r
+}\r
--- /dev/null
+package jp.nyatla.nyartoolkit.core.types.matrix;\r
+\r
+/**\r
+ * A 3x4 matrix of doubles with public element fields (m00..m23).\r
+ * Elements are exposed directly for fast access; the array-based\r
+ * accessors below exist only for bulk conversion.\r
+ */\r
+public class NyARDoubleMatrix34 implements INyARDoubleMatrix\r
+{\r
+	public double m00;\r
+	public double m01;\r
+	public double m02;\r
+	public double m03;\r
+	public double m10;\r
+	public double m11;\r
+	public double m12;\r
+	public double m13;\r
+	public double m20;\r
+	public double m21;\r
+	public double m22;\r
+	public double m23;\r
+	/**\r
+	 * Sets all 12 elements from a row-major array.\r
+	 * Slow; avoid calling this frequently.\r
+	 * @param i_value\r
+	 * source array; indices 0..11 are read, so it must hold at least 12 elements\r
+	 */\r
+	public void setValue(double[] i_value)\r
+	{\r
+		this.m00=i_value[0];\r
+		this.m01=i_value[1];\r
+		this.m02=i_value[2];\r
+		this.m03=i_value[3];\r
+		this.m10=i_value[4];\r
+		this.m11=i_value[5];\r
+		this.m12=i_value[6];\r
+		this.m13=i_value[7];\r
+		this.m20=i_value[8];\r
+		this.m21=i_value[9];\r
+		this.m22=i_value[10];\r
+		this.m23=i_value[11];\r
+		return;\r
+	}\r
+	/**\r
+	 * Copies all 12 elements into a row-major array.\r
+	 * Slow; avoid calling this frequently.\r
+	 * @param o_value\r
+	 * destination array; indices 0..11 are written, so it must hold at least 12 elements\r
+	 */\r
+	public void getValue(double[] o_value)\r
+	{\r
+		o_value[0]=this.m00;\r
+		o_value[1]=this.m01;\r
+		o_value[2]=this.m02;\r
+		o_value[3]=this.m03;\r
+		o_value[4]=this.m10;\r
+		o_value[5]=this.m11;\r
+		o_value[6]=this.m12;\r
+		o_value[7]=this.m13;\r
+		o_value[8]=this.m20;\r
+		o_value[9]=this.m21;\r
+		o_value[10]=this.m22;\r
+		o_value[11]=this.m23;\r
+		return;\r
+	}	\r
+}\r
import jp.nyatla.nyartoolkit.NyARException;\r
import jp.nyatla.nyartoolkit.core.*;\r
import jp.nyatla.nyartoolkit.core.match.*;\r
+import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
import jp.nyatla.nyartoolkit.core.pickup.*;\r
import jp.nyatla.nyartoolkit.core.raster.*;\r
import jp.nyatla.nyartoolkit.core.raster.rgb.*;\r
import jp.nyatla.nyartoolkit.core.transmat.*;\r
import jp.nyatla.nyartoolkit.core.rasterfilter.rgb2bin.*;\r
+import jp.nyatla.nyartoolkit.core.types.*;\r
\r
class NyARDetectMarkerResult\r
{\r
*/\r
public NyARDetectMarker(NyARParam i_param, NyARCode[] i_code, double[] i_marker_width, int i_number_of_code) throws NyARException\r
{\r
+ final NyARIntSize scr_size=i_param.getScreenSize();\r
// 解析オブジェクトを作る\r
- this._square_detect = new NyARSquareDetector(i_param);\r
- this._transmat = new NyARTransMat_O2(i_param);\r
+ this._square_detect = new NyARSquareDetector(i_param.getDistortionFactor(),scr_size);\r
+ this._transmat = new NyARTransMat(i_param);\r
// 比較コードを保存\r
this._codes = i_code;\r
// 比較コードの解像度は全部同じかな?(違うとパターンを複数種つくらないといけないから)\r
// 評価器を作る。\r
this._match_patt = new NyARMatchPatt_Color_WITHOUT_PCA();\r
//2値画像バッファを作る\r
- this._bin_raster=new NyARBinRaster(i_param.getX(),i_param.getY()); \r
+ this._bin_raster=new NyARBinRaster(scr_size.w,scr_size.h); \r
}\r
\r
private NyARBinRaster _bin_raster;\r
import jp.nyatla.nyartoolkit.NyARException;\r
import jp.nyatla.nyartoolkit.core.*;\r
import jp.nyatla.nyartoolkit.core.match.*;\r
+import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
import jp.nyatla.nyartoolkit.core.pickup.*;\r
import jp.nyatla.nyartoolkit.core.raster.rgb.*;\r
import jp.nyatla.nyartoolkit.core.raster.*;\r
import jp.nyatla.nyartoolkit.core.transmat.*;\r
+import jp.nyatla.nyartoolkit.core.types.NyARIntSize;\r
import jp.nyatla.nyartoolkit.core.rasterfilter.rgb2bin.NyARRasterFilter_ARToolkitThreshold;\r
\r
/**\r
*/\r
public NyARSingleDetectMarker(NyARParam i_param, NyARCode i_code, double i_marker_width) throws NyARException\r
{\r
+ final NyARIntSize scr_size=i_param.getScreenSize(); \r
// 解析オブジェクトを作る\r
- this._square_detect = new NyARSquareDetector(i_param);\r
- this._transmat = new NyARTransMat_O2(i_param);\r
+ this._square_detect = new NyARSquareDetector(i_param.getDistortionFactor(),scr_size);\r
+ this._transmat = new NyARTransMat(i_param);\r
// 比較コードを保存\r
this._code = i_code;\r
this._marker_width = i_marker_width;\r
// 評価器を作る。\r
this._match_patt = new NyARMatchPatt_Color_WITHOUT_PCA();\r
//2値画像バッファを作る\r
- this._bin_raster=new NyARBinRaster(i_param.getX(),i_param.getY());\r
+ this._bin_raster=new NyARBinRaster(scr_size.w,scr_size.h);\r
}\r
\r
NyARBinRaster _bin_raster;\r
import java.util.*;\r
\r
import jp.nyatla.nyartoolkit.core.*;\r
-import jp.nyatla.nyartoolkit.core.raster.rgb.NyARRgbRaster_BGRA;\r
+import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
+import jp.nyatla.nyartoolkit.core.raster.rgb.*;\r
import jp.nyatla.nyartoolkit.core.transmat.*;\r
import jp.nyatla.nyartoolkit.detector.NyARSingleDetectMarker;\r
-import jp.nyatla.nyartoolkit.core.rasteranalyzer.threshold.*;\r
-import jp.nyatla.nyartoolkit.core.raster.*;\r
-import jp.nyatla.nyartoolkit.core.rasterfilter.rgb2gs.*;\r
+\r
\r
/**\r
* 320x240のBGRA32で記録されたRAWイメージから、1種類のパターンを認識し、\r
{\r
private final String code_file = "../Data/patt.hiro";\r
\r
- private final String data_file = "../Data/320x240ABGR.raw";\r
+// private final String data_file = "../Data/320x240ABGR.raw";\r
+ private final String data_file = "../Data/320x240RGB.raw";\r
\r
private final String camera_file = "../Data/camera_para.dat";\r
\r
{\r
// AR用カメラパラメタファイルをロード\r
NyARParam ap = new NyARParam();\r
- ap.loadFromARFile(camera_file);\r
- ap.changeSize(320, 240);\r
+ ap.loadARParamFromFile(camera_file);\r
+ ap.changeScreenSize(640, 480);\r
\r
// AR用のパターンコードを読み出し\r
NyARCode code = new NyARCode(16, 16);\r
FileInputStream fs = new FileInputStream(data_file);\r
byte[] buf = new byte[(int) f.length()];\r
fs.read(buf);\r
- NyARRgbRaster_BGRA ra = NyARRgbRaster_BGRA.wrap(buf, 320, 240);\r
+ INyARRgbRaster ra = NyARRgbRaster_RGB.wrap(buf, 640, 480);\r
+// INyARRgbRaster ra = NyARRgbRaster_BGRA.wrap(buf, 320, 240);\r
// Blank_Raster ra=new Blank_Raster(320, 240);\r
\r
// 1パターンのみを追跡するクラスを作成\r
NyARSingleDetectMarker ar = new NyARSingleDetectMarker(ap, code, 80.0);\r
NyARTransMatResult result_mat = new NyARTransMatResult();\r
ar.setContinueMode(false);\r
- ar.detectMarkerLite(ra, 100);\r
+ ar.detectMarkerLite(ra, 110);\r
ar.getTransmationMatrix(result_mat);\r
\r
// マーカーを検出\r
Date d2 = new Date();\r
for (int i = 0; i < 1000; i++) {\r
// 変換行列を取得\r
- ar.detectMarkerLite(ra, 100);\r
+ ar.detectMarkerLite(ra, 110);\r
ar.getTransmationMatrix(result_mat);\r
}\r
Date d = new Date();\r