<classpath>\r
<classpathentry path="" kind="src"/>\r
<classpathentry path="org.eclipse.jdt.launching.JRE_CONTAINER" kind="con"/>\r
- <classpathentry path="C:/Program Files/JMF2.1.1e/lib/jmf.jar" kind="lib"/>\r
+ <classpathentry sourcepath="/jmfapp" path="C:/Program Files/JMF2.1.1e/lib/jmf.jar" kind="lib"/>\r
<classpathentry path="C:/Program Files/Java/Java3D/1.5.1/lib/ext/j3dcore.jar" kind="lib"/>\r
<classpathentry path="C:/Program Files/Java/Java3D/1.5.1/lib/ext/j3dutils.jar" kind="lib"/>\r
<classpathentry path="C:/Program Files/Java/Java3D/1.5.1/lib/ext/vecmath.jar" kind="lib"/>\r
*/\r
public class NyARJava3D extends JFrame implements NyARSingleMarkerBehaviorListener\r
{\r
+ private static final long serialVersionUID = -8472866262481865377L;\r
+\r
private final String CARCODE_FILE = "../../Data/patt.hiro";\r
\r
private final String PARAM_FILE = "../../Data/camera_para.dat";\r
*/
public class NyarToolkitLinkTest extends Frame implements JmfCaptureListener
{
+ private static final long serialVersionUID = 6471434231970804953L;
+
private final String CARCODE_FILE = "../../Data/patt.hiro";
private final String PARAM_FILE = "../../Data/camera_para.dat";
- private JmfCameraCapture capture;
+ private JmfCaptureDevice _capture;
- private NyARSingleDetectMarker nya;
+ private NyARSingleDetectMarker _nya;
- private JmfNyARRaster_RGB raster;
+ private JmfNyARRaster_RGB _raster;
- private NyARTransMatResult trans_mat_result = new NyARTransMatResult();
+ private NyARTransMatResult _trans_mat_result = new NyARTransMatResult();
public NyarToolkitLinkTest() throws NyARException, NyARException
{
setTitle("JmfCaptureTest");
setBounds(0, 0, 320 + 64, 240 + 64);
//キャプチャの準備
- capture = new JmfCameraCapture(320, 240, 30f, JmfCameraCapture.PIXEL_FORMAT_RGB);
- capture.setCaptureListener(this);
+ JmfCaptureDeviceList devlist=new JmfCaptureDeviceList();
+ _capture=devlist.getDevice(0);
+ _capture.setCaptureFormat(JmfCaptureDevice.PIXEL_FORMAT_RGB,320, 240,15f);
+ _capture.setOnCapture(this);
//NyARToolkitの準備
NyARParam ar_param = new NyARParam();
NyARCode ar_code = new NyARCode(16, 16);
ar_param.loadARParamFromFile(PARAM_FILE);
ar_param.changeScreenSize(320, 240);
- nya = new NyARSingleDetectMarker(ar_param, ar_code, 80.0);
+ this._nya = new NyARSingleDetectMarker(ar_param, ar_code, 80.0);
ar_code.loadARPattFromFile(CARCODE_FILE);
//キャプチャイメージ用のラスタを準備
- raster = new JmfNyARRaster_RGB(320, 240);
+ this._raster = new JmfNyARRaster_RGB(320, 240);
+ return;
}
public void onUpdateBuffer(Buffer i_buffer)
{
try {
//キャプチャしたバッファをラスタにセット
- raster.setBuffer(i_buffer);
+ this._raster.setBuffer(i_buffer);
//キャプチャしたイメージを表示用に加工
BufferToImage b2i = new BufferToImage((VideoFormat) i_buffer.getFormat());
Graphics g = getGraphics();
//マーカー検出
- boolean is_marker_exist = nya.detectMarkerLite(raster, 100);
+ boolean is_marker_exist = this._nya.detectMarkerLite(this._raster, 100);
if (is_marker_exist) {
//変換行列を取得
- nya.getTransmationMatrix(this.trans_mat_result);
+ this._nya.getTransmationMatrix(this._trans_mat_result);
}
//情報を画面に書く
g.drawImage(img, 32, 32, this);
if (is_marker_exist) {
- g.drawString("マーカー検出:" + nya.getConfidence(), 32, 50);
- g.drawString("[m00]" +this.trans_mat_result.m00, 32, 50 + 16*1);
- g.drawString("[m01]" +this.trans_mat_result.m01, 32, 50 + 16*2);
- g.drawString("[m02]" +this.trans_mat_result.m02, 32, 50 + 16*3);
- g.drawString("[m03]" +this.trans_mat_result.m03, 32, 50 + 16*4);
- g.drawString("[m10]" +this.trans_mat_result.m10, 32, 50 + 16*5);
- g.drawString("[m11]" +this.trans_mat_result.m11, 32, 50 + 16*6);
- g.drawString("[m12]" +this.trans_mat_result.m12, 32, 50 + 16*7);
- g.drawString("[m13]" +this.trans_mat_result.m13, 32, 50 + 16*8);
- g.drawString("[m20]" +this.trans_mat_result.m20, 32, 50 + 16*9);
- g.drawString("[m21]" +this.trans_mat_result.m21, 32, 50 + 16*10);
- g.drawString("[m22]" +this.trans_mat_result.m22, 32, 50 + 16*11);
- g.drawString("[m23]" +this.trans_mat_result.m23, 32, 50 + 16*12);
+ g.drawString("マーカー検出:" + this._nya.getConfidence(), 32, 50);
+ g.drawString("[m00]" +this._trans_mat_result.m00, 32, 50 + 16*1);
+ g.drawString("[m01]" +this._trans_mat_result.m01, 32, 50 + 16*2);
+ g.drawString("[m02]" +this._trans_mat_result.m02, 32, 50 + 16*3);
+ g.drawString("[m03]" +this._trans_mat_result.m03, 32, 50 + 16*4);
+ g.drawString("[m10]" +this._trans_mat_result.m10, 32, 50 + 16*5);
+ g.drawString("[m11]" +this._trans_mat_result.m11, 32, 50 + 16*6);
+ g.drawString("[m12]" +this._trans_mat_result.m12, 32, 50 + 16*7);
+ g.drawString("[m13]" +this._trans_mat_result.m13, 32, 50 + 16*8);
+ g.drawString("[m20]" +this._trans_mat_result.m20, 32, 50 + 16*9);
+ g.drawString("[m21]" +this._trans_mat_result.m21, 32, 50 + 16*10);
+ g.drawString("[m22]" +this._trans_mat_result.m22, 32, 50 + 16*11);
+ g.drawString("[m23]" +this._trans_mat_result.m23, 32, 50 + 16*12);
} else {
g.drawString("マーカー未検出:", 32, 100);
}
private void startCapture()
{
try {
- capture.start();
+ this._capture.start();
} catch (Exception e) {
e.printStackTrace();
}
\r
private GLNyARRaster_RGB _cap_image;\r
\r
- private JmfCameraCapture _capture;\r
+ private JmfCaptureDevice _capture;\r
\r
private GL _gl;\r
private NyARGLUtil _glnya;\r
//NyARToolkitの準備\r
try {\r
//キャプチャの準備\r
- _capture = new JmfCameraCapture(SCREEN_X, SCREEN_Y, 15f, JmfCameraCapture.PIXEL_FORMAT_RGB);\r
- _capture.setCaptureListener(this);\r
+ JmfCaptureDeviceList devlist=new JmfCaptureDeviceList();\r
+ _capture=devlist.getDevice(0);\r
+ _capture.setCaptureFormat(JmfCaptureDevice.PIXEL_FORMAT_RGB,SCREEN_X, SCREEN_Y,15f);\r
+ _capture.setOnCapture(this);\r
//NyARToolkitの準備\r
_ar_param = new NyARParam();\r
NyARCode ar_code = new NyARCode(16, 16);\r
\r
private GLNyARRaster_RGB _cap_image;\r
\r
- private JmfCameraCapture _capture;\r
+ private JmfCaptureDevice _capture;\r
\r
private GL _gl;\r
\r
//NyARToolkitの準備\r
try {\r
//キャプチャの準備\r
- _capture = new JmfCameraCapture(SCREEN_X, SCREEN_Y, 15f, JmfCameraCapture.PIXEL_FORMAT_RGB);\r
- _capture.setCaptureListener(this);\r
+ JmfCaptureDeviceList devlist=new JmfCaptureDeviceList();\r
+ _capture=devlist.getDevice(0);\r
+ _capture.setCaptureFormat(JmfCaptureDevice.PIXEL_FORMAT_RGB,SCREEN_X, SCREEN_Y,15f);\r
+ _capture.setOnCapture(this); \r
//NyARToolkitの準備\r
_ar_param = new NyARParam();\r
_ar_param.loadARParamFromFile(PARAM_FILE);\r
*/
public class NyarToolkitLinkTest extends Frame implements QtCaptureListener
{
+ private static final long serialVersionUID = 6154831884117789648L;
+
private final String CARCODE_FILE = "../../Data/patt.hiro";
private final String PARAM_FILE = "../../Data/camera_para.dat";
*/
public class QtCaptureTest extends Frame implements QtCaptureListener
{
+ private static final long serialVersionUID = -734697739607654631L;
+
public QtCaptureTest() throws NyARException
{
setTitle("QtCaptureTest");
public void doFilter(NyARGrayscaleRaster i_input, NyARBinRaster i_output) throws NyARException\r
{\r
final NyARIntSize size = i_output.getSize();\r
- final int[][] out_buf = (int[][]) i_output.getBufferReader().getBuffer();\r
- final int[][] in_buf = (int[][]) i_input.getBufferReader().getBuffer();\r
+ final int[] out_buf = (int[]) i_output.getBufferReader().getBuffer();\r
+ final int[] in_buf = (int[]) i_input.getBufferReader().getBuffer();\r
assert (i_input.getSize().isEqualSize(i_output.getSize()) == true);\r
assert (size.h % 8 == 0 && size.w % 8 == 0);//暫定実装なので。\r
\r
sum = nn = 0;\r
for (int yy = y - area; yy < y + area + 1; yy++) {\r
for (int xx = x1 - area; xx < x1 + area; xx++) {\r
- sum += in_buf[yy][xx];\r
+ sum += in_buf[yy*size.w+xx];\r
nn++;\r
}\r
}\r
for (int x = area; x < x2; x++) {\r
if (!first) {\r
for (int yy = y - area; yy < y + area; yy++) {\r
- sum += in_buf[yy][x + area];\r
- sum -= in_buf[yy][x - area];\r
+ sum += in_buf[yy*size.w+x + area];\r
+ sum -= in_buf[yy*size.w+x - area];\r
}\r
}\r
first = false;\r
th = (sum / nn);\r
- int g = in_buf[y][x];\r
- out_buf[y][x] = th < g ? 1 : 0;\r
+ int g = in_buf[y*size.w+x];\r
+ out_buf[y*size.w+x] = th < g ? 1 : 0;\r
}\r
}\r
return;\r
int in_buf_type=in_buffer_reader.getBufferType();\r
\r
NyARIntSize size = i_output.getSize();\r
- assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_BIN_8));\r
+ assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_BIN_8));\r
assert (checkInputType(in_buf_type)==true); \r
assert (i_input.getSize().isEqualSize(size.w*2,size.h*2) == true);\r
\r
- int[][] out_buf = (int[][]) out_buffer_reader.getBuffer();\r
+ int[] out_buf = (int[]) out_buffer_reader.getBuffer();\r
byte[] in_buf = (byte[]) in_buffer_reader.getBuffer();\r
\r
switch (in_buffer_reader.getBufferType()) {\r
return;\r
}\r
\r
- private void convert24BitRgb(byte[] i_in, int[][] i_out, NyARIntSize i_size)\r
+ private void convert24BitRgb(byte[] i_in, int[] i_out, NyARIntSize i_size)\r
{\r
final int size_w=i_size.w*2;\r
final int x_mod_end= size_w-(size_w%8);\r
int x; \r
for (int y =i_size.h-1; y>=0 ; y--){\r
//端数分\r
- final int[] row_ptr=i_out[y];\r
+ final int row_ptr=y*i_size.w;\r
for (x = i_size.w-1;x>=x_mod_end;x--) {\r
w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x]=w<=th?0:1;\r
+ i_out[row_ptr+x]=w<=th?0:1;\r
bp -= 6;\r
}\r
//タイリング \r
for (;x>=0;x-=8) {\r
w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x]=w<=th?0:1;\r
+ i_out[row_ptr+x]=w<=th?0:1;\r
bp -= 6;\r
w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-1]=w<=th?0:1;\r
+ i_out[row_ptr+x-1]=w<=th?0:1;\r
bp -= 6;\r
w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-2]=w<=th?0:1;\r
+ i_out[row_ptr+x-2]=w<=th?0:1;\r
bp -= 6;\r
w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-3]=w<=th?0:1;\r
+ i_out[row_ptr+x-3]=w<=th?0:1;\r
bp -= 6;\r
w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-4]=w<=th?0:1;\r
+ i_out[row_ptr+x-4]=w<=th?0:1;\r
bp -= 6;\r
w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-5]=w<=th?0:1;\r
+ i_out[row_ptr+x-5]=w<=th?0:1;\r
bp -= 6;\r
w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-6]=w<=th?0:1;\r
+ i_out[row_ptr+x-6]=w<=th?0:1;\r
bp -= 6;\r
w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-7]=w<=th?0:1;\r
+ i_out[row_ptr+x-7]=w<=th?0:1;\r
bp -= 6;\r
}\r
//1行飛ばし\r
}\r
return;\r
}\r
- private void convert32BitRgbx(byte[] i_in, int[][] i_out, NyARIntSize i_size)\r
+ private void convert32BitRgbx(byte[] i_in, int[] i_out, NyARIntSize i_size)\r
{\r
final int size_w=i_size.w;\r
final int x_mod_end= size_w-(size_w%8);\r
int w;\r
int x;\r
for (int y =i_size.h-1; y>=0 ; y--){\r
- final int[] row_ptr=i_out[y];\r
+ final int row_ptr=y*i_size.w;\r
\r
//端数分\r
for (x = size_w-1;x>=x_mod_end;x--) {\r
w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x]=w<=th?0:1;\r
+ i_out[row_ptr+x]=w<=th?0:1;\r
bp -= 4;\r
}\r
//タイリング\r
for (;x>=0;x-=8) {\r
w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x]=w<=th?0:1;\r
+ i_out[row_ptr+x]=w<=th?0:1;\r
bp -= 4;\r
w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-1]=w<=th?0:1;\r
+ i_out[row_ptr+x-1]=w<=th?0:1;\r
bp -= 4;\r
w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-2]=w<=th?0:1;\r
+ i_out[row_ptr+x-2]=w<=th?0:1;\r
bp -= 4;\r
w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-3]=w<=th?0:1;\r
+ i_out[row_ptr+x-3]=w<=th?0:1;\r
bp -= 4;\r
w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-4]=w<=th?0:1;\r
+ i_out[row_ptr+x-4]=w<=th?0:1;\r
bp -= 4;\r
w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-5]=w<=th?0:1;\r
+ i_out[row_ptr+x-5]=w<=th?0:1;\r
bp -= 4;\r
w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-6]=w<=th?0:1;\r
+ i_out[row_ptr+x-6]=w<=th?0:1;\r
bp -= 4;\r
w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-7]=w<=th?0:1;\r
+ i_out[row_ptr+x-7]=w<=th?0:1;\r
bp -= 4;\r
} \r
}\r
this._out_image = i_destination_image;\r
\r
// NyLabelingImageのイメージ初期化(枠書き)\r
- int[][] img = (int[][]) i_destination_image.getBufferReader().getBuffer();\r
+ int[] img = (int[])i_destination_image.getBufferReader().getBuffer();\r
+ int bottom_ptr=(size.h - 1)*size.w;\r
for (int i = 0; i < size.w; i++) {\r
- img[0][i] = 0;\r
- img[size.h - 1][i] = 0;\r
+ img[i] = 0;\r
+ img[bottom_ptr+i] = 0;\r
}\r
for (int i = 0; i < size.h; i++) {\r
- img[i][0] = 0;\r
- img[i][size.w - 1] = 0;\r
+ img[i*size.w] = 0;\r
+ img[(i+1)*size.w - 1] = 0;\r
}\r
\r
// サイズ(参照値)を保存\r
this._dest_size = size;\r
+ return;\r
}\r
\r
public INyARLabelingImage getAttachedDestination()\r
\r
final int lxsize = in_size.w;// lxsize = arUtil_c.arImXsize;\r
final int lysize = in_size.h;// lysize = arUtil_c.arImYsize;\r
- int[][] label_img = (int[][]) out_image.getBufferReader().getBuffer();\r
+ int[] label_img = (int[])out_image.getBufferReader().getBuffer();\r
\r
// 枠作成はインスタンスを作った直後にやってしまう。\r
-\r
- // ラベリング情報のリセット(ラベリングインデックスを使用)\r
+ \r
+ //ラベリング情報のリセット(ラベリングインデックスを使用)\r
out_image.reset(true);\r
-\r
- int[] label_idxtbl = out_image.getIndexArray();\r
+ \r
+ int[] label_idxtbl=out_image.getIndexArray();\r
\r
int[] work2_pt;\r
int wk_max = 0;\r
\r
int label_pixel;\r
- int[][] raster_buf = (int[][]) i_raster.getBufferReader().getBuffer();\r
- int[] line_ptr;\r
+ int[] raster_buf=(int[])i_raster.getBufferReader().getBuffer();\r
+ int line_ptr;\r
int[][] work2 = this.work_holder.work2;\r
- int[] label_img_pt0, label_img_pt1;\r
+ int label_img_ptr0, label_img_ptr1;\r
for (j = 1; j < lysize - 1; j++) {// for (int j = 1; j < lysize - 1;j++, pnt += poff*2, pnt2 += 2) {\r
- line_ptr = raster_buf[j];\r
- label_img_pt0 = label_img[j];\r
- label_img_pt1 = label_img[j - 1];\r
+ line_ptr=j*lxsize;\r
+ label_img_ptr0=j*lxsize;//label_img_pt0 = label_img[j];\r
+ label_img_ptr1=label_img_ptr0-lxsize;//label_img_pt1 = label_img[j - 1];\r
for (i = 1; i < lxsize - 1; i++) {// for(int i = 1; i < lxsize-1;i++, pnt+=poff, pnt2++) {\r
// RGBの合計値が閾値より小さいかな?\r
- if (line_ptr[i] == 0) {\r
+ if (raster_buf[line_ptr+i]==0) {\r
// pnt1 = ShortPointer.wrap(pnt2, -lxsize);//pnt1 =&(pnt2[-lxsize]);\r
- if (label_img_pt1[i] > 0) {// if( *pnt1 > 0 ) {\r
- label_pixel = label_img_pt1[i];// *pnt2 = *pnt1;\r
+ if (label_img[label_img_ptr1+i] > 0) {//if (label_img_pt1[i] > 0) {// if( *pnt1 > 0 ) {\r
+ label_pixel = label_img[label_img_ptr1+i];//label_pixel = label_img_pt1[i];// *pnt2 = *pnt1;\r
\r
work2_pt = work2[label_pixel - 1];\r
work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++;\r
work2_pt[1] += i;// work2[((*pnt2)-1)*7+1] += i;\r
work2_pt[2] += j;// work2[((*pnt2)-1)*7+2] += j;\r
work2_pt[6] = j;// work2[((*pnt2)-1)*7+6] = j;\r
- } else if (label_img_pt1[i + 1] > 0) {// }else if(*(pnt1+1) > 0 ) {\r
- if (label_img_pt1[i - 1] > 0) {// if( *(pnt1-1) > 0 ) {\r
- m = label_idxtbl[label_img_pt1[i + 1] - 1];// m =work[*(pnt1+1)-1];\r
- n = label_idxtbl[label_img_pt1[i - 1] - 1];// n =work[*(pnt1-1)-1];\r
+ } else if (label_img[label_img_ptr1+i + 1] > 0) {//} else if (label_img_pt1[i + 1] > 0) {// }else if(*(pnt1+1) > 0 ) {\r
+ if (label_img[label_img_ptr1+i - 1] > 0) {//if (label_img_pt1[i - 1] > 0) {// if( *(pnt1-1) > 0 ) {\r
+ m = label_idxtbl[label_img[label_img_ptr1+i + 1] - 1];//m = label_idxtbl[label_img_pt1[i + 1] - 1];// m =work[*(pnt1+1)-1];\r
+ n = label_idxtbl[label_img[label_img_ptr1+i - 1] - 1];//n = label_idxtbl[label_img_pt1[i - 1] - 1];// n =work[*(pnt1-1)-1];\r
if (m > n) {\r
label_pixel = n;// *pnt2 = n;\r
// wk=IntPointer.wrap(work, 0);//wk =\r
work2_pt[1] += i;\r
work2_pt[2] += j;\r
work2_pt[6] = j;\r
- } else if ((label_img_pt0[i - 1]) > 0) {// }else if(*(pnt2-1) > 0) {\r
- m = label_idxtbl[(label_img_pt1[i + 1]) - 1];// m =work[*(pnt1+1)-1];\r
- n = label_idxtbl[label_img_pt0[i - 1] - 1];// n =work[*(pnt2-1)-1];\r
+ } else if ((label_img[label_img_ptr0+i - 1]) > 0) {//} else if ((label_img_pt0[i - 1]) > 0) {// }else if(*(pnt2-1) > 0) {\r
+ m = label_idxtbl[label_img[label_img_ptr1+i + 1] - 1];//m = label_idxtbl[label_img_pt1[i + 1] - 1];// m =work[*(pnt1+1)-1];\r
+ n = label_idxtbl[label_img[label_img_ptr0+i - 1] - 1];//n = label_idxtbl[label_img_pt0[i - 1] - 1];// n =work[*(pnt2-1)-1];\r
if (m > n) {\r
\r
label_pixel = n;// *pnt2 = n;\r
work2_pt[2] += j;// work2[((*pnt2)-1)*7+2] += j;\r
} else {\r
\r
- label_pixel = label_img_pt1[i + 1];// *pnt2 =\r
- // *(pnt1+1);\r
+ label_pixel = label_img[label_img_ptr1+i + 1];//label_pixel = label_img_pt1[i + 1];// *pnt2 =\r
+ // *(pnt1+1);\r
\r
work2_pt = work2[label_pixel - 1];\r
work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++;\r
work2_pt[1] += i;// work2[((*pnt2)-1)*7+1] += i;\r
work2_pt[2] += j;// work2[((*pnt2)-1)*7+2] += j;\r
if (work2_pt[3] > i) {// if(\r
- // work2[((*pnt2)-1)*7+3] >\r
- // i ){\r
+ // work2[((*pnt2)-1)*7+3] >\r
+ // i ){\r
work2_pt[3] = i;// work2[((*pnt2)-1)*7+3] = i;\r
}\r
work2_pt[6] = j;// work2[((*pnt2)-1)*7+6] = j;\r
}\r
- } else if ((label_img_pt1[i - 1]) > 0) {// }else if(\r
- // *(pnt1-1) > 0 ) {\r
- label_pixel = label_img_pt1[i - 1];// *pnt2 =\r
- // *(pnt1-1);\r
+ } else if ((label_img[label_img_ptr1+i - 1]) > 0) {//} else if ((label_img_pt1[i - 1]) > 0) {// }else if(\r
+ // *(pnt1-1) > 0 ) {\r
+ label_pixel = label_img[label_img_ptr1+i - 1];//label_pixel = label_img_pt1[i - 1];// *pnt2 =\r
+ // *(pnt1-1);\r
\r
work2_pt = work2[label_pixel - 1];\r
work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++;\r
work2_pt[4] = i;// work2[((*pnt2)-1)*7+4] = i;\r
}\r
work2_pt[6] = j;// work2[((*pnt2)-1)*7+6] = j;\r
- } else if (label_img_pt0[i - 1] > 0) {// }else if(*(pnt2-1) > 0) {\r
- label_pixel = label_img_pt0[i - 1];// *pnt2 =*(pnt2-1);\r
+ } else if (label_img[label_img_ptr0+i - 1] > 0) {//} else if (label_img_pt0[i - 1] > 0) {// }else if(*(pnt2-1) > 0) {\r
+ label_pixel = label_img[label_img_ptr0+i - 1];//label_pixel = label_img_pt0[i - 1];// *pnt2 =*(pnt2-1);\r
\r
work2_pt = work2[label_pixel - 1];\r
work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++;\r
work2_pt[5] = j;\r
work2_pt[6] = j;\r
}\r
- label_img_pt0[i] = label_pixel;\r
+ label_img[label_img_ptr0+i] = label_pixel;//label_img_pt0[i] = label_pixel;\r
} else {\r
- label_img_pt0[i] = 0;// *pnt2 = 0;\r
+ label_img[label_img_ptr0+i] = 0;//label_img_pt0[i] = 0;// *pnt2 = 0;\r
}\r
}\r
}\r
import javax.vecmath.*;\r
\r
import jp.nyatla.nyartoolkit.NyARException;\r
-import jp.nyatla.nyartoolkit.jmf.utils.JmfCameraCapture;\r
-import jp.nyatla.nyartoolkit.jmf.utils.JmfCaptureListener;\r
+import jp.nyatla.nyartoolkit.jmf.utils.*;\r
import jp.nyatla.nyartoolkit.core.*;\r
import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
import jp.nyatla.nyartoolkit.core.transmat.NyARTransMatResult;\r
*/\r
public class NyARSingleMarkerBehaviorHolder implements JmfCaptureListener\r
{\r
- private NyARParam cparam;\r
+ private NyARParam _cparam;\r
\r
- private JmfCameraCapture capture;\r
+ private JmfCaptureDevice _capture;\r
\r
- private J3dNyARRaster_RGB nya_raster;//最大3スレッドで共有されるので、排他制御かけること。\r
+ private J3dNyARRaster_RGB _nya_raster;//最大3スレッドで共有されるので、排他制御かけること。\r
\r
- private NyARSingleDetectMarker nya;\r
+ private NyARSingleDetectMarker _nya;\r
\r
//Behaviorホルダ\r
- private NyARBehavior nya_behavior;\r
+ private NyARBehavior _nya_behavior;\r
\r
public NyARSingleMarkerBehaviorHolder(NyARParam i_cparam, float i_rate, NyARCode i_ar_code, double i_marker_width) throws NyARException\r
{\r
- nya_behavior = null;\r
+ this._nya_behavior = null;\r
final NyARIntSize scr_size = i_cparam.getScreenSize();\r
- cparam = i_cparam;\r
- capture = new JmfCameraCapture(scr_size.w, scr_size.h, i_rate, JmfCameraCapture.PIXEL_FORMAT_RGB);\r
- capture.setCaptureListener(this);\r
- nya_raster = new J3dNyARRaster_RGB(cparam);\r
- nya = new NyARSingleDetectMarker(cparam, i_ar_code, i_marker_width);\r
- nya_behavior = new NyARBehavior(nya, nya_raster, i_rate);\r
+ this._cparam = i_cparam;\r
+ //キャプチャの準備\r
+ JmfCaptureDeviceList devlist=new JmfCaptureDeviceList();\r
+ this._capture=devlist.getDevice(0);\r
+ this._capture.setCaptureFormat(JmfCaptureDevice.PIXEL_FORMAT_RGB,scr_size.w, scr_size.h,15f);\r
+ this._capture.setOnCapture(this); \r
+ this._nya_raster = new J3dNyARRaster_RGB(this._cparam);\r
+ this._nya = new NyARSingleDetectMarker(this._cparam, i_ar_code, i_marker_width);\r
+ this._nya_behavior = new NyARBehavior(this._nya, this._nya_raster, i_rate);\r
}\r
\r
public Behavior getBehavior()\r
{\r
- return nya_behavior;\r
+ return this._nya_behavior;\r
}\r
\r
/**\r
public void setBackGround(Background i_back_ground)\r
{\r
//コール先で排他制御\r
- nya_behavior.setRelatedBackGround(i_back_ground);\r
+ this._nya_behavior.setRelatedBackGround(i_back_ground);\r
}\r
\r
/**\r
public void setTransformGroup(TransformGroup i_trgroup)\r
{\r
//コール先で排他制御\r
- nya_behavior.setRelatedTransformGroup(i_trgroup);\r
+ this._nya_behavior.setRelatedTransformGroup(i_trgroup);\r
}\r
\r
/**\r
public void setUpdateListener(NyARSingleMarkerBehaviorListener i_listener)\r
{\r
//コール先で排他制御\r
- nya_behavior.setUpdateListener(i_listener);\r
+ this._nya_behavior.setUpdateListener(i_listener);\r
}\r
\r
/**\r
public void onUpdateBuffer(Buffer i_buffer)\r
{\r
try {\r
- synchronized (nya_raster) {\r
- nya_raster.setBuffer(i_buffer);\r
+ synchronized (this._nya_raster) {\r
+ this._nya_raster.setBuffer(i_buffer);\r
}\r
} catch (Exception e) {\r
e.printStackTrace();\r
public void start() throws NyARException\r
{\r
//開始\r
- capture.start();\r
+ this._capture.start();\r
}\r
\r
public void stop()\r
{\r
- capture.stop();\r
+ this._capture.stop();\r
}\r
}\r
\r
import javax.media.protocol.DataSource;\r
\r
import jp.nyatla.nyartoolkit.NyARException;\r
+\r
/**\r
* 簡易JMFキャプチャクラス\r
- * @author atla\r
+ * @deprecated このクラスは近い将来削除します。\r
+ * JmfCaptureDeviceList/JmfCaptureDeviceを使用してください。\r
+ * {@link #JmfCaptureDeviceList()}\r
*\r
*/\r
-public class JmfCameraCapture\r
+@Deprecated public class JmfCameraCapture\r
{\r
private Dimension image_size;\r
\r
\r
public void setCaptureListener(JmfCaptureListener i_listener) throws NyARException\r
{\r
- if (jmf_processor != null) {\r
+ if (jmf_processor != null)\r
+ {\r
throw new NyARException();\r
}\r
capture_listener = i_listener;\r
--- /dev/null
+package jp.nyatla.nyartoolkit.jmf.utils;\r
+\r
+import java.awt.Dimension;\r
+\r
+import javax.media.CaptureDeviceInfo;\r
+import javax.media.Format;\r
+import javax.media.IncompatibleSourceException;\r
+import javax.media.Manager;\r
+import javax.media.MediaLocator;\r
+import javax.media.Processor;\r
+import javax.media.ProcessorModel;\r
+import javax.media.control.FormatControl;\r
+import javax.media.format.VideoFormat;\r
+import javax.media.protocol.CaptureDevice;\r
+import javax.media.protocol.DataSource;\r
+\r
+import jp.nyatla.nyartoolkit.NyARException;\r
+\r
+/**\r
+ * A single JMF video capture device.\r
+ * Wraps a javax.media.CaptureDeviceInfo and, on start(), builds a JMF Processor\r
+ * whose frames are delivered to a JmfCaptureListener through a MonitorStream.\r
+ * Obtain instances from JmfCaptureDeviceList.getDevice(int).\r
+ * NOTE(review): not thread-safe — configure and start/stop from one thread.\r
+ */\r
+public class JmfCaptureDevice\r
+{\r
+ // Listener that receives captured frame buffers; set via setOnCapture().\r
+ private JmfCaptureListener _capture_listener;\r
+ // Monitoring stream wrapper that forwards frames to the listener.\r
+ private MonitorStream _jmf_monitor_stream;\r
+ // Non-null only while capturing; also used as the "already started" flag.\r
+ private Processor _jmf_processor;\r
+ // Device descriptor this instance wraps.\r
+ private CaptureDeviceInfo _info;\r
+ // Currently selected capture format; null until one of the setCaptureFormat() calls succeeds.\r
+ private Format _capture_format;\r
+ // Encoding names accepted by setCaptureFormat(String, ...).\r
+ public static final String PIXEL_FORMAT_RGB = "RGB";\r
+ public static final String PIXEL_FORMAT_YUV = "YUV";\r
+ \r
+\r
+ /**\r
+ * Creates a device wrapper for the given JMF device descriptor.\r
+ * No capture format is selected yet.\r
+ * @param i_capinfo\r
+ * the JMF capture device descriptor to wrap.\r
+ * @throws NyARException\r
+ */\r
+ public JmfCaptureDevice(CaptureDeviceInfo i_capinfo) throws NyARException\r
+ {\r
+ this._info = i_capinfo;\r
+ this._capture_format = null;\r
+ return;\r
+ }\r
+\r
+ /**\r
+ * Returns all formats this device advertises.\r
+ */\r
+ public Format[] getSupportFormats()\r
+ {\r
+ return this._info.getFormats();\r
+ }\r
+\r
+ /**\r
+ * Returns the currently selected capture format, or null if none was set.\r
+ */\r
+ public final Format getCaptureFormat()\r
+ {\r
+ return this._capture_format;\r
+ }\r
+\r
+ /**\r
+ * Selects the i_index-th of this device's supported formats as the capture format.\r
+ * @param i_index\r
+ * index into the array returned by getSupportFormats().\r
+ */\r
+ public void setCaptureFormat(int i_index)\r
+ {\r
+ this._capture_format = this._info.getFormats()[i_index];\r
+ return;\r
+ }\r
+ /**\r
+ * Tries to select a capture format matching the given encoding, size and rate.\r
+ * @param i_encode\r
+ * desired pixel encoding; use one of the PIXEL_FORMAT_* constants of this class.\r
+ * @param i_size\r
+ * desired capture image size.\r
+ * @param i_rate\r
+ * desired capture rate in frames per second.\r
+ * @return\r
+ * true if a matching device format was found and selected; false otherwise.\r
+ * @throws NyARException\r
+ * if capture has already been started.\r
+ */ \r
+ public boolean setCaptureFormat(String i_encode, Dimension i_size, float i_rate) throws NyARException\r
+ {\r
+ if (this._jmf_processor != null) {\r
+ throw new NyARException();\r
+ }\r
+ Format[] formats = this._info.getFormats();\r
+ Format f = new VideoFormat(i_encode, i_size, Format.NOT_SPECIFIED, null, i_rate);\r
+ for (int i = 0; i < formats.length; i++) {\r
+ if (formats[i].matches(f)) {\r
+ // Narrow the request to the device's concrete matching format.\r
+ f = formats[i].intersects(f);\r
+ this._capture_format = null;\r
+ this._capture_format = f;\r
+ return true;\r
+ }\r
+ }\r
+ // No matching format found.\r
+ return false;\r
+ }\r
+ /**\r
+ * Convenience overload of setCaptureFormat(String, Dimension, float) taking width/height ints.\r
+ */\r
+ public boolean setCaptureFormat(String i_encode,int i_size_x,int i_size_y, float i_rate) throws NyARException\r
+ {\r
+ return setCaptureFormat(i_encode,new Dimension(i_size_x,i_size_y),i_rate);\r
+ }\r
+ /**\r
+ * Sets the listener that receives image capture events.\r
+ * @param i_listener\r
+ * the listener class to notify.\r
+ * @throws NyARException\r
+ * if capture has already been started.\r
+ */\r
+ public void setOnCapture(JmfCaptureListener i_listener) throws NyARException\r
+ {\r
+ if (this._jmf_processor != null) {\r
+ throw new NyARException();\r
+ }\r
+ this._capture_listener = i_listener;\r
+ return;\r
+ }\r
+ /**\r
+ * Starts capturing. Until stop() is called, the listener set by setOnCapture()\r
+ * is notified with captured images in the configured format.\r
+ * @throws NyARException\r
+ * if already started, or if any JMF setup step fails.\r
+ */\r
+ public void start() throws NyARException\r
+ {\r
+ // Error if capture was already started.\r
+ if (this._jmf_processor != null) {\r
+ throw new NyARException();\r
+ }\r
+ DataSource ds;\r
+ final MediaLocator ml = this._info.getLocator();\r
+ try {\r
+ ds = Manager.createDataSource(ml);\r
+ ds.connect();\r
+ // Apply the selected capture format to the device here.\r
+ if (ds instanceof CaptureDevice) {\r
+ FormatControl[] fcs = ((CaptureDevice) ds).getFormatControls();\r
+ if (fcs.length < 1) {\r
+ // NOTE(review): silently gives up when no format controls exist —\r
+ // the processor is never created and no error is reported. Confirm intended.\r
+ return;\r
+ }\r
+ FormatControl fc = fcs[0];\r
+ fc.setFormat(this._capture_format);\r
+ }\r
+ } catch (Exception e) {\r
+ throw new NyARException(e);\r
+ }\r
+ try{\r
+ if(ds==null){\r
+ //Merge the data sources, if both audio and video are available\r
+ // NOTE(review): this branch appears unreachable — createDataSource either\r
+ // throws or returns non-null; confirm before relying on it.\r
+ ds = Manager.createMergingDataSource(new DataSource[] { null }); \r
+ }else{\r
+ // Create the monitoring datasource wrapper\r
+ ds = new MonitorCDS(ds); \r
+ }\r
+ }catch(IncompatibleSourceException e){\r
+ throw new NyARException(e);\r
+ }\r
+ \r
+ // Data source is ready; build and start the realized processor.\r
+ try {\r
+ // Merge the data sources, if both audio and video are available\r
+ VideoFormat[] formats = new VideoFormat[] { new VideoFormat(null) };\r
+ ProcessorModel pm = new ProcessorModel(ds, formats, null);// ,\r
+ Processor processor;\r
+ processor = Manager.createRealizedProcessor(pm);\r
+ this._jmf_monitor_stream = (MonitorStream) ds.getControl("jmfsample.MonitorStream");\r
+ this._jmf_monitor_stream.setCaptureListener(this._capture_listener);\r
+ this._jmf_processor = processor;\r
+ this._jmf_processor.start();\r
+ } catch (Exception e) {\r
+ ds.disconnect();\r
+ throw new NyARException(e);\r
+ }\r
+ return;\r
+ }\r
+\r
+ /**\r
+ * Stops capturing and releases the processor.\r
+ * NOTE(review): throws NullPointerException if called before start() — confirm intended.\r
+ */\r
+ public void stop()\r
+ {\r
+ this._jmf_processor.stop();\r
+ this._jmf_processor.close();\r
+ this._jmf_processor = null;\r
+ return;\r
+ }\r
+ // Last-resort cleanup if the caller forgot to stop(); mirrors stop() but null-checks first.\r
+ protected void finalize()\r
+ {\r
+ if (this._jmf_processor != null) {\r
+ this._jmf_processor.stop();\r
+ this._jmf_processor.close();\r
+ this._jmf_processor = null;\r
+ }\r
+ return;\r
+ }\r
+}
\ No newline at end of file
--- /dev/null
+package jp.nyatla.nyartoolkit.jmf.utils;\r
+\r
+import java.awt.Dimension;\r
+import java.util.Vector;\r
+\r
+import javax.media.CaptureDeviceInfo;\r
+import javax.media.CaptureDeviceManager;\r
+import javax.media.Format;\r
+import javax.media.format.*;\r
+import jp.nyatla.nyartoolkit.*;\r
+\r
+\r
+\r
+/**\r
+ * Holds the list of JMF capture devices that provide video.\r
+ * In environments with multiple connected cameras, only the first recognized\r
+ * camera becomes active, so in practice this class can recognize just one camera.\r
+ *\r
+ */\r
+public class JmfCaptureDeviceList\r
+{\r
+ // Filtered copy of the JMF device list; holds only video-capable CaptureDeviceInfo entries.\r
+ private Vector _devices;\r
+\r
+ /**\r
+ * Queries JMF for all capture devices and keeps only those exposing a VideoFormat.\r
+ * @throws NyARException\r
+ * wraps any failure while filtering the device list.\r
+ */\r
+ public JmfCaptureDeviceList() throws NyARException\r
+ {\r
+ this._devices = (Vector) CaptureDeviceManager.getDeviceList(null).clone();\r
+ // Keep only devices that provide a video source.\r
+ try {\r
+\r
+ for (int i = 0; i < this._devices.size();) {\r
+ CaptureDeviceInfo cdi = (CaptureDeviceInfo) this._devices.elementAt(i);\r
+ // Does this device expose a VideoFormat?\r
+ if (!isCaptureDevice(cdi)) {\r
+ this._devices.remove(i);\r
+ continue;\r
+ }\r
+ i++;\r
+ }\r
+ } catch (Exception e) {\r
+ throw new NyARException(e);\r
+ }\r
+ return;\r
+ }\r
+\r
+ /**\r
+ * Checks (roughly) whether i_cdi is a video capture device,\r
+ * i.e. whether any of its formats is a VideoFormat.\r
+ * \r
+ * @param i_cdi\r
+ * @return\r
+ */\r
+ private static boolean isCaptureDevice(CaptureDeviceInfo i_cdi)\r
+ {\r
+ Format[] fms = i_cdi.getFormats();\r
+ for (int i = 0; i < fms.length; i++) {\r
+ Format f = fms[i];\r
+ if (f instanceof VideoFormat) {\r
+ return true;\r
+ }\r
+ }\r
+ return false;\r
+ }\r
+\r
+ /**\r
+ * Returns the number of usable capture devices.\r
+ * @return\r
+ */\r
+ public int getCount()\r
+ {\r
+ return this._devices.size();\r
+ }\r
+ /**\r
+ * Returns the i_index-th capture device, wrapped in a new JmfCaptureDevice.\r
+ * @param i_index\r
+ * @return\r
+ * @throws NyARException\r
+ */\r
+ public JmfCaptureDevice getDevice(int i_index) throws NyARException\r
+ {\r
+ return new JmfCaptureDevice((CaptureDeviceInfo) this._devices.elementAt(i_index));\r
+ }\r
+\r
+ public static void main(String[] args)\r
+ {\r
+ // Ad-hoc test case: enumerate devices and start/stop the first one.\r
+ try {\r
+ JmfCaptureDeviceList j = new JmfCaptureDeviceList();\r
+ System.out.println(j.getCount());\r
+ JmfCaptureDevice d = j.getDevice(0);\r
+ d.setCaptureFormat("YUV", new Dimension(320, 240), 15.0f);\r
+// YUVFormat f=(YUVFormat)d.getCaptureFormat();\r
+ d.start();\r
+ d.stop();\r
+\r
+ } catch (Exception e) {\r
+ e.printStackTrace();\r
+ }\r
+ return;\r
+ }\r
+\r
+}\r
*/\r
public interface JmfCaptureListener{\r
/** Called for each captured frame; i_buffer holds the frame data in the configured format. */\r
public void onUpdateBuffer(Buffer i_buffer);\r
- \r
}
\ No newline at end of file
*/
package jp.nyatla.nyartoolkit.jmf.utils;
-
import javax.media.*;
import javax.media.protocol.*;
import javax.media.control.*;
import java.io.IOException;
+public class MonitorCDS extends PushBufferDataSource
+{
+
+ private PushBufferDataSource delegate = null;
+
+ private PushBufferStream[] delStreams = null;
+
+ private MonitorStream monitorStream = null;
+
+ private PushBufferStream[] monitorStreams = null;
+
+ boolean delStarted = false; // variable used by MonitorStream also
+
+ private Control[] controls;
+
+ public MonitorCDS(DataSource ds)
+ {
+ // Get the stream from the actual datasource
+ // and create a MonitorStream from it
+ // Export the MonitorControl interface of the MonitorStream
+ if (ds instanceof PushBufferDataSource) {
+ delegate = (PushBufferDataSource) ds;
+ delStreams = delegate.getStreams();
+ monitorStream = new MonitorStream(delStreams[0], this);
+ monitorStreams = new PushBufferStream[] { monitorStream };
+ }
+ }
+
+ public Object[] getControls()
+ {
+ return controls;
+ }
+
+ public Object getControl(String value)
+ {
+ if (value.equals("jmfsample.MonitorStream") || value.equals("javax.media.control.MonitorControl")){
+ return monitorStream;
+ }else{
+ return null;
+ }
+ }
+
+ public javax.media.CaptureDeviceInfo getCaptureDeviceInfo()
+ {
+ return ((CaptureDevice) delegate).getCaptureDeviceInfo();
+ }
+
+ public FormatControl[] getFormatControls()
+ {
+ return ((CaptureDevice) delegate).getFormatControls();
+ }
+
+ public String getContentType()
+ {
+ return delegate.getContentType();
+ }
+ public void connect() throws IOException
+ {
+ if (delegate == null)
+ throw new IOException("Incompatible DataSource");
+ // Delegate is already connected
+ }
-public class MonitorCDS extends PushBufferDataSource{
+ public void disconnect()
+ {
+ monitorStream.setEnabled(false);
+ delegate.disconnect();
+ }
- private PushBufferDataSource delegate = null;
- private PushBufferStream [] delStreams = null;
- private MonitorStream monitorStream = null;
- private PushBufferStream [] monitorStreams = null;
- boolean delStarted = false; // variable used by MonitorStream also
- private Control [] controls;
+ public synchronized void start() throws IOException
+ {
+ startDelegate();
+ delStarted = true;
+ }
- public MonitorCDS(DataSource ds)
- {
- // Get the stream from the actual datasource
- // and create a MonitorStream from it
- // Export the MonitorControl interface of the MonitorStream
- if (ds instanceof PushBufferDataSource)
+ public synchronized void stop() throws IOException
{
- delegate = (PushBufferDataSource) ds;
- delStreams = delegate.getStreams();
- monitorStream = new MonitorStream(delStreams[0], this);
- monitorStreams = new PushBufferStream[] {monitorStream};
+ if (!monitorStream.isEnabled()) {
+ stopDelegate();
+ }
+ delStarted = false;
}
- }
-
- public Object [] getControls()
- {
- return controls;
- }
-
- public Object getControl(String value) {
- if (value.equals("jmfsample.MonitorStream") || value.equals("javax.media.control.MonitorControl"))
- return monitorStream;
- else
- return null;
- }
-
- public javax.media.CaptureDeviceInfo getCaptureDeviceInfo()
- {
- return ((CaptureDevice)delegate).getCaptureDeviceInfo();
- }
-
- public FormatControl[] getFormatControls()
- {
- return ((CaptureDevice)delegate).getFormatControls();
- }
-
- public String getContentType()
- {
- return delegate.getContentType();
- }
-
- public void connect() throws IOException
- {
- if (delegate == null)
- throw new IOException("Incompatible DataSource");
- // Delegate is already connected
- }
-
- public void disconnect()
- {
- monitorStream.setEnabled(false);
- delegate.disconnect();
- }
-
- public synchronized void start() throws IOException
- {
- startDelegate();
- delStarted = true;
- }
-
- public synchronized void stop() throws IOException
- {
- if (!monitorStream.isEnabled()) {
- stopDelegate();
+
+ public Time getDuration()
+ {
+ return delegate.getDuration();
+ }
+
+ public PushBufferStream[] getStreams()
+ {
+ return monitorStreams;
+ }
+
+ void startDelegate() throws IOException
+ {
+ delegate.start();
+ }
+
+ void stopDelegate() throws IOException
+ {
+ delegate.stop();
}
- delStarted = false;
- }
-
- public Time getDuration()
- {
- return delegate.getDuration();
- }
-
- public PushBufferStream [] getStreams()
- {
- return monitorStreams;
- }
-
- void startDelegate() throws IOException
- {
- delegate.start();
- }
-
- void stopDelegate() throws IOException
- {
- delegate.stop();
- }
}
*/
package jp.nyatla.nyartoolkit.jmf.utils;
-
-
import javax.media.*;
import javax.media.protocol.*;
import java.io.IOException;
import java.awt.*;
-public class MonitorStream implements PushBufferStream, BufferTransferHandler {
-
- JmfCaptureListener img_listener;
- PushBufferStream actual = null;
- boolean dataAvailable = false;
- boolean terminate = false;
- boolean enabled = false;
- Object bufferLock = new Object();
- Buffer cbuffer = new Buffer();
- BufferTransferHandler transferHandler = null;
- Component component = null;
- MonitorCDS cds;
- BufferToImage bti = null;
-
- MonitorStream(PushBufferStream actual, MonitorCDS cds) {
- this.actual = actual;
- actual.setTransferHandler(this);
- this.cds = cds;
- }
-
- public javax.media.Format getFormat()
- {
- return actual.getFormat();
- }
- /**
- * 非同期READ
- */
- public void read(Buffer buffer) throws IOException
- {
- // Wait for data to be available
- // Doesn't get used much because the transferData
- // call is made when data IS available. And most
- // Processors/Players read the data in the same
- // thread that called transferData, although that's
- // not a safe assumption to make
- if (!dataAvailable) {
- synchronized (bufferLock) {
- while (!dataAvailable && !terminate) {
- try {
- bufferLock.wait(100);
- } catch (InterruptedException ie) {
- }
- }
- }
- }
+public class MonitorStream implements PushBufferStream, BufferTransferHandler
+{
+
+ JmfCaptureListener img_listener;
+
+ PushBufferStream actual = null;
+
+ boolean dataAvailable = false;
+
+ boolean terminate = false;
+
+ boolean enabled = false;
- if (dataAvailable) {
- synchronized (bufferLock) {
- // Copy the buffer attributes, but swap the data
- // attributes so that no extra copy is made.
- buffer.copy(cbuffer, true);
- //dataAvailable = false;
- }
+ Object bufferLock = new Object();
+
+ Buffer cbuffer = new Buffer();
+
+ BufferTransferHandler transferHandler = null;
+
+ Component component = null;
+
+ MonitorCDS cds;
+
+ BufferToImage bti = null;
+
+ MonitorStream(PushBufferStream actual, MonitorCDS cds)
+ {
+ this.actual = actual;
+ actual.setTransferHandler(this);
+ this.cds = cds;
}
-// return;
- }
- public void setCaptureListener(JmfCaptureListener i_listener)
- {
- img_listener=i_listener;
- }
-
- public void transferData(PushBufferStream pbs)
- {
- // Get the data from the original source stream
- synchronized (bufferLock) {
- try {
- pbs.read(cbuffer);
- } catch (IOException ioe) {
- return;
- }
- dataAvailable = true;
- bufferLock.notifyAll();
+
+ public javax.media.Format getFormat()
+ {
+ return actual.getFormat();
}
- if(img_listener!=null){
- img_listener.onUpdateBuffer(cbuffer);
+
+ /**
+ * 非同期READ
+ */
+ public void read(Buffer buffer) throws IOException
+ {
+ // Wait for data to be available
+ // Doesn't get used much because the transferData
+ // call is made when data IS available. And most
+ // Processors/Players read the data in the same
+ // thread that called transferData, although that's
+ // not a safe assumption to make
+ if (!dataAvailable) {
+ synchronized (bufferLock) {
+ while (!dataAvailable && !terminate) {
+ try {
+ bufferLock.wait(100);
+ } catch (InterruptedException ie) {
+ }
+ }
+ }
+ }
+
+ if (dataAvailable) {
+ synchronized (bufferLock) {
+ // Copy the buffer attributes, but swap the data
+ // attributes so that no extra copy is made.
+ buffer.copy(cbuffer, true);
+ //dataAvailable = false;
+ }
+ }
+ // return;
}
-
-/*
- // Display data if monitor is active
- if (isEnabled()) {
- if (bti == null) {
- VideoFormat vf = (VideoFormat) cbuffer.getFormat();
- bti = new BufferToImage(vf);
- }
- if (bti != null && component != null) {
- Image im = bti.createImage(cbuffer);
- Graphics g = component.getGraphics();
- Dimension size = component.getSize();
- if (g != null)
- g.drawImage(im, 0, 0, component);
- }
+
+ public void setCaptureListener(JmfCaptureListener i_listener)
+ {
+ img_listener = i_listener;
}
-*/
- // Maybe synchronize this with setTransferHandler() ?
- if (transferHandler != null && cds.delStarted)
- transferHandler.transferData(this);
- }
-
- public void setTransferHandler(BufferTransferHandler transferHandler) {
- this.transferHandler = transferHandler;
- }
-
- public boolean setEnabled(boolean value) {
- enabled = value;
- if (value == false) {
- if (!cds.delStarted) {
- try {
- cds.stopDelegate();
- } catch (IOException ioe) {
+
+ public void transferData(PushBufferStream pbs)
+ {
+ // Get the data from the original source stream
+ synchronized (bufferLock) {
+ try {
+ pbs.read(cbuffer);
+ } catch (IOException ioe) {
+ return;
+ }
+ dataAvailable = true;
+ bufferLock.notifyAll();
}
- }
- } else {
- // Start the capture datasource if the monitor is enabled
- try {
- cds.startDelegate();
- }catch (IOException ioe) {
- }
+ if (img_listener != null) {
+ img_listener.onUpdateBuffer(cbuffer);
+ }
+
+ /*
+ // Display data if monitor is active
+ if (isEnabled()) {
+ if (bti == null) {
+ VideoFormat vf = (VideoFormat) cbuffer.getFormat();
+ bti = new BufferToImage(vf);
+ }
+ if (bti != null && component != null) {
+ Image im = bti.createImage(cbuffer);
+ Graphics g = component.getGraphics();
+ Dimension size = component.getSize();
+ if (g != null)
+ g.drawImage(im, 0, 0, component);
+ }
+ }
+ */
+ // Maybe synchronize this with setTransferHandler() ?
+ if (transferHandler != null && cds.delStarted)
+ transferHandler.transferData(this);
}
- return enabled;
- }
- public boolean isEnabled()
- {
- return enabled;
- }
+ public void setTransferHandler(BufferTransferHandler transferHandler)
+ {
+ this.transferHandler = transferHandler;
+ }
+ public boolean setEnabled(boolean value)
+ {
+ enabled = value;
+ if (value == false) {
+ if (!cds.delStarted) {
+ try {
+ cds.stopDelegate();
+ } catch (IOException ioe) {
+ }
+ }
+ } else {
+ // Start the capture datasource if the monitor is enabled
+ try {
+ cds.startDelegate();
+ } catch (IOException ioe) {
+ }
+ }
+ return enabled;
+ }
+ public boolean isEnabled()
+ {
+ return enabled;
+ }
- public float setPreviewFrameRate(float rate)
- {
- System.err.println("TODO");
- return rate;
- }
-
- public ContentDescriptor getContentDescriptor()
- {
- return actual.getContentDescriptor();
- }
+ public float setPreviewFrameRate(float rate)
+ {
+ System.err.println("TODO");
+ return rate;
+ }
- public long getContentLength()
- {
- return actual.getContentLength();
- }
+ public ContentDescriptor getContentDescriptor()
+ {
+ return actual.getContentDescriptor();
+ }
- public boolean endOfStream() {
- return actual.endOfStream();
- }
+ public long getContentLength()
+ {
+ return actual.getContentLength();
+ }
- public Object [] getControls() {
- return new Object[0];
- }
+ public boolean endOfStream()
+ {
+ return actual.endOfStream();
+ }
- public Object getControl(String str) {
- return null;
- }
+ public Object[] getControls()
+ {
+ return new Object[0];
+ }
+ public Object getControl(String str)
+ {
+ return null;
+ }
}
*/
public class JmfCaptureTest extends Frame implements JmfCaptureListener
{
+ private static final long serialVersionUID = -2110888320986446576L;
+ private JmfCaptureDevice _capture;
public JmfCaptureTest() throws NyARException
{
setTitle("JmfCaptureTest");
setBounds(0, 0, 320 + 64, 240 + 64);
- capture = new JmfCameraCapture(320, 240, 30f, JmfCameraCapture.PIXEL_FORMAT_RGB);
- capture.setCaptureListener(this);
+ JmfCaptureDeviceList dl=new JmfCaptureDeviceList();
+ this._capture=dl.getDevice(0);
+ this._capture.setOnCapture(this);
}
- private JmfCameraCapture capture;
public void onUpdateBuffer(Buffer i_buffer)
{
private void startCapture()
{
try {
- capture.start();
+ this._capture.start();
} catch (Exception e) {
e.printStackTrace();
}
if (i_is_reverse) {\r
final int length = this._size.w * 3;\r
int src_idx = 0;\r
- int dest_idx = (this._size.h - 1) * length; \r
- for (int i = 0; i < this._size.h; i++) {\r
+ int dest_idx = (this._size.h - 1) * length;\r
+ for (int i = 0; i < this._size.h; i++){\r
System.arraycopy(src_buf, src_idx, this._gl_buf, dest_idx, length);\r
src_idx += length;\r
dest_idx -= length;\r
*/\r
public final void detectMarker(NyARBinRaster i_raster, NyARSquareStack o_square_stack) throws NyARException\r
{\r
- final INyARLabeling labeling_proc = this._labeling;\r
final NyARLabelingImage limage = this._limage;\r
\r
// 初期化\r
o_square_stack.clear();\r
\r
// ラベリング\r
- labeling_proc.labeling(i_raster);\r
+ this._labeling.labeling(i_raster);\r
\r
// ラベル数が0ならここまで\r
final int label_num = limage.getLabelStack().getLength();\r
public class NyARLabelingImage extends NyARRaster_BasicClass implements INyARLabelingImage\r
{\r
private final static int MAX_LABELS = 1024*32; \r
- protected int[][] _ref_buf;\r
+ protected int[] _ref_buf;\r
private INyARBufferReader _buffer_reader;\r
protected NyARLabelingLabelStack _label_list;\r
protected int[] _index_table;\r
public NyARLabelingImage(int i_width, int i_height)\r
{\r
super(new NyARIntSize(i_width,i_height));\r
- this._ref_buf =new int[i_height][i_width];\r
+ this._ref_buf =new int[i_height*i_width];\r
this._label_list = new NyARLabelingLabelStack(MAX_LABELS);\r
this._index_table=new int[MAX_LABELS];\r
this._is_index_table_enable=false;\r
- this._buffer_reader=new NyARBufferReader(this._ref_buf,INyARBufferReader.BUFFERFORMAT_INT2D);\r
+ this._buffer_reader=new NyARBufferReader(this._ref_buf,INyARBufferReader.BUFFERFORMAT_INT1D);\r
\r
return;\r
}\r
*/\r
protected int getTopClipTangentX(NyARLabelingLabel i_label) throws NyARException\r
{\r
- int w;\r
+ int pix;\r
int i_label_id=i_label.id;\r
int[] index_table=this._index_table;\r
- int[] limage_j=this._ref_buf[i_label.clip_t];\r
+ int[] limage=this._ref_buf;\r
+ int limage_ptr=i_label.clip_t*this._size.w;\r
final int clip1 = i_label.clip_r;\r
// p1=ShortPointer.wrap(limage,j*xsize+clip.get());//p1 =&(limage[j*xsize+clip[0]]);\r
for (int i = i_label.clip_l; i <= clip1; i++) {// for( i = clip[0]; i <=clip[1]; i++, p1++ ) {\r
- w = limage_j[i];\r
- if (w > 0 && index_table[w-1] == i_label_id){\r
+ pix = limage[limage_ptr+i];\r
+ if (pix > 0 && index_table[pix-1] == i_label_id){\r
return i;\r
}\r
}\r
*/\r
public int getContour(int i_index,int i_array_size,int[] o_coord_x,int[] o_coord_y) throws NyARException\r
{\r
+ final int width=this._size.w;\r
final int[] xdir = this._getContour_xdir;// static int xdir[8] = { 0,1, 1, 1, 0,-1,-1,-1};\r
final int[] ydir = this._getContour_ydir;// static int ydir[8] = {-1,-1,0, 1, 1, 1, 0,-1};\r
final NyARLabelingLabel label=(NyARLabelingLabel)this._label_list.getItem(i_index); \r
o_coord_y[0] = sy;\r
int dir = 5;\r
\r
- int[][] limage=this._ref_buf;\r
+ int[] limage=this._ref_buf;\r
int c = o_coord_x[0];\r
int r = o_coord_y[0];\r
for (;;) {\r
dir = (dir + 5) % 8;\r
for (i = 0; i < 8; i++) {\r
- if (limage[r + ydir[dir]][c + xdir[dir]] > 0) {\r
+ if (limage[(r + ydir[dir])*width+(c + xdir[dir])] > 0) {\r
break;\r
}\r
dir = (dir + 1) % 8;\r
*/\r
package jp.nyatla.nyartoolkit.core.labeling;\r
\r
-\r
-\r
import jp.nyatla.nyartoolkit.NyARException;\r
import jp.nyatla.nyartoolkit.core.raster.*;\r
import jp.nyatla.nyartoolkit.core.types.*;\r
public class NyARLabeling_ARToolKit implements INyARLabeling\r
{\r
private static final int WORK_SIZE = 1024 * 32;// #define WORK_SIZE 1024*32\r
+\r
private final NyARWorkHolder work_holder = new NyARWorkHolder(WORK_SIZE);\r
+\r
private NyARIntSize _dest_size;\r
+\r
private INyARLabelingImage _out_image;\r
\r
- public void attachDestination(INyARLabelingImage i_destination_image)throws NyARException\r
+ public void attachDestination(INyARLabelingImage i_destination_image) throws NyARException\r
{\r
// サイズチェック\r
NyARIntSize size = i_destination_image.getSize();\r
this._out_image = i_destination_image;\r
\r
// NyLabelingImageのイメージ初期化(枠書き)\r
- int[][] img = (int[][])i_destination_image.getBufferReader().getBuffer();\r
+ int[] img = (int[]) i_destination_image.getBufferReader().getBuffer();\r
+ int bottom_ptr = (size.h - 1) * size.w;\r
for (int i = 0; i < size.w; i++) {\r
- img[0][i] = 0;\r
- img[size.h - 1][i] = 0;\r
+ img[i] = 0;\r
+ img[bottom_ptr + i] = 0;\r
}\r
for (int i = 0; i < size.h; i++) {\r
- img[i][0] = 0;\r
- img[i][size.w - 1] = 0;\r
+ img[i * size.w] = 0;\r
+ img[(i + 1) * size.w - 1] = 0;\r
}\r
\r
// サイズ(参照値)を保存\r
this._dest_size = size;\r
+ return;\r
}\r
+\r
public INyARLabelingImage getAttachedDestination()\r
{\r
return this._out_image;\r
}\r
+\r
/**\r
* static ARInt16 *labeling2( ARUint8 *image, int thresh,int *label_num, int **area, double **pos, int **clip,int **label_ref, int LorR ) 関数の代替品\r
* ラスタimageをラベリングして、結果を保存します。 Optimize:STEP[1514->1493]\r
*/\r
public void labeling(NyARBinRaster i_raster) throws NyARException\r
{\r
- int m, n; /* work */\r
- int i, j, k;\r
- INyARLabelingImage out_image = this._out_image;\r
+ int label_img_ptr1, label_pixel;\r
+ int i, j;\r
+ int n, k; /* work */\r
\r
// サイズチェック\r
NyARIntSize in_size = i_raster.getSize();\r
\r
final int lxsize = in_size.w;// lxsize = arUtil_c.arImXsize;\r
final int lysize = in_size.h;// lysize = arUtil_c.arImYsize;\r
- int[][] label_img = (int[][])out_image.getBufferReader().getBuffer();\r
+ final int[] label_img = (int[]) this._out_image.getBufferReader().getBuffer();\r
\r
// 枠作成はインスタンスを作った直後にやってしまう。\r
- \r
- //ラベリング情報のリセット(ラベリングインデックスを使用)\r
- out_image.reset(true);\r
- \r
- int[] label_idxtbl=out_image.getIndexArray();\r
+\r
+ // ラベリング情報のリセット(ラベリングインデックスを使用)\r
+ this._out_image.reset(true);\r
+\r
+ int[] label_idxtbl = this._out_image.getIndexArray();\r
+ int[] raster_buf = (int[]) i_raster.getBufferReader().getBuffer();\r
\r
int[] work2_pt;\r
int wk_max = 0;\r
\r
- int label_pixel;\r
- int[][] raster_buf=(int[][])i_raster.getBufferReader().getBuffer();\r
- int[] line_ptr;\r
+ int pixel_index;\r
int[][] work2 = this.work_holder.work2;\r
- int[] label_img_pt0, label_img_pt1;\r
+\r
+ // [1,1](ptr0)と、[0,1](ptr1)のインデクス値を計算する。\r
for (j = 1; j < lysize - 1; j++) {// for (int j = 1; j < lysize - 1;j++, pnt += poff*2, pnt2 += 2) {\r
- line_ptr=raster_buf[j];\r
- label_img_pt0 = label_img[j];\r
- label_img_pt1 = label_img[j - 1];\r
- for (i = 1; i < lxsize - 1; i++) {// for(int i = 1; i < lxsize-1;i++, pnt+=poff, pnt2++) {\r
+ pixel_index = j * lxsize + 1;\r
+ label_img_ptr1 = pixel_index - lxsize;// label_img_pt1 = label_img[j - 1];\r
+ for (i = 1; i < lxsize - 1; i++, pixel_index++, label_img_ptr1++) {// for(int i = 1; i < lxsize-1;i++, pnt+=poff, pnt2++) {\r
// RGBの合計値が閾値より小さいかな?\r
- if (line_ptr[i]==0) {\r
+ if (raster_buf[pixel_index] != 0) {\r
+ label_img[pixel_index] = 0;// label_img_pt0[i] = 0;// *pnt2 = 0;\r
+ } else {\r
// pnt1 = ShortPointer.wrap(pnt2, -lxsize);//pnt1 =&(pnt2[-lxsize]);\r
- if (label_img_pt1[i] > 0) {// if( *pnt1 > 0 ) {\r
- label_pixel = label_img_pt1[i];// *pnt2 = *pnt1;\r
+ if (label_img[label_img_ptr1] > 0) {// if (label_img_pt1[i] > 0) {// if( *pnt1 > 0 ) {\r
+ label_pixel = label_img[label_img_ptr1];// label_pixel = label_img_pt1[i];// *pnt2 = *pnt1;\r
\r
work2_pt = work2[label_pixel - 1];\r
work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++;\r
work2_pt[1] += i;// work2[((*pnt2)-1)*7+1] += i;\r
work2_pt[2] += j;// work2[((*pnt2)-1)*7+2] += j;\r
work2_pt[6] = j;// work2[((*pnt2)-1)*7+6] = j;\r
- } else if (label_img_pt1[i + 1] > 0) {// }else if(*(pnt1+1) > 0 ) {\r
- if (label_img_pt1[i - 1] > 0) {// if( *(pnt1-1) > 0 ) {\r
- m = label_idxtbl[label_img_pt1[i + 1] - 1];// m =work[*(pnt1+1)-1];\r
- n = label_idxtbl[label_img_pt1[i - 1] - 1];// n =work[*(pnt1-1)-1];\r
- if (m > n) {\r
- label_pixel = n;// *pnt2 = n;\r
- // wk=IntPointer.wrap(work, 0);//wk =\r
- // &(work[0]);\r
+ } else if (label_img[label_img_ptr1 + 1] > 0) {// } else if (label_img_pt1[i + 1] > 0) {// }else if(*(pnt1+1) > 0 ) {\r
+ if (label_img[label_img_ptr1 - 1] > 0) {// if (label_img_pt1[i - 1] > 0) {// if( *(pnt1-1) > 0 ) {\r
+ label_pixel = label_idxtbl[label_img[label_img_ptr1 + 1] - 1];// m = label_idxtbl[label_img_pt1[i + 1] - 1];// m\r
+ // =work[*(pnt1+1)-1];\r
+ n = label_idxtbl[label_img[label_img_ptr1 - 1] - 1];// n = label_idxtbl[label_img_pt1[i - 1] - 1];// n =work[*(pnt1-1)-1];\r
+ if (label_pixel > n) {\r
+ // wk=IntPointer.wrap(work, 0);//wk = &(work[0]);\r
for (k = 0; k < wk_max; k++) {\r
- if (label_idxtbl[k] == m) {// if( *wk == m )\r
+ if (label_idxtbl[k] == label_pixel) {// if( *wk == m )\r
label_idxtbl[k] = n;// *wk = n;\r
}\r
}\r
- } else if (m < n) {\r
- label_pixel = m;// *pnt2 = m;\r
+ label_pixel = n;// *pnt2 = n;\r
+ } else if (label_pixel < n) {\r
// wk=IntPointer.wrap(work,0);//wk = &(work[0]);\r
for (k = 0; k < wk_max; k++) {\r
if (label_idxtbl[k] == n) {// if( *wk == n ){\r
- label_idxtbl[k] = m;// *wk = m;\r
+ label_idxtbl[k] = label_pixel;// *wk = m;\r
}\r
}\r
- } else {\r
- label_pixel = m;// *pnt2 = m;\r
}\r
work2_pt = work2[label_pixel - 1];\r
work2_pt[0]++;\r
work2_pt[1] += i;\r
work2_pt[2] += j;\r
work2_pt[6] = j;\r
- } else if ((label_img_pt0[i - 1]) > 0) {// }else if(*(pnt2-1) > 0) {\r
- m = label_idxtbl[(label_img_pt1[i + 1]) - 1];// m =work[*(pnt1+1)-1];\r
- n = label_idxtbl[label_img_pt0[i - 1] - 1];// n =work[*(pnt2-1)-1];\r
- if (m > n) {\r
-\r
- label_pixel = n;// *pnt2 = n;\r
+ } else if ((label_img[pixel_index - 1]) > 0) {// } else if ((label_img_pt0[i - 1]) > 0) {// }else if(*(pnt2-1) > 0) {\r
+ label_pixel = label_idxtbl[label_img[label_img_ptr1 + 1] - 1];// m = label_idxtbl[label_img_pt1[i + 1] - 1];// m =work[*(pnt1+1)-1];\r
+ n = label_idxtbl[label_img[pixel_index - 1] - 1];// n = label_idxtbl[label_img_pt0[i - 1] - 1];// n =work[*(pnt2-1)-1];\r
+ if (label_pixel > n) {\r
for (k = 0; k < wk_max; k++) {\r
- if (label_idxtbl[k] == m) {// if( *wk == m ){\r
+ if (label_idxtbl[k] == label_pixel) {// if( *wk == m ){\r
label_idxtbl[k] = n;// *wk = n;\r
}\r
}\r
- } else if (m < n) {\r
- label_pixel = m;// *pnt2 = m;\r
+ label_pixel = n;// *pnt2 = n;\r
+ } else if (label_pixel < n) {\r
for (k = 0; k < wk_max; k++) {\r
if (label_idxtbl[k] == n) {// if( *wk == n ){\r
- label_idxtbl[k] = m;// *wk = m;\r
+ label_idxtbl[k] = label_pixel;// *wk = m;\r
}\r
}\r
- } else {\r
- label_pixel = m;// *pnt2 = m;\r
}\r
work2_pt = work2[label_pixel - 1];\r
work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++;\r
work2_pt[2] += j;// work2[((*pnt2)-1)*7+2] += j;\r
} else {\r
\r
- label_pixel = label_img_pt1[i + 1];// *pnt2 =\r
- // *(pnt1+1);\r
+ label_pixel = label_img[label_img_ptr1 + 1];// label_pixel = label_img_pt1[i + 1];// *pnt2 =\r
+ // *(pnt1+1);\r
\r
work2_pt = work2[label_pixel - 1];\r
work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++;\r
work2_pt[1] += i;// work2[((*pnt2)-1)*7+1] += i;\r
work2_pt[2] += j;// work2[((*pnt2)-1)*7+2] += j;\r
- if (work2_pt[3] > i) {// if(\r
- // work2[((*pnt2)-1)*7+3] >\r
- // i ){\r
+ if (work2_pt[3] > i) {// if(work2[((*pnt2)-1)*7+3] > i ){\r
work2_pt[3] = i;// work2[((*pnt2)-1)*7+3] = i;\r
}\r
work2_pt[6] = j;// work2[((*pnt2)-1)*7+6] = j;\r
}\r
- } else if ((label_img_pt1[i - 1]) > 0) {// }else if(\r
- // *(pnt1-1) > 0 ) {\r
- label_pixel = label_img_pt1[i - 1];// *pnt2 =\r
- // *(pnt1-1);\r
+ } else if ((label_img[label_img_ptr1 - 1]) > 0) {// } else if ((label_img_pt1[i - 1]) > 0) {// }else if(\r
+ // *(pnt1-1) > 0 ) {\r
+ label_pixel = label_img[label_img_ptr1 - 1];// label_pixel = label_img_pt1[i - 1];// *pnt2 =\r
+ // *(pnt1-1);\r
\r
work2_pt = work2[label_pixel - 1];\r
work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++;\r
work2_pt[4] = i;// work2[((*pnt2)-1)*7+4] = i;\r
}\r
work2_pt[6] = j;// work2[((*pnt2)-1)*7+6] = j;\r
- } else if (label_img_pt0[i - 1] > 0) {// }else if(*(pnt2-1) > 0) {\r
- label_pixel = label_img_pt0[i - 1];// *pnt2 =*(pnt2-1);\r
+ } else if (label_img[pixel_index - 1] > 0) {// } else if (label_img_pt0[i - 1] > 0) {// }else if(*(pnt2-1) > 0) {\r
+ label_pixel = label_img[pixel_index - 1];// label_pixel = label_img_pt0[i - 1];// *pnt2 =*(pnt2-1);\r
\r
work2_pt = work2[label_pixel - 1];\r
work2_pt[0]++;// work2[((*pnt2)-1)*7+0] ++;\r
work2_pt[5] = j;\r
work2_pt[6] = j;\r
}\r
- label_img_pt0[i] = label_pixel;\r
- } else {\r
- label_img_pt0[i] = 0;// *pnt2 = 0;\r
+ label_img[pixel_index] = label_pixel;// label_img_pt0[i] = label_pixel;\r
}\r
}\r
+\r
}\r
// インデックステーブルとラベル数の計算\r
int wlabel_num = 1;// *label_num = *wlabel_num = j - 1;\r
wlabel_num -= 1;// *label_num = *wlabel_num = j - 1;\r
if (wlabel_num == 0) {// if( *label_num == 0 ) {\r
// 発見数0\r
- out_image.getLabelStack().clear();\r
+ this._out_image.getLabelStack().clear();\r
return;\r
}\r
// ラベル情報の保存等\r
- NyARLabelingLabelStack label_list = out_image.getLabelStack();\r
+ NyARLabelingLabelStack label_list = this._out_image.getLabelStack();\r
\r
// ラベルバッファを予約\r
label_list.reserv(wlabel_num);\r
\r
// エリアと重心、クリップ領域を計算\r
NyARLabelingLabel label_pt;\r
- NyARLabelingLabel[] labels = (NyARLabelingLabel[])label_list.getArray();\r
+ NyARLabelingLabel[] labels = (NyARLabelingLabel[]) label_list.getArray();\r
for (i = 0; i < wlabel_num; i++) {\r
- label_pt =labels[i];\r
- label_pt.id =i+1;\r
+ label_pt = labels[i];\r
+ label_pt.id = i + 1;\r
label_pt.area = 0;\r
- label_pt.pos_x =label_pt.pos_y = 0;\r
+ label_pt.pos_x = label_pt.pos_y = 0;\r
label_pt.clip_l = lxsize;// wclip[i*4+0] = lxsize;\r
label_pt.clip_t = lysize;// wclip[i*4+2] = lysize;\r
- label_pt.clip_r =label_pt.clip_b = 0;// wclip[i*4+3] = 0;\r
+ label_pt.clip_r = label_pt.clip_b = 0;// wclip[i*4+3] = 0;\r
}\r
\r
for (i = 0; i < wk_max; i++) {\r
{\r
return this._dist;\r
}\r
+ /**\r
+ * \r
+ * @param i_factor\r
+ * NyARCameraDistortionFactorにセットする配列を指定する。要素数は4であること。\r
+ * @param i_projection\r
+ * NyARPerspectiveProjectionMatrixセットする配列を指定する。要素数は12であること。\r
+ */\r
+ public void setValue(double[] i_factor,double[] i_projection)\r
+ {\r
+ this._dist.setValue(i_factor);\r
+ this._projection_matrix.setValue(i_projection);\r
+ return;\r
+ }\r
\r
/**\r
* ARToolKit標準ファイルから1個目の設定をロードする。\r
//スケールを変更\r
this._dist.changeScale(scale);\r
this._projection_matrix.changeScale(scale);\r
- //for (int i = 0; i < 4; i++) {\r
- // array34[0 * 4 + i] = array34[0 * 4 + i] * scale;// newparam->mat[0][i]=source->mat[0][i]* scale;\r
- // array34[1 * 4 + i] = array34[1 * 4 + i] * scale;// newparam->mat[1][i]=source->mat[1][i]* scale;\r
- // array34[2 * 4 + i] = array34[2 * 4 + i];// newparam->mat[2][i] = source->mat[2][i];\r
- //}\r
-\r
-\r
this._screen_size.w = i_xsize;// newparam->xsize = xsize;\r
this._screen_size.h = i_ysize;// newparam->ysize = ysize;\r
return;\r
package jp.nyatla.nyartoolkit.core.pca2d;\r
\r
import jp.nyatla.nyartoolkit.NyARException;\r
-import jp.nyatla.nyartoolkit.core.param.*;\r
import jp.nyatla.nyartoolkit.core.types.NyARDoublePoint2d;\r
import jp.nyatla.nyartoolkit.core.types.matrix.NyARDoubleMatrix22;\r
\r
public final class NyARBinRaster extends NyARRaster_BasicClass\r
{\r
private INyARBufferReader _buffer_reader;\r
- protected int[][] _ref_buf;\r
+ protected int[] _ref_buf;\r
\r
public NyARBinRaster(int i_width, int i_height)\r
{\r
super(new NyARIntSize(i_width,i_height));\r
- this._ref_buf = new int[i_height][i_width];\r
- this._buffer_reader=new NyARBufferReader(this._ref_buf,INyARBufferReader.BUFFERFORMAT_INT2D_BIN_8);\r
+ this._ref_buf = new int[i_height*i_width];\r
+ this._buffer_reader=new NyARBufferReader(this._ref_buf,INyARBufferReader.BUFFERFORMAT_INT1D_BIN_8);\r
}\r
public INyARBufferReader getBufferReader()\r
{\r
public final class NyARGrayscaleRaster extends NyARRaster_BasicClass\r
{\r
\r
- protected int[][] _ref_buf;\r
+ protected int[] _ref_buf;\r
private INyARBufferReader _buffer_reader;\r
\r
public NyARGrayscaleRaster(int i_width, int i_height)\r
{\r
super(new NyARIntSize(i_width,i_height));\r
- this._ref_buf = new int[i_height][i_width];\r
- this._buffer_reader=new NyARBufferReader(this._ref_buf,INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8);\r
+ this._ref_buf = new int[i_height*i_width];\r
+ this._buffer_reader=new NyARBufferReader(this._ref_buf,INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8);\r
}\r
public INyARBufferReader getBufferReader()\r
{\r
INyARBufferReader out_buffer_reader=i_output.getBufferReader();\r
int in_buf_type=in_buffer_reader.getBufferType();\r
\r
- assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_BIN_8));\r
+ assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_BIN_8));\r
assert (checkInputType(in_buf_type)==true); \r
assert (i_input.getSize().isEqualSize(i_output.getSize()) == true);\r
\r
- int[][] out_buf = (int[][]) out_buffer_reader.getBuffer();\r
+ int[] out_buf = (int[]) out_buffer_reader.getBuffer();\r
byte[] in_buf = (byte[]) in_buffer_reader.getBuffer();\r
\r
NyARIntSize size = i_output.getSize();\r
return;\r
}\r
\r
- private void convert24BitRgb(byte[] i_in, int[][] i_out, NyARIntSize i_size)\r
+ private void convert24BitRgb(byte[] i_in, int[] i_out, NyARIntSize i_size)\r
{\r
- final int size_w=i_size.w;\r
- final int x_mod_end= size_w-(size_w%8);\r
final int th=this._threshold*3;\r
- int bp =(size_w*i_size.h-1)*3; \r
+ int bp =(i_size.w*i_size.h-1)*3;\r
int w;\r
- int x; \r
- for (int y =i_size.h-1; y>=0 ; y--){\r
- //端数分\r
- final int[] row_ptr=i_out[y];\r
- for (x = size_w-1;x>=x_mod_end;x--) {\r
- w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x]=w<=th?0:1;\r
- bp -= 3;\r
- }\r
- //タイリング \r
- for (;x>=0;x-=8) {\r
- w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x]=w<=th?0:1;\r
- bp -= 3;\r
- w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-1]=w<=th?0:1;\r
- bp -= 3;\r
- w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-2]=w<=th?0:1;\r
- bp -= 3;\r
- w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-3]=w<=th?0:1;\r
- bp -= 3;\r
- w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-4]=w<=th?0:1;\r
- bp -= 3;\r
- w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-5]=w<=th?0:1;\r
- bp -= 3;\r
- w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-6]=w<=th?0:1;\r
- bp -= 3;\r
- w=((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-7]=w<=th?0:1;\r
- bp -= 3;\r
- }\r
+ int xy;\r
+ final int pix_count =i_size.h*i_size.w;\r
+ final int pix_mod_part=pix_count-(pix_count%8);\r
+ for(xy=pix_count-1;xy>=pix_mod_part;xy--){\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 3;\r
+ }\r
+ //タイリング\r
+ for (;xy>=0;) {\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 3;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 3;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 3;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 3;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 3;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 3;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 3;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 3;\r
+ xy--;\r
}\r
return;\r
}\r
- private void convert32BitRgbx(byte[] i_in, int[][] i_out, NyARIntSize i_size)\r
+ private void convert32BitRgbx(byte[] i_in, int[] i_out, NyARIntSize i_size)\r
{\r
- final int size_w=i_size.w;\r
- final int x_mod_end= size_w-(size_w%8);\r
final int th=this._threshold*3;\r
- int bp =(size_w*i_size.h-1)*4;\r
+ int bp =(i_size.w*i_size.h-1)*4;\r
int w;\r
- int x;\r
- for (int y =i_size.h-1; y>=0 ; y--){\r
- final int[] row_ptr=i_out[y];\r
-\r
- //端数分\r
- for (x = size_w-1;x>=x_mod_end;x--) {\r
- w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x]=w<=th?0:1;\r
- bp -= 4;\r
- }\r
- //タイリング\r
- for (;x>=0;x-=8) {\r
- w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x]=w<=th?0:1;\r
- bp -= 4;\r
- w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-1]=w<=th?0:1;\r
- bp -= 4;\r
- w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-2]=w<=th?0:1;\r
- bp -= 4;\r
- w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-3]=w<=th?0:1;\r
- bp -= 4;\r
- w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-4]=w<=th?0:1;\r
- bp -= 4;\r
- w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-5]=w<=th?0:1;\r
- bp -= 4;\r
- w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-6]=w<=th?0:1;\r
- bp -= 4;\r
- w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
- row_ptr[x-7]=w<=th?0:1;\r
- bp -= 4;\r
- } \r
+ int xy;\r
+ final int pix_count =i_size.h*i_size.w;\r
+ final int pix_mod_part=pix_count-(pix_count%8);\r
+ for(xy=pix_count-1;xy>=pix_mod_part;xy--){\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 4;\r
+ }\r
+ //タイリング\r
+ for (;xy>=0;) {\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 4;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 4;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 4;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 4;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 4;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 4;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 4;\r
+ xy--;\r
+ w= ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff));\r
+ i_out[xy]=w<=th?0:1;\r
+ bp -= 4;\r
+ xy--;\r
}\r
return;\r
}\r
* \r
*/\r
package jp.nyatla.nyartoolkit.core.rasterreader;\r
-\r
+/**\r
+ * このインタフェイスは、画素データを格納するバッファオブジェクト\r
+ * へのアクセス方法と、その形式を定義します。\r
+ *\r
+ */\r
public interface INyARBufferReader\r
{\r
+ public static final int BYTE1D =0x00010000;\r
+ public static final int INT2D =0x00020000;\r
+ public static final int SHORT1D=0x00030000;\r
+ public static final int INT1D =0x00040000;\r
// ID規約\r
- // 00-07(8)型番号\r
- // 08-15(8)ビットフォーマットID\r
- // 00:24bit/01:32bit/02:16bit\r
+ // 24-31(8)予約\r
	// 16-23(8)型ID\r
// 00:無効/01:byte[]/02:int[][]/03:short[]\r
- // 24-31(8)予約\r
+ // 08-15(8)ビットフォーマットID\r
+ // 00:24bit/01:32bit/02:16bit\r
+ // 00-07(8)型番号\r
//\r
- \r
/**\r
* RGB24フォーマットで、全ての画素が0\r
*/\r
/**\r
* byte[]で、R8G8B8の24ビットで画素が格納されている。\r
*/\r
- public static final int BUFFERFORMAT_BYTE1D_R8G8B8_24 = 0x00010001;\r
-\r
+ public static final int BUFFERFORMAT_BYTE1D_R8G8B8_24 = BYTE1D|0x0001;\r
/**\r
* byte[]で、B8G8R8の24ビットで画素が格納されている。\r
*/\r
- public static final int BUFFERFORMAT_BYTE1D_B8G8R8_24 = 0x00010002;\r
-\r
+ public static final int BUFFERFORMAT_BYTE1D_B8G8R8_24 = BYTE1D|0x0002;\r
/**\r
	 * byte[]で、B8G8R8X8の32ビットで画素が格納されている。\r
*/\r
- public static final int BUFFERFORMAT_BYTE1D_B8G8R8X8_32 = 0x00010101;\r
+ public static final int BUFFERFORMAT_BYTE1D_B8G8R8X8_32 = BYTE1D|0x0101;\r
\r
/**\r
* byte[]で、RGB565の16ビット(little/big endian)で画素が格納されている。\r
*/\r
- public static final int BUFFERFORMAT_BYTE1D_R5G6B5_16LE = 0x00010201;\r
- public static final int BUFFERFORMAT_BYTE1D_R5G6B5_16BE = 0x00010202;\r
+ public static final int BUFFERFORMAT_BYTE1D_R5G6B5_16LE = BYTE1D|0x0201;\r
+ public static final int BUFFERFORMAT_BYTE1D_R5G6B5_16BE = BYTE1D|0x0202;\r
/**\r
* short[]で、RGB565の16ビット(little/big endian)で画素が格納されている。\r
*/ \r
- public static final int BUFFERFORMAT_WORD1D_R5G6B5_16LE = 0x00030201;\r
- public static final int BUFFERFORMAT_WORD1D_R5G6B5_16BE = 0x00030202;\r
+ public static final int BUFFERFORMAT_WORD1D_R5G6B5_16LE = SHORT1D|0x0201;\r
+ public static final int BUFFERFORMAT_WORD1D_R5G6B5_16BE = SHORT1D|0x0202;\r
\r
\r
/**\r
* int[][]で特に値範囲を定めない\r
*/\r
- public static final int BUFFERFORMAT_INT2D = 0x00020000;\r
-\r
+ public static final int BUFFERFORMAT_INT2D = INT2D|0x0000;\r
/**\r
* int[][]で0-255のグレイスケール画像\r
*/\r
- public static final int BUFFERFORMAT_INT2D_GLAY_8 = 0x00020001;\r
-\r
+ public static final int BUFFERFORMAT_INT2D_GLAY_8 = INT2D|0x0001;\r
/**\r
* int[][]で0/1の2値画像\r
*/\r
- public static final int BUFFERFORMAT_INT2D_BIN_8 = 0x00020002;\r
+ public static final int BUFFERFORMAT_INT2D_BIN_8 = INT2D|0x0002;\r
\r
+ /**\r
+ * int[]で特に値範囲を定めない\r
+ */\r
+ public static final int BUFFERFORMAT_INT1D = INT1D|0x0000;\r
+ /**\r
+ * int[]で0-255のグレイスケール画像\r
+ */\r
+ public static final int BUFFERFORMAT_INT1D_GLAY_8 = INT1D|0x0001;\r
+ /**\r
+	 * int[]で0/1の2値画像\r
+ */\r
+ public static final int BUFFERFORMAT_INT1D_BIN_8 = INT1D|0x0002;\r
+ /**\r
+ * int[]で、XRGB32の32ビットで画素が格納されている。\r
+ */ \r
+ public static final int BUFFERFORMAT_INT1D_X8R8G8B8_32 = INT1D|0x0102;\r
+ \r
+\r
+ /**\r
+ * バッファオブジェクトを返します。\r
+ * @return\r
+ */\r
public Object getBuffer();\r
+ /**\r
+ * バッファオブジェクトの形式を返します。\r
+ * @return\r
+ */\r
public int getBufferType();\r
+ /**\r
+	 * バッファオブジェクトの形式が、i_type_valueに一致するか返します。\r
+ * @param i_type_value\r
+ * @return\r
+ */\r
public boolean isEqualBufferType(int i_type_value);\r
}\r
{\r
protected Object _buffer;\r
protected int _buffer_type;\r
+ protected NyARBufferReader()\r
+ {\r
+ return;\r
+ }\r
public NyARBufferReader(Object i_buffer,int i_buffer_type)\r
{\r
this._buffer=i_buffer;\r
this._buffer_type=i_buffer_type;\r
+ return;\r
}\r
public Object getBuffer()\r
{\r
}\r
final private NyARRotVector __initRot_vec1;\r
final private NyARRotVector __initRot_vec2;\r
+ \r
\r
\r
\r
double sina, cosa, sinb,cosb,sinc, cosc;\r
\r
if (this.m22 > 1.0) {// <Optimize/>if( rot[2][2] > 1.0 ) {\r
- this.m22 = 1.0;// <Optimize/>rot[2][2] = 1.0;\r
+ cosb = 1.0;// <Optimize/>rot[2][2] = 1.0;\r
} else if (this.m22 < -1.0) {// <Optimize/>}else if( rot[2][2] < -1.0 ) {\r
- this.m22 = -1.0;// <Optimize/>rot[2][2] = -1.0;\r
+ cosb = -1.0;// <Optimize/>rot[2][2] = -1.0;\r
+ }else{\r
+ cosb =this.m22;// <Optimize/>cosb = rot[2][2];\r
}\r
- cosb =this.m22;// <Optimize/>cosb = rot[2][2];\r
b = Math.acos(cosb);\r
sinb =Math.sin(b);\r
final double rot02=this.m02;\r
}\r
return ret;\r
}\r
+ public static void copyArray(final NyARIntPoint[] i_from,NyARIntPoint[] i_to)\r
+ {\r
+ for(int i=i_from.length-1;i>=0;i--)\r
+ {\r
+ i_to[i].x=i_from[i].x;\r
+ i_to[i].y=i_from[i].y;\r
+ }\r
+ return;\r
+ }\r
}\r
*/\r
package jp.nyatla.nyartoolkit.core.types.stack;\r
\r
-import jp.nyatla.nyartoolkit.NyARException;\r
+\r
import jp.nyatla.nyartoolkit.core.types.*;\r
import jp.nyatla.utils.NyObjectStack;\r
\r
*/\r
package jp.nyatla.nyartoolkit.core.types.stack;\r
\r
-import jp.nyatla.nyartoolkit.NyARException;\r
+\r
import jp.nyatla.nyartoolkit.core.types.NyARIntRect;\r
import jp.nyatla.utils.NyObjectStack;\r
\r
public void analyzeRaster(INyARRaster i_input) throws NyARException\r
{\r
INyARBufferReader buffer_reader=i_input.getBufferReader();\r
- assert (buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_BIN_8));\r
+ assert (buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_BIN_8));\r
\r
// 結果をクリア\r
this._result.clear();\r
w1 = b1 = w2 = b2 = w3 = b3 = 0;\r
\r
NyARIntRect item;\r
- int[] line;\r
+ int[] raster_buf=(int[])buffer_reader.getBuffer();\r
+ int line_ptr;\r
int s_pos, b2_spos,b3_spos;\r
b2_spos=0;\r
for (int y = size.h - 1-8; y >= 8; y--) {\r
- line = ((int[][]) buffer_reader.getBuffer())[y];\r
+ line_ptr = y*size.w;\r
x = size.w - 1;\r
s_pos=0;\r
int token_id=0;\r
// w1の特定\r
w1 = 0;\r
for (; x >= 0; x--) {\r
- if (line[x] == 0) {\r
+ if (raster_buf[line_ptr+x] == 0) {\r
// 検出条件確認:w1は2以上欲しいな。\r
if (!check_w1(w1)) {\r
// 条件不十分\r
b1 = 0;\r
s_pos = x;\r
for (; x >= 0; x--) {\r
- if (line[x] > 0) {\r
+ if (raster_buf[line_ptr+x] > 0) {\r
// 検出条件確認:b1は1以上欲しいな。\r
if (!check_b1(b1)){\r
//条件不十分→白検出からやり直し\r
// w2の特定\r
w2 = 0;\r
for (; x >= 0; x--) {\r
- if (line[x] == 0) {\r
+ if (raster_buf[line_ptr+x] == 0) {\r
// 検出条件確認:w2*10/b1は80-120以上欲しいな。\r
if (!check_w2(b1,w2)) {\r
//条件不十分→w2→w1として、b1を解析\r
b2 = 0;\r
b2_spos=x;\r
for (; x >= 0; x--) {\r
- if (line[x] > 0){\r
+ if (raster_buf[line_ptr+x] > 0){\r
//条件:(w1+b1)/2の2~4倍\r
\r
if (!check_b2(b1,b2)) {\r
// w3の特定\r
w3 = 0;\r
for (; x >= 0; x--) {\r
- if (line[x] == 0){\r
+ if (raster_buf[line_ptr+x] == 0){\r
if (!check_w3(w2,w3)) {\r
//w2→w1,b2->b1として解析しなおす。\r
if(check_w1(w2) && check_b1(b2)){\r
b3 = 0;\r
b3_spos=x;\r
for (; x >= 0; x--) {\r
- if (line[x] > 0) {\r
+ if (raster_buf[line_ptr+x] > 0) {\r
// 検出条件確認\r
if (!check_b3(b3,b1)) {\r
if(check_w1(w2) && check_b1(b2)){\r
{\r
}\r
\r
- private int createHistgram(int[][] in_buf,NyARIntSize i_size, int[] o_histgram) throws NyARException\r
+ private int createHistgram(int[] in_buf,NyARIntSize i_size, int[] o_histgram) throws NyARException\r
{\r
int[][] fil1={\r
{-1,-2,-1},\r
int sam1,sam2;\r
for (int y = 1; y < i_size.h-1; y++) {\r
for (int x = 1; x < i_size.w-1; x++) {\r
- int v = in_buf[y][x];\r
+ int v = in_buf[y* i_size.w+x];\r
sam1=sam2=0;\r
for(int yy=0;yy<3;yy++){\r
for(int xx=0;xx<3;xx++){\r
- int v2=in_buf[y+yy-1][x+xx-1];\r
+ int v2=in_buf[(y+yy-1)* i_size.w+(x+xx-1)];\r
sam1+=v2*fil1[xx][yy];\r
sam2+=v2*fil1[yy][xx];\r
} \r
public void analyzeRaster(INyARRaster i_input) throws NyARException\r
{\r
final INyARBufferReader buffer_reader=i_input.getBufferReader(); \r
- assert (buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8));\r
+ assert (buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8));\r
int[] histgram = new int[256];\r
- this._threshold = createHistgram((int[][])buffer_reader.getBuffer(),i_input.getSize(), histgram);\r
+ this._threshold = createHistgram((int[])buffer_reader.getBuffer(),i_input.getSize(), histgram);\r
}\r
\r
/**\r
{\r
INyARBufferReader in_buffer_reader=i_input.getBufferReader(); \r
INyARBufferReader out_buffer_reader=i_output.getBufferReader(); \r
- assert (in_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8));\r
- assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8));\r
+ assert (in_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8));\r
+ assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8));\r
NyARIntSize size = i_output.getSize();\r
\r
- int[][] out_buf = (int[][]) out_buffer_reader.getBuffer();\r
+ int[] out_buf = (int[]) out_buffer_reader.getBuffer();\r
// 0で塗りつぶし\r
for (int y = 0; y < size.h; y++) {\r
for (int x = 0; x < size.w; x++) {\r
- out_buf[y][x] = 0;\r
+ out_buf[y* size.w+x] = 0;\r
}\r
}\r
// ヒストグラムを計算\r
int[] histgram = new int[256];\r
- int threshold = createHistgram((int[][])in_buffer_reader.getBuffer(),i_input.getSize(), histgram);\r
+ int threshold = createHistgram((int[])in_buffer_reader.getBuffer(),i_input.getSize(), histgram);\r
\r
// ヒストグラムの最大値を出す\r
int max_v = 0;\r
}\r
// 目盛り\r
for (int i = 0; i < size.h; i++) {\r
- out_buf[i][0] = 128;\r
- out_buf[i][128] = 128;\r
- out_buf[i][255] = 128;\r
+ out_buf[i* size.w+0] = 128;\r
+ out_buf[i* size.w+128] = 128;\r
+ out_buf[i* size.w+255] = 128;\r
}\r
// スケーリングしながら描画\r
for (int i = 0; i < 255; i++) {\r
- out_buf[histgram[i] * (size.h - 1) / max_v][i] = 255;\r
+ out_buf[(histgram[i] * (size.h - 1) / max_v)* size.w+i] = 255;\r
}\r
// 値\r
for (int i = 0; i < size.h; i++) {\r
- out_buf[i][threshold] = 255;\r
+ out_buf[i* size.w+threshold] = 255;\r
}\r
return;\r
}\r
\r
private int createHistgram(INyARBufferReader i_reader,NyARIntSize i_size, int[] o_histgram) throws NyARException\r
{\r
- int[][] in_buf = (int[][]) i_reader.getBuffer();\r
+ int[] in_buf = (int[]) i_reader.getBuffer();\r
int[] histgram = o_histgram;\r
\r
// ヒストグラムを作成\r
for (int y = 0; y < i_size.h; y++) {\r
int sum2 = 0;\r
for (int x = 0; x < i_size.w; x++) {\r
- int v = in_buf[y][x];\r
+ int v = in_buf[y* i_size.w+x];\r
histgram[v]++;\r
sum2 += v;\r
}\r
public void analyzeRaster(INyARRaster i_input) throws NyARException\r
{\r
final INyARBufferReader buffer_reader=i_input.getBufferReader(); \r
- assert (buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8));\r
+ assert (buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8));\r
int[] histgram = new int[256];\r
this._threshold = createHistgram(buffer_reader,i_input.getSize(), histgram);\r
}\r
{\r
INyARBufferReader in_buffer_reader=i_input.getBufferReader(); \r
INyARBufferReader out_buffer_reader=i_output.getBufferReader(); \r
- assert (in_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8));\r
- assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8));\r
+ assert (in_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8));\r
+ assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8));\r
NyARIntSize size = i_output.getSize();\r
\r
- int[][] out_buf = (int[][]) out_buffer_reader.getBuffer();\r
+ int[] out_buf = (int[]) out_buffer_reader.getBuffer();\r
// 0で塗りつぶし\r
for (int y = 0; y < size.h; y++) {\r
for (int x = 0; x < size.w; x++) {\r
- out_buf[y][x] = 0;\r
+ out_buf[y* size.w+x] = 0;\r
}\r
}\r
// ヒストグラムを計算\r
}\r
// 目盛り\r
for (int i = 0; i < size.h; i++) {\r
- out_buf[i][0] = 128;\r
- out_buf[i][128] = 128;\r
- out_buf[i][255] = 128;\r
+ out_buf[i* size.w+0] = 128;\r
+ out_buf[i* size.w+128] = 128;\r
+ out_buf[i* size.w+255] = 128;\r
}\r
// スケーリングしながら描画\r
for (int i = 0; i < 255; i++) {\r
- out_buf[histgram[i] * (size.h - 1) / max_v][i] = 255;\r
+ out_buf[(histgram[i] * (size.h - 1) / max_v)* size.w+i] = 255;\r
}\r
// 値\r
for (int i = 0; i < size.h; i++) {\r
- out_buf[i][threshold] = 255;\r
+ out_buf[i* size.w+threshold] = 255;\r
}\r
return;\r
}\r
\r
private int createHistgram(INyARBufferReader i_reader,NyARIntSize i_size, int[] o_histgram) throws NyARException\r
{\r
- int[][] in_buf = (int[][]) i_reader.getBuffer();\r
+ int[] in_buf = (int[]) i_reader.getBuffer();\r
int[] histgram = o_histgram;\r
\r
// ヒストグラムを作成\r
for (int y = 0; y < i_size.h; y++) {\r
int sum2 = 0;\r
for (int x = 0; x < i_size.w; x++) {\r
- int v = in_buf[y][x];\r
+ int v = in_buf[y* i_size.w+x];\r
histgram[v]++;\r
sum2 += v;\r
}\r
public void analyzeRaster(INyARRaster i_input) throws NyARException\r
{\r
final INyARBufferReader buffer_reader=i_input.getBufferReader(); \r
- assert (buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8));\r
+ assert (buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8));\r
\r
int[] histgram = new int[256];\r
// 閾値の基準値を出す。\r
{\r
INyARBufferReader in_buffer_reader=i_input.getBufferReader(); \r
INyARBufferReader out_buffer_reader=i_output.getBufferReader(); \r
- assert (in_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8));\r
- assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8));\r
+ assert (in_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8));\r
+ assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8));\r
\r
NyARIntSize size = i_output.getSize();\r
\r
- int[][] out_buf = (int[][]) out_buffer_reader.getBuffer();\r
+ int[] out_buf = (int[]) out_buffer_reader.getBuffer();\r
// 0で塗りつぶし\r
for (int y = 0; y < size.h; y++) {\r
for (int x = 0; x < size.w; x++) {\r
- out_buf[y][x] = 0;\r
+ out_buf[y* size.w+x] = 0;\r
}\r
}\r
// ヒストグラムを計算\r
}\r
// 目盛り\r
for (int i = 0; i < size.h; i++) {\r
- out_buf[i][0] = 128;\r
- out_buf[i][128] = 128;\r
- out_buf[i][255] = 128;\r
+ out_buf[i* size.w+0] = 128;\r
+ out_buf[i* size.w+128] = 128;\r
+ out_buf[i* size.w+255] = 128;\r
}\r
// スケーリングしながら描画\r
for (int i = 0; i < 255; i++) {\r
- out_buf[histgram[i] * (size.h - 1) / max_v][i] = 255;\r
+ out_buf[(histgram[i] * (size.h - 1) / max_v)* size.w+i] = 255;\r
}\r
// 値\r
for (int i = 0; i < size.h; i++) {\r
- out_buf[i][threshold] = 255;\r
+ out_buf[i* size.w+threshold] = 255;\r
}\r
return;\r
}\r
{\r
INyARBufferReader in_buffer_reader=i_input.getBufferReader(); \r
INyARBufferReader out_buffer_reader=i_output.getBufferReader(); \r
- assert (in_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8));\r
- assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8));\r
+ assert (in_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8));\r
+ assert (out_buffer_reader.isEqualBufferType(INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8));\r
assert (i_input.getSize().isEqualSize(i_output.getSize()) == true);\r
\r
- int[][] out_buf = (int[][]) out_buffer_reader.getBuffer();\r
- int[][] in_buf = (int[][]) in_buffer_reader.getBuffer();\r
+ int[] out_buf = (int[]) out_buffer_reader.getBuffer();\r
+ int[] in_buf = (int[]) in_buffer_reader.getBuffer();\r
\r
int bp = 0;\r
NyARIntSize size = i_output.getSize();\r
for (int y = 1; y < size.h; y++) {\r
int prev = 128;\r
for (int x = 1; x < size.w; x++) {\r
- int w = in_buf[y][x];\r
- out_buf[y][x] = (Math.abs(w - prev) + Math.abs(w - in_buf[y - 1][x])) / 2;\r
+ int w = in_buf[y* size.w+x];\r
+ out_buf[y* size.w+x] = (Math.abs(w - prev) + Math.abs(w - in_buf[(y - 1)* size.w+x])) / 2;\r
prev = w;\r
bp += 3;\r
}\r
public void doFilter(NyARGrayscaleRaster i_input, NyARBinRaster i_output) throws NyARException\r
{\r
final NyARIntSize size = i_output.getSize();\r
- final int[][] out_buf = (int[][]) i_output.getBufferReader().getBuffer();\r
- final int[][] in_buf = (int[][]) i_input.getBufferReader().getBuffer();\r
+ final int[] out_buf = (int[]) i_output.getBufferReader().getBuffer();\r
+ final int[] in_buf = (int[]) i_input.getBufferReader().getBuffer();\r
assert (i_input.getSize().isEqualSize(i_output.getSize()) == true);\r
assert (size.h % 8 == 0 && size.w % 8 == 0);//暫定実装なので。\r
\r
sum = nn = 0;\r
for (int yy = y - area; yy < y + area + 1; yy++) {\r
for (int xx = x1 - area; xx < x1 + area; xx++) {\r
- sum += in_buf[yy][xx];\r
+ sum += in_buf[yy*size.w+xx];\r
nn++;\r
}\r
}\r
for (int x = area; x < x2; x++) {\r
if (!first) {\r
for (int yy = y - area; yy < y + area; yy++) {\r
- sum += in_buf[yy][x + area];\r
- sum -= in_buf[yy][x - area];\r
+ sum += in_buf[yy*size.w+x + area];\r
+ sum -= in_buf[yy*size.w+x - area];\r
}\r
}\r
first = false;\r
int th = (sum / nn);\r
\r
- int g = in_buf[y][x];\r
- out_buf[y][x] = th < g ? 1 : 0;\r
+ int g = in_buf[y*size.w+x];\r
+ out_buf[y*size.w+x] = th < g ? 1 : 0;\r
}\r
}\r
return;\r
{\r
assert (i_input.getSize().isEqualSize(i_output.getSize()) == true);\r
\r
- final int[][] out_buf = (int[][]) i_output.getBufferReader().getBuffer();\r
- final int[][] in_buf = (int[][]) i_input.getBufferReader().getBuffer();\r
+ final int[] out_buf = (int[]) i_output.getBufferReader().getBuffer();\r
+ final int[] in_buf = (int[]) i_input.getBufferReader().getBuffer();\r
\r
int bp = 0;\r
NyARIntSize size = i_output.getSize();\r
for (int y = 0; y < size.h - 1; y++) {\r
for (int x = 0; x < size.w; x++) {\r
- out_buf[y][x] = in_buf[y][x] >= this._threshold ? 1 : 0;\r
+ out_buf[y*size.w+x] = in_buf[y*size.w+x] >= this._threshold ? 1 : 0;\r
bp += 3;\r
}\r
}\r
INyARBufferReader out_buffer_reader=i_output.getBufferReader(); \r
assert (i_input.getSize().isEqualSize(i_output.getSize()) == true);\r
\r
- int[][] out_buf = (int[][]) out_buffer_reader.getBuffer();\r
+ int[] out_buf = (int[]) out_buffer_reader.getBuffer();\r
byte[] in_buf = (byte[]) in_buffer_reader.getBuffer();\r
\r
NyARIntSize size = i_output.getSize();\r
return;\r
}\r
\r
- private void convert24BitRgb(byte[] i_in, int[][] i_out, NyARIntSize i_size)\r
+ private void convert24BitRgb(byte[] i_in, int[] i_out, NyARIntSize i_size)\r
{\r
int bp = 0;\r
for (int y = 0; y < i_size.h; y++) {\r
for (int x = 0; x < i_size.w; x++) {\r
- i_out[y][x] = ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff)) / 3;\r
+ i_out[y*i_size.w+x] = ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff)) / 3;\r
bp += 3;\r
}\r
}\r
return;\r
}\r
- private void convert32BitRgbx(byte[] i_in, int[][] i_out, NyARIntSize i_size)\r
+ private void convert32BitRgbx(byte[] i_in, int[] i_out, NyARIntSize i_size)\r
{\r
int bp = 0;\r
for (int y = 0; y < i_size.h; y++) {\r
for (int x = 0; x < i_size.w; x++) {\r
- i_out[y][x] = ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff)) / 3;\r
+ i_out[y*i_size.w+x] = ((i_in[bp] & 0xff) + (i_in[bp + 1] & 0xff) + (i_in[bp + 2] & 0xff)) / 3;\r
bp += 4;\r
}\r
}\r
INyARBufferReader out_buffer_reader=i_output.getBufferReader(); \r
assert (i_input.getSize().isEqualSize(i_output.getSize()) == true);\r
\r
- int[][] out_buf = (int[][]) out_buffer_reader.getBuffer();\r
+ int[] out_buf = (int[]) out_buffer_reader.getBuffer();\r
byte[] in_buf = (byte[]) in_buffer_reader.getBuffer();\r
\r
NyARIntSize size = i_output.getSize();\r
return;\r
}\r
\r
- private void convert24BitRgb(byte[] i_in, int[][] i_out, NyARIntSize i_size)\r
+ private void convert24BitRgb(byte[] i_in, int[] i_out, NyARIntSize i_size)\r
{\r
int bp = 0;\r
for (int y = 0; y < i_size.h; y++) {\r
for (int x = 0; x < i_size.w; x++) {\r
- i_out[y][x] = ((i_in[bp] & 0xff) * (i_in[bp + 1] & 0xff) * (i_in[bp + 2] & 0xff)) >> 16;\r
+ i_out[y*i_size.w+x] = ((i_in[bp] & 0xff) * (i_in[bp + 1] & 0xff) * (i_in[bp + 2] & 0xff)) >> 16;\r
bp += 3;\r
}\r
}\r
INyARBufferReader out_buffer_reader=i_output.getBufferReader(); \r
assert (i_input.getSize().isEqualSize(i_output.getSize()) == true);\r
\r
- final int[][] out_buf = (int[][]) out_buffer_reader.getBuffer();\r
+ final int[] out_buf = (int[]) out_buffer_reader.getBuffer();\r
final byte[] in_buf = (byte[]) in_buffer_reader.getBuffer();\r
\r
NyARIntSize size = i_output.getSize();\r
return;\r
}\r
\r
- private void convert24BitRgb(byte[] i_in, int[][] i_out, NyARIntSize i_size)\r
+ private void convert24BitRgb(byte[] i_in, int[] i_out, NyARIntSize i_size)\r
{\r
int bp = 0;\r
for (int y = 0; y < i_size.h; y++) {\r
for (int x = 0; x < i_size.w; x++) {\r
- i_out[y][x] = ((i_in[bp] & 0xff) | (i_in[bp + 1] & 0xff) | (i_in[bp + 2] & 0xff));\r
+ i_out[y*i_size.w+x] = ((i_in[bp] & 0xff) | (i_in[bp + 1] & 0xff) | (i_in[bp + 2] & 0xff));\r
bp += 3;\r
}\r
}\r
--- /dev/null
+/* \r
+ * PROJECT: NyARToolkit\r
+ * --------------------------------------------------------------------------------\r
+ * This work is based on the original ARToolKit developed by\r
+ * Hirokazu Kato\r
+ * Mark Billinghurst\r
+ * HITLab, University of Washington, Seattle\r
+ * http://www.hitl.washington.edu/artoolkit/\r
+ *\r
+ * The NyARToolkit is Java version ARToolkit class library.\r
+ * Copyright (C)2008 R.Iizuka\r
+ *\r
+ * This program is free software; you can redistribute it and/or\r
+ * modify it under the terms of the GNU General Public License\r
+ * as published by the Free Software Foundation; either version 2\r
+ * of the License, or (at your option) any later version.\r
+ * \r
+ * This program is distributed in the hope that it will be useful,\r
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of\r
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\r
+ * GNU General Public License for more details.\r
+ * \r
+ * You should have received a copy of the GNU General Public License\r
+ * along with this framework; if not, write to the Free Software\r
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA\r
+ * \r
+ * For further information please contact.\r
+ * http://nyatla.jp/nyatoolkit/\r
+ * <airmail(at)ebony.plala.or.jp>\r
+ * \r
+ */\r
+package jp.nyatla.nyartoolkit.detector;\r
+\r
+import jp.nyatla.nyartoolkit.NyARException;\r
+import jp.nyatla.nyartoolkit.core.*;\r
+import jp.nyatla.nyartoolkit.core.match.*;\r
+import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
+import jp.nyatla.nyartoolkit.core.pickup.*;\r
+import jp.nyatla.nyartoolkit.core.raster.rgb.*;\r
+import jp.nyatla.nyartoolkit.core.raster.*;\r
+import jp.nyatla.nyartoolkit.core.transmat.*;\r
+import jp.nyatla.nyartoolkit.core.types.NyARIntSize;\r
+import jp.nyatla.nyartoolkit.core.rasterfilter.rgb2bin.*;\r
+import jp.nyatla.nyartoolkit.core.types.*;\r
+\r
+/**\r
+ * 画像からARCodeに最も一致するマーカーを1個検出し、その変換行列を計算するクラスです。\r
+ * \r
+ */\r
+public class NyARCustomSingleDetectMarker\r
+{\r
+ private static final int AR_SQUARE_MAX = 100;\r
+\r
+ private boolean _is_continue = false;\r
+ private NyARMatchPatt_Color_WITHOUT_PCA _match_patt;\r
+ private INyARSquareDetector _square_detect;\r
+\r
+ private final NyARSquareStack _square_list = new NyARSquareStack(AR_SQUARE_MAX);\r
+\r
+ private NyARCode _code;\r
+\r
+ protected INyARTransMat _transmat;\r
+\r
+ private double _marker_width;\r
+ // 検出結果の保存用\r
+ private int _detected_direction;\r
+ private double _detected_confidence;\r
+ private NyARSquare _detected_square;\r
+ private INyARColorPatt _patt;\r
+ //画処理用\r
+ private NyARBinRaster _bin_raster;\r
+ protected INyARRasterFilter_RgbToBin _tobin_filter;\r
+\r
+ /**\r
+ * 検出するARCodeとカメラパラメータから、1個のARCodeを検出するNyARSingleDetectMarkerインスタンスを作ります。\r
+ * \r
+ * @param i_param\r
+ * カメラパラメータを指定します。\r
+ * @param i_code\r
+ * 検出するARCodeを指定します。\r
+ * @param i_marker_width\r
+ * ARコードの物理サイズを、ミリメートルで指定します。\r
+ * @param i_filter\r
+ * RGB→BIN変換フィルタを指定します。\r
+ * @throws NyARException\r
+ */\r
+ public NyARCustomSingleDetectMarker(NyARParam i_param, NyARCode i_code, double i_marker_width,INyARRasterFilter_RgbToBin i_filter) throws NyARException\r
+ {\r
+ final NyARIntSize scr_size=i_param.getScreenSize(); \r
+ // 解析オブジェクトを作る\r
+ this._square_detect = new NyARSquareDetector(i_param.getDistortionFactor(),scr_size);\r
+ this._transmat = new NyARTransMat(i_param);\r
+ // 比較コードを保存\r
+ this._code = i_code;\r
+ this._marker_width = i_marker_width;\r
+ // 評価パターンのホルダを作る\r
+ this._patt = new NyARColorPatt_O3(_code.getWidth(), _code.getHeight());\r
+ // 評価器を作る。\r
+ this._match_patt = new NyARMatchPatt_Color_WITHOUT_PCA();\r
+ //2値画像バッファを作る\r
+ this._bin_raster=new NyARBinRaster(scr_size.w,scr_size.h);\r
+ this._tobin_filter=i_filter;\r
+ }\r
+\r
+\r
+ /**\r
+ * i_imageにマーカー検出処理を実行し、結果を記録します。\r
+ * \r
+ * @param i_raster\r
+ * マーカーを検出するイメージを指定します。イメージサイズは、カメラパラメータ\r
+ * と一致していなければなりません。\r
+ * @return マーカーが検出できたかを真偽値で返します。\r
+ * @throws NyARException\r
+ */\r
+ public boolean detectMarkerLite(INyARRgbRaster i_raster) throws NyARException\r
+ {\r
+ //サイズチェック\r
+ if(!this._bin_raster.getSize().isEqualSize(i_raster.getSize())){\r
+ throw new NyARException();\r
+ }\r
+\r
+ //ラスタを2値イメージに変換する.\r
+ this._tobin_filter.doFilter(i_raster,this._bin_raster);\r
+ \r
+ \r
+ this._detected_square = null;\r
+ NyARSquareStack l_square_list = this._square_list;\r
+ // スクエアコードを探す\r
+ this._square_detect.detectMarker(this._bin_raster, l_square_list);\r
+\r
+\r
+ int number_of_square = l_square_list.getLength();\r
+ // コードは見つかった?\r
+ if (number_of_square < 1) {\r
+ return false;\r
+ }\r
+\r
+ // 評価基準になるパターンをイメージから切り出す\r
+ if (!this._patt.pickFromRaster(i_raster, (NyARSquare)l_square_list.getItem(0))) {\r
+ // パターンの切り出しに失敗\r
+ return false;\r
+ }\r
+ // パターンを評価器にセット\r
+ if (!this._match_patt.setPatt(this._patt)) {\r
+ // 計算に失敗した。\r
+ throw new NyARException();\r
+ }\r
+ // コードと比較する\r
+ this._match_patt.evaluate(this._code);\r
+ int square_index = 0;\r
+ int direction = this._match_patt.getDirection();\r
+ double confidence = this._match_patt.getConfidence();\r
+ for (int i = 1; i < number_of_square; i++) {\r
+ // 次のパターンを取得\r
+ this._patt.pickFromRaster(i_raster, (NyARSquare)l_square_list.getItem(i));\r
+ // 評価器にセットする。\r
+ this._match_patt.setPatt(this._patt);\r
+ // コードと比較する\r
+ this._match_patt.evaluate(this._code);\r
+ double c2 = this._match_patt.getConfidence();\r
+ if (confidence > c2) {\r
+ continue;\r
+ }\r
+ // もっと一致するマーカーがあったぽい\r
+ square_index = i;\r
+ direction = this._match_patt.getDirection();\r
+ confidence = c2;\r
+ }\r
+ // マーカー情報を保存\r
+ this._detected_square = (NyARSquare)l_square_list.getItem(square_index);\r
+ this._detected_direction = direction;\r
+ this._detected_confidence = confidence;\r
+ return true;\r
+ }\r
+\r
+ /**\r
+ * 検出したマーカーの変換行列を計算して、o_resultへ値を返します。\r
+ * 直前に実行したdetectMarkerLiteが成功していないと使えません。\r
+ * \r
+ * @param o_result\r
+ * 変換行列を受け取るオブジェクトを指定します。\r
+ * @throws NyARException\r
+ */\r
+ public void getTransmationMatrix(NyARTransMatResult o_result) throws NyARException\r
+ {\r
+ // 一番一致したマーカーの位置とかその辺を計算\r
+ if (this._is_continue) {\r
+ this._transmat.transMatContinue(this._detected_square,this._detected_direction,this._marker_width, o_result);\r
+ } else {\r
+ this._transmat.transMat(this._detected_square,this._detected_direction,this._marker_width, o_result);\r
+ }\r
+ return;\r
+ }\r
+ /**\r
+ * 画面上のマーカ頂点情報を配列へ取得します。\r
+ * @param o_point\r
+ * 4要素以上の配列を指定して下さい。先頭の4要素に値がコピーされます。\r
+ */\r
+ public void getSquarePosition(NyARIntPoint[] o_point)\r
+ {\r
+ NyARIntPoint.copyArray(this._detected_square.imvertex,o_point);\r
+ return;\r
+ }\r
+ /**\r
+ * 画面上のマーカ頂点情報を配列へのリファレンスを返します。\r
+ * 返されたオブジェクトはクラスに所有し続けられています。クラスのメンバ関数を実行すると内容が書き変わります。\r
+ * 外部でデータをストックする場合は、getSquarePositionで複製して下さい。\r
+ * @return\r
+ */\r
+ public NyARIntPoint[] refSquarePosition()\r
+ {\r
+ return this._detected_square.imvertex;\r
+ }\r
+ \r
+\r
+ /**\r
+ * 検出したマーカーの一致度を返します。\r
+ * \r
+ * @return マーカーの一致度を返します。0~1までの値をとります。 一致度が低い場合には、誤認識の可能性が高くなります。\r
+ * @throws NyARException\r
+ */\r
+ public double getConfidence()\r
+ {\r
+ return this._detected_confidence;\r
+ }\r
+\r
+ /**\r
+ * 検出したマーカーの方位を返します。\r
+ * \r
+ * @return 0,1,2,3の何れかを返します。\r
+ */\r
+ public int getDirection()\r
+ {\r
+ return this._detected_direction;\r
+ }\r
+\r
+ /**\r
+ * getTransmationMatrixの計算モードを設定します。 初期値はTRUEです。\r
+ * \r
+ * @param i_is_continue\r
+ * TRUEなら、transMatCont互換の計算をします。 FALSEなら、transMat互換の計算をします。\r
+ */\r
+ public void setContinueMode(boolean i_is_continue)\r
+ {\r
+ this._is_continue = i_is_continue;\r
+ }\r
+}\r
\r
import jp.nyatla.nyartoolkit.NyARException;\r
import jp.nyatla.nyartoolkit.core.*;\r
-import jp.nyatla.nyartoolkit.core.match.*;\r
import jp.nyatla.nyartoolkit.core.param.NyARParam;\r
-import jp.nyatla.nyartoolkit.core.pickup.*;\r
import jp.nyatla.nyartoolkit.core.raster.rgb.*;\r
-import jp.nyatla.nyartoolkit.core.raster.*;\r
-import jp.nyatla.nyartoolkit.core.transmat.*;\r
-import jp.nyatla.nyartoolkit.core.types.NyARIntSize;\r
import jp.nyatla.nyartoolkit.core.rasterfilter.rgb2bin.NyARRasterFilter_ARToolkitThreshold;\r
\r
/**\r
* 画像からARCodeに最も一致するマーカーを1個検出し、その変換行列を計算するクラスです。\r
* \r
*/\r
-public class NyARSingleDetectMarker\r
+public class NyARSingleDetectMarker extends NyARCustomSingleDetectMarker\r
{\r
- private static final int AR_SQUARE_MAX = 100;\r
-\r
- private boolean _is_continue = false;\r
- private NyARMatchPatt_Color_WITHOUT_PCA _match_patt;\r
- private INyARSquareDetector _square_detect;\r
-\r
- private final NyARSquareStack _square_list = new NyARSquareStack(AR_SQUARE_MAX);\r
-\r
- private NyARCode _code;\r
-\r
- protected INyARTransMat _transmat;\r
-\r
- private double _marker_width;\r
-\r
- // 検出結果の保存用\r
- private int _detected_direction;\r
-\r
- private double _detected_confidence;\r
-\r
- private NyARSquare _detected_square;\r
-\r
- private INyARColorPatt _patt;\r
-\r
/**\r
* 検出するARCodeとカメラパラメータから、1個のARCodeを検出するNyARSingleDetectMarkerインスタンスを作ります。\r
* \r
*/\r
public NyARSingleDetectMarker(NyARParam i_param, NyARCode i_code, double i_marker_width) throws NyARException\r
{\r
- final NyARIntSize scr_size=i_param.getScreenSize(); \r
- // 解析オブジェクトを作る\r
- this._square_detect = new NyARSquareDetector(i_param.getDistortionFactor(),scr_size);\r
- this._transmat = new NyARTransMat(i_param);\r
- // 比較コードを保存\r
- this._code = i_code;\r
- this._marker_width = i_marker_width;\r
- // 評価パターンのホルダを作る\r
- this._patt = new NyARColorPatt_O3(_code.getWidth(), _code.getHeight());\r
- // 評価器を作る。\r
- this._match_patt = new NyARMatchPatt_Color_WITHOUT_PCA();\r
- //2値画像バッファを作る\r
- this._bin_raster=new NyARBinRaster(scr_size.w,scr_size.h);\r
+ super(i_param,i_code,i_marker_width,new NyARRasterFilter_ARToolkitThreshold(100));\r
}\r
\r
- NyARBinRaster _bin_raster;\r
- NyARRasterFilter_ARToolkitThreshold _tobin_filter=new NyARRasterFilter_ARToolkitThreshold(100);\r
\r
/**\r
* i_imageにマーカー検出処理を実行し、結果を記録します。\r
*/\r
public boolean detectMarkerLite(INyARRgbRaster i_raster,int i_threshold) throws NyARException\r
{\r
- //サイズチェック\r
- if(!this._bin_raster.getSize().isEqualSize(i_raster.getSize())){\r
- throw new NyARException();\r
- }\r
-\r
- //ラスタを2値イメージに変換する.\r
- this._tobin_filter.setThreshold(i_threshold);\r
- this._tobin_filter.doFilter(i_raster,this._bin_raster);\r
- \r
- \r
- this._detected_square = null;\r
- NyARSquareStack l_square_list = this._square_list;\r
- // スクエアコードを探す\r
- this._square_detect.detectMarker(this._bin_raster, l_square_list);\r
-\r
-\r
- int number_of_square = l_square_list.getLength();\r
- // コードは見つかった?\r
- if (number_of_square < 1) {\r
- return false;\r
- }\r
-\r
- // 評価基準になるパターンをイメージから切り出す\r
- if (!this._patt.pickFromRaster(i_raster, (NyARSquare)l_square_list.getItem(0))) {\r
- // パターンの切り出しに失敗\r
- return false;\r
- }\r
- // パターンを評価器にセット\r
- if (!this._match_patt.setPatt(this._patt)) {\r
- // 計算に失敗した。\r
- throw new NyARException();\r
- }\r
- // コードと比較する\r
- this._match_patt.evaluate(this._code);\r
- int square_index = 0;\r
- int direction = this._match_patt.getDirection();\r
- double confidence = this._match_patt.getConfidence();\r
- for (int i = 1; i < number_of_square; i++) {\r
- // 次のパターンを取得\r
- this._patt.pickFromRaster(i_raster, (NyARSquare)l_square_list.getItem(i));\r
- // 評価器にセットする。\r
- this._match_patt.setPatt(this._patt);\r
- // コードと比較する\r
- this._match_patt.evaluate(this._code);\r
- double c2 = this._match_patt.getConfidence();\r
- if (confidence > c2) {\r
- continue;\r
- }\r
- // もっと一致するマーカーがあったぽい\r
- square_index = i;\r
- direction = this._match_patt.getDirection();\r
- confidence = c2;\r
- }\r
- // マーカー情報を保存\r
- this._detected_square = (NyARSquare)l_square_list.getItem(square_index);\r
- this._detected_direction = direction;\r
- this._detected_confidence = confidence;\r
- return true;\r
- }\r
-\r
- /**\r
- * 検出したマーカーの変換行列を計算して、o_resultへ値を返します。\r
- * 直前に実行したdetectMarkerLiteが成功していないと使えません。\r
- * \r
- * @param o_result\r
- * 変換行列を受け取るオブジェクトを指定します。\r
- * @throws NyARException\r
- */\r
- public void getTransmationMatrix(NyARTransMatResult o_result) throws NyARException\r
- {\r
- // 一番一致したマーカーの位置とかその辺を計算\r
- if (this._is_continue) {\r
- this._transmat.transMatContinue(this._detected_square,this._detected_direction,this._marker_width, o_result);\r
- } else {\r
- this._transmat.transMat(this._detected_square,this._detected_direction,this._marker_width, o_result);\r
- }\r
- return;\r
- }\r
-\r
- /**\r
- * 検出したマーカーの一致度を返します。\r
- * \r
- * @return マーカーの一致度を返します。0~1までの値をとります。 一致度が低い場合には、誤認識の可能性が高くなります。\r
- * @throws NyARException\r
- */\r
- public double getConfidence()\r
- {\r
- return this._detected_confidence;\r
- }\r
-\r
- /**\r
- * 検出したマーカーの方位を返します。\r
- * \r
- * @return 0,1,2,3の何れかを返します。\r
- */\r
- public int getDirection()\r
- {\r
- return this._detected_direction;\r
- }\r
-\r
- /**\r
- * getTransmationMatrixの計算モードを設定します。 初期値はTRUEです。\r
- * \r
- * @param i_is_continue\r
- * TRUEなら、transMatCont互換の計算をします。 FALSEなら、transMat互換の計算をします。\r
- */\r
- public void setContinueMode(boolean i_is_continue)\r
- {\r
- this._is_continue = i_is_continue;\r
+ ((NyARRasterFilter_ARToolkitThreshold)this._tobin_filter).setThreshold(i_threshold);\r
+ return super.detectMarkerLite(i_raster);\r
}\r
}\r
* \r
* @param i_array\r
*/\r
- public NyObjectStack(Object[] i_array)\r
+ protected NyObjectStack(Object[] i_array)\r
{\r
// ポインタだけははじめに確保しておく\r
this._items = i_array;\r
\r
public void drawImage(NyARGrayscaleRaster i_raster) throws NyARException\r
{\r
- assert (i_raster.getBufferReader().getBufferType() == INyARBufferReader.BUFFERFORMAT_INT2D_GLAY_8);\r
+ assert (i_raster.getBufferReader().getBufferType() == INyARBufferReader.BUFFERFORMAT_INT1D_GLAY_8);\r
\r
int w = this.getWidth();\r
int h = this.getHeight();\r
throw new NyARException();\r
}\r
\r
- int[][] limg;\r
+ int[] limg;\r
// イメージの描画\r
- limg = (int[][]) i_raster.getBufferReader().getBuffer();\r
+ limg = (int[]) i_raster.getBufferReader().getBuffer();\r
for (int i = 0; i < h; i++) {\r
for (int i2 = 0; i2 < w; i2++) {\r
- this.setRGB(i2, i,limg[i][i2]);\r
+ this.setRGB(i2, i,limg[i*w+i2]);\r
}\r
}\r
return;\r
*/\r
public void drawImage(NyARBinRaster i_raster) throws NyARException\r
{\r
- assert (i_raster.getBufferReader().getBufferType() == INyARBufferReader.BUFFERFORMAT_INT2D_BIN_8);\r
+ assert (i_raster.getBufferReader().getBufferType() == INyARBufferReader.BUFFERFORMAT_INT1D_BIN_8);\r
\r
int w = this.getWidth();\r
int h = this.getHeight();\r
throw new NyARException();\r
}\r
\r
- int[][] limg;\r
+ int[] limg;\r
// イメージの描画\r
- limg = (int[][]) i_raster.getBufferReader().getBuffer();\r
+ limg = (int[]) i_raster.getBufferReader().getBuffer();\r
for (int i = 0; i < h; i++) {\r
for (int i2 = 0; i2 < w; i2++) {\r
- this.setRGB(i2, i, limg[i][i2] > 0 ? 255 : 0);\r
+ this.setRGB(i2, i, limg[i*w+i2] > 0 ? 255 : 0);\r
}\r
}\r
return;\r
}\r
int[] index_array=i_image.getIndexArray();\r
\r
- int[][] limg;\r
+ int[] limg;\r
// イメージの描画\r
- limg = (int[][]) i_image.getBufferReader().getBuffer();\r
+ limg = (int[]) i_image.getBufferReader().getBuffer();\r
for (int i = 0; i < h; i++) {\r
for (int i2 = 0; i2 < w; i2++) {\r
- int t=limg[i][i2]-1;\r
+ int t=limg[i*w+i2]-1;\r
if(t<0){\r
t=0;\r
}else{\r