package com.tomgibara.android.camera;

import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;

import javax.microedition.khronos.opengles.GL10;

import jp.nyatla.nyartoolkit.NyARException;
import jp.nyatla.nyartoolkit.core.NyARCode;
import jp.nyatla.nyartoolkit.core.raster.NyARRaster_RGB;
import jp.nyatla.nyartoolkit.jogl.utils.GLNyARParam;
import jp.nyatla.nyartoolkit.jogl.utils.GLNyARSingleDetectMarker;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.OpenGLContext;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.Log;
import android.view.SurfaceHolder;
\r
/**
 * A CameraSource implementation that obtains its bitmaps via a TCP connection
 * to a remote host on a specified address/port.
 *
 * Originally written by Tom Gibara.
 * Modified for detecting an AR marker and drawing a 3D cube.
 */
41 public class SocketCamera implements CameraSource {
\r
43 private static final int SOCKET_TIMEOUT = 5000;
\r
45 private final String address;
\r
46 private final int port;
\r
47 private final Rect bounds;
\r
48 private final boolean preserveAspectRatio;
\r
49 private final Paint paint = new Paint();
\r
50 private final Paint paintBlue = new Paint();
\r
52 private GLNyARSingleDetectMarker nya;
\r
53 private NyARRaster_RGB raster;
\r
54 private GLNyARParam ar_param;
\r
55 // private NyARCode ar_code;
\r
57 private SurfaceHolder mHolder;
\r
59 private final Bitmap fixedBitmap;
\r
60 private final Cube mCube = new Cube();
\r
64 private int[] pixels;
\r
66 private float[] resultf;
\r
68 private static final boolean LOCAL = false;
\r
69 private static final boolean TEXT = false;
\r
70 private static final boolean DETECT = true;
\r
72 public SocketCamera(String address, int port, int width, int height,
\r
73 boolean preserveAspectRatio, GLNyARParam ar_param,
\r
74 NyARCode ar_code, SurfaceHolder holder, Bitmap fixedBitmap) {
\r
75 this.address = address;
\r
77 bounds = new Rect(0, 0, width, height);
\r
78 this.preserveAspectRatio = preserveAspectRatio;
\r
80 paint.setColor(0xFFFF0000);
\r
81 paint.setFilterBitmap(true);
\r
82 paint.setAntiAlias(true);
\r
84 paintBlue.setColor(0xFF0000FF);
\r
85 paintBlue.setFilterBitmap(true);
\r
86 paintBlue.setAntiAlias(true);
\r
88 this.ar_param = ar_param;
\r
89 // this.ar_code = ar_code;
\r
91 nya = new GLNyARSingleDetectMarker(ar_param, ar_code, 80.0);
\r
92 nya.setContinueMode(true);
\r
93 } catch (NyARException e) {
\r
94 e.printStackTrace();
\r
99 this.fixedBitmap = fixedBitmap;
\r
104 public int getWidth() {
\r
105 return bounds.right;
\r
109 public int getHeight() {
\r
110 return bounds.bottom;
\r
114 public boolean open() {
\r
115 /* nothing to do */
\r
120 public boolean capture(Canvas canvas) {
\r
121 if (canvas == null) {
\r
122 throw new IllegalArgumentException("null canvas");
\r
125 int[] pixels = this.pixels;
\r
126 byte[] buf = this.buf;
\r
127 float[] resultf = this.resultf;
\r
131 if (this.w != width || this.h != height || buf == null) {
\r
134 pixels = new int[w * h];
\r
135 buf = new byte[pixels.length * 3];
\r
136 resultf = new float[16];
\r
141 Socket socket = null;
\r
143 Bitmap bitmap = null;
\r
145 socket = new Socket();
\r
146 socket.setSoTimeout(SOCKET_TIMEOUT);
\r
147 socket.connect(new InetSocketAddress(address, port),
\r
149 // obtain the bitmap
\r
150 InputStream in = socket.getInputStream();
\r
151 // DataInputStream dis = new DataInputStream(
\r
152 // new BufferedInputStream(in, buf.length));
\r
153 // dis.readFully(buf);
\r
155 bitmap = BitmapFactory.decodeStream(in);
\r
156 // bitmap = BitmapFactory.decodeByteArray(buf, 0, buf.length);
\r
158 bitmap = fixedBitmap;
\r
162 OpenGLContext glc = new OpenGLContext(OpenGLContext.DEPTH_BUFFER);
\r
163 glc.makeCurrent(mHolder);
\r
164 GL10 gl = (GL10) (glc.getGL());
\r
165 gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_FASTEST);
\r
167 gl.glViewport(0, 0, w, h);
\r
168 float ratio = (float) h / w;
\r
169 gl.glMatrixMode(GL10.GL_PROJECTION);
\r
170 gl.glLoadIdentity();
\r
171 gl.glFrustumf(-1.0f, 1.0f, -ratio, ratio, 5.0f, 40.0f);
\r
173 gl.glMatrixMode(GL10.GL_MODELVIEW);
\r
174 gl.glLoadIdentity();
\r
175 gl.glTranslatef(0.0f, 0.0f, -10.0f);
\r
177 gl.glDisable(GL10.GL_DITHER);
\r
178 gl.glActiveTexture(GL10.GL_TEXTURE0);
\r
179 gl.glBindTexture(GL10.GL_TEXTURE_2D, 0);
\r
180 gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S,
\r
181 GL10.GL_CLAMP_TO_EDGE);
\r
182 gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T,
\r
183 GL10.GL_CLAMP_TO_EDGE);
\r
184 gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,
\r
186 gl.glTexParameterx(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
\r
188 gl.glTexEnvx(GL10.GL_TEXTURE_ENV, GL10.GL_TEXTURE_ENV_MODE,
\r
191 gl.glColor4f(0.7f, 0.7f, 0.7f, 1.0f);
\r
192 gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
\r
193 gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
\r
194 gl.glEnable(GL10.GL_CULL_FACE);
\r
195 gl.glShadeModel(GL10.GL_SMOOTH);
\r
196 gl.glEnable(GL10.GL_DEPTH_TEST);
\r
198 boolean is_marker_exist = false;
\r
200 // double[][] atm = null;
\r
203 // time = System.currentTimeMillis();
\r
206 bitmap.getPixels(pixels, 0, w, 0, 0, w, h);
\r
208 for (int i = 0; i < pixels.length; i++) {
\r
209 int argb = pixels[i];
\r
210 // byte a = (byte) (argb & 0xFF000000 >> 24);
\r
211 byte r = (byte) (argb & 0x00FF0000 >> 16);
\r
212 byte g = (byte) (argb & 0x0000FF00 >> 8);
\r
213 byte b = (byte) (argb & 0x000000FF);
\r
215 buf[i * 3 + 1] = g;
\r
216 buf[i * 3 + 2] = b;
\r
218 // Log.d("nyar", "Time to copy image : "
\r
219 // + (System.currentTimeMillis() - time) + " ms");
\r
221 if (raster == null) {
\r
222 raster = new NyARRaster_RGB();
\r
224 // time = System.currentTimeMillis();
\r
225 NyARRaster_RGB.wrap(raster, buf, w, h);
\r
228 // Log.d("nyar", "detecting marker");
\r
229 is_marker_exist = nya.detectMarkerLite(raster, 100);
\r
230 // Log.d("nyar", "marker detected");
\r
232 // if (is_marker_exist) {
\r
234 // atm = nya.getTransmationMatrix().getArray();
\r
236 } catch (NyARException e) {
\r
237 Log.e("nyar", "marker detection failed", e);
\r
240 // Log.d("nyar", "Time to detect marker : "
\r
241 // + (System.currentTimeMillis() - time) + " ms");
\r
243 gl.glMatrixMode(GL10.GL_MODELVIEW);
\r
244 gl.glLoadIdentity();
\r
245 // int num = (int) (System.currentTimeMillis() % 3) - 1;
\r
247 gl.glTranslatef(0.0f, 0.0f, -10.0f);
\r
248 gl.glRotatef(20.0f * num, 0, 0, 1);
\r
250 if (is_marker_exist) {
\r
251 // Log.d("nyar", "marker exist");
\r
252 // // Projection transformation.
\r
253 gl.glMatrixMode(GL10.GL_PROJECTION);
\r
254 gl.glLoadIdentity();
\r
255 gl.glLoadMatrixf(ar_param.getCameraFrustumRHf(), 0);
\r
256 gl.glMatrixMode(GL10.GL_MODELVIEW);
\r
257 // Viewing transformation.
\r
258 gl.glLoadIdentity();
\r
259 nya.getCameraViewRH(resultf);
\r
260 // int num = (int) (System.currentTimeMillis() % 3) - 1;
\r
261 // gl.glTranslatef(0.0f, 2.5f, -20.0f);
\r
262 gl.glLoadMatrixf(resultf, 0);
\r
263 // gl.glTranslatef(0.0f, 0.0f, -10.0f);
\r
264 // gl.glRotatef(20.0f, 1, 0, 0);
\r
265 // gl.glScalef(0.5f, 0.5f, 1.0f);
\r
266 // gl.glRotatef(20.0f * num, 0, 0, 1);
\r
270 // Log.d("nyar", "marker does not exist");
\r
273 // time = System.currentTimeMillis();
\r
274 gl.glClearColor(1, 1, 1, 1);
\r
275 gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
\r
277 // render it to canvas, scaling if necessary
\r
278 if (bounds.right == bitmap.width()
\r
279 && bounds.bottom == bitmap.height()) {
\r
280 canvas.drawBitmap(bitmap, 0, 0, null);
\r
282 // Log.d("nyar", "preserveAspectRation = " +
\r
283 // preserveAspectRatio);
\r
285 if (preserveAspectRatio) {
\r
286 dest = new Rect(bounds);
\r
287 dest.bottom = bitmap.height() * bounds.right
\r
289 dest.offset(0, (bounds.bottom - dest.bottom) / 2);
\r
293 canvas.drawBitmap(bitmap, null, dest, paint);
\r
296 if (is_marker_exist) {
\r
297 // Log.d("nyar", "drawing a cube.");
\r
299 // gl.glRotatef(20.0f, 0, 0, 1.0f);
\r
301 } else if (!DETECT) {
\r
302 // Log.d("nyar", "drawing a cube without detection.");
\r
304 // Thread.sleep(1000);
\r
307 // Log.d("nyar", "" + (System.currentTimeMillis() - time) + "ms");
\r
312 // g.drawImage(img, 32, 32, this);
\r
313 if (is_marker_exist) {
\r
314 // Log.d("nyar", "marker exist and drawing text");
\r
315 // str = "マーカー検出 [" + nya.getConfidence() + "]";
\r
316 // canvas.drawText(str, 32, 100, paint);
\r
317 // for (int i = 0; i < 3; i++) {
\r
318 // for (int i2 = 0; i2 < 4; i2++) {
\r
319 // str = "[" + i + "][" + i2 + "]" + atm[i][i2];
\r
320 // canvas.drawText(str, 32, 0 + (1 + i2 * 3 + i) * 16,
\r
325 for (int i = 0; i < 4; i++) {
\r
326 for (int i2 = 0; i2 < 4; i2++) {
\r
327 str = "[" + i + "][" + i2 + "]"
\r
328 + resultf[i * 4 + i2];
\r
329 canvas.drawText(str, 32, 0 + (1 + i * 4 + i2) * 16,
\r
333 float[] arp = ar_param.getCameraFrustumRHf();
\r
334 for (int i = 0; i < 4; i++) {
\r
335 for (int i2 = 0; i2 < 4; i2++) {
\r
336 str = "[" + i + "][" + i2 + "]" + arp[i * 4 + i2];
\r
337 canvas.drawText(str, 152,
\r
338 0 + (1 + i * 4 + i2) * 16, paintBlue);
\r
342 // Log.d("nyar", "marker does not exist");
\r
343 // g.drawString("マーカー未検出:", 32, 100);
\r
345 canvas.drawText(str, 32, 100, paint);
\r
348 } catch (Exception e) {
\r
349 Log.i(LOG_TAG, "Failed to obtain image over network", e);
\r
353 if (socket != null) {
\r
356 } catch (IOException e) {
\r
364 public void close() {
\r
365 /* nothing to do */
\r
370 private FloatBuffer mVertexBuffer;
\r
371 private IntBuffer mColorBuffer;
\r
372 private ByteBuffer mIndexBuffer;
\r
376 float[] vertices = { fone, fone, fone, fone, -fone, fone, -fone,
\r
377 -fone, fone, -fone, fone, fone, fone, fone, -fone, fone,
\r
378 -fone, -fone, -fone, -fone, -fone, -fone, fone, -fone };
\r
380 int one = 0x00010000;
\r
381 int[] colors = { 0, 0, 0, one, one, 0, 0, one, one, one, 0, one, 0,
\r
382 one, 0, one, 0, 0, one, one, one, 0, one, one, one, one,
\r
383 one, one, 0, one, one, one };
\r
385 byte[] indices = new byte[] { 0, 4, 5, 0, 5, 1, 1, 5, 6, 1, 6, 2,
\r
386 2, 6, 7, 2, 7, 3, 3, 7, 4, 3, 4, 0, 4, 7, 6, 4, 6, 5, 3, 0,
\r
388 // indices = new byte[] { 3, 2, 1, 0, 2, 3, 7, 6, 0, 1, 5, 4, 3, 0,
\r
390 // 7, 1, 2, 6, 5, 4, 5, 6, 7 };
\r
392 // Buffers to be passed to gl*Pointer() functions
\r
393 // must be direct, i.e., they must be placed on the
\r
394 // native heap where the garbage collector cannot
\r
397 // Buffers with multi-byte datatypes (e.g., short, int, float)
\r
398 // must have their byte order set to native order
\r
400 ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
\r
401 vbb.order(ByteOrder.nativeOrder());
\r
402 mVertexBuffer = vbb.asFloatBuffer();
\r
403 mVertexBuffer.put(vertices);
\r
404 mVertexBuffer.position(0);
\r
406 ByteBuffer cbb = ByteBuffer.allocateDirect(colors.length * 4);
\r
407 cbb.order(ByteOrder.nativeOrder());
\r
408 mColorBuffer = cbb.asIntBuffer();
\r
409 mColorBuffer.put(colors);
\r
410 mColorBuffer.position(0);
\r
412 mIndexBuffer = ByteBuffer.allocateDirect(indices.length);
\r
413 mIndexBuffer.put(indices);
\r
414 mIndexBuffer.position(0);
\r
417 public void draw(GL10 gl) {
\r
419 gl.glTranslatef(0.0f, 0.0f, 0.5f); // Place base of cube on marker
\r
421 gl.glFrontFace(GL10.GL_CW);
\r
422 gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffer);
\r
423 gl.glColorPointer(4, GL10.GL_FIXED, 0, mColorBuffer);
\r
424 gl.glDrawElements(GL10.GL_TRIANGLES, 36, GL10.GL_UNSIGNED_BYTE,
\r
427 gl.glTranslatef(0.0f, 0.0f, -0.5f); // Place base of cube on marker
\r