First Person Camera using JOGL GL3 core - camera

I am trying to make a basic first-person camera scene using JOGL GL3 core and a programmable vertex shader, but it doesn't look like the vertex array object is being projected correctly.
I believe the keyboard and mouse functions are working correctly and that the problem lies with the shader program or the vertex shader.
The AxisScene class is where the bulk of the action happens, but the entire Gradle project can be found here.
I followed the projection theory from here
What code is wrong/missing to create true FPS behaviour?
package fpsscene.fpsscene;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.jogamp.common.nio.Buffers;
import com.jogamp.opengl.GL;
import com.jogamp.opengl.GL2;
import com.jogamp.opengl.GL2ES2;
import com.jogamp.opengl.GL3;
import com.jogamp.opengl.GL3ES3;
import com.jogamp.opengl.GLAutoDrawable;
import com.jogamp.opengl.GLES3;
import com.jogamp.opengl.glu.GLU;
import com.jogamp.opengl.math.Matrix4;
import fpsscene.adapters.ApplyXY;
import fpsscene.adapters.BasicMovement;
import fpsscene.gl.primitives.ColoredTriangle;
import fpsscene.gl.primitives.Point2f;
import fpsscene.gl.primitives.Point3f;
public class AxisScene extends Scene implements ApplyXY , BasicMovement{
private static String vertexShaderString = String.join("\n",
"#version 130\n",
"",
"in vec3 vertex_position;",
"in vec3 vertex_colour;",
"uniform mat4 view, proj;",
"out vec3 colour;",
"void main() {",
" colour = vertex_colour;",
" gl_Position = proj * view * vec4 (vertex_position, 1.0);",
"}"
);
private static String fragmentShaderString = String.join("\n",
"#version 130\n",
"in vec3 colour;",
"out vec4 frag_colour;",
"void main() {",
" frag_colour = vec4 (colour, 1.0);",
"}"
);
private int shaderProgram;
int vertShader;
int fragShader;
int view_mat_location;
int proj_mat_location;
Matrix4 proj_mat;
Matrix4 view_mat;
float sens_rot;
Point3f eye_default;
Point3f up_default;
Point2f rot_default;
Point2f fov_default;
Point3f eye;
Point3f up;
Point2f rot;
Point2f fov;
int axisVao[] = new int[1];
private int axisLen;
float near; // clipping plane
float far; // clipping plane
static final int COLOR_IDX = 0;
static final int VERTICES_IDX = 1;
private static final float DROT_FULL = 360.0f;
private static final float DROT_QUART = DROT_FULL/4.0f;
private int width=1920;
private int height=1080;
public AxisScene() {
this.eye_default = new Point3f(0.0f, 0.0f, 0.0f);
this.fov_default = new Point2f(120.0f, 90.0f);
this.rot_default = new Point2f(0.0f, 0.0f);
this.up_default = new Point3f(0.0f, 1.0f, 0.0f);
this.eye = eye_default;
this.fov = fov_default;
this.rot = rot_default;
this.up = up_default;
near = 0.01f;
far = 1000000.0f;
sens_rot = 0.03f;
rot.set(138.869919f, 4.44001198f);
eye.set(-4.66594696f,3.20000124f,-5.04626369f);
// rot.set(167.31528f,0.0f);
updateProjMat();
updateViewMatrix();
}
@Override
public void init(GLAutoDrawable drawable) {
GL3 gl = drawable.getGL().getGL3();
if(!gl.isGL3core()){
Logger.getAnonymousLogger().log(Level.SEVERE, "GL3core not enabled");
}
vertShader = createShaderFromString(gl, AxisScene.vertexShaderString,GL2ES2.GL_VERTEX_SHADER);
fragShader = createShaderFromString(gl, AxisScene.fragmentShaderString,GL2ES2.GL_FRAGMENT_SHADER);
shaderProgram = gl.glCreateProgram();
gl.glAttachShader(shaderProgram, vertShader);
gl.glAttachShader(shaderProgram, fragShader);
gl.glLinkProgram(shaderProgram);
this.view_mat_location = gl.glGetUniformLocation(shaderProgram, "view");
this.proj_mat_location = gl.glGetUniformLocation(shaderProgram, "proj");
gl.glDeleteShader(vertShader);
gl.glDeleteShader(fragShader);
List<ColoredTriangle> triangles = new AxisTrianges(100).createAxisTriangles();
float[] vertices = ColoredTriangle.verticesToArray(triangles);
float[] colors = ColoredTriangle.colorsToArray(triangles);
FloatBuffer fbVertices = Buffers.newDirectFloatBuffer(vertices);
FloatBuffer fbColors = Buffers.newDirectFloatBuffer(colors);
int[] points_vbo = new int[1];
gl.glGenBuffers(1, points_vbo,0);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, points_vbo[0]);
gl.glBufferData(GL.GL_ARRAY_BUFFER, triangles.size() * 9 * Float.BYTES, fbVertices, GL.GL_STATIC_DRAW);
int[] colours_vbo = new int[1];
gl.glGenBuffers(1, colours_vbo,0);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, colours_vbo[0]);
gl.glBufferData(GL.GL_ARRAY_BUFFER, triangles.size() * 9 * Float.BYTES, fbColors, GL.GL_STATIC_DRAW);
gl.glGenVertexArrays(1, axisVao,0);
gl.glBindVertexArray(axisVao[0]);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, points_vbo[0]);
gl.glVertexAttribPointer(0, 3, GL.GL_FLOAT, false, 0, 0L);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, colours_vbo[0]);
gl.glVertexAttribPointer(1, 3, GL.GL_FLOAT, false, 0, 0L);
gl.glEnableVertexAttribArray(0);
gl.glEnableVertexAttribArray(1);
axisLen = triangles.size();
}
@Override
public void dispose(GLAutoDrawable drawable) {
System.out.println("cleanup, remember to release shaders");
GL3 gl = drawable.getGL().getGL3();
gl.glUseProgram(0);
gl.glDetachShader(shaderProgram, vertShader);
gl.glDeleteShader(vertShader);
gl.glDetachShader(shaderProgram, fragShader);
gl.glDeleteShader(fragShader);
gl.glDeleteProgram(shaderProgram);
}
@Override
public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {
this.width = width;
this.height = height;
this.updateProjMat();
GL3 gl = drawable.getGL().getGL3();
gl.glViewport((width-height)/2,0,height,height);
}
@Override
protected void glDisplay(GLAutoDrawable drawable) {
GL3 gl = drawable.getGL().getGL3();
gl.glClearColor(1, 1, 1, 1.0f);
gl.glClear(GL2ES2.GL_STENCIL_BUFFER_BIT | GL2ES2.GL_COLOR_BUFFER_BIT | GL2ES2.GL_DEPTH_BUFFER_BIT );
gl.glUseProgram(shaderProgram);
gl.glUniformMatrix4fv(this.view_mat_location, 1, false, this.view_mat.getMatrix(), 0);
gl.glUniformMatrix4fv(this.proj_mat_location, 1, true, this.proj_mat.getMatrix(), 0);
gl.glBindVertexArray(axisVao[0]);
gl.glDrawArrays(GL2ES2.GL_TRIANGLES, 0, 3 * axisLen); //Draw the vertices as triangle
gl.glBindVertexArray(0);
gl.glCullFace(GL2ES2.GL_NONE);
gl.glDisable(GL2ES2.GL_CULL_FACE);
}
private void updateViewMatrix() {
Matrix4 T = new Matrix4();
T.translate(-eye.getX(), -eye.getY(), -eye.getZ());
Matrix4 yRot = new Matrix4();
yRot.rotate((float)Math.toRadians(rot.getX()), 0.0f, 1.0f, 0.0f);
Matrix4 xRot = new Matrix4();
xRot.rotate((float)Math.toRadians(Math.cos(-Math.toRadians(rot.getX())) * rot.getY()), 1.0f, 0.0f, 0.0f);
Matrix4 zRot = new Matrix4();
zRot.rotate((float)Math.toRadians(Math.sin(Math.toRadians(rot.getX())) * rot.getY()), 0.0f, 0.0f, 1.0f);
Matrix4 R = yRot;
R.multMatrix(xRot);
R.multMatrix(zRot);
view_mat = T;
view_mat.multMatrix(R);
}
@Override
protected boolean glRender(GLAutoDrawable drawable) {
GL3 gl = drawable.getGL().getGL3();
return false;
}
private void updateProjMat() {
float aspect = (float) width / (float) height; // aspect ratio
float range = (float) Math.tan(Math.toRadians(fov.getX() * 0.5f));
float proj_mat[] = new float[16];
proj_mat[0] = 1.0f / (range * aspect);
proj_mat[1] = 0.0f;
proj_mat[2] = 0.0f;
proj_mat[3] = 0.0f;
proj_mat[4] = 0.0f;
proj_mat[5] = 1.0f / range;
proj_mat[6] = 0.0f;
proj_mat[7] = 0.0f;
proj_mat[8] = 0.0f;
proj_mat[9] = 0.0f;
proj_mat[10] = -(far + near) / (far - near);
proj_mat[11] = -(2.0f * far * near) / (far - near);
proj_mat[12] = 0.0f;
proj_mat[13] = 0.0f;
proj_mat[14] =-1.0f;
proj_mat[15] = 0.0f;
this.proj_mat = new Matrix4();
this.proj_mat.multMatrix(proj_mat);
}
@Override
public void applyXY(float x, float y) {
rot.setX(fmod(rot.getX() + x * sens_rot, DROT_FULL));
rot.setY(Math.min(Math.max(rot.getY() + y * sens_rot, -DROT_QUART), DROT_QUART));
updateViewMatrix();
}
private float fmod(float f, float m) {
return ((f%m) + m) %m;
}
@Override
public void translate(float x, float y, float z) {
float deltax = z * (float)Math.sin(Math.toRadians(rot.getX())) + x * (float)Math.cos(Math.toRadians(rot.getX()));
float deltaz = z * (float)Math.cos(Math.toRadians(rot.getX())) - x * (float)Math.sin(Math.toRadians(rot.getX()));
eye.set( eye.getX()+deltax, eye.getY()+y, eye.getZ()+deltaz );
updateViewMatrix();
System.out.println(eye + rot.toString());
}
private int createShaderFromString(GL3 gl, String shaderCode,int type) {
int shader = gl.glCreateShader(type);
String[] vlines = new String[] { shaderCode };
int[] vlengths = new int[] { vlines[0].length() };
gl.glShaderSource(shader, vlines.length, vlines, vlengths, 0);
gl.glCompileShader(shader);
int[] compiled = new int[1];
gl.glGetShaderiv(shader, GL2ES2.GL_COMPILE_STATUS, compiled,0);
if(compiled[0]!=0){
System.out.println("Horray! vertex shader compiled");
} else {
int[] logLength = new int[1];
gl.glGetShaderiv(shader, GL2ES2.GL_INFO_LOG_LENGTH, logLength, 0);
byte[] log = new byte[logLength[0]];
gl.glGetShaderInfoLog(shader, logLength[0], (int[])null, 0, log, 0);
System.err.println("Error compiling the vertex shader: " + new String(log));
System.exit(1);
}
return shader;
}
}

If you're looking for a way to make a camera have "true FPS behavior", then I suggest you take a look at this tutorial: https://beta.wikiversity.org/wiki/Computer_graphics_--2008-2009--_info.uvt.ro/Laboratory_7
I personally went with solution no. 2. From what I've seen of the solution you went with, this one seems a lot more elegant. Of course, I modified it to include the mouse. If you want to try it yourself, just follow whichever solution best fits your code. If you want, I can show you my code with the modifications, but only if you ask for it, since I don't want to spoil the solution for you.
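For orientation only, and not as a drop-in fix, here is a minimal sketch of how a yaw/pitch FPS view matrix is commonly assembled with JOGL's Matrix4, reusing the eye/rot fields and Point2f/Point3f types from the question (the helper name and the angle signs are my own assumptions and may need flipping to match your mouse convention):

// Hypothetical helper: view = Rx(pitch) * Ry(yaw) * T(-eye).
// Matrix4.rotate()/translate() post-multiply, so the calls below are applied
// to a vertex in the reverse order they are written (the translation first).
private Matrix4 buildFpsViewMatrix(Point3f eye, Point2f rot) {
    Matrix4 view = new Matrix4();                                  // starts as identity
    view.rotate((float) Math.toRadians(rot.getY()), 1f, 0f, 0f);   // pitch around X
    view.rotate((float) Math.toRadians(rot.getX()), 0f, 1f, 0f);   // yaw around Y
    view.translate(-eye.getX(), -eye.getY(), -eye.getZ());         // move world by -eye
    return view;
}

Note that this is the opposite multiplication order to updateViewMatrix() in the question, which builds the translation first and then multiplies the rotation onto it; whether that ordering (or the cos/sin split of the pitch across the X and Z axes) is the actual culprit is something I can only guess at without running the project.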

Related

LWJGL triangle not showing (Kotlin)

I was trying to render a triangle with a shader in Kotlin with LWJGL, but for some reason it doesn't work and I don't get any error. It's really hard to debug, because even if I intentionally write something wrong in the shader code, I still don't receive any error, so I can't tell what is wrong with this code.
Triangle class code:
class Triangle {
private var vertexShaderCode: String = "" +
"attribute vec4 vPosition; \n" +
"void main() { \n" +
" gl_Position = vPosition; \n" +
"}"
private var fragmentShaderCode: String = "" +
"precision mediump float; \n" +
"uniform vec4 vColor; \n" +
"void main() { \n" +
" gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); \n" +
"}"
private lateinit var vertexBuffer: FloatBuffer
private val COORDS_PER_VERTEX: Int = 3
private val triangleCoords = floatArrayOf(
0.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f,
1.0f, 0.0f, 0.0f
)
private val color = floatArrayOf(1.0f, 0.0f, 0.0f, 1.0f)
private val mProgram: Int
var VBO: Int
var VAO: Int
init {
VBO = glGenBuffers()
VAO = glGenVertexArrays()
glBindVertexArray(0)
val vertexShader: Int = Engine().loadShader(GL_VERTEX_SHADER, vertexShaderCode)
val fragmentShader: Int = Engine().loadShader(GL_FRAGMENT_SHADER, fragmentShaderCode)
mProgram = glCreateProgram()
glAttachShader(mProgram, vertexShader)
glAttachShader(mProgram, fragmentShader)
glLinkProgram(mProgram)
}
fun draw() {
glUseProgram(mProgram)
VBO = glGenBuffers()
VAO = glGenVertexArrays()
glBindVertexArray(VAO)
glBindBuffer(GL_ARRAY_BUFFER, VBO)
GL15.glBufferData(GL_ARRAY_BUFFER, triangleCoords, GL_STATIC_DRAW)
GL20.glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0)
glBindVertexArray(VAO)
glDrawArrays(GL_TRIANGLES, 0, 3)
glDisableVertexAttribArray(0)
glBindVertexArray(0)
}
}
Application code:
class Engine {
private var window: Long = 0
fun run() {
println("Hello LWJGL " + Version.getVersion() + "!")
init()
loop()
glfwFreeCallbacks(window)
glfwDestroyWindow(window)
glfwTerminate()
glfwSetErrorCallback(null)?.free()
}
private fun init() {
GLFWErrorCallback.createPrint(System.err).set()
if(!glfwInit())
throw IllegalStateException("Unable to initialize GLFW!")
glfwDefaultWindowHints()
glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE)
glfwWindowHint(GLFW_RESIZABLE, GLFW_TRUE)
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3)
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0)
window = glfwCreateWindow(600, 800, "Hello World!", NULL, NULL)
if(window==NULL)
throw RuntimeException("Failed to create the GLFW window!")
stackPush().use { stack ->
val pWidth = stack.mallocInt(1) // int*
val pHeight = stack.mallocInt(1) // int*
// Get the window size passed to glfwCreateWindow
glfwGetWindowSize(window, pWidth, pHeight)
// Get the resolution of the primary monitor
val vidmode = glfwGetVideoMode(glfwGetPrimaryMonitor())
// Center the window
glfwSetWindowPos(
window,
(vidmode!!.width() - pWidth[0]) / 2,
(vidmode.height() - pHeight[0]) / 2
)
}
glfwMakeContextCurrent(window)
glfwSwapInterval(1)
glfwShowWindow(window)
}
private fun loop(){
GL.createCapabilities()
var triangle = Triangle()
glClearColor(1.0f, 1.0f, 1.0f, 1.0f)
while(!glfwWindowShouldClose(window)) {
glClear(GL_COLOR_BUFFER_BIT or GL_DEPTH_BUFFER_BIT)
triangle.draw()
glfwSwapBuffers(window)
glfwPollEvents()
}
}
fun loadShader(type: Int, shaderCode: String): Int {
val shader = glCreateShader(type)
glShaderSource(shader, shaderCode)
glCompileShader(shader)
return shader
}
}
OK, so there were MANY things wrong with this code. The comments under my question really helped, so thank you! If someone has a similar problem, here is the fixed version of the code.
class Triangle {
private var vertexShaderCode: String = "" +
"#version 330\n" +
"attribute vec4 vPosition; \n" +
"void main() { \n" +
" gl_Position = vPosition; \n" +
"}"
private var fragmentShaderCode: String = "" +
"#version 330 \n" +
"precision mediump float; \n" +
"uniform vec4 vColor; \n" +
"void main() { \n" +
" gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); \n" +
"}"
private val COORDS_PER_VERTEX: Int = 3
private val triangleCoords = floatArrayOf(
+0.0f, +0.8f, // Top coordinate
-0.8f, -0.8f, // Bottom-left coordinate
+0.8f, -0.8f // Bottom-right coordinate
)
private var coordsBuffer: FloatBuffer = BufferUtils.createFloatBuffer(triangleCoords.size)
private val color = floatArrayOf(1.0f, 0.0f, 0.0f, 1.0f)
private val mProgram: Int
var VBO: Int
var VAO: Int
var vertexShader: Int
var fragmentShader: Int
init {
coordsBuffer.put(triangleCoords).flip()
VAO = glGenVertexArrays()
glBindVertexArray(VAO)
VBO = glGenBuffers()
glBindBuffer(GL_ARRAY_BUFFER, VBO)
GL15.glBufferData(GL_ARRAY_BUFFER, coordsBuffer, GL_STATIC_DRAW)
GL20.glVertexAttribPointer(0, 2, GL_FLOAT, false, 0, 0)
vertexShader = Engine().loadShader(GL_VERTEX_SHADER, vertexShaderCode)
fragmentShader = Engine().loadShader(GL_FRAGMENT_SHADER, fragmentShaderCode)
mProgram = glCreateProgram()
glAttachShader(mProgram, vertexShader)
glAttachShader(mProgram, fragmentShader)
glLinkProgram(mProgram)
if (glGetProgrami(mProgram, GL_LINK_STATUS) == GL_FALSE)
throw RuntimeException("Unable to link shader program:")
}
fun draw() {
glUseProgram(mProgram)
glBindVertexArray(VAO)
glEnableVertexAttribArray(0)
glDrawArrays(GL_TRIANGLES, 0, 3)
glDisableVertexAttribArray(0)
glBindVertexArray(0)
glUseProgram(0)
}
}
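One more debugging aid worth mentioning, since the original problem was that shader errors were silently swallowed: the loadShader helper in Engine still doesn't query the compile status. A minimal sketch of such a check with LWJGL (written in Java to match the other examples on this page; the helper name is mine) could look like this:

import static org.lwjgl.opengl.GL20.*;

// Compile a shader and fail loudly if the driver reports a compile error.
public static int loadShaderChecked(int type, String source) {
    int shader = glCreateShader(type);
    glShaderSource(shader, source);
    glCompileShader(shader);
    if (glGetShaderi(shader, GL_COMPILE_STATUS) == 0) {   // 0 == GL_FALSE
        String log = glGetShaderInfoLog(shader);          // compiler error messages
        glDeleteShader(shader);
        throw new RuntimeException("Shader compile failed:\n" + log);
    }
    return shader;
}

The same idea applies after glLinkProgram, which the fixed version above already handles by checking GL_LINK_STATUS.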

How to color individual pixels with OpenGL ES 2.0?

Is it possible to change the color of an individual pixel with OpenGL ES 2.0? Right now, I have found that I can manage it using a vertex. I've used this method to draw it:
GLES20.glDrawArrays(GLES20.GL_POINTS, 0, 1);
The point size was set to the minimum so that only a single pixel is painted.
All good, until I needed to draw 3 to 4 million of them! It takes 5-6 seconds to initialize just one frame. This is too slow, given that the pixels will be updated constantly; the update/refresh rate should preferably be as close as possible to 60 fps.
How can I paint them in a more efficient way?
Note: the pixels must be painted individually!
My attempt is here (for a screen of 1440x2560 px):
package com.example.ctelescu.opengl_pixel_draw;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class PixelDrawRenderer implements GLSurfaceView.Renderer {
private float[] mModelMatrix = new float[16];
private float[] mViewMatrix = new float[16];
private float[] mProjectionMatrix = new float[16];
private float[] mMVPMatrix = new float[16];
private final FloatBuffer mVerticesBuffer;
private int mMVPMatrixHandle;
private int mPositionHandle;
private int mColorHandle;
private final int mBytesPerFloat = 4;
private final int mStrideBytes = 7 * mBytesPerFloat;
private final int mPositionOffset = 0;
private final int mPositionDataSize = 3;
private final int mColorOffset = 3;
private final int mColorDataSize = 4;
public PixelDrawRenderer() {
// Define the vertices.
// final float[] vertices = {
// // X, Y, Z,
// // R, G, B, A
// -1f, 1f, 0.0f,
// 1.0f, 0.0f, 0.0f, 1.0f,
//
// -0.9f, 1.2f, 0.0f,
// 0.0f, 0.0f, 1.0f, 1.0f,
//
// -0.88f, 1.2f, 0.0f,
// 0.0f, 1.0f, 0.0f, 1.0f,
//
// -0.87f, 1.2f, 0.0f,
// 0.0f, 1.0f, 0.0f, 1.0f,
//
// -0.86f, 1.2f, 0.0f,
// 0.0f, 1.0f, 0.0f, 1.0f,
//
// -0.85f, 1.2f, 0.0f,
// 0.0f, 1.0f, 0.0f, 1.0f};
// Initialize the buffers.
mVerticesBuffer = ByteBuffer.allocateDirect(22579200 * mBytesPerFloat)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
// mVerticesBuffer.put(vertices);
}
@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
// Set the background clear color to gray.
GLES20.glClearColor(0.5f, 0.5f, 0.5f, 0.5f);
// Position the eye behind the origin.
final float eyeX = 0.0f;
final float eyeY = 0.0f;
final float eyeZ = 1.5f;
// We are looking toward the distance
final float lookX = 0.0f;
final float lookY = 0.0f;
final float lookZ = -5.0f;
// Set our up vector. This is where our head would be pointing were we holding the camera.
final float upX = 0.0f;
final float upY = 1.0f;
final float upZ = 0.0f;
// Set the view matrix. This matrix can be said to represent the camera position.
// NOTE: In OpenGL 1, a ModelView matrix is used, which is a combination of a model and
// view matrix. In OpenGL 2, we can keep track of these matrices separately if we choose.
Matrix.setLookAtM(mViewMatrix, 0, eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ);
final String vertexShader =
"uniform mat4 u_MVPMatrix; \n" // A constant representing the combined model/view/projection matrix.
+ "attribute vec4 a_Position; \n" // Per-vertex position information we will pass in.
+ "attribute vec4 a_Color; \n" // Per-vertex color information we will pass in.
+ "varying vec4 v_Color; \n" // This will be passed into the fragment shader.
+ "void main() \n" // The entry point for our vertex shader.
+ "{ \n"
+ " v_Color = a_Color; \n" // Pass the color through to the fragment shader.
// It will be interpolated across the vertex.
+ " gl_Position = u_MVPMatrix \n" // gl_Position is a special variable used to store the final position.
+ " * a_Position; \n" // Multiply the vertex by the matrix to get the final point in
+ " gl_PointSize = 0.1; \n"
+ "} \n"; // normalized screen coordinates.
final String fragmentShader =
"#ifdef GL_FRAGMENT_PRECISION_HIGH \n"
+ "precision highp float; \n"
+ "#else \n"
+ "precision mediump float; \n" // Set the default precision to medium. We don't need as high of a
// precision in the fragment shader.
+ "#endif \n"
+ "varying vec4 v_Color; \n" // This is the color from the vertex shader interpolated across the
// vertex per fragment.
+ "void main() \n" // The entry point for our fragment shader.
+ "{ \n"
+ " gl_FragColor = v_Color; \n" // Pass the color directly through the pipeline.
+ "} \n";
// Load in the vertex shader.
int vertexShaderHandle = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
if (vertexShaderHandle != 0) {
// Pass in the shader source.
GLES20.glShaderSource(vertexShaderHandle, vertexShader);
// Compile the shader.
GLES20.glCompileShader(vertexShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(vertexShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
GLES20.glDeleteShader(vertexShaderHandle);
vertexShaderHandle = 0;
}
}
if (vertexShaderHandle == 0) {
throw new RuntimeException("Error creating vertex shader.");
}
// Load in the fragment shader shader.
int fragmentShaderHandle = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
if (fragmentShaderHandle != 0) {
// Pass in the shader source.
GLES20.glShaderSource(fragmentShaderHandle, fragmentShader);
// Compile the shader.
GLES20.glCompileShader(fragmentShaderHandle);
// Get the compilation status.
final int[] compileStatus = new int[1];
GLES20.glGetShaderiv(fragmentShaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
// If the compilation failed, delete the shader.
if (compileStatus[0] == 0) {
GLES20.glDeleteShader(fragmentShaderHandle);
fragmentShaderHandle = 0;
}
}
if (fragmentShaderHandle == 0) {
throw new RuntimeException("Error creating fragment shader.");
}
// Create a program object and store the handle to it.
int programHandle = GLES20.glCreateProgram();
if (programHandle != 0) {
// Bind the vertex shader to the program.
GLES20.glAttachShader(programHandle, vertexShaderHandle);
// Bind the fragment shader to the program.
GLES20.glAttachShader(programHandle, fragmentShaderHandle);
// Bind attributes
GLES20.glBindAttribLocation(programHandle, 0, "a_Position");
GLES20.glBindAttribLocation(programHandle, 1, "a_Color");
// Link the two shaders together into a program.
GLES20.glLinkProgram(programHandle);
// Get the link status.
final int[] linkStatus = new int[1];
GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
// If the link failed, delete the program.
if (linkStatus[0] == 0) {
GLES20.glDeleteProgram(programHandle);
programHandle = 0;
}
}
if (programHandle == 0) {
throw new RuntimeException("Error creating program.");
}
// Set program handles. These will later be used to pass in values to the program.
mMVPMatrixHandle = GLES20.glGetUniformLocation(programHandle, "u_MVPMatrix");
mPositionHandle = GLES20.glGetAttribLocation(programHandle, "a_Position");
mColorHandle = GLES20.glGetAttribLocation(programHandle, "a_Color");
// Tell OpenGL to use this program when rendering.
GLES20.glUseProgram(programHandle);
}
@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height) {
// Set the OpenGL viewport to the same size as the surface.
GLES20.glViewport(0, 0, width, height);
// Create a new perspective projection matrix. The height will stay the same
// while the width will vary as per aspect ratio.
final float ratio = (float) width / height;
final float left = -ratio;
final float right = ratio;
final float bottom = -1.0f;
final float top = 1.0f;
final float near = 1.0f;
final float far = 10.0f;
Matrix.frustumM(mProjectionMatrix, 0, left, right, bottom, top, near, far);
float[] vertices = new float[22579200];
int counter = 0;
for (float i = -width / 2; i < width / 2; i++) {
for (float j = height / 2; j > -height / 2; j--) {
// Initialize the buffers.
vertices[counter++] = 2f * i * (1f / width); //X
vertices[counter++] = 2f * j * (1.5f / height); //Y
vertices[counter++] = 0; //Z
vertices[counter++] = 1f; //red
vertices[counter++] = 1f; //green
vertices[counter++] = 0f; //blue
vertices[counter++] = 1f; //alpha
}
}
mVerticesBuffer.put(vertices);
mVerticesBuffer.clear();
}
@Override
public void onDrawFrame(GL10 glUnused) {
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
// Draw the vertices facing straight on.
Matrix.setIdentityM(mModelMatrix, 0);
drawVertices(mVerticesBuffer);
}
private void drawVertices(final FloatBuffer aVertexBuffer) {
// Pass in the position information
aVertexBuffer.position(mPositionOffset);
GLES20.glVertexAttribPointer(mPositionHandle, mPositionDataSize, GLES20.GL_FLOAT, false,
mStrideBytes, aVertexBuffer);
GLES20.glEnableVertexAttribArray(mPositionHandle);
// Pass in the color information
aVertexBuffer.position(mColorOffset);
GLES20.glVertexAttribPointer(mColorHandle, mColorDataSize, GLES20.GL_FLOAT, false,
mStrideBytes, aVertexBuffer);
GLES20.glEnableVertexAttribArray(mColorHandle);
// This multiplies the view matrix by the model matrix, and stores the result in the MVP matrix
// (which currently contains model * view).
Matrix.multiplyMM(mMVPMatrix, 0, mViewMatrix, 0, mModelMatrix, 0);
// This multiplies the modelview matrix by the projection matrix, and stores the result in the MVP matrix
// (which now contains model * view * projection).
Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glDrawArrays(GLES20.GL_POINTS, 0, 3225600);
}
}

Slow processing of images from the filesystem compared to the webcam

I was able to follow the csharp-sample-apps from the GitHub repo for Affectiva. I ran the demo using my webcam, and the processing performance was great. I am not getting the same processing speed from the PhotoDetector when I run it over images from the filesystem. Any help or improvement would be appreciated.
namespace Logical.EmocaoFace
{
public class AnaliseEmocao : Affdex.ImageListener, Affdex.ProcessStatusListener
{
private Bitmap img { get; set; }
private Dictionary<int, Affdex.Face> faces { get; set; }
private Affdex.Detector detector { get; set; }
private ReaderWriterLock rwLock { get; set; }
public void processaEmocaoImagem()
{
for (int i = 0; i < resultado.count; i++){
RetornaEmocaoFace();
if (faceAffdex != null)
{
}
}
}
public void RetornaEmocaoFace(string caminhoImagem)
{
Affdex.Detector detector = new Affdex.PhotoDetector(1, Affdex.FaceDetectorMode.LARGE_FACES);
detector.setImageListener(this);
detector.setProcessStatusListener(this);
if (detector != null)
{
//ProcessVideo videoForm = new ProcessVideo(detector);
detector.setClassifierPath(@"D:\Desenvolvimento\Componentes\Afectiva\data");
detector.setDetectAllEmotions(true);
detector.setDetectAllExpressions(false);
detector.setDetectAllEmojis(false);
detector.setDetectAllAppearances(false);
detector.start();
((Affdex.PhotoDetector)detector).process(LoadFrameFromFile(caminhoImagem));
detector.stop();
}
}
static Affdex.Frame LoadFrameFromFile(string fileName)
{
Bitmap bitmap = new Bitmap(fileName);
// Lock the bitmap's bits.
Rectangle rect = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
BitmapData bmpData = bitmap.LockBits(rect, ImageLockMode.ReadWrite, bitmap.PixelFormat);
// Get the address of the first line.
IntPtr ptr = bmpData.Scan0;
// Declare an array to hold the bytes of the bitmap.
int numBytes = bitmap.Width * bitmap.Height * 3;
byte[] rgbValues = new byte[numBytes];
int data_x = 0;
int ptr_x = 0;
int row_bytes = bitmap.Width * 3;
// The bitmap requires bitmap data to be byte aligned.
// http://stackoverflow.com/questions/20743134/converting-opencv-image-to-gdi-bitmap-doesnt-work-depends-on-image-size
for (int y = 0; y < bitmap.Height; y++)
{
Marshal.Copy(ptr + ptr_x, rgbValues, data_x, row_bytes);//(pixels, data_x, ptr + ptr_x, row_bytes);
data_x += row_bytes;
ptr_x += bmpData.Stride;
}
bitmap.UnlockBits(bmpData);
//Affdex.Frame retorno = new Affdex.Frame(bitmap.Width, bitmap.Height, rgbValues, Affdex.Frame.COLOR_FORMAT.BGR);
//bitmap.Dispose();
//return retorno;
return new Affdex.Frame(bitmap.Width, bitmap.Height, rgbValues, Affdex.Frame.COLOR_FORMAT.BGR);
}
public void onImageCapture(Affdex.Frame frame)
{
frame.Dispose();
}
public void onImageResults(Dictionary<int, Affdex.Face> faces, Affdex.Frame frame)
{
byte[] pixels = frame.getBGRByteArray();
this.img = new Bitmap(frame.getWidth(), frame.getHeight(), PixelFormat.Format24bppRgb);
var bounds = new Rectangle(0, 0, frame.getWidth(), frame.getHeight());
BitmapData bmpData = img.LockBits(bounds, ImageLockMode.WriteOnly, img.PixelFormat);
IntPtr ptr = bmpData.Scan0;
int data_x = 0;
int ptr_x = 0;
int row_bytes = frame.getWidth() * 3;
// The bitmap requires bitmap data to be byte aligned.
// http://stackoverflow.com/questions/20743134/converting-opencv-image-to-gdi-bitmap-doesnt-work-depends-on-image-size
for (int y = 0; y < frame.getHeight(); y++)
{
Marshal.Copy(pixels, data_x, ptr + ptr_x, row_bytes);
data_x += row_bytes;
ptr_x += bmpData.Stride;
}
img.UnlockBits(bmpData);
this.faces = faces;
frame.Dispose();
}
public void onProcessingException(Affdex.AffdexException A_0)
{
throw new NotImplementedException("Encountered an exception while processing " + A_0.ToString());
}
public void onProcessingFinished()
{
string idArquivo = CodEspaco + "," + System.Guid.NewGuid().ToString();
for(int i = 0; i < faces.Count; i++)
{
}
}
}
public static class GraphicsExtensions
{
public static void DrawCircle(this Graphics g, Pen pen,
float centerX, float centerY, float radius)
{
g.DrawEllipse(pen, centerX - radius, centerY - radius,
radius + radius, radius + radius);
}
}
}
Found the answer to my own question:
Using PhotoDetector is not ideal in this case, since it is expensive to use the face detector configuration on subsequent frame calls.
The best option to improve performance would be to use an instance of the FrameDetector class.
Here is a getting started guide to analyze-frames.

libGDX animations don't scale on Android

I was testing my app on Android.
My app has one background image and two animations using TextureAtlas.
It works fine on desktop, where the sprite and the animations all scale, but when I tested it on Android, the sprite resized correctly while the animations didn't resize at all.
constantes.VIRTUAL_WIDTH=1920;
constantes.VIRTUAL_HEIGHT=1080;
.....
public static void show() {
camera = new OrthographicCamera(constantes.VIRTUAL_WIDTH, constantes.VIRTUAL_HEIGHT); //Aspect Ratio Maintenance
batch = new SpriteBatch();
texturafundo = new Texture("cenarios/penhasco.jpg");
spriteFundo = new Sprite(texturafundo);
spriteFundo.setSize(Gdx.graphics.getWidth(),Gdx.graphics.getHeight());
// animations
textureAtlas = new TextureAtlas(Gdx.files.internal("anima/rio.txt"));
animacao = new Animation(1/10f, textureAtlas.getRegions());
textureAtlas2 = new TextureAtlas(Gdx.files.internal("anima/portal.txt"));
animacao2 = new Animation(1/10f, textureAtlas2.getRegions());
}
public void render(float delta) {
// update camera
camera.update();
// set viewport
Gdx.gl.glViewport((int) viewport.x, (int) viewport.y,
(int) viewport.width, (int) viewport.height);
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
elapsedTime+=delta;
batch.begin();
spriteFundo.draw(batch);
//sesenha animacao 1
batch.draw(animacao.getKeyFrame(elapsedTime, true), 0, 0);
batch.draw(animacao2.getKeyFrame(elapsedTime, true), 788, 249);
batch.end();
}
public void resize(int width, int height) {
float aspectRatio = (float)width/(float)height;
float scale = 1f;
Vector2 crop = new Vector2(0f, 0f);
if(aspectRatio > constantes.ASPECT_RATIO) {
scale = (float) height / (float) constantes.VIRTUAL_HEIGHT;
crop.x = (width - constantes.VIRTUAL_WIDTH * scale) / 2f;
} else if(aspectRatio < constantes.ASPECT_RATIO) {
scale = (float) width / (float) constantes.VIRTUAL_WIDTH;
crop.y = (height - constantes.VIRTUAL_HEIGHT * scale) / 2f;
} else {
scale = (float) width / (float) constantes.VIRTUAL_WIDTH;
}
float w = (float) constantes.VIRTUAL_WIDTH * scale;
float h = (float) constantes.VIRTUAL_HEIGHT * scale;
viewport = new Rectangle(crop.x, crop.y, w, h);
}
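No accepted fix is included here, but for what it's worth, a common way to make the frames scale like the background is to give each batch.draw() call an explicit width and height instead of drawing the region at its native pixel size. This is a rough sketch only, reusing the fields from the snippet above (camera, batch, animacao, animacao2, elapsedTime) and assuming the coordinates were authored for the 1920x1080 virtual resolution; the scale factors are my own guess, not a verified fix:

// Inside render(): scale animation frames the same way spriteFundo is scaled.
float scaleX = Gdx.graphics.getWidth()  / (float) constantes.VIRTUAL_WIDTH;
float scaleY = Gdx.graphics.getHeight() / (float) constantes.VIRTUAL_HEIGHT;

TextureRegion frame1 = animacao.getKeyFrame(elapsedTime, true);
TextureRegion frame2 = animacao2.getKeyFrame(elapsedTime, true);

batch.begin();
spriteFundo.draw(batch);
// positions and sizes converted from virtual 1920x1080 coordinates to screen pixels
batch.draw(frame1, 0, 0,
        frame1.getRegionWidth() * scaleX, frame1.getRegionHeight() * scaleY);
batch.draw(frame2, 788 * scaleX, 249 * scaleY,
        frame2.getRegionWidth() * scaleX, frame2.getRegionHeight() * scaleY);
batch.end();

An alternative would be to call batch.setProjectionMatrix(camera.combined) and draw everything in virtual units, which appears to be what the camera created in show() was intended for.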

Why is the origin of the second ball not attached to the first ball?

I was reading the book The Nature of Code, where Exercise 3.12 asked me to implement a double pendulum.
class Pendulum {
PVector origin, location;
float r; // arm length
float angle;
float aVelocity;
float aAcceleration;
float damping;
Pendulum(PVector origin_, float r_) {
origin = origin_.get();
location = new PVector();
r = r_;
angle = PI/3;
aVelocity = 0;
aAcceleration = 0;
damping = 0.995;
}
void go() {
update();
display();
}
void update() {
float gravity = 0.4;
aAcceleration = (-1 * gravity / r) * sin(angle);
aVelocity += aAcceleration;
angle += aVelocity;
aVelocity *= damping;
location.set(r*sin(angle), r*cos(angle));
location.add(origin);
}
void display() {
stroke(0);
line(origin.x, origin.y, location.x, location.y);
fill(150);
ellipse(location.x, location.y, 20, 20);
}
}
Pendulum p, p2;
void setup() {
size(640, 360);
p = new Pendulum(new PVector(width/2, 0), 150);
p2 = new Pendulum(p.location, 100);
}
void draw() {
background(255);
p.go();
p2.go();
}
So in the setup function, I set the origin of p2 to be the location of p. However, the origin of p2 appeared at position (0, 0). How should I fix this? I have tried to use a temporary variable for p2, but that's not convenient.
I'm not exactly sure what you are trying to do, but in the constructor:
Pendulum(PVector origin_, float r_) {
origin = origin_.get();
location = new PVector(); <-- here you set the location to a new vector
...
}
And you use the location directly here:
void setup() {
size(640, 360);
p = new Pendulum(new PVector(width/2, 0), 150);
p2 = new Pendulum(p.location, 100); <-- here
}
which is the newly created (empty) location. I suppose that's the problem you should be looking into.
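To make that diagnosis concrete: the Pendulum constructor copies the vector it is given (origin_.get()), and p.location is still (0, 0) at the moment p2 is constructed, so p2's origin never follows the first bob. One minimal way to keep them attached, assuming the Pendulum class from the question, is to re-anchor p2 every frame before updating it:

void draw() {
  background(255);
  p.go();
  // re-anchor the second pendulum to the first bob's current position;
  // the constructor copied the vector, so it has to be refreshed each frame
  p2.origin.set(p.location);
  p2.go();
}

Alternatively, the constructor could keep a reference to the passed-in vector instead of copying it.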