libgdx, animations don't scale on Android - camera

I was testing my app on Android.
My app has one background image and two animations that use a TextureAtlas.
It works fine on desktop: the sprite and the animations all scale. But when I test it on Android, the sprite resizes correctly while the animations don't resize at all.
constantes.VIRTUAL_WIDTH=1920;
constantes.VIRTUAL_HEIGHT=1080;
.....
public static void show() {
camera = new OrthographicCamera(constantes.VIRTUAL_WIDTH, constantes.VIRTUAL_HEIGHT); //Aspect Ratio Maintenance
batch = new SpriteBatch();
texturafundo = new Texture("cenarios/penhasco.jpg");
spriteFundo = new Sprite(texturafundo);
spriteFundo.setSize(Gdx.graphics.getWidth(),Gdx.graphics.getHeight());
// animations
textureAtlas = new TextureAtlas(Gdx.files.internal("anima/rio.txt"));
animacao = new Animation(1/10f, textureAtlas.getRegions());
textureAtlas2 = new TextureAtlas(Gdx.files.internal("anima/portal.txt"));
animacao2 = new Animation(1/10f, textureAtlas2.getRegions());
}
public void render(float delta) {
// update camera
camera.update();
// set viewport
Gdx.gl.glViewport((int) viewport.x, (int) viewport.y,
(int) viewport.width, (int) viewport.height);
Gdx.gl.glClearColor(0, 0, 0, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
elapsedTime+=delta;
batch.begin();
spriteFundo.draw(batch);
// draw animation 1
batch.draw(animacao.getKeyFrame(elapsedTime, true), 0, 0);
batch.draw(animacao2.getKeyFrame(elapsedTime, true), 788, 249);
batch.end();
}
public void resize(int width, int height) {
float aspectRatio = (float)width/(float)height;
float scale = 1f;
Vector2 crop = new Vector2(0f, 0f);
if(aspectRatio > constantes.ASPECT_RATIO) {
scale = (float) height / (float) constantes.VIRTUAL_HEIGHT;
crop.x = (width - constantes.VIRTUAL_WIDTH * scale) / 2f;
} else if(aspectRatio < constantes.ASPECT_RATIO) {
scale = (float) width / (float) constantes.VIRTUAL_WIDTH;
crop.y = (height - constantes.VIRTUAL_HEIGHT * scale) / 2f;
} else {
scale = (float) width / (float) constantes.VIRTUAL_WIDTH;
}
float w = (float) constantes.VIRTUAL_WIDTH * scale;
float h = (float) constantes.VIRTUAL_HEIGHT * scale;
viewport = new Rectangle(crop.x, crop.y, w, h);
}
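A note on the difference between the two draws: the background Sprite is explicitly resized, but batch.draw(region, x, y) draws each animation frame at its raw pixel size, and the batch is never told to render through the camera. Below is a minimal sketch (not the original project code) of a render() that sets the batch's projection matrix from the camera and draws everything in virtual units; it assumes the fields from the question and that the camera position has been set so (0, 0) is the bottom-left corner of the virtual screen (e.g. camera.position.set(constantes.VIRTUAL_WIDTH / 2f, constantes.VIRTUAL_HEIGHT / 2f, 0)).
public void render(float delta) {
    camera.update();
    Gdx.gl.glViewport((int) viewport.x, (int) viewport.y,
            (int) viewport.width, (int) viewport.height);
    Gdx.gl.glClearColor(0, 0, 0, 1);
    Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
    elapsedTime += delta;
    batch.setProjectionMatrix(camera.combined); // render in the camera's virtual coordinates
    batch.begin();
    // size the background in virtual units instead of Gdx.graphics.getWidth()/getHeight()
    batch.draw(spriteFundo, 0, 0, constantes.VIRTUAL_WIDTH, constantes.VIRTUAL_HEIGHT);
    // give each animation frame an explicit width/height so it scales with the camera too
    TextureRegion frame1 = (TextureRegion) animacao.getKeyFrame(elapsedTime, true);
    batch.draw(frame1, 0, 0, frame1.getRegionWidth(), frame1.getRegionHeight());
    TextureRegion frame2 = (TextureRegion) animacao2.getKeyFrame(elapsedTime, true);
    batch.draw(frame2, 788, 249, frame2.getRegionWidth(), frame2.getRegionHeight());
    batch.end();
}
With the drawing done in virtual coordinates, the existing resize() math then scales both the background and the animations on any screen size.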

Related

Horizontal scroll in appium

How can I scroll horizontally in Appium?
For vertical scrolling we do:
driver.findElementByAndroidUIAutomator("new UiScrollable(new UiSelector()).scrollIntoView("")");
Is there something similar for horizontal scrolling?
You can set the UiSelector to scrollable and set the direction of scrolling:
driver().findElementByAndroidUIAutomator("new UiScrollable(new UiSelector().scrollable(true)."
+ "resourceId(\"<id of scrollable control>\"))"
+ ".setAsHorizontalList().scrollIntoView(new UiSelector().textContains(\"<text to search for>\"))");
This link gives details about UiScrollable.
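As a sketch of how that call might be wrapped for reuse (the helper name, the resource id and the search text are placeholders, not values from the question):
// Hypothetical helper around the UiScrollable call above; adjust the selector to your app.
// Uses io.appium.java_client.android.AndroidDriver and io.appium.java_client.MobileElement.
public MobileElement scrollHorizontallyTo(AndroidDriver<MobileElement> driver,
        String scrollableResourceId, String visibleText) {
    String selector = "new UiScrollable(new UiSelector().scrollable(true)"
            + ".resourceId(\"" + scrollableResourceId + "\"))"
            + ".setAsHorizontalList()"
            + ".scrollIntoView(new UiSelector().textContains(\"" + visibleText + "\"))";
    return driver.findElementByAndroidUIAutomator(selector);
}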
You can use the method below. It works horizontally, and by changing the scroll start and scroll end coordinates you can use the same code for vertical scrolling as well.
public void scrollHorizontal(double value1,double value2) {
Dimension dimension = driver.manage().window().getSize();
// for a horizontal scroll the X coordinates come from the screen width, not its height
Double scrollStartX = dimension.getWidth()*value1;
int scrollStart = scrollStartX.intValue();
Double scrollEndX = dimension.getWidth()*value2;
int scrollEnd = scrollEndX.intValue();
int anchorY = dimension.getHeight()/2; // swipe at mid-screen height rather than y = 0
new TouchAction((PerformsTouchActions)driver)
.press(PointOption.point(scrollStart, anchorY))
.waitAction(WaitOptions.waitOptions(Duration.ofSeconds(2)))
.moveTo(PointOption.point(scrollEnd, anchorY))
.release()
.perform(); }
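A call might then look like this (assuming the driver field is already initialised and the method lives in your test class):
scrollHorizontal(0.90, 0.10); // swipe from 90% of the screen width across to 10%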
Use the method below for horizontal scrolling:
public static void swipeHorizontal(AppiumDriver driver, double startPercentage, double finalPercentage, double anchorPercentage, int duration) throws Exception {
Dimension size = driver.manage().window().getSize();
int anchor = (int) (size.height * anchorPercentage);
int startPoint = (int) (size.width * startPercentage);
int endPoint = (int) (size.width * finalPercentage);
new TouchAction(driver).press(startPoint, anchor).waitAction(Duration.ofMillis(duration)).moveTo(endPoint, anchor).release().perform();
}
Call the above method with:
swipeHorizontal((AppiumDriver) driver,0.9,0.01,0.5,2000);
This applies in all directions:
enum:
public enum DIRECTION {
DOWN, UP, LEFT, RIGHT;
}
actual code:
public static void swipe(MobileDriver driver, DIRECTION direction, long duration) {
Dimension size = driver.manage().window().getSize();
int startX = 0;
int endX = 0;
int startY = 0;
int endY = 0;
switch (direction) {
case RIGHT:
startY = (int) (size.height / 2);
startX = (int) (size.width * 0.90);
endX = (int) (size.width * 0.05);
new TouchAction(driver)
.press(startX, startY)
.waitAction(Duration.ofMillis(duration))
.moveTo(endX, startY)
.release()
.perform();
break;
case LEFT:
startY = (int) (size.height / 2);
startX = (int) (size.width * 0.05);
endX = (int) (size.width * 0.90);
new TouchAction(driver)
.press(startX, startY)
.waitAction(Duration.ofMillis(duration))
.moveTo(endX, startY)
.release()
.perform();
break;
case UP:
endY = (int) (size.height * 0.70);
startY = (int) (size.height * 0.30);
startX = (size.width / 2);
new TouchAction(driver)
.press(startX, startY)
.waitAction(Duration.ofMillis(duration))
.moveTo(startX, endY)
.release()
.perform();
break;
case DOWN:
startY = (int) (size.height * 0.70);
endY = (int) (size.height * 0.30);
startX = (size.width / 2);
new TouchAction(driver)
.press(startX, startY)
.waitAction(Duration.ofMillis(duration))
.moveTo(startX, endY)
.release()
.perform();
break;
}
}
usage:
swipe(driver, Util.DIRECTION.RIGHT, 1000); // duration in milliseconds
Hope this helps,

First Person Camera using JOGL GL3 core

I am trying to make a basic first-person camera scene using JOGL GL3 core and a programmable vertex shader, but it doesn't look like the vertex array object is being correctly projected.
I believe the keyboard and mouse functions are working correctly and that the problem lies with the shader program or the vertex shader.
The AxisScene class is where the bulk of the action happens, but the entire Gradle project can be found here.
I followed the projection theory from here.
What code is wrong or missing to create true FPS behaviour?
package fpsscene.fpsscene;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.jogamp.common.nio.Buffers;
import com.jogamp.opengl.GL;
import com.jogamp.opengl.GL2;
import com.jogamp.opengl.GL2ES2;
import com.jogamp.opengl.GL3;
import com.jogamp.opengl.GL3ES3;
import com.jogamp.opengl.GLAutoDrawable;
import com.jogamp.opengl.GLES3;
import com.jogamp.opengl.glu.GLU;
import com.jogamp.opengl.math.Matrix4;
import fpsscene.adapters.ApplyXY;
import fpsscene.adapters.BasicMovement;
import fpsscene.gl.primitives.ColoredTriangle;
import fpsscene.gl.primitives.Point2f;
import fpsscene.gl.primitives.Point3f;
public class AxisScene extends Scene implements ApplyXY , BasicMovement{
private static String vertexShaderString = String.join("\n",
"#version 130\n",
"",
"in vec3 vertex_position;",
"in vec3 vertex_colour;",
"uniform mat4 view, proj;",
"out vec3 colour;",
"void main() {",
" colour = vertex_colour;",
" gl_Position = proj * view * vec4 (vertex_position, 1.0);",
"}"
);
private static String fragmentShaderString = String.join("\n",
"#version 130\n",
"in vec3 colour;",
"out vec4 frag_colour;",
"void main() {",
" frag_colour = vec4 (colour, 1.0);",
"}"
);
private int shaderProgram;
int vertShader;
int fragShader;
int view_mat_location;
int proj_mat_location;
Matrix4 proj_mat;
Matrix4 view_mat;
float sens_rot;
Point3f eye_default;
Point3f up_default;
Point2f rot_default;
Point2f fov_default;
Point3f eye;
Point3f up;
Point2f rot;
Point2f fov;
int axisVao[] = new int[1];
private int axisLen;
float near; // clipping plane
float far; // clipping plane
static final int COLOR_IDX = 0;
static final int VERTICES_IDX = 1;
private static final float DROT_FULL = 360.0f;
private static final float DROT_QUART = DROT_FULL/4.0f;
private int width=1920;
private int height=1080;
public AxisScene() {
this.eye_default = new Point3f(0.0f, 0.0f, 0.0f);
this.fov_default = new Point2f(120.0f, 90.0f);
this.rot_default = new Point2f(0.0f, 0.0f);
this.up_default = new Point3f(0.0f, 1.0f, 0.0f);
this.eye = eye_default;
this.fov = fov_default;
this.rot = rot_default;
this.up = up_default;
near = 0.01f;
far = 1000000.0f;
sens_rot = 0.03f;
rot.set(138.869919f, 4.44001198f);
eye.set(-4.66594696f,3.20000124f,-5.04626369f);
// rot.set(167.31528f,0.0f);
updateProjMat();
updateViewMatrix();
}
@Override
public void init(GLAutoDrawable drawable) {
GL3 gl = drawable.getGL().getGL3();
if(!gl.isGL3core()){
Logger.getAnonymousLogger().log(Level.SEVERE, "GL3core not enabled");
}
vertShader = createShaderFromString(gl, AxisScene.vertexShaderString,GL2ES2.GL_VERTEX_SHADER);
fragShader = createShaderFromString(gl, AxisScene.fragmentShaderString,GL2ES2.GL_FRAGMENT_SHADER);
shaderProgram = gl.glCreateProgram();
gl.glAttachShader(shaderProgram, vertShader);
gl.glAttachShader(shaderProgram, fragShader);
gl.glLinkProgram(shaderProgram);
this.view_mat_location = gl.glGetUniformLocation(shaderProgram, "view");
this.proj_mat_location = gl.glGetUniformLocation(shaderProgram, "proj");
gl.glDeleteShader(vertShader);
gl.glDeleteShader(fragShader);
List<ColoredTriangle> triangles = new AxisTrianges(100).createAxisTriangles();
float[] vertices = ColoredTriangle.verticesToArray(triangles);
float[] colors = ColoredTriangle.colorsToArray(triangles);
FloatBuffer fbVertices = Buffers.newDirectFloatBuffer(vertices);
FloatBuffer fbColors = Buffers.newDirectFloatBuffer(colors);
int[] points_vbo = new int[1];
gl.glGenBuffers(1, points_vbo,0);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, points_vbo[0]);
gl.glBufferData(GL.GL_ARRAY_BUFFER, triangles.size() * 9 * Float.BYTES, fbVertices, GL.GL_STATIC_DRAW);
int[] colours_vbo = new int[1];
gl.glGenBuffers(1, colours_vbo,0);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, colours_vbo[0]);
gl.glBufferData(GL.GL_ARRAY_BUFFER, triangles.size() * 9 * Float.BYTES, fbColors, GL.GL_STATIC_DRAW);
gl.glGenVertexArrays(1, axisVao,0);
gl.glBindVertexArray(axisVao[0]);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, points_vbo[0]);
gl.glVertexAttribPointer(0, 3, GL.GL_FLOAT, false, 0, 0L);
gl.glBindBuffer(GL.GL_ARRAY_BUFFER, colours_vbo[0]);
gl.glVertexAttribPointer(1, 3, GL.GL_FLOAT, false, 0, 0L);
gl.glEnableVertexAttribArray(0);
gl.glEnableVertexAttribArray(1);
axisLen = triangles.size();
}
@Override
public void dispose(GLAutoDrawable drawable) {
System.out.println("cleanup, remember to release shaders");
GL3 gl = drawable.getGL().getGL3();
gl.glUseProgram(0);
gl.glDetachShader(shaderProgram, vertShader);
gl.glDeleteShader(vertShader);
gl.glDetachShader(shaderProgram, fragShader);
gl.glDeleteShader(fragShader);
gl.glDeleteProgram(shaderProgram);
}
@Override
public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {
this.width = width;
this.height = height;
this.updateProjMat();
GL3 gl = drawable.getGL().getGL3();
gl.glViewport((width-height)/2,0,height,height);
}
@Override
protected void glDisplay(GLAutoDrawable drawable) {
GL3 gl = drawable.getGL().getGL3();
gl.glClearColor(1, 1, 1, 1.0f);
gl.glClear(GL2ES2.GL_STENCIL_BUFFER_BIT | GL2ES2.GL_COLOR_BUFFER_BIT | GL2ES2.GL_DEPTH_BUFFER_BIT );
gl.glUseProgram(shaderProgram);
gl.glUniformMatrix4fv(this.view_mat_location, 1, false, this.view_mat.getMatrix(), 0);
gl.glUniformMatrix4fv(this.proj_mat_location, 1, true, this.proj_mat.getMatrix(), 0);
gl.glBindVertexArray(axisVao[0]);
gl.glDrawArrays(GL2ES2.GL_TRIANGLES, 0, 3 * axisLen); //Draw the vertices as triangle
gl.glBindVertexArray(0);
gl.glCullFace(GL2ES2.GL_NONE);
gl.glDisable(GL2ES2.GL_CULL_FACE);
}
private void updateViewMatrix() {
Matrix4 T = new Matrix4();
T.translate(-eye.getX(), -eye.getY(), -eye.getZ());
Matrix4 yRot = new Matrix4();
yRot.rotate((float)Math.toRadians(rot.getX()), 0.0f, 1.0f, 0.0f);
Matrix4 xRot = new Matrix4();
xRot.rotate((float)Math.toRadians(Math.cos(-Math.toRadians(rot.getX())) * rot.getY()), 1.0f, 0.0f, 0.0f);
Matrix4 zRot = new Matrix4();
zRot.rotate((float)Math.toRadians(Math.sin(Math.toRadians(rot.getX())) * rot.getY()), 0.0f, 0.0f, 1.0f);
Matrix4 R = yRot;
R.multMatrix(xRot);
R.multMatrix(zRot);
view_mat = T;
view_mat.multMatrix(R);
}
@Override
protected boolean glRender(GLAutoDrawable drawable) {
GL3 gl = drawable.getGL().getGL3();
return false;
}
private void updateProjMat() {
float aspect = (float) width / (float) height; // aspect ratio
float range = (float) Math.tan(Math.toRadians(fov.getX() * 0.5f));
float proj_mat[] = new float[16];
proj_mat[0] = 1.0f / (range * aspect);
proj_mat[1] = 0.0f;
proj_mat[2] = 0.0f;
proj_mat[3] = 0.0f;
proj_mat[4] = 0.0f;
proj_mat[5] = 1.0f / range;
proj_mat[6] = 0.0f;
proj_mat[7] = 0.0f;
proj_mat[8] = 0.0f;
proj_mat[9] = 0.0f;
proj_mat[10] = -(far + near) / (far - near);
proj_mat[11] = -(2.0f * far * near) / (far - near);
proj_mat[12] = 0.0f;
proj_mat[13] = 0.0f;
proj_mat[14] =-1.0f;
proj_mat[15] = 0.0f;
this.proj_mat = new Matrix4();
this.proj_mat.multMatrix(proj_mat);
}
@Override
public void applyXY(float x, float y) {
rot.setX(fmod(rot.getX() + x * sens_rot, DROT_FULL));
rot.setY(Math.min(Math.max(rot.getY() + y * sens_rot, -DROT_QUART), DROT_QUART));
updateViewMatrix();
}
private float fmod(float f, float m) {
return ((f%m) + m) %m;
}
@Override
public void translate(float x, float y, float z) {
float deltax = z * (float)Math.sin(Math.toRadians(rot.getX())) + x * (float)Math.cos(Math.toRadians(rot.getX()));
float deltaz = z * (float)Math.cos(Math.toRadians(rot.getX())) - x * (float)Math.sin(Math.toRadians(rot.getX()));
eye.set( eye.getX()+deltax, eye.getY()+y, eye.getZ()+deltaz );
updateViewMatrix();
System.out.println(eye + rot.toString());
}
private int createShaderFromString(GL3 gl, String shaderCode,int type) {
int shader = gl.glCreateShader(type);
String[] vlines = new String[] { shaderCode };
int[] vlengths = new int[] { vlines[0].length() };
gl.glShaderSource(shader, vlines.length, vlines, vlengths, 0);
gl.glCompileShader(shader);
int[] compiled = new int[1];
gl.glGetShaderiv(shader, GL2ES2.GL_COMPILE_STATUS, compiled,0);
if(compiled[0]!=0){
System.out.println("Horray! vertex shader compiled");
} else {
int[] logLength = new int[1];
gl.glGetShaderiv(shader, GL2ES2.GL_INFO_LOG_LENGTH, logLength, 0);
byte[] log = new byte[logLength[0]];
gl.glGetShaderInfoLog(shader, logLength[0], (int[])null, 0, log, 0);
System.err.println("Error compiling the vertex shader: " + new String(log));
System.exit(1);
}
return shader;
}
}
If you're looking for a way to make a camera have "true FPS behavior", then I suggest you take a look at this tutorial: https://beta.wikiversity.org/wiki/Computer_graphics_--2008-2009--_info.uvt.ro/Laboratory_7
I personally went with solution nº 2. From what I've seen of the solution you went with, this one seems a lot more elegant. Of course, I modified it to include the mouse. If you want to try it yourself, just follow whichever solution fits your code better. If you want, I can show you my code with the modifications, but only if you ask for it, since I don't want to spoil the solution for you.
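For reference, here is a minimal sketch of a conventional yaw/pitch FPS view matrix using the same com.jogamp.opengl.math.Matrix4 and the fields from the question (rot.getX() as yaw, rot.getY() as pitch, both in degrees); it is not the tutorial's exact solution, and the signs may need flipping for your conventions:
// View matrix = Rx(-pitch) * Ry(-yaw) * T(-eye): move the world so the camera sits at the
// origin, then undo the yaw, then undo the pitch. Matrix4.rotate/translate post-multiply.
private void updateViewMatrix() {
    Matrix4 view = new Matrix4();                                  // starts as identity
    view.rotate((float) Math.toRadians(-rot.getY()), 1f, 0f, 0f);  // undo pitch
    view.rotate((float) Math.toRadians(-rot.getX()), 0f, 1f, 0f);  // undo yaw
    view.translate(-eye.getX(), -eye.getY(), -eye.getZ());         // undo camera position
    view_mat = view;
}
Because the rotation is applied before the translation in the product, the camera turns in place rather than orbiting the origin, which is the usual first-person behaviour.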

Why is the origin of the second ball not attached to the first ball?

I was reading the book The Nature of Code, where Exercise 3.12 asked me to implement a double pendulum.
class Pendulum {
PVector origin, location;
float r; // arm length
float angle;
float aVelocity;
float aAcceleration;
float damping;
Pendulum(PVector origin_, float r_) {
origin = origin_.get();
location = new PVector();
r = r_;
angle = PI/3;
aVelocity = 0;
aAcceleration = 0;
damping = 0.995;
}
void go() {
update();
display();
}
void update() {
float gravity = 0.4;
aAcceleration = (-1 * gravity / r) * sin(angle);
aVelocity += aAcceleration;
angle += aVelocity;
aVelocity *= damping;
location.set(r*sin(angle), r*cos(angle));
location.add(origin);
}
void display() {
stroke(0);
line(origin.x, origin.y, location.x, location.y);
fill(150);
ellipse(location.x, location.y, 20, 20);
}
}
Pendulum p, p2;
void setup() {
size(640, 360);
p = new Pendulum(new PVector(width/2, 0), 150);
p2 = new Pendulum(p.location, 100);
}
void draw() {
background(255);
p.go();
p2.go();
}
So in the setup function, I set the origin of p2 to be the location of p. However, the origin of p2 appears at position (0, 0). How should I fix this? I have tried setting a temporary variable for p2, but that isn't convenient.
I'm not exactly sure what you are trying to do,
but in the constructor:
Pendulum(PVector origin_, float r_) {
origin = origin_.get();
location = new PVector(); <-- here you set the location to a new vector
...
}
And you directly use the location in here:
void setup() {
size(640, 360);
p = new Pendulum(new PVector(width/2, 0), 150);
p2 = new Pendulum(p.location, 100); <-- here
}
which is the newly created location, still at (0, 0). I suppose that's the problem you should be looking into.
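Since the constructor copies the vector it is given (origin_.get()) before p has ever been updated, one simple option, sketched below, is to copy p's current location into p2.origin on every frame before p2 updates (this is just one way to do it, not necessarily the book's intended solution):
void draw() {
  background(255);
  p.go();                      // update and draw the first pendulum
  p2.origin.set(p.location);   // re-anchor the second pendulum at the first bob's position
  p2.go();                     // p2 now swings from p's current location
}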

Centre Buttons In Swift - Objective C Conversion Issue

In Objective C, I am using a function to centre some buttons, which works fine.
The buttons centre perfectly on the screen.
However, after converting the code to Swift it isn't working as expected; there is extra space on the left-hand side:
func centerImageViews() {
// Measure the total width taken by the buttons
var width:CGFloat = 0.0
for (index, item) in enumerate(self.soundBoxes) {
if(item.tag>=numberOfBoxes){
item.alpha=0
}else{
item.alpha=1
width += item.bounds.size.width + 10
}
if (width > 10){
width -= 10;
}
// If the buttons width is shorter than the visible bounds, adjust origin
var origin:CGFloat = 0;
if (width < self.view.frame.size.width){
origin = (self.view.frame.size.width - width) / 2.0;
}
// Place buttons
var x:CGFloat = origin;
var y:CGFloat = 200;
for (index, item) in enumerate(self.soundBoxes) {
item.center = CGPointMake(x + item.bounds.size.width/2.0, y + item.bounds.size.height/2.0);
x += item.bounds.size.width + 10;
}
}
}
In Objective C:
- (void)centerImageViews:(NSArray*)imageViews withCount:(int)number
{
int kButtonSeperator=10;
// Measure The Total Width Of Our Buttons
CGFloat width = 0;
for (UIImageView* hit in soundBoxes)
if (hit.tag>=numberOfBoxes) {
hit.alpha=0;
} else {
width += hit.bounds.size.width + kButtonSeperator;
hit.alpha=1;
}
if (width > kButtonSeperator)
width -= kButtonSeperator;
CGFloat origin = 0;
if (width < self.view.frame.size.width)
origin = (self.view.frame.size.width - width) / 2.f;
CGFloat x = origin;
CGFloat y = 85;
UIImageView* hit;
for (hit in imageViews) {
hit.center = CGPointMake(x + hit.bounds.size.width/2.f, y + hit.bounds.size.height/2.f);
//NSLog(@"X Is %f",hit.center.x);
x += hit.bounds.size.width + kButtonSeperator;
}
}
}

Image manipulation with Kinect

I'm trying to create an application that zooms in and out of an image and rotates it via Kinect. So far it works, but only for one of the two cases at a time. What I would like is that, if I have rotated the image, that new value is kept when I zoom, so that I zoom on an image that has already been rotated X degrees. The way I have it now, if I first rotate and then try to zoom, the image goes back to its initial state.
private void TrackDistances(Skeleton skeleton)
{
if (skeleton.TrackingState == SkeletonTrackingState.Tracked)
{
...
if (wristLeft.Y > shoulderLeft.Y && wristRight.Y > shoulderRight.Y)
{
float distance = Math.Abs(wristLeft.X - wristRight.X);
image_Zoom(distance);
}
if (wristLeft.Y < shoulderLeft.Y && wristRight.Y < shoulderRight.Y)
{
angleDeg = GetJointAngle(zeroPoint, anglePoint);
image_Rotate(angleDeg);
}
}
}
private void image_Zoom(float distance)
{
//TransformGroup transformGroup = (TransformGroup)image.RenderTransform;
//ScaleTransform scale = (ScaleTransform)transformGroup.Children[0];
//double zoom = distance * 1.5;
//scale.ScaleX = zoom;
//scale.ScaleY = zoom;
double zoom = distance * 1.5;
double ScaleX = zoom;
double ScaleY = zoom;
ScaleTransform scale = new ScaleTransform(ScaleX, ScaleY);
image.RenderTransform = scale;
}
private void image_Rotate(double angleDeg)
{
var angle = angleDeg - 180;
RotateTransform rotate = new RotateTransform(angle);
image.RenderTransform = rotate;
}
Any suggestions?
Thanks!
I think it's because you replace RenderTransform with either a ScaleTransform or a RotateTransform, so whichever gesture runs last overwrites the other.
You can set both the ScaleTransform and the RotateTransform of the image in the XAML (for example inside a TransformGroup, as in your commented-out code), and just change the angle or zoom parameter in the code-behind.
Also see here:
How can I do both zoom and rotate on an inkcanvas?