How to draw several lines slowly at constant velocity on a canvas in Android? - android-animation

I need to trace out the strokes of a figure on a canvas in Android, with an effect like the following GIF:
So far I can draw one side at constant velocity with a ValueAnimator. However, I can only draw one side at a time, because I can't keep the previous side on screen while drawing the next one. So, is there a good way to solve this problem?
Code that draws a single line slowly with ValueAnimator:
GraphicsView.java
public class GraphicsView extends View {
private int stepX, stepY = 0;
private int startX, startY, stopX, stopY = 0;
private Paint paint = null;
public GraphicsView(Context context) {
super(context);
// Paint
paint = new Paint();
paint.setAntiAlias(true);
paint.setColor(Color.RED);
paint.setStyle(Paint.Style.STROKE);
startX = 40;
startY = 397;
stopX = 1040;
stopY = 397;
Init();
}
public void Init(){
ValueAnimator animatorX = ValueAnimator.ofFloat(startX, stopX);
ValueAnimator animatorY = ValueAnimator.ofFloat(startY, stopY);
animatorX.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator) {
stepX = Math.round((Float) valueAnimator.getAnimatedValue());
invalidate();
}
});
animatorY.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator valueAnimator) {
stepY = Math.round((Float) valueAnimator.getAnimatedValue());
invalidate();
}
});
AnimatorSet set = new AnimatorSet();
LinearInterpolator l = new LinearInterpolator();
set.setInterpolator(l);
set.setDuration(3000);
set.playTogether(animatorX, animatorY);
set.start();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
canvas.drawLine(startX, startY, stepX, stepY, paint);
}
}
MainActivity.java
public class MainActivity extends AppCompatActivity {
private Display display = null;
private GraphicsView view = null;
private ConstraintLayout layout = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
display = getWindowManager().getDefaultDisplay();
layout = (ConstraintLayout)findViewById(R.id.main_layout);
view = new GraphicsView(this);
view.setMinimumWidth(display.getWidth());
view.setMinimumHeight(display.getHeight());
layout.addView(view);
}
}

You can use the ObjectAnimator class to call back into one of your view's methods every time you'd like to draw a bit more of the path.
import android.animation.ObjectAnimator;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.DashPathEffect;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PathEffect;
import android.graphics.PathMeasure;
import android.util.AttributeSet;
import android.view.View;
import android.util.Log;
public class PathView extends View
{
Path path;
Paint paint;
float length;
public PathView(Context context)
{
super(context);
}
public PathView(Context context, AttributeSet attrs)
{
super(context, attrs);
}
public PathView(Context context, AttributeSet attrs, int defStyleAttr)
{
super(context, attrs, defStyleAttr);
}
public void init()
{
paint = new Paint();
paint.setColor(Color.BLUE);
paint.setStrokeWidth(10);
paint.setStyle(Paint.Style.STROKE);
path = new Path();
path.moveTo(50, 50);
path.lineTo(50, 500);
path.lineTo(200, 500);
path.lineTo(200, 300);
path.lineTo(350, 300);
// Measure the path
PathMeasure measure = new PathMeasure(path, false);
length = measure.getLength();
float[] intervals = new float[]{length, length};
ObjectAnimator animator = ObjectAnimator.ofFloat(PathView.this, "phase", 1.0f, 0.0f);
animator.setDuration(3000);
animator.start();
}
// is called by the animator object (reflectively, via the "phase" property name)
public void setPhase(float phase)
{
Log.d("pathview","setPhase called with:" + String.valueOf(phase));
paint.setPathEffect(createPathEffect(length, phase, 0.0f));
invalidate(); // will call onDraw()
}
private static PathEffect createPathEffect(float pathLength, float phase, float offset)
{
return new DashPathEffect(new float[] { pathLength, pathLength },
Math.max(phase * pathLength, offset));
}
@Override
public void onDraw(Canvas c)
{
super.onDraw(c);
c.drawPath(path, paint);
}
}
Then, just call init() to begin the animation, like this (or if you'd like it to start as soon as the view is inflated, put the init() call inside the constructors):
PathView path_view = (PathView) root_view.findViewById(R.id.path);
path_view.init();
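If you would rather keep the ValueAnimator approach from your question and draw several sides one after another, another option is to remember the segments that have already finished and redraw them in onDraw() while only the current segment animates. This is just a minimal sketch of that idea (the class and field names are made up for illustration, not taken from the answer above):
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ValueAnimator;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.view.View;
import android.view.animation.LinearInterpolator;
import java.util.ArrayList;
import java.util.List;

public class MultiLineView extends View {
    private final List<float[]> finished = new ArrayList<>(); // each entry is {x1, y1, x2, y2}
    private final Paint paint = new Paint();
    private float curStartX, curStartY, curEndX, curEndY;      // the segment currently animating

    public MultiLineView(Context context) {
        super(context);
        paint.setAntiAlias(true);
        paint.setColor(Color.RED);
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(5);
    }

    /** Animates one side at constant velocity; when it ends, stores it and runs onEnd (e.g. to start the next side). */
    public void animateSegment(final float x1, final float y1, final float x2, final float y2,
                               final Runnable onEnd) {
        curStartX = x1;
        curStartY = y1;
        ValueAnimator animator = ValueAnimator.ofFloat(0f, 1f);
        animator.setInterpolator(new LinearInterpolator());
        animator.setDuration(1000);
        animator.addUpdateListener(a -> {
            float t = (Float) a.getAnimatedValue();
            curEndX = x1 + (x2 - x1) * t;
            curEndY = y1 + (y2 - y1) * t;
            invalidate();
        });
        animator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                finished.add(new float[] {x1, y1, x2, y2}); // keep the completed side
                if (onEnd != null) onEnd.run();             // chain the next side here
            }
        });
        animator.start();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        for (float[] s : finished) {
            canvas.drawLine(s[0], s[1], s[2], s[3], paint); // previously drawn sides stay visible
        }
        canvas.drawLine(curStartX, curStartY, curEndX, curEndY, paint); // the side in progress
    }
}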

Related

TornadoFX TableView: scrolling a large number of rows leads to UI freezing

I have a Windows desktop application written in Kotlin, and I'm using Zulu JDK 11 with JavaFX and TornadoFX 2.0.0.
I'm facing a problem when scrolling a large number of rows (~4 million) in a TableView.
I have something like a player, and when it starts I need to auto-scroll to the row corresponding to the player's current position. To make playback smooth I do this by calling the scrollTo method every 50 milliseconds, 20 times per second.
I observed that at around row 300,000 the UI starts freezing, and at 500,000 it is almost dead.
When I increase the delay from 50 ms to 200 ms or 500 ms the situation is the same: the UI still freezes.
When I tried Zulu JDK 1.8 with JavaFX and TornadoFX 1.7.2 just as a check, everything was perfect; playback was very smooth and fast enough. With Oracle JDK 1.8 everything is fine as well.
But I need to migrate to JDK 11 because I have some important dependencies.
So the question is: what is wrong with JDK 11 (JavaFX) and TornadoFX 2.0.0, and how can it be fixed?
Thanks a lot for any help.
PS: Here is a minimal reproducible example. I took a TableView example from javacodegeeks and modified it, so please check it with JDK 1.8 and with OpenJDK 11 (I used Azul Zulu 11).
There is also a video demonstrating the problem.
import javafx.application.Application;
import javafx.application.Platform;
import javafx.beans.property.SimpleIntegerProperty;
import javafx.beans.property.SimpleStringProperty;
import javafx.event.ActionEvent;
import javafx.event.Event;
import javafx.event.EventHandler;
import javafx.event.EventType;
import javafx.scene.control.Button;
import javafx.stage.Stage;
import javafx.scene.Scene;
import javafx.scene.layout.VBox;
import javafx.scene.layout.HBox;
import javafx.scene.text.Font;
import javafx.scene.text.FontWeight;
import javafx.scene.text.Text;
import javafx.scene.paint.Color;
import javafx.scene.control.Label;
import javafx.scene.control.TableView;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.control.TableColumn;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import java.util.List;
import java.util.ArrayList;
import java.util.Timer;
import java.util.TimerTask;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
public class FxTableViewExample1 extends Application {
public static class Book {
private SimpleIntegerProperty index;
private SimpleStringProperty title;
private SimpleStringProperty author;
public Book () {
}
public Book (Integer i, String s1, String s2) {
index = new SimpleIntegerProperty(i);
title = new SimpleStringProperty(s1);
author = new SimpleStringProperty(s2);
}
public int getIndex() {
return index.get();
}
public void setIndex(int index) {
this.index.set(index);
}
public String getTitle() {
return title.get();
}
public void setTitle(String s) {
title.set(s);
}
public String getAuthor() {
return author.get();
}
public void setAuthor(String s) {
author.set(s);
}
@Override
public String toString() {
return (index.get() + ": " + title.get() + ", by " + author.get());
}
}
private static final Integer COUNT = 10000000;
private static final Integer DELTA = 5000;
private static final Integer PERIOD = 50;
public static final EventType<Event> ScrollEventType = new EventType<>("ScrollEvent");
public static final EventType<Event> StopEventType = new EventType<>("StopEvent");
public static class ScrollEvent extends Event {
public Integer position = 0;
public ScrollEvent(Integer position) {
super(ScrollEventType);
this.position = position;
}
}
public static class StopEvent extends Event {
public StopEvent() {
super(StopEventType);
}
}
private TableView<Book> table;
private ObservableList<Book> data;
private Text actionStatus;
private Button startButton;
private Button stopButton;
private Integer count = 0;
private SimpleIntegerProperty currentPositionProperty = new SimpleIntegerProperty(0);
private Timer timer = null;
public static void main(String [] args) {
Application.launch(args);
}
@Override
public void start(Stage primaryStage) {
primaryStage.setTitle("Table View Example 1");
// Books label
Label label = new Label("Books");
label.setTextFill(Color.DARKBLUE);
label.setFont(Font.font("Calibri", FontWeight.BOLD, 36));
HBox labelHb = new HBox();
labelHb.setAlignment(Pos.CENTER);
labelHb.getChildren().add(label);
// Table view, data, columns and properties
table = new TableView<>();
data = getInitialTableData();
table.setItems(data);
TableColumn<Book, Integer> indexCol = new TableColumn<>("Index");
indexCol.setCellValueFactory(new PropertyValueFactory<Book, Integer>("index"));
TableColumn<Book, String> titleCol = new TableColumn<Book, String>("Title");
titleCol.setCellValueFactory(new PropertyValueFactory<Book, String>("title"));
TableColumn<Book, String> authorCol = new TableColumn<Book, String>("Author");
authorCol.setCellValueFactory(new PropertyValueFactory<Book, String>("author"));
table.getColumns().setAll(indexCol, titleCol, authorCol);
table.setPrefWidth(450);
table.setPrefHeight(300);
table.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY);
table.getSelectionModel().selectedIndexProperty().addListener(
new RowSelectChangeListener());
// Status message text
actionStatus = new Text();
actionStatus.setFill(Color.FIREBRICK);
startButton = new Button("Play");
stopButton = new Button("Stop");
stopButton.setDisable(true);
currentPositionProperty.addListener(new ChangeListener<Number>() {
@Override
public void changed(ObservableValue<? extends Number> observable, Number oldValue, Number newValue) {
Platform.runLater(new Runnable() {
@Override
public void run() {
table.scrollTo(newValue.intValue());
table.getSelectionModel().select(newValue.intValue());
}
});
}
});
primaryStage.addEventHandler(ScrollEventType, new EventHandler<Event>() {
@Override
public void handle(Event event) {
if (event.getEventType() == ScrollEventType) {
currentPositionProperty.set(((ScrollEvent)event).position);
}
}
});
primaryStage.addEventHandler(StopEventType, new EventHandler<Event>() {
@Override
public void handle(Event event) {
if (timer != null) {
timer.cancel();
timer = null;
}
}
});
startButton.setOnAction(new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent event) {
count = 0;
startButton.setDisable(true);
stopButton.setDisable(false);
if (timer == null) {
timer = new Timer(true);
timer.schedule(new TimerTask() {
@Override
public void run() {
count++;
int position = count * DELTA;
if (position >= COUNT) {
Event.fireEvent(primaryStage, new ScrollEvent(COUNT));
Event.fireEvent(primaryStage, new StopEvent());
} else {
Event.fireEvent(primaryStage, new ScrollEvent(position));
}
}
}, 0, PERIOD);
}
}
});
stopButton.setOnAction(new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent event) {
startButton.setDisable(false);
stopButton.setDisable(true);
if (timer != null) {
timer.cancel();
timer = null;
}
}
});
HBox hbox = new HBox(20);
hbox.setPadding(new Insets(25, 25, 25, 25));
hbox.getChildren().addAll(startButton, stopButton);
// Vbox
VBox vbox = new VBox(20);
vbox.setPadding(new Insets(25, 25, 25, 25));
vbox.getChildren().addAll(labelHb, table, actionStatus, hbox);
// Scene
Scene scene = new Scene(vbox, 500, 475); // w x h
primaryStage.setScene(scene);
primaryStage.show();
// Select the first row
table.getSelectionModel().select(0);
Book book = table.getSelectionModel().getSelectedItem();
actionStatus.setText(book.toString());
} // start()
private class RowSelectChangeListener implements ChangeListener<Number> {
@Override
public void changed(ObservableValue<? extends Number> ov,
Number oldVal, Number newVal) {
int ix = newVal.intValue();
if ((ix < 0) || (ix >= data.size())) {
return; // invalid data
}
Book book = data.get(ix);
actionStatus.setText(book.toString());
}
}
private ObservableList<Book> getInitialTableData() {
List<Book> list = new ArrayList<>();
int i = 0;
while (i < COUNT) {
list.add(new Book(i++, "The Thief", "Fuminori Nakamura"));
list.add(new Book(i++, "Of Human Bondage", "Somerset Maugham"));
list.add(new Book(i++, "The Bluest Eye", "Toni Morrison"));
list.add(new Book(i++, "I Am Ok You Are Ok", "Thomas Harris"));
list.add(new Book(i++, "Magnificent Obsession", "Lloyd C Douglas"));
list.add(new Book(i++, "100 Years of Solitude", "Gabriel Garcia Marquez"));
list.add(new Book(i++, "What the Dog Saw", "Malcolm Gladwell"));
list.add(new Book(i++, "The Fakir", "Ruzbeh Bharucha"));
list.add(new Book(i++, "The Hobbit", "J.R.R. Tolkien"));
list.add(new Book(i++, "Strange Life of Ivan Osokin", "P.D. Ouspensky"));
list.add(new Book(i++, "The Hunt for Red October", "Tom Clancy"));
list.add(new Book(i++, "Coma", "Robin Cook"));
}
return FXCollections.observableList(list);
}
}
The problem was resolved by using OpenJDK 11 with a separate OpenJFX 15, instead of the Zulu JDK 11 build with bundled JavaFX.
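If you switch runtimes like that, it can be worth verifying at startup which JavaFX build the application actually picked up, since a JDK with bundled JavaFX can silently shadow the separate OpenJFX modules you added. A small check (just a sketch; these are the standard JavaFX system properties):
// Log the Java and JavaFX versions the application is really running on.
System.out.println("java.version = " + System.getProperty("java.version"));
System.out.println("javafx.version = " + System.getProperty("javafx.version"));
System.out.println("javafx.runtime.version = " + System.getProperty("javafx.runtime.version"));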

How to turn on flash in picture mode

[SOLVED]
After searching for an answer, I didn't find a solution for turning on the flash while in picture mode.
The app opens the camera in the background and continuously processes the pictures and detects objects, but the phone sits in a container with no light, so I need to make sure the flash is always on.
There are other approaches I'm considering as well, and I'm not sure how to get these to work either:
1. Switch to video mode (because I'm processing the camera preview frames anyway, and in video mode the flash can stay on without recording a video).
2. Set the default camera app to a different app which supports image preview with flash when tapping the screen (I'd need to figure out how to switch to a different app and how to simulate tapping, maybe even with another device connected to the app over Bluetooth that sends the clicks).
3. Override the camera's API and make sure the flash can be on, or just disable it and let another app turn on the flash.
This doesn't seem to work (in the last code block):
Camera.Parameters parameters = camera.getParameters();
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_ON);
Approach 1 or 3 would be ideal; any ideas how to make them work? This is the code I'm using:
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.text.TextUtils;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import org.tensorflow.lite.examples.classification.customview.AutoFitTextureView;
import org.tensorflow.lite.examples.classification.env.Logger;
public class CameraConnectionFragment extends Fragment {
private static final Logger LOGGER = new Logger();
/**
* The camera preview size will be chosen to be the smallest frame by pixel size capable of
* containing a DESIRED_SIZE x DESIRED_SIZE square.
*/
private static final int MINIMUM_PREVIEW_SIZE = 320;
/** Conversion from screen rotation to JPEG orientation. */
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final String FRAGMENT_DIALOG = "dialog";
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
/** A {@link Semaphore} to prevent the app from exiting before closing the camera. */
private final Semaphore cameraOpenCloseLock = new Semaphore(1);
/** A {@link OnImageAvailableListener} to receive frames as they are available. */
private final OnImageAvailableListener imageListener;
/** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */
private final Size inputSize;
/** The layout identifier to inflate for this Fragment. */
private final int layout;
private final ConnectionCallback cameraConnectionCallback;
private final CameraCaptureSession.CaptureCallback captureCallback =
new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureProgressed(
final CameraCaptureSession session,
final CaptureRequest request,
final CaptureResult partialResult) {}
@Override
public void onCaptureCompleted(
final CameraCaptureSession session,
final CaptureRequest request,
final TotalCaptureResult result) {}
};
/** ID of the current {@link CameraDevice}. */
private String cameraId;
/** An {@link AutoFitTextureView} for camera preview. */
private AutoFitTextureView textureView;
/** A {@link CameraCaptureSession } for camera preview. */
private CameraCaptureSession captureSession;
/** A reference to the opened {@link CameraDevice}. */
private CameraDevice cameraDevice;
/** The rotation in degrees of the camera sensor from the display. */
private Integer sensorOrientation;
/** The {@link Size} of camera preview. */
private Size previewSize;
/** An additional thread for running tasks that shouldn't block the UI. */
private HandlerThread backgroundThread;
/** A {@link Handler} for running tasks in the background. */
private Handler backgroundHandler;
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
* TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
};
/** An {@link ImageReader} that handles preview frame capture. */
private ImageReader previewReader;
/** {@link CaptureRequest.Builder} for the camera preview */
private CaptureRequest.Builder previewRequestBuilder;
/** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */
private CaptureRequest previewRequest;
/** {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. */
private final CameraDevice.StateCallback stateCallback =
new CameraDevice.StateCallback() {
@Override
public void onOpened(final CameraDevice cd) {
// This method is called when the camera is opened. We start camera preview here.
cameraOpenCloseLock.release();
cameraDevice = cd;
createCameraPreviewSession();
}
@Override
public void onDisconnected(final CameraDevice cd) {
cameraOpenCloseLock.release();
cd.close();
cameraDevice = null;
}
@Override
public void onError(final CameraDevice cd, final int error) {
cameraOpenCloseLock.release();
cd.close();
cameraDevice = null;
final Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
}
};
@SuppressLint("ValidFragment")
private CameraConnectionFragment(
final ConnectionCallback connectionCallback,
final OnImageAvailableListener imageListener,
final int layout,
final Size inputSize) {
this.cameraConnectionCallback = connectionCallback;
this.imageListener = imageListener;
this.layout = layout;
this.inputSize = inputSize;
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the minimum of both, or an exact match if possible.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
final Size desiredSize = new Size(width, height);
// Collect the supported resolutions that are at least as big as the preview Surface
boolean exactSizeFound = false;
final List<Size> bigEnough = new ArrayList<Size>();
final List<Size> tooSmall = new ArrayList<Size>();
for (final Size option : choices) {
if (option.equals(desiredSize)) {
// Set the size but don't return yet so that remaining sizes will still be logged.
exactSizeFound = true;
}
if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
bigEnough.add(option);
} else {
tooSmall.add(option);
}
}
LOGGER.i("Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
LOGGER.i("Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
LOGGER.i("Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
if (exactSizeFound) {
LOGGER.i("Exact size match found.");
return desiredSize;
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
return chosenSize;
} else {
LOGGER.e("Couldn't find any suitable preview size");
return choices[0];
}
}
public static CameraConnectionFragment newInstance(
final ConnectionCallback callback,
final OnImageAvailableListener imageListener,
final int layout,
final Size inputSize) {
return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
}
/**
* Shows a {@link Toast} on the UI thread.
*
* @param text The message to show
*/
private void showToast(final String text) {
final Activity activity = getActivity();
if (activity != null) {
activity.runOnUiThread(
new Runnable() {
@Override
public void run() {
Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
}
});
}
}
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
@Override
public void onActivityCreated(final Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (textureView.isAvailable()) {
openCamera(textureView.getWidth(), textureView.getHeight());
} else {
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
public void setCamera(String cameraId) {
this.cameraId = cameraId;
}
/** Sets up member variables related to camera. */
private void setUpCameraOutputs() {
final Activity activity = getActivity();
final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
final StreamConfigurationMap map =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
previewSize =
chooseOptimalSize(
map.getOutputSizes(SurfaceTexture.class),
inputSize.getWidth(),
inputSize.getHeight());
// We fit the aspect ratio of TextureView to the size of preview we picked.
final int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
textureView.setVisibility(View.GONE);
} else {
textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
textureView.setVisibility(View.GONE);
}
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
} catch (final NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
// TODO(andrewharp): abstract ErrorDialog/RuntimeException handling out into new method and
// reuse throughout app.
ErrorDialog.newInstance(getString(R.string.camera_error))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
throw new RuntimeException(getString(R.string.camera_error));
}
cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);
}
/** Opens the camera specified by {@link CameraConnectionFragment#cameraId}. */
private void openCamera(final int width, final int height) {
setUpCameraOutputs();
configureTransform(width, height);
final Activity activity = getActivity();
final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(cameraId, stateCallback, backgroundHandler);
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
} catch (final InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
/** Closes the current {@link CameraDevice}. */
private void closeCamera() {
try {
cameraOpenCloseLock.acquire();
if (null != captureSession) {
captureSession.close();
captureSession = null;
}
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != previewReader) {
previewReader.close();
previewReader = null;
}
} catch (final InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
cameraOpenCloseLock.release();
}
}
/** Starts a background thread and its {@link Handler}. */
private void startBackgroundThread() {
backgroundThread = new HandlerThread("ImageListener");
backgroundThread.start();
backgroundHandler = new Handler(backgroundThread.getLooper());
}
/** Stops the background thread and its {@link Handler}. */
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
backgroundHandler = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
}
/** Creates a new {@link CameraCaptureSession} for camera preview. */
private void createCameraPreviewSession() {
try {
final SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
// This is the output Surface we need to start preview.
final Surface surface = new Surface(texture);
// We set up a CaptureRequest.Builder with the output Surface.
previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(surface);
LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());
// Create the reader for the preview frames.
previewReader =
ImageReader.newInstance(
previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
previewRequestBuilder.addTarget(previewReader.getSurface());
// Here, we create a CameraCaptureSession for camera preview.
cameraDevice.createCaptureSession(
Arrays.asList(surface, previewReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (null == cameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
captureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
previewRequestBuilder.set(
CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Flash is automatically enabled when necessary.
// previewRequestBuilder.set(
// CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
previewRequestBuilder.set(
CaptureRequest.FLASH_MODE, CaptureRequest.CONTROL_AE_MODE_ON_ALWAYS_FLASH);
// Finally, we start displaying the camera preview.
previewRequest = previewRequestBuilder.build();
captureSession.setRepeatingRequest(
previewRequest, captureCallback, backgroundHandler);
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
}
}
@Override
public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
showToast("Failed");
}
},
null);
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
}
}
}
The second fragment (LegacyCameraConnectionFragment):
public class LegacyCameraConnectionFragment extends Fragment {
private static final Logger LOGGER = new Logger();
/** Conversion from screen rotation to JPEG orientation. */
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private Camera camera;
private Camera.PreviewCallback imageListener;
private Size desiredSize;
/** The layout identifier to inflate for this Fragment. */
private int layout;
/** An {@link AutoFitTextureView} for camera preview. */
private AutoFitTextureView textureView;
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
* TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
int index = getCameraId();
camera = Camera.open(index);
try {
Camera.Parameters parameters = camera.getParameters();
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes != null
&& focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
Size[] sizes = new Size[cameraSizes.size()];
int i = 0;
for (Camera.Size size : cameraSizes) {
sizes[i++] = new Size(size.width, size.height);
}
Size previewSize =
CameraConnectionFragment.chooseOptimalSize(
sizes, desiredSize.getWidth(), desiredSize.getHeight());
parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
camera.setDisplayOrientation(90);
camera.setParameters(parameters);
camera.setPreviewTexture(texture);
} catch (IOException exception) {
camera.release();
}
camera.setPreviewCallbackWithBuffer(imageListener);
Camera.Size s = camera.getParameters().getPreviewSize();
camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
textureView.setAspectRatio(s.height, s.width);
camera.startPreview();
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
};
/** An additional thread for running tasks that shouldn't block the UI. */
private HandlerThread backgroundThread;
@SuppressLint("ValidFragment")
public LegacyCameraConnectionFragment(
final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
this.imageListener = imageListener;
this.layout = layout;
this.desiredSize = desiredSize;
}
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
@Override
public void onActivityCreated(final Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (textureView.isAvailable()) {
camera.startPreview();
} else {
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
}
@Override
public void onPause() {
stopCamera();
stopBackgroundThread();
super.onPause();
}
/** Starts a background thread and its {@link Handler}. */
private void startBackgroundThread() {
backgroundThread = new HandlerThread("CameraBackground");
backgroundThread.start();
}
/** Stops the background thread and its {@link Handler}. */
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
}
protected void stopCamera() {
if (camera != null) {
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
}
}
private int getCameraId() {
CameraInfo ci = new CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, ci);
if (ci.facing == CameraInfo.CAMERA_FACING_BACK) return i;
}
return -1; // No camera found
}
}
SOLUTION: in the first code block (the Camera2-based CameraConnectionFragment):
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON);
mPreviewRequestBuilder.set(CaptureRequest.FLASH_MODE,
CaptureRequest.FLASH_MODE_TORCH);
and in the second code block (the legacy LegacyCameraConnectionFragment):
// Check whether the device supports auto-flash; if it does, enable it
List<String> flashModes = parameters.getSupportedFlashModes();
if (flashModes != null && flashModes.contains(android.hardware.Camera.Parameters.FLASH_MODE_AUTO))
{
parameters.setFlashMode(android.hardware.Camera.Parameters.FLASH_MODE_AUTO);
}
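For context, here is roughly where the Camera2 part of that solution sits inside createCameraPreviewSession() of the first fragment, together with a capability check. This is only a sketch reusing the field names from the code above (previewRequestBuilder, captureSession, and so on); CameraCharacteristics.FLASH_INFO_AVAILABLE is the standard way to ask whether the device has a flash unit at all:
// In setUpCameraOutputs(), remember (e.g. in a field) whether a flash unit exists at all:
Boolean hasFlash = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);

// In createCameraPreviewSession(), before building the repeating request:
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
if (Boolean.TRUE.equals(hasFlash)) {
    // Keep auto-exposure on, but force the torch instead of letting AE decide.
    previewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
        CaptureRequest.CONTROL_AE_MODE_ON);
    previewRequestBuilder.set(CaptureRequest.FLASH_MODE,
        CaptureRequest.FLASH_MODE_TORCH);
}
previewRequest = previewRequestBuilder.build();
captureSession.setRepeatingRequest(previewRequest, captureCallback, backgroundHandler);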

Libgdx - camera.unproject is not fixing my coordinates

The following code gives me very strange y coordinates.
10-18 00:13:36.834 30543-30567/com.xxxx.yyyy.android I/x﹕ 137.4782
10-18 00:13:36.834 30543-30567/com.xxxx.yyyy.android I/y﹕ -1984.2426
10-18 00:13:36.835 30543-30567/com.xxxx.yyyy.android I/ux﹕ 91.65213
10-18 00:13:36.835 30543-30567/com.xxxx.yyyy.android I/uy﹕ -1984.2426
I imagine I've set something up wrong rather than doing something wrong at runtime?
The camera.unproject call should take care of all the remapping from screen coordinates to game coordinates, shouldn't it? Or do I have to scale and invert before unprojecting?
package com.xxxx.yyyy;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Camera;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.InputEvent;
import com.badlogic.gdx.scenes.scene2d.InputListener;
public class LetterActor extends Actor
{
private Texture texture;
private Vector3 touchPosition = new Vector3();
private Camera camera;
private boolean unproject = true;
public LetterActor(Texture letterTexture, Camera theCamera)
{
texture = letterTexture;
camera = theCamera;
touchPosition.set(240, 800, 0);
camera.unproject(touchPosition);
setPosition(touchPosition.x, touchPosition.y);
setSize(texture.getWidth(), texture.getHeight());
addListener(new InputListener()
{
@Override
public boolean touchDown(InputEvent event, float x, float y, int pointer, int button)
{
touchPosition.set(x, y, 0);
if (unproject)
{
camera.unproject(touchPosition);
}
setPosition(touchPosition.x, touchPosition.y);
logPositions(x, y, touchPosition.x, touchPosition.y);
return true;
}
@Override
public void touchUp(InputEvent event, float x, float y, int pointer, int button)
{
touchPosition.set(x, y, 0);
if (unproject)
{
camera.unproject(touchPosition);
}
setPosition(touchPosition.x, touchPosition.y);
logPositions(x, y, touchPosition.x, touchPosition.y);
}
@Override
public void touchDragged(InputEvent event, float x, float y, int pointer)
{
touchPosition.set(x, y, 0);
if (unproject)
{
camera.unproject(touchPosition);
}
setPosition(touchPosition.x, touchPosition.y);
logPositions(x, y, touchPosition.x, touchPosition.y);
}
});
}
private void screenTo()
{
}
private void logPositions(float x, float y,float ux, float uy)
{
Gdx.app.log("x", Float.toString(x));
Gdx.app.log("y", Float.toString(y));
Gdx.app.log("ux", Float.toString(ux));
Gdx.app.log("uy", Float.toString(y));
}
@Override
public void draw(Batch batch, float alpha)
{
batch.draw(texture, getX(), getY(), getWidth(), getHeight());
}
@Override
public void act(float delta) {}
}
package com.xxxx.yyyy;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.utils.viewport.ExtendViewport;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.Touchable;
import com.badlogic.gdx.utils.viewport.FitViewport;
public class WordPuzzle extends ApplicationAdapter
{
private final static float VIRTUAL_WIDTH = 480;
private final static float VIRTUAL_HEIGHT = 800;
private OrthographicCamera camera;
private FitViewport viewport;
private Stage stage;
@Override
public void create()
{
camera = new OrthographicCamera(VIRTUAL_WIDTH, VIRTUAL_HEIGHT);
camera.setToOrtho(false, VIRTUAL_WIDTH, VIRTUAL_HEIGHT);
viewport = new FitViewport(VIRTUAL_WIDTH, VIRTUAL_HEIGHT, camera);
stage = new Stage();
stage.setViewport(viewport);
Gdx.input.setInputProcessor(stage);
Texture[] textures = LetterLoader.loadLetters();
for (int i = 0; i < textures.length; i++)
{
LetterActor letterActor = new LetterActor(textures[i], camera);
letterActor.setTouchable(Touchable.enabled);
stage.addActor(letterActor);
}
}
@Override
public void render()
{
Gdx.gl.glClearColor(1, 1, 1, 1);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
stage.act(Gdx.graphics.getDeltaTime());
stage.draw();
}
@Override public void resize(int width, int height)
{
stage.getViewport().update(width, height, true);
}
@Override public void dispose()
{
stage.dispose();
}
}
package com.xxxx.yyyy;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Texture;
public class LetterLoader {
public static Texture[] loadLetters()
{
Texture[] letters = new Texture[26];
for (int i = 0; i < 26; i++)
{
char letter = (char) (i + 65);
letters[i] = new Texture(Gdx.files.internal("bigletters/" + letter + ".png"));
}
return letters;
}
}
First, the touch position (x, y) you get from the InputListener is already in the correct (world) coordinates.
Concerning your output, you actually print y two times, but call it uy the second time:
Gdx.app.log("uy", Float.toString(y));
If touchPosition.set(240, 800, 0); is in screen coordinates, then you need to unproject them, but
camera.unproject(touchPosition);
assumes that your camera fills the whole screen, thus it calls internally:
unproject(screenCoords, 0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
Since you use a virtual size, this is wrong. The simplest solution would be to use the unproject method of the viewport that you are using:
viewport.unproject(touchPosition);
This will call the camera unproject method with the correct parameters automatically.
Since you are using Stage and InputListener, the coordinates you get in touchDown and the related methods are already in world coordinates, so it doesn't make sense to unproject them. You can use the x and y directly.
Also (although this is irrelevant to InputListener), camera.unproject assumes a Viewport that fills the screen, which is not true of FitViewport, which you're using. If you are using a Viewport class, you need to use viewport.unproject instead of camera.unproject, so it takes the black bars into account.
But you only need to worry about unprojecting for stuff not related to the Stage.
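If you do need to convert raw screen coordinates yourself (for example the hard-coded touchPosition.set(240, 800, 0) in the constructor, or input received outside the Stage), a small sketch of the viewport-based conversion looks like this. Here screenX and screenY stand for raw pixel coordinates, and the actor is assumed to have been added to the stage already:
// Inside the Actor you can reach the viewport through the Stage.
Viewport vp = getStage().getViewport();   // null until the actor has been added to a stage
touchPosition.set(screenX, screenY, 0);   // raw screen pixels, origin top-left
vp.unproject(touchPosition);              // now in world (game) coordinates, black bars accounted for
setPosition(touchPosition.x, touchPosition.y);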

Problems with KeyListener and JOGL

I'm trying to bind a key to translate a GL_QUAD around the screen. I created a class (attached below) that implements KeyListener, and within it a method that, on a press of 'd', adds 0.1 to the x coordinates of the quad vertices. Now, I have two questions relating to this.
Firstly, it doesn't seem to do anything: upon the keypress, nothing happens to the object.
Secondly, is there a better way to achieve what I am trying to do? My end goal is to eventually end up with a sprite that the camera is focused on, which can move around a visually 2D game world.
Thanks for your time.
Code:
SpriteTest.java
package com.mangostudios.spritetest;
import java.awt.Frame;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import javax.media.opengl.GLCapabilities;
import javax.media.opengl.GLProfile;
import javax.media.opengl.awt.GLCanvas;
import com.jogamp.opengl.util.FPSAnimator;
public class SpriteTest
{
public static void main(String[] args) {
GLProfile glp = GLProfile.getDefault();
GLCapabilities caps = new GLCapabilities(glp);
GLCanvas canvas = new GLCanvas(caps);
Frame frame = new Frame("AWT Window Test");
frame.setSize(300, 300);
frame.add(canvas);
frame.setVisible(true);
frame.addWindowListener(new WindowAdapter() {
public void windowClosing(WindowEvent e) {
System.exit(0);
}
});
canvas.addGLEventListener(new Renderer());
FPSAnimator animator = new FPSAnimator(canvas, 60);
//animator.add(canvas);
animator.start();
}
}
Renderer.java
package com.mangostudios.spritetest;
import javax.media.opengl.GL2;
import javax.media.opengl.GLAutoDrawable;
import javax.media.opengl.GLEventListener;
public class Renderer implements GLEventListener {
InputListener input = new InputListener();
@Override
public void display(GLAutoDrawable drawable) {
update();
render(drawable);
}
@Override
public void dispose(GLAutoDrawable drawable) {
}
@Override
public void init(GLAutoDrawable drawable) {
}
@Override
public void reshape(GLAutoDrawable drawable, int x, int y, int w, int h) {
}
private void update() {
}
private void render(GLAutoDrawable drawable) {
GL2 gl = drawable.getGL().getGL2();
// draw a triangle filling the window
gl.glBegin(GL2.GL_QUADS);
gl.glVertex2f( input.xTran, 0.1f);
gl.glVertex2f( input.xTran,-0.1f);
gl.glVertex2f( -input.xTran, -0.1f);
gl.glVertex2f( -input.xTran, 0.1f);
gl.glEnd();
}
}
InputListener.java
package com.mangostudios.spritetest;
import com.jogamp.newt.event.KeyEvent;
import com.jogamp.newt.event.KeyListener;
public class InputListener implements KeyListener{
boolean loopBool = false;
float xTran = 0.1f;
float yTran = 0.1f;
@Override
public void keyPressed(KeyEvent d) {
loopBool = true;
while (loopBool = true) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
@Override
public void keyReleased(KeyEvent d) {
}
}
First, you never call addKeyListener(). Second, you shouldn't put an infinite loop into keyPressed(). Third, you use a NEWT KeyListener whereas you use an AWT GLCanvas: rather use GLWindow with a NEWT KeyListener, use an AWT GLCanvas with an AWT KeyListener, or use NewtCanvasAWT. Finally, before writing your own example, try mine on Wikipedia in order to understand why it works.
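Since the example already uses an AWT GLCanvas, the smallest change in the spirit of that answer is probably to make InputListener an AWT java.awt.event.KeyListener and register it on the canvas. A sketch of that wiring (just an illustration of the idea, not taken from the answer):
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;

// InputListener as an AWT KeyListener: only record state, never block the event thread.
public class InputListener implements KeyListener {
    volatile float xTran = 0.0f;

    @Override
    public void keyPressed(KeyEvent e) {
        if (e.getKeyCode() == KeyEvent.VK_D) {
            xTran += 0.1f;   // the Renderer reads xTran on the next frame
        }
    }

    @Override
    public void keyReleased(KeyEvent e) {}

    @Override
    public void keyTyped(KeyEvent e) {}
}
Then, in SpriteTest.main(), register the same instance that the Renderer reads from:
InputListener input = new InputListener();
canvas.addKeyListener(input);   // AWT listener on the AWT canvas
canvas.setFocusable(true);
canvas.requestFocus();          // the canvas must have focus to receive key events
canvas.addGLEventListener(new Renderer(input));   // hypothetical constructor that stores the listener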

ImageView doesn't slide on translationX animation

I'm trying to create a slider: when the user flings left or right, the slider should slide with a translation animation. However, it doesn't work the right way.
package pete.android.study.home.scrollingindicator;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.animation.ValueAnimator;
import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.View;
import android.widget.ImageView;
public class MainScreen extends Activity {
protected ValueAnimator mScrollIndicatorAnimator;
protected ValueAnimator mScrollator;
protected ImageView mScrollIndicator = null;
protected static final int sScrollIndicatorFadeInDuration = 150;
protected static final int sScrollIndicatorFadeOutDuration = 650;
protected static final int sScrollIndicatorFlashDuration = 650;
public static final int PAGE_COUNT = 4;
private int mPageWidth = 0;
private int mCurrentPage = 0;
private int mIndicatorPos = 0;
private int mIndicatorSpace = 0;
private GestureDetector mGestureDetector;
private static final boolean DEBUG = true;
private static final String TAG = "indicator";
private Handler mHandler;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
DisplayMetrics displaymetrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(displaymetrics);
//ht = displaymetrics.heightPixels;
mPageWidth = displaymetrics.widthPixels;
mHandler = new Handler();
setupScrollingIndicator();
mGestureDetector = new GestureDetector(this, new LearnGestureListener());
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if (mGestureDetector.onTouchEvent(event))
return true;
else
return false;
}
public void setupScrollingIndicator() {
if(mScrollIndicator == null) {
mScrollIndicator = (ImageView) findViewById(R.id.indicator);
}
mIndicatorSpace = mPageWidth / PAGE_COUNT;
mIndicatorPos = mIndicatorSpace * mCurrentPage;
if(DEBUG) {
Log.i(TAG, "mIndicatorSpace = " + mIndicatorSpace);
Log.i(TAG, "mCurrentPage = " + mCurrentPage);
Log.i(TAG, "mIndicatorPos = " + mIndicatorPos);
}
if(mScrollIndicator.getMeasuredWidth() != mIndicatorSpace) {
mScrollIndicator.getLayoutParams().width = mIndicatorSpace;
mScrollIndicator.requestLayout();
}
mScrollIndicator.setTranslationX(mIndicatorPos);
mScrollIndicator.invalidate();
}
public void showIndicator() {
setupScrollingIndicator();
mScrollIndicator.setVisibility(View.VISIBLE);
cancelScrollingAnimations();
mScrollIndicatorAnimator = ObjectAnimator.ofFloat(mScrollIndicator, "alpha", 1f);
mScrollIndicatorAnimator.setDuration(sScrollIndicatorFadeInDuration);
mScrollIndicatorAnimator.start();
mScrollator = ObjectAnimator.ofFloat(mScrollIndicator, "translationX", mIndicatorPos);
mScrollator.setDuration(sScrollIndicatorFlashDuration);
mScrollator.start();
}
Runnable hideScrollingIndicatorRunnable = new Runnable() {
@Override
public void run() {
hideIndicator();
}
};
protected void flashIndicator() {
showIndicator();
mHandler.postDelayed(hideScrollingIndicatorRunnable, sScrollIndicatorFlashDuration);
}
public void hideIndicator() {
setupScrollingIndicator();
cancelScrollingAnimations();
mScrollIndicatorAnimator = ObjectAnimator.ofFloat(mScrollIndicator, "alpha", 0f);
mScrollIndicatorAnimator.setDuration(sScrollIndicatorFadeOutDuration);
mScrollIndicatorAnimator.addListener(new AnimatorListenerAdapter() {
private boolean cancelled = false;
@Override
public void onAnimationCancel(android.animation.Animator animation) {
cancelled = true;
}
@Override
public void onAnimationEnd(Animator animation) {
if (!cancelled) {
mScrollIndicator.setVisibility(View.INVISIBLE);
}
}
});
mScrollIndicatorAnimator.start();
}
public void cancelScrollingAnimations() {
if(mScrollIndicatorAnimator != null) {
mScrollIndicatorAnimator.cancel();
}
}
public void scrollToRight() {
flashIndicator();
mCurrentPage++;
if(mCurrentPage >= PAGE_COUNT) {
mCurrentPage = 0;
}
}
public void scrollToLeft() {
flashIndicator();
mCurrentPage--;
if(mCurrentPage < 0) {
mCurrentPage = PAGE_COUNT - 1;
}
}
class LearnGestureListener extends GestureDetector.SimpleOnGestureListener {
@Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
if(e2.getX() - e1.getX() > 50) {
scrollToLeft();
} else if(e2.getX() - e1.getX() < 50) {
scrollToRight();
}
return true;
}
}
}
Please help me fix this, thanks very much!
mScrollIndicator.setTranslationX(mIndicatorPos) just redraws your view at the new position mIndicatorPos; it doesn't animate anything.
If you want mScrollIndicator to move along with the scroll gesture, try overriding the onScroll method instead of onFling.
Alternatively, create an android.animation.ObjectAnimator, set it up for your view, and call ObjectAnimator.start() on the fling, as in the sketch below.
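A minimal sketch of that second option, reusing the field names from the activity above (this is only an illustration: update mCurrentPage first, compute the new mIndicatorPos, and animate to it instead of snapping the view there in setupScrollingIndicator()):
public void scrollToRight() {
    mCurrentPage++;
    if (mCurrentPage >= PAGE_COUNT) {
        mCurrentPage = 0;
    }
    mIndicatorPos = mIndicatorSpace * mCurrentPage;
    // Animate from wherever the indicator currently is to the new page position.
    ObjectAnimator slide = ObjectAnimator.ofFloat(mScrollIndicator, "translationX", mIndicatorPos);
    slide.setDuration(sScrollIndicatorFlashDuration);
    slide.start();
}
scrollToLeft() would do the same with mCurrentPage-- and the wrap-around check.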