Example usage for android.view GestureDetector GestureDetector

List of usage examples for android.view GestureDetector GestureDetector

Introduction

On this page you can find an example of usage for android.view GestureDetector GestureDetector.

Prototype

public GestureDetector(Context context, OnGestureListener listener) 

Source Link

Document

Creates a GestureDetector with the supplied listener.

Usage

From source file:com.med.fast.ocr.OcrCaptureActivity.java

/**
 * Sets up the OCR capture UI, starts (or requests permission for) the
 * camera source, wires up gesture detection, and boots the
 * text-to-speech engine.
 */
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);
    setContentView(R.layout.ocr_capture);

    // Sensible defaults for capturing text: autofocus on, flash off.
    final boolean autoFocus = true;
    final boolean useFlash = false;

    // The camera may only be opened once CAMERA permission is granted;
    // otherwise ask the user for it first.
    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();

    // Start the Text To Speech engine; the listener fires once
    // initialization finishes (success or failure).
    TextToSpeech.OnInitListener ttsInitListener = new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status != TextToSpeech.SUCCESS) {
                Log.d("TTS", "Error starting the text to speech engine.");
            } else {
                Log.d("TTS", "Text to speech engine started successfully.");
                tts.setLanguage(Locale.US);
            }
        }
    };
    tts = new TextToSpeech(this.getApplicationContext(), ttsInitListener);
}

From source file:de.damdi.fitness.activity.start_training.FExDetailFragment.java

@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
    View rootView = inflater.inflate(R.layout.fragment_fex_detail, container, false);

    // View that displays the currently selected exercise.
    ImageView exerciseImage = (ImageView) rootView.findViewById(R.id.imageview);

    // Route touch input on the image through a gesture detector.
    this.mGestureScanner = new GestureDetector(this.getActivity(),
            new ExerciseDetailOnGestureListener(this, exerciseImage, mExercise));

    // Show the exercise's first image, or fall back to the launcher icon.
    if (mExercise.getImagePaths().isEmpty()) {
        exerciseImage.setImageResource(R.drawable.ic_launcher);
    } else {
        DataHelper data = new DataHelper(getActivity());
        exerciseImage.setImageDrawable(data.getDrawable(mExercise.getImagePaths().get(0).toString()));
    }

    exerciseImage.setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            return mGestureScanner.onTouchEvent(event);
        }
    });

    // Back the list with the training entries for this exercise.
    ListView listView = (ListView) rootView.findViewById(R.id.list);
    final TrainingEntryListAdapter entryAdapter = new TrainingEntryListAdapter((FragmentActivity) getActivity(),
            mExercise, mTrainingEntry);
    listView.setAdapter(entryAdapter);

    // Allow list rows to be removed with a horizontal swipe.
    SwipeDismissListViewTouchListener dismissListener = new SwipeDismissListViewTouchListener(listView,
            new SwipeDismissListViewTouchListener.OnDismissCallback() {
                @Override
                public void onDismiss(ListView listView, int[] reverseSortedPositions) {
                    for (int position : reverseSortedPositions) {
                        entryAdapter.remove(position);
                    }
                    entryAdapter.notifyDataSetChanged();
                }
            });
    listView.setOnTouchListener(dismissListener);
    // The scroll listener suppresses swipe detection while the list is
    // actively scrolling.
    listView.setOnScrollListener(dismissListener.makeScrollListener());

    return rootView;
}

From source file:com.google.android.gms.samples.vision.barcodereader.BarcodeCapture.java

/**
 * Inflates the barcode-capture layout, configures the graphic overlay,
 * requests the camera permission, and installs tap/pinch gesture handling.
 */
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container,
        @Nullable Bundle savedInstanceState) {
    View rootView = inflater.inflate(R.layout.barcode_capture, container, false);

    mPreview = (CameraSourcePreview) rootView.findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) rootView.findViewById(R.id.graphicOverlay);
    mGraphicOverlay.setShowText(isShouldShowText());
    mGraphicOverlay.setRectColors(getRectColors());
    mGraphicOverlay.setDrawRect(isShowDrawRect());

    // Ask for the camera permission up front.
    requestCameraPermission();

    gestureDetector = new GestureDetector(getContext(), new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(getContext(), new ScaleListener());

    rootView.setOnTouchListener(new View.OnTouchListener() {
        @Override
        public boolean onTouch(View view, MotionEvent e) {
            // Both detectors always see the event; the view's default
            // handler runs only when neither consumed it.
            boolean handledByScale = scaleGestureDetector.onTouchEvent(e);
            boolean handledByTap = gestureDetector.onTouchEvent(e);
            return handledByScale || handledByTap || view.onTouchEvent(e);
        }
    });
    return rootView;
}

From source file:net.osmand.plus.views.controls.DynamicListView.java

public void init(Context context) {
    // Track scrolling, scale the edge auto-scroll speed to the screen
    // density, and detect single taps.
    setOnScrollListener(mScrollListener);
    final float density = context.getResources().getDisplayMetrics().density;
    mSmoothScrollAmountAtEdge = (int) (SMOOTH_SCROLL_AMOUNT_AT_EDGE / density);
    singleTapDetector = new GestureDetector(context, new SingleTapConfirm());
}

From source file:com.amazon.appstream.fireclient.FireClientActivity.java

/**
 * Initialization. Sets up the app and spawns the connection dialog.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    Log.v(TAG, "onCreate");

    // This activity is its own gesture listener; long-press handling is
    // disabled because presses are managed directly.
    mGestureDetector = new GestureDetector(this, this);
    mGestureDetector.setIsLongpressEnabled(false);

    mTouchscreenAvailable = getPackageManager().hasSystemFeature("android.hardware.touchscreen");

    // Restore any previously saved connection settings.
    SharedPreferences preferences = getSharedPreferences("main", MODE_PRIVATE);
    if (preferences.contains(SERVER_ADDRESS)) {
        mServerAddress = preferences.getString(SERVER_ADDRESS, null);
    }
    if (preferences.contains(DES_SERVER_ADDRESS)) {
        mDESServerAddress = preferences.getString(DES_SERVER_ADDRESS, null);
    }
    if (preferences.contains(USE_APP_SERVER)) {
        mUseAppServer = preferences.getBoolean(USE_APP_SERVER, false);
    }
    if (preferences.contains(APP_ID)) {
        mAppId = preferences.getString(APP_ID, null);
    }
    if (preferences.contains(USER_ID)) {
        mUserId = preferences.getString(USER_ID, null);
    }

    requestWindowFeature(Window.FEATURE_NO_TITLE);
}

From source file:ar.com.bestprice.buyitnow.barcodereader.BarcodeCaptureActivity.java

/**
 * Sets up the barcode-capture UI, starts (or requests permission for)
 * the camera source, and installs tap/pinch gesture handling.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.barcode_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<BarcodeGraphic>) findViewById(R.id.graphicOverlay);

    // Camera options are supplied via the launching intent.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // The camera may only be opened once CAMERA permission is granted;
    // otherwise ask the user for it first.
    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();
}

From source file:org.careerop.textscanner.OcrCaptureActivity.java

/**
 * Sets up the OCR capture UI, starts (or requests permission for) the
 * camera source, and installs tap/pinch gesture handling.
 */
@Override
public void onCreate(Bundle icicle) {
    super.onCreate(icicle);
    setContentView(R.layout.ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Camera options are supplied via the launching intent.
    boolean autoFocus = getIntent().getBooleanExtra(AutoFocus, false);
    boolean useFlash = getIntent().getBooleanExtra(UseFlash, false);

    // The camera may only be opened once CAMERA permission is granted;
    // otherwise ask the user for it first.
    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to capture. Use two finger to zoom", Snackbar.LENGTH_LONG).show();
}

From source file:com.huyn.demogroup.freechild.FixedViewAttacher.java

public FixedViewAttacher(ImageView imageView) {
    mImageView = imageView;

    // Nothing to wire up while rendering inside the layout editor.
    if (imageView.isInEditMode()) {
        return;
    }

    // Observe touch and layout changes on the image's parent.
    View parent = (View) mImageView.getParent();
    parent.setOnTouchListener(this);
    parent.addOnLayoutChangeListener(this);

    // Scale/drag gestures are handled by a dedicated detector.
    mScaleDragDetector = new CustomGestureDetector(imageView.getContext(), this);

    mGestureDetector = new GestureDetector(imageView.getContext(),
            new GestureDetector.SimpleOnGestureListener() {

                @Override
                public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
                    // Forward only single-finger flings at the minimum
                    // scale to the registered fling listener.
                    if (mSingleFlingListener == null) {
                        return false;
                    }
                    if (getScale() > DEFAULT_MIN_SCALE) {
                        return false;
                    }
                    if (MotionEventCompat.getPointerCount(e1) > SINGLE_TOUCH
                            || MotionEventCompat.getPointerCount(e2) > SINGLE_TOUCH) {
                        return false;
                    }
                    return mSingleFlingListener.onFling(e1, e2, velocityX, velocityY);
                }
            });
}

From source file:ocr.OcrCaptureActivity.java

/**
 * Sets up the OCR capture UI, starts (or requests permission for) the
 * camera source, wires up gesture detection, and boots the
 * text-to-speech engine.
 */
@Override
public void onCreate(Bundle bundle) {
    super.onCreate(bundle);
    setContentView(R.layout.activity_ocr_capture);

    mPreview = (CameraSourcePreview) findViewById(R.id.preview);
    mGraphicOverlay = (GraphicOverlay<OcrGraphic>) findViewById(R.id.graphicOverlay);

    // Sensible defaults for capturing text: autofocus on, flash off.
    final boolean autoFocus = true;
    final boolean useFlash = false;

    // The camera may only be opened once CAMERA permission is granted;
    // otherwise ask the user for it first.
    if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
            == PackageManager.PERMISSION_GRANTED) {
        createCameraSource(autoFocus, useFlash);
    } else {
        requestCameraPermission();
    }

    gestureDetector = new GestureDetector(this, new CaptureGestureListener());
    scaleGestureDetector = new ScaleGestureDetector(this, new ScaleListener());

    Snackbar.make(mGraphicOverlay, "Tap to Speak. Pinch/Stretch to zoom", Snackbar.LENGTH_LONG).show();

    // Start the Text To Speech engine; the listener fires once
    // initialization finishes (success or failure).
    TextToSpeech.OnInitListener ttsInitListener = new TextToSpeech.OnInitListener() {
        @Override
        public void onInit(final int status) {
            if (status != TextToSpeech.SUCCESS) {
                Log.d("TTS", "Error starting the text to speech engine.");
            } else {
                Log.d("TTS", "Text to speech engine started successfully.");
                tts.setLanguage(Locale.US);
            }
        }
    };
    tts = new TextToSpeech(this.getApplicationContext(), ttsInitListener);
}

From source file:ca.frozen.curlingtv.activities.VideoFragment.java

@Override
public void onCreate(Bundle savedInstanceState) {
    // configure the activity
    super.onCreate(savedInstanceState);

    // load the settings and cameras
    Utils.loadData();

    // Arguments: which camera to show and whether to fill the screen.
    camera = getArguments().getParcelable(CAMERA);
    fullScreen = getArguments().getBoolean(FULL_SCREEN);

    // Tap and pinch recognizers for the video surface.
    simpleDetector = new GestureDetector(getActivity(), new SimpleListener());
    scaleDetector = new ScaleGestureDetector(getActivity(), new ScaleListener());

    // Fades the camera name and snapshot button back in.
    fadeInHandler = new Handler();
    fadeInRunner = new Runnable() {
        @Override
        public void run() {
            Animation nameFadeIn = new AlphaAnimation(0, 1);
            nameFadeIn.setDuration(FADEIN_ANIMATION_TIME);
            nameFadeIn.setFillAfter(true);
            Animation snapshotFadeIn = new AlphaAnimation(0, 1);
            snapshotFadeIn.setDuration(FADEIN_ANIMATION_TIME);
            snapshotFadeIn.setFillAfter(true);
            nameView.startAnimation(nameFadeIn);
            snapshotButton.startAnimation(snapshotFadeIn);
            fadeListener.onStartFadeIn();
        }
    };

    // Fades the camera name and snapshot button out.
    fadeOutHandler = new Handler();
    fadeOutRunner = new Runnable() {
        @Override
        public void run() {
            Animation nameFadeOut = new AlphaAnimation(1, 0);
            nameFadeOut.setDuration(FADEOUT_ANIMATION_TIME);
            nameFadeOut.setFillAfter(true);
            Animation snapshotFadeOut = new AlphaAnimation(1, 0);
            snapshotFadeOut.setDuration(FADEOUT_ANIMATION_TIME);
            snapshotFadeOut.setFillAfter(true);
            nameView.startAnimation(nameFadeOut);
            snapshotButton.startAnimation(snapshotFadeOut);
            fadeListener.onStartFadeOut();
        }
    };

    // Closes the enclosing activity when posted.
    finishHandler = new Handler();
    finishRunner = new Runnable() {
        @Override
        public void run() {
            getActivity().finish();
        }
    };
}