Read, generate and scan QR code in Android
Required dependencies
implementation 'com.android.support:appcompat-v7:26.1.0'
implementation 'com.google.android.gms:play-services-vision:11.6.0'
implementation 'com.google.zxing:core:3.2.1'
Required permissions in the manifest file.
Activities in the manifest file.
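The original manifest snippets are not reproduced in this article; the sketch below shows what the relevant entries might look like, inferred from the code that follows (the camera permission for live scanning, plus the two activities). The package name is taken from the imports further down; the label and theme are placeholders.

<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.qr_code_read_write_scan">

    <!-- Needed for live scanning with the camera in BarcodeCaptureActivity. -->
    <uses-permission android:name="android.permission.CAMERA" />

    <application
        android:label="QR Code Demo"
        android:theme="@style/AppTheme">

        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>

        <!-- Started with startActivityForResult from MainActivity. -->
        <activity android:name=".BarcodeCaptureActivity" />
    </application>
</manifest>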
This code processes an existing QR code image and decodes it to a string. When the button btn_read_qr_code in the layout is clicked, it loads a QR code image from the drawable folder, decodes it, and assigns the resulting string value to a text view.
btn_read_qr_code.setOnClickListener(object : View.OnClickListener {
    override fun onClick(v: View) {
        // Load the image to decode and show it in the ImageView
        val myBitmap = BitmapFactory.decodeResource(applicationContext.resources, R.drawable.puppy)
        iv_qr_code_image.setImageBitmap(myBitmap)

        // Set up the barcode detector
        val detector = BarcodeDetector.Builder(applicationContext)
                .setBarcodeFormats(Barcode.DATA_MATRIX or Barcode.QR_CODE)
                .build()
        if (!detector.isOperational) {
            tv_qr_code_text_content.text = "Could not set up the detector!"
            return
        }

        // Detect the barcode(s) in the bitmap
        val frame = Frame.Builder().setBitmap(myBitmap).build()
        val barcodes = detector.detect(frame)
        if (barcodes.size() == 0) {
            tv_qr_code_text_content.text = "No QR code found in the image."
            return
        }

        // Read the decoded value of the first detected barcode
        val thisCode = barcodes.valueAt(0)
        tv_qr_code_text_content.text = thisCode.rawValue
    }
})
This code takes the text from an EditText, generates a QR code bitmap from it, and sets the bitmap on an ImageView. The end result is a QR code image displayed on screen.
btn_generate_qr.setOnClickListener {
    val textValue = et_qr_code_text.text.toString()
    try {
        val bitmap = textToQRBitmap(textValue)
        iv_qr_code_image.setImageBitmap(bitmap)
    } catch (e: WriterException) {
        e.printStackTrace()
    }
}
@Throws(WriterException::class)
fun textToQRBitmap(value: String): Bitmap? {
    // Encode the text into a 500 x 500 QR code bit matrix
    val bitMatrix: BitMatrix
    try {
        bitMatrix = MultiFormatWriter().encode(value, BarcodeFormat.QR_CODE, 500, 500, null)
    } catch (e: IllegalArgumentException) {
        return null
    }

    // Convert the bit matrix into an array of black and white pixels
    val matrixWidth = bitMatrix.width
    val matrixHeight = bitMatrix.height
    val pixels = IntArray(matrixWidth * matrixHeight)
    for (y in 0 until matrixHeight) {
        val offset = y * matrixWidth
        for (x in 0 until matrixWidth) {
            pixels[offset + x] = if (bitMatrix.get(x, y)) Color.BLACK else Color.WHITE
        }
    }

    // Create the bitmap from the pixel array; the stride must match the matrix width
    val bitmap = Bitmap.createBitmap(matrixWidth, matrixHeight, Bitmap.Config.RGB_565)
    bitmap.setPixels(pixels, 0, matrixWidth, 0, 0, matrixWidth, matrixHeight)
    return bitmap
}
Scan QR Code
In MainActivity.kt, put the button listener code in the onCreate function, declare a request code (BARCODE_READER_REQUEST_CODE) and a log tag (LOG_TAG), and process the QR code result in the onActivityResult function.
private val BARCODE_READER_REQUEST_CODE = 1
btn_scan_qrcode.setOnClickListener {
    val intent = Intent(applicationContext, BarcodeCaptureActivity::class.java)
    startActivityForResult(intent, BARCODE_READER_REQUEST_CODE)
}
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
if (requestCode == BARCODE_READER_REQUEST_CODE) {
if (resultCode == CommonStatusCodes.SUCCESS) {
if (data != null) {
val barcode: Barcode = data.getParcelableExtra(BarcodeCaptureActivity.BarcodeObject)
// barcode.cornerPoints is also available here if the corner positions are needed
tv_qr_code_text_content.setText(barcode.displayValue)
} else
tv_qr_code_text_content.setText(R.string.no_barcode_captured)
} else
Log.e(LOG_TAG, String.format(getString(R.string.barcode_error_format), CommonStatusCodes.getStatusCodeString(resultCode)))
} else
super.onActivityResult(requestCode, resultCode, data)
}
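The Kotlin snippet above also refers to a LOG_TAG constant and to string resources (no_barcode_captured, barcode_error_format) that are not shown in this article, as are the resources used later in BarcodeCaptureActivity (low_storage_error, no_camera_permission, ok). A minimal sketch of what they might look like follows; the names come from the code, the values are placeholders.

In MainActivity.kt:

private val LOG_TAG = MainActivity::class.java.simpleName

In res/values/strings.xml:

<resources>
    <string name="no_barcode_captured">No barcode captured, intent data is null</string>
    <string name="barcode_error_format">Error reading barcode: %1$s</string>
    <string name="low_storage_error">Low storage: barcode detector dependencies cannot be downloaded</string>
    <string name="no_camera_permission">This app needs the camera permission to scan QR codes</string>
    <string name="ok">OK</string>
</resources>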
activity_main.xml
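The original layout file is not reproduced here; the sketch below is one possible activity_main.xml using the view ids referenced from MainActivity.kt (button labels and layout attributes are assumptions).

<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical"
    android:padding="16dp">

    <Button
        android:id="@+id/btn_read_qr_code"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Read QR code from image" />

    <EditText
        android:id="@+id/et_qr_code_text"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:hint="Text to encode" />

    <Button
        android:id="@+id/btn_generate_qr"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Generate QR code" />

    <Button
        android:id="@+id/btn_scan_qrcode"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Scan QR code" />

    <TextView
        android:id="@+id/tv_qr_code_text_content"
        android:layout_width="match_parent"
        android:layout_height="wrap_content" />

    <ImageView
        android:id="@+id/iv_qr_code_image"
        android:layout_width="match_parent"
        android:layout_height="0dp"
        android:layout_weight="1" />

</LinearLayout>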
barcode_capture.xml
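Again a sketch rather than the original file: BarcodeCaptureActivity only looks up a view with id preview, so a full-screen CameraSourcePreview is enough.

<com.example.qr_code_read_write_scan.camera.CameraSourcePreview
    xmlns:android="http://schemas.android.com/apk/res/android"
    android:id="@+id/preview"
    android:layout_width="match_parent"
    android:layout_height="match_parent" />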
BarcodeCaptureActivity.java
/*
* Copyright (C) The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file and all BarcodeXXX and CameraXXX files in this project edited by
* Daniell Algar (included due to copyright reason)
*/
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.hardware.Camera;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.DisplayMetrics;
import android.util.Log;
import android.widget.Toast;
import com.example.qr_code_read_write_scan.R;
import com.example.qr_code_read_write_scan.camera.CameraSource;
import com.example.qr_code_read_write_scan.camera.CameraSourcePreview;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GoogleApiAvailability;
import com.google.android.gms.common.api.CommonStatusCodes;
import com.google.android.gms.vision.MultiProcessor;
import com.google.android.gms.vision.barcode.Barcode;
import com.google.android.gms.vision.barcode.BarcodeDetector;
import java.io.IOException;
public final class BarcodeCaptureActivity extends AppCompatActivity
implements BarcodeTracker.BarcodeGraphicTrackerCallback {
private static final String TAG = "Barcode-reader";
// Intent request code to handle updating play services if needed.
private static final int RC_HANDLE_GMS = 9001;
// Permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
// Constants used to pass extra data in the intent
public static final String BarcodeObject = "Barcode";
private CameraSource mCameraSource;
private CameraSourcePreview mPreview;
/**
* Initializes the UI and creates the detector pipeline.
*/
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.barcode_capture);
mPreview = (CameraSourcePreview) findViewById(R.id.preview);
boolean autoFocus = true;
boolean useFlash = false;
// Check for the camera permission before accessing the camera. If the
// permission is not granted yet, request permission.
int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
if (rc == PackageManager.PERMISSION_GRANTED) {
createCameraSource(autoFocus, useFlash);
} else {
requestCameraPermission();
}
}
@Override
public void onDetectedQrCode(Barcode barcode) {
if (barcode != null) {
Intent intent = new Intent();
intent.putExtra(BarcodeObject, barcode);
setResult(CommonStatusCodes.SUCCESS, intent);
finish();
}
}
// Handles the requesting of the camera permission.
private void requestCameraPermission() {
Log.w(TAG, "Camera permission is not granted. Requesting permission");
final String[] permissions = new String[]{Manifest.permission.CAMERA};
// A rationale UI could be shown first when shouldShowRequestPermissionRationale() returns
// true; here the permission is requested directly in both cases so the request is never
// silently skipped.
ActivityCompat.requestPermissions(this, permissions, RC_HANDLE_CAMERA_PERM);
}
/**
* Creates and starts the camera.
*
* Suppressing InlinedApi since there is a check that the minimum version is met before using
* the constant.
*/
@SuppressLint("InlinedApi")
private void createCameraSource(boolean autoFocus, boolean useFlash) {
Context context = getApplicationContext();
// A barcode detector is created to track barcodes. An associated multi-processor instance
// is set to receive the barcode detection results, track the barcodes, and maintain
// graphics for each barcode on screen. The factory is used by the multi-processor to
// create a separate tracker instance for each barcode.
BarcodeDetector barcodeDetector = new BarcodeDetector.Builder(context)
.setBarcodeFormats(Barcode.ALL_FORMATS)
.build();
BarcodeTrackerFactory barcodeFactory = new BarcodeTrackerFactory(this);
barcodeDetector.setProcessor(new MultiProcessor.Builder<>(barcodeFactory).build());
if (!barcodeDetector.isOperational()) {
// Note: The first time that an app using the barcode or face API is installed on a
// device, GMS will download native libraries to the device in order to do detection.
// Usually this completes before the app is run for the first time. But if that
// download has not yet completed, then the above call will not detect any barcodes
// and/or faces.
//
// isOperational() can be used to check if the required native libraries are currently
// available. The detectors will automatically become operational once the library
// downloads complete on device.
Log.w(TAG, "Detector dependencies are not yet available.");
// Check for low storage. If there is low storage, the native library will not be
// downloaded, so detection will not become operational.
IntentFilter lowstorageFilter = new IntentFilter(Intent.ACTION_DEVICE_STORAGE_LOW);
boolean hasLowStorage = registerReceiver(null, lowstorageFilter) != null;
if (hasLowStorage) {
Toast.makeText(this, R.string.low_storage_error,
Toast.LENGTH_LONG).show();
Log.w(TAG, getString(R.string.low_storage_error));
}
}
// Creates and starts the camera. Note that this uses a higher resolution in comparison
// to other detection examples to enable the barcode detector to detect small barcodes
// at long distances.
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
CameraSource.Builder builder = new CameraSource.Builder(getApplicationContext(), barcodeDetector)
.setFacing(CameraSource.CAMERA_FACING_BACK)
.setRequestedPreviewSize(metrics.widthPixels, metrics.heightPixels)
.setRequestedFps(24.0f);
// make sure that auto focus is an available option
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
builder = builder.setFocusMode(
autoFocus ? Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE : null);
}
mCameraSource = builder
.setFlashMode(useFlash ? Camera.Parameters.FLASH_MODE_TORCH : null)
.build();
}
// Restarts the camera
@Override
protected void onResume() {
super.onResume();
startCameraSource();
}
// Stops the camera
@Override
protected void onPause() {
super.onPause();
if (mPreview != null) {
mPreview.stop();
}
}
/**
* Releases the resources associated with the camera source, the associated detectors, and the
* rest of the processing pipeline.
*/
@Override
protected void onDestroy() {
super.onDestroy();
if (mPreview != null) {
mPreview.release();
}
}
/**
* Callback for the result from requesting permissions. This method
* is invoked for every call on {@link #requestPermissions(String[], int)}.
*
* Note: It is possible that the permissions request interaction
* with the user is interrupted. In this case you will receive empty permissions
* and results arrays which should be treated as a cancellation.
*
*
* @param requestCode The request code passed in {@link #requestPermissions(String[], int)}.
* @param permissions The requested permissions. Never null.
* @param grantResults The grant results for the corresponding permissions
* which is either {@link PackageManager#PERMISSION_GRANTED}
* or {@link PackageManager#PERMISSION_DENIED}. Never null.
* @see #requestPermissions(String[], int)
*/
@Override
public void onRequestPermissionsResult(int requestCode,
@NonNull String[] permissions,
@NonNull int[] grantResults) {
if (requestCode != RC_HANDLE_CAMERA_PERM) {
Log.d(TAG, "Got unexpected permission result: " + requestCode);
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
return;
}
if (grantResults.length != 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
Log.d(TAG, "Camera permission granted - initialize the camera source");
// we have permission, so create the camerasource
boolean autoFocus = true;
boolean useFlash = false;
createCameraSource(autoFocus, useFlash);
return;
}
Log.e(TAG, "Permission not granted: results len = " + grantResults.length +
" Result code = " + (grantResults.length > 0 ? grantResults[0] : "(empty)"));
DialogInterface.OnClickListener listener = new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
finish();
}
};
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Multitracker sample")
.setMessage(R.string.no_camera_permission)
.setPositiveButton(R.string.ok, listener)
.show();
}
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
private void startCameraSource() throws SecurityException {
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}
if (mCameraSource != null) {
try {
mPreview.start(mCameraSource);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
}
BarcodeTracker.java
/*
 * Copyright (C) The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import android.content.Context;
import com.google.android.gms.vision.Tracker;
import com.google.android.gms.vision.barcode.Barcode;

// Tracker that reports each newly detected barcode back to the hosting activity.
class BarcodeTracker extends Tracker<Barcode> {
    private BarcodeGraphicTrackerCallback mListener;

    public interface BarcodeGraphicTrackerCallback {
        void onDetectedQrCode(Barcode barcode);
    }

    BarcodeTracker(Context listener) {
        mListener = (BarcodeGraphicTrackerCallback) listener;
    }

    @Override
    public void onNewItem(int id, Barcode item) {
        if (item.displayValue != null) {
            mListener.onDetectedQrCode(item);
        }
    }
}
BarcodeTrackerFactory.java
/*
 * Copyright (C) The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
import android.content.Context;
import com.google.android.gms.vision.MultiProcessor;
import com.google.android.gms.vision.Tracker;
import com.google.android.gms.vision.barcode.Barcode;

/**
 * Factory for creating a tracker and associated graphic to be associated with a new barcode. The
 * multi-processor uses this factory to create barcode trackers as needed -- one for each barcode.
 */
class BarcodeTrackerFactory implements MultiProcessor.Factory<Barcode> {
    private Context mContext;

    BarcodeTrackerFactory(Context context) {
        mContext = context;
    }

    @Override
    public Tracker<Barcode> create(Barcode barcode) {
        return new BarcodeTracker(mContext);
    }
}
CameraSourcePreview.java
/*
* Copyright (C) The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.Manifest;
import android.content.Context;
import android.content.res.Configuration;
import android.support.annotation.RequiresPermission;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.ViewGroup;
import com.google.android.gms.common.images.Size;
import java.io.IOException;
public class CameraSourcePreview extends ViewGroup {
private static final String TAG = CameraSourcePreview.class.getSimpleName();
private Context mContext;
private SurfaceView mSurfaceView;
private boolean mStartRequested;
private boolean mSurfaceAvailable;
private CameraSource mCameraSource;
public CameraSourcePreview(Context context, AttributeSet attrs) {
super(context, attrs);
mContext = context;
mStartRequested = false;
mSurfaceAvailable = false;
mSurfaceView = new SurfaceView(context);
mSurfaceView.getHolder().addCallback(new SurfaceCallback());
addView(mSurfaceView);
}
@RequiresPermission(Manifest.permission.CAMERA)
public void start(CameraSource cameraSource) throws IOException, SecurityException {
if (cameraSource == null) {
stop();
}
mCameraSource = cameraSource;
if (mCameraSource != null) {
mStartRequested = true;
startIfReady();
}
}
public void stop() {
if (mCameraSource != null) {
mCameraSource.stop();
}
}
public void release() {
if (mCameraSource != null) {
mCameraSource.release();
mCameraSource = null;
}
}
@RequiresPermission(Manifest.permission.CAMERA)
private void startIfReady() throws IOException, SecurityException {
if (mStartRequested && mSurfaceAvailable) {
mCameraSource.start(mSurfaceView.getHolder());
mStartRequested = false;
}
}
private class SurfaceCallback implements SurfaceHolder.Callback {
@Override
public void surfaceCreated(SurfaceHolder surface) {
mSurfaceAvailable = true;
try {
startIfReady();
} catch (SecurityException se) {
Log.e(TAG,"Do not have permission to start the camera", se);
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surface) {
mSurfaceAvailable = false;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
int previewWidth = 320;
int previewHeight = 240;
if (mCameraSource != null) {
Size size = mCameraSource.getPreviewSize();
if (size != null) {
previewWidth = size.getWidth();
previewHeight = size.getHeight();
}
}
// Swap width and height sizes when in portrait, since it will be rotated 90 degrees
if (isPortraitMode()) {
int tmp = previewWidth;
previewWidth = previewHeight;
previewHeight = tmp;
}
final int viewWidth = right - left;
final int viewHeight = bottom - top;
int childWidth;
int childHeight;
int childXOffset = 0;
int childYOffset = 0;
float widthRatio = (float) viewWidth / (float) previewWidth;
float heightRatio = (float) viewHeight / (float) previewHeight;
// To fill the view with the camera preview, while also preserving the correct aspect ratio,
// it is usually necessary to slightly oversize the child and to crop off portions along one
// of the dimensions. We scale up based on the dimension requiring the most correction, and
// compute a crop offset for the other dimension.
if (widthRatio > heightRatio) {
childWidth = viewWidth;
childHeight = (int) ((float) previewHeight * widthRatio);
childYOffset = (childHeight - viewHeight) / 2;
} else {
childWidth = (int) ((float) previewWidth * heightRatio);
childHeight = viewHeight;
childXOffset = (childWidth - viewWidth) / 2;
}
for (int i = 0; i < getChildCount(); ++i) {
// One dimension will be cropped. We shift child over or up by this offset and adjust
// the size to maintain the proper aspect ratio.
getChildAt(i).layout(
-1 * childXOffset, -1 * childYOffset,
childWidth - childXOffset, childHeight - childYOffset);
}
try {
startIfReady();
} catch (IOException e) {
Log.e(TAG, "Could not start camera source.", e);
} catch (SecurityException se) {
Log.e(TAG, "Does not have permission to start the camera.", se);
}
}
private boolean isPortraitMode() {
int orientation = mContext.getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
return false;
}
if (orientation == Configuration.ORIENTATION_PORTRAIT) {
return true;
}
Log.d(TAG, "isPortraitMode returning false by default");
return false;
}
}
CameraSource.java
/*
* Copyright (C) The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.Manifest;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.os.Build;
import android.os.SystemClock;
import android.support.annotation.Nullable;
import android.support.annotation.RequiresPermission;
import android.support.annotation.StringDef;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.WindowManager;
import com.google.android.gms.common.images.Size;
import com.google.android.gms.vision.Detector;
import com.google.android.gms.vision.Frame;
import java.io.IOException;
import java.lang.Thread.State;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
// Note: This requires Google Play Services 8.1 or higher, due to using indirect byte buffers for
// storing images.
/**
* Manages the camera in conjunction with an underlying
* {@link com.google.android.gms.vision.Detector}. This receives preview frames from the camera at
* a specified rate, sending those frames to the detector as fast as it is able to process those
* frames.
*
* This camera source makes a best effort to manage processing on preview frames as fast as
* possible, while at the same time minimizing lag. As such, frames may be dropped if the detector
* is unable to keep up with the rate of frames generated by the camera. You should use
* {@link CameraSource.Builder#setRequestedFps(float)} to specify a frame rate that works well with
* the capabilities of the camera hardware and the detector options that you have selected. If CPU
* utilization is higher than you'd like, then you may want to consider reducing FPS. If the camera
* preview or detector results are too "jerky", then you may want to consider increasing FPS.
*
 * The following Android permission is required to use the camera:
 *   android.permission.CAMERA
 */
public class CameraSource {

    /**
     * Callback interface used to notify on completion of camera auto focus.
     */
    public interface AutoFocusCallback {
        /**
         * Called when the camera auto focus completes. If the camera does not support auto-focus
         * and autoFocus is called, onAutoFocus will be called immediately with a fake value of
         * success set to true.
         *
         * The auto-focus routine does not lock auto-exposure and auto-white
         * balance after it completes.
         *
         * @param success true if focus was successful, false if otherwise
         */
        void onAutoFocus(boolean success);
    }
/**
* Callback interface used to notify on auto focus start and stop.
*
 * This is only supported in continuous autofocus modes --
 * {@link Camera.Parameters#FOCUS_MODE_CONTINUOUS_VIDEO} and
 * {@link Camera.Parameters#FOCUS_MODE_CONTINUOUS_PICTURE}. Applications can show
 * autofocus animation based on this.
*/ public interface AutoFocusMoveCallback { /** * Called when the camera auto focus starts or stops. * * @param start true if focus starts to move, false if focus stops to move */ void onAutoFocusMoving(boolean start); } //============================================================================================== // Public //============================================================================================== /** * Stops the camera and releases the resources of the camera and underlying detector. */ public void release() { synchronized (mCameraLock) { stop(); mFrameProcessor.release(); } } /** * Opens the camera and starts sending preview frames to the underlying detector. The preview * frames are not displayed. * * @throws IOException if the camera's preview texture or display could not be initialized */ @RequiresPermission(Manifest.permission.CAMERA) public CameraSource start() throws IOException { synchronized (mCameraLock) { if (mCamera != null) { return this; } mCamera = createCamera(); // SurfaceTexture was introduced in Honeycomb (11), so if we are running and // old version of Android. fall back to use SurfaceView. if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { mDummySurfaceTexture = new SurfaceTexture(DUMMY_TEXTURE_NAME); mCamera.setPreviewTexture(mDummySurfaceTexture); } else { mDummySurfaceView = new SurfaceView(mContext); mCamera.setPreviewDisplay(mDummySurfaceView.getHolder()); } mCamera.startPreview(); mProcessingThread = new Thread(mFrameProcessor); mFrameProcessor.setActive(true); mProcessingThread.start(); } return this; } /** * Opens the camera and starts sending preview frames to the underlying detector. The supplied * surface holder is used for the preview so frames can be displayed to the user. * * @param surfaceHolder the surface holder to use for the preview frames * @throws IOException if the supplied surface holder could not be used as the preview display */ @RequiresPermission(Manifest.permission.CAMERA) public CameraSource start(SurfaceHolder surfaceHolder) throws IOException { synchronized (mCameraLock) { if (mCamera != null) { return this; } mCamera = createCamera(); mCamera.setPreviewDisplay(surfaceHolder); mCamera.startPreview(); mProcessingThread = new Thread(mFrameProcessor); mFrameProcessor.setActive(true); mProcessingThread.start(); } return this; } /** * Closes the camera and stops sending frames to the underlying frame detector. * * This camera source may be restarted again by calling {@link #start()} or * {@link #start(SurfaceHolder)}. * * Call {@link #release()} instead to completely shut down this camera source and release the * resources of the underlying detector. */ public void stop() { synchronized (mCameraLock) { mFrameProcessor.setActive(false); if (mProcessingThread != null) { try { // Wait for the thread to complete to ensure that we can't have multiple threads // executing at the same time (i.e., which would happen if we called start too // quickly after stop). mProcessingThread.join(); } catch (InterruptedException e) { Log.d(TAG, "Frame processing thread interrupted on release."); } mProcessingThread = null; } // clear the buffer to prevent oom exceptions mBytesToByteBuffer.clear(); if (mCamera != null) { mCamera.stopPreview(); mCamera.setPreviewCallbackWithBuffer(null); try { // We want to be compatible back to Gingerbread, but SurfaceTexture // wasn't introduced until Honeycomb. Since the interface cannot use a SurfaceTexture, if the // developer wants to display a preview we must use a SurfaceHolder. 
If the developer doesn't // want to display a preview we use a SurfaceTexture if we are running at least Honeycomb. if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { mCamera.setPreviewTexture(null); } else { mCamera.setPreviewDisplay(null); } } catch (Exception e) { Log.e(TAG, "Failed to clear camera preview: " + e); } mCamera.release(); mCamera = null; } } } /** * Returns the preview size that is currently in use by the underlying camera. */ public Size getPreviewSize() { return mPreviewSize; } /** * Returns the selected camera; one of {@link #CAMERA_FACING_BACK} or * {@link #CAMERA_FACING_FRONT}. */ public int getCameraFacing() { return mFacing; } public int doZoom(float scale) { synchronized (mCameraLock) { if (mCamera == null) { return 0; } int currentZoom = 0; int maxZoom; Camera.Parameters parameters = mCamera.getParameters(); if (!parameters.isZoomSupported()) { Log.w(TAG, "Zoom is not supported on this device"); return currentZoom; } maxZoom = parameters.getMaxZoom(); currentZoom = parameters.getZoom() + 1; float newZoom; if (scale > 1) { newZoom = currentZoom + scale * (maxZoom / 10); } else { newZoom = currentZoom * scale; } currentZoom = Math.round(newZoom) - 1; if (currentZoom < 0) { currentZoom = 0; } else if (currentZoom > maxZoom) { currentZoom = maxZoom; } parameters.setZoom(currentZoom); mCamera.setParameters(parameters); return currentZoom; } } /** * Initiates taking a picture, which happens asynchronously. The camera source should have been * activated previously with {@link #start()} or {@link #start(SurfaceHolder)}. The camera * preview is suspended while the picture is being taken, but will resume once picture taking is * done. * * @param shutter the callback for image capture moment, or null * @param jpeg the callback for JPEG image data, or null */ public void takePicture(ShutterCallback shutter, PictureCallback jpeg) { synchronized (mCameraLock) { if (mCamera != null) { PictureStartCallback startCallback = new PictureStartCallback(); startCallback.mDelegate = shutter; PictureDoneCallback doneCallback = new PictureDoneCallback(); doneCallback.mDelegate = jpeg; mCamera.takePicture(startCallback, null, null, doneCallback); } } } /** * Gets the current focus mode setting. * * @return current focus mode. This value is null if the camera is not yet created. Applications should call {@link * #autoFocus(AutoFocusCallback)} to start the focus if focus * mode is FOCUS_MODE_AUTO or FOCUS_MODE_MACRO. * @see Camera.Parameters#FOCUS_MODE_AUTO * @see Camera.Parameters#FOCUS_MODE_INFINITY * @see Camera.Parameters#FOCUS_MODE_MACRO * @see Camera.Parameters#FOCUS_MODE_FIXED * @see Camera.Parameters#FOCUS_MODE_EDOF * @see Camera.Parameters#FOCUS_MODE_CONTINUOUS_VIDEO * @see Camera.Parameters#FOCUS_MODE_CONTINUOUS_PICTURE */ @Nullable @FocusMode public String getFocusMode() { return mFocusMode; } /** * Sets the focus mode. * * @param mode the focus mode * @return {@code true} if the focus mode is set, {@code false} otherwise * @see #getFocusMode() */ public boolean setFocusMode(@FocusMode String mode) { synchronized (mCameraLock) { if (mCamera != null && mode != null) { Camera.Parameters parameters = mCamera.getParameters(); if (parameters.getSupportedFocusModes().contains(mode)) { parameters.setFocusMode(mode); mCamera.setParameters(parameters); mFocusMode = mode; return true; } } return false; } } /** * Gets the current flash mode setting. * * @return current flash mode. null if flash mode setting is not * supported or the camera is not yet created. 
* @see Camera.Parameters#FLASH_MODE_OFF * @see Camera.Parameters#FLASH_MODE_AUTO * @see Camera.Parameters#FLASH_MODE_ON * @see Camera.Parameters#FLASH_MODE_RED_EYE * @see Camera.Parameters#FLASH_MODE_TORCH */ @Nullable @FlashMode public String getFlashMode() { return mFlashMode; } /** * Sets the flash mode. * * @param mode flash mode. * @return {@code true} if the flash mode is set, {@code false} otherwise * @see #getFlashMode() */ public boolean setFlashMode(@FlashMode String mode) { synchronized (mCameraLock) { if (mCamera != null && mode != null) { Camera.Parameters parameters = mCamera.getParameters(); if (parameters.getSupportedFlashModes().contains(mode)) { parameters.setFlashMode(mode); mCamera.setParameters(parameters); mFlashMode = mode; return true; } } return false; } } /** * Starts camera auto-focus and registers a callback function to run when * the camera is focused. This method is only valid when preview is active * (between {@link #start()} or {@link #start(SurfaceHolder)} and before {@link #stop()} or {@link #release()}). * *Callers should check * {@link #getFocusMode()} to determine if * this method should be called. If the camera does not support auto-focus, * it is a no-op and {@link AutoFocusCallback#onAutoFocus(boolean)} * callback will be called immediately. *
 * If the current flash mode is not {@link Camera.Parameters#FLASH_MODE_OFF}, flash may be
 * fired during auto-focus, depending on the driver and camera hardware.
* * @param cb the callback to run * @see #cancelAutoFocus() */ public void autoFocus(@Nullable AutoFocusCallback cb) { synchronized (mCameraLock) { if (mCamera != null) { CameraAutoFocusCallback autoFocusCallback = null; if (cb != null) { autoFocusCallback = new CameraAutoFocusCallback(); autoFocusCallback.mDelegate = cb; } mCamera.autoFocus(autoFocusCallback); } } } /** * Cancels any auto-focus function in progress. * Whether or not auto-focus is currently in progress, * this function will return the focus position to the default. * If the camera does not support auto-focus, this is a no-op. * * @see #autoFocus(AutoFocusCallback) */ public void cancelAutoFocus() { synchronized (mCameraLock) { if (mCamera != null) { mCamera.cancelAutoFocus(); } } } /** * Sets camera auto-focus move callback. * * @param cb the callback to run * @return {@code true} if the operation is supported (i.e. from Jelly Bean), {@code false} otherwise */ @TargetApi(Build.VERSION_CODES.JELLY_BEAN) public boolean setAutoFocusMoveCallback(@Nullable AutoFocusMoveCallback cb) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) { return false; } synchronized (mCameraLock) { if (mCamera != null) { CameraAutoFocusMoveCallback autoFocusMoveCallback = null; if (cb != null) { autoFocusMoveCallback = new CameraAutoFocusMoveCallback(); autoFocusMoveCallback.mDelegate = cb; } mCamera.setAutoFocusMoveCallback(autoFocusMoveCallback); } } return true; } //============================================================================================== // Private //============================================================================================== /** * Only allow creation via the builder class. */ private CameraSource() { } /** * Wraps the camera1 shutter callback so that the deprecated API isn't exposed. */ private class PictureStartCallback implements Camera.ShutterCallback { private ShutterCallback mDelegate; @Override public void onShutter() { if (mDelegate != null) { mDelegate.onShutter(); } } } /** * Wraps the final callback in the camera sequence, so that we can automatically turn the camera * preview back on after the picture has been taken. */ private class PictureDoneCallback implements Camera.PictureCallback { private PictureCallback mDelegate; @Override public void onPictureTaken(byte[] data, Camera camera) { if (mDelegate != null) { mDelegate.onPictureTaken(data); } synchronized (mCameraLock) { if (mCamera != null) { mCamera.startPreview(); } } } } /** * Wraps the camera1 auto focus callback so that the deprecated API isn't exposed. */ private class CameraAutoFocusCallback implements Camera.AutoFocusCallback { private AutoFocusCallback mDelegate; @Override public void onAutoFocus(boolean success, Camera camera) { if (mDelegate != null) { mDelegate.onAutoFocus(success); } } } /** * Wraps the camera1 auto focus move callback so that the deprecated API isn't exposed. */ @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private class CameraAutoFocusMoveCallback implements Camera.AutoFocusMoveCallback { private AutoFocusMoveCallback mDelegate; @Override public void onAutoFocusMoving(boolean start, Camera camera) { if (mDelegate != null) { mDelegate.onAutoFocusMoving(start); } } } /** * Opens the camera and applies the user settings. 
* * @throws RuntimeException if the method fails */ @SuppressLint("InlinedApi") private Camera createCamera() { int requestedCameraId = getIdForRequestedCamera(mFacing); if (requestedCameraId == -1) { throw new RuntimeException("Could not find requested camera."); } Camera camera = Camera.open(requestedCameraId); SizePair sizePair = selectSizePair(camera, mRequestedPreviewWidth, mRequestedPreviewHeight); if (sizePair == null) { throw new RuntimeException("Could not find suitable preview size."); } Size pictureSize = sizePair.pictureSize(); mPreviewSize = sizePair.previewSize(); int[] previewFpsRange = selectPreviewFpsRange(camera, mRequestedFps); if (previewFpsRange == null) { throw new RuntimeException("Could not find suitable preview frames per second range."); } Camera.Parameters parameters = camera.getParameters(); if (pictureSize != null) { parameters.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight()); } parameters.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); parameters.setPreviewFpsRange( previewFpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], previewFpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]); parameters.setPreviewFormat(ImageFormat.NV21); setRotation(camera, parameters, requestedCameraId); if (mFocusMode != null) { if (parameters.getSupportedFocusModes().contains( mFocusMode)) { parameters.setFocusMode(mFocusMode); } else { Log.i(TAG, "Camera focus mode: " + mFocusMode + " is not supported on this device."); } } // setting mFocusMode to the one set in the params mFocusMode = parameters.getFocusMode(); if (mFlashMode != null) { if (parameters.getSupportedFlashModes().contains( mFlashMode)) { parameters.setFlashMode(mFlashMode); } else { Log.i(TAG, "Camera flash mode: " + mFlashMode + " is not supported on this device."); } } // setting mFlashMode to the one set in the params mFlashMode = parameters.getFlashMode(); camera.setParameters(parameters); // Four frame buffers are needed for working with the camera: // // one for the frame that is currently being executed upon in doing detection // one for the next pending frame to process immediately upon completing detection // two for the frames that the camera uses to populate future preview images camera.setPreviewCallbackWithBuffer(new CameraPreviewCallback()); camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize)); camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize)); camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize)); camera.addCallbackBuffer(createPreviewBuffer(mPreviewSize)); return camera; } /** * Gets the id for the camera specified by the direction it is facing. Returns -1 if no such * camera was found. * * @param facing the desired camera (front-facing or rear-facing) */ private static int getIdForRequestedCamera(int facing) { CameraInfo cameraInfo = new CameraInfo(); for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { Camera.getCameraInfo(i, cameraInfo); if (cameraInfo.facing == facing) { return i; } } return -1; } /** * Selects the most suitable preview and picture size, given the desired width and height. *
 * Even though we may only need the preview size, it's necessary to find both the preview
 * size and the picture size of the camera together, because these need to have the same aspect
 * ratio. On some hardware, if you would only set the preview size, you will get a distorted
 * image.
 *
 * @param camera the camera to select a preview size from
 * @param desiredWidth the desired width of the camera preview frames
 * @param desiredHeight the desired height of the camera preview frames
 * @return the selected preview and picture size pair
 */
private static SizePair selectSizePair(Camera camera, int desiredWidth, int desiredHeight) {
    List

References
https://codelabs.developers.google.com/codelabs/bar-codes/#0
https://www.varvet.com/blog/android-qr-code-reader-made-easy/
https://github.com/varvet/BarcodeReaderSample
https://github.com/googlesamples/android-vision
https://stackoverflow.com/a/30529128/2069407
https://stackoverflow.com/a/25283174/2069407