I am building a real-time object detection application with OpenCV on Android. I am using the Camera2 API with a TextureView to capture images. I want to add OpenCV code for real-time image processing and view the result.
Here is my code for capturing a still image:
protected void takePicture() {
    if (null == cameraDevice) {
        Log.e(TAG, "cameraDevice is null");
        return;
    }
    CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
    try {
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
        Size[] jpegSizes = null;
        if (characteristics != null) {
            jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
                    .getOutputSizes(ImageFormat.JPEG);
        }
        int width = 640;
        int height = 480;
        if (jpegSizes != null && 0 < jpegSizes.length) {
            width = jpegSizes[0].getWidth();
            height = jpegSizes[0].getHeight();
        }
        ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
        List<Surface> outputSurfaces = new ArrayList<Surface>(2);
        outputSurfaces.add(reader.getSurface());
        outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
        final CaptureRequest.Builder captureBuilder =
                cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        captureBuilder.addTarget(reader.getSurface());
        captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        // Orientation
        int rotation = getWindowManager().getDefaultDisplay().getRotation();
        captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
        String timeStamp = new SimpleDateFormat("ddMMyyyy_HHmmss").format(new Date());
        final File file = new File(Environment.getExternalStorageDirectory()
                + "/Billboard_" + timeStamp + ".jpg");
        // Get the location from the NetworkProvider and geotag the saved image.
        LocationManager lm = (LocationManager) this.getSystemService(Context.LOCATION_SERVICE);
        LocationListener locationListener = new LocationListener() {
            @Override
            public void onLocationChanged(Location location) {
                longitude = location.getLongitude();
                latitude = location.getLatitude();
                storeGeoCoordsToImage(file, location);
                Log.e(TAG, "Latitude = " + latitude);
                Log.e(TAG, "Longitude = " + longitude);
            }
            @Override
            public void onProviderDisabled(String provider) {}
            @Override
            public void onProviderEnabled(String provider) {}
            @Override
            public void onStatusChanged(String provider, int status, Bundle extras) {}
        };
        // Update location listener.
        lm.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 0, 0, locationListener);
        ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image image = null;
                try {
                    image = reader.acquireLatestImage();
                    if (image == null) {
                        return; // no frame available yet
                    }
                    // Copy the JPEG data out of the single plane.
                    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                    byte[] bytes = new byte[buffer.remaining()];
                    buffer.get(bytes);
                    save(bytes);
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (image != null) {
                        image.close();
                    }
                }
            }

            private void save(byte[] bytes) throws IOException {
                OutputStream output = null;
                try {
                    output = new FileOutputStream(file);
                    output.write(bytes);
                } finally {
                    if (null != output) {
                        output.close();
                    }
                }
            }
        };
        reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
        final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                                           TotalCaptureResult result) {
                super.onCaptureCompleted(session, request, result);
                Toast.makeText(MainActivity.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
                createCameraPreview();
            }
        };
        cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(CameraCaptureSession session) {
                try {
                    session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
                } catch (CameraAccessException e) {
                    e.printStackTrace();
                }
            }

            @Override
            public void onConfigureFailed(CameraCaptureSession session) {}
        }, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
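From the OpenCV Java docs, I believe I could decode the JPEG bytes inside onImageAvailable() into a Mat with Imgcodecs.imdecode, before (or instead of) saving them. This is just my untested sketch; processAndShow is a hypothetical helper I would still have to write:

import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.imgcodecs.Imgcodecs;

// Inside onImageAvailable(), after copying the JPEG plane into `bytes`:
// decode the compressed JPEG buffer into a BGR Mat.
Mat source = Imgcodecs.imdecode(new MatOfByte(bytes), Imgcodecs.IMREAD_COLOR);
if (!source.empty()) {
    processAndShow(source); // hypothetical helper: run the OpenCV ops, then display
}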
I want to run OpenCV Java code like the following on each frame and view the result on screen:
Mat destination = new Mat();
Imgproc.cvtColor(source, destination, Imgproc.COLOR_RGB2GRAY);
Imgproc.equalizeHist(destination, destination);
Imgproc.Canny(destination, destination, 50, 150);
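To put the processed Mat on screen, my assumption is that I would convert it back to a Bitmap with Utils.matToBitmap and hand it to an ImageView on the UI thread. resultImageView below is a hypothetical view from my layout:

import android.graphics.Bitmap;
import org.opencv.android.Utils;

// Canny output is single-channel 8-bit, which matToBitmap accepts.
Bitmap result = Bitmap.createBitmap(destination.cols(), destination.rows(),
        Bitmap.Config.ARGB_8888);
Utils.matToBitmap(destination, result);
// Update the view on the UI thread.
runOnUiThread(() -> resultImageView.setImageBitmap(result)); // resultImageView: hypothetical ImageView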
I am confused about how to grab frames from the camera preview, run the image processing on them, and then display the result.
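The only approach I have come up with so far (an unverified sketch; I don't know whether it is fast enough for real time) is to hook onSurfaceTextureUpdated(), which fires once per displayed preview frame, grab the frame with textureView.getBitmap(), and run the OpenCV pipeline on it:

import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.view.TextureView;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        // Called once per preview frame; getBitmap() copies the current frame.
        Bitmap frame = textureView.getBitmap();
        if (frame == null) return;
        Mat source = new Mat();
        Utils.bitmapToMat(frame, source);          // RGBA Mat
        Mat edges = new Mat();
        Imgproc.cvtColor(source, edges, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(edges, edges);
        Imgproc.Canny(edges, edges, 50, 150);
        Utils.matToBitmap(edges, frame);           // reuse the bitmap for the result
        resultImageView.setImageBitmap(frame);     // hypothetical overlay ImageView; this callback runs on the UI thread
    }

    @Override public void onSurfaceTextureAvailable(SurfaceTexture s, int w, int h) {}
    @Override public void onSurfaceTextureSizeChanged(SurfaceTexture s, int w, int h) {}
    @Override public boolean onSurfaceTextureDestroyed(SurfaceTexture s) { return true; }
});

I would have to merge this with my existing SurfaceTextureListener that opens the camera, and since getBitmap() does a full copy per frame I am not sure it scales to higher resolutions.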
Any help with integrating the OpenCV code and the Camera2 API would be appreciated. Thanks.