RxJava 2 exception with camera

I just switched my code from AsyncTask to RxJava 2, and now I occasionally get this exception:

Camera is being used after Camera.release() was called (on a Galaxy S6 Edge)

Below is my code -

The Cameras class:

    public class Cameras {

        private static final String TAG = Cameras.class.getSimpleName();
        private static final String SP_CAMERA_ID = "camera_id";

        private static final int NO_NEXT_TASK = 0;
        private static final int NEXT_TASK_RELEASE_COMPLETE = 1;
        private static final int NEXT_TASK_SWITCH_COMPLETE = 2;
        private static final int NEXT_TASK_START_PREVIEW = 3;

        private Camera camera;
        private int currentCameraId = -1;
        private Camera.PreviewCallback previewCallback;
        private byte[] buffer1, buffer2, buffer3;
        private SurfaceTexture surfaceTexture;
        private Listener listener;
        // Camera facings found on the device. The declaration was missing from the posted
        // snippet, so this line is an assumption based on how `cams` is used below.
        private final List<Integer> cams = new ArrayList<>();

        public interface Listener {
            void onCameraOpened(Camera.Size size, int angle);
        }

        private boolean cameraReleased = false;

        public Cameras(Camera.PreviewCallback previewCallback, Listener listener) {
            this.listener = listener;
            this.previewCallback = previewCallback;
            this.currentCameraId = Spin.INSTANCE.getSp().getInt(SP_CAMERA_ID, -1);
            getCameraList();
        }

        private void getCameraList() {
            int numberOfCameras = Camera.getNumberOfCameras();
            Camera.CameraInfo camInfo = new Camera.CameraInfo();
            for (int i = 0; i < numberOfCameras; i++) {
                Camera.getCameraInfo(i, camInfo);
                cams.add(camInfo.facing);
            }
            if (Camera.CameraInfo.CAMERA_FACING_BACK != currentCameraId
                    && Camera.CameraInfo.CAMERA_FACING_FRONT != currentCameraId) {
                currentCameraId = cams.get(cams.size() == 2 ? 1 : 0);
            }
        }

        public boolean isSwitchCamAvailable() {
            return Camera.getNumberOfCameras() > 1;
        }

        public void open(SurfaceTexture surfaceTexture) {
            this.surfaceTexture = surfaceTexture;
            init(NEXT_TASK_START_PREVIEW);
        }

        private void init(final int nextTask) {
            if (cams.isEmpty()) {
                Toast.makeText(Spin.getContext(), "Device have no camera", Toast.LENGTH_SHORT).show();
                return;
            }
            cameraReleased = false;
            if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
                Observable.defer(new Callable<ObservableSource<?>>() {
                    @Override
                    public ObservableSource<?> call() throws Exception {
                        synchronized (this) {
                            try {
                                camera = Camera.open(currentCameraId);
                            } catch (RuntimeException e) {
                                e.printStackTrace();
                            }
                        }
                        return Completable.complete().toObservable();
                    }
                }).doOnComplete(() -> {
                    initComplete(nextTask);
                    Log.d("Complete", "Complete");
                })
                        .subscribeOn(Schedulers.computation())
                        .observeOn(AndroidSchedulers.mainThread())
                        .subscribe();
            } else {
                try {
                    synchronized (this) {
                        this.camera = Camera.open(currentCameraId);
                    }
                    initComplete(nextTask);
                } catch (RuntimeException ignored) {
                }
            }
        }

        private void initComplete(int nextTask) {
            if (camera == null) {
                return;
            }
            //noinspection SynchronizeOnNonFinalField
            synchronized (camera) {
                try {
                    Camera.Parameters params = camera.getParameters();
                    Camera.Size size = getClosestFrameSize(params, 640);
                    params.setPreviewSize(size.width, size.height);
                    camera.setParameters(params);
                    camera.setPreviewCallbackWithBuffer(previewCallback);
                    int bufferSize = size.width * size.height * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;
                    buffer1 = new byte[bufferSize];
                    buffer2 = new byte[bufferSize];
                    buffer3 = new byte[bufferSize];
                    camera.addCallbackBuffer(buffer1);
                    camera.addCallbackBuffer(buffer2);
                    camera.addCallbackBuffer(buffer3);
                    camera.setPreviewTexture(surfaceTexture);
                    int angle = rotateStream();
                    camera.setDisplayOrientation(angle);
                    if (currentCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT && angle > 0) {
                        angle = 360 - angle;
                    }
                    listener.onCameraOpened(size, angle);
                    routNextTask(nextTask);
                } catch (IOException | RuntimeException e) {
                    e.printStackTrace();
                }
            }
        }

        private Camera.Size getClosestFrameSize(Camera.Parameters params, int width) {
            Camera.Size result = null;
            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
            Camera.Size currentSize = null;
            int closestDistance = 0;
            int currentDistance = 0;
            for (int i = 0; i < sizes.size(); ++i) {
                if (null == result) {
                    result = sizes.get(i);
                    closestDistance = Math.abs(result.width - width);
                    continue;
                }
                currentSize = sizes.get(i);
                currentDistance = Math.abs(currentSize.width - width);
                if (currentDistance < closestDistance) {
                    closestDistance = currentDistance;
                    result = currentSize;
                    if (closestDistance == 0) break;
                }
            }
            return result;
        }

        public void stopPreview() {
            stopPreview(NO_NEXT_TASK);
        }

        private String nextTaskStr(final int nextTask) {
            String nextTaskStr = null;
            switch (nextTask) {
                case NO_NEXT_TASK: nextTaskStr = "NO_NEXT_TASK"; break;
                case NEXT_TASK_RELEASE_COMPLETE: nextTaskStr = "NEXT_TASK_RELEASE_COMPLETE"; break;
                case NEXT_TASK_SWITCH_COMPLETE: nextTaskStr = "NEXT_TASK_SWITCH_COMPLETE"; break;
                case NEXT_TASK_START_PREVIEW: nextTaskStr = "NEXT_TASK_START_PREVIEW"; break;
            }
            return nextTaskStr;
        }

        private void stopPreview(final int nextTask) {
            if (null == camera) return;
            if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
                Observable.defer(new Callable<ObservableSource<?>>() {
                    @Override
                    public ObservableSource<?> call() throws Exception {
                        synchronized (this) {
                            if ((null != camera) && (!cameraReleased)) {
                                if (LogDog.isEnabled) ;
                                camera.stopPreview();
                            }
                        }
                        return Completable.complete().toObservable();
                    }
                }).doOnComplete(() -> {
                    routNextTask(nextTask);
                    Log.d("Complete", "Complete");
                })
                        .subscribeOn(Schedulers.computation())
                        .observeOn(AndroidSchedulers.mainThread())
                        .subscribe();
            } else {
                synchronized (this) {
                    if ((null != camera) && (!cameraReleased)) {
                        camera.stopPreview();
                    }
                }
                routNextTask(nextTask);
            }
        }

        private void routNextTask(int nextTask) {
            if (NO_NEXT_TASK == nextTask) return;
            if (NEXT_TASK_SWITCH_COMPLETE == nextTask) {
                switchCamComplete();
            } else if (NEXT_TASK_RELEASE_COMPLETE == nextTask) {
                releaseComplete();
            } else if (NEXT_TASK_START_PREVIEW == nextTask) {
                startPreview(null);
            }
        }

        public void startPreview(Camera.PreviewCallback cpc) {
            if (null == camera) return;
            synchronized (this) {
                camera.startPreview();
                switchCamOnAir = false;
            }
        }

        private void releaseCamera() {
            synchronized (this) {
                if (null == camera) return;
                camera.setPreviewCallback(null);
                camera.release();
                camera = null;
                cameraReleased = true;
            }
        }

        public void release() {
            synchronized (this) {
                if (null == camera) return;
                stopPreview(NEXT_TASK_RELEASE_COMPLETE);
            }
        }

        private void releaseComplete() {
            synchronized (this) {
                if (camera != null) {
                    camera.release();
                    cameraReleased = true;
                    camera = null;
                }
            }
            buffer1 = null;
            buffer2 = null;
            buffer3 = null;
        }

        private boolean switchCamOnAir = false;

        public void switchCam() {
            if (!isSwitchCamAvailable()) return;
            if (null == camera) return;
            if (switchCamOnAir) return;
            this.switchCamOnAir = true;
            stopPreview(NEXT_TASK_SWITCH_COMPLETE);
        }

        private void switchCamComplete() {
            releaseCamera();
            if (currentCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                currentCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
            } else {
                currentCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
            }
            Spin.INSTANCE.getSp().edit().putInt(SP_CAMERA_ID, currentCameraId).apply();
            init(NEXT_TASK_START_PREVIEW);
        }

        public int rotateStream() {
            Camera.CameraInfo info = new Camera.CameraInfo();
            Camera.getCameraInfo(currentCameraId, info);
            WindowManager wm = (WindowManager) Spin.getContext().getSystemService(Context.WINDOW_SERVICE);
            int rotation = wm.getDefaultDisplay().getRotation();
            int degrees = 0;
            switch (rotation) {
                case Surface.ROTATION_0: degrees = 0; break;
                case Surface.ROTATION_90: degrees = 90; break;
                case Surface.ROTATION_180: degrees = 180; break;
                case Surface.ROTATION_270: degrees = 270; break;
            }
            int result;
            if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                result = (info.orientation + degrees) % 360;
                result = (360 - result) % 360; // compensate the mirror
            } else { // back-facing
                result = (info.orientation - degrees + 360) % 360;
            }
            return result;
        }

        @SuppressWarnings("unused")
        public int getDeviceDefaultOrientation() {
            WindowManager windowManager = (WindowManager) Spin.getContext().getSystemService(Context.WINDOW_SERVICE);
            Configuration config = Spin.getContext().getResources().getConfiguration();
            int rotation = windowManager.getDefaultDisplay().getRotation();
            if (((rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_180)
                    && config.orientation == Configuration.ORIENTATION_LANDSCAPE)
                    || ((rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270)
                    && config.orientation == Configuration.ORIENTATION_PORTRAIT)) {
                return Configuration.ORIENTATION_LANDSCAPE;
            } else {
                return Configuration.ORIENTATION_PORTRAIT;
            }
        }
    }

Cameras.java (the stopPreview() branch where the exception is thrown):

    if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
        Observable.defer(new Callable<ObservableSource<?>>() {
            @Override
            public ObservableSource<?> call() throws Exception {
                if (LogDog.isEnabled)
                    LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()",
                            " (camera != null) =" + (camera != null));
                synchronized (this) {
                    if ((null != camera) && (!cameraReleased)) {
                        if (LogDog.isEnabled)
                            LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()",
                                    " XXX CALL camera.stopPreview()");
                        camera.stopPreview();
                    }
                }
                return Completable.complete().toObservable();
            }
        }).doOnComplete(() -> {
            routNextTask(nextTask);
            Log.d("Complete", "Complete");
        })
                .subscribeOn(Schedulers.computation())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe();

Not sure what I'm doing wrong. Any ideas on where I should release the camera, or how to guard access to it, so that this works without problems? The exception is the following:

    FATAL EXCEPTION: main
    io.reactivex.exceptions.OnErrorNotImplementedException: Camera is being used after Camera.release() was called
        at io.reactivex.internal.functions.Functions$OnErrorMissingConsumer.accept(Functions.java:704)
        at io.reactivex.internal.functions.Functions$OnErrorMissingConsumer.accept(Functions.java:701)
        at io.reactivex.internal.observers.LambdaObserver.onError(LambdaObserver.java:74)
        at io.reactivex.internal.operators.observable.ObservableObserveOn$ObserveOnObserver.checkTerminated(ObservableObserveOn.java:276)
        at io.reactivex.internal.operators.observable.ObservableObserveOn$ObserveOnObserver.drainNormal(ObservableObserveOn.java:172)
        at io.reactivex.internal.operators.observable.ObservableObserveOn$ObserveOnObserver.run(ObservableObserveOn.java)
        at io.reactivex.android.schedulers.HandlerScheduler$ScheduledRunnable.run(HandlerScheduler.java:109)
        at android.os.Handler.handleCallback(Handler.java:751)
        at android.os.Handler.dispatchMessage(Handler.java:95)
        at android.os.Looper.loop(Looper.java:154)
        at android.app.ActivityThread.main(ActivityThread.java:6119)
        at java.lang.reflect.Method.invoke(Native Method)
        at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:886)
        at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:776)
    Caused by: java.lang.RuntimeException: Camera is being used after Camera.release() was called
        at android.hardware.Camera._stopPreview(Native Method)
        at android.hardware.Camera.stopPreview(Camera.java:730)
        at com.media.video.Cameras$2.call(Cameras.java:413)
        at com.media.video.Cameras$2.call(Cameras.java:406)
        at io.reactivex.internal.operators.observable.ObservableDefer.subscribeActual(ObservableDefer.java:32)
        at io.reactivex.Observable.subscribe(Observable.java:10842)
        at io.reactivex.internal.operators.observable.ObservableDoOnEach.subscribeActual(ObservableDoOnEach.java:42)
        at io.reactivex.Observable.subscribe(Observable.java:10842)
        at io.reactivex.internal.operators.observable.ObservableSubscribeOn$SubscribeTask.run(ObservableSubscribeOn.java:96)
        at io.reactivex.internal.schedulers.ScheduledDirectTask.call(ScheduledDirectTask.java:38)
        at io.reactivex.internal.schedulers.ScheduledDirectTask.call(ScheduledDirectTask.java:26)
        at java.util.concurrent.FutureTask.run(FutureTask.java:237)
        at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1133)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:607)
        at java.lang.Thread.run(Thread.java:761)

android rx-java rx-android rx-java2
1 answer

The code you wrote to release the camera is subject to race conditions. A small change that may already make a difference is to set the flag before the release actually happens:

    cameraReleased = true;
    camera.release();
    camera = null;

You may still need to double-check the flag before releasing the Camera, but that is exactly what you already synchronize the code for. The real problem in this context is that you call synchronized (this) inside the deferred Observable: there, this refers to the anonymous Callable, not to the Cameras instance that the rest of the class locks on. Synchronize on the same instance as everywhere else by using Cameras.this instead:

    @Override
    public ObservableSource<?> call() throws Exception {
        if (LogDog.isEnabled)
            LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()",
                    " (camera != null) =" + (camera != null));
        synchronized (Cameras.this) { // lock the outer Cameras instance, not the Callable
            if ((null != camera) && (!cameraReleased)) {
                if (LogDog.isEnabled)
                    LogDog.e("Debug::" + TAG + "::stopPreview()::AsyncTask::doInBackground()",
                            " XXX CALL camera.stopPreview()");
                camera.stopPreview();
            }
        }
        return Completable.complete().toObservable();
    }
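
Combining the two suggestions, here is a minimal sketch of how the release path in releaseComplete() could look. It assumes you keep the same cameraReleased flag and use the Cameras instance as the shared lock; it is one possible shape, not the only correct one:

    private void releaseComplete() {
        synchronized (this) {                  // releaseComplete() runs on the Cameras instance, so this is the same lock
            if (camera != null && !cameraReleased) {
                cameraReleased = true;         // flip the flag first, so a racing stopPreview() bails out
                camera.release();
                camera = null;
            }
        }
        buffer1 = null;
        buffer2 = null;
        buffer3 = null;
    }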

Additionally, your use of Observable.defer() does not look right for this case; since the work produces no items, the Completable.fromAction() factory might be more appropriate.
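
For example, the main-thread branch of stopPreview() could be expressed as below. This is only a sketch: it assumes routNextTask(nextTask) is still what you want to run on the main thread afterwards, and it adds an explicit error handler so a failure no longer crashes with OnErrorNotImplementedException:

    Completable.fromAction(() -> {
                // runs on the computation scheduler
                synchronized (Cameras.this) {
                    if (camera != null && !cameraReleased) {
                        camera.stopPreview();
                    }
                }
            })
            .subscribeOn(Schedulers.computation())
            .observeOn(AndroidSchedulers.mainThread())
            .subscribe(
                    () -> routNextTask(nextTask),                               // onComplete, back on the main thread
                    throwable -> Log.e(TAG, "stopPreview() failed", throwable)); // handle errors explicitly

Since the action emits no items, there is no need to fabricate an Observable with Completable.complete().toObservable(), and doOnComplete() is replaced by the onComplete callback passed to subscribe().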

