Revert of Support adding and removing MediaRecorder to camera 2 session. (patchset #5 id:80001 of https://codereview.webrtc.org/2833773003/ )

Reason for revert:
Breaks external bot

Original issue's description:
> Support adding and removing MediaRecorder to camera 2 session.
>
> Camera 1 API is not supported.
>
> BUG=b/36684011
>
> Review-Url: https://codereview.webrtc.org/2833773003
> Cr-Commit-Position: refs/heads/master@{#17901}
> Committed: 2fc04769fa

TBR=sakal@webrtc.org,glaznev@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=b/36684011

Review-Url: https://codereview.webrtc.org/2844233002
Cr-Commit-Position: refs/heads/master@{#17905}
magjed 2017-04-27 02:34:20 -07:00 committed by Commit bot
parent 20a4b3fb2a
commit 6702739862
7 changed files with 35 additions and 302 deletions

View File

@@ -10,8 +10,11 @@
package org.webrtc;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import android.content.Context;
import android.media.MediaRecorder;
import java.util.List;
public class Camera1Capturer extends CameraCapturer {
private final boolean captureToTexture;
@@ -26,11 +29,8 @@ public class Camera1Capturer extends CameraCapturer {
@Override
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraName,
int width, int height, int framerate) {
if (mediaRecorder != null) {
throw new RuntimeException("MediaRecoder is not supported for camera 1.");
}
SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
int framerate) {
Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
framerate);

View File

@@ -13,7 +13,6 @@ package org.webrtc;
import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraManager;
import android.media.MediaRecorder;
@TargetApi(21)
public class Camera2Capturer extends CameraCapturer {
@@ -30,9 +29,9 @@ public class Camera2Capturer extends CameraCapturer {
@Override
protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecoder, String cameraName,
int width, int height, int framerate) {
SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
int framerate) {
Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
surfaceTextureHelper, mediaRecoder, cameraName, width, height, framerate);
surfaceTextureHelper, cameraName, width, height, framerate);
}
}

View File

@@ -10,8 +10,6 @@
package org.webrtc;
import android.media.MediaRecorder;
/**
* Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
* switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
@@ -61,32 +59,6 @@ public interface CameraVideoCapturer extends VideoCapturer {
*/
void switchCamera(CameraSwitchHandler switchEventsHandler);
/**
* MediaRecorder add/remove handler - one of these functions are invoked with the result of
* addMediaRecorderToCamera() or removeMediaRecorderFromCamera calls.
* The callback may be called on an arbitrary thread.
*/
public interface MediaRecorderHandler {
// Invoked on success.
void onMediaRecorderSuccess();
// Invoked on failure, e.g. camera is stopped or any exception happens.
void onMediaRecorderError(String errorDescription);
}
/**
* Add MediaRecorder to camera pipeline. This can only be called while the camera is running.
* Once MediaRecorder is added to camera pipeline camera switch is not allowed.
* This function can be called from any thread.
*/
void addMediaRecorderToCamera(MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler);
/**
* Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
* This function can be called from any thread.
*/
void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler);
/**
* Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
* on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that

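The hunk above removes the public MediaRecorder API from CameraVideoCapturer. For context, a minimal usage sketch against the MediaRecorderHandler, addMediaRecorderToCamera() and removeMediaRecorderFromCamera() signatures shown in the removed lines; the helper class, method and tag names here are illustrative assumptions, not code from this CL:

import android.media.MediaRecorder;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.Logging;

class MediaRecorderUsageSketch {
  // Attach an already prepare()d recorder to a running capturer and start it once the
  // camera pipeline has accepted the recorder surface (the callback may run on any thread).
  static void startRecording(final CameraVideoCapturer capturer, final MediaRecorder mediaRecorder) {
    capturer.addMediaRecorderToCamera(mediaRecorder, new CameraVideoCapturer.MediaRecorderHandler() {
      @Override
      public void onMediaRecorderSuccess() {
        mediaRecorder.start();
      }

      @Override
      public void onMediaRecorderError(String errorDescription) {
        Logging.e("MediaRecorderUsageSketch", "addMediaRecorderToCamera failed: " + errorDescription);
      }
    });
  }
}

Detaching follows the same shape through removeMediaRecorderFromCamera(resultHandler), after calling mediaRecorder.stop().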
View File

@@ -25,7 +25,6 @@ import android.support.test.InstrumentationRegistry;
import android.support.test.filters.LargeTest;
import android.support.test.filters.MediumTest;
import android.support.test.filters.SmallTest;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.After;
@@ -245,12 +244,6 @@ public class Camera2CapturerTest {
fixtures.cameraEventsInvoked();
}
@Test
@MediumTest
public void testUpdateMediaRecorder() throws InterruptedException, IOException {
fixtures.updateMediaRecorder(true /* useSurfaceCapture */);
}
// Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
@Test
@MediumTest

View File

@@ -15,13 +15,7 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import android.annotation.TargetApi;
import android.content.Context;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Environment;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
@@ -508,95 +502,6 @@ class CameraVideoCapturerTestFixtures {
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
}
@TargetApi(21)
public void updateMediaRecorder(boolean useSurfaceCapture)
throws InterruptedException, IOException {
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
final VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithRenderer(capturerInstance.capturer);
// Wait for the camera to start so we can add and remove MediaRecorder.
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
final String videoOutPath = Environment.getExternalStorageDirectory().getPath()
+ "/chromium_tests_root/testmediarecorder.mp4";
File outputFile = new File(videoOutPath);
// Create MediaRecorder object
MediaRecorder mediaRecorder = new MediaRecorder();
mediaRecorder.setVideoSource(
useSurfaceCapture ? MediaRecorder.VideoSource.SURFACE : MediaRecorder.VideoSource.CAMERA);
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
profile.videoCodec = MediaRecorder.VideoEncoder.H264;
profile.videoBitRate = 2500000;
profile.videoFrameWidth = 640;
profile.videoFrameHeight = 480;
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mediaRecorder.setVideoFrameRate(profile.videoFrameRate);
mediaRecorder.setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
mediaRecorder.setVideoEncodingBitRate(profile.videoBitRate);
mediaRecorder.setVideoEncoder(profile.videoCodec);
mediaRecorder.setOutputFile(outputFile.getPath());
mediaRecorder.prepare();
// Add MediaRecorder to camera pipeline.
final boolean[] addMediaRecorderSuccessful = new boolean[1];
final CountDownLatch addBarrier = new CountDownLatch(1);
CameraVideoCapturer.MediaRecorderHandler addMediaRecorderHandler =
new CameraVideoCapturer.MediaRecorderHandler() {
@Override
public void onMediaRecorderSuccess() {
addMediaRecorderSuccessful[0] = true;
addBarrier.countDown();
}
@Override
public void onMediaRecorderError(String errorDescription) {
addMediaRecorderSuccessful[0] = false;
addBarrier.countDown();
}
};
capturerInstance.capturer.addMediaRecorderToCamera(mediaRecorder, addMediaRecorderHandler);
// Wait until MediaRecoder has been added.
addBarrier.await();
// Check result.
assertTrue(addMediaRecorderSuccessful[0]);
// Start MediaRecorder and wait for a few frames to capture.
mediaRecorder.start();
for (int i = 0; i < 5; i++) {
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
}
mediaRecorder.stop();
// Remove MediaRecorder from camera pipeline.
final boolean[] removeMediaRecorderSuccessful = new boolean[1];
final CountDownLatch removeBarrier = new CountDownLatch(1);
CameraVideoCapturer.MediaRecorderHandler removeMediaRecorderHandler =
new CameraVideoCapturer.MediaRecorderHandler() {
@Override
public void onMediaRecorderSuccess() {
removeMediaRecorderSuccessful[0] = true;
removeBarrier.countDown();
}
@Override
public void onMediaRecorderError(String errorDescription) {
removeMediaRecorderSuccessful[0] = false;
removeBarrier.countDown();
}
};
capturerInstance.capturer.removeMediaRecorderFromCamera(removeMediaRecorderHandler);
// Wait until MediaRecoder has been removed.
removeBarrier.await();
// Check result.
assertTrue(removeMediaRecorderSuccessful[0]);
// Ensure that frames are received after removing MediaRecorder.
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
// Check that recorded file contains some data.
assertTrue(outputFile.length() > 0);
disposeCapturer(capturerInstance);
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
}
public void cameraEventsInvoked() throws InterruptedException {
final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
startCapture(capturerInstance);

View File

@@ -21,12 +21,11 @@ import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.media.MediaRecorder;
import android.os.Handler;
import android.util.Range;
import android.view.Surface;
import android.view.WindowManager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@@ -50,7 +49,6 @@ class Camera2Session implements CameraSession {
private final Context applicationContext;
private final CameraManager cameraManager;
private final SurfaceTextureHelper surfaceTextureHelper;
private final Surface mediaRecorderSurface;
private final String cameraId;
private final int width;
private final int height;
@@ -125,14 +123,9 @@ class Camera2Session implements CameraSession {
final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
surface = new Surface(surfaceTexture);
List<Surface> surfaces = new ArrayList<Surface>();
surfaces.add(surface);
if (mediaRecorderSurface != null) {
Logging.d(TAG, "Add MediaRecorder surface to capture session.");
surfaces.add(mediaRecorderSurface);
}
try {
camera.createCaptureSession(surfaces, new CaptureSessionCallback(), cameraThreadHandler);
camera.createCaptureSession(
Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to create capture session. " + e);
return;
@@ -182,10 +175,6 @@ class Camera2Session implements CameraSession {
chooseFocusMode(captureRequestBuilder);
captureRequestBuilder.addTarget(surface);
if (mediaRecorderSurface != null) {
Logging.d(TAG, "Add MediaRecorder surface to CaptureRequest.Builder");
captureRequestBuilder.addTarget(mediaRecorderSurface);
}
session.setRepeatingRequest(
captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
@@ -291,15 +280,15 @@ class Camera2Session implements CameraSession {
public static void create(CreateSessionCallback callback, Events events,
Context applicationContext, CameraManager cameraManager,
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraId,
int width, int height, int framerate) {
SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
int framerate) {
new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
mediaRecorder, cameraId, width, height, framerate);
cameraId, width, height, framerate);
}
private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) {
CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
int width, int height, int framerate) {
Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
constructionTimeNs = System.nanoTime();
@@ -310,7 +299,6 @@ class Camera2Session implements CameraSession {
this.applicationContext = applicationContext;
this.cameraManager = cameraManager;
this.surfaceTextureHelper = surfaceTextureHelper;
this.mediaRecorderSurface = (mediaRecorder != null) ? mediaRecorder.getSurface() : null;
this.cameraId = cameraId;
this.width = width;
this.height = height;

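The Camera2Session hunks above are the core of the revert: the extra output surface is dropped from both createCaptureSession() and the repeating CaptureRequest. As background, a minimal sketch of the standard camera2 recording wiring the removed lines followed, using only framework API; the class, method and parameter names are illustrative assumptions, and the sketch presumes an open CameraDevice, a MediaRecorder already configured with VideoSource.SURFACE and prepare()d, and a camera-thread Handler:

import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.media.MediaRecorder;
import android.os.Handler;
import android.view.Surface;
import java.util.Arrays;

class RecordingSessionSketch {
  // The recorder surface must be listed as a session output and added as a target of the
  // repeating request; those are exactly the two places the reverted code touched.
  static void startRecordingSession(final CameraDevice camera, MediaRecorder mediaRecorder,
      SurfaceTexture previewTexture, final Handler cameraThreadHandler)
      throws CameraAccessException {
    final Surface previewSurface = new Surface(previewTexture);
    final Surface recorderSurface = mediaRecorder.getSurface(); // Valid after prepare().
    camera.createCaptureSession(Arrays.asList(previewSurface, recorderSurface),
        new CameraCaptureSession.StateCallback() {
          @Override
          public void onConfigured(CameraCaptureSession session) {
            try {
              CaptureRequest.Builder builder =
                  camera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
              builder.addTarget(previewSurface);
              builder.addTarget(recorderSurface);
              session.setRepeatingRequest(
                  builder.build(), null /* listener */, cameraThreadHandler);
            } catch (CameraAccessException e) {
              // Report the failure to the application.
            }
          }

          @Override
          public void onConfigureFailed(CameraCaptureSession session) {
            // Report the failure to the application.
          }
        },
        cameraThreadHandler);
  }
}

Removing the recorder is the reverse operation: the session and request are rebuilt with only the preview surface, which is what the post-revert Arrays.asList(surface) call expresses.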
View File

@@ -11,7 +11,6 @@
package org.webrtc;
import android.content.Context;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import java.util.Arrays;
@@ -24,13 +23,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
IN_PROGRESS, // Waiting for new switched capture session to start.
}
enum MediaRecorderState {
IDLE, // No media recording update (add or remove) requested.
IDLE_TO_ACTIVE, // Waiting for new capture session with added MediaRecorder surface to start.
ACTIVE_TO_IDLE, // Waiting for new capture session with removed MediaRecorder surface to start.
ACTIVE, // MediaRecorder was successfully added to camera pipeline.
}
private static final String TAG = "CameraCapturer";
private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
private final static int OPEN_CAMERA_DELAY_MS = 500;
@@ -45,9 +37,7 @@ abstract class CameraCapturer implements CameraVideoCapturer {
@Override
public void onDone(CameraSession session) {
checkIsOnCameraThread();
Logging.d(TAG,
"Create session done. Switch state: " + switchState
+ ". MediaRecorder state: " + mediaRecorderState);
Logging.d(TAG, "Create session done");
uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
synchronized (stateLock) {
capturerObserver.onCapturerStarted(true /* success */);
@@ -67,19 +57,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
switchState = SwitchState.IDLE;
switchCameraInternal(switchEventsHandler);
}
if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE
|| mediaRecorderState == MediaRecorderState.ACTIVE_TO_IDLE) {
if (mediaRecorderEventsHandler != null) {
mediaRecorderEventsHandler.onMediaRecorderSuccess();
mediaRecorderEventsHandler = null;
}
if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE) {
mediaRecorderState = MediaRecorderState.ACTIVE;
} else {
mediaRecorderState = MediaRecorderState.IDLE;
}
}
}
}
@@ -104,14 +81,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
switchState = SwitchState.IDLE;
}
if (mediaRecorderState != MediaRecorderState.IDLE) {
if (mediaRecorderEventsHandler != null) {
mediaRecorderEventsHandler.onMediaRecorderError(error);
mediaRecorderEventsHandler = null;
}
mediaRecorderState = MediaRecorderState.IDLE;
}
if (failureType == CameraSession.FailureType.DISCONNECTED) {
eventsHandler.onCameraDisconnected();
} else {
@@ -119,7 +88,8 @@ abstract class CameraCapturer implements CameraVideoCapturer {
}
} else {
Logging.w(TAG, "Opening camera failed, retry: " + error);
createSessionInternal(OPEN_CAMERA_DELAY_MS, null /* mediaRecorder */);
createSessionInternal(OPEN_CAMERA_DELAY_MS);
}
}
}
@@ -243,10 +213,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
private CameraStatistics cameraStatistics; /* guarded by stateLock */
private boolean firstFrameObserved; /* guarded by stateLock */
// Variables used on camera thread - do not require stateLock synchronization.
private MediaRecorderState mediaRecorderState = MediaRecorderState.IDLE;
private MediaRecorderHandler mediaRecorderEventsHandler;
public CameraCapturer(
String cameraName, CameraEventsHandler eventsHandler, CameraEnumerator cameraEnumerator) {
if (eventsHandler == null) {
@@ -311,17 +277,17 @@ abstract class CameraCapturer implements CameraVideoCapturer {
sessionOpening = true;
openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
createSessionInternal(0, null /* mediaRecorder */);
createSessionInternal(0);
}
}
private void createSessionInternal(int delayMs, final MediaRecorder mediaRecorder) {
private void createSessionInternal(int delayMs) {
uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
cameraThreadHandler.postDelayed(new Runnable() {
@Override
public void run() {
createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
surfaceHelper, mediaRecorder, cameraName, width, height, framerate);
surfaceHelper, cameraName, width, height, framerate);
}
}, delayMs);
}
@@ -383,29 +349,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
});
}
@Override
public void addMediaRecorderToCamera(
final MediaRecorder mediaRecorder, final MediaRecorderHandler mediaRecoderEventsHandler) {
Logging.d(TAG, "addMediaRecorderToCamera");
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
updateMediaRecorderInternal(mediaRecorder, mediaRecoderEventsHandler);
}
});
}
@Override
public void removeMediaRecorderFromCamera(final MediaRecorderHandler mediaRecoderEventsHandler) {
Logging.d(TAG, "removeMediaRecorderFromCamera");
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
updateMediaRecorderInternal(null /* mediaRecorder */, mediaRecoderEventsHandler);
}
});
}
@Override
public boolean isScreencast() {
return false;
@@ -427,13 +370,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
}
}
private void reportCameraSwitchError(String error, CameraSwitchHandler switchEventsHandler) {
Logging.e(TAG, error);
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError(error);
}
}
private void switchCameraInternal(final CameraSwitchHandler switchEventsHandler) {
Logging.d(TAG, "switchCamera internal");
@@ -448,15 +384,18 @@ abstract class CameraCapturer implements CameraVideoCapturer {
synchronized (stateLock) {
if (switchState != SwitchState.IDLE) {
reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
return;
}
if (mediaRecorderState != MediaRecorderState.IDLE) {
reportCameraSwitchError("switchCamera: media recording is active", switchEventsHandler);
Logging.d(TAG, "switchCamera switchInProgress");
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError("Camera switch already in progress.");
}
return;
}
if (!sessionOpening && currentSession == null) {
reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
Logging.d(TAG, "switchCamera: No session open");
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError("Camera is not running.");
}
return;
}
@@ -485,74 +424,11 @@ abstract class CameraCapturer implements CameraVideoCapturer {
sessionOpening = true;
openAttemptsRemaining = 1;
createSessionInternal(0, null /* mediaRecorder */);
createSessionInternal(0);
}
Logging.d(TAG, "switchCamera done");
}
private void reportUpdateMediaRecorderError(
String error, MediaRecorderHandler mediaRecoderEventsHandler) {
checkIsOnCameraThread();
Logging.e(TAG, error);
if (mediaRecoderEventsHandler != null) {
mediaRecoderEventsHandler.onMediaRecorderError(error);
}
}
private void updateMediaRecorderInternal(
MediaRecorder mediaRecorder, MediaRecorderHandler mediaRecoderEventsHandler) {
checkIsOnCameraThread();
boolean addMediaRecorder = (mediaRecorder != null);
Logging.d(TAG,
"updateMediaRecoderInternal internal. State: " + mediaRecorderState
+ ". Switch state: " + switchState + ". Add MediaRecorder: " + addMediaRecorder);
synchronized (stateLock) {
if ((addMediaRecorder && mediaRecorderState != MediaRecorderState.IDLE)
|| (!addMediaRecorder && mediaRecorderState != MediaRecorderState.ACTIVE)) {
reportUpdateMediaRecorderError(
"Incorrect state for MediaRecorder update.", mediaRecoderEventsHandler);
return;
}
if (switchState != SwitchState.IDLE) {
reportUpdateMediaRecorderError(
"MediaRecorder update while camera is switching.", mediaRecoderEventsHandler);
return;
}
if (currentSession == null) {
reportUpdateMediaRecorderError(
"MediaRecorder update while camera is closed.", mediaRecoderEventsHandler);
return;
}
if (sessionOpening) {
reportUpdateMediaRecorderError(
"MediaRecorder update while camera is still opening.", mediaRecoderEventsHandler);
return;
}
this.mediaRecorderEventsHandler = mediaRecoderEventsHandler;
mediaRecorderState =
addMediaRecorder ? MediaRecorderState.IDLE_TO_ACTIVE : MediaRecorderState.ACTIVE_TO_IDLE;
Logging.d(TAG, "updateMediaRecoder: Stopping session");
cameraStatistics.release();
cameraStatistics = null;
final CameraSession oldSession = currentSession;
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
oldSession.stop();
}
});
currentSession = null;
sessionOpening = true;
openAttemptsRemaining = 1;
createSessionInternal(0, mediaRecorder);
}
Logging.d(TAG, "updateMediaRecoderInternal done");
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
Logging.e(TAG, "Check is on camera thread failed.");
@@ -568,6 +444,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
abstract protected void createCameraSession(
CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
MediaRecorder mediaRecoder, String cameraName, int width, int height, int framerate);
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
int width, int height, int framerate);
}
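To summarize the CameraCapturer logic this revert deletes (a reading of the removed hunks above, not new behavior): updateMediaRecorderInternal() stopped the running session and reopened it with or without the recorder surface, and the camera-thread-only MediaRecorderState field tracked that round trip:

  IDLE   -- addMediaRecorderToCamera()      --> IDLE_TO_ACTIVE -- new session created --> ACTIVE
  ACTIVE -- removeMediaRecorderFromCamera() --> ACTIVE_TO_IDLE -- new session created --> IDLE

A session failure while a transition was pending reported onMediaRecorderError() and reset the state to IDLE, and switchCamera() was rejected whenever the state was not IDLE.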