Conclude VideoFrame emit fieldtrial.

Concludes VideoFrame emit fieldtrial and starts producing VideoFrames
by default. Deprecates the old onByteBufferFrameCaptured and
onTextureFrameCaptured methods.

Bug: webrtc:8776
Change-Id: Icc224e9f8d89a30f04cf95dd46a498d69cffe9d0
Reviewed-on: https://webrtc-review.googlesource.com/43022
Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Anders Carlsson <andersc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21866}
This commit is contained in:
Sami Kalliomäki
2018-01-26 11:06:45 +01:00
committed by Commit Bot
parent c22d6a8f9b
commit 682dc619f2
11 changed files with 39 additions and 131 deletions

View File

@ -95,9 +95,6 @@ public class PeerConnectionClient {
"WebRTC-H264HighProfile/Enabled/"; "WebRTC-H264HighProfile/Enabled/";
private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL = private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL =
"WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/"; "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/";
private static final String VIDEO_FRAME_EMIT_FIELDTRIAL =
PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/" + PeerConnectionFactory.TRIAL_ENABLED
+ "/";
private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate"; private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation"; private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl"; private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
@ -395,7 +392,6 @@ public class PeerConnectionClient {
fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL; fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL;
Log.d(TAG, "Disable WebRTC AGC field trial."); Log.d(TAG, "Disable WebRTC AGC field trial.");
} }
fieldTrials += VIDEO_FRAME_EMIT_FIELDTRIAL;
// Check preferred video codec. // Check preferred video codec.
preferredVideoCodec = VIDEO_CODEC_VP8; preferredVideoCodec = VIDEO_CODEC_VP8;

View File

@ -20,7 +20,7 @@ import java.util.List;
@JNINamespace("webrtc::jni") @JNINamespace("webrtc::jni")
public class PeerConnectionFactory { public class PeerConnectionFactory {
public static final String TRIAL_ENABLED = "Enabled"; public static final String TRIAL_ENABLED = "Enabled";
public static final String VIDEO_FRAME_EMIT_TRIAL = "VideoFrameEmit"; @Deprecated public static final String VIDEO_FRAME_EMIT_TRIAL = "VideoFrameEmit";
private static final String TAG = "PeerConnectionFactory"; private static final String TAG = "PeerConnectionFactory";
private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread"; private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread";

View File

@ -195,8 +195,11 @@ public class ScreenCapturerAndroid
@Override @Override
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) { public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
numCapturedFrames++; numCapturedFrames++;
capturerObserver.onTextureFrameCaptured( final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
width, height, oesTextureId, transformMatrix, 0 /* rotation */, timestampNs); width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
final VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, timestampNs);
capturerObserver.onFrameCaptured(frame);
frame.release();
} }
@Override @Override

View File

@ -23,13 +23,19 @@ public interface VideoCapturer {
void onCapturerStopped(); void onCapturerStopped();
// Delivers a captured frame. Called on a Java thread owned by VideoCapturer. // Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
void onByteBufferFrameCaptured( @Deprecated
byte[] data, int width, int height, int rotation, long timeStamp); default void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp) {
throw new UnsupportedOperationException("Deprecated and not implemented.");
}
// Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
// owned by VideoCapturer. // owned by VideoCapturer.
void onTextureFrameCaptured(int width, int height, int oesTextureId, float[] transformMatrix, @Deprecated
int rotation, long timestamp); default void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp) {
throw new UnsupportedOperationException("Deprecated and not implemented.");
}
// Delivers a captured frame. Called on a Java thread owned by VideoCapturer. // Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
void onFrameCaptured(VideoFrame frame); void onFrameCaptured(VideoFrame frame);

View File

@ -124,18 +124,6 @@ class CameraVideoCapturerTestFixtures {
Logging.d(TAG, "onCapturerStopped"); Logging.d(TAG, "onCapturerStopped");
} }
@Override
public void onByteBufferFrameCaptured(
byte[] frame, int width, int height, int rotation, long timeStamp) {
throw new RuntimeException("onByteBufferFrameCaptured called");
}
@Override
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timeStamp) {
throw new RuntimeException("onTextureFrameCaptured called");
}
@Override @Override
public void onFrameCaptured(VideoFrame frame) { public void onFrameCaptured(VideoFrame frame) {
synchronized (frameLock) { synchronized (frameLock) {
@ -346,8 +334,6 @@ class CameraVideoCapturerTestFixtures {
CameraVideoCapturerTestFixtures(TestObjectFactory testObjectFactory) { CameraVideoCapturerTestFixtures(TestObjectFactory testObjectFactory) {
PeerConnectionFactory.initialize( PeerConnectionFactory.initialize(
PeerConnectionFactory.InitializationOptions.builder(testObjectFactory.getAppContext()) PeerConnectionFactory.InitializationOptions.builder(testObjectFactory.getAppContext())
.setFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
+ PeerConnectionFactory.TRIAL_ENABLED + "/")
.createInitializationOptions()); .createInitializationOptions());
this.peerConnectionFactory = new PeerConnectionFactory(null /* options */); this.peerConnectionFactory = new PeerConnectionFactory(null /* options */);

View File

@ -39,20 +39,6 @@ public class FileVideoCapturerTest {
// Empty on purpose. // Empty on purpose.
} }
@Override
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public synchronized void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp) {
// Empty on purpose.
}
@Override
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp) {
// Empty on purpose.
}
@Override @Override
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck") @SuppressWarnings("NoSynchronizedMethodCheck")

View File

@ -31,6 +31,7 @@ class AndroidVideoTrackSourceObserver implements VideoCapturer.CapturerObserver
} }
@Override @Override
@SuppressWarnings("deprecation")
public void onByteBufferFrameCaptured( public void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp) { byte[] data, int width, int height, int rotation, long timeStamp) {
nativeOnByteBufferFrameCaptured( nativeOnByteBufferFrameCaptured(
@ -38,6 +39,7 @@ class AndroidVideoTrackSourceObserver implements VideoCapturer.CapturerObserver
} }
@Override @Override
@SuppressWarnings("deprecation")
public void onTextureFrameCaptured(int width, int height, int oesTextureId, public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp) { float[] transformMatrix, int rotation, long timestamp) {
nativeOnTextureFrameCaptured( nativeOnTextureFrameCaptured(

View File

@ -36,8 +36,6 @@ class Camera1Session implements CameraSession {
private static enum SessionState { RUNNING, STOPPED } private static enum SessionState { RUNNING, STOPPED }
private final boolean videoFrameEmitTrialEnabled;
private final Handler cameraThreadHandler; private final Handler cameraThreadHandler;
private final Events events; private final Events events;
private final boolean captureToTexture; private final boolean captureToTexture;
@ -158,9 +156,6 @@ class Camera1Session implements CameraSession {
android.hardware.Camera camera, android.hardware.Camera.CameraInfo info, android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
CaptureFormat captureFormat, long constructionTimeNs) { CaptureFormat captureFormat, long constructionTimeNs) {
Logging.d(TAG, "Create new camera1 session on camera " + cameraId); Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
videoFrameEmitTrialEnabled =
PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
.equals(PeerConnectionFactory.TRIAL_ENABLED);
this.cameraThreadHandler = new Handler(); this.cameraThreadHandler = new Handler();
this.events = events; this.events = events;
@ -277,17 +272,12 @@ class Camera1Session implements CameraSession {
transformMatrix = RendererCommon.multiplyMatrices( transformMatrix = RendererCommon.multiplyMatrices(
transformMatrix, RendererCommon.horizontalFlipMatrix()); transformMatrix, RendererCommon.horizontalFlipMatrix());
} }
if (videoFrameEmitTrialEnabled) { final VideoFrame.Buffer buffer =
final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix)); final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs); events.onFrameCaptured(Camera1Session.this, frame);
events.onFrameCaptured(Camera1Session.this, frame); frame.release();
frame.release();
} else {
events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
}
} }
}); });
} }
@ -317,22 +307,15 @@ class Camera1Session implements CameraSession {
firstFrameReported = true; firstFrameReported = true;
} }
if (videoFrameEmitTrialEnabled) { VideoFrame.Buffer frameBuffer = new NV21Buffer(
VideoFrame.Buffer frameBuffer = new NV21Buffer(data, captureFormat.width, data, captureFormat.width, captureFormat.height, () -> cameraThreadHandler.post(() -> {
captureFormat.height, () -> cameraThreadHandler.post(() -> { if (state == SessionState.RUNNING) {
if (state == SessionState.RUNNING) { camera.addCallbackBuffer(data);
camera.addCallbackBuffer(data); }
} }));
})); final VideoFrame frame = new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
final VideoFrame frame = events.onFrameCaptured(Camera1Session.this, frame);
new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs); frame.release();
events.onFrameCaptured(Camera1Session.this, frame);
frame.release();
} else {
events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
captureFormat.height, getFrameOrientation(), captureTimeNs);
camera.addCallbackBuffer(data);
}
} }
}); });
} }

View File

@ -45,8 +45,6 @@ class Camera2Session implements CameraSession {
private static enum SessionState { RUNNING, STOPPED } private static enum SessionState { RUNNING, STOPPED }
private final boolean videoFrameEmitTrialEnabled;
private final Handler cameraThreadHandler; private final Handler cameraThreadHandler;
private final CreateSessionCallback callback; private final CreateSessionCallback callback;
private final Events events; private final Events events;
@ -228,17 +226,12 @@ class Camera2Session implements CameraSession {
transformMatrix = transformMatrix =
RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation); RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
if (videoFrameEmitTrialEnabled) { VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer( captureFormat.width, captureFormat.height,
captureFormat.width, captureFormat.height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix)); final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs); events.onFrameCaptured(Camera2Session.this, frame);
events.onFrameCaptured(Camera2Session.this, frame); frame.release();
frame.release();
} else {
events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
}
} }
}); });
Logging.d(TAG, "Camera device successfully started."); Logging.d(TAG, "Camera device successfully started.");
@ -313,9 +306,6 @@ class Camera2Session implements CameraSession {
CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) { MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) {
Logging.d(TAG, "Create new camera2 session on camera " + cameraId); Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
videoFrameEmitTrialEnabled =
PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
.equals(PeerConnectionFactory.TRIAL_ENABLED);
constructionTimeNs = System.nanoTime(); constructionTimeNs = System.nanoTime();

View File

@ -192,44 +192,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
capturerObserver.onFrameCaptured(frame); capturerObserver.onFrameCaptured(frame);
} }
} }
@Override
public void onByteBufferFrameCaptured(
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onByteBufferFrameCaptured from another session.");
return;
}
if (!firstFrameObserved) {
eventsHandler.onFirstFrameAvailable();
firstFrameObserved = true;
}
cameraStatistics.addFrame();
capturerObserver.onByteBufferFrameCaptured(data, width, height, rotation, timestamp);
}
}
@Override
public void onTextureFrameCaptured(CameraSession session, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onTextureFrameCaptured from another session.");
surfaceHelper.returnTextureFrame();
return;
}
if (!firstFrameObserved) {
eventsHandler.onFirstFrameAvailable();
firstFrameObserved = true;
}
cameraStatistics.addFrame();
capturerObserver.onTextureFrameCaptured(
width, height, oesTextureId, transformMatrix, rotation, timestamp);
}
}
}; };
private final Runnable openCameraTimeoutRunnable = new Runnable() { private final Runnable openCameraTimeoutRunnable = new Runnable() {

View File

@ -26,12 +26,6 @@ interface CameraSession {
void onCameraDisconnected(CameraSession session); void onCameraDisconnected(CameraSession session);
void onCameraClosed(CameraSession session); void onCameraClosed(CameraSession session);
void onFrameCaptured(CameraSession session, VideoFrame frame); void onFrameCaptured(CameraSession session, VideoFrame frame);
// The old way of passing frames. Will be removed eventually.
void onByteBufferFrameCaptured(
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp);
} }
/** /**