Introduce class that handles native wrapping of AndroidVideoTrackSource

This CL improves separation of concerns by introducing a simple class
that only handles JNI wrapping of a C++ AndroidVideoTrackSource. This
layer can be easily mocked out in Java unit tests.
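
As a rough illustration, a Mockito-based unit test could stub out the
JNI layer along these lines. This is only a sketch: it assumes the usual
org.webrtc package, that Mockito can mock the class, and that the code
under test is handed the mock through some seam that is not part of this
CL:

  import static org.mockito.Mockito.mock;
  import static org.mockito.Mockito.verify;

  import org.junit.Test;
  import org.webrtc.NativeAndroidVideoTrackSource;

  public class NativeAndroidVideoTrackSourceMockTest {
    @Test
    public void capturerStartReachesJniLayer() {
      // Replace the JNI wrapper with a mock so the test runs without
      // loading any native library.
      NativeAndroidVideoTrackSource jniLayer =
          mock(NativeAndroidVideoTrackSource.class);

      // In a real test, the class under test would receive 'jniLayer' and
      // call into it, e.g. when its CapturerObserver reports that capture
      // has started.
      jniLayer.setState(/* isLive= */ true);

      // Verify the JNI-facing call without touching any C++ code.
      verify(jniLayer).setState(true);
    }
  }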

Bug: webrtc:10247
Change-Id: Idbdbfde6d3e00b64f3f310f76505801fa496580d
Reviewed-on: https://webrtc-review.googlesource.com/c/121562
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26556}

@@ -16,11 +16,40 @@ import android.support.annotation.Nullable;
  * Java wrapper of native AndroidVideoTrackSource.
  */
 public class VideoSource extends MediaSource {
-  private final NativeCapturerObserver capturerObserver;
+  /** Simple aspect ratio class for use in constraining output format. */
+  public static class AspectRatio {
+    public static final AspectRatio UNDEFINED = new AspectRatio(/* width= */ 0, /* height= */ 0);
+
+    public final int width;
+    public final int height;
+
+    public AspectRatio(int width, int height) {
+      this.width = width;
+      this.height = height;
+    }
+  }
+
+  private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource;
+  private final CapturerObserver capturerObserver = new CapturerObserver() {
+    @Override
+    public void onCapturerStarted(boolean success) {
+      nativeAndroidVideoTrackSource.setState(success);
+    }
+
+    @Override
+    public void onCapturerStopped() {
+      nativeAndroidVideoTrackSource.setState(/* isLive= */ false);
+    }
+
+    @Override
+    public void onFrameCaptured(VideoFrame frame) {
+      nativeAndroidVideoTrackSource.onFrameCaptured(frame);
+    }
+  };
 
   public VideoSource(long nativeSource) {
     super(nativeSource);
-    this.capturerObserver = new NativeCapturerObserver(nativeSource);
+    this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource);
   }
 
   /**
@@ -42,8 +71,18 @@ public class VideoSource extends MediaSource {
    */
   public void adaptOutputFormat(
       int landscapeWidth, int landscapeHeight, int portraitWidth, int portraitHeight, int fps) {
-    nativeAdaptOutputFormat(getNativeVideoTrackSource(), landscapeWidth, landscapeHeight,
-        portraitWidth, portraitHeight, fps);
+    adaptOutputFormat(new AspectRatio(landscapeWidth, landscapeHeight),
+        /* maxLandscapePixelCount= */ landscapeWidth * landscapeHeight,
+        new AspectRatio(portraitWidth, portraitHeight),
+        /* maxPortraitPixelCount= */ portraitWidth * portraitHeight, fps);
   }
 
+  /** Same as above, with even more control as each constraint is optional. */
+  public void adaptOutputFormat(AspectRatio targetLandscapeAspectRatio,
+      @Nullable Integer maxLandscapePixelCount, AspectRatio targetPortraitAspectRatio,
+      @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) {
+    nativeAndroidVideoTrackSource.adaptOutputFormat(targetLandscapeAspectRatio,
+        maxLandscapePixelCount, targetPortraitAspectRatio, maxPortraitPixelCount, maxFps);
+  }
+
   public CapturerObserver getCapturerObserver() {
@@ -54,7 +93,4 @@ public class VideoSource extends MediaSource {
   long getNativeVideoTrackSource() {
     return getNativeMediaSource();
   }
-
-  private static native void nativeAdaptOutputFormat(long source, int landscapeWidth,
-      int landscapeHeight, int portraitWidth, int portraitHeight, int fps);
 }
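
For reference, the new overload would be used roughly along these lines.
This is a sketch only: it assumes the usual org.webrtc package, the
wrapping class and the concrete numbers are placeholders, and any
@Nullable constraint can be passed as null to leave it unconstrained:

  import org.webrtc.VideoSource;

  class AdaptOutputFormatExample {
    // Constrain output to 16:9 in landscape and 9:16 in portrait, capped
    // at roughly 720p and 30 fps.
    static void constrain(VideoSource videoSource) {
      videoSource.adaptOutputFormat(
          new VideoSource.AspectRatio(16, 9),
          /* maxLandscapePixelCount= */ 1280 * 720,
          new VideoSource.AspectRatio(9, 16),
          /* maxPortraitPixelCount= */ 720 * 1280,
          /* maxFps= */ 30);
    }
  }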