Android: Add FramerateRange class

The Camera1 and Camera2 API use different framerate range types. Camera1
uses int[2] and Camera2 uses Range<Integer>. Range<Integer> is
unfortunately only available on Lollipop and later, so this CL adds a
similar FramerateRange class in CaptureFormat.

The purpose of this CL is to have a common framerate range type that can
be reused from both Camera1 and Camera2 in helper functions such as
CameraEnumerationAndroid.getClosestSupportedFramerateRange().

BUG=webrtc:5519
R=sakal@webrtc.org

Review URL: https://codereview.webrtc.org/2010763003 .

Cr-Commit-Position: refs/heads/master@{#12942}
This commit is contained in:
Magnus Jedvert
2016-05-27 10:35:51 +02:00
parent a44e72c44f
commit 94cb67d6df
6 changed files with 101 additions and 47 deletions

View File

@ -286,7 +286,7 @@ public class VideoCapturerAndroidTestFixtures {
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create( final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null /* sharedContext */); "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
final FakeCapturerObserver observer = new FakeCapturerObserver(); final FakeCapturerObserver observer = new FakeCapturerObserver();
capturer.startCapture(format.width, format.height, format.maxFramerate, capturer.startCapture(format.width, format.height, format.framerate.max,
surfaceTextureHelper, appContext, observer); surfaceTextureHelper, appContext, observer);
// Make sure camera is started and first frame is received and then stop it. // Make sure camera is started and first frame is received and then stop it.
assertTrue(observer.WaitForCapturerToStart()); assertTrue(observer.WaitForCapturerToStart());
@ -310,7 +310,7 @@ public class VideoCapturerAndroidTestFixtures {
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create( final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null /* sharedContext */); "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
final FakeCapturerObserver observer = new FakeCapturerObserver(); final FakeCapturerObserver observer = new FakeCapturerObserver();
capturer.startCapture(format.width, format.height, format.maxFramerate, capturer.startCapture(format.width, format.height, format.framerate.max,
surfaceTextureHelper, appContext, observer); surfaceTextureHelper, appContext, observer);
// Make sure camera is started and then stop it. // Make sure camera is started and then stop it.
assertTrue(observer.WaitForCapturerToStart()); assertTrue(observer.WaitForCapturerToStart());
@ -359,7 +359,7 @@ public class VideoCapturerAndroidTestFixtures {
for(int i = 0; i < 3 ; ++i) { for(int i = 0; i < 3 ; ++i) {
CameraEnumerationAndroid.CaptureFormat format = formats.get(i); CameraEnumerationAndroid.CaptureFormat format = formats.get(i);
capturer.startCapture(format.width, format.height, format.maxFramerate, capturer.startCapture(format.width, format.height, format.framerate.max,
surfaceTextureHelper, appContext, observer); surfaceTextureHelper, appContext, observer);
assertTrue(observer.WaitForCapturerToStart()); assertTrue(observer.WaitForCapturerToStart());
observer.WaitForNextCapturedFrame(); observer.WaitForNextCapturedFrame();
@ -408,7 +408,7 @@ public class VideoCapturerAndroidTestFixtures {
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create( final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null /* sharedContext */); "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
final FakeCapturerObserver observer = new FakeCapturerObserver(); final FakeCapturerObserver observer = new FakeCapturerObserver();
capturer.startCapture(format.width, format.height, format.maxFramerate, capturer.startCapture(format.width, format.height, format.framerate.max,
surfaceTextureHelper, appContext, observer); surfaceTextureHelper, appContext, observer);
if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) { if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
@ -433,7 +433,7 @@ public class VideoCapturerAndroidTestFixtures {
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create( final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null /* sharedContext */); "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
final FakeCapturerObserver observer = new FakeCapturerObserver(); final FakeCapturerObserver observer = new FakeCapturerObserver();
capturer.startCapture(format.width, format.height, format.maxFramerate, capturer.startCapture(format.width, format.height, format.framerate.max,
surfaceTextureHelper, appContext, observer); surfaceTextureHelper, appContext, observer);
waitUntilIdle(capturer); waitUntilIdle(capturer);
@ -459,7 +459,7 @@ public class VideoCapturerAndroidTestFixtures {
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create( final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null /* sharedContext */); "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
final FakeCapturerObserver observer = new FakeCapturerObserver(); final FakeCapturerObserver observer = new FakeCapturerObserver();
capturer.startCapture(format.width, format.height, format.maxFramerate, capturer.startCapture(format.width, format.height, format.framerate.max,
surfaceTextureHelper, appContext, observer); surfaceTextureHelper, appContext, observer);
capturer.stopCapture(); capturer.stopCapture();
release(capturer); release(capturer);
@ -475,7 +475,7 @@ public class VideoCapturerAndroidTestFixtures {
List<CaptureFormat> formats = capturer.getSupportedFormats(); List<CaptureFormat> formats = capturer.getSupportedFormats();
CameraEnumerationAndroid.CaptureFormat format = formats.get(0); CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
capturer.startCapture(format.width, format.height, format.maxFramerate, capturer.startCapture(format.width, format.height, format.framerate.max,
surfaceTextureHelper, appContext, observer); surfaceTextureHelper, appContext, observer);
assertTrue(observer.WaitForCapturerToStart()); assertTrue(observer.WaitForCapturerToStart());
@ -485,7 +485,7 @@ public class VideoCapturerAndroidTestFixtures {
assertTrue(listOftimestamps.size() >= 1); assertTrue(listOftimestamps.size() >= 1);
format = formats.get(1); format = formats.get(1);
capturer.startCapture(format.width, format.height, format.maxFramerate, capturer.startCapture(format.width, format.height, format.framerate.max,
surfaceTextureHelper, appContext, observer); surfaceTextureHelper, appContext, observer);
observer.WaitForCapturerToStart(); observer.WaitForCapturerToStart();
if (capturer.isCapturingToTexture()) { if (capturer.isCapturingToTexture()) {
@ -548,7 +548,7 @@ public class VideoCapturerAndroidTestFixtures {
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create( final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null /* sharedContext */); "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
final FakeCapturerObserver observer = new FakeCapturerObserver(); final FakeCapturerObserver observer = new FakeCapturerObserver();
capturer.startCapture(format.width, format.height, format.maxFramerate, capturer.startCapture(format.width, format.height, format.framerate.max,
surfaceTextureHelper, appContext, observer); surfaceTextureHelper, appContext, observer);
// Make sure camera is started. // Make sure camera is started.
assertTrue(observer.WaitForCapturerToStart()); assertTrue(observer.WaitForCapturerToStart());

View File

@ -43,21 +43,56 @@ public class CameraEnumerationAndroid {
} }
public static class CaptureFormat { public static class CaptureFormat {
// Class to represent a framerate range. The framerate varies because of lighting conditions.
// The values are multiplied by 1000, so 1000 represents one frame per second.
public static class FramerateRange {
  // Both bounds are expressed in thousandths of frames per second, i.e. a
  // value of 1000 corresponds to 1 fps.
  public int min;
  public int max;

  public FramerateRange(int min, int max) {
    this.min = min;
    this.max = max;
  }

  @Override
  public String toString() {
    // Convert from thousandths back to plain frames per second for readability.
    return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
  }

  @Override
  public boolean equals(Object other) {
    if (other instanceof FramerateRange) {
      final FramerateRange that = (FramerateRange) other;
      return min == that.min && max == that.max;
    }
    return false;
  }

  @Override
  public int hashCode() {
    // 65537 is a prime just above 2^16, so distinct (min, max) pairs with
    // values below 2^16 produce distinct hash codes.
    return 1 + 65537 * min + max;
  }
}
public final int width; public final int width;
public final int height; public final int height;
public final int maxFramerate; public final FramerateRange framerate;
public final int minFramerate;
// TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
// needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
// all imageFormats. // all imageFormats.
public final int imageFormat = ImageFormat.NV21; public final int imageFormat = ImageFormat.NV21;
public CaptureFormat(int width, int height, int minFramerate, public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
int maxFramerate) {
this.width = width; this.width = width;
this.height = height; this.height = height;
this.minFramerate = minFramerate; this.framerate = new FramerateRange(minFramerate, maxFramerate);
this.maxFramerate = maxFramerate; }
public CaptureFormat(int width, int height, FramerateRange framerate) {
this.width = width;
this.height = height;
this.framerate = framerate;
} }
// Calculates the frame size of this capture format. // Calculates the frame size of this capture format.
@ -79,15 +114,14 @@ public class CameraEnumerationAndroid {
@Override @Override
public String toString() { public String toString() {
return width + "x" + height + "@[" + minFramerate + ":" + maxFramerate + "]"; return width + "x" + height + "@" + framerate;
} }
public boolean isSameFormat(final CaptureFormat that) { public boolean isSameFormat(final CaptureFormat that) {
if (that == null) { if (that == null) {
return false; return false;
} }
return width == that.width && height == that.height && maxFramerate == that.maxFramerate return width == that.width && height == that.height && framerate.equals(that.framerate);
&& minFramerate == that.minFramerate;
} }
} }
@ -134,7 +168,9 @@ public class CameraEnumerationAndroid {
return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK); return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
} }
// Helper class for finding the closest supported format for the two functions below. // Helper class for finding the closest supported format for the two functions below. It creates a
// comparator based on the difference to some requested parameters, where the element with the
// minimum difference is the element that is closest to the requested parameters.
private static abstract class ClosestComparator<T> implements Comparator<T> { private static abstract class ClosestComparator<T> implements Comparator<T> {
// Difference between supported and requested parameter. // Difference between supported and requested parameter.
abstract int diff(T supportedParameter); abstract int diff(T supportedParameter);
@ -145,20 +181,15 @@ public class CameraEnumerationAndroid {
} }
} }
public static int[] getFramerateRange(android.hardware.Camera.Parameters parameters, public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
final int framerate) { List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange(); return Collections.min(supportedFramerates,
if (listFpsRange.isEmpty()) { new ClosestComparator<CaptureFormat.FramerateRange>() {
Logging.w(TAG, "No supported preview fps range"); private static final int MAX_FPS_WEIGHT = 10;
return new int[]{0, 0};
} @Override
return Collections.min(listFpsRange, int diff(CaptureFormat.FramerateRange range) {
new ClosestComparator<int[]>() { return range.min + MAX_FPS_WEIGHT * abs(requestedFps * 1000 - range.max);
@Override int diff(int[] range) {
final int maxFpsWeight = 10;
return range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX]
+ maxFpsWeight * abs(framerate
- range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
} }
}); });
} }

View File

@ -81,4 +81,16 @@ public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator {
+ " Time spent: " + (endTimeMs - startTimeMs) + " ms."); + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
return formatList; return formatList;
} }
// Converts Camera1-style int[2] fps ranges into CaptureFormat.FramerateRange
// objects. Each input array is indexed by
// Camera.Parameters.PREVIEW_FPS_MIN_INDEX / PREVIEW_FPS_MAX_INDEX.
public static List<CaptureFormat.FramerateRange> convertFramerates(
    List<int[]> arrayRanges) {
  final List<CaptureFormat.FramerateRange> result =
      new ArrayList<CaptureFormat.FramerateRange>();
  for (int[] fpsRange : arrayRanges) {
    final int minFps = fpsRange[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
    final int maxFps = fpsRange[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
    result.add(new CaptureFormat.FramerateRange(minFps, maxFps));
  }
  return result;
}
} }

View File

@ -397,19 +397,24 @@ public class VideoCapturerAndroid implements
// Find closest supported format for |width| x |height| @ |framerate|. // Find closest supported format for |width| x |height| @ |framerate|.
final android.hardware.Camera.Parameters parameters = camera.getParameters(); final android.hardware.Camera.Parameters parameters = camera.getParameters();
for (int[] fpsRange : parameters.getSupportedPreviewFpsRange()) { final List<CaptureFormat.FramerateRange> supportedFramerates =
Logging.d(TAG, "Available fps range: " + CameraEnumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
fpsRange[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + ":" + Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
fpsRange[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
final CaptureFormat.FramerateRange bestFpsRange;
if (supportedFramerates.isEmpty()) {
Logging.w(TAG, "No supported preview fps range");
bestFpsRange = new CaptureFormat.FramerateRange(0, 0);
} else {
bestFpsRange = CameraEnumerationAndroid.getClosestSupportedFramerateRange(
supportedFramerates, framerate);
} }
final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
final android.hardware.Camera.Size previewSize = final android.hardware.Camera.Size previewSize =
CameraEnumerationAndroid.getClosestSupportedSize( CameraEnumerationAndroid.getClosestSupportedSize(
parameters.getSupportedPreviewSizes(), width, height); parameters.getSupportedPreviewSizes(), width, height);
final CaptureFormat captureFormat = new CaptureFormat( final CaptureFormat captureFormat = new CaptureFormat(
previewSize.width, previewSize.height, previewSize.width, previewSize.height, bestFpsRange);
range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
// Check if we are already using this capture format, then we don't need to do anything. // Check if we are already using this capture format, then we don't need to do anything.
if (captureFormat.isSameFormat(this.captureFormat)) { if (captureFormat.isSameFormat(this.captureFormat)) {
@ -424,8 +429,8 @@ public class VideoCapturerAndroid implements
} }
// Note: setRecordingHint(true) actually decrease frame rate on N5. // Note: setRecordingHint(true) actually decrease frame rate on N5.
// parameters.setRecordingHint(true); // parameters.setRecordingHint(true);
if (captureFormat.maxFramerate > 0) { if (captureFormat.framerate.max > 0) {
parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate); parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
} }
parameters.setPreviewSize(captureFormat.width, captureFormat.height); parameters.setPreviewSize(captureFormat.width, captureFormat.height);

View File

@ -127,21 +127,27 @@ AndroidVideoCapturerJni::GetSupportedFormats() {
jclass j_list_class = jni->FindClass("java/util/List"); jclass j_list_class = jni->FindClass("java/util/List");
jclass j_format_class = jclass j_format_class =
jni->FindClass("org/webrtc/CameraEnumerationAndroid$CaptureFormat"); jni->FindClass("org/webrtc/CameraEnumerationAndroid$CaptureFormat");
jclass j_framerate_class = jni->FindClass(
"org/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange");
const int size = jni->CallIntMethod( const int size = jni->CallIntMethod(
j_list_of_formats, GetMethodID(jni, j_list_class, "size", "()I")); j_list_of_formats, GetMethodID(jni, j_list_class, "size", "()I"));
jmethodID j_get = jmethodID j_get =
GetMethodID(jni, j_list_class, "get", "(I)Ljava/lang/Object;"); GetMethodID(jni, j_list_class, "get", "(I)Ljava/lang/Object;");
jfieldID j_framerate_field = GetFieldID(
jni, j_format_class, "framerate",
"org/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange");
jfieldID j_width_field = GetFieldID(jni, j_format_class, "width", "I"); jfieldID j_width_field = GetFieldID(jni, j_format_class, "width", "I");
jfieldID j_height_field = GetFieldID(jni, j_format_class, "height", "I"); jfieldID j_height_field = GetFieldID(jni, j_format_class, "height", "I");
jfieldID j_max_framerate_field = jfieldID j_max_framerate_field =
GetFieldID(jni, j_format_class, "maxFramerate", "I"); GetFieldID(jni, j_framerate_class, "max", "I");
std::vector<cricket::VideoFormat> formats; std::vector<cricket::VideoFormat> formats;
formats.reserve(size); formats.reserve(size);
for (int i = 0; i < size; ++i) { for (int i = 0; i < size; ++i) {
jobject j_format = jni->CallObjectMethod(j_list_of_formats, j_get, i); jobject j_format = jni->CallObjectMethod(j_list_of_formats, j_get, i);
jobject j_framerate = GetObjectField(jni, j_format, j_framerate_field);
const int frame_interval = cricket::VideoFormat::FpsToInterval( const int frame_interval = cricket::VideoFormat::FpsToInterval(
(GetIntField(jni, j_format, j_max_framerate_field) + 999) / 1000); (GetIntField(jni, j_framerate, j_max_framerate_field) + 999) / 1000);
formats.emplace_back(GetIntField(jni, j_format, j_width_field), formats.emplace_back(GetIntField(jni, j_format, j_width_field),
GetIntField(jni, j_format, j_height_field), GetIntField(jni, j_format, j_height_field),
frame_interval, cricket::FOURCC_NV21); frame_interval, cricket::FOURCC_NV21);

View File

@ -77,7 +77,7 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
long maxCaptureBandwidth = java.lang.Long.MIN_VALUE; long maxCaptureBandwidth = java.lang.Long.MIN_VALUE;
for (CaptureFormat format : formats) { for (CaptureFormat format : formats) {
maxCaptureBandwidth = Math.max(maxCaptureBandwidth, maxCaptureBandwidth = Math.max(maxCaptureBandwidth,
(long) format.width * format.height * format.maxFramerate); (long) format.width * format.height * format.framerate.max);
} }
// Fraction between 0 and 1. // Fraction between 0 and 1.
@ -107,7 +107,7 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
// Return the highest frame rate possible based on bandwidth and format. // Return the highest frame rate possible based on bandwidth and format.
private int calculateFramerate(double bandwidth, CaptureFormat format) { private int calculateFramerate(double bandwidth, CaptureFormat format) {
return (int) Math.round(Math.min(format.maxFramerate, return (int) Math.round(Math.min(format.framerate.max,
(int) Math.round(bandwidth / (format.width * format.height))) / 1000.0); (int) Math.round(bandwidth / (format.width * format.height))) / 1000.0);
} }
} }