diff --git a/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java b/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
index ed7e250b65..04643f1527 100644
--- a/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
+++ b/webrtc/api/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
@@ -286,7 +286,7 @@ public class VideoCapturerAndroidTestFixtures {
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     // Make sure camera is started and first frame is received and then stop it.
     assertTrue(observer.WaitForCapturerToStart());
@@ -310,7 +310,7 @@ public class VideoCapturerAndroidTestFixtures {
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     // Make sure camera is started and then stop it.
     assertTrue(observer.WaitForCapturerToStart());
@@ -359,7 +359,7 @@ public class VideoCapturerAndroidTestFixtures {
 
     for(int i = 0; i < 3 ; ++i) {
       CameraEnumerationAndroid.CaptureFormat format = formats.get(i);
-      capturer.startCapture(format.width, format.height, format.maxFramerate,
+      capturer.startCapture(format.width, format.height, format.framerate.max,
           surfaceTextureHelper, appContext, observer);
       assertTrue(observer.WaitForCapturerToStart());
       observer.WaitForNextCapturedFrame();
@@ -408,7 +408,7 @@ public class VideoCapturerAndroidTestFixtures {
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
 
     if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
@@ -433,7 +433,7 @@ public class VideoCapturerAndroidTestFixtures {
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
 
     waitUntilIdle(capturer);
@@ -459,7 +459,7 @@ public class VideoCapturerAndroidTestFixtures {
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     capturer.stopCapture();
     release(capturer);
@@ -475,7 +475,7 @@ public class VideoCapturerAndroidTestFixtures {
 
     List<CameraEnumerationAndroid.CaptureFormat> formats = capturer.getSupportedFormats();
     CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     assertTrue(observer.WaitForCapturerToStart());
 
@@ -485,7 +485,7 @@ public class VideoCapturerAndroidTestFixtures {
     assertTrue(listOftimestamps.size() >= 1);
 
     format = formats.get(1);
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     observer.WaitForCapturerToStart();
     if (capturer.isCapturingToTexture()) {
@@ -548,7 +548,7 @@ public class VideoCapturerAndroidTestFixtures {
     final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
         "SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
-    capturer.startCapture(format.width, format.height, format.maxFramerate,
+    capturer.startCapture(format.width, format.height, format.framerate.max,
         surfaceTextureHelper, appContext, observer);
     // Make sure camera is started.
     assertTrue(observer.WaitForCapturerToStart());
diff --git a/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java b/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java
index c5c8be3a5d..2fa685d1fb 100644
--- a/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java
+++ b/webrtc/api/java/android/org/webrtc/CameraEnumerationAndroid.java
@@ -43,21 +43,56 @@ public class CameraEnumerationAndroid {
   }
 
   public static class CaptureFormat {
+    // Class to represent a framerate range. The framerate varies because of lighting conditions.
+    // The values are multiplied by 1000, so 1000 represents one frame per second.
+    public static class FramerateRange {
+      public int min;
+      public int max;
+
+      public FramerateRange(int min, int max) {
+        this.min = min;
+        this.max = max;
+      }
+
+      @Override
+      public String toString() {
+        return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
+      }
+
+      @Override
+      public boolean equals(Object other) {
+        if (!(other instanceof FramerateRange)) {
+          return false;
+        }
+        final FramerateRange otherFramerate = (FramerateRange) other;
+        return min == otherFramerate.min && max == otherFramerate.max;
+      }
+
+      @Override
+      public int hashCode() {
+        // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
+        return 1 + 65537 * min + max;
+      }
+    }
+
     public final int width;
     public final int height;
-    public final int maxFramerate;
-    public final int minFramerate;
+    public final FramerateRange framerate;
     // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
     // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
     // all imageFormats.
     public final int imageFormat = ImageFormat.NV21;
 
-    public CaptureFormat(int width, int height, int minFramerate,
-        int maxFramerate) {
+    public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
       this.width = width;
       this.height = height;
-      this.minFramerate = minFramerate;
-      this.maxFramerate = maxFramerate;
+      this.framerate = new FramerateRange(minFramerate, maxFramerate);
+    }
+
+    public CaptureFormat(int width, int height, FramerateRange framerate) {
+      this.width = width;
+      this.height = height;
+      this.framerate = framerate;
     }
 
     // Calculates the frame size of this capture format.
@@ -79,15 +114,14 @@ public class CameraEnumerationAndroid {
 
     @Override
     public String toString() {
-      return width + "x" + height + "@[" + minFramerate + ":" + maxFramerate + "]";
+      return width + "x" + height + "@" + framerate;
     }
 
     public boolean isSameFormat(final CaptureFormat that) {
       if (that == null) {
         return false;
       }
-      return width == that.width && height == that.height && maxFramerate == that.maxFramerate
-          && minFramerate == that.minFramerate;
+      return width == that.width && height == that.height && framerate.equals(that.framerate);
     }
   }
 
@@ -134,7 +168,9 @@ public class CameraEnumerationAndroid {
     return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
   }
 
-  // Helper class for finding the closest supported format for the two functions below.
+  // Helper class for finding the closest supported format for the two functions below. It creates a
+  // comparator based on the difference to some requested parameters, where the element with the
+  // minimum difference is the element that is closest to the requested parameters.
   private static abstract class ClosestComparator<T> implements Comparator<T> {
     // Difference between supported and requested parameter.
     abstract int diff(T supportedParameter);
@@ -145,20 +181,15 @@ public class CameraEnumerationAndroid {
     }
   }
 
-  public static int[] getFramerateRange(android.hardware.Camera.Parameters parameters,
-      final int framerate) {
-    List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
-    if (listFpsRange.isEmpty()) {
-      Logging.w(TAG, "No supported preview fps range");
-      return new int[]{0, 0};
-    }
-    return Collections.min(listFpsRange,
-        new ClosestComparator<int[]>() {
-          @Override int diff(int[] range) {
-            final int maxFpsWeight = 10;
-            return range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX]
-                + maxFpsWeight * abs(framerate
-                    - range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+  public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
+      List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
+    return Collections.min(supportedFramerates,
+        new ClosestComparator<CaptureFormat.FramerateRange>() {
+          private static final int MAX_FPS_WEIGHT = 10;
+
+          @Override
+          int diff(CaptureFormat.FramerateRange range) {
+            return range.min + MAX_FPS_WEIGHT * abs(requestedFps * 1000 - range.max);
           }
         });
   }
diff --git a/webrtc/api/java/android/org/webrtc/CameraEnumerator.java b/webrtc/api/java/android/org/webrtc/CameraEnumerator.java
index 59a7847f85..04edba8379 100644
--- a/webrtc/api/java/android/org/webrtc/CameraEnumerator.java
+++ b/webrtc/api/java/android/org/webrtc/CameraEnumerator.java
@@ -81,4 +81,16 @@ public class CameraEnumerator implements CameraEnumerationAndroid.Enumerator {
         + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
     return formatList;
   }
+
+  // Convert from int[2] to CaptureFormat.FramerateRange.
+  public static List<CaptureFormat.FramerateRange> convertFramerates(
+      List<int[]> arrayRanges) {
+    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+    for (int[] range : arrayRanges) {
+      ranges.add(new CaptureFormat.FramerateRange(
+          range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+          range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
+    }
+    return ranges;
+  }
 }
diff --git a/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java b/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java
index 5c59318ea9..ee5e641755 100644
--- a/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java
+++ b/webrtc/api/java/android/org/webrtc/VideoCapturerAndroid.java
@@ -397,19 +397,24 @@ public class VideoCapturerAndroid implements
 
     // Find closest supported format for |width| x |height| @ |framerate|.
     final android.hardware.Camera.Parameters parameters = camera.getParameters();
-    for (int[] fpsRange : parameters.getSupportedPreviewFpsRange()) {
-      Logging.d(TAG, "Available fps range: " +
-          fpsRange[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + ":" +
-          fpsRange[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+    final List<CaptureFormat.FramerateRange> supportedFramerates =
+        CameraEnumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
+    Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
+
+    final CaptureFormat.FramerateRange bestFpsRange;
+    if (supportedFramerates.isEmpty()) {
+      Logging.w(TAG, "No supported preview fps range");
+      bestFpsRange = new CaptureFormat.FramerateRange(0, 0);
+    } else {
+      bestFpsRange = CameraEnumerationAndroid.getClosestSupportedFramerateRange(
+          supportedFramerates, framerate);
     }
-    final int[] range = CameraEnumerationAndroid.getFramerateRange(parameters, framerate * 1000);
+
     final android.hardware.Camera.Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
         parameters.getSupportedPreviewSizes(), width, height);
 
     final CaptureFormat captureFormat = new CaptureFormat(
-        previewSize.width, previewSize.height,
-        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
-        range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+        previewSize.width, previewSize.height, bestFpsRange);
 
     // Check if we are already using this capture format, then we don't need to do anything.
     if (captureFormat.isSameFormat(this.captureFormat)) {
@@ -424,8 +429,8 @@ public class VideoCapturerAndroid implements
     }
 
     // Note: setRecordingHint(true) actually decrease frame rate on N5.
     // parameters.setRecordingHint(true);
-    if (captureFormat.maxFramerate > 0) {
-      parameters.setPreviewFpsRange(captureFormat.minFramerate, captureFormat.maxFramerate);
+    if (captureFormat.framerate.max > 0) {
+      parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
     }
     parameters.setPreviewSize(captureFormat.width, captureFormat.height);
diff --git a/webrtc/api/java/jni/androidvideocapturer_jni.cc b/webrtc/api/java/jni/androidvideocapturer_jni.cc
index 3f79f3a321..c6f4b40ea5 100644
--- a/webrtc/api/java/jni/androidvideocapturer_jni.cc
+++ b/webrtc/api/java/jni/androidvideocapturer_jni.cc
@@ -127,21 +127,27 @@ AndroidVideoCapturerJni::GetSupportedFormats() {
   jclass j_list_class = jni->FindClass("java/util/List");
   jclass j_format_class =
       jni->FindClass("org/webrtc/CameraEnumerationAndroid$CaptureFormat");
+  jclass j_framerate_class = jni->FindClass(
+      "org/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange");
   const int size = jni->CallIntMethod(
       j_list_of_formats, GetMethodID(jni, j_list_class, "size", "()I"));
   jmethodID j_get =
       GetMethodID(jni, j_list_class, "get", "(I)Ljava/lang/Object;");
+  jfieldID j_framerate_field = GetFieldID(
+      jni, j_format_class, "framerate",
+      "Lorg/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange;");
   jfieldID j_width_field = GetFieldID(jni, j_format_class, "width", "I");
   jfieldID j_height_field = GetFieldID(jni, j_format_class, "height", "I");
   jfieldID j_max_framerate_field =
-      GetFieldID(jni, j_format_class, "maxFramerate", "I");
+      GetFieldID(jni, j_framerate_class, "max", "I");
 
   std::vector<cricket::VideoFormat> formats;
   formats.reserve(size);
   for (int i = 0; i < size; ++i) {
     jobject j_format = jni->CallObjectMethod(j_list_of_formats, j_get, i);
+    jobject j_framerate = GetObjectField(jni, j_format, j_framerate_field);
     const int frame_interval = cricket::VideoFormat::FpsToInterval(
-        (GetIntField(jni, j_format, j_max_framerate_field) + 999) / 1000);
+        (GetIntField(jni, j_framerate, j_max_framerate_field) + 999) / 1000);
     formats.emplace_back(GetIntField(jni, j_format, j_width_field),
                          GetIntField(jni, j_format, j_height_field),
                          frame_interval, cricket::FOURCC_NV21);
diff --git a/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java b/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
index dfd9558896..d231c41a19 100644
--- a/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
+++ b/webrtc/examples/androidapp/src/org/appspot/apprtc/CaptureQualityController.java
@@ -77,7 +77,7 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
     long maxCaptureBandwidth = java.lang.Long.MIN_VALUE;
     for (CaptureFormat format : formats) {
       maxCaptureBandwidth = Math.max(maxCaptureBandwidth,
-          (long) format.width * format.height * format.maxFramerate);
+          (long) format.width * format.height * format.framerate.max);
     }
 
     // Fraction between 0 and 1.
@@ -107,7 +107,7 @@ public class CaptureQualityController implements SeekBar.OnSeekBarChangeListener
 
   // Return the highest frame rate possible based on bandwidth and format.
   private int calculateFramerate(double bandwidth, CaptureFormat format) {
-    return (int) Math.round(Math.min(format.maxFramerate,
+    return (int) Math.round(Math.min(format.framerate.max,
        (int) Math.round(bandwidth / (format.width * format.height))) / 1000.0);
   }
 }
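For reference, the snippet below is a minimal sketch (not part of the patch) of how the refactored helpers fit together when selecting a preview fps range, mirroring the flow in VideoCapturerAndroid above. The wrapper class FpsRangeExample, its method name, and the assumption of an already opened android.hardware.Camera are hypothetical; convertFramerates, getClosestSupportedFramerateRange, and CaptureFormat.FramerateRange come from the patch itself.

import java.util.List;

import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.CameraEnumerator;

// Sketch only: exercises the new FramerateRange-based helpers from this patch.
class FpsRangeExample {
  static void applyClosestFpsRange(android.hardware.Camera camera, int requestedFps) {
    final android.hardware.Camera.Parameters parameters = camera.getParameters();
    // Convert the raw int[2] preview fps ranges into CaptureFormat.FramerateRange objects.
    // Values are scaled by 1000, so 30000 represents 30 fps.
    final List<CaptureFormat.FramerateRange> supportedFramerates =
        CameraEnumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
    if (supportedFramerates.isEmpty()) {
      return;  // The camera reported no supported preview fps ranges.
    }
    // Pick the range whose max is closest to |requestedFps| (given in fps, not scaled),
    // with a penalty on high min values, as implemented by the new comparator.
    final CaptureFormat.FramerateRange best =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(
            supportedFramerates, requestedFps);
    parameters.setPreviewFpsRange(best.min, best.max);
    camera.setParameters(parameters);
  }
}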