diff --git a/media/base/videoadapter.cc b/media/base/videoadapter.cc
index 840101bcee..b2ed333614 100644
--- a/media/base/videoadapter.cc
+++ b/media/base/videoadapter.cc
@@ -169,8 +169,18 @@ bool VideoAdapter::AdaptFrameResolution(int in_width,
   // OnOutputFormatRequest and OnResolutionFramerateRequest.
   int max_pixel_count = resolution_request_max_pixel_count_;
-  if (max_pixel_count_)
-    max_pixel_count = std::min(max_pixel_count, *max_pixel_count_);
+  // Select target aspect ratio and max pixel count depending on input frame
+  // orientation.
+  absl::optional<std::pair<int, int>> target_aspect_ratio;
+  if (in_width > in_height) {
+    target_aspect_ratio = target_landscape_aspect_ratio_;
+    if (max_landscape_pixel_count_)
+      max_pixel_count = std::min(max_pixel_count, *max_landscape_pixel_count_);
+  } else {
+    target_aspect_ratio = target_portrait_aspect_ratio_;
+    if (max_portrait_pixel_count_)
+      max_pixel_count = std::min(max_pixel_count, *max_portrait_pixel_count_);
+  }
   int target_pixel_count =
       std::min(resolution_request_target_pixel_count_, max_pixel_count);
 
@@ -195,19 +205,14 @@ bool VideoAdapter::AdaptFrameResolution(int in_width,
   }
 
   // Calculate how the input should be cropped.
-  if (!target_aspect_ratio_ || target_aspect_ratio_->first <= 0 ||
-      target_aspect_ratio_->second <= 0) {
+  if (!target_aspect_ratio || target_aspect_ratio->first <= 0 ||
+      target_aspect_ratio->second <= 0) {
     *cropped_width = in_width;
     *cropped_height = in_height;
   } else {
-    // Adjust |target_aspect_ratio_| orientation to match input.
-    if ((in_width > in_height) !=
-        (target_aspect_ratio_->first > target_aspect_ratio_->second)) {
-      std::swap(target_aspect_ratio_->first, target_aspect_ratio_->second);
-    }
     const float requested_aspect =
-        target_aspect_ratio_->first /
-        static_cast<float>(target_aspect_ratio_->second);
+        target_aspect_ratio->first /
+        static_cast<float>(target_aspect_ratio->second);
     *cropped_width =
         std::min(in_width, static_cast<int>(in_height * requested_aspect));
     *cropped_height =
@@ -274,9 +279,33 @@ void VideoAdapter::OnOutputFormatRequest(
     const absl::optional<std::pair<int, int>>& target_aspect_ratio,
     const absl::optional<int>& max_pixel_count,
     const absl::optional<int>& max_fps) {
+  absl::optional<std::pair<int, int>> target_landscape_aspect_ratio;
+  absl::optional<std::pair<int, int>> target_portrait_aspect_ratio;
+  if (target_aspect_ratio && target_aspect_ratio->first > 0 &&
+      target_aspect_ratio->second > 0) {
+    // Maintain input orientation.
+    const int max_side =
+        std::max(target_aspect_ratio->first, target_aspect_ratio->second);
+    const int min_side =
+        std::min(target_aspect_ratio->first, target_aspect_ratio->second);
+    target_landscape_aspect_ratio = std::make_pair(max_side, min_side);
+    target_portrait_aspect_ratio = std::make_pair(min_side, max_side);
+  }
+  OnOutputFormatRequest(target_landscape_aspect_ratio, max_pixel_count,
+                        target_portrait_aspect_ratio, max_pixel_count, max_fps);
+}
+
+void VideoAdapter::OnOutputFormatRequest(
+    const absl::optional<std::pair<int, int>>& target_landscape_aspect_ratio,
+    const absl::optional<int>& max_landscape_pixel_count,
+    const absl::optional<std::pair<int, int>>& target_portrait_aspect_ratio,
+    const absl::optional<int>& max_portrait_pixel_count,
+    const absl::optional<int>& max_fps) {
   rtc::CritScope cs(&critical_section_);
-  target_aspect_ratio_ = target_aspect_ratio;
-  max_pixel_count_ = max_pixel_count;
+  target_landscape_aspect_ratio_ = target_landscape_aspect_ratio;
+  max_landscape_pixel_count_ = max_landscape_pixel_count;
+  target_portrait_aspect_ratio_ = target_portrait_aspect_ratio;
+  max_portrait_pixel_count_ = max_portrait_pixel_count;
   max_fps_ = max_fps;
   next_frame_timestamp_ns_ = absl::nullopt;
 }
diff --git a/media/base/videoadapter.h b/media/base/videoadapter.h
index bb9ab8026f..1864d5cc0c 100644
--- a/media/base/videoadapter.h
+++ b/media/base/videoadapter.h
@@ -68,6 +68,17 @@ class VideoAdapter {
       const absl::optional<int>& max_pixel_count,
       const absl::optional<int>& max_fps);
 
+  // Same as above, but allows setting two different target aspect ratios
+  // depending on incoming frame orientation. This gives more fine-grained
+  // control and can e.g. be used to force landscape video to be cropped to
+  // portrait video.
+  void OnOutputFormatRequest(
+      const absl::optional<std::pair<int, int>>& target_landscape_aspect_ratio,
+      const absl::optional<int>& max_landscape_pixel_count,
+      const absl::optional<std::pair<int, int>>& target_portrait_aspect_ratio,
+      const absl::optional<int>& max_portrait_pixel_count,
+      const absl::optional<int>& max_fps);
+
   // Requests the output frame size from |AdaptFrameResolution| to have as close
   // as possible to |target_pixel_count| pixels (if set) but no more than
   // |max_pixel_count|.
@@ -100,9 +111,14 @@ class VideoAdapter {
   // Max number of pixels/fps requested via calls to OnOutputFormatRequest,
   // OnResolutionFramerateRequest respectively.
   // The adapted output format is the minimum of these.
-  absl::optional<std::pair<int, int>> target_aspect_ratio_
+  absl::optional<std::pair<int, int>> target_landscape_aspect_ratio_
+      RTC_GUARDED_BY(critical_section_);
+  absl::optional<int> max_landscape_pixel_count_
+      RTC_GUARDED_BY(critical_section_);
+  absl::optional<std::pair<int, int>> target_portrait_aspect_ratio_
+      RTC_GUARDED_BY(critical_section_);
+  absl::optional<int> max_portrait_pixel_count_
       RTC_GUARDED_BY(critical_section_);
-  absl::optional<int> max_pixel_count_ RTC_GUARDED_BY(critical_section_);
   absl::optional<int> max_fps_ RTC_GUARDED_BY(critical_section_);
   int resolution_request_target_pixel_count_ RTC_GUARDED_BY(critical_section_);
   int resolution_request_max_pixel_count_ RTC_GUARDED_BY(critical_section_);
diff --git a/media/base/videoadapter_unittest.cc b/media/base/videoadapter_unittest.cc
index ea7528117e..c600fc2c07 100644
--- a/media/base/videoadapter_unittest.cc
+++ b/media/base/videoadapter_unittest.cc
@@ -1111,4 +1111,61 @@ TEST_P(VideoAdapterTest, TestAdaptToMax) {
   EXPECT_EQ(640, out_width_);
   EXPECT_EQ(360, out_height_);
 }
+
+// Test adjusting to 16:9 in landscape, and 9:16 in portrait.
+TEST(VideoAdapterTestMultipleOrientation, TestNormal) {
+  VideoAdapter video_adapter;
+  video_adapter.OnOutputFormatRequest(std::make_pair(640, 360), 640 * 360,
+                                      std::make_pair(360, 640), 360 * 640, 30);
+
+  int cropped_width;
+  int cropped_height;
+  int out_width;
+  int out_height;
+  EXPECT_TRUE(video_adapter.AdaptFrameResolution(
+      /* in_width= */ 640, /* in_height= */ 480, /* in_timestamp_ns= */ 0,
+      &cropped_width, &cropped_height, &out_width, &out_height));
+  EXPECT_EQ(640, cropped_width);
+  EXPECT_EQ(360, cropped_height);
+  EXPECT_EQ(640, out_width);
+  EXPECT_EQ(360, out_height);
+
+  EXPECT_TRUE(video_adapter.AdaptFrameResolution(
+      /* in_width= */ 480, /* in_height= */ 640,
+      /* in_timestamp_ns= */ rtc::kNumNanosecsPerSec / 30, &cropped_width,
+      &cropped_height, &out_width, &out_height));
+  EXPECT_EQ(360, cropped_width);
+  EXPECT_EQ(640, cropped_height);
+  EXPECT_EQ(360, out_width);
+  EXPECT_EQ(640, out_height);
+}
+
+// Force output to be 9:16, even for landscape input.
+TEST(VideoAdapterTestMultipleOrientation, TestForcePortrait) {
+  VideoAdapter video_adapter;
+  video_adapter.OnOutputFormatRequest(std::make_pair(360, 640), 640 * 360,
+                                      std::make_pair(360, 640), 360 * 640, 30);
+
+  int cropped_width;
+  int cropped_height;
+  int out_width;
+  int out_height;
+  EXPECT_TRUE(video_adapter.AdaptFrameResolution(
+      /* in_width= */ 640, /* in_height= */ 480, /* in_timestamp_ns= */ 0,
+      &cropped_width, &cropped_height, &out_width, &out_height));
+  EXPECT_EQ(270, cropped_width);
+  EXPECT_EQ(480, cropped_height);
+  EXPECT_EQ(270, out_width);
+  EXPECT_EQ(480, out_height);
+
+  EXPECT_TRUE(video_adapter.AdaptFrameResolution(
+      /* in_width= */ 480, /* in_height= */ 640,
+      /* in_timestamp_ns= */ rtc::kNumNanosecsPerSec / 30, &cropped_width,
+      &cropped_height, &out_width, &out_height));
+  EXPECT_EQ(360, cropped_width);
+  EXPECT_EQ(640, cropped_height);
+  EXPECT_EQ(360, out_width);
+  EXPECT_EQ(640, out_height);
+}
+
 }  // namespace cricket
diff --git a/sdk/android/api/org/webrtc/VideoSource.java b/sdk/android/api/org/webrtc/VideoSource.java
index 7f9f3e5323..a8ef6620ac 100644
--- a/sdk/android/api/org/webrtc/VideoSource.java
+++ b/sdk/android/api/org/webrtc/VideoSource.java
@@ -30,7 +30,20 @@ public class VideoSource extends MediaSource {
    * maintain the input orientation, so it doesn't matter if e.g. 1280x720 or 720x1280 is requested.
    */
   public void adaptOutputFormat(int width, int height, int fps) {
-    nativeAdaptOutputFormat(getNativeVideoTrackSource(), width, height, fps);
+    final int maxSide = Math.max(width, height);
+    final int minSide = Math.min(width, height);
+    adaptOutputFormat(maxSide, minSide, minSide, maxSide, fps);
+  }
+
+  /**
+   * Same as above, but allows setting two different target resolutions depending on incoming
+   * frame orientation. This gives more fine-grained control and can e.g. be used to force landscape
+   * video to be cropped to portrait video.
+   */
+  public void adaptOutputFormat(
+      int landscapeWidth, int landscapeHeight, int portraitWidth, int portraitHeight, int fps) {
+    nativeAdaptOutputFormat(getNativeVideoTrackSource(), landscapeWidth, landscapeHeight,
+        portraitWidth, portraitHeight, fps);
   }
 
   public CapturerObserver getCapturerObserver() {
@@ -44,5 +57,6 @@ public class VideoSource extends MediaSource {
 
   // Returns source->internal() from webrtc::VideoTrackSourceProxy.
   private static native long nativeGetInternalSource(long source);
-  private static native void nativeAdaptOutputFormat(long source, int width, int height, int fps);
+  private static native void nativeAdaptOutputFormat(long source, int landscapeWidth,
+      int landscapeHeight, int portraitWidth, int portraitHeight, int fps);
 }
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java b/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
index ef7669102f..17b0977d59 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java
@@ -169,6 +169,14 @@ public class Camera1CapturerUsingByteBufferTest {
     fixtures.scaleCameraOutput();
   }
 
+  // This test verifies that frames forwarded to a renderer are cropped to a new orientation if
+  // adaptOutputFormat is called in such a way. This tests both the Java and C++ parts of the stack.
+  @Test
+  @MediumTest
+  public void testCropCameraOutput() throws InterruptedException {
+    fixtures.cropCameraOutput();
+  }
+
   // This test that an error is reported if the camera is already opened
   // when CameraVideoCapturer is started.
   @Test
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java b/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
index 28fa825ad7..4dc003726a 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java
@@ -172,6 +172,14 @@ public class Camera1CapturerUsingTextureTest {
     fixtures.scaleCameraOutput();
   }
 
+  // This test verifies that frames forwarded to a renderer are cropped to a new orientation if
+  // adaptOutputFormat is called in such a way. This tests both the Java and C++ parts of the stack.
+  @Test
+  @MediumTest
+  public void testCropCameraOutput() throws InterruptedException {
+    fixtures.cropCameraOutput();
+  }
+
   // This test that an error is reported if the camera is already opened
   // when CameraVideoCapturer is started.
   @Test
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java b/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
index 77e8d77f0a..fba943176a 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/Camera2CapturerTest.java
@@ -302,6 +302,14 @@ public class Camera2CapturerTest {
     fixtures.scaleCameraOutput();
   }
 
+  // This test verifies that frames forwarded to a renderer are cropped to a new orientation if
+  // adaptOutputFormat is called in such a way. This tests both the Java and C++ parts of the stack.
+  @Test
+  @MediumTest
+  public void testCropCameraOutput() throws InterruptedException {
+    fixtures.cropCameraOutput();
+  }
+
   // This test that an error is reported if the camera is already opened
   // when CameraVideoCapturer is started.
   @Test
diff --git a/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java b/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
index dd0d780df0..b47fee08d1 100644
--- a/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
+++ b/sdk/android/instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java
@@ -688,6 +688,49 @@ class CameraVideoCapturerTestFixtures {
     assertTrue(gotExpectedResolution);
   }
 
+  public void cropCameraOutput() throws InterruptedException {
+    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+    final VideoTrackWithRenderer videoTrackWithRenderer =
+        createVideoTrackWithRenderer(capturerInstance.capturer);
+    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+
+    final int startWidth = videoTrackWithRenderer.rendererCallbacks.frameWidth();
+    final int startHeight = videoTrackWithRenderer.rendererCallbacks.frameHeight();
+    final int frameRate = 30;
+    final int cropWidth;
+    final int cropHeight;
+    if (startWidth > startHeight) {
+      // Landscape input, request portrait output.
+      cropWidth = 360;
+      cropHeight = 640;
+    } else {
+      // Portrait input, request landscape output.
+      cropWidth = 640;
+      cropHeight = 360;
+    }
+
+    // Request different output orientation than input.
+    videoTrackWithRenderer.source.adaptOutputFormat(
+        cropWidth, cropHeight, cropWidth, cropHeight, frameRate);
+
+    boolean gotExpectedOrientation = false;
+    int numberOfInspectedFrames = 0;
+
+    do {
+      videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
+      ++numberOfInspectedFrames;
+
+      gotExpectedOrientation = (cropWidth > cropHeight)
+          == (videoTrackWithRenderer.rendererCallbacks.frameWidth()
+                 > videoTrackWithRenderer.rendererCallbacks.frameHeight());
+    } while (!gotExpectedOrientation && numberOfInspectedFrames < 30);
+
+    disposeCapturer(capturerInstance);
+    disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+
+    assertTrue(gotExpectedOrientation);
+  }
+
   public void startWhileCameraIsAlreadyOpen() throws InterruptedException {
     final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
     // At this point camera is not actually opened.
diff --git a/sdk/android/src/jni/androidvideotracksource.cc b/sdk/android/src/jni/androidvideotracksource.cc
index 41d4278ac9..aa459e1833 100644
--- a/sdk/android/src/jni/androidvideotracksource.cc
+++ b/sdk/android/src/jni/androidvideotracksource.cc
@@ -84,10 +84,19 @@ void AndroidVideoTrackSource::OnFrameCaptured(
   int crop_x;
   int crop_y;
 
-  if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
-                  &adapted_height, &crop_width, &crop_height, &crop_x,
-                  &crop_y)) {
-    return;
+  if (rotation % 180 == 0) {
+    if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
+                    &adapted_height, &crop_width, &crop_height, &crop_x,
+                    &crop_y)) {
+      return;
+    }
+  } else {
+    // Swap all width/height and x/y.
+    if (!AdaptFrame(height, width, camera_time_us, &adapted_height,
+                    &adapted_width, &crop_height, &crop_width, &crop_y,
+                    &crop_x)) {
+      return;
+    }
   }
 
   rtc::scoped_refptr<VideoFrameBuffer> buffer =
@@ -103,12 +112,16 @@
   OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));
 }
 
-void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
-                                                    int height,
+void AndroidVideoTrackSource::OnOutputFormatRequest(int landscape_width,
+                                                    int landscape_height,
+                                                    int portrait_width,
+                                                    int portrait_height,
                                                     int fps) {
-  cricket::VideoFormat format(width, height,
-                              cricket::VideoFormat::FpsToInterval(fps), 0);
-  video_adapter()->OnOutputFormatRequest(format);
+  video_adapter()->OnOutputFormatRequest(
+      std::make_pair(landscape_width, landscape_height),
+      landscape_width * landscape_height,
+      std::make_pair(portrait_width, portrait_height),
+      portrait_width * portrait_height, fps);
 }
 
 }  // namespace jni
diff --git a/sdk/android/src/jni/androidvideotracksource.h b/sdk/android/src/jni/androidvideotracksource.h
index 8f092c1b04..4c87432a66 100644
--- a/sdk/android/src/jni/androidvideotracksource.h
+++ b/sdk/android/src/jni/androidvideotracksource.h
@@ -53,7 +53,11 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
       VideoRotation rotation,
       const JavaRef<jobject>& j_video_frame_buffer);
 
-  void OnOutputFormatRequest(int width, int height, int fps);
+  void OnOutputFormatRequest(int landscape_width,
+                             int landscape_height,
+                             int portrait_width,
+                             int portrait_height,
+                             int fps);
 
  private:
   rtc::Thread* signaling_thread_;
diff --git a/sdk/android/src/jni/videosource.cc b/sdk/android/src/jni/videosource.cc
index 09b538ebc2..37ba0b04b1 100644
--- a/sdk/android/src/jni/videosource.cc
+++ b/sdk/android/src/jni/videosource.cc
@@ -33,13 +33,16 @@ static jlong JNI_VideoSource_GetInternalSource(JNIEnv* jni,
 static void JNI_VideoSource_AdaptOutputFormat(JNIEnv* jni,
                                               const JavaParamRef<jobject>&,
                                               jlong j_source,
-                                              jint j_width,
-                                              jint j_height,
+                                              jint j_landscape_width,
+                                              jint j_landscape_height,
+                                              jint j_portrait_width,
+                                              jint j_portrait_height,
                                               jint j_fps) {
   RTC_LOG(LS_INFO) << "VideoSource_nativeAdaptOutputFormat";
   AndroidVideoTrackSource* source =
       AndroidVideoTrackSourceFromJavaProxy(j_source);
-  source->OnOutputFormatRequest(j_width, j_height, j_fps);
+  source->OnOutputFormatRequest(j_landscape_width, j_landscape_height,
+                                j_portrait_width, j_portrait_height, j_fps);
 }
 
 }  // namespace jni
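Usage note (not part of the diff above): the sketch below shows how the new five-argument OnOutputFormatRequest could be driven from C++ code, mirroring the VideoAdapterTestMultipleOrientation tests added in videoadapter_unittest.cc. The include paths and the standalone use of cricket::VideoAdapter are assumptions made for illustration; in the WebRTC tree the adapter is normally exercised indirectly through a video source such as AndroidVideoTrackSource.

// Sketch only. Header locations are assumed from the tree layout used in this change.
#include <utility>

#include "media/base/videoadapter.h"  // cricket::VideoAdapter
#include "rtc_base/timeutils.h"       // rtc::kNumNanosecsPerSec (assumed path)

void RequestOrientationAwareOutput(cricket::VideoAdapter* adapter) {
  // Ask for 640x360 output when the input frame is landscape and 360x640 when
  // it is portrait, each capped at the matching pixel count, at 30 fps.
  adapter->OnOutputFormatRequest(std::make_pair(640, 360), 640 * 360,
                                 std::make_pair(360, 640), 360 * 640, 30);

  int cropped_width, cropped_height, out_width, out_height;
  // A landscape input is cropped and scaled against the landscape request. The
  // return value indicates whether the frame should be kept or dropped.
  bool keep_landscape = adapter->AdaptFrameResolution(
      /* in_width= */ 1280, /* in_height= */ 720, /* in_timestamp_ns= */ 0,
      &cropped_width, &cropped_height, &out_width, &out_height);

  // A later portrait input is matched against the portrait request instead.
  bool keep_portrait = adapter->AdaptFrameResolution(
      /* in_width= */ 720, /* in_height= */ 1280,
      /* in_timestamp_ns= */ rtc::kNumNanosecsPerSec / 30, &cropped_width,
      &cropped_height, &out_width, &out_height);
  (void)keep_landscape;
  (void)keep_portrait;
}

From Java, the same request reaches this code path through the new VideoSource.adaptOutputFormat(landscapeWidth, landscapeHeight, portraitWidth, portraitHeight, fps) overload and the JNI binding updated in videosource.cc.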