Calculate frame timestamps based on target frame rate

Before this change, HardwareVideoEncoder used the capture time as the
frame timestamp passed to the HW encoder. That led to bitrate
overshoots with HW encoders that infer the frame rate from timestamps,
either when frames were dropped before encoding (i.e., the frame rate
decreases according to the frame timestamps) or when
FramerateBitrateAdjuster was used.

Fixed this by using synthetic, monotonically increasing timestamps
calculated from the target frame rate provided by the bitrate adjuster.

Bug: webrtc:12982
Change-Id: I2454cd4e574bbea1cb9855ced4d998104845415c
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/228902
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Commit-Queue: Sergey Silkin <ssilkin@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#34810}
Authored by Sergey Silkin on 2021-08-17 20:10:28 +02:00; committed by WebRTC LUCI CQ
commit c68796e260, parent 51238e6c28
6 changed files with 150 additions and 49 deletions
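In HardwareVideoEncoder (see the diff below), the change boils down to keeping a running presentation timestamp and advancing it by one frame duration for every frame handed to the codec, with the duration derived from the bitrate adjuster's current frame rate rather than from capture times. A minimal sketch, reusing the names from the diff:

  // Sketch of the synthetic timestamp calculation (names follow the HardwareVideoEncoder diff).
  long presentationTimestampUs = nextPresentationTimestampUs;
  // Round the frame duration down to avoid bitrate overshoot.
  long frameDurationUs =
      (long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps());
  nextPresentationTimestampUs += frameDurationUs;
  // presentationTimestampUs is then used as the MediaCodec input/presentation timestamp
  // instead of the frame's capture time.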

BaseBitrateAdjuster.java

@ -13,12 +13,12 @@ package org.webrtc;
/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. */
class BaseBitrateAdjuster implements BitrateAdjuster {
protected int targetBitrateBps;
protected int targetFps;
protected int targetFramerateFps;
@Override
public void setTargets(int targetBitrateBps, int targetFps) {
public void setTargets(int targetBitrateBps, int targetFramerateFps) {
this.targetBitrateBps = targetBitrateBps;
this.targetFps = targetFps;
this.targetFramerateFps = targetFramerateFps;
}
@Override
@ -32,7 +32,7 @@ class BaseBitrateAdjuster implements BitrateAdjuster {
}
@Override
public int getCodecConfigFramerate() {
return targetFps;
public int getAdjustedFramerateFps() {
return targetFramerateFps;
}
}

BitrateAdjuster.java

@ -15,7 +15,7 @@ interface BitrateAdjuster {
/**
* Sets the target bitrate in bits per second and framerate in frames per second.
*/
void setTargets(int targetBitrateBps, int targetFps);
void setTargets(int targetBitrateBps, int targetFramerateFps);
/**
* Should be used to report the size of an encoded frame to the bitrate adjuster. Use
@ -26,6 +26,6 @@ interface BitrateAdjuster {
/** Gets the current bitrate. */
int getAdjustedBitrateBps();
/** Gets the framerate for initial codec configuration. */
int getCodecConfigFramerate();
/** Gets the current framerate. */
int getAdjustedFramerateFps();
}

DynamicBitrateAdjuster.java

@ -31,24 +31,24 @@ class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
private int bitrateAdjustmentScaleExp;
@Override
public void setTargets(int targetBitrateBps, int targetFps) {
public void setTargets(int targetBitrateBps, int targetFramerateFps) {
if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) {
// Rescale the accumulator level if the accumulator max decreases
deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps;
}
super.setTargets(targetBitrateBps, targetFps);
super.setTargets(targetBitrateBps, targetFramerateFps);
}
@Override
public void reportEncodedFrame(int size) {
if (targetFps == 0) {
if (targetFramerateFps == 0) {
return;
}
// Accumulate the difference between actual and expected frame sizes.
double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFps;
double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFramerateFps;
deviationBytes += (size - expectedBytesPerFrame);
timeSinceLastAdjustmentMs += 1000.0 / targetFps;
timeSinceLastAdjustmentMs += 1000.0 / targetFramerateFps;
// Adjust the bitrate when the encoder accumulates one second's worth of data in excess or
// shortfall of the target.
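To make the accumulation concrete: at, say, a 240 kbps target and 30 fps, the expected frame size is 240000 / 8 / 30 = 1000 bytes, so a 1400-byte encoded frame adds 400 bytes to deviationBytes, and timeSinceLastAdjustmentMs grows by 1000 / 30 ≈ 33 ms per reported frame. Per the comment above, the bitrate is adjusted once the deviation reaches roughly one second's worth of target data (30000 bytes in this example).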

FramerateBitrateAdjuster.java

@ -15,21 +15,12 @@ package org.webrtc;
* hardware codecs that assume the framerate never changes.
*/
class FramerateBitrateAdjuster extends BaseBitrateAdjuster {
private static final int INITIAL_FPS = 30;
private static final int DEFAULT_FRAMERATE_FPS = 30;
@Override
public void setTargets(int targetBitrateBps, int targetFps) {
if (this.targetFps == 0) {
// Framerate-based bitrate adjustment always initializes to the same framerate.
targetFps = INITIAL_FPS;
}
super.setTargets(targetBitrateBps, targetFps);
this.targetBitrateBps = this.targetBitrateBps * INITIAL_FPS / this.targetFps;
}
@Override
public int getCodecConfigFramerate() {
return INITIAL_FPS;
public void setTargets(int targetBitrateBps, int targetFramerateFps) {
// Keep frame rate unchanged and adjust bit rate.
this.targetFramerateFps = DEFAULT_FRAMERATE_FPS;
this.targetBitrateBps = targetBitrateBps * DEFAULT_FRAMERATE_FPS / targetFramerateFps;
}
}
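Put differently, the adjuster compensates for the requested frame rate instead of forwarding it: with, say, a 500 kbps target at 15 fps, the codec keeps seeing a fixed 30 fps while the bitrate is scaled to 500 * 30 / 15 = 1000 kbps, so the per-frame byte budget the codec computes matches the one implied by the real 15 fps. The testBitrateWithFramerateBitrateAdjuster case below checks exactly this doubling when the frame rate is halved.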

HardwareVideoEncoder.java

@ -149,6 +149,8 @@ class HardwareVideoEncoder implements VideoEncoder {
private boolean useSurfaceMode;
// --- Only accessed from the encoding thread.
// Presentation timestamp of next frame to encode.
private long nextPresentationTimestampUs;
// Presentation timestamp of the last requested (or forced) key frame.
private long lastKeyFrameNs;
@ -223,6 +225,7 @@ class HardwareVideoEncoder implements VideoEncoder {
private VideoCodecStatus initEncodeInternal() {
encodeThreadChecker.checkIsOnValidThread();
nextPresentationTimestampUs = 0;
lastKeyFrameNs = -1;
try {
@ -238,7 +241,7 @@ class HardwareVideoEncoder implements VideoEncoder {
format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
format.setInteger(MediaFormat.KEY_FRAME_RATE, bitrateAdjuster.getCodecConfigFramerate());
format.setInteger(MediaFormat.KEY_FRAME_RATE, bitrateAdjuster.getAdjustedFramerateFps());
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
if (codecType == VideoCodecMimeType.H264) {
String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID);
@ -375,11 +378,18 @@ class HardwareVideoEncoder implements VideoEncoder {
.setRotation(videoFrame.getRotation());
outputBuilders.offer(builder);
long presentationTimestampUs = nextPresentationTimestampUs;
// Round frame duration down to avoid bitrate overshoot.
long frameDurationUs =
(long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps());
nextPresentationTimestampUs += frameDurationUs;
final VideoCodecStatus returnValue;
if (useSurfaceMode) {
returnValue = encodeTextureBuffer(videoFrame);
returnValue = encodeTextureBuffer(videoFrame, presentationTimestampUs);
} else {
returnValue = encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize);
returnValue =
encodeByteBuffer(videoFrame, presentationTimestampUs, videoFrameBuffer, bufferSize);
}
// Check if the queue was successful.
@ -391,7 +401,8 @@ class HardwareVideoEncoder implements VideoEncoder {
return returnValue;
}
private VideoCodecStatus encodeTextureBuffer(VideoFrame videoFrame) {
private VideoCodecStatus encodeTextureBuffer(
VideoFrame videoFrame, long presentationTimestampUs) {
encodeThreadChecker.checkIsOnValidThread();
try {
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
@ -401,7 +412,7 @@ class HardwareVideoEncoder implements VideoEncoder {
VideoFrame derotatedFrame =
new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs());
videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */);
textureEglBase.swapBuffers(videoFrame.getTimestampNs());
textureEglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
} catch (RuntimeException e) {
Logging.e(TAG, "encodeTexture failed", e);
return VideoCodecStatus.ERROR;
@ -409,12 +420,9 @@ class HardwareVideoEncoder implements VideoEncoder {
return VideoCodecStatus.OK;
}
private VideoCodecStatus encodeByteBuffer(
VideoFrame videoFrame, VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentationTimestampUs,
VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
encodeThreadChecker.checkIsOnValidThread();
// Frame timestamp rounded to the nearest microsecond.
long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
// No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
int index;
try {
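Beyond this hunk, the computed presentationTimestampUs is what ends up being queued with the filled input buffer; a minimal sketch of that step, assuming the standard MediaCodec-style queueInputBuffer signature on the encoder's codec wrapper (the surrounding buffer handling is omitted):

  // Sketch (assumed continuation of encodeByteBuffer, not part of this diff): the synthetic
  // timestamp replaces the capture-time-derived value that was used before this change.
  codec.queueInputBuffer(index, /* offset= */ 0, bufferSize, presentationTimestampUs, /* flags= */ 0);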

HardwareVideoEncoderTest.java

@ -11,15 +11,18 @@
package org.webrtc;
import static com.google.common.truth.Truth.assertThat;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Bundle;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
@ -36,6 +39,7 @@ import org.webrtc.EncodedImage.FrameType;
import org.webrtc.FakeMediaCodecWrapper.State;
import org.webrtc.VideoCodecStatus;
import org.webrtc.VideoEncoder;
import org.webrtc.VideoEncoder.BitrateAllocation;
import org.webrtc.VideoEncoder.CodecSpecificInfo;
import org.webrtc.VideoEncoder.EncodeInfo;
import org.webrtc.VideoEncoder.Settings;
@ -57,6 +61,10 @@ public class HardwareVideoEncoderTest {
/* capabilities= */ new VideoEncoder.Capabilities(false /* lossNotification */));
private static final long POLL_DELAY_MS = 10;
private static final long DELIVER_ENCODED_IMAGE_DELAY_MS = 10;
private static final EncodeInfo ENCODE_INFO_KEY_FRAME =
new EncodeInfo(new FrameType[] {FrameType.VideoFrameKey});
private static final EncodeInfo ENCODE_INFO_DELTA_FRAME =
new EncodeInfo(new FrameType[] {FrameType.VideoFrameDelta});
private static class TestEncoder extends HardwareVideoEncoder {
private final Object deliverEncodedImageLock = new Object();
@ -114,12 +122,18 @@ public class HardwareVideoEncoderTest {
private class TestEncoderBuilder {
private VideoCodecMimeType codecType = VideoCodecMimeType.VP8;
private BitrateAdjuster bitrateAdjuster = new BaseBitrateAdjuster();
public TestEncoderBuilder setCodecType(VideoCodecMimeType codecType) {
this.codecType = codecType;
return this;
}
public TestEncoderBuilder setBitrateAdjuster(BitrateAdjuster bitrateAdjuster) {
this.bitrateAdjuster = bitrateAdjuster;
return this;
}
public TestEncoder build() {
return new TestEncoder((String name)
-> fakeMediaCodecWrapper,
@ -128,12 +142,19 @@ public class HardwareVideoEncoderTest {
/* yuvColorFormat= */ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
/* params= */ new HashMap<>(),
/* keyFrameIntervalSec= */ 0,
/* forceKeyFrameIntervalMs= */ 0,
/* bitrateAdjuster= */ new BaseBitrateAdjuster(),
/* forceKeyFrameIntervalMs= */ 0, bitrateAdjuster,
/* sharedContext= */ null);
}
}
private VideoFrame createTestVideoFrame(long timestampNs) {
byte[] i420 = CodecTestHelper.generateRandomData(
TEST_ENCODER_SETTINGS.width * TEST_ENCODER_SETTINGS.height * 3 / 2);
final VideoFrame.I420Buffer testBuffer =
CodecTestHelper.wrapI420(TEST_ENCODER_SETTINGS.width, TEST_ENCODER_SETTINGS.height, i420);
return new VideoFrame(testBuffer, /* rotation= */ 0, timestampNs);
}
@Mock VideoEncoder.Callback mockEncoderCallback;
private FakeMediaCodecWrapper fakeMediaCodecWrapper;
@ -201,21 +222,13 @@ public class HardwareVideoEncoderTest {
@Test
public void testDeliversOutputData() throws InterruptedException {
final int outputDataLength = 100;
// Set-up.
TestEncoder encoder = new TestEncoderBuilder().build();
encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);
byte[] i420 = CodecTestHelper.generateRandomData(
TEST_ENCODER_SETTINGS.width * TEST_ENCODER_SETTINGS.height * 3 / 2);
final VideoFrame.I420Buffer testBuffer =
CodecTestHelper.wrapI420(TEST_ENCODER_SETTINGS.width, TEST_ENCODER_SETTINGS.height, i420);
final VideoFrame testFrame =
new VideoFrame(testBuffer, /* rotation= */ 0, /* timestampNs= */ 42);
encoder.encode(testFrame, new EncodeInfo(new FrameType[] {FrameType.VideoFrameKey}));
encoder.encode(createTestVideoFrame(/* timestampNs= */ 42), ENCODE_INFO_KEY_FRAME);
// Test.
byte[] outputData = CodecTestHelper.generateRandomData(outputDataLength);
byte[] outputData = CodecTestHelper.generateRandomData(100);
fakeMediaCodecWrapper.addOutputData(outputData,
/* presentationTimestampUs= */ 0,
/* flags= */ MediaCodec.BUFFER_FLAG_SYNC_FRAME);
@ -264,4 +277,93 @@ public class HardwareVideoEncoderTest {
// Verify.
assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.RELEASED);
}
@Test
public void testFramerateWithFramerateBitrateAdjuster() {
// Enable FramerateBitrateAdjuster and initialize encoder with frame rate 15fps. Verify that our
// initial frame rate setting is ignored and media encoder is initialized with 30fps
// (FramerateBitrateAdjuster default).
HardwareVideoEncoder encoder =
new TestEncoderBuilder().setBitrateAdjuster(new FramerateBitrateAdjuster()).build();
encoder.initEncode(
new Settings(
/* numberOfCores= */ 1,
/* width= */ 640,
/* height= */ 480,
/* startBitrate= */ 10000,
/* maxFramerate= */ 15,
/* numberOfSimulcastStreams= */ 1,
/* automaticResizeOn= */ true,
/* capabilities= */ new VideoEncoder.Capabilities(false /* lossNotification */)),
mockEncoderCallback);
MediaFormat mediaFormat = fakeMediaCodecWrapper.getConfiguredFormat();
assertThat(mediaFormat.getInteger(MediaFormat.KEY_FRAME_RATE)).isEqualTo(30);
}
@Test
public void testBitrateWithFramerateBitrateAdjuster() throws InterruptedException {
// Enable FramerateBitrateAdjuster and change frame rate while encoding video. Verify that
// bitrate setting passed to media encoder is adjusted to compensate for changes in frame rate.
TestEncoder encoder =
new TestEncoderBuilder().setBitrateAdjuster(new FramerateBitrateAdjuster()).build();
encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);
encoder.encode(createTestVideoFrame(/* timestampNs= */ 0), ENCODE_INFO_KEY_FRAME);
// Reduce frame rate by half.
BitrateAllocation bitrateAllocation = new BitrateAllocation(
/* bitratesBbs= */ new int[][] {new int[] {TEST_ENCODER_SETTINGS.startBitrate}});
encoder.setRateAllocation(bitrateAllocation, TEST_ENCODER_SETTINGS.maxFramerate / 2);
// Generate output to trigger bitrate update in encoder wrapper.
fakeMediaCodecWrapper.addOutputData(
CodecTestHelper.generateRandomData(100), /* presentationTimestampUs= */ 0, /* flags= */ 0);
encoder.waitDeliverEncodedImage();
// Frame rate has been reduced by half. Verify that bitrate doubled.
ArgumentCaptor<Bundle> bundleCaptor = ArgumentCaptor.forClass(Bundle.class);
verify(fakeMediaCodecWrapper, times(2)).setParameters(bundleCaptor.capture());
Bundle params = bundleCaptor.getAllValues().get(1);
assertThat(params.containsKey(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE)).isTrue();
assertThat(params.getInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE))
.isEqualTo(TEST_ENCODER_SETTINGS.startBitrate * 2);
}
@Test
public void testTimestampsWithFramerateBitrateAdjuster() throws InterruptedException {
// Enable FramerateBitrateAdjuster and change frame rate while encoding video. Verify that the
// encoder ignores changes in frame rate and calculates frame timestamps based on a fixed frame
// rate of 30 fps.
TestEncoder encoder =
new TestEncoderBuilder().setBitrateAdjuster(new FramerateBitrateAdjuster()).build();
encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);
encoder.encode(createTestVideoFrame(/* timestampNs= */ 0), ENCODE_INFO_KEY_FRAME);
// Reduce frame rate by half.
BitrateAllocation bitrateAllocation = new BitrateAllocation(
/* bitratesBbs= */ new int[][] {new int[] {TEST_ENCODER_SETTINGS.startBitrate}});
encoder.setRateAllocation(bitrateAllocation, TEST_ENCODER_SETTINGS.maxFramerate / 2);
// Encoder is allowed to buffer up to 2 frames. Generate output to avoid frame dropping.
fakeMediaCodecWrapper.addOutputData(
CodecTestHelper.generateRandomData(100), /* presentationTimestampUs= */ 0, /* flags= */ 0);
encoder.waitDeliverEncodedImage();
encoder.encode(createTestVideoFrame(/* timestampNs= */ 1), ENCODE_INFO_DELTA_FRAME);
encoder.encode(createTestVideoFrame(/* timestampNs= */ 2), ENCODE_INFO_DELTA_FRAME);
ArgumentCaptor<Long> timestampCaptor = ArgumentCaptor.forClass(Long.class);
verify(fakeMediaCodecWrapper, times(3))
.queueInputBuffer(
/* index= */ anyInt(),
/* offset= */ anyInt(),
/* size= */ anyInt(), timestampCaptor.capture(),
/* flags= */ anyInt());
long frameDurationUs = SECONDS.toMicros(1) / 30;
assertThat(timestampCaptor.getAllValues())
.containsExactly(0L, frameDurationUs, 2 * frameDurationUs);
}
}