Use correct presentationTimestampUs for VideoFrames in old encoder.
In MediaCodecVideoEncoder, the VideoFrame timestamp was used as the presentation timestamp. With this change, the timestamp maintained in the C++ code is used instead. This matches the behaviour of the old frame callbacks.

Bug: b/72832862
Change-Id: I1f0543ebe837ccac22c83a81a81f3ea128e2a866
Reviewed-on: https://webrtc-review.googlesource.com/47381
Reviewed-by: Anders Carlsson <andersc@webrtc.org>
Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21872}
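As context for the change, here is a minimal, self-contained Java sketch of the two unit conversions involved. The class and helper names are illustrative only, not WebRTC API: before this change the Java side derived the microsecond presentation timestamp from the frame's nanosecond timestamp; after it, the microsecond value arrives from C++ and is only converted back to nanoseconds for the EGL surface path.

import java.util.concurrent.TimeUnit;

// Illustrative sketch only; this class is not part of WebRTC.
class PresentationTimestampSketch {
  // Old behaviour: derive the microsecond presentation timestamp from the
  // VideoFrame's nanosecond capture timestamp on the Java side.
  static long fromFrameTimestampNs(long frameTimestampNs) {
    return TimeUnit.NANOSECONDS.toMicros(frameTimestampNs);
  }

  // New behaviour: the microsecond timestamp is supplied by the C++ encoder;
  // the Java surface path only converts it back to nanoseconds for
  // eglBase.swapBuffers().
  static long toSwapBuffersNs(long presentationTimestampUs) {
    return TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs);
  }

  public static void main(String[] args) {
    long frameTimestampNs = 33_000_000L; // a 33 ms capture timestamp
    System.out.println(fromFrameTimestampNs(frameTimestampNs)); // 33000
    System.out.println(toSwapBuffersNs(33_000L));               // 33000000
  }
}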
@@ -634,10 +634,10 @@ public class MediaCodecVideoEncoder {
    * Encodes a new style VideoFrame. |bufferIndex| is -1 if we are not encoding in surface mode.
    */
   @CalledByNativeUnchecked
-  boolean encodeFrame(long nativeEncoder, boolean isKeyframe, VideoFrame frame, int bufferIndex) {
+  boolean encodeFrame(long nativeEncoder, boolean isKeyframe, VideoFrame frame, int bufferIndex,
+      long presentationTimestampUs) {
     checkOnMediaCodecThread();
     try {
-      long presentationTimestampUs = TimeUnit.NANOSECONDS.toMicros(frame.getTimestampNs());
       checkKeyFrameRequired(isKeyframe, presentationTimestampUs);
 
       VideoFrame.Buffer buffer = frame.getBuffer();
@@ -649,7 +649,7 @@ public class MediaCodecVideoEncoder {
         GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
         VideoFrameDrawer.drawTexture(drawer, textureBuffer, new Matrix() /* renderMatrix */, width,
             height, 0 /* viewportX */, 0 /* viewportY */, width, height);
-        eglBase.swapBuffers(frame.getTimestampNs());
+        eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
       } else {
         VideoFrame.I420Buffer i420Buffer = buffer.toI420();
         final int chromaHeight = (height + 1) / 2;
@@ -897,7 +897,7 @@ bool MediaCodecVideoEncoder::EncodeJavaFrame(JNIEnv* jni,
                                              int input_buffer_index) {
   bool encode_status = Java_MediaCodecVideoEncoder_encodeFrame(
       jni, j_media_codec_video_encoder_, jlongFromPointer(this), key_frame,
-      frame, input_buffer_index);
+      frame, input_buffer_index, current_timestamp_us_);
   if (CheckException(jni)) {
     ALOGE << "Exception in encode frame.";
     ProcessHWError(true /* reset_if_fallback_unavailable */);
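The diff does not show how current_timestamp_us_ is maintained on the C++ side. As a loose illustration of the pattern the commit message describes (a presentation timestamp owned by the encoder rather than read from each frame), here is a hypothetical Java sketch that advances a running microsecond timestamp by the expected frame interval; the actual bookkeeping in the native encoder may differ.

// Hypothetical illustration only; the real bookkeeping lives in the C++
// MediaCodecVideoEncoder and is not shown in this diff.
class RunningTimestampSketch {
  private long currentTimestampUs = 0;

  // Advance the encoder-owned timestamp by one expected frame interval
  // instead of reading the capture timestamp of each incoming frame.
  long nextPresentationTimestampUs(int targetFps) {
    long timestampUs = currentTimestampUs;
    currentTimestampUs += 1_000_000L / targetFps;
    return timestampUs;
  }

  public static void main(String[] args) {
    RunningTimestampSketch sketch = new RunningTimestampSketch();
    for (int i = 0; i < 3; i++) {
      System.out.println(sketch.nextPresentationTimestampUs(30)); // 0, 33333, 66666
    }
  }
}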