Change capture time format to nanoseconds in EncodedImage.

The millisecond field is deprecated and will be removed once the
dependencies have been updated.

BUG=webrtc:7760

Review-Url: https://codereview.webrtc.org/3010623002
Cr-Commit-Position: refs/heads/master@{#19622}
This commit is contained in:
sakal
2017-08-31 02:37:28 -07:00
committed by Commit Bot
parent 05ea2b39e0
commit e172d89f73
6 changed files with 53 additions and 41 deletions

View File

@ -11,6 +11,7 @@
package org.webrtc; package org.webrtc;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;
/** /**
* An encoded frame from a video stream. Used as an input for decoders and as an output for * An encoded frame from a video stream. Used as an input for decoders and as an output for
@@ -26,18 +27,20 @@ public class EncodedImage {
public final ByteBuffer buffer; public final ByteBuffer buffer;
public final int encodedWidth; public final int encodedWidth;
public final int encodedHeight; public final int encodedHeight;
public final long captureTimeMs; public final long captureTimeMs; // Deprecated
public final long captureTimeNs;
public final FrameType frameType; public final FrameType frameType;
public final int rotation; public final int rotation;
public final boolean completeFrame; public final boolean completeFrame;
public final Integer qp; public final Integer qp;
private EncodedImage(ByteBuffer buffer, int encodedWidth, int encodedHeight, long captureTimeMs, private EncodedImage(ByteBuffer buffer, int encodedWidth, int encodedHeight, long captureTimeNs,
FrameType frameType, int rotation, boolean completeFrame, Integer qp) { FrameType frameType, int rotation, boolean completeFrame, Integer qp) {
this.buffer = buffer; this.buffer = buffer;
this.encodedWidth = encodedWidth; this.encodedWidth = encodedWidth;
this.encodedHeight = encodedHeight; this.encodedHeight = encodedHeight;
this.captureTimeMs = captureTimeMs; this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs);
this.captureTimeNs = captureTimeNs;
this.frameType = frameType; this.frameType = frameType;
this.rotation = rotation; this.rotation = rotation;
this.completeFrame = completeFrame; this.completeFrame = completeFrame;
@@ -52,7 +55,7 @@ public class EncodedImage {
private ByteBuffer buffer; private ByteBuffer buffer;
private int encodedWidth; private int encodedWidth;
private int encodedHeight; private int encodedHeight;
private long captureTimeMs; private long captureTimeNs;
private EncodedImage.FrameType frameType; private EncodedImage.FrameType frameType;
private int rotation; private int rotation;
private boolean completeFrame; private boolean completeFrame;
@@ -75,8 +78,14 @@ public class EncodedImage {
return this; return this;
} }
@Deprecated
public Builder setCaptureTimeMs(long captureTimeMs) { public Builder setCaptureTimeMs(long captureTimeMs) {
this.captureTimeMs = captureTimeMs; this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs);
return this;
}
public Builder setCaptureTimeNs(long captureTimeNs) {
this.captureTimeNs = captureTimeNs;
return this; return this;
} }
@@ -101,7 +110,7 @@ public class EncodedImage {
} }
public EncodedImage createEncodedImage() { public EncodedImage createEncodedImage() {
return new EncodedImage(buffer, encodedWidth, encodedHeight, captureTimeMs, frameType, return new EncodedImage(buffer, encodedWidth, encodedHeight, captureTimeNs, frameType,
rotation, completeFrame, qp); rotation, completeFrame, qp);
} }
} }

View File

@@ -80,7 +80,7 @@ public class HardwareVideoEncoderTest {
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]); VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
final long presentationTimestampUs = 20000; final long presentationTimestampNs = 20000;
final CountDownLatch encodeDone = new CountDownLatch(1); final CountDownLatch encodeDone = new CountDownLatch(1);
VideoEncoder.Callback callback = new VideoEncoder.Callback() { VideoEncoder.Callback callback = new VideoEncoder.Callback() {
@@ -89,7 +89,7 @@ public class HardwareVideoEncoderTest {
assertTrue(image.buffer.capacity() > 0); assertTrue(image.buffer.capacity() > 0);
assertEquals(image.encodedWidth, SETTINGS.width); assertEquals(image.encodedWidth, SETTINGS.width);
assertEquals(image.encodedHeight, SETTINGS.height); assertEquals(image.encodedHeight, SETTINGS.height);
assertEquals(image.captureTimeMs, presentationTimestampUs / 1000); assertEquals(image.captureTimeNs, presentationTimestampNs);
assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey); assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
assertEquals(image.rotation, 0); assertEquals(image.rotation, 0);
assertTrue(image.completeFrame); assertTrue(image.completeFrame);
@@ -101,7 +101,7 @@ public class HardwareVideoEncoderTest {
assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK); assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height); VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampUs * 1000); VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo( VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey}); new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
@@ -130,7 +130,7 @@ public class HardwareVideoEncoderTest {
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]); VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
final long presentationTimestampUs = 20000; final long presentationTimestampNs = 20000;
final CountDownLatch encodeDone = new CountDownLatch(1); final CountDownLatch encodeDone = new CountDownLatch(1);
VideoEncoder.Callback callback = new VideoEncoder.Callback() { VideoEncoder.Callback callback = new VideoEncoder.Callback() {
@@ -139,7 +139,7 @@ public class HardwareVideoEncoderTest {
assertTrue(image.buffer.capacity() > 0); assertTrue(image.buffer.capacity() > 0);
assertEquals(image.encodedWidth, SETTINGS.width); assertEquals(image.encodedWidth, SETTINGS.width);
assertEquals(image.encodedHeight, SETTINGS.height); assertEquals(image.encodedHeight, SETTINGS.height);
assertEquals(image.captureTimeMs, presentationTimestampUs / 1000); assertEquals(image.captureTimeNs, presentationTimestampNs);
assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey); assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
assertEquals(image.rotation, 0); assertEquals(image.rotation, 0);
assertTrue(image.completeFrame); assertTrue(image.completeFrame);
@@ -193,7 +193,7 @@ public class HardwareVideoEncoderTest {
return null; return null;
} }
}; };
VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampUs * 1000); VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo( VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey}); new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});

View File

@@ -20,6 +20,7 @@ import java.io.IOException;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.concurrent.BlockingDeque; import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import org.webrtc.ThreadUtils.ThreadChecker; import org.webrtc.ThreadUtils.ThreadChecker;
/** Android hardware video decoder. */ /** Android hardware video decoder. */
@@ -282,8 +283,8 @@ class HardwareVideoDecoder
frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation)); frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation));
try { try {
codec.queueInputBuffer( codec.queueInputBuffer(index, 0 /* offset */, size,
index, 0 /* offset */, size, frame.captureTimeMs * 1000, 0 /* flags */); TimeUnit.NANOSECONDS.toMicros(frame.captureTimeNs), 0 /* flags */);
} catch (IllegalStateException e) { } catch (IllegalStateException e) {
Logging.e(TAG, "queueInputBuffer failed", e); Logging.e(TAG, "queueInputBuffer failed", e);
frameInfos.pollLast(); frameInfos.pollLast();

View File

@@ -25,6 +25,7 @@ import java.util.Deque;
import java.util.HashSet; import java.util.HashSet;
import java.util.Set; import java.util.Set;
import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
/** Android hardware video encoder. */ /** Android hardware video encoder. */
@TargetApi(19) @TargetApi(19)
@@ -55,9 +56,9 @@ class HardwareVideoEncoder implements VideoEncoder {
private final int keyFrameIntervalSec; private final int keyFrameIntervalSec;
// Interval at which to force a key frame. Used to reduce color distortions caused by some // Interval at which to force a key frame. Used to reduce color distortions caused by some
// Qualcomm video encoders. // Qualcomm video encoders.
private final long forcedKeyFrameMs; private final long forcedKeyFrameNs;
// Presentation timestamp of the last requested (or forced) key frame. // Presentation timestamp of the last requested (or forced) key frame.
private long lastKeyFrameMs; private long lastKeyFrameNs;
private final BitrateAdjuster bitrateAdjuster; private final BitrateAdjuster bitrateAdjuster;
private int adjustedBitrate; private int adjustedBitrate;
@@ -125,7 +126,7 @@ class HardwareVideoEncoder implements VideoEncoder {
this.inputColorFormat = null; this.inputColorFormat = null;
} }
this.keyFrameIntervalSec = keyFrameIntervalSec; this.keyFrameIntervalSec = keyFrameIntervalSec;
this.forcedKeyFrameMs = forceKeyFrameIntervalMs; this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
this.bitrateAdjuster = bitrateAdjuster; this.bitrateAdjuster = bitrateAdjuster;
this.outputBuilders = new LinkedBlockingDeque<>(); this.outputBuilders = new LinkedBlockingDeque<>();
this.textureContext = textureContext; this.textureContext = textureContext;
@@ -150,7 +151,7 @@ class HardwareVideoEncoder implements VideoEncoder {
this.callback = callback; this.callback = callback;
lastKeyFrameMs = -1; lastKeyFrameNs = -1;
try { try {
codec = MediaCodec.createByCodecName(codecName); codec = MediaCodec.createByCodecName(codecName);
@@ -257,11 +258,8 @@ class HardwareVideoEncoder implements VideoEncoder {
} }
} }
// Frame timestamp rounded to the nearest microsecond and millisecond. if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) {
long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000; requestKeyFrame(videoFrame.getTimestampNs());
long presentationTimestampMs = (presentationTimestampUs + 500) / 1000;
if (requestedKeyFrame || shouldForceKeyFrame(presentationTimestampMs)) {
requestKeyFrame(presentationTimestampMs);
} }
VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer(); VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
@@ -269,7 +267,7 @@ class HardwareVideoEncoder implements VideoEncoder {
// subsampled at one byte per four pixels. // subsampled at one byte per four pixels.
int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2; int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
EncodedImage.Builder builder = EncodedImage.builder() EncodedImage.Builder builder = EncodedImage.builder()
.setCaptureTimeMs(presentationTimestampMs) .setCaptureTimeNs(videoFrame.getTimestampNs())
.setCompleteFrame(true) .setCompleteFrame(true)
.setEncodedWidth(videoFrame.getBuffer().getWidth()) .setEncodedWidth(videoFrame.getBuffer().getWidth())
.setEncodedHeight(videoFrame.getBuffer().getHeight()) .setEncodedHeight(videoFrame.getBuffer().getHeight())
@@ -287,7 +285,7 @@ class HardwareVideoEncoder implements VideoEncoder {
if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) { if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) {
Logging.w(TAG, "Encoding texture buffer in byte mode; this may be inefficient"); Logging.w(TAG, "Encoding texture buffer in byte mode; this may be inefficient");
} }
return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize, presentationTimestampUs); return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize);
} }
} }
@@ -321,8 +319,11 @@ class HardwareVideoEncoder implements VideoEncoder {
return VideoCodecStatus.OK; return VideoCodecStatus.OK;
} }
private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, private VideoCodecStatus encodeByteBuffer(
VideoFrame.Buffer videoFrameBuffer, int bufferSize, long presentationTimestampUs) { VideoFrame videoFrame, VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
// Frame timestamp rounded to the nearest microsecond.
long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
// No timeout. Don't block for an input buffer, drop frames if the encoder falls behind. // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
int index; int index;
try { try {
@@ -397,11 +398,11 @@ class HardwareVideoEncoder implements VideoEncoder {
return initEncodeInternal(newWidth, newHeight, 0, 0, callback); return initEncodeInternal(newWidth, newHeight, 0, 0, callback);
} }
private boolean shouldForceKeyFrame(long presentationTimestampMs) { private boolean shouldForceKeyFrame(long presentationTimestampNs) {
return forcedKeyFrameMs > 0 && presentationTimestampMs > lastKeyFrameMs + forcedKeyFrameMs; return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
} }
private void requestKeyFrame(long presentationTimestampMs) { private void requestKeyFrame(long presentationTimestampNs) {
// Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
// indicate this in queueInputBuffer() below and guarantee _this_ frame // indicate this in queueInputBuffer() below and guarantee _this_ frame
// be encoded as a key frame, but sadly that flag is ignored. Instead, // be encoded as a key frame, but sadly that flag is ignored. Instead,
@@ -414,7 +415,7 @@ class HardwareVideoEncoder implements VideoEncoder {
Logging.e(TAG, "requestKeyFrame failed", e); Logging.e(TAG, "requestKeyFrame failed", e);
return; return;
} }
lastKeyFrameMs = presentationTimestampMs; lastKeyFrameNs = presentationTimestampNs;
} }
private Thread createOutputThread() { private Thread createOutputThread() {

View File

@@ -15,6 +15,7 @@
#include "webrtc/modules/video_coding/utility/vp8_header_parser.h" #include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
#include "webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h" #include "webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h"
#include "webrtc/rtc_base/logging.h" #include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/timeutils.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h" #include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace webrtc { namespace webrtc {
@@ -127,7 +128,8 @@ int32_t VideoDecoderWrapper::Decode(
ScopedLocalRefFrame local_ref_frame(jni); ScopedLocalRefFrame local_ref_frame(jni);
FrameExtraInfo frame_extra_info; FrameExtraInfo frame_extra_info;
frame_extra_info.capture_time_ms = input_image.capture_time_ms_; frame_extra_info.capture_time_ns =
input_image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec;
frame_extra_info.timestamp_rtp = input_image._timeStamp; frame_extra_info.timestamp_rtp = input_image._timeStamp;
frame_extra_info.qp = frame_extra_info.qp =
qp_parsing_enabled_ ? ParseQP(input_image) : rtc::Optional<uint8_t>(); qp_parsing_enabled_ ? ParseQP(input_image) : rtc::Optional<uint8_t>();
@@ -174,7 +176,6 @@ void VideoDecoderWrapper::OnDecodedFrame(JNIEnv* jni,
jobject jqp) { jobject jqp) {
const jlong capture_time_ns = const jlong capture_time_ns =
jni->CallLongMethod(jframe, video_frame_get_timestamp_ns_method_); jni->CallLongMethod(jframe, video_frame_get_timestamp_ns_method_);
const uint32_t capture_time_ms = capture_time_ns / 1000 / 1000;
FrameExtraInfo frame_extra_info; FrameExtraInfo frame_extra_info;
do { do {
if (frame_extra_infos_.empty()) { if (frame_extra_infos_.empty()) {
@@ -186,7 +187,7 @@ void VideoDecoderWrapper::OnDecodedFrame(JNIEnv* jni,
frame_extra_infos_.pop_front(); frame_extra_infos_.pop_front();
// The decoder might drop frames, so iterate through the queue until we // The decoder might drop frames, so iterate through the queue until we
// find a matching timestamp. // find a matching timestamp.
} while (frame_extra_info.capture_time_ms != capture_time_ms); } while (frame_extra_info.capture_time_ns != capture_time_ns);
VideoFrame frame = android_video_buffer_factory_.CreateFrame( VideoFrame frame = android_video_buffer_factory_.CreateFrame(
jni, jframe, frame_extra_info.timestamp_rtp); jni, jframe, frame_extra_info.timestamp_rtp);
@@ -237,12 +238,12 @@ jobject VideoDecoderWrapper::ConvertEncodedImageToJavaEncodedImage(
if (image.qp_ != -1) { if (image.qp_ != -1) {
qp = jni->NewObject(*integer_class_, integer_constructor_, image.qp_); qp = jni->NewObject(*integer_class_, integer_constructor_, image.qp_);
} }
return jni->NewObject(*encoded_image_class_, encoded_image_constructor_, return jni->NewObject(
buffer, static_cast<jint>(image._encodedWidth), *encoded_image_class_, encoded_image_constructor_, buffer,
static_cast<jint>(image._encodedHeight), static_cast<jint>(image._encodedWidth),
static_cast<jlong>(image.capture_time_ms_), frame_type, static_cast<jint>(image._encodedHeight),
static_cast<jint>(image.rotation_), static_cast<jlong>(image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec),
image._completeFrame, qp); frame_type, static_cast<jint>(image.rotation_), image._completeFrame, qp);
} }
int32_t VideoDecoderWrapper::HandleReturnCode(JNIEnv* jni, jobject code) { int32_t VideoDecoderWrapper::HandleReturnCode(JNIEnv* jni, jobject code) {

View File

@@ -58,7 +58,7 @@ class VideoDecoderWrapper : public VideoDecoder {
private: private:
struct FrameExtraInfo { struct FrameExtraInfo {
uint32_t capture_time_ms; // Used as an identifier of the frame. uint64_t capture_time_ns; // Used as an identifier of the frame.
uint32_t timestamp_rtp; uint32_t timestamp_rtp;
rtc::Optional<uint8_t> qp; rtc::Optional<uint8_t> qp;