Add rotation to EncodedImage and make sure it is passed through encoders.

This fixes a potential race where the rotation information of a sent frame
does not match the encoded frame.

BUG=webrtc:5783
TEST=Run AppRTCDemo on iOS and Android, with and without capture-to-texture, with both VP8 and H264.
R=magjed@webrtc.org, pbos@webrtc.org, tkchin@webrtc.org
TBR=tkchin_webrtc  // For the iOS changes.

Review URL: https://codereview.webrtc.org/1886113003 .

Cr-Commit-Position: refs/heads/master@{#12426}
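For readers outside the encoder code, a minimal sketch of the idea behind the change follows. Only EncodedImage::rotation_, the frame accessors, and the VideoRotation values appear in the hunks below; the rest are simplified stand-ins, not the real WebRTC declarations. The point is that rotation is copied from the input frame into its encoded image at encode time, so downstream code reads a value paired with that exact frame instead of querying shared state that a later frame may already have updated.

#include <cstdint>

// Simplified stand-ins (the real types live in the WebRTC headers).
enum VideoRotation {
  kVideoRotation_0 = 0,
  kVideoRotation_90 = 90,
  kVideoRotation_180 = 180,
  kVideoRotation_270 = 270
};

struct VideoFrame {
  uint32_t timestamp() const { return timestamp_; }
  VideoRotation rotation() const { return rotation_; }
  uint32_t timestamp_ = 0;
  VideoRotation rotation_ = kVideoRotation_0;
};

struct EncodedImage {
  uint32_t _timeStamp = 0;
  VideoRotation rotation_ = kVideoRotation_0;  // Field added by this CL.
};

// The per-frame copy each encoder now performs: the rotation of this exact
// input frame travels with its encoded output instead of being looked up
// later from state that another frame may already have overwritten.
EncodedImage EncodeSketch(const VideoFrame& frame) {
  EncodedImage encoded;
  encoded._timeStamp = frame.timestamp();
  encoded.rotation_ = frame.rotation();
  return encoded;
}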
@@ -393,6 +393,7 @@ int32_t H264EncoderImpl::Encode(
   encoded_image_._timeStamp = frame.timestamp();
   encoded_image_.ntp_time_ms_ = frame.ntp_time_ms();
   encoded_image_.capture_time_ms_ = frame.render_time_ms();
+  encoded_image_.rotation_ = frame.rotation();
   encoded_image_._frameType = EVideoFrameType_to_FrameType(info.eFrameType);
 
   // Split encoded image up into fragments. This also updates |encoded_image_|.
@@ -118,8 +118,14 @@ struct FrameEncodeParams {
                     int32_t w,
                     int32_t h,
                     int64_t rtms,
-                    uint32_t ts)
-      : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts) {
+                    uint32_t ts,
+                    webrtc::VideoRotation r)
+      : encoder(e),
+        width(w),
+        height(h),
+        render_time_ms(rtms),
+        timestamp(ts),
+        rotation(r) {
     if (csi) {
       codec_specific_info = *csi;
     } else {
@@ -133,6 +139,7 @@ struct FrameEncodeParams {
   int32_t height;
   int64_t render_time_ms;
   uint32_t timestamp;
+  webrtc::VideoRotation rotation;
 };
 
 // We receive I420Frames as input, but we need to feed CVPixelBuffers into the
@@ -185,7 +192,8 @@ void VTCompressionOutputCallback(void* encoder,
   encode_params->encoder->OnEncodedFrame(
       status, info_flags, sample_buffer, encode_params->codec_specific_info,
       encode_params->width, encode_params->height,
-      encode_params->render_time_ms, encode_params->timestamp);
+      encode_params->render_time_ms, encode_params->timestamp,
+      encode_params->rotation);
 }
 
 }  // namespace internal
@@ -306,7 +314,7 @@ int H264VideoToolboxEncoder::Encode(
   std::unique_ptr<internal::FrameEncodeParams> encode_params;
   encode_params.reset(new internal::FrameEncodeParams(
       this, codec_specific_info, width_, height_, input_image.render_time_ms(),
-      input_image.timestamp()));
+      input_image.timestamp(), input_image.rotation()));
 
   // Update the bitrate if needed.
   SetBitrateBps(bitrate_adjuster_.GetAdjustedBitrateBps());
@@ -471,7 +479,8 @@ void H264VideoToolboxEncoder::OnEncodedFrame(
     int32_t width,
     int32_t height,
     int64_t render_time_ms,
-    uint32_t timestamp) {
+    uint32_t timestamp,
+    VideoRotation rotation) {
   if (status != noErr) {
     LOG(LS_ERROR) << "H264 encode failed.";
     return;
@@ -511,6 +520,7 @@ void H264VideoToolboxEncoder::OnEncodedFrame(
       is_keyframe ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta;
   frame.capture_time_ms_ = render_time_ms;
   frame._timeStamp = timestamp;
+  frame.rotation_ = rotation;
 
   int result = callback_->Encoded(frame, &codec_specific_info, header.get());
   if (result != 0) {
@@ -12,6 +12,7 @@
 #ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_ENCODER_H_
 #define WEBRTC_MODULES_VIDEO_CODING_CODECS_H264_H264_VIDEO_TOOLBOX_ENCODER_H_
 
+#include "webrtc/common_video/rotation.h"
 #include "webrtc/modules/video_coding/codecs/h264/include/h264.h"
 #include "webrtc/modules/video_coding/include/bitrate_adjuster.h"
 
@@ -58,7 +59,8 @@ class H264VideoToolboxEncoder : public H264Encoder {
                       int32_t width,
                       int32_t height,
                       int64_t render_time_ms,
-                      uint32_t timestamp);
+                      uint32_t timestamp,
+                      VideoRotation rotation);
 
  private:
   int ResetCompressionSession();
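The VideoToolbox hunks above follow a common pattern for asynchronous encoders: every per-frame value the completion callback will need, rotation now included, is snapshotted into the FrameEncodeParams object at submit time and handed back with the callback, rather than re-read from encoder members that a later Encode() call could have changed. A minimal, self-contained sketch of that pattern (names other than FrameEncodeParams and rotation are hypothetical, and the callback is invoked synchronously only for brevity):

#include <cstdint>
#include <functional>
#include <memory>

enum VideoRotation { kVideoRotation_0, kVideoRotation_90,
                     kVideoRotation_180, kVideoRotation_270 };

// Per-frame metadata captured when the frame is submitted for encoding.
struct FrameEncodeParams {
  int64_t render_time_ms;
  uint32_t timestamp;
  VideoRotation rotation;  // Carried alongside the existing fields.
};

// Stand-in for the asynchronous VTCompressionSession encode: the real API
// runs the callback later on another thread; here it is invoked directly.
void EncodeAsync(std::unique_ptr<FrameEncodeParams> params,
                 const std::function<void(const FrameEncodeParams&)>& done) {
  done(*params);  // |params| owns the snapshot until the callback fires.
}

int main() {
  std::unique_ptr<FrameEncodeParams> params(
      new FrameEncodeParams{/*render_time_ms=*/0, /*timestamp=*/9000,
                            /*rotation=*/kVideoRotation_90});
  EncodeAsync(std::move(params), [](const FrameEncodeParams& p) {
    // OnEncodedFrame() equivalent: p.rotation describes the frame that was
    // just encoded, because it rode along in the params object.
    (void)p.rotation;
  });
  return 0;
}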
@@ -1024,6 +1024,7 @@ int VP8EncoderImpl::GetEncodedPartitions(const VideoFrame& input_image,
       encoded_images_[encoder_idx]._timeStamp = input_image.timestamp();
       encoded_images_[encoder_idx].capture_time_ms_ =
           input_image.render_time_ms();
+      encoded_images_[encoder_idx].rotation_ = input_image.rotation();
 
       int qp = -1;
       vpx_codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER_64, &qp);
@@ -692,6 +692,7 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
   TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_._length);
   encoded_image_._timeStamp = input_image_->timestamp();
   encoded_image_.capture_time_ms_ = input_image_->render_time_ms();
+  encoded_image_.rotation_ = input_image_->rotation();
   encoded_image_._encodedHeight = raw_->d_h;
   encoded_image_._encodedWidth = raw_->d_w;
   int qp = -1;
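On the consumer side, once VP8, VP9, and both H264 encoders populate rotation_, whatever receives the Encoded() callback can read the rotation belonging to that specific encoded frame (typically this is what ends up feeding the video-orientation RTP header extension). A hedged sketch using stand-in types that mirror only the field shapes visible in the hunks above:

#include <cstdint>
#include <cstdio>

enum VideoRotation { kVideoRotation_0 = 0, kVideoRotation_90 = 90,
                     kVideoRotation_180 = 180, kVideoRotation_270 = 270 };

// Stand-in mirroring the EncodedImage fields used in the hunks above.
struct EncodedImage {
  uint32_t _timeStamp = 0;
  VideoRotation rotation_ = kVideoRotation_0;
};

// Hypothetical sink playing the role of the registered encoded-image callback
// (codec-specific info and fragmentation arguments omitted for brevity).
struct EncodedImageSink {
  int32_t Encoded(const EncodedImage& image) {
    // A packetizer could map this onto the orientation header extension; the
    // sketch just prints it to show the value is now per-frame.
    std::printf("frame ts=%u rotation=%d\n",
                static_cast<unsigned>(image._timeStamp),
                static_cast<int>(image.rotation_));
    return 0;
  }
};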