Remove codec thread from MediaCodecVideoEncoder.

After this change, all calls to MediaCodecVideoEncoder must be made on
the same task queue (currently the encoder queue from the ViE encoder).
The OnCodecThread suffix is removed from method names since it is no
longer meaningful. A minimal sketch of the resulting calling contract
follows the commit metadata below.

BUG=webrtc:6290

Review-Url: https://codereview.webrtc.org/2669093004
Cr-Commit-Position: refs/heads/master@{#16792}
Author: sakal
Date: 2017-02-23 02:25:20 -08:00
Committed by: Commit bot
Parent: df92c5cb8c
Commit: 02f994b4e9
2 changed files with 212 additions and 213 deletions
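The calling contract introduced by this change can be illustrated with a minimal sketch (not part of this CL): every call into the encoder is posted onto one rtc::TaskQueue, mirroring how the encoder queue in the ViE encoder is expected to drive it. The names InitTask and InitOnEncoderQueue are hypothetical, include paths reflect the tree layout assumed at this revision, and the sketch assumes rtc::TaskQueue's PostTask(std::unique_ptr<rtc::QueuedTask>) overload.

#include <memory>

#include "webrtc/base/task_queue.h"
#include "webrtc/video_encoder.h"

// Hypothetical task that runs InitEncode() on the encoder queue; after this
// change Encode(), SetRateAllocation() and Release() must be posted to the
// same queue.
class InitTask : public rtc::QueuedTask {
 public:
  InitTask(webrtc::VideoEncoder* encoder, const webrtc::VideoCodec* settings)
      : encoder_(encoder), settings_(settings) {}

  bool Run() override {
    encoder_->InitEncode(settings_, 1 /* number_of_cores */,
                         1200 /* max_payload_size */);
    return true;  // Let the task queue delete this task.
  }

 private:
  webrtc::VideoEncoder* const encoder_;
  const webrtc::VideoCodec* const settings_;
};

// Hypothetical helper: post initialization onto the single encoder queue.
void InitOnEncoderQueue(rtc::TaskQueue* encoder_queue,
                        webrtc::VideoEncoder* encoder,
                        const webrtc::VideoCodec* settings) {
  encoder_queue->PostTask(
      std::unique_ptr<rtc::QueuedTask>(new InitTask(encoder, settings)));
}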


@@ -25,9 +25,11 @@
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/sequenced_task_checker.h"
#include "webrtc/base/task_queue.h"
#include "webrtc/base/thread.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/base/weak_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/common_video/h264/h264_bitstream_parser.h"
#include "webrtc/common_video/h264/h264_common.h"
@@ -90,18 +92,16 @@ const char kH264HighProfileFieldTrial[] = "WebRTC-H264HighProfile";
// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
// HW-backed video encode. This C++ class is implemented as a very thin shim,
// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
// MediaCodecVideoEncoder is created, operated, and destroyed on a single
// thread, currently the libjingle Worker thread.
class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
public rtc::MessageHandler {
// MediaCodecVideoEncoder must be operated on a single task queue, currently
// this is the encoder queue from ViE encoder.
class MediaCodecVideoEncoder : public webrtc::VideoEncoder {
public:
virtual ~MediaCodecVideoEncoder();
MediaCodecVideoEncoder(JNIEnv* jni,
const cricket::VideoCodec& codec,
jobject egl_context);
// webrtc::VideoEncoder implementation. Everything trampolines to
// |codec_thread_| for execution.
// webrtc::VideoEncoder implementation.
int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
int32_t /* number_of_cores */,
size_t /* max_payload_size */) override;
@@ -116,55 +116,53 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
int32_t SetRateAllocation(const webrtc::BitrateAllocation& rate_allocation,
uint32_t frame_rate) override;
// rtc::MessageHandler implementation.
void OnMessage(rtc::Message* msg) override;
bool SupportsNativeHandle() const override { return egl_context_ != nullptr; }
const char* ImplementationName() const override;
private:
// ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
// InitEncodeOnCodecThread() in an attempt to restore the codec to an
// operable state. Necessary after all manner of OMX-layer errors.
// Returns true if the codec was reset successfully.
bool ResetCodecOnCodecThread();
class EncodeTask : public rtc::QueuedTask {
public:
EncodeTask(rtc::WeakPtr<MediaCodecVideoEncoder> encoder);
bool Run() override;
private:
rtc::WeakPtr<MediaCodecVideoEncoder> encoder_;
};
// ResetCodec() calls Release() and InitEncodeInternal() in an attempt to
// restore the codec to an operable state. Necessary after all manner of
// OMX-layer errors. Returns true if the codec was reset successfully.
bool ResetCodec();
// Fallback to a software encoder if one is supported else try to reset the
// encoder. Called with |reset_if_fallback_unavailable| equal to false from
// init/release encoder so that we don't go into infinite recursion.
// Returns true if the codec was reset successfully.
bool ProcessHWErrorOnCodecThread(bool reset_if_fallback_unavailable);
bool ProcessHWError(bool reset_if_fallback_unavailable);
// Calls ProcessHWErrorOnCodecThread(true). Returns
// WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE if sw_fallback_required_ was set or
// WEBRTC_VIDEO_CODEC_ERROR otherwise.
int32_t ProcessHWErrorOnEncodeOnCodecThread();
// Calls ProcessHWError(true). Returns WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE if
// sw_fallback_required_ was set or WEBRTC_VIDEO_CODEC_ERROR otherwise.
int32_t ProcessHWErrorOnEncode();
// Implementation of webrtc::VideoEncoder methods above, all running on the
// codec thread exclusively.
//
// If width==0 then this is assumed to be a re-initialization and the
// previously-current values are reused instead of the passed parameters
// (makes it easier to reason about thread-safety).
int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
bool use_surface);
int32_t InitEncodeInternal(int width,
int height,
int kbps,
int fps,
bool use_surface);
// Reconfigure to match |frame| in width, height. Also reconfigures the
// encoder if |frame| is a texture/byte buffer and the encoder is initialized
// for byte buffer/texture. Returns false if reconfiguring fails.
bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
int32_t EncodeOnCodecThread(
const webrtc::VideoFrame& input_image,
const std::vector<webrtc::FrameType>* frame_types,
const int64_t frame_input_time_ms);
bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
bool EncodeTextureOnCodecThread(JNIEnv* jni,
bool key_frame, const webrtc::VideoFrame& frame);
int32_t RegisterEncodeCompleteCallbackOnCodecThread(
webrtc::EncodedImageCallback* callback);
int32_t ReleaseOnCodecThread();
int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
bool MaybeReconfigureEncoder(const webrtc::VideoFrame& frame);
bool EncodeByteBuffer(JNIEnv* jni,
bool key_frame,
const webrtc::VideoFrame& frame,
int input_buffer_index);
bool EncodeTexture(JNIEnv* jni,
bool key_frame,
const webrtc::VideoFrame& frame);
// Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
@@ -182,18 +180,22 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
// Displays encoder statistics.
void LogStatistics(bool force_log);
#if RTC_DCHECK_IS_ON
// Mutex for protecting inited_. It is only used for correctness checking on
// debug build. It is used for checking that encoder has been released in the
// destructor. Because this might happen on a different thread, we need a
// mutex.
rtc::CriticalSection inited_crit_;
#endif
// Type of video codec.
const cricket::VideoCodec codec_;
// Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
// |codec_thread_| synchronously.
webrtc::EncodedImageCallback* callback_;
// State that is constant for the lifetime of this object once the ctor
// returns.
std::unique_ptr<Thread>
codec_thread_; // Thread on which to operate MediaCodec.
rtc::ThreadChecker codec_thread_checker_;
rtc::SequencedTaskChecker encoder_queue_checker_;
ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
jmethodID j_init_encode_method_;
@@ -212,7 +214,6 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
jfieldID j_info_presentation_timestamp_us_field_;
// State that is valid only between InitEncode() and the next Release().
// Touched only on codec_thread_ so no explicit synchronization necessary.
int width_; // Frame width in pixels.
int height_; // Frame height in pixels.
bool inited_;
@@ -234,6 +235,8 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
int current_encoding_time_ms_; // Overall encoding time in the current second
int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame.
int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame.
// Holds the task while the polling loop is paused.
std::unique_ptr<rtc::QueuedTask> encode_task_;
struct InputFrameInfo {
InputFrameInfo(int64_t encode_start_time,
@@ -287,11 +290,17 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
webrtc::VideoCodecMode codec_mode_;
bool sw_fallback_required_;
// All other member variables should be before WeakPtrFactory. Valid only from
// InitEncode to Release.
std::unique_ptr<rtc::WeakPtrFactory<MediaCodecVideoEncoder>> weak_factory_;
};
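// Illustrative sketch (not part of this CL) of the declaration-order rule
// that the comment above |weak_factory_| relies on: members are destroyed in
// reverse declaration order, so a WeakPtrFactory declared last is destroyed
// first, invalidating all outstanding rtc::WeakPtrs before any other member
// goes away. The class name PollingOwner is hypothetical.
#include <memory>

#include "webrtc/base/weak_ptr.h"

class PollingOwner {
 public:
  PollingOwner()
      : weak_factory_(new rtc::WeakPtrFactory<PollingOwner>(this)) {}
  rtc::WeakPtr<PollingOwner> GetWeakPtr() {
    return weak_factory_->GetWeakPtr();
  }

 private:
  int pending_work_ = 0;  // Destroyed only after |weak_factory_|.
  // Declared last so tasks holding a WeakPtr observe invalidation first.
  std::unique_ptr<rtc::WeakPtrFactory<PollingOwner>> weak_factory_;
};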
MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
// Call Release() to ensure no more callbacks to us after we are deleted.
Release();
#if RTC_DCHECK_IS_ON
rtc::CritScope lock(&inited_crit_);
RTC_DCHECK(!inited_);
#endif
}
MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni,
@@ -299,7 +308,6 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni,
jobject egl_context)
: codec_(codec),
callback_(NULL),
codec_thread_(new Thread()),
j_media_codec_video_encoder_class_(
jni,
FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
@@ -315,16 +323,8 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni,
picture_id_(0),
egl_context_(egl_context),
sw_fallback_required_(false) {
// It would be nice to avoid spinning up a new thread per MediaCodec, and
// instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
// 2732 means that deadlocks abound. This class synchronously trampolines
// to |codec_thread_|, so if anything else can be coming to _us_ from
// |codec_thread_|, or from any thread holding the |_sendCritSect| described
// in the bug, we have a problem. For now work around that with a dedicated
// thread.
codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
codec_thread_checker_.DetachFromThread();
encoder_queue_checker_.Detach();
jclass j_output_buffer_info_class =
FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
j_init_encode_method_ = GetMethodID(
@@ -369,16 +369,16 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni,
jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
if (CheckException(jni)) {
ALOGW << "MediaCodecVideoEncoder ctor failed.";
ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */);
ProcessHWError(true /* reset_if_fallback_unavailable */);
}
srand(time(NULL));
AllowBlockingCalls();
}
int32_t MediaCodecVideoEncoder::InitEncode(
const webrtc::VideoCodec* codec_settings,
int32_t /* number_of_cores */,
size_t /* max_payload_size */) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
if (codec_settings == NULL) {
ALOGE << "NULL VideoCodec instance";
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
@@ -407,35 +407,9 @@ int32_t MediaCodecVideoEncoder::InitEncode(
ALOGD << "InitEncode request: " << init_width << " x " << init_height;
ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
return codec_thread_->Invoke<int32_t>(
RTC_FROM_HERE,
Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width,
init_height, codec_settings->startBitrate,
codec_settings->maxFramerate,
codec_settings->expect_encode_from_texture));
}
int32_t MediaCodecVideoEncoder::Encode(
const webrtc::VideoFrame& frame,
const webrtc::CodecSpecificInfo* /* codec_specific_info */,
const std::vector<webrtc::FrameType>* frame_types) {
return codec_thread_->Invoke<int32_t>(
RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this,
frame, frame_types, rtc::TimeMillis()));
}
int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
webrtc::EncodedImageCallback* callback) {
return codec_thread_->Invoke<int32_t>(
RTC_FROM_HERE,
Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
this, callback));
}
int32_t MediaCodecVideoEncoder::Release() {
ALOGD << "EncoderRelease request";
return codec_thread_->Invoke<int32_t>(
RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
return InitEncodeInternal(
init_width, init_height, codec_settings->startBitrate,
codec_settings->maxFramerate, codec_settings->expect_encode_from_texture);
}
int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
@@ -443,52 +417,14 @@ int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t MediaCodecVideoEncoder::SetRateAllocation(
const webrtc::BitrateAllocation& rate_allocation,
uint32_t frame_rate) {
return codec_thread_->Invoke<int32_t>(
RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, this,
rate_allocation.get_sum_kbps(), frame_rate));
}
void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
// We only ever send one message to |this| directly (not through a Bind()'d
// functor), so expect no ID/data.
RTC_CHECK(!msg->message_id) << "Unexpected message!";
RTC_CHECK(!msg->pdata) << "Unexpected message!";
if (!inited_) {
return;
}
// It would be nice to recover from a failure here if one happened, but it's
// unclear how to signal such a failure to the app, so instead we stay silent
// about it and let the next app-called API method reveal the borkedness.
DeliverPendingOutputs(jni);
// If there aren't more frames to deliver, we can start polling at lower rate.
if (input_frame_infos_.empty()) {
codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollNoFramesMs, this);
} else {
codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
}
// Call log statistics here so it's called even if no frames are being
// delivered.
LogStatistics(false);
}
bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
ALOGE << "ResetOnCodecThread";
if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK) {
bool MediaCodecVideoEncoder::ResetCodec() {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
ALOGE << "Reset";
if (Release() != WEBRTC_VIDEO_CODEC_OK) {
ALOGE << "Releasing codec failed during reset.";
return false;
}
if (InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
if (InitEncodeInternal(width_, height_, 0, 0, false) !=
WEBRTC_VIDEO_CODEC_OK) {
ALOGE << "Initializing encoder failed during reset.";
return false;
@@ -496,9 +432,49 @@ bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
return true;
}
bool MediaCodecVideoEncoder::ProcessHWErrorOnCodecThread(
MediaCodecVideoEncoder::EncodeTask::EncodeTask(
rtc::WeakPtr<MediaCodecVideoEncoder> encoder)
: encoder_(encoder) {}
bool MediaCodecVideoEncoder::EncodeTask::Run() {
if (!encoder_) {
// Encoder was destroyed.
return true;
}
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_->encoder_queue_checker_);
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
if (!encoder_->inited_) {
encoder_->encode_task_ = std::unique_ptr<rtc::QueuedTask>(this);
return false;
}
// It would be nice to recover from a failure here if one happened, but it's
// unclear how to signal such a failure to the app, so instead we stay silent
// about it and let the next app-called API method reveal the borkedness.
encoder_->DeliverPendingOutputs(jni);
// Call log statistics here so it's called even if no frames are being
// delivered.
encoder_->LogStatistics(false);
// If there aren't more frames to deliver, we can start polling at lower rate.
if (encoder_->input_frame_infos_.empty()) {
rtc::TaskQueue::Current()->PostDelayedTask(
std::unique_ptr<rtc::QueuedTask>(this), kMediaCodecPollNoFramesMs);
} else {
rtc::TaskQueue::Current()->PostDelayedTask(
std::unique_ptr<rtc::QueuedTask>(this), kMediaCodecPollMs);
}
return false;
}
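// Illustrative sketch (not part of this CL) of the rtc::QueuedTask ownership
// contract that EncodeTask::Run() above relies on: returning true lets the
// task queue delete the task, while returning false keeps the task alive,
// which is what allows EncodeTask to re-post itself or park itself in
// |encode_task_|. The class name RepostingTask is hypothetical.
#include <memory>

#include "webrtc/base/task_queue.h"

class RepostingTask : public rtc::QueuedTask {
 public:
  bool Run() override {
    if (done_) {
      return true;  // The queue deletes the task after Run() returns.
    }
    done_ = true;
    // Hand ownership back to the current queue so this same object runs again.
    rtc::TaskQueue::Current()->PostDelayedTask(
        std::unique_ptr<rtc::QueuedTask>(this), 10 /* milliseconds */);
    return false;  // Ownership was transferred above; the queue must not delete.
  }

 private:
  bool done_ = false;
};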
bool MediaCodecVideoEncoder::ProcessHWError(
bool reset_if_fallback_unavailable) {
ALOGE << "ProcessHWErrorOnCodecThread";
ALOGE << "ProcessHWError";
if (FindMatchingCodec(cricket::InternalEncoderFactory().supported_codecs(),
codec_)) {
ALOGE << "Fallback to SW encoder.";
@@ -506,20 +482,23 @@ bool MediaCodecVideoEncoder::ProcessHWErrorOnCodecThread(
return false;
} else if (reset_if_fallback_unavailable) {
ALOGE << "Reset encoder.";
return ResetCodecOnCodecThread();
return ResetCodec();
}
return false;
}
int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncodeOnCodecThread() {
ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */);
int32_t MediaCodecVideoEncoder::ProcessHWErrorOnEncode() {
ProcessHWError(true /* reset_if_fallback_unavailable */);
return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE
: WEBRTC_VIDEO_CODEC_ERROR;
}
int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
int width, int height, int kbps, int fps, bool use_surface) {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width,
int height,
int kbps,
int fps,
bool use_surface) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
if (sw_fallback_required_) {
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -529,7 +508,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
const VideoCodecType codec_type = webrtc::PayloadNameToCodecType(codec_.name)
.value_or(webrtc::kVideoCodecUnknown);
ALOGD << "InitEncodeOnCodecThread Type: " << (int)codec_type << ", " << width
ALOGD << "InitEncodeInternal Type: " << (int)codec_type << ", " << width
<< " x " << height << ". Bitrate: " << kbps << " kbps. Fps: " << fps;
if (kbps == 0) {
kbps = last_set_bitrate_kbps_;
@@ -566,6 +545,8 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
gof_idx_ = 0;
last_frame_received_ms_ = -1;
frames_received_since_last_key_ = kMinKeyFrameInterval;
weak_factory_.reset(new rtc::WeakPtrFactory<MediaCodecVideoEncoder>(this));
encode_task_.reset(new EncodeTask(weak_factory_->GetWeakPtr()));
// We enforce no extra stride/padding in the format creation step.
jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
@@ -576,12 +557,12 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
(use_surface ? egl_context_ : nullptr));
if (!encode_status) {
ALOGE << "Failed to configure encoder.";
ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */);
ProcessHWError(false /* reset_if_fallback_unavailable */);
return WEBRTC_VIDEO_CODEC_ERROR;
}
if (CheckException(jni)) {
ALOGE << "Exception in init encode.";
ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */);
ProcessHWError(false /* reset_if_fallback_unavailable */);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -591,12 +572,12 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
j_get_input_buffers_method_));
if (CheckException(jni)) {
ALOGE << "Exception in get input buffers.";
ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */);
ProcessHWError(false /* reset_if_fallback_unavailable */);
return WEBRTC_VIDEO_CODEC_ERROR;
}
if (IsNull(jni, input_buffers)) {
ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */);
ProcessHWError(false /* reset_if_fallback_unavailable */);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -612,7 +593,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
break;
default:
LOG(LS_ERROR) << "Wrong color format.";
ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */);
ProcessHWError(false /* reset_if_fallback_unavailable */);
return WEBRTC_VIDEO_CODEC_ERROR;
}
size_t num_input_buffers = jni->GetArrayLength(input_buffers);
@@ -626,26 +607,32 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
jni->GetDirectBufferCapacity(input_buffers_[i]);
if (CheckException(jni)) {
ALOGE << "Exception in get direct buffer capacity.";
ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */);
ProcessHWError(false /* reset_if_fallback_unavailable */);
return WEBRTC_VIDEO_CODEC_ERROR;
}
RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
}
}
inited_ = true;
{
#if RTC_DCHECK_IS_ON
rtc::CritScope lock(&inited_crit_);
#endif
inited_ = true;
}
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
int32_t MediaCodecVideoEncoder::Encode(
const webrtc::VideoFrame& frame,
const std::vector<webrtc::FrameType>* frame_types,
const int64_t frame_input_time_ms) {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
const webrtc::CodecSpecificInfo* /* codec_specific_info */,
const std::vector<webrtc::FrameType>* frame_types) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
if (sw_fallback_required_)
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
const int64_t frame_input_time_ms = rtc::TimeMillis();
if (!inited_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
@@ -670,8 +657,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
frames_received_++;
if (!DeliverPendingOutputs(jni)) {
if (!ProcessHWErrorOnCodecThread(
true /* reset_if_fallback_unavailable */)) {
if (!ProcessHWError(true /* reset_if_fallback_unavailable */)) {
return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE
: WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -706,7 +692,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
if (consecutive_full_queue_frame_drops_ >=
ENCODER_STALL_FRAMEDROP_THRESHOLD) {
ALOGE << "Encoder got stuck.";
return ProcessHWErrorOnEncodeOnCodecThread();
return ProcessHWErrorOnEncode();
}
frames_dropped_media_encoder_++;
return WEBRTC_VIDEO_CODEC_OK;
@@ -719,7 +705,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
VideoFrame input_frame(input_buffer, frame.timestamp(),
frame.render_time_ms(), frame.rotation());
if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
if (!MaybeReconfigureEncoder(input_frame)) {
ALOGE << "Failed to reconfigure encoder.";
return WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -728,11 +714,11 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
bool encode_status = true;
if (!input_frame.video_frame_buffer()->native_handle()) {
int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
j_dequeue_input_buffer_method_);
int j_input_buffer_index = jni->CallIntMethod(
*j_media_codec_video_encoder_, j_dequeue_input_buffer_method_);
if (CheckException(jni)) {
ALOGE << "Exception in dequeu input buffer.";
return ProcessHWErrorOnEncodeOnCodecThread();
return ProcessHWErrorOnEncode();
}
if (j_input_buffer_index == -1) {
// Video codec falls behind - no input buffer available.
@@ -748,41 +734,44 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
}
return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
} else if (j_input_buffer_index == -2) {
return ProcessHWErrorOnEncodeOnCodecThread();
return ProcessHWErrorOnEncode();
}
encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
j_input_buffer_index);
encode_status =
EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index);
} else {
encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
encode_status = EncodeTexture(jni, key_frame, input_frame);
}
if (!encode_status) {
ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp();
return ProcessHWErrorOnEncodeOnCodecThread();
return ProcessHWErrorOnEncode();
}
// Save input image timestamps for later output.
input_frame_infos_.emplace_back(
frame_input_time_ms, input_frame.timestamp(),
input_frame.render_time_ms(), input_frame.rotation());
input_frame_infos_.emplace_back(frame_input_time_ms, input_frame.timestamp(),
input_frame.render_time_ms(),
input_frame.rotation());
last_input_timestamp_ms_ =
current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
codec_thread_->Clear(this);
codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
// Start the polling loop if it is not started.
if (encode_task_) {
rtc::TaskQueue::Current()->PostDelayedTask(std::move(encode_task_),
kMediaCodecPollMs);
}
if (!DeliverPendingOutputs(jni)) {
return ProcessHWErrorOnEncodeOnCodecThread();
return ProcessHWErrorOnEncode();
}
return WEBRTC_VIDEO_CODEC_OK;
}
bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
bool MediaCodecVideoEncoder::MaybeReconfigureEncoder(
const webrtc::VideoFrame& frame) {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
const bool is_texture_frame =
frame.video_frame_buffer()->native_handle() != nullptr;
@@ -809,15 +798,17 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
if (!reconfigure_due_to_format && !reconfigure_due_to_size)
return true;
ReleaseOnCodecThread();
Release();
return InitEncodeOnCodecThread(width_, height_, 0, 0 , is_texture_frame) ==
WEBRTC_VIDEO_CODEC_OK;
return InitEncodeInternal(width_, height_, 0, 0, is_texture_frame) ==
WEBRTC_VIDEO_CODEC_OK;
}
bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni,
bool key_frame,
const webrtc::VideoFrame& frame,
int input_buffer_index) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
RTC_CHECK(!use_surface_);
jobject j_input_buffer = input_buffers_[input_buffer_index];
@@ -825,7 +816,7 @@ bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
if (CheckException(jni)) {
ALOGE << "Exception in get direct buffer address.";
ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */);
ProcessHWError(true /* reset_if_fallback_unavailable */);
return false;
}
RTC_CHECK(yuv_buffer) << "Indirect buffer??";
@@ -847,15 +838,16 @@ bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
current_timestamp_us_);
if (CheckException(jni)) {
ALOGE << "Exception in encode buffer.";
ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */);
ProcessHWError(true /* reset_if_fallback_unavailable */);
return false;
}
return encode_status;
}
bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
bool key_frame, const webrtc::VideoFrame& frame) {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni,
bool key_frame,
const webrtc::VideoFrame& frame) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
RTC_CHECK(use_surface_);
NativeHandleImpl* handle = static_cast<NativeHandleImpl*>(
frame.video_frame_buffer()->native_handle());
@@ -868,30 +860,30 @@ bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
current_timestamp_us_);
if (CheckException(jni)) {
ALOGE << "Exception in encode texture.";
ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */);
ProcessHWError(true /* reset_if_fallback_unavailable */);
return false;
}
return encode_status;
}
int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
webrtc::EncodedImageCallback* callback) {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
callback_ = callback;
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
int32_t MediaCodecVideoEncoder::Release() {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
if (!inited_) {
return WEBRTC_VIDEO_CODEC_OK;
}
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
frames_received_ << ". Encoded: " << frames_encoded_ <<
". Dropped: " << frames_dropped_media_encoder_;
ALOGD << "EncoderRelease: Frames received: " << frames_received_
<< ". Encoded: " << frames_encoded_
<< ". Dropped: " << frames_dropped_media_encoder_;
ScopedLocalRefFrame local_ref_frame(jni);
for (size_t i = 0; i < input_buffers_.size(); ++i)
jni->DeleteGlobalRef(input_buffers_[i]);
@@ -899,25 +891,32 @@ int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
if (CheckException(jni)) {
ALOGE << "Exception in release.";
ProcessHWErrorOnCodecThread(false /* reset_if_fallback_unavailable */);
ProcessHWError(false /* reset_if_fallback_unavailable */);
return WEBRTC_VIDEO_CODEC_ERROR;
}
rtc::MessageQueueManager::Clear(this);
inited_ = false;
{
#if RTC_DCHECK_IS_ON
rtc::CritScope lock(&inited_crit_);
#endif
inited_ = false;
}
use_surface_ = false;
ALOGD << "EncoderReleaseOnCodecThread done.";
encode_task_.reset(nullptr);
weak_factory_.reset(nullptr);
ALOGD << "EncoderRelease done.";
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
uint32_t frame_rate) {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
int32_t MediaCodecVideoEncoder::SetRateAllocation(
const webrtc::BitrateAllocation& rate_allocation,
uint32_t frame_rate) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
const uint32_t new_bit_rate = rate_allocation.get_sum_kbps();
if (sw_fallback_required_)
return WEBRTC_VIDEO_CODEC_OK;
frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
frame_rate : MAX_ALLOWED_VIDEO_FPS;
if (last_set_bitrate_kbps_ == new_bit_rate &&
last_set_fps_ == frame_rate) {
frame_rate =
(frame_rate < MAX_ALLOWED_VIDEO_FPS) ? frame_rate : MAX_ALLOWED_VIDEO_FPS;
if (last_set_bitrate_kbps_ == new_bit_rate && last_set_fps_ == frame_rate) {
return WEBRTC_VIDEO_CODEC_OK;
}
JNIEnv* jni = AttachCurrentThreadIfNeeded();
@@ -928,12 +927,11 @@ int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
if (frame_rate > 0) {
last_set_fps_ = frame_rate;
}
bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_set_rates_method_,
last_set_bitrate_kbps_,
last_set_fps_);
bool ret =
jni->CallBooleanMethod(*j_media_codec_video_encoder_, j_set_rates_method_,
last_set_bitrate_kbps_, last_set_fps_);
if (CheckException(jni) || !ret) {
ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */);
ProcessHWError(true /* reset_if_fallback_unavailable */);
return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_OK
: WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -966,14 +964,14 @@ jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
}
bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
while (true) {
jobject j_output_buffer_info = jni->CallObjectMethod(
*j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
if (CheckException(jni)) {
ALOGE << "Exception in set dequeue output buffer.";
ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */);
ProcessHWError(true /* reset_if_fallback_unavailable */);
return WEBRTC_VIDEO_CODEC_ERROR;
}
if (IsNull(jni, j_output_buffer_info)) {
@@ -983,7 +981,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
int output_buffer_index =
GetOutputBufferInfoIndex(jni, j_output_buffer_info);
if (output_buffer_index == -1) {
ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */);
ProcessHWError(true /* reset_if_fallback_unavailable */);
return false;
}
@@ -1013,7 +1011,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
jni->GetDirectBufferAddress(j_output_buffer));
if (CheckException(jni)) {
ALOGE << "Exception in get direct buffer address.";
ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */);
ProcessHWError(true /* reset_if_fallback_unavailable */);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -1102,7 +1100,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
<< " " << image->_buffer[2] << " " << image->_buffer[3]
<< " " << image->_buffer[4] << " " << image->_buffer[5];
ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */);
ProcessHWError(true /* reset_if_fallback_unavailable */);
return false;
}
header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size());
@@ -1122,7 +1120,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
j_release_output_buffer_method_,
output_buffer_index);
if (CheckException(jni) || !success) {
ProcessHWErrorOnCodecThread(true /* reset_if_fallback_unavailable */);
ProcessHWError(true /* reset_if_fallback_unavailable */);
return false;
}