Use WebRTC logging in MediaCodec JNI code.
Also enable HW encoder scaling in AppRTCDemo.

R=wzh@webrtc.org

Review URL: https://codereview.webrtc.org/1396653002 .

Cr-Commit-Position: refs/heads/master@{#10205}
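Note on the logging change (the first hunks below): the printf-style __android_log_print() macros are replaced with stream-style macros built on WebRTC's rtc::LogMessage via LOG_TAG from webrtc/base/logging.h, so the JNI log lines go through the same sink as the rest of the native code. A minimal sketch of the before/after call style; the TAG constant and the Example() function here are illustrative only, not part of the CL:

// Illustrative sketch only - not part of the CL itself.
#include <android/log.h>
#include "webrtc/base/logging.h"

#define TAG "MediaCodecVideo"  // assumed tag; the real files define their own.

// Old style: format string expanded straight into the Android log.
#define ALOGE_OLD(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
// New style: a tagged rtc::LogMessage stream, routed through WebRTC logging.
#define ALOGE_NEW LOG_TAG(rtc::LS_ERROR, TAG)

static void Example(int ret_val) {
  ALOGE_OLD("Release failure: %d - fallback to SW codec", ret_val);
  ALOGE_NEW << "Release failure: " << ret_val << " - fallback to SW codec";
}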
@@ -32,6 +32,7 @@
 #include <android/log.h>
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "webrtc/base/thread.h"
+#include "webrtc/base/logging.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
 
 namespace webrtc_jni {
@@ -46,9 +47,9 @@ namespace webrtc_jni {
 #else
 #define ALOGV(...)
 #endif
-#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
-#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
-#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
+#define ALOGD LOG_TAG(rtc::LS_INFO, TAG)
+#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG)
+#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG)
 
 // Color formats supported by encoder - should mirror supportedColorList
 // from MediaCodecVideoEncoder.java
@@ -97,7 +98,7 @@ static inline jobject JavaEnumFromIndex(
 // currently thrown exception.
 static inline bool CheckException(JNIEnv* jni) {
 if (jni->ExceptionCheck()) {
-ALOGE("Java JNI exception.");
+ALOGE << "Java JNI exception.";
 jni->ExceptionDescribe();
 jni->ExceptionClear();
 return true;
@@ -240,7 +240,7 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
 
 CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
 use_surface_ = (render_egl_context_ != NULL);
-ALOGD("MediaCodecVideoDecoder ctor. Use surface: %d", use_surface_);
+ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
 memset(&codec_, 0, sizeof(codec_));
 AllowBlockingCalls();
 }
@@ -252,9 +252,9 @@ MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
 
 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
 int32_t numberOfCores) {
-ALOGD("InitDecode.");
+ALOGD << "InitDecode.";
 if (inst == NULL) {
-ALOGE("NULL VideoCodec instance");
+ALOGE << "NULL VideoCodec instance";
 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
 }
 // Factory should guard against other codecs being used with us.
@@ -262,7 +262,7 @@ int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
 << "Unsupported codec " << inst->codecType << " for " << codecType_;
 
 if (sw_fallback_required_) {
-ALOGE("InitDecode() - fallback to SW decoder");
+ALOGE << "InitDecode() - fallback to SW decoder";
 return WEBRTC_VIDEO_CODEC_OK;
 }
 // Save VideoCodec instance for later.
@@ -280,14 +280,14 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
 CheckOnCodecThread();
 JNIEnv* jni = AttachCurrentThreadIfNeeded();
 ScopedLocalRefFrame local_ref_frame(jni);
-ALOGD("InitDecodeOnCodecThread Type: %d. %d x %d. Fps: %d.",
-(int)codecType_, codec_.width, codec_.height,
-codec_.maxFramerate);
+ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". "
+<< codec_.width << " x " << codec_.height << ". Fps: " <<
+(int)codec_.maxFramerate;
 
 // Release previous codec first if it was allocated before.
 int ret_val = ReleaseOnCodecThread();
 if (ret_val < 0) {
-ALOGE("Release failure: %d - fallback to SW codec", ret_val);
+ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
 sw_fallback_required_ = true;
 return WEBRTC_VIDEO_CODEC_ERROR;
 }
@@ -313,7 +313,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
 use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
 : nullptr);
 if (CheckException(jni) || !success) {
-ALOGE("Codec initialization error - fallback to SW codec.");
+ALOGE << "Codec initialization error - fallback to SW codec.";
 sw_fallback_required_ = true;
 return WEBRTC_VIDEO_CODEC_ERROR;
 }
@@ -345,7 +345,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
 input_buffers_[i] =
 jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
 if (CheckException(jni)) {
-ALOGE("NewGlobalRef error - fallback to SW codec.");
+ALOGE << "NewGlobalRef error - fallback to SW codec.";
 sw_fallback_required_ = true;
 return WEBRTC_VIDEO_CODEC_ERROR;
 }
@@ -357,7 +357,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
 }
 
 int32_t MediaCodecVideoDecoder::Release() {
-ALOGD("DecoderRelease request");
+ALOGD << "DecoderRelease request";
 return codec_thread_->Invoke<int32_t>(
 Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
 }
@@ -368,7 +368,7 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
 }
 CheckOnCodecThread();
 JNIEnv* jni = AttachCurrentThreadIfNeeded();
-ALOGD("DecoderReleaseOnCodecThread: Frames received: %d.", frames_received_);
+ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << frames_received_;
 ScopedLocalRefFrame local_ref_frame(jni);
 for (size_t i = 0; i < input_buffers_.size(); i++) {
 jni->DeleteGlobalRef(input_buffers_[i]);
@@ -379,9 +379,10 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
 inited_ = false;
 rtc::MessageQueueManager::Clear(this);
 if (CheckException(jni)) {
-ALOGE("Decoder release exception");
+ALOGE << "Decoder release exception";
 return WEBRTC_VIDEO_CODEC_ERROR;
 }
+ALOGD << "DecoderReleaseOnCodecThread done";
 return WEBRTC_VIDEO_CODEC_OK;
 }
 
@@ -394,13 +395,13 @@ int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
 CheckOnCodecThread();
 int ret_val = ReleaseOnCodecThread();
 if (ret_val < 0) {
-ALOGE("ProcessHWError: Release failure");
+ALOGE << "ProcessHWError: Release failure";
 }
 if (codecType_ == kVideoCodecH264) {
 // For now there is no SW H.264 which can be used as fallback codec.
 // So try to restart hw codec for now.
 ret_val = InitDecodeOnCodecThread();
-ALOGE("Reset H.264 codec done. Status: %d", ret_val);
+ALOGE << "Reset H.264 codec done. Status: " << ret_val;
 if (ret_val == WEBRTC_VIDEO_CODEC_OK) {
 // H.264 codec was succesfully reset - return regular error code.
 return WEBRTC_VIDEO_CODEC_ERROR;
@@ -411,7 +412,7 @@ int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
 }
 } else {
 sw_fallback_required_ = true;
-ALOGE("Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE");
+ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE";
 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
 }
 }
@@ -423,19 +424,19 @@ int32_t MediaCodecVideoDecoder::Decode(
 const CodecSpecificInfo* codecSpecificInfo,
 int64_t renderTimeMs) {
 if (sw_fallback_required_) {
-ALOGE("Decode() - fallback to SW codec");
+ALOGE << "Decode() - fallback to SW codec";
 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
 }
 if (callback_ == NULL) {
-ALOGE("Decode() - callback_ is NULL");
+ALOGE << "Decode() - callback_ is NULL";
 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
 }
 if (inputImage._buffer == NULL && inputImage._length > 0) {
-ALOGE("Decode() - inputImage is incorrect");
+ALOGE << "Decode() - inputImage is incorrect";
 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
 }
 if (!inited_) {
-ALOGE("Decode() - decoder is not initialized");
+ALOGE << "Decode() - decoder is not initialized";
 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
 }
 
@@ -447,7 +448,7 @@ int32_t MediaCodecVideoDecoder::Decode(
 codec_.height = inputImage._encodedHeight;
 int32_t ret = InitDecode(&codec_, 1);
 if (ret < 0) {
-ALOGE("InitDecode failure: %d - fallback to SW codec", ret);
+ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec";
 sw_fallback_required_ = true;
 return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
 }
@@ -456,11 +457,11 @@ int32_t MediaCodecVideoDecoder::Decode(
 // Always start with a complete key frame.
 if (key_frame_required_) {
 if (inputImage._frameType != webrtc::kKeyFrame) {
-ALOGE("Decode() - key frame is required");
+ALOGE << "Decode() - key frame is required";
 return WEBRTC_VIDEO_CODEC_ERROR;
 }
 if (!inputImage._completeFrame) {
-ALOGE("Decode() - complete frame is required");
+ALOGE << "Decode() - complete frame is required";
 return WEBRTC_VIDEO_CODEC_ERROR;
 }
 key_frame_required_ = false;
@@ -485,11 +486,11 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
 ALOGV("Received: %d. Decoded: %d. Wait for output...",
 frames_received_, frames_decoded_);
 if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) {
-ALOGE("DeliverPendingOutputs error");
+ALOGE << "DeliverPendingOutputs error";
 return ProcessHWErrorOnCodecThread();
 }
 if (frames_received_ > frames_decoded_ + max_pending_frames_) {
-ALOGE("Output buffer dequeue timeout");
+ALOGE << "Output buffer dequeue timeout";
 return ProcessHWErrorOnCodecThread();
 }
 }
@@ -498,7 +499,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
 int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
 j_dequeue_input_buffer_method_);
 if (CheckException(jni) || j_input_buffer_index < 0) {
-ALOGE("dequeueInputBuffer error");
+ALOGE << "dequeueInputBuffer error";
 return ProcessHWErrorOnCodecThread();
 }
 
@@ -509,8 +510,8 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
 RTC_CHECK(buffer) << "Indirect buffer??";
 int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
 if (CheckException(jni) || buffer_capacity < inputImage._length) {
-ALOGE("Input frame size %d is bigger than buffer size %d.",
-inputImage._length, buffer_capacity);
+ALOGE << "Input frame size "<< inputImage._length <<
+" is bigger than buffer size " << buffer_capacity;
 return ProcessHWErrorOnCodecThread();
 }
 jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
@@ -533,13 +534,13 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
 inputImage._length,
 timestamp_us);
 if (CheckException(jni) || !success) {
-ALOGE("queueInputBuffer error");
+ALOGE << "queueInputBuffer error";
 return ProcessHWErrorOnCodecThread();
 }
 
 // Try to drain the decoder
 if (!DeliverPendingOutputs(jni, 0)) {
-ALOGE("DeliverPendingOutputs error");
+ALOGE << "DeliverPendingOutputs error";
 return ProcessHWErrorOnCodecThread();
 }
 
@@ -558,7 +559,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
 j_dequeue_output_buffer_method_,
 dequeue_timeout_ms);
 if (CheckException(jni)) {
-ALOGE("dequeueOutputBuffer() error");
+ALOGE << "dequeueOutputBuffer() error";
 return false;
 }
 if (IsNull(jni, j_decoder_output_buffer)) {
@@ -604,7 +605,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
 output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
 
 if (output_buffer_size < width * height * 3 / 2) {
-ALOGE("Insufficient output buffer size: %d", output_buffer_size);
+ALOGE << "Insufficient output buffer size: " << output_buffer_size;
 return false;
 }
 jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
@@ -659,7 +660,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
 j_return_decoded_byte_buffer_method_,
 output_buffer_index);
 if (CheckException(jni)) {
-ALOGE("returnDecodedByteBuffer error");
+ALOGE << "returnDecodedByteBuffer error";
 return false;
 }
 }
@@ -690,10 +691,11 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
 current_frames_ > 0) {
-ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
-current_bytes_ * 8 / statistic_time_ms,
-(current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
-current_decoding_time_ms_ / current_frames_, statistic_time_ms);
+ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " <<
+(current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
+((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
+<< ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
+" for last " << statistic_time_ms << " ms.";
 start_time_ms_ = GetCurrentTimeMs();
 current_frames_ = 0;
 current_bytes_ = 0;
@@ -703,7 +705,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
 // Callback - output decoded frame.
 const int32_t callback_status = callback_->Decoded(decoded_frame);
 if (callback_status > 0) {
-ALOGE("callback error");
+ALOGE << "callback error";
 }
 
 return true;
@@ -716,7 +718,7 @@ int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
 }
 
 int32_t MediaCodecVideoDecoder::Reset() {
-ALOGD("DecoderReset");
+ALOGD << "DecoderReset";
 if (!inited_) {
 return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
 }
@@ -736,7 +738,7 @@ void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
 CheckOnCodecThread();
 
 if (!DeliverPendingOutputs(jni, 0)) {
-ALOGE("OnMessage: DeliverPendingOutputs error");
+ALOGE << "OnMessage: DeliverPendingOutputs error";
 ProcessHWErrorOnCodecThread();
 return;
 }
@@ -745,7 +747,7 @@ void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
 
 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
 render_egl_context_(NULL) {
-ALOGD("MediaCodecVideoDecoderFactory ctor");
+ALOGD << "MediaCodecVideoDecoderFactory ctor";
 JNIEnv* jni = AttachCurrentThreadIfNeeded();
 ScopedLocalRefFrame local_ref_frame(jni);
 jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
@@ -758,7 +760,7 @@ MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
 is_vp8_hw_supported = false;
 }
 if (is_vp8_hw_supported) {
-ALOGD("VP8 HW Decoder supported.");
+ALOGD << "VP8 HW Decoder supported.";
 supported_codec_types_.push_back(kVideoCodecVP8);
 }
 
@@ -769,13 +771,13 @@ MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
 is_h264_hw_supported = false;
 }
 if (is_h264_hw_supported) {
-ALOGD("H264 HW Decoder supported.");
+ALOGD << "H264 HW Decoder supported.";
 supported_codec_types_.push_back(kVideoCodecH264);
 }
 }
 
 MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
-ALOGD("MediaCodecVideoDecoderFactory dtor");
+ALOGD << "MediaCodecVideoDecoderFactory dtor";
 if (render_egl_context_) {
 JNIEnv* jni = AttachCurrentThreadIfNeeded();
 jni->DeleteGlobalRef(render_egl_context_);
@@ -785,7 +787,7 @@ MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
 
 void MediaCodecVideoDecoderFactory::SetEGLContext(
 JNIEnv* jni, jobject render_egl_context) {
-ALOGD("MediaCodecVideoDecoderFactory::SetEGLContext");
+ALOGD << "MediaCodecVideoDecoderFactory::SetEGLContext";
 if (render_egl_context_) {
 jni->DeleteGlobalRef(render_egl_context_);
 render_egl_context_ = NULL;
@@ -793,41 +795,42 @@ void MediaCodecVideoDecoderFactory::SetEGLContext(
 if (!IsNull(jni, render_egl_context)) {
 render_egl_context_ = jni->NewGlobalRef(render_egl_context);
 if (CheckException(jni)) {
-ALOGE("error calling NewGlobalRef for EGL Context.");
+ALOGE << "error calling NewGlobalRef for EGL Context.";
 render_egl_context_ = NULL;
 } else {
 jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
 if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
-ALOGE("Wrong EGL Context.");
+ALOGE << "Wrong EGL Context.";
 jni->DeleteGlobalRef(render_egl_context_);
 render_egl_context_ = NULL;
 }
 }
 }
 if (render_egl_context_ == NULL) {
-ALOGW("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
+ALOGW << "NULL VideoDecoder EGL context - HW surface decoding is disabled.";
 }
 }
 
 webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
 VideoCodecType type) {
 if (supported_codec_types_.empty()) {
-ALOGE("No HW video decoder for type %d.", (int)type);
+ALOGE << "No HW video decoder for type " << (int)type;
 return NULL;
 }
 for (VideoCodecType codec_type : supported_codec_types_) {
 if (codec_type == type) {
-ALOGD("Create HW video decoder for type %d.", (int)type);
+ALOGD << "Create HW video decoder for type " << (int)type;
 return new MediaCodecVideoDecoder(
 AttachCurrentThreadIfNeeded(), type, render_egl_context_);
 }
 }
-ALOGE("Can not find HW video decoder for type %d.", (int)type);
+ALOGE << "Can not find HW video decoder for type " << (int)type;
 return NULL;
 }
 
 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
 webrtc::VideoDecoder* decoder) {
+ALOGD << "Destroy video decoder.";
 delete decoder;
 }
 
@@ -287,7 +287,7 @@ int32_t MediaCodecVideoEncoder::InitEncode(
 const int kMinHeight = 180;
 const int kLowQpThresholdDenominator = 3;
 if (codec_settings == NULL) {
-ALOGE("NULL VideoCodec instance");
+ALOGE << "NULL VideoCodec instance";
 return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
 }
 // Factory should guard against other codecs being used with us.
@@ -295,10 +295,10 @@ int32_t MediaCodecVideoEncoder::InitEncode(
 << "Unsupported codec " << codec_settings->codecType << " for "
 << codecType_;
 
-ALOGD("InitEncode request");
+ALOGD << "InitEncode request";
 scale_ = webrtc::field_trial::FindFullName(
 "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled";
-ALOGD("Automatic resize: %s", scale_ ? "enabled" : "disabled");
+ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
 if (scale_) {
 if (codecType_ == kVideoCodecVP8) {
 // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
@@ -352,7 +352,7 @@ int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
 }
 
 int32_t MediaCodecVideoEncoder::Release() {
-ALOGD("EncoderRelease request");
+ALOGD << "EncoderRelease request";
 return codec_thread_->Invoke<int32_t>(
 Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
 }
@@ -400,7 +400,7 @@ void MediaCodecVideoEncoder::CheckOnCodecThread() {
 }
 
 void MediaCodecVideoEncoder::ResetCodec() {
-ALOGE("ResetCodec");
+ALOGE << "ResetCodec";
 if (Release() != WEBRTC_VIDEO_CODEC_OK ||
 codec_thread_->Invoke<int32_t>(Bind(
 &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
@@ -417,8 +417,9 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
 JNIEnv* jni = AttachCurrentThreadIfNeeded();
 ScopedLocalRefFrame local_ref_frame(jni);
 
-ALOGD("InitEncodeOnCodecThread Type: %d. %d x %d. Bitrate: %d kbps. Fps: %d",
-(int)codecType_, width, height, kbps, fps);
+ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " <<
+width << " x " << height << ". Bitrate: " << kbps <<
+" kbps. Fps: " << fps;
 if (kbps == 0) {
 kbps = last_set_bitrate_kbps_;
 }
@@ -529,8 +530,8 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
 scale_ ? quality_scaler_.GetScaledFrame(frame) : frame;
 
 if (input_frame.width() != width_ || input_frame.height() != height_) {
-ALOGD("Frame resolution change from %d x %d to %d x %d",
-width_, height_, input_frame.width(), input_frame.height());
+ALOGD << "Frame resolution change from " << width_ << " x " << height_ <<
+" to " << input_frame.width() << " x " << input_frame.height();
 width_ = input_frame.width();
 height_ = input_frame.height();
 ResetCodec();
@@ -543,8 +544,8 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
 int encoder_latency_ms = last_input_timestamp_ms_ -
 last_output_timestamp_ms_;
 if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
-ALOGD("Drop frame - encoder is behind by %d ms. Q size: %d",
-encoder_latency_ms, frames_in_queue_);
+ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms <<
+" ms. Q size: " << frames_in_queue_;
 frames_dropped_++;
 // Report dropped frame to quality_scaler_.
 OnDroppedFrame();
@@ -623,8 +624,9 @@ int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
 }
 CheckOnCodecThread();
 JNIEnv* jni = AttachCurrentThreadIfNeeded();
-ALOGD("EncoderReleaseOnCodecThread: Frames received: %d. Encoded: %d. "
-"Dropped: %d.", frames_received_, frames_encoded_, frames_dropped_);
+ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
+frames_received_ << ". Encoded: " << frames_encoded_ <<
+". Dropped: " << frames_dropped_;
 ScopedLocalRefFrame local_ref_frame(jni);
 for (size_t i = 0; i < input_buffers_.size(); ++i)
 jni->DeleteGlobalRef(input_buffers_[i]);
@@ -633,6 +635,7 @@ int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
 CHECK_EXCEPTION(jni);
 rtc::MessageQueueManager::Clear(this);
 inited_ = false;
+ALOGD << "EncoderReleaseOnCodecThread done.";
 return WEBRTC_VIDEO_CODEC_OK;
 }
 
@@ -745,12 +748,13 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
 int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
 if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
 current_frames_ > 0) {
-ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
-" encTime: %d for last %d ms",
-current_bytes_ * 8 / statistic_time_ms,
-last_set_bitrate_kbps_,
-(current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
-current_encoding_time_ms_ / current_frames_, statistic_time_ms);
+ALOGD << "Encoded frames: " << frames_encoded_ << ". Bitrate: " <<
+(current_bytes_ * 8 / statistic_time_ms) <<
+", target: " << last_set_bitrate_kbps_ << " kbps, fps: " <<
+((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
+<< ", encTime: " <<
+(current_encoding_time_ms_ / current_frames_) << " for last " <<
+statistic_time_ms << " ms.";
 start_time_ms_ = GetCurrentTimeMs();
 current_frames_ = 0;
 current_bytes_ = 0;
@@ -816,10 +820,10 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
 scPosition += H264_SC_LENGTH;
 }
 if (scPositionsLength == 0) {
-ALOGE("Start code is not found!");
-ALOGE("Data 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x",
-image->_buffer[0], image->_buffer[1], image->_buffer[2],
-image->_buffer[3], image->_buffer[4], image->_buffer[5]);
+ALOGE << "Start code is not found!";
+ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
+<< " " << image->_buffer[2] << " " << image->_buffer[3]
+<< " " << image->_buffer[4] << " " << image->_buffer[5];
 ResetCodec();
 return false;
 }
@@ -909,7 +913,7 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
 GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
 CHECK_EXCEPTION(jni);
 if (is_vp8_hw_supported) {
-ALOGD("VP8 HW Encoder supported.");
+ALOGD << "VP8 HW Encoder supported.";
 supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
 }
@@ -919,7 +923,7 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
 GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
 CHECK_EXCEPTION(jni);
 if (is_h264_hw_supported) {
-ALOGD("H.264 HW Encoder supported.");
+ALOGD << "H.264 HW Encoder supported.";
 supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
 MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
 }
@@ -935,8 +939,8 @@ webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
 for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
 it != supported_codecs_.end(); ++it) {
 if (it->type == type) {
-ALOGD("Create HW video encoder for type %d (%s).",
-(int)type, it->name.c_str());
+ALOGD << "Create HW video encoder for type " << (int)type <<
+" (" << it->name << ").";
 return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type);
 }
 }
@@ -950,7 +954,7 @@ MediaCodecVideoEncoderFactory::codecs() const {
 
 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
 webrtc::VideoEncoder* encoder) {
-ALOGD("Destroy video encoder.");
+ALOGD << "Destroy video encoder.";
 delete encoder;
 }
 
@@ -55,6 +55,8 @@ public class PeerConnectionClient {
 public static final String AUDIO_TRACK_ID = "ARDAMSa0";
 private static final String TAG = "PCRTCClient";
 private static final String FIELD_TRIAL_VP9 = "WebRTC-SupportVP9/Enabled/";
+private static final String FIELD_TRIAL_AUTOMATIC_RESIZE =
+"WebRTC-MediaCodecVideoEncoder-AutomaticResize/Enabled/";
 private static final String VIDEO_CODEC_VP8 = "VP8";
 private static final String VIDEO_CODEC_VP9 = "VP9";
 private static final String VIDEO_CODEC_H264 = "H264";
@@ -283,13 +285,16 @@ public class PeerConnectionClient {
 Log.d(TAG, "Create peer connection factory. Use video: " +
 peerConnectionParameters.videoCallEnabled);
 isError = false;
+
+// Initialize field trials.
+String field_trials = FIELD_TRIAL_AUTOMATIC_RESIZE;
 // Check if VP9 is used by default.
 if (videoCallEnabled && peerConnectionParameters.videoCodec != null
 && peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
-PeerConnectionFactory.initializeFieldTrials(FIELD_TRIAL_VP9);
-} else {
-PeerConnectionFactory.initializeFieldTrials(null);
+field_trials += FIELD_TRIAL_VP9;
 }
+PeerConnectionFactory.initializeFieldTrials(field_trials);
+
 // Check if H.264 is used by default.
 preferH264 = false;
 if (videoCallEnabled && peerConnectionParameters.videoCodec != null
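A note on the AppRTCDemo hunk above: WebRTC field trials are passed as a single string of "TrialName/Value/" entries, which is why the client now concatenates FIELD_TRIAL_AUTOMATIC_RESIZE and, when VP9 is selected, FIELD_TRIAL_VP9 before calling PeerConnectionFactory.initializeFieldTrials(). On the native side the encoder only looks the trial up by name, as in the InitEncode hunk earlier. A hedged C++ sketch of that lookup; the header path is assumed from the same era of the tree and may differ in later revisions:

// Sketch of the consumer side of the field trial enabled by AppRTCDemo.
#include <string>

#include "webrtc/system_wrappers/interface/field_trial.h"

bool IsAutomaticResizeEnabled() {
  // Returns true only if the application registered
  // "WebRTC-MediaCodecVideoEncoder-AutomaticResize/Enabled/" at startup
  // (AppRTCDemo now does this via PeerConnectionFactory.initializeFieldTrials()).
  return webrtc::field_trial::FindFullName(
             "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled";
}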