Use WebRTC logging in MediaCodec JNI code.

Also enable HW encoder scaling in AppRTCDemo.

R=wzh@webrtc.org

Review URL: https://codereview.webrtc.org/1396653002 .

Cr-Commit-Position: refs/heads/master@{#10205}
Author: Alex Glaznev
Date: 2015-10-07 16:51:02 -07:00
parent 21622a1d19
commit fddf6e526c
4 changed files with 99 additions and 86 deletions

View File

@@ -32,6 +32,7 @@
 #include <android/log.h>
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "webrtc/base/thread.h"
+#include "webrtc/base/logging.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
 
 namespace webrtc_jni {
@@ -46,9 +47,9 @@ namespace webrtc_jni {
 #else
 #define ALOGV(...)
 #endif
-#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
-#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
-#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
+#define ALOGD LOG_TAG(rtc::LS_INFO, TAG)
+#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG)
+#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG)
 
 // Color formats supported by encoder - should mirror supportedColorList
 // from MediaCodecVideoEncoder.java
@@ -97,7 +98,7 @@ static inline jobject JavaEnumFromIndex(
 // currently thrown exception.
 static inline bool CheckException(JNIEnv* jni) {
   if (jni->ExceptionCheck()) {
-    ALOGE("Java JNI exception.");
+    ALOGE << "Java JNI exception.";
     jni->ExceptionDescribe();
     jni->ExceptionClear();
     return true;
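
The hunks above swap the printf-style Android macros for stream-style macros built on WebRTC's rtc logging (LOG_TAG with an rtc::LoggingSeverity and the per-file TAG), so call sites write ALOGD << "x: " << x; instead of ALOGD("x: %d", x). The sketch below is a minimal, self-contained illustration of how such tagged stream macros can work; it is not WebRTC's rtc::LogMessage/LOG_TAG implementation, and the class name, tag, and sample values are invented for this example.

// Hedged sketch only: a tagged stream logger that lets macros like ALOGD be
// used as `ALOGD << "value: " << value;`. Not WebRTC's actual implementation.
#include <iostream>
#include <sstream>
#include <string>

class TaggedLogMessage {
 public:
  TaggedLogMessage(const char* severity, const char* tag)
      : severity_(severity), tag_(tag) {}
  ~TaggedLogMessage() {
    // The temporary is destroyed at the end of the full `ALOGD << ...`
    // statement, which is when the assembled line is flushed.
    std::cerr << severity_ << "/" << tag_ << ": " << stream_.str() << std::endl;
  }
  template <typename T>
  TaggedLogMessage& operator<<(const T& value) {
    stream_ << value;
    return *this;
  }

 private:
  std::string severity_;
  std::string tag_;
  std::ostringstream stream_;
};

// Illustrative tag; the real JNI files define their own TAG per file.
#define TAG "MediaCodecVideoDecoder"
#define ALOGD TaggedLogMessage("D", TAG)
#define ALOGE TaggedLogMessage("E", TAG)

int main() {
  int frames_received = 42;  // made-up sample value
  ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << frames_received;
  ALOGE << "Java JNI exception.";
  return 0;
}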

View File

@@ -240,7 +240,7 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
   CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
   use_surface_ = (render_egl_context_ != NULL);
-  ALOGD("MediaCodecVideoDecoder ctor. Use surface: %d", use_surface_);
+  ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
   memset(&codec_, 0, sizeof(codec_));
   AllowBlockingCalls();
 }
@@ -252,9 +252,9 @@ MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
                                            int32_t numberOfCores) {
-  ALOGD("InitDecode.");
+  ALOGD << "InitDecode.";
   if (inst == NULL) {
-    ALOGE("NULL VideoCodec instance");
+    ALOGE << "NULL VideoCodec instance";
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   // Factory should guard against other codecs being used with us.
@@ -262,7 +262,7 @@ int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
       << "Unsupported codec " << inst->codecType << " for " << codecType_;
   if (sw_fallback_required_) {
-    ALOGE("InitDecode() - fallback to SW decoder");
+    ALOGE << "InitDecode() - fallback to SW decoder";
     return WEBRTC_VIDEO_CODEC_OK;
   }
   // Save VideoCodec instance for later.
@@ -280,14 +280,14 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
   CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
-  ALOGD("InitDecodeOnCodecThread Type: %d. %d x %d. Fps: %d.",
-        (int)codecType_, codec_.width, codec_.height,
-        codec_.maxFramerate);
+  ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". "
+      << codec_.width << " x " << codec_.height << ". Fps: " <<
+      (int)codec_.maxFramerate;
   // Release previous codec first if it was allocated before.
   int ret_val = ReleaseOnCodecThread();
   if (ret_val < 0) {
-    ALOGE("Release failure: %d - fallback to SW codec", ret_val);
+    ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
     sw_fallback_required_ = true;
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
@@ -313,7 +313,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
       use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
                    : nullptr);
   if (CheckException(jni) || !success) {
-    ALOGE("Codec initialization error - fallback to SW codec.");
+    ALOGE << "Codec initialization error - fallback to SW codec.";
     sw_fallback_required_ = true;
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
@@ -345,7 +345,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
     input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
     if (CheckException(jni)) {
-      ALOGE("NewGlobalRef error - fallback to SW codec.");
+      ALOGE << "NewGlobalRef error - fallback to SW codec.";
       sw_fallback_required_ = true;
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
@@ -357,7 +357,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
 }
 
 int32_t MediaCodecVideoDecoder::Release() {
-  ALOGD("DecoderRelease request");
+  ALOGD << "DecoderRelease request";
   return codec_thread_->Invoke<int32_t>(
       Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
 }
@@ -368,7 +368,7 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
   }
   CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  ALOGD("DecoderReleaseOnCodecThread: Frames received: %d.", frames_received_);
+  ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << frames_received_;
   ScopedLocalRefFrame local_ref_frame(jni);
   for (size_t i = 0; i < input_buffers_.size(); i++) {
     jni->DeleteGlobalRef(input_buffers_[i]);
@@ -379,9 +379,10 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
   inited_ = false;
   rtc::MessageQueueManager::Clear(this);
   if (CheckException(jni)) {
-    ALOGE("Decoder release exception");
+    ALOGE << "Decoder release exception";
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
+  ALOGD << "DecoderReleaseOnCodecThread done";
   return WEBRTC_VIDEO_CODEC_OK;
 }
@@ -394,13 +395,13 @@ int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
   CheckOnCodecThread();
   int ret_val = ReleaseOnCodecThread();
   if (ret_val < 0) {
-    ALOGE("ProcessHWError: Release failure");
+    ALOGE << "ProcessHWError: Release failure";
   }
   if (codecType_ == kVideoCodecH264) {
     // For now there is no SW H.264 which can be used as fallback codec.
     // So try to restart hw codec for now.
     ret_val = InitDecodeOnCodecThread();
-    ALOGE("Reset H.264 codec done. Status: %d", ret_val);
+    ALOGE << "Reset H.264 codec done. Status: " << ret_val;
     if (ret_val == WEBRTC_VIDEO_CODEC_OK) {
       // H.264 codec was succesfully reset - return regular error code.
       return WEBRTC_VIDEO_CODEC_ERROR;
@@ -411,7 +412,7 @@ int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
     }
   } else {
     sw_fallback_required_ = true;
-    ALOGE("Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE");
+    ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE";
     return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
   }
 }
@@ -423,19 +424,19 @@ int32_t MediaCodecVideoDecoder::Decode(
     const CodecSpecificInfo* codecSpecificInfo,
     int64_t renderTimeMs) {
   if (sw_fallback_required_) {
-    ALOGE("Decode() - fallback to SW codec");
+    ALOGE << "Decode() - fallback to SW codec";
     return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
   }
   if (callback_ == NULL) {
-    ALOGE("Decode() - callback_ is NULL");
+    ALOGE << "Decode() - callback_ is NULL";
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
   if (inputImage._buffer == NULL && inputImage._length > 0) {
-    ALOGE("Decode() - inputImage is incorrect");
+    ALOGE << "Decode() - inputImage is incorrect";
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   if (!inited_) {
-    ALOGE("Decode() - decoder is not initialized");
+    ALOGE << "Decode() - decoder is not initialized";
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
@@ -447,7 +448,7 @@ int32_t MediaCodecVideoDecoder::Decode(
     codec_.height = inputImage._encodedHeight;
     int32_t ret = InitDecode(&codec_, 1);
     if (ret < 0) {
-      ALOGE("InitDecode failure: %d - fallback to SW codec", ret);
+      ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec";
       sw_fallback_required_ = true;
       return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
     }
@@ -456,11 +457,11 @@ int32_t MediaCodecVideoDecoder::Decode(
   // Always start with a complete key frame.
   if (key_frame_required_) {
     if (inputImage._frameType != webrtc::kKeyFrame) {
-      ALOGE("Decode() - key frame is required");
+      ALOGE << "Decode() - key frame is required";
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
     if (!inputImage._completeFrame) {
-      ALOGE("Decode() - complete frame is required");
+      ALOGE << "Decode() - complete frame is required";
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
     key_frame_required_ = false;
@@ -485,11 +486,11 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
     ALOGV("Received: %d. Decoded: %d. Wait for output...",
           frames_received_, frames_decoded_);
     if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) {
-      ALOGE("DeliverPendingOutputs error");
+      ALOGE << "DeliverPendingOutputs error";
      return ProcessHWErrorOnCodecThread();
     }
     if (frames_received_ > frames_decoded_ + max_pending_frames_) {
-      ALOGE("Output buffer dequeue timeout");
+      ALOGE << "Output buffer dequeue timeout";
       return ProcessHWErrorOnCodecThread();
     }
   }
@@ -498,7 +499,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
   int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
                                                 j_dequeue_input_buffer_method_);
   if (CheckException(jni) || j_input_buffer_index < 0) {
-    ALOGE("dequeueInputBuffer error");
+    ALOGE << "dequeueInputBuffer error";
     return ProcessHWErrorOnCodecThread();
   }
@@ -509,8 +510,8 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
   RTC_CHECK(buffer) << "Indirect buffer??";
   int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
   if (CheckException(jni) || buffer_capacity < inputImage._length) {
-    ALOGE("Input frame size %d is bigger than buffer size %d.",
-          inputImage._length, buffer_capacity);
+    ALOGE << "Input frame size "<< inputImage._length <<
+        " is bigger than buffer size " << buffer_capacity;
     return ProcessHWErrorOnCodecThread();
   }
   jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
@@ -533,13 +534,13 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
                                     inputImage._length,
                                     timestamp_us);
   if (CheckException(jni) || !success) {
-    ALOGE("queueInputBuffer error");
+    ALOGE << "queueInputBuffer error";
     return ProcessHWErrorOnCodecThread();
   }
 
   // Try to drain the decoder
   if (!DeliverPendingOutputs(jni, 0)) {
-    ALOGE("DeliverPendingOutputs error");
+    ALOGE << "DeliverPendingOutputs error";
     return ProcessHWErrorOnCodecThread();
   }
@@ -558,7 +559,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
       j_dequeue_output_buffer_method_,
       dequeue_timeout_ms);
   if (CheckException(jni)) {
-    ALOGE("dequeueOutputBuffer() error");
+    ALOGE << "dequeueOutputBuffer() error";
     return false;
   }
   if (IsNull(jni, j_decoder_output_buffer)) {
@@ -604,7 +605,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
         output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
     if (output_buffer_size < width * height * 3 / 2) {
-      ALOGE("Insufficient output buffer size: %d", output_buffer_size);
+      ALOGE << "Insufficient output buffer size: " << output_buffer_size;
      return false;
     }
     jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
@@ -659,7 +660,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
         j_return_decoded_byte_buffer_method_,
         output_buffer_index);
     if (CheckException(jni)) {
-      ALOGE("returnDecodedByteBuffer error");
+      ALOGE << "returnDecodedByteBuffer error";
       return false;
     }
   }
@@ -690,10 +691,11 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
   int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
   if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
       current_frames_ > 0) {
-    ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
-          current_bytes_ * 8 / statistic_time_ms,
-          (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
-          current_decoding_time_ms_ / current_frames_, statistic_time_ms);
+    ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " <<
+        (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
+        ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
+        << ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
+        " for last " << statistic_time_ms << " ms.";
     start_time_ms_ = GetCurrentTimeMs();
     current_frames_ = 0;
     current_bytes_ = 0;
@@ -703,7 +705,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
   // Callback - output decoded frame.
   const int32_t callback_status = callback_->Decoded(decoded_frame);
   if (callback_status > 0) {
-    ALOGE("callback error");
+    ALOGE << "callback error";
   }
 
   return true;
@@ -716,7 +718,7 @@ int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
 }
 
 int32_t MediaCodecVideoDecoder::Reset() {
-  ALOGD("DecoderReset");
+  ALOGD << "DecoderReset";
   if (!inited_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
@@ -736,7 +738,7 @@ void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
   CheckOnCodecThread();
 
   if (!DeliverPendingOutputs(jni, 0)) {
-    ALOGE("OnMessage: DeliverPendingOutputs error");
+    ALOGE << "OnMessage: DeliverPendingOutputs error";
     ProcessHWErrorOnCodecThread();
     return;
   }
@@ -745,7 +747,7 @@ void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
 MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
     render_egl_context_(NULL) {
-  ALOGD("MediaCodecVideoDecoderFactory ctor");
+  ALOGD << "MediaCodecVideoDecoderFactory ctor";
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
   jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
@@ -758,7 +760,7 @@ MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
     is_vp8_hw_supported = false;
   }
   if (is_vp8_hw_supported) {
-    ALOGD("VP8 HW Decoder supported.");
+    ALOGD << "VP8 HW Decoder supported.";
     supported_codec_types_.push_back(kVideoCodecVP8);
   }
@@ -769,13 +771,13 @@ MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
     is_h264_hw_supported = false;
   }
   if (is_h264_hw_supported) {
-    ALOGD("H264 HW Decoder supported.");
+    ALOGD << "H264 HW Decoder supported.";
     supported_codec_types_.push_back(kVideoCodecH264);
   }
 }
 
 MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
-  ALOGD("MediaCodecVideoDecoderFactory dtor");
+  ALOGD << "MediaCodecVideoDecoderFactory dtor";
   if (render_egl_context_) {
     JNIEnv* jni = AttachCurrentThreadIfNeeded();
     jni->DeleteGlobalRef(render_egl_context_);
@@ -785,7 +787,7 @@ MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
 void MediaCodecVideoDecoderFactory::SetEGLContext(
     JNIEnv* jni, jobject render_egl_context) {
-  ALOGD("MediaCodecVideoDecoderFactory::SetEGLContext");
+  ALOGD << "MediaCodecVideoDecoderFactory::SetEGLContext";
   if (render_egl_context_) {
     jni->DeleteGlobalRef(render_egl_context_);
     render_egl_context_ = NULL;
@@ -793,41 +795,42 @@ void MediaCodecVideoDecoderFactory::SetEGLContext(
   if (!IsNull(jni, render_egl_context)) {
     render_egl_context_ = jni->NewGlobalRef(render_egl_context);
     if (CheckException(jni)) {
-      ALOGE("error calling NewGlobalRef for EGL Context.");
+      ALOGE << "error calling NewGlobalRef for EGL Context.";
       render_egl_context_ = NULL;
     } else {
       jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
       if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
-        ALOGE("Wrong EGL Context.");
+        ALOGE << "Wrong EGL Context.";
         jni->DeleteGlobalRef(render_egl_context_);
        render_egl_context_ = NULL;
       }
     }
   }
   if (render_egl_context_ == NULL) {
-    ALOGW("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
+    ALOGW << "NULL VideoDecoder EGL context - HW surface decoding is disabled.";
   }
 }
 
 webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
     VideoCodecType type) {
   if (supported_codec_types_.empty()) {
-    ALOGE("No HW video decoder for type %d.", (int)type);
+    ALOGE << "No HW video decoder for type " << (int)type;
     return NULL;
   }
   for (VideoCodecType codec_type : supported_codec_types_) {
     if (codec_type == type) {
-      ALOGD("Create HW video decoder for type %d.", (int)type);
+      ALOGD << "Create HW video decoder for type " << (int)type;
       return new MediaCodecVideoDecoder(
          AttachCurrentThreadIfNeeded(), type, render_egl_context_);
     }
   }
-  ALOGE("Can not find HW video decoder for type %d.", (int)type);
+  ALOGE << "Can not find HW video decoder for type " << (int)type;
   return NULL;
 }
 
 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
     webrtc::VideoDecoder* decoder) {
+  ALOGD << "Destroy video decoder.";
   delete decoder;
 }
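
A note on the reworked statistics line in DeliverPendingOutputs above (the encoder file that follows uses the same pattern): current_bytes_ * 8 / statistic_time_ms is bits divided by milliseconds, which is numerically kbps, and adding statistic_time_ms / 2 before the integer division rounds the fps value to the nearest whole number. Below is a standalone sketch of that arithmetic with invented sample values; the variable names mirror the members used in the diff but are plain locals here.

// Illustration only: the arithmetic behind the new statistics log lines.
// All sample values are made up for the example.
#include <iostream>

int main() {
  int current_bytes_ = 375000;          // bytes handled in the interval
  int current_frames_ = 90;             // frames handled in the interval
  int current_decoding_time_ms_ = 540;  // total decode time in the interval
  int statistic_time_ms = 3000;         // interval length, ms

  // bytes * 8 = bits; bits per millisecond equals kilobits per second.
  int bitrate_kbps = current_bytes_ * 8 / statistic_time_ms;  // 1000 kbps

  // Adding half the divisor before integer division rounds to nearest:
  // (90 * 1000 + 1500) / 3000 = 91500 / 3000 -> 30 fps.
  int fps =
      (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;

  int dec_time_ms = current_decoding_time_ms_ / current_frames_;  // 6 ms/frame

  std::cout << "Decoded frames: " << current_frames_ << ". Bitrate: "
            << bitrate_kbps << " kbps, fps: " << fps << ". decTime: "
            << dec_time_ms << " for last " << statistic_time_ms << " ms."
            << std::endl;
  return 0;
}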

View File

@@ -287,7 +287,7 @@ int32_t MediaCodecVideoEncoder::InitEncode(
   const int kMinHeight = 180;
   const int kLowQpThresholdDenominator = 3;
   if (codec_settings == NULL) {
-    ALOGE("NULL VideoCodec instance");
+    ALOGE << "NULL VideoCodec instance";
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
   // Factory should guard against other codecs being used with us.
@@ -295,10 +295,10 @@ int32_t MediaCodecVideoEncoder::InitEncode(
       << "Unsupported codec " << codec_settings->codecType << " for "
       << codecType_;
 
-  ALOGD("InitEncode request");
+  ALOGD << "InitEncode request";
   scale_ = webrtc::field_trial::FindFullName(
       "WebRTC-MediaCodecVideoEncoder-AutomaticResize") == "Enabled";
-  ALOGD("Automatic resize: %s", scale_ ? "enabled" : "disabled");
+  ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
   if (scale_) {
     if (codecType_ == kVideoCodecVP8) {
       // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the
@@ -352,7 +352,7 @@ int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
 }
 
 int32_t MediaCodecVideoEncoder::Release() {
-  ALOGD("EncoderRelease request");
+  ALOGD << "EncoderRelease request";
   return codec_thread_->Invoke<int32_t>(
       Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
 }
@@ -400,7 +400,7 @@ void MediaCodecVideoEncoder::CheckOnCodecThread() {
 }
 
 void MediaCodecVideoEncoder::ResetCodec() {
-  ALOGE("ResetCodec");
+  ALOGE << "ResetCodec";
   if (Release() != WEBRTC_VIDEO_CODEC_OK ||
       codec_thread_->Invoke<int32_t>(Bind(
          &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
@@ -417,8 +417,9 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
-  ALOGD("InitEncodeOnCodecThread Type: %d. %d x %d. Bitrate: %d kbps. Fps: %d",
-        (int)codecType_, width, height, kbps, fps);
+  ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " <<
+      width << " x " << height << ". Bitrate: " << kbps <<
+      " kbps. Fps: " << fps;
   if (kbps == 0) {
     kbps = last_set_bitrate_kbps_;
   }
@@ -529,8 +530,8 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
      scale_ ? quality_scaler_.GetScaledFrame(frame) : frame;
   if (input_frame.width() != width_ || input_frame.height() != height_) {
-    ALOGD("Frame resolution change from %d x %d to %d x %d",
-          width_, height_, input_frame.width(), input_frame.height());
+    ALOGD << "Frame resolution change from " << width_ << " x " << height_ <<
+        " to " << input_frame.width() << " x " << input_frame.height();
     width_ = input_frame.width();
     height_ = input_frame.height();
     ResetCodec();
@@ -543,8 +544,8 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
     int encoder_latency_ms = last_input_timestamp_ms_ -
         last_output_timestamp_ms_;
     if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
-      ALOGD("Drop frame - encoder is behind by %d ms. Q size: %d",
-            encoder_latency_ms, frames_in_queue_);
+      ALOGD << "Drop frame - encoder is behind by " << encoder_latency_ms <<
+          " ms. Q size: " << frames_in_queue_;
       frames_dropped_++;
       // Report dropped frame to quality_scaler_.
       OnDroppedFrame();
@@ -623,8 +624,9 @@ int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
   }
   CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  ALOGD("EncoderReleaseOnCodecThread: Frames received: %d. Encoded: %d. "
-        "Dropped: %d.", frames_received_, frames_encoded_, frames_dropped_);
+  ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
+      frames_received_ << ". Encoded: " << frames_encoded_ <<
+      ". Dropped: " << frames_dropped_;
   ScopedLocalRefFrame local_ref_frame(jni);
   for (size_t i = 0; i < input_buffers_.size(); ++i)
     jni->DeleteGlobalRef(input_buffers_[i]);
@@ -633,6 +635,7 @@ int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
   CHECK_EXCEPTION(jni);
   rtc::MessageQueueManager::Clear(this);
   inited_ = false;
+  ALOGD << "EncoderReleaseOnCodecThread done.";
   return WEBRTC_VIDEO_CODEC_OK;
 }
@@ -745,12 +748,13 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
   int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
   if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
       current_frames_ > 0) {
-    ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
-          " encTime: %d for last %d ms",
-          current_bytes_ * 8 / statistic_time_ms,
-          last_set_bitrate_kbps_,
-          (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
-          current_encoding_time_ms_ / current_frames_, statistic_time_ms);
+    ALOGD << "Encoded frames: " << frames_encoded_ << ". Bitrate: " <<
+        (current_bytes_ * 8 / statistic_time_ms) <<
+        ", target: " << last_set_bitrate_kbps_ << " kbps, fps: " <<
+        ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
+        << ", encTime: " <<
+        (current_encoding_time_ms_ / current_frames_) << " for last " <<
+        statistic_time_ms << " ms.";
     start_time_ms_ = GetCurrentTimeMs();
     current_frames_ = 0;
     current_bytes_ = 0;
@@ -816,10 +820,10 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
         scPosition += H264_SC_LENGTH;
       }
       if (scPositionsLength == 0) {
-        ALOGE("Start code is not found!");
-        ALOGE("Data 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x",
-              image->_buffer[0], image->_buffer[1], image->_buffer[2],
-              image->_buffer[3], image->_buffer[4], image->_buffer[5]);
+        ALOGE << "Start code is not found!";
+        ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
+            << " " << image->_buffer[2] << " " << image->_buffer[3]
+            << " " << image->_buffer[4] << " " << image->_buffer[5];
         ResetCodec();
         return false;
       }
@@ -909,7 +913,7 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
      GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
   CHECK_EXCEPTION(jni);
   if (is_vp8_hw_supported) {
-    ALOGD("VP8 HW Encoder supported.");
+    ALOGD << "VP8 HW Encoder supported.";
     supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
   }
@@ -919,7 +923,7 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
      GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
   CHECK_EXCEPTION(jni);
   if (is_h264_hw_supported) {
-    ALOGD("H.264 HW Encoder supported.");
+    ALOGD << "H.264 HW Encoder supported.";
     supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
   }
@@ -935,8 +939,8 @@ webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
   for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
        it != supported_codecs_.end(); ++it) {
     if (it->type == type) {
-      ALOGD("Create HW video encoder for type %d (%s).",
-            (int)type, it->name.c_str());
+      ALOGD << "Create HW video encoder for type " << (int)type <<
+          " (" << it->name << ").";
      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type);
     }
   }
@@ -950,7 +954,7 @@ MediaCodecVideoEncoderFactory::codecs() const {
 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
     webrtc::VideoEncoder* encoder) {
-  ALOGD("Destroy video encoder.");
+  ALOGD << "Destroy video encoder.";
   delete encoder;
 }

View File

@@ -55,6 +55,8 @@ public class PeerConnectionClient {
   public static final String AUDIO_TRACK_ID = "ARDAMSa0";
   private static final String TAG = "PCRTCClient";
   private static final String FIELD_TRIAL_VP9 = "WebRTC-SupportVP9/Enabled/";
+  private static final String FIELD_TRIAL_AUTOMATIC_RESIZE =
+      "WebRTC-MediaCodecVideoEncoder-AutomaticResize/Enabled/";
   private static final String VIDEO_CODEC_VP8 = "VP8";
   private static final String VIDEO_CODEC_VP9 = "VP9";
   private static final String VIDEO_CODEC_H264 = "H264";
@@ -283,13 +285,16 @@ public class PeerConnectionClient {
     Log.d(TAG, "Create peer connection factory. Use video: " +
         peerConnectionParameters.videoCallEnabled);
     isError = false;
+    // Initialize field trials.
+    String field_trials = FIELD_TRIAL_AUTOMATIC_RESIZE;
     // Check if VP9 is used by default.
     if (videoCallEnabled && peerConnectionParameters.videoCodec != null
         && peerConnectionParameters.videoCodec.equals(VIDEO_CODEC_VP9)) {
-      PeerConnectionFactory.initializeFieldTrials(FIELD_TRIAL_VP9);
-    } else {
-      PeerConnectionFactory.initializeFieldTrials(null);
+      field_trials += FIELD_TRIAL_VP9;
     }
+    PeerConnectionFactory.initializeFieldTrials(field_trials);
     // Check if H.264 is used by default.
     preferH264 = false;
     if (videoCallEnabled && peerConnectionParameters.videoCodec != null
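
The AppRTCDemo change above replaces the if/else around initializeFieldTrials() with a single concatenated field-trial string, which is why FIELD_TRIAL_AUTOMATIC_RESIZE ends in "/Enabled/": the string is a sequence of "TrialName/GroupName/" pairs, and the encoder side (the FindFullName check in the earlier file) reads the group registered for "WebRTC-MediaCodecVideoEncoder-AutomaticResize". The sketch below is a toy parser showing that format; it is an illustration only, not the real webrtc::field_trial implementation.

// Hedged sketch: parse a "Name/Group/Name/Group/" field-trial string and
// look up one trial, mirroring the check the encoder performs.
#include <iostream>
#include <map>
#include <sstream>
#include <string>

std::map<std::string, std::string> ParseFieldTrials(const std::string& trials) {
  std::map<std::string, std::string> result;
  std::istringstream stream(trials);
  std::string name;
  std::string group;
  // Trials are encoded as alternating name/group tokens separated by '/'.
  while (std::getline(stream, name, '/') && std::getline(stream, group, '/')) {
    result[name] = group;
  }
  return result;
}

int main() {
  // Mirrors what PeerConnectionClient now builds before calling
  // PeerConnectionFactory.initializeFieldTrials().
  std::string field_trials =
      "WebRTC-MediaCodecVideoEncoder-AutomaticResize/Enabled/"
      "WebRTC-SupportVP9/Enabled/";
  std::map<std::string, std::string> trials = ParseFieldTrials(field_trials);

  // The encoder-side check compares the group name to "Enabled".
  bool scale =
      trials["WebRTC-MediaCodecVideoEncoder-AutomaticResize"] == "Enabled";
  std::cout << "Encoder automatic resize "
            << (scale ? "enabled" : "disabled") << std::endl;
  return 0;
}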