This CL adds support for encoding from textures in MediaCodecVideoEncoder.

Parts of this change have also been reviewed in https://codereview.webrtc.org/1375953002/.

BUG=webrtc:4993
TBR=glaznew@webrtc.org

Review URL: https://codereview.webrtc.org/1403713002

Cr-Commit-Position: refs/heads/master@{#10725}
Author: perkj
Date: 2015-11-20 01:31:25 -08:00
Committed by: Commit bot
Commit: 30e918278c (parent 5663b4fa9a)
6 changed files with 363 additions and 92 deletions
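For orientation before the diff: the new path hands MediaCodec an input Surface and renders the capturer's OES texture into it with GL, instead of copying YUV bytes into input buffers. Below is a condensed, hypothetical sketch of that flow; it assumes the EglBase and GlRectDrawer helper classes this CL uses and an already-configured encoder MediaCodec, and the class and method names here are illustrative, not part of the CL.

// Condensed sketch of the surface-based encode path this CL adds.
// Assumes the EglBase/GlRectDrawer helpers referenced in the diff below
// and an already-configured encoder MediaCodec; not a drop-in implementation.
import android.media.MediaCodec;
import android.view.Surface;
import javax.microedition.khronos.egl.EGLContext;

class SurfaceEncodeSketch {
  private final EglBase eglBase;
  private final GlRectDrawer drawer = new GlRectDrawer();

  SurfaceEncodeSketch(MediaCodec configuredEncoder, EGLContext sharedContext) {
    // Instead of queueing YUV byte buffers, hand MediaCodec an input Surface...
    Surface inputSurface = configuredEncoder.createInputSurface();
    // ...and attach an EGL surface that shares the capturer's context, so the
    // capturer's OES texture can be sampled here.
    eglBase = new EglBase(sharedContext, EglBase.ConfigType.RECORDABLE);
    eglBase.createSurface(inputSurface);
  }

  // Per frame: draw the OES texture into the codec surface. swapBuffers()
  // is what actually submits the frame to the encoder.
  void encodeFrame(int oesTextureId, float[] transformationMatrix) {
    eglBase.makeCurrent();
    drawer.drawOes(oesTextureId, transformationMatrix);
    eglBase.swapBuffers();
  }
}

The diff below wires this same sequence into initEncode() and encodeTexture().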

File: org/webrtc/MediaCodecVideoEncoderTest.java

@@ -26,19 +26,26 @@
  */

 package org.webrtc;

-import java.nio.ByteBuffer;
+import android.annotation.TargetApi;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
 import android.test.ActivityTestCase;
 import android.test.suitebuilder.annotation.SmallTest;
 import android.util.Log;
 import org.webrtc.MediaCodecVideoEncoder.OutputBufferInfo;

+import java.nio.ByteBuffer;
+import javax.microedition.khronos.egl.EGL10;
+
+@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
 public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
   final static String TAG = "MediaCodecVideoEncoderTest";

   @SmallTest
-  public static void testInitReleaseUsingByteBuffer() {
+  public static void testInitializeUsingByteBuffer() {
     if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
       Log.i(TAG,
           "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
@@ -46,7 +53,37 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
     }
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
     assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30));
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null));
+    encoder.release();
+  }
+
+  @SmallTest
+  public static void testInitializeUsingTextures() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testInitializeUsingTextures");
+      return;
+    }
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+        EGL10.EGL_NO_CONTEXT));
+    encoder.release();
+  }
+
+  @SmallTest
+  public static void testInitializeUsingByteBufferReInitializeUsingTextures() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG,
+          "Hardware does not support VP8 encoding, skipping testInitializeUsingByteBufferReInitializeUsingTextures");
+      return;
+    }
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+        null));
+    encoder.release();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+        EGL10.EGL_NO_CONTEXT));
     encoder.release();
   }
@@ -65,7 +102,7 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
     assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30));
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30, null));
     ByteBuffer[] inputBuffers = encoder.getInputBuffers();
     assertNotNull(inputBuffers);
     assertTrue(min_size <= inputBuffers[0].capacity());
@@ -92,4 +129,49 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
     encoder.release();
   }
+
+  @SmallTest
+  public static void testEncoderUsingTextures() throws InterruptedException {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
+      return;
+    }
+
+    final int width = 640;
+    final int height = 480;
+    final long presentationTs = 2;
+
+    final EglBase eglOesBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+    eglOesBase.createDummyPbufferSurface();
+    eglOesBase.makeCurrent();
+    int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+
+    // TODO(perkj): This test is weak since we don't fill the texture with valid data of the
+    // correct width and height and verify the encoded data. Fill the OES texture and figure
+    // out a way to verify that the output makes sense.
+
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30,
+        eglOesBase.getContext()));
+    assertTrue(encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(),
+        presentationTs));
+    GlUtil.checkNoGLES2Error("encodeTexture");
+
+    // It should be Ok to delete the texture after calling encodeTexture.
+    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+
+    OutputBufferInfo info = encoder.dequeueOutputBuffer();
+    while (info == null) {
+      info = encoder.dequeueOutputBuffer();
+      Thread.sleep(20);
+    }
+    assertTrue(info.index != -1);
+    assertTrue(info.buffer.capacity() > 0);
+    encoder.releaseOutputBuffer(info.index);
+
+    encoder.release();
+    eglOesBase.release();
+  }
 }

File: talk/app/webrtc/java/jni/androidmediaencoder_jni.cc

@@ -29,13 +29,14 @@
 #include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
+#include "talk/app/webrtc/java/jni/native_handle_impl.h"
 #include "webrtc/base/bind.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/thread.h"
 #include "webrtc/base/thread_checker.h"
 #include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
 #include "webrtc/modules/video_coding/utility/quality_scaler.h"
 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
 #include "webrtc/system_wrappers/include/field_trial.h"
@@ -82,7 +83,8 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
  public:
   virtual ~MediaCodecVideoEncoder();
   MediaCodecVideoEncoder(JNIEnv* jni,
-                         VideoCodecType codecType);
+                         VideoCodecType codecType,
+                         jobject egl_context);

   // webrtc::VideoEncoder implementation.  Everything trampolines to
   // |codec_thread_| for execution.
@@ -106,6 +108,8 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
   int GetTargetFramerate() override;

+  bool SupportsNativeHandle() const override { return true; }
+
  private:
   // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
   // InitEncodeOnCodecThread() in an attempt to restore the codec to an
@@ -118,15 +122,19 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
   // If width==0 then this is assumed to be a re-initialization and the
   // previously-current values are reused instead of the passed parameters
   // (makes it easier to reason about thread-safety).
-  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
-  // Reconfigure to match |frame| in width, height. Returns false if
-  // reconfiguring fails.
+  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
+                                  bool use_surface);
+  // Reconfigure to match |frame| in width, height. Also reconfigures the
+  // encoder if |frame| is a texture/byte buffer and the encoder is initialized
+  // for byte buffer/texture. Returns false if reconfiguring fails.
   bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
   int32_t EncodeOnCodecThread(
       const webrtc::VideoFrame& input_image,
       const std::vector<webrtc::FrameType>* frame_types);
   bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
       bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
+  bool EncodeTextureOnCodecThread(JNIEnv* jni,
+      bool key_frame, const webrtc::VideoFrame& frame);
   int32_t RegisterEncodeCompleteCallbackOnCodecThread(
       webrtc::EncodedImageCallback* callback);
@@ -164,6 +172,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
   jmethodID j_get_input_buffers_method_;
   jmethodID j_dequeue_input_buffer_method_;
   jmethodID j_encode_buffer_method_;
+  jmethodID j_encode_texture_method_;
   jmethodID j_release_method_;
   jmethodID j_set_rates_method_;
   jmethodID j_dequeue_output_buffer_method_;
@@ -179,6 +188,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
   int width_;   // Frame width in pixels.
   int height_;  // Frame height in pixels.
   bool inited_;
+  bool use_surface_;
   uint16_t picture_id_;
   enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
   int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
@@ -220,6 +230,10 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
   // non-flexible VP9 mode.
   uint8_t tl0_pic_idx_;
   size_t gof_idx_;
+
+  // EGL context - owned by factory, should not be allocated/destroyed
+  // by MediaCodecVideoEncoder.
+  jobject egl_context_;
 };

 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
@@ -228,10 +242,11 @@ MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
 }

 MediaCodecVideoEncoder::MediaCodecVideoEncoder(
-    JNIEnv* jni, VideoCodecType codecType) :
+    JNIEnv* jni, VideoCodecType codecType, jobject egl_context) :
     codecType_(codecType),
     callback_(NULL),
     inited_(false),
+    use_surface_(false),
     picture_id_(0),
     codec_thread_(new Thread()),
     j_media_codec_video_encoder_class_(
@@ -243,7 +258,8 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(
         GetMethodID(jni,
                     *j_media_codec_video_encoder_class_,
                     "<init>",
-                    "()V"))) {
+                    "()V"))),
+    egl_context_(egl_context) {
   ScopedLocalRefFrame local_ref_frame(jni);
   // It would be nice to avoid spinning up a new thread per MediaCodec, and
   // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
@@ -261,7 +277,8 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(
       jni,
       *j_media_codec_video_encoder_class_,
       "initEncode",
-      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)Z");
+      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
+      "IIIILjavax/microedition/khronos/egl/EGLContext;)Z");
   j_get_input_buffers_method_ = GetMethodID(
       jni,
       *j_media_codec_video_encoder_class_,
@@ -271,6 +288,9 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(
       jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
   j_encode_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
+  j_encode_texture_method_ = GetMethodID(
+      jni, *j_media_codec_video_encoder_class_, "encodeTexture",
+      "(ZI[FJ)Z");
   j_release_method_ =
       GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
   j_set_rates_method_ = GetMethodID(
@@ -351,7 +371,8 @@ int32_t MediaCodecVideoEncoder::InitEncode(
                  codec_settings->width,
                  codec_settings->height,
                  codec_settings->startBitrate,
-                 codec_settings->maxFramerate));
+                 codec_settings->maxFramerate,
+                 false /* use_surface */));
 }

 int32_t MediaCodecVideoEncoder::Encode(
@@ -417,8 +438,8 @@ bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   ALOGE << "ResetOnCodecThread";
   if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
-      InitEncodeOnCodecThread(width_, height_, 0, 0)
-      != WEBRTC_VIDEO_CODEC_OK) {
+      InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
+          WEBRTC_VIDEO_CODEC_OK) {
     // TODO(fischman): wouldn't it be nice if there was a way to gracefully
     // degrade to a SW encoder at this point? There isn't one AFAICT :(
     // https://code.google.com/p/webrtc/issues/detail?id=2920
@@ -428,8 +449,9 @@ bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
 }

 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
-    int width, int height, int kbps, int fps) {
+    int width, int height, int kbps, int fps, bool use_surface) {
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set.";
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
@@ -465,6 +487,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
   render_times_ms_.clear();
   frame_rtc_times_ms_.clear();
   drop_next_input_frame_ = false;
+  use_surface_ = use_surface;
   picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
   gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
   tl0_pic_idx_ = static_cast<uint8_t>(rand());
@@ -475,13 +498,17 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
       jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
   const bool encode_status = jni->CallBooleanMethod(
       *j_media_codec_video_encoder_, j_init_encode_method_,
-      j_video_codec_enum, width, height, kbps, fps);
+      j_video_codec_enum, width, height, kbps, fps,
+      (use_surface ? egl_context_ : nullptr));
   if (!encode_status) {
     ALOGE << "Failed to configure encoder.";
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
   CHECK_EXCEPTION(jni);

+  if (use_surface) {
+    scale_ = false;  // TODO(perkj): Implement scaling when using textures.
+  } else {
     jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
         jni->CallObjectMethod(*j_media_codec_video_encoder_,
                               j_get_input_buffers_method_));
@@ -516,8 +543,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
       CHECK_EXCEPTION(jni);
       RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
     }
-  CHECK_EXCEPTION(jni);
+  }

   inited_ = true;
   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
@@ -575,6 +601,9 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
     }
   }

+  const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta;
+  bool encode_status = true;
+  if (!input_frame.native_handle()) {
     int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
         j_dequeue_input_buffer_method_);
     CHECK_EXCEPTION(jni);
@@ -590,6 +619,17 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
       ResetCodecOnCodecThread();
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
+    encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
+                                                  j_input_buffer_index);
+  } else {
+    encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
+  }
+
+  if (!encode_status) {
+    ALOGE << "Failed to encode frame with timestamp: "
+          << input_frame.timestamp();
+    ResetCodecOnCodecThread();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }

   last_input_timestamp_ms_ =
       current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
@@ -599,15 +639,9 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
   timestamps_.push_back(input_frame.timestamp());
   render_times_ms_.push_back(input_frame.render_time_ms());
   frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
-
-  const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta;
-  const bool encode_status =
-      EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
-          j_input_buffer_index);
   current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;

-  if (!encode_status || !DeliverPendingOutputs(jni)) {
+  if (!DeliverPendingOutputs(jni)) {
     ALOGE << "Failed deliver pending outputs.";
     ResetCodecOnCodecThread();
     return WEBRTC_VIDEO_CODEC_ERROR;
@@ -619,9 +653,17 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
     const webrtc::VideoFrame& frame) {
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());

+  const bool is_texture_frame = frame.native_handle() != nullptr;
+  const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
   const bool reconfigure_due_to_size =
       frame.width() != width_ || frame.height() != height_;

+  if (reconfigure_due_to_format) {
+    ALOGD << "Reconfigure encoder due to format change. "
+          << (use_surface_ ?
+              "Reconfiguring to encode from byte buffer." :
+              "Reconfiguring to encode from texture.");
+  }
   if (reconfigure_due_to_size) {
     ALOGD << "Reconfigure encoder due to frame resolution change from "
           << width_ << " x " << height_ << " to " << frame.width() << " x "
@@ -630,18 +672,19 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
     height_ = frame.height();
   }

-  if (!reconfigure_due_to_size)
+  if (!reconfigure_due_to_format && !reconfigure_due_to_size)
     return true;

   ReleaseOnCodecThread();

-  return InitEncodeOnCodecThread(width_, height_, 0, 0) ==
+  return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) ==
       WEBRTC_VIDEO_CODEC_OK;
 }

 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
     bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(!use_surface_);
   ALOGV("Encoder frame in # %d. TS: %lld. Q: %d",
       frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_);
@@ -668,6 +711,25 @@ bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
   return encode_status;
 }

+bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
+    bool key_frame, const webrtc::VideoFrame& frame) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(use_surface_);
+  NativeHandleImpl* handle =
+      static_cast<NativeHandleImpl*>(frame.native_handle());
+  jfloatArray sampling_matrix = jni->NewFloatArray(16);
+  jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+
+  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                              j_encode_texture_method_,
+                                              key_frame,
+                                              handle->oes_texture_id,
+                                              sampling_matrix,
+                                              current_timestamp_us_);
+  CHECK_EXCEPTION(jni);
+  return encode_status;
+}
+
 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
     webrtc::EncodedImageCallback* callback) {
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
@@ -694,6 +756,7 @@ int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
   CHECK_EXCEPTION(jni);
   rtc::MessageQueueManager::Clear(this);
   inited_ = false;
+  use_surface_ = false;
   ALOGD << "EncoderReleaseOnCodecThread done.";
   return WEBRTC_VIDEO_CODEC_OK;
 }
@@ -991,7 +1054,8 @@ int MediaCodecVideoEncoder::GetTargetFramerate() {
   return scale_ ? quality_scaler_.GetTargetFramerate() : -1;
 }

-MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
+MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
+    : egl_context_(nullptr) {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
   jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
@@ -1030,6 +1094,33 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}

+void MediaCodecVideoEncoderFactory::SetEGLContext(
+    JNIEnv* jni, jobject render_egl_context) {
+  ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
+  if (egl_context_) {
+    jni->DeleteGlobalRef(egl_context_);
+    egl_context_ = NULL;
+  }
+  if (!IsNull(jni, render_egl_context)) {
+    egl_context_ = jni->NewGlobalRef(render_egl_context);
+    if (CheckException(jni)) {
+      ALOGE << "Error calling NewGlobalRef for EGL Context.";
+      egl_context_ = NULL;
+    } else {
+      jclass j_egl_context_class =
+          FindClass(jni, "javax/microedition/khronos/egl/EGLContext");
+      if (!jni->IsInstanceOf(egl_context_, j_egl_context_class)) {
+        ALOGE << "Wrong EGL Context.";
+        jni->DeleteGlobalRef(egl_context_);
+        egl_context_ = NULL;
+      }
+    }
+  }
+  if (egl_context_ == NULL) {
+    ALOGW << "NULL VideoEncoder EGL context - HW surface encoding is disabled.";
+  }
+}
+
 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
     VideoCodecType type) {
   if (supported_codecs_.empty()) {
@@ -1041,7 +1132,8 @@ webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
     if (it->type == type) {
       ALOGD << "Create HW video encoder for type " << (int)type <<
           " (" << it->name << ").";
-      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type);
+      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type,
+                                        egl_context_);
     }
   }
   ALOGW << "Can not find HW video encoder for type " << (int)type;

File: talk/app/webrtc/java/jni/androidmediaencoder_jni.h

@@ -43,6 +43,8 @@ class MediaCodecVideoEncoderFactory
   MediaCodecVideoEncoderFactory();
   virtual ~MediaCodecVideoEncoderFactory();

+  void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
   // WebRtcVideoEncoderFactory implementation.
   webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
       override;
@@ -50,6 +52,7 @@ class MediaCodecVideoEncoderFactory
   void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override;

  private:
+  jobject egl_context_;
   // Empty if platform support is lacking, const after ctor returns.
   std::vector<VideoCodec> supported_codecs_;
 };

File: talk/app/webrtc/java/jni/peerconnection_jni.cc

@@ -1292,21 +1292,30 @@ JOW(void, PeerConnectionFactory_nativeSetOptions)(
 }

 JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
-    JNIEnv* jni, jclass, jlong native_factory, jobject render_egl_context) {
+    JNIEnv* jni, jclass, jlong native_factory, jobject local_egl_context,
+    jobject remote_egl_context) {
 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
   OwnedFactoryAndThreads* owned_factory =
       reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+  MediaCodecVideoEncoderFactory* encoder_factory =
+      static_cast<MediaCodecVideoEncoderFactory*>
+          (owned_factory->encoder_factory());
+  if (encoder_factory) {
+    LOG(LS_INFO) << "Set EGL context for HW encoding.";
+    encoder_factory->SetEGLContext(jni, local_egl_context);
+  }
+
   MediaCodecVideoDecoderFactory* decoder_factory =
       static_cast<MediaCodecVideoDecoderFactory*>
           (owned_factory->decoder_factory());
   if (decoder_factory) {
-    LOG(LS_INFO) << "Set EGL context for HW acceleration.";
-    decoder_factory->SetEGLContext(jni, render_egl_context);
+    LOG(LS_INFO) << "Set EGL context for HW decoding.";
+    decoder_factory->SetEGLContext(jni, remote_egl_context);
   }
 #endif
 }

 static std::string
 GetJavaEnumName(JNIEnv* jni, const std::string& className, jobject j_enum) {
   jclass enumClass = FindClass(jni, className.c_str());

File: org/webrtc/MediaCodecVideoEncoder.java

@@ -33,8 +33,10 @@ import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
+import android.opengl.GLES20;
 import android.os.Build;
 import android.os.Bundle;
+import android.view.Surface;

 import org.webrtc.Logging;
@@ -43,6 +45,8 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;

+import javax.microedition.khronos.egl.EGLContext;
+
 // Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
 // This class is an implementation detail of the Java PeerConnection API.
 @TargetApi(19)
@@ -73,6 +77,9 @@ public class MediaCodecVideoEncoder {
   private Thread mediaCodecThread;
   private MediaCodec mediaCodec;
   private ByteBuffer[] outputBuffers;
+  private EglBase eglBase;
+  private Surface inputSurface;
+  private GlRectDrawer drawer;
   private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
   private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
   private static final String H264_MIME_TYPE = "video/avc";
@@ -109,6 +116,9 @@ public class MediaCodecVideoEncoder {
     CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
     COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
   };
+  private static final int[] supportedSurfaceColorList = {
+    CodecCapabilities.COLOR_FormatSurface
+  };
   private VideoCodecType type;
   private int colorFormat;  // Used by native code.
@@ -138,7 +148,7 @@ public class MediaCodecVideoEncoder {
   }

   private static EncoderProperties findHwEncoder(
-      String mime, String[] supportedHwCodecPrefixes) {
+      String mime, String[] supportedHwCodecPrefixes, int[] colorList) {
     // MediaCodec.setParameters is missing for JB and below, so bitrate
     // can not be adjusted dynamically.
     if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
@@ -188,8 +198,7 @@ public class MediaCodecVideoEncoder {
         Logging.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
       }

-      // Check if codec supports either yuv420 or nv12.
-      for (int supportedColorFormat : supportedColorList) {
+      for (int supportedColorFormat : colorList) {
         for (int codecColorFormat : capabilities.colorFormats) {
           if (codecColorFormat == supportedColorFormat) {
             // Found supported HW encoder.
@@ -204,14 +213,30 @@ public class MediaCodecVideoEncoder {
   }

   public static boolean isVp8HwSupported() {
-    return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
+    return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedColorList) != null;
   }

   public static boolean isVp9HwSupported() {
-    return findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null;
+    return findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedColorList) != null;
   }

   public static boolean isH264HwSupported() {
-    return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
+    return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedColorList) != null;
+  }
+
+  public static boolean isVp8HwSupportedUsingTextures() {
+    return findHwEncoder(
+        VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedSurfaceColorList) != null;
+  }
+
+  public static boolean isVp9HwSupportedUsingTextures() {
+    return findHwEncoder(
+        VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedSurfaceColorList) != null;
+  }
+
+  public static boolean isH264HwSupportedUsingTextures() {
+    return findHwEncoder(
+        H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedSurfaceColorList) != null;
   }

   private void checkOnMediaCodecThread() {
@@ -244,10 +269,11 @@ public class MediaCodecVideoEncoder {
     }
   }

-  // Returns false if the hardware encoder currently can't be used.
-  boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps) {
+  boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
+      EGLContext sharedContext) {
+    final boolean useSurface = sharedContext != null;
     Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
-        ". @ " + kbps + " kbps. Fps: " + fps + ".");
+        ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture: " + useSurface);

     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
@@ -257,15 +283,18 @@ public class MediaCodecVideoEncoder {
     int keyFrameIntervalSec = 0;
     if (type == VideoCodecType.VIDEO_CODEC_VP8) {
       mime = VP8_MIME_TYPE;
-      properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes);
+      properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes,
+          useSurface ? supportedSurfaceColorList : supportedColorList);
       keyFrameIntervalSec = 100;
     } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
       mime = VP9_MIME_TYPE;
-      properties = findHwEncoder(VP9_MIME_TYPE, supportedH264HwCodecPrefixes);
+      properties = findHwEncoder(VP9_MIME_TYPE, supportedH264HwCodecPrefixes,
+          useSurface ? supportedSurfaceColorList : supportedColorList);
       keyFrameIntervalSec = 100;
     } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
       mime = H264_MIME_TYPE;
-      properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes);
+      properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes,
+          useSurface ? supportedSurfaceColorList : supportedColorList);
       keyFrameIntervalSec = 20;
     }
     if (properties == null) {
@@ -293,6 +322,13 @@ public class MediaCodecVideoEncoder {
       mediaCodec.configure(
           format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+      if (useSurface) {
+        eglBase = new EglBase(sharedContext, EglBase.ConfigType.RECORDABLE);
+        // Create an input surface and keep a reference since we must release the surface when done.
+        inputSurface = mediaCodec.createInputSurface();
+        eglBase.createSurface(inputSurface);
+        drawer = new GlRectDrawer();
+      }
       mediaCodec.start();
       outputBuffers = mediaCodec.getOutputBuffers();
       Logging.d(TAG, "Output buffers: " + outputBuffers.length);
@@ -335,6 +371,29 @@ public class MediaCodecVideoEncoder {
     }
   }

+  boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
+      long presentationTimestampUs) {
+    checkOnMediaCodecThread();
+    try {
+      if (isKeyframe) {
+        Logging.d(TAG, "Sync frame request");
+        Bundle b = new Bundle();
+        b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+        mediaCodec.setParameters(b);
+      }
+      eglBase.makeCurrent();
+      drawer.drawOes(oesTextureId, transformationMatrix);
+      // TODO(perkj): Do we have to call EGLExt.eglPresentationTimeANDROID ?
+      // If not, remove |presentationTimestampUs|.
+      eglBase.swapBuffers();
+      return true;
+    } catch (RuntimeException e) {
+      Logging.e(TAG, "encodeTexture failed", e);
+      return false;
+    }
+  }
+
   void release() {
     Logging.d(TAG, "Java releaseEncoder");
     checkOnMediaCodecThread();
@@ -370,6 +429,18 @@ public class MediaCodecVideoEncoder {
     mediaCodec = null;
     mediaCodecThread = null;
+    if (drawer != null) {
+      drawer.release();
+      drawer = null;
+    }
+    if (eglBase != null) {
+      eglBase.release();
+      eglBase = null;
+    }
+    if (inputSurface != null) {
+      inputSurface.release();
+      inputSurface = null;
+    }
     runningInstance = null;
     Logging.d(TAG, "Java releaseEncoder done");
   }

File: org/webrtc/PeerConnectionFactory.java

@@ -135,8 +135,22 @@ public class PeerConnectionFactory {
     nativeSetOptions(nativeFactory, options);
   }

+  @Deprecated
   public void setVideoHwAccelerationOptions(Object renderEGLContext) {
-    nativeSetVideoHwAccelerationOptions(nativeFactory, renderEGLContext);
+    nativeSetVideoHwAccelerationOptions(nativeFactory, renderEGLContext, renderEGLContext);
+  }
+
+  /** Set the EGL context used by HW Video encoding and decoding.
+   *
+   * @param localEGLContext An instance of javax.microedition.khronos.egl.EGLContext.
+   *                        Must be the same as used by VideoCapturerAndroid and any local
+   *                        video renderer.
+   * @param remoteEGLContext An instance of javax.microedition.khronos.egl.EGLContext.
+   *                         Must be the same as used by any remote video renderer.
+   */
+  public void setVideoHwAccelerationOptions(Object localEGLContext, Object remoteEGLContext) {
+    nativeSetVideoHwAccelerationOptions(nativeFactory, localEGLContext, remoteEGLContext);
   }

   public void dispose() {
@@ -204,7 +218,7 @@ public class PeerConnectionFactory {
   public native void nativeSetOptions(long nativeFactory, Options options);

   private static native void nativeSetVideoHwAccelerationOptions(
-      long nativeFactory, Object renderEGLContext);
+      long nativeFactory, Object localEGLContext, Object remoteEGLContext);

   private static native void nativeThreadsCallbacks(long nativeFactory);
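For application code, the practical change is that the single EGL context argument is split in two. A minimal, hypothetical migration sketch follows; the HwAccelerationSetup class and the context variable names are illustrative, and per the javadoc above the local context must match VideoCapturerAndroid and any local renderer while the remote context must match any remote renderer.

// Illustrative caller-side sketch; not part of this CL.
import javax.microedition.khronos.egl.EGLContext;
import org.webrtc.MediaCodecVideoEncoder;
import org.webrtc.PeerConnectionFactory;

class HwAccelerationSetup {
  static void configure(PeerConnectionFactory factory,
      EGLContext localEglContext, EGLContext remoteEglContext) {
    // Texture (surface) encoding needs a codec that advertises
    // COLOR_FormatSurface; check before wiring up the local context.
    if (MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
      // Local context enables texture encoding, remote context HW decoding.
      factory.setVideoHwAccelerationOptions(localEglContext, remoteEglContext);
    } else {
      // Passing null for the local context leaves HW surface encoding
      // disabled (the factory logs this) while keeping HW decoding.
      factory.setVideoHwAccelerationOptions(null, remoteEglContext);
    }
  }
}

The old single-argument overload remains for source compatibility but is now deprecated; as the diff shows, it simply passes the same context for both the encode and decode sides.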