- Make the shared EGL context used for HW video decoding a member
  of the decoder factory class.
- Add new Peer connection factory method to initialize shared
EGL context.

This provides an option to use single peer connection factory
in the application and create peer connections from the same
factory and reinitialize shared EGL context for video
decoding HW acceleration.

R=wzh@webrtc.org

Review URL: https://codereview.webrtc.org/1304063011 .

Cr-Commit-Position: refs/heads/master@{#9838}
This commit is contained in:
Alex Glaznev
2015-09-01 15:04:13 -07:00
parent c36d4df250
commit 4d2f4d1c69
10 changed files with 129 additions and 90 deletions

View File

@ -35,8 +35,7 @@ public class PeerConnectionAndroidTest extends ActivityTestCase {
@Override
protected void setUp() {
assertTrue(PeerConnectionFactory.initializeAndroidGlobals(
getInstrumentation().getContext(), true,
true, true, null));
getInstrumentation().getContext(), true, true, true));
}
public void testCompleteSession() throws Exception {

View File

@ -171,8 +171,7 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
@Override
protected void setUp() {
assertTrue(PeerConnectionFactory.initializeAndroidGlobals(
getInstrumentation().getContext(), true,
true, true, null));
getInstrumentation().getContext(), true, true, true));
}
@SmallTest

View File

@ -47,6 +47,7 @@ namespace webrtc_jni {
#define ALOGV(...)
#endif
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
// Color formats supported by encoder - should mirror supportedColorList

View File

@ -61,16 +61,13 @@ using webrtc::kVideoCodecVP8;
namespace webrtc_jni {
jobject MediaCodecVideoDecoderFactory::render_egl_context_ = NULL;
class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
public rtc::MessageHandler {
public:
explicit MediaCodecVideoDecoder(JNIEnv* jni, VideoCodecType codecType);
explicit MediaCodecVideoDecoder(
JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context);
virtual ~MediaCodecVideoDecoder();
static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);
int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
override;
@ -158,11 +155,16 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
std::vector<jobject> input_buffers_;
jobject surface_texture_;
jobject previous_surface_texture_;
// Render EGL context - owned by factory, should not be allocated/destroyed
// by VideoDecoder.
jobject render_egl_context_;
};
MediaCodecVideoDecoder::MediaCodecVideoDecoder(
JNIEnv* jni, VideoCodecType codecType) :
JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) :
codecType_(codecType),
render_egl_context_(render_egl_context),
key_frame_required_(true),
inited_(false),
sw_fallback_required_(false),
@ -233,10 +235,8 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");
CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
use_surface_ = true;
if (MediaCodecVideoDecoderFactory::render_egl_context_ == NULL) {
use_surface_ = false;
}
use_surface_ = (render_egl_context_ != NULL) ? true : false;
ALOGD("MediaCodecVideoDecoder ctor. Use surface: %d", use_surface_);
memset(&codec_, 0, sizeof(codec_));
AllowBlockingCalls();
}
@ -310,7 +310,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
codec_.width,
codec_.height,
use_surface_,
MediaCodecVideoDecoderFactory::render_egl_context_);
render_egl_context_);
if (CheckException(jni) || !success) {
ALOGE("Codec initialization error - fallback to SW codec.");
sw_fallback_required_ = true;
@ -745,35 +745,9 @@ void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}
// Installs (or clears) the static EGL context used for HW surface decoding.
// Takes a JNI global ref on |render_egl_context| so it outlives the caller's
// local reference frame; any previously installed global ref is released
// first. Passing a null context disables surface decoding.
// Returns 0 unconditionally (legacy convention for this entry point).
int MediaCodecVideoDecoderFactory::SetAndroidObjects(JNIEnv* jni,
    jobject render_egl_context) {
  ALOGD("SetAndroidObjects for surface decoding.");
  // Release the previously held global ref, if any, before replacing it so
  // repeated calls do not leak references.
  if (render_egl_context_) {
    jni->DeleteGlobalRef(render_egl_context_);
  }
  if (IsNull(jni, render_egl_context)) {
    render_egl_context_ = NULL;
  } else {
    // Promote the local ref to a global ref so the context survives beyond
    // this JNI call.
    render_egl_context_ = jni->NewGlobalRef(render_egl_context);
    if (CheckException(jni)) {
      ALOGE("error calling NewGlobalRef for EGL Context.");
      render_egl_context_ = NULL;
    } else {
      // Sanity-check that the caller really passed an EGLContext instance.
      jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
      if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
        ALOGE("Wrong EGL Context.");
        jni->DeleteGlobalRef(render_egl_context_);
        render_egl_context_ = NULL;
      }
    }
  }
  // A NULL context is legal: decoders fall back to byte-buffer output.
  if (render_egl_context_ == NULL) {
    ALOGD("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
  }
  return 0;
}
MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() :
render_egl_context_(NULL) {
ALOGD("MediaCodecVideoDecoderFactory ctor");
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
@ -802,21 +776,55 @@ MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
}
}
MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {}
// Releases the global ref held on the shared render EGL context, if any.
MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
  ALOGD("MediaCodecVideoDecoderFactory dtor");
  if (render_egl_context_ == NULL) {
    return;  // No EGL context was ever installed; nothing to release.
  }
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  jni->DeleteGlobalRef(render_egl_context_);
  render_egl_context_ = NULL;
}
// Replaces the factory's shared EGL context used for HW surface decoding.
// Releases any previously held global ref, then takes a new global ref on
// |render_egl_context| after verifying it is an android.opengl.EGLContext.
// Passing null (or a non-EGLContext object) leaves the context NULL, which
// disables surface output for decoders created from this factory.
void MediaCodecVideoDecoderFactory::SetEGLContext(
    JNIEnv* jni, jobject render_egl_context) {
  ALOGD("MediaCodecVideoDecoderFactory::SetEGLContext");
  // Drop the old global ref first so repeated calls do not leak references.
  if (render_egl_context_) {
    jni->DeleteGlobalRef(render_egl_context_);
    render_egl_context_ = NULL;
  }
  if (!IsNull(jni, render_egl_context)) {
    // Promote to a global ref so the context outlives this JNI frame.
    render_egl_context_ = jni->NewGlobalRef(render_egl_context);
    if (CheckException(jni)) {
      ALOGE("error calling NewGlobalRef for EGL Context.");
      render_egl_context_ = NULL;
    } else {
      // Verify the caller actually passed an EGLContext instance.
      jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
      if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
        ALOGE("Wrong EGL Context.");
        jni->DeleteGlobalRef(render_egl_context_);
        render_egl_context_ = NULL;
      }
    }
  }
  // A NULL context is legal: HW surface decoding is simply disabled.
  if (render_egl_context_ == NULL) {
    ALOGW("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
  }
}
// Creates a MediaCodec-backed decoder for |type|, passing it the factory's
// shared render EGL context (may be NULL, which disables surface output).
// Returns NULL when no HW decoder is available for the requested type.
webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
    VideoCodecType type) {
  if (supported_codec_types_.empty()) {
    ALOGE("No HW video decoder for type %d.", (int)type);
    return NULL;
  }
  for (VideoCodecType codec_type : supported_codec_types_) {
    if (codec_type == type) {
      ALOGD("Create HW video decoder for type %d.", (int)type);
      return new MediaCodecVideoDecoder(
          AttachCurrentThreadIfNeeded(), type, render_egl_context_);
    }
  }
  ALOGE("Can not find HW video decoder for type %d.", (int)type);
  return NULL;
}

View File

@ -40,17 +40,17 @@ class MediaCodecVideoDecoderFactory
public:
MediaCodecVideoDecoderFactory();
virtual ~MediaCodecVideoDecoderFactory();
static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);
void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
// WebRtcVideoDecoderFactory implementation.
webrtc::VideoDecoder* CreateVideoDecoder(webrtc::VideoCodecType type)
override;
void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) override;
// Render EGL context.
static jobject render_egl_context_;
private:
jobject render_egl_context_; // Render EGL context.
std::vector<webrtc::VideoCodecType> supported_codec_types_;
};

View File

@ -90,6 +90,8 @@
using webrtc::LogcatTraceContext;
#endif
using cricket::WebRtcVideoDecoderFactory;
using cricket::WebRtcVideoEncoderFactory;
using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;
@ -129,7 +131,7 @@ static char *field_trials_init_string = NULL;
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// Set in PeerConnectionFactory_initializeAndroidGlobals().
static bool factory_static_initialized = false;
static bool vp8_hw_acceleration_enabled = true;
static bool video_hw_acceleration_enabled = true;
#endif
extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
@ -999,9 +1001,9 @@ JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
JNIEnv* jni, jclass, jobject context,
jboolean initialize_audio, jboolean initialize_video,
jboolean vp8_hw_acceleration, jobject render_egl_context) {
jboolean video_hw_acceleration) {
bool failure = false;
vp8_hw_acceleration_enabled = vp8_hw_acceleration;
video_hw_acceleration_enabled = video_hw_acceleration;
if (!factory_static_initialized) {
if (initialize_video) {
failure |= webrtc::SetRenderAndroidVM(GetJVM());
@ -1011,10 +1013,6 @@ JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
failure |= webrtc::VoiceEngine::SetAndroidObjects(GetJVM(), context);
factory_static_initialized = true;
}
if (initialize_video) {
failure |= MediaCodecVideoDecoderFactory::SetAndroidObjects(jni,
render_egl_context);
}
return !failure;
}
#endif // defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
@ -1044,18 +1042,26 @@ class OwnedFactoryAndThreads {
public:
OwnedFactoryAndThreads(Thread* worker_thread,
Thread* signaling_thread,
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory,
PeerConnectionFactoryInterface* factory)
: worker_thread_(worker_thread),
signaling_thread_(signaling_thread),
encoder_factory_(encoder_factory),
decoder_factory_(decoder_factory),
factory_(factory) {}
~OwnedFactoryAndThreads() { CHECK_RELEASE(factory_); }
PeerConnectionFactoryInterface* factory() { return factory_; }
WebRtcVideoEncoderFactory* encoder_factory() { return encoder_factory_; }
WebRtcVideoDecoderFactory* decoder_factory() { return decoder_factory_; }
private:
const scoped_ptr<Thread> worker_thread_;
const scoped_ptr<Thread> signaling_thread_;
WebRtcVideoEncoderFactory* encoder_factory_;
WebRtcVideoDecoderFactory* decoder_factory_;
PeerConnectionFactoryInterface* factory_; // Const after ctor except dtor.
};
@ -1074,22 +1080,24 @@ JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
signaling_thread->SetName("signaling_thread", NULL);
CHECK(worker_thread->Start() && signaling_thread->Start())
<< "Failed to start threads";
scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;
scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
WebRtcVideoEncoderFactory* encoder_factory = nullptr;
WebRtcVideoDecoderFactory* decoder_factory = nullptr;
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
if (vp8_hw_acceleration_enabled) {
encoder_factory.reset(new MediaCodecVideoEncoderFactory());
decoder_factory.reset(new MediaCodecVideoDecoderFactory());
if (video_hw_acceleration_enabled) {
encoder_factory = new MediaCodecVideoEncoderFactory();
decoder_factory = new MediaCodecVideoDecoderFactory();
}
#endif
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
webrtc::CreatePeerConnectionFactory(worker_thread,
signaling_thread,
NULL,
encoder_factory.release(),
decoder_factory.release()));
encoder_factory,
decoder_factory));
OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
worker_thread, signaling_thread, factory.release());
worker_thread, signaling_thread,
encoder_factory, decoder_factory,
factory.release());
return jlongFromPointer(owned_factory);
}
@ -1188,6 +1196,22 @@ JOW(void, PeerConnectionFactory_nativeSetOptions)(
factory->SetOptions(options_to_set);
}
// JNI entry point: forwards |render_egl_context| to the MediaCodec decoder
// factory owned by this peer connection factory, (re)initializing the shared
// EGL context used for HW-accelerated surface decoding.
JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
    JNIEnv* jni, jclass, jlong native_factory, jobject render_egl_context) {
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
  OwnedFactoryAndThreads* owned =
      reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
  MediaCodecVideoDecoderFactory* media_codec_factory =
      static_cast<MediaCodecVideoDecoderFactory*>(owned->decoder_factory());
  // The decoder factory is only present when HW acceleration was enabled.
  if (media_codec_factory) {
    LOG(LS_INFO) << "Set EGL context for HW acceleration.";
    media_codec_factory->SetEGLContext(jni, render_egl_context);
  }
#endif
}
static std::string
GetJavaEnumName(JNIEnv* jni, const std::string& className, jobject j_enum) {
jclass enumClass = FindClass(jni, className.c_str());

View File

@ -64,7 +64,7 @@ public class PeerConnectionFactory {
// decoding thread.
public static native boolean initializeAndroidGlobals(
Object context, boolean initializeAudio, boolean initializeVideo,
boolean vp8HwAcceleration, Object renderEGLContext);
boolean videoHwAcceleration);
// Field trial initialization. Must be called before PeerConnectionFactory
// is created.
@ -131,12 +131,14 @@ public class PeerConnectionFactory {
nativeSetOptions(nativeFactory, options);
}
// Sets (or resets) the EGL context handed to the HW video decoder factory
// for surface-based decoding. May be called after factory creation, so one
// factory can be reused across EGL context changes.
public void setVideoHwAccelerationOptions(Object renderEGLContext) {
  nativeSetVideoHwAccelerationOptions(nativeFactory, renderEGLContext);
}
// Frees the underlying native factory. The object should not be used
// after this call — presumably nativeFactory is dangling afterwards
// (NOTE(review): confirm against freeFactory's native implementation).
public void dispose() {
  freeFactory(nativeFactory);
}
public native void nativeSetOptions(long nativeFactory, Options options);
private static native long nativeCreatePeerConnectionFactory();
private static native long nativeCreateObserver(
@ -162,5 +164,10 @@ public class PeerConnectionFactory {
private static native long nativeCreateAudioTrack(
long nativeFactory, String id, long nativeSource);
public native void nativeSetOptions(long nativeFactory, Options options);
private static native void nativeSetVideoHwAccelerationOptions(
long nativeFactory, Object renderEGLContext);
private static native void freeFactory(long nativeFactory);
}

View File

@ -394,8 +394,7 @@ public class CallActivity extends Activity
Log.d(TAG, "Creating peer connection factory, delay=" + delta + "ms");
peerConnectionClient = PeerConnectionClient.getInstance();
peerConnectionClient.createPeerConnectionFactory(CallActivity.this,
VideoRendererGui.getEGLContext(), peerConnectionParameters,
CallActivity.this);
peerConnectionParameters, CallActivity.this);
}
if (signalingParameters != null) {
Log.w(TAG, "EGL context is ready after room connection.");
@ -481,7 +480,7 @@ public class CallActivity extends Activity
return;
}
logAndToast("Creating peer connection, delay=" + delta + "ms");
peerConnectionClient.createPeerConnection(
peerConnectionClient.createPeerConnection(VideoRendererGui.getEGLContext(),
localRender, remoteRender, signalingParameters);
if (signalingParameters.initiator) {

View File

@ -216,7 +216,6 @@ public class PeerConnectionClient {
public void createPeerConnectionFactory(
final Context context,
final EGLContext renderEGLContext,
final PeerConnectionParameters peerConnectionParameters,
final PeerConnectionEvents events) {
this.peerConnectionParameters = peerConnectionParameters;
@ -241,12 +240,13 @@ public class PeerConnectionClient {
executor.execute(new Runnable() {
@Override
public void run() {
createPeerConnectionFactoryInternal(context, renderEGLContext);
createPeerConnectionFactoryInternal(context);
}
});
}
public void createPeerConnection(
final EGLContext renderEGLContext,
final VideoRenderer.Callbacks localRender,
final VideoRenderer.Callbacks remoteRender,
final SignalingParameters signalingParameters) {
@ -261,7 +261,7 @@ public class PeerConnectionClient {
@Override
public void run() {
createMediaConstraintsInternal();
createPeerConnectionInternal();
createPeerConnectionInternal(renderEGLContext);
}
});
}
@ -279,11 +279,9 @@ public class PeerConnectionClient {
return videoCallEnabled;
}
private void createPeerConnectionFactoryInternal(
Context context, EGLContext renderEGLContext) {
Log.d(TAG, "Create peer connection factory with EGLContext "
+ renderEGLContext + ". Use video: "
+ peerConnectionParameters.videoCallEnabled);
private void createPeerConnectionFactoryInternal(Context context) {
Log.d(TAG, "Create peer connection factory. Use video: " +
peerConnectionParameters.videoCallEnabled);
isError = false;
// Check if VP9 is used by default.
if (videoCallEnabled && peerConnectionParameters.videoCodec != null
@ -304,9 +302,8 @@ public class PeerConnectionClient {
&& peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC)) {
preferIsac = true;
}
if (!PeerConnectionFactory.initializeAndroidGlobals(
context, true, true,
peerConnectionParameters.videoCodecHwAcceleration, renderEGLContext)) {
if (!PeerConnectionFactory.initializeAndroidGlobals(context, true, true,
peerConnectionParameters.videoCodecHwAcceleration)) {
events.onPeerConnectionError("Failed to initializeAndroidGlobals");
}
factory = new PeerConnectionFactory();
@ -402,18 +399,24 @@ public class PeerConnectionClient {
}
}
private void createPeerConnectionInternal() {
private void createPeerConnectionInternal(EGLContext renderEGLContext) {
if (factory == null || isError) {
Log.e(TAG, "Peerconnection factory is not created");
return;
}
Log.d(TAG, "Create peer connection");
Log.d(TAG, "Create peer connection.");
Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
if (videoConstraints != null) {
Log.d(TAG, "VideoConstraints: " + videoConstraints.toString());
}
queuedRemoteCandidates = new LinkedList<IceCandidate>();
if (videoCallEnabled) {
Log.d(TAG, "EGLContext: " + renderEGLContext);
factory.setVideoHwAccelerationOptions(renderEGLContext);
}
PeerConnection.RTCConfiguration rtcConfig =
new PeerConnection.RTCConfiguration(signalingParameters.iceServers);
// TCP candidates are only useful when connecting to a server that supports

View File

@ -238,10 +238,9 @@ public class PeerConnectionClientTest extends InstrumentationTestCase
options.networkIgnoreMask = 0;
client.setPeerConnectionFactoryOptions(options);
client.createPeerConnectionFactory(
getInstrumentation().getContext(), null,
peerConnectionParameters, this);
getInstrumentation().getContext(), peerConnectionParameters, this);
client.createPeerConnection(
localRenderer, remoteRenderer, signalingParameters);
null, localRenderer, remoteRenderer, signalingParameters);
client.createOffer();
return client;
}