Android: Transform legacy codec factories into VideoCodecFactories

We want an easy migration path away from MediaCodecVideoEncoder and
MediaCodecVideoDecoder, and we want to remove the special treatment of
these classes in our JNI code. This CL transforms them into proper
VideoCodecFactories that can be injected into the PeerConnectionFactory
like any other external factory.

To summarize, this CL:
 * Provides a trivial migration path for external clients (see the sketch below).
 * Removes special treatment of the legacy factories in our JNI code.
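
For reference, a minimal migration sketch for such clients, assuming the
PeerConnectionFactory.Builder entry points (builder(), setVideoEncoderFactory(),
setVideoDecoderFactory()) available at this revision:

  // Replace reliance on the implicit legacy HW codecs with explicit injection.
  // PeerConnectionFactory.initialize(...) must have been called first, as before.
  VideoEncoderFactory encoderFactory = MediaCodecVideoEncoder.createFactory();
  VideoDecoderFactory decoderFactory = MediaCodecVideoDecoder.createFactory();
  PeerConnectionFactory factory = PeerConnectionFactory.builder()
      .setVideoEncoderFactory(encoderFactory)
      .setVideoDecoderFactory(decoderFactory)
      .createPeerConnectionFactory();

The factories returned by createFactory() wrap the MediaCodec-based HW codecs the
same way the implicit path did, so behavior should be unchanged for migrated clients.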

Bug: webrtc:7925
Change-Id: I7ee8a6b0ce5ac0f3dc9c06d1587b8a9e52e0b684
Reviewed-on: https://webrtc-review.googlesource.com/88442
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23972}
Author: Magnus Jedvert
Date: 2018-07-13 16:09:20 +02:00
Committed by: Commit Bot
Commit: e26ff4b581 (parent: dfbced6504)
12 changed files with 225 additions and 390 deletions

@ -295,6 +295,9 @@ rtc_static_library("video_jni") {
deps = []
sources += [
"src/jni/androidmediacodeccommon.h",
"src/jni/androidmediadecoder.cc",
"src/jni/androidmediaencoder.cc",
"src/jni/androidvideotracksource.cc",
"src/jni/androidvideotracksource.h",
"src/jni/encodedimage.cc",
@ -330,7 +333,6 @@ rtc_static_library("video_jni") {
deps += [
":base_jni",
":generated_video_jni",
":legacy_video_jni", # TODO(bugs.webrtc.org/7925): Remove.
":native_api_jni",
":videoframe_jni",
":vp8_jni", # TODO(bugs.webrtc.org/7925): Remove.
@ -386,39 +388,6 @@ rtc_static_library("videoframe_jni") {
]
}
# TODO(bugs.webrtc.org/7925): Remove.
rtc_static_library("legacy_video_jni") {
sources = [
"src/jni/androidmediacodeccommon.h",
"src/jni/androidmediadecoder.cc",
"src/jni/androidmediadecoder_jni.h",
"src/jni/androidmediaencoder.cc",
"src/jni/androidmediaencoder_jni.h",
]
deps = [
":base_jni",
":generated_video_jni",
":native_api_jni",
":videoframe_jni",
"../..:webrtc_common",
"../../api/video_codecs:video_codecs_api",
"../../common_video:common_video",
"../../media:rtc_internal_video_codecs",
"../../media:rtc_media_base",
"../../modules/video_coding:video_codec_interface",
"../../modules/video_coding:video_coding_utility",
"../../rtc_base:checks",
"../../rtc_base:rtc_base",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_task_queue",
"../../rtc_base:sequenced_task_checker",
"../../rtc_base:weak_ptr",
"../../system_wrappers:field_trial_api",
"//third_party/libyuv",
]
}
rtc_static_library("null_video_jni") {
sources = [
"src/jni/pc/null_video.cc",
@ -1040,7 +1009,9 @@ rtc_android_library("peerconnection_java") {
deps = [
":audio_api_java",
":base_java",
":default_video_codec_factory_java",
":logging_java",
":swcodecs_java",
":video_api_java",
":video_java",
"//modules/audio_device:audio_device_java",

@ -21,6 +21,7 @@ import android.os.SystemClock;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
@ -44,6 +45,86 @@ public class MediaCodecVideoDecoder {
// possibly to minimize the amount of translation work necessary.
private static final String TAG = "MediaCodecVideoDecoder";
/**
* Creates a VideoDecoderFactory that can be injected into the PeerConnectionFactory and
* replicates the old behavior.
*/
public static VideoDecoderFactory createFactory() {
return new DefaultVideoDecoderFactory(new HwDecoderFactory());
}
// Factory for creating HW MediaCodecVideoDecoder instances.
static class HwDecoderFactory implements VideoDecoderFactory {
private static boolean isSameCodec(VideoCodecInfo codecA, VideoCodecInfo codecB) {
if (!codecA.name.equalsIgnoreCase(codecB.name)) {
return false;
}
return codecA.name.equalsIgnoreCase("H264")
? H264Utils.isSameH264Profile(codecA.params, codecB.params)
: true;
}
private static boolean isCodecSupported(
VideoCodecInfo[] supportedCodecs, VideoCodecInfo codec) {
for (VideoCodecInfo supportedCodec : supportedCodecs) {
if (isSameCodec(supportedCodec, codec)) {
return true;
}
}
return false;
}
private static VideoCodecInfo[] getSupportedHardwareCodecs() {
final List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
if (isVp8HwSupported()) {
Logging.d(TAG, "VP8 HW Decoder supported.");
codecs.add(new VideoCodecInfo("VP8", new HashMap<>()));
}
if (isVp9HwSupported()) {
Logging.d(TAG, "VP9 HW Decoder supported.");
codecs.add(new VideoCodecInfo("VP9", new HashMap<>()));
}
if (isH264HighProfileHwSupported()) {
Logging.d(TAG, "H.264 High Profile HW Decoder supported.");
codecs.add(H264Utils.DEFAULT_H264_HIGH_PROFILE_CODEC);
}
if (isH264HwSupported()) {
Logging.d(TAG, "H.264 HW Decoder supported.");
codecs.add(H264Utils.DEFAULT_H264_BASELINE_PROFILE_CODEC);
}
return codecs.toArray(new VideoCodecInfo[codecs.size()]);
}
private final VideoCodecInfo[] supportedHardwareCodecs = getSupportedHardwareCodecs();
@Override
public VideoCodecInfo[] getSupportedCodecs() {
return supportedHardwareCodecs;
}
@Nullable
@Override
public VideoDecoder createDecoder(VideoCodecInfo codec) {
if (!isCodecSupported(supportedHardwareCodecs, codec)) {
Logging.d(TAG, "No HW video decoder for codec " + codec.name);
return null;
}
Logging.d(TAG, "Create HW video decoder for " + codec.name);
return new WrappedNativeVideoDecoder() {
@Override
public long createNativeVideoDecoder() {
return nativeCreateDecoder(codec.name, useSurface());
}
};
}
}
private static final long MAX_DECODE_TIME_MS = 200;
// TODO(magjed): Use MediaFormat constants when part of the public API.
@ -175,7 +256,6 @@ public class MediaCodecVideoDecoder {
}
}
@CalledByNative
static boolean useSurface() {
return eglBase != null;
}
@ -203,25 +283,21 @@ public class MediaCodecVideoDecoder {
}
// Functions to query if HW decoding is supported.
@CalledByNativeUnchecked
public static boolean isVp8HwSupported() {
return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE)
&& (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes()) != null);
}
@CalledByNativeUnchecked
public static boolean isVp9HwSupported() {
return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE)
&& (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
}
@CalledByNativeUnchecked
public static boolean isH264HwSupported() {
return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE)
&& (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes()) != null);
}
@CalledByNative
public static boolean isH264HighProfileHwSupported() {
if (hwDecoderDisabledTypes.contains(H264_MIME_TYPE)) {
return false;
@ -942,4 +1018,6 @@ public class MediaCodecVideoDecoder {
int getSliceHeight() {
return sliceHeight;
}
private static native long nativeCreateDecoder(String codec, boolean useSurface);
}

@ -23,6 +23,7 @@ import android.os.Bundle;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
@ -47,6 +48,93 @@ public class MediaCodecVideoEncoder {
private static final String TAG = "MediaCodecVideoEncoder";
/**
* Creates a VideoEncoderFactory that can be injected into the PeerConnectionFactory and
* replicates the old behavior.
*/
public static VideoEncoderFactory createFactory() {
return new DefaultVideoEncoderFactory(new HwEncoderFactory());
}
// Factory for creating HW MediaCodecVideoEncoder instances.
static class HwEncoderFactory implements VideoEncoderFactory {
private static boolean isSameCodec(VideoCodecInfo codecA, VideoCodecInfo codecB) {
if (!codecA.name.equalsIgnoreCase(codecB.name)) {
return false;
}
return codecA.name.equalsIgnoreCase("H264")
? H264Utils.isSameH264Profile(codecA.params, codecB.params)
: true;
}
private static boolean isCodecSupported(
VideoCodecInfo[] supportedCodecs, VideoCodecInfo codec) {
for (VideoCodecInfo supportedCodec : supportedCodecs) {
if (isSameCodec(supportedCodec, codec)) {
return true;
}
}
return false;
}
private static VideoCodecInfo[] getSupportedHardwareCodecs() {
final List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
if (isVp8HwSupported()) {
Logging.d(TAG, "VP8 HW Encoder supported.");
codecs.add(new VideoCodecInfo("VP8", new HashMap<>()));
}
if (isVp9HwSupported()) {
Logging.d(TAG, "VP9 HW Encoder supported.");
codecs.add(new VideoCodecInfo("VP9", new HashMap<>()));
}
// Check if high profile is supported by decoder. If yes, encoder can always
// fall back to baseline profile as a subset of high profile.
if (MediaCodecVideoDecoder.isH264HighProfileHwSupported()) {
Logging.d(TAG, "H.264 High Profile HW Encoder supported.");
codecs.add(H264Utils.DEFAULT_H264_HIGH_PROFILE_CODEC);
}
if (isH264HwSupported()) {
Logging.d(TAG, "H.264 HW Encoder supported.");
codecs.add(H264Utils.DEFAULT_H264_BASELINE_PROFILE_CODEC);
}
return codecs.toArray(new VideoCodecInfo[codecs.size()]);
}
private final VideoCodecInfo[] supportedHardwareCodecs = getSupportedHardwareCodecs();
@Override
public VideoCodecInfo[] getSupportedCodecs() {
return supportedHardwareCodecs;
}
@Nullable
@Override
public VideoEncoder createEncoder(VideoCodecInfo info) {
if (!isCodecSupported(supportedHardwareCodecs, info)) {
Logging.d(TAG, "No HW video encoder for codec " + info.name);
return null;
}
Logging.d(TAG, "Create HW video encoder for " + info.name);
return new WrappedNativeVideoEncoder() {
@Override
public long createNativeVideoEncoder() {
return nativeCreateEncoder(
info, /* hasEgl14Context= */ staticEglBase instanceof EglBase14);
}
@Override
public boolean isHardwareEncoder() {
return true;
}
};
}
}
// Tracks webrtc::VideoCodecType.
public enum VideoCodecType {
VIDEO_CODEC_UNKNOWN,
@ -175,11 +263,6 @@ public class MediaCodecVideoEncoder {
return staticEglBase == null ? null : staticEglBase.getEglBaseContext();
}
@CalledByNative
static boolean hasEgl14Context() {
return staticEglBase instanceof EglBase14;
}
// List of supported HW VP8 encoders.
private static final MediaCodecProperties qcomVp8HwProperties = new MediaCodecProperties(
"OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
@ -297,7 +380,6 @@ public class MediaCodecVideoEncoder {
}
// Functions to query if HW encoding is supported.
@CalledByNative
public static boolean isVp8HwSupported() {
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
&& (findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedColorList) != null);
@ -311,13 +393,11 @@ public class MediaCodecVideoEncoder {
}
}
@CalledByNative
public static boolean isVp9HwSupported() {
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
&& (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
}
@CalledByNative
public static boolean isH264HwSupported() {
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
&& (findHwEncoder(H264_MIME_TYPE, h264HwList(), supportedColorList) != null);
@ -1022,4 +1102,5 @@ public class MediaCodecVideoEncoder {
/** Fills an inputBuffer with the given index with data from the byte buffers. */
private static native void nativeFillInputBuffer(long encoder, int inputBuffer, ByteBuffer dataY,
int strideY, ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV);
private static native long nativeCreateEncoder(VideoCodecInfo info, boolean hasEgl14Context);
}

@ -29,6 +29,7 @@ public class PeerConnectionFactory {
private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread";
private final long nativeFactory;
private static boolean enableVideoHwAcceleration;
private static volatile boolean internalTracerInitialized = false;
@Nullable private static Thread networkThread;
@Nullable private static Thread workerThread;
@ -201,6 +202,22 @@ public class PeerConnectionFactory {
}
public PeerConnectionFactory createPeerConnectionFactory() {
VideoEncoderFactory encoderFactory = this.encoderFactory;
VideoDecoderFactory decoderFactory = this.decoderFactory;
// For legacy reasons, we provide implicit built-in codec factories.
// TODO(bugs.webrtc.org/9181): Remove code below. All codec factories should be injected
// explicitly.
if (encoderFactory == null && decoderFactory == null && !enableVideoHwAcceleration) {
encoderFactory = new SoftwareVideoEncoderFactory();
decoderFactory = new SoftwareVideoDecoderFactory();
} else {
if (encoderFactory == null) {
encoderFactory = MediaCodecVideoEncoder.createFactory();
}
if (decoderFactory == null) {
decoderFactory = MediaCodecVideoDecoder.createFactory();
}
}
return new PeerConnectionFactory(options, audioDeviceModule, encoderFactory, decoderFactory,
audioProcessingFactory, fecControllerFactoryFactory);
}
@ -218,7 +235,8 @@ public class PeerConnectionFactory {
public static void initialize(InitializationOptions options) {
ContextUtils.initialize(options.applicationContext);
NativeLibrary.initialize(options.nativeLibraryLoader, options.nativeLibraryName);
nativeInitializeAndroidGlobals(options.enableVideoHwAcceleration);
enableVideoHwAcceleration = options.enableVideoHwAcceleration;
nativeInitializeAndroidGlobals();
nativeInitializeFieldTrials(options.fieldTrials);
if (options.enableInternalTracer && !internalTracerInitialized) {
initializeInternalTracer();
@ -476,7 +494,7 @@ public class PeerConnectionFactory {
// Must be called at least once before creating a PeerConnectionFactory
// (for example, at application startup time).
private static native void nativeInitializeAndroidGlobals(boolean videoHwAcceleration);
private static native void nativeInitializeAndroidGlobals();
private static native void nativeInitializeFieldTrials(String fieldTrialsInitString);
private static native String nativeFindFieldTrialsFullName(String name);
private static native void nativeInitializeInternalTracer();

@ -13,10 +13,6 @@
#include <memory>
#include <vector>
// NOTICE: androidmediadecoder_jni.h must be included before
// androidmediacodeccommon.h to avoid build errors.
#include "sdk/android/src/jni/androidmediadecoder_jni.h"
#include "api/video_codecs/sdp_video_format.h"
#include "common_video/h264/h264_bitstream_parser.h"
#include "common_video/include/i420_buffer_pool.h"
@ -42,7 +38,6 @@
using rtc::Bind;
using rtc::Thread;
using rtc::ThreadManager;
namespace webrtc {
namespace jni {
@ -773,67 +768,20 @@ void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
}
MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
ALOGD << "MediaCodecVideoDecoderFactory ctor";
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
supported_formats_.clear();
if (Java_MediaCodecVideoDecoder_isVp8HwSupported(jni) &&
!CheckException(jni)) {
ALOGD << "VP8 HW Decoder supported.";
supported_formats_.push_back(SdpVideoFormat(cricket::kVp8CodecName));
}
if (Java_MediaCodecVideoDecoder_isVp9HwSupported(jni) &&
!CheckException(jni)) {
ALOGD << "VP9 HW Decoder supported.";
supported_formats_.push_back(SdpVideoFormat(cricket::kVp9CodecName));
}
if (Java_MediaCodecVideoDecoder_isH264HwSupported(jni) &&
!CheckException(jni)) {
ALOGD << "H264 HW Decoder supported.";
supported_formats_.push_back(SdpVideoFormat(cricket::kH264CodecName));
}
}
MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
ALOGD << "MediaCodecVideoDecoderFactory dtor";
}
std::vector<SdpVideoFormat> MediaCodecVideoDecoderFactory::GetSupportedFormats()
const {
return supported_formats_;
}
std::unique_ptr<VideoDecoder> MediaCodecVideoDecoderFactory::CreateVideoDecoder(
const SdpVideoFormat& format) {
if (supported_formats_.empty()) {
ALOGW << "No HW video decoder for type " << format.name;
return nullptr;
}
for (SdpVideoFormat supported_format : supported_formats_) {
if (supported_format.name == format.name) {
ALOGD << "Create HW video decoder for type " << format.name;
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
return rtc::MakeUnique<MediaCodecVideoDecoder>(
jni, PayloadStringToCodecType(format.name),
Java_MediaCodecVideoDecoder_useSurface(jni));
}
}
ALOGW << "Can not find HW video decoder for type " << format.name;
return nullptr;
}
bool MediaCodecVideoDecoderFactory::IsH264HighProfileSupported(JNIEnv* env) {
return Java_MediaCodecVideoDecoder_isH264HighProfileHwSupported(env);
}
const char* MediaCodecVideoDecoder::ImplementationName() const {
return "MediaCodec";
}
static jlong JNI_MediaCodecVideoDecoder_CreateDecoder(
JNIEnv* env,
const JavaParamRef<jclass>&,
const JavaParamRef<jstring>& codec,
jboolean use_surface) {
ScopedLocalRefFrame local_ref_frame(env);
return jlongFromPointer(new MediaCodecVideoDecoder(
env, PayloadStringToCodecType(JavaToNativeString(env, codec)),
use_surface));
}
} // namespace jni
} // namespace webrtc

@ -1,42 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_ANDROID_SRC_JNI_ANDROIDMEDIADECODER_H_
#define SDK_ANDROID_SRC_JNI_ANDROIDMEDIADECODER_H_
#include <vector>
#include "api/video_codecs/video_decoder_factory.h"
#include "sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Implementation of Android MediaCodec based decoder factory.
class MediaCodecVideoDecoderFactory : public VideoDecoderFactory {
public:
MediaCodecVideoDecoderFactory();
~MediaCodecVideoDecoderFactory() override;
// VideoDecoderFactory implementation.
std::vector<SdpVideoFormat> GetSupportedFormats() const override;
std::unique_ptr<VideoDecoder> CreateVideoDecoder(
const SdpVideoFormat& format) override;
static bool IsH264HighProfileSupported(JNIEnv* env);
private:
std::vector<SdpVideoFormat> supported_formats_;
};
} // namespace jni
} // namespace webrtc
#endif // SDK_ANDROID_SRC_JNI_ANDROIDMEDIADECODER_H_

@ -8,10 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
// NOTICE: androidmediaencoder_jni.h must be included before
// androidmediacodeccommon.h to avoid build errors.
#include "sdk/android/src/jni/androidmediaencoder_jni.h"
#include <algorithm>
#include <list>
#include <memory>
@ -43,8 +39,8 @@
#include "sdk/android/generated_video_jni/jni/MediaCodecVideoEncoder_jni.h"
#include "sdk/android/native_api/jni/java_types.h"
#include "sdk/android/src/jni/androidmediacodeccommon.h"
#include "sdk/android/src/jni/androidmediadecoder_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/videocodecinfo.h"
#include "sdk/android/src/jni/videoframe.h"
#include "system_wrappers/include/field_trial.h"
#include "third_party/libyuv/include/libyuv/convert.h"
@ -1210,96 +1206,6 @@ const char* MediaCodecVideoEncoder::ImplementationName() const {
return "MediaCodec";
}
MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
supported_formats_.clear();
bool is_vp8_hw_supported = Java_MediaCodecVideoEncoder_isVp8HwSupported(jni);
if (is_vp8_hw_supported) {
ALOGD << "VP8 HW Encoder supported.";
supported_formats_.push_back(SdpVideoFormat(cricket::kVp8CodecName));
}
bool is_vp9_hw_supported = Java_MediaCodecVideoEncoder_isVp9HwSupported(jni);
if (is_vp9_hw_supported) {
ALOGD << "VP9 HW Encoder supported.";
supported_formats_.push_back(SdpVideoFormat(cricket::kVp9CodecName));
}
// Check if high profile is supported by decoder. If yes, encoder can always
// fall back to baseline profile as a subset of high profile.
bool is_h264_high_profile_hw_supported =
MediaCodecVideoDecoderFactory::IsH264HighProfileSupported(jni);
if (is_h264_high_profile_hw_supported) {
ALOGD << "H.264 High Profile HW Encoder supported.";
// TODO(magjed): Enumerate actual level instead of using hardcoded level
// 3.1. Level 3.1 is 1280x720@30fps which is enough for now.
SdpVideoFormat constrained_high(cricket::kH264CodecName);
const H264::ProfileLevelId constrained_high_profile(
H264::kProfileConstrainedHigh, H264::kLevel3_1);
constrained_high.parameters[cricket::kH264FmtpProfileLevelId] =
*H264::ProfileLevelIdToString(constrained_high_profile);
constrained_high.parameters[cricket::kH264FmtpLevelAsymmetryAllowed] = "1";
constrained_high.parameters[cricket::kH264FmtpPacketizationMode] = "1";
supported_formats_.push_back(constrained_high);
}
bool is_h264_hw_supported =
Java_MediaCodecVideoEncoder_isH264HwSupported(jni);
if (is_h264_hw_supported) {
ALOGD << "H.264 HW Encoder supported.";
// TODO(magjed): Push Constrained High profile as well when negotiation is
// ready, http://crbug/webrtc/6337. We can negotiate Constrained High
// profile as long as we have decode support for it and still send Baseline
// since Baseline is a subset of the High profile.
SdpVideoFormat constrained_baseline(cricket::kH264CodecName);
const H264::ProfileLevelId constrained_baseline_profile(
H264::kProfileConstrainedBaseline, H264::kLevel3_1);
constrained_baseline.parameters[cricket::kH264FmtpProfileLevelId] =
*H264::ProfileLevelIdToString(constrained_baseline_profile);
constrained_baseline.parameters[cricket::kH264FmtpLevelAsymmetryAllowed] =
"1";
constrained_baseline.parameters[cricket::kH264FmtpPacketizationMode] = "1";
supported_formats_.push_back(constrained_baseline);
}
}
MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {
ALOGD << "MediaCodecVideoEncoderFactory dtor";
}
std::unique_ptr<VideoEncoder> MediaCodecVideoEncoderFactory::CreateVideoEncoder(
const SdpVideoFormat& format) {
if (GetSupportedFormats().empty()) {
ALOGW << "No HW video encoder for codec " << format.name;
return nullptr;
}
if (IsFormatSupported(GetSupportedFormats(), format)) {
ALOGD << "Create HW video encoder for " << format.name;
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
return rtc::MakeUnique<MediaCodecVideoEncoder>(
jni, format, Java_MediaCodecVideoEncoder_hasEgl14Context(jni));
}
ALOGW << "Can not find HW video encoder for type " << format.name;
return nullptr;
}
std::vector<SdpVideoFormat> MediaCodecVideoEncoderFactory::GetSupportedFormats()
const {
return supported_formats_;
}
VideoEncoderFactory::CodecInfo MediaCodecVideoEncoderFactory::QueryVideoEncoder(
const SdpVideoFormat& format) const {
VideoEncoderFactory::CodecInfo codec_info;
codec_info.is_hardware_accelerated =
IsFormatSupported(supported_formats_, format);
codec_info.has_internal_source = false;
return codec_info;
}
static void JNI_MediaCodecVideoEncoder_FillInputBuffer(
JNIEnv* jni,
const JavaParamRef<jclass>&,
@ -1330,5 +1236,15 @@ static void JNI_MediaCodecVideoEncoder_FillInputBuffer(
stride_u, buffer_v, stride_v);
}
static jlong JNI_MediaCodecVideoEncoder_CreateEncoder(
JNIEnv* env,
const JavaParamRef<jclass>&,
const JavaParamRef<jobject>& format,
jboolean has_egl_context) {
ScopedLocalRefFrame local_ref_frame(env);
return jlongFromPointer(new MediaCodecVideoEncoder(
env, VideoCodecInfoToSdpVideoFormat(env, format), has_egl_context));
}
} // namespace jni
} // namespace webrtc

@ -1,46 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_ANDROID_SRC_JNI_ANDROIDMEDIAENCODER_H_
#define SDK_ANDROID_SRC_JNI_ANDROIDMEDIAENCODER_H_
#include <vector>
#include "api/video_codecs/video_encoder_factory.h"
#include "sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Implementation of Android MediaCodec based encoder factory.
class MediaCodecVideoEncoderFactory : public VideoEncoderFactory {
public:
MediaCodecVideoEncoderFactory();
~MediaCodecVideoEncoderFactory() override;
void SetEGLContext(JNIEnv* jni, jobject egl_context);
// VideoEncoderFactory implementation.
std::vector<SdpVideoFormat> GetSupportedFormats() const override;
CodecInfo QueryVideoEncoder(const SdpVideoFormat& format) const override;
std::unique_ptr<VideoEncoder> CreateVideoEncoder(
const SdpVideoFormat& format) override;
private:
jobject egl_context_;
// Empty if platform support is lacking, const after ctor returns.
std::vector<SdpVideoFormat> supported_formats_;
};
} // namespace jni
} // namespace webrtc
#endif // SDK_ANDROID_SRC_JNI_ANDROIDMEDIAENCODER_H_

@ -35,23 +35,5 @@ void* CreateVideoSource(JNIEnv* env,
return nullptr;
}
std::unique_ptr<VideoEncoderFactory> CreateLegacyVideoEncoderFactory() {
return nullptr;
}
std::unique_ptr<VideoDecoderFactory> CreateLegacyVideoDecoderFactory() {
return nullptr;
}
std::unique_ptr<VideoEncoderFactory> WrapLegacyVideoEncoderFactory(
std::unique_ptr<VideoEncoderFactory> legacy_encoder_factory) {
return nullptr;
}
std::unique_ptr<VideoDecoderFactory> WrapLegacyVideoDecoderFactory(
std::unique_ptr<VideoDecoderFactory> legacy_decoder_factory) {
return nullptr;
}
} // namespace jni
} // namespace webrtc

@ -80,7 +80,6 @@ static std::unique_ptr<std::string> field_trials_init_string;
// Set in PeerConnectionFactory_initializeAndroidGlobals().
static bool factory_static_initialized = false;
static bool video_hw_acceleration_enabled = true;
// Set in PeerConnectionFactory_InjectLoggable().
static std::unique_ptr<JNILogSink> jni_log_sink;
@ -122,9 +121,7 @@ jobject NativeToJavaPeerConnectionFactory(
static void JNI_PeerConnectionFactory_InitializeAndroidGlobals(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jboolean video_hw_acceleration) {
video_hw_acceleration_enabled = video_hw_acceleration;
const JavaParamRef<jclass>&) {
if (!factory_static_initialized) {
JVM::Initialize(GetJVM());
factory_static_initialized = true;
@ -239,44 +236,12 @@ jlong CreatePeerConnectionFactoryForJava(
std::unique_ptr<RtcEventLogFactoryInterface> rtc_event_log_factory(
CreateRtcEventLogFactory());
std::unique_ptr<VideoEncoderFactory> video_encoder_factory;
std::unique_ptr<VideoDecoderFactory> video_decoder_factory;
std::unique_ptr<cricket::MediaEngineInterface> media_engine;
if (jencoder_factory.is_null() && jdecoder_factory.is_null() &&
!video_hw_acceleration_enabled) {
// Legacy path for clients that are explicitly calling
// setEnableVideoHwAcceleration(false) and not injecting either an encoder or a
// decoder. These clients should be migrated to only pass in
// SoftwareVideoEncoderFactory instead.
video_encoder_factory =
WrapLegacyVideoEncoderFactory(/* legacy_encoder_factory= */ nullptr);
video_decoder_factory =
WrapLegacyVideoDecoderFactory(/* legacy_decoder_factory= */ nullptr);
} else {
if (jencoder_factory.is_null()) {
// TODO(bugs.webrtc.org/7925): When all clients switched to injectable
// factories, remove the legacy codec factories
video_encoder_factory =
WrapLegacyVideoEncoderFactory(CreateLegacyVideoEncoderFactory());
} else {
video_encoder_factory = std::unique_ptr<VideoEncoderFactory>(
CreateVideoEncoderFactory(jni, jencoder_factory));
}
if (jdecoder_factory.is_null()) {
// TODO(bugs.webrtc.org/7925): When all clients switched to injectable
// factories, remove the legacy codec factories
video_decoder_factory =
WrapLegacyVideoDecoderFactory(CreateLegacyVideoDecoderFactory());
} else {
video_decoder_factory = std::unique_ptr<VideoDecoderFactory>(
CreateVideoDecoderFactory(jni, jdecoder_factory));
}
}
media_engine.reset(CreateMediaEngine(
std::unique_ptr<cricket::MediaEngineInterface> media_engine(CreateMediaEngine(
audio_device_module, audio_encoder_factory, audio_decoder_factory,
std::move(video_encoder_factory), std::move(video_decoder_factory),
std::unique_ptr<VideoEncoderFactory>(
CreateVideoEncoderFactory(jni, jencoder_factory)),
std::unique_ptr<VideoDecoderFactory>(
CreateVideoDecoderFactory(jni, jdecoder_factory)),
audio_mixer, audio_processor));
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(

@ -13,16 +13,12 @@
#include <jni.h>
#include <memory>
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "api/videosourceproxy.h"
#include "media/engine/convert_legacy_video_factory.h"
#include "rtc_base/logging.h"
#include "rtc_base/ptr_util.h"
#include "sdk/android/src/jni/androidmediadecoder_jni.h"
#include "sdk/android/src/jni/androidmediaencoder_jni.h"
#include "sdk/android/src/jni/androidvideotracksource.h"
#include "sdk/android/src/jni/videodecoderfactorywrapper.h"
#include "sdk/android/src/jni/videoencoderfactorywrapper.h"
@ -53,29 +49,5 @@ void* CreateVideoSource(JNIEnv* env,
.release();
}
std::unique_ptr<VideoEncoderFactory> CreateLegacyVideoEncoderFactory() {
return rtc::MakeUnique<MediaCodecVideoEncoderFactory>();
}
std::unique_ptr<VideoDecoderFactory> CreateLegacyVideoDecoderFactory() {
return rtc::MakeUnique<MediaCodecVideoDecoderFactory>();
}
std::unique_ptr<VideoEncoderFactory> WrapLegacyVideoEncoderFactory(
std::unique_ptr<VideoEncoderFactory> legacy_encoder_factory) {
return legacy_encoder_factory ? std::unique_ptr<VideoEncoderFactory>(
cricket::ConvertVideoEncoderFactory(
std::move(legacy_encoder_factory)))
: CreateBuiltinVideoEncoderFactory();
}
std::unique_ptr<VideoDecoderFactory> WrapLegacyVideoDecoderFactory(
std::unique_ptr<VideoDecoderFactory> legacy_decoder_factory) {
return legacy_decoder_factory ? std::unique_ptr<VideoDecoderFactory>(
cricket::ConvertVideoDecoderFactory(
std::move(legacy_decoder_factory)))
: CreateBuiltinVideoDecoderFactory();
}
} // namespace jni
} // namespace webrtc

@ -38,14 +38,6 @@ void* CreateVideoSource(JNIEnv* env,
rtc::Thread* worker_thread,
jboolean is_screencast);
std::unique_ptr<VideoEncoderFactory> CreateLegacyVideoEncoderFactory();
std::unique_ptr<VideoDecoderFactory> CreateLegacyVideoDecoderFactory();
std::unique_ptr<VideoEncoderFactory> WrapLegacyVideoEncoderFactory(
std::unique_ptr<VideoEncoderFactory> legacy_encoder_factory);
std::unique_ptr<VideoDecoderFactory> WrapLegacyVideoDecoderFactory(
std::unique_ptr<VideoDecoderFactory> legacy_decoder_factory);
} // namespace jni
} // namespace webrtc