VideoCaptureAndroid: rewrote the (standalone) implementation of video capture on Android.

Besides being roughly 40% of the size of the previous implementation, this
change lets VideoCaptureAndroid stop and restart capture, which is necessary
to support onPause/onResume reasonably on Android.

BUG=1407
R=henrike@webrtc.org, wu@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2334004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4915 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: fischman@webrtc.org
Date:   2013-10-03 18:23:13 +00:00
Parent: ddc5a19ce9
Commit: 4e65e07e41

28 changed files with 745 additions and 1550 deletions

File: talk/app/webrtc/java/jni/peerconnection_jni.cc

@@ -1241,7 +1241,7 @@ JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
     JNIEnv* jni, jclass, jobject context) {
   CHECK(g_jvm, "JNI_OnLoad failed to run?");
   bool failure = false;
-  failure |= webrtc::VideoEngine::SetAndroidObjects(g_jvm, context);
+  failure |= webrtc::VideoEngine::SetAndroidObjects(g_jvm);
   failure |= webrtc::VoiceEngine::SetAndroidObjects(g_jvm, jni, context);
   return !failure;
 }
@@ -1543,7 +1543,7 @@ JOW(jlong, VideoCapturer_nativeCreateVideoCapturer)(
   CHECK(device_manager->Init(), "DeviceManager::Init() failed");
   cricket::Device device;
   if (!device_manager->GetVideoCaptureDevice(device_name, &device)) {
-    LOG(LS_ERROR) << "GetVideoCaptureDevice failed";
+    LOG(LS_ERROR) << "GetVideoCaptureDevice failed for " << device_name;
     return 0;
   }
   talk_base::scoped_ptr<cricket::VideoCapturer> capturer(
@@ -1566,6 +1566,28 @@ JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)(
   return (jlong)renderer.release();
 }
 
+JOW(jlong, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
+  cricket::VideoCapturer* capturer =
+      reinterpret_cast<VideoSourceInterface*>(j_p)->GetVideoCapturer();
+  talk_base::scoped_ptr<cricket::VideoFormatPod> format(
+      new cricket::VideoFormatPod(*capturer->GetCaptureFormat()));
+  capturer->Stop();
+  return jlongFromPointer(format.release());
+}
+
+JOW(void, VideoSource_restart)(
+    JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
+  talk_base::scoped_ptr<cricket::VideoFormatPod> format(
+      reinterpret_cast<cricket::VideoFormatPod*>(j_p_format));
+  reinterpret_cast<VideoSourceInterface*>(j_p_source)->GetVideoCapturer()->
+      StartCapturing(cricket::VideoFormat(*format));
+}
+
+JOW(void, VideoSource_freeNativeVideoFormat)(
+    JNIEnv* jni, jclass, jlong j_p) {
+  delete reinterpret_cast<cricket::VideoFormatPod*>(j_p);
+}
+
 JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
   return JavaStringFromStdString(
       jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());

File: talk/app/webrtc/java/src/org/webrtc/VideoSource.java

@@ -28,9 +28,46 @@
 package org.webrtc;
 
-/** Java version of VideoSourceInterface. */
+/**
+ * Java version of VideoSourceInterface, extended with stop/restart
+ * functionality to allow explicit control of the camera device on android,
+ * where there is no support for multiple open capture devices and the cost of
+ * holding a camera open (even if MediaStreamTrack.setEnabled(false) is muting
+ * its output to the encoder) can be too high to bear.
+ */
 public class VideoSource extends MediaSource {
+  private long nativeVideoFormatAtStop;
+
   public VideoSource(long nativeSource) {
     super(nativeSource);
   }
+
+  // Stop capture feeding this source.
+  public void stop() {
+    nativeVideoFormatAtStop = stop(nativeSource);
+  }
+
+  // Restart capture feeding this source. stop() must have been called since
+  // the last call to restart() (if any). Note that this isn't "start()";
+  // sources are started by default at birth.
+  public void restart() {
+    restart(nativeSource, nativeVideoFormatAtStop);
+    nativeVideoFormatAtStop = 0;
+  }
+
+  @Override
+  public void dispose() {
+    if (nativeVideoFormatAtStop != 0) {
+      freeNativeVideoFormat(nativeVideoFormatAtStop);
+      nativeVideoFormatAtStop = 0;
+    }
+    super.dispose();
+  }
+
+  // This stop() returns an owned C++ VideoFormat pointer for use in restart()
+  // and dispose().
+  private static native long stop(long nativeSource);
+  private static native void restart(
+      long nativeSource, long nativeVideoFormatAtStop);
+  private static native void freeNativeVideoFormat(long nativeVideoFormat);
 }
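
The demo-app change that follows puts this new API to use across the Activity
lifecycle. As a minimal sketch of the intended calling pattern (assuming
videoSource was obtained from PeerConnectionFactory.createVideoSource(); the
surrounding Activity code is illustrative, not part of this change):

  @Override
  public void onPause() {
    super.onPause();
    if (videoSource != null) {
      videoSource.stop();  // Release the camera while backgrounded.
    }
  }

  @Override
  public void onResume() {
    super.onResume();
    if (videoSource != null) {
      videoSource.restart();  // Reopen using the format saved by stop().
    }
  }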

File: talk/examples/android/src/org/appspot/apprtc/AppRTCDemoActivity.java

@@ -156,19 +156,18 @@ public class AppRTCDemoActivity extends Activity
   public void onPause() {
     super.onPause();
     vsv.onPause();
-    // TODO(fischman): IWBN to support pause/resume, but the WebRTC codebase
-    // isn't ready for that yet; e.g.
-    // https://code.google.com/p/webrtc/issues/detail?id=1407
-    // Instead, simply exit instead of pausing (the alternative leads to
-    // system-borking with wedged cameras; e.g. b/8224551)
-    disconnectAndExit();
+    if (videoSource != null) {
+      videoSource.stop();
+    }
   }
 
   @Override
   public void onResume() {
-    // The onResume() is a lie! See TODO(fischman) in onPause() above.
     super.onResume();
     vsv.onResume();
+    if (videoSource != null) {
+      videoSource.restart();
+    }
   }
 
   @Override
@@ -249,7 +248,8 @@ public class AppRTCDemoActivity extends Activity
   }
 
   @Override
-  public void onDestroy() {
+  protected void onDestroy() {
+    disconnectAndExit();
     super.onDestroy();
   }
 
@@ -524,7 +524,6 @@ public class AppRTCDemoActivity extends Activity
       return;
     }
     quit[0] = true;
-    wakeLock.release();
     if (pc != null) {
       pc.dispose();
       pc = null;
@@ -542,6 +541,7 @@ public class AppRTCDemoActivity extends Activity
       factory.dispose();
       factory = null;
     }
+    wakeLock.release();
     finish();
   }
 }

File: talk/examples/android/src/org/appspot/apprtc/VideoStreamsView.java

@@ -75,6 +75,7 @@ public class VideoStreamsView
   public VideoStreamsView(Context c, Point screenDimensions) {
     super(c);
     this.screenDimensions = screenDimensions;
+    setPreserveEGLContextOnPause(true);
     setEGLContextClientVersion(2);
     setRenderer(this);
     setRenderMode(RENDERMODE_WHEN_DIRTY);


@@ -107,7 +107,6 @@
     'android_java_files': [
       '<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/WebRTCAudioDevice.java',
       '<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/AudioManagerAndroid.java',
-      '<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java',
       '<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java',
       '<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java',
       '<(webrtc_modules_dir)/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java',

File: webrtc/modules/audio_device/android/audio_manager_jni.cc

@@ -12,39 +12,9 @@
 #include <assert.h>
 
+#include "webrtc/modules/utility/interface/helpers_android.h"
 #include "webrtc/system_wrappers/interface/trace.h"
 
-namespace {
-
-class AttachThreadScoped {
- public:
-  explicit AttachThreadScoped(JavaVM* jvm)
-      : attached_(false), jvm_(jvm), env_(NULL) {
-    jint ret_val = jvm->GetEnv(reinterpret_cast<void**>(&env_),
-                               REQUIRED_JNI_VERSION);
-    if (ret_val == JNI_EDETACHED) {
-      // Attach the thread to the Java VM.
-      ret_val = jvm_->AttachCurrentThread(&env_, NULL);
-      attached_ = ret_val == JNI_OK;
-      assert(attached_);
-    }
-  }
-  ~AttachThreadScoped() {
-    if (attached_ && (jvm_->DetachCurrentThread() < 0)) {
-      assert(false);
-    }
-  }
-  JNIEnv* env() { return env_; }
-
- private:
-  bool attached_;
-  JavaVM* jvm_;
-  JNIEnv* env_;
-};
-
-}  // namespace
-
 namespace webrtc {
 
 static JavaVM* g_jvm_ = NULL;

File: webrtc/modules/audio_device/android/audio_manager_jni.h

@@ -18,8 +18,6 @@
 
 namespace webrtc {
 
-#define REQUIRED_JNI_VERSION JNI_VERSION_1_4
-
 class AudioManagerJni {
  public:
   AudioManagerJni();

File: webrtc/modules/utility/interface/helpers_android.h (new file)

@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS. All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_
+
+#include <jni.h>
+
+namespace webrtc {
+
+// Attach thread to JVM if necessary and detach at scope end if originally
+// attached.
+class AttachThreadScoped {
+ public:
+  explicit AttachThreadScoped(JavaVM* jvm);
+  ~AttachThreadScoped();
+  JNIEnv* env();
+
+ private:
+  bool attached_;
+  JavaVM* jvm_;
+  JNIEnv* env_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_UTILITY_INTERFACE_HELPERS_ANDROID_H_

File: webrtc/modules/utility/source/helpers_android.cc (new file)

@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS. All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/utility/interface/helpers_android.h"
+
+#include <assert.h>
+#include <stddef.h>
+
+namespace webrtc {
+
+AttachThreadScoped::AttachThreadScoped(JavaVM* jvm)
+    : attached_(false), jvm_(jvm), env_(NULL) {
+  jint ret_val = jvm->GetEnv(reinterpret_cast<void**>(&env_), JNI_VERSION_1_4);
+  if (ret_val == JNI_EDETACHED) {
+    // Attach the thread to the Java VM.
+    ret_val = jvm_->AttachCurrentThread(&env_, NULL);
+    attached_ = ret_val == JNI_OK;
+    assert(attached_);
+  }
+}
+
+AttachThreadScoped::~AttachThreadScoped() {
+  if (attached_ && (jvm_->DetachCurrentThread() < 0)) {
+    assert(false);
+  }
+}
+
+JNIEnv* AttachThreadScoped::env() { return env_; }
+
+}  // namespace webrtc

File: webrtc/modules/utility/source/utility.gypi

@@ -21,6 +21,7 @@
         '../interface/audio_frame_operations.h',
         '../interface/file_player.h',
         '../interface/file_recorder.h',
+        '../interface/helpers_android.h',
         '../interface/process_thread.h',
         '../interface/rtp_dump.h',
         'audio_frame_operations.cc',
@@ -30,6 +31,7 @@
         'file_player_impl.h',
         'file_recorder_impl.cc',
         'file_recorder_impl.h',
+        'helpers_android.cc',
         'process_thread_impl.cc',
         'process_thread_impl.h',
         'rtp_dump_impl.cc',

File: webrtc/modules/video_capture/android/device_info_android.cc

@@ -10,9 +10,14 @@
 #include "webrtc/modules/video_capture/android/device_info_android.h"
 
-#include <stdio.h>
+#include <algorithm>
+#include <sstream>
+#include <vector>
 
+#include "json/json.h"
+#include "third_party/icu/source/common/unicode/unistr.h"
 #include "webrtc/modules/video_capture/android/video_capture_android.h"
+#include "webrtc/system_wrappers/interface/logging.h"
 #include "webrtc/system_wrappers/interface/ref_count.h"
 #include "webrtc/system_wrappers/interface/trace.h"
@@ -22,65 +27,136 @@ namespace webrtc
 namespace videocapturemodule
 {
 
-static jclass g_capabilityClass = NULL;
-
-// static
-void DeviceInfoAndroid::SetAndroidCaptureClasses(jclass capabilityClass) {
-  g_capabilityClass = capabilityClass;
-}
-
-VideoCaptureModule::DeviceInfo*
-VideoCaptureImpl::CreateDeviceInfo (const int32_t id) {
-  videocapturemodule::DeviceInfoAndroid *deviceInfo =
-      new videocapturemodule::DeviceInfoAndroid(id);
-  if (deviceInfo && deviceInfo->Init() != 0) {
-    delete deviceInfo;
-    deviceInfo = NULL;
-  }
-  return deviceInfo;
-}
+static std::string ResolutionsToString(
+    const std::vector<std::pair<int, int> >& pairs) {
+  std::stringstream stream;
+  for (size_t i = 0; i < pairs.size(); ++i) {
+    if (i > 0)
+      stream << ", ";
+    stream << "(" << pairs[i].first << "x" << pairs[i].second << ")";
+  }
+  return stream.str();
+}
+
+struct AndroidCameraInfo {
+  std::string name;
+  int min_mfps, max_mfps;  // FPS*1000.
+  bool front_facing;
+  int orientation;
+  std::vector<std::pair<int, int> > resolutions;  // Pairs are: (width,height).
+
+  std::string ToString() {
+    std::stringstream stream;
+    stream << "Name: [" << name << "], mfps: [" << min_mfps << ":" << max_mfps
+           << "], front_facing: " << front_facing
+           << ", orientation: " << orientation << ", resolutions: ["
+           << ResolutionsToString(resolutions) << "]";
+    return stream.str();
+  }
+};
+
+// Camera info; populated during DeviceInfoAndroid::Initialize() and immutable
+// thereafter.
+static std::vector<AndroidCameraInfo>* g_camera_info = NULL;
+
+// Set |*index| to the index of |name| in g_camera_info or return false if no
+// match found.
+static bool FindCameraIndexByName(const std::string& name, size_t* index) {
+  for (size_t i = 0; i < g_camera_info->size(); ++i) {
+    if (g_camera_info->at(i).name == name) {
+      *index = i;
+      return true;
+    }
+  }
+  return false;
+}
+
+// Returns a pointer to the named member of g_camera_info, or NULL if no match
+// is found.
+static AndroidCameraInfo* FindCameraInfoByName(const std::string& name) {
+  size_t index = 0;
+  if (FindCameraIndexByName(name, &index))
+    return &g_camera_info->at(index);
+  return NULL;
+}
+
+// static
+void DeviceInfoAndroid::Initialize(JNIEnv* jni) {
+  // TODO(henrike): this "if" would make a lot more sense as an assert, but
+  // Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine() and
+  // Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate() conspire to
+  // prevent this. Once that code is made to only
+  // VideoEngine::SetAndroidObjects() once per process, this can turn into an
+  // assert.
+  if (g_camera_info)
+    return;
+
+  g_camera_info = new std::vector<AndroidCameraInfo>();
+  jclass j_info_class =
+      jni->FindClass("org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid");
+  assert(j_info_class);
+  jmethodID j_initialize = jni->GetStaticMethodID(
+      j_info_class, "getDeviceInfo", "()Ljava/lang/String;");
+  jstring j_json_info = static_cast<jstring>(
+      jni->CallStaticObjectMethod(j_info_class, j_initialize));
+
+  const jchar* jchars = jni->GetStringChars(j_json_info, NULL);
+  icu::UnicodeString ustr(jchars, jni->GetStringLength(j_json_info));
+  jni->ReleaseStringChars(j_json_info, jchars);
+  std::string json_info;
+  ustr.toUTF8String(json_info);
+
+  Json::Value cameras;
+  Json::Reader reader(Json::Features::strictMode());
+  bool parsed = reader.parse(json_info, cameras);
+  if (!parsed) {
+    std::stringstream stream;
+    stream << "Failed to parse configuration:\n"
+           << reader.getFormattedErrorMessages();
+    assert(false);
+    return;
+  }
+  for (Json::ArrayIndex i = 0; i < cameras.size(); ++i) {
+    const Json::Value& camera = cameras[i];
+    AndroidCameraInfo info;
+    info.name = camera["name"].asString();
+    info.min_mfps = camera["min_mfps"].asInt();
+    info.max_mfps = camera["max_mfps"].asInt();
+    info.front_facing = camera["front_facing"].asBool();
+    info.orientation = camera["orientation"].asInt();
+    Json::Value sizes = camera["sizes"];
+    for (Json::ArrayIndex j = 0; j < sizes.size(); ++j) {
+      const Json::Value& size = sizes[j];
+      info.resolutions.push_back(std::make_pair(
+          size["width"].asInt(), size["height"].asInt()));
+    }
+    g_camera_info->push_back(info);
+  }
+}
+
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
+    const int32_t id) {
+  return new videocapturemodule::DeviceInfoAndroid(id);
+}
 
 DeviceInfoAndroid::DeviceInfoAndroid(const int32_t id) :
     DeviceInfoImpl(id) {
 }
 
+DeviceInfoAndroid::~DeviceInfoAndroid() {
+}
+
+bool DeviceInfoAndroid::FindCameraIndex(const char* deviceUniqueIdUTF8,
+                                        size_t* index) {
+  return FindCameraIndexByName(deviceUniqueIdUTF8, index);
+}
+
 int32_t DeviceInfoAndroid::Init() {
   return 0;
 }
 
-DeviceInfoAndroid::~DeviceInfoAndroid() {
-}
-
 uint32_t DeviceInfoAndroid::NumberOfDevices() {
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached) != 0)
-    return 0;
-
-  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
-               "%s GetMethodId", __FUNCTION__);
-  // get the method ID for the Android Java GetDeviceUniqueName name.
-  jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
-                                   "NumberOfDevices",
-                                   "()I");
-  jint numberOfDevices = 0;
-  if (cid != NULL) {
-    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
-                 "%s Calling Number of devices", __FUNCTION__);
-    numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
-  }
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-  if (numberOfDevices > 0)
-    return numberOfDevices;
-  return 0;
+  return g_camera_info->size();
 }
 
 int32_t DeviceInfoAndroid::GetDeviceName(
@@ -91,235 +167,56 @@ int32_t DeviceInfoAndroid::GetDeviceName(
     uint32_t deviceUniqueIdUTF8Length,
     char* /*productUniqueIdUTF8*/,
     uint32_t /*productUniqueIdUTF8Length*/) {
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
-  int32_t result = 0;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached)!= 0)
-    return -1;
-
-  // get the method ID for the Android Java GetDeviceUniqueName name.
-  jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
-                                   "(I)Ljava/lang/String;");
-  if (cid != NULL) {
-    jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject,
-                                                      cid, deviceNumber);
-    if (javaDeviceNameObj == NULL) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                   "%s: Failed to get device name for device %d.",
-                   __FUNCTION__, (int) deviceNumber);
-      result = -1;
-    } else {
-      jboolean isCopy;
-      const char* javaDeviceNameChar = env->GetStringUTFChars(
-          (jstring) javaDeviceNameObj
-          ,&isCopy);
-      const jsize javaDeviceNameCharLength =
-          env->GetStringUTFLength((jstring) javaDeviceNameObj);
-      if ((uint32_t) javaDeviceNameCharLength <
-          deviceUniqueIdUTF8Length) {
-        memcpy(deviceUniqueIdUTF8,
-               javaDeviceNameChar,
-               javaDeviceNameCharLength + 1);
-      }
-      else {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
-                     _id, "%s: deviceUniqueIdUTF8 to short.",
-                     __FUNCTION__);
-        result = -1;
-      }
-      if ((uint32_t) javaDeviceNameCharLength < deviceNameLength) {
-        memcpy(deviceNameUTF8,
-               javaDeviceNameChar,
-               javaDeviceNameCharLength + 1);
-      }
-      env->ReleaseStringUTFChars((jstring) javaDeviceNameObj,
-                                 javaDeviceNameChar);
-    }  // javaDeviceNameObj == NULL
-  }
-  else {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                 "%s: Failed to find GetDeviceUniqueName function id",
-                 __FUNCTION__);
-    result = -1;
-  }
-
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-
-  WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-               "%s: result %d", __FUNCTION__, (int) result);
-  return result;
+  if (deviceNumber >= g_camera_info->size())
+    return -1;
+  const AndroidCameraInfo& info = g_camera_info->at(deviceNumber);
+  if (info.name.length() + 1 > deviceNameLength ||
+      info.name.length() + 1 > deviceUniqueIdUTF8Length) {
+    return -1;
+  }
+  memcpy(deviceNameUTF8, info.name.c_str(), info.name.length() + 1);
+  memcpy(deviceUniqueIdUTF8, info.name.c_str(), info.name.length() + 1);
+  return 0;
 }
 
 int32_t DeviceInfoAndroid::CreateCapabilityMap(
     const char* deviceUniqueIdUTF8) {
   _captureCapabilities.clear();
-
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached) != 0)
-    return -1;
-
-  // Find the capability class
-  jclass javaCapClass = g_capabilityClass;
-  if (javaCapClass == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                 "%s: SetAndroidCaptureClasses must be called first!",
-                 __FUNCTION__);
-    return -1;
-  }
-
-  // get the method ID for the Android Java GetCapabilityArray .
-  jmethodID cid = env->GetMethodID(
-      javaCmDevInfoClass,
-      "GetCapabilityArray",
-      "(Ljava/lang/String;)[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;");
-  if (cid == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                 "%s: Can't find method GetCapabilityArray.", __FUNCTION__);
-    return -1;
-  }
-  // Create a jstring so we can pass the deviceUniquName to the java method.
-  jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
-
-  if (capureIdString == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                 "%s: Can't create string for method GetCapabilityArray.",
-                 __FUNCTION__);
-    return -1;
-  }
-  // Call the java class and get an array with capabilities back.
-  jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject,
-                                                      cid, capureIdString);
-  if (!javaCapabilitiesObj) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                 "%s: Failed to call java GetCapabilityArray.",
-                 __FUNCTION__);
-    return -1;
-  }
-
-  jfieldID widthField = env->GetFieldID(javaCapClass, "width", "I");
-  jfieldID heigtField = env->GetFieldID(javaCapClass, "height", "I");
-  jfieldID maxFpsField = env->GetFieldID(javaCapClass, "maxFPS", "I");
-  if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                 "%s: Failed to get field Id.", __FUNCTION__);
-    return -1;
-  }
-
-  const jsize numberOfCapabilities =
-      env->GetArrayLength((jarray) javaCapabilitiesObj);
-
-  for (jsize i = 0; i < numberOfCapabilities; ++i) {
-    VideoCaptureCapability cap;
-    jobject capabilityElement = env->GetObjectArrayElement(
-        (jobjectArray) javaCapabilitiesObj,
-        i);
-
-    cap.width = env->GetIntField(capabilityElement, widthField);
-    cap.height = env->GetIntField(capabilityElement, heigtField);
-    cap.expectedCaptureDelay = _expectedCaptureDelay;
-    cap.rawType = kVideoNV21;
-    cap.maxFPS = env->GetIntField(capabilityElement, maxFpsField);
-    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
-                 "%s: Cap width %d, height %d, fps %d", __FUNCTION__,
-                 cap.width, cap.height, cap.maxFPS);
-    _captureCapabilities.push_back(cap);
-  }
-
-  _lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);
-  _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
-                                        _lastUsedDeviceNameLength + 1);
-  memcpy(_lastUsedDeviceName,
-         deviceUniqueIdUTF8,
-         _lastUsedDeviceNameLength + 1);
-
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
-               "CreateCapabilityMap %d", _captureCapabilities.size());
-  return _captureCapabilities.size();
+  const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
+  if (info == NULL)
+    return -1;
+
+  for (size_t i = 0; i < info->resolutions.size(); ++i) {
+    const std::pair<int, int>& size = info->resolutions[i];
+    VideoCaptureCapability cap;
+    cap.width = size.first;
+    cap.height = size.second;
+    cap.maxFPS = info->max_mfps / 1000;
+    cap.expectedCaptureDelay = kExpectedCaptureDelay;
+    cap.rawType = kVideoNV21;
+    _captureCapabilities.push_back(cap);
+  }
+  return _captureCapabilities.size();
 }
 
 int32_t DeviceInfoAndroid::GetOrientation(
     const char* deviceUniqueIdUTF8,
     VideoCaptureRotation& orientation) {
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached) != 0)
-    return -1;
-
-  // get the method ID for the Android Java GetOrientation .
-  jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
-                                   "(Ljava/lang/String;)I");
-  if (cid == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                 "%s: Can't find method GetOrientation.", __FUNCTION__);
-    return -1;
-  }
-  // Create a jstring so we can pass the deviceUniquName to the java method.
-  jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
-  if (capureIdString == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                 "%s: Can't create string for method GetCapabilityArray.",
-                 __FUNCTION__);
-    return -1;
-  }
-  // Call the java class and get the orientation.
-  jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid,
-                                         capureIdString);
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-
-  int32_t retValue = 0;
-  switch (jorientation) {
-    case -1: // Error
-      orientation = kCameraRotate0;
-      retValue = -1;
-      break;
-    case 0:
-      orientation = kCameraRotate0;
-      break;
-    case 90:
-      orientation = kCameraRotate90;
-      break;
-    case 180:
-      orientation = kCameraRotate180;
-      break;
-    case 270:
-      orientation = kCameraRotate270;
-      break;
-    case 360:
-      orientation = kCameraRotate0;
-      break;
-  }
-  return retValue;
+  const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
+  if (info == NULL ||
+      !VideoCaptureImpl::RotationFromDegrees(info->orientation, &orientation)) {
+    return -1;
+  }
+  return 0;
+}
+
+void DeviceInfoAndroid::GetFpsRange(const char* deviceUniqueIdUTF8,
+                                    int* min_mfps, int* max_mfps) {
+  const AndroidCameraInfo* info = FindCameraInfoByName(deviceUniqueIdUTF8);
+  if (info == NULL)
+    return;
+  *min_mfps = info->min_mfps;
+  *max_mfps = info->max_mfps;
 }
 
 }  // namespace videocapturemodule

File: webrtc/modules/video_capture/android/device_info_android.h

@@ -21,19 +21,18 @@ namespace webrtc
 namespace videocapturemodule
 {
 
-// Android logging, uncomment to print trace to
-// logcat instead of trace file/callback
-// #include <android/log.h>
-// #define WEBRTC_TRACE(a,b,c,...)
-//   __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
-
 class DeviceInfoAndroid : public DeviceInfoImpl {
  public:
-  static void SetAndroidCaptureClasses(jclass capabilityClass);
-  DeviceInfoAndroid(const int32_t id);
-  int32_t Init();
+  static void Initialize(JNIEnv* env);
+
+  DeviceInfoAndroid(int32_t id);
   virtual ~DeviceInfoAndroid();
+
+  // Set |*index| to the index of the camera matching |deviceUniqueIdUTF8|, or
+  // return false if no match.
+  bool FindCameraIndex(const char* deviceUniqueIdUTF8, size_t* index);
+
+  virtual int32_t Init();
   virtual uint32_t NumberOfDevices();
   virtual int32_t GetDeviceName(
       uint32_t deviceNumber,
@@ -53,9 +52,14 @@ class DeviceInfoAndroid : public DeviceInfoImpl {
       uint32_t /*positionY*/) { return -1; }
   virtual int32_t GetOrientation(const char* deviceUniqueIdUTF8,
                                  VideoCaptureRotation& orientation);
+
+  // Populate |min_mfps| and |max_mfps| with the supported range of the device.
+  void GetFpsRange(const char* deviceUniqueIdUTF8,
+                   int* min_mfps,
+                   int* max_mfps);
+
  private:
-  bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8);
-  enum {_expectedCaptureDelay = 190};
+  enum { kExpectedCaptureDelay = 190 };
 };
 
 }  // namespace videocapturemodule

File: webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java (deleted)

@@ -1,17 +0,0 @@
-/*
- *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS. All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.videoengine;
-
-public class CaptureCapabilityAndroid {
-    public int width = 0;
-    public int height = 0;
-    public int maxFPS = 0;
-}

File: webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java

@@ -14,9 +14,6 @@ import java.io.IOException;
 import java.util.Locale;
 import java.util.concurrent.locks.ReentrantLock;
 
-import org.webrtc.videoengine.CaptureCapabilityAndroid;
-import org.webrtc.videoengine.VideoCaptureDeviceInfoAndroid.AndroidVideoCaptureDevice;
-
 import android.graphics.ImageFormat;
 import android.graphics.PixelFormat;
 import android.graphics.Rect;
@@ -28,240 +25,177 @@ import android.util.Log;
 import android.view.SurfaceHolder;
 import android.view.SurfaceHolder.Callback;
 
+// Wrapper for android Camera, with support for direct local preview rendering.
+// Threading notes: this class is called from ViE C++ code, and from Camera &
+// SurfaceHolder Java callbacks. Since these calls happen on different threads,
+// the entry points to this class are all synchronized. This shouldn't present
+// a performance bottleneck because only onPreviewFrame() is called more than
+// once (and is called serially on a single thread), so the lock should be
+// uncontended.
 public class VideoCaptureAndroid implements PreviewCallback, Callback {
+  private final static String TAG = "WEBRTC-JC";
 
-    private final static String TAG = "WEBRTC-JC";
-
-    private Camera camera;
-    private AndroidVideoCaptureDevice currentDevice = null;
-    public ReentrantLock previewBufferLock = new ReentrantLock();
-    // This lock takes sync with StartCapture and SurfaceChanged
-    private ReentrantLock captureLock = new ReentrantLock();
-    private int PIXEL_FORMAT = ImageFormat.NV21;
-    PixelFormat pixelFormat = new PixelFormat();
-    // True when the C++ layer has ordered the camera to be started.
-    private boolean isCaptureStarted = false;
-    private boolean isCaptureRunning = false;
-    private boolean isSurfaceReady = false;
-
-    private final int numCaptureBuffers = 3;
-    private int expectedFrameSize = 0;
-    private int orientation = 0;
-    private int id = 0;
-    // C++ callback context variable.
-    private long context = 0;
-    private SurfaceHolder localPreview = null;
-    private SurfaceTexture dummySurfaceTexture = null;
-    // True if this class owns the preview video buffers.
-    private boolean ownsBuffers = false;
-
-    private int mCaptureWidth = -1;
-    private int mCaptureHeight = -1;
-    private int mCaptureFPS = -1;
-
-    public static
-    void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
-        Log.d(TAG, "DeleteVideoCaptureAndroid");
-        if (captureAndroid.camera == null) {
-            return;
-        }
-
-        captureAndroid.StopCapture();
-        captureAndroid.camera.release();
-        captureAndroid.camera = null;
-        captureAndroid.context = 0;
-    }
-
-    public VideoCaptureAndroid(int in_id, long in_context, Camera in_camera,
-            AndroidVideoCaptureDevice in_device) {
-        id = in_id;
-        context = in_context;
-        camera = in_camera;
-        currentDevice = in_device;
-    }
-
-    private int tryStartCapture(int width, int height, int frameRate) {
-        if (camera == null) {
-            Log.e(TAG, "Camera not initialized %d" + id);
-            return -1;
-        }
-
-        Log.d(TAG, "tryStartCapture: " + width +
-                "x" + height +", frameRate: " + frameRate +
-                ", isCaptureRunning: " + isCaptureRunning +
-                ", isSurfaceReady: " + isSurfaceReady +
-                ", isCaptureStarted: " + isCaptureStarted);
-
-        if (isCaptureRunning || !isCaptureStarted) {
-            return 0;
-        }
-
-        CaptureCapabilityAndroid currentCapability =
-                new CaptureCapabilityAndroid();
-        currentCapability.width = width;
-        currentCapability.height = height;
-        currentCapability.maxFPS = frameRate;
-        PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
-
-        Camera.Parameters parameters = camera.getParameters();
-        parameters.setPreviewSize(currentCapability.width,
-                currentCapability.height);
-        parameters.setPreviewFormat(PIXEL_FORMAT);
-        parameters.setPreviewFrameRate(currentCapability.maxFPS);
-        try {
-            camera.setParameters(parameters);
-        } catch (RuntimeException e) {
-            Log.e(TAG, "setParameters failed", e);
-            return -1;
-        }
-
-        int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
-        byte[] buffer = null;
-        for (int i = 0; i < numCaptureBuffers; i++) {
-            buffer = new byte[bufSize];
-            camera.addCallbackBuffer(buffer);
-        }
-
-        camera.setPreviewCallbackWithBuffer(this);
-        ownsBuffers = true;
-
-        camera.startPreview();
-        previewBufferLock.lock();
-        expectedFrameSize = bufSize;
-        isCaptureRunning = true;
-        previewBufferLock.unlock();
-
-        return 0;
-    }
-
-    public int StartCapture(int width, int height, int frameRate) {
-        Log.d(TAG, "StartCapture width " + width +
-                " height " + height +" frame rate " + frameRate);
-        // Get the local preview SurfaceHolder from the static render class
-        localPreview = ViERenderer.GetLocalRenderer();
-        if (localPreview != null) {
-            if (localPreview.getSurface() != null &&
-                    localPreview.getSurface().isValid()) {
-                surfaceCreated(localPreview);
-            }
-            localPreview.addCallback(this);
-        } else {
-            // No local renderer. Camera won't capture without
-            // setPreview{Texture,Display}, so we create a dummy SurfaceTexture
-            // and hand it over to Camera, but never listen for frame-ready
-            // callbacks, and never call updateTexImage on it.
-            captureLock.lock();
-            try {
-                dummySurfaceTexture = new SurfaceTexture(42);
-                camera.setPreviewTexture(dummySurfaceTexture);
-            } catch (IOException e) {
-                throw new RuntimeException(e);
-            }
-            captureLock.unlock();
-        }
-
-        captureLock.lock();
-        isCaptureStarted = true;
-        mCaptureWidth = width;
-        mCaptureHeight = height;
-        mCaptureFPS = frameRate;
-
-        int res = tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
-
-        captureLock.unlock();
-        return res;
-    }
-
-    public int StopCapture() {
-        Log.d(TAG, "StopCapture");
-        try {
-            previewBufferLock.lock();
-            isCaptureRunning = false;
-            previewBufferLock.unlock();
-            camera.stopPreview();
-            camera.setPreviewCallbackWithBuffer(null);
-        } catch (RuntimeException e) {
-            Log.e(TAG, "Failed to stop camera", e);
-            return -1;
-        }
-
-        isCaptureStarted = false;
-        return 0;
-    }
-
-    native void ProvideCameraFrame(byte[] data, int length, long captureObject);
-
-    public void onPreviewFrame(byte[] data, Camera camera) {
-        previewBufferLock.lock();
-
-        // The following line is for debug only
-        // Log.v(TAG, "preview frame length " + data.length +
-        //            " context" + context);
-        if (isCaptureRunning) {
-            // If StartCapture has been called but not StopCapture
-            // Call the C++ layer with the captured frame
-            if (data.length == expectedFrameSize) {
-                ProvideCameraFrame(data, expectedFrameSize, context);
-                if (ownsBuffers) {
-                    // Give the video buffer to the camera service again.
-                    camera.addCallbackBuffer(data);
-                }
-            }
-        }
-        previewBufferLock.unlock();
-    }
-
-    // Sets the rotation of the preview render window.
-    // Does not affect the captured video image.
-    public void SetPreviewRotation(int rotation) {
-        Log.v(TAG, "SetPreviewRotation:" + rotation);
-
-        if (camera == null) {
-            return;
-        }
-
-        int resultRotation = 0;
-        if (currentDevice.frontCameraType ==
-                VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23) {
-            // this is a 2.3 or later front facing camera.
-            // SetDisplayOrientation will flip the image horizontally
-            // before doing the rotation.
-            resultRotation = ( 360 - rotation ) % 360; // compensate the mirror
-        }
-        else {
-            // Back facing or 2.2 or previous front camera
-            resultRotation = rotation;
-        }
-        camera.setDisplayOrientation(resultRotation);
-    }
-
-    public void surfaceChanged(SurfaceHolder holder,
-            int format, int width, int height) {
-        Log.d(TAG, "VideoCaptureAndroid::surfaceChanged");
-    }
-
-    public void surfaceCreated(SurfaceHolder holder) {
-        Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
-        captureLock.lock();
-        try {
-            if (camera != null) {
-                camera.setPreviewDisplay(holder);
-            }
-        } catch (IOException e) {
-            Log.e(TAG, "Failed to set preview surface!", e);
-        }
-        captureLock.unlock();
-    }
-
-    public void surfaceDestroyed(SurfaceHolder holder) {
-        Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
-        captureLock.lock();
-        try {
-            if (camera != null) {
-                camera.setPreviewDisplay(null);
-            }
-        } catch (IOException e) {
-            Log.e(TAG, "Failed to clear preview surface!", e);
-        }
-        captureLock.unlock();
-    }
+  private Camera camera;  // Only non-null while capturing.
+  private final int id;
+  private final Camera.CameraInfo info;
+  private final long native_capturer;  // |VideoCaptureAndroid*| in C++.
+  private SurfaceHolder localPreview;
+  private SurfaceTexture dummySurfaceTexture;
+  // Arbitrary queue depth. Higher number means more memory allocated & held,
+  // lower number means more sensitivity to processing time in the client (and
+  // potentially stalling the capturer if it runs out of buffers to write to).
+  private final int numCaptureBuffers = 3;
+
+  public VideoCaptureAndroid(int id, long native_capturer) {
+    this.id = id;
+    this.native_capturer = native_capturer;
+    this.info = new Camera.CameraInfo();
+    Camera.getCameraInfo(id, info);
+  }
+
+  // Called by native code. Returns true if capturer is started.
+  //
+  // Note that this actually opens the camera, which can be a slow operation and
+  // thus might be done on a background thread, but ViE API needs a
+  // synchronous success return value so we can't do that.
+  private synchronized boolean startCapture(
+      int width, int height, int min_mfps, int max_mfps) {
+    Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
+        min_mfps + ":" + max_mfps);
+    Throwable error = null;
+    try {
+      camera = Camera.open(id);
+
+      localPreview = ViERenderer.GetLocalRenderer();
+      if (localPreview != null) {
+        localPreview.addCallback(this);
+        if (localPreview.getSurface() != null &&
+            localPreview.getSurface().isValid()) {
+          camera.setPreviewDisplay(localPreview);
+        }
+      } else {
+        // No local renderer (we only care about onPreviewFrame() buffers, not a
+        // directly-displayed UI element). Camera won't capture without
+        // setPreview{Texture,Display}, so we create a dummy SurfaceTexture and
+        // hand it over to Camera, but never listen for frame-ready callbacks,
+        // and never call updateTexImage on it.
+        try {
+          // "42" because http://goo.gl/KaEn8
+          dummySurfaceTexture = new SurfaceTexture(42);
+          camera.setPreviewTexture(dummySurfaceTexture);
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
+      }
+
+      Camera.Parameters parameters = camera.getParameters();
+      parameters.setPreviewSize(width, height);
+      parameters.setPreviewFpsRange(min_mfps, max_mfps);
+      int format = ImageFormat.NV21;
+      parameters.setPreviewFormat(format);
+      camera.setParameters(parameters);
+      int bufSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
+      for (int i = 0; i < numCaptureBuffers; i++) {
+        camera.addCallbackBuffer(new byte[bufSize]);
+      }
+      camera.setPreviewCallbackWithBuffer(this);
+      camera.startPreview();
+      return true;
+    } catch (IOException e) {
+      error = e;
+    } catch (RuntimeException e) {
+      error = e;
+    }
+    Log.e(TAG, "startCapture failed", error);
+    if (camera != null) {
+      stopCapture();
+    }
+    return false;
+  }
+
+  // Called by native code. Returns true when camera is known to be stopped.
+  private synchronized boolean stopCapture() {
+    Log.d(TAG, "stopCapture");
+    if (camera == null) {
+      throw new RuntimeException("Camera is already stopped!");
+    }
+    Throwable error = null;
+    try {
+      if (localPreview != null) {
+        localPreview.removeCallback(this);
+        camera.setPreviewDisplay(null);
+      } else {
+        camera.setPreviewTexture(null);
+      }
+      camera.setPreviewCallbackWithBuffer(null);
+      camera.stopPreview();
+      camera.release();
+      camera = null;
+      return true;
+    } catch (IOException e) {
+      error = e;
+    } catch (RuntimeException e) {
+      error = e;
+    }
+    Log.e(TAG, "Failed to stop camera", error);
+    return false;
+  }
+
+  private native void ProvideCameraFrame(
+      byte[] data, int length, long captureObject);
+
+  public synchronized void onPreviewFrame(byte[] data, Camera camera) {
+    ProvideCameraFrame(data, data.length, native_capturer);
+    camera.addCallbackBuffer(data);
+  }
+
+  // Sets the rotation of the preview render window.
+  // Does not affect the captured video image.
+  // Called by native code.
+  private synchronized void setPreviewRotation(int rotation) {
+    Log.v(TAG, "setPreviewRotation:" + rotation);
+
+    if (camera == null) {
+      return;
+    }
+
+    int resultRotation = 0;
+    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+      // This is a front facing camera. SetDisplayOrientation will flip
+      // the image horizontally before doing the rotation.
+      resultRotation = ( 360 - rotation ) % 360; // Compensate for the mirror.
+    } else {
+      // Back-facing camera.
+      resultRotation = rotation;
+    }
+    camera.setDisplayOrientation(resultRotation);
+  }
+
+  public synchronized void surfaceChanged(
+      SurfaceHolder holder, int format, int width, int height) {
+    Log.d(TAG, "VideoCaptureAndroid::surfaceChanged ignored: " +
+        format + ": " + width + "x" + height);
+  }
+
+  public synchronized void surfaceCreated(SurfaceHolder holder) {
+    Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
+    try {
+      if (camera != null) {
+        camera.setPreviewDisplay(holder);
+      }
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  public synchronized void surfaceDestroyed(SurfaceHolder holder) {
+    Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
+    try {
+      if (camera != null) {
+        camera.setPreviewDisplay(null);
+      }
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  }
 }
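
As an aside, the callback-buffer sizing in startCapture() above is the
standard Android computation for NV21 preview frames; a standalone
illustration (the 640x480 size is hypothetical):

  import android.graphics.ImageFormat;

  // ImageFormat.getBitsPerPixel(ImageFormat.NV21) returns 12 bits per pixel,
  // so one 640x480 NV21 frame occupies 640 * 480 * 12 / 8 = 460800 bytes;
  // startCapture() preallocates numCaptureBuffers (3) buffers of this size.
  int width = 640, height = 480;
  int bufSize =
      width * height * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;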

File: webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java

@@ -17,377 +17,79 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 
-import dalvik.system.DexClassLoader;
-
 import android.content.Context;
-import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+import android.hardware.Camera.Parameters;
 import android.hardware.Camera.Size;
+import android.hardware.Camera;
 import android.util.Log;
 
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+
 public class VideoCaptureDeviceInfoAndroid {
-
-    //Context
-    Context context;
-
-    // Set VERBOSE as the default logging level because camera device info
-    // is very useful information and doesn't degrade performance normally
-    private final static String TAG = "WEBRTC";
-
-    // Private class with info about all available cameras and the capabilities
-    public class AndroidVideoCaptureDevice {
-        AndroidVideoCaptureDevice() {
-            frontCameraType = FrontFacingCameraType.None;
-            index = 0;
-        }
-
-        public String deviceUniqueName;
-        public CaptureCapabilityAndroid captureCapabilies[];
-        public FrontFacingCameraType frontCameraType;
-
-        // Orientation of camera as described in
-        // android.hardware.Camera.CameraInfo.Orientation
-        public int orientation;
-        // Camera index used in Camera.Open on Android 2.3 and onwards
-        public int index;
-    }
-
-    public enum FrontFacingCameraType {
-        None, // This is not a front facing camera
-        GalaxyS, // Galaxy S front facing camera.
-        HTCEvo, // HTC Evo front facing camera
-        Android23, // Android 2.3 front facing camera.
-    }
-
-    String currentDeviceUniqueId;
-    int id;
-    List<AndroidVideoCaptureDevice> deviceList;
-
-    public static VideoCaptureDeviceInfoAndroid
-    CreateVideoCaptureDeviceInfoAndroid(int in_id, Context in_context) {
-        Log.d(TAG,
-                String.format(Locale.US, "VideoCaptureDeviceInfoAndroid"));
-
-        VideoCaptureDeviceInfoAndroid self =
-                new VideoCaptureDeviceInfoAndroid(in_id, in_context);
-        if(self != null && self.Init() == 0) {
-            return self;
-        }
-        else {
-            Log.d(TAG, "Failed to create VideoCaptureDeviceInfoAndroid.");
-        }
-        return null;
-    }
-
-    private VideoCaptureDeviceInfoAndroid(int in_id,
-            Context in_context) {
-        id = in_id;
-        context = in_context;
-        deviceList = new ArrayList<AndroidVideoCaptureDevice>();
-    }
-
-    private int Init() {
-        // Populate the deviceList with available cameras and their capabilities.
-        Camera camera = null;
-        if(android.os.Build.VERSION.SDK_INT > 8) {
-            // From Android 2.3 and onwards
-            for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
-                AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
-
-                Camera.CameraInfo info = new Camera.CameraInfo();
-                Camera.getCameraInfo(i, info);
-                newDevice.index = i;
-                newDevice.orientation=info.orientation;
-                if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
-                    newDevice.deviceUniqueName =
-                            "Camera " + i +", Facing back, Orientation "+ info.orientation;
-                    Log.d(TAG, "Camera " + i +", Facing back, Orientation "+ info.orientation);
-                }
-                else {
-                    newDevice.deviceUniqueName =
-                            "Camera " + i +", Facing front, Orientation "+ info.orientation;
-                    newDevice.frontCameraType = FrontFacingCameraType.Android23;
-                    Log.d(TAG, "Camera " + i +", Facing front, Orientation "+ info.orientation);
-                }
-
-                camera = Camera.open(i);
-                Camera.Parameters parameters = camera.getParameters();
-                AddDeviceInfo(newDevice, parameters);
-                camera.release();
-                camera = null;
-                deviceList.add(newDevice);
-            }
-        }
-        VerifyCapabilities();
-        return 0;
-    }
-
-    // Adds the capture capabilities of the currently opened device
-    private void AddDeviceInfo(AndroidVideoCaptureDevice newDevice,
-            Camera.Parameters parameters) {
-
-        List<Size> sizes = parameters.getSupportedPreviewSizes();
-        List<Integer> frameRates = parameters.getSupportedPreviewFrameRates();
-        int maxFPS = 0;
-        if (frameRates != null) {
-            for(Integer frameRate:frameRates) {
-                if(frameRate > maxFPS) {
-                    maxFPS = frameRate;
-                }
-            }
-        }
-
-        newDevice.captureCapabilies = new CaptureCapabilityAndroid[sizes.size()];
-        for(int i = 0; i < sizes.size(); ++i) {
-            Size s = sizes.get(i);
-            newDevice.captureCapabilies[i] = new CaptureCapabilityAndroid();
-            newDevice.captureCapabilies[i].height = s.height;
-            newDevice.captureCapabilies[i].width = s.width;
-            newDevice.captureCapabilies[i].maxFPS = maxFPS;
-            Log.v(TAG, "VideoCaptureDeviceInfo " + ", maxFPS: " + maxFPS +
-                    ", width: " + s.width + ", height: " + s.height);
-        }
-    }
-
-    // Function that make sure device specific capabilities are
-    // in the capability list.
-    // Ie Galaxy S supports CIF but does not list CIF as a supported capability.
-    // Motorola Droid Camera does not work with frame rate above 15fps.
-    // http://code.google.com/p/android/issues/detail?id=5514#c0
-    private void VerifyCapabilities() {
-        // Nexus S or Galaxy S
-        if(android.os.Build.DEVICE.equals("GT-I9000") ||
-                android.os.Build.DEVICE.equals("crespo")) {
-            CaptureCapabilityAndroid specificCapability =
-                    new CaptureCapabilityAndroid();
-            specificCapability.width = 352;
-            specificCapability.height = 288;
-            specificCapability.maxFPS = 15;
-            AddDeviceSpecificCapability(specificCapability);
-
-            specificCapability = new CaptureCapabilityAndroid();
-            specificCapability.width = 176;
-            specificCapability.height = 144;
-            specificCapability.maxFPS = 15;
-            AddDeviceSpecificCapability(specificCapability);
-
-            specificCapability = new CaptureCapabilityAndroid();
-            specificCapability.width = 320;
-            specificCapability.height = 240;
-            specificCapability.maxFPS = 15;
-            AddDeviceSpecificCapability(specificCapability);
-        }
-        // Motorola Milestone Camera server does not work at 30fps
-        // even though it reports that it can
-        if(android.os.Build.MANUFACTURER.equals("motorola") &&
-                android.os.Build.DEVICE.equals("umts_sholes")) {
-            for (AndroidVideoCaptureDevice device : deviceList) {
-                for (CaptureCapabilityAndroid capability : device.captureCapabilies) {
-                    capability.maxFPS = 15;
-                }
-            }
-        }
-    }
-
-    private void AddDeviceSpecificCapability(
-            CaptureCapabilityAndroid specificCapability) {
-        for(AndroidVideoCaptureDevice device:deviceList) {
-            boolean foundCapability = false;
-            for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
-                if(capability.width == specificCapability.width &&
-                        capability.height == specificCapability.height) {
-                    foundCapability = true;
-                    break;
-                }
-            }
-            if(foundCapability==false) {
-                CaptureCapabilityAndroid newCaptureCapabilies[]=
-                        new CaptureCapabilityAndroid[device.captureCapabilies.length+1];
-                for(int i = 0; i < device.captureCapabilies.length; ++i) {
-                    newCaptureCapabilies[i+1] = device.captureCapabilies[i];
-                }
-                newCaptureCapabilies[0] = specificCapability;
-                device.captureCapabilies = newCaptureCapabilies;
-            }
-        }
-    }
-
-    // Returns the number of Capture devices that is supported
-    public int NumberOfDevices() {
-        return deviceList.size();
-    }
-
-    public String GetDeviceUniqueName(int deviceNumber) {
-        if(deviceNumber < 0 || deviceNumber >= deviceList.size()) {
-            return null;
-        }
-        return deviceList.get(deviceNumber).deviceUniqueName;
-    }
-
-    public CaptureCapabilityAndroid[] GetCapabilityArray (String deviceUniqueId)
-    {
-        for (AndroidVideoCaptureDevice device: deviceList) {
-            if(device.deviceUniqueName.equals(deviceUniqueId)) {
-                return (CaptureCapabilityAndroid[]) device.captureCapabilies;
-            }
-        }
-        return null;
-    }
-
-    // Returns the camera orientation as described by
-    // android.hardware.Camera.CameraInfo.orientation
-    public int GetOrientation(String deviceUniqueId) {
-        for (AndroidVideoCaptureDevice device: deviceList) {
-            if(device.deviceUniqueName.equals(deviceUniqueId)) {
-                return device.orientation;
-            }
-        }
-        return -1;
-    }
-
-    // Returns an instance of VideoCaptureAndroid.
-    public VideoCaptureAndroid AllocateCamera(int id, long context,
-            String deviceUniqueId) {
-        try {
-            Log.d(TAG, "AllocateCamera " + deviceUniqueId);
-
-            Camera camera = null;
-            AndroidVideoCaptureDevice deviceToUse = null;
-            for (AndroidVideoCaptureDevice device: deviceList) {
-                if(device.deviceUniqueName.equals(deviceUniqueId)) {
-                    // Found the wanted camera
-                    deviceToUse = device;
-                    switch(device.frontCameraType) {
-                        case GalaxyS:
-                            camera = AllocateGalaxySFrontCamera();
-                            break;
-                        case HTCEvo:
-                            camera = AllocateEVOFrontFacingCamera();
-                            break;
-                        default:
-                            // From Android 2.3 and onwards)
-                            if(android.os.Build.VERSION.SDK_INT>8)
-                                camera=Camera.open(device.index);
-                            else
-                                camera=Camera.open(); // Default camera
-                    }
-                }
-            }
-
-            if(camera == null) {
-                return null;
-            }
-            Log.v(TAG, "AllocateCamera - creating VideoCaptureAndroid");
-
-            return new VideoCaptureAndroid(id, context, camera, deviceToUse);
-        } catch (NoSuchMethodException e) {
-            Log.e(TAG, "AllocateCamera Failed to open camera", e);
-        } catch (ClassNotFoundException e) {
-            Log.e(TAG, "AllocateCamera Failed to open camera", e);
-        } catch (InvocationTargetException e) {
-            Log.e(TAG, "AllocateCamera Failed to open camera", e);
-        } catch (IllegalAccessException e) {
-            Log.e(TAG, "AllocateCamera Failed to open camera", e);
-        }
-        return null;
-    }
-
-    // Searches for a front facing camera device. This is device specific code.
-    private Camera.Parameters
-    SearchOldFrontFacingCameras(AndroidVideoCaptureDevice newDevice)
-            throws SecurityException, IllegalArgumentException,
-            NoSuchMethodException, ClassNotFoundException,
-            IllegalAccessException, InvocationTargetException {
-        // Check the id of the opened camera device
-        // Returns null on X10 and 1 on Samsung Galaxy S.
-        Camera camera = Camera.open();
-        Camera.Parameters parameters = camera.getParameters();
-        String cameraId = parameters.get("camera-id");
-        if(cameraId != null && cameraId.equals("1")) {
-            // This might be a Samsung Galaxy S with a front facing camera.
-            parameters.set("camera-id", 2);
-            camera.setParameters(parameters);
-            parameters = camera.getParameters();
-            newDevice.frontCameraType = FrontFacingCameraType.GalaxyS;
-            newDevice.orientation = 0;
-            camera.release();
-            return parameters;
-        }
-        camera.release();
-
-        // Check for Evo front facing camera
-        File file =
-                new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
-        boolean exists = file.exists();
-        if (!exists) {
-            file =
-                    new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
-            exists = file.exists();
-        }
-        if(exists) {
-            newDevice.frontCameraType = FrontFacingCameraType.HTCEvo;
-            newDevice.orientation = 0;
-            Camera evCamera = AllocateEVOFrontFacingCamera();
-            parameters = evCamera.getParameters();
-            evCamera.release();
-            return parameters;
-        }
-        return null;
-    }
-
-    // Returns a handle to HTC front facing camera.
-    // The caller is responsible to release it on completion.
-    private Camera AllocateEVOFrontFacingCamera()
-            throws SecurityException, NoSuchMethodException,
-            ClassNotFoundException, IllegalArgumentException,
-            IllegalAccessException, InvocationTargetException {
-        String classPath = null;
-        File file =
-                new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
-        classPath = "com.htc.hardware.twinCamDevice.FrontFacingCamera";
-        boolean exists = file.exists();
-        if (!exists){
-            file =
-                    new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
-            classPath = "com.sprint.hardware.twinCamDevice.FrontFacingCamera";
-            exists = file.exists();
-        }
-        if(!exists) {
-            return null;
-        }
-
-        String dexOutputDir = "";
-        if(context != null) {
-            dexOutputDir = context.getFilesDir().getAbsolutePath();
-            File mFilesDir = new File(dexOutputDir, "dexfiles");
-            if(!mFilesDir.exists()){
-                // Log.e("*WEBRTCN*", "Directory doesn't exists");
-                if(!mFilesDir.mkdirs()) {
-                    // Log.e("*WEBRTCN*", "Unable to create files directory");
-                }
-            }
-        }
-
-        dexOutputDir += "/dexfiles";
-
-        DexClassLoader loader =
-                new DexClassLoader(file.getAbsolutePath(), dexOutputDir,
-                        null, ClassLoader.getSystemClassLoader());
-
-        Method method = loader.loadClass(classPath).getDeclaredMethod(
-                "getFrontFacingCamera", (Class[]) null);
-        Camera camera = (Camera) method.invoke((Object[])null,(Object[]) null);
-        return camera;
-    }
-
-    // Returns a handle to Galaxy S front camera.
-    // The caller is responsible to release it on completion.
-    private Camera AllocateGalaxySFrontCamera() {
-        Camera camera = Camera.open();
-        Camera.Parameters parameters = camera.getParameters();
-        parameters.set("camera-id",2);
-        camera.setParameters(parameters);
-        return camera;
-    }
+  private final static String TAG = "WEBRTC-JC";
+
+  private static boolean isFrontFacing(CameraInfo info) {
+    return info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT;
+  }
+
+  private static String deviceUniqueName(int index, CameraInfo info) {
+    return "Camera " + index +", Facing " +
+        (isFrontFacing(info) ? "front" : "back") +
+        ", Orientation "+ info.orientation;
+  }
+
+  // Returns information about all cameras on the device as a serialized JSON
+  // array of dictionaries encoding information about a single device. Since
+  // this reflects static information about the hardware present, there is no
+  // need to call this function more than once in a single process. It is
+  // marked "private" as it is only called by native code.
+  private static String getDeviceInfo() {
+    try {
+      JSONArray devices = new JSONArray();
+      for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+        CameraInfo info = new CameraInfo();
+        Camera.getCameraInfo(i, info);
+        String uniqueName = deviceUniqueName(i, info);
+        JSONObject cameraDict = new JSONObject();
+        devices.put(cameraDict);
+        List<Size> supportedSizes;
+        List<int[]> supportedFpsRanges;
+        try {
+          Camera camera = Camera.open(i);
+          Parameters parameters = camera.getParameters();
+          supportedSizes = parameters.getSupportedPreviewSizes();
+          supportedFpsRanges = parameters.getSupportedPreviewFpsRange();
+          camera.release();
+          Log.d(TAG, uniqueName);
+        } catch (RuntimeException e) {
+          Log.e(TAG, "Failed to open " + uniqueName + ", skipping");
+          continue;
+        }
+        JSONArray sizes = new JSONArray();
+        for (Size supportedSize : supportedSizes) {
+          JSONObject size = new JSONObject();
+          size.put("width", supportedSize.width);
+          size.put("height", supportedSize.height);
+          sizes.put(size);
+        }
+        // Android SDK deals in integral "milliframes per second"
+        // (i.e. fps*1000, instead of floating-point frames-per-second) so we
+        // preserve that through the Java->C++->Java round-trip.
+        int[] mfps = supportedFpsRanges.get(supportedFpsRanges.size() - 1);
+        cameraDict.put("name", uniqueName);
+        cameraDict.put("front_facing", isFrontFacing(info))
+            .put("orientation", info.orientation)
+            .put("sizes", sizes)
+            .put("min_mfps", mfps[Parameters.PREVIEW_FPS_MIN_INDEX])
+            .put("max_mfps", mfps[Parameters.PREVIEW_FPS_MAX_INDEX]);
+      }
+      String ret = devices.toString(2);
+      return ret;
+    } catch (JSONException e) {
+      throw new RuntimeException(e);
+    }
+  }
 }
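
For reference, the string returned by getDeviceInfo() above (and parsed on
the C++ side by DeviceInfoAndroid::Initialize()) is a JSON array with one
dictionary per camera. A hypothetical example for a device with a single
back-facing camera (the sizes and fps values here are illustrative, not from
this change):

  [
    {
      "name": "Camera 0, Facing back, Orientation 90",
      "front_facing": false,
      "orientation": 90,
      "sizes": [
        {"width": 640, "height": 480},
        {"width": 1280, "height": 720}
      ],
      "min_mfps": 15000,
      "max_mfps": 30000
    }
  ]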

File: webrtc/modules/video_capture/android/video_capture_android.cc

@ -10,599 +10,173 @@
#include "webrtc/modules/video_capture/android/video_capture_android.h" #include "webrtc/modules/video_capture/android/video_capture_android.h"
#include <stdio.h> #include "webrtc/modules/utility/interface/helpers_android.h"
#include "webrtc/modules/video_capture/android/device_info_android.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h" #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
#include "webrtc/system_wrappers/interface/ref_count.h" #include "webrtc/system_wrappers/interface/ref_count.h"
#include "webrtc/system_wrappers/interface/trace.h" #include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc static JavaVM* g_jvm = NULL;
{ static jclass g_java_capturer_class = NULL; // VideoCaptureAndroid.class.
#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// TODO(leozwang) These SetAndroidVM apis will be refactored, thus we only
// keep and reference java vm.
int32_t SetCaptureAndroidVM(void* javaVM, void* javaContext) {
return videocapturemodule::VideoCaptureAndroid::SetAndroidObjects(
javaVM,
javaContext);
}
#endif
-namespace videocapturemodule
-{
+namespace webrtc {
// Called by Java when the camera has a new frame to deliver.
void JNICALL ProvideCameraFrame(
JNIEnv* env,
jobject,
jbyteArray javaCameraFrame,
jint length,
jlong context) {
webrtc::videocapturemodule::VideoCaptureAndroid* captureModule =
reinterpret_cast<webrtc::videocapturemodule::VideoCaptureAndroid*>(
context);
jbyte* cameraFrame = env->GetByteArrayElements(javaCameraFrame, NULL);
captureModule->OnIncomingFrame(
reinterpret_cast<uint8_t*>(cameraFrame), length, 0);
env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT);
}
int32_t SetCaptureAndroidVM(JavaVM* javaVM) {
g_jvm = javaVM;
AttachThreadScoped ats(g_jvm);
videocapturemodule::DeviceInfoAndroid::Initialize(ats.env());
jclass j_capture_class =
ats.env()->FindClass("org/webrtc/videoengine/VideoCaptureAndroid");
assert(j_capture_class);
g_java_capturer_class =
reinterpret_cast<jclass>(ats.env()->NewGlobalRef(j_capture_class));
assert(g_java_capturer_class);
JNINativeMethod native_method = {
"ProvideCameraFrame", "([BIJ)V",
reinterpret_cast<void*>(&ProvideCameraFrame)
};
if (ats.env()->RegisterNatives(g_java_capturer_class, &native_method, 1) != 0)
assert(false);
return 0;
}
namespace videocapturemodule {
VideoCaptureModule* VideoCaptureImpl::Create(
    const int32_t id,
    const char* deviceUniqueIdUTF8) {
RefCountImpl<videocapturemodule::VideoCaptureAndroid>* implementation =
    new RefCountImpl<videocapturemodule::VideoCaptureAndroid>(id);
-if (!implementation || implementation->Init(id, deviceUniqueIdUTF8) != 0) {
+if (implementation->Init(id, deviceUniqueIdUTF8) != 0) {
delete implementation;
implementation = NULL;
}
return implementation;
}
-// Android logging, uncomment to print trace to
-// logcat instead of trace file/callback
-// #include <android/log.h>
-// #undef WEBRTC_TRACE
-// #define WEBRTC_TRACE(a,b,c,...)
-// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+int32_t VideoCaptureAndroid::OnIncomingFrame(uint8_t* videoFrame,
+                                             int32_t videoFrameLength,
+                                             int64_t captureTime) {
+  return IncomingFrame(
+      videoFrame, videoFrameLength, _captureCapability, captureTime);
+}
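The registered signature "([BIJ)V" pins down the Java side of this path: a byte[] preview buffer, its length, and the jlong context that is cast back to the VideoCaptureAndroid instance above. A rough sketch of the Java caller follows; the field name and buffer handling are assumptions, not code from this change.

// Sketch only; must stay in sync with the "([BIJ)V" registration above.
import android.hardware.Camera;

public class VideoCaptureAndroid implements Camera.PreviewCallback {
  private final long native_capturer;  // jlong passed to the "(IJ)V" ctor.

  public VideoCaptureAndroid(int id, long native_capturer) {
    this.native_capturer = native_capturer;
  }

  // Runs on the camera thread; hands the NV21 buffer straight to C++.
  // ReleaseByteArrayElements(JNI_ABORT) on the native side means the
  // array contents are never copied back to Java.
  public void onPreviewFrame(byte[] data, Camera camera) {
    ProvideCameraFrame(data, data.length, native_capturer);
    camera.addCallbackBuffer(data);  // Return the buffer to the queue.
  }

  private static native void ProvideCameraFrame(
      byte[] data, int length, long captureObject);
}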
JavaVM* VideoCaptureAndroid::g_jvm = NULL;
//VideoCaptureAndroid.java
jclass VideoCaptureAndroid::g_javaCmClass = NULL;
//VideoCaptureDeviceInfoAndroid.java
jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL;
//static instance of VideoCaptureDeviceInfoAndroid.java
jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL;
jobject VideoCaptureAndroid::g_javaContext = NULL;
/*
* Register references to Java Capture class.
*/
int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
void* javaContext) {
g_jvm = static_cast<JavaVM*> (javaVM);
g_javaContext = static_cast<jobject> (javaContext);
if (javaVM) {
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not get Java environment", __FUNCTION__);
return -1;
}
// get java capture class type (note path to class packet)
jclass javaCmClassLocal = env->FindClass(AndroidJavaCaptureClass);
if (!javaCmClassLocal) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not find java class", __FUNCTION__);
return -1;
}
// create a global reference to the class
// (to tell JNI that we are referencing it
// after this function has returned)
g_javaCmClass = static_cast<jclass>
(env->NewGlobalRef(javaCmClassLocal));
if (!g_javaCmClass) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: InitVideoEngineJava(): could not create"
" Java Camera class reference",
__FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaCmClassLocal);
JNINativeMethod nativeFunctions =
{ "ProvideCameraFrame", "([BIJ)V",
(void*) &VideoCaptureAndroid::ProvideCameraFrame };
if (env->RegisterNatives(g_javaCmClass, &nativeFunctions, 1) == 0) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: Registered native functions", __FUNCTION__);
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to register native functions",
__FUNCTION__);
return -1;
}
jclass capabilityClassLocal = env->FindClass(
"org/webrtc/videoengine/CaptureCapabilityAndroid");
if (!capabilityClassLocal) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not find java class", __FUNCTION__);
return -1;
}
jclass capabilityClassGlobal = reinterpret_cast<jclass>(env->NewGlobalRef(
capabilityClassLocal));
DeviceInfoAndroid::SetAndroidCaptureClasses(capabilityClassGlobal);
// get java capture class type (note path to class packet)
jclass javaCmDevInfoClassLocal = env->FindClass(
"org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid");
if (!javaCmDevInfoClassLocal) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not find java class", __FUNCTION__);
return -1;
}
// create a global reference to the class
// (to tell JNI that we are referencing it
// after this function has returned)
g_javaCmDevInfoClass = static_cast<jclass>
(env->NewGlobalRef(javaCmDevInfoClassLocal));
if (!g_javaCmDevInfoClass) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: InitVideoEngineJava(): could not create Java "
"Camera Device info class reference",
__FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaCmDevInfoClassLocal);
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"VideoCaptureDeviceInfoAndroid get method id");
// get the method ID for the Android Java CaptureClass static
//CreateVideoCaptureAndroid factory method.
jmethodID cid = env->GetStaticMethodID(
g_javaCmDevInfoClass,
"CreateVideoCaptureDeviceInfoAndroid",
"(ILandroid/content/Context;)"
"Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;");
if (cid == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not get java"
"VideoCaptureDeviceInfoAndroid constructor ID",
__FUNCTION__);
return -1;
}
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: construct static java device object", __FUNCTION__);
// construct the object by calling the static constructor object
jobject javaCameraDeviceInfoObjLocal =
env->CallStaticObjectMethod(g_javaCmDevInfoClass,
cid, (int) -1,
g_javaContext);
if (!javaCameraDeviceInfoObjLocal) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
"%s: could not create Java Capture Device info object",
__FUNCTION__);
return -1;
}
// create a reference to the object (to tell JNI that
// we are referencing it after this function has returned)
g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal);
if (!g_javaCmDevInfoObject) {
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceAudioDevice,
-1,
"%s: could not create Java"
"cameradevinceinfo object reference",
__FUNCTION__);
return -1;
}
// Delete local object ref, we only use the global ref
env->DeleteLocalRef(javaCameraDeviceInfoObjLocal);
return 0;
}
else {
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: JVM is NULL, assuming deinit", __FUNCTION__);
if (!g_jvm) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: SetAndroidObjects not called with a valid JVM.",
__FUNCTION__);
return -1;
}
JNIEnv* env = NULL;
bool attached = false;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
-1, "%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
attached = true;
}
env->DeleteGlobalRef(g_javaCmDevInfoObject);
env->DeleteGlobalRef(g_javaCmDevInfoClass);
env->DeleteGlobalRef(g_javaCmClass);
if (attached && g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
"%s: Could not detach thread from JVM", __FUNCTION__);
return -1;
}
return 0;
env = (JNIEnv *) NULL;
}
return 0;
}
int32_t VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
JNIEnv*& env,
jclass& javaCmDevInfoClass,
jobject& javaCmDevInfoObject,
bool& attached) {
// get the JNI env for this thread
if (!g_jvm) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: SetAndroidObjects not called with a valid JVM.",
__FUNCTION__);
return -1;
}
attached = false;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
attached = true;
}
javaCmDevInfoClass = g_javaCmDevInfoClass;
javaCmDevInfoObject = g_javaCmDevInfoObject;
return 0;
}
int32_t VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(
bool attached) {
if (attached && g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
"%s: Could not detach thread from JVM", __FUNCTION__);
return -1;
}
return 0;
}
/*
* JNI callback from Java class. Called
* when the camera has a new frame to deliver
* Class: org_webrtc_capturemodule_VideoCaptureAndroid
* Method: ProvideCameraFrame
* Signature: ([BIJ)V
*/
void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env,
jobject,
jbyteArray javaCameraFrame,
jint length,
jlong context) {
VideoCaptureAndroid* captureModule =
reinterpret_cast<VideoCaptureAndroid*>(context);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture,
-1, "%s: IncomingFrame %d", __FUNCTION__,length);
jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL);
captureModule->IncomingFrame((uint8_t*) cameraFrame,
length,captureModule->_frameInfo,0);
env->ReleaseByteArrayElements(javaCameraFrame,cameraFrame,JNI_ABORT);
}
VideoCaptureAndroid::VideoCaptureAndroid(const int32_t id)
-    : VideoCaptureImpl(id), _capInfo(id), _javaCaptureObj(NULL),
+    : VideoCaptureImpl(id),
+      _deviceInfo(id),
+      _jCapturer(NULL),
      _captureStarted(false) {
-WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
-"%s: context %x", __FUNCTION__, (int) this);
}
// ----------------------------------------------------------------------------
// Init
//
// Initializes needed Java resources like the JNI interface to
// VideoCaptureAndroid.java
// ----------------------------------------------------------------------------
int32_t VideoCaptureAndroid::Init(const int32_t id,
                                  const char* deviceUniqueIdUTF8) {
const int nameLength = strlen(deviceUniqueIdUTF8);
-if (nameLength >= kVideoCaptureUniqueNameLength) {
-return -1;
-}
+if (nameLength >= kVideoCaptureUniqueNameLength)
+return -1;
// Store the device name
_deviceUniqueId = new char[nameLength + 1];
memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);

+AttachThreadScoped ats(g_jvm);
+JNIEnv* env = ats.env();
+jmethodID ctor = env->GetMethodID(g_java_capturer_class, "<init>", "(IJ)V");
+assert(ctor);
+jlong j_this = reinterpret_cast<intptr_t>(this);
+size_t camera_id = 0;
+if (!_deviceInfo.FindCameraIndex(deviceUniqueIdUTF8, &camera_id))
+return -1;
+_jCapturer = env->NewGlobalRef(
+    env->NewObject(g_java_capturer_class, ctor, camera_id, j_this));
+assert(_jCapturer);
-if (_capInfo.Init() != 0) {
-WEBRTC_TRACE(webrtc::kTraceError,
-webrtc::kTraceVideoCapture,
-_id,
-"%s: Failed to initialize CaptureDeviceInfo",
-__FUNCTION__);
-return -1;
-}
-WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
-__FUNCTION__);
// use the jvm that has been set
if (!g_jvm) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Not a valid Java VM pointer", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
JNIEnv *env;
bool isAttached = false;
// get the JNI env for this thread
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"get method id");
// get the method ID for the Android Java
// CaptureDeviceInfoClass AllocateCamera factory method.
char signature[256];
sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass);
jmethodID cid = env->GetMethodID(g_javaCmDevInfoClass, "AllocateCamera",
signature);
if (cid == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: could not get constructor ID", __FUNCTION__);
return -1; /* exception thrown */
}
jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
// construct the object by calling the static constructor object
jobject javaCameraObjLocal = env->CallObjectMethod(g_javaCmDevInfoObject,
cid, (jint) id,
(jlong) this,
capureIdString);
if (!javaCameraObjLocal) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
"%s: could not create Java Capture object", __FUNCTION__);
return -1;
}
// create a reference to the object (to tell JNI that we are referencing it
// after this function has returned)
_javaCaptureObj = env->NewGlobalRef(javaCameraObjLocal);
if (!_javaCaptureObj) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, _id,
"%s: could not create Java camera object reference",
__FUNCTION__);
return -1;
}
// Delete local object ref, we only use the global ref
env->DeleteLocalRef(javaCameraObjLocal);
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
return 0;
}
VideoCaptureAndroid::~VideoCaptureAndroid() {
+// Ensure Java camera is released even if our caller didn't explicitly Stop.
+if (_captureStarted)
+StopCapture();
+AttachThreadScoped ats(g_jvm);
+ats.env()->DeleteGlobalRef(_jCapturer);
-WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
-__FUNCTION__);
-if (_javaCaptureObj == NULL || g_jvm == NULL) {
-WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-"%s: Nothing to clean", __FUNCTION__);
}
else {
bool isAttached = false;
// get the JNI env for this thread
JNIEnv *env;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
}
else {
isAttached = true;
}
}
// get the method ID for the Android Java CaptureClass static
// DeleteVideoCaptureAndroid method. Call this to release the camera so
// another application can use it.
jmethodID cid = env->GetStaticMethodID(
g_javaCmClass,
"DeleteVideoCaptureAndroid",
"(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
if (cid != NULL) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: Call DeleteVideoCaptureAndroid", __FUNCTION__);
// Close the camera by calling the static destruct function.
env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj);
// Delete global object ref to the camera.
env->DeleteGlobalRef(_javaCaptureObj);
_javaCaptureObj = NULL;
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to find DeleteVideoCaptureAndroid id",
__FUNCTION__);
}
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
_id, "%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
}
int32_t VideoCaptureAndroid::StartCapture(
    const VideoCaptureCapability& capability) {
CriticalSectionScoped cs(&_apiCs);
+AttachThreadScoped ats(g_jvm);
+JNIEnv* env = ats.env();
+if (_deviceInfo.GetBestMatchedCapability(
+        _deviceUniqueId, capability, _captureCapability) < 0) {
-WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-"%s: ", __FUNCTION__);
-bool isAttached = false;
-int32_t result = 0;
// get the JNI env for this thread
JNIEnv *env;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
}
else {
isAttached = true;
}
}
if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability,
_frameInfo) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-"%s: GetBestMatchedCapability failed. Req cap w%d h%d",
+"%s: GetBestMatchedCapability failed: %dx%d",
__FUNCTION__, capability.width, capability.height);
return -1;
}

-// Store the new expected capture delay
-_captureDelay = _frameInfo.expectedCaptureDelay;
+_captureDelay = _captureCapability.expectedCaptureDelay;

+jmethodID j_start =
+    env->GetMethodID(g_java_capturer_class, "startCapture", "(IIII)Z");
+assert(j_start);
+int min_mfps = 0;
+int max_mfps = 0;
+_deviceInfo.GetFpsRange(_deviceUniqueId, &min_mfps, &max_mfps);
+bool started = env->CallBooleanMethod(_jCapturer, j_start,
+                                      _captureCapability.width,
+                                      _captureCapability.height,
+                                      min_mfps, max_mfps);
-WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
-"%s: _frameInfo w%d h%d", __FUNCTION__, _frameInfo.width,
-_frameInfo.height);
-// get the method ID for the Android Java
-// CaptureClass static StartCapture method.
-jmethodID cid = env->GetMethodID(g_javaCmClass, "StartCapture", "(III)I");
-if (cid != NULL) {
-WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
-"%s: Call StartCapture", __FUNCTION__);
-// Close the camera by calling the static destruct function.
-result = env->CallIntMethod(_javaCaptureObj, cid, _frameInfo.width,
-_frameInfo.height, _frameInfo.maxFPS);
-}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to find StartCapture id", __FUNCTION__);
}
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
-if (result == 0) {
+if (started) {
_requestedCapability = capability;
_captureStarted = true;
}
-WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-"%s: result %d", __FUNCTION__, result);
-return result;
+return started ? 0 : -1;
}
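The "(IIII)Z" lookup means the Java class must expose boolean startCapture(int width, int height, int min_mfps, int max_mfps), with the fps bounds in milli-frames-per-second as provided by GetFpsRange above. A sketch of what such a method might look like; the camera field and error handling are assumptions, not code from this commit.

// Sketch only: 'camera' is assumed to be an open android.hardware.Camera.
private synchronized boolean startCapture(
    int width, int height, int min_mfps, int max_mfps) {
  try {
    Camera.Parameters parameters = camera.getParameters();
    parameters.setPreviewSize(width, height);
    // setPreviewFpsRange() takes fps scaled by 1000, i.e. milli-fps.
    parameters.setPreviewFpsRange(min_mfps, max_mfps);
    camera.setParameters(parameters);
    camera.startPreview();
    return true;
  } catch (RuntimeException e) {
    return false;
  }
}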
int32_t VideoCaptureAndroid::StopCapture() {
CriticalSectionScoped cs(&_apiCs);
+AttachThreadScoped ats(g_jvm);
+JNIEnv* env = ats.env();
-WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-"%s: ", __FUNCTION__);
bool isAttached = false;
int32_t result = 0;
// get the JNI env for this thread
JNIEnv *env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
}
else {
isAttached = true;
}
}
memset(&_requestedCapability, 0, sizeof(_requestedCapability));
-memset(&_frameInfo, 0, sizeof(_frameInfo));
+memset(&_captureCapability, 0, sizeof(_captureCapability));
// get the method ID for the Android Java CaptureClass StopCapture method.
jmethodID cid = env->GetMethodID(g_javaCmClass, "StopCapture", "()I");
if (cid != NULL) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
"%s: Call StopCapture", __FUNCTION__);
// Close the camera by calling the static destruct function.
result = env->CallIntMethod(_javaCaptureObj, cid);
}
else {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to find StopCapture id", __FUNCTION__);
}
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
_captureStarted = false;
-WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-"%s: result %d", __FUNCTION__, result);
-return result;
+jmethodID j_stop =
+    env->GetMethodID(g_java_capturer_class, "stopCapture", "()Z");
+return env->CallBooleanMethod(_jCapturer, j_stop) ? 0 : -1;
}
bool VideoCaptureAndroid::CaptureStarted() {
CriticalSectionScoped cs(&_apiCs);
-WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-"%s: ", __FUNCTION__);
return _captureStarted;
}

int32_t VideoCaptureAndroid::CaptureSettings(
    VideoCaptureCapability& settings) {
CriticalSectionScoped cs(&_apiCs);
-WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-"%s: ", __FUNCTION__);
settings = _requestedCapability;
return 0;
}
@@ -610,64 +184,20 @@ int32_t VideoCaptureAndroid::CaptureSettings(
int32_t VideoCaptureAndroid::SetCaptureRotation(
    VideoCaptureRotation rotation) {
CriticalSectionScoped cs(&_apiCs);
+if (VideoCaptureImpl::SetCaptureRotation(rotation) != 0)
+return 0;
+AttachThreadScoped ats(g_jvm);
+JNIEnv* env = ats.env();
-if (VideoCaptureImpl::SetCaptureRotation(rotation) == 0) {
-if (!g_jvm)
-return -1;
-// get the JNI env for this thread
-JNIEnv *env;
bool isAttached = false;
// get the JNI env for this thread
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
jmethodID cid = env->GetMethodID(g_javaCmClass, "SetPreviewRotation",
"(I)V");
if (cid == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not get java SetPreviewRotation ID",
__FUNCTION__);
return -1;
}
jint rotateFrame = 0;
switch (rotation) {
case kCameraRotate0:
rotateFrame = 0;
break;
case kCameraRotate90:
rotateFrame = 90;
break;
case kCameraRotate180:
rotateFrame = 180;
break;
case kCameraRotate270:
rotateFrame = 270;
break;
}
env->CallVoidMethod(_javaCaptureObj, cid, rotateFrame);
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
_id, "%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
-}
+jmethodID j_spr =
+    env->GetMethodID(g_java_capturer_class, "setPreviewRotation", "(I)V");
+assert(j_spr);
+int rotation_degrees;
+if (RotationInDegrees(rotation, &rotation_degrees) != 0) {
+assert(false);
+}
+env->CallVoidMethod(_jCapturer, j_spr, rotation_degrees);
return 0;
}
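The "(I)V" lookup maps setPreviewRotation to void setPreviewRotation(int) taking degrees. One plausible Java-side implementation, assuming a camera field and using the platform's Camera.setDisplayOrientation, is sketched below.

// Sketch only; matches the "(I)V" signature resolved above.
private synchronized void setPreviewRotation(int rotation) {
  if (camera != null) {
    camera.setDisplayOrientation(rotation);  // Degrees: 0, 90, 180 or 270.
  }
}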

@@ -16,49 +16,31 @@
#include "webrtc/modules/video_capture/android/device_info_android.h" #include "webrtc/modules/video_capture/android/device_info_android.h"
#include "webrtc/modules/video_capture/video_capture_impl.h" #include "webrtc/modules/video_capture/video_capture_impl.h"
#define AndroidJavaCaptureClass "org/webrtc/videoengine/VideoCaptureAndroid"
namespace webrtc { namespace webrtc {
namespace videocapturemodule { namespace videocapturemodule {
class VideoCaptureAndroid : public VideoCaptureImpl { class VideoCaptureAndroid : public VideoCaptureImpl {
public: public:
static int32_t SetAndroidObjects(void* javaVM, void* javaContext);
static int32_t AttachAndUseAndroidDeviceInfoObjects(
JNIEnv*& env,
jclass& javaCmDevInfoClass,
jobject& javaCmDevInfoObject,
bool& attached);
static int32_t ReleaseAndroidDeviceInfoObjects(bool attached);
VideoCaptureAndroid(const int32_t id); VideoCaptureAndroid(const int32_t id);
virtual int32_t Init(const int32_t id, const char* deviceUniqueIdUTF8); virtual int32_t Init(const int32_t id, const char* deviceUniqueIdUTF8);
virtual int32_t StartCapture(const VideoCaptureCapability& capability);
virtual int32_t StartCapture(
const VideoCaptureCapability& capability);
virtual int32_t StopCapture(); virtual int32_t StopCapture();
virtual bool CaptureStarted(); virtual bool CaptureStarted();
virtual int32_t CaptureSettings(VideoCaptureCapability& settings); virtual int32_t CaptureSettings(VideoCaptureCapability& settings);
virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation); virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation);
int32_t OnIncomingFrame(uint8_t* videoFrame,
int32_t videoFrameLength,
int64_t captureTime = 0);
protected: protected:
virtual ~VideoCaptureAndroid(); virtual ~VideoCaptureAndroid();
static void JNICALL ProvideCameraFrame (JNIEnv * env,
jobject,
jbyteArray javaCameraFrame,
jint length, jlong context);
DeviceInfoAndroid _capInfo;
jobject _javaCaptureObj; // Java Camera object.
VideoCaptureCapability _frameInfo;
bool _captureStarted;
static JavaVM* g_jvm; DeviceInfoAndroid _deviceInfo;
static jclass g_javaCmClass; jobject _jCapturer; // Global ref to Java VideoCaptureAndroid object.
static jclass g_javaCmDevInfoClass; VideoCaptureCapability _captureCapability;
//Static java object implementing the needed device info functions; bool _captureStarted;
static jobject g_javaCmDevInfoObject;
static jobject g_javaContext; // Java Application context
}; };
} // namespace videocapturemodule } // namespace videocapturemodule

@@ -14,10 +14,14 @@
#include "webrtc/modules/interface/module.h" #include "webrtc/modules/interface/module.h"
#include "webrtc/modules/video_capture/include/video_capture_defines.h" #include "webrtc/modules/video_capture/include/video_capture_defines.h"
#ifdef ANDROID
#include <jni.h>
#endif
namespace webrtc { namespace webrtc {
#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD) #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
int32_t SetCaptureAndroidVM(void* javaVM, void* javaContext); int32_t SetCaptureAndroidVM(JavaVM* javaVM);
#endif #endif
class VideoCaptureModule: public RefCountedModule { class VideoCaptureModule: public RefCountedModule {

@@ -91,6 +91,10 @@
    },
  }],  # win
  ['OS=="android"', {
+    'dependencies': [
+      '<(DEPTH)/third_party/icu/icu.gyp:icuuc',
+      '<(DEPTH)/third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
+    ],
    'sources': [
      'android/device_info_android.cc',
      'android/device_info_android.h',

@@ -41,6 +41,47 @@ const char* VideoCaptureImpl::CurrentDeviceName() const
return _deviceUniqueId;
}
// static
int32_t VideoCaptureImpl::RotationFromDegrees(int degrees,
VideoCaptureRotation* rotation) {
switch (degrees) {
case 0:
*rotation = kCameraRotate0;
return 0;
case 90:
*rotation = kCameraRotate90;
return 0;
case 180:
*rotation = kCameraRotate180;
return 0;
case 270:
*rotation = kCameraRotate270;
return 0;
default:
return -1;
}
}
// static
int32_t VideoCaptureImpl::RotationInDegrees(VideoCaptureRotation rotation,
int* degrees) {
switch (rotation) {
case kCameraRotate0:
*degrees = 0;
return 0;
case kCameraRotate90:
*degrees = 90;
return 0;
case kCameraRotate180:
*degrees = 180;
return 0;
case kCameraRotate270:
*degrees = 270;
return 0;
}
return -1;
}
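These helpers shuttle between VideoCaptureRotation and the plain degree values that Android's camera APIs expect. For reference, the Android SDK's documented computation that produces such a degree value from the display rotation and CameraInfo.orientation looks like this in Java (a standard pattern from the Camera.setDisplayOrientation docs, not code from this change):

// 'degrees' is the current display rotation: 0, 90, 180 or 270.
static int displayOrientation(Camera.CameraInfo info, int degrees) {
  if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
    int result = (info.orientation + degrees) % 360;
    return (360 - result) % 360;  // Compensate for the front-camera mirror.
  }
  return (info.orientation - degrees + 360) % 360;
}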
int32_t VideoCaptureImpl::ChangeUniqueId(const int32_t id)
{
_id = id;
@@ -358,6 +399,8 @@ int32_t VideoCaptureImpl::SetCaptureRotation(VideoCaptureRotation rotation) {
case kCameraRotate270:
_rotateFrame = kRotate270;
break;
+default:
+return -1;
}
return 0;
}

@@ -51,6 +51,13 @@ public:
static DeviceInfo* CreateDeviceInfo(const int32_t id);
// Helpers for converting between (integral) degrees and
// VideoCaptureRotation values. Return 0 on success.
static int32_t RotationFromDegrees(int degrees,
VideoCaptureRotation* rotation);
static int32_t RotationInDegrees(VideoCaptureRotation rotation,
int* degrees);
// Implements Module declared functions.
virtual int32_t ChangeUniqueId(const int32_t id);

@@ -142,6 +142,11 @@
'WEBRTC_CLOCK_TYPE_REALTIME',
],
'dependencies': [ 'cpu_features_android', ],
+'link_settings': {
+  'libraries': [
+    '-llog',
+  ],
+},
}, {  # OS!="android"
'sources!': [
  '../interface/logcat_trace_context.h',

@@ -21,6 +21,10 @@
#include "webrtc/common_types.h" #include "webrtc/common_types.h"
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
#include <jni.h>
#endif
namespace webrtc { namespace webrtc {
class Config; class Config;
@ -61,10 +65,10 @@ class WEBRTC_DLLEXPORT VideoEngine {
// user receives callbacks for generated trace messages. // user receives callbacks for generated trace messages.
static int SetTraceCallback(TraceCallback* callback); static int SetTraceCallback(TraceCallback* callback);
#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
// Android specific. // Android specific.
// Provides VideoEngine with pointers to objects supplied by the Java static int SetAndroidObjects(JavaVM* java_vm);
// applications JNI interface. #endif
static int SetAndroidObjects(void* java_vm, void* java_context);
protected: protected:
VideoEngine() {} VideoEngine() {}

@@ -311,7 +311,7 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideo
return 0;
}

-VideoEngine::SetAndroidObjects(webrtcGlobalVM, context);
+VideoEngine::SetAndroidObjects(webrtcGlobalVM);

// Create
vieData.vie = VideoEngine::Create();

@@ -22,7 +22,6 @@ LOCAL_MODULE_TAGS := tests
LOCAL_SRC_FILES := \
    src/org/webrtc/vieautotest/ViEAutotest.java \
-    $(MY_CAPTURE_PATH)/CaptureCapabilityAndroid.java \
    $(MY_CAPTURE_PATH)/VideoCaptureAndroid.java \
    $(MY_CAPTURE_PATH)/VideoCaptureDeviceInfoAndroid.java \
    $(MY_RENDER_PATH)/ViEAndroidGLES20.java \

@@ -11,16 +11,17 @@
#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_

+#include <jni.h>

-class ViEAutoTestAndroid
-{
- public:
-  static int RunAutotest(int testSelection,
-                         int subTestSelection,
-                         void* window1,
-                         void* window2,
-                         void* javaVM,
-                         void* env,
-                         void* context);
+class ViEAutoTestAndroid {
+ public:
+  static int RunAutotest(int testSelection,
+                         int subTestSelection,
+                         void* window1,
+                         void* window2,
+                         JavaVM* javaVM,
+                         void* env,
+                         void* context);
};

#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_

@@ -18,10 +18,10 @@
int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
                                    void* window1, void* window2,
-                                    void* javaVM, void* env, void* context) {
+                                    JavaVM* javaVM, void* env, void* context) {
ViEAutoTest vieAutoTest(window1, window2);
ViETest::Log("RunAutoTest(%d, %d)", testSelection, subTestSelection);
-webrtc::VideoEngine::SetAndroidObjects(javaVM, context);
+webrtc::VideoEngine::SetAndroidObjects(javaVM);
#ifndef WEBRTC_ANDROID_OPENSLES
// voice engine calls into ADM directly
webrtc::VoiceEngine::SetAndroidObjects(javaVM, env, context);

@@ -163,12 +163,12 @@ int VideoEngine::SetTraceCallback(TraceCallback* callback) {
return Trace::SetTraceCallback(callback);
}

-int VideoEngine::SetAndroidObjects(void* javaVM, void* javaContext) {
+#if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+int VideoEngine::SetAndroidObjects(JavaVM* javaVM) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo, kModuleId,
             "SetAndroidObjects()");

-#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
-if (SetCaptureAndroidVM(javaVM, javaContext) != 0) {
+if (SetCaptureAndroidVM(javaVM) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
             "Could not set capture Android VM");
return -1;
@@ -179,11 +179,7 @@ int VideoEngine::SetAndroidObjects(void* javaVM, void* javaContext) {
return -1;
}
return 0;
-#else
-WEBRTC_TRACE(kTraceError, kTraceVideo, kModuleId,
-"WEBRTC_ANDROID not defined for VideoEngine::SetAndroidObjects");
-return -1;
-#endif
}
+#endif

}  // namespace webrtc