Android JNI: Clean up AndroidVideoTrackSource and NativeHandleImpl

I'm preparing to add support for Java VideoFrames in
AndroidVideoTrackSource. I split the small, unrelated clean-ups out into this
CL in order to keep the big CL more focused.

Bug: webrtc:7749
Change-Id: Ib261ab8eb055898b39307d4e78935bf60d323820
Reviewed-on: https://chromium-review.googlesource.com/539638
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18660}
This commit is contained in:
Magnus Jedvert
2017-06-19 15:04:19 +02:00
committed by Commit Bot
parent 04f4d126f8
commit 3093ef193e
4 changed files with 27 additions and 29 deletions

View File

@ -55,11 +55,9 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
int length, int length,
int width, int width,
int height, int height,
int rotation, VideoRotation rotation,
int64_t timestamp_ns) { int64_t timestamp_ns) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
rotation == 270);
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec; int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us = int64_t translated_camera_time_us =
@ -91,7 +89,7 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
y_plane += width * crop_y + crop_x; y_plane += width * crop_y + crop_x;
uv_plane += uv_width * crop_y + crop_x; uv_plane += uv_width * crop_y + crop_x;
rtc::scoped_refptr<webrtc::I420Buffer> buffer = rtc::scoped_refptr<I420Buffer> buffer =
buffer_pool_.CreateBuffer(adapted_width, adapted_height); buffer_pool_.CreateBuffer(adapted_width, adapted_height);
nv12toi420_scaler_.NV12ToI420Scale( nv12toi420_scaler_.NV12ToI420Scale(
@ -101,19 +99,16 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(), buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
buffer->StrideU(), buffer->width(), buffer->height()); buffer->StrideU(), buffer->width(), buffer->height());
OnFrame(VideoFrame(buffer, static_cast<webrtc::VideoRotation>(rotation), OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));
translated_camera_time_us));
} }
void AndroidVideoTrackSource::OnTextureFrameCaptured( void AndroidVideoTrackSource::OnTextureFrameCaptured(
int width, int width,
int height, int height,
int rotation, VideoRotation rotation,
int64_t timestamp_ns, int64_t timestamp_ns,
const webrtc_jni::NativeHandleImpl& handle) { const webrtc_jni::NativeHandleImpl& handle) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread()); RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
rotation == 270);
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec; int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us = int64_t translated_camera_time_us =
@ -140,25 +135,21 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured(
crop_x / static_cast<float>(width), crop_x / static_cast<float>(width),
crop_y / static_cast<float>(height)); crop_y / static_cast<float>(height));
// Make a local copy, since value of apply_rotation() may change // Note that apply_rotation() may change under our feet, so we should only
// under our feet. // check once.
bool do_rotate = apply_rotation(); if (apply_rotation()) {
if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
if (do_rotate) {
if (rotation == webrtc::kVideoRotation_90 ||
rotation == webrtc::kVideoRotation_270) {
std::swap(adapted_width, adapted_height); std::swap(adapted_width, adapted_height);
} }
matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation)); matrix.Rotate(rotation);
rotation = kVideoRotation_0;
} }
OnFrame(VideoFrame( OnFrame(VideoFrame(
surface_texture_helper_->CreateTextureFrame( surface_texture_helper_->CreateTextureFrame(
adapted_width, adapted_height, adapted_width, adapted_height,
webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)), webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
do_rotate ? webrtc::kVideoRotation_0 rotation, translated_camera_time_us));
: static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us));
} }
void AndroidVideoTrackSource::OnOutputFormatRequest(int width, void AndroidVideoTrackSource::OnOutputFormatRequest(int width,

View File

@ -50,12 +50,12 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
int length, int length,
int width, int width,
int height, int height,
int rotation, VideoRotation rotation,
int64_t timestamp_ns); int64_t timestamp_ns);
void OnTextureFrameCaptured(int width, void OnTextureFrameCaptured(int width,
int height, int height,
int rotation, VideoRotation rotation,
int64_t timestamp_ns, int64_t timestamp_ns,
const webrtc_jni::NativeHandleImpl& handle); const webrtc_jni::NativeHandleImpl& handle);
@ -73,8 +73,8 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
SourceState state_; SourceState state_;
rtc::VideoBroadcaster broadcaster_; rtc::VideoBroadcaster broadcaster_;
rtc::TimestampAligner timestamp_aligner_; rtc::TimestampAligner timestamp_aligner_;
webrtc::NV12ToI420Scaler nv12toi420_scaler_; NV12ToI420Scaler nv12toi420_scaler_;
webrtc::I420BufferPool buffer_pool_; I420BufferPool buffer_pool_;
rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper> surface_texture_helper_; rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper> surface_texture_helper_;
const bool is_screencast_; const bool is_screencast_;
}; };

View File

@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree. * be found in the AUTHORS file in the root of the source tree.
*/ */
#include "webrtc/api/video/video_rotation.h"
#include "webrtc/api/videosourceproxy.h" #include "webrtc/api/videosourceproxy.h"
#include "webrtc/base/logging.h" #include "webrtc/base/logging.h"
#include "webrtc/sdk/android/src/jni/androidvideotracksource.h" #include "webrtc/sdk/android/src/jni/androidvideotracksource.h"
@ -18,6 +19,12 @@
#define JOW_OBSERVER_METHOD(rettype, name) \ #define JOW_OBSERVER_METHOD(rettype, name) \
JOW(rettype, AndroidVideoTrackSourceObserver_##name) JOW(rettype, AndroidVideoTrackSourceObserver_##name)
static webrtc::VideoRotation jintToVideoRotation(jint rotation) {
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
rotation == 270);
return static_cast<webrtc::VideoRotation>(rotation);
}
namespace webrtc_jni { namespace webrtc_jni {
static webrtc::AndroidVideoTrackSource* AndroidVideoTrackSourceFromJavaProxy( static webrtc::AndroidVideoTrackSource* AndroidVideoTrackSourceFromJavaProxy(
@ -40,8 +47,8 @@ JOW_OBSERVER_METHOD(void, nativeOnByteBufferFrameCaptured)
webrtc::AndroidVideoTrackSource* source = webrtc::AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source); AndroidVideoTrackSourceFromJavaProxy(j_source);
jbyte* bytes = jni->GetByteArrayElements(j_frame, nullptr); jbyte* bytes = jni->GetByteArrayElements(j_frame, nullptr);
source->OnByteBufferFrameCaptured(bytes, length, width, height, rotation, source->OnByteBufferFrameCaptured(bytes, length, width, height,
timestamp); jintToVideoRotation(rotation), timestamp);
jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT); jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
} }
@ -58,7 +65,7 @@ JOW_OBSERVER_METHOD(void, nativeOnTextureFrameCaptured)
webrtc::AndroidVideoTrackSource* source = webrtc::AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source); AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnTextureFrameCaptured( source->OnTextureFrameCaptured(
j_width, j_height, j_rotation, j_timestamp, j_width, j_height, jintToVideoRotation(j_rotation), j_timestamp,
NativeHandleImpl(jni, j_oes_texture_id, j_transform_matrix)); NativeHandleImpl(jni, j_oes_texture_id, j_transform_matrix));
} }