Android JNI: Clean up AndroidVideoTrackSource and NativeHandleImpl

I'm preparing to add support for Java VideoFrames in
AndroidVideoTrackSource. I've split these small, unrelated clean-ups out
into this CL to keep the big CL more focused.

Bug: webrtc:7749
Change-Id: Ib261ab8eb055898b39307d4e78935bf60d323820
Reviewed-on: https://chromium-review.googlesource.com/539638
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18660}
Author: Magnus Jedvert
Date: 2017-06-19 15:04:19 +02:00
Committed by: Commit Bot
Parent: 04f4d126f8
Commit: 3093ef193e
4 changed files with 27 additions and 29 deletions


@@ -55,11 +55,9 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                         int length,
                                                         int width,
                                                         int height,
-                                                        int rotation,
+                                                        VideoRotation rotation,
                                                         int64_t timestamp_ns) {
   RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
-  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
-             rotation == 270);
 
   int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
   int64_t translated_camera_time_us =
@@ -91,7 +89,7 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
   y_plane += width * crop_y + crop_x;
   uv_plane += uv_width * crop_y + crop_x;
 
-  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+  rtc::scoped_refptr<I420Buffer> buffer =
       buffer_pool_.CreateBuffer(adapted_width, adapted_height);
 
   nv12toi420_scaler_.NV12ToI420Scale(
@@ -101,19 +99,16 @@ void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
       buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
       buffer->StrideU(), buffer->width(), buffer->height());
 
-  OnFrame(VideoFrame(buffer, static_cast<webrtc::VideoRotation>(rotation),
-                     translated_camera_time_us));
+  OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));
 }
 
 void AndroidVideoTrackSource::OnTextureFrameCaptured(
     int width,
     int height,
-    int rotation,
+    VideoRotation rotation,
     int64_t timestamp_ns,
     const webrtc_jni::NativeHandleImpl& handle) {
   RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
-  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
-             rotation == 270);
 
   int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
   int64_t translated_camera_time_us =
@@ -140,25 +135,21 @@ void AndroidVideoTrackSource::OnTextureFrameCaptured(
       crop_x / static_cast<float>(width),
       crop_y / static_cast<float>(height));
 
-  // Make a local copy, since value of apply_rotation() may change
-  // under our feet.
-  bool do_rotate = apply_rotation();
-
-  if (do_rotate) {
-    if (rotation == webrtc::kVideoRotation_90 ||
-        rotation == webrtc::kVideoRotation_270) {
+  // Note that apply_rotation() may change under our feet, so we should only
+  // check once.
+  if (apply_rotation()) {
+    if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
       std::swap(adapted_width, adapted_height);
     }
-    matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
+    matrix.Rotate(rotation);
+    rotation = kVideoRotation_0;
   }
 
   OnFrame(VideoFrame(
       surface_texture_helper_->CreateTextureFrame(
           adapted_width, adapted_height,
           webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
-      do_rotate ? webrtc::kVideoRotation_0
-                : static_cast<webrtc::VideoRotation>(rotation),
-      translated_camera_time_us));
+      rotation, translated_camera_time_us));
 }
 
 void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
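
For context on the rotation handling above: a downstream sink ends up with one of
two cases, either a frame whose rotation has already been folded into the texture
transform matrix (so its metadata says kVideoRotation_0), or an unrotated frame
whose metadata still carries the rotation. A minimal, self-contained sketch of
that invariant, using illustrative names rather than the real webrtc types:

#include <cstdio>
#include <utility>

// Illustrative stand-ins, not the webrtc API.
enum Rotation { kRotation_0 = 0, kRotation_90 = 90, kRotation_180 = 180, kRotation_270 = 270 };

struct DeliveredFrame {
  int width;
  int height;
  Rotation rotation;  // rotation a sink would still have to apply itself
};

DeliveredFrame Deliver(int width, int height, Rotation rotation, bool apply_rotation) {
  if (apply_rotation) {
    // The source rotates the pixels itself (above: by folding the rotation
    // into the texture transform matrix), so the delivered dimensions are
    // swapped for 90/270 and no rotation metadata remains.
    if (rotation == kRotation_90 || rotation == kRotation_270)
      std::swap(width, height);
    rotation = kRotation_0;
  }
  return {width, height, rotation};
}

int main() {
  DeliveredFrame f = Deliver(1280, 720, kRotation_90, /*apply_rotation=*/true);
  std::printf("%dx%d, rotation %d\n", f.width, f.height, f.rotation);  // 720x1280, rotation 0
}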


@@ -50,12 +50,12 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
                                  int length,
                                  int width,
                                  int height,
-                                 int rotation,
+                                 VideoRotation rotation,
                                  int64_t timestamp_ns);
 
   void OnTextureFrameCaptured(int width,
                               int height,
-                              int rotation,
+                              VideoRotation rotation,
                               int64_t timestamp_ns,
                               const webrtc_jni::NativeHandleImpl& handle);
@@ -73,8 +73,8 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
   SourceState state_;
   rtc::VideoBroadcaster broadcaster_;
   rtc::TimestampAligner timestamp_aligner_;
-  webrtc::NV12ToI420Scaler nv12toi420_scaler_;
-  webrtc::I420BufferPool buffer_pool_;
+  NV12ToI420Scaler nv12toi420_scaler_;
+  I420BufferPool buffer_pool_;
   rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper> surface_texture_helper_;
   const bool is_screencast_;
 };


@@ -8,6 +8,7 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
+#include "webrtc/api/video/video_rotation.h"
 #include "webrtc/api/videosourceproxy.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/sdk/android/src/jni/androidvideotracksource.h"
@@ -18,6 +19,12 @@
 #define JOW_OBSERVER_METHOD(rettype, name) \
   JOW(rettype, AndroidVideoTrackSourceObserver_##name)
 
+static webrtc::VideoRotation jintToVideoRotation(jint rotation) {
+  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+             rotation == 270);
+  return static_cast<webrtc::VideoRotation>(rotation);
+}
+
 namespace webrtc_jni {
 
 static webrtc::AndroidVideoTrackSource* AndroidVideoTrackSourceFromJavaProxy(
@@ -40,8 +47,8 @@ JOW_OBSERVER_METHOD(void, nativeOnByteBufferFrameCaptured)
   webrtc::AndroidVideoTrackSource* source =
       AndroidVideoTrackSourceFromJavaProxy(j_source);
   jbyte* bytes = jni->GetByteArrayElements(j_frame, nullptr);
-  source->OnByteBufferFrameCaptured(bytes, length, width, height, rotation,
-                                    timestamp);
+  source->OnByteBufferFrameCaptured(bytes, length, width, height,
+                                    jintToVideoRotation(rotation), timestamp);
   jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
 }
@@ -58,7 +65,7 @@ JOW_OBSERVER_METHOD(void, nativeOnTextureFrameCaptured)
   webrtc::AndroidVideoTrackSource* source =
       AndroidVideoTrackSourceFromJavaProxy(j_source);
   source->OnTextureFrameCaptured(
-      j_width, j_height, j_rotation, j_timestamp,
+      j_width, j_height, jintToVideoRotation(j_rotation), j_timestamp,
       NativeHandleImpl(jni, j_oes_texture_id, j_transform_matrix));
 }
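
A note on jintToVideoRotation() above: webrtc::VideoRotation's enumerators carry
the degree values themselves (0, 90, 180, 270), which is what makes the checked
static_cast a direct mapping from the Java-side int. A self-contained sketch of
the same conversion; the enum below only mirrors those values and is not the
declaration from webrtc/api/video/video_rotation.h:

#include <cassert>

// Stand-in with the same numeric values as webrtc::VideoRotation.
enum VideoRotation {
  kVideoRotation_0 = 0,
  kVideoRotation_90 = 90,
  kVideoRotation_180 = 180,
  kVideoRotation_270 = 270,
};

VideoRotation IntToVideoRotation(int rotation) {
  // Only the four legal camera rotations are accepted; since the enum values
  // are the degrees themselves, the cast is a direct mapping.
  assert(rotation == 0 || rotation == 90 || rotation == 180 || rotation == 270);
  return static_cast<VideoRotation>(rotation);
}

int main() {
  assert(IntToVideoRotation(90) == kVideoRotation_90);
  assert(IntToVideoRotation(270) == kVideoRotation_270);
}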


@@ -169,9 +169,9 @@ int AndroidTextureBuffer::height() const {
 }
 
 rtc::scoped_refptr<webrtc::I420BufferInterface> AndroidTextureBuffer::ToI420() {
-  int uv_width = (width()+7) / 8;
+  int uv_width = (width() + 7) / 8;
   int stride = 8 * uv_width;
-  int uv_height = (height()+1)/2;
+  int uv_height = (height() + 1) / 2;
   size_t size = stride * (height() + uv_height);
   // The data is owned by the frame, and the normal case is that the
   // data is deleted by the frame's destructor callback.
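
Beyond the whitespace fix, the arithmetic in ToI420() above is worth spelling
out: (width() + 7) / 8 is ceil(width / 8), so stride = 8 * uv_width is the width
rounded up to the next multiple of 8, and (height() + 1) / 2 is ceil(height / 2).
A small standalone illustration of the rounding:

#include <cstdio>

int main() {
  const int widths[] = {100, 641, 1280};
  for (int width : widths) {
    int uv_width = (width + 7) / 8;  // ceil(width / 8)
    int stride = 8 * uv_width;       // width rounded up to a multiple of 8
    std::printf("width=%4d -> uv_width=%3d, stride=%4d\n", width, uv_width, stride);
  }
  // width= 100 -> uv_width= 13, stride= 104
  // width= 641 -> uv_width= 81, stride= 648
  // width=1280 -> uv_width=160, stride=1280
}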