Fix handling of non-tightly packed ByteBuffers in HardwareVideoDecoder.

Before this CL, there would be an out-of-bounds write in the ByteBuffer
copying when a decoded frame had height != sliceHeight.

Bug: webrtc:9194
Change-Id: Ibb80e5555e8f00d9e1fd4cb8a73f5e4ccd5a0b81
Tested: 640x360 loopback with eglContext == null in AppRTCMobile on Pixel.
Reviewed-on: https://webrtc-review.googlesource.com/74120
Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23184}
Author: Sami Kalliomäki
Date: 2018-05-08 15:22:08 +02:00
Committed by: Commit Bot
Commit: ee98be7811 (parent: c710ac142e)

9 changed files with 87 additions and 83 deletions
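
The failure mode, in concrete terms: MediaCodec may report a sliceHeight larger than the visible height, so the Y plane in the output buffer carries padding rows and the chroma planes start later than a tightly packed layout would. A rough sketch of the failing arithmetic for the tested 640x360 stream; the sliceHeight of 368 is only an assumption (16-row alignment), the real value is codec-dependent:

  // Hypothetical output geometry for a 640x360 frame (sliceHeight assumed; codec-dependent).
  final int width = 640, height = 360, stride = 640, sliceHeight = 368;
  final int uPos = stride * sliceHeight;  // 235520: U data starts after the padded Y plane.
  final int dstYSize = stride * height;   // 230400: Y plane of JavaI420Buffer.allocate(640, 360), strideY == width.
  // Old path: buffer.limit(uPos); dataY.put(buffer); tries to copy 235520 bytes into a
  // 230400-byte plane, which is the out-of-bounds write the commit message describes.
  // New path: YuvHelper.copyPlane(...) copies exactly `height` rows of `width` bytes,
  // honouring both the source and destination strides.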

@@ -114,8 +114,8 @@ TEST(VideoProcessorIntegrationTestMediaCodec,
 TEST(VideoProcessorIntegrationTestMediaCodec, ForemanMixedRes100kbpsVp8H264) {
   auto config = CreateTestConfig();
   const int kNumFrames = 30;
-  // TODO(brandtr): Add H.264 when we have fixed the encoder.
-  const std::vector<std::string> codecs = {cricket::kVp8CodecName};
+  const std::vector<std::string> codecs = {cricket::kVp8CodecName,
+                                           cricket::kH264CodecName};
   const std::vector<std::tuple<int, int>> resolutions = {
       {128, 96}, {160, 120}, {176, 144}, {240, 136}, {320, 240}, {480, 272}};
   const std::vector<RateProfile> rate_profiles = {

@@ -278,7 +278,6 @@ generate_jni("generated_video_jni") {
     "api/org/webrtc/VideoEncoderFactory.java",
     "api/org/webrtc/VideoEncoderFallback.java",
     "api/org/webrtc/VideoFrame.java",
-    "api/org/webrtc/VideoFrameDrawer.java",
     "api/org/webrtc/VideoRenderer.java",
     "api/org/webrtc/VideoSink.java",
     "api/org/webrtc/VideoSource.java",
@@ -330,7 +329,6 @@ rtc_static_library("video_jni") {
     "src/jni/videoencoderwrapper.h",
     "src/jni/videoframe.cc",
     "src/jni/videoframe.h",
-    "src/jni/videoframedrawer.cc",
     "src/jni/videosink.cc",
     "src/jni/videosink.h",
     "src/jni/videotrack.cc",

@@ -13,8 +13,8 @@ package org.webrtc;
 import android.graphics.Matrix;
 import android.graphics.Point;
 import android.opengl.GLES20;
-import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
+import javax.annotation.Nullable;

 /**
  * Helper class to draw VideoFrames. Calls either drawer.drawOes, drawer.drawRgb, or
@@ -98,8 +98,8 @@ public class VideoFrameDrawer {
           // Input is packed already.
           packedByteBuffer = planes[i];
         } else {
-          nativeCopyPlane(
-              planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
+          YuvHelper.copyPlane(
+              planes[i], strides[i], copyBuffer, planeWidths[i], planeWidths[i], planeHeights[i]);
           packedByteBuffer = copyBuffer;
         }
         GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
@@ -229,8 +229,4 @@ public class VideoFrameDrawer {
     yuvUploader.release();
     lastI420Frame = null;
   }
-
-  // Helper native function to do a video frame plane copying.
-  static native void nativeCopyPlane(
-      ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride);
 }

@@ -97,6 +97,12 @@ public class YuvHelper {
         dstChromaWidth, dstV, dstChromaWidth, srcWidth, srcHeight, rotationMode);
   }

+  /** Helper method for copying a single colour plane. */
+  public static void copyPlane(
+      ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
+    nativeCopyPlane(src, srcStride, dst, dstStride, width, height);
+  }
+
   public static void I420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU, int srcStrideU,
       ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
       int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height) {
@@ -119,6 +125,8 @@ public class YuvHelper {
         dstStrideU, dstV, dstStrideV, srcWidth, srcHeight, rotationMode);
   }

+  private static native void nativeCopyPlane(
+      ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height);
   private static native void nativeI420Copy(ByteBuffer srcY, int srcStrideY, ByteBuffer srcU,
       int srcStrideU, ByteBuffer srcV, int srcStrideV, ByteBuffer dstY, int dstStrideY,
       ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV, int width, int height);
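
For reference, a minimal usage sketch of the new helper. The sizes below are illustrative, and both buffers must be direct ByteBuffers because the native side resolves them with GetDirectBufferAddress:

  // Illustrative only: repack a padded 640x360 Y plane into a tightly packed copy.
  final int width = 640;
  final int height = 360;
  final int srcStride = 768;  // assumed codec-aligned row length
  final ByteBuffer src = ByteBuffer.allocateDirect(srcStride * height);
  final ByteBuffer dst = ByteBuffer.allocateDirect(width * height);
  YuvHelper.copyPlane(src, srcStride, dst, /* dstStride= */ width, width, height);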

@@ -45,6 +45,12 @@ public final class HardwareVideoDecoderTest {
     CLASS_PARAMS.add(new ParameterSet()
                          .value("VP8" /* codecType */, true /* useEglContext */)
                          .name("VP8WithEglContext"));
+    CLASS_PARAMS.add(new ParameterSet()
+                         .value("H264" /* codecType */, false /* useEglContext */)
+                         .name("H264WithoutEglContext"));
+    CLASS_PARAMS.add(new ParameterSet()
+                         .value("H264" /* codecType */, true /* useEglContext */)
+                         .name("H264WithEglContext"));
   }

   private final String codecType;
@@ -65,12 +71,12 @@ public final class HardwareVideoDecoderTest {
   private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
   private static final boolean ENABLE_H264_HIGH_PROFILE = true;
   private static final VideoEncoder.Settings ENCODER_SETTINGS =
-      new VideoEncoder.Settings(1 /* core */, 640 /* width */, 480 /* height */, 300 /* kbps */,
+      new VideoEncoder.Settings(1 /* core */, TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT, 300 /* kbps */,
           30 /* fps */, true /* automaticResizeOn */);

   private static final int DECODE_TIMEOUT_MS = 1000;
   private static final VideoDecoder.Settings SETTINGS =
-      new VideoDecoder.Settings(1 /* core */, 640 /* width */, 480 /* height */);
+      new VideoDecoder.Settings(1 /* core */, TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT);

   private static class MockDecodeCallback implements VideoDecoder.Callback {
     private BlockingQueue<VideoFrame> frameQueue = new LinkedBlockingQueue<>();

@@ -58,6 +58,17 @@ public class YuvHelperTest {
     NativeLibrary.initialize(new NativeLibrary.DefaultLoader());
   }

+  @SmallTest
+  @Test
+  public void testCopyPlane() {
+    final int dstStride = TEST_WIDTH;
+    final ByteBuffer dst = ByteBuffer.allocateDirect(TEST_HEIGHT * dstStride);
+
+    YuvHelper.copyPlane(TEST_I420_Y, TEST_I420_STRIDE_Y, dst, dstStride, TEST_WIDTH, TEST_HEIGHT);
+
+    assertByteBufferContentEquals(new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9}, dst);
+  }
+
   @SmallTest
   @Test
   public void testI420Copy() {

@@ -15,13 +15,13 @@ import android.media.MediaCodec;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaFormat;
 import android.os.SystemClock;
-import javax.annotation.Nullable;
 import android.view.Surface;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.concurrent.BlockingDeque;
 import java.util.concurrent.LinkedBlockingDeque;
 import java.util.concurrent.TimeUnit;
+import javax.annotation.Nullable;
 import org.webrtc.ThreadUtils.ThreadChecker;

 /** Android hardware video decoder. */
@@ -512,37 +512,57 @@ class HardwareVideoDecoder
   private VideoFrame.Buffer copyI420Buffer(
       ByteBuffer buffer, int stride, int sliceHeight, int width, int height) {
+    if (stride % 2 != 0) {
+      throw new AssertionError("Stride is not divisible by two: " + stride);
+    }
+
+    // Note that the case with odd |sliceHeight| is handled in a special way.
+    // The chroma height contained in the payload is rounded down instead of
+    // up, making it one row less than what we expect in WebRTC. Therefore, we
+    // have to duplicate the last chroma rows for this case. Also, the offset
+    // between the Y plane and the U plane is unintuitive for this case. See
+    // http://bugs.webrtc.org/6651 for more info.
+    final int chromaWidth = (width + 1) / 2;
+    final int chromaHeight = (sliceHeight % 2 == 0) ? (height + 1) / 2 : height / 2;
+
     final int uvStride = stride / 2;

     final int yPos = 0;
+    final int yEnd = yPos + stride * height;
     final int uPos = yPos + stride * sliceHeight;
-    final int uEnd = uPos + uvStride * (sliceHeight / 2);
+    final int uEnd = uPos + uvStride * chromaHeight;
     final int vPos = uPos + uvStride * sliceHeight / 2;
-    final int vEnd = vPos + uvStride * (sliceHeight / 2);
+    final int vEnd = vPos + uvStride * chromaHeight;

     VideoFrame.I420Buffer frameBuffer = JavaI420Buffer.allocate(width, height);

-    ByteBuffer dataY = frameBuffer.getDataY();
+    buffer.limit(yEnd);
     buffer.position(yPos);
-    buffer.limit(uPos);
-    dataY.put(buffer);
+    YuvHelper.copyPlane(
+        buffer.slice(), stride, frameBuffer.getDataY(), frameBuffer.getStrideY(), width, height);

-    ByteBuffer dataU = frameBuffer.getDataU();
-    buffer.position(uPos);
     buffer.limit(uEnd);
-    dataU.put(buffer);
-    if (sliceHeight % 2 != 0) {
-      buffer.position(uEnd - uvStride); // Repeat the last row.
-      dataU.put(buffer);
+    buffer.position(uPos);
+    YuvHelper.copyPlane(buffer.slice(), uvStride, frameBuffer.getDataU(), frameBuffer.getStrideU(),
+        chromaWidth, chromaHeight);
+    if (sliceHeight % 2 == 1) {
+      buffer.position(uPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
+
+      ByteBuffer dataU = frameBuffer.getDataU();
+      dataU.position(frameBuffer.getStrideU() * chromaHeight); // Seek to beginning of last row.
+      dataU.put(buffer); // Copy the last row.
     }

-    ByteBuffer dataV = frameBuffer.getDataV();
-    buffer.position(vPos);
     buffer.limit(vEnd);
-    dataV.put(buffer);
-    if (sliceHeight % 2 != 0) {
-      buffer.position(vEnd - uvStride); // Repeat the last row.
-      dataV.put(buffer);
+    buffer.position(vPos);
+    YuvHelper.copyPlane(buffer.slice(), uvStride, frameBuffer.getDataV(), frameBuffer.getStrideV(),
+        chromaWidth, chromaHeight);
+    if (sliceHeight % 2 == 1) {
+      buffer.position(vPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
+
+      ByteBuffer dataV = frameBuffer.getDataV();
+      dataV.position(frameBuffer.getStrideV() * chromaHeight); // Seek to beginning of last row.
+      dataV.put(buffer); // Copy the last row.
     }

     return frameBuffer;
   }
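
To make the odd-sliceHeight comment above concrete, here is the arithmetic for an assumed geometry (a codec reporting sliceHeight == height with an odd height; the numbers are illustrative, not taken from the CL):

  final int width = 640, height = 359, stride = 640, sliceHeight = 359;  // assumed geometry
  final int chromaWidth = (width + 1) / 2;             // 320
  final int chromaHeight = height / 2;                 // 179, rounded down because sliceHeight is odd
  final int uvStride = stride / 2;                     // 320
  final int uPos = stride * sliceHeight;               // 229760
  final int uEnd = uPos + uvStride * chromaHeight;     // 287040
  final int vPos = uPos + uvStride * sliceHeight / 2;  // 287200, i.e. 160 bytes past uEnd (webrtc:6651)
  final int vEnd = vPos + uvStride * chromaHeight;     // 344480
  // The destination I420 buffer expects (height + 1) / 2 = 180 chroma rows, so after the
  // 179-row copy the last source chroma row is duplicated into destination row index 179.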

@@ -1,52 +0,0 @@
-/*
- * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <jni.h>
-
-#include "sdk/android/generated_video_jni/jni/VideoFrameDrawer_jni.h"
-#include "sdk/android/native_api/jni/scoped_java_ref.h"
-
-namespace webrtc {
-namespace jni {
-
-static void JNI_VideoFrameDrawer_CopyPlane(
-    JNIEnv* jni,
-    const JavaParamRef<jclass>&,
-    const JavaParamRef<jobject>& j_src_buffer,
-    jint width,
-    jint height,
-    jint src_stride,
-    const JavaParamRef<jobject>& j_dst_buffer,
-    jint dst_stride) {
-  size_t src_size = jni->GetDirectBufferCapacity(j_src_buffer.obj());
-  size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer.obj());
-  RTC_CHECK(src_stride >= width) << "Wrong source stride " << src_stride;
-  RTC_CHECK(dst_stride >= width) << "Wrong destination stride " << dst_stride;
-  RTC_CHECK(src_size >= src_stride * height)
-      << "Insufficient source buffer capacity " << src_size;
-  RTC_CHECK(dst_size >= dst_stride * height)
-      << "Insufficient destination buffer capacity " << dst_size;
-  uint8_t* src = reinterpret_cast<uint8_t*>(
-      jni->GetDirectBufferAddress(j_src_buffer.obj()));
-  uint8_t* dst = reinterpret_cast<uint8_t*>(
-      jni->GetDirectBufferAddress(j_dst_buffer.obj()));
-  if (src_stride == dst_stride) {
-    memcpy(dst, src, src_stride * height);
-  } else {
-    for (int i = 0; i < height; i++) {
-      memcpy(dst, src, width);
-      src += src_stride;
-      dst += dst_stride;
-    }
-  }
-}
-
-}  // namespace jni
-}  // namespace webrtc

@@ -13,10 +13,27 @@
 #include "sdk/android/generated_video_jni/jni/YuvHelper_jni.h"
 #include "sdk/android/src/jni/jni_helpers.h"
 #include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/planar_functions.h"

 namespace webrtc {
 namespace jni {

+void JNI_YuvHelper_CopyPlane(JNIEnv* jni,
+                             const JavaParamRef<jclass>&,
+                             const JavaParamRef<jobject>& j_src,
+                             jint src_stride,
+                             const JavaParamRef<jobject>& j_dst,
+                             jint dst_stride,
+                             jint width,
+                             jint height) {
+  const uint8_t* src =
+      static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_src.obj()));
+  uint8_t* dst =
+      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst.obj()));
+
+  libyuv::CopyPlane(src, src_stride, dst, dst_stride, width, height);
+}
+
 void JNI_YuvHelper_I420Copy(JNIEnv* jni,
                             const JavaParamRef<jclass>&,
                             const JavaParamRef<jobject>& j_src_y,