Revert of Support for video file instead of camera and output video out to file (patchset #17 id:320001 of https://codereview.webrtc.org/2273573003/ )

Reason for revert:
Breaks internal project.

Original issue's description:
> Support for video file instead of camera and output video out to file
>
> When video out to file is enabled the remote video which is recorded is
> not shown on screen.
>
> You can use this command line for file input and output:
> monkeyrunner ./webrtc/examples/androidapp/start_loopback_stubbed_camera_saved_video_out.py --devname 02157df28cd47001 --videoin /storage/emulated/0/reference_video_1280x720_30fps.y4m --videoout /storage/emulated/0/output.y4m --videoout_width 1280 --videoout_height 720 --videooutsave /tmp/out.y4m
>
> BUG=webrtc:6545
>
> Committed: https://crrev.com/44666997ca912705f8f96c9bd211e719525a3ccc
> Cr-Commit-Position: refs/heads/master@{#14660}

TBR=magjed@webrtc.org,sakal@webrtc.org,jansson@chromium.org,mandermo@google.com,mandermo@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:6545

Review-Url: https://codereview.webrtc.org/2425763003
Cr-Commit-Position: refs/heads/master@{#14664}
This commit is contained in:
kjellander
2016-10-18 06:05:35 -07:00
committed by Commit bot
parent f33970b15e
commit 67a8c986ab
10 changed files with 186 additions and 933 deletions

View File

@ -1,211 +0,0 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.os.SystemClock;

import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
/**
 * VideoCapturer implementation that reads frames from a video file instead of
 * a camera. Currently only the .y4m (YUV4MPEG2) container with I420 content
 * is supported. The file is looped when the end is reached. Frames are
 * delivered to the CapturerObserver as NV21 byte buffers at the frame rate
 * requested in startCapture().
 */
public class FileVideoCapturer implements VideoCapturer {
  /** Frame source abstraction; allows adding readers for other containers. */
  private interface VideoReader {
    int getFrameWidth();
    int getFrameHeight();
    byte[] getNextFrame();
    void close();
  }

  /**
   * Reads video data from a .y4m container. Only the I420 ("C420") color
   * space and even frame dimensions are supported.
   */
  private static class VideoReaderY4M implements VideoReader {
    private final static String TAG = "VideoReaderY4M";
    // Every frame in the container is prefixed by this marker plus '\n'.
    private static final String Y4M_FRAME_DELIMITER = "FRAME";

    private final int frameWidth;
    private final int frameHeight;
    // I420 frame size in bytes: width * height * 3 / 2.
    private final int frameSize;
    // File offset of the first byte after the stream header; seek target when looping.
    private final long videoStart;
    private final RandomAccessFile mediaFileStream;

    public int getFrameWidth() {
      return frameWidth;
    }

    public int getFrameHeight() {
      return frameHeight;
    }

    /**
     * Opens the file and parses the single-line YUV4MPEG2 stream header.
     *
     * @throws IOException if the file cannot be opened or read.
     * @throws IllegalArgumentException for an unsupported color space or odd dimensions.
     */
    public VideoReaderY4M(String file) throws IOException {
      mediaFileStream = new RandomAccessFile(file, "r");
      // The header is a '\n'-terminated line of space-separated tokens,
      // e.g. "YUV4MPEG2 W1280 H720 F30:1 C420".
      StringBuilder builder = new StringBuilder();
      for (;;) {
        int c = mediaFileStream.read();
        if (c == -1) {
          // End of file reached before the header terminator.
          throw new RuntimeException("Found end of file before end of header for file: " + file);
        }
        if (c == '\n') {
          // End of header found.
          break;
        }
        builder.append((char) c);
      }
      videoStart = mediaFileStream.getFilePointer();
      String header = builder.toString();
      String[] headerTokens = header.split("[ ]");
      // Arrays.toString makes the token list readable in the log (plain
      // concatenation would print the array's identity hash).
      Logging.d(TAG, "header: " + header + ", headerTokens: " + Arrays.toString(headerTokens));
      int w = 0;
      int h = 0;
      String colorSpace = "";
      for (String tok : headerTokens) {
        char c = tok.charAt(0);
        switch (c) {
          case 'W':
            w = Integer.parseInt(tok.substring(1));
            break;
          case 'H':
            h = Integer.parseInt(tok.substring(1));
            break;
          case 'C':
            colorSpace = tok.substring(1);
            break;
        }
      }
      Logging.d(TAG, "Color space: " + colorSpace);
      if (!colorSpace.equals("420")) {
        throw new IllegalArgumentException("Does not support any other color space than I420");
      }
      if ((w % 2) == 1 || (h % 2) == 1) {
        throw new IllegalArgumentException("Does not support odd width or height");
      }
      frameWidth = w;
      frameHeight = h;
      frameSize = w * h * 3 / 2;
      Logging.d(TAG, "frame dim: (" + w + ", " + h + ") frameSize: " + frameSize);
    }

    /**
     * Reads the next frame, seeking back to the first frame when the end of
     * file is reached, and converts it from I420 to NV21.
     */
    public byte[] getNextFrame() {
      byte[] frame = new byte[frameSize];
      try {
        // Each frame's pixel data is preceded by "FRAME\n".
        byte[] frameDelim = new byte[Y4M_FRAME_DELIMITER.length() + 1];
        if (mediaFileStream.read(frameDelim) < frameDelim.length) {
          // We reached end of file; loop back to the first frame.
          mediaFileStream.seek(videoStart);
          if (mediaFileStream.read(frameDelim) < frameDelim.length) {
            throw new RuntimeException("Error looping video");
          }
        }
        // Decode with an explicit charset: the marker is plain ASCII and the
        // platform default charset is not guaranteed to be ASCII-compatible.
        String frameDelimStr = new String(frameDelim, StandardCharsets.US_ASCII);
        if (!frameDelimStr.equals(Y4M_FRAME_DELIMITER + "\n")) {
          throw new RuntimeException(
              "Frames should be delimited by FRAME plus newline, found delimiter was: '"
              + frameDelimStr + "'");
        }
        mediaFileStream.readFully(frame);
        byte[] nv21Frame = new byte[frameSize];
        nativeI420ToNV21(frame, frameWidth, frameHeight, nv21Frame);
        return nv21Frame;
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    public void close() {
      try {
        mediaFileStream.close();
      } catch (IOException e) {
        Logging.e(TAG, "Problem closing file", e);
      }
    }
  }

  private final static String TAG = "FileVideoCapturer";
  private final VideoReader videoReader;
  // Set in initialize(); receives the captured frames in tick().
  private CapturerObserver capturerObserver;
  private final Timer timer = new Timer();
  private final TimerTask tickTask = new TimerTask() {
    @Override
    public void run() {
      tick();
    }
  };

  private int getFrameWidth() {
    return videoReader.getFrameWidth();
  }

  private int getFrameHeight() {
    return videoReader.getFrameHeight();
  }

  /**
   * @param inputFile path to a .y4m file with I420 content.
   * @throws IOException if the file cannot be opened or read.
   */
  public FileVideoCapturer(String inputFile) throws IOException {
    try {
      videoReader = new VideoReaderY4M(inputFile);
    } catch (IOException e) {
      Logging.d(TAG, "Could not open video file: " + inputFile);
      throw e;
    }
  }

  private byte[] getNextFrame() {
    return videoReader.getNextFrame();
  }

  /** Reads one frame and forwards it to the observer; driven by the timer. */
  public void tick() {
    final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
    byte[] frameData = getNextFrame();
    capturerObserver.onByteBufferFrameCaptured(
        frameData, getFrameWidth(), getFrameHeight(), 0, captureTimeNs);
  }

  @Override
  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
      CapturerObserver capturerObserver) {
    this.capturerObserver = capturerObserver;
  }

  @Override
  public void startCapture(int width, int height, int framerate) {
    // Guard the period computation below: framerate == 0 would raise an
    // opaque ArithmeticException; give the caller an actionable error instead.
    if (framerate <= 0) {
      throw new IllegalArgumentException("Frame rate must be positive, was: " + framerate);
    }
    // Note: integer division; frame rates above 1000 fps are not supported.
    timer.schedule(tickTask, 0, 1000 / framerate);
  }

  @Override
  public void stopCapture() throws InterruptedException {
    timer.cancel();
  }

  @Override
  public void changeCaptureFormat(int width, int height, int framerate) {
    // Empty on purpose
  }

  @Override
  public void dispose() {
    videoReader.close();
  }

  @Override
  public boolean isScreencast() {
    return false;
  }

  public static native void nativeI420ToNV21(byte[] src, int width, int height, byte[] dst);
}

View File

@ -76,7 +76,7 @@ class SurfaceTextureHelper {
}
// State for YUV conversion, instantiated on demand.
static class YuvConverter {
static private class YuvConverter {
private final EglBase eglBase;
private final GlShader shader;
private boolean released = false;

View File

@ -1,135 +0,0 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.os.Handler;
import android.os.HandlerThread;
import java.nio.ByteBuffer;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
/**
* Can be used to save the video frames to file.
*/
/**
 * Saves remote video frames to an uncompressed .y4m (YUV4MPEG2) file, scaling
 * or converting every incoming frame to I420 at a fixed output resolution.
 * All conversion and file I/O happen on a dedicated render thread. Call
 * release() to flush pending frames and free resources when done.
 */
public class VideoFileRenderer implements VideoRenderer.Callbacks {
  private static final String TAG = "VideoFileRenderer";

  // Converts OpenGL texture frames to I420; used only for texture frames.
  private final SurfaceTextureHelper.YuvConverter yuvConverter;
  private final HandlerThread renderThread;
  private final Handler renderThreadHandler;
  private final FileOutputStream videoOutFile;
  private final int outputFileWidth;
  private final int outputFileHeight;
  // I420 output frame size in bytes: width * height * 3 / 2.
  private final int outputFrameSize;
  private final ByteBuffer outputFrameBuffer;

  /**
   * Opens the output file and writes the YUV4MPEG2 stream header.
   *
   * @param sharedContext EGL context shared with the producer of texture frames.
   * @throws IOException if the output file cannot be created.
   * @throws IllegalArgumentException if the output dimensions are odd.
   */
  public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
      EglBase.Context sharedContext) throws IOException {
    if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) {
      throw new IllegalArgumentException("Does not support uneven width or height");
    }
    yuvConverter = new SurfaceTextureHelper.YuvConverter(sharedContext);
    this.outputFileWidth = outputFileWidth;
    this.outputFileHeight = outputFileHeight;
    outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2;
    outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize);
    videoOutFile = new FileOutputStream(outputFile);
    // A fixed 30 fps ("F30:1") is declared regardless of the actual incoming
    // frame rate.
    videoOutFile.write(
        ("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight + " Ip F30:1 A1:1\n")
            .getBytes());
    renderThread = new HandlerThread(TAG);
    renderThread.start();
    renderThreadHandler = new Handler(renderThread.getLooper());
  }

  @Override
  public void renderFrame(final VideoRenderer.I420Frame frame) {
    // Hop to the render thread; the frame is returned to WebRTC there once
    // it has been written (or writing failed).
    renderThreadHandler.post(new Runnable() {
      @Override
      public void run() {
        renderFrameOnRenderThread(frame);
      }
    });
  }

  private void renderFrameOnRenderThread(VideoRenderer.I420Frame frame) {
    final float frameAspectRatio = (float) frame.rotatedWidth() / (float) frame.rotatedHeight();
    // Apply rotation, then an aspect-preserving fit into the output dimensions.
    final float[] rotatedSamplingMatrix =
        RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
    final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
        false, frameAspectRatio, (float) outputFileWidth / outputFileHeight);
    final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
    try {
      videoOutFile.write("FRAME\n".getBytes());
      if (!frame.yuvFrame) {
        // Texture frame: convert to I420 via OpenGL into outputFrameBuffer.
        yuvConverter.convert(outputFrameBuffer, outputFileWidth, outputFileHeight, outputFileWidth,
            frame.textureId, texMatrix);
        int stride = outputFileWidth;
        // NOTE(review): relies on the direct ByteBuffer exposing a backing
        // array (true on Android's runtime; array() may throw on other JVMs).
        byte[] data = outputFrameBuffer.array();
        int offset = outputFrameBuffer.arrayOffset();
        // Write Y
        videoOutFile.write(data, offset, outputFileWidth * outputFileHeight);
        // Write U
        for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
          videoOutFile.write(data, offset + r * stride, stride / 2);
        }
        // Write V
        for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
          videoOutFile.write(data, offset + r * stride + stride / 2, stride / 2);
        }
      } else {
        // I420 byte-buffer frame: scale directly to the output resolution.
        nativeI420Scale(frame.yuvPlanes[0], frame.yuvStrides[0], frame.yuvPlanes[1],
            frame.yuvStrides[1], frame.yuvPlanes[2], frame.yuvStrides[2], frame.width, frame.height,
            outputFrameBuffer, outputFileWidth, outputFileHeight);
        videoOutFile.write(
            outputFrameBuffer.array(), outputFrameBuffer.arrayOffset(), outputFrameSize);
      }
    } catch (IOException e) {
      // Log the exception itself so the failure is diagnosable even if the
      // rethrown RuntimeException is swallowed upstream.
      Logging.e(TAG, "Failed to write to file for video out", e);
      throw new RuntimeException(e);
    } finally {
      // Always return the frame to WebRTC, even on failure.
      VideoRenderer.renderFrameDone(frame);
    }
  }

  /** Blocks until pending frames are written, then frees all resources. */
  public void release() {
    final CountDownLatch cleanupBarrier = new CountDownLatch(1);
    renderThreadHandler.post(new Runnable() {
      @Override
      public void run() {
        // Release the GL converter on the thread that uses it; otherwise its
        // EGL resources are leaked.
        yuvConverter.release();
        try {
          videoOutFile.close();
        } catch (IOException e) {
          Logging.d(TAG, "Error closing output video file");
        }
        cleanupBarrier.countDown();
      }
    });
    ThreadUtils.awaitUninterruptibly(cleanupBarrier);
    renderThread.quit();
  }

  public static native void nativeI420Scale(ByteBuffer srcY, int strideY, ByteBuffer srcU,
      int strideU, ByteBuffer srcV, int strideV, int width, int height, ByteBuffer dst,
      int dstWidth, int dstHeight);
}