Android: Support externally aligned timestamps

This support is needed if there is a large delay between the creation
of frames and the time they are delivered to the WebRTC C++ layer in
AndroidVideoTrackSource. This is the case when, for example, heavy
video processing that takes a couple of hundred milliseconds is applied
to the frames. Currently, timestamps coming from Android video sources
are aligned to rtc::TimeMicros() once they reach the WebRTC C++ layer
in AndroidVideoTrackSource. At that point, we "forget" any latency that
occurred earlier, and audio/video sync consequently suffers.

Bug: webrtc:9991
Change-Id: I7b1aaca9a60a978b9195dd5e5eed4779a0055607
Reviewed-on: https://webrtc-review.googlesource.com/c/110783
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#25654}
Author: Magnus Jedvert
Date: 2018-11-15 12:07:32 +01:00
Committed-by: Commit Bot
Commit: 9514071500 (parent: 2277ac6718)
18 changed files with 298 additions and 45 deletions
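
In practice the new flags are meant to be used together: a
capturer-side component (such as SurfaceTextureHelper below) aligns
timestamps to rtc::TimeNanos() at creation time, and the video source
is then told not to re-align them on arrival. A minimal sketch of that
wiring, as a fragment assuming the application already holds an EglBase
(eglBase) and a PeerConnectionFactory (factory):

// Align timestamps to rtc::TimeNanos() on the capture thread...
SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
    "CaptureThread", eglBase.getEglBaseContext(), /* alignTimestamps= */ true);
// ...and tell the source not to re-align them when frames arrive, so the
// latency added by any in-between processing is reflected in A/V sync.
VideoSource videoSource =
    factory.createVideoSource(/* isScreencast= */ false, /* alignTimestamps= */ false);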

@@ -100,7 +100,8 @@ void AndroidCallClient::Call(JNIEnv* env,
remote_sink_ = webrtc::JavaToNativeVideoSink(env, remote_sink.obj());
video_source_ = webrtc::CreateJavaVideoSource(env, signaling_thread_.get(),
false /* is_screencast */);
/* is_screencast= */ false,
/* align_timestamps= */ true);
CreatePeerConnection();
Connect();

@@ -448,7 +448,8 @@ void SimplePeerConnection::AddStreams(bool audio_only) {
rtc::scoped_refptr<webrtc::jni::AndroidVideoTrackSource> source(
new rtc::RefCountedObject<webrtc::jni::AndroidVideoTrackSource>(
g_signaling_thread.get(), env, false));
g_signaling_thread.get(), env, /* is_screencast= */ false,
/* align_timestamps= */ true));
rtc::scoped_refptr<webrtc::VideoTrackSourceProxy> proxy_source =
webrtc::VideoTrackSourceProxy::Create(g_signaling_thread.get(),
g_worker_thread.get(), source);

@@ -254,6 +254,7 @@ if (is_android) {
"api/org/webrtc/VideoFrameDrawer.java",
"api/org/webrtc/YuvConverter.java",
"api/org/webrtc/YuvHelper.java",
"api/org/webrtc/TimestampAligner.java",
"src/java/org/webrtc/EglBase10.java",
"src/java/org/webrtc/EglBase14.java",
"src/java/org/webrtc/GlGenericDrawer.java",
@@ -628,6 +629,7 @@ if (is_android) {
"src/jni/nv12buffer.cc",
"src/jni/nv21buffer.cc",
"src/jni/pc/video.cc",
"src/jni/timestampaligner.cc",
"src/jni/videocodecinfo.cc",
"src/jni/videocodecinfo.h",
"src/jni/videocodecstatus.cc",
@@ -1209,6 +1211,7 @@ if (is_android) {
"api/org/webrtc/JavaI420Buffer.java",
"api/org/webrtc/MediaCodecVideoDecoder.java",
"api/org/webrtc/MediaCodecVideoEncoder.java",
"api/org/webrtc/TimestampAligner.java",
"api/org/webrtc/VideoCodecInfo.java",
"api/org/webrtc/VideoCodecStatus.java",
"api/org/webrtc/VideoDecoder.java",
@@ -1384,6 +1387,7 @@ if (is_android) {
"instrumentationtests/src/org/webrtc/TestConstants.java",
"instrumentationtests/src/org/webrtc/VideoFileRendererTest.java",
"instrumentationtests/src/org/webrtc/VideoFrameBufferTest.java",
"instrumentationtests/src/org/webrtc/TimestampAlignerTest.java",
"instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java",
"instrumentationtests/src/org/webrtc/YuvHelperTest.java",
]

@@ -440,9 +440,25 @@ public class PeerConnectionFactory {
return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
}
public VideoSource createVideoSource(boolean isScreencast) {
/**
* Create a video source with the given parameters. If alignTimestamps is false, the caller is
* responsible for aligning the frame timestamps to rtc::TimeNanos(). This can be used to achieve
* higher accuracy if there is a large delay between frame creation and frames being delivered to
* the returned video source. If alignTimestamps is true, timestamps will be aligned to
* rtc::TimeNanos() when they arrive at the returned video source.
*/
public VideoSource createVideoSource(boolean isScreencast, boolean alignTimestamps) {
checkPeerConnectionFactoryExists();
return new VideoSource(nativeCreateVideoSource(nativeFactory, isScreencast));
return new VideoSource(nativeCreateVideoSource(nativeFactory, isScreencast, alignTimestamps));
}
/**
* Same as above with alignTimestamps set to true.
*
* @see #createVideoSource(boolean, boolean)
*/
public VideoSource createVideoSource(boolean isScreencast) {
return createVideoSource(isScreencast, /* alignTimestamps= */ true);
}
public VideoTrack createVideoTrack(String id, VideoSource source) {
@@ -567,7 +583,8 @@ public class PeerConnectionFactory {
PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver,
SSLCertificateVerifier sslCertificateVerifier);
private static native long nativeCreateLocalMediaStream(long factory, String label);
private static native long nativeCreateVideoSource(long factory, boolean is_screencast);
private static native long nativeCreateVideoSource(
long factory, boolean is_screencast, boolean alignTimestamps);
private static native long nativeCreateVideoTrack(
long factory, String id, long nativeVideoSource);
private static native long nativeCreateAudioSource(long factory, MediaConstraints constraints);
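
When alignTimestamps is false and nothing upstream aligns the
timestamps, the application must stamp frames itself. A hedged sketch
of that manual path, using the TimestampAligner class added later in
this change; the buffer, rotation, and CapturerObserver handle stand in
for whatever the app's capture pipeline actually provides:

/** Sketch: manual timestamping for a source created with alignTimestamps=false. */
class ManualTimestampDelivery {
  private final TimestampAligner aligner = new TimestampAligner();

  void deliverProcessedFrame(VideoFrame.Buffer buffer, int rotation, long cameraTimeNs,
      CapturerObserver observer) {
    // Translate the camera clock to the rtc::TimeNanos() timescale at capture
    // time, before any slow processing adds latency on top.
    long alignedTimestampNs = aligner.translateTimestamp(cameraTimeNs);
    observer.onFrameCaptured(new VideoFrame(buffer, rotation, alignedTimestampNs));
  }

  void release() {
    aligner.dispose();
  }
}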

@@ -36,10 +36,14 @@ public class SurfaceTextureHelper {
/**
* Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. A dedicated
* thread and handler are created for handling the SurfaceTexture. May return null if EGL fails to
* initialize a pixel buffer surface and make it current.
* initialize a pixel buffer surface and make it current. If alignTimestamps is true, the frame
* timestamps will be aligned to rtc::TimeNanos(). If frame timestamps are aligned to
* rtc::TimeNanos(), there is no need to align them again in
* PeerConnectionFactory.createVideoSource(). This makes the timestamps more accurate and
* closer to the actual creation time.
*/
public static SurfaceTextureHelper create(
final String threadName, final EglBase.Context sharedContext) {
final String threadName, final EglBase.Context sharedContext, boolean alignTimestamps) {
final HandlerThread thread = new HandlerThread(threadName);
thread.start();
final Handler handler = new Handler(thread.getLooper());
@@ -53,7 +57,7 @@
@Override
public SurfaceTextureHelper call() {
try {
return new SurfaceTextureHelper(sharedContext, handler);
return new SurfaceTextureHelper(sharedContext, handler, alignTimestamps);
} catch (RuntimeException e) {
Logging.e(TAG, threadName + " create failure", e);
return null;
@@ -62,11 +66,22 @@
});
}
/**
* Same as above with alignTimestamps set to false.
*
* @see #create(String, EglBase.Context, boolean)
*/
public static SurfaceTextureHelper create(
final String threadName, final EglBase.Context sharedContext) {
return create(threadName, sharedContext, /* alignTimestamps= */ false);
}
private final Handler handler;
private final EglBase eglBase;
private final SurfaceTexture surfaceTexture;
private final int oesTextureId;
private final YuvConverter yuvConverter = new YuvConverter();
@Nullable private final TimestampAligner timestampAligner;
// These variables are only accessed from the |handler| thread.
@Nullable private VideoSink listener;
@@ -95,11 +110,13 @@
}
};
private SurfaceTextureHelper(EglBase.Context sharedContext, Handler handler) {
private SurfaceTextureHelper(
EglBase.Context sharedContext, Handler handler, boolean alignTimestamps) {
if (handler.getLooper().getThread() != Thread.currentThread()) {
throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
}
this.handler = handler;
this.timestampAligner = alignTimestamps ? new TimestampAligner() : null;
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
try {
@@ -264,7 +281,10 @@
final float[] transformMatrix = new float[16];
surfaceTexture.getTransformMatrix(transformMatrix);
final long timestampNs = surfaceTexture.getTimestamp();
long timestampNs = surfaceTexture.getTimestamp();
if (timestampAligner != null) {
timestampNs = timestampAligner.translateTimestamp(timestampNs);
}
if (textureWidth == 0 || textureHeight == 0) {
throw new RuntimeException("Texture size has not been set.");
}
@@ -289,5 +309,8 @@
surfaceTexture.release();
eglBase.release();
handler.getLooper().quit();
if (timestampAligner != null) {
timestampAligner.dispose();
}
}
}
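
With alignTimestamps set to true, frames delivered to the helper's
listener already carry rtc::TimeNanos()-based timestamps, so they can
be compared directly against TimestampAligner.getRtcTimeNanos(). A
small fragment illustrating this (eglContext is assumed to come from
the app's EglBase, and startListening takes a VideoSink):

// Measure capture-to-callback latency using the aligned timestamps.
SurfaceTextureHelper helper = SurfaceTextureHelper.create(
    "SurfaceTextureThread", eglContext, /* alignTimestamps= */ true);
helper.startListening(frame -> {
  // frame.getTimestampNs() is in the rtc::TimeNanos() timescale here.
  long latencyNs = TimestampAligner.getRtcTimeNanos() - frame.getTimestampNs();
  Logging.d("LatencyDemo", "Capture-to-callback latency: " + latencyNs + " ns");
});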

@@ -0,0 +1,59 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* The TimestampAligner class helps translate camera timestamps into the same timescale as is
* used by rtc::TimeNanos(). Some cameras have built-in timestamping which is more accurate than
* reading the system clock, but with a different epoch and unknown clock drift. Frame timestamps
* in webrtc should use rtc::TimeNanos (system monotonic time), and this class provides a filter
* which lets us use the rtc::TimeNanos timescale, and at the same time take advantage of the
* higher accuracy of the camera clock. This class is a wrapper on top of rtc::TimestampAligner.
*/
public class TimestampAligner {
/**
* Wrapper around rtc::TimeNanos(). This is normally the same as System.nanoTime(), but call this
* function to be safe.
*/
public static long getRtcTimeNanos() {
return nativeRtcTimeNanos();
}
private volatile long nativeTimestampAligner = nativeCreateTimestampAligner();
/**
* Translates camera timestamps to the same timescale as is used by rtc::TimeNanos().
* |cameraTimeNs| is assumed to be accurate, but with an unknown epoch and clock drift. Returns
* the translated timestamp.
*/
public long translateTimestamp(long cameraTimeNs) {
checkNativeAlignerExists();
return nativeTranslateTimestamp(nativeTimestampAligner, cameraTimeNs);
}
/** Dispose of the native timestamp aligner. */
public void dispose() {
checkNativeAlignerExists();
nativeReleaseTimestampAligner(nativeTimestampAligner);
nativeTimestampAligner = 0;
}
private void checkNativeAlignerExists() {
if (nativeTimestampAligner == 0) {
throw new IllegalStateException("TimestampAligner has been disposed.");
}
}
private static native long nativeRtcTimeNanos();
private static native long nativeCreateTimestampAligner();
private static native void nativeReleaseTimestampAligner(long timestampAligner);
private static native long nativeTranslateTimestamp(long timestampAligner, long cameraTimeNs);
}
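
The lifecycle contract mirrors other native-backed objects in the SDK:
use the aligner, then dispose of it exactly once. A short fragment (the
camera timestamp is faked with getRtcTimeNanos() just to keep the
fragment self-contained):

TimestampAligner aligner = new TimestampAligner();
// getRtcTimeNanos() and translateTimestamp() share a timescale, so a freshly
// translated "camera" timestamp lands close to the current rtc time.
long translatedNs = aligner.translateTimestamp(TimestampAligner.getRtcTimeNanos());
aligner.dispose();
// Any further call to translateTimestamp() or dispose() now throws
// IllegalStateException("TimestampAligner has been disposed.").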

@@ -0,0 +1,46 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.support.test.filters.SmallTest;
import org.chromium.base.test.params.BaseJUnit4RunnerDelegate;
import org.chromium.base.test.params.ParameterAnnotations.UseRunnerDelegate;
import org.junit.BeforeClass;
import org.junit.Test;
@UseRunnerDelegate(BaseJUnit4RunnerDelegate.class)
public class TimestampAlignerTest {
@BeforeClass
public static void setUp() {
System.loadLibrary(TestConstants.NATIVE_LIBRARY);
}
@Test
@SmallTest
public void testGetRtcTimeNanos() {
TimestampAligner.getRtcTimeNanos();
}
@Test
@SmallTest
public void testDispose() {
final TimestampAligner timestampAligner = new TimestampAligner();
timestampAligner.dispose();
}
@Test
@SmallTest
public void testTranslateTimestamp() {
final TimestampAligner timestampAligner = new TimestampAligner();
timestampAligner.translateTimestamp(/* cameraTimeNs= */ 123);
timestampAligner.dispose();
}
}

@@ -25,12 +25,14 @@ class JavaVideoTrackSourceImpl : public JavaVideoTrackSourceInterface {
public:
JavaVideoTrackSourceImpl(JNIEnv* env,
rtc::Thread* signaling_thread,
bool is_screencast)
bool is_screencast,
bool align_timestamps)
: android_video_track_source_(
new rtc::RefCountedObject<jni::AndroidVideoTrackSource>(
signaling_thread,
env,
is_screencast)),
is_screencast,
align_timestamps)),
native_capturer_observer_(jni::CreateJavaNativeCapturerObserver(
env,
android_video_track_source_)) {}
@@ -96,9 +98,10 @@ class JavaVideoTrackSourceImpl : public JavaVideoTrackSourceInterface {
rtc::scoped_refptr<JavaVideoTrackSourceInterface> CreateJavaVideoSource(
JNIEnv* jni,
rtc::Thread* signaling_thread,
bool is_screencast) {
bool is_screencast,
bool align_timestamps) {
return new rtc::RefCountedObject<JavaVideoTrackSourceImpl>(
jni, signaling_thread, is_screencast);
jni, signaling_thread, is_screencast, align_timestamps);
}
} // namespace webrtc

@@ -33,7 +33,8 @@ class JavaVideoTrackSourceInterface : public VideoTrackSourceInterface {
rtc::scoped_refptr<JavaVideoTrackSourceInterface> CreateJavaVideoSource(
JNIEnv* env,
rtc::Thread* signaling_thread,
bool is_screencast);
bool is_screencast,
bool align_timestamps);
} // namespace webrtc

@@ -24,8 +24,9 @@ public class JavaVideoSourceTestHelper {
}
@CalledByNative
public static void deliverFrame(int width, int height, int rotation, CapturerObserver observer) {
public static void deliverFrame(
int width, int height, int rotation, long timestampNs, CapturerObserver observer) {
observer.onFrameCaptured(
new VideoFrame(JavaI420Buffer.allocate(width, height), rotation, 0 /* timestampNs= */));
new VideoFrame(JavaI420Buffer.allocate(width, height), rotation, timestampNs));
}
}

@@ -40,9 +40,9 @@ TEST(JavaVideoSourceTest, CreateJavaVideoSource) {
rtc::ThreadManager::Instance()->WrapCurrentThread();
rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
CreateJavaVideoSource(env,
rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */);
CreateJavaVideoSource(
env, rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */, true /* align_timestamps */);
ASSERT_NE(nullptr, video_track_source);
EXPECT_NE(nullptr,
@@ -57,9 +57,9 @@ TEST(JavaVideoSourceTest, OnFrameCapturedFrameIsDeliveredToSink) {
rtc::ThreadManager::Instance()->WrapCurrentThread();
rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
CreateJavaVideoSource(env,
rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */);
CreateJavaVideoSource(
env, rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */, true /* align_timestamps */);
video_track_source->AddOrUpdateSink(&test_video_sink, rtc::VideoSinkWants());
jni::Java_JavaVideoSourceTestHelper_startCapture(
@@ -68,8 +68,9 @@ TEST(JavaVideoSourceTest, OnFrameCapturedFrameIsDeliveredToSink) {
const int width = 20;
const int height = 32;
const int rotation = 180;
const int64_t timestamp = 987654321;
jni::Java_JavaVideoSourceTestHelper_deliverFrame(
env, width, height, rotation,
env, width, height, rotation, timestamp,
video_track_source->GetJavaVideoCapturerObserver(env));
std::vector<VideoFrame> frames = test_video_sink.GetFrames();
@@ -80,15 +81,49 @@ TEST(JavaVideoSourceTest, OnFrameCapturedFrameIsDeliveredToSink) {
EXPECT_EQ(rotation, frame.rotation());
}
TEST(JavaVideoSourceTest,
OnFrameCapturedFrameIsDeliveredToSinkWithPreservedTimestamp) {
TestVideoSink test_video_sink;
JNIEnv* env = AttachCurrentThreadIfNeeded();
// Wrap test thread so it can be used as the signaling thread.
rtc::ThreadManager::Instance()->WrapCurrentThread();
rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
CreateJavaVideoSource(
env, rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */, false /* align_timestamps */);
video_track_source->AddOrUpdateSink(&test_video_sink, rtc::VideoSinkWants());
jni::Java_JavaVideoSourceTestHelper_startCapture(
env, video_track_source->GetJavaVideoCapturerObserver(env),
true /* success */);
const int width = 20;
const int height = 32;
const int rotation = 180;
const int64_t timestamp = 987654321;
jni::Java_JavaVideoSourceTestHelper_deliverFrame(
env, width, height, rotation, timestamp,
video_track_source->GetJavaVideoCapturerObserver(env));
std::vector<VideoFrame> frames = test_video_sink.GetFrames();
ASSERT_EQ(1u, frames.size());
webrtc::VideoFrame frame = frames[0];
EXPECT_EQ(width, frame.width());
EXPECT_EQ(height, frame.height());
EXPECT_EQ(rotation, frame.rotation());
EXPECT_EQ(timestamp / 1000, frame.timestamp_us());
}
TEST(JavaVideoSourceTest, CapturerStartedSuccessStateBecomesLive) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
// Wrap test thread so it can be used as the signaling thread.
rtc::ThreadManager::Instance()->WrapCurrentThread();
rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
CreateJavaVideoSource(env,
rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */);
CreateJavaVideoSource(
env, rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */, true /* align_timestamps */);
jni::Java_JavaVideoSourceTestHelper_startCapture(
env, video_track_source->GetJavaVideoCapturerObserver(env),
@@ -104,9 +139,9 @@ TEST(JavaVideoSourceTest, CapturerStartedFailureStateBecomesEnded) {
rtc::ThreadManager::Instance()->WrapCurrentThread();
rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
CreateJavaVideoSource(env,
rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */);
CreateJavaVideoSource(
env, rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */, true /* align_timestamps */);
jni::Java_JavaVideoSourceTestHelper_startCapture(
env, video_track_source->GetJavaVideoCapturerObserver(env),
@@ -122,9 +157,9 @@ TEST(JavaVideoSourceTest, CapturerStoppedStateBecomesEnded) {
rtc::ThreadManager::Instance()->WrapCurrentThread();
rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
CreateJavaVideoSource(env,
rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */);
CreateJavaVideoSource(
env, rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */, true /* align_timestamps */);
jni::Java_JavaVideoSourceTestHelper_startCapture(
env, video_track_source->GetJavaVideoCapturerObserver(env),

@@ -25,10 +25,12 @@ const int kRequiredResolutionAlignment = 2;
AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
JNIEnv* jni,
bool is_screencast)
bool is_screencast,
bool align_timestamps)
: AdaptedVideoTrackSource(kRequiredResolutionAlignment),
signaling_thread_(signaling_thread),
is_screencast_(is_screencast) {
is_screencast_(is_screencast),
align_timestamps_(align_timestamps) {
RTC_LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
camera_thread_checker_.DetachFromThread();
}
@@ -75,7 +77,9 @@ void AndroidVideoTrackSource::OnFrameCaptured(
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us =
timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
align_timestamps_ ? timestamp_aligner_.TranslateTimestamp(
camera_time_us, rtc::TimeMicros())
: camera_time_us;
int adapted_width;
int adapted_height;

@@ -29,7 +29,8 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
public:
AndroidVideoTrackSource(rtc::Thread* signaling_thread,
JNIEnv* jni,
bool is_screencast = false);
bool is_screencast,
bool align_timestamps);
~AndroidVideoTrackSource() override;
bool is_screencast() const override;
@@ -64,8 +65,9 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
rtc::AsyncInvoker invoker_;
rtc::ThreadChecker camera_thread_checker_;
SourceState state_;
rtc::TimestampAligner timestamp_aligner_;
const bool is_screencast_;
rtc::TimestampAligner timestamp_aligner_;
const bool align_timestamps_;
};
} // namespace jni

@@ -31,7 +31,8 @@ VideoDecoderFactory* CreateVideoDecoderFactory(
void* CreateVideoSource(JNIEnv* env,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
jboolean is_screencast) {
jboolean is_screencast,
jboolean align_timestamps) {
return nullptr;
}

@@ -473,12 +473,13 @@ static jlong JNI_PeerConnectionFactory_CreateVideoSource(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong native_factory,
jboolean is_screencast) {
jboolean is_screencast,
jboolean align_timestamps) {
OwnedFactoryAndThreads* factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
return jlongFromPointer(CreateVideoSource(jni, factory->signaling_thread(),
factory->worker_thread(),
is_screencast));
is_screencast, align_timestamps));
}
static jlong JNI_PeerConnectionFactory_CreateVideoTrack(

@@ -44,10 +44,11 @@ VideoDecoderFactory* CreateVideoDecoderFactory(
void* CreateVideoSource(JNIEnv* env,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
jboolean is_screencast) {
jboolean is_screencast,
jboolean align_timestamps) {
rtc::scoped_refptr<AndroidVideoTrackSource> source(
new rtc::RefCountedObject<AndroidVideoTrackSource>(signaling_thread, env,
is_screencast));
new rtc::RefCountedObject<AndroidVideoTrackSource>(
signaling_thread, env, is_screencast, align_timestamps));
return VideoTrackSourceProxy::Create(signaling_thread, worker_thread, source)
.release();
}

@@ -36,7 +36,8 @@ VideoDecoderFactory* CreateVideoDecoderFactory(
void* CreateVideoSource(JNIEnv* env,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
jboolean is_screencast);
jboolean is_screencast,
jboolean align_timestamps);
} // namespace jni
} // namespace webrtc

@@ -0,0 +1,52 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "rtc_base/timestampaligner.h"
#include "rtc_base/timeutils.h"
#include "sdk/android/generated_video_jni/jni/TimestampAligner_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
static jlong JNI_TimestampAligner_RtcTimeNanos(
JNIEnv* env,
const JavaParamRef<jclass>& j_caller) {
return rtc::TimeNanos();
}
static jlong JNI_TimestampAligner_CreateTimestampAligner(
JNIEnv* env,
const JavaParamRef<jclass>& j_caller) {
return jlongFromPointer(new rtc::TimestampAligner());
}
static void JNI_TimestampAligner_ReleaseTimestampAligner(
JNIEnv* env,
const JavaParamRef<jclass>& j_caller,
jlong timestamp_aligner) {
delete reinterpret_cast<rtc::TimestampAligner*>(timestamp_aligner);
}
static jlong JNI_TimestampAligner_TranslateTimestamp(
JNIEnv* env,
const JavaParamRef<jclass>& j_caller,
jlong timestamp_aligner,
jlong camera_time_ns) {
return reinterpret_cast<rtc::TimestampAligner*>(timestamp_aligner)
->TranslateTimestamp(camera_time_ns / rtc::kNumNanosecsPerMicrosec,
rtc::TimeMicros()) *
rtc::kNumNanosecsPerMicrosec;
}
} // namespace jni
} // namespace webrtc
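
One consequence of this bridge is worth noting: the Java API takes and
returns nanoseconds, but the translation itself runs in microseconds
(the input is divided by rtc::kNumNanosecsPerMicrosec and the result
multiplied back), so translated timestamps are always whole multiples
of 1000 ns and sub-microsecond camera precision is discarded. A tiny
Java illustration, again faking the camera timestamp to stay
self-contained:

TimestampAligner aligner = new TimestampAligner();
long cameraTimeNs = TimestampAligner.getRtcTimeNanos() + 500; // not a multiple of 1000
long translatedNs = aligner.translateTimestamp(cameraTimeNs);
// The JNI bridge computes (translated_us * 1000), so this always holds:
assert translatedNs % 1000 == 0;
aligner.dispose();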