AVFoundation Video Capturer: Remove thread jump when delivering frames

WebRTC no longer has any restriction on what thread frames should be
delivered on. One possible problem with this CL is that NV21->I420
conversion and scaling are now done on the thread that delivers frames,
which might cause fps regressions.
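
For orientation before the per-file hunks: the CL collapses the old
post-and-dispatch delivery into one synchronous call. The following is
condensed from the diffs below, not verbatim source:

// Before this CL: frames were retained and posted to the thread that
// called Start(), then unpacked again in OnMessage()/OnFrameMessage().
CVBufferRetain(image_buffer);
AVFoundationFrame frame(image_buffer, rotation, rtc::TimeNanos());
_startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
                   new rtc::TypedMessageData<AVFoundationFrame>(frame));

// After this CL: the frame is adapted, converted if needed, and handed to
// OnFrame() synchronously on whatever thread AVFoundation delivers it.
OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
                                  translated_camera_time_us, 0),
        captured_width, captured_height);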

R=nisse@webrtc.org, perkj@webrtc.org, tkchin@webrtc.org

Review URL: https://codereview.webrtc.org/2137503003 .

Cr-Commit-Position: refs/heads/master@{#14021}
Author: Magnus Jedvert
Date:   2016-09-01 15:15:00 +02:00
Parent: 671d8008be
Commit: 0bade0df3b

4 changed files with 6 additions and 68 deletions

@@ -13,16 +13,12 @@
 namespace rtc {

-TimestampAligner::TimestampAligner() : frames_seen_(0), offset_us_(0) {
-  thread_checker_.DetachFromThread();
-}
+TimestampAligner::TimestampAligner() : frames_seen_(0), offset_us_(0) {}

 TimestampAligner::~TimestampAligner() {}

 int64_t TimestampAligner::UpdateOffset(int64_t camera_time_us,
                                        int64_t system_time_us) {
-  RTC_DCHECK(thread_checker_.CalledOnValidThread());
   // Estimate the offset between system monotonic time and the capture
   // time from the camera. The camera is assumed to provide more
   // accurate timestamps than we get from the system time. But the
@@ -91,8 +87,6 @@ int64_t TimestampAligner::UpdateOffset(int64_t camera_time_us,
 int64_t TimestampAligner::ClipTimestamp(int64_t time_us,
                                         int64_t system_time_us) {
-  RTC_DCHECK(thread_checker_.CalledOnValidThread());
-
   // Make timestamps monotonic.
   if (!prev_translated_time_us_) {
     // Initialize.

@@ -14,10 +14,11 @@
 #include "webrtc/base/basictypes.h"
 #include "webrtc/base/constructormagic.h"
 #include "webrtc/base/optional.h"
-#include "webrtc/base/thread_checker.h"

 namespace rtc {

+// This class is not thread safe, so all calls to it must be synchronized
+// externally.
 class TimestampAligner {
  public:
   TimestampAligner();
@@ -30,8 +31,6 @@ class TimestampAligner {
   int64_t ClipTimestamp(int64_t filtered_time_us, int64_t system_time_us);

  private:
-  rtc::ThreadChecker thread_checker_;
-
   // State for the timestamp translation.
   int frames_seen_;
   // Estimated offset between camera time and system monotonic time.

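The new header comment shifts the thread-safety burden to callers. A minimal
sketch of what that external synchronization could look like (the wrapper
class and its lock are hypothetical, not part of this CL):

#include "webrtc/base/criticalsection.h"
#include "webrtc/base/timestampaligner.h"

// Hypothetical caller-side wrapper: serializes all TimestampAligner calls,
// since the class itself no longer checks or enforces a thread.
class LockedTimestampAligner {
 public:
  int64_t Translate(int64_t camera_time_us, int64_t system_time_us) {
    rtc::CritScope cs(&lock_);  // The "external synchronization" required.
    const int64_t offset_us =
        aligner_.UpdateOffset(camera_time_us, system_time_us);
    return aligner_.ClipTimestamp(camera_time_us + offset_us, system_time_us);
  }

 private:
  rtc::CriticalSection lock_;
  rtc::TimestampAligner aligner_;
};
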
@@ -25,8 +25,7 @@ class Thread;
 namespace webrtc {

-class AVFoundationVideoCapturer : public cricket::VideoCapturer,
-                                  public rtc::MessageHandler {
+class AVFoundationVideoCapturer : public cricket::VideoCapturer {
  public:
   AVFoundationVideoCapturer();
   ~AVFoundationVideoCapturer();
@@ -59,16 +58,8 @@ class AVFoundationVideoCapturer : public cricket::VideoCapturer,
   void CaptureSampleBuffer(CMSampleBufferRef sample_buffer,
                            webrtc::VideoRotation rotation);

-  // Handles messages from posts.
-  void OnMessage(rtc::Message *msg) override;
-
  private:
-  void OnFrameMessage(CVImageBufferRef image_buffer,
-                      webrtc::VideoRotation rotation,
-                      int64_t capture_time_ns);
-
   RTCAVFoundationVideoCapturerInternal *_capturer;
-  rtc::Thread *_startThread;  // Set in Start(), unset in Stop().
   webrtc::I420BufferPool _buffer_pool;
 };  // AVFoundationVideoCapturer

@@ -596,18 +596,7 @@ enum AVFoundationVideoCapturerMessageType : uint32_t {
-  kMessageTypeFrame,
-};
-
-struct AVFoundationFrame {
-  AVFoundationFrame(CVImageBufferRef buffer,
-                    webrtc::VideoRotation rotation,
-                    int64_t time)
-      : image_buffer(buffer), rotation(rotation), capture_time(time) {}
-  CVImageBufferRef image_buffer;
-  webrtc::VideoRotation rotation;
-  int64_t capture_time;
-};
-
-AVFoundationVideoCapturer::AVFoundationVideoCapturer()
-    : _capturer(nil), _startThread(nullptr) {
+AVFoundationVideoCapturer::AVFoundationVideoCapturer() : _capturer(nil) {
   // Set our supported formats. This matches kAvailablePresets.
   _capturer =
       [[RTCAVFoundationVideoCapturerInternal alloc] initWithCapturer:this];
@@ -663,11 +652,6 @@ cricket::CaptureState AVFoundationVideoCapturer::Start(
   _capturer.captureSession.sessionPreset = desiredPreset;
   [_capturer.captureSession commitConfiguration];

-  // Keep track of which thread capture started on. This is the thread that
-  // frames need to be sent to.
-  RTC_DCHECK(!_startThread);
-  _startThread = rtc::Thread::Current();
-
   SetCaptureFormat(&format);

   // This isn't super accurate because it takes a while for the AVCaptureSession
   // to spin up, and this call returns async.
@@ -686,7 +670,6 @@ cricket::CaptureState AVFoundationVideoCapturer::Start(
 void AVFoundationVideoCapturer::Stop() {
   [_capturer stop];
   SetCaptureFormat(NULL);
-  _startThread = nullptr;
 }

 bool AVFoundationVideoCapturer::IsRunning() {
@@ -722,32 +705,6 @@ void AVFoundationVideoCapturer::CaptureSampleBuffer(
     return;
   }

-  // Retain the buffer and post it to the webrtc thread. It will be released
-  // after it has successfully been signaled.
-  CVBufferRetain(image_buffer);
-  AVFoundationFrame frame(image_buffer, rotation, rtc::TimeNanos());
-  _startThread->Post(RTC_FROM_HERE, this, kMessageTypeFrame,
-                     new rtc::TypedMessageData<AVFoundationFrame>(frame));
-}
-
-void AVFoundationVideoCapturer::OnMessage(rtc::Message *msg) {
-  switch (msg->message_id) {
-    case kMessageTypeFrame: {
-      rtc::TypedMessageData<AVFoundationFrame>* data =
-          static_cast<rtc::TypedMessageData<AVFoundationFrame>*>(msg->pdata);
-      const AVFoundationFrame& frame = data->data();
-      OnFrameMessage(frame.image_buffer, frame.rotation, frame.capture_time);
-      delete data;
-      break;
-    }
-  }
-}
-
-void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
-                                               webrtc::VideoRotation rotation,
-                                               int64_t capture_time_ns) {
-  RTC_DCHECK(_startThread->IsCurrent());
-
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
       new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);
@@ -763,11 +720,10 @@ void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
   int64_t translated_camera_time_us;

   if (!AdaptFrame(captured_width, captured_height,
                   capture_time_ns / rtc::kNumNanosecsPerMicrosec,
-                  rtc::TimeNanos() / rtc::kNumNanosecsPerMicrosec,
-                  &adapted_width, &adapted_height,
+                  rtc::TimeMicros(), &adapted_width, &adapted_height,
                   &crop_width, &crop_height, &crop_x, &crop_y,
                   &translated_camera_time_us)) {
-    CVBufferRelease(image_buffer);
     return;
   }
@@ -801,8 +757,6 @@ void AVFoundationVideoCapturer::OnFrameMessage(CVImageBufferRef image_buffer,
   OnFrame(cricket::WebRtcVideoFrame(buffer, rotation,
                                     translated_camera_time_us, 0),
           captured_width, captured_height);
-
-  CVBufferRelease(image_buffer);
 }

 }  // namespace webrtc
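
A side effect worth noting in the last hunks: with delivery now synchronous,
the manual CVBufferRetain()/CVBufferRelease() pair disappears. A sketch of
the resulting ownership contract (assuming, as the diff suggests, that
CoreVideoFrameBuffer takes its own reference to the pixel buffer it wraps):

// image_buffer stays alive for the duration of this synchronous call via
// the enclosing CMSampleBufferRef, so no manual CVBufferRetain() is needed.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
    new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(image_buffer);
// Any consumer that holds the frame past OnFrame() holds |buffer|, which in
// turn keeps the underlying pixel buffer alive; nothing to release here.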