Fix native API in preparation for the native_api example.

Add native API conversions for video frames and the video renderer. This
also requires some changes to sdk/BUILD to avoid cyclic dependencies.

Bug: webrtc:8832
Change-Id: Ibf21e63bdcae195dcb61d63f9262e6a8dc4fa790
Reviewed-on: https://webrtc-review.googlesource.com/57142
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22340}
Author: Anders Carlsson
Date: 2018-03-07 10:32:03 +01:00
Committed-by: Commit Bot
Parent: a2d89fc9f5
Commit: 9823ee47d3

26 changed files with 393 additions and 125 deletions
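The conversion entry points this change introduces are not themselves shown in the diffs below; they live under sdk/objc/Framework/Native/api/. The following sketch of the video frame header is reconstructed from the call sites that follow: NativeToObjCVideoFrame is confirmed by its use in RTCVideoRendererAdapter.mm, while the opposite direction and the exact signatures are assumptions.

// Hypothetical sketch of sdk/objc/Framework/Native/api/video_frame.h.
#import "WebRTC/RTCVideoFrame.h"

#include "api/video/video_frame.h"

namespace webrtc {

// Call site confirmed in RTCVideoRendererAdapter.mm below.
RTCVideoFrame* NativeToObjCVideoFrame(const VideoFrame& frame);

// Assumed counterpart for the ObjC-to-native direction; only the
// buffer-level ObjCToNativeVideoFrameBuffer() is visible in this diff.
VideoFrame ObjCToNativeVideoFrame(RTCVideoFrame* frame);

}  // namespace webrtc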

File: RTCPeerConnectionFactory.mm

@@ -38,8 +38,6 @@
 #include "sdk/objc/Framework/Native/src/objc_video_encoder_factory.h"
 #endif
-#include "Video/objcvideotracksource.h"
-#include "api/videosourceproxy.h"
 // Adding the nogncheck to disable the including header check.
 // The no-media version PeerConnectionFactory doesn't depend on media related
 // C++ target.
@@ -232,12 +230,8 @@
 }
 - (RTCVideoSource *)videoSource {
-  rtc::scoped_refptr<webrtc::ObjcVideoTrackSource> objcVideoTrackSource(
-      new rtc::RefCountedObject<webrtc::ObjcVideoTrackSource>());
-  return [[RTCVideoSource alloc]
-      initWithNativeVideoSource:webrtc::VideoTrackSourceProxy::Create(_signalingThread.get(),
-                                                                      _workerThread.get(),
-                                                                      objcVideoTrackSource)];
+  return [[RTCVideoSource alloc] initWithSignalingThread:_signalingThread.get()
+                                            workerThread:_workerThread.get()];
 }
 - (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source
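Moving the proxy construction into RTCVideoSource is what allows the factory to drop the Video/objcvideotracksource.h and api/videosourceproxy.h includes above, which is where the cyclic sdk/BUILD dependency came from. For callers nothing changes; a minimal sketch, assuming a default-constructed factory:

RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
// The ObjC track source and its proxy are now created inside RTCVideoSource.
RTCVideoSource *source = [factory videoSource];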

File: RTCVideoFrame+Private.h (deleted)

@@ -1,26 +0,0 @@
-/*
- *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS. All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#import "RTCI420Buffer+Private.h"
-#import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#include "api/video/video_frame.h"
-
-NS_ASSUME_NONNULL_BEGIN
-
-@interface RTCVideoFrame ()
-
-- (instancetype)initWithNativeVideoFrame:(const webrtc::VideoFrame &)frame;
-- (webrtc::VideoFrame)nativeVideoFrame;
-
-@end
-
-NS_ASSUME_NONNULL_END

File: RTCVideoFrame.mm

@@ -8,23 +8,10 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
-#import "RTCVideoFrame+Private.h"
 #import "WebRTC/RTCVideoFrame.h"
 #import "WebRTC/RTCVideoFrameBuffer.h"
-#include "api/video/video_frame.h"
-#include "rtc_base/timeutils.h"
-#include "sdk/objc/Framework/Native/api/video_frame_buffer.h"
-#include "sdk/objc/Framework/Native/src/objc_frame_buffer.h"
-
-id<RTCVideoFrameBuffer> nativeToRtcFrameBuffer(
-    const rtc::scoped_refptr<webrtc::VideoFrameBuffer> &buffer) {
-  return buffer->type() == webrtc::VideoFrameBuffer::Type::kNative ?
-      static_cast<webrtc::ObjCFrameBuffer *>(buffer.get())->wrapped_frame_buffer() :
-      [[RTCI420Buffer alloc] initWithFrameBuffer:buffer->ToI420()];
-}
 @implementation RTCVideoFrame {
   RTCVideoRotation _rotation;
   int64_t _timeStampNs;
@@ -94,24 +81,4 @@ id<RTCVideoFrameBuffer> nativeToRtcFrameBuffer(
   return self;
 }
-
-- (instancetype)initWithNativeVideoFrame:(const webrtc::VideoFrame &)frame {
-  if (self = [self initWithBuffer:nativeToRtcFrameBuffer(frame.video_frame_buffer())
-                         rotation:RTCVideoRotation(frame.rotation())
-                      timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec]) {
-    self.timeStamp = frame.timestamp();
-  }
-  return self;
-}
-
-- (webrtc::VideoFrame)nativeVideoFrame {
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> frameBuffer =
-      webrtc::ObjCToNativeVideoFrameBuffer(self.buffer);
-  webrtc::VideoFrame videoFrame(frameBuffer,
-                                (webrtc::VideoRotation)self.rotation,
-                                self.timeStampNs / rtc::kNumNanosecsPerMicrosec);
-  videoFrame.set_timestamp(self.timeStamp);
-  return videoFrame;
-}
 @end
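The two methods removed here are not lost; per the commit message they move behind the native API. A sketch of the native-to-ObjC direction, assuming the new Native/api/video_frame.mm carries over the removed logic (including a moved copy of nativeToRtcFrameBuffer()) essentially unchanged:

// Hypothetical body of NativeToObjCVideoFrame(), mirroring the removed
// -initWithNativeVideoFrame: above; everything beyond the removed code is an assumption.
RTCVideoFrame* NativeToObjCVideoFrame(const webrtc::VideoFrame& frame) {
  RTCVideoFrame* videoFrame =
      [[RTCVideoFrame alloc] initWithBuffer:nativeToRtcFrameBuffer(frame.video_frame_buffer())
                                   rotation:RTCVideoRotation(frame.rotation())
                                timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec];
  videoFrame.timeStamp = frame.timestamp();
  return videoFrame;
}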

File: RTCVideoRendererAdapter.mm

@@ -9,13 +9,14 @@
  */
-#import "RTCI420Buffer+Private.h"
-#import "RTCVideoFrame+Private.h"
 #import "RTCVideoRendererAdapter+Private.h"
 #import "WebRTC/RTCVideoFrame.h"
 #import "WebRTC/RTCVideoFrameBuffer.h"
 #include <memory>
+#include "sdk/objc/Framework/Native/api/video_frame.h"
 namespace webrtc {
 class VideoRendererAdapter
@@ -27,7 +28,7 @@ class VideoRendererAdapter
   }
   void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
-    RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithNativeVideoFrame:nativeVideoFrame];
+    RTCVideoFrame* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
     CGSize current_size = (videoFrame.rotation % 180 == 0)
         ? CGSizeMake(videoFrame.width, videoFrame.height)
         : CGSizeMake(videoFrame.height, videoFrame.width);
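The adapter computes current_size so it can notify the wrapped ObjC renderer of size changes before delivering the converted frame. A minimal sketch of a renderer on the receiving end (hypothetical class, for illustration only):

// Hypothetical RTCVideoRenderer implementation; not part of this change.
@interface FrameLoggingRenderer : NSObject <RTCVideoRenderer>
@end

@implementation FrameLoggingRenderer
- (void)setSize:(CGSize)size {
  NSLog(@"Frame size is now %.0fx%.0f", size.width, size.height);
}
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
  // Frames arriving here were converted by NativeToObjCVideoFrame().
  NSLog(@"Frame at %lld ns", frame.timeStampNs);
}
@end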

File: RTCVideoSource+Private.h

@@ -35,6 +35,9 @@ NS_ASSUME_NONNULL_BEGIN
           (rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
                              type:(RTCMediaSourceType)type NS_UNAVAILABLE;
+
+- (instancetype)initWithSignalingThread:(rtc::Thread *)signalingThread
+                           workerThread:(rtc::Thread *)workerThread;
 @end
 NS_ASSUME_NONNULL_END

File: RTCVideoSource.mm

@@ -12,16 +12,16 @@
 #include "api/videosourceproxy.h"
 #include "rtc_base/checks.h"
-#include "sdk/objc/Framework/Classes/Video/objcvideotracksource.h"
+#include "sdk/objc/Framework/Native/src/objc_video_track_source.h"
 
-static webrtc::ObjcVideoTrackSource *getObjcVideoSource(
+static webrtc::ObjCVideoTrackSource *getObjCVideoSource(
     const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
   webrtc::VideoTrackSourceProxy *proxy_source =
       static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
-  return static_cast<webrtc::ObjcVideoTrackSource *>(proxy_source->internal());
+  return static_cast<webrtc::ObjCVideoTrackSource *>(proxy_source->internal());
 }
 
-// TODO(magjed): Refactor this class and target ObjcVideoTrackSource only once
+// TODO(magjed): Refactor this class and target ObjCVideoTrackSource only once
 // RTCAVFoundationVideoSource is gone. See http://crbug/webrtc/7177 for more
 // info.
 @implementation RTCVideoSource {
@@ -45,17 +45,26 @@ static webrtc::ObjcVideoTrackSource *getObjcVideoSource(
   return nil;
 }
 
+- (instancetype)initWithSignalingThread:(rtc::Thread *)signalingThread
+                           workerThread:(rtc::Thread *)workerThread {
+  rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(
+      new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>());
+  return [self initWithNativeVideoSource:webrtc::VideoTrackSourceProxy::Create(
+                                             signalingThread, workerThread, objCVideoTrackSource)];
+}
+
 - (NSString *)description {
   NSString *stateString = [[self class] stringForState:self.state];
   return [NSString stringWithFormat:@"RTCVideoSource( %p ): %@", self, stateString];
 }
 
 - (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
-  getObjcVideoSource(_nativeVideoSource)->OnCapturedFrame(frame);
+  getObjCVideoSource(_nativeVideoSource)->OnCapturedFrame(frame);
 }
 
 - (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps {
-  getObjcVideoSource(_nativeVideoSource)->OnOutputFormatRequest(width, height, fps);
+  getObjCVideoSource(_nativeVideoSource)->OnOutputFormatRequest(width, height, fps);
 }
 
 #pragma mark - Private
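The new initializer makes RTCVideoSource constructible without reaching into the factory internals, which the upcoming native_api example presumably relies on. A minimal usage sketch, assuming the caller owns and starts the two threads:

std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
std::unique_ptr<rtc::Thread> worker_thread = rtc::Thread::Create();
signaling_thread->Start();
worker_thread->Start();

RTCVideoSource *source =
    [[RTCVideoSource alloc] initWithSignalingThread:signaling_thread.get()
                                       workerThread:worker_thread.get()];
// Capturer frames are forwarded to the underlying ObjCVideoTrackSource.
[source adaptOutputFormatToWidth:640 height:480 fps:30];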

File: sdk/objc/Framework/Classes/Video/objcvideotracksource.h (deleted)

@@ -1,50 +0,0 @@
-/*
- *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS. All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJCVIDEOTRACKSOURCE_H_
-#define SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJCVIDEOTRACKSOURCE_H_
-
-#include "WebRTC/RTCMacros.h"
-#include "media/base/adaptedvideotracksource.h"
-#include "rtc_base/timestampaligner.h"
-
-RTC_FWD_DECL_OBJC_CLASS(RTCVideoFrame);
-
-namespace webrtc {
-
-class ObjcVideoTrackSource : public rtc::AdaptedVideoTrackSource {
- public:
-  ObjcVideoTrackSource();
-
-  // This class can not be used for implementing screen casting. Hopefully, this
-  // function will be removed before we add that to iOS/Mac.
-  bool is_screencast() const override { return false; }
-
-  // Indicates that the encoder should denoise video before encoding it.
-  // If it is not set, the default configuration is used which is different
-  // depending on video codec.
-  rtc::Optional<bool> needs_denoising() const override { return false; }
-
-  SourceState state() const override { return SourceState::kLive; }
-
-  bool remote() const override { return false; }
-
-  // Called by RTCVideoSource.
-  void OnCapturedFrame(RTCVideoFrame* frame);
-  void OnOutputFormatRequest(int width, int height, int fps);
-
- private:
-  rtc::VideoBroadcaster broadcaster_;
-  rtc::TimestampAligner timestamp_aligner_;
-};
-
-}  // namespace webrtc
-
-#endif  // SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJCVIDEOTRACKSOURCE_H_

File: sdk/objc/Framework/Classes/Video/objcvideotracksource.mm (deleted)

@@ -1,79 +0,0 @@
-/*
- *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS. All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "sdk/objc/Framework/Classes/Video/objcvideotracksource.h"
-
-#import "WebRTC/RTCVideoFrame.h"
-#import "WebRTC/RTCVideoFrameBuffer.h"
-
-#include "api/video/i420_buffer.h"
-#include "sdk/objc/Framework/Native/src/objc_frame_buffer.h"
-
-namespace webrtc {
-
-ObjcVideoTrackSource::ObjcVideoTrackSource() {}
-
-void ObjcVideoTrackSource::OnOutputFormatRequest(int width, int height, int fps) {
-  cricket::VideoFormat format(width, height, cricket::VideoFormat::FpsToInterval(fps), 0);
-  video_adapter()->OnOutputFormatRequest(format);
-}
-
-void ObjcVideoTrackSource::OnCapturedFrame(RTCVideoFrame* frame) {
-  const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
-  const int64_t translated_timestamp_us =
-      timestamp_aligner_.TranslateTimestamp(timestamp_us, rtc::TimeMicros());
-
-  int adapted_width;
-  int adapted_height;
-  int crop_width;
-  int crop_height;
-  int crop_x;
-  int crop_y;
-  if (!AdaptFrame(frame.width, frame.height, timestamp_us, &adapted_width, &adapted_height,
-                  &crop_width, &crop_height, &crop_x, &crop_y)) {
-    return;
-  }
-
-  rtc::scoped_refptr<VideoFrameBuffer> buffer;
-  if (adapted_width == frame.width && adapted_height == frame.height) {
-    // No adaption - optimized path.
-    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
-  } else if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
-    // Adapted CVPixelBuffer frame.
-    RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
-    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>([[RTCCVPixelBuffer alloc]
-        initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
-               adaptedWidth:adapted_width
-              adaptedHeight:adapted_height
-                  cropWidth:crop_width
-                 cropHeight:crop_height
-                      cropX:crop_x + rtcPixelBuffer.cropX
-                      cropY:crop_y + rtcPixelBuffer.cropY]);
-  } else {
-    // Adapted I420 frame.
-    // TODO(magjed): Optimize this I420 path.
-    rtc::scoped_refptr<I420Buffer> i420_buffer = I420Buffer::Create(adapted_width, adapted_height);
-    buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
-    i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
-    buffer = i420_buffer;
-  }
-
-  // Applying rotation is only supported for legacy reasons and performance is
-  // not critical here.
-  webrtc::VideoRotation rotation = static_cast<webrtc::VideoRotation>(frame.rotation);
-  if (apply_rotation() && rotation != kVideoRotation_0) {
-    buffer = I420Buffer::Rotate(*buffer->ToI420(), rotation);
-    rotation = kVideoRotation_0;
-  }
-
-  OnFrame(webrtc::VideoFrame(buffer, rotation, translated_timestamp_us));
-}
-
-}  // namespace webrtc
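Neither deleted file loses functionality: matching the include added to RTCVideoSource.mm above, both presumably reappear under sdk/objc/Framework/Native/src/ as objc_video_track_source.{h,mm} with the class renamed to ObjCVideoTrackSource. A sketch of the relocated header under that assumption:

// Hypothetical sdk/objc/Framework/Native/src/objc_video_track_source.h, assuming
// the deleted header above moves verbatim apart from the rename and new path.
#ifndef SDK_OBJC_FRAMEWORK_NATIVE_SRC_OBJC_VIDEO_TRACK_SOURCE_H_
#define SDK_OBJC_FRAMEWORK_NATIVE_SRC_OBJC_VIDEO_TRACK_SOURCE_H_

#include "WebRTC/RTCMacros.h"
#include "media/base/adaptedvideotracksource.h"
#include "rtc_base/timestampaligner.h"

RTC_FWD_DECL_OBJC_CLASS(RTCVideoFrame);

namespace webrtc {

class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource {
 public:
  ObjCVideoTrackSource();

  bool is_screencast() const override { return false; }
  rtc::Optional<bool> needs_denoising() const override { return false; }
  SourceState state() const override { return SourceState::kLive; }
  bool remote() const override { return false; }

  // Called by RTCVideoSource.
  void OnCapturedFrame(RTCVideoFrame* frame);
  void OnOutputFormatRequest(int width, int height, int fps);

 private:
  rtc::VideoBroadcaster broadcaster_;
  rtc::TimestampAligner timestamp_aligner_;
};

}  // namespace webrtc

#endif  // SDK_OBJC_FRAMEWORK_NATIVE_SRC_OBJC_VIDEO_TRACK_SOURCE_H_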