Fix native API in preparation for native_api example.

Add native API conversions for video frames and the video renderer. This
also requires some changes to sdk/BUILD to avoid cyclic dependencies.

Bug: webrtc:8832
Change-Id: Ibf21e63bdcae195dcb61d63f9262e6a8dc4fa790
Reviewed-on: https://webrtc-review.googlesource.com/57142
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22340}
Author: Anders Carlsson
Date: 2018-03-07 10:32:03 +01:00
Committed by: Commit Bot
Commit: 9823ee47d3 (parent a2d89fc9f5)
26 changed files with 393 additions and 125 deletions
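The conversion helpers this commit wires in live under sdk/objc/Framework/Native/api/ (the RTCVideoRendererAdapter.mm hunk below includes video_frame.h from there and calls NativeToObjCVideoFrame). A minimal sketch of wrapping a native frame through that API; the function's namespace and exact signature are inferred from that call site rather than from the header itself:

#import "WebRTC/RTCVideoFrame.h"

#include "api/video/video_frame.h"
#include "sdk/objc/Framework/Native/api/video_frame.h"

// Hand a native webrtc::VideoFrame to Objective-C code without going through
// the RTCVideoFrame (+Private) category, which this commit deletes.
RTCVideoFrame *WrapNativeFrame(const webrtc::VideoFrame &nativeFrame) {
  // Carries the buffer, rotation and timestamps over into an RTCVideoFrame.
  return webrtc::NativeToObjCVideoFrame(nativeFrame);
}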


@@ -38,8 +38,6 @@
#include "sdk/objc/Framework/Native/src/objc_video_encoder_factory.h"
#endif
#include "Video/objcvideotracksource.h"
#include "api/videosourceproxy.h"
// Adding the nogncheck to disable the including header check.
// The no-media version PeerConnectionFactory doesn't depend on media related
// C++ target.
@@ -232,12 +230,8 @@
}
- (RTCVideoSource *)videoSource {
rtc::scoped_refptr<webrtc::ObjcVideoTrackSource> objcVideoTrackSource(
new rtc::RefCountedObject<webrtc::ObjcVideoTrackSource>());
return [[RTCVideoSource alloc]
initWithNativeVideoSource:webrtc::VideoTrackSourceProxy::Create(_signalingThread.get(),
_workerThread.get(),
objcVideoTrackSource)];
return [[RTCVideoSource alloc] initWithSignalingThread:_signalingThread.get()
workerThread:_workerThread.get()];
}
- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source
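With this change, -videoSource no longer builds the ObjcVideoTrackSource and VideoTrackSourceProxy itself; RTCVideoSource does that internally from the signaling and worker threads it is handed (see the RTCVideoSource hunks further down). A rough sketch of the resulting call path from application code, assuming the usual factory methods; only videoSource and videoTrackWithSource: appear in the context lines above, and the trackId: parameter is assumed:

// Application code is unchanged: the factory still vends the source.
RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];

// Internally this now calls -[RTCVideoSource initWithSignalingThread:workerThread:],
// which creates the ObjCVideoTrackSource and wraps it in a proxy.
RTCVideoSource *source = [factory videoSource];
RTCVideoTrack *track = [factory videoTrackWithSource:source trackId:@"video0"];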


@@ -1,26 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCI420Buffer+Private.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "api/video/video_frame.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCVideoFrame ()
- (instancetype)initWithNativeVideoFrame:(const webrtc::VideoFrame &)frame;
- (webrtc::VideoFrame)nativeVideoFrame;
@end
NS_ASSUME_NONNULL_END


@@ -8,23 +8,10 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCVideoFrame+Private.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "api/video/video_frame.h"
#include "rtc_base/timeutils.h"
#include "sdk/objc/Framework/Native/api/video_frame_buffer.h"
#include "sdk/objc/Framework/Native/src/objc_frame_buffer.h"
id<RTCVideoFrameBuffer> nativeToRtcFrameBuffer(
const rtc::scoped_refptr<webrtc::VideoFrameBuffer> &buffer) {
return buffer->type() == webrtc::VideoFrameBuffer::Type::kNative ?
static_cast<webrtc::ObjCFrameBuffer *>(buffer.get())->wrapped_frame_buffer() :
[[RTCI420Buffer alloc] initWithFrameBuffer:buffer->ToI420()];
}
@implementation RTCVideoFrame {
RTCVideoRotation _rotation;
int64_t _timeStampNs;
@@ -94,24 +81,4 @@ id<RTCVideoFrameBuffer> nativeToRtcFrameBuffer(
return self;
}
- (instancetype)initWithNativeVideoFrame:(const webrtc::VideoFrame &)frame {
if (self = [self initWithBuffer:nativeToRtcFrameBuffer(frame.video_frame_buffer())
rotation:RTCVideoRotation(frame.rotation())
timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec]) {
self.timeStamp = frame.timestamp();
}
return self;
}
- (webrtc::VideoFrame)nativeVideoFrame {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> frameBuffer =
webrtc::ObjCToNativeVideoFrameBuffer(self.buffer);
webrtc::VideoFrame videoFrame(frameBuffer,
(webrtc::VideoRotation)self.rotation,
self.timeStampNs / rtc::kNumNanosecsPerMicrosec);
videoFrame.set_timestamp(self.timeStamp);
return videoFrame;
}
@end


@@ -9,13 +9,14 @@
*/
#import "RTCI420Buffer+Private.h"
#import "RTCVideoFrame+Private.h"
#import "RTCVideoRendererAdapter+Private.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include <memory>
#include "sdk/objc/Framework/Native/api/video_frame.h"
namespace webrtc {
class VideoRendererAdapter
@@ -27,7 +28,7 @@ class VideoRendererAdapter
}
void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc] initWithNativeVideoFrame:nativeVideoFrame];
RTCVideoFrame* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame);
CGSize current_size = (videoFrame.rotation % 180 == 0)
? CGSizeMake(videoFrame.width, videoFrame.height)


@@ -35,6 +35,9 @@ NS_ASSUME_NONNULL_BEGIN
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
type:(RTCMediaSourceType)type NS_UNAVAILABLE;
- (instancetype)initWithSignalingThread:(rtc::Thread *)signalingThread
workerThread:(rtc::Thread *)workerThread;
@end
NS_ASSUME_NONNULL_END


@@ -12,16 +12,16 @@
#include "api/videosourceproxy.h"
#include "rtc_base/checks.h"
#include "sdk/objc/Framework/Classes/Video/objcvideotracksource.h"
#include "sdk/objc/Framework/Native/src/objc_video_track_source.h"
static webrtc::ObjcVideoTrackSource *getObjcVideoSource(
static webrtc::ObjCVideoTrackSource *getObjCVideoSource(
const rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> nativeSource) {
webrtc::VideoTrackSourceProxy *proxy_source =
static_cast<webrtc::VideoTrackSourceProxy *>(nativeSource.get());
return static_cast<webrtc::ObjcVideoTrackSource *>(proxy_source->internal());
return static_cast<webrtc::ObjCVideoTrackSource *>(proxy_source->internal());
}
// TODO(magjed): Refactor this class and target ObjcVideoTrackSource only once
// TODO(magjed): Refactor this class and target ObjCVideoTrackSource only once
// RTCAVFoundationVideoSource is gone. See http://crbug/webrtc/7177 for more
// info.
@implementation RTCVideoSource {
@@ -45,17 +45,26 @@ static webrtc::ObjcVideoTrackSource *getObjcVideoSource(
return nil;
}
- (instancetype)initWithSignalingThread:(rtc::Thread *)signalingThread
workerThread:(rtc::Thread *)workerThread {
rtc::scoped_refptr<webrtc::ObjCVideoTrackSource> objCVideoTrackSource(
new rtc::RefCountedObject<webrtc::ObjCVideoTrackSource>());
return [self initWithNativeVideoSource:webrtc::VideoTrackSourceProxy::Create(
signalingThread, workerThread, objCVideoTrackSource)];
}
- (NSString *)description {
NSString *stateString = [[self class] stringForState:self.state];
return [NSString stringWithFormat:@"RTCVideoSource( %p ): %@", self, stateString];
}
- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
getObjcVideoSource(_nativeVideoSource)->OnCapturedFrame(frame);
getObjCVideoSource(_nativeVideoSource)->OnCapturedFrame(frame);
}
- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps {
getObjcVideoSource(_nativeVideoSource)->OnOutputFormatRequest(width, height, fps);
getObjCVideoSource(_nativeVideoSource)->OnOutputFormatRequest(width, height, fps);
}
#pragma mark - Private