Fix native api in preparation for native_api example.

Add native api conversions for video frames and video renderer. This
also requires some changes to sdk/BUILD to avoid cyclic dependencies.

Bug: webrtc:8832
Change-Id: Ibf21e63bdcae195dcb61d63f9262e6a8dc4fa790
Reviewed-on: https://webrtc-review.googlesource.com/57142
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22340}
This commit is contained in:
Anders Carlsson
2018-03-07 10:32:03 +01:00
committed by Commit Bot
parent a2d89fc9f5
commit 9823ee47d3
26 changed files with 393 additions and 125 deletions

View File

@ -1,50 +0,0 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJCVIDEOTRACKSOURCE_H_
#define SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJCVIDEOTRACKSOURCE_H_
#include "WebRTC/RTCMacros.h"
#include "media/base/adaptedvideotracksource.h"
#include "rtc_base/timestampaligner.h"
RTC_FWD_DECL_OBJC_CLASS(RTCVideoFrame);
namespace webrtc {
// Video track source fed by an Objective-C capturer delivering RTCVideoFrame
// objects. Inherits frame adaption (crop/scale/drop) from
// rtc::AdaptedVideoTrackSource and forwards adapted frames to registered sinks.
class ObjcVideoTrackSource : public rtc::AdaptedVideoTrackSource {
public:
ObjcVideoTrackSource();
// This class can not be used for implementing screen casting. Hopefully, this
// function will be removed before we add that to iOS/Mac.
bool is_screencast() const override { return false; }
// Indicates that the encoder should denoise video before encoding it.
// If it is not set, the default configuration is used which is different
// depending on video codec.
// NOTE(review): this returns an engaged Optional containing false (denoising
// explicitly disabled), not an unset Optional -- confirm that is intended.
rtc::Optional<bool> needs_denoising() const override { return false; }
// This source is always a live, local source.
SourceState state() const override { return SourceState::kLive; }
bool remote() const override { return false; }
// Called by RTCVideoSource. Adapts |frame| per the current output format
// request and delivers it to sinks; see objcvideotracksource.mm.
void OnCapturedFrame(RTCVideoFrame* frame);
// Requests that future captured frames be adapted to at most
// |width| x |height| at |fps| frames per second.
void OnOutputFormatRequest(int width, int height, int fps);
private:
// NOTE(review): broadcaster_ is not referenced in the visible code and the
// base class already broadcasts to sinks -- confirm before removing.
rtc::VideoBroadcaster broadcaster_;
// Translates capture timestamps into the local rtc::TimeMicros() clock
// domain before frames are delivered.
rtc::TimestampAligner timestamp_aligner_;
};
} // namespace webrtc
#endif // SDK_OBJC_FRAMEWORK_CLASSES_VIDEO_OBJCVIDEOTRACKSOURCE_H_

View File

@ -1,79 +0,0 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "sdk/objc/Framework/Classes/Video/objcvideotracksource.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "api/video/i420_buffer.h"
#include "sdk/objc/Framework/Native/src/objc_frame_buffer.h"
namespace webrtc {
// Default-constructs the source; all adaption state lives in the base class.
ObjcVideoTrackSource::ObjcVideoTrackSource() {}
// Forwards an output-format request to the base-class video adapter, which
// will crop/scale/drop subsequent captured frames to honor it.
void ObjcVideoTrackSource::OnOutputFormatRequest(int width, int height, int fps) {
  // VideoFormat takes the frame interval in nanoseconds, not a rate.
  const cricket::VideoFormat requested_format(
      width, height, cricket::VideoFormat::FpsToInterval(fps), 0);
  video_adapter()->OnOutputFormatRequest(requested_format);
}
// Adapts a captured RTCVideoFrame (crop/scale/drop per the video adapter)
// and delivers the result to the base class via OnFrame().
void ObjcVideoTrackSource::OnCapturedFrame(RTCVideoFrame* frame) {
// Convert the capture timestamp from nanoseconds to microseconds, then align
// it with the local rtc::TimeMicros() clock domain.
const int64_t timestamp_us = frame.timeStampNs / rtc::kNumNanosecsPerMicrosec;
const int64_t translated_timestamp_us =
timestamp_aligner_.TranslateTimestamp(timestamp_us, rtc::TimeMicros());
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
// AdaptFrame() returns false when this frame should be dropped (e.g. to
// satisfy the requested output format or frame rate).
if (!AdaptFrame(frame.width, frame.height, timestamp_us, &adapted_width, &adapted_height,
&crop_width, &crop_height, &crop_x, &crop_y)) {
return;
}
rtc::scoped_refptr<VideoFrameBuffer> buffer;
if (adapted_width == frame.width && adapted_height == frame.height) {
// No adaption - optimized path.
buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
} else if ([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
// Adapted CVPixelBuffer frame.
// Re-wrap the same underlying CVPixelBuffer with updated crop/scale
// metadata; the crop offsets are composed with any existing crop.
RTCCVPixelBuffer *rtcPixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
buffer = new rtc::RefCountedObject<ObjCFrameBuffer>([[RTCCVPixelBuffer alloc]
initWithPixelBuffer:rtcPixelBuffer.pixelBuffer
adaptedWidth:adapted_width
adaptedHeight:adapted_height
cropWidth:crop_width
cropHeight:crop_height
cropX:crop_x + rtcPixelBuffer.cropX
cropY:crop_y + rtcPixelBuffer.cropY]);
} else {
// Adapted I420 frame.
// TODO(magjed): Optimize this I420 path.
// |buffer| temporarily wraps the source frame so ToI420() can serve as the
// crop/scale input; the scaled I420 buffer then replaces it.
rtc::scoped_refptr<I420Buffer> i420_buffer = I420Buffer::Create(adapted_width, adapted_height);
buffer = new rtc::RefCountedObject<ObjCFrameBuffer>(frame.buffer);
i420_buffer->CropAndScaleFrom(*buffer->ToI420(), crop_x, crop_y, crop_width, crop_height);
buffer = i420_buffer;
}
// Applying rotation is only supported for legacy reasons and performance is
// not critical here.
webrtc::VideoRotation rotation = static_cast<webrtc::VideoRotation>(frame.rotation);
if (apply_rotation() && rotation != kVideoRotation_0) {
// Bake the rotation into the pixels and report the frame as unrotated.
buffer = I420Buffer::Rotate(*buffer->ToI420(), rotation);
rotation = kVideoRotation_0;
}
OnFrame(webrtc::VideoFrame(buffer, rotation, translated_timestamp_us));
}
} // namespace webrtc