PipeWire capturer: make use of ScreenCaptureFrameQueue
This lets us always keep a frame around, so we can return it every time the consumer asks us to capture a frame; previously we returned either the current frame or nothing when no new frame was available. This will be needed to support the mouse cursor separately, as DesktopAndCursorComposer requires a frame on every capture, even if it is the same one as before, so it can be combined with the mouse cursor.

Bug: webrtc:13429
Change-Id: Ice87968846870c0a880ab469d9e052b4978e658c
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/239362
Reviewed-by: Mark Foltz <mfoltz@chromium.org>
Commit-Queue: Mark Foltz <mfoltz@chromium.org>
Cr-Commit-Position: refs/heads/main@{#35956}
Committed by: WebRTC LUCI CQ
Parent: 409ac89b43
Commit: 49e0e77e40
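For context, the pattern the diff below adopts can be condensed as follows. This is only an illustrative sketch, not code from the commit: the FrameStore class name is hypothetical, it assumes the WebRTC desktop_capture headers and build, and it omits the queue_lock_ mutex that the real stream holds around both sides.

// Illustrative sketch only (not part of the commit): how a
// ScreenCaptureFrameQueue of SharedDesktopFrame lets the producer keep the
// latest frame while the consumer always gets a frame it can hold on to.
// Assumes the WebRTC source tree; locking is omitted for brevity.
#include <memory>

#include "modules/desktop_capture/desktop_frame.h"
#include "modules/desktop_capture/screen_capture_frame_queue.h"
#include "modules/desktop_capture/shared_desktop_frame.h"

namespace webrtc {

class FrameStore {  // hypothetical name, mirrors SharedScreenCastStreamPrivate
 public:
  // Producer side (ProcessBuffer): make sure the current queue slot is
  // writable, rotating or reallocating when it is still shared or resized.
  DesktopFrame* WritableFrame(const DesktopSize& size) {
    if (queue_.current_frame() && queue_.current_frame()->IsShared())
      queue_.MoveToNextFrame();
    if (!queue_.current_frame() ||
        !queue_.current_frame()->size().equals(size)) {
      queue_.ReplaceCurrentFrame(
          SharedDesktopFrame::Wrap(std::make_unique<BasicDesktopFrame>(size)));
    }
    return queue_.current_frame();
  }

  // Consumer side (CaptureFrame): always return the most recent frame, even
  // if nothing changed since the last call, by sharing the underlying buffer.
  std::unique_ptr<DesktopFrame> CaptureFrame() {
    if (!queue_.current_frame())
      return nullptr;
    return queue_.current_frame()->Share();
  }

 private:
  ScreenCaptureFrameQueue<SharedDesktopFrame> queue_;
};

}  // namespace webrtc

Share() hands the consumer a reference to the same underlying buffer, and IsShared() tells the producer that buffer is still referenced, so it rotates to the other queue slot before overwriting pixels. That is what lets a wrapper like DesktopAndCursorComposer receive the same frame on every capture without it being modified underneath it.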
@@ -59,7 +59,7 @@ void BaseCapturerPipeWire::CaptureFrame() {
     return;
   }
 
-  std::unique_ptr<BasicDesktopFrame> frame =
+  std::unique_ptr<DesktopFrame> frame =
       options_.screencast_stream()->CaptureFrame();
 
   if (!frame || !frame->data()) {
@@ -23,7 +23,9 @@
 #include <utility>
 #include <vector>
 
 #include "absl/memory/memory.h"
 #include "modules/desktop_capture/linux/wayland/egl_dmabuf.h"
+#include "modules/desktop_capture/screen_capture_frame_queue.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/logging.h"
 #include "rtc_base/sanitizer.h"
@@ -172,7 +174,7 @@ class SharedScreenCastStreamPrivate {
 
   bool StartScreenCastStream(uint32_t stream_node_id, int fd);
   void StopScreenCastStream();
-  std::unique_ptr<BasicDesktopFrame> CaptureFrame();
+  std::unique_ptr<DesktopFrame> CaptureFrame();
 
  private:
   uint32_t pw_stream_node_id_ = 0;
@@ -181,8 +183,9 @@ class SharedScreenCastStreamPrivate {
   DesktopSize desktop_size_ = {};
   DesktopSize video_size_;
 
-  webrtc::Mutex current_frame_lock_;
-  std::unique_ptr<BasicDesktopFrame> current_frame_;
+  webrtc::Mutex queue_lock_;
+  ScreenCaptureFrameQueue<SharedDesktopFrame> queue_
+      RTC_GUARDED_BY(&queue_lock_);
 
   int64_t modifier_;
   std::unique_ptr<EglDmaBuf> egl_dmabuf_;
@@ -541,15 +544,15 @@ void SharedScreenCastStreamPrivate::StopScreenCastStream() {
   }
 }
 
-std::unique_ptr<BasicDesktopFrame>
-SharedScreenCastStreamPrivate::CaptureFrame() {
-  webrtc::MutexLock lock(&current_frame_lock_);
+std::unique_ptr<DesktopFrame> SharedScreenCastStreamPrivate::CaptureFrame() {
+  webrtc::MutexLock lock(&queue_lock_);
 
-  if (!current_frame_ || !current_frame_->data()) {
-    return nullptr;
+  if (!queue_.current_frame()) {
+    return std::unique_ptr<DesktopFrame>{};
   }
 
-  return std::move(current_frame_);
+  std::unique_ptr<SharedDesktopFrame> frame = queue_.current_frame()->Share();
+  return std::move(frame);
 }
 
 void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) {
@@ -653,25 +656,41 @@ void SharedScreenCastStreamPrivate::ProcessBuffer(pw_buffer* buffer) {
                        ? video_metadata->region.position.x
                        : 0;
 
-  webrtc::MutexLock lock(&current_frame_lock_);
-
   uint8_t* updated_src = src + (spa_buffer->datas[0].chunk->stride * y_offset) +
                          (kBytesPerPixel * x_offset);
-  current_frame_ = std::make_unique<BasicDesktopFrame>(
-      DesktopSize(video_size_.width(), video_size_.height()));
-  current_frame_->CopyPixelsFrom(
+
+  webrtc::MutexLock lock(&queue_lock_);
+
+  // Move to the next frame if the current one is being used and shared
+  if (queue_.current_frame() && queue_.current_frame()->IsShared()) {
+    queue_.MoveToNextFrame();
+    if (queue_.current_frame() && queue_.current_frame()->IsShared()) {
+      RTC_LOG(LS_WARNING)
+          << "Failed to process PipeWire buffer: no available frame";
+      return;
+    }
+  }
+
+  if (!queue_.current_frame() ||
+      !queue_.current_frame()->size().equals(video_size_)) {
+    std::unique_ptr<DesktopFrame> frame(new BasicDesktopFrame(
+        DesktopSize(video_size_.width(), video_size_.height())));
+    queue_.ReplaceCurrentFrame(SharedDesktopFrame::Wrap(std::move(frame)));
+  }
+
+  queue_.current_frame()->CopyPixelsFrom(
       updated_src,
       (spa_buffer->datas[0].chunk->stride - (kBytesPerPixel * x_offset)),
       DesktopRect::MakeWH(video_size_.width(), video_size_.height()));
 
   if (spa_video_format_.format == SPA_VIDEO_FORMAT_RGBx ||
       spa_video_format_.format == SPA_VIDEO_FORMAT_RGBA) {
-    uint8_t* tmp_src = current_frame_->data();
+    uint8_t* tmp_src = queue_.current_frame()->data();
     for (int i = 0; i < video_size_.height(); ++i) {
       // If both sides decided to go with the RGBx format we need to convert it
       // to BGRx to match color format expected by WebRTC.
-      ConvertRGBxToBGRx(tmp_src, current_frame_->stride());
-      tmp_src += current_frame_->stride();
+      ConvertRGBxToBGRx(tmp_src, queue_.current_frame()->stride());
+      tmp_src += queue_.current_frame()->stride();
     }
   }
 }
@@ -706,7 +725,7 @@ void SharedScreenCastStream::StopScreenCastStream() {
   private_->StopScreenCastStream();
 }
 
-std::unique_ptr<BasicDesktopFrame> SharedScreenCastStream::CaptureFrame() {
+std::unique_ptr<DesktopFrame> SharedScreenCastStream::CaptureFrame() {
   return private_->CaptureFrame();
 }
 
@@ -29,7 +29,7 @@ class RTC_EXPORT SharedScreenCastStream
 
   bool StartScreenCastStream(uint32_t stream_node_id, int fd);
   void StopScreenCastStream();
-  std::unique_ptr<BasicDesktopFrame> CaptureFrame();
+  std::unique_ptr<DesktopFrame> CaptureFrame();
 
   ~SharedScreenCastStream();
 