WebRTC Part 2 | Desktop Sharing
Environment
- Ubuntu 18.04
- WebRTC M84
Modify the peerconnection example to implement desktop sharing.

Modify the CapturerTrackSource class:

- Add a desktop data-source class, RcrtcDesktopCapturerTrackSource.
- It inherits webrtc::DesktopCapturer::Callback, which is used to capture desktop frames and receive them in a callback.
- It inherits rtc::VideoSourceInterface<webrtc::VideoFrame>, so that rendered_track_->AddOrUpdateSink(this, rtc::VideoSinkWants()); can register the callback of a class VideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame>. A declaration-level sketch of the class follows; the full implementation comes after it.
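As an overview (not a separate implementation, only a declaration-level sketch of the code below), the class looks like this:

// Declaration-only outline of RcrtcDesktopCapturerTrackSource.
class RcrtcDesktopCapturerTrackSource
    : public webrtc::DesktopCapturer::Callback,              // desktop capture callback
      public rtc::VideoSourceInterface<webrtc::VideoFrame> { // video source for the track
 public:
  // From webrtc::DesktopCapturer::Callback: invoked for every captured frame.
  void OnCaptureResult(webrtc::DesktopCapturer::Result result,
                       std::unique_ptr<webrtc::DesktopFrame> frame) override;

  // From rtc::VideoSourceInterface<webrtc::VideoFrame>: the VideoTrack (or a
  // VideoRenderer) registers its sink here.
  void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
                       const rtc::VideoSinkWants& wants) override;
  void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;

  // Capture loop running on its own thread; pushes frames into the sink.
  void CaptureProcess();
};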
Full code of the desktop capturer header:
#ifndef RCRTC_DESKTOP_CAPTURER_H_
#define RCRTC_DESKTOP_CAPTURER_H_

#include <stdio.h>

#include "api/video/i420_buffer.h"
#include "media/base/video_broadcaster.h"
#include "media/base/video_common.h"
#include "modules/desktop_capture/desktop_and_cursor_composer.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/thread.h"
#include "system_wrappers/include/sleep.h"
#include "test/vcm_capturer.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/video_common.h"

// Optional callback for handing raw I420 planes to the embedding application.
typedef void (*desktop_capture_frame_callback)(
    int width, int height, int y_stride, int u_stride, int v_stride,
    const uint8_t* y, const uint8_t* u, const uint8_t* v, void* context);
class RcrtcDesktopCapturerTrackSource
    : public webrtc::DesktopCapturer::Callback,
      public rtc::VideoSourceInterface<webrtc::VideoFrame> {
 public:
  // The constructor immediately spins up the capture thread.
  RcrtcDesktopCapturerTrackSource(/*const std::map<std::string, std::string>& opts*/) {
    Start(nullptr);
  }

  inline ~RcrtcDesktopCapturerTrackSource() override {}

  // Returns a raw pointer; the caller (CapturerTrackSource::CreateDesk below)
  // takes ownership.
  static RcrtcDesktopCapturerTrackSource* Create() {
    std::unique_ptr<RcrtcDesktopCapturerTrackSource> desk(
        new RcrtcDesktopCapturerTrackSource);
    return desk.release();
  }
  // Override of webrtc::DesktopCapturer::Callback. Called for every captured
  // desktop frame; converts the ARGB DesktopFrame to I420 and pushes it into
  // the registered sink.
  void OnCaptureResult(webrtc::DesktopCapturer::Result result,
                       std::unique_ptr<webrtc::DesktopFrame> frame) override {
    if (result == webrtc::DesktopCapturer::Result::SUCCESS) {
      int width = frame->size().width();
      int height = frame->size().height();
      rtc::scoped_refptr<webrtc::I420Buffer> I420buffer =
          webrtc::I420Buffer::Create(width, height);
      int stride = width;
      uint8_t* yplane = I420buffer->MutableDataY();
      uint8_t* uplane = I420buffer->MutableDataU();
      uint8_t* vplane = I420buffer->MutableDataV();
      const int conversionResult = libyuv::ConvertToI420(
          frame->data(), 0,
          yplane, stride,
          uplane, (stride + 1) / 2,
          vplane, (stride + 1) / 2,
          0, 0,
          width, height,
          width, height,
          libyuv::kRotate0, ::libyuv::FOURCC_ARGB);
      // _sink may still be null right after construction, before the track
      // has registered itself via AddOrUpdateSink().
      if (conversionResult >= 0 && _sink) {
        // Alternative: hand the raw I420 planes to _frameCallback instead:
        // webrtc::VideoFrame videoFrame(I420buffer,
        //                               webrtc::VideoRotation::kVideoRotation_0,
        //                               rtc::TimeMicros());
        // rtc::scoped_refptr<webrtc::I420BufferInterface> buffer(
        //     videoFrame.video_frame_buffer()->ToI420());
        // _frameCallback(buffer->width(), buffer->height(), buffer->StrideY(),
        //                buffer->StrideU(), buffer->StrideV(), buffer->DataY(),
        //                buffer->DataU(), buffer->DataV(), this->_userContext);
        webrtc::VideoFrame videoFrame =
            webrtc::VideoFrame(I420buffer, 0, 0, webrtc::kVideoRotation_0);
        _sink->OnFrame(videoFrame);
      }
    }
  }
  // Adds a window to the exclusion list consumed by CaptureProcess().
  void setExcludeWindow(webrtc::DesktopCapturer::Source windowId) {
    _excludeWindowList.push_back(windowId);
  }

  // Entry point for the rtc::PlatformThread created in Start().
  static void CaptureDeskThread(void* obj) {
    RcrtcDesktopCapturerTrackSource* capture =
        static_cast<RcrtcDesktopCapturerTrackSource*>(obj);
    capture->CaptureProcess();
  }
  // Capture loop: creates the screen capturer (wrapped in a
  // DesktopAndCursorComposer so the mouse cursor is included) and grabs one
  // frame every _msPerFrame milliseconds.
  void CaptureProcess() {
    webrtc::DesktopCaptureOptions opts =
        webrtc::DesktopCaptureOptions::CreateDefault();
    // opts.set_allow_use_magnification_api(true);  // window-filtering option (Windows magnification API)
    std::unique_ptr<webrtc::DesktopCapturer> capturer =
        std::unique_ptr<webrtc::DesktopCapturer>(
            new webrtc::DesktopAndCursorComposer(
                webrtc::DesktopCapturer::CreateScreenCapturer(opts), opts));
    // Start capturing; |this| receives the frames via OnCaptureResult().
    capturer->Start(this);
    // Register the windows that must not appear in the shared desktop.
    for (auto source : _excludeWindowList) {
      capturer->SetExcludedWindow(source.id);
    }
    while (_isrunning) {
      webrtc::SleepMs(_msPerFrame);
      // Grab one desktop frame.
      capturer->CaptureFrame();
    }
  }
  // Starts the capture thread. |back| is the optional raw-frame callback.
  bool Start(desktop_capture_frame_callback back) {
    printf("start desk capturer...\n");
    if (!_capture_thread) {
      _frameCallback = back;
      _isrunning = true;
      _capture_thread.reset(new rtc::PlatformThread(
          RcrtcDesktopCapturerTrackSource::CaptureDeskThread, this,
          "CaptureThread", rtc::ThreadPriority::kHighPriority));
      _capture_thread->Start();
    }
    // Alternative: run the loop on an rtc::Thread instead of a PlatformThread.
    // _capture_thread = rtc::Thread::Create();
    // _capture_thread->Start();
    // _capture_thread->PostTask(RTC_FROM_HERE, [&] { CaptureProcess(); });
    return true;
  }
  // Stops the capture loop and joins the capture thread.
  void Stop() {
    if (_isrunning) {
      _isrunning = false;
      _capture_thread->Stop();
      _capture_thread.reset();
    }
  }

  // rtc::VideoSourceInterface<webrtc::VideoFrame> implementation. The
  // VideoTrack (or a VideoRenderer) registers itself here as the sink that
  // receives the frames produced in OnCaptureResult().
  void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
                       const rtc::VideoSinkWants& wants) override {
    // Start(nullptr);
    _sink = sink;
  }

  void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override {
    if (sink == _sink) {
      _sink = nullptr;
    }
    Stop();
  }
 public:
  int _msPerFrame = 100;  // capture every 100 ms, i.e. 10 frames per second
  webrtc::DesktopCapturer::SourceList _excludeWindowList;  // windows to exclude from the capture
  desktop_capture_frame_callback _frameCallback = nullptr;  // optional raw video output callback
  void* _userContext = nullptr;

 private:
  std::unique_ptr<rtc::PlatformThread> _capture_thread;
  bool _isrunning = false;
  rtc::VideoSinkInterface<webrtc::VideoFrame>* _sink = nullptr;
};

#endif  // RCRTC_DESKTOP_CAPTURER_H_
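The source can be exercised on its own before wiring it into the peerconnection client: attach a plain rtc::VideoSinkInterface and check that frames arrive. The snippet below is a minimal sketch, not part of the original example; FrameCounterSink and TestDesktopSource are hypothetical names introduced only for this check, and the header above is assumed to be included.

#include <atomic>
#include <memory>

// Counts the frames delivered by the desktop source.
class FrameCounterSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
 public:
  void OnFrame(const webrtc::VideoFrame& frame) override {
    ++frames_;
    printf("got frame %d: %dx%d\n", frames_.load(), frame.width(), frame.height());
  }
  std::atomic<int> frames_{0};
};

void TestDesktopSource() {
  std::unique_ptr<RcrtcDesktopCapturerTrackSource> source(
      RcrtcDesktopCapturerTrackSource::Create());
  FrameCounterSink sink;
  // Same call path the VideoTrack uses internally.
  source->AddOrUpdateSink(&sink, rtc::VideoSinkWants());
  webrtc::SleepMs(2000);       // at 10 fps this should deliver roughly 20 frames
  source->RemoveSink(&sink);   // also stops the capture thread (see RemoveSink)
}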
peerconnection.cc: extend CapturerTrackSource with a CreateDesk() factory that wraps the desktop source:
class CapturerTrackSource : public webrtc::VideoTrackSource {
 public:
  // Original factory: opens the first available camera via VcmCapturer.
  static rtc::scoped_refptr<CapturerTrackSource> Create() {
    const size_t kWidth = 640;
    const size_t kHeight = 480;
    const size_t kFps = 30;
    std::unique_ptr<webrtc::test::VcmCapturer> capturer;
    std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(
        webrtc::VideoCaptureFactory::CreateDeviceInfo());
    if (!info) {
      return nullptr;
    }
    int num_devices = info->NumberOfDevices();
    for (int i = 0; i < num_devices; ++i) {
      capturer = absl::WrapUnique(
          webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, i));
      if (capturer) {
        return new rtc::RefCountedObject<CapturerTrackSource>(
            std::move(capturer));
      }
    }
    return nullptr;
  }
  // New factory: wraps the desktop capturer source.
  static rtc::scoped_refptr<CapturerTrackSource> CreateDesk() {
    std::unique_ptr<RcrtcDesktopCapturerTrackSource> desk(
        RcrtcDesktopCapturerTrackSource::Create());
    if (desk) {
      printf("CreateDesk success\n");
      return new rtc::RefCountedObject<CapturerTrackSource>(std::move(desk));
    }
    printf("CreateDesk failed...\n");
    return nullptr;
  }
 protected:
  explicit CapturerTrackSource(
      std::unique_ptr<webrtc::test::VcmCapturer> capturer)
      : VideoTrackSource(/*remote=*/false), capturer_(std::move(capturer)) {}
  explicit CapturerTrackSource(
      std::unique_ptr<RcrtcDesktopCapturerTrackSource> desk)
      : VideoTrackSource(/*remote=*/false), desk_(std::move(desk)) {}

 private:
  // Return whichever source this instance was constructed with.
  rtc::VideoSourceInterface<webrtc::VideoFrame>* source() override {
    if (desk_) {
      return desk_.get();
    }
    return capturer_.get();
  }

  std::unique_ptr<webrtc::test::VcmCapturer> capturer_;
  std::unique_ptr<RcrtcDesktopCapturerTrackSource> desk_;
};
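What remains is where CreateDesk() gets called. In the stock peerconnection client, Conductor::AddTracks() builds the local video track from CapturerTrackSource::Create(); switching that one call to CreateDesk() is enough. The sketch below is based on the stock example; the member and constant names (peer_connection_factory_, peer_connection_, main_wnd_, kVideoLabel, kStreamId) are the example's own.

void Conductor::AddTracks() {
  // ... audio track setup unchanged ...

  // Use the desktop source instead of the camera source.
  rtc::scoped_refptr<CapturerTrackSource> video_device =
      CapturerTrackSource::CreateDesk();  // was: CapturerTrackSource::Create()
  if (video_device) {
    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
        peer_connection_factory_->CreateVideoTrack(kVideoLabel, video_device));
    main_wnd_->StartLocalRenderer(video_track);

    auto result = peer_connection_->AddTrack(video_track, {kStreamId});
    if (!result.ok()) {
      RTC_LOG(LS_ERROR) << "Failed to add video track: "
                        << result.error().message();
    }
  }
}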
Reference: https://blog.csdn.net/weixin_29405665/article/details/107320004
--End--
- Original author: 留白
- Original link: https://zfunnily.github.io/2021/11/webrtcdesk/
- Last updated: 2024-04-16 01:01:05
- Notice: please credit the original author and link when reposting.