The Google Chrome project includes an implementation of WebRTC along with sample code. WebRTC provides the core technology for video conferencing, including audio/video capture, encoding/decoding, network transport, and rendering, and it is cross-platform: Windows, Linux, macOS, and Android. Chromium is open source and ships a camera-based video-conferencing example as well as a desktop capture component (full screen and individual windows), but it has no screen-sharing example, and searching Baidu, Google, and GitHub turned up nothing. After finding the key step on a Google discussion group, converting the captured desktop images into a video stream, I was able to implement this module.
It passes testing on release 72; a colleague also has it working on release 53.
The code is as follows:
MyCapturer.h
/**
 * Windows screen capture module: wraps webrtc::DesktopCapturer as a
 * cricket::VideoCapturer so desktop frames can feed a video track.
 */
#pragma once

#include <media/base/videocapturer.h>
#include <media/base/videocommon.h>
#include <modules/desktop_capture/desktop_capturer.h>
#include <modules/desktop_capture/desktop_frame.h>
#include <api/video/i420_buffer.h>
#include <api/scoped_refptr.h>
#include <rtc_base/messagehandler.h>
#include <rtc_base/thread.h>

class MyCapturer : public cricket::VideoCapturer,
                   public rtc::MessageHandler,
                   public webrtc::DesktopCapturer::Callback {
 public:
  MyCapturer();
  ~MyCapturer();

  // Captures one desktop frame and schedules the next capture.
  void CaptureFrame();

  // cricket::VideoCapturer implementation.
  virtual cricket::CaptureState Start(
      const cricket::VideoFormat& capture_format);
  virtual void Stop();
  virtual bool IsRunning();
  virtual bool IsScreencast() const { return true; }

  // webrtc::DesktopCapturer::Callback implementation.
  virtual void OnCaptureResult(webrtc::DesktopCapturer::Result result,
                               std::unique_ptr<webrtc::DesktopFrame> frame);

  // rtc::MessageHandler implementation.
  virtual void OnMessage(rtc::Message* msg);

 protected:
  virtual bool GetPreferredFourccs(std::vector<uint32_t>* fourccs);

 private:
  std::unique_ptr<webrtc::DesktopCapturer> capturer_;
  rtc::scoped_refptr<webrtc::I420Buffer> i420_buffer_;
};
MyCapturer.cc
#include "MyCapturer.h"
#include <modules/desktop_capture/desktop_capture_options.h>
#include <third_party/libyuv/include/libyuv.h>
MyCapturer::MyCapturer() {
  // Advertise a single supported format; frames are always delivered as I420.
  std::vector<cricket::VideoFormat> formats;
  formats.push_back(cricket::VideoFormat(
      800, 600, cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
  SetSupportedFormats(formats);
}

MyCapturer::~MyCapturer() {}
cricket::CaptureState MyCapturer::Start(
    const cricket::VideoFormat& capture_format) {
  cricket::VideoFormat supported;
  if (GetBestCaptureFormat(capture_format, &supported))
    SetCaptureFormat(&supported);
  SetCaptureState(cricket::CS_RUNNING);

  // Create the screen capturer; allow the DirectX capturer, which is faster
  // than GDI capture on Windows when available.
  auto options = webrtc::DesktopCaptureOptions::CreateDefault();
  options.set_allow_directx_capturer(true);
  capturer_ = webrtc::DesktopCapturer::CreateScreenCapturer(options);
  capturer_->Start(this);
  CaptureFrame();
  return cricket::CS_RUNNING;
}
void MyCapturer::Stop() {
  SetCaptureState(cricket::CS_STOPPED);
  SetCaptureFormat(nullptr);
}

bool MyCapturer::IsRunning() {
  return capture_state() == cricket::CS_RUNNING;
}

bool MyCapturer::GetPreferredFourccs(std::vector<uint32_t>* fourccs) {
  fourccs->push_back(cricket::FOURCC_I420);
  fourccs->push_back(cricket::FOURCC_MJPG);
  return true;
}
void MyCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result,
                                 std::unique_ptr<webrtc::DesktopFrame> frame) {
  if (result != webrtc::DesktopCapturer::Result::SUCCESS)
    return;

  int width = frame->size().width();
  int height = frame->size().height();
  // (Re)allocate the I420 buffer whenever the captured size changes so that
  // the buffer dimensions always match the frame passed to OnFrame().
  if (!i420_buffer_.get() || i420_buffer_->width() != width ||
      i420_buffer_->height() != height) {
    i420_buffer_ = webrtc::I420Buffer::Create(width, height);
  }

  // DesktopFrame pixels are 32-bit ARGB; convert them to I420, the format
  // expected by the encoder pipeline.
  libyuv::ConvertToI420(frame->data(), 0, i420_buffer_->MutableDataY(),
                        i420_buffer_->StrideY(), i420_buffer_->MutableDataU(),
                        i420_buffer_->StrideU(), i420_buffer_->MutableDataV(),
                        i420_buffer_->StrideV(), 0, 0, width, height, width,
                        height, libyuv::kRotate0, libyuv::FOURCC_ARGB);

  OnFrame(webrtc::VideoFrame(i420_buffer_, 0, 0, webrtc::kVideoRotation_0),
          width, height);
}
void MyCapturer::OnMessage(rtc::Message* msg) {
  if (msg->message_id == 0)
    CaptureFrame();
}

void MyCapturer::CaptureFrame() {
  // Do not keep the loop alive once Stop() has been called.
  if (!IsRunning())
    return;
  // Ask the desktop capturer for one frame (delivered asynchronously via
  // OnCaptureResult) and schedule the next capture ~33 ms later (~30 fps).
  capturer_->CaptureFrame();
  rtc::Location loc(__FUNCTION__, __FILE__);
  rtc::Thread::Current()->PostDelayed(loc, 33, this, 0);
}
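The same desktop_capture API can also capture a single application window instead of the full screen. Below is a minimal sketch of how the capturer creation inside Start() could be adapted for that; this variant is not part of the original module, and picking the first enumerated window is purely illustrative (a real application would present the source list to the user):

auto options = webrtc::DesktopCaptureOptions::CreateDefault();
options.set_allow_directx_capturer(true);
capturer_ = webrtc::DesktopCapturer::CreateWindowCapturer(options);

// Enumerate capturable windows and select one before starting the capturer.
webrtc::DesktopCapturer::SourceList sources;
if (capturer_->GetSourceList(&sources) && !sources.empty()) {
  capturer_->SelectSource(sources.front().id);
}
capturer_->Start(this);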
After adding this class to the peerconnection/client project, hook it up in Conductor::AddTracks() by changing
std::unique_ptr<cricket::VideoCapturer> video_device = OpenVideoCaptureDevice();
to
std::unique_ptr<cricket::VideoCapturer> video_device =
    std::unique_ptr<cricket::VideoCapturer>(new MyCapturer());
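For orientation, this is roughly how the surrounding code in Conductor::AddTracks() looks after the swap. The CreateVideoSource()/CreateVideoTrack() calls and the kVideoLabel/kStreamId constants are assumed to match the stock peerconnection_client sample of that era (around release 72), so treat this as a sketch of where the change lands rather than an exact copy of conductor.cc:

// Use the screen capturer instead of a camera device.
std::unique_ptr<cricket::VideoCapturer> video_device =
    std::unique_ptr<cricket::VideoCapturer>(new MyCapturer());
if (video_device) {
  rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
      peer_connection_factory_->CreateVideoTrack(
          kVideoLabel, peer_connection_factory_->CreateVideoSource(
                           std::move(video_device), nullptr)));
  main_wnd_->StartLocalRenderer(video_track);

  auto result_or_error = peer_connection_->AddTrack(video_track, {kStreamId});
  if (!result_or_error.ok()) {
    RTC_LOG(LS_ERROR) << "Failed to add video track to PeerConnection: "
                      << result_or_error.error().message();
  }
}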