Support screen sharing with xdg-desktop-portal

David Elsing 2023-03-11 14:36:51 +01:00
parent d916d6cd63
commit 1ba6a4d78d
9 changed files with 937 additions and 89 deletions


@@ -73,6 +73,7 @@ if (APPLE OR WIN32)
 endif()
 option(VOIP "Whether to enable voip support. Disable this, if you don't have gstreamer." ${VOIP_DEFAULT})
 cmake_dependent_option(SCREENSHARE_X11 "Whether to enable screenshare support on X11." ON "VOIP" OFF)
+cmake_dependent_option(SCREENSHARE_XDP "Whether to enable screenshare support using xdg-desktop-portal." ON "VOIP" OFF)
 list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake")
@@ -414,6 +415,9 @@ set(SRC_FILES
 src/voip/CallDevices.h
 src/voip/CallManager.cpp
 src/voip/CallManager.h
+src/voip/ScreenCastPortal.cpp
+src/voip/ScreenCastPortal.h
+src/voip/WebRTCSession.h
 src/voip/WebRTCSession.cpp
 src/voip/WebRTCSession.h


@@ -103,7 +103,6 @@ Popup {
 }
 Button {
-    visible: CallManager.screenShareSupported
     text: qsTr("Screen")
     icon.source: "qrc:/icons/icons/ui/screen-share.svg"
     onClicked: {


@@ -14,8 +14,15 @@ Popup {
 anchors.centerIn: parent;
 Component.onCompleted: {
+    if (CallManager.screenShareX11Available)
+        CallManager.setScreenShareType(ScreenShareType.X11);
+    else
+        CallManager.setScreenShareType(ScreenShareType.XDP);
     frameRateCombo.currentIndex = frameRateCombo.find(Settings.screenShareFrameRate);
 }
+Component.onDestruction: {
+    CallManager.closeScreenShare();
+}
 palette: Nheko.colors
 ColumnLayout {
@@ -29,6 +36,42 @@ Popup {
     color: Nheko.colors.windowText
 }
+RowLayout {
+    Layout.leftMargin: 8
+    Layout.rightMargin: 8
+    Layout.bottomMargin: 8
+    Label {
+        Layout.alignment: Qt.AlignLeft
+        text: qsTr("Method:")
+        color: Nheko.colors.windowText
+    }
+    RadioButton {
+        id: screenshare_X11
+        text: qsTr("X11");
+        visible: CallManager.screenShareX11Available
+        checked: CallManager.screenShareX11Available
+        onToggled: {
+            if (screenshare_X11.checked)
+                CallManager.setScreenShareType(ScreenShareType.X11);
+            else
+                CallManager.setScreenShareType(ScreenShareType.XDP);
+        }
+    }
+    RadioButton {
+        id: screenshare_XDP
+        text: qsTr("xdg-desktop-portal");
+        checked: !CallManager.screenShareX11Available
+        onToggled: {
+            if (screenshare_XDP.checked)
+                CallManager.setScreenShareType(ScreenShareType.XDP);
+            else
+                CallManager.setScreenShareType(ScreenShareType.X11);
+        }
+    }
+}
 RowLayout {
     Layout.leftMargin: 8
     Layout.rightMargin: 8
@@ -41,12 +84,23 @@ Popup {
 }
 ComboBox {
+    visible: screenshare_X11.checked
     id: windowCombo
     Layout.fillWidth: true
     model: CallManager.windowList()
 }
+Button {
+    visible: screenshare_XDP.checked
+    highlighted: !CallManager.screenShareReady
+    text: qsTr("Request screencast")
+    onClicked: {
+        Settings.screenShareHideCursor = hideCursorCheckBox.checked;
+        CallManager.setupScreenShareXDP();
+    }
+}
 }
 RowLayout {
@@ -122,6 +176,7 @@ Popup {
 }
 Button {
+    visible: CallManager.screenShareReady
     text: qsTr("Share")
     icon.source: "qrc:/icons/icons/ui/screen-share.svg"
@@ -137,6 +192,7 @@ Popup {
 }
 Button {
+    visible: CallManager.screenShareReady
     text: qsTr("Preview")
     onClicked: {
         CallManager.previewWindow(windowCombo.currentIndex);


@@ -9,6 +9,7 @@
 #include <cstdlib>
 #include <memory>
+#include <QGuiApplication>
 #include <QMediaPlaylist>
 #include <QUrl>
@@ -22,6 +23,8 @@
 #include "Utils.h"
 #include "mtx/responses/turn_server.hpp"
+#include "voip/ScreenCastPortal.h"
+#include "voip/WebRTCSession.h"
 /*
  * Select Answer when one instance of the client supports v0
@@ -47,6 +50,7 @@ using namespace mtx::events;
 using namespace mtx::events::voip;
 using webrtc::CallType;
+using webrtc::ScreenShareType;
 //! Session Description Object
 typedef RTCSessionDescriptionInit SDO;
@@ -64,6 +68,12 @@ CallManager::CallManager(QObject *parent)
     qRegisterMetaType<mtx::events::voip::CallCandidates::Candidate>();
     qRegisterMetaType<mtx::responses::TurnServer>();
+    if (screenShareX11Available()) {
+        screenShareType_ = ScreenShareType::X11;
+    } else {
+        screenShareType_ = ScreenShareType::XDP;
+    }
     connect(
       &session_,
       &WebRTCSession::offerCreated,
@@ -176,6 +186,13 @@ CallManager::CallManager(QObject *parent)
         break;
     }
 });
+#ifdef GSTREAMER_AVAILABLE
+    connect(&ScreenCastPortal::instance(),
+            &ScreenCastPortal::readyChanged,
+            this,
+            &CallManager::screenShareChanged);
+#endif
 }
 void
@@ -191,8 +208,10 @@ CallManager::sendInvite(const QString &roomid, CallType callType, unsigned int w
     auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
     std::string errorMessage;
-    if (!session_.havePlugins(
-          callType != CallType::VOICE, callType == CallType::SCREEN, &errorMessage)) {
+    if (!session_.havePlugins(callType != CallType::VOICE,
+                              callType == CallType::SCREEN,
+                              screenShareType_,
+                              &errorMessage)) {
         emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
         return;
     }
@@ -212,14 +231,22 @@ CallManager::sendInvite(const QString &roomid, CallType callType, unsigned int w
         return;
     }
+#ifdef GSTREAMER_AVAILABLE
     if (callType == CallType::SCREEN) {
-        if (!screenShareSupported())
-            return;
-        if (windows_.empty() || windowIndex >= windows_.size()) {
-            nhlog::ui()->error("WebRTC: window index out of range");
-            return;
+        if (screenShareType_ == ScreenShareType::X11) {
+            if (windows_.empty() || windowIndex >= windows_.size()) {
+                nhlog::ui()->error("WebRTC: window index out of range");
+                return;
+            }
+        } else {
+            ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
+            if (sc_portal.getStream() == nullptr) {
+                nhlog::ui()->error("xdg-desktop-portal stream not started");
+                return;
+            }
         }
     }
+#endif
     if (haveCallInvite_) {
         nhlog::ui()->debug(
@@ -255,8 +282,12 @@ CallManager::sendInvite(const QString &roomid, CallType callType, unsigned int w
     invitee_ = callParty_.toStdString();
     emit newInviteState();
     playRingtone(QUrl(QStringLiteral("qrc:/media/media/ringback.ogg")), true);
-    if (!session_.createOffer(callType,
-                              callType == CallType::SCREEN ? windows_[windowIndex].second : 0)) {
+    uint32_t shareWindowId =
+      callType == CallType::SCREEN && screenShareType_ == ScreenShareType::X11
+        ? windows_[windowIndex].second
+        : 0;
+    if (!session_.createOffer(callType, screenShareType_, shareWindowId)) {
         emit ChatPage::instance()->showNotification(QStringLiteral("Problem setting up call."));
         endCall();
     }
@@ -466,8 +497,10 @@ CallManager::acceptInvite()
     stopRingtone();
     std::string errorMessage;
-    if (!session_.havePlugins(
-          callType_ != CallType::VOICE, callType_ == CallType::SCREEN, &errorMessage)) {
+    if (!session_.havePlugins(callType_ != CallType::VOICE,
+                              callType_ == CallType::SCREEN,
+                              screenShareType_,
+                              &errorMessage)) {
         emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
         hangUp(CallHangUp::Reason::UserMediaFailed);
         return;
@@ -713,9 +746,13 @@ CallManager::callsSupported()
 }
 bool
-CallManager::screenShareSupported()
+CallManager::screenShareX11Available()
 {
-    return std::getenv("DISPLAY") && !std::getenv("WAYLAND_DISPLAY");
+#ifdef GSTREAMER_AVAILABLE
+    return std::getenv("DISPLAY");
+#else
+    return false;
+#endif
 }
 QStringList
@@ -746,6 +783,7 @@ CallManager::generateCallID()
 void
 CallManager::clear(bool endAllCalls)
 {
+    closeScreenShare();
     roomid_.clear();
     callParty_.clear();
     callPartyDisplayName_.clear();
@@ -810,9 +848,27 @@ CallManager::stopRingtone()
     player_.setPlaylist(nullptr);
 }
+bool
+CallManager::screenShareReady() const
+{
+#ifdef GSTREAMER_AVAILABLE
+    if (screenShareType_ == ScreenShareType::X11) {
+        return true;
+    } else {
+        return ScreenCastPortal::instance().ready();
+    }
+#else
+    return false;
+#endif
+}
 QStringList
 CallManager::windowList()
 {
+    if (!screenShareX11Available()) {
+        return {};
+    }
     windows_.clear();
     windows_.push_back({tr("Entire screen"), 0});
@@ -880,26 +936,52 @@ namespace {
 GstElement *pipe_ = nullptr;
 unsigned int busWatchId_ = 0;
+void
+close_preview_stream()
+{
+    if (pipe_) {
+        gst_element_set_state(GST_ELEMENT(pipe_), GST_STATE_NULL);
+        gst_object_unref(pipe_);
+        pipe_ = nullptr;
+    }
+    if (busWatchId_) {
+        g_source_remove(busWatchId_);
+        busWatchId_ = 0;
+    }
+}
 gboolean
 newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer G_GNUC_UNUSED)
 {
     switch (GST_MESSAGE_TYPE(msg)) {
     case GST_MESSAGE_EOS:
-        if (pipe_) {
-            gst_element_set_state(GST_ELEMENT(pipe_), GST_STATE_NULL);
-            gst_object_unref(pipe_);
-            pipe_ = nullptr;
-        }
-        if (busWatchId_) {
-            g_source_remove(busWatchId_);
-            busWatchId_ = 0;
-        }
+        close_preview_stream();
         break;
+    case GST_MESSAGE_ERROR: {
+        GError *err = nullptr;
+        gchar *dbg_info = nullptr;
+        gst_message_parse_error(msg, &err, &dbg_info);
+        nhlog::ui()->error("GST error: {}", dbg_info);
+        g_error_free(err);
+        g_free(dbg_info);
+        close_preview_stream();
+        break;
+    }
     default:
         break;
     }
     return TRUE;
 }
+static GstElement *
+make_preview_sink()
+{
+    if (QGuiApplication::platformName() == QStringLiteral("wayland")) {
+        return gst_element_factory_make("waylandsink", nullptr);
+    } else {
+        return gst_element_factory_make("ximagesink", nullptr);
+    }
+}
 }
 #endif
@@ -907,38 +989,81 @@
 void
 CallManager::previewWindow(unsigned int index) const
 {
 #ifdef GSTREAMER_AVAILABLE
-    if (windows_.empty() || index >= windows_.size() || !gst_is_initialized())
+    if (!gst_is_initialized())
         return;
-    GstElement *ximagesrc = gst_element_factory_make("ximagesrc", nullptr);
-    if (!ximagesrc) {
-        nhlog::ui()->error("Failed to create ximagesrc");
+    if (pipe_ != nullptr) {
+        nhlog::ui()->warn("Preview already started");
         return;
     }
+    if (screenShareType_ == ScreenShareType::X11 &&
+        (!screenShareX11Available() || windows_.empty() || index >= windows_.size())) {
+        nhlog::ui()->error("X11 screencast not available");
+        return;
+    }
+    auto settings = ChatPage::instance()->userSettings();
+    pipe_ = gst_pipeline_new(nullptr);
     GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
     GstElement *videoscale = gst_element_factory_make("videoscale", nullptr);
     GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);
-    GstElement *ximagesink = gst_element_factory_make("ximagesink", nullptr);
-    g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
-    g_object_set(ximagesrc, "show-pointer", FALSE, nullptr);
-    g_object_set(ximagesrc, "xid", windows_[index].second, nullptr);
+    GstElement *preview_sink = make_preview_sink();
+    GstElement *videorate = gst_element_factory_make("videorate", nullptr);
+    gst_bin_add_many(
+      GST_BIN(pipe_), videorate, videoconvert, videoscale, capsfilter, preview_sink, nullptr);
     GstCaps *caps = gst_caps_new_simple(
-      "video/x-raw", "width", G_TYPE_INT, 480, "height", G_TYPE_INT, 360, nullptr);
+      "video/x-raw", "framerate", GST_TYPE_FRACTION, settings->screenShareFrameRate(), 1, nullptr);
     g_object_set(capsfilter, "caps", caps, nullptr);
     gst_caps_unref(caps);
-    pipe_ = gst_pipeline_new(nullptr);
-    gst_bin_add_many(
-      GST_BIN(pipe_), ximagesrc, videoconvert, videoscale, capsfilter, ximagesink, nullptr);
+    GstElement *screencastsrc = nullptr;
+    if (screenShareType_ == ScreenShareType::X11) {
+        GstElement *ximagesrc = gst_element_factory_make("ximagesrc", nullptr);
+        if (!ximagesrc) {
+            nhlog::ui()->error("Failed to create ximagesrc");
+            gst_object_unref(pipe_);
+            pipe_ = nullptr;
+            return;
+        }
+        g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
+        g_object_set(ximagesrc, "xid", windows_[index].second, nullptr);
+        g_object_set(ximagesrc, "show-pointer", !settings->screenShareHideCursor(), nullptr);
+        g_object_set(ximagesrc, "do-timestamp", (gboolean)1, nullptr);
+        gst_bin_add(GST_BIN(pipe_), ximagesrc);
+        screencastsrc = ximagesrc;
+    } else {
+        ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
+        const ScreenCastPortal::Stream *stream = sc_portal.getStream();
+        if (stream == nullptr) {
+            nhlog::ui()->error("xdg-desktop-portal stream not started");
+            gst_object_unref(pipe_);
+            pipe_ = nullptr;
+            return;
+        }
+        GstElement *pipewiresrc = gst_element_factory_make("pipewiresrc", nullptr);
+        g_object_set(pipewiresrc, "fd", (gint)stream->fd, nullptr);
+        std::string path = std::to_string(stream->nodeId);
+        g_object_set(pipewiresrc, "path", path.c_str(), nullptr);
+        g_object_set(pipewiresrc, "do-timestamp", (gboolean)1, nullptr);
+        gst_bin_add(GST_BIN(pipe_), pipewiresrc);
+        screencastsrc = pipewiresrc;
+    }
     if (!gst_element_link_many(
-          ximagesrc, videoconvert, videoscale, capsfilter, ximagesink, nullptr)) {
+          screencastsrc, videorate, videoconvert, videoscale, capsfilter, preview_sink, nullptr)) {
         nhlog::ui()->error("Failed to link preview window elements");
         gst_object_unref(pipe_);
         pipe_ = nullptr;
         return;
     }
     if (gst_element_set_state(pipe_, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
         nhlog::ui()->error("Unable to start preview pipeline");
         gst_object_unref(pipe_);
@@ -954,6 +1079,40 @@ CallManager::previewWindow(unsigned int index) const
 #endif
 }
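For orientation, the preview pipeline assembled above reduces to a short source-to-sink chain. Below is a rough, hypothetical gst_parse_launch() equivalent of the xdg-desktop-portal branch; the fd, node id and frame rate are placeholders, and the commit itself builds the pipeline element by element rather than from a description string.

    // Illustrative sketch only, not part of this commit. Assumes GStreamer is initialised
    // and that a PipeWire fd/node were already obtained from ScreenCastPortal.
    GError *error = nullptr;
    GstElement *preview = gst_parse_launch(
      "pipewiresrc fd=42 path=55 do-timestamp=true ! videorate ! videoconvert ! "
      "videoscale ! video/x-raw,framerate=30/1 ! waylandsink",
      &error);
    if (error) {
        nhlog::ui()->error("Failed to build preview pipeline: {}", error->message);
        g_error_free(error);
    }
    if (preview)
        gst_element_set_state(preview, GST_STATE_PLAYING);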
+void
+CallManager::setupScreenShareXDP()
+{
+#ifdef GSTREAMER_AVAILABLE
+    ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
+    sc_portal.init();
+    screenShareType_ = ScreenShareType::XDP;
+#endif
+}
+void
+CallManager::setScreenShareType(webrtc::ScreenShareType screenShareType)
+{
+#ifdef GSTREAMER_AVAILABLE
+    closeScreenShare();
+    screenShareType_ = screenShareType;
+    emit screenShareChanged();
+#else
+    (void)screenShareType;
+#endif
+}
+void
+CallManager::closeScreenShare()
+{
+#ifdef GSTREAMER_AVAILABLE
+    close_preview_stream();
+    if (!isOnCall()) {
+        ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
+        sc_portal.close();
+    }
+#endif
+}
 namespace {
 std::vector<std::string>
 getTurnURIs(const mtx::responses::TurnServer &turnServer)
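Putting the QML and CallManager changes together, the intended flow for an xdg-desktop-portal share is: select the XDP share type, request a portal stream, wait for screenShareChanged to report readiness, then place the screen-share call. A hypothetical driver of this API (the callManager and roomId names are illustrative, not from the commit) could look like:

    // Hypothetical usage sketch, not part of this commit.
    QObject::connect(&callManager, &CallManager::screenShareChanged, [&]() {
        if (callManager.screenShareReady())
            callManager.sendInvite(roomId, webrtc::CallType::SCREEN); // windowIndex is unused for XDP
    });
    callManager.setScreenShareType(webrtc::ScreenShareType::XDP);
    callManager.setupScreenShareXDP(); // asks the portal for a PipeWire stream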


@@ -17,6 +17,7 @@
 #include "WebRTCSession.h"
 #include "mtx/events/collections.hpp"
 #include "mtx/events/voip.hpp"
+#include "voip/ScreenCastPortal.h"
 #include <mtxclient/utils.hpp>
 namespace mtx::responses {
@@ -32,6 +33,8 @@ class CallManager final : public QObject
     Q_PROPERTY(bool isOnCall READ isOnCall NOTIFY newCallState)
     Q_PROPERTY(bool isOnCallOnOtherDevice READ isOnCallOnOtherDevice NOTIFY newCallDeviceState)
     Q_PROPERTY(webrtc::CallType callType READ callType NOTIFY newInviteState)
+    Q_PROPERTY(
+      webrtc::ScreenShareType screenShareType READ screenShareType NOTIFY screenShareChanged)
     Q_PROPERTY(webrtc::State callState READ callState NOTIFY newCallState)
     Q_PROPERTY(QString callParty READ callParty NOTIFY newInviteState)
     Q_PROPERTY(QString callPartyDisplayName READ callPartyDisplayName NOTIFY newInviteState)
@@ -41,7 +44,8 @@ class CallManager final : public QObject
     Q_PROPERTY(QStringList mics READ mics NOTIFY devicesChanged)
     Q_PROPERTY(QStringList cameras READ cameras NOTIFY devicesChanged)
     Q_PROPERTY(bool callsSupported READ callsSupported CONSTANT)
-    Q_PROPERTY(bool screenShareSupported READ screenShareSupported CONSTANT)
+    Q_PROPERTY(bool screenShareX11Available READ screenShareX11Available CONSTANT)
+    Q_PROPERTY(bool screenShareReady READ screenShareReady NOTIFY screenShareChanged)
 public:
     CallManager(QObject *);
@@ -51,6 +55,7 @@ public:
     bool isOnCallOnOtherDevice() const { return (isOnCallOnOtherDevice_ != ""); }
     bool checkSharesRoom(QString roomid_, std::string invitee) const;
     webrtc::CallType callType() const { return callType_; }
+    webrtc::ScreenShareType screenShareType() const { return screenShareType_; }
     webrtc::State callState() const { return session_.state(); }
     QString callParty() const { return callParty_; }
     QString callPartyDisplayName() const { return callPartyDisplayName_; }
@@ -60,9 +65,10 @@ public:
     QStringList mics() const { return devices(false); }
     QStringList cameras() const { return devices(true); }
     void refreshTurnServer();
+    bool screenShareReady() const;
     static bool callsSupported();
-    static bool screenShareSupported();
+    static bool screenShareX11Available();
 public slots:
     void sendInvite(const QString &roomid, webrtc::CallType, unsigned int windowIndex = 0);
@@ -73,6 +79,9 @@ public slots:
     void hangUp(
       mtx::events::voip::CallHangUp::Reason = mtx::events::voip::CallHangUp::Reason::UserHangUp);
     void rejectInvite();
+    void setupScreenShareXDP();
+    void setScreenShareType(webrtc::ScreenShareType);
+    void closeScreenShare();
     QStringList windowList();
     void previewWindow(unsigned int windowIndex) const;
@@ -90,6 +99,7 @@ signals:
     void micMuteChanged();
     void devicesChanged();
     void turnServerRetrieved(const mtx::responses::TurnServer &);
+    void screenShareChanged();
 private slots:
     void retrieveTurnServer();
@@ -102,14 +112,15 @@ private:
     QString callPartyAvatarUrl_;
     std::string callPartyVersion_ = "1";
     std::string callid_;
     std::string partyid_ = mtx::client::utils::random_token(8, false);
     std::string selectedpartyid_ = "";
     std::string invitee_ = "";
     const uint32_t timeoutms_ = 120000;
     webrtc::CallType callType_ = webrtc::CallType::VOICE;
+    webrtc::ScreenShareType screenShareType_ = webrtc::ScreenShareType::X11;
     bool haveCallInvite_ = false;
     bool answerSelected_ = false;
     std::string isOnCallOnOtherDevice_ = "";
     std::string inviteSDP_;
     std::vector<mtx::events::voip::CallCandidates::Candidate> remoteICECandidates_;
     std::vector<std::string> turnURIs_;


@@ -0,0 +1,471 @@
#ifdef GSTREAMER_AVAILABLE
#include "ScreenCastPortal.h"
#include "ChatPage.h"
#include "Logging.h"
#include "UserSettingsPage.h"
#include <QDBusConnection>
#include <QDBusMessage>
#include <QDBusPendingCallWatcher>
#include <QDBusPendingReply>
#include <QDBusUnixFileDescriptor>
#include <random>
static QString
make_token()
{
thread_local std::random_device rng;
std::uniform_int_distribution<char> index_dist(0, 9);
std::string token;
token.reserve(5 + 64);
token += "nheko";
for (uint8_t i = 0; i < 64; ++i)
token.push_back('0' + index_dist(rng));
return QString::fromStdString(std::move(token));
}
static QString
handle_path(QString handle_token)
{
QString sender = QDBusConnection::sessionBus().baseService();
if (sender[0] == ':')
sender.remove(0, 1);
sender.replace(".", "_");
return QStringLiteral("/org/freedesktop/portal/desktop/request/") + sender +
QStringLiteral("/") + handle_token;
}
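As a worked example of the two helpers above (the unique bus name here is hypothetical): a sender of ":1.42" and a token "nheko1234" yield the Request object path on which the portal will emit its Response signal.

    // Standalone illustration, not part of this commit; ":1.42" stands in for
    // QDBusConnection::sessionBus().baseService().
    QString sender = QStringLiteral(":1.42");
    sender.remove(0, 1);
    sender.replace(".", "_");
    QString path = QStringLiteral("/org/freedesktop/portal/desktop/request/") + sender +
                   QStringLiteral("/") + QStringLiteral("nheko1234");
    // path == "/org/freedesktop/portal/desktop/request/1_42/nheko1234"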
void
ScreenCastPortal::init()
{
switch (state) {
case State::Closed:
state = State::Starting;
createSession();
break;
case State::Starting:
nhlog::ui()->warn("ScreenCastPortal already starting");
break;
case State::Started:
close(true);
break;
case State::Closing:
nhlog::ui()->warn("ScreenCastPortal still closing");
break;
}
}
const ScreenCastPortal::Stream *
ScreenCastPortal::getStream() const
{
if (state != State::Started)
return nullptr;
else
return &stream;
}
bool
ScreenCastPortal::ready() const
{
return state == State::Started;
}
void
ScreenCastPortal::close(bool reinit)
{
switch (state) {
case State::Closed:
if (reinit)
init();
break;
case State::Starting:
if (!reinit) {
// Remaining handler will abort.
state = State::Closed;
}
break;
case State::Started: {
state = State::Closing;
emit readyChanged();
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
sessionHandle.path(),
QStringLiteral("org.freedesktop.portal.Session"),
QStringLiteral("Close"));
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(watcher,
&QDBusPendingCallWatcher::finished,
this,
[this, reinit](QDBusPendingCallWatcher *self) {
QDBusPendingReply reply = *self;
if (!reply.isValid()) {
nhlog::ui()->warn("org.freedesktop.portal.ScreenCast (Close): {}",
reply.error().message().toStdString());
}
state = State::Closed;
if (reinit)
init();
});
} break;
case State::Closing:
nhlog::ui()->warn("ScreenCastPortal already closing");
break;
}
}
void
ScreenCastPortal::closedHandler(uint response, const QVariantMap &)
{
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (Closed): {}", response);
}
nhlog::ui()->debug("org.freedesktop.portal.ScreenCast: Connection closed");
state = State::Closed;
emit readyChanged();
}
void
ScreenCastPortal::createSession()
{
// Connect before sending the request to avoid missing the reply
QString handle_token = make_token();
QDBusConnection::sessionBus().connect(QStringLiteral("org.freedesktop.portal.Desktop"),
handle_path(handle_token),
QStringLiteral("org.freedesktop.portal.Request"),
QStringLiteral("Response"),
this,
SLOT(createSessionHandler(uint, QVariantMap)));
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("CreateSession"));
msg << QVariantMap{{QStringLiteral("handle_token"), handle_token},
{QStringLiteral("session_handle_token"), make_token()}};
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusObjectPath> reply = *self;
self->deleteLater();
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (CreateSession): {}",
reply.error().message().toStdString());
close();
}
});
}
void
ScreenCastPortal::createSessionHandler(uint response, const QVariantMap &results)
{
switch (state) {
case State::Closed:
nhlog::ui()->warn("ScreenCastPortal not starting");
break;
case State::Starting: {
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (CreateSession Response): {}",
response);
close();
return;
}
sessionHandle = QDBusObjectPath(results.value(QStringLiteral("session_handle")).toString());
nhlog::ui()->debug("org.freedesktop.portal.ScreenCast: sessionHandle = {}",
sessionHandle.path().toStdString());
getAvailableSourceTypes();
} break;
case State::Started:
nhlog::ui()->warn("ScreenCastPortal already started");
break;
case State::Closing:
break;
}
}
void
ScreenCastPortal::getAvailableSourceTypes()
{
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.DBus.Properties"),
QStringLiteral("Get"));
msg << QStringLiteral("org.freedesktop.portal.ScreenCast")
<< QStringLiteral("AvailableSourceTypes");
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusVariant> reply = *self;
self->deleteLater();
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.DBus.Properties (Get AvailableSourceTypes): {}",
reply.error().message().toStdString());
close();
return;
}
switch (state) {
case State::Closed:
nhlog::ui()->warn("ScreenCastPortal not starting");
break;
case State::Starting: {
const auto &value = reply.value().variant();
if (value.canConvert<uint>()) {
availableSourceTypes = value.value<uint>();
} else {
nhlog::ui()->error("Invalid reply from org.freedesktop.DBus.Properties (Get "
"AvailableSourceTypes)");
close();
return;
}
getAvailableCursorModes();
} break;
case State::Started:
nhlog::ui()->warn("ScreenCastPortal already started");
break;
case State::Closing:
break;
}
});
}
void
ScreenCastPortal::getAvailableCursorModes()
{
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.DBus.Properties"),
QStringLiteral("Get"));
msg << QStringLiteral("org.freedesktop.portal.ScreenCast")
<< QStringLiteral("AvailableCursorModes");
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusVariant> reply = *self;
self->deleteLater();
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.DBus.Properties (Get AvailableCursorModes): {}",
reply.error().message().toStdString());
close();
return;
}
switch (state) {
case State::Closed:
nhlog::ui()->warn("ScreenCastPortal not starting");
break;
case State::Starting: {
const auto &value = reply.value().variant();
if (value.canConvert<uint>()) {
availableCursorModes = value.value<uint>();
} else {
nhlog::ui()->error("Invalid reply from org.freedesktop.DBus.Properties (Get "
"AvailableCursorModes)");
close();
return;
}
selectSources();
} break;
case State::Started:
nhlog::ui()->warn("ScreenCastPortal already started");
break;
case State::Closing:
break;
}
});
}
void
ScreenCastPortal::selectSources()
{
// Connect before sending the request to avoid missing the reply
auto handle_token = make_token();
QDBusConnection::sessionBus().connect(QString(),
handle_path(handle_token),
QStringLiteral("org.freedesktop.portal.Request"),
QStringLiteral("Response"),
this,
SLOT(selectSourcesHandler(uint, QVariantMap)));
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("SelectSources"));
QVariantMap options{{QStringLiteral("multiple"), false},
{QStringLiteral("types"), availableSourceTypes},
{QStringLiteral("handle_token"), handle_token}};
auto settings = ChatPage::instance()->userSettings();
if (settings->screenShareHideCursor() && (availableCursorModes & (uint)1) != 0) {
options["cursor_mode"] = (uint)1;
}
msg << QVariant::fromValue(sessionHandle) << options;
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusObjectPath> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (SelectSources): {}",
reply.error().message().toStdString());
close();
}
});
}
void
ScreenCastPortal::selectSourcesHandler(uint response, const QVariantMap &)
{
switch (state) {
case State::Closed:
nhlog::ui()->warn("ScreenCastPortal not starting");
break;
case State::Starting: {
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (SelectSources Response): {}",
response);
close();
return;
}
start();
} break;
case State::Started:
nhlog::ui()->warn("ScreenCastPortal already started");
break;
case State::Closing:
break;
}
}
void
ScreenCastPortal::start()
{
// Connect before sending the request to avoid missing the reply
auto handle_token = make_token();
QDBusConnection::sessionBus().connect(QString(),
handle_path(handle_token),
QStringLiteral("org.freedesktop.portal.Request"),
QStringLiteral("Response"),
this,
SLOT(startHandler(uint, QVariantMap)));
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("Start"));
msg << QVariant::fromValue(sessionHandle) << QString()
<< QVariantMap{{QStringLiteral("handle_token"), handle_token}};
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusObjectPath> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (Start): {}",
reply.error().message().toStdString());
} else {
}
});
}
struct PipeWireStream
{
quint32 nodeId = 0;
QVariantMap map;
};
Q_DECLARE_METATYPE(PipeWireStream)
const QDBusArgument &
operator>>(const QDBusArgument &argument, PipeWireStream &stream)
{
argument.beginStructure();
argument >> stream.nodeId;
argument.beginMap();
while (!argument.atEnd()) {
QString key;
QVariant map;
argument.beginMapEntry();
argument >> key >> map;
argument.endMapEntry();
stream.map.insert(key, map);
}
argument.endMap();
argument.endStructure();
return argument;
}
void
ScreenCastPortal::startHandler(uint response, const QVariantMap &results)
{
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (Start Response): {}", response);
close();
return;
}
QVector<PipeWireStream> streams =
qdbus_cast<QVector<PipeWireStream>>(results.value(QStringLiteral("streams")));
if (streams.size() == 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast: No stream was returned");
close();
return;
}
stream.nodeId = streams[0].nodeId;
nhlog::ui()->debug("org.freedesktop.portal.ScreenCast: nodeId = {}", stream.nodeId);
openPipeWireRemote();
}
void
ScreenCastPortal::openPipeWireRemote()
{
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("OpenPipeWireRemote"));
msg << QVariant::fromValue(sessionHandle) << QVariantMap{};
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusUnixFileDescriptor> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (OpenPipeWireRemote): {}",
reply.error().message().toStdString());
close();
} else {
stream.fd = reply.value().fileDescriptor();
nhlog::ui()->debug("org.freedesktop.portal.ScreenCast: fd = {}", stream.fd);
state = State::Started;
emit readyChanged();
}
});
}
#endif


@@ -0,0 +1,66 @@
#pragma once
#ifdef GSTREAMER_AVAILABLE
#include <QDBusConnection>
#include <QDBusMessage>
#include <QDBusPendingCallWatcher>
#include <QDBusPendingReply>
#include <QDBusUnixFileDescriptor>
#include <QObject>
class ScreenCastPortal final : public QObject
{
Q_OBJECT
public:
struct Stream
{
int fd;
quint32 nodeId;
};
static ScreenCastPortal &instance()
{
static ScreenCastPortal instance;
return instance;
}
void init();
const Stream *getStream() const;
bool ready() const;
void close(bool reinit = false);
public slots:
void createSessionHandler(uint response, const QVariantMap &results);
void closedHandler(uint response, const QVariantMap &results);
void selectSourcesHandler(uint response, const QVariantMap &results);
void startHandler(uint response, const QVariantMap &results);
signals:
void readyChanged();
private:
void createSession();
void getAvailableSourceTypes();
void getAvailableCursorModes();
void selectSources();
void start();
void openPipeWireRemote();
QDBusObjectPath sessionHandle;
uint availableSourceTypes;
uint availableCursorModes;
Stream stream;
enum class State
{
Closed,
Starting,
Started,
Closing,
};
State state = State::Closed;
};
#endif
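To make the lifecycle concrete, a minimal consumer of this class might look like the sketch below (assumptions: GSTREAMER_AVAILABLE is defined, a Qt event loop is running, and error handling is omitted). It mirrors how CallManager drives the portal elsewhere in this commit.

    // Minimal usage sketch, not part of this commit.
    ScreenCastPortal &portal = ScreenCastPortal::instance();
    QObject::connect(&portal, &ScreenCastPortal::readyChanged, [&portal]() {
        if (const ScreenCastPortal::Stream *stream = portal.getStream())
            nhlog::ui()->debug("portal stream ready: fd = {}, node = {}", stream->fd, stream->nodeId);
    });
    portal.init();  // CreateSession -> SelectSources -> Start -> OpenPipeWireRemote
    // ... later, when the share ends:
    portal.close();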


@@ -2,6 +2,7 @@
 //
 // SPDX-License-Identifier: GPL-3.0-or-later
+#include <QGuiApplication>
 #include <QQmlEngine>
 #include <QQuickItem>
 #include <algorithm>
@@ -19,6 +20,7 @@
 #include "Logging.h"
 #include "UserSettingsPage.h"
 #include "WebRTCSession.h"
+#include "voip/ScreenCastPortal.h"
 #ifdef GSTREAMER_AVAILABLE
 extern "C"
@@ -40,9 +42,11 @@ extern "C"
 #define STUN_SERVER "stun://turn.matrix.org:3478"
 Q_DECLARE_METATYPE(webrtc::CallType)
+Q_DECLARE_METATYPE(webrtc::ScreenShareType)
 Q_DECLARE_METATYPE(webrtc::State)
 using webrtc::CallType;
+using webrtc::ScreenShareType;
 using webrtc::State;
 WebRTCSession::WebRTCSession()
@@ -56,6 +60,14 @@ WebRTCSession::WebRTCSession()
       "CallType",
       QStringLiteral("Can't instantiate enum"));
+    qRegisterMetaType<webrtc::ScreenShareType>();
+    qmlRegisterUncreatableMetaObject(webrtc::staticMetaObject,
+                                     "im.nheko",
+                                     1,
+                                     0,
+                                     "ScreenShareType",
+                                     QStringLiteral("Can't instantiate enum"));
     qRegisterMetaType<webrtc::State>();
     qmlRegisterUncreatableMetaObject(webrtc::staticMetaObject,
       "im.nheko",
@@ -578,13 +590,13 @@ getMediaAttributes(const GstSDPMessage *sdp,
 }
 bool
-WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *errorMessage)
+WebRTCSession::havePlugins(bool isVideo,
+                           bool isScreenshare,
+                           ScreenShareType screenShareType,
+                           std::string *errorMessage)
 {
     if (!initialised_ && !init(errorMessage))
         return false;
-    if (haveVoicePlugins_ && (!isVideo || haveVideoPlugins_) &&
-        (!isX11Screenshare || haveX11ScreensharePlugins_))
-        return true;
     static constexpr std::initializer_list<const char *> audio_elements = {
       "audioconvert",
@@ -611,10 +623,6 @@ WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *err
       "videoscale",
       "vp8enc",
     };
-    static constexpr std::initializer_list<const char *> screenshare_elements = {
-      "ximagesink",
-      "ximagesrc",
-    };
     std::string strError("Missing GStreamer elements: ");
     GstRegistry *registry = gst_registry_get();
@@ -641,18 +649,35 @@ WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *err
     // check both elements at once
     if (isVideo)
         haveVideoPlugins_ = check_plugins(video_elements);
-    if (isX11Screenshare)
-        haveX11ScreensharePlugins_ = check_plugins(screenshare_elements);
+    bool haveScreensharePlugins = false;
+    if (isScreenshare) {
+        haveScreensharePlugins = check_plugins({"videorate"});
+        if (haveScreensharePlugins) {
+            if (QGuiApplication::platformName() == QStringLiteral("wayland")) {
+                haveScreensharePlugins = check_plugins({"waylandsink"});
+            } else {
+                haveScreensharePlugins = check_plugins({"ximagesink"});
+            }
+        }
+        if (haveScreensharePlugins) {
+            if (screenShareType == ScreenShareType::X11) {
+                haveScreensharePlugins = check_plugins({"ximagesrc"});
+            } else {
+                haveScreensharePlugins = check_plugins({"pipewiresrc"});
+            }
+        }
+    }
     if (!haveVoicePlugins_ || (isVideo && !haveVideoPlugins_) ||
-        (isX11Screenshare && !haveX11ScreensharePlugins_)) {
+        (isScreenshare && !haveScreensharePlugins)) {
         nhlog::ui()->error(strError);
         if (errorMessage)
             *errorMessage = strError;
         return false;
     }
-    if (isVideo || isX11Screenshare) {
+    if (isVideo || isScreenshare) {
         // load qmlglsink to register GStreamer's GstGLVideoItem QML type
         GstElement *qmlglsink = gst_element_factory_make("qmlglsink", nullptr);
         gst_object_unref(qmlglsink);
@@ -661,12 +686,15 @@ WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *err
 }
 bool
-WebRTCSession::createOffer(CallType callType, uint32_t shareWindowId)
+WebRTCSession::createOffer(CallType callType,
+                           ScreenShareType screenShareType,
+                           uint32_t shareWindowId)
 {
     clear();
     isOffering_ = true;
     callType_ = callType;
+    screenShareType_ = screenShareType;
     shareWindowId_ = shareWindowId;
     // opus and vp8 rtp payload types must be defined dynamically
     // therefore from the range [96-127]
@@ -924,6 +952,7 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
     GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
     GstElement *tee = gst_element_factory_make("tee", "videosrctee");
     gst_bin_add_many(GST_BIN(pipe_), videoconvert, tee, nullptr);
+
     if (callType_ == CallType::VIDEO || (settings->screenSharePiP() && devices_.haveCamera())) {
         std::pair<int, int> resolution;
         std::pair<int, int> frameRate;
@@ -969,16 +998,56 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
     nhlog::ui()->debug("WebRTC: screen share hide mouse cursor: {}",
                        settings->screenShareHideCursor());
-    GstElement *ximagesrc = gst_element_factory_make("ximagesrc", "screenshare");
-    if (!ximagesrc) {
-        nhlog::ui()->error("WebRTC: failed to create ximagesrc");
-        return false;
-    }
-    g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
-    g_object_set(ximagesrc, "xid", shareWindowId_, nullptr);
-    g_object_set(ximagesrc, "show-pointer", !settings->screenShareHideCursor(), nullptr);
+    GstElement *screencastsrc = nullptr;
+    if (screenShareType_ == ScreenShareType::X11) {
+        GstElement *ximagesrc = gst_element_factory_make("ximagesrc", "screenshare");
+        if (!ximagesrc) {
+            nhlog::ui()->error("WebRTC: failed to create ximagesrc");
+            return false;
+        }
+        g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
+        g_object_set(ximagesrc, "xid", shareWindowId_, nullptr);
+        g_object_set(ximagesrc, "show-pointer", !settings->screenShareHideCursor(), nullptr);
+        g_object_set(ximagesrc, "do-timestamp", (gboolean)1, nullptr);
+        gst_bin_add(GST_BIN(pipe_), ximagesrc);
+        screencastsrc = ximagesrc;
+    } else {
+        ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
+        GstElement *pipewiresrc = gst_element_factory_make("pipewiresrc", "screenshare");
+        if (!pipewiresrc) {
+            nhlog::ui()->error("WebRTC: failed to create pipewiresrc");
+            gst_object_unref(pipe_);
+            pipe_ = nullptr;
+            return false;
+        }
+        const ScreenCastPortal::Stream *stream = sc_portal.getStream();
+        if (stream == nullptr) {
+            nhlog::ui()->error("xdg-desktop-portal stream not started");
+            gst_object_unref(pipe_);
+            pipe_ = nullptr;
+            return false;
+        }
+        g_object_set(pipewiresrc, "fd", (gint)stream->fd, nullptr);
+        std::string path = std::to_string(stream->nodeId);
+        g_object_set(pipewiresrc, "path", path.c_str(), nullptr);
+        g_object_set(pipewiresrc, "do-timestamp", (gboolean)1, nullptr);
+        gst_bin_add(GST_BIN(pipe_), pipewiresrc);
+        GstElement *videorate = gst_element_factory_make("videorate", nullptr);
+        gst_bin_add(GST_BIN(pipe_), videorate);
+        if (!gst_element_link(pipewiresrc, videorate)) {
+            nhlog::ui()->error("WebRTC: failed to link pipewiresrc -> videorate");
+            return false;
+        }
+        screencastsrc = videorate;
+    }
     GstCaps *caps = gst_caps_new_simple("video/x-raw",
+                                        "format",
+                                        G_TYPE_STRING,
+                                        "I420", // For vp8enc
                                         "framerate",
                                         GST_TYPE_FRACTION,
                                         settings->screenShareFrameRate(),
@@ -987,13 +1056,13 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
     GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);
     g_object_set(capsfilter, "caps", caps, nullptr);
     gst_caps_unref(caps);
-    gst_bin_add_many(GST_BIN(pipe_), ximagesrc, capsfilter, nullptr);
+    gst_bin_add(GST_BIN(pipe_), capsfilter);
     if (settings->screenSharePiP() && devices_.haveCamera()) {
         GstElement *compositor = gst_element_factory_make("compositor", nullptr);
         g_object_set(compositor, "background", 1, nullptr);
         gst_bin_add(GST_BIN(pipe_), compositor);
-        if (!gst_element_link_many(ximagesrc, compositor, capsfilter, tee, nullptr)) {
+        if (!gst_element_link_many(screencastsrc, compositor, capsfilter, tee, nullptr)) {
             nhlog::ui()->error("WebRTC: failed to link screen share elements");
             return false;
         }
@@ -1006,7 +1075,7 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
             return false;
         }
         gst_object_unref(srcpad);
-    } else if (!gst_element_link_many(ximagesrc, videoconvert, capsfilter, tee, nullptr)) {
+    } else if (!gst_element_link_many(screencastsrc, videoconvert, capsfilter, tee, nullptr)) {
         nhlog::ui()->error("WebRTC: failed to link screen share elements");
         return false;
     }
@@ -1157,7 +1226,7 @@ WebRTCSession::end()
 #else
 bool
-WebRTCSession::havePlugins(bool, bool, std::string *)
+WebRTCSession::havePlugins(bool, bool, ScreenShareType, std::string *)
 {
     return false;
 }
@@ -1171,8 +1240,11 @@ WebRTCSession::haveLocalPiP() const
 // clang-format off
 // clang-format < 12 is buggy on this
 bool
-WebRTCSession::createOffer(webrtc::CallType, uint32_t)
+WebRTCSession::createOffer(webrtc::CallType,
+                           ScreenShareType screenShareType,
+                           uint32_t)
 {
+    (void)screenShareType;
     return false;
 }
 // clang-format on


@@ -26,6 +26,13 @@ enum class CallType
 };
 Q_ENUM_NS(CallType)
+
+enum class ScreenShareType
+{
+    X11,
+    XDP
+};
+Q_ENUM_NS(ScreenShareType)
 enum class State
 {
     DISCONNECTED,
@@ -52,7 +59,10 @@ public:
         return instance;
     }
-    bool havePlugins(bool isVideo, bool isX11Screenshare, std::string *errorMessage = nullptr);
+    bool havePlugins(bool isVideo,
+                     bool isScreenshare,
+                     webrtc::ScreenShareType screenShareType,
+                     std::string *errorMessage = nullptr);
     webrtc::CallType callType() const { return callType_; }
     webrtc::State state() const { return state_; }
     bool haveLocalPiP() const;
@@ -60,7 +70,7 @@ public:
     bool isRemoteVideoRecvOnly() const { return isRemoteVideoRecvOnly_; }
     bool isRemoteVideoSendOnly() const { return isRemoteVideoSendOnly_; }
-    bool createOffer(webrtc::CallType, uint32_t shareWindowId);
+    bool createOffer(webrtc::CallType, webrtc::ScreenShareType, uint32_t shareWindowId);
     bool acceptOffer(const std::string &sdp);
     bool acceptAnswer(const std::string &sdp);
     bool acceptNegotiation(const std::string &sdp);
@@ -91,19 +101,19 @@ private:
     WebRTCSession();
     CallDevices &devices_;
     bool initialised_ = false;
     bool haveVoicePlugins_ = false;
     bool haveVideoPlugins_ = false;
-    bool haveX11ScreensharePlugins_ = false;
     webrtc::CallType callType_ = webrtc::CallType::VOICE;
+    webrtc::ScreenShareType screenShareType_ = webrtc::ScreenShareType::X11;
     webrtc::State state_ = webrtc::State::DISCONNECTED;
     bool isOffering_ = false;
     bool isRemoteVideoRecvOnly_ = false;
     bool isRemoteVideoSendOnly_ = false;
     QQuickItem *videoItem_ = nullptr;
     GstElement *pipe_ = nullptr;
     GstElement *webrtc_ = nullptr;
     unsigned int busWatchId_ = 0;
     std::vector<std::string> turnServers_;
     uint32_t shareWindowId_ = 0;