Support screen sharing with xdg-desktop-portal

David Elsing 2023-03-11 14:36:51 +01:00
parent d916d6cd63
commit 1ba6a4d78d
9 changed files with 937 additions and 89 deletions
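
In short: when X11 capture is not available (e.g. on Wayland), screen sharing now goes through the org.freedesktop.portal.ScreenCast D-Bus interface. The new ScreenCastPortal class drives the portal session (CreateSession -> SelectSources -> Start -> OpenPipeWireRemote), which yields a PipeWire node id and a file descriptor; both the preview pipeline in CallManager and the outgoing video pipeline in WebRTCSession then consume that stream through a pipewiresrc element instead of ximagesrc. A minimal sketch of that hand-off, using the same property names as the code below (error handling omitted):

// Sketch only: feed the portal's PipeWire stream into a GStreamer source element.
const ScreenCastPortal::Stream *stream = ScreenCastPortal::instance().getStream();
GstElement *src = gst_element_factory_make("pipewiresrc", nullptr);
g_object_set(src, "fd", (gint)stream->fd, nullptr);        // fd from OpenPipeWireRemote
std::string path = std::to_string(stream->nodeId);         // node id from the Start response
g_object_set(src, "path", path.c_str(), nullptr);
g_object_set(src, "do-timestamp", (gboolean)1, nullptr);
// src is then linked through videorate/videoconvert into the preview or call pipeline.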

@@ -73,6 +73,7 @@ if (APPLE OR WIN32)
endif()
option(VOIP "Whether to enable voip support. Disable this if you don't have gstreamer." ${VOIP_DEFAULT})
cmake_dependent_option(SCREENSHARE_X11 "Whether to enable screenshare support on X11." ON "VOIP" OFF)
cmake_dependent_option(SCREENSHARE_XDP "Whether to enable screenshare support using xdg-desktop-portal." ON "VOIP" OFF)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake")
@@ -414,6 +415,9 @@ set(SRC_FILES
src/voip/CallDevices.h
src/voip/CallManager.cpp
src/voip/CallManager.h
src/voip/ScreenCastPortal.cpp
src/voip/ScreenCastPortal.h
src/voip/WebRTCSession.cpp
src/voip/WebRTCSession.h

@@ -103,7 +103,6 @@ Popup {
}
Button {
visible: CallManager.screenShareSupported
text: qsTr("Screen")
icon.source: "qrc:/icons/icons/ui/screen-share.svg"
onClicked: {

@@ -14,8 +14,15 @@ Popup {
anchors.centerIn: parent;
Component.onCompleted: {
if (CallManager.screenShareX11Available)
CallManager.setScreenShareType(ScreenShareType.X11);
else
CallManager.setScreenShareType(ScreenShareType.XDP);
frameRateCombo.currentIndex = frameRateCombo.find(Settings.screenShareFrameRate);
}
Component.onDestruction: {
CallManager.closeScreenShare();
}
palette: Nheko.colors
ColumnLayout {
@@ -29,6 +36,42 @@ Popup {
color: Nheko.colors.windowText
}
RowLayout {
Layout.leftMargin: 8
Layout.rightMargin: 8
Layout.bottomMargin: 8
Label {
Layout.alignment: Qt.AlignLeft
text: qsTr("Method:")
color: Nheko.colors.windowText
}
RadioButton {
id: screenshare_X11
text: qsTr("X11");
visible: CallManager.screenShareX11Available
checked: CallManager.screenShareX11Available
onToggled: {
if (screenshare_X11.checked)
CallManager.setScreenShareType(ScreenShareType.X11);
else
CallManager.setScreenShareType(ScreenShareType.XDP);
}
}
RadioButton {
id: screenshare_XDP
text: qsTr("xdg-desktop-portal");
checked: !CallManager.screenShareX11Available
onToggled: {
if (screenshare_XDP.checked)
CallManager.setScreenShareType(ScreenShareType.XDP);
else
CallManager.setScreenShareType(ScreenShareType.X11);
}
}
}
RowLayout {
Layout.leftMargin: 8
Layout.rightMargin: 8
@@ -41,12 +84,23 @@ Popup {
}
ComboBox {
visible: screenshare_X11.checked
id: windowCombo
Layout.fillWidth: true
model: CallManager.windowList()
}
Button {
visible: screenshare_XDP.checked
highlighted: !CallManager.screenShareReady
text: qsTr("Request screencast")
onClicked: {
Settings.screenShareHideCursor = hideCursorCheckBox.checked;
CallManager.setupScreenShareXDP();
}
}
}
RowLayout {
@@ -122,6 +176,7 @@ Popup {
}
Button {
visible: CallManager.screenShareReady
text: qsTr("Share")
icon.source: "qrc:/icons/icons/ui/screen-share.svg"
@@ -137,6 +192,7 @@ Popup {
}
Button {
visible: CallManager.screenShareReady
text: qsTr("Preview")
onClicked: {
CallManager.previewWindow(windowCombo.currentIndex);

@@ -9,6 +9,7 @@
#include <cstdlib>
#include <memory>
#include <QGuiApplication>
#include <QMediaPlaylist>
#include <QUrl>
@@ -22,6 +23,8 @@
#include "Utils.h"
#include "mtx/responses/turn_server.hpp"
#include "voip/ScreenCastPortal.h"
#include "voip/WebRTCSession.h"
/*
* Select Answer when one instance of the client supports v0
@@ -47,6 +50,7 @@ using namespace mtx::events;
using namespace mtx::events::voip;
using webrtc::CallType;
using webrtc::ScreenShareType;
//! Session Description Object
typedef RTCSessionDescriptionInit SDO;
@@ -64,6 +68,12 @@ CallManager::CallManager(QObject *parent)
qRegisterMetaType<mtx::events::voip::CallCandidates::Candidate>();
qRegisterMetaType<mtx::responses::TurnServer>();
if (screenShareX11Available()) {
screenShareType_ = ScreenShareType::X11;
} else {
screenShareType_ = ScreenShareType::XDP;
}
connect(
&session_,
&WebRTCSession::offerCreated,
@@ -176,6 +186,13 @@ CallManager::CallManager(QObject *parent)
break;
}
});
#ifdef GSTREAMER_AVAILABLE
connect(&ScreenCastPortal::instance(),
&ScreenCastPortal::readyChanged,
this,
&CallManager::screenShareChanged);
#endif
}
void
@@ -191,8 +208,10 @@ CallManager::sendInvite(const QString &roomid, CallType callType, unsigned int w
auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
std::string errorMessage;
if (!session_.havePlugins(
callType != CallType::VOICE, callType == CallType::SCREEN, &errorMessage)) {
if (!session_.havePlugins(callType != CallType::VOICE,
callType == CallType::SCREEN,
screenShareType_,
&errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
return;
}
@@ -212,14 +231,22 @@ CallManager::sendInvite(const QString &roomid, CallType callType, unsigned int w
return;
}
#ifdef GSTREAMER_AVAILABLE
if (callType == CallType::SCREEN) {
if (!screenShareSupported())
return;
if (windows_.empty() || windowIndex >= windows_.size()) {
nhlog::ui()->error("WebRTC: window index out of range");
return;
if (screenShareType_ == ScreenShareType::X11) {
if (windows_.empty() || windowIndex >= windows_.size()) {
nhlog::ui()->error("WebRTC: window index out of range");
return;
}
} else {
ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
if (sc_portal.getStream() == nullptr) {
nhlog::ui()->error("xdg-desktop-portal stream not started");
return;
}
}
}
#endif
if (haveCallInvite_) {
nhlog::ui()->debug(
@@ -255,8 +282,12 @@ CallManager::sendInvite(const QString &roomid, CallType callType, unsigned int w
invitee_ = callParty_.toStdString();
emit newInviteState();
playRingtone(QUrl(QStringLiteral("qrc:/media/media/ringback.ogg")), true);
if (!session_.createOffer(callType,
callType == CallType::SCREEN ? windows_[windowIndex].second : 0)) {
uint32_t shareWindowId =
callType == CallType::SCREEN && screenShareType_ == ScreenShareType::X11
? windows_[windowIndex].second
: 0;
if (!session_.createOffer(callType, screenShareType_, shareWindowId)) {
emit ChatPage::instance()->showNotification(QStringLiteral("Problem setting up call."));
endCall();
}
@@ -466,8 +497,10 @@ CallManager::acceptInvite()
stopRingtone();
std::string errorMessage;
if (!session_.havePlugins(
callType_ != CallType::VOICE, callType_ == CallType::SCREEN, &errorMessage)) {
if (!session_.havePlugins(callType_ != CallType::VOICE,
callType_ == CallType::SCREEN,
screenShareType_,
&errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
hangUp(CallHangUp::Reason::UserMediaFailed);
return;
@@ -713,9 +746,13 @@ CallManager::callsSupported()
}
bool
CallManager::screenShareSupported()
CallManager::screenShareX11Available()
{
return std::getenv("DISPLAY") && !std::getenv("WAYLAND_DISPLAY");
#ifdef GSTREAMER_AVAILABLE
return std::getenv("DISPLAY");
#else
return false;
#endif
}
QStringList
@@ -746,6 +783,7 @@ CallManager::generateCallID()
void
CallManager::clear(bool endAllCalls)
{
closeScreenShare();
roomid_.clear();
callParty_.clear();
callPartyDisplayName_.clear();
@@ -810,9 +848,27 @@ CallManager::stopRingtone()
player_.setPlaylist(nullptr);
}
bool
CallManager::screenShareReady() const
{
#ifdef GSTREAMER_AVAILABLE
if (screenShareType_ == ScreenShareType::X11) {
return true;
} else {
return ScreenCastPortal::instance().ready();
}
#else
return false;
#endif
}
QStringList
CallManager::windowList()
{
if (!screenShareX11Available()) {
return {};
}
windows_.clear();
windows_.push_back({tr("Entire screen"), 0});
@@ -880,26 +936,52 @@ namespace {
GstElement *pipe_ = nullptr;
unsigned int busWatchId_ = 0;
void
close_preview_stream()
{
if (pipe_) {
gst_element_set_state(GST_ELEMENT(pipe_), GST_STATE_NULL);
gst_object_unref(pipe_);
pipe_ = nullptr;
}
if (busWatchId_) {
g_source_remove(busWatchId_);
busWatchId_ = 0;
}
}
gboolean
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer G_GNUC_UNUSED)
{
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_EOS:
if (pipe_) {
gst_element_set_state(GST_ELEMENT(pipe_), GST_STATE_NULL);
gst_object_unref(pipe_);
pipe_ = nullptr;
}
if (busWatchId_) {
g_source_remove(busWatchId_);
busWatchId_ = 0;
}
close_preview_stream();
break;
case GST_MESSAGE_ERROR: {
GError *err = nullptr;
gchar *dbg_info = nullptr;
gst_message_parse_error(msg, &err, &dbg_info);
nhlog::ui()->error("GST error: {}", dbg_info);
g_error_free(err);
g_free(dbg_info);
close_preview_stream();
break;
}
default:
break;
}
return TRUE;
}
static GstElement *
make_preview_sink()
{
if (QGuiApplication::platformName() == QStringLiteral("wayland")) {
return gst_element_factory_make("waylandsink", nullptr);
} else {
return gst_element_factory_make("ximagesink", nullptr);
}
}
}
#endif
@@ -907,38 +989,81 @@ void
CallManager::previewWindow(unsigned int index) const
{
#ifdef GSTREAMER_AVAILABLE
if (windows_.empty() || index >= windows_.size() || !gst_is_initialized())
if (!gst_is_initialized())
return;
GstElement *ximagesrc = gst_element_factory_make("ximagesrc", nullptr);
if (!ximagesrc) {
nhlog::ui()->error("Failed to create ximagesrc");
if (pipe_ != nullptr) {
nhlog::ui()->warn("Preview already started");
return;
}
if (screenShareType_ == ScreenShareType::X11 &&
(!screenShareX11Available() || windows_.empty() || index >= windows_.size())) {
nhlog::ui()->error("X11 screencast not available");
return;
}
auto settings = ChatPage::instance()->userSettings();
pipe_ = gst_pipeline_new(nullptr);
GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
GstElement *videoscale = gst_element_factory_make("videoscale", nullptr);
GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);
GstElement *ximagesink = gst_element_factory_make("ximagesink", nullptr);
GstElement *preview_sink = make_preview_sink();
GstElement *videorate = gst_element_factory_make("videorate", nullptr);
g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
g_object_set(ximagesrc, "show-pointer", FALSE, nullptr);
g_object_set(ximagesrc, "xid", windows_[index].second, nullptr);
gst_bin_add_many(
GST_BIN(pipe_), videorate, videoconvert, videoscale, capsfilter, preview_sink, nullptr);
GstCaps *caps = gst_caps_new_simple(
"video/x-raw", "width", G_TYPE_INT, 480, "height", G_TYPE_INT, 360, nullptr);
"video/x-raw", "framerate", GST_TYPE_FRACTION, settings->screenShareFrameRate(), 1, nullptr);
g_object_set(capsfilter, "caps", caps, nullptr);
gst_caps_unref(caps);
pipe_ = gst_pipeline_new(nullptr);
gst_bin_add_many(
GST_BIN(pipe_), ximagesrc, videoconvert, videoscale, capsfilter, ximagesink, nullptr);
GstElement *screencastsrc = nullptr;
if (screenShareType_ == ScreenShareType::X11) {
GstElement *ximagesrc = gst_element_factory_make("ximagesrc", nullptr);
if (!ximagesrc) {
nhlog::ui()->error("Failed to create ximagesrc");
gst_object_unref(pipe_);
pipe_ = nullptr;
return;
}
g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
g_object_set(ximagesrc, "xid", windows_[index].second, nullptr);
g_object_set(ximagesrc, "show-pointer", !settings->screenShareHideCursor(), nullptr);
g_object_set(ximagesrc, "do-timestamp", (gboolean)1, nullptr);
gst_bin_add(GST_BIN(pipe_), ximagesrc);
screencastsrc = ximagesrc;
} else {
ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
const ScreenCastPortal::Stream *stream = sc_portal.getStream();
if (stream == nullptr) {
nhlog::ui()->error("xdg-desktop-portal stream not started");
gst_object_unref(pipe_);
pipe_ = nullptr;
return;
}
GstElement *pipewiresrc = gst_element_factory_make("pipewiresrc", nullptr);
g_object_set(pipewiresrc, "fd", (gint)stream->fd, nullptr);
std::string path = std::to_string(stream->nodeId);
g_object_set(pipewiresrc, "path", path.c_str(), nullptr);
g_object_set(pipewiresrc, "do-timestamp", (gboolean)1, nullptr);
gst_bin_add(GST_BIN(pipe_), pipewiresrc);
screencastsrc = pipewiresrc;
}
if (!gst_element_link_many(
ximagesrc, videoconvert, videoscale, capsfilter, ximagesink, nullptr)) {
screencastsrc, videorate, videoconvert, videoscale, capsfilter, preview_sink, nullptr)) {
nhlog::ui()->error("Failed to link preview window elements");
gst_object_unref(pipe_);
pipe_ = nullptr;
return;
}
if (gst_element_set_state(pipe_, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
nhlog::ui()->error("Unable to start preview pipeline");
gst_object_unref(pipe_);
@@ -954,6 +1079,40 @@ CallManager::previewWindow(unsigned int index) const
#endif
}
void
CallManager::setupScreenShareXDP()
{
#ifdef GSTREAMER_AVAILABLE
ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
sc_portal.init();
screenShareType_ = ScreenShareType::XDP;
#endif
}
void
CallManager::setScreenShareType(webrtc::ScreenShareType screenShareType)
{
#ifdef GSTREAMER_AVAILABLE
closeScreenShare();
screenShareType_ = screenShareType;
emit screenShareChanged();
#else
(void)screenShareType;
#endif
}
void
CallManager::closeScreenShare()
{
#ifdef GSTREAMER_AVAILABLE
close_preview_stream();
if (!isOnCall()) {
ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
sc_portal.close();
}
#endif
}
namespace {
std::vector<std::string>
getTurnURIs(const mtx::responses::TurnServer &turnServer)

@@ -17,6 +17,7 @@
#include "WebRTCSession.h"
#include "mtx/events/collections.hpp"
#include "mtx/events/voip.hpp"
#include "voip/ScreenCastPortal.h"
#include <mtxclient/utils.hpp>
namespace mtx::responses {
@@ -32,6 +33,8 @@ class CallManager final : public QObject
Q_PROPERTY(bool isOnCall READ isOnCall NOTIFY newCallState)
Q_PROPERTY(bool isOnCallOnOtherDevice READ isOnCallOnOtherDevice NOTIFY newCallDeviceState)
Q_PROPERTY(webrtc::CallType callType READ callType NOTIFY newInviteState)
Q_PROPERTY(
webrtc::ScreenShareType screenShareType READ screenShareType NOTIFY screenShareChanged)
Q_PROPERTY(webrtc::State callState READ callState NOTIFY newCallState)
Q_PROPERTY(QString callParty READ callParty NOTIFY newInviteState)
Q_PROPERTY(QString callPartyDisplayName READ callPartyDisplayName NOTIFY newInviteState)
@@ -41,7 +44,8 @@ class CallManager final : public QObject
Q_PROPERTY(QStringList mics READ mics NOTIFY devicesChanged)
Q_PROPERTY(QStringList cameras READ cameras NOTIFY devicesChanged)
Q_PROPERTY(bool callsSupported READ callsSupported CONSTANT)
Q_PROPERTY(bool screenShareSupported READ screenShareSupported CONSTANT)
Q_PROPERTY(bool screenShareX11Available READ screenShareX11Available CONSTANT)
Q_PROPERTY(bool screenShareReady READ screenShareReady NOTIFY screenShareChanged)
public:
CallManager(QObject *);
@@ -51,6 +55,7 @@ public:
bool isOnCallOnOtherDevice() const { return (isOnCallOnOtherDevice_ != ""); }
bool checkSharesRoom(QString roomid_, std::string invitee) const;
webrtc::CallType callType() const { return callType_; }
webrtc::ScreenShareType screenShareType() const { return screenShareType_; }
webrtc::State callState() const { return session_.state(); }
QString callParty() const { return callParty_; }
QString callPartyDisplayName() const { return callPartyDisplayName_; }
@@ -60,9 +65,10 @@ public:
QStringList mics() const { return devices(false); }
QStringList cameras() const { return devices(true); }
void refreshTurnServer();
bool screenShareReady() const;
static bool callsSupported();
static bool screenShareSupported();
static bool screenShareX11Available();
public slots:
void sendInvite(const QString &roomid, webrtc::CallType, unsigned int windowIndex = 0);
@@ -73,6 +79,9 @@ public slots:
void hangUp(
mtx::events::voip::CallHangUp::Reason = mtx::events::voip::CallHangUp::Reason::UserHangUp);
void rejectInvite();
void setupScreenShareXDP();
void setScreenShareType(webrtc::ScreenShareType);
void closeScreenShare();
QStringList windowList();
void previewWindow(unsigned int windowIndex) const;
@@ -90,6 +99,7 @@ signals:
void micMuteChanged();
void devicesChanged();
void turnServerRetrieved(const mtx::responses::TurnServer &);
void screenShareChanged();
private slots:
void retrieveTurnServer();
@@ -102,14 +112,15 @@ private:
QString callPartyAvatarUrl_;
std::string callPartyVersion_ = "1";
std::string callid_;
std::string partyid_ = mtx::client::utils::random_token(8, false);
std::string selectedpartyid_ = "";
std::string invitee_ = "";
const uint32_t timeoutms_ = 120000;
webrtc::CallType callType_ = webrtc::CallType::VOICE;
bool haveCallInvite_ = false;
bool answerSelected_ = false;
std::string isOnCallOnOtherDevice_ = "";
std::string partyid_ = mtx::client::utils::random_token(8, false);
std::string selectedpartyid_ = "";
std::string invitee_ = "";
const uint32_t timeoutms_ = 120000;
webrtc::CallType callType_ = webrtc::CallType::VOICE;
webrtc::ScreenShareType screenShareType_ = webrtc::ScreenShareType::X11;
bool haveCallInvite_ = false;
bool answerSelected_ = false;
std::string isOnCallOnOtherDevice_ = "";
std::string inviteSDP_;
std::vector<mtx::events::voip::CallCandidates::Candidate> remoteICECandidates_;
std::vector<std::string> turnURIs_;

@@ -0,0 +1,471 @@
#ifdef GSTREAMER_AVAILABLE
#include "ScreenCastPortal.h"
#include "ChatPage.h"
#include "Logging.h"
#include "UserSettingsPage.h"
#include <QDBusConnection>
#include <QDBusMessage>
#include <QDBusPendingCallWatcher>
#include <QDBusPendingReply>
#include <QDBusUnixFileDescriptor>
#include <random>
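// Tokens are used as the last element of the request/session handle object paths, so they
// must be valid D-Bus path components; here: "nheko" followed by 64 random digits.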
static QString
make_token()
{
thread_local std::random_device rng;
std::uniform_int_distribution<int> index_dist(0, 9); // char is not a permitted IntType for uniform_int_distribution
std::string token;
token.reserve(5 + 64);
token += "nheko";
for (uint8_t i = 0; i < 64; ++i)
token.push_back('0' + index_dist(rng));
return QString::fromStdString(std::move(token));
}
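// Reconstructs the Request object path the portal derives from our handle_token
// (/org/freedesktop/portal/desktop/request/SENDER/TOKEN), so the Response signal can be
// subscribed to before the method call is sent.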
static QString
handle_path(QString handle_token)
{
QString sender = QDBusConnection::sessionBus().baseService();
if (sender[0] == ':')
sender.remove(0, 1);
sender.replace(".", "_");
return QStringLiteral("/org/freedesktop/portal/desktop/request/") + sender +
QStringLiteral("/") + handle_token;
}
void
ScreenCastPortal::init()
{
switch (state) {
case State::Closed:
state = State::Starting;
createSession();
break;
case State::Starting:
nhlog::ui()->warn("ScreenCastPortal already starting");
break;
case State::Started:
close(true);
break;
case State::Closing:
nhlog::ui()->warn("ScreenCastPortal still closing");
break;
}
}
const ScreenCastPortal::Stream *
ScreenCastPortal::getStream() const
{
if (state != State::Started)
return nullptr;
else
return &stream;
}
bool
ScreenCastPortal::ready() const
{
return state == State::Started;
}
void
ScreenCastPortal::close(bool reinit)
{
switch (state) {
case State::Closed:
if (reinit)
init();
break;
case State::Starting:
if (!reinit) {
// Remaining handler will abort.
state = State::Closed;
}
break;
case State::Started: {
state = State::Closing;
emit readyChanged();
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
sessionHandle.path(),
QStringLiteral("org.freedesktop.portal.Session"),
QStringLiteral("Close"));
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(watcher,
&QDBusPendingCallWatcher::finished,
this,
[this, reinit](QDBusPendingCallWatcher *self) {
QDBusPendingReply reply = *self;
if (!reply.isValid()) {
nhlog::ui()->warn("org.freedesktop.portal.ScreenCast (Close): {}",
reply.error().message().toStdString());
}
state = State::Closed;
if (reinit)
init();
});
} break;
case State::Closing:
nhlog::ui()->warn("ScreenCastPortal already closing");
break;
}
}
void
ScreenCastPortal::closedHandler(uint response, const QVariantMap &)
{
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (Closed): {}", response);
}
nhlog::ui()->debug("org.freedesktop.portal.ScreenCast: Connection closed");
state = State::Closed;
emit readyChanged();
}
void
ScreenCastPortal::createSession()
{
// Connect before sending the request to avoid missing the reply
QString handle_token = make_token();
QDBusConnection::sessionBus().connect(QStringLiteral("org.freedesktop.portal.Desktop"),
handle_path(handle_token),
QStringLiteral("org.freedesktop.portal.Request"),
QStringLiteral("Response"),
this,
SLOT(createSessionHandler(uint, QVariantMap)));
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("CreateSession"));
msg << QVariantMap{{QStringLiteral("handle_token"), handle_token},
{QStringLiteral("session_handle_token"), make_token()}};
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusObjectPath> reply = *self;
self->deleteLater();
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (CreateSession): {}",
reply.error().message().toStdString());
close();
}
});
}
void
ScreenCastPortal::createSessionHandler(uint response, const QVariantMap &results)
{
switch (state) {
case State::Closed:
nhlog::ui()->warn("ScreenCastPortal not starting");
break;
case State::Starting: {
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (CreateSession Response): {}",
response);
close();
return;
}
sessionHandle = QDBusObjectPath(results.value(QStringLiteral("session_handle")).toString());
nhlog::ui()->debug("org.freedesktop.portal.ScreenCast: sessionHandle = {}",
sessionHandle.path().toStdString());
getAvailableSourceTypes();
} break;
case State::Started:
nhlog::ui()->warn("ScreenCastPortal already started");
break;
case State::Closing:
break;
}
}
void
ScreenCastPortal::getAvailableSourceTypes()
{
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.DBus.Properties"),
QStringLiteral("Get"));
msg << QStringLiteral("org.freedesktop.portal.ScreenCast")
<< QStringLiteral("AvailableSourceTypes");
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusVariant> reply = *self;
self->deleteLater();
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.DBus.Properties (Get AvailableSourceTypes): {}",
reply.error().message().toStdString());
close();
return;
}
switch (state) {
case State::Closed:
nhlog::ui()->warn("ScreenCastPortal not starting");
break;
case State::Starting: {
const auto &value = reply.value().variant();
if (value.canConvert<uint>()) {
availableSourceTypes = value.value<uint>();
} else {
nhlog::ui()->error("Invalid reply from org.freedesktop.DBus.Properties (Get "
"AvailableSourceTypes)");
close();
return;
}
getAvailableCursorModes();
} break;
case State::Started:
nhlog::ui()->warn("ScreenCastPortal already started");
break;
case State::Closing:
break;
}
});
}
void
ScreenCastPortal::getAvailableCursorModes()
{
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.DBus.Properties"),
QStringLiteral("Get"));
msg << QStringLiteral("org.freedesktop.portal.ScreenCast")
<< QStringLiteral("AvailableCursorModes");
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusVariant> reply = *self;
self->deleteLater();
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.DBus.Properties (Get AvailableCursorModes): {}",
reply.error().message().toStdString());
close();
return;
}
switch (state) {
case State::Closed:
nhlog::ui()->warn("ScreenCastPortal not starting");
break;
case State::Starting: {
const auto &value = reply.value().variant();
if (value.canConvert<uint>()) {
availableCursorModes = value.value<uint>();
} else {
nhlog::ui()->error("Invalid reply from org.freedesktop.DBus.Properties (Get "
"AvailableCursorModes)");
close();
return;
}
selectSources();
} break;
case State::Started:
nhlog::ui()->warn("ScreenCastPortal already started");
break;
case State::Closing:
break;
}
});
}
void
ScreenCastPortal::selectSources()
{
// Connect before sending the request to avoid missing the reply
auto handle_token = make_token();
QDBusConnection::sessionBus().connect(QString(),
handle_path(handle_token),
QStringLiteral("org.freedesktop.portal.Request"),
QStringLiteral("Response"),
this,
SLOT(selectSourcesHandler(uint, QVariantMap)));
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("SelectSources"));
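// AvailableSourceTypes is a bitmask (1 = MONITOR, 2 = WINDOW, 4 = VIRTUAL); every advertised
// type is passed back so the user can pick in the portal dialog.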
QVariantMap options{{QStringLiteral("multiple"), false},
{QStringLiteral("types"), availableSourceTypes},
{QStringLiteral("handle_token"), handle_token}};
auto settings = ChatPage::instance()->userSettings();
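// AvailableCursorModes is a bitmask (1 = Hidden, 2 = Embedded, 4 = Metadata); Hidden is only
// requested when the user chose to hide the cursor.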
if (settings->screenShareHideCursor() && (availableCursorModes & (uint)1) != 0) {
options["cursor_mode"] = (uint)1;
}
msg << QVariant::fromValue(sessionHandle) << options;
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusObjectPath> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (SelectSources): {}",
reply.error().message().toStdString());
close();
}
});
}
void
ScreenCastPortal::selectSourcesHandler(uint response, const QVariantMap &)
{
switch (state) {
case State::Closed:
nhlog::ui()->warn("ScreenCastPortal not starting");
break;
case State::Starting: {
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (SelectSources Response): {}",
response);
close();
return;
}
start();
} break;
case State::Started:
nhlog::ui()->warn("ScreenCastPortal already started");
break;
case State::Closing:
break;
}
}
void
ScreenCastPortal::start()
{
// Connect before sending the request to avoid missing the reply
auto handle_token = make_token();
QDBusConnection::sessionBus().connect(QString(),
handle_path(handle_token),
QStringLiteral("org.freedesktop.portal.Request"),
QStringLiteral("Response"),
this,
SLOT(startHandler(uint, QVariantMap)));
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("Start"));
msg << QVariant::fromValue(sessionHandle) << QString()
<< QVariantMap{{QStringLiteral("handle_token"), handle_token}};
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusObjectPath> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (Start): {}",
reply.error().message().toStdString());
}
});
}
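// One entry of the "streams" array in the Start response (D-Bus signature a(ua{sv})).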
struct PipeWireStream
{
quint32 nodeId = 0;
QVariantMap map;
};
Q_DECLARE_METATYPE(PipeWireStream)
const QDBusArgument &
operator>>(const QDBusArgument &argument, PipeWireStream &stream)
{
argument.beginStructure();
argument >> stream.nodeId;
argument.beginMap();
while (!argument.atEnd()) {
QString key;
QVariant map;
argument.beginMapEntry();
argument >> key >> map;
argument.endMapEntry();
stream.map.insert(key, map);
}
argument.endMap();
argument.endStructure();
return argument;
}
void
ScreenCastPortal::startHandler(uint response, const QVariantMap &results)
{
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (Start Response): {}", response);
close();
return;
}
QVector<PipeWireStream> streams =
qdbus_cast<QVector<PipeWireStream>>(results.value(QStringLiteral("streams")));
if (streams.size() == 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast: No stream was returned");
close();
return;
}
stream.nodeId = streams[0].nodeId;
nhlog::ui()->debug("org.freedesktop.portal.ScreenCast: nodeId = {}", stream.nodeId);
openPipeWireRemote();
}
void
ScreenCastPortal::openPipeWireRemote()
{
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("OpenPipeWireRemote"));
msg << QVariant::fromValue(sessionHandle) << QVariantMap{};
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
QDBusPendingReply<QDBusUnixFileDescriptor> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (OpenPipeWireRemote): {}",
reply.error().message().toStdString());
close();
} else {
stream.fd = reply.value().fileDescriptor();
nhlog::ui()->debug("org.freedesktop.portal.ScreenCast: fd = {}", stream.fd);
state = State::Started;
emit readyChanged();
}
});
}
#endif

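The portal class is driven entirely asynchronously: a caller kicks it off with init(), waits for readyChanged(), and then pulls the stream descriptor. A condensed sketch of that caller side, modelled on the CallManager changes above (assumes GSTREAMER_AVAILABLE; not new API):

// Caller-side flow, mirroring CallManager.
auto &portal = ScreenCastPortal::instance();
QObject::connect(&portal, &ScreenCastPortal::readyChanged, [&portal]() {
    if (const ScreenCastPortal::Stream *stream = portal.getStream())
        nhlog::ui()->debug("portal stream ready: fd = {}, node = {}", stream->fd, stream->nodeId);
});
portal.init();  // CreateSession -> SelectSources -> Start -> OpenPipeWireRemote
// ... hand stream->fd / stream->nodeId to pipewiresrc while the share is active ...
portal.close(); // tear the session down once the share ends
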
@@ -0,0 +1,66 @@
#pragma once
#ifdef GSTREAMER_AVAILABLE
#include <QDBusConnection>
#include <QDBusMessage>
#include <QDBusPendingCallWatcher>
#include <QDBusPendingReply>
#include <QDBusUnixFileDescriptor>
#include <QObject>
class ScreenCastPortal final : public QObject
{
Q_OBJECT
public:
struct Stream
{
int fd = -1;
quint32 nodeId = 0;
};
static ScreenCastPortal &instance()
{
static ScreenCastPortal instance;
return instance;
}
void init();
const Stream *getStream() const;
bool ready() const;
void close(bool reinit = false);
public slots:
void createSessionHandler(uint response, const QVariantMap &results);
void closedHandler(uint response, const QVariantMap &results);
void selectSourcesHandler(uint response, const QVariantMap &results);
void startHandler(uint response, const QVariantMap &results);
signals:
void readyChanged();
private:
void createSession();
void getAvailableSourceTypes();
void getAvailableCursorModes();
void selectSources();
void start();
void openPipeWireRemote();
QDBusObjectPath sessionHandle;
uint availableSourceTypes = 0;
uint availableCursorModes = 0;
Stream stream;
enum class State
{
Closed,
Starting,
Started,
Closing,
};
State state = State::Closed;
};
#endif

@@ -2,6 +2,7 @@
//
// SPDX-License-Identifier: GPL-3.0-or-later
#include <QGuiApplication>
#include <QQmlEngine>
#include <QQuickItem>
#include <algorithm>
@@ -19,6 +20,7 @@
#include "Logging.h"
#include "UserSettingsPage.h"
#include "WebRTCSession.h"
#include "voip/ScreenCastPortal.h"
#ifdef GSTREAMER_AVAILABLE
extern "C"
@@ -40,9 +42,11 @@ extern "C"
#define STUN_SERVER "stun://turn.matrix.org:3478"
Q_DECLARE_METATYPE(webrtc::CallType)
Q_DECLARE_METATYPE(webrtc::ScreenShareType)
Q_DECLARE_METATYPE(webrtc::State)
using webrtc::CallType;
using webrtc::ScreenShareType;
using webrtc::State;
WebRTCSession::WebRTCSession()
@@ -56,6 +60,14 @@ WebRTCSession::WebRTCSession()
"CallType",
QStringLiteral("Can't instantiate enum"));
qRegisterMetaType<webrtc::ScreenShareType>();
qmlRegisterUncreatableMetaObject(webrtc::staticMetaObject,
"im.nheko",
1,
0,
"ScreenShareType",
QStringLiteral("Can't instantiate enum"));
qRegisterMetaType<webrtc::State>();
qmlRegisterUncreatableMetaObject(webrtc::staticMetaObject,
"im.nheko",
@@ -578,13 +590,13 @@ getMediaAttributes(const GstSDPMessage *sdp,
}
bool
WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *errorMessage)
WebRTCSession::havePlugins(bool isVideo,
bool isScreenshare,
ScreenShareType screenShareType,
std::string *errorMessage)
{
if (!initialised_ && !init(errorMessage))
return false;
if (haveVoicePlugins_ && (!isVideo || haveVideoPlugins_) &&
(!isX11Screenshare || haveX11ScreensharePlugins_))
return true;
static constexpr std::initializer_list<const char *> audio_elements = {
"audioconvert",
@@ -611,10 +623,6 @@ WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *err
"videoscale",
"vp8enc",
};
static constexpr std::initializer_list<const char *> screenshare_elements = {
"ximagesink",
"ximagesrc",
};
std::string strError("Missing GStreamer elements: ");
GstRegistry *registry = gst_registry_get();
@@ -641,18 +649,35 @@ WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *err
// check both elements at once
if (isVideo)
haveVideoPlugins_ = check_plugins(video_elements);
if (isX11Screenshare)
haveX11ScreensharePlugins_ = check_plugins(screenshare_elements);
bool haveScreensharePlugins = false;
if (isScreenshare) {
haveScreensharePlugins = check_plugins({"videorate"});
if (haveScreensharePlugins) {
if (QGuiApplication::platformName() == QStringLiteral("wayland")) {
haveScreensharePlugins = check_plugins({"waylandsink"});
} else {
haveScreensharePlugins = check_plugins({"ximagesink"});
}
}
if (haveScreensharePlugins) {
if (screenShareType == ScreenShareType::X11) {
haveScreensharePlugins = check_plugins({"ximagesrc"});
} else {
haveScreensharePlugins = check_plugins({"pipewiresrc"});
}
}
}
if (!haveVoicePlugins_ || (isVideo && !haveVideoPlugins_) ||
(isX11Screenshare && !haveX11ScreensharePlugins_)) {
(isScreenshare && !haveScreensharePlugins)) {
nhlog::ui()->error(strError);
if (errorMessage)
*errorMessage = strError;
return false;
}
if (isVideo || isX11Screenshare) {
if (isVideo || isScreenshare) {
// load qmlglsink to register GStreamer's GstGLVideoItem QML type
GstElement *qmlglsink = gst_element_factory_make("qmlglsink", nullptr);
gst_object_unref(qmlglsink);
@@ -661,12 +686,15 @@ WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *err
}
bool
WebRTCSession::createOffer(CallType callType, uint32_t shareWindowId)
WebRTCSession::createOffer(CallType callType,
ScreenShareType screenShareType,
uint32_t shareWindowId)
{
clear();
isOffering_ = true;
callType_ = callType;
shareWindowId_ = shareWindowId;
isOffering_ = true;
callType_ = callType;
screenShareType_ = screenShareType;
shareWindowId_ = shareWindowId;
// opus and vp8 rtp payload types must be defined dynamically
// therefore from the range [96-127]
@@ -924,6 +952,7 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
GstElement *tee = gst_element_factory_make("tee", "videosrctee");
gst_bin_add_many(GST_BIN(pipe_), videoconvert, tee, nullptr);
if (callType_ == CallType::VIDEO || (settings->screenSharePiP() && devices_.haveCamera())) {
std::pair<int, int> resolution;
std::pair<int, int> frameRate;
@@ -969,16 +998,56 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
nhlog::ui()->debug("WebRTC: screen share hide mouse cursor: {}",
settings->screenShareHideCursor());
GstElement *ximagesrc = gst_element_factory_make("ximagesrc", "screenshare");
if (!ximagesrc) {
nhlog::ui()->error("WebRTC: failed to create ximagesrc");
return false;
GstElement *screencastsrc = nullptr;
if (screenShareType_ == ScreenShareType::X11) {
GstElement *ximagesrc = gst_element_factory_make("ximagesrc", "screenshare");
if (!ximagesrc) {
nhlog::ui()->error("WebRTC: failed to create ximagesrc");
return false;
}
g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
g_object_set(ximagesrc, "xid", shareWindowId_, nullptr);
g_object_set(ximagesrc, "show-pointer", !settings->screenShareHideCursor(), nullptr);
g_object_set(ximagesrc, "do-timestamp", (gboolean)1, nullptr);
gst_bin_add(GST_BIN(pipe_), ximagesrc);
screencastsrc = ximagesrc;
} else {
ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
GstElement *pipewiresrc = gst_element_factory_make("pipewiresrc", "screenshare");
if (!pipewiresrc) {
nhlog::ui()->error("WebRTC: failed to create pipewiresrc");
gst_object_unref(pipe_);
pipe_ = nullptr;
return false;
}
const ScreenCastPortal::Stream *stream = sc_portal.getStream();
if (stream == nullptr) {
nhlog::ui()->error("xdg-desktop-portal stream not started");
gst_object_unref(pipe_);
pipe_ = nullptr;
return false;
}
g_object_set(pipewiresrc, "fd", (gint)stream->fd, nullptr);
std::string path = std::to_string(stream->nodeId);
g_object_set(pipewiresrc, "path", path.c_str(), nullptr);
g_object_set(pipewiresrc, "do-timestamp", (gboolean)1, nullptr);
gst_bin_add(GST_BIN(pipe_), pipewiresrc);
GstElement *videorate = gst_element_factory_make("videorate", nullptr);
gst_bin_add(GST_BIN(pipe_), videorate);
if (!gst_element_link(pipewiresrc, videorate)) {
nhlog::ui()->error("WebRTC: failed to link pipewiresrc -> videorate");
return false;
}
screencastsrc = videorate;
}
g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
g_object_set(ximagesrc, "xid", shareWindowId_, nullptr);
g_object_set(ximagesrc, "show-pointer", !settings->screenShareHideCursor(), nullptr);
GstCaps *caps = gst_caps_new_simple("video/x-raw",
"format",
G_TYPE_STRING,
"I420", // For vp8enc
"framerate",
GST_TYPE_FRACTION,
settings->screenShareFrameRate(),
@@ -987,13 +1056,13 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);
g_object_set(capsfilter, "caps", caps, nullptr);
gst_caps_unref(caps);
gst_bin_add_many(GST_BIN(pipe_), ximagesrc, capsfilter, nullptr);
gst_bin_add(GST_BIN(pipe_), capsfilter);
if (settings->screenSharePiP() && devices_.haveCamera()) {
GstElement *compositor = gst_element_factory_make("compositor", nullptr);
g_object_set(compositor, "background", 1, nullptr);
gst_bin_add(GST_BIN(pipe_), compositor);
if (!gst_element_link_many(ximagesrc, compositor, capsfilter, tee, nullptr)) {
if (!gst_element_link_many(screencastsrc, compositor, capsfilter, tee, nullptr)) {
nhlog::ui()->error("WebRTC: failed to link screen share elements");
return false;
}
@@ -1006,7 +1075,7 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
return false;
}
gst_object_unref(srcpad);
} else if (!gst_element_link_many(ximagesrc, videoconvert, capsfilter, tee, nullptr)) {
} else if (!gst_element_link_many(screencastsrc, videoconvert, capsfilter, tee, nullptr)) {
nhlog::ui()->error("WebRTC: failed to link screen share elements");
return false;
}
@@ -1157,7 +1226,7 @@ WebRTCSession::end()
#else
bool
WebRTCSession::havePlugins(bool, bool, std::string *)
WebRTCSession::havePlugins(bool, bool, ScreenShareType, std::string *)
{
return false;
}
@@ -1171,8 +1240,11 @@ WebRTCSession::haveLocalPiP() const
// clang-format off
// clang-format < 12 is buggy on this
bool
WebRTCSession::createOffer(webrtc::CallType, uint32_t)
WebRTCSession::createOffer(webrtc::CallType,
ScreenShareType screenShareType,
uint32_t)
{
(void)screenShareType;
return false;
}
// clang-format on

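For orientation, the source branch assembled above for an xdg-desktop-portal share is roughly equivalent to the following parse-launch description (a sketch only; the real code builds the elements programmatically, and the compositor/PiP variant is omitted):

// Rough equivalent of the XDP screen-share branch; fd and node id come from ScreenCastPortal.
const ScreenCastPortal::Stream *stream = ScreenCastPortal::instance().getStream();
auto settings = ChatPage::instance()->userSettings();
QString desc = QStringLiteral("pipewiresrc fd=%1 path=%2 do-timestamp=true ! videorate ! "
                              "videoconvert ! video/x-raw,format=I420,framerate=%3/1 ! "
                              "tee name=videosrctee")
                 .arg(stream->fd)
                 .arg(stream->nodeId)
                 .arg(settings->screenShareFrameRate());
GError *error = nullptr;
GstElement *bin = gst_parse_launch(desc.toUtf8().constData(), &error);
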
@@ -26,6 +26,13 @@ enum class CallType
};
Q_ENUM_NS(CallType)
enum class ScreenShareType
{
X11,
XDP
};
Q_ENUM_NS(ScreenShareType)
enum class State
{
DISCONNECTED,
@@ -52,7 +59,10 @@ public:
return instance;
}
bool havePlugins(bool isVideo, bool isX11Screenshare, std::string *errorMessage = nullptr);
bool havePlugins(bool isVideo,
bool isScreenshare,
webrtc::ScreenShareType screenShareType,
std::string *errorMessage = nullptr);
webrtc::CallType callType() const { return callType_; }
webrtc::State state() const { return state_; }
bool haveLocalPiP() const;
@@ -60,7 +70,7 @@ public:
bool isRemoteVideoRecvOnly() const { return isRemoteVideoRecvOnly_; }
bool isRemoteVideoSendOnly() const { return isRemoteVideoSendOnly_; }
bool createOffer(webrtc::CallType, uint32_t shareWindowId);
bool createOffer(webrtc::CallType, webrtc::ScreenShareType, uint32_t shareWindowId);
bool acceptOffer(const std::string &sdp);
bool acceptAnswer(const std::string &sdp);
bool acceptNegotiation(const std::string &sdp);
@@ -91,19 +101,19 @@ private:
WebRTCSession();
CallDevices &devices_;
bool initialised_ = false;
bool haveVoicePlugins_ = false;
bool haveVideoPlugins_ = false;
bool haveX11ScreensharePlugins_ = false;
webrtc::CallType callType_ = webrtc::CallType::VOICE;
webrtc::State state_ = webrtc::State::DISCONNECTED;
bool isOffering_ = false;
bool isRemoteVideoRecvOnly_ = false;
bool isRemoteVideoSendOnly_ = false;
QQuickItem *videoItem_ = nullptr;
GstElement *pipe_ = nullptr;
GstElement *webrtc_ = nullptr;
unsigned int busWatchId_ = 0;
bool initialised_ = false;
bool haveVoicePlugins_ = false;
bool haveVideoPlugins_ = false;
webrtc::CallType callType_ = webrtc::CallType::VOICE;
webrtc::ScreenShareType screenShareType_ = webrtc::ScreenShareType::X11;
webrtc::State state_ = webrtc::State::DISCONNECTED;
bool isOffering_ = false;
bool isRemoteVideoRecvOnly_ = false;
bool isRemoteVideoSendOnly_ = false;
QQuickItem *videoItem_ = nullptr;
GstElement *pipe_ = nullptr;
GstElement *webrtc_ = nullptr;
unsigned int busWatchId_ = 0;
std::vector<std::string> turnServers_;
uint32_t shareWindowId_ = 0;