Merge pull request #1411 from dtelsing/screenshare-xdg-desktop-portal

Support screen sharing with xdg-desktop-portal
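The portal path adds a new ScreenCastPortal singleton that drives the org.freedesktop.portal.ScreenCast D-Bus interface (CreateSession, SelectSources, Start, OpenPipeWireRemote) and, once the user grants a cast, exposes the resulting PipeWire file descriptor and node id. CallManager gains a webrtc::ScreenShareType (X11 or XDP) that the screen-share dialog can switch between: the existing ximagesrc capture stays available whenever DISPLAY is set, while the portal/PipeWire path is preferred on Wayland. WebRTCSession's plugin checks and pipelines are extended accordingly, picking pipewiresrc or ximagesrc as the source and waylandsink or ximagesink for the preview depending on the platform.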
This commit is contained in:
DeepBlueV7.X 2023-03-30 23:07:39 +00:00 committed by GitHub
commit 5de730830a
9 changed files with 997 additions and 96 deletions

View file

@ -73,6 +73,7 @@ if (APPLE OR WIN32)
endif()
option(VOIP "Whether to enable voip support. Disable this, if you don't have gstreamer." ${VOIP_DEFAULT})
cmake_dependent_option(SCREENSHARE_X11 "Whether to enable screenshare support on X11." ON "VOIP" OFF)
cmake_dependent_option(SCREENSHARE_XDP "Whether to enable screenshare support using xdg-desktop-portal." ON "VOIP" OFF)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake")
@ -414,6 +415,9 @@ set(SRC_FILES
src/voip/CallDevices.h
src/voip/CallManager.cpp
src/voip/CallManager.h
src/voip/ScreenCastPortal.cpp
src/voip/ScreenCastPortal.h
src/voip/WebRTCSession.cpp
src/voip/WebRTCSession.h
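Like SCREENSHARE_X11, the new SCREENSHARE_XDP option is a cmake_dependent_option: it defaults to ON while VOIP is enabled and is forced OFF otherwise, so existing builds pick up the portal code path automatically and can opt out with -DSCREENSHARE_XDP=OFF.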

View file

@ -103,7 +103,6 @@ Popup {
}
Button {
visible: CallManager.screenShareSupported
text: qsTr("Screen")
icon.source: "qrc:/icons/icons/ui/screen-share.svg"
onClicked: {

View file

@ -16,6 +16,9 @@ Popup {
Component.onCompleted: {
frameRateCombo.currentIndex = frameRateCombo.find(Settings.screenShareFrameRate);
}
Component.onDestruction: {
CallManager.closeScreenShare();
}
palette: Nheko.colors
ColumnLayout {
@ -29,6 +32,26 @@ Popup {
color: Nheko.colors.windowText
}
RowLayout {
Layout.leftMargin: 8
Layout.rightMargin: 8
Layout.bottomMargin: 8
Label {
Layout.alignment: Qt.AlignLeft
text: qsTr("Method:")
color: Nheko.colors.windowText
}
ComboBox {
id: screenshareType
Layout.fillWidth: true
model: CallManager.screenShareTypeList()
onCurrentIndexChanged: CallManager.setScreenShareType(currentIndex);
}
}
RowLayout {
Layout.leftMargin: 8
Layout.rightMargin: 8
@ -41,12 +64,23 @@ Popup {
}
ComboBox {
visible: CallManager.screenShareType == ScreenShareType.X11
id: windowCombo
Layout.fillWidth: true
model: CallManager.windowList()
}
Button {
visible: CallManager.screenShareType == ScreenShareType.XDP
highlighted: !CallManager.screenShareReady
text: qsTr("Request screencast")
onClicked: {
Settings.screenShareHideCursor = hideCursorCheckBox.checked;
CallManager.setupScreenShareXDP();
}
}
}
RowLayout {
@ -122,6 +156,7 @@ Popup {
}
Button {
visible: CallManager.screenShareReady
text: qsTr("Share")
icon.source: "qrc:/icons/icons/ui/screen-share.svg"
@ -137,6 +172,7 @@ Popup {
}
Button {
visible: CallManager.screenShareReady
text: qsTr("Preview")
onClicked: {
CallManager.previewWindow(windowCombo.currentIndex);
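In the screen-share dialog this surfaces as a new "Method:" combo box backed by CallManager.screenShareTypeList() (listing "X11" and "PipeWire"), plus a "Request screencast" button that calls CallManager.setupScreenShareXDP() and stays highlighted until the portal reports readiness. The window picker is now only shown for the X11 method, Share and Preview are gated on CallManager.screenShareReady, and closing the popup calls CallManager.closeScreenShare() so an unused portal session gets torn down.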

View file

@ -9,6 +9,7 @@
#include <cstdlib>
#include <memory>
#include <QGuiApplication>
#include <QMediaPlaylist>
#include <QUrl>
@ -22,6 +23,8 @@
#include "Utils.h"
#include "mtx/responses/turn_server.hpp"
#include "voip/ScreenCastPortal.h"
#include "voip/WebRTCSession.h"
/*
* Select Answer when one instance of the client supports v0
@ -47,6 +50,7 @@ using namespace mtx::events;
using namespace mtx::events::voip;
using webrtc::CallType;
using webrtc::ScreenShareType;
//! Session Description Object
typedef RTCSessionDescriptionInit SDO;
@ -64,6 +68,23 @@ CallManager::CallManager(QObject *parent)
qRegisterMetaType<mtx::events::voip::CallCandidates::Candidate>();
qRegisterMetaType<mtx::responses::TurnServer>();
#ifdef GSTREAMER_AVAILABLE
std::string errorMessage;
if (session_.havePlugins(true, true, ScreenShareType::XDP, &errorMessage)) {
screenShareTypes_.push_back(ScreenShareType::XDP);
screenShareType_ = ScreenShareType::XDP;
}
if (std::getenv("DISPLAY")) {
screenShareTypes_.push_back(ScreenShareType::X11);
if (QGuiApplication::platformName() != QStringLiteral("wayland")) {
// Selected by default
screenShareType_ = ScreenShareType::X11;
std::swap(screenShareTypes_[0], screenShareTypes_[1]);
}
}
#endif
connect(
&session_,
&WebRTCSession::offerCreated,
@ -176,6 +197,13 @@ CallManager::CallManager(QObject *parent)
break;
}
});
#ifdef GSTREAMER_AVAILABLE
connect(&ScreenCastPortal::instance(),
&ScreenCastPortal::readyChanged,
this,
&CallManager::screenShareChanged);
#endif
}
void
@ -191,11 +219,6 @@ CallManager::sendInvite(const QString &roomid, CallType callType, unsigned int w
auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
std::string errorMessage;
if (!session_.havePlugins(
callType != CallType::VOICE, callType == CallType::SCREEN, &errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
return;
}
callType_ = callType;
roomid_ = roomid;
@ -212,14 +235,22 @@ CallManager::sendInvite(const QString &roomid, CallType callType, unsigned int w
return;
}
#ifdef GSTREAMER_AVAILABLE
if (callType == CallType::SCREEN) {
if (!screenShareSupported())
return;
if (windows_.empty() || windowIndex >= windows_.size()) {
nhlog::ui()->error("WebRTC: window index out of range");
return;
if (screenShareType_ == ScreenShareType::X11) {
if (windows_.empty() || windowIndex >= windows_.size()) {
nhlog::ui()->error("WebRTC: window index out of range");
return;
}
} else {
ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
if (sc_portal.getStream() == nullptr) {
nhlog::ui()->error("xdg-desktop-portal stream not started");
return;
}
}
}
#endif
if (haveCallInvite_) {
nhlog::ui()->debug(
@ -255,8 +286,12 @@ CallManager::sendInvite(const QString &roomid, CallType callType, unsigned int w
invitee_ = callParty_.toStdString();
emit newInviteState();
playRingtone(QUrl(QStringLiteral("qrc:/media/media/ringback.ogg")), true);
if (!session_.createOffer(callType,
callType == CallType::SCREEN ? windows_[windowIndex].second : 0)) {
uint32_t shareWindowId =
callType == CallType::SCREEN && screenShareType_ == ScreenShareType::X11
? windows_[windowIndex].second
: 0;
if (!session_.createOffer(callType, screenShareType_, shareWindowId)) {
emit ChatPage::instance()->showNotification(QStringLiteral("Problem setting up call."));
endCall();
}
@ -466,8 +501,10 @@ CallManager::acceptInvite()
stopRingtone();
std::string errorMessage;
if (!session_.havePlugins(
callType_ != CallType::VOICE, callType_ == CallType::SCREEN, &errorMessage)) {
if (!session_.havePlugins(callType_ != CallType::VOICE,
callType_ == CallType::SCREEN,
screenShareType_,
&errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
hangUp(CallHangUp::Reason::UserMediaFailed);
return;
@ -712,12 +749,6 @@ CallManager::callsSupported()
#endif
}
bool
CallManager::screenShareSupported()
{
return std::getenv("DISPLAY") && !std::getenv("WAYLAND_DISPLAY");
}
QStringList
CallManager::devices(bool isVideo) const
{
@ -746,6 +777,7 @@ CallManager::generateCallID()
void
CallManager::clear(bool endAllCalls)
{
closeScreenShare();
roomid_.clear();
callParty_.clear();
callPartyDisplayName_.clear();
@ -810,9 +842,47 @@ CallManager::stopRingtone()
player_.setPlaylist(nullptr);
}
bool
CallManager::screenShareReady() const
{
#ifdef GSTREAMER_AVAILABLE
if (screenShareType_ == ScreenShareType::X11) {
return true;
} else {
return ScreenCastPortal::instance().ready();
}
#else
return false;
#endif
}
QStringList
CallManager::screenShareTypeList()
{
QStringList ret;
ret.reserve(2);
for (ScreenShareType type : screenShareTypes_) {
switch (type) {
case ScreenShareType::X11:
ret.append(tr("X11"));
break;
case ScreenShareType::XDP:
ret.append(tr("PipeWire"));
break;
}
}
return ret;
}
QStringList
CallManager::windowList()
{
if (!(std::find(screenShareTypes_.begin(), screenShareTypes_.end(), ScreenShareType::X11) !=
screenShareTypes_.end())) {
return {};
}
windows_.clear();
windows_.push_back({tr("Entire screen"), 0});
@ -880,26 +950,52 @@ namespace {
GstElement *pipe_ = nullptr;
unsigned int busWatchId_ = 0;
void
close_preview_stream()
{
if (pipe_) {
gst_element_set_state(GST_ELEMENT(pipe_), GST_STATE_NULL);
gst_object_unref(pipe_);
pipe_ = nullptr;
}
if (busWatchId_) {
g_source_remove(busWatchId_);
busWatchId_ = 0;
}
}
gboolean
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer G_GNUC_UNUSED)
{
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_EOS:
if (pipe_) {
gst_element_set_state(GST_ELEMENT(pipe_), GST_STATE_NULL);
gst_object_unref(pipe_);
pipe_ = nullptr;
}
if (busWatchId_) {
g_source_remove(busWatchId_);
busWatchId_ = 0;
}
close_preview_stream();
break;
case GST_MESSAGE_ERROR: {
GError *err = nullptr;
gchar *dbg_info = nullptr;
gst_message_parse_error(msg, &err, &dbg_info);
nhlog::ui()->error("GST error: {}", dbg_info);
g_error_free(err);
g_free(dbg_info);
close_preview_stream();
break;
}
default:
break;
}
return TRUE;
}
static GstElement *
make_preview_sink()
{
if (QGuiApplication::platformName() == QStringLiteral("wayland")) {
return gst_element_factory_make("waylandsink", nullptr);
} else {
return gst_element_factory_make("ximagesink", nullptr);
}
}
}
#endif
@ -907,38 +1003,81 @@ void
CallManager::previewWindow(unsigned int index) const
{
#ifdef GSTREAMER_AVAILABLE
if (windows_.empty() || index >= windows_.size() || !gst_is_initialized())
if (!gst_is_initialized())
return;
GstElement *ximagesrc = gst_element_factory_make("ximagesrc", nullptr);
if (!ximagesrc) {
nhlog::ui()->error("Failed to create ximagesrc");
if (pipe_ != nullptr) {
nhlog::ui()->warn("Preview already started");
return;
}
if (screenShareType_ == ScreenShareType::X11 &&
(windows_.empty() || index >= windows_.size())) {
nhlog::ui()->error("X11 screencast not available");
return;
}
auto settings = ChatPage::instance()->userSettings();
pipe_ = gst_pipeline_new(nullptr);
GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
GstElement *videoscale = gst_element_factory_make("videoscale", nullptr);
GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);
GstElement *ximagesink = gst_element_factory_make("ximagesink", nullptr);
GstElement *preview_sink = make_preview_sink();
GstElement *videorate = gst_element_factory_make("videorate", nullptr);
g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
g_object_set(ximagesrc, "show-pointer", FALSE, nullptr);
g_object_set(ximagesrc, "xid", windows_[index].second, nullptr);
gst_bin_add_many(
GST_BIN(pipe_), videorate, videoconvert, videoscale, capsfilter, preview_sink, nullptr);
GstCaps *caps = gst_caps_new_simple(
"video/x-raw", "width", G_TYPE_INT, 480, "height", G_TYPE_INT, 360, nullptr);
"video/x-raw", "framerate", GST_TYPE_FRACTION, settings->screenShareFrameRate(), 1, nullptr);
g_object_set(capsfilter, "caps", caps, nullptr);
gst_caps_unref(caps);
pipe_ = gst_pipeline_new(nullptr);
gst_bin_add_many(
GST_BIN(pipe_), ximagesrc, videoconvert, videoscale, capsfilter, ximagesink, nullptr);
GstElement *screencastsrc = nullptr;
if (screenShareType_ == ScreenShareType::X11) {
GstElement *ximagesrc = gst_element_factory_make("ximagesrc", nullptr);
if (!ximagesrc) {
nhlog::ui()->error("Failed to create ximagesrc");
gst_object_unref(pipe_);
pipe_ = nullptr;
return;
}
g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
g_object_set(ximagesrc, "xid", windows_[index].second, nullptr);
g_object_set(ximagesrc, "show-pointer", !settings->screenShareHideCursor(), nullptr);
g_object_set(ximagesrc, "do-timestamp", (gboolean)1, nullptr);
gst_bin_add(GST_BIN(pipe_), ximagesrc);
screencastsrc = ximagesrc;
} else {
ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
const ScreenCastPortal::Stream *stream = sc_portal.getStream();
if (stream == nullptr) {
nhlog::ui()->error("xdg-desktop-portal stream not started");
gst_object_unref(pipe_);
pipe_ = nullptr;
return;
}
GstElement *pipewiresrc = gst_element_factory_make("pipewiresrc", nullptr);
g_object_set(pipewiresrc, "fd", (gint)stream->fd.fileDescriptor(), nullptr);
std::string path = std::to_string(stream->nodeId);
g_object_set(pipewiresrc, "path", path.c_str(), nullptr);
g_object_set(pipewiresrc, "do-timestamp", (gboolean)1, nullptr);
gst_bin_add(GST_BIN(pipe_), pipewiresrc);
screencastsrc = pipewiresrc;
}
if (!gst_element_link_many(
ximagesrc, videoconvert, videoscale, capsfilter, ximagesink, nullptr)) {
screencastsrc, videorate, videoconvert, videoscale, capsfilter, preview_sink, nullptr)) {
nhlog::ui()->error("Failed to link preview window elements");
gst_object_unref(pipe_);
pipe_ = nullptr;
return;
}
if (gst_element_set_state(pipe_, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
nhlog::ui()->error("Unable to start preview pipeline");
gst_object_unref(pipe_);
@ -954,6 +1093,41 @@ CallManager::previewWindow(unsigned int index) const
#endif
}
void
CallManager::setupScreenShareXDP()
{
#ifdef GSTREAMER_AVAILABLE
ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
sc_portal.init();
#endif
}
void
CallManager::setScreenShareType(unsigned int index)
{
#ifdef GSTREAMER_AVAILABLE
closeScreenShare();
if (index >= screenShareTypes_.size())
nhlog::ui()->error("WebRTC: Screen share type index out of range");
screenShareType_ = screenShareTypes_[index];
emit screenShareChanged();
#else
(void)index;
#endif
}
void
CallManager::closeScreenShare()
{
#ifdef GSTREAMER_AVAILABLE
close_preview_stream();
if (!isOnCall()) {
ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
sc_portal.close();
}
#endif
}
namespace {
std::vector<std::string>
getTurnURIs(const mtx::responses::TurnServer &turnServer)
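For reference, the portal preview assembled above amounts to a pipewiresrc fed with the portal's file descriptor and node id, rate-limited and converted before a waylandsink (ximagesink off Wayland). A rough standalone sketch, not the nheko code, assuming gst_init() has already run and the pipewire and wayland GStreamer plugins are installed:

// Minimal sketch (hypothetical helper, not part of this commit): preview an
// xdg-desktop-portal screencast from the fd/node id that ScreenCastPortal returns.
#include <gst/gst.h>
#include <cstdint>
#include <string>

static GstElement *
make_xdp_preview(int pipewire_fd, std::uint32_t node_id, int framerate)
{
    std::string desc = "pipewiresrc fd=" + std::to_string(pipewire_fd) +
                       " path=" + std::to_string(node_id) + " do-timestamp=true" +
                       " ! videorate ! video/x-raw,framerate=" +
                       std::to_string(framerate) + "/1" +
                       " ! videoconvert ! videoscale ! waylandsink";
    GError *error = nullptr;
    // Build the whole chain in one go instead of linking elements by hand.
    GstElement *pipe = gst_parse_launch(desc.c_str(), &error);
    if (!pipe) {
        g_printerr("preview pipeline failed: %s\n", error->message);
        g_error_free(error);
        return nullptr;
    }
    g_clear_error(&error); // ignore non-fatal parse warnings
    gst_element_set_state(pipe, GST_STATE_PLAYING);
    return pipe;
}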

View file

@ -17,6 +17,7 @@
#include "WebRTCSession.h"
#include "mtx/events/collections.hpp"
#include "mtx/events/voip.hpp"
#include "voip/ScreenCastPortal.h"
#include <mtxclient/utils.hpp>
namespace mtx::responses {
@ -32,6 +33,8 @@ class CallManager final : public QObject
Q_PROPERTY(bool isOnCall READ isOnCall NOTIFY newCallState)
Q_PROPERTY(bool isOnCallOnOtherDevice READ isOnCallOnOtherDevice NOTIFY newCallDeviceState)
Q_PROPERTY(webrtc::CallType callType READ callType NOTIFY newInviteState)
Q_PROPERTY(
webrtc::ScreenShareType screenShareType READ screenShareType NOTIFY screenShareChanged)
Q_PROPERTY(webrtc::State callState READ callState NOTIFY newCallState)
Q_PROPERTY(QString callParty READ callParty NOTIFY newInviteState)
Q_PROPERTY(QString callPartyDisplayName READ callPartyDisplayName NOTIFY newInviteState)
@ -41,7 +44,7 @@ class CallManager final : public QObject
Q_PROPERTY(QStringList mics READ mics NOTIFY devicesChanged)
Q_PROPERTY(QStringList cameras READ cameras NOTIFY devicesChanged)
Q_PROPERTY(bool callsSupported READ callsSupported CONSTANT)
Q_PROPERTY(bool screenShareSupported READ screenShareSupported CONSTANT)
Q_PROPERTY(bool screenShareReady READ screenShareReady NOTIFY screenShareChanged)
public:
CallManager(QObject *);
@ -51,6 +54,7 @@ public:
bool isOnCallOnOtherDevice() const { return (isOnCallOnOtherDevice_ != ""); }
bool checkSharesRoom(QString roomid_, std::string invitee) const;
webrtc::CallType callType() const { return callType_; }
webrtc::ScreenShareType screenShareType() const { return screenShareType_; }
webrtc::State callState() const { return session_.state(); }
QString callParty() const { return callParty_; }
QString callPartyDisplayName() const { return callPartyDisplayName_; }
@ -60,9 +64,9 @@ public:
QStringList mics() const { return devices(false); }
QStringList cameras() const { return devices(true); }
void refreshTurnServer();
bool screenShareReady() const;
static bool callsSupported();
static bool screenShareSupported();
public slots:
void sendInvite(const QString &roomid, webrtc::CallType, unsigned int windowIndex = 0);
@ -73,6 +77,10 @@ public slots:
void hangUp(
mtx::events::voip::CallHangUp::Reason = mtx::events::voip::CallHangUp::Reason::UserHangUp);
void rejectInvite();
void setupScreenShareXDP();
void setScreenShareType(unsigned int index);
void closeScreenShare();
QStringList screenShareTypeList();
QStringList windowList();
void previewWindow(unsigned int windowIndex) const;
@ -90,6 +98,7 @@ signals:
void micMuteChanged();
void devicesChanged();
void turnServerRetrieved(const mtx::responses::TurnServer &);
void screenShareChanged();
private slots:
void retrieveTurnServer();
@ -102,19 +111,21 @@ private:
QString callPartyAvatarUrl_;
std::string callPartyVersion_ = "1";
std::string callid_;
std::string partyid_ = mtx::client::utils::random_token(8, false);
std::string selectedpartyid_ = "";
std::string invitee_ = "";
const uint32_t timeoutms_ = 120000;
webrtc::CallType callType_ = webrtc::CallType::VOICE;
bool haveCallInvite_ = false;
bool answerSelected_ = false;
std::string isOnCallOnOtherDevice_ = "";
std::string partyid_ = mtx::client::utils::random_token(8, false);
std::string selectedpartyid_ = "";
std::string invitee_ = "";
const uint32_t timeoutms_ = 120000;
webrtc::CallType callType_ = webrtc::CallType::VOICE;
webrtc::ScreenShareType screenShareType_ = webrtc::ScreenShareType::X11;
bool haveCallInvite_ = false;
bool answerSelected_ = false;
std::string isOnCallOnOtherDevice_ = "";
std::string inviteSDP_;
std::vector<mtx::events::voip::CallCandidates::Candidate> remoteICECandidates_;
std::vector<std::string> turnURIs_;
QTimer turnServerTimer_;
QMediaPlayer player_;
std::vector<webrtc::ScreenShareType> screenShareTypes_;
std::vector<std::pair<QString, uint32_t>> windows_;
std::vector<std::string> rejectCallPartyIDs_;

View file

@ -0,0 +1,516 @@
// SPDX-FileCopyrightText: Nheko Contributors
//
// SPDX-License-Identifier: GPL-3.0-or-later
#ifdef GSTREAMER_AVAILABLE
#include "ScreenCastPortal.h"
#include "ChatPage.h"
#include "Logging.h"
#include "UserSettingsPage.h"
#include <QDBusConnection>
#include <QDBusMessage>
#include <QDBusPendingCallWatcher>
#include <QDBusPendingReply>
#include <QDBusUnixFileDescriptor>
#include <mtxclient/utils.hpp>
#include <random>
static QString
make_token()
{
return QString::fromStdString("nheko" + mtx::client::utils::random_token(64, false));
}
static QString
handle_path(QString handle_token)
{
QString sender = QDBusConnection::sessionBus().baseService();
if (sender[0] == ':')
sender.remove(0, 1);
sender.replace(".", "_");
return QStringLiteral("/org/freedesktop/portal/desktop/request/") + sender +
QStringLiteral("/") + handle_token;
}
bool
ScreenCastPortal::makeConnection(QString service,
QString path,
QString interface,
QString name,
const char *slot)
{
if (QDBusConnection::sessionBus().connect(service, path, interface, name, this, slot)) {
last_connection = {
std::move(service), std::move(path), std::move(interface), std::move(name), slot};
return true;
}
return false;
}
void
ScreenCastPortal::disconnectClose()
{
QDBusConnection::sessionBus().disconnect(QStringLiteral("org.freedesktop.portal.Desktop"),
sessionHandle.path(),
QStringLiteral("org.freedesktop.portal.Session"),
QStringLiteral("Closed"),
this,
SLOT(closedHandler(QVariantMap)));
}
void
ScreenCastPortal::removeConnection()
{
if (!last_connection.has_value())
return;
const auto &connection = *last_connection;
QDBusConnection::sessionBus().disconnect(connection[0],
connection[1],
connection[2],
connection[3],
this,
connection[4].toLocal8Bit().data());
last_connection = std::nullopt;
}
void
ScreenCastPortal::init()
{
switch (state) {
case State::Closed:
state = State::Starting;
createSession();
break;
case State::Starting:
nhlog::ui()->warn("ScreenCastPortal already starting");
break;
case State::Started:
close(true);
break;
case State::Closing:
nhlog::ui()->warn("ScreenCastPortal still closing");
break;
}
}
const ScreenCastPortal::Stream *
ScreenCastPortal::getStream() const
{
if (state != State::Started)
return nullptr;
else
return &stream;
}
bool
ScreenCastPortal::ready() const
{
return state == State::Started;
}
void
ScreenCastPortal::close(bool reinit)
{
switch (state) {
case State::Closed:
if (reinit)
init();
break;
case State::Starting:
if (!reinit) {
disconnectClose();
removeConnection();
state = State::Closed;
}
break;
case State::Started: {
state = State::Closing;
disconnectClose();
// Close file descriptor if it was opened
stream = Stream{};
emit readyChanged();
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
sessionHandle.path(),
QStringLiteral("org.freedesktop.portal.Session"),
QStringLiteral("Close"));
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(watcher,
&QDBusPendingCallWatcher::finished,
this,
[this, reinit](QDBusPendingCallWatcher *self) {
self->deleteLater();
QDBusPendingReply reply = *self;
if (!reply.isValid()) {
nhlog::ui()->warn("org.freedesktop.portal.ScreenCast (Close): {}",
reply.error().message().toStdString());
}
state = State::Closed;
if (reinit)
init();
});
} break;
case State::Closing:
nhlog::ui()->warn("ScreenCastPortal already closing");
break;
}
}
void
ScreenCastPortal::closedHandler(uint response, const QVariantMap &)
{
removeConnection();
disconnectClose();
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (Closed): {}", response);
}
nhlog::ui()->debug("org.freedesktop.portal.ScreenCast: Connection closed");
state = State::Closed;
emit readyChanged();
}
void
ScreenCastPortal::createSession()
{
// Connect before sending the request to avoid missing the reply
QString handle_token = make_token();
if (!makeConnection(QStringLiteral("org.freedesktop.portal.Desktop"),
handle_path(handle_token),
QStringLiteral("org.freedesktop.portal.Request"),
QStringLiteral("Response"),
SLOT(createSessionHandler(uint, QVariantMap)))) {
nhlog::ui()->error(
"Connection to signal Response for org.freedesktop.portal.Request failed");
close();
return;
}
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("CreateSession"));
msg << QVariantMap{{QStringLiteral("handle_token"), handle_token},
{QStringLiteral("session_handle_token"), make_token()}};
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
self->deleteLater();
QDBusPendingReply<QDBusObjectPath> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (CreateSession): {}",
reply.error().message().toStdString());
close();
}
});
}
void
ScreenCastPortal::createSessionHandler(uint response, const QVariantMap &results)
{
removeConnection();
if (state != State::Starting) {
nhlog::ui()->warn("ScreenCastPortal not starting");
return;
}
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (CreateSession Response): {}",
response);
close();
return;
}
sessionHandle = QDBusObjectPath(results.value(QStringLiteral("session_handle")).toString());
nhlog::ui()->debug("org.freedesktop.portal.ScreenCast: sessionHandle = {}",
sessionHandle.path().toStdString());
QDBusConnection::sessionBus().connect(QStringLiteral("org.freedesktop.portal.Desktop"),
sessionHandle.path(),
QStringLiteral("org.freedesktop.portal.Session"),
QStringLiteral("Closed"),
this,
SLOT(closedHandler(QVariantMap)));
getAvailableSourceTypes();
}
void
ScreenCastPortal::getAvailableSourceTypes()
{
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.DBus.Properties"),
QStringLiteral("Get"));
msg << QStringLiteral("org.freedesktop.portal.ScreenCast")
<< QStringLiteral("AvailableSourceTypes");
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
self->deleteLater();
QDBusPendingReply<QDBusVariant> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.DBus.Properties (Get AvailableSourceTypes): {}",
reply.error().message().toStdString());
close();
return;
}
if (state != State::Starting) {
nhlog::ui()->warn("ScreenCastPortal not starting");
return;
}
const auto &value = reply.value().variant();
if (value.canConvert<uint>()) {
availableSourceTypes = value.value<uint>();
} else {
nhlog::ui()->error("Invalid reply from org.freedesktop.DBus.Properties (Get "
"AvailableSourceTypes)");
close();
return;
}
getAvailableCursorModes();
});
}
void
ScreenCastPortal::getAvailableCursorModes()
{
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.DBus.Properties"),
QStringLiteral("Get"));
msg << QStringLiteral("org.freedesktop.portal.ScreenCast")
<< QStringLiteral("AvailableCursorModes");
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
self->deleteLater();
QDBusPendingReply<QDBusVariant> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.DBus.Properties (Get AvailableCursorModes): {}",
reply.error().message().toStdString());
close();
return;
}
if (state != State::Starting) {
nhlog::ui()->warn("ScreenCastPortal not starting");
return;
}
const auto &value = reply.value().variant();
if (value.canConvert<uint>()) {
availableCursorModes = value.value<uint>();
} else {
nhlog::ui()->error("Invalid reply from org.freedesktop.DBus.Properties (Get "
"AvailableCursorModes)");
close();
return;
}
selectSources();
});
}
void
ScreenCastPortal::selectSources()
{
// Connect before sending the request to avoid missing the reply
auto handle_token = make_token();
if (!makeConnection(QString(),
handle_path(handle_token),
QStringLiteral("org.freedesktop.portal.Request"),
QStringLiteral("Response"),
SLOT(selectSourcesHandler(uint, QVariantMap)))) {
nhlog::ui()->error(
"Connection to signal Response for org.freedesktop.portal.Request failed");
close();
return;
}
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("SelectSources"));
QVariantMap options{{QStringLiteral("multiple"), false},
{QStringLiteral("types"), availableSourceTypes},
{QStringLiteral("handle_token"), handle_token}};
auto settings = ChatPage::instance()->userSettings();
if (settings->screenShareHideCursor() && (availableCursorModes & (uint)1) != 0) {
options["cursor_mode"] = (uint)1;
}
msg << QVariant::fromValue(sessionHandle) << options;
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
self->deleteLater();
QDBusPendingReply<QDBusObjectPath> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (SelectSources): {}",
reply.error().message().toStdString());
close();
}
});
}
void
ScreenCastPortal::selectSourcesHandler(uint response, const QVariantMap &)
{
removeConnection();
if (state != State::Starting) {
nhlog::ui()->warn("ScreenCastPortal not starting");
return;
}
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (SelectSources Response): {}",
response);
close();
return;
}
start();
}
void
ScreenCastPortal::start()
{
// Connect before sending the request to avoid missing the reply
auto handle_token = make_token();
if (!makeConnection(QString(),
handle_path(handle_token),
QStringLiteral("org.freedesktop.portal.Request"),
QStringLiteral("Response"),
SLOT(startHandler(uint, QVariantMap)))) {
nhlog::ui()->error("Connection to org.freedesktop.portal.Request Response failed");
close();
return;
}
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("Start"));
msg << QVariant::fromValue(sessionHandle) << QString()
<< QVariantMap{{QStringLiteral("handle_token"), handle_token}};
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
self->deleteLater();
QDBusPendingReply<QDBusObjectPath> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (Start): {}",
reply.error().message().toStdString());
}
});
}
struct PipeWireStream
{
quint32 nodeId = 0;
QVariantMap map;
};
Q_DECLARE_METATYPE(PipeWireStream)
const QDBusArgument &
operator>>(const QDBusArgument &argument, PipeWireStream &stream)
{
argument.beginStructure();
argument >> stream.nodeId;
argument.beginMap();
while (!argument.atEnd()) {
QString key;
QVariant map;
argument.beginMapEntry();
argument >> key >> map;
argument.endMapEntry();
stream.map.insert(key, map);
}
argument.endMap();
argument.endStructure();
return argument;
}
void
ScreenCastPortal::startHandler(uint response, const QVariantMap &results)
{
removeConnection();
if (response != 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (Start Response): {}", response);
close();
return;
}
QVector<PipeWireStream> streams =
qdbus_cast<QVector<PipeWireStream>>(results.value(QStringLiteral("streams")));
if (streams.size() == 0) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast: No stream was returned");
close();
return;
}
stream.nodeId = streams[0].nodeId;
nhlog::ui()->debug("org.freedesktop.portal.ScreenCast: nodeId = {}", stream.nodeId);
openPipeWireRemote();
}
void
ScreenCastPortal::openPipeWireRemote()
{
auto msg = QDBusMessage::createMethodCall(QStringLiteral("org.freedesktop.portal.Desktop"),
QStringLiteral("/org/freedesktop/portal/desktop"),
QStringLiteral("org.freedesktop.portal.ScreenCast"),
QStringLiteral("OpenPipeWireRemote"));
msg << QVariant::fromValue(sessionHandle) << QVariantMap{};
QDBusPendingCall pendingCall = QDBusConnection::sessionBus().asyncCall(msg);
QDBusPendingCallWatcher *watcher = new QDBusPendingCallWatcher(pendingCall, this);
connect(
watcher, &QDBusPendingCallWatcher::finished, this, [this](QDBusPendingCallWatcher *self) {
self->deleteLater();
QDBusPendingReply<QDBusUnixFileDescriptor> reply = *self;
if (!reply.isValid()) {
nhlog::ui()->error("org.freedesktop.portal.ScreenCast (OpenPipeWireRemote): {}",
reply.error().message().toStdString());
close();
} else {
stream.fd = std::move(reply.value());
nhlog::ui()->error("org.freedesktop.portal.ScreenCast: fd = {}",
stream.fd.fileDescriptor());
state = State::Started;
emit readyChanged();
}
});
}
#endif
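A note on the shape of this file: every ScreenCast method only returns the object path of an org.freedesktop.portal.Request, and the actual result arrives later as that request's Response signal, so makeConnection() subscribes to the expected handle path (the caller's unique bus name with the leading ':' dropped and '.' mapped to '_', plus the handle_token) before the method call goes out. The random tokens keep concurrent requests apart, and the Session's Closed signal lets the compositor terminate the cast from its side, which closedHandler() turns back into the Closed state.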

View file

@ -0,0 +1,79 @@
// SPDX-FileCopyrightText: Nheko Contributors
//
// SPDX-License-Identifier: GPL-3.0-or-later
#pragma once
#ifdef GSTREAMER_AVAILABLE
#include <QDBusConnection>
#include <QDBusMessage>
#include <QDBusPendingCallWatcher>
#include <QDBusPendingReply>
#include <QDBusUnixFileDescriptor>
#include <QObject>
#include <optional>
class ScreenCastPortal final : public QObject
{
Q_OBJECT
public:
struct Stream
{
QDBusUnixFileDescriptor fd;
quint32 nodeId;
};
static ScreenCastPortal &instance()
{
static ScreenCastPortal instance;
return instance;
}
void init();
const Stream *getStream() const;
bool ready() const;
void close(bool reinit = false);
public slots:
void createSessionHandler(uint response, const QVariantMap &results);
void closedHandler(uint response, const QVariantMap &results);
void selectSourcesHandler(uint response, const QVariantMap &results);
void startHandler(uint response, const QVariantMap &results);
signals:
void readyChanged();
private:
void createSession();
void getAvailableSourceTypes();
void getAvailableCursorModes();
void selectSources();
void start();
void openPipeWireRemote();
bool makeConnection(QString service,
QString path,
QString interface,
QString name,
const char *slot);
void removeConnection();
void disconnectClose();
QDBusObjectPath sessionHandle;
uint availableSourceTypes;
uint availableCursorModes;
Stream stream;
enum class State
{
Closed,
Starting,
Started,
Closing,
};
State state = State::Closed;
std::optional<std::array<QString, 5>> last_connection;
};
#endif
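The header is small enough that the intended call pattern is easy to sketch; a rough usage example (assumed, not taken from the commit), driving the singleton and reacting to readyChanged():

// Hypothetical caller: request a cast and pick up the PipeWire stream once the
// portal has granted it. CallManager does essentially this via its connects.
#include "voip/ScreenCastPortal.h"
#include <QObject>

void startCast(QObject *context)
{
    auto &portal = ScreenCastPortal::instance();
    QObject::connect(&portal, &ScreenCastPortal::readyChanged, context, [&portal]() {
        if (const ScreenCastPortal::Stream *s = portal.getStream()) {
            // s->fd holds the PipeWire remote fd and s->nodeId the stream node,
            // ready to be used as the fd/path properties of a pipewiresrc.
            Q_UNUSED(s);
        }
    });
    portal.init(); // CreateSession -> SelectSources -> Start -> OpenPipeWireRemote
    // portal.close() later tears the session down when no call needs it.
}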

View file

@ -2,6 +2,7 @@
//
// SPDX-License-Identifier: GPL-3.0-or-later
#include <QGuiApplication>
#include <QQmlEngine>
#include <QQuickItem>
#include <algorithm>
@ -19,6 +20,7 @@
#include "Logging.h"
#include "UserSettingsPage.h"
#include "WebRTCSession.h"
#include "voip/ScreenCastPortal.h"
#ifdef GSTREAMER_AVAILABLE
extern "C"
@ -40,9 +42,11 @@ extern "C"
#define STUN_SERVER "stun://turn.matrix.org:3478"
Q_DECLARE_METATYPE(webrtc::CallType)
Q_DECLARE_METATYPE(webrtc::ScreenShareType)
Q_DECLARE_METATYPE(webrtc::State)
using webrtc::CallType;
using webrtc::ScreenShareType;
using webrtc::State;
WebRTCSession::WebRTCSession()
@ -56,6 +60,14 @@ WebRTCSession::WebRTCSession()
"CallType",
QStringLiteral("Can't instantiate enum"));
qRegisterMetaType<webrtc::ScreenShareType>();
qmlRegisterUncreatableMetaObject(webrtc::staticMetaObject,
"im.nheko",
1,
0,
"ScreenShareType",
QStringLiteral("Can't instantiate enum"));
qRegisterMetaType<webrtc::State>();
qmlRegisterUncreatableMetaObject(webrtc::staticMetaObject,
"im.nheko",
@ -578,13 +590,13 @@ getMediaAttributes(const GstSDPMessage *sdp,
}
bool
WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *errorMessage)
WebRTCSession::havePlugins(bool isVideo,
bool isScreenshare,
ScreenShareType screenShareType,
std::string *errorMessage)
{
if (!initialised_ && !init(errorMessage))
return false;
if (haveVoicePlugins_ && (!isVideo || haveVideoPlugins_) &&
(!isX11Screenshare || haveX11ScreensharePlugins_))
return true;
static constexpr std::initializer_list<const char *> audio_elements = {
"audioconvert",
@ -611,10 +623,6 @@ WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *err
"videoscale",
"vp8enc",
};
static constexpr std::initializer_list<const char *> screenshare_elements = {
"ximagesink",
"ximagesrc",
};
std::string strError("Missing GStreamer elements: ");
GstRegistry *registry = gst_registry_get();
@ -641,18 +649,35 @@ WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *err
// check both elements at once
if (isVideo)
haveVideoPlugins_ = check_plugins(video_elements);
if (isX11Screenshare)
haveX11ScreensharePlugins_ = check_plugins(screenshare_elements);
bool haveScreensharePlugins = false;
if (isScreenshare) {
haveScreensharePlugins = check_plugins({"videorate"});
if (haveScreensharePlugins) {
if (QGuiApplication::platformName() == QStringLiteral("wayland")) {
haveScreensharePlugins = check_plugins({"waylandsink"});
} else {
haveScreensharePlugins = check_plugins({"ximagesink"});
}
}
if (haveScreensharePlugins) {
if (screenShareType == ScreenShareType::X11) {
haveScreensharePlugins = check_plugins({"ximagesrc"});
} else {
haveScreensharePlugins = check_plugins({"pipewiresrc"});
}
}
}
if (!haveVoicePlugins_ || (isVideo && !haveVideoPlugins_) ||
(isX11Screenshare && !haveX11ScreensharePlugins_)) {
(isScreenshare && !haveScreensharePlugins)) {
nhlog::ui()->error(strError);
if (errorMessage)
*errorMessage = strError;
return false;
}
if (isVideo || isX11Screenshare) {
if (isVideo || isScreenshare) {
// load qmlglsink to register GStreamer's GstGLVideoItem QML type
GstElement *qmlglsink = gst_element_factory_make("qmlglsink", nullptr);
gst_object_unref(qmlglsink);
@ -661,12 +686,15 @@ WebRTCSession::havePlugins(bool isVideo, bool isX11Screenshare, std::string *err
}
bool
WebRTCSession::createOffer(CallType callType, uint32_t shareWindowId)
WebRTCSession::createOffer(CallType callType,
ScreenShareType screenShareType,
uint32_t shareWindowId)
{
clear();
isOffering_ = true;
callType_ = callType;
shareWindowId_ = shareWindowId;
isOffering_ = true;
callType_ = callType;
screenShareType_ = screenShareType;
shareWindowId_ = shareWindowId;
// opus and vp8 rtp payload types must be defined dynamically
// therefore from the range [96-127]
@ -924,6 +952,7 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
GstElement *tee = gst_element_factory_make("tee", "videosrctee");
gst_bin_add_many(GST_BIN(pipe_), videoconvert, tee, nullptr);
if (callType_ == CallType::VIDEO || (settings->screenSharePiP() && devices_.haveCamera())) {
std::pair<int, int> resolution;
std::pair<int, int> frameRate;
@ -969,16 +998,56 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
nhlog::ui()->debug("WebRTC: screen share hide mouse cursor: {}",
settings->screenShareHideCursor());
GstElement *ximagesrc = gst_element_factory_make("ximagesrc", "screenshare");
if (!ximagesrc) {
nhlog::ui()->error("WebRTC: failed to create ximagesrc");
return false;
GstElement *screencastsrc = nullptr;
if (screenShareType_ == ScreenShareType::X11) {
GstElement *ximagesrc = gst_element_factory_make("ximagesrc", "screenshare");
if (!ximagesrc) {
nhlog::ui()->error("WebRTC: failed to create ximagesrc");
return false;
}
g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
g_object_set(ximagesrc, "xid", shareWindowId_, nullptr);
g_object_set(ximagesrc, "show-pointer", !settings->screenShareHideCursor(), nullptr);
g_object_set(ximagesrc, "do-timestamp", (gboolean)1, nullptr);
gst_bin_add(GST_BIN(pipe_), ximagesrc);
screencastsrc = ximagesrc;
} else {
ScreenCastPortal &sc_portal = ScreenCastPortal::instance();
GstElement *pipewiresrc = gst_element_factory_make("pipewiresrc", "screenshare");
if (!pipewiresrc) {
nhlog::ui()->error("WebRTC: failed to create pipewiresrc");
gst_object_unref(pipe_);
pipe_ = nullptr;
return false;
}
const ScreenCastPortal::Stream *stream = sc_portal.getStream();
if (stream == nullptr) {
nhlog::ui()->error("xdg-desktop-portal stream not started");
gst_object_unref(pipe_);
pipe_ = nullptr;
return false;
}
g_object_set(pipewiresrc, "fd", (gint)stream->fd.fileDescriptor(), nullptr);
std::string path = std::to_string(stream->nodeId);
g_object_set(pipewiresrc, "path", path.c_str(), nullptr);
g_object_set(pipewiresrc, "do-timestamp", (gboolean)1, nullptr);
gst_bin_add(GST_BIN(pipe_), pipewiresrc);
GstElement *videorate = gst_element_factory_make("videorate", nullptr);
gst_bin_add(GST_BIN(pipe_), videorate);
if (!gst_element_link(pipewiresrc, videorate)) {
nhlog::ui()->error("WebRTC: failed to link pipewiresrc -> videorate");
return false;
}
screencastsrc = videorate;
}
g_object_set(ximagesrc, "use-damage", FALSE, nullptr);
g_object_set(ximagesrc, "xid", shareWindowId_, nullptr);
g_object_set(ximagesrc, "show-pointer", !settings->screenShareHideCursor(), nullptr);
GstCaps *caps = gst_caps_new_simple("video/x-raw",
"format",
G_TYPE_STRING,
"I420", // For vp8enc
"framerate",
GST_TYPE_FRACTION,
settings->screenShareFrameRate(),
@ -987,13 +1056,13 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);
g_object_set(capsfilter, "caps", caps, nullptr);
gst_caps_unref(caps);
gst_bin_add_many(GST_BIN(pipe_), ximagesrc, capsfilter, nullptr);
gst_bin_add(GST_BIN(pipe_), capsfilter);
if (settings->screenSharePiP() && devices_.haveCamera()) {
GstElement *compositor = gst_element_factory_make("compositor", nullptr);
g_object_set(compositor, "background", 1, nullptr);
gst_bin_add(GST_BIN(pipe_), compositor);
if (!gst_element_link_many(ximagesrc, compositor, capsfilter, tee, nullptr)) {
if (!gst_element_link_many(screencastsrc, compositor, capsfilter, tee, nullptr)) {
nhlog::ui()->error("WebRTC: failed to link screen share elements");
return false;
}
@ -1006,7 +1075,7 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
return false;
}
gst_object_unref(srcpad);
} else if (!gst_element_link_many(ximagesrc, videoconvert, capsfilter, tee, nullptr)) {
} else if (!gst_element_link_many(screencastsrc, videoconvert, capsfilter, tee, nullptr)) {
nhlog::ui()->error("WebRTC: failed to link screen share elements");
return false;
}
@ -1157,7 +1226,7 @@ WebRTCSession::end()
#else
bool
WebRTCSession::havePlugins(bool, bool, std::string *)
WebRTCSession::havePlugins(bool, bool, ScreenShareType, std::string *)
{
return false;
}
@ -1171,8 +1240,11 @@ WebRTCSession::haveLocalPiP() const
// clang-format off
// clang-format < 12 is buggy on this
bool
WebRTCSession::createOffer(webrtc::CallType, uint32_t)
WebRTCSession::createOffer(webrtc::CallType,
ScreenShareType screenShareType,
uint32_t)
{
(void)screenShareType;
return false;
}
// clang-format on
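On the sending side the portal path mirrors the X11 one: pipewiresrc (configured with the portal's fd and node id) is followed by a videorate element before joining the shared chain, the caps filter keeps forcing I420 at the configured screenShareFrameRate for vp8enc, and with picture-in-picture enabled the screencast is composited with the camera feed exactly as before.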

View file

@ -26,6 +26,13 @@ enum class CallType
};
Q_ENUM_NS(CallType)
enum class ScreenShareType
{
X11,
XDP
};
Q_ENUM_NS(ScreenShareType)
enum class State
{
DISCONNECTED,
@ -52,7 +59,10 @@ public:
return instance;
}
bool havePlugins(bool isVideo, bool isX11Screenshare, std::string *errorMessage = nullptr);
bool havePlugins(bool isVideo,
bool isScreenshare,
webrtc::ScreenShareType screenShareType,
std::string *errorMessage = nullptr);
webrtc::CallType callType() const { return callType_; }
webrtc::State state() const { return state_; }
bool haveLocalPiP() const;
@ -60,7 +70,7 @@ public:
bool isRemoteVideoRecvOnly() const { return isRemoteVideoRecvOnly_; }
bool isRemoteVideoSendOnly() const { return isRemoteVideoSendOnly_; }
bool createOffer(webrtc::CallType, uint32_t shareWindowId);
bool createOffer(webrtc::CallType, webrtc::ScreenShareType, uint32_t shareWindowId);
bool acceptOffer(const std::string &sdp);
bool acceptAnswer(const std::string &sdp);
bool acceptNegotiation(const std::string &sdp);
@ -91,19 +101,19 @@ private:
WebRTCSession();
CallDevices &devices_;
bool initialised_ = false;
bool haveVoicePlugins_ = false;
bool haveVideoPlugins_ = false;
bool haveX11ScreensharePlugins_ = false;
webrtc::CallType callType_ = webrtc::CallType::VOICE;
webrtc::State state_ = webrtc::State::DISCONNECTED;
bool isOffering_ = false;
bool isRemoteVideoRecvOnly_ = false;
bool isRemoteVideoSendOnly_ = false;
QQuickItem *videoItem_ = nullptr;
GstElement *pipe_ = nullptr;
GstElement *webrtc_ = nullptr;
unsigned int busWatchId_ = 0;
bool initialised_ = false;
bool haveVoicePlugins_ = false;
bool haveVideoPlugins_ = false;
webrtc::CallType callType_ = webrtc::CallType::VOICE;
webrtc::ScreenShareType screenShareType_ = webrtc::ScreenShareType::X11;
webrtc::State state_ = webrtc::State::DISCONNECTED;
bool isOffering_ = false;
bool isRemoteVideoRecvOnly_ = false;
bool isRemoteVideoSendOnly_ = false;
QQuickItem *videoItem_ = nullptr;
GstElement *pipe_ = nullptr;
GstElement *webrtc_ = nullptr;
unsigned int busWatchId_ = 0;
std::vector<std::string> turnServers_;
uint32_t shareWindowId_ = 0;