Mirror of https://github.com/Nheko-Reborn/nheko.git (synced 2024-11-29 06:08:48 +03:00)

Commit: 70c77cdc44
Parent: e8e88e7d79

Display screen sharing content locally

7 changed files with 90 additions and 68 deletions
@@ -249,7 +249,7 @@ Page {
     }
 
     Loader {
-        source: CallManager.isOnCall && CallManager.haveVideo ? "voip/VideoCall.qml" : ""
+        source: CallManager.isOnCall && CallManager.callType != CallType.VOICE ? "voip/VideoCall.qml" : ""
         onLoaded: TimelineManager.setVideoCallItem()
     }
 
@@ -12,7 +12,7 @@ Rectangle {
     MouseArea {
         anchors.fill: parent
         onClicked: {
-            if (CallManager.haveVideo)
+            if (CallManager.callType != CallType.VOICE)
                 stackLayout.currentIndex = stackLayout.currentIndex ? 0 : 1;
 
         }
@@ -139,7 +139,7 @@ Rectangle {
 
         PropertyChanges {
             target: stackLayout
-            currentIndex: CallManager.haveVideo ? 1 : 0
+            currentIndex: CallManager.callType != CallType.VOICE ? 1 : 0
         }
 
     },
@@ -196,15 +196,15 @@ Rectangle {
         }
 
         ImageButton {
-            visible: CallManager.haveLocalCamera
+            visible: CallManager.haveLocalPiP
             width: 24
             height: 24
             buttonTextColor: "#000000"
             image: ":/icons/icons/ui/toggle-camera-view.png"
             hoverEnabled: true
             ToolTip.visible: hovered
-            ToolTip.text: qsTr("Toggle camera view")
-            onClicked: CallManager.toggleCameraView()
+            ToolTip.text: qsTr("Hide/Show Picture-in-Picture")
+            onClicked: CallManager.toggleLocalPiP()
         }
 
         ImageButton {
@@ -21,7 +21,10 @@ Popup {
 
     ColumnLayout {
         Label {
-            Layout.margins: 8
+            Layout.topMargin: 16
+            Layout.bottomMargin: 16
+            Layout.leftMargin: 8
+            Layout.rightMargin: 8
             Layout.alignment: Qt.AlignLeft
             text: qsTr("Share desktop with %1?").arg(TimelineManager.timeline.roomName)
             color: colors.windowText
@@ -405,15 +405,6 @@ CallManager::screenShareSupported()
         return std::getenv("DISPLAY") && !std::getenv("WAYLAND_DISPLAY");
 }
 
-bool
-CallManager::haveVideo() const
-{
-        return callType() == CallType::VIDEO ||
-               (callType() == CallType::SCREEN &&
-                (ChatPage::instance()->userSettings()->screenShareRemoteVideo() &&
-                 !session_.isRemoteVideoRecvOnly()));
-}
-
 QStringList
 CallManager::devices(bool isVideo) const
 {
@@ -30,8 +30,7 @@ class CallManager : public QObject
         Q_PROPERTY(QString callParty READ callParty NOTIFY newInviteState)
         Q_PROPERTY(QString callPartyAvatarUrl READ callPartyAvatarUrl NOTIFY newInviteState)
         Q_PROPERTY(bool isMicMuted READ isMicMuted NOTIFY micMuteChanged)
-        Q_PROPERTY(bool haveLocalCamera READ haveLocalCamera NOTIFY newCallState)
-        Q_PROPERTY(bool haveVideo READ haveVideo NOTIFY newInviteState)
+        Q_PROPERTY(bool haveLocalPiP READ haveLocalPiP NOTIFY newCallState)
         Q_PROPERTY(QStringList mics READ mics NOTIFY devicesChanged)
         Q_PROPERTY(QStringList cameras READ cameras NOTIFY devicesChanged)
         Q_PROPERTY(bool callsSupported READ callsSupported CONSTANT)
@@ -47,8 +46,7 @@ public:
         QString callParty() const { return callParty_; }
         QString callPartyAvatarUrl() const { return callPartyAvatarUrl_; }
         bool isMicMuted() const { return session_.isMicMuted(); }
-        bool haveLocalCamera() const { return session_.haveLocalCamera(); }
-        bool haveVideo() const;
+        bool haveLocalPiP() const { return session_.haveLocalPiP(); }
         QStringList mics() const { return devices(false); }
         QStringList cameras() const { return devices(true); }
         void refreshTurnServer();
@@ -60,7 +58,7 @@ public slots:
         void sendInvite(const QString &roomid, webrtc::CallType);
         void syncEvent(const mtx::events::collections::TimelineEvents &event);
         void toggleMicMute();
-        void toggleCameraView() { session_.toggleCameraView(); }
+        void toggleLocalPiP() { session_.toggleLocalPiP(); }
         void acceptInvite();
         void hangUp(
           mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
@@ -350,42 +350,59 @@ getResolution(GstPad *pad)
         return ret;
 }
 
+std::pair<int, int>
+getResolution(GstElement *pipe, const gchar *elementName, const gchar *padName)
+{
+        GstElement *element = gst_bin_get_by_name(GST_BIN(pipe), elementName);
+        GstPad *pad         = gst_element_get_static_pad(element, padName);
+        auto ret            = getResolution(pad);
+        gst_object_unref(pad);
+        gst_object_unref(element);
+        return ret;
+}
+
+std::pair<int, int>
+getPiPDimensions(const std::pair<int, int> resolution, int fullWidth, double scaleFactor)
+{
+        int pipWidth  = fullWidth * scaleFactor;
+        int pipHeight = static_cast<double>(resolution.second) / resolution.first * pipWidth;
+        return {pipWidth, pipHeight};
+}
+
 void
 addLocalPiP(GstElement *pipe, const std::pair<int, int> &videoCallSize)
 {
-        // embed localUser's camera into received video
+        // embed localUser's camera into received video (CallType::VIDEO)
+        // OR embed screen share into received video (CallType::SCREEN)
         GstElement *tee = gst_bin_get_by_name(GST_BIN(pipe), "videosrctee");
         if (!tee)
                 return;
 
         GstElement *queue = gst_element_factory_make("queue", nullptr);
-        GstElement *videorate = gst_element_factory_make("videorate", nullptr);
-        gst_bin_add_many(GST_BIN(pipe), queue, videorate, nullptr);
-        gst_element_link_many(tee, queue, videorate, nullptr);
+        gst_bin_add(GST_BIN(pipe), queue);
+        gst_element_link(tee, queue);
         gst_element_sync_state_with_parent(queue);
-        gst_element_sync_state_with_parent(videorate);
         gst_object_unref(tee);
 
-        GstElement *camerafilter = gst_bin_get_by_name(GST_BIN(pipe), "camerafilter");
-        GstPad *filtersinkpad    = gst_element_get_static_pad(camerafilter, "sink");
-        auto cameraResolution    = getResolution(filtersinkpad);
-        int pipWidth             = videoCallSize.first / 4;
-        int pipHeight =
-          static_cast<double>(cameraResolution.second) / cameraResolution.first * pipWidth;
-        nhlog::ui()->debug("WebRTC: local picture-in-picture: {}x{}", pipWidth, pipHeight);
-        gst_object_unref(filtersinkpad);
-        gst_object_unref(camerafilter);
-
-        GstPad *camerapad      = gst_element_get_static_pad(videorate, "src");
         GstElement *compositor = gst_bin_get_by_name(GST_BIN(pipe), "compositor");
         localPiPSinkPad_       = gst_element_get_request_pad(compositor, "sink_%u");
         g_object_set(localPiPSinkPad_, "zorder", 2, nullptr);
-        g_object_set(localPiPSinkPad_, "width", pipWidth, "height", pipHeight, nullptr);
+
+        bool isVideo         = WebRTCSession::instance().callType() == CallType::VIDEO;
+        const gchar *element = isVideo ? "camerafilter" : "screenshare";
+        const gchar *pad     = isVideo ? "sink" : "src";
+        auto resolution      = getResolution(pipe, element, pad);
+        auto pipSize         = getPiPDimensions(resolution, videoCallSize.first, 0.25);
+        nhlog::ui()->debug(
+          "WebRTC: local picture-in-picture: {}x{}", pipSize.first, pipSize.second);
+        g_object_set(localPiPSinkPad_, "width", pipSize.first, "height", pipSize.second, nullptr);
         gint offset = videoCallSize.first / 80;
         g_object_set(localPiPSinkPad_, "xpos", offset, "ypos", offset, nullptr);
-        if (GST_PAD_LINK_FAILED(gst_pad_link(camerapad, localPiPSinkPad_)))
+
+        GstPad *srcpad = gst_element_get_static_pad(queue, "src");
+        if (GST_PAD_LINK_FAILED(gst_pad_link(srcpad, localPiPSinkPad_)))
                 nhlog::ui()->error("WebRTC: failed to link local PiP elements");
-        gst_object_unref(camerapad);
+        gst_object_unref(srcpad);
         gst_object_unref(compositor);
 }
 
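The new getPiPDimensions() helper sizes the picture-in-picture overlay as a fraction of the full frame width while keeping the source's aspect ratio, and addLocalPiP() then places it at an offset of 1/80 of the frame width from the top-left corner. The sketch below restates that arithmetic as a standalone program; the resolutions are illustrative only, and the helper is renamed pipDimensions() so the example builds on its own.

```cpp
#include <cstdio>
#include <utility>

// Standalone restatement of the sizing rule from getPiPDimensions(): the PiP
// width is a fraction of the full frame width and the height preserves the
// source's aspect ratio. Renamed here to keep the sketch self-contained.
std::pair<int, int>
pipDimensions(std::pair<int, int> sourceResolution, int fullWidth, double scaleFactor)
{
        int pipWidth  = fullWidth * scaleFactor;
        int pipHeight = static_cast<double>(sourceResolution.second) /
                        sourceResolution.first * pipWidth;
        return {pipWidth, pipHeight};
}

int
main()
{
        // Illustrative values only: a 1280x720 incoming video and a 640x480 camera.
        std::pair<int, int> remote{1280, 720};
        std::pair<int, int> camera{640, 480};

        // addLocalPiP() uses a 0.25 scale factor and a corner offset of 1/80
        // of the full width for the "xpos"/"ypos" compositor pad properties.
        auto pip   = pipDimensions(camera, remote.first, 0.25);
        int offset = remote.first / 80;

        std::printf("local PiP: %dx%d at (%d,%d)\n",
                    pip.first, pip.second, offset, offset); // 320x240 at (16,16)
}
```
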
@@ -394,31 +411,37 @@ addRemotePiP(GstElement *pipe)
 {
         // embed localUser's camera into screen image being shared
         if (remotePiPSinkPad_) {
-                GstElement *screen = gst_bin_get_by_name(GST_BIN(pipe), "screenshare");
-                GstPad *srcpad     = gst_element_get_static_pad(screen, "src");
-                auto resolution    = getResolution(srcpad);
+                auto camRes   = getResolution(pipe, "camerafilter", "sink");
+                auto shareRes = getResolution(pipe, "screenshare", "src");
+                auto pipSize  = getPiPDimensions(camRes, shareRes.first, 0.2);
                 nhlog::ui()->debug(
-                  "WebRTC: screen share: {}x{}", resolution.first, resolution.second);
-                gst_object_unref(srcpad);
-                gst_object_unref(screen);
-
-                int pipWidth = resolution.first / 5;
-                int pipHeight =
-                  static_cast<double>(resolution.second) / resolution.first * pipWidth;
-                nhlog::ui()->debug(
-                  "WebRTC: screen share picture-in-picture: {}x{}", pipWidth, pipHeight);
-                gint offset = resolution.first / 100;
+                  "WebRTC: screen share picture-in-picture: {}x{}", pipSize.first, pipSize.second);
+
+                gint offset = shareRes.first / 100;
                 g_object_set(remotePiPSinkPad_, "zorder", 2, nullptr);
-                g_object_set(remotePiPSinkPad_, "width", pipWidth, "height", pipHeight, nullptr);
+                g_object_set(
+                  remotePiPSinkPad_, "width", pipSize.first, "height", pipSize.second, nullptr);
                 g_object_set(remotePiPSinkPad_,
                              "xpos",
-                             resolution.first - pipWidth - offset,
+                             shareRes.first - pipSize.first - offset,
                              "ypos",
-                             resolution.second - pipHeight - offset,
+                             shareRes.second - pipSize.second - offset,
                              nullptr);
         }
 }
 
+void
+addLocalVideo(GstElement *pipe)
+{
+        GstElement *queue = newVideoSinkChain(pipe);
+        GstElement *tee   = gst_bin_get_by_name(GST_BIN(pipe), "videosrctee");
+        GstPad *srcpad    = gst_element_get_request_pad(tee, "src_%u");
+        GstPad *sinkpad   = gst_element_get_static_pad(queue, "sink");
+        if (GST_PAD_LINK_FAILED(gst_pad_link(srcpad, sinkpad)))
+                nhlog::ui()->error("WebRTC: failed to link videosrctee -> video sink chain");
+        gst_object_unref(srcpad);
+}
+
 void
 linkNewPad(GstElement *decodebin, GstPad *newpad, GstElement *pipe)
 {
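addRemotePiP() applies the same sizing rule with a 0.2 scale factor and pins the local camera to the bottom-right corner of the shared screen, inset by 1/100 of the shared width. A second standalone sketch of that placement, again with made-up resolutions:

```cpp
#include <cstdio>
#include <utility>

// Same sizing rule as in the previous sketch (mirrors getPiPDimensions()).
std::pair<int, int>
pipDimensions(std::pair<int, int> sourceResolution, int fullWidth, double scaleFactor)
{
        int pipWidth  = fullWidth * scaleFactor;
        int pipHeight = static_cast<double>(sourceResolution.second) /
                        sourceResolution.first * pipWidth;
        return {pipWidth, pipHeight};
}

int
main()
{
        // Illustrative values only: sharing a 1920x1080 screen with a 640x480 camera.
        std::pair<int, int> share{1920, 1080};
        std::pair<int, int> camera{640, 480};

        // addRemotePiP() uses a 0.2 scale factor, an inset of 1/100 of the
        // shared width, and anchors the camera overlay to the bottom-right corner.
        auto pip   = pipDimensions(camera, share.first, 0.2);
        int offset = share.first / 100;
        int xpos   = share.first - pip.first - offset;
        int ypos   = share.second - pip.second - offset;

        std::printf("remote PiP: %dx%d at (%d,%d)\n",
                    pip.first, pip.second, xpos, ypos); // 384x288 at (1517,773)
}
```
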
@@ -454,8 +477,7 @@ linkNewPad(GstElement *decodebin, GstPad *newpad, GstElement *pipe)
                 nhlog::ui()->info("WebRTC: incoming video resolution: {}x{}",
                                   videoCallSize.first,
                                   videoCallSize.second);
-                if (session->callType() == CallType::VIDEO)
-                        addLocalPiP(pipe, videoCallSize);
+                addLocalPiP(pipe, videoCallSize);
         } else {
                 g_free(mediaType);
                 nhlog::ui()->error("WebRTC: unknown pad type: {}", GST_PAD_NAME(newpad));
|
@ -478,6 +500,8 @@ linkNewPad(GstElement *decodebin, GstPad *newpad, GstElement *pipe)
|
||||||
g_timeout_add_seconds(3, testPacketLoss, nullptr);
|
g_timeout_add_seconds(3, testPacketLoss, nullptr);
|
||||||
}
|
}
|
||||||
addRemotePiP(pipe);
|
addRemotePiP(pipe);
|
||||||
|
if (session->isRemoteVideoRecvOnly())
|
||||||
|
addLocalVideo(pipe);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
gst_object_unref(queuepad);
|
gst_object_unref(queuepad);
|
||||||
|
@@ -1011,13 +1035,19 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
 }
 
 bool
-WebRTCSession::haveLocalCamera() const
+WebRTCSession::haveLocalPiP() const
 {
-        if (callType_ == CallType::VIDEO && state_ >= State::INITIATED) {
-                GstElement *tee = gst_bin_get_by_name(GST_BIN(pipe_), "videosrctee");
-                if (tee) {
-                        gst_object_unref(tee);
-                        return true;
+        if (state_ >= State::INITIATED) {
+                if (callType_ == CallType::VOICE || isRemoteVideoRecvOnly_)
+                        return false;
+                else if (callType_ == CallType::SCREEN)
+                        return true;
+                else {
+                        GstElement *tee = gst_bin_get_by_name(GST_BIN(pipe_), "videosrctee");
+                        if (tee) {
+                                gst_object_unref(tee);
+                                return true;
+                        }
                 }
         }
         return false;
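The reworked haveLocalPiP() encodes a small decision: no local overlay for voice calls or when the remote is receive-only, always one for screen shares, and for video calls only when the "videosrctee" element exists in the pipeline. The sketch below restates that logic with plain parameters standing in for the session state and the GStreamer lookup; it is a simplified illustration, not the actual method.

```cpp
#include <cstdio>

enum class CallType { VOICE, VIDEO, SCREEN };

// Simplified restatement of the new WebRTCSession::haveLocalPiP() decision.
// The state_ >= State::INITIATED guard and the gst_bin_get_by_name() lookup of
// "videosrctee" are replaced by plain parameters; illustration only.
bool
haveLocalPiP(CallType callType, bool remoteIsRecvOnly, bool haveVideoSrcTee)
{
        if (callType == CallType::VOICE || remoteIsRecvOnly)
                return false;
        if (callType == CallType::SCREEN)
                return true;
        return haveVideoSrcTee; // video call: overlay only if the camera tee exists
}

int
main()
{
        std::printf("screen share:           %d\n", haveLocalPiP(CallType::SCREEN, false, false)); // 1
        std::printf("video call with camera: %d\n", haveLocalPiP(CallType::VIDEO, false, true));   // 1
        std::printf("remote is recv-only:    %d\n", haveLocalPiP(CallType::VIDEO, true, true));    // 0
}
```

This also lines up with the linkNewPad() change above, which calls addLocalVideo() when the remote is receive-only, so the locally produced video (including a shared screen) is still displayed even though there is no incoming stream to composite it onto.
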
@@ -1051,7 +1081,7 @@ WebRTCSession::toggleMicMute()
 }
 
 void
-WebRTCSession::toggleCameraView()
+WebRTCSession::toggleLocalPiP()
 {
         if (localPiPSinkPad_) {
                 guint zorder;
@@ -1108,7 +1138,7 @@ WebRTCSession::havePlugins(bool, std::string *)
 }
 
 bool
-WebRTCSession::haveLocalCamera() const
+WebRTCSession::haveLocalPiP() const
 {
         return false;
 }
@@ -1144,7 +1174,7 @@ WebRTCSession::toggleMicMute()
 }
 
 void
-WebRTCSession::toggleCameraView()
+WebRTCSession::toggleLocalPiP()
 {}
 
 void
@@ -52,7 +52,7 @@ public:
         bool havePlugins(bool isVideo, std::string *errorMessage = nullptr);
         webrtc::CallType callType() const { return callType_; }
         webrtc::State state() const { return state_; }
-        bool haveLocalCamera() const;
+        bool haveLocalPiP() const;
         bool isOffering() const { return isOffering_; }
         bool isRemoteVideoRecvOnly() const { return isRemoteVideoRecvOnly_; }
         bool isRemoteVideoSendOnly() const { return isRemoteVideoSendOnly_; }
@@ -64,7 +64,7 @@ public:
 
         bool isMicMuted() const;
         bool toggleMicMute();
-        void toggleCameraView();
+        void toggleLocalPiP();
         void end();
 
         void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }