Support voice calls
This commit is contained in:
parent c973fd759b
commit 7a206441c8
33 changed files with 1655 additions and 101 deletions
@@ -227,6 +227,7 @@ configure_file(cmake/nheko.h config/nheko.h)
#
set(SRC_FILES
    # Dialogs
    src/dialogs/AcceptCall.cpp
    src/dialogs/CreateRoom.cpp
    src/dialogs/FallbackAuth.cpp
    src/dialogs/ImageOverlay.cpp
@@ -235,6 +236,7 @@ set(SRC_FILES
    src/dialogs/LeaveRoom.cpp
    src/dialogs/Logout.cpp
    src/dialogs/MemberList.cpp
    src/dialogs/PlaceCall.cpp
    src/dialogs/PreviewUploadOverlay.cpp
    src/dialogs/ReCaptcha.cpp
    src/dialogs/ReadReceipts.cpp
@@ -278,9 +280,11 @@ set(SRC_FILES
    src/ui/Theme.cpp
    src/ui/ThemeManager.cpp

    src/ActiveCallBar.cpp
    src/AvatarProvider.cpp
    src/BlurhashProvider.cpp
    src/Cache.cpp
    src/CallManager.cpp
    src/ChatPage.cpp
    src/ColorImageProvider.cpp
    src/CommunitiesList.cpp
@@ -306,6 +310,7 @@ set(SRC_FILES
    src/UserInfoWidget.cpp
    src/UserSettingsPage.cpp
    src/Utils.cpp
    src/WebRTCSession.cpp
    src/WelcomePage.cpp
    src/popups/PopupItem.cpp
    src/popups/SuggestionsPopup.cpp
@@ -423,6 +428,10 @@ else()
    find_package(Tweeny REQUIRED)
endif()

include(FindPkgConfig)
pkg_check_modules(GST_SDP REQUIRED IMPORTED_TARGET gstreamer-sdp-1.0>=1.14)
pkg_check_modules(GST_WEBRTC REQUIRED IMPORTED_TARGET gstreamer-webrtc-1.0>=1.14)

# single instance functionality
set(QAPPLICATION_CLASS QApplication CACHE STRING "Inheritance class for SingleApplication")
add_subdirectory(third_party/SingleApplication-3.1.3.1/)
@@ -431,6 +440,7 @@ feature_summary(WHAT ALL INCLUDE_QUIET_PACKAGES FATAL_ON_MISSING_REQUIRED_PACKAG

qt5_wrap_cpp(MOC_HEADERS
    # Dialogs
    src/dialogs/AcceptCall.h
    src/dialogs/CreateRoom.h
    src/dialogs/FallbackAuth.h
    src/dialogs/ImageOverlay.h
@@ -439,6 +449,7 @@ qt5_wrap_cpp(MOC_HEADERS
    src/dialogs/LeaveRoom.h
    src/dialogs/Logout.h
    src/dialogs/MemberList.h
    src/dialogs/PlaceCall.h
    src/dialogs/PreviewUploadOverlay.h
    src/dialogs/RawMessage.h
    src/dialogs/ReCaptcha.h
@@ -482,9 +493,11 @@ qt5_wrap_cpp(MOC_HEADERS

    src/notifications/Manager.h

    src/ActiveCallBar.h
    src/AvatarProvider.h
    src/BlurhashProvider.h
    src/Cache_p.h
    src/CallManager.h
    src/ChatPage.h
    src/CommunitiesList.h
    src/CommunitiesListItem.h
@@ -504,6 +517,7 @@ qt5_wrap_cpp(MOC_HEADERS
    src/TrayIcon.h
    src/UserInfoWidget.h
    src/UserSettingsPage.h
    src/WebRTCSession.h
    src/WelcomePage.h
    src/popups/PopupItem.h
    src/popups/SuggestionsPopup.h
@@ -583,6 +597,8 @@ target_link_libraries(nheko PRIVATE
    lmdbxx::lmdbxx
    liblmdb::lmdb
    tweeny
    PkgConfig::GST_SDP
    PkgConfig::GST_WEBRTC
    SingleApplication::SingleApplication)

if(${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.16.0")
@@ -404,6 +404,21 @@ Example: https://server.my:8787</translation>
        <source>%1 created and configured room: %2</source>
        <translation>%1 created and configured room: %2</translation>
    </message>
    <message>
        <location line="+6"/>
        <source>%1 placed a voice call.</source>
        <translation>%1 placed a voice call.</translation>
    </message>
    <message>
        <location line="+6"/>
        <source>%1 answered the call.</source>
        <translation>%1 answered the call.</translation>
    </message>
    <message>
        <location line="+6"/>
        <source>%1 ended the call.</source>
        <translation>%1 ended the call.</translation>
    </message>
</context>
<context>
    <name>Placeholder</name>
@@ -1796,6 +1811,36 @@ Media size: %2
        <source>%1 sent an encrypted message</source>
        <translation>%1 sent an encrypted message</translation>
    </message>
    <message>
        <location line="+5"/>
        <source>You placed a call</source>
        <translation>You placed a call</translation>
    </message>
    <message>
        <location line="+3"/>
        <source>%1 placed a call</source>
        <translation>%1 placed a call</translation>
    </message>
    <message>
        <location line="+5"/>
        <source>You answered a call</source>
        <translation>You answered a call</translation>
    </message>
    <message>
        <location line="+3"/>
        <source>%1 answered a call</source>
        <translation>%1 answered a call</translation>
    </message>
    <message>
        <location line="+5"/>
        <source>You ended a call</source>
        <translation>You ended a call</translation>
    </message>
    <message>
        <location line="+3"/>
        <source>%1 ended a call</source>
        <translation>%1 ended a call</translation>
    </message>
</context>
<context>
    <name>popups::UserMentions</name>
BIN resources/media/callend.mp3 Normal file (Binary file not shown.)
BIN resources/media/callend.ogg Normal file (Binary file not shown.)
BIN resources/media/ring.mp3 Normal file (Binary file not shown.)
BIN resources/media/ring.ogg Normal file (Binary file not shown.)
BIN resources/media/ringback.mp3 Normal file (Binary file not shown.)
BIN resources/media/ringback.ogg Normal file (Binary file not shown.)
@@ -90,6 +90,24 @@ Item {
                text: qsTr("%1 created and configured room: %2").arg(model.data.userName).arg(model.data.roomId)
            }
        }
        DelegateChoice {
            roleValue: MtxEvent.CallInvite
            NoticeMessage {
                text: qsTr("%1 placed a voice call.").arg(model.data.userName)
            }
        }
        DelegateChoice {
            roleValue: MtxEvent.CallAnswer
            NoticeMessage {
                text: qsTr("%1 answered the call.").arg(model.data.userName)
            }
        }
        DelegateChoice {
            roleValue: MtxEvent.CallHangUp
            NoticeMessage {
                text: qsTr("%1 ended the call.").arg(model.data.userName)
            }
        }
        DelegateChoice {
            // TODO: make a more complex formatter for the power levels.
            roleValue: MtxEvent.PowerLevels
@@ -136,4 +136,9 @@
        <file>qml/delegates/Placeholder.qml</file>
        <file>qml/delegates/Reply.qml</file>
    </qresource>
    <qresource prefix="/media">
        <file>media/ring.ogg</file>
        <file>media/ringback.ogg</file>
        <file>media/callend.ogg</file>
    </qresource>
</RCC>
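A note on the resource paths above: the three .ogg files are registered under the "/media" prefix but keep their on-disk "media/" directory in the file entry, so at runtime they resolve to qrc URLs of the form qrc:/media/media/ring.ogg, which is exactly the form CallManager.cpp uses further down. A minimal Qt sketch of loading one of them (the function name is invented for illustration):

// Sketch: resolving a ringtone bundled under the "/media" qresource prefix.
#include <QMediaPlayer>
#include <QUrl>

void playRingSketch(QMediaPlayer &player)
{
    // "/media" prefix + "media/ring.ogg" entry => "qrc:/media/media/ring.ogg"
    player.setMedia(QUrl("qrc:/media/media/ring.ogg"));
    player.setVolume(100);
    player.play();
}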
74 src/ActiveCallBar.cpp Normal file
@@ -0,0 +1,74 @@
#include <QHBoxLayout>
#include <QIcon>
#include <QLabel>
#include <QString>

#include "ActiveCallBar.h"
#include "WebRTCSession.h"
#include "ui/FlatButton.h"

ActiveCallBar::ActiveCallBar(QWidget *parent)
  : QWidget(parent)
{
        setAutoFillBackground(true);
        auto p = palette();
        p.setColor(backgroundRole(), Qt::green);
        setPalette(p);

        QFont f;
        f.setPointSizeF(f.pointSizeF());

        const int fontHeight    = QFontMetrics(f).height();
        const int widgetMargin  = fontHeight / 3;
        const int contentHeight = fontHeight * 3;

        setFixedHeight(contentHeight + widgetMargin);

        topLayout_ = new QHBoxLayout(this);
        topLayout_->setSpacing(widgetMargin);
        topLayout_->setContentsMargins(
          2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin);
        topLayout_->setSizeConstraint(QLayout::SetMinimumSize);

        QFont labelFont;
        labelFont.setPointSizeF(labelFont.pointSizeF() * 1.2);
        labelFont.setWeight(QFont::Medium);

        callPartyLabel_ = new QLabel(this);
        callPartyLabel_->setFont(labelFont);

        // TODO microphone mute/unmute icons
        muteBtn_ = new FlatButton(this);
        QIcon muteIcon;
        muteIcon.addFile(":/icons/icons/ui/do-not-disturb-rounded-sign.png");
        muteBtn_->setIcon(muteIcon);
        muteBtn_->setIconSize(QSize(buttonSize_ / 2, buttonSize_ / 2));
        muteBtn_->setToolTip(tr("Mute Mic"));
        muteBtn_->setFixedSize(buttonSize_, buttonSize_);
        muteBtn_->setCornerRadius(buttonSize_ / 2);
        connect(muteBtn_, &FlatButton::clicked, this, [this]() {
                if (WebRTCSession::instance().toggleMuteAudioSrc(muted_)) {
                        QIcon icon;
                        if (muted_) {
                                muteBtn_->setToolTip("Unmute Mic");
                                icon.addFile(":/icons/icons/ui/round-remove-button.png");
                        } else {
                                muteBtn_->setToolTip("Mute Mic");
                                icon.addFile(":/icons/icons/ui/do-not-disturb-rounded-sign.png");
                        }
                        muteBtn_->setIcon(icon);
                }
        });

        topLayout_->addWidget(callPartyLabel_, 0, Qt::AlignLeft);
        topLayout_->addWidget(muteBtn_, 0, Qt::AlignRight);
}

void
ActiveCallBar::setCallParty(const QString &userid, const QString &displayName)
{
        if (!displayName.isEmpty() && displayName != userid)
                callPartyLabel_->setText("Active Call: " + displayName + " (" + userid + ")");
        else
                callPartyLabel_->setText("Active Call: " + userid);
}
26 src/ActiveCallBar.h Normal file
@@ -0,0 +1,26 @@
#pragma once

#include <QWidget>

class QHBoxLayout;
class QLabel;
class QString;
class FlatButton;

class ActiveCallBar : public QWidget
{
        Q_OBJECT

public:
        ActiveCallBar(QWidget *parent = nullptr);

public slots:
        void setCallParty(const QString &userid, const QString &displayName);

private:
        QHBoxLayout *topLayout_ = nullptr;
        QLabel *callPartyLabel_ = nullptr;
        FlatButton *muteBtn_    = nullptr;
        int buttonSize_         = 32;
        bool muted_             = false;
};
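For context, the way this bar is driven can be read off the ChatPage changes further down: CallManager::newCallParty feeds setCallParty, and WebRTCSession::pipelineChanged toggles visibility. A condensed sketch of that wiring, using the same member names this commit introduces:

// Sketch of the wiring added in ChatPage.cpp below.
activeCallBar_ = new ActiveCallBar(this);
activeCallBar_->hide();
connect(&callManager_, &CallManager::newCallParty,
        activeCallBar_, &ActiveCallBar::setCallParty);
connect(&WebRTCSession::instance(), &WebRTCSession::pipelineChanged, this,
        [this](bool callStarted) { activeCallBar_->setVisible(callStarted); });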
@@ -1364,6 +1364,9 @@ Cache::getLastMessageInfo(lmdb::txn &txn, const std::string &room_id)

        if (!(obj["event"]["type"] == "m.room.message" ||
              obj["event"]["type"] == "m.sticker" ||
              obj["event"]["type"] == "m.call.invite" ||
              obj["event"]["type"] == "m.call.answer" ||
              obj["event"]["type"] == "m.call.hangup" ||
              obj["event"]["type"] == "m.room.encrypted"))
                continue;

315 src/CallManager.cpp Normal file
@@ -0,0 +1,315 @@
#include <chrono>

#include <QMediaPlaylist>
#include <QUrl>

#include "CallManager.h"
#include "Cache.h"
#include "ChatPage.h"
#include "Logging.h"
#include "MainWindow.h"
#include "MatrixClient.h"
#include "UserSettingsPage.h"
#include "WebRTCSession.h"

#include "dialogs/AcceptCall.h"

Q_DECLARE_METATYPE(std::vector<mtx::events::msg::CallCandidates::Candidate>)
Q_DECLARE_METATYPE(mtx::responses::TurnServer)

using namespace mtx::events;
using namespace mtx::events::msg;

// TODO Allow alternative in settings
#define STUN_SERVER "stun://turn.matrix.org:3478"

CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
  : QObject(),
    session_(WebRTCSession::instance()),
    turnServerTimer_(this),
    settings_(userSettings)
{
    qRegisterMetaType<std::vector<mtx::events::msg::CallCandidates::Candidate>>();
    qRegisterMetaType<mtx::responses::TurnServer>();

    connect(&session_, &WebRTCSession::offerCreated, this,
        [this](const std::string &sdp,
               const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates)
        {
            nhlog::ui()->debug("Offer created with callid_ and roomid_: {} {}", callid_, roomid_.toStdString());
            emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_});
            emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
        });

    connect(&session_, &WebRTCSession::answerCreated, this,
        [this](const std::string &sdp,
               const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates)
        {
            nhlog::ui()->debug("Answer created with callid_ and roomid_: {} {}", callid_, roomid_.toStdString());
            emit newMessage(roomid_, CallAnswer{callid_, sdp, 0});
            emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
        });

    connect(&turnServerTimer_, &QTimer::timeout, this, &CallManager::retrieveTurnServer);
    turnServerTimer_.start(2000);

    connect(this, &CallManager::turnServerRetrieved, this,
        [this](const mtx::responses::TurnServer &res)
        {
            nhlog::net()->info("TURN server(s) retrieved from homeserver:");
            nhlog::net()->info("username: {}", res.username);
            nhlog::net()->info("ttl: {}", res.ttl);
            for (const auto &u : res.uris)
                nhlog::net()->info("uri: {}", u);

            turnServer_ = res;
            turnServerTimer_.setInterval(res.ttl * 1000 * 0.9);
        });

    connect(&session_, &WebRTCSession::pipelineChanged, this,
        [this](bool started) {
            if (!started)
                playRingtone("qrc:/media/media/callend.ogg", false);
        });

    connect(&player_, &QMediaPlayer::mediaStatusChanged, this,
        [this](QMediaPlayer::MediaStatus status) {
            if (status == QMediaPlayer::LoadedMedia)
                player_.play();
        });
}

void
CallManager::sendInvite(const QString &roomid)
{
    if (onActiveCall())
        return;

    std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
    if (members.size() != 2) {
        emit ChatPage::instance()->showNotification("Voice/Video calls are limited to 1:1 rooms");
        return;
    }

    std::string errorMessage;
    if (!session_.init(&errorMessage)) {
        emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
        return;
    }

    roomid_ = roomid;
    setTurnServers();
    session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");

    // TODO Add invite timeout
    generateCallID();
    const RoomMember &callee =
        members.front().user_id == utils::localUser() ? members.back() : members.front();
    emit newCallParty(callee.user_id, callee.display_name);
    playRingtone("qrc:/media/media/ringback.ogg", true);
    if (!session_.createOffer()) {
        emit ChatPage::instance()->showNotification("Problem setting up call");
        endCall();
    }
}

void
CallManager::hangUp()
{
    nhlog::ui()->debug("CallManager::hangUp: roomid_: {}", roomid_.toStdString());
    if (!callid_.empty()) {
        emit newMessage(roomid_, CallHangUp{callid_, 0, CallHangUp::Reason::User});
        endCall();
    }
}

bool
CallManager::onActiveCall()
{
    return session_.isActive();
}

void
CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event)
{
    if (handleEvent_<CallInvite>(event) || handleEvent_<CallCandidates>(event) ||
        handleEvent_<CallAnswer>(event) || handleEvent_<CallHangUp>(event))
        return;
}

template<typename T>
bool
CallManager::handleEvent_(const mtx::events::collections::TimelineEvents &event)
{
    if (std::holds_alternative<RoomEvent<T>>(event)) {
        handleEvent(std::get<RoomEvent<T>>(event));
        return true;
    }
    return false;
}

void
CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
{
    nhlog::ui()->debug("CallManager::incoming CallInvite from {} with id {}",
        callInviteEvent.sender, callInviteEvent.content.call_id);

    if (callInviteEvent.content.call_id.empty())
        return;

    std::vector<RoomMember> members(cache::getMembers(callInviteEvent.room_id));
    if (onActiveCall() || members.size() != 2) {
        emit newMessage(QString::fromStdString(callInviteEvent.room_id),
            CallHangUp{callInviteEvent.content.call_id, 0, CallHangUp::Reason::InviteTimeOut});
        return;
    }

    playRingtone("qrc:/media/media/ring.ogg", true);
    roomid_ = QString::fromStdString(callInviteEvent.room_id);
    callid_ = callInviteEvent.content.call_id;
    remoteICECandidates_.clear();

    const RoomMember &caller =
        members.front().user_id == utils::localUser() ? members.back() : members.front();
    emit newCallParty(caller.user_id, caller.display_name);

    auto dialog = new dialogs::AcceptCall(caller.user_id, caller.display_name, MainWindow::instance());
    connect(dialog, &dialogs::AcceptCall::accept, this,
        [this, callInviteEvent]() {
            MainWindow::instance()->hideOverlay();
            answerInvite(callInviteEvent.content);
        });
    connect(dialog, &dialogs::AcceptCall::reject, this,
        [this]() {
            MainWindow::instance()->hideOverlay();
            hangUp();
        });
    MainWindow::instance()->showSolidOverlayModal(dialog);
}

void
CallManager::answerInvite(const CallInvite &invite)
{
    stopRingtone();
    std::string errorMessage;
    if (!session_.init(&errorMessage)) {
        emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
        hangUp();
        return;
    }

    setTurnServers();
    session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");

    if (!session_.acceptOffer(invite.sdp)) {
        emit ChatPage::instance()->showNotification("Problem setting up call");
        hangUp();
        return;
    }
    session_.acceptICECandidates(remoteICECandidates_);
    remoteICECandidates_.clear();
}

void
CallManager::handleEvent(const RoomEvent<CallCandidates> &callCandidatesEvent)
{
    nhlog::ui()->debug("CallManager::incoming CallCandidates from {} with id {}",
        callCandidatesEvent.sender, callCandidatesEvent.content.call_id);
    if (callid_ == callCandidatesEvent.content.call_id) {
        if (onActiveCall())
            session_.acceptICECandidates(callCandidatesEvent.content.candidates);
        else {
            // CallInvite has been received and we're awaiting localUser to accept or
            // reject the call
            for (const auto &c : callCandidatesEvent.content.candidates)
                remoteICECandidates_.push_back(c);
        }
    }
}

void
CallManager::handleEvent(const RoomEvent<CallAnswer> &callAnswerEvent)
{
    nhlog::ui()->debug("CallManager::incoming CallAnswer from {} with id {}",
        callAnswerEvent.sender, callAnswerEvent.content.call_id);
    if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) {
        stopRingtone();
        if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) {
            emit ChatPage::instance()->showNotification("Problem setting up call");
            hangUp();
        }
    }
}

void
CallManager::handleEvent(const RoomEvent<CallHangUp> &callHangUpEvent)
{
    nhlog::ui()->debug("CallManager::incoming CallHangUp from {} with id {}",
        callHangUpEvent.sender, callHangUpEvent.content.call_id);
    if (onActiveCall() && callid_ == callHangUpEvent.content.call_id)
        endCall();
}

void
CallManager::generateCallID()
{
    using namespace std::chrono;
    uint64_t ms = duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
    callid_     = "c" + std::to_string(ms);
}

void
CallManager::endCall()
{
    stopRingtone();
    session_.end();
    roomid_.clear();
    callid_.clear();
    remoteICECandidates_.clear();
}

void
CallManager::retrieveTurnServer()
{
    http::client()->get_turn_server(
        [this](const mtx::responses::TurnServer &res, mtx::http::RequestErr err) {
            if (err) {
                turnServerTimer_.setInterval(5000);
                return;
            }
            emit turnServerRetrieved(res);
        });
}

void
CallManager::setTurnServers()
{
    // gstreamer expects (percent-encoded): turn(s)://username:password@host:port?transport=udp(tcp)
    std::vector<std::string> uris;
    for (const auto &uri : turnServer_.uris) {
        if (auto c = uri.find(':'); c == std::string::npos) {
            nhlog::ui()->error("Invalid TURN server uri: {}", uri);
            continue;
        } else {
            std::string scheme = std::string(uri, 0, c);
            if (scheme != "turn" && scheme != "turns") {
                nhlog::ui()->error("Invalid TURN server uri: {}", uri);
                continue;
            }
            std::string res = scheme + "://" + turnServer_.username + ":" + turnServer_.password +
                              "@" + std::string(uri, ++c);
            QString encodedUri = QUrl::toPercentEncoding(QString::fromStdString(res));
            uris.push_back(encodedUri.toStdString());
        }
    }
    if (!uris.empty())
        session_.setTurnServers(uris);
}

void
CallManager::playRingtone(const QString &ringtone, bool repeat)
{
    static QMediaPlaylist playlist;
    playlist.clear();
    playlist.setPlaybackMode(repeat ? QMediaPlaylist::CurrentItemInLoop
                                    : QMediaPlaylist::CurrentItemOnce);
    playlist.addMedia(QUrl(ringtone));
    player_.setVolume(100);
    player_.setPlaylist(&playlist);
}

void
CallManager::stopRingtone()
{
    player_.setPlaylist(nullptr);
}
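To make the rewrite in setTurnServers above concrete, here is a standalone sketch of the same string transformation. The host and credentials are invented for illustration, and the real code additionally passes the result through QUrl::toPercentEncoding before handing it to WebRTCSession::setTurnServers:

#include <iostream>
#include <string>

int main()
{
    // As returned by GET /_matrix/client/r0/voip/turnServer (values invented):
    std::string uri      = "turn:turn.example.com:3478?transport=udp";
    std::string username = "alice";
    std::string password = "s3cret";

    auto c             = uri.find(':');
    std::string scheme = uri.substr(0, c);      // "turn" or "turns"
    std::string rest   = uri.substr(c + 1);     // host:port?transport=...
    std::string result = scheme + "://" + username + ":" + password + "@" + rest;

    std::cout << result << "\n";  // turn://alice:s3cret@turn.example.com:3478?transport=udp
}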
67 src/CallManager.h Normal file
@@ -0,0 +1,67 @@
#pragma once

#include <string>
#include <vector>

#include <QObject>
#include <QMediaPlayer>
#include <QSharedPointer>
#include <QString>
#include <QTimer>

#include "mtx/events/collections.hpp"
#include "mtx/events/voip.hpp"
#include "mtx/responses/turn_server.hpp"

class UserSettings;
class WebRTCSession;

class CallManager : public QObject
{
    Q_OBJECT

public:
    CallManager(QSharedPointer<UserSettings>);

    void sendInvite(const QString &roomid);
    void hangUp();
    bool onActiveCall();

public slots:
    void syncEvent(const mtx::events::collections::TimelineEvents &event);

signals:
    void newMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
    void newMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
    void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
    void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
    void turnServerRetrieved(const mtx::responses::TurnServer &);
    void newCallParty(const QString &userid, const QString &displayName);

private slots:
    void retrieveTurnServer();

private:
    WebRTCSession &session_;
    QString roomid_;
    std::string callid_;
    const uint32_t timeoutms_ = 120000;
    std::vector<mtx::events::msg::CallCandidates::Candidate> remoteICECandidates_;
    mtx::responses::TurnServer turnServer_;
    QTimer turnServerTimer_;
    QSharedPointer<UserSettings> settings_;
    QMediaPlayer player_;

    template<typename T>
    bool handleEvent_(const mtx::events::collections::TimelineEvents &event);
    void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &);
    void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates> &);
    void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &);
    void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &);
    void answerInvite(const mtx::events::msg::CallInvite &);
    void generateCallID();
    void endCall();
    void setTurnServers();
    void playRingtone(const QString &ringtone, bool repeat);
    void stopRingtone();
};
@@ -22,6 +22,7 @@
#include <QShortcut>
#include <QtConcurrent>

#include "ActiveCallBar.h"
#include "AvatarProvider.h"
#include "Cache.h"
#include "Cache_p.h"
@@ -40,11 +41,13 @@
#include "UserInfoWidget.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "ui/OverlayModal.h"
#include "ui/Theme.h"

#include "notifications/Manager.h"

#include "dialogs/PlaceCall.h"
#include "dialogs/ReadReceipts.h"
#include "popups/UserMentions.h"
#include "timeline/TimelineViewManager.h"
@@ -68,6 +71,7 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
  , isConnected_(true)
  , userSettings_{userSettings}
  , notificationsManager(this)
  , callManager_(userSettings)
{
        setObjectName("chatPage");

@@ -123,11 +127,26 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
        contentLayout_->setMargin(0);

        top_bar_      = new TopRoomBar(this);
-       view_manager_ = new TimelineViewManager(userSettings_, this);
+       view_manager_ = new TimelineViewManager(userSettings_, &callManager_, this);

        contentLayout_->addWidget(top_bar_);
        contentLayout_->addWidget(view_manager_->getWidget());

        activeCallBar_ = new ActiveCallBar(this);
        contentLayout_->addWidget(activeCallBar_);
        activeCallBar_->hide();
        connect(
          &callManager_, &CallManager::newCallParty, activeCallBar_, &ActiveCallBar::setCallParty);
        connect(&WebRTCSession::instance(),
                &WebRTCSession::pipelineChanged,
                this,
                [this](bool callStarted) {
                        if (callStarted)
                                activeCallBar_->show();
                        else
                                activeCallBar_->hide();
                });

        // Splitter
        splitter->addWidget(sideBar_);
        splitter->addWidget(content_);
@@ -446,6 +465,31 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
                  roomid, filename, encryptedFile, url, mime, dsize);
        });

        connect(text_input_, &TextInputWidget::callButtonPress, this, [this]() {
                if (callManager_.onActiveCall()) {
                        callManager_.hangUp();
                } else {
                        if (cache::singleRoomInfo(current_room_.toStdString()).member_count != 2) {
                                showNotification("Voice/Video calls are limited to 1:1 rooms");
                        } else {
                                std::vector<RoomMember> members(
                                  cache::getMembers(current_room_.toStdString()));
                                const RoomMember &callee =
                                  members.front().user_id == utils::localUser() ? members.back()
                                                                                : members.front();
                                auto dialog =
                                  new dialogs::PlaceCall(callee.user_id, callee.display_name, this);
                                connect(dialog, &dialogs::PlaceCall::voice, this, [this]() {
                                        callManager_.sendInvite(current_room_);
                                });
                                connect(dialog, &dialogs::PlaceCall::video, this, [this]() {
                                        showNotification("Video calls not yet implemented");
                                });
                                dialog->show();
                        }
                }
        });

        connect(room_list_, &RoomList::roomAvatarChanged, this, &ChatPage::updateTopBarAvatar);

        connect(
@@ -569,6 +613,11 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)

        connect(this, &ChatPage::dropToLoginPageCb, this, &ChatPage::dropToLoginPage);

        connectCallMessage<mtx::events::msg::CallInvite>();
        connectCallMessage<mtx::events::msg::CallCandidates>();
        connectCallMessage<mtx::events::msg::CallAnswer>();
        connectCallMessage<mtx::events::msg::CallHangUp>();

        instance_ = this;
}

@@ -1430,3 +1479,13 @@ ChatPage::initiateLogout()

        emit showOverlayProgressBar();
}

template<typename T>
void
ChatPage::connectCallMessage()
{
        connect(&callManager_,
                qOverload<const QString &, const T &>(&CallManager::newMessage),
                view_manager_,
                qOverload<const QString &, const T &>(&TimelineViewManager::queueCallMessage));
}
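connectCallMessage<T>() above exists only to pick the matching newMessage/queueCallMessage overload pair. Spelled out for one event type, it is equivalent to the following (illustration only):

// Equivalent of connectCallMessage<mtx::events::msg::CallInvite>(): qOverload selects the
// CallInvite overload of CallManager::newMessage and of TimelineViewManager::queueCallMessage.
connect(&callManager_,
        qOverload<const QString &, const mtx::events::msg::CallInvite &>(&CallManager::newMessage),
        view_manager_,
        qOverload<const QString &, const mtx::events::msg::CallInvite &>(
          &TimelineViewManager::queueCallMessage));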
@@ -35,11 +35,13 @@
#include <QWidget>

#include "CacheStructs.h"
#include "CallManager.h"
#include "CommunitiesList.h"
#include "Utils.h"
#include "notifications/Manager.h"
#include "popups/UserMentions.h"

class ActiveCallBar;
class OverlayModal;
class QuickSwitcher;
class RoomList;
@@ -50,7 +52,6 @@ class TimelineViewManager;
class TopRoomBar;
class UserInfoWidget;
class UserSettings;
class NotificationsManager;

constexpr int CONSENSUS_TIMEOUT    = 1000;
constexpr int SHOW_CONTENT_TIMEOUT = 3000;
@@ -216,6 +217,9 @@ private:

        void showNotificationsDialog(const QPoint &point);

        template<typename T>
        void connectCallMessage();

        QHBoxLayout *topLayout_;
        Splitter *splitter;

@@ -235,6 +239,7 @@ private:

        TopRoomBar *top_bar_;
        TextInputWidget *text_input_;
        ActiveCallBar *activeCallBar_;

        QTimer connectivityTimer_;
        std::atomic_bool isConnected_;

@@ -252,6 +257,7 @@ private:
        QSharedPointer<UserSettings> userSettings_;

        NotificationsManager notificationsManager;
        CallManager callManager_;
};

template<class Collection>
@@ -31,6 +31,7 @@
#include "Logging.h"
#include "TextInputWidget.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "ui/FlatButton.h"
#include "ui/LoadingIndicator.h"

@@ -453,6 +454,13 @@ TextInputWidget::TextInputWidget(QWidget *parent)
        topLayout_->setSpacing(0);
        topLayout_->setContentsMargins(13, 1, 13, 0);

        callBtn_ = new FlatButton(this);
        changeCallButtonState(false);
        connect(&WebRTCSession::instance(),
                &WebRTCSession::pipelineChanged,
                this,
                &TextInputWidget::changeCallButtonState);

        QIcon send_file_icon;
        send_file_icon.addFile(":/icons/icons/ui/paper-clip-outline.png");

@@ -521,6 +529,7 @@ TextInputWidget::TextInputWidget(QWidget *parent)
        emojiBtn_->setIcon(emoji_icon);
        emojiBtn_->setIconSize(QSize(ButtonHeight, ButtonHeight));

        topLayout_->addWidget(callBtn_);
        topLayout_->addWidget(sendFileBtn_);
        topLayout_->addWidget(input_);
        topLayout_->addWidget(emojiBtn_);

@@ -528,6 +537,7 @@ TextInputWidget::TextInputWidget(QWidget *parent)

        setLayout(topLayout_);

        connect(callBtn_, &FlatButton::clicked, this, &TextInputWidget::callButtonPress);
        connect(sendMessageBtn_, &FlatButton::clicked, input_, &FilteredTextEdit::submit);
        connect(sendFileBtn_, SIGNAL(clicked()), this, SLOT(openFileSelection()));
        connect(input_, &FilteredTextEdit::message, this, &TextInputWidget::sendTextMessage);

@@ -652,3 +662,19 @@ TextInputWidget::paintEvent(QPaintEvent *)

        style()->drawPrimitive(QStyle::PE_Widget, &opt, &p, this);
}

void
TextInputWidget::changeCallButtonState(bool callStarted)
{
        // TODO Telephone and HangUp icons - co-opt the ones below for now
        QIcon icon;
        if (callStarted) {
                callBtn_->setToolTip(tr("Hang up"));
                icon.addFile(":/icons/icons/ui/remove-symbol.png");
        } else {
                callBtn_->setToolTip(tr("Place a call"));
                icon.addFile(":/icons/icons/ui/speech-bubbles-comment-option.png");
        }
        callBtn_->setIcon(icon);
        callBtn_->setIconSize(QSize(ButtonHeight, ButtonHeight));
}
@@ -149,6 +149,7 @@ public slots:
        void openFileSelection();
        void hideUploadSpinner();
        void focusLineEdit() { input_->setFocus(); }
        void changeCallButtonState(bool callStarted);

private slots:
        void addSelectedEmoji(const QString &emoji);

@@ -161,6 +162,7 @@ signals:
        void uploadMedia(const QSharedPointer<QIODevice> data,
                         QString mimeClass,
                         const QString &filename);
        void callButtonPress();

        void sendJoinRoomRequest(const QString &room);
        void sendInviteRoomRequest(const QString &userid, const QString &reason);

@@ -185,6 +187,7 @@ private:

        LoadingIndicator *spinner_;

        FlatButton *callBtn_;
        FlatButton *sendFileBtn_;
        FlatButton *sendMessageBtn_;
        emoji::PickButton *emojiBtn_;
@@ -77,6 +77,7 @@ UserSettings::load()
        presence_ =
          settings.value("user/presence", QVariant::fromValue(Presence::AutomaticPresence))
            .value<Presence>();
        useStunServer_ = settings.value("user/use_stun_server", false).toBool();

        applyTheme();
}

@@ -279,6 +280,16 @@ UserSettings::setTheme(QString theme)
        emit themeChanged(theme);
}

void
UserSettings::setUseStunServer(bool useStunServer)
{
        if (useStunServer == useStunServer_)
                return;
        useStunServer_ = useStunServer;
        emit useStunServerChanged(useStunServer);
        save();
}

void
UserSettings::applyTheme()
{

@@ -364,6 +375,7 @@ UserSettings::save()
        settings.setValue("font_family", font_);
        settings.setValue("emoji_font_family", emojiFont_);
        settings.setValue("presence", QVariant::fromValue(presence_));
        settings.setValue("use_stun_server", useStunServer_);

        settings.endGroup();

@@ -429,6 +441,7 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
        markdown_             = new Toggle{this};
        desktopNotifications_ = new Toggle{this};
        alertOnNotification_  = new Toggle{this};
        useStunServer_        = new Toggle{this};
        scaleFactorCombo_     = new QComboBox{this};
        fontSizeCombo_        = new QComboBox{this};
        fontSelectionCombo_   = new QComboBox{this};

@@ -482,6 +495,12 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
        timelineMaxWidthSpin_->setMaximum(100'000'000);
        timelineMaxWidthSpin_->setSingleStep(10);

        auto callsLabel = new QLabel{tr("CALLS"), this};
        callsLabel->setFixedHeight(callsLabel->minimumHeight() + LayoutTopMargin);
        callsLabel->setAlignment(Qt::AlignBottom);
        callsLabel->setFont(font);
        useStunServer_ = new Toggle{this};

        auto encryptionLabel_ = new QLabel{tr("ENCRYPTION"), this};
        encryptionLabel_->setFixedHeight(encryptionLabel_->minimumHeight() + LayoutTopMargin);
        encryptionLabel_->setAlignment(Qt::AlignBottom);

@@ -612,6 +631,13 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
#endif

        boxWrap(tr("Theme"), themeCombo_);

        formLayout_->addRow(callsLabel);
        formLayout_->addRow(new HorizontalLine{this});
        boxWrap(tr("Allow Fallback Call Assist Server"),
                useStunServer_,
                tr("Will use turn.matrix.org as assist when your home server does not offer one."));

        formLayout_->addRow(encryptionLabel_);
        formLayout_->addRow(new HorizontalLine{this});
        boxWrap(tr("Device ID"), deviceIdValue_);

@@ -724,6 +750,10 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
                settings_->setEnlargeEmojiOnlyMessages(!disabled);
        });

        connect(useStunServer_, &Toggle::toggled, this, [this](bool disabled) {
                settings_->setUseStunServer(!disabled);
        });

        connect(timelineMaxWidthSpin_,
                qOverload<int>(&QSpinBox::valueChanged),
                this,

@@ -766,6 +796,7 @@ UserSettingsPage::showEvent(QShowEvent *)
        enlargeEmojiOnlyMessages_->setState(!settings_->enlargeEmojiOnlyMessages());
        deviceIdValue_->setText(QString::fromStdString(http::client()->device_id()));
        timelineMaxWidthSpin_->setValue(settings_->timelineMaxWidth());
        useStunServer_->setState(!settings_->useStunServer());

        deviceFingerprintValue_->setText(
          utils::humanReadableFingerprint(olm::client()->identity_keys().ed25519));
@@ -71,6 +71,8 @@ class UserSettings : public QObject
        Q_PROPERTY(
          QString emojiFont READ emojiFont WRITE setEmojiFontFamily NOTIFY emojiFontChanged)
        Q_PROPERTY(Presence presence READ presence WRITE setPresence NOTIFY presenceChanged)
        Q_PROPERTY(
          bool useStunServer READ useStunServer WRITE setUseStunServer NOTIFY useStunServerChanged)

public:
        UserSettings();

@@ -107,6 +109,7 @@ public:
        void setAvatarCircles(bool state);
        void setDecryptSidebar(bool state);
        void setPresence(Presence state);
        void setUseStunServer(bool state);

        QString theme() const { return !theme_.isEmpty() ? theme_ : defaultTheme_; }
        bool messageHoverHighlight() const { return messageHoverHighlight_; }

@@ -132,6 +135,7 @@ public:
        QString font() const { return font_; }
        QString emojiFont() const { return emojiFont_; }
        Presence presence() const { return presence_; }
        bool useStunServer() const { return useStunServer_; }

signals:
        void groupViewStateChanged(bool state);

@@ -154,6 +158,7 @@ signals:
        void fontChanged(QString state);
        void emojiFontChanged(QString state);
        void presenceChanged(Presence state);
        void useStunServerChanged(bool state);

private:
        // Default to system theme if QT_QPA_PLATFORMTHEME var is set.

@@ -181,6 +186,7 @@ private:
        QString font_;
        QString emojiFont_;
        Presence presence_;
        bool useStunServer_;
};

class HorizontalLine : public QFrame

@@ -234,6 +240,7 @@ private:
        Toggle *desktopNotifications_;
        Toggle *alertOnNotification_;
        Toggle *avatarCircles_;
        Toggle *useStunServer_;
        Toggle *decryptSidebar_;
        QLabel *deviceFingerprintValue_;
        QLabel *deviceIdValue_;
@@ -35,11 +35,10 @@ createDescriptionInfo(const Event &event, const QString &localUser, const QStrin
        const auto username = cache::displayName(room_id, sender);
        const auto ts       = QDateTime::fromMSecsSinceEpoch(msg.origin_server_ts);

-       return DescInfo{
-         QString::fromStdString(msg.event_id),
+       return DescInfo{QString::fromStdString(msg.event_id),
          sender,
          utils::messageDescription<T>(
-           username, QString::fromStdString(msg.content.body).trimmed(), sender == localUser),
+           username, utils::event_body(event).trimmed(), sender == localUser),
          utils::descriptiveTime(ts),
          msg.origin_server_ts,
          ts};

@@ -163,6 +162,9 @@ utils::getMessageDescription(const TimelineEvent &event,
        using Notice     = mtx::events::RoomEvent<mtx::events::msg::Notice>;
        using Text       = mtx::events::RoomEvent<mtx::events::msg::Text>;
        using Video      = mtx::events::RoomEvent<mtx::events::msg::Video>;
        using CallInvite = mtx::events::RoomEvent<mtx::events::msg::CallInvite>;
        using CallAnswer = mtx::events::RoomEvent<mtx::events::msg::CallAnswer>;
        using CallHangUp = mtx::events::RoomEvent<mtx::events::msg::CallHangUp>;
        using Encrypted  = mtx::events::EncryptedEvent<mtx::events::msg::Encrypted>;

        if (std::holds_alternative<Audio>(event)) {

@@ -179,6 +181,12 @@ utils::getMessageDescription(const TimelineEvent &event,
                return createDescriptionInfo<Text>(event, localUser, room_id);
        } else if (std::holds_alternative<Video>(event)) {
                return createDescriptionInfo<Video>(event, localUser, room_id);
        } else if (std::holds_alternative<CallInvite>(event)) {
                return createDescriptionInfo<CallInvite>(event, localUser, room_id);
        } else if (std::holds_alternative<CallAnswer>(event)) {
                return createDescriptionInfo<CallAnswer>(event, localUser, room_id);
        } else if (std::holds_alternative<CallHangUp>(event)) {
                return createDescriptionInfo<CallHangUp>(event, localUser, room_id);
        } else if (std::holds_alternative<mtx::events::Sticker>(event)) {
                return createDescriptionInfo<mtx::events::Sticker>(event, localUser, room_id);
        } else if (auto msg = std::get_if<Encrypted>(&event); msg != nullptr) {
27 src/Utils.h
@@ -96,6 +96,9 @@ messageDescription(const QString &username = "",
        using Sticker    = mtx::events::Sticker;
        using Text       = mtx::events::RoomEvent<mtx::events::msg::Text>;
        using Video      = mtx::events::RoomEvent<mtx::events::msg::Video>;
        using CallInvite = mtx::events::RoomEvent<mtx::events::msg::CallInvite>;
        using CallAnswer = mtx::events::RoomEvent<mtx::events::msg::CallAnswer>;
        using CallHangUp = mtx::events::RoomEvent<mtx::events::msg::CallHangUp>;
        using Encrypted  = mtx::events::EncryptedEvent<mtx::events::msg::Encrypted>;

        if (std::is_same<T, Audio>::value) {

@@ -164,6 +167,30 @@ messageDescription(const QString &username = "",
                return QCoreApplication::translate("message-description sent:",
                                                   "%1 sent an encrypted message")
                  .arg(username);
        } else if (std::is_same<T, CallInvite>::value) {
                if (isLocal)
                        return QCoreApplication::translate("message-description sent:",
                                                           "You placed a call");
                else
                        return QCoreApplication::translate("message-description sent:",
                                                           "%1 placed a call")
                          .arg(username);
        } else if (std::is_same<T, CallAnswer>::value) {
                if (isLocal)
                        return QCoreApplication::translate("message-description sent:",
                                                           "You answered a call");
                else
                        return QCoreApplication::translate("message-description sent:",
                                                           "%1 answered a call")
                          .arg(username);
        } else if (std::is_same<T, CallHangUp>::value) {
                if (isLocal)
                        return QCoreApplication::translate("message-description sent:",
                                                           "You ended a call");
                else
                        return QCoreApplication::translate("message-description sent:",
                                                           "%1 ended a call")
                          .arg(username);
        } else {
                return QCoreApplication::translate("utils", "Unknown Message Type");
        }
438
src/WebRTCSession.cpp
Normal file
438
src/WebRTCSession.cpp
Normal file
|
@ -0,0 +1,438 @@
|
|||
#include "WebRTCSession.h"
|
||||
#include "Logging.h"
|
||||
|
||||
extern "C" {
|
||||
#include "gst/gst.h"
|
||||
#include "gst/sdp/sdp.h"
|
||||
|
||||
#define GST_USE_UNSTABLE_API
|
||||
#include "gst/webrtc/webrtc.h"
|
||||
}
|
||||
|
||||
namespace {
|
||||
bool gisoffer;
|
||||
std::string glocalsdp;
|
||||
std::vector<mtx::events::msg::CallCandidates::Candidate> gcandidates;
|
||||
|
||||
gboolean newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data);
|
||||
GstWebRTCSessionDescription* parseSDP(const std::string &sdp, GstWebRTCSDPType type);
|
||||
void generateOffer(GstElement *webrtc);
|
||||
void setLocalDescription(GstPromise *promise, gpointer webrtc);
|
||||
void addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, guint mlineIndex, gchar *candidate, gpointer G_GNUC_UNUSED);
|
||||
gboolean onICEGatheringCompletion(gpointer timerid);
|
||||
void createAnswer(GstPromise *promise, gpointer webrtc);
|
||||
void addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe);
|
||||
void linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe);
|
||||
}
|
||||
|
||||
bool
|
||||
WebRTCSession::init(std::string *errorMessage)
|
||||
{
|
||||
if (initialised_)
|
||||
return true;
|
||||
|
||||
GError *error = nullptr;
|
||||
if (!gst_init_check(nullptr, nullptr, &error)) {
|
||||
std::string strError = std::string("Failed to initialise GStreamer: ");
|
||||
if (error) {
|
||||
strError += error->message;
|
||||
g_error_free(error);
|
||||
}
|
||||
nhlog::ui()->error(strError);
|
||||
if (errorMessage)
|
||||
*errorMessage = strError;
|
||||
return false;
|
||||
}
|
||||
|
||||
gchar *version = gst_version_string();
|
||||
std::string gstVersion(version);
|
||||
g_free(version);
|
||||
nhlog::ui()->info("Initialised " + gstVersion);
|
||||
|
||||
// GStreamer Plugins:
|
||||
// Base: audioconvert, audioresample, opus, playback, videoconvert, volume
|
||||
// Good: autodetect, rtpmanager, vpx
|
||||
// Bad: dtls, srtp, webrtc
|
||||
// libnice [GLib]: nice
|
||||
initialised_ = true;
|
||||
std::string strError = gstVersion + ": Missing plugins: ";
|
||||
const gchar *needed[] = {"audioconvert", "audioresample", "autodetect", "dtls", "nice",
|
||||
"opus", "playback", "rtpmanager", "srtp", "videoconvert", "vpx", "volume", "webrtc", nullptr};
|
||||
GstRegistry *registry = gst_registry_get();
|
||||
for (guint i = 0; i < g_strv_length((gchar**)needed); i++) {
|
||||
GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]);
|
||||
if (!plugin) {
|
||||
strError += needed[i];
|
||||
initialised_ = false;
|
||||
continue;
|
||||
}
|
||||
gst_object_unref(plugin);
|
||||
}
|
||||
|
||||
if (!initialised_) {
|
||||
nhlog::ui()->error(strError);
|
||||
if (errorMessage)
|
||||
*errorMessage = strError;
|
||||
}
|
||||
return initialised_;
|
||||
}
|
||||
|
||||
bool
|
||||
WebRTCSession::createOffer()
|
||||
{
|
||||
gisoffer = true;
|
||||
glocalsdp.clear();
|
||||
gcandidates.clear();
|
||||
return startPipeline(111); // a dynamic opus payload type
|
||||
}
|
||||
|
||||
bool
|
||||
WebRTCSession::acceptOffer(const std::string& sdp)
|
||||
{
|
||||
nhlog::ui()->debug("Received offer:\n{}", sdp);
|
||||
gisoffer = false;
|
||||
glocalsdp.clear();
|
||||
gcandidates.clear();
|
||||
|
||||
// eg a=rtpmap:111 opus/48000/2
|
||||
int opusPayloadType = 0;
|
||||
if (auto e = sdp.find("opus"); e == std::string::npos) {
|
||||
nhlog::ui()->error("WebRTC: remote offer - opus media attribute missing");
|
||||
return false;
|
||||
}
|
||||
else {
|
||||
if (auto s = sdp.rfind(':', e); s == std::string::npos) {
|
||||
nhlog::ui()->error("WebRTC: remote offer - unable to determine opus payload type");
|
||||
return false;
|
||||
}
|
||||
else {
|
||||
++s;
|
||||
try {
|
||||
opusPayloadType = std::stoi(std::string(sdp, s, e - s));
|
||||
}
|
||||
catch(...) {
|
||||
nhlog::ui()->error("WebRTC: remote offer - unable to determine opus payload type");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
|
||||
if (!offer)
|
||||
return false;
|
||||
|
||||
if (!startPipeline(opusPayloadType))
|
||||
return false;
|
||||
|
||||
// set-remote-description first, then create-answer
|
||||
GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc_, nullptr);
|
||||
g_signal_emit_by_name(webrtc_, "set-remote-description", offer, promise);
|
||||
gst_webrtc_session_description_free(offer);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
WebRTCSession::startPipeline(int opusPayloadType)
|
||||
{
|
||||
if (isActive())
|
||||
return false;
|
||||
|
||||
if (!createPipeline(opusPayloadType))
|
||||
return false;
|
||||
|
||||
webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin");
|
||||
|
||||
if (!stunServer_.empty()) {
|
||||
nhlog::ui()->info("WebRTC: Setting stun server: {}", stunServer_);
|
||||
g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr);
|
||||
}
|
||||
addTurnServers();
|
||||
|
||||
// generate the offer when the pipeline goes to PLAYING
|
||||
if (gisoffer)
|
||||
g_signal_connect(webrtc_, "on-negotiation-needed", G_CALLBACK(generateOffer), nullptr);
|
||||
|
||||
// on-ice-candidate is emitted when a local ICE candidate has been gathered
|
||||
g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr);
|
||||
|
||||
// incoming streams trigger pad-added
|
||||
gst_element_set_state(pipe_, GST_STATE_READY);
|
||||
g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_);
|
||||
|
||||
// webrtcbin lifetime is the same as that of the pipeline
|
||||
gst_object_unref(webrtc_);
|
||||
|
||||
// start the pipeline
|
||||
GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING);
|
||||
if (ret == GST_STATE_CHANGE_FAILURE) {
|
||||
nhlog::ui()->error("WebRTC: unable to start pipeline");
|
||||
gst_object_unref(pipe_);
|
||||
pipe_ = nullptr;
|
||||
webrtc_ = nullptr;
|
||||
return false;
|
||||
}
|
||||
|
||||
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_));
|
||||
gst_bus_add_watch(bus, newBusMessage, this);
|
||||
gst_object_unref(bus);
|
||||
emit pipelineChanged(true);
|
||||
return true;
|
||||
}
|
||||
|
||||
#define RTP_CAPS_OPUS "application/x-rtp,media=audio,encoding-name=OPUS,payload="
|
||||
|
||||
bool
|
||||
WebRTCSession::createPipeline(int opusPayloadType)
|
||||
{
|
||||
std::string pipeline("webrtcbin bundle-policy=max-bundle name=webrtcbin "
|
||||
"autoaudiosrc ! volume name=srclevel ! audioconvert ! audioresample ! queue ! opusenc ! rtpopuspay ! "
|
||||
"queue ! " RTP_CAPS_OPUS + std::to_string(opusPayloadType) + " ! webrtcbin.");
|
||||
|
||||
webrtc_ = nullptr;
|
||||
GError *error = nullptr;
|
||||
pipe_ = gst_parse_launch(pipeline.c_str(), &error);
|
||||
if (error) {
|
||||
nhlog::ui()->error("WebRTC: Failed to parse pipeline: {}", error->message);
|
||||
g_error_free(error);
|
||||
if (pipe_) {
|
||||
gst_object_unref(pipe_);
|
||||
pipe_ = nullptr;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
WebRTCSession::acceptAnswer(const std::string &sdp)
|
||||
{
|
||||
nhlog::ui()->debug("WebRTC: Received sdp:\n{}", sdp);
|
||||
if (!isActive())
|
||||
return false;
|
||||
|
||||
GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER);
|
||||
if (!answer)
|
||||
return false;
|
||||
|
||||
g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr);
|
||||
gst_webrtc_session_description_free(answer);
|
||||
return true;
|
||||
}
|
||||
|
||||
void
|
||||
WebRTCSession::acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate>& candidates)
|
||||
{
|
||||
if (isActive()) {
|
||||
for (const auto& c : candidates)
|
||||
g_signal_emit_by_name(webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str());
|
||||
}
|
||||
}
|
||||
|
||||
bool
|
||||
WebRTCSession::toggleMuteAudioSrc(bool &isMuted)
|
||||
{
|
||||
if (!isActive())
|
||||
return false;
|
||||
|
||||
GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel");
|
||||
if (!srclevel)
|
||||
return false;
|
||||
|
||||
gboolean muted;
|
||||
g_object_get(srclevel, "mute", &muted, nullptr);
|
||||
g_object_set(srclevel, "mute", !muted, nullptr);
|
||||
gst_object_unref(srclevel);
|
||||
isMuted = !muted;
|
||||
return true;
|
||||
}
|
||||
|
||||
void
|
||||
WebRTCSession::end()
|
||||
{
|
||||
if (pipe_) {
|
||||
gst_element_set_state(pipe_, GST_STATE_NULL);
|
||||
gst_object_unref(pipe_);
|
||||
pipe_ = nullptr;
|
||||
}
|
||||
webrtc_ = nullptr;
|
||||
emit pipelineChanged(false);
|
||||
}
|
||||
|
||||
void
|
||||
WebRTCSession::addTurnServers()
|
||||
{
|
||||
if (!webrtc_)
|
||||
return;
|
||||
|
||||
for (const auto &uri : turnServers_) {
|
||||
gboolean res;
|
||||
g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&res));
|
||||
if (res)
|
||||
nhlog::ui()->info("WebRTC: Set TURN server: {}", uri);
|
||||
else
|
||||
nhlog::ui()->error("WebRTC: Failed to set TURN server: {}", uri);
|
||||
}
|
||||
}
|
||||
|
||||
namespace {
|
||||
|
||||
gboolean
|
||||
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
|
||||
{
|
||||
WebRTCSession *session = (WebRTCSession*)user_data;
|
||||
switch (GST_MESSAGE_TYPE(msg)) {
|
||||
case GST_MESSAGE_EOS:
|
||||
session->end();
|
||||
break;
|
||||
case GST_MESSAGE_ERROR:
|
||||
GError *error;
|
||||
gchar *debug;
|
||||
gst_message_parse_error(msg, &error, &debug);
|
||||
nhlog::ui()->error("WebRTC: Error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message);
|
||||
g_clear_error(&error);
|
||||
g_free(debug);
|
||||
session->end();
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
        return TRUE;
}

GstWebRTCSessionDescription*
parseSDP(const std::string &sdp, GstWebRTCSDPType type)
{
        GstSDPMessage *msg;
        gst_sdp_message_new(&msg);
        if (gst_sdp_message_parse_buffer((guint8*)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) {
                return gst_webrtc_session_description_new(type, msg);
        }
        else {
                nhlog::ui()->error("WebRTC: Failed to parse remote session description");
                gst_object_unref(msg);
                return nullptr;
        }
}

void
generateOffer(GstElement *webrtc)
{
        // create-offer first, then set-local-description
        GstPromise *promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
        g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise);
}

void
setLocalDescription(GstPromise *promise, gpointer webrtc)
{
        const GstStructure *reply = gst_promise_get_reply(promise);
        gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer"));
        GstWebRTCSessionDescription *gstsdp = nullptr;
        gst_structure_get(reply, isAnswer ? "answer" : "offer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &gstsdp, nullptr);
        gst_promise_unref(promise);
        g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr);

        gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp);
        glocalsdp = std::string(sdp);
        g_free(sdp);
        gst_webrtc_session_description_free(gstsdp);

        nhlog::ui()->debug("WebRTC: Local description set ({}):\n{}", isAnswer ? "answer" : "offer", glocalsdp);
}

void
addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED, guint mlineIndex, gchar *candidate, gpointer G_GNUC_UNUSED)
{
        gcandidates.push_back({"audio", (uint16_t)mlineIndex, candidate});

        // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early
        // fixed in v1.18
        // use a 100ms timeout in the meantime
        static guint timerid = 0;
        if (timerid)
                g_source_remove(timerid);

        timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid);
}

gboolean
onICEGatheringCompletion(gpointer timerid)
{
        *(guint*)(timerid) = 0;
        if (gisoffer)
                emit WebRTCSession::instance().offerCreated(glocalsdp, gcandidates);
        else
                emit WebRTCSession::instance().answerCreated(glocalsdp, gcandidates);

        return FALSE;
}

void
createAnswer(GstPromise *promise, gpointer webrtc)
{
        // create-answer first, then set-local-description
        gst_promise_unref(promise);
        promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
        g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise);
}

void
addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
        if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC)
                return;

        GstElement *decodebin = gst_element_factory_make("decodebin", nullptr);
        g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe);
        gst_bin_add(GST_BIN(pipe), decodebin);
        gst_element_sync_state_with_parent(decodebin);
        GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
        if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
                nhlog::ui()->error("WebRTC: Unable to link new pad");
        gst_object_unref(sinkpad);
}

void
linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
        GstCaps *caps = gst_pad_get_current_caps(newpad);
        if (!caps)
                return;

        const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
        gst_caps_unref(caps);

        GstPad *queuepad = nullptr;
        GstElement *queue = gst_element_factory_make("queue", nullptr);

        if (g_str_has_prefix(name, "audio")) {
                GstElement *convert  = gst_element_factory_make("audioconvert", nullptr);
                GstElement *resample = gst_element_factory_make("audioresample", nullptr);
                GstElement *sink     = gst_element_factory_make("autoaudiosink", nullptr);
                gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr);
                gst_element_sync_state_with_parent(queue);
                gst_element_sync_state_with_parent(convert);
                gst_element_sync_state_with_parent(resample);
                gst_element_sync_state_with_parent(sink);
                gst_element_link_many(queue, convert, resample, sink, nullptr);
                queuepad = gst_element_get_static_pad(queue, "sink");
        }
        else if (g_str_has_prefix(name, "video")) {
                GstElement *convert = gst_element_factory_make("videoconvert", nullptr);
                GstElement *sink    = gst_element_factory_make("autovideosink", nullptr);
                gst_bin_add_many(GST_BIN(pipe), queue, convert, sink, nullptr);
                gst_element_sync_state_with_parent(queue);
                gst_element_sync_state_with_parent(convert);
                gst_element_sync_state_with_parent(sink);
                gst_element_link_many(queue, convert, sink, nullptr);
                queuepad = gst_element_get_static_pad(queue, "sink");
        }

        if (queuepad) {
                if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad)))
                        nhlog::ui()->error("WebRTC: Unable to link new pad");
                gst_object_unref(queuepad);
        }
}

}
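For orientation, a minimal sketch (not part of this commit) of how the helpers above can be chained when a remote offer arrives: parseSDP() turns the SDP string into a GstWebRTCSessionDescription, the webrtcbin "set-remote-description" signal applies it, and createAnswer()/setLocalDescription() then publish the local answer through answerCreated(). The function name acceptRemoteOffer and the bare webrtc argument are illustrative; the commit's real entry point is WebRTCSession::acceptOffer(), which is earlier in this file and not shown in this excerpt.

// Sketch only -- not part of the commit.
void
acceptRemoteOffer(GstElement *webrtc, const std::string &sdp)
{
        GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
        if (!offer)
                return;

        // createAnswer() runs "create-answer" once the remote description is set,
        // and setLocalDescription() finally emits answerCreated().
        GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc, nullptr);
        g_signal_emit_by_name(webrtc, "set-remote-description", offer, promise);
        gst_webrtc_session_description_free(offer);
}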
58 src/WebRTCSession.h Normal file
@@ -0,0 +1,58 @@
#pragma once

#include <string>
#include <vector>

#include <QObject>

#include "mtx/events/voip.hpp"

typedef struct _GstElement GstElement;

class WebRTCSession : public QObject
{
        Q_OBJECT

public:
        static WebRTCSession& instance()
        {
                static WebRTCSession instance;
                return instance;
        }

        bool init(std::string *errorMessage = nullptr);

        bool createOffer();
        bool acceptOffer(const std::string &sdp);
        bool acceptAnswer(const std::string &sdp);
        void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate>&);

        bool isActive() { return pipe_ != nullptr; }
        bool toggleMuteAudioSrc(bool &isMuted);
        void end();

        void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; }
        void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }

signals:
        void offerCreated(const std::string &sdp, const std::vector<mtx::events::msg::CallCandidates::Candidate>&);
        void answerCreated(const std::string &sdp, const std::vector<mtx::events::msg::CallCandidates::Candidate>&);
        void pipelineChanged(bool started);

private:
        WebRTCSession() : QObject() {}

        bool initialised_ = false;
        GstElement *pipe_ = nullptr;
        GstElement *webrtc_ = nullptr;
        std::string stunServer_;
        std::vector<std::string> turnServers_;

        bool startPipeline(int opusPayloadType);
        bool createPipeline(int opusPayloadType);
        void addTurnServers();

public:
        WebRTCSession(WebRTCSession const&) = delete;
        void operator=(WebRTCSession const&) = delete;
};
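A minimal usage sketch of this interface (not part of the commit). The STUN URI, the function name placeVoiceCall and the lambda body are placeholders; in the commit this role is played by CallManager.

// Sketch only -- not part of the commit.
#include "WebRTCSession.h"

void
placeVoiceCall()
{
        auto &session = WebRTCSession::instance();
        session.setStunServer("stun://stun.example.org:3478"); // placeholder server

        std::string error;
        if (!session.init(&error))
                return; // e.g. missing GStreamer plugins; `error` holds the reason

        QObject::connect(
          &session,
          &WebRTCSession::offerCreated,
          [](const std::string &sdp,
             const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates) {
                  // hand sdp + candidates to the caller, which sends m.call.invite
                  // and m.call.candidates over Matrix
          });

        session.createOffer(); // offerCreated() fires once ICE gathering settles
}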
53 src/dialogs/AcceptCall.cpp Normal file
@@ -0,0 +1,53 @@
#include <QLabel>
#include <QPushButton>
#include <QVBoxLayout>

#include "Config.h"
#include "dialogs/AcceptCall.h"

namespace dialogs {

AcceptCall::AcceptCall(const QString &caller, const QString &displayName, QWidget *parent)
  : QWidget(parent)
{
        setAutoFillBackground(true);
        setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
        setWindowModality(Qt::WindowModal);
        setAttribute(Qt::WA_DeleteOnClose, true);

        auto layout = new QVBoxLayout(this);
        layout->setSpacing(conf::modals::WIDGET_SPACING);
        layout->setMargin(conf::modals::WIDGET_MARGIN);

        auto buttonLayout = new QHBoxLayout();
        buttonLayout->setSpacing(15);
        buttonLayout->setMargin(0);

        acceptBtn_ = new QPushButton(tr("Accept"), this);
        acceptBtn_->setDefault(true);
        rejectBtn_ = new QPushButton(tr("Reject"), this);

        buttonLayout->addStretch(1);
        buttonLayout->addWidget(acceptBtn_);
        buttonLayout->addWidget(rejectBtn_);

        QLabel *label;
        if (!displayName.isEmpty() && displayName != caller)
                label = new QLabel("Accept call from " + displayName + " (" + caller + ")?", this);
        else
                label = new QLabel("Accept call from " + caller + "?", this);

        layout->addWidget(label);
        layout->addLayout(buttonLayout);

        connect(acceptBtn_, &QPushButton::clicked, this, [this]() {
                emit accept();
                emit close();
        });
        connect(rejectBtn_, &QPushButton::clicked, this, [this]() {
                emit reject();
                emit close();
        });
}

}
26 src/dialogs/AcceptCall.h Normal file
@@ -0,0 +1,26 @@
#pragma once

#include <QString>
#include <QWidget>

class QPushButton;

namespace dialogs {

class AcceptCall : public QWidget
{
        Q_OBJECT

public:
        AcceptCall(const QString &caller, const QString &displayName, QWidget *parent = nullptr);

signals:
        void accept();
        void reject();

private:
        QPushButton *acceptBtn_;
        QPushButton *rejectBtn_;
};

}
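Usage sketch (not part of the commit). The user id, display name and parentWidget are placeholders, and the lambda bodies stand in for the real call handling done elsewhere in this commit.

// Sketch only -- not part of the commit.
auto dialog = new dialogs::AcceptCall("@alice:example.org", "Alice", parentWidget);
QObject::connect(dialog, &dialogs::AcceptCall::accept, [] {
        // answer: feed the cached offer to WebRTCSession and send m.call.answer
});
QObject::connect(dialog, &dialogs::AcceptCall::reject, [] {
        // decline: send m.call.hangup
});
dialog->show();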
60 src/dialogs/PlaceCall.cpp Normal file
@@ -0,0 +1,60 @@
#include <QLabel>
#include <QPushButton>
#include <QString>
#include <QVBoxLayout>

#include "Config.h"
#include "dialogs/PlaceCall.h"

namespace dialogs {

PlaceCall::PlaceCall(const QString &callee, const QString &displayName, QWidget *parent)
  : QWidget(parent)
{
        setAutoFillBackground(true);
        setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
        setWindowModality(Qt::WindowModal);
        setAttribute(Qt::WA_DeleteOnClose, true);

        auto layout = new QVBoxLayout(this);
        layout->setSpacing(conf::modals::WIDGET_SPACING);
        layout->setMargin(conf::modals::WIDGET_MARGIN);

        auto buttonLayout = new QHBoxLayout();
        buttonLayout->setSpacing(15);
        buttonLayout->setMargin(0);

        voiceBtn_ = new QPushButton(tr("Voice Call"), this);
        voiceBtn_->setDefault(true);
        videoBtn_ = new QPushButton(tr("Video Call"), this);
        cancelBtn_ = new QPushButton(tr("Cancel"), this);

        buttonLayout->addStretch(1);
        buttonLayout->addWidget(voiceBtn_);
        buttonLayout->addWidget(videoBtn_);
        buttonLayout->addWidget(cancelBtn_);

        QLabel *label;
        if (!displayName.isEmpty() && displayName != callee)
                label = new QLabel("Place a call to " + displayName + " (" + callee + ")?", this);
        else
                label = new QLabel("Place a call to " + callee + "?", this);

        layout->addWidget(label);
        layout->addLayout(buttonLayout);

        connect(voiceBtn_, &QPushButton::clicked, this, [this]() {
                emit voice();
                emit close();
        });
        connect(videoBtn_, &QPushButton::clicked, this, [this]() {
                emit video();
                emit close();
        });
        connect(cancelBtn_, &QPushButton::clicked, this, [this]() {
                emit cancel();
                emit close();
        });
}

}
28 src/dialogs/PlaceCall.h Normal file
@@ -0,0 +1,28 @@
#pragma once

#include <QWidget>

class QPushButton;
class QString;

namespace dialogs {

class PlaceCall : public QWidget
{
        Q_OBJECT

public:
        PlaceCall(const QString &callee, const QString &displayName, QWidget *parent = nullptr);

signals:
        void voice();
        void video();
        void cancel();

private:
        QPushButton *voiceBtn_;
        QPushButton *videoBtn_;
        QPushButton *cancelBtn_;
};

}
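Usage sketch (not part of the commit), analogous to AcceptCall above; only the voice() path is exercised by this commit, the video() signal is declared for later use. Names are placeholders.

// Sketch only -- not part of the commit.
auto dialog = new dialogs::PlaceCall("@bob:example.org", "Bob", parentWidget);
QObject::connect(dialog, &dialogs::PlaceCall::voice, [] {
        // start the outgoing call: create the offer and queue m.call.invite
});
dialog->show();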
@@ -121,6 +121,21 @@ struct RoomEventType
        {
                return qml_mtx_events::EventType::Redacted;
        }
        qml_mtx_events::EventType operator()(
          const mtx::events::Event<mtx::events::msg::CallInvite> &)
        {
                return qml_mtx_events::EventType::CallInvite;
        }
        qml_mtx_events::EventType operator()(
          const mtx::events::Event<mtx::events::msg::CallAnswer> &)
        {
                return qml_mtx_events::EventType::CallAnswer;
        }
        qml_mtx_events::EventType operator()(
          const mtx::events::Event<mtx::events::msg::CallHangUp> &)
        {
                return qml_mtx_events::EventType::CallHangUp;
        }
        // ::EventType::Type operator()(const Event<mtx::events::msg::Location> &e) { return
        // ::EventType::LocationMessage; }
};
@@ -538,7 +553,7 @@ TimelineModel::addEvents(const mtx::responses::Timeline &timeline)
        if (timeline.events.empty())
                return;

        std::vector<QString> ids = internalAddEvents(timeline.events);
        std::vector<QString> ids = internalAddEvents(timeline.events, true);

        if (!ids.empty()) {
                beginInsertRows(QModelIndex(), 0, static_cast<int>(ids.size() - 1));
@@ -572,6 +587,23 @@ isMessage(const mtx::events::EncryptedEvent<T> &)
        return true;
}

auto
isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &)
{
        return true;
}

auto
isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &)
{
        return true;
}
auto
isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &)
{
        return true;
}

// Workaround. We also want to see a room at the top, if we just joined it
auto
isYourJoin(const mtx::events::StateEvent<mtx::events::state::Member> &e)
@@ -623,7 +655,8 @@ TimelineModel::updateLastMessage()

std::vector<QString>
TimelineModel::internalAddEvents(
  const std::vector<mtx::events::collections::TimelineEvents> &timeline)
  const std::vector<mtx::events::collections::TimelineEvents> &timeline,
  bool emitCallEvents)
{
        std::vector<QString> ids;
        for (auto e : timeline) {
@@ -717,6 +750,46 @@ TimelineModel::internalAddEvents(

                        if (encInfo)
                                emit newEncryptedImage(encInfo.value());

                        if (emitCallEvents) {
                                // event room_id is not set, apparently due to spec bug
                                if (auto callInvite = std::get_if<
                                      mtx::events::RoomEvent<mtx::events::msg::CallInvite>>(&e_)) {
                                        callInvite->room_id = room_id_.toStdString();
                                        emit newCallEvent(e_);
                                } else if (std::holds_alternative<mtx::events::RoomEvent<
                                             mtx::events::msg::CallCandidates>>(e_) ||
                                           std::holds_alternative<
                                             mtx::events::RoomEvent<mtx::events::msg::CallAnswer>>(e_) ||
                                           std::holds_alternative<
                                             mtx::events::RoomEvent<mtx::events::msg::CallHangUp>>(e_)) {
                                        emit newCallEvent(e_);
                                }
                        }
                }

                if (std::holds_alternative<
                      mtx::events::RoomEvent<mtx::events::msg::CallCandidates>>(e)) {
                        // don't display CallCandidate events to user
                        events.insert(id, e);
                        if (emitCallEvents)
                                emit newCallEvent(e);
                        continue;
                }

                if (emitCallEvents) {
                        // event room_id is not set, apparently due to spec bug
                        if (auto callInvite =
                              std::get_if<mtx::events::RoomEvent<mtx::events::msg::CallInvite>>(
                                &e)) {
                                callInvite->room_id = room_id_.toStdString();
                                emit newCallEvent(e);
                        } else if (std::holds_alternative<
                                     mtx::events::RoomEvent<mtx::events::msg::CallAnswer>>(e) ||
                                   std::holds_alternative<
                                     mtx::events::RoomEvent<mtx::events::msg::CallHangUp>>(e)) {
                                emit newCallEvent(e);
                        }
                }

                this->events.insert(id, e);
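A receiver of newCallEvent() (CallManager::syncEvent in this commit, not shown in this excerpt) can dispatch on the TimelineEvents variant the same way the checks above do. A sketch, with the handler bodies elided and the function name handleCallEvent chosen for illustration:

// Sketch only -- not part of the commit.
void
handleCallEvent(const mtx::events::collections::TimelineEvents &event)
{
        using namespace mtx::events;
        if (auto invite = std::get_if<RoomEvent<msg::CallInvite>>(&event)) {
                // incoming call: remember invite->content.call_id, show dialogs::AcceptCall
        } else if (auto c = std::get_if<RoomEvent<msg::CallCandidates>>(&event)) {
                // feed c->content.candidates to WebRTCSession::acceptICECandidates()
        } else if (std::holds_alternative<RoomEvent<msg::CallAnswer>>(event)) {
                // remote side answered: hand the SDP to WebRTCSession::acceptAnswer()
        } else if (std::holds_alternative<RoomEvent<msg::CallHangUp>>(event)) {
                // tear down: WebRTCSession::instance().end()
        }
}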
@@ -774,7 +847,7 @@ TimelineModel::readEvent(const std::string &id)
void
TimelineModel::addBackwardsEvents(const mtx::responses::Messages &msgs)
{
        std::vector<QString> ids = internalAddEvents(msgs.chunk);
        std::vector<QString> ids = internalAddEvents(msgs.chunk, false);

        if (!ids.empty()) {
                beginInsertRows(QModelIndex(),
@@ -1064,14 +1137,17 @@ TimelineModel::markEventsAsRead(const std::vector<QString> &event_ids)
}

void
TimelineModel::sendEncryptedMessage(const std::string &txn_id, nlohmann::json content)
TimelineModel::sendEncryptedMessageEvent(const std::string &txn_id,
                                         nlohmann::json content,
                                         mtx::events::EventType eventType)
{
        const auto room_id = room_id_.toStdString();

        using namespace mtx::events;
        using namespace mtx::identifiers;

        json doc = {{"type", "m.room.message"}, {"content", content}, {"room_id", room_id}};
        json doc = {
          {"type", mtx::events::to_string(eventType)}, {"content", content}, {"room_id", room_id}};

        try {
                // Check if we have already an outbound megolm session then we can use.
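For a call event, the plaintext doc above now carries the real event type instead of m.room.message. An illustrative value (not from the commit; field values are made up, the content shape follows the Matrix r0 VoIP spec):

// Sketch only -- not part of the commit.
nlohmann::json example = {
  {"type", "m.call.invite"}, // mtx::events::to_string(EventType::CallInvite)
  {"room_id", "!room:example.org"},
  {"content",
   {{"call_id", "1234"},
    {"version", 0},
    {"lifetime", 120000},
    {"offer", {{"type", "offer"}, {"sdp", "v=0 ..."}}}}}
};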
@@ -1375,59 +1451,27 @@ struct SendMessageVisitor
  , model_(model)
{}

// Do-nothing operator for all unhandled events
template<typename T>
void operator()(const mtx::events::Event<T> &)
{}
// Operator for m.room.message events that contain a msgtype in their content
template<typename T,
         std::enable_if_t<std::is_same<decltype(T::msgtype), std::string>::value, int> = 0>
void operator()(const mtx::events::RoomEvent<T> &msg)

template<typename T, mtx::events::EventType Event>
void sendRoomEvent(const mtx::events::RoomEvent<T> &msg)
{
        if (cache::isRoomEncrypted(model_->room_id_.toStdString())) {
                auto encInfo = mtx::accessors::file(msg);
                if (encInfo)
                        emit model_->newEncryptedImage(encInfo.value());

                model_->sendEncryptedMessage(txn_id_qstr_.toStdString(),
                                             nlohmann::json(msg.content));
                model_->sendEncryptedMessageEvent(
                  txn_id_qstr_.toStdString(), nlohmann::json(msg.content), Event);
        } else {
                QString txn_id_qstr = txn_id_qstr_;
                TimelineModel *model = model_;
                http::client()->send_room_message<T, mtx::events::EventType::RoomMessage>(
                  model->room_id_.toStdString(),
                  txn_id_qstr.toStdString(),
                  msg.content,
                  [txn_id_qstr, model](const mtx::responses::EventId &res,
                                       mtx::http::RequestErr err) {
                          if (err) {
                                  const int status_code =
                                    static_cast<int>(err->status_code);
                                  nhlog::net()->warn("[{}] failed to send message: {} {}",
                                                     txn_id_qstr.toStdString(),
                                                     err->matrix_error.error,
                                                     status_code);
                                  emit model->messageFailed(txn_id_qstr);
                          }
                          emit model->messageSent(
                            txn_id_qstr, QString::fromStdString(res.event_id.to_string()));
                  });
                sendUnencryptedRoomEvent<T, Event>(msg);
        }
}

// Special operator for reactions, which are a type of m.room.message, but need to be
// handled distinctly for their differences from normal room messages. Specifically,
// reactions need to have the relation outside of ciphertext, or synapse / the homeserver
// cannot handle it correctly. See the MSC for more details:
// https://github.com/matrix-org/matrix-doc/blob/matthew/msc1849/proposals/1849-aggregations.md#end-to-end-encryption
void operator()(const mtx::events::RoomEvent<mtx::events::msg::Reaction> &msg)

template<typename T, mtx::events::EventType Event>
void sendUnencryptedRoomEvent(const mtx::events::RoomEvent<T> &msg)
{
        QString txn_id_qstr = txn_id_qstr_;
        TimelineModel *model = model_;
        http::client()
          ->send_room_message<mtx::events::msg::Reaction, mtx::events::EventType::Reaction>(
        http::client()->send_room_message<T, Event>(
          model->room_id_.toStdString(),
          txn_id_qstr.toStdString(),
          msg.content,
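The visitor is applied over the TimelineEvents variant with std::visit, so each overload selects the matching EventType at compile time. A rough sketch; the constructor arguments are assumed from the initializer list shown above, and someCallInviteRoomEvent() is a stand-in:

// Sketch only -- not part of the commit; constructor arguments assumed.
mtx::events::collections::TimelineEvents event = someCallInviteRoomEvent();
std::visit(SendMessageVisitor{txn_id_qstr, this}, event);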
@@ -1441,11 +1485,59 @@ struct SendMessageVisitor
                                  status_code);
                          emit model->messageFailed(txn_id_qstr);
                  }
                  emit model->messageSent(
                    txn_id_qstr, QString::fromStdString(res.event_id.to_string()));
                  emit model->messageSent(txn_id_qstr,
                                          QString::fromStdString(res.event_id.to_string()));
          });
}

// Do-nothing operator for all unhandled events
template<typename T>
void operator()(const mtx::events::Event<T> &)
{}

// Operator for m.room.message events that contain a msgtype in their content
template<typename T,
         std::enable_if_t<std::is_same<decltype(T::msgtype), std::string>::value, int> = 0>
void operator()(const mtx::events::RoomEvent<T> &msg)
{
        sendRoomEvent<T, mtx::events::EventType::RoomMessage>(msg);
}

// Special operator for reactions, which are a type of m.room.message, but need to be
// handled distinctly for their differences from normal room messages. Specifically,
// reactions need to have the relation outside of ciphertext, or synapse / the homeserver
// cannot handle it correctly. See the MSC for more details:
// https://github.com/matrix-org/matrix-doc/blob/matthew/msc1849/proposals/1849-aggregations.md#end-to-end-encryption
void operator()(const mtx::events::RoomEvent<mtx::events::msg::Reaction> &msg)
{
        sendUnencryptedRoomEvent<mtx::events::msg::Reaction,
                                 mtx::events::EventType::Reaction>(msg);
}

void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &event)
{
        sendRoomEvent<mtx::events::msg::CallInvite, mtx::events::EventType::CallInvite>(
          event);
}

void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates> &event)
{
        sendRoomEvent<mtx::events::msg::CallCandidates,
                      mtx::events::EventType::CallCandidates>(event);
}

void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &event)
{
        sendRoomEvent<mtx::events::msg::CallAnswer, mtx::events::EventType::CallAnswer>(
          event);
}

void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &event)
{
        sendRoomEvent<mtx::events::msg::CallHangUp, mtx::events::EventType::CallHangUp>(
          event);
}

QString txn_id_qstr_;
TimelineModel *model_;
};
@@ -1467,14 +1559,13 @@ TimelineModel::addPendingMessage(mtx::events::collections::TimelineEvents event)
{
        std::visit(
          [](auto &msg) {
                  msg.type = mtx::events::EventType::RoomMessage;
                  msg.event_id = http::client()->generate_txn_id();
                  msg.sender = http::client()->user_id().to_string();
                  msg.origin_server_ts = QDateTime::currentMSecsSinceEpoch();
          },
          event);

        internalAddEvents({event});
        internalAddEvents({event}, false);

        QString txn_id_qstr = QString::fromStdString(mtx::accessors::event_id(event));
        pending.push_back(txn_id_qstr);
@@ -36,6 +36,12 @@ enum EventType
        Aliases,
        /// m.room.avatar
        Avatar,
        /// m.call.invite
        CallInvite,
        /// m.call.answer
        CallAnswer,
        /// m.call.hangup
        CallHangUp,
        /// m.room.canonical_alias
        CanonicalAlias,
        /// m.room.create
@@ -200,7 +206,7 @@ public:
        void updateLastMessage();
        void addEvents(const mtx::responses::Timeline &events);
        template<class T>
        void sendMessage(const T &msg);
        void sendMessageEvent(const T &content, mtx::events::EventType eventType);
        RelatedInfo relatedInfo(QString id);

public slots:
@@ -255,13 +261,17 @@ signals:
        void typingUsersChanged(std::vector<QString> users);
        void replyChanged(QString reply);
        void paginationInProgressChanged(const bool);
        void newCallEvent(const mtx::events::collections::TimelineEvents &event);

private:
        DecryptionResult decryptEvent(
          const mtx::events::EncryptedEvent<mtx::events::msg::Encrypted> &e) const;
        std::vector<QString> internalAddEvents(
          const std::vector<mtx::events::collections::TimelineEvents> &timeline);
        void sendEncryptedMessage(const std::string &txn_id, nlohmann::json content);
          const std::vector<mtx::events::collections::TimelineEvents> &timeline,
          bool emitCallEvents);
        void sendEncryptedMessageEvent(const std::string &txn_id,
                                       nlohmann::json content,
                                       mtx::events::EventType);
        void handleClaimedKeys(std::shared_ptr<StateKeeper> keeper,
                               const std::map<std::string, std::string> &room_key,
                               const std::map<std::string, DevicePublicKeys> &pks,
@@ -296,9 +306,10 @@ private:

template<class T>
void
TimelineModel::sendMessage(const T &msg)
TimelineModel::sendMessageEvent(const T &content, mtx::events::EventType eventType)
{
        mtx::events::RoomEvent<T> msgCopy = {};
        msgCopy.content = msg;
        msgCopy.content = content;
        msgCopy.type = eventType;
        emit newMessageToSend(msgCopy);
}
@@ -3,8 +3,10 @@
#include <QMetaType>
#include <QPalette>
#include <QQmlContext>
#include <QString>

#include "BlurhashProvider.h"
#include "CallManager.h"
#include "ChatPage.h"
#include "ColorImageProvider.h"
#include "DelegateChooser.h"
@@ -71,10 +73,13 @@ TimelineViewManager::userStatus(QString id) const
        return QString::fromStdString(cache::statusMessage(id.toStdString()));
}

TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettings, QWidget *parent)
TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettings,
                                         CallManager *callManager,
                                         QWidget *parent)
  : imgProvider(new MxcImageProvider())
  , colorImgProvider(new ColorImageProvider())
  , blurhashProvider(new BlurhashProvider())
  , callManager_(callManager)
  , settings(userSettings)
{
        qmlRegisterUncreatableMetaObject(qml_mtx_events::staticMetaObject,
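The extra constructor parameter means the owner of the view manager (ChatPage in nheko) now has to pass its CallManager in. A rough sketch; the CallManager constructor signature shown here is assumed, not taken from the commit:

// Sketch only -- not part of the commit; CallManager's constructor is assumed.
auto callManager = new CallManager(userSettings);
auto viewManager = new TimelineViewManager(userSettings, callManager, this);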
@@ -139,7 +144,17 @@ TimelineViewManager::sync(const mtx::responses::Rooms &rooms)
                // addRoom will only add the room, if it doesn't exist
                addRoom(QString::fromStdString(room_id));
                const auto &room_model = models.value(QString::fromStdString(room_id));
                if (!isInitialSync_)
                        connect(room_model.data(),
                                &TimelineModel::newCallEvent,
                                callManager_,
                                &CallManager::syncEvent);
                room_model->addEvents(room.timeline);
                if (!isInitialSync_)
                        disconnect(room_model.data(),
                                   &TimelineModel::newCallEvent,
                                   callManager_,
                                   &CallManager::syncEvent);

                if (ChatPage::instance()->userSettings()->typingNotifications()) {
                        std::vector<QString> typing;
@@ -285,7 +300,7 @@ TimelineViewManager::queueTextMessage(const QString &msg)
                timeline_->resetReply();
        }

        timeline_->sendMessage(text);
        timeline_->sendMessageEvent(text, mtx::events::EventType::RoomMessage);
}

void

@@ -307,7 +322,7 @@ TimelineViewManager::queueEmoteMessage(const QString &msg)
        }

        if (timeline_)
                timeline_->sendMessage(emote);
                timeline_->sendMessageEvent(emote, mtx::events::EventType::RoomMessage);
}

void

@@ -337,7 +352,7 @@ TimelineViewManager::queueReactionMessage(const QString &roomId,
        reaction.relates_to.key = reactionKey.toStdString();

        auto model = models.value(roomId);
        model->sendMessage(reaction);
        model->sendMessageEvent(reaction, mtx::events::EventType::RoomMessage);
}

void

@@ -366,7 +381,7 @@ TimelineViewManager::queueImageMessage(const QString &roomid,
                model->resetReply();
        }

        model->sendMessage(image);
        model->sendMessageEvent(image, mtx::events::EventType::RoomMessage);
}

void

@@ -391,7 +406,7 @@ TimelineViewManager::queueFileMessage(
                model->resetReply();
        }

        model->sendMessage(file);
        model->sendMessageEvent(file, mtx::events::EventType::RoomMessage);
}

void

@@ -415,7 +430,7 @@ TimelineViewManager::queueAudioMessage(const QString &roomid,
                model->resetReply();
        }

        model->sendMessage(audio);
        model->sendMessageEvent(audio, mtx::events::EventType::RoomMessage);
}

void
|
|||
model->resetReply();
|
||||
}
|
||||
|
||||
model->sendMessage(video);
|
||||
model->sendMessageEvent(video, mtx::events::EventType::RoomMessage);
|
||||
}
|
||||
|
||||
void
|
||||
TimelineViewManager::queueCallMessage(const QString &roomid,
|
||||
const mtx::events::msg::CallInvite &callInvite)
|
||||
{
|
||||
models.value(roomid)->sendMessageEvent(callInvite, mtx::events::EventType::CallInvite);
|
||||
}
|
||||
|
||||
void
|
||||
TimelineViewManager::queueCallMessage(const QString &roomid,
|
||||
const mtx::events::msg::CallCandidates &callCandidates)
|
||||
{
|
||||
models.value(roomid)->sendMessageEvent(callCandidates,
|
||||
mtx::events::EventType::CallCandidates);
|
||||
}
|
||||
|
||||
void
|
||||
TimelineViewManager::queueCallMessage(const QString &roomid,
|
||||
const mtx::events::msg::CallAnswer &callAnswer)
|
||||
{
|
||||
models.value(roomid)->sendMessageEvent(callAnswer, mtx::events::EventType::CallAnswer);
|
||||
}
|
||||
|
||||
void
|
||||
TimelineViewManager::queueCallMessage(const QString &roomid,
|
||||
const mtx::events::msg::CallHangUp &callHangUp)
|
||||
{
|
||||
models.value(roomid)->sendMessageEvent(callHangUp, mtx::events::EventType::CallHangUp);
|
||||
}
|
||||
|
|
|
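These overloads give CallManager a single path for sending VoIP events; for example, after WebRTCSession emits answerCreated(), the packaged CallAnswer can be handed straight to the timeline. A sketch; sendAnswer and the timelineViewManager_ member are illustrative names, not taken from the commit:

// Sketch only -- not part of the commit; names are illustrative.
void
CallManager::sendAnswer(const QString &roomid, const mtx::events::msg::CallAnswer &answer)
{
        // TimelineModel's SendMessageVisitor picks EventType::CallAnswer and sends it
        // via send_room_message (or through megolm in encrypted rooms)
        timelineViewManager_->queueCallMessage(roomid, answer);
}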
@@ -18,6 +18,7 @@

class MxcImageProvider;
class BlurhashProvider;
class CallManager;
class ColorImageProvider;
class UserSettings;

@@ -31,7 +32,9 @@ class TimelineViewManager : public QObject
          bool isInitialSync MEMBER isInitialSync_ READ isInitialSync NOTIFY initialSyncChanged)

public:
        TimelineViewManager(QSharedPointer<UserSettings> userSettings, QWidget *parent = nullptr);
        TimelineViewManager(QSharedPointer<UserSettings> userSettings,
                            CallManager *callManager,
                            QWidget *parent = nullptr);
        QWidget *getWidget() const { return container; }

        void sync(const mtx::responses::Rooms &rooms);

@@ -96,6 +99,11 @@ public slots:
                               const QString &url,
                               const QString &mime,
                               uint64_t dsize);
        void queueCallMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
        void queueCallMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
        void queueCallMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
        void queueCallMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);

        void updateEncryptedDescriptions();

private:

@@ -112,6 +120,7 @@ private:

        QHash<QString, QSharedPointer<TimelineModel>> models;
        TimelineModel *timeline_ = nullptr;
        CallManager *callManager_ = nullptr;

        bool isInitialSync_ = true;