Merge remote-tracking branch 'upstream/master' into device-verification

This commit is contained in:
CH Chethan Reddy 2020-08-30 22:27:14 +05:30
commit b174bd9380
54 changed files with 6706 additions and 223 deletions

View file

@ -225,6 +225,7 @@ configure_file(cmake/nheko.h config/nheko.h)
#
set(SRC_FILES
# Dialogs
src/dialogs/AcceptCall.cpp
src/dialogs/CreateRoom.cpp
src/dialogs/FallbackAuth.cpp
src/dialogs/ImageOverlay.cpp
@ -233,6 +234,7 @@ set(SRC_FILES
src/dialogs/LeaveRoom.cpp
src/dialogs/Logout.cpp
src/dialogs/MemberList.cpp
src/dialogs/PlaceCall.cpp
src/dialogs/PreviewUploadOverlay.cpp
src/dialogs/ReCaptcha.cpp
src/dialogs/ReadReceipts.cpp
@ -277,9 +279,11 @@ set(SRC_FILES
src/ui/ThemeManager.cpp
src/ui/UserProfile.cpp
src/ActiveCallBar.cpp
src/AvatarProvider.cpp
src/BlurhashProvider.cpp
src/Cache.cpp
src/CallManager.cpp
src/ChatPage.cpp
src/ColorImageProvider.cpp
src/CommunitiesList.cpp
@ -306,6 +310,7 @@ set(SRC_FILES
src/UserInfoWidget.cpp
src/UserSettingsPage.cpp
src/Utils.cpp
src/WebRTCSession.cpp
src/WelcomePage.cpp
src/popups/PopupItem.cpp
src/popups/SuggestionsPopup.cpp
@ -423,6 +428,9 @@ else()
find_package(Tweeny REQUIRED)
endif()
include(FindPkgConfig)
pkg_check_modules(GSTREAMER IMPORTED_TARGET gstreamer-sdp-1.0>=1.14 gstreamer-webrtc-1.0>=1.14)
# single instance functionality
set(QAPPLICATION_CLASS QApplication CACHE STRING "Inheritance class for SingleApplication")
add_subdirectory(third_party/SingleApplication-3.1.3.1/)
@ -431,6 +439,7 @@ feature_summary(WHAT ALL INCLUDE_QUIET_PACKAGES FATAL_ON_MISSING_REQUIRED_PACKAG
qt5_wrap_cpp(MOC_HEADERS
# Dialogs
src/dialogs/AcceptCall.h
src/dialogs/CreateRoom.h
src/dialogs/FallbackAuth.h
src/dialogs/ImageOverlay.h
@ -439,6 +448,7 @@ qt5_wrap_cpp(MOC_HEADERS
src/dialogs/LeaveRoom.h
src/dialogs/Logout.h
src/dialogs/MemberList.h
src/dialogs/PlaceCall.h
src/dialogs/PreviewUploadOverlay.h
src/dialogs/RawMessage.h
src/dialogs/ReCaptcha.h
@ -483,9 +493,11 @@ qt5_wrap_cpp(MOC_HEADERS
src/notifications/Manager.h
src/ActiveCallBar.h
src/AvatarProvider.h
src/BlurhashProvider.h
src/Cache_p.h
src/CallManager.h
src/ChatPage.h
src/CommunitiesList.h
src/CommunitiesListItem.h
@ -506,6 +518,7 @@ qt5_wrap_cpp(MOC_HEADERS
src/TrayIcon.h
src/UserInfoWidget.h
src/UserSettingsPage.h
src/WebRTCSession.h
src/WelcomePage.h
src/popups/PopupItem.h
src/popups/SuggestionsPopup.h
@ -594,6 +607,11 @@ target_precompile_headers(nheko
)
endif()
if (TARGET PkgConfig::GSTREAMER)
target_link_libraries(nheko PRIVATE PkgConfig::GSTREAMER)
target_compile_definitions(nheko PRIVATE GSTREAMER_AVAILABLE)
endif()
if(MSVC)
target_link_libraries(nheko PRIVATE ntdll)
endif()

View file

@ -75,6 +75,14 @@ sudo eselect repository enable matrix
sudo emerge -a nheko
```
#### Nix(os)
```bash
nix-env -iA nixpkgs.nheko
# or
nix-shell -p nheko --run nheko
```
#### Alpine Linux (and postmarketOS)
Make sure you have the testing repositories from `edge` enabled. Note that this is not needed on postmarketOS.

Binary file not shown.

After

Width:  |  Height:  |  Size: 643 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 759 B

View file

@ -198,7 +198,7 @@
<location filename="../qml/emoji/EmojiPicker.qml" line="+117"/>
<location line="+139"/>
<source>Search</source>
<translation type="unfinished"></translation>
<translation>Search</translation>
</message>
<message>
<location line="-42"/>
@ -404,6 +404,21 @@ Example: https://server.my:8787</translation>
<source>%1 created and configured room: %2</source>
<translation>%1 created and configured room: %2</translation>
</message>
<message>
<location line="+6"/>
<source>%1 placed a %2 call.</source>
<translation>%1 placed a %2 call.</translation>
</message>
<message>
<location line="+6"/>
<source>%1 answered the call.</source>
<translation>%1 answered the call.</translation>
</message>
<message>
<location line="+6"/>
<source>%1 ended the call.</source>
<translation>%1 ended the call.</translation>
</message>
</context>
<context>
<name>Placeholder</name>
@ -1796,6 +1811,36 @@ Media size: %2
<source>%1 sent an encrypted message</source>
<translation>%1 sent an encrypted message</translation>
</message>
<message>
<location line="+5"/>
<source>You placed a call</source>
<translation>You placed a call</translation>
</message>
<message>
<location line="+3"/>
<source>%1 placed a call</source>
<translation>%1 placed a call</translation>
</message>
<message>
<location line="+5"/>
<source>You answered a call</source>
<translation>You answered a call</translation>
</message>
<message>
<location line="+3"/>
<source>%1 answered a call</source>
<translation>%1 answered a call</translation>
</message>
<message>
<location line="+5"/>
<source>You ended a call</source>
<translation>You ended a call</translation>
</message>
<message>
<location line="+3"/>
<source>%1 ended a call</source>
<translation>%1 ended a call</translation>
</message>
</context>
<context>
<name>popups::UserMentions</name>

1815
resources/langs/nheko_ro.ts Normal file

File diff suppressed because it is too large Load diff

1604
resources/langs/nheko_si.ts Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,5 @@
The below media files were obtained from https://github.com/matrix-org/matrix-react-sdk/tree/develop/res/media
callend.ogg
ringback.ogg
ring.ogg

BIN
resources/media/callend.ogg Normal file

Binary file not shown.

BIN
resources/media/ring.ogg Normal file

Binary file not shown.

Binary file not shown.

View file

@ -7,7 +7,8 @@ TextEdit {
textFormat: TextEdit.RichText
readOnly: true
wrapMode: Text.Wrap
selectByMouse: ma.containsMouse // try to make scrollable by finger but selectable by mouse
selectByMouse: true
activeFocusOnPress: false
color: colors.text
onLinkActivated: {
@ -18,14 +19,13 @@ TextEdit {
TimelineManager.setHistoryView(match[1])
chat.positionViewAtIndex(chat.model.idToIndex(match[2]), ListView.Contain)
}
else Qt.openUrlExternally(link)
else timelineManager.openLink(link)
}
MouseArea
{
id: ma
anchors.fill: parent
propagateComposedEvents: true
hoverEnabled: true
acceptedButtons: Qt.NoButton
cursorShape: parent.hoveredLink ? Qt.PointingHandCursor : Qt.ArrowCursor
}

View file

@ -130,6 +130,7 @@ Item {
Label {
Layout.alignment: Qt.AlignRight | Qt.AlignTop
text: model.timestamp.toLocaleTimeString("HH:mm")
width: Math.max(implicitWidth, text.length*fontMetrics.maximumCharacterWidth)
color: inactiveColors.text
MouseArea{

View file

@ -21,7 +21,6 @@ Page {
property real highlightHue: colors.highlight.hslHue
property real highlightSat: colors.highlight.hslSaturation
property real highlightLight: colors.highlight.hslLightness
property variant userProfile
palette: colors
@ -287,6 +286,7 @@ Page {
width: contentWidth * 1.2
horizontalAlignment: Text.AlignHCenter
verticalAlignment: Text.AlignVCenter
background: Rectangle {
radius: parent.height / 2
color: colors.base

View file

@ -96,6 +96,30 @@ Item {
text: qsTr("%1 created and configured room: %2").arg(model.data.userName).arg(model.data.roomId)
}
}
DelegateChoice {
roleValue: MtxEvent.CallInvite
NoticeMessage {
text: qsTr("%1 placed a %2 call.").arg(model.data.userName).arg(model.data.callType)
}
}
DelegateChoice {
roleValue: MtxEvent.CallAnswer
NoticeMessage {
text: qsTr("%1 answered the call.").arg(model.data.userName)
}
}
DelegateChoice {
roleValue: MtxEvent.CallHangUp
NoticeMessage {
text: qsTr("%1 ended the call.").arg(model.data.userName)
}
}
DelegateChoice {
roleValue: MtxEvent.CallCandidates
NoticeMessage {
text: qsTr("Negotiating call...")
}
}
DelegateChoice {
// TODO: make a more complex formatter for the power levels.
roleValue: MtxEvent.PowerLevels

View file

@ -70,6 +70,11 @@
<file>icons/ui/mail-reply.png</file>
<file>icons/ui/place-call.png</file>
<file>icons/ui/end-call.png</file>
<file>icons/ui/microphone-mute.png</file>
<file>icons/ui/microphone-unmute.png</file>
<file>icons/emoji-categories/people.png</file>
<file>icons/emoji-categories/people@2x.png</file>
<file>icons/emoji-categories/nature.png</file>
@ -138,4 +143,9 @@
<file>qml/delegates/Reply.qml</file>
<file>qml/device-verification/DeviceVerification.qml</file>
</qresource>
<qresource prefix="/media">
<file>media/ring.ogg</file>
<file>media/ringback.ogg</file>
<file>media/callend.ogg</file>
</qresource>
</RCC>

160
src/ActiveCallBar.cpp Normal file
View file

@ -0,0 +1,160 @@
#include <cstdio>
#include <QDateTime>
#include <QHBoxLayout>
#include <QIcon>
#include <QLabel>
#include <QString>
#include <QTimer>
#include "ActiveCallBar.h"
#include "ChatPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "ui/Avatar.h"
#include "ui/FlatButton.h"
// Green status bar shown above the timeline while a voice call is active.
// Builds the layout (avatar, caller name, call state, duration, mute button)
// and wires itself to WebRTCSession state changes.
ActiveCallBar::ActiveCallBar(QWidget *parent)
  : QWidget(parent)
{
        // Solid green background to make the active call visually distinct.
        setAutoFillBackground(true);
        auto p = palette();
        p.setColor(backgroundRole(), QColor(46, 204, 113));
        setPalette(p);

        QFont f;
        f.setPointSizeF(f.pointSizeF()); // no-op; presumably a scaling placeholder — TODO confirm
        // Size everything relative to the default font so the bar scales with DPI.
        const int fontHeight    = QFontMetrics(f).height();
        const int widgetMargin  = fontHeight / 3;
        const int contentHeight = fontHeight * 3;
        setFixedHeight(contentHeight + widgetMargin);

        layout_ = new QHBoxLayout(this);
        layout_->setSpacing(widgetMargin);
        layout_->setContentsMargins(2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin);

        // Slightly larger, medium-weight font for all text labels in the bar.
        QFont labelFont;
        labelFont.setPointSizeF(labelFont.pointSizeF() * 1.1);
        labelFont.setWeight(QFont::Medium);

        avatar_         = new Avatar(this, QFontMetrics(f).height() * 2.5);
        callPartyLabel_ = new QLabel(this);
        callPartyLabel_->setFont(labelFont);
        stateLabel_ = new QLabel(this);
        stateLabel_->setFont(labelFont);
        durationLabel_ = new QLabel(this);
        durationLabel_->setFont(labelFont);
        durationLabel_->hide(); // only shown once the call is connected

        // Round mute/unmute toggle; the session reports whether the toggle
        // succeeded before we flip the icon.
        muteBtn_ = new FlatButton(this);
        setMuteIcon(false);
        muteBtn_->setFixedSize(buttonSize_, buttonSize_);
        muteBtn_->setCornerRadius(buttonSize_ / 2);
        connect(muteBtn_, &FlatButton::clicked, this, [this]() {
                if (WebRTCSession::instance().toggleMuteAudioSrc(muted_))
                        setMuteIcon(muted_);
        });

        layout_->addWidget(avatar_, 0, Qt::AlignLeft);
        layout_->addWidget(callPartyLabel_, 0, Qt::AlignLeft);
        layout_->addWidget(stateLabel_, 0, Qt::AlignLeft);
        layout_->addWidget(durationLabel_, 0, Qt::AlignLeft);
        layout_->addStretch();
        layout_->addWidget(muteBtn_, 0, Qt::AlignCenter);
        layout_->addSpacing(18);

        // Once-per-second tick that refreshes the call duration label
        // ("MM:SS", or "HH:MM:SS" once the call passes an hour).
        timer_ = new QTimer(this);
        connect(timer_, &QTimer::timeout, this, [this]() {
                auto seconds = QDateTime::currentSecsSinceEpoch() - callStartTime_;
                int s        = seconds % 60;
                int m        = (seconds / 60) % 60;
                int h        = seconds / 3600;
                char buf[12];
                if (h)
                        snprintf(buf, sizeof(buf), "%.2d:%.2d:%.2d", h, m, s);
                else
                        snprintf(buf, sizeof(buf), "%.2d:%.2d", m, s);
                durationLabel_->setText(buf);
        });

        connect(
          &WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &ActiveCallBar::update);
}
// Flip the mute button between its "mute" and "unmute" affordances,
// updating both the tooltip and the icon.
void
ActiveCallBar::setMuteIcon(bool muted)
{
        const char *tooltip  = muted ? "Unmute Mic" : "Mute Mic";
        const char *iconPath = muted ? ":/icons/icons/ui/microphone-unmute.png"
                                     : ":/icons/icons/ui/microphone-mute.png";

        muteBtn_->setToolTip(tooltip);

        QIcon icon;
        icon.addFile(iconPath);
        muteBtn_->setIcon(icon);
        muteBtn_->setIconSize(QSize(buttonSize_, buttonSize_));
}
void
ActiveCallBar::setCallParty(const QString &userid,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl)
{
callPartyLabel_->setText(" " + (displayName.isEmpty() ? userid : displayName) + " ");
if (!avatarUrl.isEmpty())
avatar_->setImage(avatarUrl);
else
avatar_->setLetter(utils::firstChar(roomName));
}
// Slot driven by WebRTCSession::stateChanged: shows/hides the bar and updates
// the status text for each phase of the call lifecycle.
// NOTE(review): the name "update" hides the inherited QWidget::update() slot;
// consider renaming to avoid confusion.
void
ActiveCallBar::update(WebRTCSession::State state)
{
        switch (state) {
        case WebRTCSession::State::INITIATING:
                show();
                stateLabel_->setText("Initiating call...");
                break;
        case WebRTCSession::State::INITIATED:
                show();
                stateLabel_->setText("Call initiated...");
                break;
        case WebRTCSession::State::OFFERSENT:
                show();
                stateLabel_->setText("Calling...");
                break;
        case WebRTCSession::State::CONNECTING:
                show();
                stateLabel_->setText("Connecting...");
                break;
        case WebRTCSession::State::CONNECTED:
                show();
                // Call established: start the 1s duration ticker and swap the
                // textual status for the in-call icon.
                callStartTime_ = QDateTime::currentSecsSinceEpoch();
                timer_->start(1000);
                stateLabel_->setPixmap(
                  QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(buttonSize_, buttonSize_)));
                durationLabel_->setText("00:00");
                durationLabel_->show();
                break;
        case WebRTCSession::State::ICEFAILED:
        case WebRTCSession::State::DISCONNECTED:
                // Call ended or failed: hide the bar and reset transient state.
                hide();
                timer_->stop();
                callPartyLabel_->setText(QString());
                stateLabel_->setText(QString());
                durationLabel_->setText(QString());
                durationLabel_->hide();
                setMuteIcon(false);
                break;
        default:
                break;
        }
}

40
src/ActiveCallBar.h Normal file
View file

@ -0,0 +1,40 @@
#pragma once
#include <QWidget>
#include "WebRTCSession.h"
class QHBoxLayout;
class QLabel;
class QTimer;
class Avatar;
class FlatButton;
//! Status bar widget shown while a voice call is in progress.
//! Displays the remote party, call state, elapsed duration and a mute toggle.
class ActiveCallBar : public QWidget
{
        Q_OBJECT

public:
        ActiveCallBar(QWidget *parent = nullptr);

public slots:
        //! React to WebRTC session state changes (show/hide, status text).
        void update(WebRTCSession::State);
        //! Set the remote party's name and avatar shown in the bar.
        void setCallParty(const QString &userid,
                          const QString &displayName,
                          const QString &roomName,
                          const QString &avatarUrl);

private:
        QHBoxLayout *layout_    = nullptr;
        Avatar *avatar_         = nullptr;
        QLabel *callPartyLabel_ = nullptr; //! remote party display name / user id
        QLabel *stateLabel_     = nullptr; //! textual call state ("Calling...", etc.)
        QLabel *durationLabel_  = nullptr; //! elapsed call time, visible when connected
        FlatButton *muteBtn_    = nullptr;
        int buttonSize_         = 22;
        bool muted_             = false;
        qint64 callStartTime_   = 0; //! epoch seconds when the call connected
        QTimer *timer_          = nullptr; //! 1s tick for the duration label

        //! Swap mute button icon/tooltip between mute and unmute.
        void setMuteIcon(bool muted);
};

View file

@ -35,6 +35,7 @@
#include "EventAccessors.h"
#include "Logging.h"
#include "MatrixClient.h"
#include "Olm.h"
#include "Utils.h"
//! Should be changed when a breaking change occurs in the cache format.
@ -95,6 +96,33 @@ namespace {
std::unique_ptr<Cache> instance_ = nullptr;
}
//! Whether a timeline event should be hidden from the message view
//! (reactions, call ICE candidates and unsupported event types).
//!
//! \param e        the event to classify; taken by value on purpose, since an
//!                 encrypted event is replaced by its decrypted payload so the
//!                 *inner* type decides visibility.
//! \param room_id  room the event belongs to (needed to look up the megolm session).
static bool
isHiddenEvent(mtx::events::collections::TimelineEvents e, const std::string &room_id)
{
        using namespace mtx::events;

        // Try to decrypt first: an m.room.encrypted wrapper may hide e.g. a reaction.
        if (auto encryptedEvent = std::get_if<EncryptedEvent<msg::Encrypted>>(&e)) {
                MegolmSessionIndex index;
                index.room_id    = room_id;
                index.session_id = encryptedEvent->content.session_id;
                index.sender_key = encryptedEvent->content.sender_key;

                auto result = olm::decryptEvent(index, *encryptedEvent);
                if (!result.error)
                        e = result.event.value();
        }

        static constexpr std::initializer_list<EventType> hiddenEvents = {
          EventType::Reaction, EventType::CallCandidates, EventType::Unsupported};

        // Compare only the event's type. (The previous version captured the whole
        // event object by value in the inner lambda, copying it needlessly.)
        return std::visit(
          [](const auto &ev) {
                  return std::find(hiddenEvents.begin(), hiddenEvents.end(), ev.type) !=
                         hiddenEvents.end();
          },
          e);
}
Cache::Cache(const QString &userId, QObject *parent)
: QObject{parent}
, env_{nullptr}
@ -160,7 +188,10 @@ Cache::setup()
}
try {
env_.open(statePath.toStdString().c_str());
// NOTE(Nico): We may want to use (MDB_MAPASYNC | MDB_WRITEMAP) in the future, but
// it can really mess up our database, so we shouldn't. For now, hopefully
// NOMETASYNC is fast enough.
env_.open(statePath.toStdString().c_str(), MDB_NOMETASYNC);
} catch (const lmdb::error &e) {
if (e.code() != MDB_VERSION_MISMATCH && e.code() != MDB_INVALID) {
throw std::runtime_error("LMDB initialization failed" +
@ -776,6 +807,7 @@ Cache::runMigrations()
}},
};
nhlog::db()->info("Running migrations, this may take a while!");
for (const auto &[target_version, migration] : migrations) {
if (target_version > stored_version)
if (!migration()) {
@ -783,6 +815,7 @@ Cache::runMigrations()
return false;
}
}
nhlog::db()->info("Migrations finished.");
setCurrentFormat();
return true;
@ -1608,7 +1641,8 @@ Cache::getLastMessageInfo(lmdb::txn &txn, const std::string &room_id)
}
if (!(obj["type"] == "m.room.message" || obj["type"] == "m.sticker" ||
obj["type"] == "m.room.encrypted"))
obj["type"] == "m.call.invite" || obj["type"] == "m.call.answer" ||
obj["type"] == "m.call.hangup" || obj["type"] == "m.room.encrypted"))
continue;
mtx::events::collections::TimelineEvent te;
@ -2326,6 +2360,11 @@ Cache::saveTimelineMessages(lmdb::txn &txn,
lmdb::val event_id = event_id_val;
json orderEntry = json::object();
orderEntry["event_id"] = event_id_val;
if (first && !res.prev_batch.empty())
orderEntry["prev_batch"] = res.prev_batch;
lmdb::val txn_order;
if (!txn_id.empty() &&
lmdb::dbi_get(txn, evToOrderDb, lmdb::val(txn_id), txn_order)) {
@ -2339,7 +2378,7 @@ Cache::saveTimelineMessages(lmdb::txn &txn,
lmdb::dbi_del(txn, msg2orderDb, lmdb::val(txn_id));
}
lmdb::dbi_put(txn, orderDb, txn_order, event_id);
lmdb::dbi_put(txn, orderDb, txn_order, lmdb::val(orderEntry.dump()));
lmdb::dbi_put(txn, evToOrderDb, event_id, txn_order);
lmdb::dbi_del(txn, evToOrderDb, lmdb::val(txn_id));
@ -2411,10 +2450,6 @@ Cache::saveTimelineMessages(lmdb::txn &txn,
++index;
json orderEntry = json::object();
orderEntry["event_id"] = event_id_val;
if (first && !res.prev_batch.empty())
orderEntry["prev_batch"] = res.prev_batch;
first = false;
nhlog::db()->debug("saving '{}'", orderEntry.dump());
@ -2426,7 +2461,7 @@ Cache::saveTimelineMessages(lmdb::txn &txn,
lmdb::dbi_put(txn, evToOrderDb, event_id, lmdb::val(&index, sizeof(index)));
// TODO(Nico): Allow blacklisting more event types in UI
if (event["type"] != "m.reaction" && event["type"] != "m.dummy") {
if (!isHiddenEvent(e, room_id)) {
++msgIndex;
lmdb::cursor_put(msgCursor.handle(),
lmdb::val(&msgIndex, sizeof(msgIndex)),
@ -2462,6 +2497,7 @@ Cache::saveOldMessages(const std::string &room_id, const mtx::responses::Message
auto relationsDb = getRelationsDb(txn, room_id);
auto orderDb = getEventOrderDb(txn, room_id);
auto evToOrderDb = getEventToOrderDb(txn, room_id);
auto msg2orderDb = getMessageToOrderDb(txn, room_id);
auto order2msgDb = getOrderToMessageDb(txn, room_id);
@ -2505,9 +2541,10 @@ Cache::saveOldMessages(const std::string &room_id, const mtx::responses::Message
lmdb::dbi_put(
txn, orderDb, lmdb::val(&index, sizeof(index)), lmdb::val(orderEntry.dump()));
lmdb::dbi_put(txn, evToOrderDb, event_id, lmdb::val(&index, sizeof(index)));
// TODO(Nico): Allow blacklisting more event types in UI
if (event["type"] != "m.reaction" && event["type"] != "m.dummy") {
if (!isHiddenEvent(e, room_id)) {
--msgIndex;
lmdb::dbi_put(
txn, order2msgDb, lmdb::val(&msgIndex, sizeof(msgIndex)), event_id);
@ -2538,6 +2575,94 @@ Cache::saveOldMessages(const std::string &room_id, const mtx::responses::Message
return msgIndex;
}
//! Clear the cached timeline of a room, keeping only the newest batch of
//! events (everything newer than the most recent stored pagination token).
void
Cache::clearTimeline(const std::string &room_id)
{
        auto txn         = lmdb::txn::begin(env_);
        auto eventsDb    = getEventsDb(txn, room_id);
        auto relationsDb = getRelationsDb(txn, room_id);
        auto orderDb     = getEventOrderDb(txn, room_id);
        auto evToOrderDb = getEventToOrderDb(txn, room_id);
        auto msg2orderDb = getMessageToOrderDb(txn, room_id);
        auto order2msgDb = getOrderToMessageDb(txn, room_id);

        lmdb::val indexVal, val;
        auto cursor = lmdb::cursor::open(txn, orderDb);

        // Walk the order index newest-to-oldest. Keep entries until we have seen
        // an entry carrying a "prev_batch" token; everything older is purged from
        // all per-room databases.
        bool start                   = true;
        bool passed_pagination_token = false;
        while (cursor.get(indexVal, val, start ? MDB_LAST : MDB_PREV)) {
                start = false;
                json obj;
                try {
                        obj = json::parse(std::string_view(val.data(), val.size()));
                } catch (std::exception &) {
                        // workaround bug in the initial db format, where we sometimes didn't store
                        // json...
                        obj = {{"event_id", std::string(val.data(), val.size())}};
                }
                if (passed_pagination_token) {
                        if (obj.count("event_id") != 0) {
                                // NOTE(review): lmdb::val seems to be constructed from a
                                // temporary std::string here — verify the pointed-to buffer
                                // outlives these dbi_del/dbi_get calls.
                                lmdb::val event_id = obj["event_id"].get<std::string>();
                                lmdb::dbi_del(txn, evToOrderDb, event_id);
                                lmdb::dbi_del(txn, eventsDb, event_id);
                                lmdb::dbi_del(txn, relationsDb, event_id);

                                lmdb::val order{};
                                bool exists = lmdb::dbi_get(txn, msg2orderDb, event_id, order);
                                if (exists) {
                                        lmdb::dbi_del(txn, order2msgDb, order);
                                        lmdb::dbi_del(txn, msg2orderDb, event_id);
                                }
                        }
                        lmdb::cursor_del(cursor);
                } else {
                        if (obj.count("prev_batch") != 0)
                                passed_pagination_token = true;
                }
        }

        // Second pass over the message-order index (newest-to-oldest): keep each
        // message while a matching event id still exists in the (now trimmed)
        // order index; stop at the first message with no surviving event.
        auto msgCursor = lmdb::cursor::open(txn, order2msgDb);
        start          = true;
        while (msgCursor.get(indexVal, val, start ? MDB_LAST : MDB_PREV)) {
                start = false;

                lmdb::val eventId;
                bool innerStart = true;
                bool found      = false;
                while (cursor.get(indexVal, eventId, innerStart ? MDB_LAST : MDB_PREV)) {
                        innerStart = false;
                        json obj;
                        try {
                                obj = json::parse(std::string_view(eventId.data(), eventId.size()));
                        } catch (std::exception &) {
                                obj = {{"event_id", std::string(eventId.data(), eventId.size())}};
                        }
                        if (obj["event_id"] == std::string(val.data(), val.size())) {
                                found = true;
                                break;
                        }
                }
                if (!found)
                        break;
        }

        // Delete the current entry and everything older than it.
        // NOTE(review): this do/while also runs when the loop above exited without
        // hitting the break (i.e. all messages matched) — confirm that deleting at
        // the cursor's final position is intended in that case.
        do {
                lmdb::cursor_del(msgCursor);
        } while (msgCursor.get(indexVal, val, MDB_PREV));

        cursor.close();
        msgCursor.close();
        txn.commit();
}
mtx::responses::Notifications
Cache::getTimelineMentionsForRoom(lmdb::txn &txn, const std::string &room_id)
{
@ -2677,9 +2802,11 @@ Cache::deleteOldMessages()
for (const auto &room_id : room_ids) {
auto orderDb = getEventOrderDb(txn, room_id);
auto evToOrderDb = getEventToOrderDb(txn, room_id);
auto o2m = getOrderToMessageDb(txn, room_id);
auto m2o = getMessageToOrderDb(txn, room_id);
auto eventsDb = getEventsDb(txn, room_id);
auto relationsDb = getRelationsDb(txn, room_id);
auto cursor = lmdb::cursor::open(txn, orderDb);
uint64_t first, last;
@ -2700,14 +2827,17 @@ Cache::deleteOldMessages()
bool start = true;
while (cursor.get(indexVal, val, start ? MDB_FIRST : MDB_NEXT) &&
message_count-- < MAX_RESTORED_MESSAGES) {
message_count-- > MAX_RESTORED_MESSAGES) {
start = false;
auto obj = json::parse(std::string_view(val.data(), val.size()));
if (obj.count("event_id") != 0) {
lmdb::val event_id = obj["event_id"].get<std::string>();
lmdb::dbi_del(txn, evToOrderDb, event_id);
lmdb::dbi_del(txn, eventsDb, event_id);
lmdb::dbi_del(txn, relationsDb, event_id);
lmdb::val order{};
bool exists = lmdb::dbi_get(txn, m2o, event_id, order);
if (exists) {

View file

@ -218,6 +218,9 @@ public:
const std::string &room_id);
void removePendingStatus(const std::string &room_id, const std::string &txn_id);
//! clear timeline keeping only the latest batch
void clearTimeline(const std::string &room_id);
//! Remove old unused data.
void deleteOldMessages();
void deleteOldData() noexcept;

458
src/CallManager.cpp Normal file
View file

@ -0,0 +1,458 @@
#include <algorithm>
#include <cctype>
#include <chrono>
#include <cstdint>
#include <QMediaPlaylist>
#include <QUrl>
#include "Cache.h"
#include "CallManager.h"
#include "ChatPage.h"
#include "Logging.h"
#include "MainWindow.h"
#include "MatrixClient.h"
#include "UserSettingsPage.h"
#include "WebRTCSession.h"
#include "dialogs/AcceptCall.h"
#include "mtx/responses/turn_server.hpp"
Q_DECLARE_METATYPE(std::vector<mtx::events::msg::CallCandidates::Candidate>)
Q_DECLARE_METATYPE(mtx::events::msg::CallCandidates::Candidate)
Q_DECLARE_METATYPE(mtx::responses::TurnServer)
using namespace mtx::events;
using namespace mtx::events::msg;
// https://github.com/vector-im/riot-web/issues/10173
#define STUN_SERVER "stun://turn.matrix.org:3478"
namespace {
std::vector<std::string>
getTurnURIs(const mtx::responses::TurnServer &turnServer);
}
// Coordinates 1:1 voice calls: reacts to WebRTC session signals (offer/answer/
// ICE), sends the corresponding m.call.* events, refreshes TURN credentials on
// a timer and plays the ring/ringback/hangup sounds.
CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
  : QObject()
  , session_(WebRTCSession::instance())
  , turnServerTimer_(this)
  , settings_(userSettings)
{
        // Needed so these types can travel through queued signal/slot connections.
        qRegisterMetaType<std::vector<mtx::events::msg::CallCandidates::Candidate>>();
        qRegisterMetaType<mtx::events::msg::CallCandidates::Candidate>();
        qRegisterMetaType<mtx::responses::TurnServer>();

        // Outgoing call: session produced an SDP offer + local ICE candidates.
        // Send the invite and arm the invite timeout.
        connect(
          &session_,
          &WebRTCSession::offerCreated,
          this,
          [this](const std::string &sdp, const std::vector<CallCandidates::Candidate> &candidates) {
                  nhlog::ui()->debug("WebRTC: call id: {} - sending offer", callid_);
                  emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_});
                  emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
                  // If the offer is still unanswered after timeoutms_, give up.
                  QTimer::singleShot(timeoutms_, this, [this]() {
                          if (session_.state() == WebRTCSession::State::OFFERSENT) {
                                  hangUp(CallHangUp::Reason::InviteTimeOut);
                                  emit ChatPage::instance()->showNotification(
                                    "The remote side failed to pick up.");
                          }
                  });
          });

        // Incoming call accepted: session produced an SDP answer + candidates.
        connect(
          &session_,
          &WebRTCSession::answerCreated,
          this,
          [this](const std::string &sdp, const std::vector<CallCandidates::Candidate> &candidates) {
                  nhlog::ui()->debug("WebRTC: call id: {} - sending answer", callid_);
                  emit newMessage(roomid_, CallAnswer{callid_, sdp, 0});
                  emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
          });

        // Trickle ICE: forward each newly gathered local candidate.
        connect(&session_,
                &WebRTCSession::newICECandidate,
                this,
                [this](const CallCandidates::Candidate &candidate) {
                        nhlog::ui()->debug("WebRTC: call id: {} - sending ice candidate", callid_);
                        emit newMessage(roomid_, CallCandidates{callid_, {candidate}, 0});
                });

        connect(&turnServerTimer_, &QTimer::timeout, this, &CallManager::retrieveTurnServer);

        // TURN credentials arrived: cache the URIs and schedule the next refresh
        // shortly before they expire.
        connect(this,
                &CallManager::turnServerRetrieved,
                this,
                [this](const mtx::responses::TurnServer &res) {
                        nhlog::net()->info("TURN server(s) retrieved from homeserver:");
                        nhlog::net()->info("username: {}", res.username);
                        nhlog::net()->info("ttl: {} seconds", res.ttl);
                        for (const auto &u : res.uris)
                                nhlog::net()->info("uri: {}", u);

                        // Request new credentials close to expiry
                        // See https://tools.ietf.org/html/draft-uberti-behave-turn-rest-00
                        turnURIs_    = getTurnURIs(res);
                        uint32_t ttl = std::max(res.ttl, UINT32_C(3600));
                        if (res.ttl < 3600)
                                nhlog::net()->warn("Setting ttl to 1 hour");
                        turnServerTimer_.setInterval(ttl * 1000 * 0.9);
                });

        // Session-level state changes: play the hangup sound and reset on
        // disconnect; surface a user-visible error when ICE fails.
        connect(&session_, &WebRTCSession::stateChanged, this, [this](WebRTCSession::State state) {
                switch (state) {
                case WebRTCSession::State::DISCONNECTED:
                        playRingtone("qrc:/media/media/callend.ogg", false);
                        clear();
                        break;
                case WebRTCSession::State::ICEFAILED: {
                        QString error("Call connection failed.");
                        if (turnURIs_.empty())
                                error += " Your homeserver has no configured TURN server.";
                        emit ChatPage::instance()->showNotification(error);
                        hangUp(CallHangUp::Reason::ICEFailed);
                        break;
                }
                default:
                        break;
                }
        });

        // Ringtones start playing only once the media has finished loading.
        connect(&player_,
                &QMediaPlayer::mediaStatusChanged,
                this,
                [this](QMediaPlayer::MediaStatus status) {
                        if (status == QMediaPlayer::LoadedMedia)
                                player_.play();
                });
}
// Start an outgoing voice call in the given room. Refuses if a call is already
// active or the room is not 1:1; otherwise initialises the WebRTC session and
// asks it to create an SDP offer (the offerCreated handler sends the invite).
void
CallManager::sendInvite(const QString &roomid)
{
        if (onActiveCall())
                return;

        auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
        if (roomInfo.member_count != 2) {
                emit ChatPage::instance()->showNotification("Voice calls are limited to 1:1 rooms.");
                return;
        }

        std::string errorMessage;
        if (!session_.init(&errorMessage)) {
                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                return;
        }

        roomid_ = roomid;
        session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
        session_.setTurnServers(turnURIs_);

        generateCallID();
        nhlog::ui()->debug("WebRTC: call id: {} - creating invite", callid_);
        // In a 1:1 room the callee is whichever member is not the local user.
        std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
        const RoomMember &callee =
          members.front().user_id == utils::localUser() ? members.back() : members.front();
        emit newCallParty(callee.user_id,
                          callee.display_name,
                          QString::fromStdString(roomInfo.name),
                          QString::fromStdString(roomInfo.avatar_url));
        playRingtone("qrc:/media/media/ringback.ogg", true);
        if (!session_.createOffer()) {
                emit ChatPage::instance()->showNotification("Problem setting up call.");
                endCall();
        }
}
namespace {
// Human-readable label for a CallHangUp reason, used in debug logs.
std::string
callHangUpReasonString(CallHangUp::Reason reason)
{
        if (reason == CallHangUp::Reason::ICEFailed)
                return "ICE failed";
        if (reason == CallHangUp::Reason::InviteTimeOut)
                return "Invite time out";
        return "User";
}
}
// End the current call (if any): send an m.call.hangup with the given reason
// and tear down local call state.
void
CallManager::hangUp(CallHangUp::Reason reason)
{
        // No call id means there is nothing to hang up.
        if (callid_.empty())
                return;

        nhlog::ui()->debug(
          "WebRTC: call id: {} - hanging up ({})", callid_, callHangUpReasonString(reason));
        emit newMessage(roomid_, CallHangUp{callid_, 0, reason});
        endCall();
}
// Whether a call is currently in progress (any session state other than
// DISCONNECTED counts as active).
bool
CallManager::onActiveCall()
{
        return session_.state() != WebRTCSession::State::DISCONNECTED;
}
// Entry point for call-related timeline events received via /sync.
// Dispatches m.call.* events to the typed handlers; compiles to a no-op when
// nheko was built without GStreamer support.
void
CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event)
{
#ifdef GSTREAMER_AVAILABLE
        if (handleEvent_<CallInvite>(event) || handleEvent_<CallCandidates>(event) ||
            handleEvent_<CallAnswer>(event) || handleEvent_<CallHangUp>(event))
                return;
#else
        (void)event;
#endif
}
// Dispatch helper: if the timeline event variant holds a RoomEvent<T>, forward
// it to the matching typed handleEvent overload and report success.
template<typename T>
bool
CallManager::handleEvent_(const mtx::events::collections::TimelineEvents &event)
{
        if (auto typedEvent = std::get_if<RoomEvent<T>>(&event)) {
                handleEvent(*typedEvent);
                return true;
        }
        return false;
}
// Handle an incoming m.call.invite: reject video calls, group-room calls and
// invites arriving during an active call; otherwise record the call state and
// show the accept/reject dialog while the ringtone plays.
void
CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
{
        // Case-insensitive scan of the SDP for an "m=video" media line to tell
        // video invites apart from voice-only ones.
        const char video[]     = "m=video";
        const std::string &sdp = callInviteEvent.content.sdp;
        bool isVideo           = std::search(sdp.cbegin(),
                                   sdp.cend(),
                                   std::cbegin(video),
                                   std::cend(video) - 1,
                                   [](unsigned char c1, unsigned char c2) {
                                           return std::tolower(c1) == std::tolower(c2);
                                   }) != sdp.cend();

        nhlog::ui()->debug("WebRTC: call id: {} - incoming {} CallInvite from {}",
                           callInviteEvent.content.call_id,
                           (isVideo ? "video" : "voice"),
                           callInviteEvent.sender);

        if (callInviteEvent.content.call_id.empty())
                return;

        auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id);
        // Busy, non-1:1 room, or unsupported video call: decline immediately.
        if (onActiveCall() || roomInfo.member_count != 2 || isVideo) {
                emit newMessage(QString::fromStdString(callInviteEvent.room_id),
                                CallHangUp{callInviteEvent.content.call_id,
                                           0,
                                           CallHangUp::Reason::InviteTimeOut});
                return;
        }

        playRingtone("qrc:/media/media/ring.ogg", true);
        roomid_ = QString::fromStdString(callInviteEvent.room_id);
        callid_ = callInviteEvent.content.call_id;
        remoteICECandidates_.clear();

        // In a 1:1 room the caller is whichever member is not the local user.
        std::vector<RoomMember> members(cache::getMembers(callInviteEvent.room_id));
        const RoomMember &caller =
          members.front().user_id == utils::localUser() ? members.back() : members.front();
        emit newCallParty(caller.user_id,
                          caller.display_name,
                          QString::fromStdString(roomInfo.name),
                          QString::fromStdString(roomInfo.avatar_url));

        auto dialog = new dialogs::AcceptCall(caller.user_id,
                                              caller.display_name,
                                              QString::fromStdString(roomInfo.name),
                                              QString::fromStdString(roomInfo.avatar_url),
                                              settings_,
                                              MainWindow::instance());
        connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent]() {
                MainWindow::instance()->hideOverlay();
                answerInvite(callInviteEvent.content);
        });
        connect(dialog, &dialogs::AcceptCall::reject, this, [this]() {
                MainWindow::instance()->hideOverlay();
                hangUp();
        });
        MainWindow::instance()->showSolidOverlayModal(dialog);
}
// Accept a previously received call invite: initialise the WebRTC session,
// apply the remote SDP offer, then feed it any ICE candidates that arrived
// while the user was still deciding.
void
CallManager::answerInvite(const CallInvite &invite)
{
        stopRingtone();

        std::string errorMessage;
        if (!session_.init(&errorMessage)) {
                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                hangUp();
                return;
        }

        session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
        session_.setTurnServers(turnURIs_);

        if (!session_.acceptOffer(invite.sdp)) {
                emit ChatPage::instance()->showNotification("Problem setting up call.");
                hangUp();
                return;
        }

        // Candidates buffered while no session existed yet.
        session_.acceptICECandidates(remoteICECandidates_);
        remoteICECandidates_.clear();
}
// Handle incoming m.call.candidates: forward remote ICE candidates to the
// session, or buffer them if the invite hasn't been accepted yet. Candidates
// echoed back for our own events are ignored.
void
CallManager::handleEvent(const RoomEvent<CallCandidates> &callCandidatesEvent)
{
        if (callCandidatesEvent.sender == utils::localUser().toStdString())
                return;

        nhlog::ui()->debug("WebRTC: call id: {} - incoming CallCandidates from {}",
                           callCandidatesEvent.content.call_id,
                           callCandidatesEvent.sender);

        if (callid_ == callCandidatesEvent.content.call_id) {
                if (onActiveCall())
                        session_.acceptICECandidates(callCandidatesEvent.content.candidates);
                else {
                        // CallInvite has been received and we're awaiting localUser to accept or
                        // reject the call
                        for (const auto &c : callCandidatesEvent.content.candidates)
                                remoteICECandidates_.push_back(c);
                }
        }
}
// Handle incoming m.call.answer. An answer from the local user on a *different*
// device means the call was picked up elsewhere; an answer from the remote
// party on the active call is applied to the session.
void
CallManager::handleEvent(const RoomEvent<CallAnswer> &callAnswerEvent)
{
        nhlog::ui()->debug("WebRTC: call id: {} - incoming CallAnswer from {}",
                           callAnswerEvent.content.call_id,
                           callAnswerEvent.sender);

        // Our own answer echoed back while we have no active session here:
        // another device of ours accepted the call.
        if (!onActiveCall() && callAnswerEvent.sender == utils::localUser().toStdString() &&
            callid_ == callAnswerEvent.content.call_id) {
                emit ChatPage::instance()->showNotification("Call answered on another device.");
                stopRingtone();
                MainWindow::instance()->hideOverlay();
                return;
        }

        if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) {
                stopRingtone();
                if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) {
                        emit ChatPage::instance()->showNotification("Problem setting up call.");
                        hangUp();
                }
        }
}
// Handle incoming m.call.hangup: if it refers to the current call, dismiss any
// open accept/reject dialog and tear down the call.
void
CallManager::handleEvent(const RoomEvent<CallHangUp> &callHangUpEvent)
{
        nhlog::ui()->debug("WebRTC: call id: {} - incoming CallHangUp ({}) from {}",
                           callHangUpEvent.content.call_id,
                           callHangUpReasonString(callHangUpEvent.content.reason),
                           callHangUpEvent.sender);

        if (callid_ == callHangUpEvent.content.call_id) {
                MainWindow::instance()->hideOverlay();
                endCall();
        }
}
// Derive a (unique-enough) call id from the current wall-clock time in
// milliseconds, prefixed with "c".
void
CallManager::generateCallID()
{
        using namespace std::chrono;
        const auto nowMs =
          duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count();
        callid_ = "c" + std::to_string(static_cast<uint64_t>(nowMs));
}
void
CallManager::clear()
{
        // Drop all per-call state; the fields are independent, so order is arbitrary.
        callid_.clear();
        roomid_.clear();
        remoteICECandidates_.clear();
}
void
CallManager::endCall()
{
        // Full teardown, in order: silence the ringtone, drop per-call state
        // (call id, room id, buffered candidates), then end the WebRTC session.
        stopRingtone();
        clear();
        session_.end();
}
void
CallManager::refreshTurnServer()
{
        // Discard the cached TURN URIs and schedule a fresh fetch from the
        // homeserver after 2s. NOTE(review): assumes turnServerTimer_ was
        // connected to retrieveTurnServer() elsewhere (e.g. the constructor) —
        // that connection is not visible here; confirm.
        turnURIs_.clear();
        turnServerTimer_.start(2000);
}
void
CallManager::retrieveTurnServer()
{
        // Ask the homeserver for TURN credentials/URIs.
        http::client()->get_turn_server(
          [this](const mtx::responses::TurnServer &res, mtx::http::RequestErr err) {
                  if (!err) {
                          emit turnServerRetrieved(res);
                          return;
                  }
                  // Failed: slow the polling timer down and try again later.
                  turnServerTimer_.setInterval(5000);
          });
}
void
CallManager::playRingtone(const QString &ringtone, bool repeat)
{
        // Queue the given ringtone resource on the media player, looping it for
        // incoming-call ringing (repeat == true) or playing it once otherwise.
        // static: QMediaPlayer::setPlaylist only stores a pointer, so the
        // playlist must outlive this function.
        static QMediaPlaylist playlist;
        playlist.clear();
        playlist.setPlaybackMode(repeat ? QMediaPlaylist::CurrentItemInLoop
                                        : QMediaPlaylist::CurrentItemOnce);
        playlist.addMedia(QUrl(ringtone));
        player_.setVolume(100);
        player_.setPlaylist(&playlist);
        // NOTE(review): no explicit player_.play() here — presumably playback is
        // triggered elsewhere (e.g. a media-status handler); confirm.
}
void
CallManager::stopRingtone()
{
        // Detaching the playlist from the player stops ringtone playback.
        player_.setPlaylist(nullptr);
}
namespace {
//! Convert the homeserver's TURN uris into the form gstreamer expects:
//! turn(s)://username:password@host:port?transport=udp(tcp)
//! with the username and password percent-encoded. Malformed or non-TURN
//! uris are logged and skipped.
std::vector<std::string>
getTurnURIs(const mtx::responses::TurnServer &turnServer)
{
        std::vector<std::string> ret;
        for (const auto &uri : turnServer.uris) {
                const auto colonPos = uri.find(':');
                if (colonPos == std::string::npos) {
                        nhlog::ui()->error("Invalid TURN server uri: {}", uri);
                        continue;
                }

                const std::string scheme = uri.substr(0, colonPos);
                if (scheme != "turn" && scheme != "turns") {
                        nhlog::ui()->error("Invalid TURN server uri: {}", uri);
                        continue;
                }

                // Splice the percent-encoded credentials in front of the host part.
                const QString encodedUri =
                  QString::fromStdString(scheme) + "://" +
                  QUrl::toPercentEncoding(QString::fromStdString(turnServer.username)) + ":" +
                  QUrl::toPercentEncoding(QString::fromStdString(turnServer.password)) + "@" +
                  QString::fromStdString(uri.substr(colonPos + 1));
                ret.push_back(encodedUri.toStdString());
        }
        return ret;
}
}

75
src/CallManager.h Normal file
View file

@ -0,0 +1,75 @@
#pragma once
#include <string>
#include <vector>
#include <QMediaPlayer>
#include <QObject>
#include <QSharedPointer>
#include <QString>
#include <QTimer>
#include "mtx/events/collections.hpp"
#include "mtx/events/voip.hpp"
namespace mtx::responses {
struct TurnServer;
}
class UserSettings;
class WebRTCSession;
//! Owns the lifecycle of 1:1 voice calls: sending/receiving the VoIP events
//! (invite, candidates, answer, hang-up), driving the WebRTCSession, fetching
//! TURN servers, and playing the ringtone.
class CallManager : public QObject
{
        Q_OBJECT

public:
        CallManager(QSharedPointer<UserSettings>);

        //! Start an outgoing call in the given room.
        void sendInvite(const QString &roomid);
        //! End the current call, sending a CallHangUp with the given reason.
        void hangUp(
          mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
        //! Whether a WebRTC session is currently established.
        bool onActiveCall();
        //! Clear cached TURN uris and re-fetch them from the homeserver.
        void refreshTurnServer();

public slots:
        //! Entry point for timeline events from sync; dispatches call events.
        void syncEvent(const mtx::events::collections::TimelineEvents &event);

signals:
        //! Outgoing call events to be sent into the room (one overload per type).
        void newMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
        void newMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
        void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
        void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
        //! Emitted when the homeserver returned TURN credentials.
        void turnServerRetrieved(const mtx::responses::TurnServer &);
        //! Announces the remote party of the current call (for the call bar UI).
        void newCallParty(const QString &userid,
                          const QString &displayName,
                          const QString &roomName,
                          const QString &avatarUrl);

private slots:
        void retrieveTurnServer();

private:
        WebRTCSession &session_;
        QString roomid_;           // room of the current call
        std::string callid_;       // id of the current call; empty when idle
        const uint32_t timeoutms_ = 120000;
        // ICE candidates buffered while an invite awaits accept/reject.
        std::vector<mtx::events::msg::CallCandidates::Candidate> remoteICECandidates_;
        std::vector<std::string> turnURIs_;
        QTimer turnServerTimer_;   // periodic TURN credential refresh
        QSharedPointer<UserSettings> settings_;
        QMediaPlayer player_;      // ringtone playback

        template<typename T>
        bool handleEvent_(const mtx::events::collections::TimelineEvents &event);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates> &);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &);
        void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &);
        void answerInvite(const mtx::events::msg::CallInvite &);
        void generateCallID();
        void clear();
        void endCall();
        void playRingtone(const QString &ringtone, bool repeat);
        void stopRingtone();
};

View file

@ -22,6 +22,7 @@
#include <QShortcut>
#include <QtConcurrent>
#include "ActiveCallBar.h"
#include "AvatarProvider.h"
#include "Cache.h"
#include "Cache_p.h"
@ -41,11 +42,13 @@
#include "UserInfoWidget.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "ui/OverlayModal.h"
#include "ui/Theme.h"
#include "notifications/Manager.h"
#include "dialogs/PlaceCall.h"
#include "dialogs/ReadReceipts.h"
#include "popups/UserMentions.h"
#include "timeline/TimelineViewManager.h"
@ -69,6 +72,7 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
, isConnected_(true)
, userSettings_{userSettings}
, notificationsManager(this)
, callManager_(userSettings)
{
setObjectName("chatPage");
@ -124,11 +128,17 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
contentLayout_->setMargin(0);
top_bar_ = new TopRoomBar(this);
view_manager_ = new TimelineViewManager(userSettings_, this);
view_manager_ = new TimelineViewManager(userSettings_, &callManager_, this);
contentLayout_->addWidget(top_bar_);
contentLayout_->addWidget(view_manager_->getWidget());
activeCallBar_ = new ActiveCallBar(this);
contentLayout_->addWidget(activeCallBar_);
activeCallBar_->hide();
connect(
&callManager_, &CallManager::newCallParty, activeCallBar_, &ActiveCallBar::setCallParty);
// Splitter
splitter->addWidget(sideBar_);
splitter->addWidget(content_);
@ -156,6 +166,11 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
trySync();
});
connect(text_input_,
&TextInputWidget::clearRoomTimeline,
view_manager_,
&TimelineViewManager::clearCurrentRoomTimeline);
connect(
new QShortcut(QKeySequence("Ctrl+Down"), this), &QShortcut::activated, this, [this]() {
if (isVisible())
@ -444,6 +459,35 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
roomid, filename, encryptedFile, url, mime, dsize);
});
connect(text_input_, &TextInputWidget::callButtonPress, this, [this]() {
if (callManager_.onActiveCall()) {
callManager_.hangUp();
} else {
if (auto roomInfo = cache::singleRoomInfo(current_room_.toStdString());
roomInfo.member_count != 2) {
showNotification("Voice calls are limited to 1:1 rooms.");
} else {
std::vector<RoomMember> members(
cache::getMembers(current_room_.toStdString()));
const RoomMember &callee =
members.front().user_id == utils::localUser() ? members.back()
: members.front();
auto dialog = new dialogs::PlaceCall(
callee.user_id,
callee.display_name,
QString::fromStdString(roomInfo.name),
QString::fromStdString(roomInfo.avatar_url),
userSettings_,
MainWindow::instance());
connect(dialog, &dialogs::PlaceCall::voice, this, [this]() {
callManager_.sendInvite(current_room_);
});
utils::centerWidget(dialog, MainWindow::instance());
dialog->show();
}
}
});
connect(room_list_, &RoomList::roomAvatarChanged, this, &ChatPage::updateTopBarAvatar);
connect(
@ -576,6 +620,11 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
connect(this, &ChatPage::dropToLoginPageCb, this, &ChatPage::dropToLoginPage);
connectCallMessage<mtx::events::msg::CallInvite>();
connectCallMessage<mtx::events::msg::CallCandidates>();
connectCallMessage<mtx::events::msg::CallAnswer>();
connectCallMessage<mtx::events::msg::CallHangUp>();
instance_ = this;
}
@ -678,6 +727,8 @@ ChatPage::bootstrap(QString userid, QString homeserver, QString token)
const bool isInitialized = cache::isInitialized();
const auto cacheVersion = cache::formatVersion();
callManager_.refreshTurnServer();
if (!isInitialized) {
cache::setCurrentFormat();
} else {
@ -1160,11 +1211,19 @@ ChatPage::leaveRoom(const QString &room_id)
void
ChatPage::inviteUser(QString userid, QString reason)
{
auto room = current_room_;
if (QMessageBox::question(this,
tr("Confirm invite"),
tr("Do you really want to invite %1 (%2)?")
.arg(cache::displayName(current_room_, userid))
.arg(userid)) != QMessageBox::Yes)
return;
http::client()->invite_user(
current_room_.toStdString(),
room.toStdString(),
userid.toStdString(),
[this, userid, room = current_room_](const mtx::responses::Empty &,
mtx::http::RequestErr err) {
[this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) {
if (err) {
emit showNotification(
tr("Failed to invite %1 to %2: %3")
@ -1179,11 +1238,19 @@ ChatPage::inviteUser(QString userid, QString reason)
void
ChatPage::kickUser(QString userid, QString reason)
{
auto room = current_room_;
if (QMessageBox::question(this,
tr("Confirm kick"),
tr("Do you really want to kick %1 (%2)?")
.arg(cache::displayName(current_room_, userid))
.arg(userid)) != QMessageBox::Yes)
return;
http::client()->kick_user(
current_room_.toStdString(),
room.toStdString(),
userid.toStdString(),
[this, userid, room = current_room_](const mtx::responses::Empty &,
mtx::http::RequestErr err) {
[this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) {
if (err) {
emit showNotification(
tr("Failed to kick %1 to %2: %3")
@ -1198,11 +1265,19 @@ ChatPage::kickUser(QString userid, QString reason)
void
ChatPage::banUser(QString userid, QString reason)
{
auto room = current_room_;
if (QMessageBox::question(this,
tr("Confirm ban"),
tr("Do you really want to ban %1 (%2)?")
.arg(cache::displayName(current_room_, userid))
.arg(userid)) != QMessageBox::Yes)
return;
http::client()->ban_user(
current_room_.toStdString(),
room.toStdString(),
userid.toStdString(),
[this, userid, room = current_room_](const mtx::responses::Empty &,
mtx::http::RequestErr err) {
[this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) {
if (err) {
emit showNotification(
tr("Failed to ban %1 in %2: %3")
@ -1217,11 +1292,19 @@ ChatPage::banUser(QString userid, QString reason)
void
ChatPage::unbanUser(QString userid, QString reason)
{
auto room = current_room_;
if (QMessageBox::question(this,
tr("Confirm unban"),
tr("Do you really want to unban %1 (%2)?")
.arg(cache::displayName(current_room_, userid))
.arg(userid)) != QMessageBox::Yes)
return;
http::client()->unban_user(
current_room_.toStdString(),
room.toStdString(),
userid.toStdString(),
[this, userid, room = current_room_](const mtx::responses::Empty &,
mtx::http::RequestErr err) {
[this, userid, room](const mtx::responses::Empty &, mtx::http::RequestErr err) {
if (err) {
emit showNotification(
tr("Failed to unban %1 in %2: %3")
@ -1478,3 +1561,13 @@ ChatPage::query_keys(
http::client()->query_keys(req, cb);
}
}
//! Wire CallManager's outgoing call-event signal of type T to the timeline's
//! send queue. qOverload selects the right overload of the overloaded
//! newMessage()/queueCallMessage() signal/slot pairs.
template<typename T>
void
ChatPage::connectCallMessage()
{
        connect(&callManager_,
                qOverload<const QString &, const T &>(&CallManager::newMessage),
                view_manager_,
                qOverload<const QString &, const T &>(&TimelineViewManager::queueCallMessage));
}

View file

@ -36,11 +36,13 @@
#include <QWidget>
#include "CacheStructs.h"
#include "CallManager.h"
#include "CommunitiesList.h"
#include "Utils.h"
#include "notifications/Manager.h"
#include "popups/UserMentions.h"
class ActiveCallBar;
class OverlayModal;
class QuickSwitcher;
class RoomList;
@ -241,6 +243,9 @@ private:
void showNotificationsDialog(const QPoint &point);
template<typename T>
void connectCallMessage();
QHBoxLayout *topLayout_;
Splitter *splitter;
@ -260,6 +265,7 @@ private:
TopRoomBar *top_bar_;
TextInputWidget *text_input_;
ActiveCallBar *activeCallBar_;
QTimer connectivityTimer_;
std::atomic_bool isConnected_;
@ -277,6 +283,7 @@ private:
QSharedPointer<UserSettings> userSettings_;
NotificationsManager notificationsManager;
CallManager callManager_;
};
template<class Collection>

View file

@ -55,7 +55,7 @@ const QRegularExpression url_regex(
// match an URL, that is not quoted, i.e.
// vvvvvv match quote via negative lookahead/lookbehind vv
// vvvv atomic match url -> fail if there is a " before or after vvv
R"((?<!")(?>((www\.(?!\.)|[a-z][a-z0-9+.-]*://)[^\s<>'"]+[^!,\.\s<>'"\]\)\:]))(?!"))");
R"((?<!["'])(?>((www\.(?!\.)|[a-z][a-z0-9+.-]*://)[^\s<>'"]+[^!,\.\s<>'"\]\)\:]))(?!["']))");
}
// Window geometry.

View file

@ -165,6 +165,7 @@ DeviceVerificationFlow::DeviceVerificationFlow(QObject *,
}
if (this->method == DeviceVerificationFlow::Method::Emoji) {
std::cout<<info<<std::endl;
this->sasList = this->sas->generate_bytes_emoji(info);
} else if (this->method == DeviceVerificationFlow::Method::Decimal) {
this->sasList = this->sas->generate_bytes_decimal(info);
@ -235,7 +236,7 @@ DeviceVerificationFlow::DeviceVerificationFlow(QObject *,
&ChatPage::recievedDeviceVerificationReady,
this,
[this](const mtx::events::msg::KeyVerificationReady &msg) {
if (!sender) {
if (!sender && msg.from_device != http::client()->device_id()) {
this->deleteLater();
emit verificationCanceled();
return;
@ -243,7 +244,7 @@ DeviceVerificationFlow::DeviceVerificationFlow(QObject *,
if (msg.transaction_id.has_value()) {
if (msg.transaction_id.value() != this->transaction_id)
return;
} else if (msg.relates_to.has_value()) {
} else if ((msg.relates_to.has_value() && sender)) {
if (msg.relates_to.value().event_id != this->relation.event_id)
return;
else {
@ -405,7 +406,7 @@ DeviceVerificationFlow::acceptVerificationRequest()
});
} else if (this->type == DeviceVerificationFlow::Type::RoomMsg && model_) {
req.relates_to = this->relation;
(model_)->sendMessage(req);
(model_)->sendMessageEvent(req, mtx::events::EventType::KeyVerificationAccept);
}
}
//! responds verification request
@ -432,7 +433,7 @@ DeviceVerificationFlow::sendVerificationReady()
});
} else if (this->type == DeviceVerificationFlow::Type::RoomMsg && model_) {
req.relates_to = this->relation;
(model_)->sendMessage(req);
(model_)->sendMessageEvent(req, mtx::events::EventType::KeyVerificationReady);
}
}
//! accepts a verification
@ -456,7 +457,7 @@ DeviceVerificationFlow::sendVerificationDone()
});
} else if (this->type == DeviceVerificationFlow::Type::RoomMsg && model_) {
req.relates_to = this->relation;
(model_)->sendMessage(req);
(model_)->sendMessageEvent(req, mtx::events::EventType::KeyVerificationDone);
}
}
//! starts the verification flow
@ -489,7 +490,7 @@ DeviceVerificationFlow::startVerificationRequest()
} else if (this->type == DeviceVerificationFlow::Type::RoomMsg && model_) {
req.relates_to = this->relation;
this->canonical_json = nlohmann::json(req);
(model_)->sendMessage(req);
(model_)->sendMessageEvent(req, mtx::events::EventType::KeyVerificationStart);
}
}
//! sends a verification request
@ -525,7 +526,7 @@ DeviceVerificationFlow::sendVerificationRequest()
req.body = "User is requesting to verify keys with you. However, your client does "
"not support this method, so you will need to use the legacy method of "
"key verification.";
(model_)->sendMessage(req);
(model_)->sendMessageEvent(req, mtx::events::EventType::KeyVerificationRequest);
}
}
//! cancels a verification flow
@ -573,7 +574,7 @@ DeviceVerificationFlow::cancelVerification(DeviceVerificationFlow::Error error_c
});
} else if (this->type == DeviceVerificationFlow::Type::RoomMsg && model_) {
req.relates_to = this->relation;
(model_)->sendMessage(req);
(model_)->sendMessageEvent(req, mtx::events::EventType::KeyVerificationCancel);
this->deleteLater();
}
@ -612,7 +613,7 @@ DeviceVerificationFlow::sendVerificationKey()
});
} else if (this->type == DeviceVerificationFlow::Type::RoomMsg && model_) {
req.relates_to = this->relation;
(model_)->sendMessage(req);
(model_)->sendMessageEvent(req, mtx::events::EventType::KeyVerificationKey);
}
}
//! sends the mac of the keys
@ -659,7 +660,7 @@ DeviceVerificationFlow::sendVerificationMac()
});
} else if (this->type == DeviceVerificationFlow::Type::RoomMsg && model_) {
req.relates_to = this->relation;
(model_)->sendMessage(req);
(model_)->sendMessageEvent(req, mtx::events::EventType::KeyVerificationMac);
}
}
//! Completes the verification flow

View file

@ -1,4 +1,4 @@
#pragma once
#pragma once
#include "Olm.h"

View file

@ -1,5 +1,7 @@
#include "EventAccessors.h"
#include <algorithm>
#include <cctype>
#include <type_traits>
namespace {
@ -72,6 +74,29 @@ struct EventRoomTopic
}
};
//! Visitor returning "video" or "voice" for CallInvite events (based on the
//! SDP offer) and an empty string for every other event type.
struct CallType
{
        template<class T>
        std::string operator()(const T &e)
        {
                if constexpr (std::is_same_v<mtx::events::RoomEvent<mtx::events::msg::CallInvite>,
                                             T>) {
                        // An offer with an "m=video" media line is a video call;
                        // compare case-insensitively via a lowercased copy.
                        std::string sdp = e.content.sdp;
                        std::transform(
                          sdp.begin(), sdp.end(), sdp.begin(), [](unsigned char c) {
                                  return static_cast<char>(std::tolower(c));
                          });
                        return sdp.find("m=video") != std::string::npos ? "video" : "voice";
                }
                return std::string();
        }
};
struct EventBody
{
template<class C>
@ -353,6 +378,12 @@ mtx::accessors::room_topic(const mtx::events::collections::TimelineEvents &event
return std::visit(EventRoomTopic{}, event);
}
// Returns "video"/"voice" for CallInvite events, "" for all other event types.
std::string
mtx::accessors::call_type(const mtx::events::collections::TimelineEvents &event)
{
        return std::visit(CallType{}, event);
}
std::string
mtx::accessors::body(const mtx::events::collections::TimelineEvents &event)
{

View file

@ -30,6 +30,9 @@ room_name(const mtx::events::collections::TimelineEvents &event);
std::string
room_topic(const mtx::events::collections::TimelineEvents &event);
std::string
call_type(const mtx::events::collections::TimelineEvents &event);
std::string
body(const mtx::events::collections::TimelineEvents &event);

View file

@ -17,6 +17,7 @@
#include <QApplication>
#include <QLayout>
#include <QMessageBox>
#include <QPluginLoader>
#include <QSettings>
#include <QShortcut>
@ -35,6 +36,7 @@
#include "TrayIcon.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "WelcomePage.h"
#include "ui/LoadingIndicator.h"
#include "ui/OverlayModal.h"
@ -285,6 +287,14 @@ MainWindow::showChatPage()
void
MainWindow::closeEvent(QCloseEvent *event)
{
if (WebRTCSession::instance().state() != WebRTCSession::State::DISCONNECTED) {
if (QMessageBox::question(this, "nheko", "A call is in progress. Quit?") !=
QMessageBox::Yes) {
event->ignore();
return;
}
}
if (!qApp->isSavingSession() && isVisible() && pageSupportsTray() &&
userSettings_->tray()) {
event->ignore();
@ -424,8 +434,17 @@ void
MainWindow::openLogoutDialog()
{
auto dialog = new dialogs::Logout(this);
connect(
dialog, &dialogs::Logout::loggingOut, this, [this]() { chat_page_->initiateLogout(); });
connect(dialog, &dialogs::Logout::loggingOut, this, [this]() {
if (WebRTCSession::instance().state() != WebRTCSession::State::DISCONNECTED) {
if (QMessageBox::question(
this, "nheko", "A call is in progress. Log out?") !=
QMessageBox::Yes) {
return;
}
WebRTCSession::instance().end();
}
chat_page_->initiateLogout();
});
showDialog(dialog);
}

View file

@ -4,6 +4,7 @@
#include "Olm.h"
#include "Cache.h"
#include "Cache_p.h"
#include "ChatPage.h"
#include "DeviceVerificationFlow.h"
#include "Logging.h"
@ -365,22 +366,25 @@ send_key_request_for(const std::string &room_id,
using namespace mtx::events;
nhlog::crypto()->debug("sending key request: {}", json(e).dump(2));
auto payload = json{{"action", "request"},
{"request_id", http::client()->generate_txn_id()},
{"requesting_device_id", http::client()->device_id()},
{"body",
{{"algorithm", MEGOLM_ALGO},
{"room_id", room_id},
{"sender_key", e.content.sender_key},
{"session_id", e.content.session_id}}}};
json body;
body["messages"][e.sender] = json::object();
body["messages"][e.sender][e.content.device_id] = payload;
mtx::events::msg::KeyRequest request;
request.action = mtx::events::msg::RequestAction::Request;
request.algorithm = MEGOLM_ALGO;
request.room_id = room_id;
request.sender_key = e.content.sender_key;
request.session_id = e.content.session_id;
request.request_id = "key_request." + http::client()->generate_txn_id();
request.requesting_device_id = http::client()->device_id();
nhlog::crypto()->debug("m.room_key_request: {}", body.dump(2));
nhlog::crypto()->debug("m.room_key_request: {}", json(request).dump(2));
http::client()->send_to_device("m.room_key_request", body, [e](mtx::http::RequestErr err) {
std::map<mtx::identifiers::User, std::map<std::string, decltype(request)>> body;
body[mtx::identifiers::parse<mtx::identifiers::User>(e.sender)][e.content.device_id] =
request;
body[http::client()->user_id()]["*"] = request;
http::client()->send_to_device(
http::client()->generate_txn_id(), body, [e](mtx::http::RequestErr err) {
if (err) {
nhlog::net()->warn("failed to send "
"send_to_device "
@ -388,8 +392,9 @@ send_key_request_for(const std::string &room_id,
err->matrix_error.error);
}
nhlog::net()->info(
"m.room_key_request sent to {}:{}", e.sender, e.content.device_id);
nhlog::net()->info("m.room_key_request sent to {}:{} and your own devices",
e.sender,
e.content.device_id);
});
}
@ -610,4 +615,50 @@ send_megolm_key_to_device(const std::string &user_id,
});
}
//! Decrypt a megolm-encrypted timeline event.
//! Returns a DecryptionResult whose `error` is set on failure (missing
//! session, DB error, libolm failure, or unparsable plaintext) and whose
//! `event` holds the decrypted timeline event on success.
DecryptionResult
decryptEvent(const MegolmSessionIndex &index,
             const mtx::events::EncryptedEvent<mtx::events::msg::Encrypted> &event)
{
        // Without the inbound session we cannot decrypt; caller may request keys.
        try {
                if (!cache::client()->inboundMegolmSessionExists(index)) {
                        return {DecryptionErrorCode::MissingSession, std::nullopt, std::nullopt};
                }
        } catch (const lmdb::error &e) {
                return {DecryptionErrorCode::DbError, e.what(), std::nullopt};
        }

        // TODO: Lookup index,event_id,origin_server_ts tuple for replay attack errors
        // TODO: Verify sender_key

        // Decrypt the ciphertext with the stored inbound megolm session.
        std::string msg_str;
        try {
                auto session = cache::client()->getInboundMegolmSession(index);
                auto res = olm::client()->decrypt_group_message(session, event.content.ciphertext);
                msg_str = std::string((char *)res.data.data(), res.data.size());
        } catch (const lmdb::error &e) {
                return {DecryptionErrorCode::DbError, e.what(), std::nullopt};
        } catch (const mtx::crypto::olm_exception &e) {
                return {DecryptionErrorCode::DecryptionFailed, e.what(), std::nullopt};
        }

        // Add missing fields for the event: the plaintext only carries type and
        // content, so copy the envelope fields from the wrapper event.
        json body                = json::parse(msg_str);
        body["event_id"]         = event.event_id;
        body["sender"]           = event.sender;
        body["origin_server_ts"] = event.origin_server_ts;
        body["unsigned"]         = event.unsigned_data;

        // relations are unencrypted in content...
        if (json old_ev = event; old_ev["content"].count("m.relates_to") != 0)
                body["content"]["m.relates_to"] = old_ev["content"]["m.relates_to"];

        // Re-parse the assembled json into a typed timeline event.
        mtx::events::collections::TimelineEvent te;
        try {
                mtx::events::collections::from_json(body, te);
        } catch (std::exception &e) {
                return {DecryptionErrorCode::ParsingFailed, e.what(), std::nullopt};
        }

        return {std::nullopt, std::nullopt, std::move(te.data)};
}
} // namespace olm

View file

@ -7,10 +7,30 @@
#include <mtx/events/encrypted.hpp>
#include <mtxclient/crypto/client.hpp>
#include <CacheCryptoStructs.h>
constexpr auto OLM_ALGO = "m.olm.v1.curve25519-aes-sha2";
namespace olm {
enum class DecryptionErrorCode
{
MissingSession, // Session was not found, retrieve from backup or request from other devices
// and try again
DbError, // DB read failed
DecryptionFailed, // libolm error
ParsingFailed, // Failed to parse the actual event
ReplayAttack, // Megolm index reused
UnknownFingerprint, // Unknown device Fingerprint
};
struct DecryptionResult
{
std::optional<DecryptionErrorCode> error;
std::optional<std::string> error_message;
std::optional<mtx::events::collections::TimelineEvents> event;
};
struct OlmMessage
{
std::string sender_key;
@ -65,6 +85,10 @@ encrypt_group_message(const std::string &room_id,
const std::string &device_id,
nlohmann::json body);
DecryptionResult
decryptEvent(const MegolmSessionIndex &index,
const mtx::events::EncryptedEvent<mtx::events::msg::Encrypted> &event);
void
mark_keys_as_published();

View file

@ -453,6 +453,15 @@ TextInputWidget::TextInputWidget(QWidget *parent)
topLayout_->setSpacing(0);
topLayout_->setContentsMargins(13, 1, 13, 0);
#ifdef GSTREAMER_AVAILABLE
callBtn_ = new FlatButton(this);
changeCallButtonState(WebRTCSession::State::DISCONNECTED);
connect(&WebRTCSession::instance(),
&WebRTCSession::stateChanged,
this,
&TextInputWidget::changeCallButtonState);
#endif
QIcon send_file_icon;
send_file_icon.addFile(":/icons/icons/ui/paper-clip-outline.png");
@ -521,6 +530,9 @@ TextInputWidget::TextInputWidget(QWidget *parent)
emojiBtn_->setIcon(emoji_icon);
emojiBtn_->setIconSize(QSize(ButtonHeight, ButtonHeight));
#ifdef GSTREAMER_AVAILABLE
topLayout_->addWidget(callBtn_);
#endif
topLayout_->addWidget(sendFileBtn_);
topLayout_->addWidget(input_);
topLayout_->addWidget(emojiBtn_);
@ -528,6 +540,9 @@ TextInputWidget::TextInputWidget(QWidget *parent)
setLayout(topLayout_);
#ifdef GSTREAMER_AVAILABLE
connect(callBtn_, &FlatButton::clicked, this, &TextInputWidget::callButtonPress);
#endif
connect(sendMessageBtn_, &FlatButton::clicked, input_, &FilteredTextEdit::submit);
connect(sendFileBtn_, SIGNAL(clicked()), this, SLOT(openFileSelection()));
connect(input_, &FilteredTextEdit::message, this, &TextInputWidget::sendTextMessage);
@ -566,27 +581,29 @@ void
TextInputWidget::command(QString command, QString args)
{
if (command == "me") {
sendEmoteMessage(args);
emit sendEmoteMessage(args);
} else if (command == "join") {
sendJoinRoomRequest(args);
emit sendJoinRoomRequest(args);
} else if (command == "invite") {
sendInviteRoomRequest(args.section(' ', 0, 0), args.section(' ', 1, -1));
emit sendInviteRoomRequest(args.section(' ', 0, 0), args.section(' ', 1, -1));
} else if (command == "kick") {
sendKickRoomRequest(args.section(' ', 0, 0), args.section(' ', 1, -1));
emit sendKickRoomRequest(args.section(' ', 0, 0), args.section(' ', 1, -1));
} else if (command == "ban") {
sendBanRoomRequest(args.section(' ', 0, 0), args.section(' ', 1, -1));
emit sendBanRoomRequest(args.section(' ', 0, 0), args.section(' ', 1, -1));
} else if (command == "unban") {
sendUnbanRoomRequest(args.section(' ', 0, 0), args.section(' ', 1, -1));
emit sendUnbanRoomRequest(args.section(' ', 0, 0), args.section(' ', 1, -1));
} else if (command == "roomnick") {
changeRoomNick(args);
emit changeRoomNick(args);
} else if (command == "shrug") {
sendTextMessage("¯\\_(ツ)_/¯");
emit sendTextMessage("¯\\_(ツ)_/¯");
} else if (command == "fliptable") {
sendTextMessage("(╯°□°)╯︵ ┻━┻");
emit sendTextMessage("(╯°□°)╯︵ ┻━┻");
} else if (command == "unfliptable") {
sendTextMessage(" ┯━┯╭( º _ º╭)");
emit sendTextMessage(" ┯━┯╭( º _ º╭)");
} else if (command == "sovietflip") {
sendTextMessage("ノ┬─┬ノ ︵ ( \\o°o)\\");
emit sendTextMessage("ノ┬─┬ノ ︵ ( \\o°o)\\");
} else if (command == "clear-timeline") {
emit clearRoomTimeline();
}
}
@ -618,7 +635,7 @@ TextInputWidget::showUploadSpinner()
topLayout_->removeWidget(sendFileBtn_);
sendFileBtn_->hide();
topLayout_->insertWidget(0, spinner_);
topLayout_->insertWidget(1, spinner_);
spinner_->start();
}
@ -626,7 +643,7 @@ void
TextInputWidget::hideUploadSpinner()
{
topLayout_->removeWidget(spinner_);
topLayout_->insertWidget(0, sendFileBtn_);
topLayout_->insertWidget(1, sendFileBtn_);
sendFileBtn_->show();
spinner_->stop();
}
@ -652,3 +669,19 @@ TextInputWidget::paintEvent(QPaintEvent *)
style()->drawPrimitive(QStyle::PE_Widget, &opt, &p, this);
}
void
TextInputWidget::changeCallButtonState(WebRTCSession::State state)
{
        // No call in progress (or setup failed): the button places a call;
        // otherwise it hangs up.
        const bool idle = state == WebRTCSession::State::ICEFAILED ||
                          state == WebRTCSession::State::DISCONNECTED;

        callBtn_->setToolTip(idle ? tr("Place a call") : tr("Hang up"));

        QIcon icon;
        icon.addFile(idle ? ":/icons/icons/ui/place-call.png"
                          : ":/icons/icons/ui/end-call.png");
        callBtn_->setIcon(icon);
        callBtn_->setIconSize(QSize(ButtonHeight * 1.1, ButtonHeight * 1.1));
}

View file

@ -26,6 +26,7 @@
#include <QTextEdit>
#include <QWidget>
#include "WebRTCSession.h"
#include "dialogs/PreviewUploadOverlay.h"
#include "emoji/PickButton.h"
#include "popups/SuggestionsPopup.h"
@ -149,6 +150,7 @@ public slots:
void openFileSelection();
void hideUploadSpinner();
void focusLineEdit() { input_->setFocus(); }
void changeCallButtonState(WebRTCSession::State);
private slots:
void addSelectedEmoji(const QString &emoji);
@ -156,11 +158,13 @@ private slots:
signals:
void sendTextMessage(const QString &msg);
void sendEmoteMessage(QString msg);
void clearRoomTimeline();
void heightChanged(int height);
void uploadMedia(const QSharedPointer<QIODevice> data,
QString mimeClass,
const QString &filename);
void callButtonPress();
void sendJoinRoomRequest(const QString &room);
void sendInviteRoomRequest(const QString &userid, const QString &reason);
@ -185,6 +189,7 @@ private:
LoadingIndicator *spinner_;
FlatButton *callBtn_;
FlatButton *sendFileBtn_;
FlatButton *sendMessageBtn_;
emoji::PickButton *emojiBtn_;

View file

@ -77,6 +77,8 @@ UserSettings::load()
presence_ =
settings.value("user/presence", QVariant::fromValue(Presence::AutomaticPresence))
.value<Presence>();
useStunServer_ = settings.value("user/use_stun_server", false).toBool();
defaultAudioSource_ = settings.value("user/default_audio_source", QString()).toString();
applyTheme();
}
@ -279,6 +281,26 @@ UserSettings::setTheme(QString theme)
emit themeChanged(theme);
}
void
UserSettings::setUseStunServer(bool useStunServer)
{
        // Skip the notify/save round-trip when nothing changed.
        if (useStunServer_ == useStunServer)
                return;

        useStunServer_ = useStunServer;
        emit useStunServerChanged(useStunServer);
        save();
}
void
UserSettings::setDefaultAudioSource(const QString &defaultAudioSource)
{
        // Skip the notify/save round-trip when nothing changed.
        if (defaultAudioSource_ == defaultAudioSource)
                return;

        defaultAudioSource_ = defaultAudioSource;
        emit defaultAudioSourceChanged(defaultAudioSource);
        save();
}
void
UserSettings::applyTheme()
{
@ -364,6 +386,8 @@ UserSettings::save()
settings.setValue("font_family", font_);
settings.setValue("emoji_font_family", emojiFont_);
settings.setValue("presence", QVariant::fromValue(presence_));
settings.setValue("use_stun_server", useStunServer_);
settings.setValue("default_audio_source", defaultAudioSource_);
settings.endGroup();
@ -429,6 +453,7 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
markdown_ = new Toggle{this};
desktopNotifications_ = new Toggle{this};
alertOnNotification_ = new Toggle{this};
useStunServer_ = new Toggle{this};
scaleFactorCombo_ = new QComboBox{this};
fontSizeCombo_ = new QComboBox{this};
fontSelectionCombo_ = new QComboBox{this};
@ -482,6 +507,15 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
timelineMaxWidthSpin_->setMaximum(100'000'000);
timelineMaxWidthSpin_->setSingleStep(10);
auto callsLabel = new QLabel{tr("CALLS"), this};
callsLabel->setFixedHeight(callsLabel->minimumHeight() + LayoutTopMargin);
callsLabel->setAlignment(Qt::AlignBottom);
callsLabel->setFont(font);
useStunServer_ = new Toggle{this};
defaultAudioSourceValue_ = new QLabel(this);
defaultAudioSourceValue_->setFont(font);
auto encryptionLabel_ = new QLabel{tr("ENCRYPTION"), this};
encryptionLabel_->setFixedHeight(encryptionLabel_->minimumHeight() + LayoutTopMargin);
encryptionLabel_->setAlignment(Qt::AlignBottom);
@ -612,6 +646,14 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
#endif
boxWrap(tr("Theme"), themeCombo_);
formLayout_->addRow(callsLabel);
formLayout_->addRow(new HorizontalLine{this});
boxWrap(tr("Allow fallback call assist server"),
useStunServer_,
tr("Will use turn.matrix.org as assist when your home server does not offer one."));
boxWrap(tr("Default audio source device"), defaultAudioSourceValue_);
formLayout_->addRow(encryptionLabel_);
formLayout_->addRow(new HorizontalLine{this});
boxWrap(tr("Device ID"), deviceIdValue_);
@ -724,6 +766,10 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
settings_->setEnlargeEmojiOnlyMessages(!disabled);
});
connect(useStunServer_, &Toggle::toggled, this, [this](bool disabled) {
settings_->setUseStunServer(!disabled);
});
connect(timelineMaxWidthSpin_,
qOverload<int>(&QSpinBox::valueChanged),
this,
@ -766,6 +812,8 @@ UserSettingsPage::showEvent(QShowEvent *)
enlargeEmojiOnlyMessages_->setState(!settings_->enlargeEmojiOnlyMessages());
deviceIdValue_->setText(QString::fromStdString(http::client()->device_id()));
timelineMaxWidthSpin_->setValue(settings_->timelineMaxWidth());
useStunServer_->setState(!settings_->useStunServer());
defaultAudioSourceValue_->setText(settings_->defaultAudioSource());
deviceFingerprintValue_->setText(
utils::humanReadableFingerprint(olm::client()->identity_keys().ed25519));

View file

@ -71,6 +71,10 @@ class UserSettings : public QObject
Q_PROPERTY(
QString emojiFont READ emojiFont WRITE setEmojiFontFamily NOTIFY emojiFontChanged)
Q_PROPERTY(Presence presence READ presence WRITE setPresence NOTIFY presenceChanged)
Q_PROPERTY(
bool useStunServer READ useStunServer WRITE setUseStunServer NOTIFY useStunServerChanged)
Q_PROPERTY(QString defaultAudioSource READ defaultAudioSource WRITE setDefaultAudioSource
NOTIFY defaultAudioSourceChanged)
public:
UserSettings();
@ -107,6 +111,8 @@ public:
void setAvatarCircles(bool state);
void setDecryptSidebar(bool state);
void setPresence(Presence state);
void setUseStunServer(bool state);
void setDefaultAudioSource(const QString &deviceName);
QString theme() const { return !theme_.isEmpty() ? theme_ : defaultTheme_; }
bool messageHoverHighlight() const { return messageHoverHighlight_; }
@ -132,6 +138,8 @@ public:
QString font() const { return font_; }
QString emojiFont() const { return emojiFont_; }
Presence presence() const { return presence_; }
bool useStunServer() const { return useStunServer_; }
QString defaultAudioSource() const { return defaultAudioSource_; }
signals:
void groupViewStateChanged(bool state);
@ -154,6 +162,8 @@ signals:
void fontChanged(QString state);
void emojiFontChanged(QString state);
void presenceChanged(Presence state);
void useStunServerChanged(bool state);
void defaultAudioSourceChanged(const QString &deviceName);
private:
// Default to system theme if QT_QPA_PLATFORMTHEME var is set.
@ -181,6 +191,8 @@ private:
QString font_;
QString emojiFont_;
Presence presence_;
bool useStunServer_;
QString defaultAudioSource_;
};
class HorizontalLine : public QFrame
@ -234,9 +246,11 @@ private:
Toggle *desktopNotifications_;
Toggle *alertOnNotification_;
Toggle *avatarCircles_;
Toggle *useStunServer_;
Toggle *decryptSidebar_;
QLabel *deviceFingerprintValue_;
QLabel *deviceIdValue_;
QLabel *defaultAudioSourceValue_;
QComboBox *themeCombo_;
QComboBox *scaleFactorCombo_;

View file

@ -35,11 +35,10 @@ createDescriptionInfo(const Event &event, const QString &localUser, const QStrin
const auto username = cache::displayName(room_id, sender);
const auto ts = QDateTime::fromMSecsSinceEpoch(msg.origin_server_ts);
return DescInfo{
QString::fromStdString(msg.event_id),
return DescInfo{QString::fromStdString(msg.event_id),
sender,
utils::messageDescription<T>(
username, QString::fromStdString(msg.content.body).trimmed(), sender == localUser),
username, utils::event_body(event).trimmed(), sender == localUser),
utils::descriptiveTime(ts),
msg.origin_server_ts,
ts};
@ -163,6 +162,9 @@ utils::getMessageDescription(const TimelineEvent &event,
using Notice = mtx::events::RoomEvent<mtx::events::msg::Notice>;
using Text = mtx::events::RoomEvent<mtx::events::msg::Text>;
using Video = mtx::events::RoomEvent<mtx::events::msg::Video>;
using CallInvite = mtx::events::RoomEvent<mtx::events::msg::CallInvite>;
using CallAnswer = mtx::events::RoomEvent<mtx::events::msg::CallAnswer>;
using CallHangUp = mtx::events::RoomEvent<mtx::events::msg::CallHangUp>;
using Encrypted = mtx::events::EncryptedEvent<mtx::events::msg::Encrypted>;
if (std::holds_alternative<Audio>(event)) {
@ -179,6 +181,12 @@ utils::getMessageDescription(const TimelineEvent &event,
return createDescriptionInfo<Text>(event, localUser, room_id);
} else if (std::holds_alternative<Video>(event)) {
return createDescriptionInfo<Video>(event, localUser, room_id);
} else if (std::holds_alternative<CallInvite>(event)) {
return createDescriptionInfo<CallInvite>(event, localUser, room_id);
} else if (std::holds_alternative<CallAnswer>(event)) {
return createDescriptionInfo<CallAnswer>(event, localUser, room_id);
} else if (std::holds_alternative<CallHangUp>(event)) {
return createDescriptionInfo<CallHangUp>(event, localUser, room_id);
} else if (std::holds_alternative<mtx::events::Sticker>(event)) {
return createDescriptionInfo<mtx::events::Sticker>(event, localUser, room_id);
} else if (auto msg = std::get_if<Encrypted>(&event); msg != nullptr) {

View file

@ -96,6 +96,9 @@ messageDescription(const QString &username = "",
using Sticker = mtx::events::Sticker;
using Text = mtx::events::RoomEvent<mtx::events::msg::Text>;
using Video = mtx::events::RoomEvent<mtx::events::msg::Video>;
using CallInvite = mtx::events::RoomEvent<mtx::events::msg::CallInvite>;
using CallAnswer = mtx::events::RoomEvent<mtx::events::msg::CallAnswer>;
using CallHangUp = mtx::events::RoomEvent<mtx::events::msg::CallHangUp>;
using Encrypted = mtx::events::EncryptedEvent<mtx::events::msg::Encrypted>;
if (std::is_same<T, Audio>::value) {
@ -164,6 +167,30 @@ messageDescription(const QString &username = "",
return QCoreApplication::translate("message-description sent:",
"%1 sent an encrypted message")
.arg(username);
} else if (std::is_same<T, CallInvite>::value) {
if (isLocal)
return QCoreApplication::translate("message-description sent:",
"You placed a call");
else
return QCoreApplication::translate("message-description sent:",
"%1 placed a call")
.arg(username);
} else if (std::is_same<T, CallAnswer>::value) {
if (isLocal)
return QCoreApplication::translate("message-description sent:",
"You answered a call");
else
return QCoreApplication::translate("message-description sent:",
"%1 answered a call")
.arg(username);
} else if (std::is_same<T, CallHangUp>::value) {
if (isLocal)
return QCoreApplication::translate("message-description sent:",
"You ended a call");
else
return QCoreApplication::translate("message-description sent:",
"%1 ended a call")
.arg(username);
} else {
return QCoreApplication::translate("utils", "Unknown Message Type");
}

697
src/WebRTCSession.cpp Normal file
View file

@ -0,0 +1,697 @@
#include <cctype>
#include "Logging.h"
#include "WebRTCSession.h"
#ifdef GSTREAMER_AVAILABLE
extern "C"
{
#include "gst/gst.h"
#include "gst/sdp/sdp.h"
#define GST_USE_UNSTABLE_API
#include "gst/webrtc/webrtc.h"
}
#endif
Q_DECLARE_METATYPE(WebRTCSession::State)
WebRTCSession::WebRTCSession()
  : QObject()
{
        // Make the State enum usable in queued (cross-thread) signal connections.
        qRegisterMetaType<WebRTCSession::State>();
        // Mirror every emitted state change into state_ (see setState()).
        connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState);
}
// Initialise GStreamer and verify that every plugin required for audio calls
// is present. Idempotent: returns true immediately once a previous call
// succeeded. On failure a human-readable reason is logged and, when
// errorMessage is non-null, written to *errorMessage.
bool
WebRTCSession::init(std::string *errorMessage)
{
#ifdef GSTREAMER_AVAILABLE
        if (initialised_)
                return true;

        GError *error = nullptr;
        if (!gst_init_check(nullptr, nullptr, &error)) {
                std::string strError = std::string("WebRTC: failed to initialise GStreamer: ");
                if (error) {
                        strError += error->message;
                        g_error_free(error);
                }
                nhlog::ui()->error(strError);
                if (errorMessage)
                        *errorMessage = strError;
                return false;
        }

        gchar *version = gst_version_string();
        std::string gstVersion(version);
        g_free(version);
        nhlog::ui()->info("WebRTC: initialised " + gstVersion);

        // GStreamer Plugins:
        // Base: audioconvert, audioresample, opus, playback, volume
        // Good: autodetect, rtpmanager
        // Bad: dtls, srtp, webrtc
        // libnice [GLib]: nice
        initialised_          = true;
        std::string strError  = gstVersion + ": Missing plugins: ";
        const gchar *needed[] = {"audioconvert",
                                 "audioresample",
                                 "autodetect",
                                 "dtls",
                                 "nice",
                                 "opus",
                                 "playback",
                                 "rtpmanager",
                                 "srtp",
                                 "volume",
                                 "webrtc",
                                 nullptr};
        GstRegistry *registry = gst_registry_get();
        // Probe the registry for each required plugin; collect the missing ones.
        for (guint i = 0; i < g_strv_length((gchar **)needed); i++) {
                GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]);
                if (!plugin) {
                        strError += std::string(needed[i]) + " ";
                        initialised_ = false;
                        continue;
                }
                gst_object_unref(plugin);
        }
        if (!initialised_) {
                nhlog::ui()->error(strError);
                if (errorMessage)
                        *errorMessage = strError;
        }
        return initialised_;
#else
        (void)errorMessage;
        return false;
#endif
}
#ifdef GSTREAMER_AVAILABLE
namespace {
// Shared state for the free-function GStreamer callbacks below.
bool isoffering_;      // true when this client initiated the call (sends the offer)
std::string localsdp_; // local session description captured in setLocalDescription()
std::vector<mtx::events::msg::CallCandidates::Candidate> localcandidates_;

// Pipeline bus watch: tear the session down on end-of-stream or on any
// element error. Returning TRUE keeps the watch installed.
gboolean
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
{
        WebRTCSession *session = static_cast<WebRTCSession *>(user_data);
        switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_EOS:
                nhlog::ui()->error("WebRTC: end of stream");
                session->end();
                break;
        case GST_MESSAGE_ERROR:
                GError *error;
                gchar *debug;
                // gst_message_parse_error allocates both out-params; free below.
                gst_message_parse_error(msg, &error, &debug);
                nhlog::ui()->error(
                  "WebRTC: error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message);
                g_clear_error(&error);
                g_free(debug);
                session->end();
                break;
        default:
                break;
        }
        return TRUE;
}
// Parse a raw SDP string into a GstWebRTCSessionDescription of the given
// type (offer or answer). On success the returned description takes
// ownership of the parsed message; on a malformed SDP, logs and returns
// nullptr.
GstWebRTCSessionDescription *
parseSDP(const std::string &sdp, GstWebRTCSDPType type)
{
        GstSDPMessage *msg;
        gst_sdp_message_new(&msg);
        if (gst_sdp_message_parse_buffer((guint8 *)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) {
                return gst_webrtc_session_description_new(type, msg);
        } else {
                nhlog::ui()->error("WebRTC: failed to parse remote session description");
                // GstSDPMessage is a plain boxed struct, not a GstObject, so it
                // must be released with gst_sdp_message_free() —
                // gst_object_unref() here was undefined behaviour.
                gst_sdp_message_free(msg);
                return nullptr;
        }
}
// Promise callback fired when webrtcbin has produced a local offer/answer:
// installs it as the local description and caches its SDP text in localsdp_
// for later publication (after ICE gathering completes).
void
setLocalDescription(GstPromise *promise, gpointer webrtc)
{
        const GstStructure *reply = gst_promise_get_reply(promise);
        // The reply carries either an "answer" or an "offer" field depending
        // on which create-* signal produced it.
        gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer"));
        GstWebRTCSessionDescription *gstsdp = nullptr;
        gst_structure_get(reply,
                          isAnswer ? "answer" : "offer",
                          GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
                          &gstsdp,
                          nullptr);
        gst_promise_unref(promise);
        g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr);

        gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp);
        localsdp_  = std::string(sdp);
        g_free(sdp);
        gst_webrtc_session_description_free(gstsdp);

        nhlog::ui()->debug(
          "WebRTC: local description set ({}):\n{}", isAnswer ? "answer" : "offer", localsdp_);
}

// on-negotiation-needed handler for the offering side.
void
createOffer(GstElement *webrtc)
{
        // create-offer first, then set-local-description
        GstPromise *promise =
          gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
        g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise);
}

// Promise callback chained after set-remote-description on the answering side.
void
createAnswer(GstPromise *promise, gpointer webrtc)
{
        // create-answer first, then set-local-description
        gst_promise_unref(promise);
        promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
        g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise);
}
#if GST_CHECK_VERSION(1, 17, 0)
// notify::ice-gathering-state handler (GStreamer >= 1.17): once gathering is
// complete, publish the cached local SDP plus all gathered candidates in a
// single offer/answer event.
void
iceGatheringStateChanged(GstElement *webrtc,
                         GParamSpec *pspec G_GNUC_UNUSED,
                         gpointer user_data G_GNUC_UNUSED)
{
        GstWebRTCICEGatheringState newState;
        g_object_get(webrtc, "ice-gathering-state", &newState, nullptr);
        if (newState == GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE) {
                nhlog::ui()->debug("WebRTC: GstWebRTCICEGatheringState -> Complete");
                if (isoffering_) {
                        emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
                        emit WebRTCSession::instance().stateChanged(
                          WebRTCSession::State::OFFERSENT);
                } else {
                        emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
                        emit WebRTCSession::instance().stateChanged(
                          WebRTCSession::State::ANSWERSENT);
                }
        }
}
#else
// Pre-1.17 fallback: fired by a debounce timer armed in addLocalICECandidate()
// once no new candidate has arrived for 100ms (see the comment there).
gboolean
onICEGatheringCompletion(gpointer timerid)
{
        *(guint *)(timerid) = 0; // mark the one-shot timer as expired
        if (isoffering_) {
                emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT);
        } else {
                emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ANSWERSENT);
        }
        return FALSE; // do not reschedule
}
#endif
// on-ice-candidate handler: collects (>=1.17) or directly emits (<1.17) each
// locally gathered ICE candidate.
void
addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED,
                     guint mlineIndex,
                     gchar *candidate,
                     gpointer G_GNUC_UNUSED)
{
        nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate);

#if GST_CHECK_VERSION(1, 17, 0)
        // Candidates are batched and sent with the offer/answer once
        // gathering completes (iceGatheringStateChanged above).
        localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate});
        return;
#else
        // Trickle any candidate gathered after the offer/answer has already
        // been sent.
        if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) {
                emit WebRTCSession::instance().newICECandidate(
                  {"audio", (uint16_t)mlineIndex, candidate});
                return;
        }

        // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers
        // GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early. Fixed in v1.17.
        // Use a 100ms timeout in the meantime
        static guint timerid = 0;
        if (timerid)
                g_source_remove(timerid);
        timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid);
#endif
}
// notify::ice-connection-state handler: maps ICE connectivity-check progress
// onto the session state (CONNECTING while checking, ICEFAILED on failure).
void
iceConnectionStateChanged(GstElement *webrtc,
                          GParamSpec *pspec G_GNUC_UNUSED,
                          gpointer user_data G_GNUC_UNUSED)
{
        GstWebRTCICEConnectionState newState;
        g_object_get(webrtc, "ice-connection-state", &newState, nullptr);
        switch (newState) {
        case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING:
                nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking");
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING);
                break;
        case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED:
                nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed");
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED);
                break;
        default:
                break;
        }
}
// decodebin pad-added handler: attach a playback chain
// (queue ! audioconvert ! audioresample ! autoaudiosink) to each decoded
// incoming audio stream. A successful link marks the session CONNECTED.
// Non-audio pads are ignored.
void
linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
        GstCaps *caps = gst_pad_get_current_caps(newpad);
        if (!caps)
                return;

        const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
        gst_caps_unref(caps);

        GstPad *queuepad = nullptr;
        if (g_str_has_prefix(name, "audio")) {
                nhlog::ui()->debug("WebRTC: received incoming audio stream");
                GstElement *queue    = gst_element_factory_make("queue", nullptr);
                GstElement *convert  = gst_element_factory_make("audioconvert", nullptr);
                GstElement *resample = gst_element_factory_make("audioresample", nullptr);
                GstElement *sink     = gst_element_factory_make("autoaudiosink", nullptr);
                gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr);
                gst_element_link_many(queue, convert, resample, sink, nullptr);
                // The pipeline is already PLAYING; bring the new elements up
                // to the same state.
                gst_element_sync_state_with_parent(queue);
                gst_element_sync_state_with_parent(convert);
                gst_element_sync_state_with_parent(resample);
                gst_element_sync_state_with_parent(sink);
                queuepad = gst_element_get_static_pad(queue, "sink");
        }

        if (queuepad) {
                if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad)))
                        nhlog::ui()->error("WebRTC: unable to link new pad");
                else {
                        emit WebRTCSession::instance().stateChanged(
                          WebRTCSession::State::CONNECTED);
                }
                gst_object_unref(queuepad);
        }
}
// webrtcbin pad-added handler: route each new incoming (src) stream through a
// freshly created decodebin, whose decoded pads are then wired up by
// linkNewPad() above.
void
addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
        if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC)
                return;

        nhlog::ui()->debug("WebRTC: received incoming stream");
        GstElement *decodebin = gst_element_factory_make("decodebin", nullptr);
        g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe);
        gst_bin_add(GST_BIN(pipe), decodebin);
        gst_element_sync_state_with_parent(decodebin);
        GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
        if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
                nhlog::ui()->error("WebRTC: unable to link new pad");
        gst_object_unref(sinkpad);
}
// Case-insensitive search for `name` inside `sdp`.
// Returns an iterator to the first character of the first match, or
// sdp.cend() when `name` does not occur.
std::string::const_iterator
findName(const std::string &sdp, const std::string &name)
{
        const auto caseless = [](unsigned char lhs, unsigned char rhs) {
                return std::tolower(lhs) == std::tolower(rhs);
        };
        return std::search(sdp.cbegin(), sdp.cend(), name.cbegin(), name.cend(), caseless);
}
// Extract the RTP payload type assigned to codec `name` from a raw SDP
// string, e.g. "a=rtpmap:111 opus/48000/2" -> 111. Returns -1 (and logs)
// when the codec or its payload number cannot be determined.
int
getPayloadType(const std::string &sdp, const std::string &name)
{
        // eg a=rtpmap:111 opus/48000/2
        auto e = findName(sdp, name);
        if (e == sdp.cend()) {
                nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing");
                return -1;
        }

        // Walk backwards from the codec name to the preceding ':' and parse
        // the digits between them.
        if (auto s = sdp.rfind(':', e - sdp.cbegin()); s == std::string::npos) {
                nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
                                   " payload type");
                return -1;
        } else {
                ++s;
                try {
                        return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s));
                } catch (...) {
                        // stoi throws when the text before the codec name is
                        // not a number (malformed rtpmap line).
                        nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
                                           " payload type");
                }
        }
        return -1;
}
}
// Begin an outbound call: reset negotiation state and start the pipeline.
// The SDP offer itself is produced asynchronously once the pipeline is
// playing (webrtcbin's on-negotiation-needed -> ::createOffer above).
bool
WebRTCSession::createOffer()
{
        isoffering_ = true;
        localsdp_.clear();
        localcandidates_.clear();
        return startPipeline(111); // a dynamic opus payload type
}
// Handle a remote SDP offer for an incoming call: extract the caller's opus
// payload type, start the pipeline, then hand the offer to webrtcbin. The
// answer is generated in the createAnswer() promise callback once the remote
// description has been applied. Returns false if a call is already active or
// the offer is unusable.
bool
WebRTCSession::acceptOffer(const std::string &sdp)
{
        nhlog::ui()->debug("WebRTC: received offer:\n{}", sdp);
        if (state_ != State::DISCONNECTED)
                return false;

        isoffering_ = false;
        localsdp_.clear();
        localcandidates_.clear();

        // Our answer must reuse the payload type the offer assigned to opus.
        int opusPayloadType = getPayloadType(sdp, "opus");
        if (opusPayloadType == -1)
                return false;

        GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
        if (!offer)
                return false;

        if (!startPipeline(opusPayloadType)) {
                gst_webrtc_session_description_free(offer);
                return false;
        }

        // set-remote-description first, then create-answer
        GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc_, nullptr);
        g_signal_emit_by_name(webrtc_, "set-remote-description", offer, promise);
        gst_webrtc_session_description_free(offer);
        return true;
}
// Handle the remote SDP answer to our earlier offer. Only valid in the
// OFFERSENT state; an unparseable answer aborts the call.
bool
WebRTCSession::acceptAnswer(const std::string &sdp)
{
        nhlog::ui()->debug("WebRTC: received answer:\n{}", sdp);
        if (state_ != State::OFFERSENT)
                return false;

        GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER);
        if (!answer) {
                end();
                return false;
        }

        g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr);
        gst_webrtc_session_description_free(answer);
        return true;
}
// Feed remote ICE candidates to webrtcbin. Silently ignored unless the
// pipeline has been started (state_ >= INITIATED), i.e. webrtc_ is valid.
void
WebRTCSession::acceptICECandidates(
  const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates)
{
        if (state_ >= State::INITIATED) {
                for (const auto &c : candidates) {
                        nhlog::ui()->debug(
                          "WebRTC: remote candidate: (m-line:{}):{}", c.sdpMLineIndex, c.candidate);
                        g_signal_emit_by_name(
                          webrtc_, "add-ice-candidate", c.sdpMLineIndex, c.candidate.c_str());
                }
        }
}
// Build the send pipeline, configure STUN/TURN on webrtcbin, connect all
// webrtcbin signal handlers, and set the pipeline to PLAYING. The signal
// connections must happen before PLAYING so no early negotiation/ICE events
// are missed. Returns false if a call is already active or startup fails.
bool
WebRTCSession::startPipeline(int opusPayloadType)
{
        if (state_ != State::DISCONNECTED)
                return false;

        emit stateChanged(State::INITIATING);

        if (!createPipeline(opusPayloadType))
                return false;

        webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin");

        if (!stunServer_.empty()) {
                nhlog::ui()->info("WebRTC: setting STUN server: {}", stunServer_);
                g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr);
        }

        for (const auto &uri : turnServers_) {
                nhlog::ui()->info("WebRTC: setting TURN server: {}", uri);
                gboolean udata; // out-param: whether the server was accepted
                g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&udata));
        }
        if (turnServers_.empty())
                nhlog::ui()->warn("WebRTC: no TURN server provided");

        // generate the offer when the pipeline goes to PLAYING
        if (isoffering_)
                g_signal_connect(
                  webrtc_, "on-negotiation-needed", G_CALLBACK(::createOffer), nullptr);

        // on-ice-candidate is emitted when a local ICE candidate has been gathered
        g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr);

        // capture ICE failure
        g_signal_connect(
          webrtc_, "notify::ice-connection-state", G_CALLBACK(iceConnectionStateChanged), nullptr);

        // incoming streams trigger pad-added
        gst_element_set_state(pipe_, GST_STATE_READY);
        g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_);

#if GST_CHECK_VERSION(1, 17, 0)
        // capture ICE gathering completion
        g_signal_connect(
          webrtc_, "notify::ice-gathering-state", G_CALLBACK(iceGatheringStateChanged), nullptr);
#endif
        // webrtcbin lifetime is the same as that of the pipeline
        gst_object_unref(webrtc_);

        // start the pipeline
        GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING);
        if (ret == GST_STATE_CHANGE_FAILURE) {
                nhlog::ui()->error("WebRTC: unable to start pipeline");
                end();
                return false;
        }

        GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_));
        gst_bus_add_watch(bus, newBusMessage, this);
        gst_object_unref(bus);
        emit stateChanged(State::INITIATED);
        return true;
}
// Assemble the outgoing audio pipeline:
//   <selected source> ! volume ! audioconvert ! audioresample ! queue !
//   opusenc ! rtpopuspay ! queue ! capsfilter(OPUS/<payload>) ! webrtcbin
// The "srclevel" volume element is later used for mute toggling, and
// "webrtcbin" is looked up by name in startPipeline().
bool
WebRTCSession::createPipeline(int opusPayloadType)
{
        int nSources = audioSources_ ? g_list_length(audioSources_) : 0;
        if (nSources == 0) {
                nhlog::ui()->error("WebRTC: no audio sources");
                return false;
        }

        if (audioSourceIndex_ < 0 || audioSourceIndex_ >= nSources) {
                nhlog::ui()->error("WebRTC: invalid audio source index");
                return false;
        }

        // NOTE(review): the factory_make calls below are not null-checked;
        // init() has already verified the required plugins are installed.
        GstElement *source = gst_device_create_element(
          GST_DEVICE_CAST(g_list_nth_data(audioSources_, audioSourceIndex_)), nullptr);
        GstElement *volume     = gst_element_factory_make("volume", "srclevel");
        GstElement *convert    = gst_element_factory_make("audioconvert", nullptr);
        GstElement *resample   = gst_element_factory_make("audioresample", nullptr);
        GstElement *queue1     = gst_element_factory_make("queue", nullptr);
        GstElement *opusenc    = gst_element_factory_make("opusenc", nullptr);
        GstElement *rtp        = gst_element_factory_make("rtpopuspay", nullptr);
        GstElement *queue2     = gst_element_factory_make("queue", nullptr);
        GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);

        // Pin the RTP caps to audio/OPUS with the negotiated payload type.
        GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp",
                                               "media",
                                               G_TYPE_STRING,
                                               "audio",
                                               "encoding-name",
                                               G_TYPE_STRING,
                                               "OPUS",
                                               "payload",
                                               G_TYPE_INT,
                                               opusPayloadType,
                                               nullptr);
        g_object_set(capsfilter, "caps", rtpcaps, nullptr);
        gst_caps_unref(rtpcaps);

        GstElement *webrtcbin = gst_element_factory_make("webrtcbin", "webrtcbin");
        g_object_set(webrtcbin, "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, nullptr);

        pipe_ = gst_pipeline_new(nullptr);
        // The pipeline takes ownership of all elements added here.
        gst_bin_add_many(GST_BIN(pipe_),
                         source,
                         volume,
                         convert,
                         resample,
                         queue1,
                         opusenc,
                         rtp,
                         queue2,
                         capsfilter,
                         webrtcbin,
                         nullptr);

        if (!gst_element_link_many(source,
                                   volume,
                                   convert,
                                   resample,
                                   queue1,
                                   opusenc,
                                   rtp,
                                   queue2,
                                   capsfilter,
                                   webrtcbin,
                                   nullptr)) {
                nhlog::ui()->error("WebRTC: failed to link pipeline elements");
                end(); // tears down pipe_ and resets the session state
                return false;
        }
        return true;
}
// Toggle the mute flag on the outgoing audio ("srclevel" volume element).
// On success writes the NEW mute state to isMuted and returns true; returns
// false when no call is active.
bool
WebRTCSession::toggleMuteAudioSrc(bool &isMuted)
{
        if (state_ < State::INITIATED)
                return false;

        GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel");
        if (!srclevel)
                return false;

        gboolean muted;
        g_object_get(srclevel, "mute", &muted, nullptr);
        g_object_set(srclevel, "mute", !muted, nullptr);
        gst_object_unref(srclevel);
        isMuted = !muted;
        return true;
}
// Tear down the call: stop and release the pipeline (which also releases
// webrtcbin) and report DISCONNECTED. Safe to call repeatedly.
void
WebRTCSession::end()
{
        nhlog::ui()->debug("WebRTC: ending session");
        if (pipe_) {
                gst_element_set_state(pipe_, GST_STATE_NULL);
                gst_object_unref(pipe_);
                pipe_ = nullptr;
        }
        webrtc_ = nullptr; // non-owning; freed with the pipeline
        if (state_ != State::DISCONNECTED)
                emit stateChanged(State::DISCONNECTED);
}
// Re-enumerate audio capture devices into audioSources_ using a lazily
// created, process-lifetime GstDeviceMonitor filtered to raw audio sources.
// NOTE(review): the monitor is never gst_device_monitor_start()ed; the
// on-demand gst_device_monitor_get_devices() call appears sufficient here —
// confirm behaviour across GStreamer versions.
void
WebRTCSession::refreshDevices()
{
        if (!initialised_)
                return;

        static GstDeviceMonitor *monitor = nullptr;
        if (!monitor) {
                monitor       = gst_device_monitor_new();
                GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
                gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
                gst_caps_unref(caps);
        }
        // Drop the previous snapshot before taking a fresh one.
        g_list_free_full(audioSources_, g_object_unref);
        audioSources_ = gst_device_monitor_get_devices(monitor);
}
// Return display names for all audio capture devices, refreshing the device
// list first. If defaultDevice matches an entry, that device is moved to the
// front of BOTH the returned list and audioSources_, so index 0 stays the
// preferred source for setAudioSource().
std::vector<std::string>
WebRTCSession::getAudioSourceNames(const std::string &defaultDevice)
{
        if (!initialised_)
                return {};

        refreshDevices();
        std::vector<std::string> ret;
        ret.reserve(g_list_length(audioSources_));
        for (GList *l = audioSources_; l != nullptr; l = l->next) {
                gchar *name = gst_device_get_display_name(GST_DEVICE_CAST(l->data));
                ret.emplace_back(name);
                g_free(name);
                if (ret.back() == defaultDevice) {
                        // move default device to top of the list
                        std::swap(audioSources_->data, l->data);
                        std::swap(ret.front(), ret.back());
                }
        }
        return ret;
}
#else

// Stub implementations compiled when nheko is built without GStreamer
// (GSTREAMER_AVAILABLE undefined): every operation reports failure or does
// nothing, so callers can invoke the session unconditionally.

bool
WebRTCSession::createOffer()
{
        return false;
}

bool
WebRTCSession::acceptOffer(const std::string &)
{
        return false;
}

bool
WebRTCSession::acceptAnswer(const std::string &)
{
        return false;
}

void
WebRTCSession::acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &)
{}

bool
WebRTCSession::startPipeline(int)
{
        return false;
}

bool
WebRTCSession::createPipeline(int)
{
        return false;
}

bool
WebRTCSession::toggleMuteAudioSrc(bool &)
{
        return false;
}

void
WebRTCSession::end()
{}

void
WebRTCSession::refreshDevices()
{}

std::vector<std::string>
WebRTCSession::getAudioSourceNames(const std::string &)
{
        return {};
}

#endif

83
src/WebRTCSession.h Normal file
View file

@ -0,0 +1,83 @@
#pragma once
#include <string>
#include <vector>
#include <QObject>
#include "mtx/events/voip.hpp"
typedef struct _GList GList;
typedef struct _GstElement GstElement;
// Singleton managing a single voice call: wraps a GStreamer webrtcbin
// pipeline and drives the SDP offer/answer and ICE candidate exchange.
// Compiles to inert stubs when GStreamer is unavailable (WebRTCSession.cpp).
class WebRTCSession : public QObject
{
        Q_OBJECT

public:
        // Call lifecycle. Declaration order is meaningful: the implementation
        // compares states with >= (e.g. state_ >= State::INITIATED).
        enum class State
        {
                DISCONNECTED, // no call in progress
                ICEFAILED,    // ICE connectivity checks failed
                INITIATING,   // pipeline being constructed
                INITIATED,    // pipeline started; negotiation may begin
                OFFERSENT,    // local offer published
                ANSWERSENT,   // local answer published
                CONNECTING,   // ICE connectivity checks running
                CONNECTED     // incoming audio linked and playing
        };

        static WebRTCSession &instance()
        {
                static WebRTCSession instance;
                return instance;
        }

        // Initialise GStreamer and verify required plugins; idempotent.
        // On failure a reason is written to *errorMessage (when non-null).
        bool init(std::string *errorMessage = nullptr);
        State state() const { return state_; }

        bool createOffer();
        bool acceptOffer(const std::string &sdp);
        bool acceptAnswer(const std::string &sdp);
        void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &);

        bool toggleMuteAudioSrc(bool &isMuted);
        void end();

        void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; }
        void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }

        // Enumerates capture devices; moves defaultDevice (if found) to
        // index 0 so it pairs with setAudioSource(0).
        std::vector<std::string> getAudioSourceNames(const std::string &defaultDevice);
        void setAudioSource(int audioDeviceIndex) { audioSourceIndex_ = audioDeviceIndex; }

signals:
        // Local SDP + batched ICE candidates, ready to publish to the peer.
        void offerCreated(const std::string &sdp,
                          const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
        void answerCreated(const std::string &sdp,
                           const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
        // Trickled candidate gathered after the offer/answer was sent.
        void newICECandidate(const mtx::events::msg::CallCandidates::Candidate &);
        void stateChanged(WebRTCSession::State); // explicit qualifier necessary for Qt

private slots:
        void setState(State state) { state_ = state; }

private:
        WebRTCSession();

        bool initialised_ = false;
        State state_      = State::DISCONNECTED;
        GstElement *pipe_   = nullptr; // owning reference to the pipeline
        GstElement *webrtc_ = nullptr; // non-owning; lifetime tied to pipe_
        std::string stunServer_;
        std::vector<std::string> turnServers_;
        GList *audioSources_  = nullptr; // snapshot of capture devices
        int audioSourceIndex_ = -1;      // index into audioSources_

        bool startPipeline(int opusPayloadType);
        bool createPipeline(int opusPayloadType);
        void refreshDevices();

public:
        WebRTCSession(WebRTCSession const &) = delete;
        void operator=(WebRTCSession const &) = delete;
};

134
src/dialogs/AcceptCall.cpp Normal file
View file

@ -0,0 +1,134 @@
#include <QComboBox>
#include <QLabel>
#include <QPushButton>
#include <QString>
#include <QVBoxLayout>
#include "ChatPage.h"
#include "Config.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "dialogs/AcceptCall.h"
#include "ui/Avatar.h"
namespace dialogs {
// Incoming-call prompt: shows caller identity, avatar and call type, lets the
// user pick the audio input device, and emits accept() or reject(). The
// dialog closes itself immediately if WebRTC cannot be initialised or no
// audio sources exist.
AcceptCall::AcceptCall(const QString &caller,
                       const QString &displayName,
                       const QString &roomName,
                       const QString &avatarUrl,
                       QSharedPointer<UserSettings> settings,
                       QWidget *parent)
  : QWidget(parent)
{
        // Bail out early when calling is impossible; close() is a QWidget
        // slot ("emit" is a no-op macro here), and WA_DeleteOnClose below
        // destroys the widget.
        std::string errorMessage;
        if (!WebRTCSession::instance().init(&errorMessage)) {
                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                emit close();
                return;
        }
        audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
          settings->defaultAudioSource().toStdString());
        if (audioDevices_.empty()) {
                emit ChatPage::instance()->showNotification(
                  "Incoming call: No audio sources found.");
                emit close();
                return;
        }

        setAutoFillBackground(true);
        setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
        setWindowModality(Qt::WindowModal);
        setAttribute(Qt::WA_DeleteOnClose, true);

        setMinimumWidth(conf::modals::MIN_WIDGET_WIDTH);
        setSizePolicy(QSizePolicy::Maximum, QSizePolicy::Maximum);

        auto layout = new QVBoxLayout(this);
        layout->setSpacing(conf::modals::WIDGET_SPACING);
        layout->setMargin(conf::modals::WIDGET_MARGIN);

        QFont f;
        f.setPointSizeF(f.pointSizeF());

        QFont labelFont;
        labelFont.setWeight(QFont::Medium);

        // Show the display name only when it adds information over the MXID.
        QLabel *displayNameLabel = nullptr;
        if (!displayName.isEmpty() && displayName != caller) {
                displayNameLabel = new QLabel(displayName, this);
                labelFont.setPointSizeF(f.pointSizeF() * 2);
                displayNameLabel->setFont(labelFont);
                displayNameLabel->setAlignment(Qt::AlignCenter);
        }

        QLabel *callerLabel = new QLabel(caller, this);
        labelFont.setPointSizeF(f.pointSizeF() * 1.2);
        callerLabel->setFont(labelFont);
        callerLabel->setAlignment(Qt::AlignCenter);

        // Avatar falls back to the room's initial when no image is set.
        auto avatar = new Avatar(this, QFontMetrics(f).height() * 6);
        if (!avatarUrl.isEmpty())
                avatar->setImage(avatarUrl);
        else
                avatar->setLetter(utils::firstChar(roomName));

        const int iconSize        = 22;
        QLabel *callTypeIndicator = new QLabel(this);
        callTypeIndicator->setPixmap(
          QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(iconSize * 2, iconSize * 2)));

        QLabel *callTypeLabel = new QLabel("Voice Call", this);
        labelFont.setPointSizeF(f.pointSizeF() * 1.1);
        callTypeLabel->setFont(labelFont);
        callTypeLabel->setAlignment(Qt::AlignCenter);

        auto buttonLayout = new QHBoxLayout;
        buttonLayout->setSpacing(18);
        acceptBtn_ = new QPushButton(tr("Accept"), this);
        acceptBtn_->setDefault(true);
        acceptBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
        acceptBtn_->setIconSize(QSize(iconSize, iconSize));

        rejectBtn_ = new QPushButton(tr("Reject"), this);
        rejectBtn_->setIcon(QIcon(":/icons/icons/ui/end-call.png"));
        rejectBtn_->setIconSize(QSize(iconSize, iconSize));
        buttonLayout->addWidget(acceptBtn_);
        buttonLayout->addWidget(rejectBtn_);

        // Audio input selector; index 0 is the user's saved default device
        // (see WebRTCSession::getAudioSourceNames).
        auto deviceLayout = new QHBoxLayout;
        auto audioLabel   = new QLabel(this);
        audioLabel->setPixmap(
          QIcon(":/icons/icons/ui/microphone-unmute.png").pixmap(QSize(iconSize, iconSize)));

        auto deviceList = new QComboBox(this);
        for (const auto &d : audioDevices_)
                deviceList->addItem(QString::fromStdString(d));

        deviceLayout->addStretch();
        deviceLayout->addWidget(audioLabel);
        deviceLayout->addWidget(deviceList);

        if (displayNameLabel)
                layout->addWidget(displayNameLabel, 0, Qt::AlignCenter);
        layout->addWidget(callerLabel, 0, Qt::AlignCenter);
        layout->addWidget(avatar, 0, Qt::AlignCenter);
        layout->addWidget(callTypeIndicator, 0, Qt::AlignCenter);
        layout->addWidget(callTypeLabel, 0, Qt::AlignCenter);
        layout->addLayout(buttonLayout);
        layout->addLayout(deviceLayout);

        // Accepting persists the chosen device as the new default.
        connect(acceptBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
                WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
                settings->setDefaultAudioSource(
                  QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
                emit accept();
                emit close();
        });
        connect(rejectBtn_, &QPushButton::clicked, this, [this]() {
                emit reject();
                emit close();
        });
}
}

36
src/dialogs/AcceptCall.h Normal file
View file

@ -0,0 +1,36 @@
#pragma once
#include <string>
#include <vector>
#include <QSharedPointer>
#include <QWidget>
class QPushButton;
class QString;
class UserSettings;
namespace dialogs {
// Modal prompt for an incoming voice call: presents caller info and an audio
// input selector, and reports the user's choice via accept()/reject().
class AcceptCall : public QWidget
{
        Q_OBJECT

public:
        // caller: MXID of the calling user; displayName may be empty or equal
        // to caller, in which case it is not shown separately.
        AcceptCall(const QString &caller,
                   const QString &displayName,
                   const QString &roomName,
                   const QString &avatarUrl,
                   QSharedPointer<UserSettings> settings,
                   QWidget *parent = nullptr);

signals:
        void accept(); // user accepted; audio source already configured
        void reject(); // user declined the call

private:
        QPushButton *acceptBtn_;
        QPushButton *rejectBtn_;
        // Device display names, index-aligned with the session's source list.
        std::vector<std::string> audioDevices_;
};
}

103
src/dialogs/PlaceCall.cpp Normal file
View file

@ -0,0 +1,103 @@
#include <QComboBox>
#include <QLabel>
#include <QPushButton>
#include <QString>
#include <QVBoxLayout>
#include "ChatPage.h"
#include "Config.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "dialogs/PlaceCall.h"
#include "ui/Avatar.h"
namespace dialogs {
// Outgoing-call confirmation: asks whether to call the peer, lets the user
// pick the audio input device, and emits voice() or cancel(). Closes itself
// immediately if WebRTC cannot be initialised or no audio sources exist.
PlaceCall::PlaceCall(const QString &callee,
                     const QString &displayName,
                     const QString &roomName,
                     const QString &avatarUrl,
                     QSharedPointer<UserSettings> settings,
                     QWidget *parent)
  : QWidget(parent)
{
        // Bail out early when calling is impossible; close() is a QWidget
        // slot ("emit" is a no-op macro here), and WA_DeleteOnClose below
        // destroys the widget.
        std::string errorMessage;
        if (!WebRTCSession::instance().init(&errorMessage)) {
                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                emit close();
                return;
        }
        audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
          settings->defaultAudioSource().toStdString());
        if (audioDevices_.empty()) {
                emit ChatPage::instance()->showNotification("No audio sources found.");
                emit close();
                return;
        }

        setAutoFillBackground(true);
        setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
        setWindowModality(Qt::WindowModal);
        setAttribute(Qt::WA_DeleteOnClose, true);

        auto layout = new QVBoxLayout(this);
        layout->setSpacing(conf::modals::WIDGET_SPACING);
        layout->setMargin(conf::modals::WIDGET_MARGIN);

        auto buttonLayout = new QHBoxLayout;
        buttonLayout->setSpacing(15);
        buttonLayout->setMargin(0);

        QFont f;
        f.setPointSizeF(f.pointSizeF());

        // Avatar falls back to the room's initial when no image is set.
        auto avatar = new Avatar(this, QFontMetrics(f).height() * 3);
        if (!avatarUrl.isEmpty())
                avatar->setImage(avatarUrl);
        else
                avatar->setLetter(utils::firstChar(roomName));

        const int iconSize = 18;
        voiceBtn_          = new QPushButton(tr("Voice"), this);
        voiceBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
        voiceBtn_->setIconSize(QSize(iconSize, iconSize));
        voiceBtn_->setDefault(true);
        cancelBtn_ = new QPushButton(tr("Cancel"), this);

        buttonLayout->addWidget(avatar);
        buttonLayout->addStretch();
        buttonLayout->addWidget(voiceBtn_);
        buttonLayout->addWidget(cancelBtn_);

        // Prefer the display name; fall back to the MXID.
        QString name  = displayName.isEmpty() ? callee : displayName;
        QLabel *label = new QLabel("Place a call to " + name + "?", this);

        // Audio input selector; index 0 is the user's saved default device
        // (see WebRTCSession::getAudioSourceNames).
        auto deviceLayout = new QHBoxLayout;
        auto audioLabel   = new QLabel(this);
        audioLabel->setPixmap(QIcon(":/icons/icons/ui/microphone-unmute.png")
                                .pixmap(QSize(iconSize * 1.2, iconSize * 1.2)));

        auto deviceList = new QComboBox(this);
        for (const auto &d : audioDevices_)
                deviceList->addItem(QString::fromStdString(d));

        deviceLayout->addStretch();
        deviceLayout->addWidget(audioLabel);
        deviceLayout->addWidget(deviceList);

        layout->addWidget(label);
        layout->addLayout(buttonLayout);
        layout->addLayout(deviceLayout);

        // Placing the call persists the chosen device as the new default.
        connect(voiceBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
                WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
                settings->setDefaultAudioSource(
                  QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
                emit voice();
                emit close();
        });
        connect(cancelBtn_, &QPushButton::clicked, this, [this]() {
                emit cancel();
                emit close();
        });
}
}

36
src/dialogs/PlaceCall.h Normal file
View file

@ -0,0 +1,36 @@
#pragma once

#include <string>
#include <vector>

#include <QSharedPointer>
#include <QWidget>

class QPushButton;
class QString;
class UserSettings;

namespace dialogs {

// Confirmation dialog shown before placing a voice call: lets the user pick
// an audio input device and either start the call or cancel.
class PlaceCall : public QWidget
{
        Q_OBJECT

public:
        PlaceCall(const QString &callee,
                  const QString &displayName,
                  const QString &roomName,
                  const QString &avatarUrl,
                  QSharedPointer<UserSettings> settings,
                  QWidget *parent = nullptr);

signals:
        // Emitted when the user confirms placing the voice call.
        void voice();
        // Emitted when the user dismisses the dialog without calling.
        void cancel();

private:
        QPushButton *voiceBtn_;
        QPushButton *cancelBtn_;
        // Audio source names reported by WebRTCSession, in combo-box order.
        std::vector<std::string> audioDevices_;
};
}

316
src/dialogs/UserProfile.cpp Normal file
View file

@ -0,0 +1,316 @@
#include <QHBoxLayout>
#include <QLabel>
#include <QListWidget>
#include <QMessageBox>
#include <QShortcut>
#include <QVBoxLayout>
#include "Cache.h"
#include "ChatPage.h"
#include "Logging.h"
#include "MatrixClient.h"
#include "Utils.h"
#include "dialogs/UserProfile.h"
#include "ui/Avatar.h"
#include "ui/FlatButton.h"
using namespace dialogs;

// Allow std::vector<DeviceInfo> to travel through queued signal/slot
// connections (registered at runtime in the UserProfile constructor).
Q_DECLARE_METATYPE(std::vector<DeviceInfo>)

// Layout constants for the profile dialog.
constexpr int BUTTON_SIZE       = 36;
constexpr int BUTTON_RADIUS     = BUTTON_SIZE / 2;
constexpr int WIDGET_MARGIN     = 20;
constexpr int TOP_WIDGET_MARGIN = 2 * WIDGET_MARGIN;
constexpr int WIDGET_SPACING    = 15;
constexpr int TEXT_SPACING      = 4;
constexpr int DEVICE_SPACING    = 5;
// List entry for a single device: bold device id on top, followed by the
// device's display name when one is set.
DeviceItem::DeviceItem(DeviceInfo device, QWidget *parent)
  : QWidget(parent)
  , info_(std::move(device))
{
        auto idLabel = new QLabel(info_.device_id, this);

        QFont boldFont;
        boldFont.setBold(true);
        idLabel->setFont(boldFont);

        auto column = new QVBoxLayout{this};
        column->setMargin(0);
        column->setSpacing(4);
        column->addWidget(idLabel);

        // Second row only when the device advertises a display name.
        if (!info_.display_name.isEmpty())
                column->addWidget(new QLabel(info_.display_name, this));
}
// Builds the (self-deleting) profile dialog: avatar, names, moderation
// buttons and the device list. The actual user data is filled in later
// via init().
UserProfile::UserProfile(QWidget *parent)
  : QWidget(parent)
{
        setAutoFillBackground(true);
        setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
        setAttribute(Qt::WA_DeleteOnClose, true);

        QIcon banIcon, kickIcon, ignoreIcon, startChatIcon;

        // Round icon button: ban the user from the current room.
        banIcon.addFile(":/icons/icons/ui/do-not-disturb-rounded-sign.png");
        banBtn_ = new FlatButton(this);
        banBtn_->setFixedSize(BUTTON_SIZE, BUTTON_SIZE);
        banBtn_->setCornerRadius(BUTTON_RADIUS);
        banBtn_->setIcon(banIcon);
        banBtn_->setIconSize(QSize(BUTTON_RADIUS, BUTTON_RADIUS));
        banBtn_->setToolTip(tr("Ban the user from the room"));

        // Ignore button is present but not wired up yet.
        ignoreIcon.addFile(":/icons/icons/ui/volume-off-indicator.png");
        ignoreBtn_ = new FlatButton(this);
        ignoreBtn_->setFixedSize(BUTTON_SIZE, BUTTON_SIZE);
        ignoreBtn_->setCornerRadius(BUTTON_RADIUS);
        ignoreBtn_->setIcon(ignoreIcon);
        ignoreBtn_->setIconSize(QSize(BUTTON_RADIUS, BUTTON_RADIUS));
        ignoreBtn_->setToolTip(tr("Ignore messages from this user"));
        ignoreBtn_->setDisabled(true); // Not used yet.

        // Kick the user from the current room.
        kickIcon.addFile(":/icons/icons/ui/round-remove-button.png");
        kickBtn_ = new FlatButton(this);
        kickBtn_->setFixedSize(BUTTON_SIZE, BUTTON_SIZE);
        kickBtn_->setCornerRadius(BUTTON_RADIUS);
        kickBtn_->setIcon(kickIcon);
        kickBtn_->setIconSize(QSize(BUTTON_RADIUS, BUTTON_RADIUS));
        kickBtn_->setToolTip(tr("Kick the user from the room"));

        // Start a direct chat with the user (after a confirmation prompt).
        startChatIcon.addFile(":/icons/icons/ui/black-bubble-speech.png");
        startChat_ = new FlatButton(this);
        startChat_->setFixedSize(BUTTON_SIZE, BUTTON_SIZE);
        startChat_->setCornerRadius(BUTTON_RADIUS);
        startChat_->setIcon(startChatIcon);
        startChat_->setIconSize(QSize(BUTTON_RADIUS, BUTTON_RADIUS));
        startChat_->setToolTip(tr("Start a conversation"));

        connect(startChat_, &QPushButton::clicked, this, [this]() {
                auto user_id = userIdLabel_->text();
                mtx::requests::CreateRoom req;
                req.preset     = mtx::requests::Preset::PrivateChat;
                req.visibility = mtx::requests::Visibility::Private;
                // Don't invite ourselves when opening a DM with our own account.
                if (utils::localUser() != user_id)
                        req.invite = {user_id.toStdString()};
                if (QMessageBox::question(
                      this,
                      tr("Confirm DM"),
                      tr("Do you really want to invite %1 (%2) to a direct chat?")
                        .arg(cache::displayName(roomId_, user_id))
                        .arg(user_id)) != QMessageBox::Yes)
                        return;
                emit ChatPage::instance()->createRoom(req);
        });
        connect(banBtn_, &QPushButton::clicked, this, [this] {
                ChatPage::instance()->banUser(userIdLabel_->text(), "");
        });
        connect(kickBtn_, &QPushButton::clicked, this, [this] {
                ChatPage::instance()->kickUser(userIdLabel_->text(), "");
        });

        // Button line
        auto btnLayout = new QHBoxLayout;
        btnLayout->addStretch(1);
        btnLayout->addWidget(startChat_);
        btnLayout->addWidget(ignoreBtn_);
        btnLayout->addWidget(kickBtn_);
        btnLayout->addWidget(banBtn_);
        btnLayout->addStretch(1);
        btnLayout->setSpacing(8);
        btnLayout->setMargin(0);

        // Placeholder avatar until init() provides the real one.
        avatar_ = new Avatar(this, 128);
        avatar_->setLetter("X");

        // Display name rendered at twice the default point size.
        QFont font;
        font.setPointSizeF(font.pointSizeF() * 2);

        userIdLabel_      = new QLabel(this);
        displayNameLabel_ = new QLabel(this);
        displayNameLabel_->setFont(font);

        auto textLayout = new QVBoxLayout;
        textLayout->addWidget(displayNameLabel_);
        textLayout->addWidget(userIdLabel_);
        textLayout->setAlignment(displayNameLabel_, Qt::AlignCenter | Qt::AlignTop);
        textLayout->setAlignment(userIdLabel_, Qt::AlignCenter | Qt::AlignTop);
        textLayout->setSpacing(TEXT_SPACING);
        textLayout->setMargin(0);

        // Device list, hidden until updateDeviceList() receives data.
        devices_ = new QListWidget{this};
        devices_->setFrameStyle(QFrame::NoFrame);
        devices_->setSelectionMode(QAbstractItemView::NoSelection);
        devices_->setAttribute(Qt::WA_MacShowFocusRect, 0);
        devices_->setSpacing(DEVICE_SPACING);

        QFont descriptionLabelFont;
        descriptionLabelFont.setWeight(65);

        devicesLabel_ = new QLabel(tr("Devices").toUpper(), this);
        devicesLabel_->setFont(descriptionLabelFont);
        devicesLabel_->hide();
        devicesLabel_->setFixedSize(devicesLabel_->sizeHint());

        auto okBtn       = new QPushButton("OK", this);
        auto closeLayout = new QHBoxLayout();
        closeLayout->setSpacing(15);
        closeLayout->addStretch(1);
        closeLayout->addWidget(okBtn);

        auto vlayout = new QVBoxLayout{this};
        vlayout->addWidget(avatar_, 0, Qt::AlignCenter | Qt::AlignTop);
        vlayout->addLayout(textLayout);
        vlayout->addLayout(btnLayout);
        vlayout->addWidget(devicesLabel_, 0, Qt::AlignLeft);
        vlayout->addWidget(devices_, 1);
        vlayout->addLayout(closeLayout);

        // (Removed an unused local QFont `largeFont` that was configured but
        // never applied to any widget.)

        setMinimumWidth(
          std::max(devices_->sizeHint().width() + 4 * WIDGET_MARGIN, conf::window::minModalWidth));
        setSizePolicy(QSizePolicy::Minimum, QSizePolicy::Minimum);

        vlayout->setSpacing(WIDGET_SPACING);
        vlayout->setContentsMargins(WIDGET_MARGIN, TOP_WIDGET_MARGIN, WIDGET_MARGIN, WIDGET_MARGIN);

        // Needed for the queued Proxy::done -> updateDeviceList connection.
        qRegisterMetaType<std::vector<DeviceInfo>>();

        auto closeShortcut = new QShortcut(QKeySequence(QKeySequence::Cancel), this);
        connect(closeShortcut, &QShortcut::activated, this, &UserProfile::close);
        connect(okBtn, &QPushButton::clicked, this, &UserProfile::close);
}
// Restore the dialog to its pristine state before showing a new user:
// placeholder avatar, ignore button visible, device section emptied and
// hidden until fresh device info arrives.
void
UserProfile::resetToDefaults()
{
        // Placeholder letter until init() loads the real avatar.
        avatar_->setLetter("X");

        ignoreBtn_->show();

        // Drop previously listed devices and hide the section.
        devices_->clear();
        devices_->hide();
        devicesLabel_->hide();
}
// Populate the dialog for the given user in the given room: name/avatar,
// moderation button visibility based on our power level, and an async
// /keys/query request whose result lands in updateDeviceList().
void
UserProfile::init(const QString &userId, const QString &roomId)
{
        resetToDefaults();

        this->roomId_    = roomId;
        auto displayName = cache::displayName(roomId, userId);

        userIdLabel_->setText(userId);
        displayNameLabel_->setText(displayName);
        avatar_->setLetter(utils::firstChar(displayName));
        avatar_->setImage(roomId, userId);

        auto localUser = utils::localUser();

        // Only show kick/ban if we have enough power to send m.room.member
        // events in this room. On a DB error we leave the buttons as-is.
        try {
                bool hasMemberRights =
                  cache::hasEnoughPowerLevel({mtx::events::EventType::RoomMember},
                                             roomId.toStdString(),
                                             localUser.toStdString());
                if (!hasMemberRights) {
                        kickBtn_->hide();
                        banBtn_->hide();
                } else {
                        kickBtn_->show();
                        banBtn_->show();
                }
        } catch (const lmdb::error &e) {
                nhlog::db()->warn("lmdb error: {}", e.what());
        }

        // Viewing our own profile: no moderation actions against ourselves.
        if (localUser == userId) {
                // TODO: click on display name & avatar to change.
                kickBtn_->hide();
                banBtn_->hide();
                ignoreBtn_->hide();
        }

        // Query this user's devices from the homeserver.
        mtx::requests::QueryKeys req;
        req.device_keys[userId.toStdString()] = {};

        // A proxy object is used to emit the signal instead of the original object
        // which might be destroyed by the time the http call finishes.
        auto proxy = std::make_shared<Proxy>();
        QObject::connect(proxy.get(), &Proxy::done, this, &UserProfile::updateDeviceList);

        http::client()->query_keys(
          req,
          [user_id = userId.toStdString(), proxy = std::move(proxy)](
            const mtx::responses::QueryKeys &res, mtx::http::RequestErr err) {
                  if (err) {
                          nhlog::net()->warn("failed to query device keys: {} {}",
                                             err->matrix_error.error,
                                             static_cast<int>(err->status_code));
                          // TODO: Notify the UI.
                          return;
                  }

                  if (res.device_keys.empty() ||
                      (res.device_keys.find(user_id) == res.device_keys.end())) {
                          nhlog::net()->warn("no devices retrieved {}", user_id);
                          return;
                  }

                  auto devices = res.device_keys.at(user_id);

                  // Convert the response into lightweight DeviceInfo entries.
                  std::vector<DeviceInfo> deviceInfo;
                  for (const auto &d : devices) {
                          auto device = d.second;

                          // TODO: Verify signatures and ignore those that don't pass.
                          deviceInfo.emplace_back(DeviceInfo{
                            QString::fromStdString(d.first),
                            QString::fromStdString(device.unsigned_info.device_display_name)});
                  }

                  // Sort by device id (descending) for a stable listing order.
                  std::sort(deviceInfo.begin(),
                            deviceInfo.end(),
                            [](const DeviceInfo &a, const DeviceInfo &b) {
                                    return a.device_id > b.device_id;
                            });

                  if (!deviceInfo.empty())
                          emit proxy->done(QString::fromStdString(user_id), deviceInfo);
          });
}
// Populate the device list once the /keys/query response arrives. Responses
// for a user other than the one currently shown are ignored.
void
UserProfile::updateDeviceList(const QString &user_id, const std::vector<DeviceInfo> &devices)
{
        // The dialog may have been re-used for another user while the
        // request was in flight.
        if (user_id != userIdLabel_->text())
                return;

        for (const auto &device : devices) {
                auto widget = new DeviceItem(device, this);

                auto entry = new QListWidgetItem;
                entry->setSizeHint(widget->minimumSizeHint());
                entry->setFlags(Qt::NoItemFlags);
                entry->setTextAlignment(Qt::AlignCenter);

                devices_->insertItem(devices_->count() - 1, entry);
                devices_->setItemWidget(entry, widget);
        }

        devicesLabel_->show();
        devices_->show();
        adjustSize();
}

70
src/dialogs/UserProfile.h Normal file
View file

@ -0,0 +1,70 @@
#pragma once
#include <QString>
#include <QWidget>
class Avatar;
class FlatButton;
class QLabel;
class QListWidget;
class Toggle;
struct DeviceInfo
{
QString device_id;
QString display_name;
};
class Proxy : public QObject
{
Q_OBJECT
signals:
void done(const QString &user_id, const std::vector<DeviceInfo> &devices);
};
namespace dialogs {
class DeviceItem : public QWidget
{
Q_OBJECT
public:
explicit DeviceItem(DeviceInfo device, QWidget *parent);
private:
DeviceInfo info_;
// Toggle *verifyToggle_;
};
class UserProfile : public QWidget
{
Q_OBJECT
public:
explicit UserProfile(QWidget *parent = nullptr);
void init(const QString &userId, const QString &roomId);
private slots:
void updateDeviceList(const QString &user_id, const std::vector<DeviceInfo> &devices);
private:
void resetToDefaults();
Avatar *avatar_;
QString roomId_;
QLabel *userIdLabel_;
QLabel *displayNameLabel_;
FlatButton *banBtn_;
FlatButton *kickBtn_;
FlatButton *ignoreBtn_;
FlatButton *startChat_;
QLabel *devicesLabel_;
QListWidget *devices_;
};
} // dialogs

View file

@ -185,6 +185,26 @@ EventStore::addPending(mtx::events::collections::TimelineEvents event)
emit processPending();
}
// Wipe this room's cached timeline and reset the in-memory pagination range
// to whatever the cache reports afterwards, or to "empty" sentinels.
// NOTE(review): beginResetModel/endResetModel are emitted as signals here —
// presumably forwarded to the owning model; verify the connection.
void
EventStore::clearTimeline()
{
        emit beginResetModel();

        cache::client()->clearTimeline(room_id_);

        auto range = cache::client()->getTimelineRange(room_id_);
        if (range) {
                nhlog::db()->info("Range {} {}", range->last, range->first);
                this->last  = range->last;
                this->first = range->first;
        } else {
                // No events left: mark both ends with max() sentinels.
                this->first = std::numeric_limits<uint64_t>::max();
                this->last  = std::numeric_limits<uint64_t>::max();
        }

        nhlog::ui()->info("Range {} {}", this->last, this->first);

        emit endResetModel();
}
void
EventStore::handleSync(const mtx::responses::Timeline &events)
{
@ -448,36 +468,89 @@ EventStore::decryptEvent(const IdIndex &idx,
index.session_id = e.content.session_id;
index.sender_key = e.content.sender_key;
mtx::events::RoomEvent<mtx::events::msg::Notice> dummy;
dummy.origin_server_ts = e.origin_server_ts;
dummy.event_id = e.event_id;
dummy.sender = e.sender;
dummy.content.body =
tr("-- Encrypted Event (No keys found for decryption) --",
"Placeholder, when the message was not decrypted yet or can't be decrypted.")
.toStdString();
auto asCacheEntry = [&idx](mtx::events::collections::TimelineEvents &&event) {
auto event_ptr = new mtx::events::collections::TimelineEvents(std::move(event));
decryptedEvents_.insert(idx, event_ptr);
return event_ptr;
};
try {
if (!cache::client()->inboundMegolmSessionExists(index)) {
auto decryptionResult = olm::decryptEvent(index, e);
mtx::events::RoomEvent<mtx::events::msg::Notice> dummy;
dummy.origin_server_ts = e.origin_server_ts;
dummy.event_id = e.event_id;
dummy.sender = e.sender;
if (decryptionResult.error) {
switch (*decryptionResult.error) {
case olm::DecryptionErrorCode::MissingSession:
dummy.content.body =
tr("-- Encrypted Event (No keys found for decryption) --",
"Placeholder, when the message was not decrypted yet or can't be "
"decrypted.")
.toStdString();
nhlog::crypto()->info("Could not find inbound megolm session ({}, {}, {})",
index.room_id,
index.session_id,
e.sender);
// TODO: request megolm session_id & session_key from the sender.
return asCacheEntry(std::move(dummy));
}
} catch (const lmdb::error &e) {
nhlog::db()->critical("failed to check megolm session's existence: {}", e.what());
dummy.content.body = tr("-- Decryption Error (failed to communicate with DB) --",
"Placeholder, when the message can't be decrypted, because "
"the DB access failed when trying to lookup the session.")
// TODO: Check if this actually works and look in key backup
olm::send_key_request_for(room_id_, e);
break;
case olm::DecryptionErrorCode::DbError:
nhlog::db()->critical(
"failed to retrieve megolm session with index ({}, {}, {})",
index.room_id,
index.session_id,
index.sender_key,
decryptionResult.error_message.value_or(""));
dummy.content.body =
tr("-- Decryption Error (failed to retrieve megolm keys from db) --",
"Placeholder, when the message can't be decrypted, because the DB "
"access "
"failed.")
.toStdString();
break;
case olm::DecryptionErrorCode::DecryptionFailed:
nhlog::crypto()->critical(
"failed to decrypt message with index ({}, {}, {}): {}",
index.room_id,
index.session_id,
index.sender_key,
decryptionResult.error_message.value_or(""));
dummy.content.body =
tr("-- Decryption Error (%1) --",
"Placeholder, when the message can't be decrypted. In this case, the "
"Olm "
"decrytion returned an error, which is passed as %1.")
.arg(
QString::fromStdString(decryptionResult.error_message.value_or("")))
.toStdString();
break;
case olm::DecryptionErrorCode::ParsingFailed:
dummy.content.body =
tr("-- Encrypted Event (Unknown event type) --",
"Placeholder, when the message was decrypted, but we couldn't parse "
"it, because "
"Nheko/mtxclient don't support that event type yet.")
.toStdString();
break;
case olm::DecryptionErrorCode::ReplayAttack:
nhlog::crypto()->critical(
"Reply attack while decryptiong event {} in room {} from {}!",
e.event_id,
room_id_,
index.sender_key);
dummy.content.body =
tr("-- Reply attack! This message index was reused! --").toStdString();
break;
case olm::DecryptionErrorCode::UnknownFingerprint:
// TODO: don't fail, just show in UI.
nhlog::crypto()->critical("Message by unverified fingerprint {}",
index.sender_key);
dummy.content.body =
tr("-- Message by unverified device! --").toStdString();
break;
}
return asCacheEntry(std::move(dummy));
}
@ -547,6 +620,11 @@ EventStore::decryptEvent(const IdIndex &idx,
"Nheko/mtxclient don't support that event type yet.")
.toStdString();
return asCacheEntry(std::move(dummy));
auto encInfo = mtx::accessors::file(decryptionResult.event.value());
if (encInfo)
emit newEncryptedImage(encInfo.value());
return asCacheEntry(std::move(decryptionResult.event.value()));
}
mtx::events::collections::TimelineEvents *
@ -608,6 +686,12 @@ EventStore::fetchMore()
http::client()->messages(
opts, [this, opts](const mtx::responses::Messages &res, mtx::http::RequestErr err) {
if (cache::client()->previousBatchToken(room_id_) != opts.from) {
nhlog::net()->warn("Cache cleared while fetching more messages, dropping "
"/messages response");
emit fetchedMore();
return;
}
if (err) {
nhlog::net()->error("failed to call /messages ({}): {} - {} - {}",
opts.room_id,

View file

@ -104,6 +104,7 @@ signals:
public slots:
void addPending(mtx::events::collections::TimelineEvents event);
void clearTimeline();
private:
mtx::events::collections::TimelineEvents *decryptEvent(

View file

@ -160,6 +160,26 @@ struct RoomEventType
{
return qml_mtx_events::EventType::Redacted;
}
qml_mtx_events::EventType operator()(
const mtx::events::Event<mtx::events::msg::CallInvite> &)
{
return qml_mtx_events::EventType::CallInvite;
}
qml_mtx_events::EventType operator()(
const mtx::events::Event<mtx::events::msg::CallAnswer> &)
{
return qml_mtx_events::EventType::CallAnswer;
}
qml_mtx_events::EventType operator()(
const mtx::events::Event<mtx::events::msg::CallHangUp> &)
{
return qml_mtx_events::EventType::CallHangUp;
}
qml_mtx_events::EventType operator()(
const mtx::events::Event<mtx::events::msg::CallCandidates> &)
{
return qml_mtx_events::EventType::CallCandidates;
}
// ::EventType::Type operator()(const Event<mtx::events::msg::Location> &e) { return
// ::EventType::LocationMessage; }
};
@ -271,6 +291,7 @@ TimelineModel::roleNames() const
{RoomId, "roomId"},
{RoomName, "roomName"},
{RoomTopic, "roomTopic"},
{CallType, "callType"},
{Dump, "dump"},
};
}
@ -422,6 +443,8 @@ TimelineModel::data(const mtx::events::collections::TimelineEvents &event, int r
return QVariant(QString::fromStdString(room_name(event)));
case RoomTopic:
return QVariant(QString::fromStdString(room_topic(event)));
case CallType:
return QVariant(QString::fromStdString(call_type(event)));
case Dump: {
QVariantMap m;
auto names = roleNames();
@ -452,6 +475,7 @@ TimelineModel::data(const mtx::events::collections::TimelineEvents &event, int r
m.insert(names[ReplyTo], data(event, static_cast<int>(ReplyTo)));
m.insert(names[RoomName], data(event, static_cast<int>(RoomName)));
m.insert(names[RoomTopic], data(event, static_cast<int>(RoomTopic)));
m.insert(names[CallType], data(event, static_cast<int>(CallType)));
return QVariant(m);
}
@ -548,7 +572,31 @@ TimelineModel::addEvents(const mtx::responses::Timeline &timeline)
events.handleSync(timeline);
if (!timeline.events.empty())
using namespace mtx::events;
for (auto e : timeline.events) {
if (auto encryptedEvent = std::get_if<EncryptedEvent<msg::Encrypted>>(&e)) {
MegolmSessionIndex index;
index.room_id = room_id_.toStdString();
index.session_id = encryptedEvent->content.session_id;
index.sender_key = encryptedEvent->content.sender_key;
auto result = olm::decryptEvent(index, *encryptedEvent);
if (result.event)
e = result.event.value();
}
if (std::holds_alternative<RoomEvent<msg::CallCandidates>>(e) ||
std::holds_alternative<RoomEvent<msg::CallInvite>>(e) ||
std::holds_alternative<RoomEvent<msg::CallAnswer>>(e) ||
std::holds_alternative<RoomEvent<msg::CallHangUp>>(e))
std::visit(
[this](auto &event) {
event.room_id = room_id_.toStdString();
if (event.sender != http::client()->user_id().to_string())
emit newCallEvent(event);
},
e);
}
updateLastMessage();
}
@ -574,6 +622,23 @@ isMessage(const mtx::events::EncryptedEvent<T> &)
return true;
}
// Call signalling events count as "messages": rooms with recent call
// activity should be treated like rooms with recent text messages
// (e.g. for last-message/ordering purposes — see the other isMessage
// overloads in this file).
auto
isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &)
{
        return true;
}

auto
isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &)
{
        return true;
}

auto
isMessage(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &)
{
        return true;
}
// Workaround. We also want to see a room at the top, if we just joined it
auto
isYourJoin(const mtx::events::StateEvent<mtx::events::state::Member> &e)
@ -806,15 +871,16 @@ TimelineModel::markEventsAsRead(const std::vector<QString> &event_ids)
template<typename T>
void
TimelineModel::sendEncryptedMessage(mtx::events::RoomEvent<T> msg)
TimelineModel::sendEncryptedMessage(mtx::events::RoomEvent<T> msg, mtx::events::EventType eventType)
{
const auto room_id = room_id_.toStdString();
using namespace mtx::events;
using namespace mtx::identifiers;
json doc = {
{"type", to_string(msg.type)}, {"content", json(msg.content)}, {"room_id", room_id}};
json doc = {{"type", mtx::events::to_string(eventType)},
{"content", msg.content},
{"room_id", room_id}};
try {
// Check if we have already an outbound megolm session then we can use.
@ -1093,60 +1159,34 @@ struct SendMessageVisitor
: model_(model)
{}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationRequest> &msg)
{
model_->sendEncryptedMessage(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationReady> &msg)
{
model_->sendEncryptedMessage(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationStart> &msg)
{
model_->sendEncryptedMessage(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationAccept> &msg)
{
model_->sendEncryptedMessage(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationMac> &msg)
{
model_->sendEncryptedMessage(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationKey> &msg)
{
model_->sendEncryptedMessage(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationDone> &msg)
{
model_->sendEncryptedMessage(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationCancel> &msg)
{
model_->sendEncryptedMessage(msg);
}
// Do-nothing operator for all unhandled events
template<typename T>
void operator()(const mtx::events::Event<T> &)
{}
// Operator for m.room.message events that contain a msgtype in their content
template<typename T,
std::enable_if_t<std::is_same<decltype(T::msgtype), std::string>::value, int> = 0>
void operator()(const mtx::events::RoomEvent<T> &msg)
template<typename T, mtx::events::EventType Event>
void sendRoomEvent(mtx::events::RoomEvent<T> msg)
{
if (cache::isRoomEncrypted(model_->room_id_.toStdString())) {
auto encInfo = mtx::accessors::file(msg);
if (encInfo)
emit model_->newEncryptedImage(encInfo.value());
model_->sendEncryptedMessage(msg);
model_->sendEncryptedMessage(msg, Event);
} else {
msg.type = Event;
emit model_->addPendingMessageToStore(msg);
}
}
// Do-nothing operator for all unhandled events
template<typename T>
void operator()(const mtx::events::Event<T> &)
{}
// Operator for m.room.message events that contain a msgtype in their content
template<typename T,
std::enable_if_t<std::is_same<decltype(T::msgtype), std::string>::value, int> = 0>
void operator()(mtx::events::RoomEvent<T> msg)
{
sendRoomEvent<T, mtx::events::EventType::RoomMessage>(msg);
}
// Special operator for reactions, which are a type of m.room.message, but need to be
// handled distinctly for their differences from normal room messages. Specifically,
// reactions need to have the relation outside of ciphertext, or synapse / the homeserver
@ -1158,6 +1198,78 @@ struct SendMessageVisitor
emit model_->addPendingMessageToStore(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallInvite> &event)
{
sendRoomEvent<mtx::events::msg::CallInvite, mtx::events::EventType::CallInvite>(
event);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates> &event)
{
sendRoomEvent<mtx::events::msg::CallCandidates,
mtx::events::EventType::CallCandidates>(event);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &event)
{
sendRoomEvent<mtx::events::msg::CallAnswer, mtx::events::EventType::CallAnswer>(
event);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &event)
{
sendRoomEvent<mtx::events::msg::CallHangUp, mtx::events::EventType::CallHangUp>(
event);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationRequest> &msg)
{
sendRoomEvent<mtx::events::msg::KeyVerificationRequest,
mtx::events::EventType::RoomMessage>(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationReady> &msg)
{
sendRoomEvent<mtx::events::msg::KeyVerificationReady,
mtx::events::EventType::KeyVerificationReady>(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationStart> &msg)
{
sendRoomEvent<mtx::events::msg::KeyVerificationStart,
mtx::events::EventType::KeyVerificationStart>(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationAccept> &msg)
{
sendRoomEvent<mtx::events::msg::KeyVerificationAccept,
mtx::events::EventType::KeyVerificationAccept>(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationMac> &msg)
{
sendRoomEvent<mtx::events::msg::KeyVerificationMac,
mtx::events::EventType::KeyVerificationMac>(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationKey> &msg)
{
sendRoomEvent<mtx::events::msg::KeyVerificationKey,
mtx::events::EventType::KeyVerificationKey>(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationDone> &msg)
{
sendRoomEvent<mtx::events::msg::KeyVerificationDone,
mtx::events::EventType::KeyVerificationDone>(msg);
}
void operator()(const mtx::events::RoomEvent<mtx::events::msg::KeyVerificationCancel> &msg)
{
sendRoomEvent<mtx::events::msg::KeyVerificationCancel,
mtx::events::EventType::KeyVerificationCancel>(msg);
}
TimelineModel *model_;
};
@ -1173,39 +1285,6 @@ TimelineModel::addPendingMessage(mtx::events::collections::TimelineEvents event)
},
event);
if (std::get_if<mtx::events::RoomEvent<mtx::events::msg::KeyVerificationReady>>(&event)) {
std::visit(
[](auto &msg) { msg.type = mtx::events::EventType::KeyVerificationReady; },
event);
}
if (std::get_if<mtx::events::RoomEvent<mtx::events::msg::KeyVerificationStart>>(&event)) {
std::visit(
[](auto &msg) { msg.type = mtx::events::EventType::KeyVerificationStart; },
event);
}
if (std::get_if<mtx::events::RoomEvent<mtx::events::msg::KeyVerificationKey>>(&event)) {
std::visit([](auto &msg) { msg.type = mtx::events::EventType::KeyVerificationKey; },
event);
}
if (std::get_if<mtx::events::RoomEvent<mtx::events::msg::KeyVerificationMac>>(&event)) {
std::visit([](auto &msg) { msg.type = mtx::events::EventType::KeyVerificationMac; },
event);
}
if (std::get_if<mtx::events::RoomEvent<mtx::events::msg::KeyVerificationDone>>(&event)) {
std::visit(
[](auto &msg) { msg.type = mtx::events::EventType::KeyVerificationDone; }, event);
}
if (std::get_if<mtx::events::RoomEvent<mtx::events::msg::KeyVerificationCancel>>(&event)) {
std::visit(
[](auto &msg) { msg.type = mtx::events::EventType::KeyVerificationCancel; },
event);
}
if (std::get_if<mtx::events::RoomEvent<mtx::events::msg::KeyVerificationAccept>>(&event)) {
std::visit(
[](auto &msg) { msg.type = mtx::events::EventType::KeyVerificationAccept; },
event);
}
std::visit(SendMessageVisitor{this}, event);
}

View file

@ -37,6 +37,14 @@ enum EventType
Aliases,
/// m.room.avatar
Avatar,
/// m.call.invite
CallInvite,
/// m.call.answer
CallAnswer,
/// m.call.hangup
CallHangUp,
/// m.call.candidates
CallCandidates,
/// m.room.canonical_alias
CanonicalAlias,
/// m.room.create
@ -173,6 +181,7 @@ public:
RoomId,
RoomName,
RoomTopic,
CallType,
Dump,
};
@ -218,7 +227,7 @@ public:
void updateLastMessage();
void addEvents(const mtx::responses::Timeline &events);
template<class T>
void sendMessage(const T &msg);
void sendMessageEvent(const T &content, mtx::events::EventType eventType);
RelatedInfo relatedInfo(QString id);
public slots:
@ -251,6 +260,7 @@ public slots:
}
}
void setDecryptDescription(bool decrypt) { decryptDescription = decrypt; }
void clearTimeline() { events.clearTimeline(); }
private slots:
void addPendingMessage(mtx::events::collections::TimelineEvents event);
@ -264,6 +274,7 @@ signals:
void typingUsersChanged(std::vector<QString> users);
void replyChanged(QString reply);
void paginationInProgressChanged(const bool);
void newCallEvent(const mtx::events::collections::TimelineEvents &event);
void openProfile(UserProfile *profile);
@ -273,7 +284,7 @@ signals:
private:
template<typename T>
void sendEncryptedMessage(mtx::events::RoomEvent<T> msg);
void sendEncryptedMessage(mtx::events::RoomEvent<T> msg, mtx::events::EventType eventType);
void handleClaimedKeys(std::shared_ptr<StateKeeper> keeper,
const std::map<std::string, std::string> &room_key,
const std::map<std::string, DevicePublicKeys> &pks,
@ -304,9 +315,10 @@ private:
template<class T>
void
TimelineModel::sendMessage(const T &msg)
TimelineModel::sendMessageEvent(const T &content, mtx::events::EventType eventType)
{
mtx::events::RoomEvent<T> msgCopy = {};
msgCopy.content = msg;
msgCopy.content = content;
msgCopy.type = eventType;
emit newMessageToSend(msgCopy);
}

View file

@ -1,11 +1,14 @@
#include "TimelineViewManager.h"
#include <QDesktopServices>
#include <QMetaType>
#include <QPalette>
#include <QQmlContext>
#include <QQmlEngine>
#include <QString>
#include "BlurhashProvider.h"
#include "CallManager.h"
#include "ChatPage.h"
#include "ColorImageProvider.h"
#include "DelegateChooser.h"
@ -97,10 +100,13 @@ TimelineViewManager::userStatus(QString id) const
return QString::fromStdString(cache::statusMessage(id.toStdString()));
}
TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettings, QWidget *parent)
TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettings,
CallManager *callManager,
QWidget *parent)
: imgProvider(new MxcImageProvider())
, colorImgProvider(new ColorImageProvider())
, blurhashProvider(new BlurhashProvider())
, callManager_(callManager)
, settings(userSettings)
{
qRegisterMetaType<mtx::events::msg::KeyVerificationAccept>();
@ -285,6 +291,10 @@ TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettin
}
}
});
connect(dynamic_cast<ChatPage *>(parent), &ChatPage::loggedOut, this, [this]() {
isInitialSync_ = true;
emit initialSyncChanged(true);
});
}
void
@ -294,7 +304,17 @@ TimelineViewManager::sync(const mtx::responses::Rooms &rooms)
// addRoom will only add the room, if it doesn't exist
addRoom(QString::fromStdString(room_id));
const auto &room_model = models.value(QString::fromStdString(room_id));
if (!isInitialSync_)
connect(room_model.data(),
&TimelineModel::newCallEvent,
callManager_,
&CallManager::syncEvent);
room_model->addEvents(room.timeline);
if (!isInitialSync_)
disconnect(room_model.data(),
&TimelineModel::newCallEvent,
callManager_,
&CallManager::syncEvent);
if (ChatPage::instance()->userSettings()->typingNotifications()) {
std::vector<QString> typing;
@ -371,6 +391,12 @@ TimelineViewManager::openImageOverlay(QString mxcUrl, QString eventId) const
});
}
// Open the given link with the system's default URL handler.
// NOTE(review): `link` is converted to QUrl implicitly; links typed without
// a scheme may not resolve as intended — consider QUrl::fromUserInput.
void
TimelineViewManager::openLink(QString link) const
{
        QDesktopServices::openUrl(link);
}
void
TimelineViewManager::updateReadReceipts(const QString &room_id,
const std::vector<QString> &event_ids)
@ -440,7 +466,7 @@ TimelineViewManager::queueTextMessage(const QString &msg)
timeline_->resetReply();
}
timeline_->sendMessage(text);
timeline_->sendMessageEvent(text, mtx::events::EventType::RoomMessage);
}
void
@ -462,7 +488,7 @@ TimelineViewManager::queueEmoteMessage(const QString &msg)
}
if (timeline_)
timeline_->sendMessage(emote);
timeline_->sendMessageEvent(emote, mtx::events::EventType::RoomMessage);
}
void
@ -491,7 +517,7 @@ TimelineViewManager::queueReactionMessage(const QString &reactedEvent, const QSt
reaction.relates_to.event_id = reactedEvent.toStdString();
reaction.relates_to.key = reactionKey.toStdString();
timeline_->sendMessage(reaction);
timeline_->sendMessageEvent(reaction, mtx::events::EventType::Reaction);
// Otherwise, we have previously reacted and the reaction should be redacted
} else {
timeline_->redactEvent(selfReactedEvent);
@ -527,7 +553,7 @@ TimelineViewManager::queueImageMessage(const QString &roomid,
model->resetReply();
}
model->sendMessage(image);
model->sendMessageEvent(image, mtx::events::EventType::RoomMessage);
}
void
@ -555,7 +581,7 @@ TimelineViewManager::queueFileMessage(
model->resetReply();
}
model->sendMessage(file);
model->sendMessageEvent(file, mtx::events::EventType::RoomMessage);
}
void
@ -583,7 +609,7 @@ TimelineViewManager::queueAudioMessage(const QString &roomid,
model->resetReply();
}
model->sendMessage(audio);
model->sendMessageEvent(audio, mtx::events::EventType::RoomMessage);
}
void
@ -610,5 +636,34 @@ TimelineViewManager::queueVideoMessage(const QString &roomid,
model->resetReply();
}
model->sendMessage(video);
model->sendMessageEvent(video, mtx::events::EventType::RoomMessage);
}
//! Send an m.call.invite event to @p roomid.
//! No-op if the room is unknown to the view manager.
void
TimelineViewManager::queueCallMessage(const QString &roomid,
                                      const mtx::events::msg::CallInvite &callInvite)
{
        // QHash::value() returns a default-constructed (null) QSharedPointer
        // for an absent key; dereferencing it unconditionally would crash.
        if (auto model = models.value(roomid))
                model->sendMessageEvent(callInvite, mtx::events::EventType::CallInvite);
}
//! Send an m.call.candidates event to @p roomid.
//! No-op if the room is unknown to the view manager.
void
TimelineViewManager::queueCallMessage(const QString &roomid,
                                      const mtx::events::msg::CallCandidates &callCandidates)
{
        // QHash::value() returns a default-constructed (null) QSharedPointer
        // for an absent key; dereferencing it unconditionally would crash.
        if (auto model = models.value(roomid))
                model->sendMessageEvent(callCandidates,
                                        mtx::events::EventType::CallCandidates);
}
//! Send an m.call.answer event to @p roomid.
//! No-op if the room is unknown to the view manager.
void
TimelineViewManager::queueCallMessage(const QString &roomid,
                                      const mtx::events::msg::CallAnswer &callAnswer)
{
        // QHash::value() returns a default-constructed (null) QSharedPointer
        // for an absent key; dereferencing it unconditionally would crash.
        if (auto model = models.value(roomid))
                model->sendMessageEvent(callAnswer, mtx::events::EventType::CallAnswer);
}
//! Send an m.call.hangup event to @p roomid.
//! No-op if the room is unknown to the view manager.
void
TimelineViewManager::queueCallMessage(const QString &roomid,
                                      const mtx::events::msg::CallHangUp &callHangUp)
{
        // QHash::value() returns a default-constructed (null) QSharedPointer
        // for an absent key; dereferencing it unconditionally would crash.
        if (auto model = models.value(roomid))
                model->sendMessageEvent(callHangUp, mtx::events::EventType::CallHangUp);
}

View file

@ -19,6 +19,7 @@
class MxcImageProvider;
class BlurhashProvider;
class CallManager;
class ColorImageProvider;
class UserSettings;
@ -46,7 +47,9 @@ class TimelineViewManager : public QObject
bool isInitialSync MEMBER isInitialSync_ READ isInitialSync NOTIFY initialSyncChanged)
public:
TimelineViewManager(QSharedPointer<UserSettings> userSettings, QWidget *parent = nullptr);
TimelineViewManager(QSharedPointer<UserSettings> userSettings,
CallManager *callManager,
QWidget *parent = nullptr);
QWidget *getWidget() const { return container; }
void sync(const mtx::responses::Rooms &rooms);
@ -62,6 +65,8 @@ public:
Q_INVOKABLE QString userPresence(QString id) const;
Q_INVOKABLE QString userStatus(QString id) const;
Q_INVOKABLE void openLink(QString link) const;
signals:
void clearRoomMessageCount(QString roomid);
void updateRoomsLastMessage(QString roomid, const DescInfo &info);
@ -110,8 +115,19 @@ public slots:
const QString &url,
const QString &mime,
uint64_t dsize);
void queueCallMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
void queueCallMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
void queueCallMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
void queueCallMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
void updateEncryptedDescriptions();
//! Drop all loaded events from the currently displayed room's timeline.
//! Does nothing when no room timeline is active.
void clearCurrentRoomTimeline()
{
        if (timeline_ != nullptr) {
                timeline_->clearTimeline();
        }
}
private:
#ifdef USE_QUICK_VIEW
QQuickView *view;
@ -126,6 +142,7 @@ private:
QHash<QString, QSharedPointer<TimelineModel>> models;
TimelineModel *timeline_ = nullptr;
CallManager *callManager_ = nullptr;
bool isInitialSync_ = true;