Support video calls

trilene 2020-10-27 13:14:06 -04:00
parent 3499abd99a
commit d1f3a3ef40
18 changed files with 1171 additions and 447 deletions

icons/ui/video-call.png: new binary file (image, 353 B) not shown.

ActiveCallBar.qml

@@ -10,6 +10,12 @@ Rectangle {
     color: "#2ECC71"
     implicitHeight: rowLayout.height + 8
 
+    MouseArea {
+        anchors.fill: parent
+        onClicked: if (TimelineManager.onVideoCall)
+            stackLayout.currentIndex = stackLayout.currentIndex ? 0 : 1;
+    }
+
     RowLayout {
         id: rowLayout
@@ -33,7 +39,8 @@ Rectangle {
         Image {
             Layout.preferredWidth: 24
             Layout.preferredHeight: 24
-            source: "qrc:/icons/icons/ui/place-call.png"
+            source: TimelineManager.onVideoCall ?
+                "qrc:/icons/icons/ui/video-call.png" : "qrc:/icons/icons/ui/place-call.png"
         }
 
         Label {
@@ -58,9 +65,12 @@ Rectangle {
                 callStateLabel.text = "00:00";
                 var d = new Date();
                 callTimer.startTime = Math.floor(d.getTime() / 1000);
+                if (TimelineManager.onVideoCall)
+                    stackLayout.currentIndex = 1;
                 break;
             case WebRTCState.DISCONNECTED:
                 callStateLabel.text = "";
+                stackLayout.currentIndex = 0;
             }
         }

TimelineView.qml

@@ -4,7 +4,7 @@ import "./emoji"
 import QtGraphicalEffects 1.0
 import QtQuick 2.9
 import QtQuick.Controls 2.3
-import QtQuick.Layouts 1.2
+import QtQuick.Layouts 1.3
 import QtQuick.Window 2.2
 import im.nheko 1.0
 import im.nheko.EmojiModel 1.0
@@ -282,6 +282,17 @@ Page {
         }
 
+        StackLayout {
+            id: stackLayout
+            currentIndex: 0
+
+            Connections {
+                target: TimelineManager
+                function onActiveTimelineChanged() {
+                    stackLayout.currentIndex = 0;
+                }
+            }
+
             ListView {
                 id: chat
@@ -486,6 +497,13 @@ Page {
             }
 
+            Loader {
+                id: videoCallLoader
+                source: TimelineManager.onVideoCall ? "VideoCall.qml" : ""
+                onLoaded: TimelineManager.setVideoCallItem()
+            }
+        }
 
         Item {
             id: chatFooter

VideoCall.qml (new file)

@@ -0,0 +1,7 @@
+import QtQuick 2.9
+import org.freedesktop.gstreamer.GLVideoItem 1.0
+
+GstGLVideoItem {
+    objectName: "videoCallItem"
+}
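
Note: this new component is the surface that incoming video is drawn on. The Loader added to TimelineView.qml instantiates it only while a video call is active, and its onLoaded handler calls TimelineManager.setVideoCallItem(), which looks the item up by objectName (see TimelineViewManager.cpp below). The GStreamer end of that hand-off lives in WebRTCSession.cpp, whose diff is suppressed further down; presumably the stored QQuickItem is handed to a qmlglsink element, roughly like this sketch (not part of the commit; the helper name is made up):

#include <gst/gst.h>

class QQuickItem;

// Hypothetical helper: attach the GstGLVideoItem declared in VideoCall.qml
// (objectName "videoCallItem") to a qmlglsink so decoded frames render in QML.
// Assumes gst_init() has already been called and the "qmlgl" plugin is installed.
static GstElement *
makeQmlVideoSink(QQuickItem *videoItem)
{
        GstElement *sink = gst_element_factory_make("qmlglsink", nullptr);
        if (!sink || !videoItem)
                return nullptr;
        // qmlglsink exposes a "widget" property that takes the QQuickItem to draw into.
        g_object_set(sink, "widget", videoItem, nullptr);
        return sink;
}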

nheko.qrc

@@ -74,6 +74,7 @@
         <file>icons/ui/end-call.png</file>
         <file>icons/ui/microphone-mute.png</file>
         <file>icons/ui/microphone-unmute.png</file>
+        <file>icons/ui/video-call.png</file>
 
         <file>icons/emoji-categories/people.png</file>
         <file>icons/emoji-categories/people@2x.png</file>
@@ -130,6 +131,7 @@
         <file>qml/Reactions.qml</file>
         <file>qml/ScrollHelper.qml</file>
         <file>qml/TimelineRow.qml</file>
+        <file>qml/VideoCall.qml</file>
         <file>qml/emoji/EmojiButton.qml</file>
         <file>qml/emoji/EmojiPicker.qml</file>
         <file>qml/UserProfile.qml</file>
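
Note: the icon registered above is referred to in two spellings in this commit, a qrc: URL from QML and a ":/" path from C++; both resolve to the same embedded resource. For example (illustration only, not part of the commit):

#include <QIcon>
#include <QUrl>

// Both forms address the resource added to nheko.qrc above.
QIcon videoCallIcon()
{
        return QIcon(":/icons/icons/ui/video-call.png");       // C++ ":/" form
}

QUrl videoCallIconUrl()
{
        return QUrl("qrc:/icons/icons/ui/video-call.png");     // QML "qrc:" form
}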

CallManager.cpp

@@ -25,9 +25,6 @@ Q_DECLARE_METATYPE(mtx::responses::TurnServer)
 using namespace mtx::events;
 using namespace mtx::events::msg;
 
-// https://github.com/vector-im/riot-web/issues/10173
-#define STUN_SERVER "stun://turn.matrix.org:3478"
-
 namespace {
 std::vector<std::string>
 getTurnURIs(const mtx::responses::TurnServer &turnServer);
@@ -43,6 +40,8 @@ CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
         qRegisterMetaType<mtx::events::msg::CallCandidates::Candidate>();
         qRegisterMetaType<mtx::responses::TurnServer>();
 
+        session_.setSettings(userSettings);
+
         connect(
           &session_,
           &WebRTCSession::offerCreated,
@@ -128,30 +127,29 @@ CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
 }
 
 void
-CallManager::sendInvite(const QString &roomid)
+CallManager::sendInvite(const QString &roomid, bool isVideo)
 {
         if (onActiveCall())
                 return;
 
         auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
         if (roomInfo.member_count != 2) {
-                emit ChatPage::instance()->showNotification(
-                  "Voice calls are limited to 1:1 rooms.");
+                emit ChatPage::instance()->showNotification("Calls are limited to 1:1 rooms.");
                 return;
         }
 
         std::string errorMessage;
-        if (!session_.init(&errorMessage)) {
+        if (!session_.havePlugins(false, &errorMessage) ||
+            (isVideo && !session_.havePlugins(true, &errorMessage))) {
                 emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                 return;
         }
 
         roomid_ = roomid;
-        session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
         session_.setTurnServers(turnURIs_);
         generateCallID();
-        nhlog::ui()->debug("WebRTC: call id: {} - creating invite", callid_);
+        nhlog::ui()->debug(
+          "WebRTC: call id: {} - creating {} invite", callid_, isVideo ? "video" : "voice");
         std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
         const RoomMember &callee =
           members.front().user_id == utils::localUser() ? members.back() : members.front();
@@ -159,10 +157,12 @@ CallManager::sendInvite(const QString &roomid)
         callPartyAvatarUrl_ = QString::fromStdString(roomInfo.avatar_url);
         emit newCallParty();
         playRingtone("qrc:/media/media/ringback.ogg", true);
-        if (!session_.createOffer()) {
+        if (!session_.createOffer(isVideo)) {
                 emit ChatPage::instance()->showNotification("Problem setting up call.");
                 endCall();
         }
+        if (isVideo)
+                emit newVideoCallState();
 }
 
 namespace {
@@ -242,7 +242,7 @@ CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
                 return;
 
         auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id);
-        if (onActiveCall() || roomInfo.member_count != 2 || isVideo) {
+        if (onActiveCall() || roomInfo.member_count != 2) {
                 emit newMessage(QString::fromStdString(callInviteEvent.room_id),
                                 CallHangUp{callInviteEvent.content.call_id,
                                            0,
@@ -266,10 +266,11 @@ CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
           QString::fromStdString(roomInfo.name),
          QString::fromStdString(roomInfo.avatar_url),
          settings_,
+         isVideo,
          MainWindow::instance());
-        connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent]() {
+        connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent, isVideo]() {
                 MainWindow::instance()->hideOverlay();
-                answerInvite(callInviteEvent.content);
+                answerInvite(callInviteEvent.content, isVideo);
         });
         connect(dialog, &dialogs::AcceptCall::reject, this, [this]() {
                 MainWindow::instance()->hideOverlay();
@@ -279,19 +280,18 @@ CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
 }
 
 void
-CallManager::answerInvite(const CallInvite &invite)
+CallManager::answerInvite(const CallInvite &invite, bool isVideo)
 {
         stopRingtone();
 
         std::string errorMessage;
-        if (!session_.init(&errorMessage)) {
+        if (!session_.havePlugins(false, &errorMessage) ||
+            (isVideo && !session_.havePlugins(true, &errorMessage))) {
                 emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                 hangUp();
                 return;
         }
 
-        session_.setStunServer(settings_->useStunServer() ? STUN_SERVER : "");
         session_.setTurnServers(turnURIs_);
 
         if (!session_.acceptOffer(invite.sdp)) {
                 emit ChatPage::instance()->showNotification("Problem setting up call.");
                 hangUp();
@@ -299,6 +299,8 @@ CallManager::answerInvite(const CallInvite &invite)
         }
 
         session_.acceptICECandidates(remoteICECandidates_);
         remoteICECandidates_.clear();
+        if (isVideo)
+                emit newVideoCallState();
 }
 
 void
@@ -384,7 +386,10 @@ CallManager::endCall()
 {
         stopRingtone();
         clear();
+        bool isVideo = session_.isVideo();
         session_.end();
+        if (isVideo)
+                emit newVideoCallState();
 }
 
 void

CallManager.h

@@ -26,7 +26,7 @@ class CallManager : public QObject
 public:
         CallManager(QSharedPointer<UserSettings>);
 
-        void sendInvite(const QString &roomid);
+        void sendInvite(const QString &roomid, bool isVideo);
         void hangUp(
           mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
         bool onActiveCall() const;
@@ -43,6 +43,7 @@ signals:
         void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
         void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
         void newCallParty();
+        void newVideoCallState();
         void turnServerRetrieved(const mtx::responses::TurnServer &);
 
 private slots:
@@ -67,7 +68,7 @@ private:
         void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallCandidates> &);
         void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallAnswer> &);
         void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &);
-        void answerInvite(const mtx::events::msg::CallInvite &);
+        void answerInvite(const mtx::events::msg::CallInvite &, bool isVideo);
         void generateCallID();
         void clear();
         void endCall();

ChatPage.cpp

@@ -437,7 +437,7 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
                 } else {
                         if (auto roomInfo = cache::singleRoomInfo(current_room_.toStdString());
                             roomInfo.member_count != 2) {
-                                showNotification("Voice calls are limited to 1:1 rooms.");
+                                showNotification("Calls are limited to 1:1 rooms.");
                         } else {
                                 std::vector<RoomMember> members(
                                   cache::getMembers(current_room_.toStdString()));
@@ -452,7 +452,10 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
                                   userSettings_,
                                   MainWindow::instance());
                                 connect(dialog, &dialogs::PlaceCall::voice, this, [this]() {
-                                        callManager_.sendInvite(current_room_);
+                                        callManager_.sendInvite(current_room_, false);
+                                });
+                                connect(dialog, &dialogs::PlaceCall::video, this, [this]() {
+                                        callManager_.sendInvite(current_room_, true);
                                 });
                                 utils::centerWidget(dialog, MainWindow::instance());
                                 dialog->show();

UserSettingsPage.cpp

@@ -42,6 +42,7 @@
 #include "Olm.h"
 #include "UserSettingsPage.h"
 #include "Utils.h"
+#include "WebRTCSession.h"
 #include "ui/FlatButton.h"
 #include "ui/ToggleButton.h"
@@ -77,8 +78,11 @@ UserSettings::load()
         presence_ =
           settings.value("user/presence", QVariant::fromValue(Presence::AutomaticPresence))
             .value<Presence>();
+        microphone_ = settings.value("user/microphone", QString()).toString();
+        camera_ = settings.value("user/camera", QString()).toString();
+        cameraResolution_ = settings.value("user/camera_resolution", QString()).toString();
+        cameraFrameRate_ = settings.value("user/camera_frame_rate", QString()).toString();
         useStunServer_ = settings.value("user/use_stun_server", false).toBool();
-        defaultAudioSource_ = settings.value("user/default_audio_source", QString()).toString();
 
         applyTheme();
 }
@@ -292,12 +296,42 @@ UserSettings::setUseStunServer(bool useStunServer)
 }
 
 void
-UserSettings::setDefaultAudioSource(const QString &defaultAudioSource)
+UserSettings::setMicrophone(QString microphone)
 {
-        if (defaultAudioSource == defaultAudioSource_)
+        if (microphone == microphone_)
                 return;
-        defaultAudioSource_ = defaultAudioSource;
-        emit defaultAudioSourceChanged(defaultAudioSource);
+        microphone_ = microphone;
+        emit microphoneChanged(microphone);
+        save();
+}
+
+void
+UserSettings::setCamera(QString camera)
+{
+        if (camera == camera_)
+                return;
+        camera_ = camera;
+        emit cameraChanged(camera);
+        save();
+}
+
+void
+UserSettings::setCameraResolution(QString resolution)
+{
+        if (resolution == cameraResolution_)
+                return;
+        cameraResolution_ = resolution;
+        emit cameraResolutionChanged(resolution);
+        save();
+}
+
+void
+UserSettings::setCameraFrameRate(QString frameRate)
+{
+        if (frameRate == cameraFrameRate_)
+                return;
+        cameraFrameRate_ = frameRate;
+        emit cameraFrameRateChanged(frameRate);
         save();
 }
@@ -386,8 +420,11 @@ UserSettings::save()
         settings.setValue("font_family", font_);
         settings.setValue("emoji_font_family", emojiFont_);
         settings.setValue("presence", QVariant::fromValue(presence_));
+        settings.setValue("microphone", microphone_);
+        settings.setValue("camera", camera_);
+        settings.setValue("camera_resolution", cameraResolution_);
+        settings.setValue("camera_frame_rate", cameraFrameRate_);
         settings.setValue("use_stun_server", useStunServer_);
-        settings.setValue("default_audio_source", defaultAudioSource_);
 
         settings.endGroup();
@@ -458,6 +495,10 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
         fontSizeCombo_ = new QComboBox{this};
         fontSelectionCombo_ = new QComboBox{this};
         emojiFontSelectionCombo_ = new QComboBox{this};
+        microphoneCombo_ = new QComboBox{this};
+        cameraCombo_ = new QComboBox{this};
+        cameraResolutionCombo_ = new QComboBox{this};
+        cameraFrameRateCombo_ = new QComboBox{this};
         timelineMaxWidthSpin_ = new QSpinBox{this};
 
         if (!settings_->tray())
@@ -645,6 +686,14 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
         formLayout_->addRow(callsLabel);
         formLayout_->addRow(new HorizontalLine{this});
+        boxWrap(tr("Microphone"), microphoneCombo_);
+        boxWrap(tr("Camera"), cameraCombo_);
+        boxWrap(tr("Camera resolution"), cameraResolutionCombo_);
+        boxWrap(tr("Camera frame rate"), cameraFrameRateCombo_);
+        microphoneCombo_->setSizeAdjustPolicy(QComboBox::AdjustToContents);
+        cameraCombo_->setSizeAdjustPolicy(QComboBox::AdjustToContents);
+        cameraResolutionCombo_->setSizeAdjustPolicy(QComboBox::AdjustToContents);
+        cameraFrameRateCombo_->setSizeAdjustPolicy(QComboBox::AdjustToContents);
         boxWrap(tr("Allow fallback call assist server"),
                 useStunServer_,
                 tr("Will use turn.matrix.org as assist when your home server does not offer one."));
@@ -698,6 +747,38 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
         connect(emojiFontSelectionCombo_,
                 static_cast<void (QComboBox::*)(const QString &)>(&QComboBox::currentTextChanged),
                 [this](const QString &family) { settings_->setEmojiFontFamily(family.trimmed()); });
 
+        connect(microphoneCombo_,
+                static_cast<void (QComboBox::*)(const QString &)>(&QComboBox::currentTextChanged),
+                [this](const QString &microphone) { settings_->setMicrophone(microphone); });
+
+        connect(cameraCombo_,
+                static_cast<void (QComboBox::*)(const QString &)>(&QComboBox::currentTextChanged),
+                [this](const QString &camera) {
+                        settings_->setCamera(camera);
+                        std::vector<std::string> resolutions =
+                          WebRTCSession::instance().getResolutions(camera.toStdString());
+                        cameraResolutionCombo_->clear();
+                        for (const auto &resolution : resolutions)
+                                cameraResolutionCombo_->addItem(QString::fromStdString(resolution));
+                });
+
+        connect(cameraResolutionCombo_,
+                static_cast<void (QComboBox::*)(const QString &)>(&QComboBox::currentTextChanged),
+                [this](const QString &resolution) {
+                        settings_->setCameraResolution(resolution);
+                        std::vector<std::string> frameRates =
+                          WebRTCSession::instance().getFrameRates(settings_->camera().toStdString(),
+                                                                  resolution.toStdString());
+                        cameraFrameRateCombo_->clear();
+                        for (const auto &frameRate : frameRates)
+                                cameraFrameRateCombo_->addItem(QString::fromStdString(frameRate));
+                });
+
+        connect(cameraFrameRateCombo_,
+                static_cast<void (QComboBox::*)(const QString &)>(&QComboBox::currentTextChanged),
+                [this](const QString &frameRate) { settings_->setCameraFrameRate(frameRate); });
+
         connect(trayToggle_, &Toggle::toggled, this, [this](bool disabled) {
                 settings_->setTray(!disabled);
                 if (disabled) {
@@ -807,6 +888,26 @@ UserSettingsPage::showEvent(QShowEvent *)
         enlargeEmojiOnlyMessages_->setState(!settings_->enlargeEmojiOnlyMessages());
         deviceIdValue_->setText(QString::fromStdString(http::client()->device_id()));
         timelineMaxWidthSpin_->setValue(settings_->timelineMaxWidth());
 
+        WebRTCSession::instance().refreshDevices();
+        auto mics =
+          WebRTCSession::instance().getDeviceNames(false, settings_->microphone().toStdString());
+        microphoneCombo_->clear();
+        for (const auto &m : mics)
+                microphoneCombo_->addItem(QString::fromStdString(m));
+
+        auto cameraResolution = settings_->cameraResolution();
+        auto cameraFrameRate = settings_->cameraFrameRate();
+        auto cameras =
+          WebRTCSession::instance().getDeviceNames(true, settings_->camera().toStdString());
+        cameraCombo_->clear();
+        for (const auto &c : cameras)
+                cameraCombo_->addItem(QString::fromStdString(c));
+        utils::restoreCombobox(cameraResolutionCombo_, cameraResolution);
+        utils::restoreCombobox(cameraFrameRateCombo_, cameraFrameRate);
+
         useStunServer_->setState(!settings_->useStunServer());
         deviceFingerprintValue_->setText(

UserSettingsPage.h

@@ -73,8 +73,12 @@ class UserSettings : public QObject
         Q_PROPERTY(Presence presence READ presence WRITE setPresence NOTIFY presenceChanged)
         Q_PROPERTY(
           bool useStunServer READ useStunServer WRITE setUseStunServer NOTIFY useStunServerChanged)
-        Q_PROPERTY(QString defaultAudioSource READ defaultAudioSource WRITE setDefaultAudioSource
-                     NOTIFY defaultAudioSourceChanged)
+        Q_PROPERTY(QString microphone READ microphone WRITE setMicrophone NOTIFY microphoneChanged)
+        Q_PROPERTY(QString camera READ camera WRITE setCamera NOTIFY cameraChanged)
+        Q_PROPERTY(QString cameraResolution READ cameraResolution WRITE setCameraResolution NOTIFY
+                     cameraResolutionChanged)
+        Q_PROPERTY(QString cameraFrameRate READ cameraFrameRate WRITE setCameraFrameRate NOTIFY
+                     cameraFrameRateChanged)
 
 public:
         UserSettings();
@@ -111,8 +115,11 @@ public:
         void setAvatarCircles(bool state);
         void setDecryptSidebar(bool state);
         void setPresence(Presence state);
+        void setMicrophone(QString microphone);
+        void setCamera(QString camera);
+        void setCameraResolution(QString resolution);
+        void setCameraFrameRate(QString frameRate);
         void setUseStunServer(bool state);
-        void setDefaultAudioSource(const QString &deviceName);
 
         QString theme() const { return !theme_.isEmpty() ? theme_ : defaultTheme_; }
         bool messageHoverHighlight() const { return messageHoverHighlight_; }
@@ -138,8 +145,11 @@ public:
         QString font() const { return font_; }
         QString emojiFont() const { return emojiFont_; }
         Presence presence() const { return presence_; }
+        QString microphone() const { return microphone_; }
+        QString camera() const { return camera_; }
+        QString cameraResolution() const { return cameraResolution_; }
+        QString cameraFrameRate() const { return cameraFrameRate_; }
         bool useStunServer() const { return useStunServer_; }
-        QString defaultAudioSource() const { return defaultAudioSource_; }
 
 signals:
         void groupViewStateChanged(bool state);
@@ -162,8 +172,11 @@ signals:
         void fontChanged(QString state);
         void emojiFontChanged(QString state);
         void presenceChanged(Presence state);
+        void microphoneChanged(QString microphone);
+        void cameraChanged(QString camera);
+        void cameraResolutionChanged(QString resolution);
+        void cameraFrameRateChanged(QString frameRate);
         void useStunServerChanged(bool state);
-        void defaultAudioSourceChanged(const QString &deviceName);
 
 private:
         // Default to system theme if QT_QPA_PLATFORMTHEME var is set.
@@ -191,8 +204,11 @@ private:
         QString font_;
         QString emojiFont_;
         Presence presence_;
+        QString microphone_;
+        QString camera_;
+        QString cameraResolution_;
+        QString cameraFrameRate_;
         bool useStunServer_;
-        QString defaultAudioSource_;
 };
 
 class HorizontalLine : public QFrame
@@ -256,6 +272,10 @@ private:
         QComboBox *fontSizeCombo_;
         QComboBox *fontSelectionCombo_;
         QComboBox *emojiFontSelectionCombo_;
+        QComboBox *microphoneCombo_;
+        QComboBox *cameraCombo_;
+        QComboBox *cameraResolutionCombo_;
+        QComboBox *cameraFrameRateCombo_;
         QSpinBox *timelineMaxWidthSpin_;

WebRTCSession.cpp: file diff suppressed because it is too large.

WebRTCSession.h

@@ -4,10 +4,13 @@
 #include <vector>
 
 #include <QObject>
+#include <QSharedPointer>
 
 #include "mtx/events/voip.hpp"
 
 typedef struct _GstElement GstElement;
+class QQuickItem;
+class UserSettings;
 
 namespace webrtc {
 Q_NAMESPACE
@@ -39,10 +42,13 @@ public:
                 return instance;
         }
 
-        bool init(std::string *errorMessage = nullptr);
+        bool havePlugins(bool isVideo, std::string *errorMessage = nullptr);
         webrtc::State state() const { return state_; }
+        bool isVideo() const { return isVideo_; }
+        bool isOffering() const { return isOffering_; }
+        bool isRemoteVideoRecvOnly() const { return isRemoteVideoRecvOnly_; }
 
-        bool createOffer();
+        bool createOffer(bool isVideo);
         bool acceptOffer(const std::string &sdp);
         bool acceptAnswer(const std::string &sdp);
         void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
@@ -51,11 +57,18 @@ public:
         bool toggleMicMute();
         void end();
 
-        void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; }
+        void setSettings(QSharedPointer<UserSettings> settings) { settings_ = settings; }
         void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }
 
-        std::vector<std::string> getAudioSourceNames(const std::string &defaultDevice);
-        void setAudioSource(int audioDeviceIndex) { audioSourceIndex_ = audioDeviceIndex; }
+        void refreshDevices();
+        std::vector<std::string> getDeviceNames(bool isVideo,
+                                                const std::string &defaultDevice) const;
+        std::vector<std::string> getResolutions(const std::string &cameraName) const;
+        std::vector<std::string> getFrameRates(const std::string &cameraName,
+                                               const std::string &resolution) const;
+
+        void setVideoItem(QQuickItem *item) { videoItem_ = item; }
+        QQuickItem *getVideoItem() const { return videoItem_; }
 
 signals:
         void offerCreated(const std::string &sdp,
@@ -72,17 +85,23 @@ private:
         WebRTCSession();
 
         bool initialised_ = false;
+        bool haveVoicePlugins_ = false;
+        bool haveVideoPlugins_ = false;
         webrtc::State state_ = webrtc::State::DISCONNECTED;
+        bool isVideo_ = false;
+        bool isOffering_ = false;
+        bool isRemoteVideoRecvOnly_ = false;
+        QQuickItem *videoItem_ = nullptr;
         GstElement *pipe_ = nullptr;
         GstElement *webrtc_ = nullptr;
         unsigned int busWatchId_ = 0;
-        std::string stunServer_;
+        QSharedPointer<UserSettings> settings_;
         std::vector<std::string> turnServers_;
-        int audioSourceIndex_ = -1;
 
-        bool startPipeline(int opusPayloadType);
-        bool createPipeline(int opusPayloadType);
-        void refreshDevices();
+        bool init(std::string *errorMessage = nullptr);
+        bool startPipeline(int opusPayloadType, int vp8PayloadType);
+        bool createPipeline(int opusPayloadType, int vp8PayloadType);
+        bool addVideoPipeline(int vp8PayloadType);
         void startDeviceMonitor();
 
 public:
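
Note: init() is split into havePlugins(bool isVideo, ...) so callers can verify the audio plugins first and the video plugins only when a video call is requested. The implementation lives in WebRTCSession.cpp (diff suppressed above); a check against the GStreamer registry along these lines would do it, as a sketch only, with an assumed plugin list that is not taken from this commit:

#include <gst/gst.h>
#include <string>

// Hypothetical sketch of a plugin availability check like havePlugins().
// Assumes gst_init() has already been called; the plugin names are illustrative.
static bool
pluginsAvailable(bool isVideo, std::string *error)
{
        const char *audio[] = {"opus", "webrtc", "nice", "dtls", "srtp", "rtpmanager"};
        const char *video[] = {"vpx", "videoconvert", "qmlgl"};
        GstRegistry *registry = gst_registry_get();
        auto check = [&](const char *name) {
                GstPlugin *plugin = gst_registry_find_plugin(registry, name);
                if (!plugin) {
                        if (error)
                                *error = std::string("GStreamer plugin missing: ") + name;
                        return false;
                }
                gst_object_unref(plugin);
                return true;
        };
        for (const char *p : audio)
                if (!check(p))
                        return false;
        if (isVideo)
                for (const char *p : video)
                        if (!check(p))
                                return false;
        return true;
}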

AcceptCall.cpp

@@ -19,23 +19,32 @@ AcceptCall::AcceptCall(const QString &caller,
                        const QString &roomName,
                        const QString &avatarUrl,
                        QSharedPointer<UserSettings> settings,
+                       bool isVideo,
                        QWidget *parent)
   : QWidget(parent)
 {
         std::string errorMessage;
-        if (!WebRTCSession::instance().init(&errorMessage)) {
+        WebRTCSession *session = &WebRTCSession::instance();
+        if (!session->havePlugins(false, &errorMessage)) {
                 emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                 emit close();
                 return;
         }
-        audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
-          settings->defaultAudioSource().toStdString());
-        if (audioDevices_.empty()) {
-                emit ChatPage::instance()->showNotification(
-                  "Incoming call: No audio sources found.");
+        if (isVideo && !session->havePlugins(true, &errorMessage)) {
+                emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                 emit close();
                 return;
         }
+        session->refreshDevices();
+        microphones_ = session->getDeviceNames(false, settings->microphone().toStdString());
+        if (microphones_.empty()) {
+                emit ChatPage::instance()->showNotification(
+                  tr("Incoming call: No microphone found."));
+                emit close();
+                return;
+        }
+        if (isVideo)
+                cameras_ = session->getDeviceNames(true, settings->camera().toStdString());
 
         setAutoFillBackground(true);
         setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
@@ -77,9 +86,10 @@ AcceptCall::AcceptCall(const QString &caller,
         const int iconSize = 22;
         QLabel *callTypeIndicator = new QLabel(this);
         callTypeIndicator->setPixmap(
-          QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(iconSize * 2, iconSize * 2)));
+          QIcon(isVideo ? ":/icons/icons/ui/video-call.png" : ":/icons/icons/ui/place-call.png")
+            .pixmap(QSize(iconSize * 2, iconSize * 2)));
 
-        QLabel *callTypeLabel = new QLabel("Voice Call", this);
+        QLabel *callTypeLabel = new QLabel(isVideo ? tr("Video Call") : tr("Voice Call"), this);
         labelFont.setPointSizeF(f.pointSizeF() * 1.1);
         callTypeLabel->setFont(labelFont);
         callTypeLabel->setAlignment(Qt::AlignCenter);
@@ -88,7 +98,8 @@ AcceptCall::AcceptCall(const QString &caller,
         buttonLayout->setSpacing(18);
         acceptBtn_ = new QPushButton(tr("Accept"), this);
         acceptBtn_->setDefault(true);
-        acceptBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
+        acceptBtn_->setIcon(
+          QIcon(isVideo ? ":/icons/icons/ui/video-call.png" : ":/icons/icons/ui/place-call.png"));
         acceptBtn_->setIconSize(QSize(iconSize, iconSize));
 
         rejectBtn_ = new QPushButton(tr("Reject"), this);
@@ -97,18 +108,17 @@ AcceptCall::AcceptCall(const QString &caller,
         buttonLayout->addWidget(acceptBtn_);
         buttonLayout->addWidget(rejectBtn_);
 
-        auto deviceLayout = new QHBoxLayout;
-        auto audioLabel = new QLabel(this);
-        audioLabel->setPixmap(
-          QIcon(":/icons/icons/ui/microphone-unmute.png").pixmap(QSize(iconSize, iconSize)));
-
-        auto deviceList = new QComboBox(this);
-        for (const auto &d : audioDevices_)
-                deviceList->addItem(QString::fromStdString(d));
-
-        deviceLayout->addStretch();
-        deviceLayout->addWidget(audioLabel);
-        deviceLayout->addWidget(deviceList);
+        microphoneCombo_ = new QComboBox(this);
+        for (const auto &m : microphones_)
+                microphoneCombo_->addItem(QIcon(":/icons/icons/ui/microphone-unmute.png"),
+                                          QString::fromStdString(m));
+
+        if (!cameras_.empty()) {
+                cameraCombo_ = new QComboBox(this);
+                for (const auto &c : cameras_)
+                        cameraCombo_->addItem(QIcon(":/icons/icons/ui/video-call.png"),
+                                              QString::fromStdString(c));
+        }
 
         if (displayNameLabel)
                 layout->addWidget(displayNameLabel, 0, Qt::AlignCenter);
@@ -117,12 +127,17 @@ AcceptCall::AcceptCall(const QString &caller,
         layout->addWidget(callTypeIndicator, 0, Qt::AlignCenter);
         layout->addWidget(callTypeLabel, 0, Qt::AlignCenter);
         layout->addLayout(buttonLayout);
-        layout->addLayout(deviceLayout);
+        layout->addWidget(microphoneCombo_);
+        if (cameraCombo_)
+                layout->addWidget(cameraCombo_);
 
-        connect(acceptBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
-                WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
-                settings->setDefaultAudioSource(
-                  QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
+        connect(acceptBtn_, &QPushButton::clicked, this, [this, settings, session]() {
+                settings->setMicrophone(
+                  QString::fromStdString(microphones_[microphoneCombo_->currentIndex()]));
+                if (cameraCombo_) {
+                        settings->setCamera(
+                          QString::fromStdString(cameras_[cameraCombo_->currentIndex()]));
+                }
                 emit accept();
                 emit close();
         });
@@ -131,4 +146,5 @@ AcceptCall::AcceptCall(const QString &caller,
                 emit close();
         });
 }
+
 }

AcceptCall.h

@@ -6,6 +6,7 @@
 #include <QSharedPointer>
 #include <QWidget>
 
+class QComboBox;
 class QPushButton;
 class QString;
 class UserSettings;
@@ -22,6 +23,7 @@ public:
                    const QString &roomName,
                    const QString &avatarUrl,
                    QSharedPointer<UserSettings> settings,
+                   bool isVideo,
                    QWidget *parent = nullptr);
 
 signals:
@@ -29,8 +31,12 @@ signals:
         void reject();
 
 private:
-        QPushButton *acceptBtn_;
-        QPushButton *rejectBtn_;
-        std::vector<std::string> audioDevices_;
+        QPushButton *acceptBtn_ = nullptr;
+        QPushButton *rejectBtn_ = nullptr;
+        QComboBox *microphoneCombo_ = nullptr;
+        QComboBox *cameraCombo_ = nullptr;
+        std::vector<std::string> microphones_;
+        std::vector<std::string> cameras_;
 };
 
 }

PlaceCall.cpp

@@ -23,18 +23,20 @@ PlaceCall::PlaceCall(const QString &callee,
   : QWidget(parent)
 {
         std::string errorMessage;
-        if (!WebRTCSession::instance().init(&errorMessage)) {
+        WebRTCSession *session = &WebRTCSession::instance();
+        if (!session->havePlugins(false, &errorMessage)) {
                 emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
                 emit close();
                 return;
         }
-        audioDevices_ = WebRTCSession::instance().getAudioSourceNames(
-          settings->defaultAudioSource().toStdString());
-        if (audioDevices_.empty()) {
-                emit ChatPage::instance()->showNotification("No audio sources found.");
+        session->refreshDevices();
+        microphones_ = session->getDeviceNames(false, settings->microphone().toStdString());
+        if (microphones_.empty()) {
+                emit ChatPage::instance()->showNotification(tr("No microphone found."));
                 emit close();
                 return;
         }
+        cameras_ = session->getDeviceNames(true, settings->camera().toStdString());
 
         setAutoFillBackground(true);
         setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
@@ -56,48 +58,74 @@ PlaceCall::PlaceCall(const QString &callee,
                 avatar->setImage(avatarUrl);
         else
                 avatar->setLetter(utils::firstChar(roomName));
-        const int iconSize = 18;
         voiceBtn_ = new QPushButton(tr("Voice"), this);
         voiceBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
-        voiceBtn_->setIconSize(QSize(iconSize, iconSize));
+        voiceBtn_->setIconSize(QSize(iconSize_, iconSize_));
         voiceBtn_->setDefault(true);
+        if (!cameras_.empty()) {
+                videoBtn_ = new QPushButton(tr("Video"), this);
+                videoBtn_->setIcon(QIcon(":/icons/icons/ui/video-call.png"));
+                videoBtn_->setIconSize(QSize(iconSize_, iconSize_));
+        }
         cancelBtn_ = new QPushButton(tr("Cancel"), this);
 
         buttonLayout->addWidget(avatar);
         buttonLayout->addStretch();
         buttonLayout->addWidget(voiceBtn_);
+        if (videoBtn_)
+                buttonLayout->addWidget(videoBtn_);
         buttonLayout->addWidget(cancelBtn_);
 
         QString name = displayName.isEmpty() ? callee : displayName;
-        QLabel *label = new QLabel("Place a call to " + name + "?", this);
+        QLabel *label = new QLabel(tr("Place a call to ") + name + "?", this);
 
-        auto deviceLayout = new QHBoxLayout;
-        auto audioLabel = new QLabel(this);
-        audioLabel->setPixmap(QIcon(":/icons/icons/ui/microphone-unmute.png")
-                                .pixmap(QSize(iconSize * 1.2, iconSize * 1.2)));
-
-        auto deviceList = new QComboBox(this);
-        for (const auto &d : audioDevices_)
-                deviceList->addItem(QString::fromStdString(d));
-
-        deviceLayout->addStretch();
-        deviceLayout->addWidget(audioLabel);
-        deviceLayout->addWidget(deviceList);
+        microphoneCombo_ = new QComboBox(this);
+        for (const auto &m : microphones_)
+                microphoneCombo_->addItem(QIcon(":/icons/icons/ui/microphone-unmute.png"),
+                                          QString::fromStdString(m));
+
+        if (videoBtn_) {
+                cameraCombo_ = new QComboBox(this);
+                for (const auto &c : cameras_)
+                        cameraCombo_->addItem(QIcon(":/icons/icons/ui/video-call.png"),
+                                              QString::fromStdString(c));
+        }
 
         layout->addWidget(label);
         layout->addLayout(buttonLayout);
-        layout->addLayout(deviceLayout);
+        layout->addStretch();
+        layout->addWidget(microphoneCombo_);
+        if (videoBtn_)
+                layout->addWidget(cameraCombo_);
 
-        connect(voiceBtn_, &QPushButton::clicked, this, [this, deviceList, settings]() {
-                WebRTCSession::instance().setAudioSource(deviceList->currentIndex());
-                settings->setDefaultAudioSource(
-                  QString::fromStdString(audioDevices_[deviceList->currentIndex()]));
+        connect(voiceBtn_, &QPushButton::clicked, this, [this, settings, session]() {
+                settings->setMicrophone(
+                  QString::fromStdString(microphones_[microphoneCombo_->currentIndex()]));
                 emit voice();
                 emit close();
         });
+        if (videoBtn_)
+                connect(videoBtn_, &QPushButton::clicked, this, [this, settings, session]() {
+                        std::string error;
+                        if (!session->havePlugins(true, &error)) {
+                                emit ChatPage::instance()->showNotification(
+                                  QString::fromStdString(error));
+                                emit close();
+                                return;
+                        }
+                        settings->setMicrophone(
+                          QString::fromStdString(microphones_[microphoneCombo_->currentIndex()]));
+                        settings->setCamera(
+                          QString::fromStdString(cameras_[cameraCombo_->currentIndex()]));
+                        emit video();
+                        emit close();
+                });
         connect(cancelBtn_, &QPushButton::clicked, this, [this]() {
                 emit cancel();
                 emit close();
         });
 }
 }

PlaceCall.h

@@ -6,6 +6,7 @@
 #include <QSharedPointer>
 #include <QWidget>
 
+class QComboBox;
 class QPushButton;
 class QString;
 class UserSettings;
@@ -26,11 +27,18 @@ public:
 signals:
         void voice();
+        void video();
         void cancel();
 
 private:
-        QPushButton *voiceBtn_;
-        QPushButton *cancelBtn_;
-        std::vector<std::string> audioDevices_;
+        const int iconSize_ = 18;
+        QPushButton *voiceBtn_ = nullptr;
+        QPushButton *videoBtn_ = nullptr;
+        QPushButton *cancelBtn_ = nullptr;
+        QComboBox *microphoneCombo_ = nullptr;
+        QComboBox *cameraCombo_ = nullptr;
+        std::vector<std::string> microphones_;
+        std::vector<std::string> cameras_;
 };
 
 }

TimelineViewManager.cpp

@@ -242,6 +242,17 @@ TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettin
                 &TimelineViewManager::callStateChanged);
         connect(
           callManager_, &CallManager::newCallParty, this, &TimelineViewManager::callPartyChanged);
+        connect(callManager_,
+                &CallManager::newVideoCallState,
+                this,
+                &TimelineViewManager::videoCallChanged);
+}
+
+void
+TimelineViewManager::setVideoCallItem()
+{
+        WebRTCSession::instance().setVideoItem(
+          view->rootObject()->findChild<QQuickItem *>("videoCallItem"));
 }
 
 void

TimelineViewManager.h

@@ -36,6 +36,7 @@ class TimelineViewManager : public QObject
         Q_PROPERTY(
           bool isNarrowView MEMBER isNarrowView_ READ isNarrowView NOTIFY narrowViewChanged)
         Q_PROPERTY(webrtc::State callState READ callState NOTIFY callStateChanged)
+        Q_PROPERTY(bool onVideoCall READ onVideoCall NOTIFY videoCallChanged)
         Q_PROPERTY(QString callPartyName READ callPartyName NOTIFY callPartyChanged)
         Q_PROPERTY(QString callPartyAvatarUrl READ callPartyAvatarUrl NOTIFY callPartyChanged)
         Q_PROPERTY(bool isMicMuted READ isMicMuted NOTIFY micMuteChanged)
@@ -55,6 +56,8 @@ public:
         Q_INVOKABLE bool isInitialSync() const { return isInitialSync_; }
         bool isNarrowView() const { return isNarrowView_; }
         webrtc::State callState() const { return WebRTCSession::instance().state(); }
+        bool onVideoCall() const { return WebRTCSession::instance().isVideo(); }
+        Q_INVOKABLE void setVideoCallItem();
         QString callPartyName() const { return callManager_->callPartyName(); }
         QString callPartyAvatarUrl() const { return callManager_->callPartyAvatarUrl(); }
         bool isMicMuted() const { return WebRTCSession::instance().isMicMuted(); }
@@ -89,6 +92,7 @@ signals:
         void showRoomList();
         void narrowViewChanged();
         void callStateChanged(webrtc::State);
+        void videoCallChanged();
         void callPartyChanged();
         void micMuteChanged();
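
Note: the new onVideoCall property and videoCallChanged() signal are what the QML above binds to (TimelineManager.onVideoCall in ActiveCallBar.qml and TimelineView.qml); those bindings re-evaluate whenever CallManager emits newVideoCallState(). This assumes the manager is exposed to the QML engine under the name TimelineManager, which nheko does outside this diff, typically as a context property along these lines (sketch only, helper name made up):

#include <QObject>
#include <QQmlContext>
#include <QQuickView>

// Hypothetical sketch: expose the manager instance to QML as "TimelineManager",
// so expressions like TimelineManager.onVideoCall resolve against its Q_PROPERTYs.
void
exposeToQml(QQuickView *view, QObject *timelineManager)
{
        view->rootContext()->setContextProperty("TimelineManager", timelineManager);
}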