Mirror of https://github.com/Nheko-Reborn/nheko.git (synced 2024-11-22 11:00:48 +03:00)

Port ActiveCallBar to Qml

parent 9169a26e67
commit da27670cbe
16 changed files with 212 additions and 277 deletions
@@ -279,7 +279,6 @@ set(SRC_FILES
        src/ui/Theme.cpp
        src/ui/ThemeManager.cpp

        src/ActiveCallBar.cpp
        src/AvatarProvider.cpp
        src/BlurhashProvider.cpp
        src/Cache.cpp

@@ -491,7 +490,6 @@ qt5_wrap_cpp(MOC_HEADERS

        src/notifications/Manager.h

        src/ActiveCallBar.h
        src/AvatarProvider.h
        src/BlurhashProvider.h
        src/Cache_p.h

@@ -2,7 +2,8 @@ import QtQuick 2.3
import QtQuick.Controls 2.3

AbstractButton {
    property string image: undefined
    property string image
    property string src
    width: 16
    height: 16
    id: button
@@ -11,7 +12,7 @@ AbstractButton {
        id: buttonImg
        // Workaround, can't get icon.source working for now...
        anchors.fill: parent
        source: "image://colorimage/" + image + "?" + (button.hovered ? colors.highlight : colors.buttonText)
        source: src ? src : ("image://colorimage/" + image + "?" + (button.hovered ? colors.highlight : colors.buttonText))
    }

    MouseArea

@@ -497,6 +497,146 @@ Page {
}
}
}

Rectangle {
    id: activeCallBar
    visible: timelineManager.callState != WebRTCState.DISCONNECTED

    Layout.fillWidth: true
    implicitHeight: topLayout.height + 16
    color: "#2ECC71"
    z: 3

    GridLayout {
        anchors.left: parent.left
        anchors.right: parent.right
        anchors.margins: 8
        anchors.verticalCenter: parent.verticalCenter

        Avatar {
            Layout.column: 1
            Layout.row: 0
            Layout.rowSpan: 2
            Layout.alignment: Qt.AlignVCenter

            width: avatarSize
            height: avatarSize

            url: chat.model ? chat.model.roomAvatarUrl.replace("mxc://", "image://MxcImage/") : ""
            displayName: chat.model ? chat.model.roomName : qsTr("No room selected")
        }

        Label {
            Layout.column: 2
            Layout.row: 0
            Layout.rowSpan: 2
            Layout.alignment: Qt.AlignVCenter

            font.pointSize: fontMetrics.font.pointSize * 1.1
            text: chat.model ? " " + chat.model.roomName + " " : ""
        }

        Image {
            Layout.column: 3
            Layout.row: 0
            Layout.rowSpan: 2
            Layout.alignment: Qt.AlignVCenter
            Layout.preferredWidth: 23
            Layout.preferredHeight: 23
            source: "qrc:/icons/icons/ui/place-call.png"
        }

        Connections {
            target: timelineManager
            function onCallStateChanged(state) {
                switch (state) {
                case WebRTCState.INITIATING:
                    callStateLabel.text = "Initiating call..."
                    break;
                case WebRTCState.INITIATED:
                    callStateLabel.text = "Call initiated..."
                    break;
                case WebRTCState.OFFERSENT:
                    callStateLabel.text = "Calling..."
                    break;
                case WebRTCState.CONNECTING:
                    callStateLabel.text = "Connecting..."
                    break;
                case WebRTCState.CONNECTED:
                    callStateLabel.text = "00:00"
                    var d = new Date()
                    callTimer.startTime = Math.floor(d.getTime() / 1000)
                    break;
                }
            }
        }

        Label {
            id: callStateLabel
            Layout.column: 4
            Layout.row: 0
            Layout.rowSpan: 2
            Layout.alignment: Qt.AlignVCenter
            font.pointSize: fontMetrics.font.pointSize * 1.1
        }

        Timer {
            id: callTimer
            property int startTime
            interval: 1000
            running: timelineManager.callState == WebRTCState.CONNECTED
            repeat: true
            onTriggered: {
                var d = new Date()
                let seconds = Math.floor(d.getTime() / 1000 - startTime)
                let s = Math.floor(seconds % 60)
                let m = Math.floor(seconds / 60) % 60
                let h = Math.floor(seconds / 3600)
                callStateLabel.text = (h ? (pad(h) + ":") : "") + pad(m) + ":" + pad(s)
            }

            function pad(n) {
                return (n < 10) ? ("0" + n) : n
            }
        }

        Item {
            Layout.column: 5
            Layout.fillWidth: true
        }

        ImageButton {
            Layout.column: 6
            Layout.row: 0
            Layout.rowSpan: 2
            Layout.alignment: Qt.AlignVCenter

            width: 22
            height: 22
            src: "qrc:/icons/icons/ui/microphone-mute.png"

            hoverEnabled: true
            ToolTip.visible: hovered
            ToolTip.text: qsTr("Mute Mic")

            onClicked: {
                if (timelineManager.toggleMuteAudioSource()) {
                    src = "qrc:/icons/icons/ui/microphone-unmute.png"
                    ToolTip.text = qsTr("Unmute Mic")
                }
                else {
                    src = "qrc:/icons/icons/ui/microphone-mute.png"
                    ToolTip.text = qsTr("Mute Mic")
                }
            }
        }

        Item {
            Layout.column: 7
            implicitWidth: 16
        }
    }
}
}
}
}

@@ -1,160 +0,0 @@
#include <cstdio>

#include <QDateTime>
#include <QHBoxLayout>
#include <QIcon>
#include <QLabel>
#include <QString>
#include <QTimer>

#include "ActiveCallBar.h"
#include "ChatPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "ui/Avatar.h"
#include "ui/FlatButton.h"

ActiveCallBar::ActiveCallBar(QWidget *parent)
  : QWidget(parent)
{
        setAutoFillBackground(true);
        auto p = palette();
        p.setColor(backgroundRole(), QColor(46, 204, 113));
        setPalette(p);

        QFont f;
        f.setPointSizeF(f.pointSizeF());

        const int fontHeight = QFontMetrics(f).height();
        const int widgetMargin = fontHeight / 3;
        const int contentHeight = fontHeight * 3;

        setFixedHeight(contentHeight + widgetMargin);

        layout_ = new QHBoxLayout(this);
        layout_->setSpacing(widgetMargin);
        layout_->setContentsMargins(2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin);

        QFont labelFont;
        labelFont.setPointSizeF(labelFont.pointSizeF() * 1.1);
        labelFont.setWeight(QFont::Medium);

        avatar_ = new Avatar(this, QFontMetrics(f).height() * 2.5);

        callPartyLabel_ = new QLabel(this);
        callPartyLabel_->setFont(labelFont);

        stateLabel_ = new QLabel(this);
        stateLabel_->setFont(labelFont);

        durationLabel_ = new QLabel(this);
        durationLabel_->setFont(labelFont);
        durationLabel_->hide();

        muteBtn_ = new FlatButton(this);
        setMuteIcon(false);
        muteBtn_->setFixedSize(buttonSize_, buttonSize_);
        muteBtn_->setCornerRadius(buttonSize_ / 2);
        connect(muteBtn_, &FlatButton::clicked, this, [this]() {
                if (WebRTCSession::instance().toggleMuteAudioSrc(muted_))
                        setMuteIcon(muted_);
        });

        layout_->addWidget(avatar_, 0, Qt::AlignLeft);
        layout_->addWidget(callPartyLabel_, 0, Qt::AlignLeft);
        layout_->addWidget(stateLabel_, 0, Qt::AlignLeft);
        layout_->addWidget(durationLabel_, 0, Qt::AlignLeft);
        layout_->addStretch();
        layout_->addWidget(muteBtn_, 0, Qt::AlignCenter);
        layout_->addSpacing(18);

        timer_ = new QTimer(this);
        connect(timer_, &QTimer::timeout, this, [this]() {
                auto seconds = QDateTime::currentSecsSinceEpoch() - callStartTime_;
                int s = seconds % 60;
                int m = (seconds / 60) % 60;
                int h = seconds / 3600;
                char buf[12];
                if (h)
                        snprintf(buf, sizeof(buf), "%.2d:%.2d:%.2d", h, m, s);
                else
                        snprintf(buf, sizeof(buf), "%.2d:%.2d", m, s);
                durationLabel_->setText(buf);
        });

        connect(
          &WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &ActiveCallBar::update);
}

void
ActiveCallBar::setMuteIcon(bool muted)
{
        QIcon icon;
        if (muted) {
                muteBtn_->setToolTip("Unmute Mic");
                icon.addFile(":/icons/icons/ui/microphone-unmute.png");
        } else {
                muteBtn_->setToolTip("Mute Mic");
                icon.addFile(":/icons/icons/ui/microphone-mute.png");
        }
        muteBtn_->setIcon(icon);
        muteBtn_->setIconSize(QSize(buttonSize_, buttonSize_));
}

void
ActiveCallBar::setCallParty(const QString &userid,
                            const QString &displayName,
                            const QString &roomName,
                            const QString &avatarUrl)
{
        callPartyLabel_->setText(" " + (displayName.isEmpty() ? userid : displayName) + " ");

        if (!avatarUrl.isEmpty())
                avatar_->setImage(avatarUrl);
        else
                avatar_->setLetter(utils::firstChar(roomName));
}

void
ActiveCallBar::update(WebRTCSession::State state)
{
        switch (state) {
        case WebRTCSession::State::INITIATING:
                show();
                stateLabel_->setText("Initiating call...");
                break;
        case WebRTCSession::State::INITIATED:
                show();
                stateLabel_->setText("Call initiated...");
                break;
        case WebRTCSession::State::OFFERSENT:
                show();
                stateLabel_->setText("Calling...");
                break;
        case WebRTCSession::State::CONNECTING:
                show();
                stateLabel_->setText("Connecting...");
                break;
        case WebRTCSession::State::CONNECTED:
                show();
                callStartTime_ = QDateTime::currentSecsSinceEpoch();
                timer_->start(1000);
                stateLabel_->setPixmap(
                  QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(buttonSize_, buttonSize_)));
                durationLabel_->setText("00:00");
                durationLabel_->show();
                break;
        case WebRTCSession::State::ICEFAILED:
        case WebRTCSession::State::DISCONNECTED:
                hide();
                timer_->stop();
                callPartyLabel_->setText(QString());
                stateLabel_->setText(QString());
                durationLabel_->setText(QString());
                durationLabel_->hide();
                setMuteIcon(false);
                break;
        default:
                break;
        }
}

@@ -1,40 +0,0 @@
#pragma once

#include <QWidget>

#include "WebRTCSession.h"

class QHBoxLayout;
class QLabel;
class QTimer;
class Avatar;
class FlatButton;

class ActiveCallBar : public QWidget
{
        Q_OBJECT

public:
        ActiveCallBar(QWidget *parent = nullptr);

public slots:
        void update(WebRTCSession::State);
        void setCallParty(const QString &userid,
                          const QString &displayName,
                          const QString &roomName,
                          const QString &avatarUrl);

private:
        QHBoxLayout *layout_ = nullptr;
        Avatar *avatar_ = nullptr;
        QLabel *callPartyLabel_ = nullptr;
        QLabel *stateLabel_ = nullptr;
        QLabel *durationLabel_ = nullptr;
        FlatButton *muteBtn_ = nullptr;
        int buttonSize_ = 22;
        bool muted_ = false;
        qint64 callStartTime_ = 0;
        QTimer *timer_ = nullptr;

        void setMuteIcon(bool muted);
};

@@ -52,7 +52,7 @@ CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
                emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_});
                emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
                QTimer::singleShot(timeoutms_, this, [this]() {
                        if (session_.state() == WebRTCSession::State::OFFERSENT) {
                        if (session_.state() == webrtc::State::OFFERSENT) {
                                hangUp(CallHangUp::Reason::InviteTimeOut);
                                emit ChatPage::instance()->showNotification(
                                  "The remote side failed to pick up.");
@@ -99,13 +99,13 @@ CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
                turnServerTimer_.setInterval(ttl * 1000 * 0.9);
        });

        connect(&session_, &WebRTCSession::stateChanged, this, [this](WebRTCSession::State state) {
        connect(&session_, &WebRTCSession::stateChanged, this, [this](webrtc::State state) {
                switch (state) {
                case WebRTCSession::State::DISCONNECTED:
                case webrtc::State::DISCONNECTED:
                        playRingtone("qrc:/media/media/callend.ogg", false);
                        clear();
                        break;
                case WebRTCSession::State::ICEFAILED: {
                case webrtc::State::ICEFAILED: {
                        QString error("Call connection failed.");
                        if (turnURIs_.empty())
                                error += " Your homeserver has no configured TURN server.";
@@ -152,13 +152,6 @@ CallManager::sendInvite(const QString &roomid)

        generateCallID();
        nhlog::ui()->debug("WebRTC: call id: {} - creating invite", callid_);
        std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
        const RoomMember &callee =
          members.front().user_id == utils::localUser() ? members.back() : members.front();
        emit newCallParty(callee.user_id,
                          callee.display_name,
                          QString::fromStdString(roomInfo.name),
                          QString::fromStdString(roomInfo.avatar_url));
        playRingtone("qrc:/media/media/ringback.ogg", true);
        if (!session_.createOffer()) {
                emit ChatPage::instance()->showNotification("Problem setting up call.");
@@ -195,7 +188,7 @@ CallManager::hangUp(CallHangUp::Reason reason)
bool
CallManager::onActiveCall()
{
        return session_.state() != WebRTCSession::State::DISCONNECTED;
        return session_.state() != webrtc::State::DISCONNECTED;
}

void
@@ -259,11 +252,6 @@ CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
        std::vector<RoomMember> members(cache::getMembers(callInviteEvent.room_id));
        const RoomMember &caller =
          members.front().user_id == utils::localUser() ? members.back() : members.front();
        emit newCallParty(caller.user_id,
                          caller.display_name,
                          QString::fromStdString(roomInfo.name),
                          QString::fromStdString(roomInfo.avatar_url));

        auto dialog = new dialogs::AcceptCall(caller.user_id,
                                              caller.display_name,
                                              QString::fromStdString(roomInfo.name),

@@ -41,10 +41,6 @@ signals:
        void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
        void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
        void turnServerRetrieved(const mtx::responses::TurnServer &);
        void newCallParty(const QString &userid,
                          const QString &displayName,
                          const QString &roomName,
                          const QString &avatarUrl);

private slots:
        void retrieveTurnServer();

@@ -22,7 +22,6 @@
#include <QShortcut>
#include <QtConcurrent>

#include "ActiveCallBar.h"
#include "AvatarProvider.h"
#include "Cache.h"
#include "Cache_p.h"
@@ -40,7 +39,6 @@
#include "UserInfoWidget.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "ui/OverlayModal.h"
#include "ui/Theme.h"
@@ -129,12 +127,6 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)

        contentLayout_->addWidget(view_manager_->getWidget());

        activeCallBar_ = new ActiveCallBar(this);
        contentLayout_->addWidget(activeCallBar_);
        activeCallBar_->hide();
        connect(
          &callManager_, &CallManager::newCallParty, activeCallBar_, &ActiveCallBar::setCallParty);

        // Splitter
        splitter->addWidget(sideBar_);
        splitter->addWidget(content_);

@@ -41,7 +41,6 @@
#include "notifications/Manager.h"
#include "popups/UserMentions.h"

class ActiveCallBar;
class OverlayModal;
class QuickSwitcher;
class RoomList;
@@ -235,7 +234,6 @@ private:
        SideBarActions *sidebarActions_;

        TextInputWidget *text_input_;
        ActiveCallBar *activeCallBar_;

        QTimer connectivityTimer_;
        std::atomic_bool isConnected_;

@@ -288,7 +288,7 @@ MainWindow::showChatPage()
void
MainWindow::closeEvent(QCloseEvent *event)
{
        if (WebRTCSession::instance().state() != WebRTCSession::State::DISCONNECTED) {
        if (WebRTCSession::instance().state() != webrtc::State::DISCONNECTED) {
                if (QMessageBox::question(this, "nheko", "A call is in progress. Quit?") !=
                    QMessageBox::Yes) {
                        event->ignore();
@@ -440,7 +440,7 @@ MainWindow::openLogoutDialog()
{
        auto dialog = new dialogs::Logout(this);
        connect(dialog, &dialogs::Logout::loggingOut, this, [this]() {
                if (WebRTCSession::instance().state() != WebRTCSession::State::DISCONNECTED) {
                if (WebRTCSession::instance().state() != webrtc::State::DISCONNECTED) {
                        if (QMessageBox::question(
                              this, "nheko", "A call is in progress. Log out?") !=
                            QMessageBox::Yes) {

@@ -560,7 +560,7 @@ TextInputWidget::TextInputWidget(QWidget *parent)

#ifdef GSTREAMER_AVAILABLE
        callBtn_ = new FlatButton(this);
        changeCallButtonState(WebRTCSession::State::DISCONNECTED);
        changeCallButtonState(webrtc::State::DISCONNECTED);
        connect(&WebRTCSession::instance(),
                &WebRTCSession::stateChanged,
                this,
@@ -776,11 +776,11 @@ TextInputWidget::paintEvent(QPaintEvent *)
}

void
TextInputWidget::changeCallButtonState(WebRTCSession::State state)
TextInputWidget::changeCallButtonState(webrtc::State state)
{
        QIcon icon;
        if (state == WebRTCSession::State::ICEFAILED ||
            state == WebRTCSession::State::DISCONNECTED) {
        if (state == webrtc::State::ICEFAILED ||
            state == webrtc::State::DISCONNECTED) {
                callBtn_->setToolTip(tr("Place a call"));
                icon.addFile(":/icons/icons/ui/place-call.png");
        } else {

@@ -164,7 +164,7 @@ public slots:
        void openFileSelection();
        void hideUploadSpinner();
        void focusLineEdit() { input_->setFocus(); }
        void changeCallButtonState(WebRTCSession::State);
        void changeCallButtonState(webrtc::State);

private slots:
        void addSelectedEmoji(const QString &emoji);

@@ -1,4 +1,5 @@
#include <cctype>
#include <QQmlEngine>

#include "Logging.h"
#include "WebRTCSession.h"
@@ -14,12 +15,22 @@ extern "C"
}
#endif

Q_DECLARE_METATYPE(WebRTCSession::State)
Q_DECLARE_METATYPE(webrtc::State)

using webrtc::State;

WebRTCSession::WebRTCSession()
  : QObject()
{
        qRegisterMetaType<WebRTCSession::State>();
        qRegisterMetaType<webrtc::State>();
        qmlRegisterUncreatableMetaObject(
          webrtc::staticMetaObject,
          "im.nheko",
          1,
          0,
          "WebRTCState",
          "Can't instantiate enum");

        connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState);
        init();
}
@@ -247,11 +258,11 @@ iceGatheringStateChanged(GstElement *webrtc,
                if (isoffering_) {
                        emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
                        emit WebRTCSession::instance().stateChanged(
                          WebRTCSession::State::OFFERSENT);
                          State::OFFERSENT);
                } else {
                        emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
                        emit WebRTCSession::instance().stateChanged(
                          WebRTCSession::State::ANSWERSENT);
                          State::ANSWERSENT);
                }
        }
}
@@ -264,10 +275,10 @@ onICEGatheringCompletion(gpointer timerid)
        *(guint *)(timerid) = 0;
        if (isoffering_) {
                emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT);
                emit WebRTCSession::instance().stateChanged(State::OFFERSENT);
        } else {
                emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ANSWERSENT);
                emit WebRTCSession::instance().stateChanged(State::ANSWERSENT);
        }
        return FALSE;
}
@@ -285,7 +296,7 @@ addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED,
        localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate});
        return;
#else
        if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) {
        if (WebRTCSession::instance().state() >= State::OFFERSENT) {
                emit WebRTCSession::instance().newICECandidate(
                  {"audio", (uint16_t)mlineIndex, candidate});
                return;
@@ -314,11 +325,11 @@ iceConnectionStateChanged(GstElement *webrtc,
        switch (newState) {
        case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING:
                nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking");
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING);
                emit WebRTCSession::instance().stateChanged(State::CONNECTING);
                break;
        case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED:
                nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed");
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED);
                emit WebRTCSession::instance().stateChanged(State::ICEFAILED);
                break;
        default:
                break;
@@ -356,7 +367,7 @@ linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe
                nhlog::ui()->error("WebRTC: unable to link new pad");
        else {
                emit WebRTCSession::instance().stateChanged(
                  WebRTCSession::State::CONNECTED);
                  State::CONNECTED);
        }
        gst_object_unref(queuepad);
}
@@ -633,21 +644,17 @@ WebRTCSession::createPipeline(int opusPayloadType)
}

bool
WebRTCSession::toggleMuteAudioSrc(bool &isMuted)
WebRTCSession::toggleMuteAudioSource()
{
        if (state_ < State::INITIATED)
                return false;

        GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel");
        if (!srclevel)
                return false;

        gboolean muted;
        g_object_get(srclevel, "mute", &muted, nullptr);
        g_object_set(srclevel, "mute", !muted, nullptr);
        gst_object_unref(srclevel);
        isMuted = !muted;
        return true;
        return !muted;
}

void
@@ -778,7 +785,7 @@ WebRTCSession::createPipeline(int)
}

bool
WebRTCSession::toggleMuteAudioSrc(bool &)
WebRTCSession::toggleMuteAudioSource()
{
        return false;
}

@@ -9,13 +9,11 @@

typedef struct _GstElement GstElement;

class WebRTCSession : public QObject
{
        Q_OBJECT
namespace webrtc {
Q_NAMESPACE

public:
        enum class State
        {
enum class State
{
        DISCONNECTED,
        ICEFAILED,
        INITIATING,
@@ -24,8 +22,17 @@ public:
        ANSWERSENT,
        CONNECTING,
        CONNECTED
        };

};
Q_ENUM_NS(State)

}

class WebRTCSession : public QObject
{
        Q_OBJECT

public:
        static WebRTCSession &instance()
        {
                static WebRTCSession instance;
@@ -33,14 +40,14 @@
        }

        bool init(std::string *errorMessage = nullptr);
        State state() const { return state_; }
        webrtc::State state() const { return state_; }

        bool createOffer();
        bool acceptOffer(const std::string &sdp);
        bool acceptAnswer(const std::string &sdp);
        void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &);

        bool toggleMuteAudioSrc(bool &isMuted);
        bool toggleMuteAudioSource();
        void end();

        void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; }
@@ -55,16 +62,16 @@ signals:
        void answerCreated(const std::string &sdp,
                           const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
        void newICECandidate(const mtx::events::msg::CallCandidates::Candidate &);
        void stateChanged(WebRTCSession::State); // explicit qualifier necessary for Qt
        void stateChanged(webrtc::State);

private slots:
        void setState(State state) { state_ = state; }
        void setState(webrtc::State state) { state_ = state; }

private:
        WebRTCSession();

        bool initialised_ = false;
        State state_ = State::DISCONNECTED;
        webrtc::State state_ = webrtc::State::DISCONNECTED;
        GstElement *pipe_ = nullptr;
        GstElement *webrtc_ = nullptr;
        unsigned int busWatchId_ = 0;

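The webrtc::State enum declared above is what WebRTCSession.cpp registers with the QML engine (import im.nheko, type name WebRTCState), which is what lets QML such as the new call bar react to call-state changes. A minimal, hypothetical sketch of that usage, assuming timelineManager is the TimelineViewManager instance exposed to QML with the callState property added in this commit:

    // Hypothetical snippet, not part of the commit: "timelineManager" and the
    // WebRTCState type come from the registrations shown in the hunks above.
    import QtQuick 2.9
    import QtQuick.Controls 2.3
    import im.nheko 1.0

    Label {
        visible: timelineManager.callState != WebRTCState.DISCONNECTED
        text: timelineManager.callState == WebRTCState.CONNECTED ? qsTr("Call connected") : qsTr("Call in progress...")
    }
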
@@ -141,6 +141,8 @@ TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettin
                isInitialSync_ = true;
                emit initialSyncChanged(true);
        });
        connect(
          &WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &TimelineViewManager::callStateChanged);
}

void

@@ -13,6 +13,7 @@
#include "Logging.h"
#include "TimelineModel.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "emoji/EmojiModel.h"
#include "emoji/Provider.h"
@@ -33,6 +34,8 @@ class TimelineViewManager : public QObject
          bool isInitialSync MEMBER isInitialSync_ READ isInitialSync NOTIFY initialSyncChanged)
        Q_PROPERTY(
          bool isNarrowView MEMBER isNarrowView_ READ isNarrowView NOTIFY narrowViewChanged)
        Q_PROPERTY(
          webrtc::State callState READ callState NOTIFY callStateChanged)

public:
        TimelineViewManager(QSharedPointer<UserSettings> userSettings,
@@ -48,6 +51,8 @@ public:
        Q_INVOKABLE TimelineModel *activeTimeline() const { return timeline_; }
        Q_INVOKABLE bool isInitialSync() const { return isInitialSync_; }
        bool isNarrowView() const { return isNarrowView_; }
        webrtc::State callState() const { return WebRTCSession::instance().state(); }
        Q_INVOKABLE bool toggleMuteAudioSource() { return WebRTCSession::instance().toggleMuteAudioSource(); }
        Q_INVOKABLE void openImageOverlay(QString mxcUrl, QString eventId) const;
        Q_INVOKABLE QColor userColor(QString id, QColor background);
        Q_INVOKABLE QString escapeEmoji(QString str) const;
@@ -72,6 +77,7 @@ signals:
        void inviteUsers(QStringList users);
        void showRoomList();
        void narrowViewChanged();
        void callStateChanged(webrtc::State);

public slots:
        void updateReadReceipts(const QString &room_id, const std::vector<QString> &event_ids);