Merge master and fix conflicts

Joseph Donofry 2021-01-11 17:51:39 -05:00
commit 3ff8b3ad8c
GPG key ID: E8A1D78EF044B0CB
44 changed files with 1038 additions and 684 deletions


@ -1,12 +1,12 @@
tap "nlohmann/json"
brew "python3"
brew "pkg-config"
brew "clang-format"
brew "cmake"
brew "ninja"
brew "openssl"
brew "qt5"
brew "python3"
brew "nlohmann_json"
brew "gstreamer"
brew "gst-plugins-base"


@ -52,7 +52,8 @@ build-macos:
tags: [macos]
before_script:
- brew update
- brew bundle --file=./.ci/macos/Brewfile
- brew reinstall --force python3
- brew bundle --file=./.ci/macos/Brewfile --force --cleanup
- pip3 install dmgbuild
script:
- export PATH=/usr/local/opt/qt/bin/:${PATH}
@ -95,8 +96,8 @@ build-flatpak-amd64:
- export VERSION=$(git describe)
- mkdir -p build-flatpak
- cd build-flatpak
- flatpak-builder --user --disable-rofiles-fuse --ccache --repo=repo --default-branch=${CI_COMMIT_REF_NAME} --subject="Build of Nheko ${VERSION} `date`" app ../io.github.NhekoReborn.Nheko.json
- flatpak build-bundle repo nheko-amd64.flatpak io.github.NhekoReborn.Nheko ${CI_COMMIT_REF_NAME}
- flatpak-builder --user --disable-rofiles-fuse --ccache --repo=repo --default-branch=${CI_COMMIT_REF_NAME//\//_} --subject="Build of Nheko ${VERSION} `date`" app ../io.github.NhekoReborn.Nheko.json
- flatpak build-bundle repo nheko-amd64.flatpak io.github.NhekoReborn.Nheko ${CI_COMMIT_REF_NAME//\//_}
after_script:
- bash ./.ci/upload-nightly-gitlab.sh build-flatpak/nheko-amd64.flatpak
cache:
@ -122,8 +123,8 @@ build-flatpak-arm64:
- export VERSION=$(git describe)
- mkdir -p build-flatpak
- cd build-flatpak
- flatpak-builder --user --disable-rofiles-fuse --ccache --repo=repo --default-branch=${CI_COMMIT_REF_NAME} --subject="Build of Nheko ${VERSION} `date` for arm64" app ../io.github.NhekoReborn.Nheko.json
- flatpak build-bundle repo nheko-arm64.flatpak io.github.NhekoReborn.Nheko ${CI_COMMIT_REF_NAME}
- flatpak-builder --user --disable-rofiles-fuse --ccache --repo=repo --default-branch=${CI_COMMIT_REF_NAME//\//_} --subject="Build of Nheko ${VERSION} `date` for arm64" app ../io.github.NhekoReborn.Nheko.json
- flatpak build-bundle repo nheko-arm64.flatpak io.github.NhekoReborn.Nheko ${CI_COMMIT_REF_NAME//\//_}
after_script:
- bash ./.ci/upload-nightly-gitlab.sh build-flatpak/nheko-arm64.flatpak
cache:


@ -245,7 +245,6 @@ configure_file(cmake/nheko.h config/nheko.h)
#
set(SRC_FILES
# Dialogs
src/dialogs/AcceptCall.cpp
src/dialogs/CreateRoom.cpp
src/dialogs/FallbackAuth.cpp
src/dialogs/ImageOverlay.cpp
@ -254,7 +253,6 @@ set(SRC_FILES
src/dialogs/LeaveRoom.cpp
src/dialogs/Logout.cpp
src/dialogs/MemberList.cpp
src/dialogs/PlaceCall.cpp
src/dialogs/PreviewUploadOverlay.cpp
src/dialogs/ReCaptcha.cpp
src/dialogs/ReadReceipts.cpp
@ -356,7 +354,7 @@ if(USE_BUNDLED_MTXCLIENT)
FetchContent_Declare(
MatrixClient
GIT_REPOSITORY https://github.com/Nheko-Reborn/mtxclient.git
GIT_TAG ce8bc9c3dd6bba432e716f55136133111b0186e7
GIT_TAG cad81d1677a4845366b93112f8f2e267ee8c9ae0
)
set(BUILD_LIB_EXAMPLES OFF CACHE INTERNAL "")
set(BUILD_LIB_TESTS OFF CACHE INTERNAL "")
@ -445,7 +443,12 @@ else()
endif()
include(FindPkgConfig)
pkg_check_modules(GSTREAMER IMPORTED_TARGET gstreamer-sdp-1.0>=1.14 gstreamer-webrtc-1.0>=1.14)
pkg_check_modules(GSTREAMER IMPORTED_TARGET gstreamer-sdp-1.0>=1.16 gstreamer-webrtc-1.0>=1.16)
if (TARGET PkgConfig::GSTREAMER)
add_feature_info(voip ON "GStreamer found. Call support is enabled automatically.")
else()
add_feature_info(voip OFF "GStreamer could not be found on your system. As a consequence call support has been disabled. If you don't want that, make sure gstreamer-sdp-1.0>=1.16 gstreamer-webrtc-1.0>=1.16 can be found via pkgconfig.")
endif()
# single instance functionality
set(QAPPLICATION_CLASS QApplication CACHE STRING "Inheritance class for SingleApplication")
@ -455,7 +458,6 @@ feature_summary(WHAT ALL INCLUDE_QUIET_PACKAGES FATAL_ON_MISSING_REQUIRED_PACKAG
qt5_wrap_cpp(MOC_HEADERS
# Dialogs
src/dialogs/AcceptCall.h
src/dialogs/CreateRoom.h
src/dialogs/FallbackAuth.h
src/dialogs/ImageOverlay.h
@ -464,7 +466,6 @@ qt5_wrap_cpp(MOC_HEADERS
src/dialogs/LeaveRoom.h
src/dialogs/Logout.h
src/dialogs/MemberList.h
src/dialogs/PlaceCall.h
src/dialogs/PreviewUploadOverlay.h
src/dialogs/RawMessage.h
src/dialogs/ReCaptcha.h


@ -28,7 +28,7 @@ Most of the features you would expect from a chat application are missing right
but we are getting close to a more feature complete client.
Specifically there is support for:
- E2E encryption.
- VoIP calls (voice & video)
- VoIP calls (voice & video).
- User registration.
- Creating, joining & leaving rooms.
- Sending & receiving invites.
@ -210,6 +210,14 @@ sudo apt install cmake gcc make automake liblmdb-dev \
qt5keychain-dev
```
##### Fedora
```bash
sudo dnf install qt5-qtbase-devel qt5-linguist qt5-qtsvg-devel qt5-qtmultimedia-devel \
qt5-qtquickcontrols2-devel qtkeychain-qt5-devel spdlog-devel openssl-devel \
libolm-devel cmark-devel lmdb-devel lmdbxx-devel tweeny-devel
```
##### Guix
```bash


@ -97,7 +97,8 @@
{
"config-opts": [
"-DCMAKE_BUILD_TYPE=Release",
"-DBUILD_TEST_APPLICATION=OFF"
"-DBUILD_TEST_APPLICATION=OFF",
"-DQTKEYCHAIN_STATIC=ON"
],
"buildsystem": "cmake-ninja",
"name": "QtKeychain",
@ -124,16 +125,6 @@
}
]
},
{
"name": "sodium",
"sources": [
{
"sha256": "6f504490b342a4f8a4c4a02fc9b866cbef8622d5df4e5452b46be121e46636c1",
"type": "archive",
"url": "https://github.com/jedisct1/libsodium/releases/download/1.0.18-RELEASE/libsodium-1.0.18.tar.gz"
}
]
},
{
"build-commands": [
"./bootstrap.sh --with-libraries=thread,system,iostreams --prefix=/app",
@ -161,7 +152,7 @@
"name": "mtxclient",
"sources": [
{
"commit": "ce8bc9c3dd6bba432e716f55136133111b0186e7",
"commit": "cad81d1677a4845366b93112f8f2e267ee8c9ae0",
"type": "git",
"url": "https://github.com/Nheko-Reborn/mtxclient.git"
}


@ -8,3 +8,4 @@ Type=Application
Categories=Network;InstantMessaging;Qt;
StartupWMClass=nheko
Terminal=false
MimeType=x-scheme-handler/matrix;


@ -1,3 +1,4 @@
import "./voip"
import QtQuick 2.9
import QtQuick.Controls 2.3
import QtQuick.Layouts 1.2
@ -10,6 +11,14 @@ Rectangle {
Layout.preferredHeight: textInput.height
Layout.minimumHeight: 40
Component {
id: placeCallDialog
PlaceCall {
}
}
RowLayout {
id: inputBar
@ -17,18 +26,31 @@ Rectangle {
spacing: 16
ImageButton {
visible: TimelineManager.callsSupported
visible: CallManager.callsSupported
opacity: CallManager.haveCallInvite ? 0.3 : 1
Layout.alignment: Qt.AlignBottom
hoverEnabled: true
width: 22
height: 22
image: TimelineManager.isOnCall ? ":/icons/icons/ui/end-call.png" : ":/icons/icons/ui/place-call.png"
image: CallManager.isOnCall ? ":/icons/icons/ui/end-call.png" : ":/icons/icons/ui/place-call.png"
ToolTip.visible: hovered
ToolTip.text: TimelineManager.isOnCall ? qsTr("Hang up") : qsTr("Place a call")
ToolTip.text: CallManager.isOnCall ? qsTr("Hang up") : qsTr("Place a call")
Layout.topMargin: 8
Layout.bottomMargin: 8
Layout.leftMargin: 16
onClicked: TimelineManager.timeline.input.callButton()
onClicked: {
if (TimelineManager.timeline) {
if (CallManager.haveCallInvite) {
return ;
} else if (CallManager.isOnCall) {
CallManager.hangUp();
} else {
CallManager.refreshDevices();
var dialog = placeCallDialog.createObject(timelineRoot);
dialog.open();
}
}
}
}
ImageButton {
@ -39,7 +61,7 @@ Rectangle {
image: ":/icons/icons/ui/paper-clip-outline.png"
Layout.topMargin: 8
Layout.bottomMargin: 8
Layout.leftMargin: TimelineManager.callsSupported ? 0 : 16
Layout.leftMargin: CallManager.callsSupported ? 0 : 16
onClicked: TimelineManager.timeline.input.openFileSelection()
ToolTip.visible: hovered
ToolTip.text: qsTr("Send a file")


@ -140,6 +140,15 @@ ListView {
}
Label {
color: colors.buttonText
text: TimelineManager.userStatus(modelData.userId)
textFormat: Text.PlainText
elide: Text.ElideRight
width: chat.delegateMaxWidth - parent.spacing * 2 - userName.implicitWidth - avatarSize
font.italic: true
}
}
}


@ -1,6 +1,7 @@
import "./delegates"
import "./device-verification"
import "./emoji"
import "./voip"
import QtGraphicalEffects 1.0
import QtQuick 2.9
import QtQuick.Controls 2.3
@ -210,7 +211,7 @@ Page {
}
Loader {
source: TimelineManager.onVideoCall ? "VideoCall.qml" : ""
source: CallManager.isOnCall && CallManager.isVideo ? "voip/VideoCall.qml" : ""
onLoaded: TimelineManager.setVideoCallItem()
}
@ -223,6 +224,13 @@ Page {
}
CallInviteBar {
id: callInviteBar
Layout.fillWidth: true
z: 3
}
ActiveCallBar {
Layout.fillWidth: true
z: 3


@ -116,10 +116,10 @@ Item {
]
Connections {
function onPressed(mouse) {
// Button
// Default to center
function onPressed(mouse) {
// MouseArea
if (mouse) {
ripple.centerX = mouse.x;


@ -1,19 +1,18 @@
import "../"
import QtQuick 2.9
import QtQuick.Controls 2.3
import QtQuick.Layouts 1.2
import im.nheko 1.0
Rectangle {
id: activeCallBar
visible: TimelineManager.callState != WebRTCState.DISCONNECTED
color: "#2ECC71"
visible: CallManager.isOnCall
color: callInviteBar.color
implicitHeight: visible ? rowLayout.height + 8 : 0
MouseArea {
anchors.fill: parent
onClicked: {
if (TimelineManager.onVideoCall)
if (CallManager.isVideo)
stackLayout.currentIndex = stackLayout.currentIndex ? 0 : 1;
}
@ -30,63 +29,66 @@ Rectangle {
Avatar {
width: avatarSize
height: avatarSize
url: TimelineManager.callPartyAvatarUrl.replace("mxc://", "image://MxcImage/")
displayName: TimelineManager.callPartyName
url: CallManager.callPartyAvatarUrl.replace("mxc://", "image://MxcImage/")
displayName: CallManager.callParty
}
Label {
Layout.leftMargin: 8
font.pointSize: fontMetrics.font.pointSize * 1.1
text: " " + TimelineManager.callPartyName + " "
text: CallManager.callParty
color: "#000000"
}
Image {
Layout.leftMargin: 4
Layout.preferredWidth: 24
Layout.preferredHeight: 24
source: TimelineManager.onVideoCall ? "qrc:/icons/icons/ui/video-call.png" : "qrc:/icons/icons/ui/place-call.png"
source: CallManager.isVideo ? "qrc:/icons/icons/ui/video-call.png" : "qrc:/icons/icons/ui/place-call.png"
}
Label {
id: callStateLabel
font.pointSize: fontMetrics.font.pointSize * 1.1
color: "#000000"
}
Item {
state: TimelineManager.callState
states: [
State {
name: "OFFERSENT"
when: state == WebRTCState.OFFERSENT
when: CallManager.callState == WebRTCState.OFFERSENT
PropertyChanges {
target: callStateLabel
text: "Calling..."
text: qsTr("Calling...")
}
},
State {
name: "CONNECTING"
when: state == WebRTCState.CONNECTING
when: CallManager.callState == WebRTCState.CONNECTING
PropertyChanges {
target: callStateLabel
text: "Connecting..."
text: qsTr("Connecting...")
}
},
State {
name: "ANSWERSENT"
when: state == WebRTCState.ANSWERSENT
when: CallManager.callState == WebRTCState.ANSWERSENT
PropertyChanges {
target: callStateLabel
text: "Connecting..."
text: qsTr("Connecting...")
}
},
State {
name: "CONNECTED"
when: state == WebRTCState.CONNECTED
when: CallManager.callState == WebRTCState.CONNECTED
PropertyChanges {
target: callStateLabel
@ -100,13 +102,13 @@ Rectangle {
PropertyChanges {
target: stackLayout
currentIndex: TimelineManager.onVideoCall ? 1 : 0
currentIndex: CallManager.isVideo ? 1 : 0
}
},
State {
name: "DISCONNECTED"
when: state == WebRTCState.DISCONNECTED
when: CallManager.callState == WebRTCState.DISCONNECTED
PropertyChanges {
target: callStateLabel
@ -132,7 +134,7 @@ Rectangle {
}
interval: 1000
running: TimelineManager.callState == WebRTCState.CONNECTED
running: CallManager.callState == WebRTCState.CONNECTED
repeat: true
onTriggered: {
var d = new Date();
@ -149,34 +151,28 @@ Rectangle {
}
ImageButton {
visible: TimelineManager.onVideoCall
visible: CallManager.haveLocalVideo
width: 24
height: 24
buttonTextColor: "#000000"
image: ":/icons/icons/ui/toggle-camera-view.png"
hoverEnabled: true
ToolTip.visible: hovered
ToolTip.text: "Toggle camera view"
onClicked: TimelineManager.toggleCameraView()
}
Item {
implicitWidth: 8
ToolTip.text: qsTr("Toggle camera view")
onClicked: CallManager.toggleCameraView()
}
ImageButton {
Layout.leftMargin: 8
Layout.rightMargin: 16
width: 24
height: 24
buttonTextColor: "#000000"
image: TimelineManager.isMicMuted ? ":/icons/icons/ui/microphone-unmute.png" : ":/icons/icons/ui/microphone-mute.png"
image: CallManager.isMicMuted ? ":/icons/icons/ui/microphone-unmute.png" : ":/icons/icons/ui/microphone-mute.png"
hoverEnabled: true
ToolTip.visible: hovered
ToolTip.text: TimelineManager.isMicMuted ? qsTr("Unmute Mic") : qsTr("Mute Mic")
onClicked: TimelineManager.toggleMicMute()
}
Item {
implicitWidth: 16
ToolTip.text: CallManager.isMicMuted ? qsTr("Unmute Mic") : qsTr("Mute Mic")
onClicked: CallManager.toggleMicMute()
}
}


@ -0,0 +1,78 @@
import QtQuick 2.9
import QtQuick.Controls 2.3
import QtQuick.Layouts 1.2
import im.nheko 1.0
Popup {
modal: true
anchors.centerIn: parent
palette: colors
ColumnLayout {
spacing: 16
ColumnLayout {
spacing: 8
Layout.topMargin: 8
Layout.leftMargin: 8
Layout.rightMargin: 8
RowLayout {
Image {
Layout.preferredWidth: 22
Layout.preferredHeight: 22
source: "image://colorimage/:/icons/icons/ui/microphone-unmute.png?" + colors.windowText
}
ComboBox {
id: micCombo
Layout.fillWidth: true
model: CallManager.mics
}
}
RowLayout {
visible: CallManager.isVideo && CallManager.cameras.length > 0
Image {
Layout.preferredWidth: 22
Layout.preferredHeight: 22
source: "image://colorimage/:/icons/icons/ui/video-call.png?" + colors.windowText
}
ComboBox {
id: cameraCombo
Layout.fillWidth: true
model: CallManager.cameras
}
}
}
DialogButtonBox {
Layout.leftMargin: 128
standardButtons: DialogButtonBox.Ok | DialogButtonBox.Cancel
onAccepted: {
Settings.microphone = micCombo.currentText;
if (cameraCombo.visible)
Settings.camera = cameraCombo.currentText;
close();
}
onRejected: {
close();
}
}
}
background: Rectangle {
color: colors.window
border.color: colors.windowText
}
}


@ -0,0 +1,128 @@
import "../"
import QtQuick 2.9
import QtQuick.Controls 2.3
import QtQuick.Layouts 1.2
import im.nheko 1.0
Rectangle {
visible: CallManager.haveCallInvite
color: "#2ECC71"
implicitHeight: visible ? rowLayout.height + 8 : 0
Component {
id: devicesDialog
CallDevices {
}
}
Component {
id: deviceError
DeviceError {
}
}
RowLayout {
id: rowLayout
anchors.left: parent.left
anchors.right: parent.right
anchors.verticalCenter: parent.verticalCenter
anchors.leftMargin: 8
Avatar {
width: avatarSize
height: avatarSize
url: CallManager.callPartyAvatarUrl.replace("mxc://", "image://MxcImage/")
displayName: CallManager.callParty
}
Label {
Layout.leftMargin: 8
font.pointSize: fontMetrics.font.pointSize * 1.1
text: CallManager.callParty
color: "#000000"
}
Image {
Layout.leftMargin: 4
Layout.preferredWidth: 24
Layout.preferredHeight: 24
source: CallManager.isVideo ? "qrc:/icons/icons/ui/video-call.png" : "qrc:/icons/icons/ui/place-call.png"
}
Label {
font.pointSize: fontMetrics.font.pointSize * 1.1
text: CallManager.isVideo ? qsTr("Video Call") : qsTr("Voice Call")
color: "#000000"
}
Item {
Layout.fillWidth: true
}
ImageButton {
Layout.rightMargin: 16
width: 20
height: 20
buttonTextColor: "#000000"
image: ":/icons/icons/ui/settings.png"
hoverEnabled: true
ToolTip.visible: hovered
ToolTip.text: qsTr("Devices")
onClicked: {
CallManager.refreshDevices();
var dialog = devicesDialog.createObject(timelineRoot);
dialog.open();
}
}
Button {
Layout.rightMargin: 4
icon.source: CallManager.isVideo ? "qrc:/icons/icons/ui/video-call.png" : "qrc:/icons/icons/ui/place-call.png"
text: qsTr(" Accept ")
palette: colors
onClicked: {
if (CallManager.mics.length == 0) {
var dialog = deviceError.createObject(timelineRoot, {
"errorString": qsTr("No microphone found."),
"image": ":/icons/icons/ui/place-call.png"
});
dialog.open();
return ;
} else if (!CallManager.mics.includes(Settings.microphone)) {
var dialog = deviceError.createObject(timelineRoot, {
"errorString": qsTr("Unknown microphone: ") + Settings.microphone,
"image": ":/icons/icons/ui/place-call.png"
});
dialog.open();
return ;
}
if (CallManager.isVideo && CallManager.cameras.length > 0 && !CallManager.cameras.includes(Settings.camera)) {
var dialog = deviceError.createObject(timelineRoot, {
"errorString": qsTr("Unknown camera: ") + Settings.camera,
"image": ":/icons/icons/ui/video-call.png"
});
dialog.open();
return ;
}
CallManager.acceptInvite();
}
}
Button {
Layout.rightMargin: 16
icon.source: "qrc:/icons/icons/ui/end-call.png"
text: qsTr(" Decline ")
palette: colors
onClicked: {
CallManager.hangUp();
}
}
}
}


@ -0,0 +1,32 @@
import QtQuick 2.9
import QtQuick.Controls 2.3
import QtQuick.Layouts 1.2
import im.nheko 1.0
Popup {
property string errorString
property var image
modal: true
anchors.centerIn: parent
RowLayout {
Image {
Layout.preferredWidth: 16
Layout.preferredHeight: 16
source: "image://colorimage/" + image + "?" + colors.windowText
}
Label {
text: errorString
color: colors.windowText
}
}
background: Rectangle {
color: colors.window
border.color: colors.windowText
}
}


@ -0,0 +1,154 @@
import "../"
import QtQuick 2.9
import QtQuick.Controls 2.3
import QtQuick.Layouts 1.2
import im.nheko 1.0
Popup {
modal: true
anchors.centerIn: parent
palette: colors
Component {
id: deviceError
DeviceError {
}
}
ColumnLayout {
id: columnLayout
spacing: 16
RowLayout {
Layout.topMargin: 8
Layout.leftMargin: 8
Label {
text: qsTr("Place a call to ") + TimelineManager.timeline.roomName + "?"
color: colors.windowText
}
Item {
Layout.fillWidth: true
}
}
RowLayout {
id: buttonLayout
function validateMic() {
if (CallManager.mics.length == 0) {
var dialog = deviceError.createObject(timelineRoot, {
"errorString": qsTr("No microphone found."),
"image": ":/icons/icons/ui/place-call.png"
});
dialog.open();
return false;
}
return true;
}
Layout.leftMargin: 8
Layout.rightMargin: 8
Avatar {
Layout.rightMargin: cameraCombo.visible ? 16 : 64
width: avatarSize
height: avatarSize
url: TimelineManager.timeline.roomAvatarUrl.replace("mxc://", "image://MxcImage/")
displayName: TimelineManager.timeline.roomName
}
Button {
text: qsTr(" Voice ")
icon.source: "qrc:/icons/icons/ui/place-call.png"
onClicked: {
if (buttonLayout.validateMic()) {
Settings.microphone = micCombo.currentText;
CallManager.sendInvite(TimelineManager.timeline.roomId(), false);
close();
}
}
}
Button {
visible: CallManager.cameras.length > 0
text: qsTr(" Video ")
icon.source: "qrc:/icons/icons/ui/video-call.png"
onClicked: {
if (buttonLayout.validateMic()) {
Settings.microphone = micCombo.currentText;
Settings.camera = cameraCombo.currentText;
CallManager.sendInvite(TimelineManager.timeline.roomId(), true);
close();
}
}
}
Button {
text: qsTr("Cancel")
onClicked: {
close();
}
}
}
ColumnLayout {
spacing: 8
RowLayout {
Layout.leftMargin: 8
Layout.rightMargin: 8
Layout.bottomMargin: cameraCombo.visible ? 0 : 8
Image {
Layout.preferredWidth: 22
Layout.preferredHeight: 22
source: "image://colorimage/:/icons/icons/ui/microphone-unmute.png?" + colors.windowText
}
ComboBox {
id: micCombo
Layout.fillWidth: true
model: CallManager.mics
}
}
RowLayout {
visible: CallManager.cameras.length > 0
Layout.leftMargin: 8
Layout.rightMargin: 8
Layout.bottomMargin: 8
Image {
Layout.preferredWidth: 22
Layout.preferredHeight: 22
source: "image://colorimage/:/icons/icons/ui/video-call.png?" + colors.windowText
}
ComboBox {
id: cameraCombo
Layout.fillWidth: true
model: CallManager.cameras
}
}
}
}
background: Rectangle {
color: colors.window
border.color: colors.windowText
}
}


@ -123,7 +123,6 @@
<file>qtquickcontrols2.conf</file>
<file>qml/TimelineView.qml</file>
<file>qml/ActiveCallBar.qml</file>
<file>qml/Avatar.qml</file>
<file>qml/Completer.qml</file>
<file>qml/EncryptionIndicator.qml</file>
@ -139,7 +138,6 @@
<file>qml/TimelineRow.qml</file>
<file>qml/TopBar.qml</file>
<file>qml/TypingIndicator.qml</file>
<file>qml/VideoCall.qml</file>
<file>qml/emoji/EmojiButton.qml</file>
<file>qml/emoji/EmojiPicker.qml</file>
<file>qml/UserProfile.qml</file>
@ -159,7 +157,16 @@
<file>qml/device-verification/NewVerificationRequest.qml</file>
<file>qml/device-verification/Failed.qml</file>
<file>qml/device-verification/Success.qml</file>
<file>qml/ui/Ripple.qml</file>
<file>qml/voip/ActiveCallBar.qml</file>
<file>qml/voip/CallDevices.qml</file>
<file>qml/voip/CallInviteBar.qml</file>
<file>qml/voip/DeviceError.qml</file>
<file>qml/voip/PlaceCall.qml</file>
<file>qml/voip/VideoCall.qml</file>
</qresource>
<qresource prefix="/media">
<file>media/ring.ogg</file>


@ -124,16 +124,14 @@ Cache::isHiddenEvent(lmdb::txn &txn,
EventType::Reaction, EventType::CallCandidates, EventType::Unsupported};
if (auto temp = getAccountData(txn, mtx::events::EventType::NhekoHiddenEvents, ""))
hiddenEvents = std::move(
std::get<
mtx::events::Event<mtx::events::account_data::nheko_extensions::HiddenEvents>>(
*temp)
hiddenEvents =
std::move(std::get<mtx::events::AccountDataEvent<
mtx::events::account_data::nheko_extensions::HiddenEvents>>(*temp)
.content);
if (auto temp = getAccountData(txn, mtx::events::EventType::NhekoHiddenEvents, room_id))
hiddenEvents = std::move(
std::get<
mtx::events::Event<mtx::events::account_data::nheko_extensions::HiddenEvents>>(
*temp)
hiddenEvents =
std::move(std::get<mtx::events::AccountDataEvent<
mtx::events::account_data::nheko_extensions::HiddenEvents>>(*temp)
.content);
return std::visit(
@ -1197,7 +1195,7 @@ void
Cache::saveState(const mtx::responses::Sync &res)
{
using namespace mtx::events;
auto user_id = this->localUserId_.toStdString();
auto local_user_id = this->localUserId_.toStdString();
auto currentBatchToken = nextBatchToken();
@ -1252,13 +1250,19 @@ Cache::saveState(const mtx::responses::Sync &res)
evt);
// for tag events
if (std::holds_alternative<Event<account_data::Tags>>(evt)) {
auto tags_evt = std::get<Event<account_data::Tags>>(evt);
if (std::holds_alternative<AccountDataEvent<account_data::Tags>>(
evt)) {
auto tags_evt =
std::get<AccountDataEvent<account_data::Tags>>(evt);
has_new_tags = true;
for (const auto &tag : tags_evt.content.tags) {
updatedInfo.tags.push_back(tag.first);
}
}
if (auto fr = std::get_if<mtx::events::AccountDataEvent<
mtx::events::account_data::FullyRead>>(&evt)) {
nhlog::db()->debug("Fully read: {}", fr->content.event_id);
}
}
if (!has_new_tags) {
// retrieve the old tags, they haven't changed
@ -1282,7 +1286,20 @@ Cache::saveState(const mtx::responses::Sync &res)
lmdb::dbi_put(
txn, roomsDb_, lmdb::val(room.first), lmdb::val(json(updatedInfo).dump()));
updateReadReceipt(txn, room.first, room.second.ephemeral.receipts);
for (const auto &e : room.second.ephemeral.events) {
if (auto receiptsEv = std::get_if<
mtx::events::EphemeralEvent<mtx::events::ephemeral::Receipt>>(&e)) {
Receipts receipts;
for (const auto &[event_id, userReceipts] :
receiptsEv->content.receipts) {
for (const auto &[user_id, receipt] : userReceipts.users) {
receipts[event_id][user_id] = receipt.ts;
}
}
updateReadReceipt(txn, room.first, receipts);
}
}
// Clean up non-valid invites.
removeInvite(txn, room.first);
@ -1302,19 +1319,27 @@ Cache::saveState(const mtx::responses::Sync &res)
std::map<QString, bool> readStatus;
for (const auto &room : res.rooms.join) {
if (!room.second.ephemeral.receipts.empty()) {
for (const auto &e : room.second.ephemeral.events) {
if (auto receiptsEv = std::get_if<
mtx::events::EphemeralEvent<mtx::events::ephemeral::Receipt>>(&e)) {
std::vector<QString> receipts;
for (const auto &receipt : room.second.ephemeral.receipts) {
for (const auto &receiptUsersTs : receipt.second) {
if (receiptUsersTs.first != user_id) {
for (const auto &[event_id, userReceipts] :
receiptsEv->content.receipts) {
for (const auto &[user_id, receipt] : userReceipts.users) {
(void)receipt;
if (user_id != local_user_id) {
receipts.push_back(
QString::fromStdString(receipt.first));
QString::fromStdString(event_id));
break;
}
}
}
if (!receipts.empty())
emit newReadReceipts(QString::fromStdString(room.first), receipts);
emit newReadReceipts(QString::fromStdString(room.first),
receipts);
}
}
readStatus.emplace(QString::fromStdString(room.first),
calculateRoomReadStatus(room.first));
@ -1440,7 +1465,7 @@ Cache::roomsWithTagUpdates(const mtx::responses::Sync &res)
for (const auto &room : res.rooms.join) {
bool hasUpdates = false;
for (const auto &evt : room.second.account_data.events) {
if (std::holds_alternative<Event<account_data::Tags>>(evt)) {
if (std::holds_alternative<AccountDataEvent<account_data::Tags>>(evt)) {
hasUpdates = true;
}
}
@ -2196,6 +2221,34 @@ Cache::getRoomVersion(lmdb::txn &txn, lmdb::dbi &statesdb)
return QString("1");
}
std::optional<mtx::events::state::CanonicalAlias>
Cache::getRoomAliases(const std::string &roomid)
{
using namespace mtx::events;
using namespace mtx::events::state;
auto txn = lmdb::txn::begin(env_, nullptr, MDB_RDONLY);
auto statesdb = getStatesDb(txn, roomid);
lmdb::val event;
bool res = lmdb::dbi_get(
txn, statesdb, lmdb::val(to_string(mtx::events::EventType::RoomCanonicalAlias)), event);
if (res) {
try {
StateEvent<CanonicalAlias> msg =
json::parse(std::string_view(event.data(), event.size()));
return msg.content;
} catch (const json::exception &e) {
nhlog::db()->warn("failed to parse m.room.canonical_alias event: {}",
e.what());
}
}
return std::nullopt;
}
QString
Cache::getInviteRoomName(lmdb::txn &txn, lmdb::dbi &statesdb, lmdb::dbi &membersdb)
{


@ -81,6 +81,7 @@ public:
std::vector<std::string> joinedRooms();
QMap<QString, RoomInfo> roomInfo(bool withInvites = true);
std::optional<mtx::events::state::CanonicalAlias> getRoomAliases(const std::string &roomid);
std::map<QString, bool> invites();
//! Calculate & return the name of the room.


@ -10,11 +10,9 @@
#include "CallManager.h"
#include "ChatPage.h"
#include "Logging.h"
#include "MainWindow.h"
#include "MatrixClient.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "dialogs/AcceptCall.h"
#include "mtx/responses/turn_server.hpp"
@ -112,6 +110,23 @@ CallManager::CallManager(QObject *parent)
default:
break;
}
emit newCallState();
});
connect(&session_, &WebRTCSession::devicesChanged, this, [this]() {
if (ChatPage::instance()->userSettings()->microphone().isEmpty()) {
auto mics = session_.getDeviceNames(false, std::string());
if (!mics.empty())
ChatPage::instance()->userSettings()->setMicrophone(
QString::fromStdString(mics.front()));
}
if (ChatPage::instance()->userSettings()->camera().isEmpty()) {
auto cameras = session_.getDeviceNames(true, std::string());
if (!cameras.empty())
ChatPage::instance()->userSettings()->setCamera(
QString::fromStdString(cameras.front()));
}
emit devicesChanged();
});
connect(&player_,
@ -144,7 +159,7 @@ CallManager::CallManager(QObject *parent)
void
CallManager::sendInvite(const QString &roomid, bool isVideo)
{
if (onActiveCall())
if (isOnCall())
return;
auto roomInfo = cache::singleRoomInfo(roomid.toStdString());
@ -160,6 +175,7 @@ CallManager::sendInvite(const QString &roomid, bool isVideo)
return;
}
isVideo_ = isVideo;
roomid_ = roomid;
session_.setTurnServers(turnURIs_);
generateCallID();
@ -168,16 +184,14 @@ CallManager::sendInvite(const QString &roomid, bool isVideo)
std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
const RoomMember &callee =
members.front().user_id == utils::localUser() ? members.back() : members.front();
callPartyName_ = callee.display_name.isEmpty() ? callee.user_id : callee.display_name;
callParty_ = callee.display_name.isEmpty() ? callee.user_id : callee.display_name;
callPartyAvatarUrl_ = QString::fromStdString(roomInfo.avatar_url);
emit newCallParty();
emit newInviteState();
playRingtone(QUrl("qrc:/media/media/ringback.ogg"), true);
if (!session_.createOffer(isVideo)) {
emit ChatPage::instance()->showNotification("Problem setting up call.");
endCall();
}
if (isVideo)
emit newVideoCallState();
}
namespace {
@ -206,12 +220,6 @@ CallManager::hangUp(CallHangUp::Reason reason)
}
}
bool
CallManager::onActiveCall() const
{
return session_.state() != webrtc::State::DISCONNECTED;
}
void
CallManager::syncEvent(const mtx::events::collections::TimelineEvents &event)
{
@ -257,7 +265,7 @@ CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
return;
auto roomInfo = cache::singleRoomInfo(callInviteEvent.room_id);
if (onActiveCall() || roomInfo.member_count != 2) {
if (isOnCall() || roomInfo.member_count != 2) {
emit newMessage(QString::fromStdString(callInviteEvent.room_id),
CallHangUp{callInviteEvent.content.call_id,
0,
@ -277,48 +285,41 @@ CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
std::vector<RoomMember> members(cache::getMembers(callInviteEvent.room_id));
const RoomMember &caller =
members.front().user_id == utils::localUser() ? members.back() : members.front();
callPartyName_ = caller.display_name.isEmpty() ? caller.user_id : caller.display_name;
callParty_ = caller.display_name.isEmpty() ? caller.user_id : caller.display_name;
callPartyAvatarUrl_ = QString::fromStdString(roomInfo.avatar_url);
emit newCallParty();
auto dialog = new dialogs::AcceptCall(caller.user_id,
caller.display_name,
QString::fromStdString(roomInfo.name),
QString::fromStdString(roomInfo.avatar_url),
isVideo,
MainWindow::instance());
connect(dialog, &dialogs::AcceptCall::accept, this, [this, callInviteEvent, isVideo]() {
MainWindow::instance()->hideOverlay();
answerInvite(callInviteEvent.content, isVideo);
});
connect(dialog, &dialogs::AcceptCall::reject, this, [this]() {
MainWindow::instance()->hideOverlay();
hangUp();
});
MainWindow::instance()->showSolidOverlayModal(dialog);
haveCallInvite_ = true;
isVideo_ = isVideo;
inviteSDP_ = callInviteEvent.content.sdp;
session_.refreshDevices();
emit newInviteState();
}
void
CallManager::answerInvite(const CallInvite &invite, bool isVideo)
CallManager::acceptInvite()
{
if (!haveCallInvite_)
return;
stopRingtone();
std::string errorMessage;
if (!session_.havePlugins(false, &errorMessage) ||
(isVideo && !session_.havePlugins(true, &errorMessage))) {
(isVideo_ && !session_.havePlugins(true, &errorMessage))) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
hangUp();
return;
}
session_.setTurnServers(turnURIs_);
if (!session_.acceptOffer(invite.sdp)) {
if (!session_.acceptOffer(inviteSDP_)) {
emit ChatPage::instance()->showNotification("Problem setting up call.");
hangUp();
return;
}
session_.acceptICECandidates(remoteICECandidates_);
remoteICECandidates_.clear();
if (isVideo)
emit newVideoCallState();
haveCallInvite_ = false;
emit newInviteState();
}
void
@ -332,7 +333,7 @@ CallManager::handleEvent(const RoomEvent<CallCandidates> &callCandidatesEvent)
callCandidatesEvent.sender);
if (callid_ == callCandidatesEvent.content.call_id) {
if (onActiveCall())
if (isOnCall())
session_.acceptICECandidates(callCandidatesEvent.content.candidates);
else {
// CallInvite has been received and we're awaiting localUser to accept or
@ -350,15 +351,19 @@ CallManager::handleEvent(const RoomEvent<CallAnswer> &callAnswerEvent)
callAnswerEvent.content.call_id,
callAnswerEvent.sender);
if (!onActiveCall() && callAnswerEvent.sender == utils::localUser().toStdString() &&
if (callAnswerEvent.sender == utils::localUser().toStdString() &&
callid_ == callAnswerEvent.content.call_id) {
emit ChatPage::instance()->showNotification("Call answered on another device.");
if (!isOnCall()) {
emit ChatPage::instance()->showNotification(
"Call answered on another device.");
stopRingtone();
MainWindow::instance()->hideOverlay();
haveCallInvite_ = false;
emit newInviteState();
}
return;
}
if (onActiveCall() && callid_ == callAnswerEvent.content.call_id) {
if (isOnCall() && callid_ == callAnswerEvent.content.call_id) {
stopRingtone();
if (!session_.acceptAnswer(callAnswerEvent.content.sdp)) {
emit ChatPage::instance()->showNotification("Problem setting up call.");
@ -375,10 +380,42 @@ CallManager::handleEvent(const RoomEvent<CallHangUp> &callHangUpEvent)
callHangUpReasonString(callHangUpEvent.content.reason),
callHangUpEvent.sender);
if (callid_ == callHangUpEvent.content.call_id) {
MainWindow::instance()->hideOverlay();
if (callid_ == callHangUpEvent.content.call_id)
endCall();
}
void
CallManager::toggleMicMute()
{
session_.toggleMicMute();
emit micMuteChanged();
}
bool
CallManager::callsSupported() const
{
#ifdef GSTREAMER_AVAILABLE
return true;
#else
return false;
#endif
}
QStringList
CallManager::devices(bool isVideo) const
{
QStringList ret;
const QString &defaultDevice = isVideo ? ChatPage::instance()->userSettings()->camera()
: ChatPage::instance()->userSettings()->microphone();
std::vector<std::string> devices =
session_.getDeviceNames(isVideo, defaultDevice.toStdString());
ret.reserve(devices.size());
std::transform(devices.cbegin(),
devices.cend(),
std::back_inserter(ret),
[](const auto &d) { return QString::fromStdString(d); });
return ret;
}
void
@ -393,9 +430,13 @@ void
CallManager::clear()
{
roomid_.clear();
callPartyName_.clear();
callParty_.clear();
callPartyAvatarUrl_.clear();
callid_.clear();
isVideo_ = false;
haveCallInvite_ = false;
emit newInviteState();
inviteSDP_.clear();
remoteICECandidates_.clear();
}
@ -403,11 +444,8 @@ void
CallManager::endCall()
{
stopRingtone();
clear();
bool isVideo = session_.isVideo();
session_.end();
if (isVideo)
emit newVideoCallState();
clear();
}
void


@ -8,6 +8,7 @@
#include <QString>
#include <QTimer>
#include "WebRTCSession.h"
#include "mtx/events/collections.hpp"
#include "mtx/events/voip.hpp"
@ -15,34 +16,59 @@ namespace mtx::responses {
struct TurnServer;
}
class QStringList;
class QUrl;
class WebRTCSession;
class CallManager : public QObject
{
Q_OBJECT
Q_PROPERTY(bool haveCallInvite READ haveCallInvite NOTIFY newInviteState)
Q_PROPERTY(bool isOnCall READ isOnCall NOTIFY newCallState)
Q_PROPERTY(bool isVideo READ isVideo NOTIFY newInviteState)
Q_PROPERTY(bool haveLocalVideo READ haveLocalVideo NOTIFY newCallState)
Q_PROPERTY(webrtc::State callState READ callState NOTIFY newCallState)
Q_PROPERTY(QString callParty READ callParty NOTIFY newInviteState)
Q_PROPERTY(QString callPartyAvatarUrl READ callPartyAvatarUrl NOTIFY newInviteState)
Q_PROPERTY(bool isMicMuted READ isMicMuted NOTIFY micMuteChanged)
Q_PROPERTY(bool callsSupported READ callsSupported CONSTANT)
Q_PROPERTY(QStringList mics READ mics NOTIFY devicesChanged)
Q_PROPERTY(QStringList cameras READ cameras NOTIFY devicesChanged)
public:
CallManager(QObject *);
void sendInvite(const QString &roomid, bool isVideo);
void hangUp(
mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
bool onActiveCall() const;
QString callPartyName() const { return callPartyName_; }
bool haveCallInvite() const { return haveCallInvite_; }
bool isOnCall() const { return session_.state() != webrtc::State::DISCONNECTED; }
bool isVideo() const { return isVideo_; }
bool haveLocalVideo() const { return session_.haveLocalVideo(); }
webrtc::State callState() const { return session_.state(); }
QString callParty() const { return callParty_; }
QString callPartyAvatarUrl() const { return callPartyAvatarUrl_; }
bool isMicMuted() const { return session_.isMicMuted(); }
bool callsSupported() const;
QStringList mics() const { return devices(false); }
QStringList cameras() const { return devices(true); }
void refreshTurnServer();
public slots:
void sendInvite(const QString &roomid, bool isVideo);
void syncEvent(const mtx::events::collections::TimelineEvents &event);
void refreshDevices() { session_.refreshDevices(); }
void toggleMicMute();
void toggleCameraView() { session_.toggleCameraView(); }
void acceptInvite();
void hangUp(
mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
signals:
void newMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
void newMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
void newCallParty();
void newVideoCallState();
void newInviteState();
void newCallState();
void micMuteChanged();
void devicesChanged();
void turnServerRetrieved(const mtx::responses::TurnServer &);
private slots:
@ -51,10 +77,13 @@ private slots:
private:
WebRTCSession &session_;
QString roomid_;
QString callPartyName_;
QString callParty_;
QString callPartyAvatarUrl_;
std::string callid_;
const uint32_t timeoutms_ = 120000;
bool isVideo_ = false;
bool haveCallInvite_ = false;
std::string inviteSDP_;
std::vector<mtx::events::msg::CallCandidates::Candidate> remoteICECandidates_;
std::vector<std::string> turnURIs_;
QTimer turnServerTimer_;
@ -68,6 +97,7 @@ private:
void handleEvent(const mtx::events::RoomEvent<mtx::events::msg::CallHangUp> &);
void answerInvite(const mtx::events::msg::CallInvite &, bool isVideo);
void generateCallID();
QStringList devices(bool isVideo) const;
void clear();
void endCall();
void playRingtone(const QUrl &ringtone, bool repeat);


@ -47,7 +47,6 @@
#include "notifications/Manager.h"
#include "dialogs/PlaceCall.h"
#include "dialogs/ReadReceipts.h"
#include "popups/UserMentions.h"
#include "timeline/TimelineViewManager.h"
@ -282,6 +281,14 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
room_list_->highlightSelectedRoom(roomid);
activateWindow();
});
connect(&notificationsManager,
&NotificationsManager::sendNotificationReply,
this,
[this](const QString &roomid, const QString &eventid, const QString &body) {
view_manager_->queueReply(roomid, eventid, body);
room_list_->highlightSelectedRoom(roomid);
activateWindow();
});
setGroupViewState(userSettings_->groupView());
@ -911,6 +918,8 @@ ChatPage::joinRoom(const QString &room)
} catch (const lmdb::error &e) {
emit showNotification(tr("Failed to remove invite: %1").arg(e.what()));
}
room_list_->highlightSelectedRoom(QString::fromStdString(room_id));
});
}
@ -1261,3 +1270,141 @@ ChatPage::decryptDownloadedSecrets(mtx::secret_storage::AesHmacSha2KeyDescriptio
cache::storeSecret(secretName, decrypted);
}
}
void
ChatPage::startChat(QString userid)
{
auto joined_rooms = cache::joinedRooms();
auto room_infos = cache::getRoomInfo(joined_rooms);
for (std::string room_id : joined_rooms) {
if (room_infos[QString::fromStdString(room_id)].member_count == 2) {
auto room_members = cache::roomMembers(room_id);
if (std::find(room_members.begin(),
room_members.end(),
(userid).toStdString()) != room_members.end()) {
room_list_->highlightSelectedRoom(QString::fromStdString(room_id));
return;
}
}
}
mtx::requests::CreateRoom req;
req.preset = mtx::requests::Preset::PrivateChat;
req.visibility = mtx::requests::Visibility::Private;
if (utils::localUser() != userid)
req.invite = {userid.toStdString()};
emit ChatPage::instance()->createRoom(req);
}
static QString
mxidFromSegments(QStringRef sigil, QStringRef mxid)
{
if (mxid.isEmpty())
return "";
auto mxid_ = QUrl::fromPercentEncoding(mxid.toUtf8());
if (sigil == "user") {
return "@" + mxid_;
} else if (sigil == "roomid") {
return "!" + mxid_;
} else if (sigil == "room") {
return "#" + mxid_;
} else if (sigil == "group") {
return "+" + mxid_;
} else {
return "";
}
}
void
ChatPage::handleMatrixUri(const QByteArray &uri)
{
nhlog::ui()->info("Received uri! {}", uri.toStdString());
QUrl uri_{QString::fromUtf8(uri)};
if (uri_.scheme() != "matrix")
return;
auto tempPath = uri_.path(QUrl::ComponentFormattingOption::FullyEncoded);
if (tempPath.startsWith('/'))
tempPath.remove(0, 1);
auto segments = tempPath.splitRef('/');
if (segments.size() != 2 && segments.size() != 4)
return;
auto sigil1 = segments[0];
auto mxid1 = mxidFromSegments(sigil1, segments[1]);
if (mxid1.isEmpty())
return;
QString mxid2;
if (segments.size() == 4 && segments[2] == "event") {
if (segments[3].isEmpty())
return;
else
mxid2 = "$" + QUrl::fromPercentEncoding(segments[3].toUtf8());
}
std::vector<std::string> vias;
QString action;
for (QString item : uri_.query(QUrl::ComponentFormattingOption::FullyEncoded).split('&')) {
nhlog::ui()->info("item: {}", item.toStdString());
if (item.startsWith("action=")) {
action = item.remove("action=");
} else if (item.startsWith("via=")) {
vias.push_back(
QUrl::fromPercentEncoding(item.remove("via=").toUtf8()).toStdString());
}
}
if (sigil1 == "user") {
if (action.isEmpty()) {
view_manager_->activeTimeline()->openUserProfile(mxid1);
} else if (action == "chat") {
this->startChat(mxid1);
}
} else if (sigil1 == "roomid") {
auto joined_rooms = cache::joinedRooms();
auto targetRoomId = mxid1.toStdString();
for (auto roomid : joined_rooms) {
if (roomid == targetRoomId) {
room_list_->highlightSelectedRoom(mxid1);
break;
}
}
if (action == "join") {
joinRoom(mxid1);
}
} else if (sigil1 == "room") {
auto joined_rooms = cache::joinedRooms();
auto targetRoomAlias = mxid1.toStdString();
for (auto roomid : joined_rooms) {
auto aliases = cache::client()->getRoomAliases(roomid);
if (aliases) {
if (aliases->alias == targetRoomAlias) {
room_list_->highlightSelectedRoom(
QString::fromStdString(roomid));
break;
}
}
}
if (action == "join") {
joinRoom(mxid1);
}
}
}
void
ChatPage::handleMatrixUri(const QUrl &uri)
{
handleMatrixUri(uri.toString(QUrl::ComponentFormattingOption::FullyEncoded).toUtf8());
}


@ -110,6 +110,10 @@ public:
mtx::presence::PresenceState currentPresence() const;
public slots:
void handleMatrixUri(const QByteArray &uri);
void handleMatrixUri(const QUrl &uri);
void startChat(QString userid);
void leaveRoom(const QString &room_id);
void createRoom(const mtx::requests::CreateRoom &req);
void joinRoom(const QString &room);


@ -315,10 +315,14 @@ LoginPage::checkHomeserverVersion()
if (err || flows.flows.empty())
emit versionOkCb(LoginMethod::Password);
if (flows.flows[0].type == mtx::user_interactive::auth_types::sso)
emit versionOkCb(LoginMethod::SSO);
else
emit versionOkCb(LoginMethod::Password);
LoginMethod loginMethod_ = LoginMethod::Password;
for (const auto &flow : flows.flows) {
if (flow.type == mtx::user_interactive::auth_types::sso) {
loginMethod_ = LoginMethod::SSO;
break;
}
}
emit versionOk(loginMethod_);
});
});
}


@ -579,13 +579,12 @@ encrypt_group_message(const std::string &room_id, const std::string &device_id,
mtx::common::RelatesTo r_relation;
// relations shouldn't be encrypted...
if (body["content"].contains("m.relates_to") &&
body["content"]["m.relates_to"].contains("m.in_reply_to")) {
if (body["content"].contains("m.relates_to")) {
if (body["content"]["m.relates_to"].contains("m.in_reply_to")) {
relation = body["content"]["m.relates_to"];
body["content"].erase("m.relates_to");
} else if (body["content"]["m.relates_to"].contains("event_id")) {
r_relation = body["content"]["m.relates_to"];
body["content"].erase("m.relates_to");
}
}
auto payload = olm::client()->encrypt_group_message(session.get(), body.dump());


@ -54,7 +54,7 @@ QSharedPointer<UserSettings> UserSettings::instance_;
UserSettings::UserSettings()
{
connect(QCoreApplication::instance(), &QCoreApplication::aboutToQuit, [this]() {
connect(QCoreApplication::instance(), &QCoreApplication::aboutToQuit, []() {
instance_.clear();
});
}
@ -464,7 +464,7 @@ UserSettings::applyTheme()
stylefile.setFileName(":/styles/styles/nheko.qss");
QPalette lightActive(
/*windowText*/ QColor("#333"),
/*button*/ QColor("#333"),
/*button*/ QColor("white"),
/*light*/ QColor(0xef, 0xef, 0xef),
/*dark*/ QColor(110, 110, 110),
/*mid*/ QColor(220, 220, 220),
@ -477,7 +477,7 @@ UserSettings::applyTheme()
lightActive.setColor(QPalette::ToolTipBase, lightActive.base().color());
lightActive.setColor(QPalette::ToolTipText, lightActive.text().color());
lightActive.setColor(QPalette::Link, QColor("#0077b5"));
lightActive.setColor(QPalette::ButtonText, QColor("#495057"));
lightActive.setColor(QPalette::ButtonText, QColor("#333"));
QApplication::setPalette(lightActive);
} else if (this->theme() == "dark") {
stylefile.setFileName(":/styles/styles/nheko-dark.qss");


@ -23,6 +23,8 @@
#include <QSharedPointer>
#include <QWidget>
#include <optional>
class Toggle;
class QLabel;
class QFormLayout;


@ -242,12 +242,14 @@ newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
GstDevice *device;
gst_message_parse_device_added(msg, &device);
addDevice(device);
emit WebRTCSession::instance().devicesChanged();
break;
}
case GST_MESSAGE_DEVICE_REMOVED: {
GstDevice *device;
gst_message_parse_device_removed(msg, &device);
removeDevice(device, false);
emit WebRTCSession::instance().devicesChanged();
break;
}
case GST_MESSAGE_DEVICE_CHANGED: {
@ -554,6 +556,9 @@ void
addCameraView(GstElement *pipe, const std::pair<int, int> &videoCallSize)
{
GstElement *tee = gst_bin_get_by_name(GST_BIN(pipe), "videosrctee");
if (!tee)
return;
GstElement *queue = gst_element_factory_make("queue", nullptr);
GstElement *videorate = gst_element_factory_make("videorate", nullptr);
gst_bin_add_many(GST_BIN(pipe), queue, videorate, nullptr);
@ -1150,6 +1155,19 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
return true;
}
bool
WebRTCSession::haveLocalVideo() const
{
if (isVideo_ && state_ >= State::INITIATED) {
GstElement *tee = gst_bin_get_by_name(GST_BIN(pipe_), "videosrctee");
if (tee) {
gst_object_unref(tee);
return true;
}
}
return false;
}
bool
WebRTCSession::isMicMuted() const
{
@ -1274,6 +1292,7 @@ WebRTCSession::refreshDevices()
addDevice(GST_DEVICE_CAST(l->data));
g_list_free(devices);
}
emit devicesChanged();
#endif
}
@ -1324,6 +1343,12 @@ WebRTCSession::havePlugins(bool, std::string *)
return false;
}
bool
WebRTCSession::haveLocalVideo() const
{
return false;
}
bool
WebRTCSession::createOffer(bool)
{


@ -43,6 +43,7 @@ public:
bool havePlugins(bool isVideo, std::string *errorMessage = nullptr);
webrtc::State state() const { return state_; }
bool isVideo() const { return isVideo_; }
bool haveLocalVideo() const;
bool isOffering() const { return isOffering_; }
bool isRemoteVideoRecvOnly() const { return isRemoteVideoRecvOnly_; }
@ -75,6 +76,7 @@ signals:
const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
void newICECandidate(const mtx::events::msg::CallCandidates::Candidate &);
void stateChanged(webrtc::State);
void devicesChanged();
private slots:
void setState(webrtc::State state) { state_ = state; }


@ -1,152 +0,0 @@
#include <QComboBox>
#include <QLabel>
#include <QPushButton>
#include <QString>
#include <QVBoxLayout>
#include "ChatPage.h"
#include "Config.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "dialogs/AcceptCall.h"
#include "ui/Avatar.h"
namespace dialogs {
AcceptCall::AcceptCall(const QString &caller,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl,
bool isVideo,
QWidget *parent)
: QWidget(parent)
{
std::string errorMessage;
WebRTCSession *session = &WebRTCSession::instance();
if (!session->havePlugins(false, &errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
emit close();
return;
}
if (isVideo && !session->havePlugins(true, &errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
emit close();
return;
}
session->refreshDevices();
microphones_ = session->getDeviceNames(
false, ChatPage::instance()->userSettings()->microphone().toStdString());
if (microphones_.empty()) {
emit ChatPage::instance()->showNotification(
tr("Incoming call: No microphone found."));
emit close();
return;
}
if (isVideo)
cameras_ = session->getDeviceNames(
true, ChatPage::instance()->userSettings()->camera().toStdString());
setAutoFillBackground(true);
setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
setWindowModality(Qt::WindowModal);
setAttribute(Qt::WA_DeleteOnClose, true);
setMinimumWidth(conf::modals::MIN_WIDGET_WIDTH);
setSizePolicy(QSizePolicy::Maximum, QSizePolicy::Maximum);
auto layout = new QVBoxLayout(this);
layout->setSpacing(conf::modals::WIDGET_SPACING);
layout->setMargin(conf::modals::WIDGET_MARGIN);
QFont f;
f.setPointSizeF(f.pointSizeF());
QFont labelFont;
labelFont.setWeight(QFont::Medium);
QLabel *displayNameLabel = nullptr;
if (!displayName.isEmpty() && displayName != caller) {
displayNameLabel = new QLabel(displayName, this);
labelFont.setPointSizeF(f.pointSizeF() * 2);
displayNameLabel->setFont(labelFont);
displayNameLabel->setAlignment(Qt::AlignCenter);
}
QLabel *callerLabel = new QLabel(caller, this);
labelFont.setPointSizeF(f.pointSizeF() * 1.2);
callerLabel->setFont(labelFont);
callerLabel->setAlignment(Qt::AlignCenter);
auto avatar = new Avatar(this, QFontMetrics(f).height() * 6);
if (!avatarUrl.isEmpty())
avatar->setImage(avatarUrl);
else
avatar->setLetter(utils::firstChar(roomName));
const int iconSize = 22;
QLabel *callTypeIndicator = new QLabel(this);
callTypeIndicator->setPixmap(
QIcon(isVideo ? ":/icons/icons/ui/video-call.png" : ":/icons/icons/ui/place-call.png")
.pixmap(QSize(iconSize * 2, iconSize * 2)));
QLabel *callTypeLabel = new QLabel(isVideo ? tr("Video Call") : tr("Voice Call"), this);
labelFont.setPointSizeF(f.pointSizeF() * 1.1);
callTypeLabel->setFont(labelFont);
callTypeLabel->setAlignment(Qt::AlignCenter);
auto buttonLayout = new QHBoxLayout;
buttonLayout->setSpacing(18);
acceptBtn_ = new QPushButton(tr("Accept"), this);
acceptBtn_->setDefault(true);
acceptBtn_->setIcon(
QIcon(isVideo ? ":/icons/icons/ui/video-call.png" : ":/icons/icons/ui/place-call.png"));
acceptBtn_->setIconSize(QSize(iconSize, iconSize));
rejectBtn_ = new QPushButton(tr("Reject"), this);
rejectBtn_->setIcon(QIcon(":/icons/icons/ui/end-call.png"));
rejectBtn_->setIconSize(QSize(iconSize, iconSize));
buttonLayout->addWidget(acceptBtn_);
buttonLayout->addWidget(rejectBtn_);
microphoneCombo_ = new QComboBox(this);
for (const auto &m : microphones_)
microphoneCombo_->addItem(QIcon(":/icons/icons/ui/microphone-unmute.png"),
QString::fromStdString(m));
if (!cameras_.empty()) {
cameraCombo_ = new QComboBox(this);
for (const auto &c : cameras_)
cameraCombo_->addItem(QIcon(":/icons/icons/ui/video-call.png"),
QString::fromStdString(c));
}
if (displayNameLabel)
layout->addWidget(displayNameLabel, 0, Qt::AlignCenter);
layout->addWidget(callerLabel, 0, Qt::AlignCenter);
layout->addWidget(avatar, 0, Qt::AlignCenter);
layout->addWidget(callTypeIndicator, 0, Qt::AlignCenter);
layout->addWidget(callTypeLabel, 0, Qt::AlignCenter);
layout->addLayout(buttonLayout);
layout->addWidget(microphoneCombo_);
if (cameraCombo_)
layout->addWidget(cameraCombo_);
connect(acceptBtn_, &QPushButton::clicked, this, [this]() {
ChatPage::instance()->userSettings()->setMicrophone(
QString::fromStdString(microphones_[microphoneCombo_->currentIndex()]));
if (cameraCombo_) {
ChatPage::instance()->userSettings()->setCamera(
QString::fromStdString(cameras_[cameraCombo_->currentIndex()]));
}
emit accept();
emit close();
});
connect(rejectBtn_, &QPushButton::clicked, this, [this]() {
emit reject();
emit close();
});
}
}


@ -1,39 +0,0 @@
#pragma once
#include <string>
#include <vector>
#include <QWidget>
class QComboBox;
class QPushButton;
class QString;
namespace dialogs {
class AcceptCall : public QWidget
{
Q_OBJECT
public:
AcceptCall(const QString &caller,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl,
bool isVideo,
QWidget *parent = nullptr);
signals:
void accept();
void reject();
private:
QPushButton *acceptBtn_ = nullptr;
QPushButton *rejectBtn_ = nullptr;
QComboBox *microphoneCombo_ = nullptr;
QComboBox *cameraCombo_ = nullptr;
std::vector<std::string> microphones_;
std::vector<std::string> cameras_;
};
}


@ -1,131 +0,0 @@
#include <QComboBox>
#include <QLabel>
#include <QPushButton>
#include <QString>
#include <QVBoxLayout>
#include "ChatPage.h"
#include "Config.h"
#include "UserSettingsPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "dialogs/PlaceCall.h"
#include "ui/Avatar.h"
namespace dialogs {
PlaceCall::PlaceCall(const QString &callee,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl,
QSharedPointer<UserSettings> settings,
QWidget *parent)
: QWidget(parent)
{
std::string errorMessage;
WebRTCSession *session = &WebRTCSession::instance();
if (!session->havePlugins(false, &errorMessage)) {
emit ChatPage::instance()->showNotification(QString::fromStdString(errorMessage));
emit close();
return;
}
session->refreshDevices();
microphones_ = session->getDeviceNames(false, settings->microphone().toStdString());
if (microphones_.empty()) {
emit ChatPage::instance()->showNotification(tr("No microphone found."));
emit close();
return;
}
cameras_ = session->getDeviceNames(true, settings->camera().toStdString());
setAutoFillBackground(true);
setWindowFlags(Qt::Tool | Qt::WindowStaysOnTopHint);
setWindowModality(Qt::WindowModal);
setAttribute(Qt::WA_DeleteOnClose, true);
auto layout = new QVBoxLayout(this);
layout->setSpacing(conf::modals::WIDGET_SPACING);
layout->setMargin(conf::modals::WIDGET_MARGIN);
auto buttonLayout = new QHBoxLayout;
buttonLayout->setSpacing(15);
buttonLayout->setMargin(0);
QFont f;
f.setPointSizeF(f.pointSizeF());
auto avatar = new Avatar(this, QFontMetrics(f).height() * 3);
if (!avatarUrl.isEmpty())
avatar->setImage(avatarUrl);
else
avatar->setLetter(utils::firstChar(roomName));
voiceBtn_ = new QPushButton(tr("Voice"), this);
voiceBtn_->setIcon(QIcon(":/icons/icons/ui/place-call.png"));
voiceBtn_->setIconSize(QSize(iconSize_, iconSize_));
voiceBtn_->setDefault(true);
if (!cameras_.empty()) {
videoBtn_ = new QPushButton(tr("Video"), this);
videoBtn_->setIcon(QIcon(":/icons/icons/ui/video-call.png"));
videoBtn_->setIconSize(QSize(iconSize_, iconSize_));
}
cancelBtn_ = new QPushButton(tr("Cancel"), this);
buttonLayout->addWidget(avatar);
buttonLayout->addStretch();
buttonLayout->addWidget(voiceBtn_);
if (videoBtn_)
buttonLayout->addWidget(videoBtn_);
buttonLayout->addWidget(cancelBtn_);
QString name = displayName.isEmpty() ? callee : displayName;
QLabel *label = new QLabel(tr("Place a call to ") + name + "?", this);
microphoneCombo_ = new QComboBox(this);
for (const auto &m : microphones_)
microphoneCombo_->addItem(QIcon(":/icons/icons/ui/microphone-unmute.png"),
QString::fromStdString(m));
if (videoBtn_) {
cameraCombo_ = new QComboBox(this);
for (const auto &c : cameras_)
cameraCombo_->addItem(QIcon(":/icons/icons/ui/video-call.png"),
QString::fromStdString(c));
}
layout->addWidget(label);
layout->addLayout(buttonLayout);
layout->addStretch();
layout->addWidget(microphoneCombo_);
if (videoBtn_)
layout->addWidget(cameraCombo_);
connect(voiceBtn_, &QPushButton::clicked, this, [this, settings]() {
settings->setMicrophone(
QString::fromStdString(microphones_[microphoneCombo_->currentIndex()]));
emit voice();
emit close();
});
if (videoBtn_)
connect(videoBtn_, &QPushButton::clicked, this, [this, settings, session]() {
std::string error;
if (!session->havePlugins(true, &error)) {
emit ChatPage::instance()->showNotification(
QString::fromStdString(error));
emit close();
return;
}
settings->setMicrophone(
QString::fromStdString(microphones_[microphoneCombo_->currentIndex()]));
settings->setCamera(
QString::fromStdString(cameras_[cameraCombo_->currentIndex()]));
emit video();
emit close();
});
connect(cancelBtn_, &QPushButton::clicked, this, [this]() {
emit cancel();
emit close();
});
}
}


@ -1,44 +0,0 @@
#pragma once
#include <string>
#include <vector>
#include <QSharedPointer>
#include <QWidget>
class QComboBox;
class QPushButton;
class QString;
class UserSettings;
namespace dialogs {
class PlaceCall : public QWidget
{
Q_OBJECT
public:
PlaceCall(const QString &callee,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl,
QSharedPointer<UserSettings> settings,
QWidget *parent = nullptr);
signals:
void voice();
void video();
void cancel();
private:
const int iconSize_ = 18;
QPushButton *voiceBtn_ = nullptr;
QPushButton *videoBtn_ = nullptr;
QPushButton *cancelBtn_ = nullptr;
QComboBox *microphoneCombo_ = nullptr;
QComboBox *cameraCombo_ = nullptr;
std::vector<std::string> microphones_;
std::vector<std::string> cameras_;
};
}


@ -19,6 +19,7 @@
#include <QApplication>
#include <QCommandLineParser>
#include <QDesktopServices>
#include <QDesktopWidget>
#include <QDir>
#include <QFile>
@ -33,6 +34,7 @@
#include <QStandardPaths>
#include <QTranslator>
#include "ChatPage.h"
#include "Config.h"
#include "Logging.h"
#include "MainWindow.h"
@ -128,34 +130,43 @@ main(int argc, char *argv[])
// This is some hacky programming, but it's necessary (AFAIK?) to get the unique config name
// parsed before the SingleApplication userdata is set.
QString userdata{""};
QString matrixUri;
for (int i = 0; i < argc; ++i) {
if (QString{argv[i]}.startsWith("--profile=")) {
QString q{argv[i]};
q.remove("--profile=");
userdata = q;
} else if (QString{argv[i]}.startsWith("--p=")) {
QString q{argv[i]};
q.remove("-p=");
userdata = q;
} else if (QString{argv[i]} == "--profile" || QString{argv[i]} == "-p") {
QString arg{argv[i]};
if (arg.startsWith("--profile=")) {
arg.remove("--profile=");
userdata = arg;
} else if (arg.startsWith("--p=")) {
arg.remove("-p=");
userdata = arg;
} else if (arg == "--profile" || arg == "-p") {
if (i < argc - 1) // if i is less than argc - 1, we still have a parameter
// left to process as the name
{
++i; // the next arg is the name, so increment
userdata = QString{argv[i]};
}
} else if (arg.startsWith("matrix:")) {
matrixUri = arg;
}
}
SingleApplication app(argc,
argv,
false,
true,
SingleApplication::Mode::User |
SingleApplication::Mode::ExcludeAppPath |
SingleApplication::Mode::ExcludeAppVersion,
SingleApplication::Mode::ExcludeAppVersion |
SingleApplication::Mode::SecondaryNotification,
100,
userdata);
if (app.isSecondary()) {
// open uri in main instance
app.sendMessage(matrixUri.toUtf8());
return 0;
}
QCommandLineParser parser;
parser.addHelpOption();
parser.addVersionOption();
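For context on the hunk above: with SingleApplication, a second nheko process only lives long enough to forward its payload (here the matrix: URI) to the already running primary instance, which picks it up through the receivedMessage signal wired up in the next hunk. The following is a minimal, self-contained sketch of that hand-off, illustrative only and not Nheko code; it assumes the itay-grudev SingleApplication library used here (the exact header name and the QApplication base class are chosen at build time via the QAPPLICATION_CLASS define).

#include <QByteArray>
#include <QDebug>
#include <QObject>
#include <singleapplication.h> // exact header name depends on how the library is packaged

int main(int argc, char *argv[])
{
        SingleApplication app(argc,
                              argv,
                              true, // allowSecondary: a second process may start, but only to forward data
                              SingleApplication::Mode::User |
                                SingleApplication::Mode::SecondaryNotification);

        if (app.isSecondary()) {
                // Forward whatever this instance was asked to open, then exit right away.
                app.sendMessage(argc > 1 ? QByteArray(argv[1]) : QByteArray());
                return 0;
        }

        // Only the primary instance gets here; it receives every forwarded payload.
        QObject::connect(&app,
                         &SingleApplication::receivedMessage,
                         [](quint32 /*instanceId*/, QByteArray message) {
                                 qDebug() << "secondary instance sent:" << message;
                         });

        return app.exec();
}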
@@ -245,6 +256,25 @@ main(int argc, char *argv[])
w.activateWindow();
});
QObject::connect(
&app,
&SingleApplication::receivedMessage,
ChatPage::instance(),
[&](quint32, QByteArray message) { ChatPage::instance()->handleMatrixUri(message); });
QMetaObject::Connection uriConnection;
if (app.isPrimary() && !matrixUri.isEmpty()) {
uriConnection = QObject::connect(ChatPage::instance(),
&ChatPage::contentLoaded,
ChatPage::instance(),
[&uriConnection, matrixUri]() {
ChatPage::instance()->handleMatrixUri(
matrixUri.toUtf8());
QObject::disconnect(uriConnection);
});
}
QDesktopServices::setUrlHandler("matrix", ChatPage::instance(), "handleMatrixUri");
#if defined(Q_OS_MAC)
// Temporary solution for the emoji picker until
// nheko has a proper menu bar with more functionality.
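The QDesktopServices::setUrlHandler call above makes Qt deliver every matrix: URL opened through QDesktopServices::openUrl() inside this process to the named slot of the given receiver, instead of handing it to the operating system. A minimal sketch of that mechanism, with an illustrative receiver class that is not part of Nheko (the registered method must be a slot taking a single QUrl):

#include <QDebug>
#include <QDesktopServices>
#include <QGuiApplication>
#include <QTimer>
#include <QUrl>

class UriReceiver : public QObject
{
        Q_OBJECT
public slots:
        void handleMatrixUri(const QUrl &uri) { qDebug() << "asked to open" << uri; }
};

int main(int argc, char *argv[])
{
        QGuiApplication app(argc, argv);
        UriReceiver receiver;

        // From now on, matrix: URLs opened via QDesktopServices::openUrl() in this
        // process land in receiver.handleMatrixUri() instead of the OS handler.
        QDesktopServices::setUrlHandler("matrix", &receiver, "handleMatrixUri");

        QDesktopServices::openUrl(QUrl("matrix:r/somewhere:example.org"));

        QTimer::singleShot(0, &app, []() { QGuiApplication::quit(); }); // let queued invocations run, then exit
        return app.exec();
}

#include "main.moc" // Q_OBJECT in a .cpp needs the generated moc file (or CMAKE_AUTOMOC)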

View file

@@ -36,6 +36,7 @@ public:
signals:
void notificationClicked(const QString roomId, const QString eventId);
void sendNotificationReply(const QString roomId, const QString eventId, const QString body);
public slots:
void removeNotification(const QString &roomId, const QString &eventId);
@@ -58,6 +59,7 @@ private:
private slots:
void actionInvoked(uint id, QString action);
void notificationClosed(uint id, uint reason);
void notificationReplied(uint id, QString reply);
};
#if defined(Q_OS_LINUX) || defined(Q_OS_FREEBSD)

View file

@@ -28,6 +28,12 @@ NotificationsManager::NotificationsManager(QObject *parent)
"NotificationClosed",
this,
SLOT(notificationClosed(uint, uint)));
QDBusConnection::sessionBus().connect("org.freedesktop.Notifications",
"/org/freedesktop/Notifications",
"org.freedesktop.Notifications",
"NotificationReplied",
this,
SLOT(notificationReplied(uint, QString)));
}
void
@@ -61,7 +67,12 @@ NotificationsManager::showNotification(const QString summary,
argumentList << ""; // app_icon
argumentList << summary; // summary
argumentList << text; // body
argumentList << (QStringList("default") << "reply"); // actions
// The list of actions always contains the action name followed by a localized,
// human-readable version of it. Currently we just use an empty string for that.
// TODO(Nico): Look into what to actually put there.
argumentList << (QStringList("default") << ""
<< "inline-reply"
<< ""); // actions
argumentList << hints; // hints
argumentList << (int)-1; // timeout in ms
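For readers who have not dealt with the Desktop Notifications D-Bus API: the actions argument of Notify is a flat array that alternates an action key with its human-readable label, which is why every key above is followed by an empty string. "default" fires when the notification itself is activated, while "inline-reply" is the key that notification servers with inline-reply support (e.g. recent KDE Plasma) turn into a text field, answered through the NotificationReplied signal handled further down. A tiny self-contained sketch of just that argument, for illustration only:

#include <QDebug>
#include <QStringList>

int main()
{
        // Actions are (key, localized label) pairs flattened into a single list; an
        // empty label leaves the presentation up to the notification server.
        QStringList actions;
        actions << "default"      << ""  // invoked when the user activates the notification
                << "inline-reply" << ""; // servers with inline-reply support show a text entry
        qDebug() << actions;             // this is the list appended to argumentList above
        return 0;
}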
@@ -121,11 +132,22 @@ NotificationsManager::removeNotification(const QString &roomId, const QString &e
void
NotificationsManager::actionInvoked(uint id, QString action)
{
if (action == "default" && notificationIds.contains(id)) {
if (notificationIds.contains(id)) {
roomEventId idEntry = notificationIds[id];
if (action == "default") {
emit notificationClicked(idEntry.roomId, idEntry.eventId);
}
}
}
void
NotificationsManager::notificationReplied(uint id, QString reply)
{
if (notificationIds.contains(id)) {
roomEventId idEntry = notificationIds[id];
emit sendNotificationReply(idEntry.roomId, idEntry.eventId, reply);
}
}
void
NotificationsManager::notificationClosed(uint id, uint reason)

View file

@@ -42,6 +42,11 @@ NotificationsManager::actionInvoked(uint, QString)
{
}
void
NotificationsManager::notificationReplied(uint, QString)
{
}
void
NotificationsManager::notificationClosed(uint, uint)
{

View file

@@ -61,6 +61,7 @@ NotificationsManager::postNotification(const QString &room_id,
}
void NotificationsManager::actionInvoked(uint, QString) {}
void NotificationsManager::notificationReplied(uint, QString) {}
void NotificationsManager::notificationClosed(uint, uint) {}

View file

@@ -13,7 +13,6 @@
#include <mtx/responses/media.hpp>
#include "Cache.h"
#include "CallManager.h"
#include "ChatPage.h"
#include "CompletionProxyModel.h"
#include "Logging.h"
@@ -25,7 +24,6 @@
#include "UserSettingsPage.h"
#include "UsersModel.h"
#include "Utils.h"
#include "dialogs/PlaceCall.h"
#include "dialogs/PreviewUploadOverlay.h"
#include "emoji/EmojiModel.h"
@@ -593,48 +591,6 @@ InputBar::showPreview(const QMimeData &source, QString path, const QStringList &
});
}
void
InputBar::callButton()
{
auto callManager_ = ChatPage::instance()->callManager();
if (callManager_->onActiveCall()) {
callManager_->hangUp();
} else {
auto current_room_ = room->roomId();
if (auto roomInfo = cache::singleRoomInfo(current_room_.toStdString());
roomInfo.member_count != 2) {
ChatPage::instance()->showNotification("Calls are limited to 1:1 rooms.");
} else {
std::vector<RoomMember> members(
cache::getMembers(current_room_.toStdString()));
const RoomMember &callee = members.front().user_id == utils::localUser()
? members.back()
: members.front();
auto dialog =
new dialogs::PlaceCall(callee.user_id,
callee.display_name,
QString::fromStdString(roomInfo.name),
QString::fromStdString(roomInfo.avatar_url),
ChatPage::instance()->userSettings(),
MainWindow::instance());
connect(dialog,
&dialogs::PlaceCall::voice,
callManager_,
[callManager_, current_room_]() {
callManager_->sendInvite(current_room_, false);
});
connect(dialog,
&dialogs::PlaceCall::video,
callManager_,
[callManager_, current_room_]() {
callManager_->sendInvite(current_room_, true);
});
utils::centerWidget(dialog, MainWindow::instance());
dialog->show();
}
}
}
void
InputBar::startTyping()
{

View file

@@ -41,7 +41,7 @@ public slots:
void updateState(int selectionStart, int selectionEnd, int cursorPosition, QString text);
void openFileSelection();
bool uploading() const { return uploading_; }
void callButton();
void message(QString body);
QObject *completerFor(QString completerName);
@@ -54,7 +54,6 @@ signals:
void uploadingChanged(bool value);
private:
void message(QString body);
void emote(QString body);
void command(QString name, QString args);
void image(const QString &filename,

View file

@@ -613,8 +613,15 @@ TimelineModel::addEvents(const mtx::responses::Timeline &timeline)
std::visit(
[this](auto &event) {
event.room_id = room_id_.toStdString();
if constexpr (std::is_same_v<std::decay_t<decltype(event)>,
RoomEvent<msg::CallAnswer>> ||
std::is_same_v<std::decay_t<decltype(event)>,
RoomEvent<msg::CallHangUp>>)
emit newCallEvent(event);
else {
if (event.sender != http::client()->user_id().to_string())
emit newCallEvent(event);
}
},
e);
else if (std::holds_alternative<StateEvent<state::Avatar>>(e))
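The dispatch above leans on a pattern worth spelling out: inside std::visit, if constexpr (std::is_same_v<...>) branches on the concrete variant alternative at compile time, so the special case is only instantiated for the matching event types (here, call answers and hang-ups are forwarded even when we sent them ourselves; everything else only when it comes from another user). A standalone sketch with made-up event structs rather than the mtx types used above:

#include <iostream>
#include <string>
#include <type_traits>
#include <variant>

struct CallAnswer  { std::string sender; };
struct CallHangUp  { std::string sender; };
struct TextMessage { std::string sender; };

using Event = std::variant<CallAnswer, CallHangUp, TextMessage>;

void handle(const Event &e, const std::string &localUser)
{
        std::visit(
          [&](const auto &ev) {
                  using T = std::decay_t<decltype(ev)>;
                  if constexpr (std::is_same_v<T, CallAnswer> || std::is_same_v<T, CallHangUp>) {
                          // No sender check: answers/hang-ups matter even when they come from
                          // one of our own devices (e.g. to stop ringing locally).
                          std::cout << "call event from " << ev.sender << '\n';
                  } else {
                          if (ev.sender != localUser)
                                  std::cout << "event from " << ev.sender << '\n';
                  }
          },
          e);
}

int main()
{
        handle(CallAnswer{"@alice:example.org"}, "@bob:example.org");
        handle(TextMessage{"@bob:example.org"}, "@bob:example.org"); // filtered out: our own message
}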

View file

@@ -136,6 +136,10 @@ TimelineViewManager::TimelineViewManager(CallManager *callManager, ChatPage *par
"im.nheko", 1, 0, "Settings", [](QQmlEngine *, QJSEngine *) -> QObject * {
return ChatPage::instance()->userSettings().data();
});
qmlRegisterSingletonType<CallManager>(
"im.nheko", 1, 0, "CallManager", [](QQmlEngine *, QJSEngine *) -> QObject * {
return ChatPage::instance()->callManager();
});
qRegisterMetaType<mtx::events::collections::TimelineEvents>();
qRegisterMetaType<std::vector<DeviceInfo>>();
@@ -237,36 +241,6 @@ TimelineViewManager::TimelineViewManager(CallManager *callManager, ChatPage *par
isInitialSync_ = true;
emit initialSyncChanged(true);
});
connect(&WebRTCSession::instance(),
&WebRTCSession::stateChanged,
this,
&TimelineViewManager::callStateChanged);
connect(
callManager_, &CallManager::newCallParty, this, &TimelineViewManager::callPartyChanged);
connect(callManager_,
&CallManager::newVideoCallState,
this,
&TimelineViewManager::videoCallChanged);
connect(&WebRTCSession::instance(),
&WebRTCSession::stateChanged,
this,
&TimelineViewManager::onCallChanged);
}
bool
TimelineViewManager::isOnCall() const
{
return callManager_->onActiveCall();
}
bool
TimelineViewManager::callsSupported() const
{
#ifdef GSTREAMER_AVAILABLE
return true;
#else
return false;
#endif
}
void
@@ -297,15 +271,22 @@ TimelineViewManager::sync(const mtx::responses::Rooms &rooms)
&CallManager::syncEvent);
if (ChatPage::instance()->userSettings()->typingNotifications()) {
for (const auto &ev : room.ephemeral.events) {
if (auto t = std::get_if<
mtx::events::EphemeralEvent<mtx::events::ephemeral::Typing>>(
&ev)) {
std::vector<QString> typing;
typing.reserve(room.ephemeral.typing.size());
for (const auto &user : room.ephemeral.typing) {
typing.reserve(t->content.user_ids.size());
for (const auto &user : t->content.user_ids) {
if (user != http::client()->user_id().to_string())
typing.push_back(QString::fromStdString(user));
typing.push_back(
QString::fromStdString(user));
}
room_model->updateTypingUsers(typing);
}
}
}
}
this->isInitialSync_ = false;
emit initialSyncChanged(false);
@@ -347,19 +328,6 @@ TimelineViewManager::escapeEmoji(QString str) const
return utils::replaceEmoji(str);
}
void
TimelineViewManager::toggleMicMute()
{
WebRTCSession::instance().toggleMicMute();
emit micMuteChanged();
}
void
TimelineViewManager::toggleCameraView()
{
WebRTCSession::instance().toggleCameraView();
}
void
TimelineViewManager::openImageOverlay(QString mxcUrl, QString eventId) const
{
@@ -501,6 +469,18 @@ TimelineViewManager::initWithMessages(const std::vector<QString> &roomIds)
addRoom(roomId);
}
void
TimelineViewManager::queueReply(const QString &roomid,
const QString &repliedToEvent,
const QString &replyBody)
{
auto room = models.find(roomid);
if (room != models.end()) {
room.value()->setReply(repliedToEvent);
room.value()->input()->message(replyBody);
}
}
void
TimelineViewManager::queueReactionMessage(const QString &reactedEvent, const QString &reactionKey)
{

View file

@@ -36,13 +36,6 @@ class TimelineViewManager : public QObject
bool isInitialSync MEMBER isInitialSync_ READ isInitialSync NOTIFY initialSyncChanged)
Q_PROPERTY(
bool isNarrowView MEMBER isNarrowView_ READ isNarrowView NOTIFY narrowViewChanged)
Q_PROPERTY(webrtc::State callState READ callState NOTIFY callStateChanged)
Q_PROPERTY(bool onVideoCall READ onVideoCall NOTIFY videoCallChanged)
Q_PROPERTY(QString callPartyName READ callPartyName NOTIFY callPartyChanged)
Q_PROPERTY(QString callPartyAvatarUrl READ callPartyAvatarUrl NOTIFY callPartyChanged)
Q_PROPERTY(bool isMicMuted READ isMicMuted NOTIFY micMuteChanged)
Q_PROPERTY(bool isOnCall READ isOnCall NOTIFY onCallChanged)
Q_PROPERTY(bool callsSupported READ callsSupported CONSTANT)
public:
TimelineViewManager(CallManager *callManager, ChatPage *parent = nullptr);
@@ -61,14 +54,6 @@ public:
Q_INVOKABLE TimelineModel *activeTimeline() const { return timeline_; }
Q_INVOKABLE bool isInitialSync() const { return isInitialSync_; }
bool isNarrowView() const { return isNarrowView_; }
webrtc::State callState() const { return WebRTCSession::instance().state(); }
bool onVideoCall() const { return WebRTCSession::instance().isVideo(); }
Q_INVOKABLE void setVideoCallItem();
QString callPartyName() const { return callManager_->callPartyName(); }
QString callPartyAvatarUrl() const { return callManager_->callPartyAvatarUrl(); }
bool isMicMuted() const { return WebRTCSession::instance().isMicMuted(); }
Q_INVOKABLE void toggleMicMute();
Q_INVOKABLE void toggleCameraView();
Q_INVOKABLE void openImageOverlay(QString mxcUrl, QString eventId) const;
Q_INVOKABLE QColor userColor(QString id, QColor background);
Q_INVOKABLE QString escapeEmoji(QString str) const;
@@ -98,11 +83,6 @@ signals:
void inviteUsers(QStringList users);
void showRoomList();
void narrowViewChanged();
void callStateChanged(webrtc::State);
void videoCallChanged();
void callPartyChanged();
void micMuteChanged();
void onCallChanged();
public slots:
void updateReadReceipts(const QString &room_id, const std::vector<QString> &event_ids);
@@ -120,6 +100,9 @@ public slots:
}
void updateColorPalette();
void queueReply(const QString &roomid,
const QString &repliedToEvent,
const QString &replyBody);
void queueReactionMessage(const QString &reactedEvent, const QString &reactionKey);
void queueCallMessage(const QString &roomid, const mtx::events::msg::CallInvite &);
void queueCallMessage(const QString &roomid, const mtx::events::msg::CallCandidates &);
@@ -127,8 +110,7 @@ public slots:
void queueCallMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
void updateEncryptedDescriptions();
bool isOnCall() const;
bool callsSupported() const;
void setVideoCallItem();
void enableBackButton()
{

View file

@@ -202,12 +202,7 @@ UserProfile::kickUser()
void
UserProfile::startChat()
{
mtx::requests::CreateRoom req;
req.preset = mtx::requests::Preset::PrivateChat;
req.visibility = mtx::requests::Visibility::Private;
if (utils::localUser() != this->userid_)
req.invite = {this->userid_.toStdString()};
emit ChatPage::instance()->createRoom(req);
ChatPage::instance()->startChat(this->userid_);
}
void