Move call device handling out of WebRTCSession

trilene 2021-02-07 11:47:47 -05:00
parent 375e20462b
commit f1bc3ba587
8 changed files with 509 additions and 396 deletions

CMakeLists.txt

@@ -298,6 +298,7 @@ set(SRC_FILES
   src/AvatarProvider.cpp
   src/BlurhashProvider.cpp
   src/Cache.cpp
+  src/CallDevices.cpp
   src/CallManager.cpp
   src/ChatPage.cpp
   src/ColorImageProvider.cpp
@@ -512,6 +513,7 @@ qt5_wrap_cpp(MOC_HEADERS
   src/AvatarProvider.h
   src/BlurhashProvider.h
   src/Cache_p.h
+  src/CallDevices.h
   src/CallManager.h
   src/ChatPage.h
   src/CommunitiesList.h

src/CallDevices.cpp Normal file (433 lines added)

@@ -0,0 +1,433 @@
#include <cstring>
#include <optional>
#include <string_view>
#include "CallDevices.h"
#include "ChatPage.h"
#include "Logging.h"
#include "UserSettingsPage.h"
#ifdef GSTREAMER_AVAILABLE
extern "C"
{
#include "gst/gst.h"
}
#endif
CallDevices::CallDevices()
: QObject()
{}
#ifdef GSTREAMER_AVAILABLE
namespace {
struct AudioSource
{
std::string name;
GstDevice *device;
};
struct VideoSource
{
struct Caps
{
std::string resolution;
std::vector<std::string> frameRates;
};
std::string name;
GstDevice *device;
std::vector<Caps> caps;
};
std::vector<AudioSource> audioSources_;
std::vector<VideoSource> videoSources_;
using FrameRate = std::pair<int, int>;
std::optional<FrameRate>
getFrameRate(const GValue *value)
{
if (GST_VALUE_HOLDS_FRACTION(value)) {
gint num = gst_value_get_fraction_numerator(value);
gint den = gst_value_get_fraction_denominator(value);
return FrameRate{num, den};
}
return std::nullopt;
}
void
addFrameRate(std::vector<std::string> &rates, const FrameRate &rate)
{
constexpr double minimumFrameRate = 15.0;
if (static_cast<double>(rate.first) / rate.second >= minimumFrameRate)
rates.push_back(std::to_string(rate.first) + "/" + std::to_string(rate.second));
}
void
setDefaultDevice(bool isVideo)
{
auto settings = ChatPage::instance()->userSettings();
if (isVideo && settings->camera().isEmpty()) {
const VideoSource &camera = videoSources_.front();
settings->setCamera(QString::fromStdString(camera.name));
settings->setCameraResolution(
QString::fromStdString(camera.caps.front().resolution));
settings->setCameraFrameRate(
QString::fromStdString(camera.caps.front().frameRates.front()));
} else if (!isVideo && settings->microphone().isEmpty()) {
settings->setMicrophone(QString::fromStdString(audioSources_.front().name));
}
}
void
addDevice(GstDevice *device)
{
if (!device)
return;
gchar *name = gst_device_get_display_name(device);
gchar *type = gst_device_get_device_class(device);
bool isVideo = !std::strncmp(type, "Video", 5);
g_free(type);
nhlog::ui()->debug("WebRTC: {} device added: {}", isVideo ? "video" : "audio", name);
if (!isVideo) {
audioSources_.push_back({name, device});
g_free(name);
setDefaultDevice(false);
return;
}
GstCaps *gstcaps = gst_device_get_caps(device);
if (!gstcaps) {
nhlog::ui()->debug("WebRTC: unable to get caps for {}", name);
g_free(name);
return;
}
VideoSource source{name, device, {}};
g_free(name);
guint nCaps = gst_caps_get_size(gstcaps);
for (guint i = 0; i < nCaps; ++i) {
GstStructure *structure = gst_caps_get_structure(gstcaps, i);
const gchar *name = gst_structure_get_name(structure);
if (!std::strcmp(name, "video/x-raw")) {
gint widthpx, heightpx;
if (gst_structure_get(structure,
"width",
G_TYPE_INT,
&widthpx,
"height",
G_TYPE_INT,
&heightpx,
nullptr)) {
VideoSource::Caps caps;
caps.resolution =
std::to_string(widthpx) + "x" + std::to_string(heightpx);
const GValue *value =
gst_structure_get_value(structure, "framerate");
if (auto fr = getFrameRate(value); fr)
addFrameRate(caps.frameRates, *fr);
else if (GST_VALUE_HOLDS_FRACTION_RANGE(value)) {
addFrameRate(
caps.frameRates,
*getFrameRate(gst_value_get_fraction_range_min(value)));
addFrameRate(
caps.frameRates,
*getFrameRate(gst_value_get_fraction_range_max(value)));
} else if (GST_VALUE_HOLDS_LIST(value)) {
guint nRates = gst_value_list_get_size(value);
for (guint j = 0; j < nRates; ++j) {
const GValue *rate =
gst_value_list_get_value(value, j);
if (auto fr = getFrameRate(rate); fr)
addFrameRate(caps.frameRates, *fr);
}
}
if (!caps.frameRates.empty())
source.caps.push_back(std::move(caps));
}
}
}
gst_caps_unref(gstcaps);
videoSources_.push_back(std::move(source));
setDefaultDevice(true);
}
#if GST_CHECK_VERSION(1, 18, 0)
template<typename T>
bool
removeDevice(T &sources, GstDevice *device, bool changed)
{
if (auto it = std::find_if(sources.begin(),
sources.end(),
[device](const auto &s) { return s.device == device; });
it != sources.end()) {
nhlog::ui()->debug(std::string("WebRTC: device ") +
(changed ? "changed: " : "removed: ") + "{}",
it->name);
gst_object_unref(device);
sources.erase(it);
return true;
}
return false;
}
void
removeDevice(GstDevice *device, bool changed)
{
if (device) {
if (removeDevice(audioSources_, device, changed) ||
removeDevice(videoSources_, device, changed))
return;
}
}
gboolean
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data G_GNUC_UNUSED)
{
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_DEVICE_ADDED: {
GstDevice *device;
gst_message_parse_device_added(msg, &device);
addDevice(device);
emit CallDevices::instance().devicesChanged();
break;
}
case GST_MESSAGE_DEVICE_REMOVED: {
GstDevice *device;
gst_message_parse_device_removed(msg, &device);
removeDevice(device, false);
emit CallDevices::instance().devicesChanged();
break;
}
case GST_MESSAGE_DEVICE_CHANGED: {
GstDevice *device;
GstDevice *oldDevice;
gst_message_parse_device_changed(msg, &device, &oldDevice);
removeDevice(oldDevice, true);
addDevice(device);
break;
}
default:
break;
}
return TRUE;
}
#endif
template<typename T>
std::vector<std::string>
deviceNames(T &sources, const std::string &defaultDevice)
{
std::vector<std::string> ret;
ret.reserve(sources.size());
for (const auto &s : sources)
ret.push_back(s.name);
// move default device to top of the list
if (auto it = std::find(ret.begin(), ret.end(), defaultDevice); it != ret.end())
std::swap(ret.front(), *it);
return ret;
}
std::optional<VideoSource>
getVideoSource(const std::string &cameraName)
{
if (auto it = std::find_if(videoSources_.cbegin(),
videoSources_.cend(),
[&cameraName](const auto &s) { return s.name == cameraName; });
it != videoSources_.cend()) {
return *it;
}
return std::nullopt;
}
std::pair<int, int>
tokenise(std::string_view str, char delim)
{
std::pair<int, int> ret;
ret.first = std::atoi(str.data());
auto pos = str.find_first_of(delim);
ret.second = std::atoi(str.data() + pos + 1);
return ret;
}
}
void
CallDevices::init()
{
#if GST_CHECK_VERSION(1, 18, 0)
static GstDeviceMonitor *monitor = nullptr;
if (!monitor) {
monitor = gst_device_monitor_new();
GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
gst_caps_unref(caps);
caps = gst_caps_new_empty_simple("video/x-raw");
gst_device_monitor_add_filter(monitor, "Video/Source", caps);
gst_caps_unref(caps);
GstBus *bus = gst_device_monitor_get_bus(monitor);
gst_bus_add_watch(bus, newBusMessage, nullptr);
gst_object_unref(bus);
if (!gst_device_monitor_start(monitor)) {
nhlog::ui()->error("WebRTC: failed to start device monitor");
return;
}
}
#endif
}
void
CallDevices::refresh()
{
#if !GST_CHECK_VERSION(1, 18, 0)
static GstDeviceMonitor *monitor = nullptr;
if (!monitor) {
monitor = gst_device_monitor_new();
GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
gst_caps_unref(caps);
caps = gst_caps_new_empty_simple("video/x-raw");
gst_device_monitor_add_filter(monitor, "Video/Source", caps);
gst_caps_unref(caps);
}
auto clearDevices = [](auto &sources) {
std::for_each(
sources.begin(), sources.end(), [](auto &s) { gst_object_unref(s.device); });
sources.clear();
};
clearDevices(audioSources_);
clearDevices(videoSources_);
GList *devices = gst_device_monitor_get_devices(monitor);
if (devices) {
for (GList *l = devices; l != nullptr; l = l->next)
addDevice(GST_DEVICE_CAST(l->data));
g_list_free(devices);
}
emit devicesChanged();
#endif
}
bool
CallDevices::haveMic() const
{
return !audioSources_.empty();
}
bool
CallDevices::haveCamera() const
{
return !videoSources_.empty();
}
std::vector<std::string>
CallDevices::names(bool isVideo, const std::string &defaultDevice) const
{
return isVideo ? deviceNames(videoSources_, defaultDevice)
: deviceNames(audioSources_, defaultDevice);
}
std::vector<std::string>
CallDevices::resolutions(const std::string &cameraName) const
{
std::vector<std::string> ret;
if (auto s = getVideoSource(cameraName); s) {
ret.reserve(s->caps.size());
for (const auto &c : s->caps)
ret.push_back(c.resolution);
}
return ret;
}
std::vector<std::string>
CallDevices::frameRates(const std::string &cameraName, const std::string &resolution) const
{
if (auto s = getVideoSource(cameraName); s) {
if (auto it =
std::find_if(s->caps.cbegin(),
s->caps.cend(),
[&](const auto &c) { return c.resolution == resolution; });
it != s->caps.cend())
return it->frameRates;
}
return {};
}
GstDevice *
CallDevices::audioDevice() const
{
std::string name = ChatPage::instance()->userSettings()->microphone().toStdString();
if (auto it = std::find_if(audioSources_.cbegin(),
audioSources_.cend(),
[&name](const auto &s) { return s.name == name; });
it != audioSources_.cend()) {
nhlog::ui()->debug("WebRTC: microphone: {}", name);
return it->device;
} else {
nhlog::ui()->error("WebRTC: unknown microphone: {}", name);
return nullptr;
}
}
GstDevice *
CallDevices::videoDevice(std::pair<int, int> &resolution, std::pair<int, int> &frameRate) const
{
auto settings = ChatPage::instance()->userSettings();
std::string name = settings->camera().toStdString();
if (auto s = getVideoSource(name); s) {
nhlog::ui()->debug("WebRTC: camera: {}", name);
resolution = tokenise(settings->cameraResolution().toStdString(), 'x');
frameRate = tokenise(settings->cameraFrameRate().toStdString(), '/');
nhlog::ui()->debug(
"WebRTC: camera resolution: {}x{}", resolution.first, resolution.second);
nhlog::ui()->debug(
"WebRTC: camera frame rate: {}/{}", frameRate.first, frameRate.second);
return s->device;
} else {
nhlog::ui()->error("WebRTC: unknown camera: {}", name);
return nullptr;
}
}
#else
void
CallDevices::refresh()
{}
bool
CallDevices::haveMic() const
{
return false;
}
bool
CallDevices::haveCamera() const
{
return false;
}
std::vector<std::string>
CallDevices::names(bool, const std::string &) const
{
return {};
}
std::vector<std::string>
CallDevices::resolutions(const std::string &) const
{
return {};
}
std::vector<std::string>
CallDevices::frameRates(const std::string &, const std::string &) const
{
return {};
}
#endif
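For reference, videoDevice() above expects the camera resolution and frame rate to be stored in UserSettings as "WIDTHxHEIGHT" and "NUMERATOR/DENOMINATOR" strings (the same formats addDevice() produces), which tokenise() then splits apart. A minimal standalone sketch of that parsing, with hypothetical example values:

```cpp
#include <cstdio>
#include <cstdlib>
#include <string_view>
#include <utility>

// Same splitting approach as the tokenise() helper above: read the number
// before the delimiter, then the number after it.
static std::pair<int, int>
split(std::string_view str, char delim)
{
        std::pair<int, int> ret;
        ret.first  = std::atoi(str.data());
        auto pos   = str.find_first_of(delim);
        ret.second = std::atoi(str.data() + pos + 1);
        return ret;
}

int main()
{
        auto res = split("1280x720", 'x'); // -> {1280, 720}
        auto fps = split("30/1", '/');     // -> {30, 1}
        std::printf("%dx%d @ %d/%d\n", res.first, res.second, fps.first, fps.second);
}
```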

src/CallDevices.h Normal file (45 lines added)

@@ -0,0 +1,45 @@
#pragma once
#include <string>
#include <utility>
#include <vector>
#include <QObject>
typedef struct _GstDevice GstDevice;
class CallDevices : public QObject
{
Q_OBJECT
public:
static CallDevices &instance()
{
static CallDevices instance;
return instance;
}
void refresh();
bool haveMic() const;
bool haveCamera() const;
std::vector<std::string> names(bool isVideo, const std::string &defaultDevice) const;
std::vector<std::string> resolutions(const std::string &cameraName) const;
std::vector<std::string> frameRates(const std::string &cameraName,
const std::string &resolution) const;
signals:
void devicesChanged();
private:
CallDevices();
friend class WebRTCSession;
void init();
GstDevice *audioDevice() const;
GstDevice *videoDevice(std::pair<int, int> &resolution,
std::pair<int, int> &frameRate) const;
public:
CallDevices(CallDevices const &) = delete;
void operator=(CallDevices const &) = delete;
};
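The rest of the commit consumes this class only through the singleton. A minimal sketch of a call site, assuming a hypothetical listCaptureDevices() helper; it mirrors how CallManager and UserSettingsPage use the new API further down:

```cpp
#include <QObject>

#include "CallDevices.h"
#include "Logging.h"

// Hypothetical call site; not part of this commit.
void
listCaptureDevices(QObject *context)
{
        auto &devices = CallDevices::instance();

        // Enumerate now (does the work on GStreamer < 1.18; a no-op with the
        // 1.18+ device monitor, which keeps the lists current by itself)...
        devices.refresh();

        // ...and re-list whenever a device is hot-plugged or removed.
        QObject::connect(&devices, &CallDevices::devicesChanged, context, [&devices]() {
                for (const auto &mic : devices.names(false, std::string()))
                        nhlog::ui()->info("microphone: {}", mic);
                for (const auto &cam : devices.names(true, std::string()))
                        for (const auto &res : devices.resolutions(cam))
                                nhlog::ui()->info("{} supports {}", cam, res);
        });
}
```

UserSettingsPage takes the same approach when populating its device combo boxes, while CallManager simply forwards devicesChanged to its own signal.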

src/CallManager.cpp

@@ -7,6 +7,7 @@
 #include <QUrl>
 #include "Cache.h"
+#include "CallDevices.h"
 #include "CallManager.h"
 #include "ChatPage.h"
 #include "Logging.h"
@@ -114,21 +115,10 @@ CallManager::CallManager(QObject *parent)
                 emit newCallState();
         });
-        connect(&session_, &WebRTCSession::devicesChanged, this, [this]() {
-                if (ChatPage::instance()->userSettings()->microphone().isEmpty()) {
-                        auto mics = session_.getDeviceNames(false, std::string());
-                        if (!mics.empty())
-                                ChatPage::instance()->userSettings()->setMicrophone(
-                                  QString::fromStdString(mics.front()));
-                }
-                if (ChatPage::instance()->userSettings()->camera().isEmpty()) {
-                        auto cameras = session_.getDeviceNames(true, std::string());
-                        if (!cameras.empty())
-                                ChatPage::instance()->userSettings()->setCamera(
-                                  QString::fromStdString(cameras.front()));
-                }
-                emit devicesChanged();
-        });
+        connect(&CallDevices::instance(),
+                &CallDevices::devicesChanged,
+                this,
+                &CallManager::devicesChanged);
         connect(&player_,
                 &QMediaPlayer::mediaStatusChanged,
@@ -292,7 +282,7 @@ CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
         haveCallInvite_ = true;
         isVideo_ = isVideo;
         inviteSDP_ = callInviteEvent.content.sdp;
-        session_.refreshDevices();
+        CallDevices::instance().refresh();
         emit newInviteState();
 }
@@ -409,7 +399,7 @@ CallManager::devices(bool isVideo) const
         const QString &defaultDevice = isVideo ? ChatPage::instance()->userSettings()->camera()
                                                : ChatPage::instance()->userSettings()->microphone();
         std::vector<std::string> devices =
-          session_.getDeviceNames(isVideo, defaultDevice.toStdString());
+          CallDevices::instance().names(isVideo, defaultDevice.toStdString());
         ret.reserve(devices.size());
         std::transform(devices.cbegin(),
                        devices.cend(),

src/CallManager.h

@@ -8,6 +8,7 @@
 #include <QString>
 #include <QTimer>
+#include "CallDevices.h"
 #include "WebRTCSession.h"
 #include "mtx/events/collections.hpp"
 #include "mtx/events/voip.hpp"
@@ -53,7 +54,7 @@ public:
 public slots:
         void sendInvite(const QString &roomid, bool isVideo);
         void syncEvent(const mtx::events::collections::TimelineEvents &event);
-        void refreshDevices() { session_.refreshDevices(); }
+        void refreshDevices() { CallDevices::instance().refresh(); }
         void toggleMicMute();
         void toggleCameraView() { session_.toggleCameraView(); }
         void acceptInvite();

src/UserSettingsPage.cpp

@@ -39,12 +39,12 @@
 #include <QtQml>
 #include "Cache.h"
+#include "CallDevices.h"
 #include "Config.h"
 #include "MatrixClient.h"
 #include "Olm.h"
 #include "UserSettingsPage.h"
 #include "Utils.h"
-#include "WebRTCSession.h"
 #include "ui/FlatButton.h"
 #include "ui/ToggleButton.h"
@@ -1060,7 +1060,7 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
           [this](const QString &camera) {
                   settings_->setCamera(camera);
                   std::vector<std::string> resolutions =
-                    WebRTCSession::instance().getResolutions(camera.toStdString());
+                    CallDevices::instance().resolutions(camera.toStdString());
                   cameraResolutionCombo_->clear();
                   for (const auto &resolution : resolutions)
                           cameraResolutionCombo_->addItem(QString::fromStdString(resolution));
@@ -1070,9 +1070,8 @@ UserSettingsPage::UserSettingsPage(QSharedPointer<UserSettings> settings, QWidge
           static_cast<void (QComboBox::*)(const QString &)>(&QComboBox::currentTextChanged),
           [this](const QString &resolution) {
                   settings_->setCameraResolution(resolution);
-                  std::vector<std::string> frameRates =
-                    WebRTCSession::instance().getFrameRates(settings_->camera().toStdString(),
-                                                            resolution.toStdString());
+                  std::vector<std::string> frameRates = CallDevices::instance().frameRates(
+                    settings_->camera().toStdString(), resolution.toStdString());
                   cameraFrameRateCombo_->clear();
                   for (const auto &frameRate : frameRates)
                           cameraFrameRateCombo_->addItem(QString::fromStdString(frameRate));
@@ -1231,9 +1230,8 @@ UserSettingsPage::showEvent(QShowEvent *)
         timelineMaxWidthSpin_->setValue(settings_->timelineMaxWidth());
         privacyScreenTimeout_->setValue(settings_->privacyScreenTimeout());
-        WebRTCSession::instance().refreshDevices();
-        auto mics =
-          WebRTCSession::instance().getDeviceNames(false, settings_->microphone().toStdString());
+        CallDevices::instance().refresh();
+        auto mics = CallDevices::instance().names(false, settings_->microphone().toStdString());
         microphoneCombo_->clear();
         for (const auto &m : mics)
                 microphoneCombo_->addItem(QString::fromStdString(m));
@@ -1241,8 +1239,7 @@ UserSettingsPage::showEvent(QShowEvent *)
         auto cameraResolution = settings_->cameraResolution();
         auto cameraFrameRate = settings_->cameraFrameRate();
-        auto cameras =
-          WebRTCSession::instance().getDeviceNames(true, settings_->camera().toStdString());
+        auto cameras = CallDevices::instance().names(true, settings_->camera().toStdString());
         cameraCombo_->clear();
         for (const auto &c : cameras)
                 cameraCombo_->addItem(QString::fromStdString(c));

src/WebRTCSession.cpp

@@ -35,6 +35,7 @@ using webrtc::State;
 WebRTCSession::WebRTCSession()
   : QObject()
+  , devices_(CallDevices::instance())
 {
         qRegisterMetaType<webrtc::State>();
         qmlRegisterUncreatableMetaObject(
@@ -68,9 +69,7 @@ WebRTCSession::init(std::string *errorMessage)
         gchar *version = gst_version_string();
         nhlog::ui()->info("WebRTC: initialised {}", version);
         g_free(version);
-#if GST_CHECK_VERSION(1, 18, 0)
-        startDeviceMonitor();
-#endif
+        devices_.init();
         return true;
 #else
         (void)errorMessage;
@@ -81,195 +80,17 @@
 #ifdef GSTREAMER_AVAILABLE
 namespace {
-struct AudioSource
-{
-        std::string name;
-        GstDevice *device;
-};
-struct VideoSource
-{
-        struct Caps
-        {
-                std::string resolution;
-                std::vector<std::string> frameRates;
-        };
-        std::string name;
-        GstDevice *device;
-        std::vector<Caps> caps;
-};
 std::string localsdp_;
 std::vector<mtx::events::msg::CallCandidates::Candidate> localcandidates_;
 bool haveAudioStream_;
 bool haveVideoStream_;
-std::vector<AudioSource> audioSources_;
-std::vector<VideoSource> videoSources_;
 GstPad *insetSinkPad_ = nullptr;
-using FrameRate = std::pair<int, int>;
-std::optional<FrameRate>
-getFrameRate(const GValue *value)
-{
-        if (GST_VALUE_HOLDS_FRACTION(value)) {
-                gint num = gst_value_get_fraction_numerator(value);
-                gint den = gst_value_get_fraction_denominator(value);
-                return FrameRate{num, den};
-        }
-        return std::nullopt;
-}
-void
-addFrameRate(std::vector<std::string> &rates, const FrameRate &rate)
-{
-        constexpr double minimumFrameRate = 15.0;
-        if (static_cast<double>(rate.first) / rate.second >= minimumFrameRate)
-                rates.push_back(std::to_string(rate.first) + "/" + std::to_string(rate.second));
-}
-std::pair<int, int>
-tokenise(std::string_view str, char delim)
-{
-        std::pair<int, int> ret;
-        ret.first = std::atoi(str.data());
-        auto pos = str.find_first_of(delim);
-        ret.second = std::atoi(str.data() + pos + 1);
-        return ret;
-}
-void
-addDevice(GstDevice *device)
-{
-        if (!device)
-                return;
-        gchar *name = gst_device_get_display_name(device);
-        gchar *type = gst_device_get_device_class(device);
-        bool isVideo = !std::strncmp(type, "Video", 5);
-        g_free(type);
-        nhlog::ui()->debug("WebRTC: {} device added: {}", isVideo ? "video" : "audio", name);
-        if (!isVideo) {
-                audioSources_.push_back({name, device});
-                g_free(name);
-                return;
-        }
-        GstCaps *gstcaps = gst_device_get_caps(device);
-        if (!gstcaps) {
-                nhlog::ui()->debug("WebRTC: unable to get caps for {}", name);
-                g_free(name);
-                return;
-        }
-        VideoSource source{name, device, {}};
-        g_free(name);
-        guint nCaps = gst_caps_get_size(gstcaps);
-        for (guint i = 0; i < nCaps; ++i) {
-                GstStructure *structure = gst_caps_get_structure(gstcaps, i);
-                const gchar *name = gst_structure_get_name(structure);
-                if (!std::strcmp(name, "video/x-raw")) {
-                        gint widthpx, heightpx;
-                        if (gst_structure_get(structure,
-                                              "width",
-                                              G_TYPE_INT,
-                                              &widthpx,
-                                              "height",
-                                              G_TYPE_INT,
-                                              &heightpx,
-                                              nullptr)) {
-                                VideoSource::Caps caps;
-                                caps.resolution =
-                                  std::to_string(widthpx) + "x" + std::to_string(heightpx);
-                                const GValue *value =
-                                  gst_structure_get_value(structure, "framerate");
-                                if (auto fr = getFrameRate(value); fr)
-                                        addFrameRate(caps.frameRates, *fr);
-                                else if (GST_VALUE_HOLDS_FRACTION_RANGE(value)) {
-                                        const GValue *minRate =
-                                          gst_value_get_fraction_range_min(value);
-                                        if (auto fr = getFrameRate(minRate); fr)
-                                                addFrameRate(caps.frameRates, *fr);
-                                        const GValue *maxRate =
-                                          gst_value_get_fraction_range_max(value);
-                                        if (auto fr = getFrameRate(maxRate); fr)
-                                                addFrameRate(caps.frameRates, *fr);
-                                } else if (GST_VALUE_HOLDS_LIST(value)) {
-                                        guint nRates = gst_value_list_get_size(value);
-                                        for (guint j = 0; j < nRates; ++j) {
-                                                const GValue *rate =
-                                                  gst_value_list_get_value(value, j);
-                                                if (auto fr = getFrameRate(rate); fr)
-                                                        addFrameRate(caps.frameRates, *fr);
-                                        }
-                                }
-                                if (!caps.frameRates.empty())
-                                        source.caps.push_back(std::move(caps));
-                        }
-                }
-        }
-        gst_caps_unref(gstcaps);
-        videoSources_.push_back(std::move(source));
-}
-#if GST_CHECK_VERSION(1, 18, 0)
-template<typename T>
-bool
-removeDevice(T &sources, GstDevice *device, bool changed)
-{
-        if (auto it = std::find_if(sources.begin(),
-                                   sources.end(),
-                                   [device](const auto &s) { return s.device == device; });
-            it != sources.end()) {
-                nhlog::ui()->debug(std::string("WebRTC: device ") +
-                                     (changed ? "changed: " : "removed: ") + "{}",
-                                   it->name);
-                gst_object_unref(device);
-                sources.erase(it);
-                return true;
-        }
-        return false;
-}
-void
-removeDevice(GstDevice *device, bool changed)
-{
-        if (device) {
-                if (removeDevice(audioSources_, device, changed) ||
-                    removeDevice(videoSources_, device, changed))
-                        return;
-        }
-}
-#endif
 gboolean
 newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
 {
         WebRTCSession *session = static_cast<WebRTCSession *>(user_data);
         switch (GST_MESSAGE_TYPE(msg)) {
-#if GST_CHECK_VERSION(1, 18, 0)
-        case GST_MESSAGE_DEVICE_ADDED: {
-                GstDevice *device;
-                gst_message_parse_device_added(msg, &device);
-                addDevice(device);
-                emit WebRTCSession::instance().devicesChanged();
-                break;
-        }
-        case GST_MESSAGE_DEVICE_REMOVED: {
-                GstDevice *device;
-                gst_message_parse_device_removed(msg, &device);
-                removeDevice(device, false);
-                emit WebRTCSession::instance().devicesChanged();
-                break;
-        }
-        case GST_MESSAGE_DEVICE_CHANGED: {
-                GstDevice *device;
-                GstDevice *oldDevice;
-                gst_message_parse_device_changed(msg, &device, &oldDevice);
-                removeDevice(oldDevice, true);
-                addDevice(device);
-                break;
-        }
-#endif
         case GST_MESSAGE_EOS:
                 nhlog::ui()->error("WebRTC: end of stream");
                 session->end();
@@ -724,27 +545,6 @@ getMediaAttributes(const GstSDPMessage *sdp,
         return false;
 }
-template<typename T>
-std::vector<std::string>
-deviceNames(T &sources, const std::string &defaultDevice)
-{
-        std::vector<std::string> ret;
-        ret.reserve(sources.size());
-        std::transform(sources.cbegin(),
-                       sources.cend(),
-                       std::back_inserter(ret),
-                       [](const auto &s) { return s.name; });
-        // move default device to top of the list
-        if (auto it = std::find_if(ret.begin(),
-                                   ret.end(),
-                                   [&defaultDevice](const auto &s) { return s == defaultDevice; });
-            it != ret.end())
-                std::swap(ret.front(), *it);
-        return ret;
-}
 }
 bool
@@ -995,19 +795,11 @@ WebRTCSession::startPipeline(int opusPayloadType, int vp8PayloadType)
 bool
 WebRTCSession::createPipeline(int opusPayloadType, int vp8PayloadType)
 {
-        std::string microphoneSetting =
-          ChatPage::instance()->userSettings()->microphone().toStdString();
-        auto it =
-          std::find_if(audioSources_.cbegin(),
-                       audioSources_.cend(),
-                       [&microphoneSetting](const auto &s) { return s.name == microphoneSetting; });
-        if (it == audioSources_.cend()) {
-                nhlog::ui()->error("WebRTC: unknown microphone: {}", microphoneSetting);
+        GstDevice *device = devices_.audioDevice();
+        if (!device)
                 return false;
-        }
-        nhlog::ui()->debug("WebRTC: microphone: {}", microphoneSetting);
-        GstElement *source = gst_device_create_element(it->device, nullptr);
+        GstElement *source = gst_device_create_element(device, nullptr);
         GstElement *volume = gst_element_factory_make("volume", "srclevel");
         GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
         GstElement *resample = gst_element_factory_make("audioresample", nullptr);
@@ -1070,30 +862,16 @@ bool
 WebRTCSession::addVideoPipeline(int vp8PayloadType)
 {
         // allow incoming video calls despite localUser having no webcam
-        if (videoSources_.empty())
+        if (!devices_.haveCamera())
                 return !isOffering_;
-        QSharedPointer<UserSettings> settings = ChatPage::instance()->userSettings();
-        std::string cameraSetting = settings->camera().toStdString();
-        auto it = std::find_if(videoSources_.cbegin(),
-                               videoSources_.cend(),
-                               [&cameraSetting](const auto &s) { return s.name == cameraSetting; });
-        if (it == videoSources_.cend()) {
-                nhlog::ui()->error("WebRTC: unknown camera: {}", cameraSetting);
+        std::pair<int, int> resolution;
+        std::pair<int, int> frameRate;
+        GstDevice *device = devices_.videoDevice(resolution, frameRate);
+        if (!device)
                 return false;
-        }
-        std::string resSetting = settings->cameraResolution().toStdString();
-        const std::string &res = resSetting.empty() ? it->caps.front().resolution : resSetting;
-        std::string frSetting = settings->cameraFrameRate().toStdString();
-        const std::string &fr = frSetting.empty() ? it->caps.front().frameRates.front() : frSetting;
-        auto resolution = tokenise(res, 'x');
-        auto frameRate = tokenise(fr, '/');
-        nhlog::ui()->debug("WebRTC: camera: {}", cameraSetting);
-        nhlog::ui()->debug("WebRTC: camera resolution: {}x{}", resolution.first, resolution.second);
-        nhlog::ui()->debug("WebRTC: camera frame rate: {}/{}", frameRate.first, frameRate.second);
-        GstElement *source = gst_device_create_element(it->device, nullptr);
+        GstElement *source = gst_device_create_element(device, nullptr);
         GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
         GstElement *capsfilter = gst_element_factory_make("capsfilter", "camerafilter");
         GstCaps *caps = gst_caps_new_simple("video/x-raw",
@@ -1239,111 +1017,6 @@ WebRTCSession::end()
         emit stateChanged(State::DISCONNECTED);
 }
-#if GST_CHECK_VERSION(1, 18, 0)
-void
-WebRTCSession::startDeviceMonitor()
-{
-        if (!initialised_)
-                return;
-        static GstDeviceMonitor *monitor = nullptr;
-        if (!monitor) {
-                monitor = gst_device_monitor_new();
-                GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
-                gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
-                gst_caps_unref(caps);
-                caps = gst_caps_new_empty_simple("video/x-raw");
-                gst_device_monitor_add_filter(monitor, "Video/Source", caps);
-                gst_caps_unref(caps);
-                GstBus *bus = gst_device_monitor_get_bus(monitor);
-                gst_bus_add_watch(bus, newBusMessage, nullptr);
-                gst_object_unref(bus);
-                if (!gst_device_monitor_start(monitor)) {
-                        nhlog::ui()->error("WebRTC: failed to start device monitor");
-                        return;
-                }
-        }
-}
-#endif
-void
-WebRTCSession::refreshDevices()
-{
-#if GST_CHECK_VERSION(1, 18, 0)
-        return;
-#else
-        if (!initialised_)
-                return;
-        static GstDeviceMonitor *monitor = nullptr;
-        if (!monitor) {
-                monitor = gst_device_monitor_new();
-                GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
-                gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
-                gst_caps_unref(caps);
-                caps = gst_caps_new_empty_simple("video/x-raw");
-                gst_device_monitor_add_filter(monitor, "Video/Source", caps);
-                gst_caps_unref(caps);
-        }
-        auto clearDevices = [](auto &sources) {
-                std::for_each(
-                  sources.begin(), sources.end(), [](auto &s) { gst_object_unref(s.device); });
-                sources.clear();
-        };
-        clearDevices(audioSources_);
-        clearDevices(videoSources_);
-        GList *devices = gst_device_monitor_get_devices(monitor);
-        if (devices) {
-                for (GList *l = devices; l != nullptr; l = l->next)
-                        addDevice(GST_DEVICE_CAST(l->data));
-                g_list_free(devices);
-        }
-        emit devicesChanged();
-#endif
-}
-std::vector<std::string>
-WebRTCSession::getDeviceNames(bool isVideo, const std::string &defaultDevice) const
-{
-        return isVideo ? deviceNames(videoSources_, defaultDevice)
-                       : deviceNames(audioSources_, defaultDevice);
-}
-std::vector<std::string>
-WebRTCSession::getResolutions(const std::string &cameraName) const
-{
-        std::vector<std::string> ret;
-        if (auto it = std::find_if(videoSources_.cbegin(),
-                                   videoSources_.cend(),
-                                   [&cameraName](const auto &s) { return s.name == cameraName; });
-            it != videoSources_.cend()) {
-                ret.reserve(it->caps.size());
-                for (const auto &c : it->caps)
-                        ret.push_back(c.resolution);
-        }
-        return ret;
-}
-std::vector<std::string>
-WebRTCSession::getFrameRates(const std::string &cameraName, const std::string &resolution) const
-{
-        if (auto i = std::find_if(videoSources_.cbegin(),
-                                  videoSources_.cend(),
-                                  [&](const auto &s) { return s.name == cameraName; });
-            i != videoSources_.cend()) {
-                if (auto j =
-                      std::find_if(i->caps.cbegin(),
-                                   i->caps.cend(),
-                                   [&](const auto &s) { return s.resolution == resolution; });
-                    j != i->caps.cend())
-                        return j->frameRates;
-        }
-        return {};
-}
 #else
 bool
@@ -1400,25 +1073,4 @@ void
 WebRTCSession::end()
 {}
-void
-WebRTCSession::refreshDevices()
-{}
-std::vector<std::string>
-WebRTCSession::getDeviceNames(bool, const std::string &) const
-{
-        return {};
-}
-std::vector<std::string>
-WebRTCSession::getResolutions(const std::string &) const
-{
-        return {};
-}
-std::vector<std::string>
-WebRTCSession::getFrameRates(const std::string &, const std::string &) const
-{
-        return {};
-}
 #endif

src/WebRTCSession.h

@@ -5,6 +5,7 @@
 #include <QObject>
+#include "CallDevices.h"
 #include "mtx/events/voip.hpp"
 typedef struct _GstElement GstElement;
@@ -59,13 +60,6 @@ public:
         void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }
-        void refreshDevices();
-        std::vector<std::string> getDeviceNames(bool isVideo,
-                                                const std::string &defaultDevice) const;
-        std::vector<std::string> getResolutions(const std::string &cameraName) const;
-        std::vector<std::string> getFrameRates(const std::string &cameraName,
-                                               const std::string &resolution) const;
         void setVideoItem(QQuickItem *item) { videoItem_ = item; }
         QQuickItem *getVideoItem() const { return videoItem_; }
@@ -76,7 +70,6 @@ signals:
                           const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
         void newICECandidate(const mtx::events::msg::CallCandidates::Candidate &);
         void stateChanged(webrtc::State);
-        void devicesChanged();
 private slots:
         void setState(webrtc::State state) { state_ = state; }
@@ -84,6 +77,7 @@ private slots:
 private:
         WebRTCSession();
+        CallDevices &devices_;
         bool initialised_ = false;
         bool haveVoicePlugins_ = false;
         bool haveVideoPlugins_ = false;
@@ -101,7 +95,6 @@ private:
         bool startPipeline(int opusPayloadType, int vp8PayloadType);
         bool createPipeline(int opusPayloadType, int vp8PayloadType);
         bool addVideoPipeline(int vp8PayloadType);
-        void startDeviceMonitor();
 public:
         WebRTCSession(WebRTCSession const &) = delete;