2020-07-16 22:44:07 +03:00
|
|
|
#include <cctype>
|
|
|
|
|
2020-07-11 02:19:48 +03:00
|
|
|
#include "Logging.h"
|
2020-08-01 21:31:10 +03:00
|
|
|
#include "WebRTCSession.h"
|
2020-07-11 02:19:48 +03:00
|
|
|
|
2020-08-14 02:03:27 +03:00
|
|
|
#ifdef GSTREAMER_AVAILABLE
|
2020-08-01 21:31:10 +03:00
|
|
|
extern "C"
|
|
|
|
{
|
2020-07-11 02:19:48 +03:00
|
|
|
#include "gst/gst.h"
|
|
|
|
#include "gst/sdp/sdp.h"
|
|
|
|
|
|
|
|
#define GST_USE_UNSTABLE_API
|
|
|
|
#include "gst/webrtc/webrtc.h"
|
|
|
|
}
|
2020-08-14 02:03:27 +03:00
|
|
|
#endif
|
2020-07-11 02:19:48 +03:00
|
|
|
|
2020-07-23 04:15:45 +03:00
|
|
|
Q_DECLARE_METATYPE(WebRTCSession::State)
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
// Constructor: registers the State enum with Qt's meta-type system (required
// for queued signal/slot connections carrying State), mirrors stateChanged
// into the state_ member via setState, and eagerly initialises GStreamer.
WebRTCSession::WebRTCSession()
  : QObject()
{
        qRegisterMetaType<WebRTCSession::State>();
        connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState);
        // Best-effort init; errors are logged inside init() and init() can be
        // retried later with an error-message out-parameter.
        init();
}
|
|
|
|
|
2020-07-11 02:19:48 +03:00
|
|
|
// Initialise GStreamer and verify that every plugin required for an audio
// call is present. Idempotent: returns immediately once initialised_ is set.
// On failure the human-readable reason is logged and, when errorMessage is
// non-null, copied out for display. Returns false unconditionally when built
// without GStreamer.
bool
WebRTCSession::init(std::string *errorMessage)
{
#ifdef GSTREAMER_AVAILABLE
        if (initialised_)
                return true;

        GError *error = nullptr;
        if (!gst_init_check(nullptr, nullptr, &error)) {
                std::string strError = std::string("WebRTC: failed to initialise GStreamer: ");
                if (error) {
                        strError += error->message;
                        g_error_free(error);
                }
                nhlog::ui()->error(strError);
                if (errorMessage)
                        *errorMessage = strError;
                return false;
        }

        gchar *version = gst_version_string();
        std::string gstVersion(version);
        g_free(version);
        nhlog::ui()->info("WebRTC: initialised " + gstVersion);

        // GStreamer Plugins:
        // Base: audioconvert, audioresample, opus, playback, volume
        // Good: autodetect, rtpmanager
        // Bad: dtls, srtp, webrtc
        // libnice [GLib]: nice
        initialised_ = true;
        std::string strError = gstVersion + ": Missing plugins: ";
        const gchar *needed[] = {"audioconvert",
                                 "audioresample",
                                 "autodetect",
                                 "dtls",
                                 "nice",
                                 "opus",
                                 "playback",
                                 "rtpmanager",
                                 "srtp",
                                 "volume",
                                 "webrtc",
                                 nullptr};
        GstRegistry *registry = gst_registry_get();
        // Collect *all* missing plugins into one message rather than failing
        // on the first, so the user can fix their install in one pass.
        for (guint i = 0; i < g_strv_length((gchar **)needed); i++) {
                GstPlugin *plugin = gst_registry_find_plugin(registry, needed[i]);
                if (!plugin) {
                        strError += std::string(needed[i]) + " ";
                        initialised_ = false;
                        continue;
                }
                gst_object_unref(plugin);
        }

        if (initialised_) {
#if GST_CHECK_VERSION(1, 18, 0)
                // >= 1.18: track audio devices dynamically via GstDeviceMonitor.
                startDeviceMonitor();
#endif
        } else {
                nhlog::ui()->error(strError);
                if (errorMessage)
                        *errorMessage = strError;
        }
        return initialised_;
#else
        (void)errorMessage;
        return false;
#endif
}
|
|
|
|
|
2020-08-14 02:03:27 +03:00
|
|
|
#ifdef GSTREAMER_AVAILABLE
|
2020-08-01 21:31:10 +03:00
|
|
|
namespace {
|
|
|
|
// File-local state shared by the GStreamer callbacks below (single-session,
// single-threaded use via the GLib main loop — not synchronized).
bool isoffering_;          // true when we are the caller (offerer) of the current call
std::string localsdp_;     // local session description captured in setLocalDescription()
// ICE candidates gathered locally, batched until gathering completes.
std::vector<mtx::events::msg::CallCandidates::Candidate> localcandidates_;
// Discovered audio capture devices: display name -> owned GstDevice ref.
std::vector<std::pair<std::string, GstDevice *>> audioSources_;
|
|
|
|
|
|
|
|
void
|
|
|
|
addDevice(GstDevice *device)
|
|
|
|
{
|
|
|
|
if (device) {
|
|
|
|
gchar *name = gst_device_get_display_name(device);
|
|
|
|
nhlog::ui()->debug("WebRTC: device added: {}", name);
|
|
|
|
audioSources_.push_back({name, device});
|
|
|
|
g_free(name);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#if GST_CHECK_VERSION(1, 18, 0)
// Drop a device from audioSources_ when the monitor reports it removed or
// changed (a changed device is removed here and re-added by the caller).
// Releases the reference passed in via the bus message.
// NOTE(review): the GstDevice stored in audioSources_ appears to hold its own
// ref from addDevice(); only the message's ref is released here — confirm
// against GstDeviceMonitor ref-count semantics.
void
removeDevice(GstDevice *device, bool changed)
{
        if (device) {
                if (auto it = std::find_if(audioSources_.begin(),
                                           audioSources_.end(),
                                           [device](const auto &s) { return s.second == device; });
                    it != audioSources_.end()) {
                        nhlog::ui()->debug(std::string("WebRTC: device ") +
                                             (changed ? "changed: " : "removed: ") + "{}",
                                           it->first);
                        gst_object_unref(device);
                        audioSources_.erase(it);
                }
        }
}
#endif
|
2020-08-01 21:31:10 +03:00
|
|
|
|
|
|
|
// GStreamer bus watch callback. Handles device hotplug messages (>= 1.18,
// from the device monitor bus where user_data is nullptr) and EOS/error
// messages from the call pipeline (where user_data is the session).
// Returning TRUE keeps the watch installed.
gboolean
newBusMessage(GstBus *bus G_GNUC_UNUSED, GstMessage *msg, gpointer user_data)
{
        WebRTCSession *session = static_cast<WebRTCSession *>(user_data);
        switch (GST_MESSAGE_TYPE(msg)) {
#if GST_CHECK_VERSION(1, 18, 0)
        case GST_MESSAGE_DEVICE_ADDED: {
                GstDevice *device;
                gst_message_parse_device_added(msg, &device);
                addDevice(device);
                break;
        }
        case GST_MESSAGE_DEVICE_REMOVED: {
                GstDevice *device;
                gst_message_parse_device_removed(msg, &device);
                removeDevice(device, false);
                break;
        }
        case GST_MESSAGE_DEVICE_CHANGED: {
                GstDevice *device;
                GstDevice *oldDevice;
                gst_message_parse_device_changed(msg, &device, &oldDevice);
                // A changed device is modelled as remove-old + add-new.
                removeDevice(oldDevice, true);
                addDevice(device);
                break;
        }
#endif
        case GST_MESSAGE_EOS:
                nhlog::ui()->error("WebRTC: end of stream");
                session->end();
                break;
        case GST_MESSAGE_ERROR:
                GError *error;
                gchar *debug;
                gst_message_parse_error(msg, &error, &debug);
                nhlog::ui()->error(
                  "WebRTC: error from element {}: {}", GST_OBJECT_NAME(msg->src), error->message);
                g_clear_error(&error);
                g_free(debug);
                // Any pipeline error tears down the whole call.
                session->end();
                break;
        default:
                break;
        }
        return TRUE;
}
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
// Parse an SDP string into a GstWebRTCSessionDescription of the given type
// (offer or answer). Returns nullptr on parse failure (logged).
// On success, ownership of the GstSDPMessage transfers into the returned
// description; the caller frees it with gst_webrtc_session_description_free().
GstWebRTCSessionDescription *
parseSDP(const std::string &sdp, GstWebRTCSDPType type)
{
        GstSDPMessage *msg;
        gst_sdp_message_new(&msg);
        if (gst_sdp_message_parse_buffer((guint8 *)sdp.c_str(), sdp.size(), msg) == GST_SDP_OK) {
                return gst_webrtc_session_description_new(type, msg);
        } else {
                nhlog::ui()->error("WebRTC: failed to parse remote session description");
                // GstSDPMessage is a plain boxed struct, NOT a GstObject:
                // gst_object_unref() here was invalid — use gst_sdp_message_free().
                gst_sdp_message_free(msg);
                return nullptr;
        }
}
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
// GstPromise change callback for create-offer / create-answer: extracts the
// generated session description from the promise reply, installs it as the
// local description on webrtcbin, and caches its SDP text in localsdp_ for
// later transmission once ICE gathering completes.
void
setLocalDescription(GstPromise *promise, gpointer webrtc)
{
        const GstStructure *reply = gst_promise_get_reply(promise);
        // The reply field name tells us which side produced it.
        gboolean isAnswer = gst_structure_id_has_field(reply, g_quark_from_string("answer"));
        GstWebRTCSessionDescription *gstsdp = nullptr;
        gst_structure_get(reply,
                          isAnswer ? "answer" : "offer",
                          GST_TYPE_WEBRTC_SESSION_DESCRIPTION,
                          &gstsdp,
                          nullptr);
        gst_promise_unref(promise);
        g_signal_emit_by_name(webrtc, "set-local-description", gstsdp, nullptr);

        gchar *sdp = gst_sdp_message_as_text(gstsdp->sdp);
        localsdp_ = std::string(sdp);
        g_free(sdp);
        gst_webrtc_session_description_free(gstsdp);

        nhlog::ui()->debug(
          "WebRTC: local description set ({}):\n{}", isAnswer ? "answer" : "offer", localsdp_);
}
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
void
|
|
|
|
createOffer(GstElement *webrtc)
|
2020-07-11 02:19:48 +03:00
|
|
|
{
|
2020-08-01 21:31:10 +03:00
|
|
|
// create-offer first, then set-local-description
|
|
|
|
GstPromise *promise =
|
|
|
|
gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
|
|
|
|
g_signal_emit_by_name(webrtc, "create-offer", nullptr, promise);
|
2020-07-11 02:19:48 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
2020-08-01 21:31:10 +03:00
|
|
|
createAnswer(GstPromise *promise, gpointer webrtc)
|
2020-07-11 02:19:48 +03:00
|
|
|
{
|
2020-08-01 21:31:10 +03:00
|
|
|
// create-answer first, then set-local-description
|
|
|
|
gst_promise_unref(promise);
|
|
|
|
promise = gst_promise_new_with_change_func(setLocalDescription, webrtc, nullptr);
|
|
|
|
g_signal_emit_by_name(webrtc, "create-answer", nullptr, promise);
|
2020-07-11 02:19:48 +03:00
|
|
|
}
|
|
|
|
|
2020-09-10 21:34:10 +03:00
|
|
|
#if GST_CHECK_VERSION(1, 18, 0)
|
2020-08-03 05:27:05 +03:00
|
|
|
// notify::ice-gathering-state handler (GStreamer >= 1.18 only): once all
// local ICE candidates are gathered, publish the cached local SDP plus the
// batched candidates to the peer and advance the session state.
void
iceGatheringStateChanged(GstElement *webrtc,
                         GParamSpec *pspec G_GNUC_UNUSED,
                         gpointer user_data G_GNUC_UNUSED)
{
        GstWebRTCICEGatheringState newState;
        g_object_get(webrtc, "ice-gathering-state", &newState, nullptr);
        if (newState == GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE) {
                nhlog::ui()->debug("WebRTC: GstWebRTCICEGatheringState -> Complete");
                if (isoffering_) {
                        emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
                        emit WebRTCSession::instance().stateChanged(
                          WebRTCSession::State::OFFERSENT);
                } else {
                        emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
                        emit WebRTCSession::instance().stateChanged(
                          WebRTCSession::State::ANSWERSENT);
                }
        }
}
|
|
|
|
|
|
|
|
#else
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
// Timer callback used on GStreamer < 1.18 as a stand-in for reliable ICE
// gathering completion: after 100ms of candidate silence, treat gathering as
// done and send the SDP + candidate batch. Clears the caller's timer id and
// returns FALSE so GLib removes the (one-shot) source.
gboolean
onICEGatheringCompletion(gpointer timerid)
{
        *(guint *)(timerid) = 0;
        auto &session = WebRTCSession::instance();
        if (isoffering_) {
                emit session.offerCreated(localsdp_, localcandidates_);
                emit session.stateChanged(WebRTCSession::State::OFFERSENT);
        } else {
                emit session.answerCreated(localsdp_, localcandidates_);
                emit session.stateChanged(WebRTCSession::State::ANSWERSENT);
        }
        return FALSE;
}
|
2020-08-03 05:27:05 +03:00
|
|
|
#endif
|
2020-07-11 02:19:48 +03:00
|
|
|
|
|
|
|
// on-ice-candidate handler: collects each locally gathered ICE candidate.
// >= 1.18: just batch it; iceGatheringStateChanged flushes the batch.
// < 1.18: candidates arriving after the offer/answer was sent are trickled
// immediately; earlier ones are batched and flushed by a debounced 100ms
// timer (the gathering-state notification fires too early on 1.16).
void
addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED,
                     guint mlineIndex,
                     gchar *candidate,
                     gpointer G_GNUC_UNUSED)
{
        nhlog::ui()->debug("WebRTC: local candidate: (m-line:{}):{}", mlineIndex, candidate);

#if GST_CHECK_VERSION(1, 18, 0)
        localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate});
        return;
#else
        if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) {
                emit WebRTCSession::instance().newICECandidate(
                  {"audio", (uint16_t)mlineIndex, candidate});
                return;
        }

        localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate});

        // GStreamer v1.16: webrtcbin's notify::ice-gathering-state triggers
        // GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE too early. Fixed in v1.18.
        // Use a 100ms timeout in the meantime
        static guint timerid = 0;
        if (timerid)
                g_source_remove(timerid);

        // Each new candidate resets the debounce window.
        timerid = g_timeout_add(100, onICEGatheringCompletion, &timerid);
#endif
}
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
// notify::ice-connection-state handler: maps ICE connectivity progress onto
// the session state machine (Checking -> CONNECTING, Failed -> ICEFAILED).
// Other ICE states are intentionally ignored; CONNECTED is signalled when
// media actually flows (see linkNewPad).
void
iceConnectionStateChanged(GstElement *webrtc,
                          GParamSpec *pspec G_GNUC_UNUSED,
                          gpointer user_data G_GNUC_UNUSED)
{
        GstWebRTCICEConnectionState newState;
        g_object_get(webrtc, "ice-connection-state", &newState, nullptr);
        switch (newState) {
        case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING:
                nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking");
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING);
                break;
        case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED:
                nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed");
                emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED);
                break;
        default:
                break;
        }
}
|
|
|
|
|
|
|
|
// decodebin pad-added handler: when the decoded stream is audio, builds a
// playback branch (queue -> audioconvert -> audioresample -> autoaudiosink)
// inside the running pipeline, links the new pad into it, and marks the
// session CONNECTED on success. Non-audio pads are ignored.
void
linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
        GstCaps *caps = gst_pad_get_current_caps(newpad);
        if (!caps)
                return;

        const gchar *name = gst_structure_get_name(gst_caps_get_structure(caps, 0));
        gst_caps_unref(caps);

        GstPad *queuepad = nullptr;
        if (g_str_has_prefix(name, "audio")) {
                nhlog::ui()->debug("WebRTC: received incoming audio stream");
                GstElement *queue = gst_element_factory_make("queue", nullptr);
                GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
                GstElement *resample = gst_element_factory_make("audioresample", nullptr);
                GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr);
                gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr);
                gst_element_link_many(queue, convert, resample, sink, nullptr);
                // Elements added to a live pipeline must be brought up to the
                // pipeline's state explicitly.
                gst_element_sync_state_with_parent(queue);
                gst_element_sync_state_with_parent(convert);
                gst_element_sync_state_with_parent(resample);
                gst_element_sync_state_with_parent(sink);
                queuepad = gst_element_get_static_pad(queue, "sink");
        }

        if (queuepad) {
                if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, queuepad)))
                        nhlog::ui()->error("WebRTC: unable to link new pad");
                else {
                        emit WebRTCSession::instance().stateChanged(
                          WebRTCSession::State::CONNECTED);
                }
                gst_object_unref(queuepad);
        }
}
|
|
|
|
|
|
|
|
// webrtcbin pad-added handler: for each incoming (src) stream, inserts a
// decodebin into the pipeline and links the new pad to it; the decoded
// output is then routed by linkNewPad. Sink pads are ignored.
void
addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
{
        if (GST_PAD_DIRECTION(newpad) != GST_PAD_SRC)
                return;

        nhlog::ui()->debug("WebRTC: received incoming stream");
        GstElement *decodebin = gst_element_factory_make("decodebin", nullptr);
        g_signal_connect(decodebin, "pad-added", G_CALLBACK(linkNewPad), pipe);
        gst_bin_add(GST_BIN(pipe), decodebin);
        gst_element_sync_state_with_parent(decodebin);
        GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
        if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
                nhlog::ui()->error("WebRTC: unable to link new pad");
        gst_object_unref(sinkpad);
}
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
// Case-insensitively locate the first occurrence of `name` inside `sdp`.
// Returns sdp.cend() when not found.
std::string::const_iterator
findName(const std::string &sdp, const std::string &name)
{
        auto equalsIgnoreCase = [](unsigned char lhs, unsigned char rhs) {
                return std::tolower(lhs) == std::tolower(rhs);
        };
        return std::search(
          sdp.cbegin(), sdp.cend(), name.cbegin(), name.cend(), equalsIgnoreCase);
}
|
2020-07-26 01:11:11 +03:00
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
int
|
|
|
|
getPayloadType(const std::string &sdp, const std::string &name)
|
|
|
|
{
|
|
|
|
// eg a=rtpmap:111 opus/48000/2
|
|
|
|
auto e = findName(sdp, name);
|
|
|
|
if (e == sdp.cend()) {
|
|
|
|
nhlog::ui()->error("WebRTC: remote offer - " + name + " attribute missing");
|
|
|
|
return -1;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (auto s = sdp.rfind(':', e - sdp.cbegin()); s == std::string::npos) {
|
|
|
|
nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
|
|
|
|
" payload type");
|
|
|
|
return -1;
|
|
|
|
} else {
|
|
|
|
++s;
|
|
|
|
try {
|
|
|
|
return std::stoi(std::string(sdp, s, e - sdp.cbegin() - s));
|
|
|
|
} catch (...) {
|
|
|
|
nhlog::ui()->error("WebRTC: remote offer - unable to determine " + name +
|
|
|
|
" payload type");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return -1;
|
|
|
|
}
|
2020-07-24 00:58:22 +03:00
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
}
|
2020-07-11 02:19:48 +03:00
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
bool
|
|
|
|
WebRTCSession::createOffer()
|
|
|
|
{
|
|
|
|
isoffering_ = true;
|
|
|
|
localsdp_.clear();
|
|
|
|
localcandidates_.clear();
|
|
|
|
return startPipeline(111); // a dynamic opus payload type
|
|
|
|
}
|
2020-07-11 02:19:48 +03:00
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
// Handle an inbound call offer: validate our state, extract the caller's
// opus payload type, parse the remote SDP, start our pipeline, then set the
// remote description (the promise continues into createAnswer).
// Returns false on any failure, leaving the session DISCONNECTED.
bool
WebRTCSession::acceptOffer(const std::string &sdp)
{
        nhlog::ui()->debug("WebRTC: received offer:\n{}", sdp);
        if (state_ != State::DISCONNECTED)
                return false;

        isoffering_ = false;
        localsdp_.clear();
        localcandidates_.clear();

        // We must echo the caller's dynamic opus payload type in our answer.
        int opusPayloadType = getPayloadType(sdp, "opus");
        if (opusPayloadType == -1)
                return false;

        GstWebRTCSessionDescription *offer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_OFFER);
        if (!offer)
                return false;

        if (!startPipeline(opusPayloadType)) {
                gst_webrtc_session_description_free(offer);
                return false;
        }

        // set-remote-description first, then create-answer
        GstPromise *promise = gst_promise_new_with_change_func(createAnswer, webrtc_, nullptr);
        g_signal_emit_by_name(webrtc_, "set-remote-description", offer, promise);
        gst_webrtc_session_description_free(offer);
        return true;
}
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
// Handle the callee's SDP answer to our outstanding offer. Only valid in
// the OFFERSENT state. A malformed answer tears down the session.
bool
WebRTCSession::acceptAnswer(const std::string &sdp)
{
        nhlog::ui()->debug("WebRTC: received answer:\n{}", sdp);
        if (state_ != State::OFFERSENT)
                return false;

        GstWebRTCSessionDescription *answer = parseSDP(sdp, GST_WEBRTC_SDP_TYPE_ANSWER);
        if (!answer) {
                end();
                return false;
        }

        g_signal_emit_by_name(webrtc_, "set-remote-description", answer, nullptr);
        gst_webrtc_session_description_free(answer);
        return true;
}
|
|
|
|
|
2020-07-26 17:59:50 +03:00
|
|
|
void
|
2020-08-01 21:31:10 +03:00
|
|
|
WebRTCSession::acceptICECandidates(
|
|
|
|
const std::vector<mtx::events::msg::CallCandidates::Candidate> &candidates)
|
2020-07-26 17:59:50 +03:00
|
|
|
{
|
2020-08-01 21:31:10 +03:00
|
|
|
if (state_ >= State::INITIATED) {
|
|
|
|
for (const auto &c : candidates) {
|
|
|
|
nhlog::ui()->debug(
|
|
|
|
"WebRTC: remote candidate: (m-line:{}):{}", c.sdpMLineIndex, c.candidate);
|
2020-09-13 17:21:29 +03:00
|
|
|
if (!c.candidate.empty()) {
|
|
|
|
g_signal_emit_by_name(webrtc_,
|
|
|
|
"add-ice-candidate",
|
|
|
|
c.sdpMLineIndex,
|
|
|
|
c.candidate.c_str());
|
|
|
|
}
|
2020-08-01 21:31:10 +03:00
|
|
|
}
|
|
|
|
}
|
2020-07-26 17:59:50 +03:00
|
|
|
}
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
// Build and start the call pipeline: creates the element graph, configures
// STUN/TURN on webrtcbin, wires all negotiation/ICE/media callbacks, then
// sets the pipeline PLAYING and installs the bus watch. Signal connections
// must happen before PLAYING so no early emissions are missed.
bool
WebRTCSession::startPipeline(int opusPayloadType)
{
        if (state_ != State::DISCONNECTED)
                return false;

        emit stateChanged(State::INITIATING);

        if (!createPipeline(opusPayloadType))
                return false;

        webrtc_ = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin");

        if (!stunServer_.empty()) {
                nhlog::ui()->info("WebRTC: setting STUN server: {}", stunServer_);
                g_object_set(webrtc_, "stun-server", stunServer_.c_str(), nullptr);
        }

        for (const auto &uri : turnServers_) {
                nhlog::ui()->info("WebRTC: setting TURN server: {}", uri);
                // add-turn-server returns its result through this out-param.
                gboolean udata;
                g_signal_emit_by_name(webrtc_, "add-turn-server", uri.c_str(), (gpointer)(&udata));
        }
        if (turnServers_.empty())
                nhlog::ui()->warn("WebRTC: no TURN server provided");

        // generate the offer when the pipeline goes to PLAYING
        if (isoffering_)
                g_signal_connect(
                  webrtc_, "on-negotiation-needed", G_CALLBACK(::createOffer), nullptr);

        // on-ice-candidate is emitted when a local ICE candidate has been gathered
        g_signal_connect(webrtc_, "on-ice-candidate", G_CALLBACK(addLocalICECandidate), nullptr);

        // capture ICE failure
        g_signal_connect(
          webrtc_, "notify::ice-connection-state", G_CALLBACK(iceConnectionStateChanged), nullptr);

        // incoming streams trigger pad-added
        gst_element_set_state(pipe_, GST_STATE_READY);
        g_signal_connect(webrtc_, "pad-added", G_CALLBACK(addDecodeBin), pipe_);

#if GST_CHECK_VERSION(1, 18, 0)
        // capture ICE gathering completion
        g_signal_connect(
          webrtc_, "notify::ice-gathering-state", G_CALLBACK(iceGatheringStateChanged), nullptr);
#endif
        // webrtcbin lifetime is the same as that of the pipeline
        gst_object_unref(webrtc_);

        // start the pipeline
        GstStateChangeReturn ret = gst_element_set_state(pipe_, GST_STATE_PLAYING);
        if (ret == GST_STATE_CHANGE_FAILURE) {
                nhlog::ui()->error("WebRTC: unable to start pipeline");
                end();
                return false;
        }

        GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipe_));
        busWatchId_ = gst_bus_add_watch(bus, newBusMessage, this);
        gst_object_unref(bus);
        emit stateChanged(State::INITIATED);
        return true;
}
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
// Assemble the outbound audio pipeline:
//   device source -> volume("srclevel", used for mute) -> audioconvert ->
//   audioresample -> queue -> opusenc -> rtpopuspay -> queue ->
//   capsfilter(RTP/OPUS with the negotiated payload type) -> webrtcbin.
// Fails (and tears down) if no/invalid audio source is selected or if
// linking fails. pipe_ owns all elements afterwards.
bool
WebRTCSession::createPipeline(int opusPayloadType)
{
        if (audioSources_.empty()) {
                nhlog::ui()->error("WebRTC: no audio sources");
                return false;
        }

        if (audioSourceIndex_ < 0 || (size_t)audioSourceIndex_ >= audioSources_.size()) {
                nhlog::ui()->error("WebRTC: invalid audio source index");
                return false;
        }

        GstElement *source =
          gst_device_create_element(audioSources_[audioSourceIndex_].second, nullptr);
        // Named "srclevel" so toggleMuteAudioSrc can find it later.
        GstElement *volume = gst_element_factory_make("volume", "srclevel");
        GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
        GstElement *resample = gst_element_factory_make("audioresample", nullptr);
        GstElement *queue1 = gst_element_factory_make("queue", nullptr);
        GstElement *opusenc = gst_element_factory_make("opusenc", nullptr);
        GstElement *rtp = gst_element_factory_make("rtpopuspay", nullptr);
        GstElement *queue2 = gst_element_factory_make("queue", nullptr);
        GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);

        GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp",
                                               "media",
                                               G_TYPE_STRING,
                                               "audio",
                                               "encoding-name",
                                               G_TYPE_STRING,
                                               "OPUS",
                                               "payload",
                                               G_TYPE_INT,
                                               opusPayloadType,
                                               nullptr);
        g_object_set(capsfilter, "caps", rtpcaps, nullptr);
        gst_caps_unref(rtpcaps);

        GstElement *webrtcbin = gst_element_factory_make("webrtcbin", "webrtcbin");
        g_object_set(webrtcbin, "bundle-policy", GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE, nullptr);

        pipe_ = gst_pipeline_new(nullptr);
        gst_bin_add_many(GST_BIN(pipe_),
                         source,
                         volume,
                         convert,
                         resample,
                         queue1,
                         opusenc,
                         rtp,
                         queue2,
                         capsfilter,
                         webrtcbin,
                         nullptr);

        if (!gst_element_link_many(source,
                                   volume,
                                   convert,
                                   resample,
                                   queue1,
                                   opusenc,
                                   rtp,
                                   queue2,
                                   capsfilter,
                                   webrtcbin,
                                   nullptr)) {
                nhlog::ui()->error("WebRTC: failed to link pipeline elements");
                end();
                return false;
        }
        return true;
}
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
bool
|
|
|
|
WebRTCSession::toggleMuteAudioSrc(bool &isMuted)
|
2020-07-11 02:19:48 +03:00
|
|
|
{
|
2020-08-01 21:31:10 +03:00
|
|
|
if (state_ < State::INITIATED)
|
|
|
|
return false;
|
|
|
|
|
|
|
|
GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel");
|
|
|
|
if (!srclevel)
|
|
|
|
return false;
|
|
|
|
|
|
|
|
gboolean muted;
|
|
|
|
g_object_get(srclevel, "mute", &muted, nullptr);
|
|
|
|
g_object_set(srclevel, "mute", !muted, nullptr);
|
|
|
|
gst_object_unref(srclevel);
|
|
|
|
isMuted = !muted;
|
|
|
|
return true;
|
2020-07-11 02:19:48 +03:00
|
|
|
}
|
|
|
|
|
2020-08-01 21:31:10 +03:00
|
|
|
// Tear down the current call: stop and release the pipeline, remove its bus
// watch, clear the webrtcbin borrow, and signal DISCONNECTED (once). Safe to
// call when no call is active.
void
WebRTCSession::end()
{
        nhlog::ui()->debug("WebRTC: ending session");
        if (pipe_) {
                gst_element_set_state(pipe_, GST_STATE_NULL);
                gst_object_unref(pipe_);
                pipe_ = nullptr;
                g_source_remove(busWatchId_);
                busWatchId_ = 0;
        }
        // webrtc_ was a non-owning borrow; the pipeline owned it.
        webrtc_ = nullptr;
        if (state_ != State::DISCONNECTED)
                emit stateChanged(State::DISCONNECTED);
}
|
2020-08-06 00:56:44 +03:00
|
|
|
|
2020-09-17 18:37:30 +03:00
|
|
|
#if GST_CHECK_VERSION(1, 18, 0)
|
|
|
|
// GStreamer >= 1.18: start a process-lifetime GstDeviceMonitor filtered to
// raw audio sources. Hotplug events arrive on its bus via newBusMessage
// (with null user_data) and keep audioSources_ current. Idempotent via the
// function-local static.
void
WebRTCSession::startDeviceMonitor()
{
        if (!initialised_)
                return;

        static GstDeviceMonitor *monitor = nullptr;
        if (!monitor) {
                monitor = gst_device_monitor_new();
                GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
                gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
                gst_caps_unref(caps);

                GstBus *bus = gst_device_monitor_get_bus(monitor);
                gst_bus_add_watch(bus, newBusMessage, nullptr);
                gst_object_unref(bus);
                if (!gst_device_monitor_start(monitor)) {
                        nhlog::ui()->error("WebRTC: failed to start device monitor");
                        return;
                }
        }
}
|
|
|
|
|
|
|
|
#else
|
|
|
|
|
2020-08-06 00:56:44 +03:00
|
|
|
// GStreamer < 1.18 fallback (no hotplug notifications): rebuild the
// audioSources_ list on demand by polling a lazily created device monitor.
// Existing device refs are released before repopulating.
void
WebRTCSession::refreshDevices()
{
        if (!initialised_)
                return;

        static GstDeviceMonitor *monitor = nullptr;
        if (!monitor) {
                monitor = gst_device_monitor_new();
                GstCaps *caps = gst_caps_new_empty_simple("audio/x-raw");
                gst_device_monitor_add_filter(monitor, "Audio/Source", caps);
                gst_caps_unref(caps);
        }

        std::for_each(audioSources_.begin(), audioSources_.end(), [](const auto &s) {
                gst_object_unref(s.second);
        });
        audioSources_.clear();
        GList *devices = gst_device_monitor_get_devices(monitor);
        if (devices) {
                audioSources_.reserve(g_list_length(devices));
                // addDevice takes ownership of each device ref from the list.
                for (GList *l = devices; l != nullptr; l = l->next)
                        addDevice(GST_DEVICE_CAST(l->data));
                g_list_free(devices);
        }
}
|
2020-09-17 18:37:30 +03:00
|
|
|
#endif
|
2020-08-06 00:56:44 +03:00
|
|
|
|
|
|
|
std::vector<std::string>
|
|
|
|
WebRTCSession::getAudioSourceNames(const std::string &defaultDevice)
|
|
|
|
{
|
2020-09-17 18:37:30 +03:00
|
|
|
#if !GST_CHECK_VERSION(1, 18, 0)
|
2020-08-06 00:56:44 +03:00
|
|
|
refreshDevices();
|
2020-09-17 18:37:30 +03:00
|
|
|
#endif
|
|
|
|
// move default device to top of the list
|
|
|
|
if (auto it = std::find_if(audioSources_.begin(),
|
|
|
|
audioSources_.end(),
|
|
|
|
[&](const auto &s) { return s.first == defaultDevice; });
|
|
|
|
it != audioSources_.end())
|
|
|
|
std::swap(audioSources_.front(), *it);
|
|
|
|
|
2020-08-06 00:56:44 +03:00
|
|
|
std::vector<std::string> ret;
|
2020-09-17 18:37:30 +03:00
|
|
|
ret.reserve(audioSources_.size());
|
|
|
|
std::for_each(audioSources_.cbegin(), audioSources_.cend(), [&](const auto &s) {
|
|
|
|
ret.push_back(s.first);
|
|
|
|
});
|
2020-08-06 00:56:44 +03:00
|
|
|
return ret;
|
|
|
|
}
|
2020-09-17 18:37:30 +03:00
|
|
|
|
2020-08-14 02:03:27 +03:00
|
|
|
#else
|
|
|
|
|
|
|
|
// Stub implementations used when nheko is built WITHOUT GStreamer: every
// call-related operation is a no-op that reports failure / emptiness, so the
// rest of the client can link and run with calling disabled.
bool
WebRTCSession::createOffer()
{
        return false;
}

bool
WebRTCSession::acceptOffer(const std::string &)
{
        return false;
}

bool
WebRTCSession::acceptAnswer(const std::string &)
{
        return false;
}

void
WebRTCSession::acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &)
{}

bool
WebRTCSession::startPipeline(int)
{
        return false;
}

bool
WebRTCSession::createPipeline(int)
{
        return false;
}

bool
WebRTCSession::toggleMuteAudioSrc(bool &)
{
        return false;
}

void
WebRTCSession::end()
{}

void
WebRTCSession::refreshDevices()
{}

void
WebRTCSession::startDeviceMonitor()
{}

std::vector<std::string>
WebRTCSession::getAudioSourceNames(const std::string &)
{
        return {};
}
|
|
|
|
|
|
|
|
#endif
|