Mirror of https://github.com/Nheko-Reborn/nheko.git
Synced 2024-11-26 04:58:49 +03:00
Merge pull request #320 from trilene/webrtc-video

Video calls: add local webcam view

Commit 27bf654d92: 7 changed files with 168 additions and 57 deletions
resources/icons/ui/toggle-camera-view.png: new binary file, 374 B (binary content not shown)

@@ -103,6 +103,22 @@ Rectangle {
             Layout.fillWidth: true
         }
 
+        ImageButton {
+            visible: TimelineManager.onVideoCall
+            width: 24
+            height: 24
+            buttonTextColor: "#000000"
+            image: ":/icons/icons/ui/toggle-camera-view.png"
+            hoverEnabled: true
+            ToolTip.visible: hovered
+            ToolTip.text: "Toggle camera view"
+            onClicked: TimelineManager.toggleCameraView()
+        }
+
+        Item {
+            implicitWidth: 8
+        }
+
         ImageButton {
             width: 24
             height: 24

@@ -74,6 +74,7 @@
         <file>icons/ui/end-call.png</file>
         <file>icons/ui/microphone-mute.png</file>
         <file>icons/ui/microphone-unmute.png</file>
+        <file>icons/ui/toggle-camera-view.png</file>
         <file>icons/ui/video-call.png</file>
 
         <file>icons/emoji-categories/people.png</file>

@@ -103,6 +103,7 @@ bool haveAudioStream_;
 bool haveVideoStream_;
 std::vector<AudioSource> audioSources_;
 std::vector<VideoSource> videoSources_;
+GstPad *insetSinkPad_ = nullptr;
 
 using FrameRate = std::pair<int, int>;
 std::optional<FrameRate>

@@ -496,6 +497,92 @@ setWaitForKeyFrame(GstBin *decodebin G_GNUC_UNUSED, GstElement *element, gpointe
 }
 #endif
 
+GstElement *
+newAudioSinkChain(GstElement *pipe)
+{
+        GstElement *queue = gst_element_factory_make("queue", nullptr);
+        GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
+        GstElement *resample = gst_element_factory_make("audioresample", nullptr);
+        GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr);
+        gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr);
+        gst_element_link_many(queue, convert, resample, sink, nullptr);
+        gst_element_sync_state_with_parent(queue);
+        gst_element_sync_state_with_parent(convert);
+        gst_element_sync_state_with_parent(resample);
+        gst_element_sync_state_with_parent(sink);
+        return queue;
+}
+
+GstElement *
+newVideoSinkChain(GstElement *pipe)
+{
+        // use compositor for now; acceleration needs investigation
+        GstElement *queue = gst_element_factory_make("queue", nullptr);
+        GstElement *compositor = gst_element_factory_make("compositor", "compositor");
+        GstElement *glupload = gst_element_factory_make("glupload", nullptr);
+        GstElement *glcolorconvert = gst_element_factory_make("glcolorconvert", nullptr);
+        GstElement *qmlglsink = gst_element_factory_make("qmlglsink", nullptr);
+        GstElement *glsinkbin = gst_element_factory_make("glsinkbin", nullptr);
+        g_object_set(qmlglsink, "widget", WebRTCSession::instance().getVideoItem(), nullptr);
+        g_object_set(glsinkbin, "sink", qmlglsink, nullptr);
+        gst_bin_add_many(
+          GST_BIN(pipe), queue, compositor, glupload, glcolorconvert, glsinkbin, nullptr);
+        gst_element_link_many(queue, compositor, glupload, glcolorconvert, glsinkbin, nullptr);
+        gst_element_sync_state_with_parent(queue);
+        gst_element_sync_state_with_parent(compositor);
+        gst_element_sync_state_with_parent(glupload);
+        gst_element_sync_state_with_parent(glcolorconvert);
+        gst_element_sync_state_with_parent(glsinkbin);
+        return queue;
+}
+
+std::pair<int, int>
+getResolution(GstPad *pad)
+{
+        std::pair<int, int> ret;
+        GstCaps *caps = gst_pad_get_current_caps(pad);
+        const GstStructure *s = gst_caps_get_structure(caps, 0);
+        gst_structure_get_int(s, "width", &ret.first);
+        gst_structure_get_int(s, "height", &ret.second);
+        gst_caps_unref(caps);
+        return ret;
+}
+
+void
+addCameraView(GstElement *pipe, const std::pair<int, int> &videoCallSize)
+{
+        GstElement *tee = gst_bin_get_by_name(GST_BIN(pipe), "videosrctee");
+        GstElement *queue = gst_element_factory_make("queue", nullptr);
+        GstElement *videorate = gst_element_factory_make("videorate", nullptr);
+        gst_bin_add_many(GST_BIN(pipe), queue, videorate, nullptr);
+        gst_element_link_many(tee, queue, videorate, nullptr);
+        gst_element_sync_state_with_parent(queue);
+        gst_element_sync_state_with_parent(videorate);
+        gst_object_unref(tee);
+
+        GstElement *camerafilter = gst_bin_get_by_name(GST_BIN(pipe), "camerafilter");
+        GstPad *filtersinkpad = gst_element_get_static_pad(camerafilter, "sink");
+        auto cameraResolution = getResolution(filtersinkpad);
+        int insetWidth = videoCallSize.first / 4;
+        int insetHeight =
+          static_cast<double>(cameraResolution.second) / cameraResolution.first * insetWidth;
+        nhlog::ui()->debug("WebRTC: picture-in-picture size: {}x{}", insetWidth, insetHeight);
+        gst_object_unref(filtersinkpad);
+        gst_object_unref(camerafilter);
+
+        GstPad *camerapad = gst_element_get_static_pad(videorate, "src");
+        GstElement *compositor = gst_bin_get_by_name(GST_BIN(pipe), "compositor");
+        insetSinkPad_ = gst_element_get_request_pad(compositor, "sink_%u");
+        g_object_set(insetSinkPad_, "zorder", 2, nullptr);
+        g_object_set(insetSinkPad_, "width", insetWidth, "height", insetHeight, nullptr);
+        gint offset = videoCallSize.first / 80;
+        g_object_set(insetSinkPad_, "xpos", offset, "ypos", offset, nullptr);
+        if (GST_PAD_LINK_FAILED(gst_pad_link(camerapad, insetSinkPad_)))
+                nhlog::ui()->error("WebRTC: failed to link camera view chain");
+        gst_object_unref(camerapad);
+        gst_object_unref(compositor);
+}
+
 void
 linkNewPad(GstElement *decodebin, GstPad *newpad, GstElement *pipe)
 {
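
The new video sink chain routes the remote stream through a compositor so the local camera can be overlaid as a picture-in-picture inset on a compositor request pad, with zorder deciding which feed is drawn on top. A standalone sketch of the same technique, using stock test sources in place of the real camera and remote streams (illustrative only, not nheko code; the pad-property syntax in gst_parse_launch assumes GStreamer 1.16+):

// Picture-in-picture via compositor request pads, as in newVideoSinkChain()
// and addCameraView() above. sink_1 gets a higher zorder so the inset is
// drawn on top; zorder 0 would hide it behind the main feed.
#include <gst/gst.h>

int
main(int argc, char *argv[])
{
        gst_init(&argc, &argv);
        GError *error = nullptr;
        GstElement *pipe = gst_parse_launch(
          "compositor name=mix sink_1::xpos=16 sink_1::ypos=16 "
          "sink_1::width=160 sink_1::height=120 sink_1::zorder=2 "
          "! videoconvert ! autovideosink "
          "videotestsrc pattern=smpte ! video/x-raw,width=640,height=480 ! mix.sink_0 "
          "videotestsrc pattern=ball ! video/x-raw,width=320,height=240 ! mix.sink_1",
          &error);
        if (!pipe) {
                g_printerr("parse error: %s\n", error->message);
                g_clear_error(&error);
                return 1;
        }
        gst_element_set_state(pipe, GST_STATE_PLAYING);
        g_main_loop_run(g_main_loop_new(nullptr, FALSE));
        return 0;
}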

@@ -511,51 +598,29 @@ linkNewPad(GstElement *decodebin, GstPad *newpad, GstElement *pipe)
         gst_object_unref(sinkpad);
 
         WebRTCSession *session = &WebRTCSession::instance();
-        GstElement *queue = gst_element_factory_make("queue", nullptr);
+        GstElement *queue = nullptr;
         if (!std::strcmp(mediaType, "audio")) {
                 nhlog::ui()->debug("WebRTC: received incoming audio stream");
                 haveAudioStream_ = true;
-                GstElement *convert = gst_element_factory_make("audioconvert", nullptr);
-                GstElement *resample = gst_element_factory_make("audioresample", nullptr);
-                GstElement *sink = gst_element_factory_make("autoaudiosink", nullptr);
-                gst_bin_add_many(GST_BIN(pipe), queue, convert, resample, sink, nullptr);
-                gst_element_link_many(queue, convert, resample, sink, nullptr);
-                gst_element_sync_state_with_parent(queue);
-                gst_element_sync_state_with_parent(convert);
-                gst_element_sync_state_with_parent(resample);
-                gst_element_sync_state_with_parent(sink);
+                queue = newAudioSinkChain(pipe);
         } else if (!std::strcmp(mediaType, "video")) {
                 nhlog::ui()->debug("WebRTC: received incoming video stream");
                 if (!session->getVideoItem()) {
                         g_free(mediaType);
-                        gst_object_unref(queue);
                         nhlog::ui()->error("WebRTC: video call item not set");
                         return;
                 }
                 haveVideoStream_ = true;
                 keyFrameRequestData_.statsField =
                   std::string("rtp-inbound-stream-stats_") + std::to_string(ssrc);
-                GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
-                GstElement *glupload = gst_element_factory_make("glupload", nullptr);
-                GstElement *glcolorconvert = gst_element_factory_make("glcolorconvert", nullptr);
-                GstElement *qmlglsink = gst_element_factory_make("qmlglsink", nullptr);
-                GstElement *glsinkbin = gst_element_factory_make("glsinkbin", nullptr);
-                g_object_set(qmlglsink, "widget", session->getVideoItem(), nullptr);
-                g_object_set(glsinkbin, "sink", qmlglsink, nullptr);
-                gst_bin_add_many(
-                  GST_BIN(pipe), queue, videoconvert, glupload, glcolorconvert, glsinkbin, nullptr);
-                gst_element_link_many(
-                  queue, videoconvert, glupload, glcolorconvert, glsinkbin, nullptr);
-                gst_element_sync_state_with_parent(queue);
-                gst_element_sync_state_with_parent(videoconvert);
-                gst_element_sync_state_with_parent(glupload);
-                gst_element_sync_state_with_parent(glcolorconvert);
-                gst_element_sync_state_with_parent(glsinkbin);
+                queue = newVideoSinkChain(pipe);
+                auto videoCallSize = getResolution(newpad);
+                nhlog::ui()->info("WebRTC: incoming video resolution: {}x{}",
+                                  videoCallSize.first,
+                                  videoCallSize.second);
+                addCameraView(pipe, videoCallSize);
         } else {
                 g_free(mediaType);
-                gst_object_unref(queue);
                 nhlog::ui()->error("WebRTC: unknown pad type: {}", GST_PAD_NAME(newpad));
                 return;
         }
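
linkNewPad() above is the handler that runs when a decoded stream appears at runtime; the refactor moves chain construction into newAudioSinkChain()/newVideoSinkChain() and leaves only the branching here. A minimal sketch of the underlying pad-added callback pattern, with illustrative names (not nheko's exact signal wiring):

// The "pad-added" pattern behind linkNewPad(): when a decoder exposes a new
// source pad, link it to the head of a prebuilt, already-added sink chain.
static void
onPadAdded(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, gpointer user_data)
{
        GstElement *chainhead = GST_ELEMENT(user_data); // e.g. the queue returned above
        GstPad *sinkpad = gst_element_get_static_pad(chainhead, "sink");
        if (!gst_pad_is_linked(sinkpad) &&
            GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
                g_printerr("failed to link decoded pad\n");
        gst_object_unref(sinkpad);
}

// wiring: g_signal_connect(decodebin, "pad-added", G_CALLBACK(onPadAdded), queue);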

@@ -600,7 +665,7 @@ addDecodeBin(GstElement *webrtc G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe)
         gst_element_sync_state_with_parent(decodebin);
         GstPad *sinkpad = gst_element_get_static_pad(decodebin, "sink");
         if (GST_PAD_LINK_FAILED(gst_pad_link(newpad, sinkpad)))
-                nhlog::ui()->error("WebRTC: unable to link new pad");
+                nhlog::ui()->error("WebRTC: unable to link decodebin");
         gst_object_unref(sinkpad);
 }

@@ -689,7 +754,8 @@ WebRTCSession::havePlugins(bool isVideo, std::string *errorMessage)
           "webrtc",
           nullptr};
 
-        const gchar *videoPlugins[] = {"opengl", "qmlgl", "rtp", "videoconvert", "vpx", nullptr};
+        const gchar *videoPlugins[] = {
+          "compositor", "opengl", "qmlgl", "rtp", "videoconvert", "vpx", nullptr};
 
         std::string strError("Missing GStreamer plugins: ");
         const gchar **needed = isVideo ? videoPlugins : voicePlugins;
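
The required plugin list gains "compositor" because the new sink chain depends on it. One plausible way such a check can be implemented against the GStreamer registry (a sketch; the body of havePlugins() is not part of this hunk):

// Probe the registry for a named plugin, e.g. havePlugin("compositor").
#include <gst/gst.h>

bool
havePlugin(const char *name)
{
        GstRegistry *registry = gst_registry_get(); // singleton, not owned
        GstPlugin *plugin = gst_registry_find_plugin(registry, name);
        if (!plugin)
                return false;
        gst_object_unref(plugin); // find_plugin returns a new reference
        return true;
}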

@@ -729,6 +795,7 @@ WebRTCSession::createOffer(bool isVideo)
         videoItem_ = nullptr;
         haveAudioStream_ = false;
         haveVideoStream_ = false;
+        insetSinkPad_ = nullptr;
         localsdp_.clear();
         localcandidates_.clear();

@@ -752,6 +819,7 @@ WebRTCSession::acceptOffer(const std::string &sdp)
         videoItem_ = nullptr;
         haveAudioStream_ = false;
         haveVideoStream_ = false;
+        insetSinkPad_ = nullptr;
         localsdp_.clear();
         localcandidates_.clear();

@@ -974,6 +1042,7 @@ WebRTCSession::createPipeline(int opusPayloadType, int vp8PayloadType)
                 nhlog::ui()->error("WebRTC: failed to link audio pipeline elements");
                 return false;
         }
+
         return isVideo_ ? addVideoPipeline(vp8PayloadType) : true;
 }

@@ -984,8 +1053,9 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
         if (videoSources_.empty())
                 return !isOffering_;
 
-        std::string cameraSetting = ChatPage::instance()->userSettings()->camera().toStdString();
+        QSharedPointer<UserSettings> settings = ChatPage::instance()->userSettings();
+        std::string cameraSetting = settings->camera().toStdString();
         auto it = std::find_if(videoSources_.cbegin(),
                                videoSources_.cend(),
                                [&cameraSetting](const auto &s) { return s.name == cameraSetting; });
         if (it == videoSources_.cend()) {

@@ -993,11 +1063,9 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
                 return false;
         }
 
-        std::string resSetting =
-          ChatPage::instance()->userSettings()->cameraResolution().toStdString();
+        std::string resSetting = settings->cameraResolution().toStdString();
         const std::string &res = resSetting.empty() ? it->caps.front().resolution : resSetting;
-        std::string frSetting =
-          ChatPage::instance()->userSettings()->cameraFrameRate().toStdString();
+        std::string frSetting = settings->cameraFrameRate().toStdString();
         const std::string &fr = frSetting.empty() ? it->caps.front().frameRates.front() : frSetting;
         auto resolution = tokenise(res, 'x');
         auto frameRate = tokenise(fr, '/');
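
tokenise() is an existing helper whose definition sits outside this diff; for illustration, a hypothetical equivalent that splits settings strings such as "640x480" or "30/1" into integer pairs:

// Hypothetical stand-in for the tokenise() helper used above.
#include <string>
#include <utility>

std::pair<int, int>
tokenise(const std::string &str, char delim)
{
        std::pair<int, int> ret{0, 0};
        auto pos = str.find(delim);
        if (pos != std::string::npos) {
                ret.first = std::stoi(str.substr(0, pos));
                ret.second = std::stoi(str.substr(pos + 1));
        }
        return ret;
}

// tokenise("640x480", 'x') yields {640, 480}; tokenise("30/1", '/') yields {30, 1}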

@@ -1005,9 +1073,10 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
         nhlog::ui()->debug("WebRTC: camera resolution: {}x{}", resolution.first, resolution.second);
         nhlog::ui()->debug("WebRTC: camera frame rate: {}/{}", frameRate.first, frameRate.second);
 
         GstElement *source = gst_device_create_element(it->device, nullptr);
-        GstElement *capsfilter = gst_element_factory_make("capsfilter", nullptr);
+        GstElement *videoconvert = gst_element_factory_make("videoconvert", nullptr);
+        GstElement *capsfilter = gst_element_factory_make("capsfilter", "camerafilter");
         GstCaps *caps = gst_caps_new_simple("video/x-raw",
                                             "width",
                                             G_TYPE_INT,
                                             resolution.first,

@@ -1021,15 +1090,13 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
                                             nullptr);
         g_object_set(capsfilter, "caps", caps, nullptr);
         gst_caps_unref(caps);
-        GstElement *convert = gst_element_factory_make("videoconvert", nullptr);
-        GstElement *queue1 = gst_element_factory_make("queue", nullptr);
+        GstElement *tee = gst_element_factory_make("tee", "videosrctee");
+        GstElement *queue = gst_element_factory_make("queue", nullptr);
         GstElement *vp8enc = gst_element_factory_make("vp8enc", nullptr);
         g_object_set(vp8enc, "deadline", 1, nullptr);
         g_object_set(vp8enc, "error-resilient", 1, nullptr);
-        GstElement *rtp = gst_element_factory_make("rtpvp8pay", nullptr);
-        GstElement *queue2 = gst_element_factory_make("queue", nullptr);
+        GstElement *rtpvp8pay = gst_element_factory_make("rtpvp8pay", nullptr);
+        GstElement *rtpqueue = gst_element_factory_make("queue", nullptr);
         GstElement *rtpcapsfilter = gst_element_factory_make("capsfilter", nullptr);
         GstCaps *rtpcaps = gst_caps_new_simple("application/x-rtp",
                                                "media",

@@ -1047,27 +1114,30 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
 
         gst_bin_add_many(GST_BIN(pipe_),
                          source,
+                         videoconvert,
                          capsfilter,
-                         convert,
-                         queue1,
+                         tee,
+                         queue,
                          vp8enc,
-                         rtp,
-                         queue2,
+                         rtpvp8pay,
+                         rtpqueue,
                          rtpcapsfilter,
                          nullptr);
 
         GstElement *webrtcbin = gst_bin_get_by_name(GST_BIN(pipe_), "webrtcbin");
         if (!gst_element_link_many(source,
+                                   videoconvert,
                                    capsfilter,
-                                   convert,
-                                   queue1,
+                                   tee,
+                                   queue,
                                    vp8enc,
-                                   rtp,
-                                   queue2,
+                                   rtpvp8pay,
+                                   rtpqueue,
                                    rtpcapsfilter,
                                    webrtcbin,
                                    nullptr)) {
                 nhlog::ui()->error("WebRTC: failed to link video pipeline elements");
+                gst_object_unref(webrtcbin);
                 return false;
         }
         gst_object_unref(webrtcbin);
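
After this hunk the send-side graph branches at the named tee: one branch feeds the VP8 encoder toward webrtcbin, and addCameraView() later taps the same tee for the compositor inset. The hunk also plugs a webrtcbin reference leak on the error path. Expressed in gst-launch notation, the resulting topology is roughly as follows (a sketch; the camera source stands in for the element created by gst_device_create_element()):

camerasrc ! videoconvert ! capsfilter name=camerafilter ! tee name=videosrctee
videosrctee. ! queue ! vp8enc deadline=1 error-resilient=1
             ! rtpvp8pay ! queue ! capsfilter (application/x-rtp caps) ! webrtcbin
videosrctee. ! queue ! videorate ! compositor request pad   (added by addCameraView)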

@@ -1101,6 +1171,16 @@ WebRTCSession::toggleMicMute()
         return !muted;
 }
 
+void
+WebRTCSession::toggleCameraView()
+{
+        if (insetSinkPad_) {
+                guint zorder;
+                g_object_get(insetSinkPad_, "zorder", &zorder, nullptr);
+                g_object_set(insetSinkPad_, "zorder", zorder ? 0 : 2, nullptr);
+        }
+}
+
 void
 WebRTCSession::end()
 {

@@ -1115,11 +1195,13 @@ WebRTCSession::end()
                 busWatchId_ = 0;
         }
         }
 
         webrtc_ = nullptr;
         isVideo_ = false;
         isOffering_ = false;
         isRemoteVideoRecvOnly_ = false;
         videoItem_ = nullptr;
+        insetSinkPad_ = nullptr;
         if (state_ != State::DISCONNECTED)
                 emit stateChanged(State::DISCONNECTED);
 }

@@ -1270,6 +1352,10 @@ WebRTCSession::toggleMicMute()
         return false;
 }
 
+void
+WebRTCSession::toggleCameraView()
+{}
+
 void
 WebRTCSession::end()
 {}

@@ -53,6 +53,7 @@ public:
 
         bool isMicMuted() const;
         bool toggleMicMute();
+        void toggleCameraView();
         void end();
 
         void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }

@@ -330,6 +330,12 @@ TimelineViewManager::toggleMicMute()
         emit micMuteChanged();
 }
 
+void
+TimelineViewManager::toggleCameraView()
+{
+        WebRTCSession::instance().toggleCameraView();
+}
+
 void
 TimelineViewManager::openImageOverlay(QString mxcUrl, QString eventId) const
 {
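
TimelineViewManager is the object the QML ActiveCallBar reaches as TimelineManager, so the new Q_INVOKABLE completes the button-to-pipeline path. A minimal, self-contained sketch of that exposure mechanism, with illustrative names (nheko's actual QML registration code is not part of this diff):

// Expose a QObject to QML so TimelineManager.toggleCameraView() resolves
// to a Q_INVOKABLE. Requires moc, hence the main.moc include at the end.
#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QQmlContext>

class Manager : public QObject
{
        Q_OBJECT
public:
        Q_INVOKABLE void toggleCameraView() { qInfo("camera view toggled"); }
};

int
main(int argc, char *argv[])
{
        QGuiApplication app(argc, argv);
        Manager manager;
        QQmlApplicationEngine engine;
        // QML in this engine can now call TimelineManager.toggleCameraView()
        engine.rootContext()->setContextProperty("TimelineManager", &manager);
        engine.load(QUrl::fromLocalFile("main.qml"));
        return app.exec();
}

#include "main.moc"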

@@ -61,6 +61,7 @@ public:
         QString callPartyAvatarUrl() const { return callManager_->callPartyAvatarUrl(); }
         bool isMicMuted() const { return WebRTCSession::instance().isMicMuted(); }
         Q_INVOKABLE void toggleMicMute();
+        Q_INVOKABLE void toggleCameraView();
         Q_INVOKABLE void openImageOverlay(QString mxcUrl, QString eventId) const;
         Q_INVOKABLE QColor userColor(QString id, QColor background);
         Q_INVOKABLE QString escapeEmoji(QString str) const;