From 70c77cdc44698104c11c222dc630bbcc26a27f3e Mon Sep 17 00:00:00 2001
From: trilene
Date: Sun, 21 Feb 2021 16:30:10 -0500
Subject: [PATCH] Display screen sharing content locally

---
 resources/qml/TimelineView.qml       |   2 +-
 resources/qml/voip/ActiveCallBar.qml |  10 +--
 resources/qml/voip/ScreenShare.qml   |   5 +-
 src/CallManager.cpp                  |   9 --
 src/CallManager.h                    |   8 +-
 src/WebRTCSession.cpp                | 120 +++++++++++++++++----------
 src/WebRTCSession.h                  |   4 +-
 7 files changed, 90 insertions(+), 68 deletions(-)

diff --git a/resources/qml/TimelineView.qml b/resources/qml/TimelineView.qml
index 0cd129da..07145c7a 100644
--- a/resources/qml/TimelineView.qml
+++ b/resources/qml/TimelineView.qml
@@ -249,7 +249,7 @@ Page {
         }
 
         Loader {
-            source: CallManager.isOnCall && CallManager.haveVideo ? "voip/VideoCall.qml" : ""
+            source: CallManager.isOnCall && CallManager.callType != CallType.VOICE ? "voip/VideoCall.qml" : ""
             onLoaded: TimelineManager.setVideoCallItem()
         }
 
diff --git a/resources/qml/voip/ActiveCallBar.qml b/resources/qml/voip/ActiveCallBar.qml
index 5589c79b..d7f3c6fd 100644
--- a/resources/qml/voip/ActiveCallBar.qml
+++ b/resources/qml/voip/ActiveCallBar.qml
@@ -12,7 +12,7 @@ Rectangle {
     MouseArea {
         anchors.fill: parent
         onClicked: {
-            if (CallManager.haveVideo)
+            if (CallManager.callType != CallType.VOICE)
                 stackLayout.currentIndex = stackLayout.currentIndex ? 0 : 1;
         }
 
@@ -139,7 +139,7 @@ Rectangle {
 
             PropertyChanges {
                 target: stackLayout
-                currentIndex: CallManager.haveVideo ? 1 : 0
+                currentIndex: CallManager.callType != CallType.VOICE ? 1 : 0
             }
         },
 
@@ -196,15 +196,15 @@ Rectangle {
         }
 
         ImageButton {
-            visible: CallManager.haveLocalCamera
+            visible: CallManager.haveLocalPiP
            width: 24
            height: 24
            buttonTextColor: "#000000"
            image: ":/icons/icons/ui/toggle-camera-view.png"
            hoverEnabled: true
            ToolTip.visible: hovered
-            ToolTip.text: qsTr("Toggle camera view")
-            onClicked: CallManager.toggleCameraView()
+            ToolTip.text: qsTr("Hide/Show Picture-in-Picture")
+            onClicked: CallManager.toggleLocalPiP()
         }
 
         ImageButton {
diff --git a/resources/qml/voip/ScreenShare.qml b/resources/qml/voip/ScreenShare.qml
index cb70a36c..3ff74199 100644
--- a/resources/qml/voip/ScreenShare.qml
+++ b/resources/qml/voip/ScreenShare.qml
@@ -21,7 +21,10 @@ Popup {
 
     ColumnLayout {
         Label {
-            Layout.margins: 8
+            Layout.topMargin: 16
+            Layout.bottomMargin: 16
+            Layout.leftMargin: 8
+            Layout.rightMargin: 8
             Layout.alignment: Qt.AlignLeft
             text: qsTr("Share desktop with %1?").arg(TimelineManager.timeline.roomName)
             color: colors.windowText
diff --git a/src/CallManager.cpp b/src/CallManager.cpp
index de04ed3b..58d75c2f 100644
--- a/src/CallManager.cpp
+++ b/src/CallManager.cpp
@@ -405,15 +405,6 @@ CallManager::screenShareSupported()
         return std::getenv("DISPLAY") && !std::getenv("WAYLAND_DISPLAY");
 }
 
-bool
-CallManager::haveVideo() const
-{
-        return callType() == CallType::VIDEO ||
-               (callType() == CallType::SCREEN &&
-                (ChatPage::instance()->userSettings()->screenShareRemoteVideo() &&
-                 !session_.isRemoteVideoRecvOnly()));
-}
-
 QStringList
 CallManager::devices(bool isVideo) const
 {
diff --git a/src/CallManager.h b/src/CallManager.h
index cfc50481..19d79f86 100644
--- a/src/CallManager.h
+++ b/src/CallManager.h
@@ -30,8 +30,7 @@ class CallManager : public QObject
         Q_PROPERTY(QString callParty READ callParty NOTIFY newInviteState)
         Q_PROPERTY(QString callPartyAvatarUrl READ callPartyAvatarUrl NOTIFY newInviteState)
         Q_PROPERTY(bool isMicMuted READ isMicMuted NOTIFY micMuteChanged)
-        Q_PROPERTY(bool haveLocalCamera READ haveLocalCamera NOTIFY newCallState)
-        Q_PROPERTY(bool haveVideo READ haveVideo NOTIFY newInviteState)
+        Q_PROPERTY(bool haveLocalPiP READ haveLocalPiP NOTIFY newCallState)
         Q_PROPERTY(QStringList mics READ mics NOTIFY devicesChanged)
         Q_PROPERTY(QStringList cameras READ cameras NOTIFY devicesChanged)
         Q_PROPERTY(bool callsSupported READ callsSupported CONSTANT)
@@ -47,8 +46,7 @@ public:
         QString callParty() const { return callParty_; }
         QString callPartyAvatarUrl() const { return callPartyAvatarUrl_; }
         bool isMicMuted() const { return session_.isMicMuted(); }
-        bool haveLocalCamera() const { return session_.haveLocalCamera(); }
-        bool haveVideo() const;
+        bool haveLocalPiP() const { return session_.haveLocalPiP(); }
         QStringList mics() const { return devices(false); }
         QStringList cameras() const { return devices(true); }
         void refreshTurnServer();
@@ -60,7 +58,7 @@ public slots:
         void sendInvite(const QString &roomid, webrtc::CallType);
         void syncEvent(const mtx::events::collections::TimelineEvents &event);
         void toggleMicMute();
-        void toggleCameraView() { session_.toggleCameraView(); }
+        void toggleLocalPiP() { session_.toggleLocalPiP(); }
         void acceptInvite();
         void hangUp(
           mtx::events::msg::CallHangUp::Reason = mtx::events::msg::CallHangUp::Reason::User);
diff --git a/src/WebRTCSession.cpp b/src/WebRTCSession.cpp
index c3a28117..4d38d196 100644
--- a/src/WebRTCSession.cpp
+++ b/src/WebRTCSession.cpp
@@ -350,42 +350,59 @@ getResolution(GstPad *pad)
         return ret;
 }
 
+std::pair<int, int>
+getResolution(GstElement *pipe, const gchar *elementName, const gchar *padName)
+{
+        GstElement *element = gst_bin_get_by_name(GST_BIN(pipe), elementName);
+        GstPad *pad = gst_element_get_static_pad(element, padName);
+        auto ret = getResolution(pad);
+        gst_object_unref(pad);
+        gst_object_unref(element);
+        return ret;
+}
+
+std::pair<int, int>
+getPiPDimensions(const std::pair<int, int> resolution, int fullWidth, double scaleFactor)
+{
+        int pipWidth = fullWidth * scaleFactor;
+        int pipHeight = static_cast<double>(resolution.second) / resolution.first * pipWidth;
+        return {pipWidth, pipHeight};
+}
+
 void
 addLocalPiP(GstElement *pipe, const std::pair<int, int> &videoCallSize)
 {
-        // embed localUser's camera into received video
+        // embed localUser's camera into received video (CallType::VIDEO)
+        // OR embed screen share into received video (CallType::SCREEN)
         GstElement *tee = gst_bin_get_by_name(GST_BIN(pipe), "videosrctee");
         if (!tee)
                 return;
 
-        GstElement *queue     = gst_element_factory_make("queue", nullptr);
-        GstElement *videorate = gst_element_factory_make("videorate", nullptr);
-        gst_bin_add_many(GST_BIN(pipe), queue, videorate, nullptr);
-        gst_element_link_many(tee, queue, videorate, nullptr);
+        GstElement *queue = gst_element_factory_make("queue", nullptr);
+        gst_bin_add(GST_BIN(pipe), queue);
+        gst_element_link(tee, queue);
         gst_element_sync_state_with_parent(queue);
-        gst_element_sync_state_with_parent(videorate);
         gst_object_unref(tee);
 
-        GstElement *camerafilter = gst_bin_get_by_name(GST_BIN(pipe), "camerafilter");
-        GstPad *filtersinkpad    = gst_element_get_static_pad(camerafilter, "sink");
-        auto cameraResolution    = getResolution(filtersinkpad);
-        int pipWidth             = videoCallSize.first / 4;
-        int pipHeight =
-          static_cast<double>(cameraResolution.second) / cameraResolution.first * pipWidth;
-        nhlog::ui()->debug("WebRTC: local picture-in-picture: {}x{}", pipWidth, pipHeight);
-        gst_object_unref(filtersinkpad);
-        gst_object_unref(camerafilter);
-
-        GstPad *camerapad      = gst_element_get_static_pad(videorate, "src");
         GstElement *compositor = gst_bin_get_by_name(GST_BIN(pipe), "compositor");
         localPiPSinkPad_       = gst_element_get_request_pad(compositor, "sink_%u");
         g_object_set(localPiPSinkPad_, "zorder", 2, nullptr);
-        g_object_set(localPiPSinkPad_, "width", pipWidth, "height", pipHeight, nullptr);
+
+        bool isVideo = WebRTCSession::instance().callType() == CallType::VIDEO;
+        const gchar *element = isVideo ? "camerafilter" : "screenshare";
+        const gchar *pad = isVideo ? "sink" : "src";
+        auto resolution = getResolution(pipe, element, pad);
+        auto pipSize = getPiPDimensions(resolution, videoCallSize.first, 0.25);
+        nhlog::ui()->debug(
+          "WebRTC: local picture-in-picture: {}x{}", pipSize.first, pipSize.second);
+        g_object_set(localPiPSinkPad_, "width", pipSize.first, "height", pipSize.second, nullptr);
         gint offset = videoCallSize.first / 80;
         g_object_set(localPiPSinkPad_, "xpos", offset, "ypos", offset, nullptr);
-        if (GST_PAD_LINK_FAILED(gst_pad_link(camerapad, localPiPSinkPad_)))
+
+        GstPad *srcpad = gst_element_get_static_pad(queue, "src");
+        if (GST_PAD_LINK_FAILED(gst_pad_link(srcpad, localPiPSinkPad_)))
                 nhlog::ui()->error("WebRTC: failed to link local PiP elements");
-        gst_object_unref(camerapad);
+        gst_object_unref(srcpad);
         gst_object_unref(compositor);
 }
 
@@ -394,31 +411,37 @@ addRemotePiP(GstElement *pipe)
 {
         // embed localUser's camera into screen image being shared
         if (remotePiPSinkPad_) {
-                GstElement *screen = gst_bin_get_by_name(GST_BIN(pipe), "screenshare");
-                GstPad *srcpad     = gst_element_get_static_pad(screen, "src");
-                auto resolution    = getResolution(srcpad);
+                auto camRes = getResolution(pipe, "camerafilter", "sink");
+                auto shareRes = getResolution(pipe, "screenshare", "src");
+                auto pipSize = getPiPDimensions(camRes, shareRes.first, 0.2);
                 nhlog::ui()->debug(
-                  "WebRTC: screen share: {}x{}", resolution.first, resolution.second);
-                gst_object_unref(srcpad);
-                gst_object_unref(screen);
+                  "WebRTC: screen share picture-in-picture: {}x{}", pipSize.first, pipSize.second);
 
-                int pipWidth = resolution.first / 5;
-                int pipHeight =
-                  static_cast<double>(resolution.second) / resolution.first * pipWidth;
-                nhlog::ui()->debug(
-                  "WebRTC: screen share picture-in-picture: {}x{}", pipWidth, pipHeight);
-                gint offset = resolution.first / 100;
+                gint offset = shareRes.first / 100;
                 g_object_set(remotePiPSinkPad_, "zorder", 2, nullptr);
-                g_object_set(remotePiPSinkPad_, "width", pipWidth, "height", pipHeight, nullptr);
+                g_object_set(
+                  remotePiPSinkPad_, "width", pipSize.first, "height", pipSize.second, nullptr);
                 g_object_set(remotePiPSinkPad_,
                              "xpos",
-                             resolution.first - pipWidth - offset,
+                             shareRes.first - pipSize.first - offset,
                              "ypos",
-                             resolution.second - pipHeight - offset,
+                             shareRes.second - pipSize.second - offset,
                              nullptr);
         }
 }
 
+void
+addLocalVideo(GstElement *pipe)
+{
+        GstElement *queue = newVideoSinkChain(pipe);
+        GstElement *tee = gst_bin_get_by_name(GST_BIN(pipe), "videosrctee");
+        GstPad *srcpad = gst_element_get_request_pad(tee, "src_%u");
+        GstPad *sinkpad = gst_element_get_static_pad(queue, "sink");
+        if (GST_PAD_LINK_FAILED(gst_pad_link(srcpad, sinkpad)))
+                nhlog::ui()->error("WebRTC: failed to link videosrctee -> video sink chain");
+        gst_object_unref(srcpad);
+}
+
 void
 linkNewPad(GstElement *decodebin, GstPad *newpad, GstElement *pipe)
 {
@@ -454,8 +477,7 @@ linkNewPad(GstElement *decodebin, GstPad *newpad, GstElement *pipe)
                         nhlog::ui()->info("WebRTC: incoming video resolution: {}x{}",
                                           videoCallSize.first,
                                           videoCallSize.second);
-                        if (session->callType() == CallType::VIDEO)
-                                addLocalPiP(pipe, videoCallSize);
+                        addLocalPiP(pipe, videoCallSize);
                 } else {
                         g_free(mediaType);
                         nhlog::ui()->error("WebRTC: unknown pad type: {}", GST_PAD_NAME(newpad));
@@ -478,6 +500,8 @@
                                 g_timeout_add_seconds(3, testPacketLoss, nullptr);
                         }
                         addRemotePiP(pipe);
+                        if (session->isRemoteVideoRecvOnly())
+                                addLocalVideo(pipe);
                 }
         }
         gst_object_unref(queuepad);
@@ -1011,13 +1035,19 @@ WebRTCSession::addVideoPipeline(int vp8PayloadType)
 }
 
 bool
-WebRTCSession::haveLocalCamera() const
+WebRTCSession::haveLocalPiP() const
 {
-        if (callType_ == CallType::VIDEO && state_ >= State::INITIATED) {
-                GstElement *tee = gst_bin_get_by_name(GST_BIN(pipe_), "videosrctee");
-                if (tee) {
-                        gst_object_unref(tee);
+        if (state_ >= State::INITIATED) {
+                if (callType_ == CallType::VOICE || isRemoteVideoRecvOnly_)
+                        return false;
+                else if (callType_ == CallType::SCREEN)
                         return true;
+                else {
+                        GstElement *tee = gst_bin_get_by_name(GST_BIN(pipe_), "videosrctee");
+                        if (tee) {
+                                gst_object_unref(tee);
+                                return true;
+                        }
                 }
         }
         return false;
@@ -1051,7 +1081,7 @@ WebRTCSession::toggleMicMute()
 }
 
 void
-WebRTCSession::toggleCameraView()
+WebRTCSession::toggleLocalPiP()
 {
         if (localPiPSinkPad_) {
                 guint zorder;
@@ -1108,7 +1138,7 @@ WebRTCSession::havePlugins(bool, std::string *)
 }
 
 bool
-WebRTCSession::haveLocalCamera() const
+WebRTCSession::haveLocalPiP() const
 {
         return false;
 }
@@ -1144,7 +1174,7 @@ WebRTCSession::toggleMicMute()
 }
 
 void
-WebRTCSession::toggleCameraView()
+WebRTCSession::toggleLocalPiP()
 {}
 
 void
diff --git a/src/WebRTCSession.h b/src/WebRTCSession.h
index 24ae9a17..fc637193 100644
--- a/src/WebRTCSession.h
+++ b/src/WebRTCSession.h
@@ -52,7 +52,7 @@ public:
         bool havePlugins(bool isVideo, std::string *errorMessage = nullptr);
         webrtc::CallType callType() const { return callType_; }
         webrtc::State state() const { return state_; }
-        bool haveLocalCamera() const;
+        bool haveLocalPiP() const;
         bool isOffering() const { return isOffering_; }
         bool isRemoteVideoRecvOnly() const { return isRemoteVideoRecvOnly_; }
         bool isRemoteVideoSendOnly() const { return isRemoteVideoSendOnly_; }
@@ -64,7 +64,7 @@ public:
 
         bool isMicMuted() const;
         bool toggleMicMute();
-        void toggleCameraView();
+        void toggleLocalPiP();
         void end();
 
         void setTurnServers(const std::vector<std::string> &uris) { turnServers_ = uris; }
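
Note (not part of the patch): the picture-in-picture behaviour above is driven entirely by
properties on the compositor's request sink pads (width, height, xpos, ypos, zorder). The
snippet below is a minimal, self-contained sketch of that technique using two videotestsrc
elements; the element names, sizes and offsets are illustrative assumptions, not code from
this change. It should build with: g++ pip-demo.cpp $(pkg-config --cflags --libs gstreamer-1.0)

// pip-demo.cpp -- illustrative only; element names and sizes are assumptions.
#include <gst/gst.h>

int
main(int argc, char *argv[])
{
        gst_init(&argc, &argv);

        GstElement *pipe       = gst_pipeline_new("pip-demo");
        GstElement *mainSrc    = gst_element_factory_make("videotestsrc", "main");
        GstElement *pipSrc     = gst_element_factory_make("videotestsrc", "pip");
        GstElement *compositor = gst_element_factory_make("compositor", "compositor");
        GstElement *sink       = gst_element_factory_make("autovideosink", "sink");
        if (!pipe || !mainSrc || !pipSrc || !compositor || !sink)
                return 1;

        g_object_set(pipSrc, "pattern", 18, nullptr); // "ball" pattern, easy to spot
        gst_bin_add_many(GST_BIN(pipe), mainSrc, pipSrc, compositor, sink, nullptr);

        // Main picture: plain element links; the compositor hands out its first sink pad itself.
        gst_element_link(compositor, sink);
        gst_element_link(mainSrc, compositor);

        // PiP branch: request a second sink pad and size/position it via pad properties,
        // the same way the patch configures localPiPSinkPad_.
        GstPad *pipSinkPad = gst_element_get_request_pad(compositor, "sink_%u");
        GstPad *pipSrcPad  = gst_element_get_static_pad(pipSrc, "src");
        g_object_set(pipSinkPad, "zorder", 2, "width", 160, "height", 120,
                     "xpos", 20, "ypos", 20, nullptr);
        if (GST_PAD_LINK_FAILED(gst_pad_link(pipSrcPad, pipSinkPad)))
                return 1;
        gst_object_unref(pipSrcPad);

        gst_element_set_state(pipe, GST_STATE_PLAYING);
        GstBus *bus     = gst_element_get_bus(pipe);
        GstMessage *msg = gst_bus_timed_pop_filtered(
          bus, GST_CLOCK_TIME_NONE, static_cast<GstMessageType>(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
        if (msg)
                gst_message_unref(msg);
        gst_object_unref(bus);
        gst_element_set_state(pipe, GST_STATE_NULL);
        gst_object_unref(pipe);
        return 0;
}

In the patch itself the analogous pad is localPiPSinkPad_, and its width/height come from
getPiPDimensions(), so the overlay scales with the incoming call (or shared screen) resolution
instead of the fixed values used here.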