Port ActiveCallBar to Qml

trilene 2020-09-22 12:07:36 -04:00
parent 9169a26e67
commit da27670cbe
16 changed files with 212 additions and 277 deletions

View File

@@ -279,7 +279,6 @@ set(SRC_FILES
src/ui/Theme.cpp
src/ui/ThemeManager.cpp
-src/ActiveCallBar.cpp
src/AvatarProvider.cpp
src/BlurhashProvider.cpp
src/Cache.cpp
@@ -491,7 +490,6 @@ qt5_wrap_cpp(MOC_HEADERS
src/notifications/Manager.h
-src/ActiveCallBar.h
src/AvatarProvider.h
src/BlurhashProvider.h
src/Cache_p.h

View File

@@ -2,7 +2,8 @@ import QtQuick 2.3
import QtQuick.Controls 2.3
AbstractButton {
-property string image: undefined
+property string image
+property string src
width: 16
height: 16
id: button
@@ -11,7 +12,7 @@ AbstractButton {
id: buttonImg
// Workaround, can't get icon.source working for now...
anchors.fill: parent
-source: "image://colorimage/" + image + "?" + (button.hovered ? colors.highlight : colors.buttonText)
+source: src ? src : ("image://colorimage/" + image + "?" + (button.hovered ? colors.highlight : colors.buttonText))
}
MouseArea
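
For reference, a minimal usage sketch of the reworked ImageButton (illustrative only, not part of this commit): setting `src` loads the image directly and skips the colorimage provider, while `image` keeps the hover-tinted colorimage path; the handler below is a placeholder.

    ImageButton {
        width: 22
        height: 22
        // placeholder icon path; src bypasses the colorimage provider
        src: "qrc:/icons/icons/ui/microphone-mute.png"
        onClicked: console.log("clicked")
    }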

View File

@@ -497,6 +497,146 @@ Page {
}
}
}
Rectangle {
id: activeCallBar
visible: timelineManager.callState != WebRTCState.DISCONNECTED
Layout.fillWidth: true
implicitHeight: topLayout.height + 16
color: "#2ECC71"
z: 3
GridLayout {
anchors.left: parent.left
anchors.right: parent.right
anchors.margins: 8
anchors.verticalCenter: parent.verticalCenter
Avatar {
Layout.column: 1
Layout.row: 0
Layout.rowSpan: 2
Layout.alignment: Qt.AlignVCenter
width: avatarSize
height: avatarSize
url: chat.model ? chat.model.roomAvatarUrl.replace("mxc://", "image://MxcImage/") : ""
displayName: chat.model ? chat.model.roomName : qsTr("No room selected")
}
Label {
Layout.column: 2
Layout.row: 0
Layout.rowSpan: 2
Layout.alignment: Qt.AlignVCenter
font.pointSize: fontMetrics.font.pointSize * 1.1
text: chat.model ? " " + chat.model.roomName + " " : ""
}
Image {
Layout.column: 3
Layout.row: 0
Layout.rowSpan: 2
Layout.alignment: Qt.AlignVCenter
Layout.preferredWidth: 23
Layout.preferredHeight: 23
source: "qrc:/icons/icons/ui/place-call.png"
}
Connections {
target: timelineManager
function onCallStateChanged(state) {
switch (state) {
case WebRTCState.INITIATING:
callStateLabel.text = "Initiating call..."
break;
case WebRTCState.INITIATED:
callStateLabel.text = "Call initiated..."
break;
case WebRTCState.OFFERSENT:
callStateLabel.text = "Calling..."
break;
case WebRTCState.CONNECTING:
callStateLabel.text = "Connecting..."
break;
case WebRTCState.CONNECTED:
callStateLabel.text = "00:00"
var d = new Date()
callTimer.startTime = Math.floor(d.getTime() / 1000)
break;
}
}
}
Label {
id: callStateLabel
Layout.column: 4
Layout.row: 0
Layout.rowSpan: 2
Layout.alignment: Qt.AlignVCenter
font.pointSize: fontMetrics.font.pointSize * 1.1
}
Timer {
id: callTimer
property int startTime
interval: 1000
running: timelineManager.callState == WebRTCState.CONNECTED
repeat: true
onTriggered: {
var d = new Date()
let seconds = Math.floor(d.getTime() / 1000 - startTime)
let s = Math.floor(seconds % 60)
let m = Math.floor(seconds / 60) % 60
let h = Math.floor(seconds / 3600)
callStateLabel.text = (h ? (pad(h) + ":") : "") + pad(m) + ":" + pad(s)
}
function pad(n) {
return (n < 10) ? ("0" + n) : n
}
}
Item {
Layout.column: 5
Layout.fillWidth: true
}
ImageButton {
Layout.column: 6
Layout.row: 0
Layout.rowSpan: 2
Layout.alignment: Qt.AlignVCenter
width: 22
height: 22
src: "qrc:/icons/icons/ui/microphone-mute.png"
hoverEnabled: true
ToolTip.visible: hovered
ToolTip.text: qsTr("Mute Mic")
onClicked: {
if (timelineManager.toggleMuteAudioSource()) {
src = "qrc:/icons/icons/ui/microphone-unmute.png"
ToolTip.text = qsTr("Unmute Mic")
}
else {
src = "qrc:/icons/icons/ui/microphone-mute.png"
ToolTip.text = qsTr("Mute Mic")
}
}
}
Item {
Layout.column: 7
implicitWidth: 16
}
}
}
}
}
}

View File

@ -1,160 +0,0 @@
#include <cstdio>
#include <QDateTime>
#include <QHBoxLayout>
#include <QIcon>
#include <QLabel>
#include <QString>
#include <QTimer>
#include "ActiveCallBar.h"
#include "ChatPage.h"
#include "Utils.h"
#include "WebRTCSession.h"
#include "ui/Avatar.h"
#include "ui/FlatButton.h"
ActiveCallBar::ActiveCallBar(QWidget *parent)
: QWidget(parent)
{
setAutoFillBackground(true);
auto p = palette();
p.setColor(backgroundRole(), QColor(46, 204, 113));
setPalette(p);
QFont f;
f.setPointSizeF(f.pointSizeF());
const int fontHeight = QFontMetrics(f).height();
const int widgetMargin = fontHeight / 3;
const int contentHeight = fontHeight * 3;
setFixedHeight(contentHeight + widgetMargin);
layout_ = new QHBoxLayout(this);
layout_->setSpacing(widgetMargin);
layout_->setContentsMargins(2 * widgetMargin, widgetMargin, 2 * widgetMargin, widgetMargin);
QFont labelFont;
labelFont.setPointSizeF(labelFont.pointSizeF() * 1.1);
labelFont.setWeight(QFont::Medium);
avatar_ = new Avatar(this, QFontMetrics(f).height() * 2.5);
callPartyLabel_ = new QLabel(this);
callPartyLabel_->setFont(labelFont);
stateLabel_ = new QLabel(this);
stateLabel_->setFont(labelFont);
durationLabel_ = new QLabel(this);
durationLabel_->setFont(labelFont);
durationLabel_->hide();
muteBtn_ = new FlatButton(this);
setMuteIcon(false);
muteBtn_->setFixedSize(buttonSize_, buttonSize_);
muteBtn_->setCornerRadius(buttonSize_ / 2);
connect(muteBtn_, &FlatButton::clicked, this, [this]() {
if (WebRTCSession::instance().toggleMuteAudioSrc(muted_))
setMuteIcon(muted_);
});
layout_->addWidget(avatar_, 0, Qt::AlignLeft);
layout_->addWidget(callPartyLabel_, 0, Qt::AlignLeft);
layout_->addWidget(stateLabel_, 0, Qt::AlignLeft);
layout_->addWidget(durationLabel_, 0, Qt::AlignLeft);
layout_->addStretch();
layout_->addWidget(muteBtn_, 0, Qt::AlignCenter);
layout_->addSpacing(18);
timer_ = new QTimer(this);
connect(timer_, &QTimer::timeout, this, [this]() {
auto seconds = QDateTime::currentSecsSinceEpoch() - callStartTime_;
int s = seconds % 60;
int m = (seconds / 60) % 60;
int h = seconds / 3600;
char buf[12];
if (h)
snprintf(buf, sizeof(buf), "%.2d:%.2d:%.2d", h, m, s);
else
snprintf(buf, sizeof(buf), "%.2d:%.2d", m, s);
durationLabel_->setText(buf);
});
connect(
&WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &ActiveCallBar::update);
}
void
ActiveCallBar::setMuteIcon(bool muted)
{
QIcon icon;
if (muted) {
muteBtn_->setToolTip("Unmute Mic");
icon.addFile(":/icons/icons/ui/microphone-unmute.png");
} else {
muteBtn_->setToolTip("Mute Mic");
icon.addFile(":/icons/icons/ui/microphone-mute.png");
}
muteBtn_->setIcon(icon);
muteBtn_->setIconSize(QSize(buttonSize_, buttonSize_));
}
void
ActiveCallBar::setCallParty(const QString &userid,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl)
{
callPartyLabel_->setText(" " + (displayName.isEmpty() ? userid : displayName) + " ");
if (!avatarUrl.isEmpty())
avatar_->setImage(avatarUrl);
else
avatar_->setLetter(utils::firstChar(roomName));
}
void
ActiveCallBar::update(WebRTCSession::State state)
{
switch (state) {
case WebRTCSession::State::INITIATING:
show();
stateLabel_->setText("Initiating call...");
break;
case WebRTCSession::State::INITIATED:
show();
stateLabel_->setText("Call initiated...");
break;
case WebRTCSession::State::OFFERSENT:
show();
stateLabel_->setText("Calling...");
break;
case WebRTCSession::State::CONNECTING:
show();
stateLabel_->setText("Connecting...");
break;
case WebRTCSession::State::CONNECTED:
show();
callStartTime_ = QDateTime::currentSecsSinceEpoch();
timer_->start(1000);
stateLabel_->setPixmap(
QIcon(":/icons/icons/ui/place-call.png").pixmap(QSize(buttonSize_, buttonSize_)));
durationLabel_->setText("00:00");
durationLabel_->show();
break;
case WebRTCSession::State::ICEFAILED:
case WebRTCSession::State::DISCONNECTED:
hide();
timer_->stop();
callPartyLabel_->setText(QString());
stateLabel_->setText(QString());
durationLabel_->setText(QString());
durationLabel_->hide();
setMuteIcon(false);
break;
default:
break;
}
}

View File

@ -1,40 +0,0 @@
#pragma once
#include <QWidget>
#include "WebRTCSession.h"
class QHBoxLayout;
class QLabel;
class QTimer;
class Avatar;
class FlatButton;
class ActiveCallBar : public QWidget
{
Q_OBJECT
public:
ActiveCallBar(QWidget *parent = nullptr);
public slots:
void update(WebRTCSession::State);
void setCallParty(const QString &userid,
const QString &displayName,
const QString &roomName,
const QString &avatarUrl);
private:
QHBoxLayout *layout_ = nullptr;
Avatar *avatar_ = nullptr;
QLabel *callPartyLabel_ = nullptr;
QLabel *stateLabel_ = nullptr;
QLabel *durationLabel_ = nullptr;
FlatButton *muteBtn_ = nullptr;
int buttonSize_ = 22;
bool muted_ = false;
qint64 callStartTime_ = 0;
QTimer *timer_ = nullptr;
void setMuteIcon(bool muted);
};

View File

@@ -52,7 +52,7 @@ CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
emit newMessage(roomid_, CallInvite{callid_, sdp, 0, timeoutms_});
emit newMessage(roomid_, CallCandidates{callid_, candidates, 0});
QTimer::singleShot(timeoutms_, this, [this]() {
-if (session_.state() == WebRTCSession::State::OFFERSENT) {
+if (session_.state() == webrtc::State::OFFERSENT) {
hangUp(CallHangUp::Reason::InviteTimeOut);
emit ChatPage::instance()->showNotification(
"The remote side failed to pick up.");
@@ -99,13 +99,13 @@ CallManager::CallManager(QSharedPointer<UserSettings> userSettings)
turnServerTimer_.setInterval(ttl * 1000 * 0.9);
});
-connect(&session_, &WebRTCSession::stateChanged, this, [this](WebRTCSession::State state) {
+connect(&session_, &WebRTCSession::stateChanged, this, [this](webrtc::State state) {
switch (state) {
-case WebRTCSession::State::DISCONNECTED:
+case webrtc::State::DISCONNECTED:
playRingtone("qrc:/media/media/callend.ogg", false);
clear();
break;
-case WebRTCSession::State::ICEFAILED: {
+case webrtc::State::ICEFAILED: {
QString error("Call connection failed.");
if (turnURIs_.empty())
error += " Your homeserver has no configured TURN server.";
@@ -152,13 +152,6 @@ CallManager::sendInvite(const QString &roomid)
generateCallID();
nhlog::ui()->debug("WebRTC: call id: {} - creating invite", callid_);
-std::vector<RoomMember> members(cache::getMembers(roomid.toStdString()));
-const RoomMember &callee =
-members.front().user_id == utils::localUser() ? members.back() : members.front();
-emit newCallParty(callee.user_id,
-callee.display_name,
-QString::fromStdString(roomInfo.name),
-QString::fromStdString(roomInfo.avatar_url));
playRingtone("qrc:/media/media/ringback.ogg", true);
if (!session_.createOffer()) {
emit ChatPage::instance()->showNotification("Problem setting up call.");
@@ -195,7 +188,7 @@ CallManager::hangUp(CallHangUp::Reason reason)
bool
CallManager::onActiveCall()
{
-return session_.state() != WebRTCSession::State::DISCONNECTED;
+return session_.state() != webrtc::State::DISCONNECTED;
}
void
@@ -259,11 +252,6 @@ CallManager::handleEvent(const RoomEvent<CallInvite> &callInviteEvent)
std::vector<RoomMember> members(cache::getMembers(callInviteEvent.room_id));
const RoomMember &caller =
members.front().user_id == utils::localUser() ? members.back() : members.front();
-emit newCallParty(caller.user_id,
-caller.display_name,
-QString::fromStdString(roomInfo.name),
-QString::fromStdString(roomInfo.avatar_url));
auto dialog = new dialogs::AcceptCall(caller.user_id,
caller.display_name,
QString::fromStdString(roomInfo.name),

View File

@@ -41,10 +41,6 @@ signals:
void newMessage(const QString &roomid, const mtx::events::msg::CallAnswer &);
void newMessage(const QString &roomid, const mtx::events::msg::CallHangUp &);
void turnServerRetrieved(const mtx::responses::TurnServer &);
-void newCallParty(const QString &userid,
-const QString &displayName,
-const QString &roomName,
-const QString &avatarUrl);
private slots:
void retrieveTurnServer();

View File

@@ -22,7 +22,6 @@
#include <QShortcut>
#include <QtConcurrent>
-#include "ActiveCallBar.h"
#include "AvatarProvider.h"
#include "Cache.h"
#include "Cache_p.h"
@@ -40,7 +39,6 @@
#include "UserInfoWidget.h"
#include "UserSettingsPage.h"
#include "Utils.h"
-#include "WebRTCSession.h"
#include "ui/OverlayModal.h"
#include "ui/Theme.h"
@@ -129,12 +127,6 @@ ChatPage::ChatPage(QSharedPointer<UserSettings> userSettings, QWidget *parent)
contentLayout_->addWidget(view_manager_->getWidget());
-activeCallBar_ = new ActiveCallBar(this);
-contentLayout_->addWidget(activeCallBar_);
-activeCallBar_->hide();
-connect(
-&callManager_, &CallManager::newCallParty, activeCallBar_, &ActiveCallBar::setCallParty);
// Splitter
splitter->addWidget(sideBar_);
splitter->addWidget(content_);

View File

@@ -41,7 +41,6 @@
#include "notifications/Manager.h"
#include "popups/UserMentions.h"
-class ActiveCallBar;
class OverlayModal;
class QuickSwitcher;
class RoomList;
@@ -235,7 +234,6 @@ private:
SideBarActions *sidebarActions_;
TextInputWidget *text_input_;
-ActiveCallBar *activeCallBar_;
QTimer connectivityTimer_;
std::atomic_bool isConnected_;

View File

@@ -288,7 +288,7 @@ MainWindow::showChatPage()
void
MainWindow::closeEvent(QCloseEvent *event)
{
-if (WebRTCSession::instance().state() != WebRTCSession::State::DISCONNECTED) {
+if (WebRTCSession::instance().state() != webrtc::State::DISCONNECTED) {
if (QMessageBox::question(this, "nheko", "A call is in progress. Quit?") !=
QMessageBox::Yes) {
event->ignore();
@@ -440,7 +440,7 @@ MainWindow::openLogoutDialog()
{
auto dialog = new dialogs::Logout(this);
connect(dialog, &dialogs::Logout::loggingOut, this, [this]() {
-if (WebRTCSession::instance().state() != WebRTCSession::State::DISCONNECTED) {
+if (WebRTCSession::instance().state() != webrtc::State::DISCONNECTED) {
if (QMessageBox::question(
this, "nheko", "A call is in progress. Log out?") !=
QMessageBox::Yes) {

View File

@@ -560,7 +560,7 @@ TextInputWidget::TextInputWidget(QWidget *parent)
#ifdef GSTREAMER_AVAILABLE
callBtn_ = new FlatButton(this);
-changeCallButtonState(WebRTCSession::State::DISCONNECTED);
+changeCallButtonState(webrtc::State::DISCONNECTED);
connect(&WebRTCSession::instance(),
&WebRTCSession::stateChanged,
this,
@@ -776,11 +776,11 @@ TextInputWidget::paintEvent(QPaintEvent *)
}
void
-TextInputWidget::changeCallButtonState(WebRTCSession::State state)
+TextInputWidget::changeCallButtonState(webrtc::State state)
{
QIcon icon;
-if (state == WebRTCSession::State::ICEFAILED ||
-state == WebRTCSession::State::DISCONNECTED) {
+if (state == webrtc::State::ICEFAILED ||
+state == webrtc::State::DISCONNECTED) {
callBtn_->setToolTip(tr("Place a call"));
icon.addFile(":/icons/icons/ui/place-call.png");
} else {

View File

@@ -164,7 +164,7 @@ public slots:
void openFileSelection();
void hideUploadSpinner();
void focusLineEdit() { input_->setFocus(); }
-void changeCallButtonState(WebRTCSession::State);
+void changeCallButtonState(webrtc::State);
private slots:
void addSelectedEmoji(const QString &emoji);

View File

@@ -1,4 +1,5 @@
#include <cctype>
+#include <QQmlEngine>
#include "Logging.h"
#include "WebRTCSession.h"
@@ -14,12 +15,22 @@ extern "C"
}
#endif
-Q_DECLARE_METATYPE(WebRTCSession::State)
+Q_DECLARE_METATYPE(webrtc::State)
+using webrtc::State;
WebRTCSession::WebRTCSession()
: QObject()
{
-qRegisterMetaType<WebRTCSession::State>();
+qRegisterMetaType<webrtc::State>();
+qmlRegisterUncreatableMetaObject(
+webrtc::staticMetaObject,
+"im.nheko",
+1,
+0,
+"WebRTCState",
+"Can't instantiate enum");
connect(this, &WebRTCSession::stateChanged, this, &WebRTCSession::setState);
init();
}
@@ -247,11 +258,11 @@ iceGatheringStateChanged(GstElement *webrtc,
if (isoffering_) {
emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
emit WebRTCSession::instance().stateChanged(
-WebRTCSession::State::OFFERSENT);
+State::OFFERSENT);
} else {
emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
emit WebRTCSession::instance().stateChanged(
-WebRTCSession::State::ANSWERSENT);
+State::ANSWERSENT);
}
}
}
@@ -264,10 +275,10 @@ onICEGatheringCompletion(gpointer timerid)
*(guint *)(timerid) = 0;
if (isoffering_) {
emit WebRTCSession::instance().offerCreated(localsdp_, localcandidates_);
-emit WebRTCSession::instance().stateChanged(WebRTCSession::State::OFFERSENT);
+emit WebRTCSession::instance().stateChanged(State::OFFERSENT);
} else {
emit WebRTCSession::instance().answerCreated(localsdp_, localcandidates_);
-emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ANSWERSENT);
+emit WebRTCSession::instance().stateChanged(State::ANSWERSENT);
}
return FALSE;
}
@@ -285,7 +296,7 @@ addLocalICECandidate(GstElement *webrtc G_GNUC_UNUSED,
localcandidates_.push_back({"audio", (uint16_t)mlineIndex, candidate});
return;
#else
-if (WebRTCSession::instance().state() >= WebRTCSession::State::OFFERSENT) {
+if (WebRTCSession::instance().state() >= State::OFFERSENT) {
emit WebRTCSession::instance().newICECandidate(
{"audio", (uint16_t)mlineIndex, candidate});
return;
@@ -314,11 +325,11 @@ iceConnectionStateChanged(GstElement *webrtc,
switch (newState) {
case GST_WEBRTC_ICE_CONNECTION_STATE_CHECKING:
nhlog::ui()->debug("WebRTC: GstWebRTCICEConnectionState -> Checking");
-emit WebRTCSession::instance().stateChanged(WebRTCSession::State::CONNECTING);
+emit WebRTCSession::instance().stateChanged(State::CONNECTING);
break;
case GST_WEBRTC_ICE_CONNECTION_STATE_FAILED:
nhlog::ui()->error("WebRTC: GstWebRTCICEConnectionState -> Failed");
-emit WebRTCSession::instance().stateChanged(WebRTCSession::State::ICEFAILED);
+emit WebRTCSession::instance().stateChanged(State::ICEFAILED);
break;
default:
break;
@@ -356,7 +367,7 @@ linkNewPad(GstElement *decodebin G_GNUC_UNUSED, GstPad *newpad, GstElement *pipe
nhlog::ui()->error("WebRTC: unable to link new pad");
else {
emit WebRTCSession::instance().stateChanged(
-WebRTCSession::State::CONNECTED);
+State::CONNECTED);
}
gst_object_unref(queuepad);
}
@@ -633,21 +644,17 @@ WebRTCSession::createPipeline(int opusPayloadType)
}
bool
-WebRTCSession::toggleMuteAudioSrc(bool &isMuted)
+WebRTCSession::toggleMuteAudioSource()
{
if (state_ < State::INITIATED)
return false;
GstElement *srclevel = gst_bin_get_by_name(GST_BIN(pipe_), "srclevel");
-if (!srclevel)
-return false;
gboolean muted;
g_object_get(srclevel, "mute", &muted, nullptr);
g_object_set(srclevel, "mute", !muted, nullptr);
gst_object_unref(srclevel);
-isMuted = !muted;
-return true;
+return !muted;
}
void
@@ -778,7 +785,7 @@ WebRTCSession::createPipeline(int)
}
bool
-WebRTCSession::toggleMuteAudioSrc(bool &)
+WebRTCSession::toggleMuteAudioSource()
{
return false;
}
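
For context, a minimal sketch (not part of this commit) of how the enum registered above is read from QML; it assumes the consuming file imports the registered uri `im.nheko 1.0` and that a `timelineManager` context property exposes `callState`, as the TimelineView changes in this commit do:

    import QtQuick 2.3
    import im.nheko 1.0

    Item {
        // hide unless a call is active; WebRTCState comes from the uncreatable meta-object registered above
        visible: timelineManager.callState != WebRTCState.DISCONNECTED
    }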

View File

@@ -9,23 +9,30 @@
typedef struct _GstElement GstElement;
+namespace webrtc {
+Q_NAMESPACE
+enum class State
+{
+DISCONNECTED,
+ICEFAILED,
+INITIATING,
+INITIATED,
+OFFERSENT,
+ANSWERSENT,
+CONNECTING,
+CONNECTED
+};
+Q_ENUM_NS(State)
+}
class WebRTCSession : public QObject
{
Q_OBJECT
public:
-enum class State
-{
-DISCONNECTED,
-ICEFAILED,
-INITIATING,
-INITIATED,
-OFFERSENT,
-ANSWERSENT,
-CONNECTING,
-CONNECTED
-};
static WebRTCSession &instance()
{
static WebRTCSession instance;
@@ -33,14 +40,14 @@ public:
}
bool init(std::string *errorMessage = nullptr);
-State state() const { return state_; }
+webrtc::State state() const { return state_; }
bool createOffer();
bool acceptOffer(const std::string &sdp);
bool acceptAnswer(const std::string &sdp);
void acceptICECandidates(const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
-bool toggleMuteAudioSrc(bool &isMuted);
+bool toggleMuteAudioSource();
void end();
void setStunServer(const std::string &stunServer) { stunServer_ = stunServer; }
@@ -55,16 +62,16 @@ signals:
void answerCreated(const std::string &sdp,
const std::vector<mtx::events::msg::CallCandidates::Candidate> &);
void newICECandidate(const mtx::events::msg::CallCandidates::Candidate &);
-void stateChanged(WebRTCSession::State); // explicit qualifier necessary for Qt
+void stateChanged(webrtc::State);
private slots:
-void setState(State state) { state_ = state; }
+void setState(webrtc::State state) { state_ = state; }
private:
WebRTCSession();
bool initialised_ = false;
-State state_ = State::DISCONNECTED;
+webrtc::State state_ = webrtc::State::DISCONNECTED;
GstElement *pipe_ = nullptr;
GstElement *webrtc_ = nullptr;
unsigned int busWatchId_ = 0;

View File

@@ -141,6 +141,8 @@ TimelineViewManager::TimelineViewManager(QSharedPointer<UserSettings> userSettin
isInitialSync_ = true;
emit initialSyncChanged(true);
});
+connect(
+&WebRTCSession::instance(), &WebRTCSession::stateChanged, this, &TimelineViewManager::callStateChanged);
}
void

View File

@@ -13,6 +13,7 @@
#include "Logging.h"
#include "TimelineModel.h"
#include "Utils.h"
+#include "WebRTCSession.h"
#include "emoji/EmojiModel.h"
#include "emoji/Provider.h"
@@ -33,6 +34,8 @@ class TimelineViewManager : public QObject
bool isInitialSync MEMBER isInitialSync_ READ isInitialSync NOTIFY initialSyncChanged)
Q_PROPERTY(
bool isNarrowView MEMBER isNarrowView_ READ isNarrowView NOTIFY narrowViewChanged)
+Q_PROPERTY(
+webrtc::State callState READ callState NOTIFY callStateChanged)
public:
TimelineViewManager(QSharedPointer<UserSettings> userSettings,
@@ -48,6 +51,8 @@ public:
Q_INVOKABLE TimelineModel *activeTimeline() const { return timeline_; }
Q_INVOKABLE bool isInitialSync() const { return isInitialSync_; }
bool isNarrowView() const { return isNarrowView_; }
+webrtc::State callState() const { return WebRTCSession::instance().state(); }
+Q_INVOKABLE bool toggleMuteAudioSource() { return WebRTCSession::instance().toggleMuteAudioSource(); }
Q_INVOKABLE void openImageOverlay(QString mxcUrl, QString eventId) const;
Q_INVOKABLE QColor userColor(QString id, QColor background);
Q_INVOKABLE QString escapeEmoji(QString str) const;
@@ -72,6 +77,7 @@ signals:
void inviteUsers(QStringList users);
void showRoomList();
void narrowViewChanged();
+void callStateChanged(webrtc::State);
public slots:
void updateReadReceipts(const QString &room_id, const std::vector<QString> &event_ids);