conference: remove dummy participant if audio only

With the video split, we no longer need the dummy RTP session.

GitLab: #729
Change-Id: Ibf6bb303d1860a80549f6cfb2a06e36e51f48b70
agsantos, 2022-03-17 15:29:35 -04:00 (committed by Aline Gondim Santos)
parent dca2eaaa4b
commit a6e0119ce7
11 changed files with 255 additions and 155 deletions
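
Overview (a simplified, self-contained sketch with stand-in types; Mixer, Call and Conference below are illustrations, not the daemon's real classes): instead of creating a dummy video RTP session for an audio-only participant, the conference now registers the call id with the video mixer's audio-only list, and removes it again once a media-change request adds video.

#include <memory>
#include <set>
#include <string>

enum class MediaType { AUDIO, VIDEO };

struct Mixer {                                       // stand-in for video::VideoMixer
    std::set<std::string> audioOnlySources_;
    void addAudioOnlySource(const std::string& id) { audioOnlySources_.insert(id); }
    void removeAudioOnlySource(const std::string& id) { audioOnlySources_.erase(id); }
};

struct Call {                                        // stand-in for SIPCall
    std::string id;
    std::set<MediaType> media;
    bool hasVideo() const { return media.count(MediaType::VIDEO) != 0; }
};

struct Conference {
    std::shared_ptr<Mixer> videoMixer_ = std::make_shared<Mixer>();

    // Analogous to Conference::addParticipant below: an audio-only call is
    // tracked by the mixer instead of getting a dummy video stream.
    void addParticipant(const Call& call)
    {
        if (!call.hasVideo() && videoMixer_)
            videoMixer_->addAudioOnlySource(call.id);
    }

    // Analogous to Conference::handleMediaChangeRequest: once the new media
    // list contains video, the participant leaves the audio-only list.
    void onMediaChange(const Call& call)
    {
        if (call.hasVideo() && videoMixer_)
            videoMixer_->removeAudioOnlySource(call.id);
    }
};

int main()
{
    Conference conf;
    Call audioOnly {"call-1", {MediaType::AUDIO}};
    conf.addParticipant(audioOnly);         // tracked as audio-only
    audioOnly.media.insert(MediaType::VIDEO);
    conf.onMediaChange(audioOnly);          // video negotiated -> untracked
    return 0;
}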


@ -357,18 +357,6 @@ public:
virtual std::vector<MediaAttribute> getMediaAttributeList() const = 0;
#ifdef ENABLE_VIDEO
/**
* Add a dummy video stream with the attached sink.
* Typically needed in conference to display infos for participants
* that have joined the conference without video (audio only).
*/
virtual bool addDummyVideoRtpSession() = 0;
/**
* Remove all dummy video streams.
*/
virtual void removeDummyVideoRtpSessions() = 0;
virtual std::shared_ptr<Observable<std::shared_ptr<MediaFrame>>>
getReceiveVideoFrameActiveWriter() = 0;
virtual void createSinks(const ConfInfo& infos) = 0;


@ -106,52 +106,81 @@ Conference::Conference(const std::shared_ptr<Account>& account)
std::unique_lock<std::mutex> lk(shared->videoToCallMtx_);
for (const auto& info : infos) {
std::string uri {};
std::string deviceId {};
auto it = shared->videoToCall_.find(info.source);
if (it == shared->videoToCall_.end())
it = shared->videoToCall_.emplace_hint(it, info.source, std::string());
bool isLocalMuted = false;
// If not local
if (!it->second.empty()) {
// Retrieve calls participants
// TODO: this is a first version, we assume that the peer is not
// a master of a conference and there is only one remote
// In the future, we should retrieve confInfo from the call
// To merge layout information
if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(it->second))) {
std::string deviceId {};
auto active = false;
if (!info.id.empty()) {
if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(info.id))) {
uri = call->getPeerNumber();
isLocalMuted = call->isPeerMuted();
if (auto* transport = call->getTransport())
deviceId = transport->deviceId();
}
if (auto videoMixer = shared->videoMixer_)
active = videoMixer->verifyActive(info.id);
std::string_view peerId = string_remove_suffix(uri, '@');
auto isModerator = shared->isModerator(peerId);
auto isHandRaised = shared->isHandRaised(peerId);
auto isModeratorMuted = shared->isMuted(peerId);
auto sinkId = shared->getConfId() + peerId;
newInfo.emplace_back(ParticipantInfo {std::move(uri),
deviceId,
std::move(sinkId),
active,
info.x,
info.y,
info.w,
info.h,
!info.hasVideo,
isLocalMuted,
isModeratorMuted,
isModerator,
isHandRaised});
} else {
auto it = shared->videoToCall_.find(info.source);
if (it == shared->videoToCall_.end())
it = shared->videoToCall_.emplace_hint(it, info.source, std::string());
// If not local
if (!it->second.empty()) {
// Retrieve calls participants
// TODO: this is a first version, we assume that the peer is not
// a master of a conference and there is only one remote
// In the future, we should retrieve confInfo from the call
// To merge layout information
if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(it->second))) {
uri = call->getPeerNumber();
isLocalMuted = call->isPeerMuted();
if (auto* transport = call->getTransport())
deviceId = transport->deviceId();
}
}
if (auto videoMixer = shared->videoMixer_)
active = videoMixer->verifyActive(info.source);
std::string_view peerId = string_remove_suffix(uri, '@');
auto isModerator = shared->isModerator(peerId);
if (uri.empty() && !hostAdded) {
hostAdded = true;
peerId = "host"sv;
deviceId = acc->currentDeviceId();
isLocalMuted = shared->isMediaSourceMuted(MediaType::MEDIA_AUDIO);
}
auto isHandRaised = shared->isHandRaised(peerId);
auto isModeratorMuted = shared->isMuted(peerId);
auto sinkId = shared->getConfId() + peerId;
newInfo.emplace_back(ParticipantInfo {std::move(uri),
deviceId,
std::move(sinkId),
active,
info.x,
info.y,
info.w,
info.h,
!info.hasVideo,
isLocalMuted,
isModeratorMuted,
isModerator,
isHandRaised});
}
auto active = false;
if (auto videoMixer = shared->videoMixer_)
active = info.source == videoMixer->getActiveParticipant();
std::string_view peerId = string_remove_suffix(uri, '@');
auto isModerator = shared->isModerator(peerId);
if (uri.empty()) {
hostAdded = true;
peerId = "host"sv;
deviceId = acc->currentDeviceId();
isLocalMuted = shared->isMediaSourceMuted(MediaType::MEDIA_AUDIO);
}
auto isHandRaised = shared->isHandRaised(peerId);
auto isModeratorMuted = shared->isMuted(peerId);
auto sinkId = shared->getConfId() + peerId;
newInfo.emplace_back(ParticipantInfo {std::move(uri),
deviceId,
std::move(sinkId),
active,
info.x,
info.y,
info.w,
info.h,
!info.hasVideo,
isLocalMuted,
isModeratorMuted,
isModerator,
isHandRaised});
}
if (auto videoMixer = shared->videoMixer_) {
newInfo.h = videoMixer->getHeight();
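
The restructured block above now has two paths for building a ParticipantInfo: entries whose info.id is set come from the mixer's audio-only list and are resolved directly by call id, while the others are resolved through the videoToCall_ map; an empty uri denotes the local host, which is emitted at most once via the hostAdded flag. A condensed, hypothetical illustration of that dispatch (illustrative names, not the real code):

#include <map>
#include <string>

struct Entry {                     // analogous to the mixer's SourceInfo
    std::string id;                // call id, set for audio-only entries
    const void* source = nullptr;  // frame source, set for video entries
};

// callOfSource stands in for the videoToCall_ map used in the diff.
std::string resolveCallId(const Entry& e,
                          const std::map<const void*, std::string>& callOfSource)
{
    if (!e.id.empty())
        return e.id;                        // audio-only: the mixer reported the call id
    auto it = callOfSource.find(e.source);  // video: map the source back to its call
    return it != callOfSource.end() ? it->second : std::string {};  // empty -> local host
}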
@ -537,12 +566,12 @@ Conference::handleMediaChangeRequest(const std::shared_ptr<Call>& call,
JAMI_DBG("Conf [%s] Answer to media change request", getConfId().c_str());
#ifdef ENABLE_VIDEO
// If the new media list has video, remove existing dummy
// video sessions if any.
// If the new media list has video, remove the participant from the audio-only list.
if (MediaAttribute::hasMediaType(MediaAttribute::buildMediaAttributesList(remoteMediaList,
false),
MediaType::MEDIA_VIDEO)) {
call->removeDummyVideoRtpSessions();
if (videoMixer_)
videoMixer_->removeAudioOnlySource(call->getCallId());
}
#endif
@ -627,13 +656,12 @@ Conference::addParticipant(const std::string& participant_id)
}
#ifdef ENABLE_VIDEO
if (auto call = getCall(participant_id)) {
// In conference, all participants need to have video session
// (with a sink) in order to display the participant info in
// the layout. So, if a participant joins with an audio only
// call, a dummy video stream is added to the call.
// In conference, if a participant joins with an audio-only
// call, it must be added to the audio-only list.
auto mediaList = call->getMediaAttributeList();
if (not MediaAttribute::hasMediaType(mediaList, MediaType::MEDIA_VIDEO)) {
call->addDummyVideoRtpSession();
if (videoMixer_)
videoMixer_->addAudioOnlySource(call->getCallId());
}
call->enterConference(shared_from_this());
// Continue the recording for the conference if one participant was recording
@ -667,6 +695,8 @@ Conference::setActiveParticipant(const std::string& participant_id)
if (auto call = getCallFromPeerID(participant_id)) {
if (auto videoRecv = call->getReceiveVideoFrameActiveWriter())
videoMixer_->setActiveParticipant(videoRecv.get());
else
videoMixer_->setActiveParticipant(call->getCallId());
return;
}
@ -677,7 +707,7 @@ Conference::setActiveParticipant(const std::string& participant_id)
return;
}
// Unset active participant by default
videoMixer_->setActiveParticipant(nullptr);
videoMixer_->resetActiveParticipant();
#endif
}
@ -689,8 +719,7 @@ Conference::setLayout(int layout)
case 0:
videoMixer_->setVideoLayout(video::Layout::GRID);
// The layout shouldn't have an active participant
if (videoMixer_->getActiveParticipant())
videoMixer_->setActiveParticipant(nullptr);
videoMixer_->resetActiveParticipant();
break;
case 1:
videoMixer_->setVideoLayout(video::Layout::ONE_BIG_WITH_SMALL);
@ -802,6 +831,8 @@ Conference::removeParticipant(const std::string& participant_id)
return;
}
if (auto call = getCall(participant_id)) {
if (videoMixer_->verifyActive(call->getCallId()))
videoMixer_->resetActiveParticipant();
participantsMuted_.erase(std::string(string_remove_suffix(call->getPeerNumber(), '@')));
handsRaised_.erase(std::string(string_remove_suffix(call->getPeerNumber(), '@')));
#ifdef ENABLE_VIDEO


@ -3044,7 +3044,7 @@ Manager::createSinkClients(const std::string& callId,
sinkId = callId;
sinkId += string_remove_suffix(participant.uri, '@') + participant.device;
}
if (participant.w && participant.h) {
if (participant.w && participant.h && !participant.videoMuted) {
auto currentSink = getSinkClient(sinkId);
if (currentSink) {
currentSink->setCrop(participant.x, participant.y, participant.w, participant.h);
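
The extra condition above means a sink client is only created or resized for participants that actually carry video; audio-only tiles keep the placeholder geometry published by the mixer. A tiny illustration of the guard (assumed, simplified type, not the real ParticipantInfo):

struct ParticipantView {
    int w = 0;
    int h = 0;
    bool videoMuted = true;
};

bool needsSink(const ParticipantView& p)
{
    return p.w && p.h && !p.videoMuted;
}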


@ -62,8 +62,12 @@ public:
receive_ = receive;
}
bool isSending() const noexcept { return send_.enabled; }
bool isReceiving() const noexcept { return receive_.enabled; }
bool isReceiving() const noexcept
{
return receive_.enabled
&& (receive_.direction_ == MediaDirection::RECVONLY
|| receive_.direction_ == MediaDirection::SENDRECV);
}
void setMtu(uint16_t mtu) { mtu_ = mtu; }
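
isReceiving() now also checks the negotiated direction, so a stream that is enabled in the media list but whose SDP direction excludes the receive side is no longer reported as receiving. A standalone illustration (the enum values are assumed to mirror the daemon's MediaDirection):

#include <cassert>

enum class MediaDirection { SENDRECV, SENDONLY, RECVONLY, INACTIVE };

struct StreamState {
    bool enabled = false;
    MediaDirection direction = MediaDirection::INACTIVE;
};

// Receiving only when enabled AND the negotiated direction includes receive.
bool isReceiving(const StreamState& receive) noexcept
{
    return receive.enabled
           && (receive.direction == MediaDirection::RECVONLY
               || receive.direction == MediaDirection::SENDRECV);
}

int main()
{
    assert(!isReceiving({true, MediaDirection::SENDONLY}));   // enabled but not receiving
    assert(isReceiving({true, MediaDirection::SENDRECV}));
    assert(!isReceiving({false, MediaDirection::SENDRECV}));  // disabled stream
    return 0;
}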


@ -179,6 +179,7 @@ VideoMixer::stopInput()
void
VideoMixer::setActiveHost()
{
activeAudioOnly_ = "";
activeSource_ = videoLocalSecondary_ ? videoLocalSecondary_.get() : videoLocal_.get();
updateLayout();
}
@ -186,10 +187,19 @@ VideoMixer::setActiveHost()
void
VideoMixer::setActiveParticipant(Observable<std::shared_ptr<MediaFrame>>* ob)
{
activeAudioOnly_ = "";
activeSource_ = ob;
updateLayout();
}
void
VideoMixer::setActiveParticipant(const std::string& id)
{
activeAudioOnly_ = id;
activeSource_ = nullptr;
updateLayout();
}
void
VideoMixer::updateLayout()
{
@ -219,9 +229,7 @@ VideoMixer::detached(Observable<std::shared_ptr<MediaFrame>>* ob)
if (x->source == ob) {
// Handle the case where the current shown source leave the conference
if (activeSource_ == ob) {
currentLayout_ = Layout::GRID;
activeSource_ = videoLocalSecondary_ ? videoLocalSecondary_.get()
: videoLocal_.get();
resetActiveParticipant();
}
JAMI_DBG("Remove source [%p]", x.get());
sources_.remove(x);
@ -283,12 +291,24 @@ VideoMixer::process()
libav_utils::fillWithBlack(output.pointer());
{
std::lock_guard<std::mutex> lk(audioOnlySourcesMtx_);
auto lock(rwMutex_.read());
int i = 0;
bool activeFound = false;
bool needsUpdate = layoutUpdated_ > 0;
bool successfullyRendered = false;
std::vector<SourceInfo> sourcesInfo;
sourcesInfo.reserve(sources_.size() + audioOnlySources_.size());
// add all audioonlysources
for (auto& id : audioOnlySources_) {
if (currentLayout_ != Layout::ONE_BIG or activeAudioOnly_ == id) {
sourcesInfo.emplace_back(SourceInfo {{}, 0, 0, 10, 10, false, id});
}
if (currentLayout_ == Layout::ONE_BIG and activeAudioOnly_ == id)
successfullyRendered = true;
}
// add video sources
for (auto& x : sources_) {
/* thread stop pending? */
if (!loop_.isRunning())
@ -359,11 +379,9 @@ VideoMixer::process()
if (needsUpdate and successfullyRendered) {
layoutUpdated_ -= 1;
if (layoutUpdated_ == 0) {
std::vector<SourceInfo> sourcesInfo;
sourcesInfo.reserve(sources_.size());
for (auto& x : sources_) {
sourcesInfo.emplace_back(
SourceInfo {x->source, x->x, x->y, x->w, x->h, x->hasVideo});
SourceInfo {x->source, x->x, x->y, x->w, x->h, x->hasVideo, {}});
}
if (onSourcesUpdated_)
onSourcesUpdated_(std::move(sourcesInfo));
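
In process(), audio-only participants are now folded into the published layout as placeholder entries: no frame source, hasVideo false, a 10x10 rectangle, and the call id carried in the new SourceInfo::id field (see the header diff below). Under the ONE_BIG layout only the active audio-only participant is emitted, and marking it as successfully rendered lets the pending layout update go through even though no video frame was composited. A simplified, self-contained view of that loop (stand-in types):

#include <set>
#include <string>
#include <vector>

enum class Layout { GRID, ONE_BIG_WITH_SMALL, ONE_BIG };

struct SourceInfo {                // analogous to video::SourceInfo with the new id field
    const void* source = nullptr;  // video source pointer, null for audio-only
    int x = 0, y = 0, w = 0, h = 0;
    bool hasVideo = false;
    std::string id;                // call id, set only for audio-only entries
};

std::vector<SourceInfo> buildAudioOnlyEntries(const std::set<std::string>& audioOnly,
                                              const std::string& activeAudioOnly,
                                              Layout layout)
{
    std::vector<SourceInfo> out;
    for (const auto& id : audioOnly) {
        // In ONE_BIG only the active participant is kept; the rest are hidden.
        if (layout != Layout::ONE_BIG || activeAudioOnly == id)
            out.push_back(SourceInfo {nullptr, 0, 0, 10, 10, false, id});
    }
    // Video sources would be appended here, as in the real process() loop.
    return out;
}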


@ -45,6 +45,7 @@ struct SourceInfo
int w;
int h;
bool hasVideo;
std::string id;
};
using OnSourcesUpdatedCb = std::function<void(std::vector<SourceInfo>&&)>;
@ -73,9 +74,23 @@ public:
void stopInput();
void setActiveParticipant(Observable<std::shared_ptr<MediaFrame>>* ob);
void setActiveParticipant(const std::string& id);
void resetActiveParticipant() {
activeAudioOnly_ = "";
activeSource_ = nullptr;
updateLayout();
}
void setActiveHost();
Observable<std::shared_ptr<MediaFrame>>* getActiveParticipant() { return activeSource_; }
bool verifyActive(const std::string& id)
{
return id == activeAudioOnly_;
}
bool verifyActive(Observable<std::shared_ptr<MediaFrame>>* ob)
{
return ob == activeSource_;
}
void setVideoLayout(Layout newLayout)
{
@ -93,6 +108,20 @@ public:
std::shared_ptr<SinkClient>& getSink() { return sink_; }
void addAudioOnlySource(const std::string& id)
{
std::lock_guard<std::mutex> lk(audioOnlySourcesMtx_);
audioOnlySources_.insert(id);
updateLayout();
}
void removeAudioOnlySource(const std::string& id)
{
std::lock_guard<std::mutex> lk(audioOnlySourcesMtx_);
audioOnlySources_.erase(id);
updateLayout();
}
private:
NON_COPYABLE(VideoMixer);
struct VideoMixerSource;
@ -130,6 +159,10 @@ private:
Observable<std::shared_ptr<MediaFrame>>* activeSource_ {nullptr};
std::list<std::unique_ptr<VideoMixerSource>> sources_;
std::mutex audioOnlySourcesMtx_;
std::set<std::string> audioOnlySources_;
std::string activeAudioOnly_{};
std::atomic_int layoutUpdated_ {0};
OnSourcesUpdatedCb onSourcesUpdated_ {};
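
Taken together, the header additions above give the mixer two mutually exclusive ways to name the active participant, a video source pointer (activeSource_) or an audio-only call id (activeAudioOnly_), plus a mutex-guarded set of audio-only call ids that other threads may update while the mixer loop runs process(). A consolidated sketch of that state and its invariant (illustrative class, not the real VideoMixer):

#include <mutex>
#include <set>
#include <string>

struct FrameSource {};  // stand-in for Observable<std::shared_ptr<MediaFrame>>

class MixerSketch {
public:
    void addAudioOnlySource(const std::string& id)
    {
        std::lock_guard<std::mutex> lk(audioOnlySourcesMtx_);
        audioOnlySources_.insert(id);
    }
    void removeAudioOnlySource(const std::string& id)
    {
        std::lock_guard<std::mutex> lk(audioOnlySourcesMtx_);
        audioOnlySources_.erase(id);
    }
    // Setting one representation of the active participant clears the other.
    void setActiveParticipant(FrameSource* ob)
    {
        activeAudioOnly_.clear();
        activeSource_ = ob;
    }
    void setActiveParticipant(const std::string& id)
    {
        activeAudioOnly_ = id;
        activeSource_ = nullptr;
    }
    void resetActiveParticipant()
    {
        activeAudioOnly_.clear();
        activeSource_ = nullptr;
    }
    bool verifyActive(FrameSource* ob) const { return ob == activeSource_; }
    bool verifyActive(const std::string& id) const { return id == activeAudioOnly_; }

private:
    std::mutex audioOnlySourcesMtx_;
    std::set<std::string> audioOnlySources_;
    FrameSource* activeSource_ {nullptr};
    std::string activeAudioOnly_;
};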


@ -255,10 +255,23 @@ VideoRtpSession::startReceiver()
receiveThread_->startLoop();
receiveThread_->setRequestKeyFrameCallback([this]() { cbKeyFrameRequest_(); });
receiveThread_->setRotation(rotation_.load());
if (videoMixer_) {
auto activeParticipant = videoMixer_->verifyActive(receiveThread_.get())
|| videoMixer_->verifyActive(callID_);
videoMixer_->removeAudioOnlySource(callID_);
if (activeParticipant)
videoMixer_->setActiveParticipant(receiveThread_.get());
}
} else {
JAMI_DBG("[%p] Video receiver disabled", this);
if (receiveThread_ and videoMixer_) {
auto activeParticipant = videoMixer_->verifyActive(receiveThread_.get())
|| videoMixer_->verifyActive(callID_);
videoMixer_->addAudioOnlySource(callID_);
receiveThread_->detach(videoMixer_.get());
if (activeParticipant)
videoMixer_->setActiveParticipant(callID_);
}
}
if (socketPair_)
@ -276,7 +289,11 @@ VideoRtpSession::stopReceiver()
return;
if (videoMixer_) {
auto activeParticipant = videoMixer_->verifyActive(receiveThread_.get()) || videoMixer_->verifyActive(callID_);
videoMixer_->addAudioOnlySource(callID_);
receiveThread_->detach(videoMixer_.get());
if (activeParticipant)
videoMixer_->setActiveParticipant(callID_);
}
// We need to disable the read operation, otherwise the
@ -463,7 +480,6 @@ VideoRtpSession::setupConferenceVideoPipeline(Conference& conference, Direction
conference.getConfId().c_str(),
callID_.c_str());
if (receiveThread_) {
conference.detachVideo(dummyVideoReceive_.get());
receiveThread_->stopSink();
conference.attachVideo(receiveThread_.get(), callID_);
} else {
@ -472,6 +488,14 @@ VideoRtpSession::setupConferenceVideoPipeline(Conference& conference, Direction
}
}
std::shared_ptr<VideoFrameActiveWriter>
VideoRtpSession::getReceiveVideoFrameActiveWriter()
{
if (isReceiving() && receiveThread_ && conference_)
return std::static_pointer_cast<VideoFrameActiveWriter>(receiveThread_);
return {};
}
void
VideoRtpSession::enterConference(Conference& conference)
{
@ -521,10 +545,11 @@ VideoRtpSession::exitConference()
videoMixer_->detach(sender_.get());
if (receiveThread_) {
auto activeParticipant = videoMixer_->verifyActive(receiveThread_.get());
conference_->detachVideo(receiveThread_.get());
receiveThread_->startSink();
} else {
conference_->detachVideo(dummyVideoReceive_.get());
if (activeParticipant)
videoMixer_->setActiveParticipant(callID_);
}
videoMixer_.reset();
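
startReceiver() and stopReceiver() above (and, in a reduced form, exitConference()) follow the same handoff pattern: check whether the participant was active under its old representation (video source or call id), switch representation, then restore the active flag under the new one, so toggling video on or off does not drop a participant's active status. A hedged sketch of the pattern against an assumed mixer interface (only the method names shown in this commit are taken from the real API):

#include <string>

struct FrameSource {};  // stand-in for the receive thread / frame source

struct MixerIface {     // assumed interface for the sketch
    virtual bool verifyActive(FrameSource*) = 0;
    virtual bool verifyActive(const std::string&) = 0;
    virtual void addAudioOnlySource(const std::string&) = 0;
    virtual void removeAudioOnlySource(const std::string&) = 0;
    virtual void setActiveParticipant(FrameSource*) = 0;
    virtual void setActiveParticipant(const std::string&) = 0;
    virtual ~MixerIface() = default;
};

// Video receiver goes away (cf. stopReceiver): fall back to the call id.
void onVideoStopped(MixerIface& mixer, FrameSource* receiver, const std::string& callId)
{
    bool wasActive = mixer.verifyActive(receiver) || mixer.verifyActive(callId);
    mixer.addAudioOnlySource(callId);
    if (wasActive)
        mixer.setActiveParticipant(callId);
}

// Video receiver comes (back) up (cf. startReceiver): promote to the video source.
void onVideoStarted(MixerIface& mixer, FrameSource* receiver, const std::string& callId)
{
    bool wasActive = mixer.verifyActive(receiver) || mixer.verifyActive(callId);
    mixer.removeAudioOnlySource(callId);
    if (wasActive)
        mixer.setActiveParticipant(receiver);
}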


@ -108,12 +108,7 @@ public:
std::shared_ptr<VideoReceiveThread>& getVideoReceive() { return receiveThread_; }
std::shared_ptr<VideoFrameActiveWriter> getReceiveVideoFrameActiveWriter()
{
if (isReceiving() && receiveThread_)
return std::static_pointer_cast<VideoFrameActiveWriter>(receiveThread_);
return dummyVideoReceive_;
}
std::shared_ptr<VideoFrameActiveWriter> getReceiveVideoFrameActiveWriter();
private:
void setupConferenceVideoPipeline(Conference& conference, Direction dir);
@ -129,8 +124,6 @@ private:
std::unique_ptr<VideoSender> sender_;
std::shared_ptr<VideoReceiveThread> receiveThread_;
std::shared_ptr<VideoFrameActiveWriter> dummyVideoReceive_
= std::make_shared<VideoFrameActiveWriter>();
Conference* conference_ {nullptr};
std::shared_ptr<VideoMixer> videoMixer_;
std::shared_ptr<VideoInput> videoLocal_;


@ -54,6 +54,7 @@
#include <chrono>
#include <libavutil/display.h>
#include <video/sinkclient.h>
#include "media/video/video_mixer.h"
#endif
#include "audio/ringbufferpool.h"
#include "jamidht/channeled_transport.h"
@ -91,8 +92,6 @@ static constexpr auto REUSE_ICE_IN_REINVITE_REQUIRED_VERSION_STR = "11.0.2"sv;
static const std::vector<unsigned> REUSE_ICE_IN_REINVITE_REQUIRED_VERSION
= split_string_to_unsigned(REUSE_ICE_IN_REINVITE_REQUIRED_VERSION_STR, '.');
constexpr auto DUMMY_VIDEO_STR = "dummy video session";
SIPCall::SIPCall(const std::shared_ptr<SIPAccountBase>& account,
const std::string& callId,
Call::CallType type,
@ -1042,6 +1041,11 @@ SIPCall::hangup(int reason)
// Stop all RTP streams
stopAllMedia();
if (auto conf = getConference()) {
if (auto mixer = conf->getVideoMixer()) {
mixer->removeAudioOnlySource(getCallId());
}
}
setState(Call::ConnectionState::DISCONNECTED, reason);
dht::ThreadPool::io().run([w = weak()] {
if (auto shared = w.lock())
@ -1397,6 +1401,11 @@ SIPCall::peerHungup()
if (inviteSession_)
terminateSipSession(PJSIP_SC_NOT_FOUND);
if (auto conf = getConference()) {
if (auto mixer = conf->getVideoMixer()) {
mixer->removeAudioOnlySource(getCallId());
}
}
Call::peerHungup();
}
@ -2046,20 +2055,12 @@ SIPCall::startAllMedia()
// reset
readyToRecord_ = false;
resetMediaReady();
#ifdef ENABLE_VIDEO
bool hasActiveVideo = false;
#endif
for (auto iter = rtpStreams_.begin(); iter != rtpStreams_.end(); iter++) {
if (not iter->mediaAttribute_) {
throw std::runtime_error("Missing media attribute");
}
#ifdef ENABLE_VIDEO
if (iter->mediaAttribute_->type_ == MEDIA_VIDEO)
hasActiveVideo |= iter->mediaAttribute_->enabled_;
#endif
// Not restarting media loop on hold as it's a huge waste of CPU resources
// because of the audio loop
if (getState() != CallState::HOLD) {
@ -2071,16 +2072,6 @@ SIPCall::startAllMedia()
}
}
#ifdef ENABLE_VIDEO
// TODO. Move this elsewhere (when adding participant to conf?)
if (not hasActiveVideo) {
if (auto conference = conf_.lock())
if (conference->isVideoEnabled())
if (auto recv = getReceiveVideoFrameActiveWriter())
conference->attachVideo(recv.get(), getCallId());
}
#endif
// Media is restarted, we can process the last holding request.
isWaitingForIceAndMedia_ = false;
if (remainingRequest_ != Request::NoRequest) {
@ -2900,15 +2891,8 @@ SIPCall::enterConference(std::shared_ptr<Conference> conference)
#ifdef ENABLE_VIDEO
if (conference->isVideoEnabled()) {
auto videoRtp = getVideoRtp();
if (not videoRtp) {
// In conference, we need to have a video RTP session even
// if it's an audio only call
if (not addDummyVideoRtpSession()) {
JAMI_ERR("[call:%s] Failed to get a valid video RTP session", getCallId().c_str());
throw std::runtime_error("Failed to get a valid video RTP session");
}
}
videoRtp->enterConference(*conference);
if (videoRtp)
videoRtp->enterConference(*conference);
}
#endif
@ -2960,47 +2944,6 @@ SIPCall::getVideoRtp() const
return {};
}
bool
SIPCall::addDummyVideoRtpSession()
{
JAMI_DBG("[call:%s] Add dummy video stream", getCallId().c_str());
MediaAttribute mediaAttr(MediaType::MEDIA_VIDEO,
true,
true,
false,
"dummy source",
DUMMY_VIDEO_STR);
addMediaStream(mediaAttr);
auto& stream = rtpStreams_.back();
createRtpSession(stream);
return stream.rtpSession_ != nullptr;
}
void
SIPCall::removeDummyVideoRtpSessions()
{
// It's not expected to have more than one dummy video stream, but
// check just in case.
auto removed = std::remove_if(rtpStreams_.begin(),
rtpStreams_.end(),
[](const RtpStream& stream) {
return stream.mediaAttribute_->label_ == DUMMY_VIDEO_STR;
});
auto count = std::distance(removed, rtpStreams_.end());
rtpStreams_.erase(removed, rtpStreams_.end());
if (count > 0) {
JAMI_DBG("[call:%s] Removed %lu dummy video stream(s)", getCallId().c_str(), count);
if (count > 1) {
JAMI_WARN("[call:%s] Expected to find 1 dummy video stream, found %lu",
getCallId().c_str(),
count);
}
}
}
void
SIPCall::setRotation(int rotation)
{
@ -3017,6 +2960,8 @@ SIPCall::createSinks(const ConfInfo& infos)
std::lock_guard<std::mutex> lk(sinksMtx_);
auto videoRtp = getVideoRtp();
if (!videoRtp)
return;
auto& videoReceive = videoRtp->getVideoReceive();
if (!videoReceive)
return;


@ -271,8 +271,6 @@ public:
* Returns a pointer to the VideoRtp object
*/
std::shared_ptr<video::VideoRtpSession> getVideoRtp() const;
bool addDummyVideoRtpSession() override;
void removeDummyVideoRtpSessions() override;
void setRotation(int rotation);
#endif
// Get the list of current RTP sessions


@ -46,6 +46,7 @@ struct CallData
std::string hostState {};
std::atomic_bool moderatorMuted {false};
std::atomic_bool raisedHand {false};
std::atomic_bool active {false};
void reset()
{
@ -54,6 +55,7 @@ struct CallData
device = "";
hostState = "";
moderatorMuted = false;
active = false;
raisedHand = false;
}
};
@ -79,6 +81,7 @@ private:
void testAudioVideoMutedStates();
void testCreateParticipantsSinks();
void testMuteStatusAfterRemove();
void testActiveStatusAfterRemove();
void testHandsUp();
void testPeerLeaveConference();
void testJoinCallFromOtherAccount();
@ -90,6 +93,7 @@ private:
CPPUNIT_TEST(testAudioVideoMutedStates);
CPPUNIT_TEST(testCreateParticipantsSinks);
CPPUNIT_TEST(testMuteStatusAfterRemove);
CPPUNIT_TEST(testActiveStatusAfterRemove);
CPPUNIT_TEST(testHandsUp);
CPPUNIT_TEST(testPeerLeaveConference);
CPPUNIT_TEST(testJoinCallFromOtherAccount);
@ -212,14 +216,17 @@ ConferenceTest::registerSignalHandlers()
const std::vector<std::map<std::string, std::string>> participantsInfos) {
for (const auto& infos : participantsInfos) {
if (infos.at("uri").find(bobUri) != std::string::npos) {
bobCall.active = infos.at("active") == "true";
bobCall.moderatorMuted = infos.at("audioModeratorMuted") == "true";
bobCall.raisedHand = infos.at("handRaised") == "true";
bobCall.device = infos.at("device");
} else if (infos.at("uri").find(carlaUri) != std::string::npos) {
carlaCall.active = infos.at("active") == "true";
carlaCall.moderatorMuted = infos.at("audioModeratorMuted") == "true";
carlaCall.raisedHand = infos.at("handRaised") == "true";
carlaCall.device = infos.at("device");
} else if (infos.at("uri").find(daviUri) != std::string::npos) {
daviCall.active = infos.at("active") == "true";
daviCall.moderatorMuted = infos.at("audioModeratorMuted") == "true";
daviCall.raisedHand = infos.at("handRaised") == "true";
daviCall.device = infos.at("device");
@ -451,6 +458,64 @@ ConferenceTest::testMuteStatusAfterRemove()
DRing::unregisterSignalHandlers();
}
void
ConferenceTest::testActiveStatusAfterRemove()
{
registerSignalHandlers();
auto aliceAccount = Manager::instance().getAccount<JamiAccount>(aliceId);
auto bobAccount = Manager::instance().getAccount<JamiAccount>(bobId);
auto bobUri = bobAccount->getUsername();
auto daviAccount = Manager::instance().getAccount<JamiAccount>(daviId);
auto daviUri = daviAccount->getUsername();
startConference();
MediaAttribute defaultAudio(MediaType::MEDIA_AUDIO);
defaultAudio.label_ = "audio_0";
defaultAudio.enabled_ = true;
JAMI_INFO("Start call between Alice and Davi");
auto call1 = DRing::placeCallWithMedia(aliceId,
daviUri,
MediaAttribute::mediaAttributesToMediaMaps(
{defaultAudio}));
CPPUNIT_ASSERT(
cv.wait_for(lk, std::chrono::seconds(20), [&] { return !daviCall.callId.empty(); }));
Manager::instance().answerCall(daviId, daviCall.callId);
CPPUNIT_ASSERT(
cv.wait_for(lk, std::chrono::seconds(20), [&] { return daviCall.hostState == "CURRENT"; }));
Manager::instance().addParticipant(aliceId, call1, aliceId, confId);
DRing::setActiveParticipant(aliceId, confId, daviUri);
CPPUNIT_ASSERT(
cv.wait_for(lk, std::chrono::seconds(5), [&] { return daviCall.active.load(); }));
Manager::instance().hangupCall(daviId, daviCall.callId);
CPPUNIT_ASSERT(
cv.wait_for(lk, std::chrono::seconds(20), [&] { return daviCall.state == "OVER"; }));
daviCall.reset();
auto call2 = DRing::placeCallWithMedia(aliceId, daviUri, MediaAttribute::mediaAttributesToMediaMaps({defaultAudio}));
CPPUNIT_ASSERT(
cv.wait_for(lk, std::chrono::seconds(20), [&] { return !daviCall.callId.empty(); }));
Manager::instance().answerCall(daviId, daviCall.callId);
CPPUNIT_ASSERT(
cv.wait_for(lk, std::chrono::seconds(20), [&] { return daviCall.hostState == "CURRENT"; }));
Manager::instance().addParticipant(aliceId, call2, aliceId, confId);
CPPUNIT_ASSERT(
cv.wait_for(lk, std::chrono::seconds(5), [&] { return !daviCall.active.load(); }));
Manager::instance().hangupCall(daviId, daviCall.callId);
CPPUNIT_ASSERT(
cv.wait_for(lk, std::chrono::seconds(20), [&] { return daviCall.state == "OVER"; }));
hangupConference();
DRing::unregisterSignalHandlers();
}
void
ConferenceTest::testHandsUp()
{