video_mixer: vectorize host's video inputs

Previously, the video mixer supported only two inputs, via
switchInput and switchSecondaryInput. Both of those methods are
deprecated in favor of requestMediaChange.
Now, instead of managing two separate variables, the video mixer
manages a vector of video inputs for the host. This allows the
conference to manage its inputs more flexibly via
requestMediaChange().

https://git.jami.net/savoirfairelinux/jami-project/-/issues/1429

Change-Id: Ia65b35707bddf979154527c856e0574d96bbe519

Author: Sébastien Blin
Date:   2022-05-17 04:55:41 -04:00
Parent: 5574e5f6ee
Commit: f0b9497825
5 changed files with 116 additions and 127 deletions
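
Below is a minimal, self-contained sketch of the bookkeeping this commit introduces: the mixer now keeps a mutex-protected vector of host inputs instead of the videoLocal_/videoLocalSecondary_ pair. The class and types are illustrative stand-ins (plain strings instead of shared_ptr<VideoFrameActiveWriter>, and no attach/detach to the frame pipeline); the real implementation is in the video_mixer.cpp and video_mixer.h hunks further down.

// Illustrative sketch only -- not jamid code.
#include <mutex>
#include <string>
#include <vector>

class HostInputsSketch {
public:
    // Replace the whole input set at once (successor of the deprecated
    // switchInput()/switchSecondaryInput() pair).
    void switchInputs(const std::vector<std::string>& inputs)
    {
        stopInputs();
        std::lock_guard<std::mutex> lk(inputsMtx_);
        for (const auto& uri : inputs)
            inputs_.emplace_back(uri);
    }

    // Replace a single slot, growing the vector on demand; roughly what
    // Conference::requestMediaChange() does for each video media.
    void switchInput(const std::string& input, unsigned idx)
    {
        std::lock_guard<std::mutex> lk(inputsMtx_);
        if (idx >= inputs_.size())
            inputs_.resize(idx + 1);
        inputs_[idx] = input; // the real code also stops the input it replaces
    }

    // Stop and drop every host input (successor of stopInput()).
    void stopInputs()
    {
        std::lock_guard<std::mutex> lk(inputsMtx_);
        inputs_.clear();
    }

private:
    std::mutex inputsMtx_;
    std::vector<std::string> inputs_ {};
};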


@@ -457,12 +457,7 @@ switchSecondaryInput(const std::string& accountId,
const std::string& confId,
const std::string& resource)
{
if (const auto account = jami::Manager::instance().getAccount(accountId)) {
if (auto conf = account->getConference(confId)) {
conf->switchSecondaryInput(resource);
return true;
}
}
JAMI_ERR("Use requestMediaChange");
return false;
}


@@ -155,7 +155,8 @@ Conference::Conference(const std::shared_ptr<Account>& account)
isModeratorMuted = shared->isMuted(streamId);
if (auto videoMixer = shared->videoMixer_)
active = videoMixer->verifyActive(streamId);
if (auto call = std::dynamic_pointer_cast<SIPCall>(getCall(streamInfo.callId))) {
if (auto call = std::dynamic_pointer_cast<SIPCall>(
getCall(streamInfo.callId))) {
uri = call->getPeerNumber();
isLocalMuted = call->isPeerMuted();
if (auto* transport = call->getTransport())
@@ -212,21 +213,17 @@ Conference::Conference(const std::shared_ptr<Account>& account)
parser_.onHangupParticipant([&](const auto& accountUri, const auto& deviceId) {
hangupParticipant(accountUri, deviceId);
});
parser_.onRaiseHand(
[&](const auto& deviceId, bool state) { setHandRaised(deviceId, state); });
parser_.onSetActiveStream([&](const auto& streamId, bool state) {
setActiveStream(streamId, state);
});
parser_.onMuteStreamAudio
(
parser_.onRaiseHand([&](const auto& deviceId, bool state) { setHandRaised(deviceId, state); });
parser_.onSetActiveStream(
[&](const auto& streamId, bool state) { setActiveStream(streamId, state); });
parser_.onMuteStreamAudio(
[&](const auto& accountUri, const auto& deviceId, const auto& streamId, bool state) {
muteStream(accountUri, deviceId, streamId, state);
});
parser_.onSetLayout([&](int layout) { setLayout(layout); });
// Version 0, deprecated
parser_.onKickParticipant(
[&](const auto& participantId) { hangupParticipant(participantId); });
parser_.onKickParticipant([&](const auto& participantId) { hangupParticipant(participantId); });
parser_.onSetActiveParticipant(
[&](const auto& participantId) { setActiveParticipant(participantId); });
parser_.onMuteParticipant(
@@ -371,7 +368,7 @@ Conference::createConfAVStreams()
createConfAVStream(receiveStreamData, *videoMixer_, receiveSubject);
// Preview
if (auto& videoPreview = videoMixer_->getVideoLocal()) {
if (auto videoPreview = videoMixer_->getVideoLocal()) {
auto previewSubject = std::make_shared<MediaStreamSubject>(pluginVideoMap_);
StreamData previewStreamData {getConfId(),
false,
@@ -541,24 +538,7 @@ Conference::requestMediaChange(const std::vector<DRing::MediaMap>& mediaList)
mediaAttr.toString(true).c_str());
}
// NOTE:
// The current design support only one stream per media type. The
// request will be ignored if this condition is not respected.
for (auto mediaType : {MediaType::MEDIA_AUDIO, MediaType::MEDIA_VIDEO}) {
auto count = std::count_if(mediaAttrList.begin(),
mediaAttrList.end(),
[&mediaType](auto const& attr) {
return attr.type_ == mediaType;
});
if (count > 1) {
JAMI_ERR("[conf %s] Cant handle more than 1 stream per media type (found %lu)",
getConfId().c_str(),
count);
return false;
}
}
uint32_t videoIdx = 0;
for (auto const& mediaAttr : mediaAttrList) {
#ifdef ENABLE_VIDEO
auto& mediaSource = mediaAttr.type_ == MediaType::MEDIA_AUDIO ? hostAudioSource_
@@ -586,8 +566,9 @@ Conference::requestMediaChange(const std::vector<DRing::MediaMap>& mediaList)
? DRing::Media::Details::MEDIA_TYPE_AUDIO
: DRing::Media::Details::MEDIA_TYPE_VIDEO);
} else {
switchInput(mediaSource.sourceUri_);
videoMixer_->switchInput(mediaAttr.sourceUri_, videoIdx);
}
videoIdx++;
}
// Update the mute state if changed.
@@ -610,12 +591,15 @@ Conference::handleMediaChangeRequest(const std::shared_ptr<Call>& call,
#ifdef ENABLE_VIDEO
// If the new media list has video, remove the participant from audioonlylist.
if (videoMixer_ && MediaAttribute::hasMediaType(
MediaAttribute::buildMediaAttributesList(remoteMediaList, false),
MediaType::MEDIA_VIDEO)) {
if (videoMixer_
&& MediaAttribute::hasMediaType(MediaAttribute::buildMediaAttributesList(remoteMediaList,
false),
MediaType::MEDIA_VIDEO)) {
auto callId = call->getCallId();
videoMixer_->removeAudioOnlySource(callId,
std::string(sip_utils::streamId(callId, 0, MediaType::MEDIA_VIDEO)));
std::string(sip_utils::streamId(callId,
0,
MediaType::MEDIA_VIDEO)));
}
#endif
@@ -704,7 +688,10 @@ Conference::addParticipant(const std::string& participant_id)
// call, it must be listed in the audioonlylist.
auto mediaList = call->getMediaAttributeList();
if (videoMixer_ && not MediaAttribute::hasMediaType(mediaList, MediaType::MEDIA_VIDEO)) {
videoMixer_->addAudioOnlySource(call->getCallId(), sip_utils::streamId(call->getCallId(), 0, MediaType::MEDIA_AUDIO));
videoMixer_->addAudioOnlySource(call->getCallId(),
sip_utils::streamId(call->getCallId(),
0,
MediaType::MEDIA_AUDIO));
}
call->enterConference(shared_from_this());
// Continue the recording for the conference if one participant was recording
@@ -736,7 +723,8 @@ Conference::setActiveParticipant(const std::string& participant_id)
return;
}
if (auto call = getCallFromPeerID(participant_id)) {
videoMixer_->setActiveStream(sip_utils::streamId(call->getCallId(), 0, MediaType::MEDIA_VIDEO));
videoMixer_->setActiveStream(
sip_utils::streamId(call->getCallId(), 0, MediaType::MEDIA_VIDEO));
return;
}
@@ -869,7 +857,8 @@ Conference::removeParticipant(const std::string& participant_id)
auto sinkId = getConfId() + peerId;
// Remove if active
// TODO all streams
if (videoMixer_->verifyActive(sip_utils::streamId(participant_id, 0, MediaType::MEDIA_VIDEO)))
if (videoMixer_->verifyActive(
sip_utils::streamId(participant_id, 0, MediaType::MEDIA_VIDEO)))
videoMixer_->resetActiveStream();
call->exitConference();
if (call->isPeerRecording())
@@ -904,9 +893,10 @@ Conference::attachLocalParticipant()
#ifdef ENABLE_VIDEO
if (videoMixer_) {
videoMixer_->switchInput(hostVideoSource_.sourceUri_);
std::vector<std::string> videoInputs = {hostVideoSource_.sourceUri_};
if (not mediaSecondaryInput_.empty())
videoMixer_->switchSecondaryInput(mediaSecondaryInput_);
videoInputs.emplace_back(mediaSecondaryInput_);
videoMixer_->switchInputs(videoInputs);
}
#endif
} else {
@@ -933,7 +923,7 @@ Conference::detachLocalParticipant()
#ifdef ENABLE_VIDEO
if (videoMixer_)
videoMixer_->stopInput();
videoMixer_->stopInputs();
// Reset local video source
hostVideoSource_ = {};
@@ -1054,10 +1044,10 @@ Conference::switchInput(const std::string& input)
return;
if (auto mixer = videoMixer_) {
mixer->switchInput(input);
mixer->switchInputs({input});
#ifdef ENABLE_PLUGIN
// Preview
if (auto& videoPreview = mixer->getVideoLocal()) {
if (auto videoPreview = mixer->getVideoLocal()) {
auto previewSubject = std::make_shared<MediaStreamSubject>(pluginVideoMap_);
StreamData previewStreamData {getConfId(),
false,
@@ -1071,17 +1061,6 @@ Conference::switchInput(const std::string& input)
#endif
}
void
Conference::switchSecondaryInput(const std::string& input)
{
#ifdef ENABLE_VIDEO
mediaSecondaryInput_ = input;
if (videoMixer_) {
videoMixer_->switchSecondaryInput(input);
}
#endif
}
bool
Conference::isVideoEnabled() const
{
@@ -1518,13 +1497,13 @@ Conference::muteLocalHost(bool is_muted, const std::string& mediaType)
setLocalHostMuteState(MediaType::MEDIA_VIDEO, is_muted);
if (is_muted) {
if (auto mixer = videoMixer_) {
JAMI_DBG("Muting local video source");
mixer->stopInput();
JAMI_DBG("Muting local video sources");
mixer->stopInputs();
}
} else {
if (auto mixer = videoMixer_) {
JAMI_DBG("Un-muting local video source");
switchInput(hostVideoSource_.sourceUri_);
mixer->switchInputs({hostVideoSource_.sourceUri_});
}
}
emitSignal<DRing::CallSignal::VideoMuted>(id_, is_muted);


@@ -323,8 +323,6 @@ public:
bool toggleRecording() override;
void switchInput(const std::string& input);
void switchSecondaryInput(const std::string& input);
void setActiveParticipant(const std::string& participant_id);
void setActiveStream(const std::string& streamId, bool state);
void setLayout(int layout);


@@ -87,9 +87,11 @@ VideoMixer::VideoMixer(const std::string& id, const std::string& localInput)
, loop_([] { return true; }, std::bind(&VideoMixer::process, this), [] {})
{
// Local video camera is the main participant
if (not localInput.empty())
videoLocal_ = getVideoInput(localInput);
attachVideo(videoLocal_.get(), "", sip_utils::streamId("", 0, MediaType::MEDIA_VIDEO));
if (not localInput.empty()) {
auto videoInput = getVideoInput(localInput);
localInputs_.emplace_back(videoInput);
attachVideo(videoInput.get(), "", sip_utils::streamId("", 0, MediaType::MEDIA_VIDEO));
}
loop_.start();
nextProcess_ = std::chrono::steady_clock::now();
@@ -98,12 +100,8 @@ VideoMixer::VideoMixer(const std::string& id, const std::string& localInput)
VideoMixer::~VideoMixer()
{
stop_sink();
detachVideo(videoLocal_.get());
videoLocal_.reset();
detachVideo(videoLocalSecondary_.get());
videoLocalSecondary_.reset();
stopSink();
stopInputs();
loop_.join();
@@ -111,61 +109,62 @@ VideoMixer::~VideoMixer()
}
void
VideoMixer::switchInput(const std::string& input)
VideoMixer::switchInput(const std::string& input, unsigned idx)
{
JAMI_DBG("Set new input %s", input.c_str());
std::shared_ptr<VideoFrameActiveWriter> oldInput;
auto newInput = getVideoInput(input);
std::unique_lock<std::mutex> lk(localInputsMtx_);
if (idx < localInputs_.size())
oldInput = std::move(localInputs_[idx]);
else
localInputs_.resize(idx + 1);
localInputs_[idx] = newInput;
lk.unlock();
if (oldInput)
stopInput(oldInput);
attachVideo(newInput.get(), "", sip_utils::streamId("", idx, MediaType::MEDIA_VIDEO));
}
if (auto local = videoLocal_) {
// Detach videoInput from mixer
local->detach(this);
#if !VIDEO_CLIENT_INPUT
if (auto localInput = std::dynamic_pointer_cast<VideoInput>(local)) {
// Stop old VideoInput
localInput->stopInput();
}
#endif
}
void
VideoMixer::switchInputs(const std::vector<std::string>& inputs)
{
stopInputs();
if (input.empty()) {
JAMI_DBG("[mixer:%s] Input is empty, don't add it to the mixer", id_.c_str());
if (inputs.empty()) {
JAMI_DBG("[mixer:%s] Input list is empty; nothing to add to the mixer", id_.c_str());
return;
}
// Re-attach videoInput to mixer
videoLocal_ = getVideoInput(input);
attachVideo(videoLocal_.get(), "", sip_utils::streamId("", 0, MediaType::MEDIA_VIDEO));
}
void
VideoMixer::switchSecondaryInput(const std::string& input)
{
if (auto local = videoLocalSecondary_) {
// Detach videoInput from mixer
local->detach(this);
#if !VIDEO_CLIENT_INPUT
if (auto localInput = std::dynamic_pointer_cast<VideoInput>(local)) {
// Stop old VideoInput
localInput->stopInput();
for (auto i = 0u; i != inputs.size(); ++i) {
auto videoInput = getVideoInput(inputs[i]);
{
std::lock_guard<std::mutex> lk(localInputsMtx_);
localInputs_.emplace_back(videoInput);
}
#endif
attachVideo(videoInput.get(), "", sip_utils::streamId("", i, MediaType::MEDIA_VIDEO));
}
if (input.empty()) {
JAMI_DBG("[mixer:%s] Input is empty, don't add it in the mixer", id_.c_str());
return;
}
// Re-attach videoInput to mixer
videoLocalSecondary_ = getVideoInput(input);
attachVideo(videoLocalSecondary_.get(), "", sip_utils::streamId("", 1, MediaType::MEDIA_VIDEO));
}
void
VideoMixer::stopInput()
VideoMixer::stopInput(const std::shared_ptr<VideoFrameActiveWriter>& input)
{
if (auto local = std::move(videoLocal_)) {
local->detach(this);
}
// Detach videoInputs from mixer
input->detach(this);
#if !VIDEO_CLIENT_INPUT
// Stop old VideoInput
if (auto oldInput = std::dynamic_pointer_cast<VideoInput>(input))
oldInput->stopInput();
#endif
}
void
VideoMixer::stopInputs()
{
std::lock_guard<std::mutex> lk(localInputsMtx_);
for (auto& input: localInputs_)
stopInput(input);
localInputs_.clear();
}
void
@@ -527,15 +526,15 @@ VideoMixer::setParameters(int width, int height, AVPixelFormat format)
if (previous_p)
libav_utils::fillWithBlack(previous_p->pointer());
start_sink();
startSink();
updateLayout();
startTime_ = av_gettime();
}
void
VideoMixer::start_sink()
VideoMixer::startSink()
{
stop_sink();
stopSink();
if (width_ == 0 or height_ == 0) {
JAMI_WARN("[mixer:%s] MX: unable to start with zero-sized output", id_.c_str());
@@ -552,7 +551,7 @@ VideoMixer::start_sink()
}
void
VideoMixer::stop_sink()
VideoMixer::stopSink()
{
this->detach(sink_.get());
sink_->stop();
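
For a quick, standalone illustration of the slot handling in the new VideoMixer::switchInput(input, idx) above, here is a toy program (not jamid code; the URIs and helper name are invented for the example). The real method works on shared_ptr<VideoFrameActiveWriter> and attaches each slot under sip_utils::streamId("", idx, MediaType::MEDIA_VIDEO).

// Toy program only -- demonstrates the grow-on-demand and replace semantics.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

using Input = std::shared_ptr<std::string>; // stand-in for a video input

static void switchInputAt(std::vector<Input>& inputs, const std::string& uri, unsigned idx)
{
    Input oldInput;
    if (idx < inputs.size())
        oldInput = std::move(inputs[idx]); // remember the input being replaced
    else
        inputs.resize(idx + 1);            // grow the vector on demand
    inputs[idx] = std::make_shared<std::string>(uri);
    if (oldInput)
        std::cout << "stopping old input: " << *oldInput << '\n'; // real code calls stopInput()
}

int main()
{
    std::vector<Input> inputs;
    switchInputAt(inputs, "camera://front", 0);   // slot 0: host camera
    switchInputAt(inputs, "sharing://screen", 1); // slot 1: e.g. a screen share
    switchInputAt(inputs, "camera://back", 0);    // replaces (and "stops") slot 0
    std::cout << "host inputs: " << inputs.size() << '\n'; // prints 2
}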


@@ -75,9 +75,22 @@ public:
void attached(Observable<std::shared_ptr<MediaFrame>>* ob) override;
void detached(Observable<std::shared_ptr<MediaFrame>>* ob) override;
void switchInput(const std::string& input);
void switchSecondaryInput(const std::string& input);
void stopInput();
/**
* Set all inputs at once
* @param inputs New inputs
* @note previous inputs will be stopped
*/
void switchInputs(const std::vector<std::string>& inputs);
/**
* Update one specific output
* @param input New input for this index
* @param idx Media's index
*/
void switchInput(const std::string& input, unsigned idx);
/**
* Stop all inputs
*/
void stopInputs();
void setActiveStream(const std::string& id);
void resetActiveStream()
@@ -105,7 +118,11 @@ public:
MediaStream getStream(const std::string& name) const;
std::shared_ptr<VideoFrameActiveWriter>& getVideoLocal() { return videoLocal_; }
std::shared_ptr<VideoFrameActiveWriter> getVideoLocal() const {
if (!localInputs_.empty())
return *localInputs_.begin();
return {};
}
void updateLayout();
@@ -152,8 +169,8 @@ private:
const std::shared_ptr<VideoFrame>& input,
int index);
void start_sink();
void stop_sink();
void startSink();
void stopSink();
void process();
@@ -166,8 +183,9 @@ private:
std::shared_ptr<SinkClient> sink_;
std::chrono::time_point<std::chrono::steady_clock> nextProcess_;
std::shared_ptr<VideoFrameActiveWriter> videoLocal_;
std::shared_ptr<VideoFrameActiveWriter> videoLocalSecondary_;
std::mutex localInputsMtx_;
std::vector<std::shared_ptr<VideoFrameActiveWriter>> localInputs_ {};
void stopInput(const std::shared_ptr<VideoFrameActiveWriter>& input);
VideoScaler scaler_;