recorder: unbind add/remove streams from the toggle

GitLab: #762
GitLab: #446
Change-Id: I6efbd78314c07f772935ead083db8e9ae119d834
Aline Gondim Santos
2023-01-31 10:41:12 -03:00
committed by Sébastien Blin
parent 977ee99c11
commit 52d54291ad
29 changed files with 620 additions and 217 deletions
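This change moves recorder stream binding out of the record toggle: each RtpSession now receives the call's MediaRecorder at construction, initRecorder()/deinitRecorder() take no arguments, and streams are attached through a recorder callback that the decoder fires once after its first decoded frame, so individual streams can be added or removed on mute or renegotiation without toggling recording. A minimal sketch of the resulting bind/unbind path, condensed from the AudioRtpSession changes below (names mirror the patch; this is an illustration, not the committed code):

// Bind: the session registers a callback; the decoder invokes it after the
// first decoded frame, when the MediaStream parameters are known.
receiveThread_->setRecorderCallback([this](const MediaStream& ms) {
    if (!recorder_ || !receiveThread_)
        return;
    if (auto ob = recorder_->addStream(ms))   // create or reuse the recorder input
        receiveThread_->attach(ob);           // start feeding decoded frames
});

// Unbind (mute, renegotiation, deinitRecorder()): the mirror image.
auto ms = receiveThread_->getInfo();
if (auto ob = recorder_->getStream(ms.name)) {
    receiveThread_->detach(ob);               // stop feeding frames first
    recorder_->removeStream(ms);              // then drop the recorder input
}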


@ -86,8 +86,6 @@ Call::Call(const std::shared_ptr<Account>& account,
, type_(type)
, account_(account)
{
updateDetails(details);
addStateListener([this](Call::CallState call_state,
Call::ConnectionState cnx_state,
UNUSED int code) {
@ -367,14 +365,6 @@ Call::toggleRecording()
return startRecording;
}
void
Call::updateDetails(const std::map<std::string, std::string>& details)
{
const auto& iter = details.find(libjami::Call::Details::AUDIO_ONLY);
if (iter != std::end(details))
isAudioOnly_ = iter->second == TRUE_STR;
}
std::map<std::string, std::string>
Call::getDetails() const
{


@ -426,15 +426,6 @@ public:
virtual void restartMediaSender() = 0;
/**
* Update call details after creation.
* @param details to update
*
* \note No warranty to update any details, only some details can be modified.
* See the implementation for more ... details :-).
*/
void updateDetails(const std::map<std::string, std::string>& details);
// Media status methods
virtual bool hasVideo() const = 0;
virtual bool isCaptureDeviceMuted(const MediaType& mediaType) const = 0;


@ -137,6 +137,10 @@ AudioInput::readFromDevice()
audioFrame = resampler_->resample(std::move(audioFrame), format_);
resizer_->enqueue(std::move(audioFrame));
if (recorderCallback_ && settingMS_.exchange(false)) {
recorderCallback_(MediaStream("a:local", format_, sent_samples));
}
jami_tracepoint(audio_input_read_from_device_end, id_.c_str());
}
@ -329,6 +333,21 @@ AudioInput::foundDevOpts(const DeviceParams& params)
}
}
void
AudioInput::setRecorderCallback(
const std::function<void(const MediaStream& ms)>&
cb)
{
settingMS_.exchange(true);
recorderCallback_ = cb;
if (decoder_)
decoder_->setContextCallback([this]() {
if (recorderCallback_)
recorderCallback_(getInfo());
});
}
bool
AudioInput::createDecoder()
{
@ -366,6 +385,10 @@ AudioInput::createDecoder()
decoder_ = std::move(decoder);
foundDevOpts(devOpts_);
decoder_->setContextCallback([this]() {
if (recorderCallback_)
recorderCallback_(getInfo());
});
return true;
}


@ -71,6 +71,8 @@ public:
onSuccessfulSetup_ = cb;
}
void setRecorderCallback(const std::function<void(const MediaStream& ms)>& cb);
private:
void readFromDevice();
void readFromFile();
@ -114,6 +116,8 @@ private:
std::chrono::time_point<std::chrono::high_resolution_clock> wakeUp_;
std::function<void(MediaType, bool)> onSuccessfulSetup_;
std::function<void(const MediaStream& ms)> recorderCallback_;
std::atomic_bool settingMS_ {true};
};
} // namespace jami


@ -60,6 +60,10 @@ AudioReceiveThread::setup()
notify(frame);
ringbuffer_->put(std::static_pointer_cast<AudioFrame>(frame));
}));
audioDecoder_->setContextCallback([this]() {
if (recorderCallback_)
recorderCallback_(getInfo());
});
audioDecoder_->setInterruptCallback(interruptCb, this);
// custom_io so the SDP demuxer will not open any UDP connections
@ -131,6 +135,18 @@ AudioReceiveThread::addIOContext(SocketPair& socketPair)
demuxContext_.reset(socketPair.createIOContext(mtu_));
}
void
AudioReceiveThread::setRecorderCallback(
const std::function<void(const MediaStream& ms)>& cb)
{
recorderCallback_ = cb;
if (audioDecoder_)
audioDecoder_->setContextCallback([this]() {
if (recorderCallback_)
recorderCallback_(getInfo());
});
}
MediaStream
AudioReceiveThread::getInfo() const
{


@ -59,6 +59,8 @@ public:
onSuccessfulSetup_ = cb;
}
void setRecorderCallback(const std::function<void(const MediaStream& ms)>& cb);
private:
NON_COPYABLE(AudioReceiveThread);
@ -90,6 +92,7 @@ private:
void cleanup();
std::function<void(MediaType, bool)> onSuccessfulSetup_;
std::function<void(const MediaStream& ms)> recorderCallback_;
};
} // namespace jami


@ -47,11 +47,14 @@
namespace jami {
AudioRtpSession::AudioRtpSession(const std::string& callId, const std::string& streamId)
AudioRtpSession::AudioRtpSession(const std::string& callId,
const std::string& streamId,
const std::shared_ptr<MediaRecorder>& rec)
: RtpSession(callId, streamId, MediaType::MEDIA_AUDIO)
, rtcpCheckerThread_([] { return true; }, [this] { processRtcpChecker(); }, [] {})
{
recorder_ = rec;
JAMI_DBG("Created Audio RTP session: %p - call Id %s", this, callId_.c_str());
// don't move this into the initializer list or Cthulhu will emerge
@ -60,6 +63,7 @@ AudioRtpSession::AudioRtpSession(const std::string& callId, const std::string& s
AudioRtpSession::~AudioRtpSession()
{
deinitRecorder();
stop();
JAMI_DBG("Destroyed Audio RTP session: %p - call Id %s", this, callId_.c_str());
}
@ -91,6 +95,7 @@ AudioRtpSession::startSender()
// sender sets up input correctly, we just keep a reference in case startSender is called
audioInput_ = jami::getAudioInput(callId_);
audioInput_->setRecorderCallback([this](const MediaStream& ms) { attachLocalRecorder(ms); });
audioInput_->setMuted(muteState_);
audioInput_->setSuccessfulSetupCb(onSuccessfulSetup_);
auto newParams = audioInput_->switchInput(input_);
@ -168,6 +173,8 @@ AudioRtpSession::startReceiver()
accountAudioCodec->audioformat,
receive_.receiving_sdp,
mtu_));
receiveThread_->setRecorderCallback([this](const MediaStream& ms) { attachRemoteRecorder(ms); });
receiveThread_->addIOContext(*socketPair_);
receiveThread_->setSuccessfulSetupCb(onSuccessfulSetup_);
receiveThread_->startReceiver();
@ -244,12 +251,28 @@ AudioRtpSession::stop()
}
void
AudioRtpSession::setMuted(bool muted, Direction)
AudioRtpSession::setMuted(bool muted, Direction dir)
{
std::lock_guard<std::recursive_mutex> lock(mutex_);
if (dir == Direction::SEND) {
muteState_ = muted;
if (audioInput_)
audioInput_->setMuted(muted);
} else {
if (receiveThread_) {
auto ms = receiveThread_->getInfo();
if (muted) {
if (auto ob = recorder_->getStream(ms.name)) {
receiveThread_->detach(ob);
recorder_->removeStream(ms);
}
} else {
if (auto ob = recorder_->addStream(ms)) {
receiveThread_->attach(ob);
}
}
}
}
}
void
@ -342,25 +365,55 @@ AudioRtpSession::processRtcpChecker()
}
void
AudioRtpSession::initRecorder(std::shared_ptr<MediaRecorder>& rec)
AudioRtpSession::attachRemoteRecorder(const MediaStream& ms)
{
if (receiveThread_)
receiveThread_->attach(rec->addStream(receiveThread_->getInfo()));
if (auto input = jami::getAudioInput(callId_))
input->attach(rec->addStream(input->getInfo()));
if (!recorder_ || !receiveThread_)
return;
if (auto ob = recorder_->addStream(ms)) {
receiveThread_->attach(ob);
}
}
void
AudioRtpSession::deinitRecorder(std::shared_ptr<MediaRecorder>& rec)
AudioRtpSession::attachLocalRecorder(const MediaStream& ms)
{
if (!recorder_ || !audioInput_)
return;
if (auto ob = recorder_->addStream(ms)) {
audioInput_->attach(ob);
}
}
void
AudioRtpSession::initRecorder()
{
if (!recorder_)
return;
if (receiveThread_)
receiveThread_->setRecorderCallback(
[this](const MediaStream& ms) { attachRemoteRecorder(ms); });
if (audioInput_)
audioInput_->setRecorderCallback(
[this](const MediaStream& ms) { attachLocalRecorder(ms); });
}
void
AudioRtpSession::deinitRecorder()
{
if (!recorder_)
return;
if (receiveThread_) {
if (auto ob = rec->getStream(receiveThread_->getInfo().name)) {
auto ms = receiveThread_->getInfo();
if (auto ob = recorder_->getStream(ms.name)) {
receiveThread_->detach(ob);
recorder_->removeStream(ms);
}
}
if (auto input = jami::getAudioInput(callId_)) {
if (auto ob = rec->getStream(input->getInfo().name)) {
input->detach(ob);
if (audioInput_) {
auto ms = audioInput_->getInfo();
if (auto ob = recorder_->getStream(ms.name)) {
audioInput_->detach(ob);
recorder_->removeStream(ms);
}
}
}


@ -24,6 +24,7 @@
#include "audiobuffer.h"
#include "media_device.h"
#include "rtp_session.h"
#include "media_stream.h"
#include "threadloop.h"
@ -50,7 +51,9 @@ struct RTCPInfo
class AudioRtpSession : public RtpSession
{
public:
AudioRtpSession(const std::string& callId, const std::string& streamId);
AudioRtpSession(const std::string& callId,
const std::string& streamId,
const std::shared_ptr<MediaRecorder>& rec);
virtual ~AudioRtpSession();
void start(std::unique_ptr<IceSocket> rtp_sock, std::unique_ptr<IceSocket> rtcp_sock) override;
@ -58,8 +61,8 @@ public:
void stop() override;
void setMuted(bool muted, Direction dir = Direction::SEND) override;
void initRecorder(std::shared_ptr<MediaRecorder>& rec) override;
void deinitRecorder(std::shared_ptr<MediaRecorder>& rec) override;
void initRecorder() override;
void deinitRecorder() override;
std::shared_ptr<AudioInput>& getAudioLocal() { return audioInput_; }
std::unique_ptr<AudioReceiveThread>& getAudioReceive() { return receiveThread_; }
@ -91,6 +94,9 @@ private:
std::chrono::seconds rtcp_checking_interval {4};
std::function<void(bool)> voiceCallback_;
void attachRemoteRecorder(const MediaStream& ms);
void attachLocalRecorder(const MediaStream& ms);
};
} // namespace jami


@ -482,6 +482,9 @@ PulseLayer::stopStream(AudioDeviceType type)
stream->stop();
stream.reset();
if (type == AudioDeviceType::PLAYBACK || type == AudioDeviceType::ALL)
playbackChanged(false);
std::lock_guard<std::mutex> lk(mutex_);
if (not playback_ and not ringtone_ and not record_) {
pendingStreams = 0;


@ -200,4 +200,10 @@ MediaAttribute::toString(bool full) const
return descr.str();
}
bool
MediaAttribute::hasValidVideo()
{
return type_ == MediaType::MEDIA_VIDEO && enabled_ && !muted_ && !onHold_;
}
} // namespace jami


@ -102,5 +102,7 @@ public:
// For instance, muting the audio can be done by disabling the
// audio input (capture) of the encoding session, resulting in
// sending RTP packets without actual audio (silence).
bool hasValidVideo();
};
} // namespace jami


@ -696,6 +696,11 @@ MediaDecoder::decode(AVPacket& packet)
if (callback_)
callback_(std::move(f));
if (contextCallback_ && firstDecode_.load()) {
firstDecode_.exchange(false);
contextCallback_();
}
return DecodeStatus::FrameFinished;
}
return DecodeStatus::Success;


@ -214,6 +214,12 @@ public:
void setFEC(bool enable) { fecEnabled_ = enable; }
void setContextCallback(const std::function<void()>& cb)
{
firstDecode_.exchange(true);
contextCallback_ = cb;
}
private:
NON_COPYABLE(MediaDecoder);
@ -253,6 +259,9 @@ private:
bool fecEnabled_ {false};
std::function<void()> contextCallback_;
std::atomic_bool firstDecode_ {true};
protected:
AVDictionary* options_ = nullptr;
};
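The decoder side of this wiring is a one-shot "context callback": setContextCallback() stores the callback and re-arms firstDecode_, and decode() invokes it once after the first frame is produced, i.e. once the stream's real parameters are known. A minimal sketch of the caller-side use (it mirrors the receive-thread changes in this patch; the lambda body is illustrative):

// One-shot context callback: runs after the first decoded frame, when
// getInfo() reports valid stream parameters, then stays quiet until
// setContextCallback() is called again.
audioDecoder_->setContextCallback([this]() {
    if (recorderCallback_)
        recorderCallback_(getInfo());  // hand the now-valid MediaStream to the RTP session
});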


@ -312,7 +312,7 @@ MediaEncoder::initStream(const SystemCodecInfo& systemCodecInfo, AVBufferRef* fr
JAMI_WARN("Using hardware encoding for %s with %s ",
avcodec_get_name(static_cast<AVCodecID>(systemCodecInfo.avcodecId)),
it.getName().c_str());
encoders_.push_back(encoderCtx);
encoders_.emplace_back(encoderCtx);
break;
}
}
@ -326,7 +326,7 @@ MediaEncoder::initStream(const SystemCodecInfo& systemCodecInfo, AVBufferRef* fr
static_cast<AVCodecID>(systemCodecInfo.avcodecId),
videoOpts_.bitrate);
readConfig(encoderCtx);
encoders_.push_back(encoderCtx);
encoders_.emplace_back(encoderCtx);
if (avcodec_open2(encoderCtx, outputCodec_, &options_) < 0)
throw MediaEncoderException("Could not open encoder");
}
@ -495,6 +495,8 @@ MediaEncoder::encode(AVFrame* frame, int streamIdx)
}
}
int ret = 0;
if (streamIdx >= encoders_.size())
return -1;
AVCodecContext* encoderCtx = encoders_[streamIdx];
AVPacket pkt;
av_init_packet(&pkt);


@ -69,7 +69,12 @@ struct MediaRecorder::StreamObserver : public Observer<std::shared_ptr<MediaFram
: info(ms)
, cb_(func) {};
~StreamObserver() {};
~StreamObserver()
{
for (auto& obs : observablesFrames_) {
obs->detach(this);
}
};
void update(Observable<std::shared_ptr<MediaFrame>>* /*ob*/,
const std::shared_ptr<MediaFrame>& m) override
@ -117,15 +122,32 @@ struct MediaRecorder::StreamObserver : public Observer<std::shared_ptr<MediaFram
#endif
}
void attached(Observable<std::shared_ptr<MediaFrame>>* obs) override
{
observablesFrames_.insert(obs);
}
void detached(Observable<std::shared_ptr<MediaFrame>>* obs) override
{
auto it = observablesFrames_.find(obs);
if (it != observablesFrames_.end())
observablesFrames_.erase(it);
}
private:
std::function<void(const std::shared_ptr<MediaFrame>&)> cb_;
std::unique_ptr<MediaFilter> videoRotationFilter_ {};
int rotation_ = 0;
std::set<Observable<std::shared_ptr<MediaFrame>>*> observablesFrames_;
};
MediaRecorder::MediaRecorder() {}
MediaRecorder::~MediaRecorder() {}
MediaRecorder::~MediaRecorder()
{
flush();
reset();
}
bool
MediaRecorder::isRecording() const
@ -168,6 +190,7 @@ MediaRecorder::startRecording()
startTime_ = *std::localtime(&t);
startTimeStamp_ = av_gettime();
std::lock_guard<std::mutex> lk(encoderMtx_);
encoder_.reset(new MediaEncoder);
JAMI_DBG() << "Start recording '" << getPath() << "'";
@ -175,6 +198,7 @@ MediaRecorder::startRecording()
isRecording_ = true;
// start thread after isRecording_ is set to true
dht::ThreadPool::computation().run([rec = shared_from_this()] {
std::lock_guard<std::mutex> lk(rec->encoderMtx_);
while (rec->isRecording()) {
std::shared_ptr<MediaFrame> frame;
// get frame from queue
@ -191,7 +215,7 @@ MediaRecorder::startRecording()
}
try {
// encode frame
if (frame && frame->pointer()) {
if (rec->encoder_ && frame && frame->pointer()) {
#ifdef ENABLE_VIDEO
bool isVideo = (frame->pointer()->width > 0 && frame->pointer()->height > 0);
rec->encoder_->encode(frame->pointer(),
@ -208,6 +232,7 @@ MediaRecorder::startRecording()
rec->reset(); // allows recorder to be reused in same call
});
}
interrupted_ = false;
return 0;
}
@ -226,32 +251,53 @@ MediaRecorder::stopRecording()
Observer<std::shared_ptr<MediaFrame>>*
MediaRecorder::addStream(const MediaStream& ms)
{
std::lock_guard<std::mutex> lk(mutexStreamSetup_);
if (audioOnly_ && ms.isVideo) {
JAMI_ERR() << "Trying to add video stream to audio only recording";
return nullptr;
}
if (ms.isVideo && ms.format < 0) {
JAMI_ERR() << "Trying to add invalid video stream to recording";
if (ms.format < 0 || ms.name.empty()) {
JAMI_ERR() << "Trying to add invalid stream to recording";
return nullptr;
}
auto ptr = std::make_unique<StreamObserver>(ms,
auto it = streams_.find(ms.name);
if (it == streams_.end()) {
auto streamPtr = std::make_unique<StreamObserver>(ms,
[this,
ms](const std::shared_ptr<MediaFrame>& frame) {
onFrame(ms.name, frame);
});
auto p = streams_.insert(std::make_pair(ms.name, std::move(ptr)));
if (p.second) {
JAMI_DBG() << "Recorder input #" << streams_.size() << ": " << ms;
if (ms.isVideo)
hasVideo_ = true;
else
hasAudio_ = true;
return p.first->second.get();
it = streams_.insert(std::make_pair(ms.name, std::move(streamPtr))).first;
JAMI_LOG("[Recorder: {:p}] Recorder input #{}: {:s}", fmt::ptr(this), streams_.size(), ms.name);
} else {
JAMI_WARN() << "Recorder already has '" << ms.name << "' as input";
return p.first->second.get();
JAMI_LOG("[Recorder: {:p}] Recorder already has '{:s}' as input", fmt::ptr(this), ms.name);
}
if (ms.isVideo)
setupVideoOutput();
else
setupAudioOutput();
return it->second.get();
}
void
MediaRecorder::removeStream(const MediaStream& ms)
{
std::lock_guard<std::mutex> lk(mutexStreamSetup_);
auto it = streams_.find(ms.name);
if (it == streams_.end()) {
JAMI_LOG("[Recorder: {:p}] Recorder no stream to remove", fmt::ptr(this));
} else {
JAMI_LOG("[Recorder: {:p}] Recorder removing '{:s}'", fmt::ptr(this), ms.name);
streams_.erase(it);
if (ms.isVideo)
setupVideoOutput();
else
setupAudioOutput();
}
return;
}
Observer<std::shared_ptr<MediaFrame>>*
@ -266,9 +312,11 @@ MediaRecorder::getStream(const std::string& name) const
void
MediaRecorder::onFrame(const std::string& name, const std::shared_ptr<MediaFrame>& frame)
{
if (not isRecording_)
if (not isRecording_ || interrupted_)
return;
std::lock_guard<std::mutex> lk(mutexStreamSetup_);
// copy frame to not mess with the original frame's pts (does not actually copy frame data)
std::unique_ptr<MediaFrame> clone;
const auto& ms = streams_[name]->info;
@ -303,15 +351,23 @@ MediaRecorder::onFrame(const std::string& name, const std::shared_ptr<MediaFrame
| AV_ROUND_PASS_MINMAX));
std::unique_ptr<MediaFrame> filteredFrame;
#ifdef ENABLE_VIDEO
if (ms.isVideo) {
if (ms.isVideo && videoFilter_ && outputVideoFilter_) {
std::lock_guard<std::mutex> lk(mutexFilterVideo_);
videoFilter_->feedInput(clone->pointer(), name);
filteredFrame = videoFilter_->readOutput();
} else {
auto videoFilterOutput = videoFilter_->readOutput();
if (videoFilterOutput) {
outputVideoFilter_->feedInput(videoFilterOutput->pointer(), "input");
filteredFrame = outputVideoFilter_->readOutput();
}
} else if (audioFilter_ && outputAudioFilter_) {
#endif // ENABLE_VIDEO
std::lock_guard<std::mutex> lk(mutexFilterAudio_);
audioFilter_->feedInput(clone->pointer(), name);
filteredFrame = audioFilter_->readOutput();
auto audioFilterOutput = audioFilter_->readOutput();
if (audioFilterOutput) {
outputAudioFilter_->feedInput(audioFilterOutput->pointer(), "input");
filteredFrame = outputAudioFilter_->readOutput();
}
#ifdef ENABLE_VIDEO
}
#endif // ENABLE_VIDEO
@ -348,32 +404,17 @@ MediaRecorder::initRecord()
#ifdef RING_ACCEL
encoder_->enableAccel(false); // TODO recorder has problems with hardware encoding
#endif
videoFilter_.reset();
if (hasVideo_) {
const MediaStream& videoStream = setupVideoOutput();
if (videoStream.format < 0) {
JAMI_ERR() << "Could not retrieve video recorder stream properties";
return -1;
}
MediaDescription args;
args.mode = RateMode::CQ;
encoder_->setOptions(videoStream);
encoder_->setOptions(args);
}
#endif // ENABLE_VIDEO
audioFilter_.reset();
if (hasAudio_) {
const MediaStream& audioStream = setupAudioOutput();
if (audioStream.format < 0) {
JAMI_ERR() << "Could not retrieve audio recorder stream properties";
return -1;
}
{
MediaStream audioStream;
audioStream.name = "audioOutput";
audioStream.format = 1;
audioStream.timeBase = rational<int>(1, 48000);
audioStream.sampleRate = 48000;
audioStream.nbChannels = 2;
encoder_->setOptions(audioStream);
}
if (hasAudio_) {
auto audioCodec = std::static_pointer_cast<jami::SystemAudioCodecInfo>(
getSystemCodecContainer()->searchCodecByName("opus", jami::MEDIA_AUDIO));
audioIdx_ = encoder_->addStream(*audioCodec.get());
@ -384,7 +425,23 @@ MediaRecorder::initRecord()
}
#ifdef ENABLE_VIDEO
if (hasVideo_) {
if (!audioOnly_) {
MediaStream videoStream;
videoStream.name = "videoOutput";
videoStream.format = 0;
videoStream.isVideo = true;
videoStream.timeBase = rational<int>(0, 1);
videoStream.width = 1280;
videoStream.height = 720;
videoStream.frameRate = rational<int>(30, 1);
videoStream.bitrate = Manager::instance().videoPreferences.getRecordQuality();
MediaDescription args;
args.mode = RateMode::CQ;
encoder_->setOptions(videoStream);
encoder_->setOptions(args);
auto videoCodec = std::static_pointer_cast<jami::SystemVideoCodecInfo>(
getSystemCodecContainer()->searchCodecByName("VP8", jami::MEDIA_VIDEO));
videoIdx_ = encoder_->addStream(*videoCodec.get());
@ -401,7 +458,7 @@ MediaRecorder::initRecord()
return 0;
}
MediaStream
void
MediaRecorder::setupVideoOutput()
{
MediaStream encoderStream, peer, local, mixer;
@ -432,8 +489,8 @@ MediaRecorder::setupVideoOutput()
int streams = peer.isValid() + local.isValid() + mixer.isValid();
switch (streams) {
case 0: {
JAMI_ERR("Trying to record a stream but none is valid");
break;
JAMI_WARN() << "Trying to record a video stream but none is valid";
return;
}
case 1: {
MediaStream inputStream;
@ -460,16 +517,38 @@ MediaRecorder::setupVideoOutput()
}
#ifdef ENABLE_VIDEO
if (ret >= 0) {
encoderStream = videoFilter_->getOutputParams();
encoderStream.bitrate = Manager::instance().videoPreferences.getRecordQuality();
JAMI_DBG() << "Recorder output: " << encoderStream;
} else {
if (ret < 0) {
JAMI_ERR() << "Failed to initialize video filter";
}
// setup output filter
if (!videoFilter_)
return;
MediaStream secondaryFilter = videoFilter_->getOutputParams();
secondaryFilter.name = "input";
if (outputVideoFilter_) {
outputVideoFilter_->flush();
outputVideoFilter_.reset();
}
outputVideoFilter_.reset(new MediaFilter);
float scaledHeight = 1280 * (float)secondaryFilter.height / (float)secondaryFilter.width;
std::string scaleFilter = "scale=1280:-2";
if (scaledHeight > 720)
scaleFilter += ",scale=-2:720";
ret = outputVideoFilter_->initialize(
"[input]" + scaleFilter + ",pad=1280:720:(ow-iw)/2:(oh-ih)/2,format=pix_fmts=yuv420p,fps=30",
{secondaryFilter});
if (ret < 0) {
JAMI_ERR() << "Failed to initialize output video filter";
}
#endif
return encoderStream;
return;
}
std::string
@ -509,7 +588,7 @@ MediaRecorder::buildVideoFilter(const std::vector<MediaStream>& peers,
return v.str();
}
MediaStream
void
MediaRecorder::setupAudioOutput()
{
MediaStream encoderStream, peer, local, mixer;
@ -539,6 +618,10 @@ MediaRecorder::setupAudioOutput()
int ret = -1;
int streams = peer.isValid() + local.isValid() + mixer.isValid();
switch (streams) {
case 0: {
JAMI_WARN() << "Trying to record a audio stream but none is valid";
return;
}
case 1: {
MediaStream inputStream;
if (peer.isValid())
@ -562,14 +645,31 @@ MediaRecorder::setupAudioOutput()
break;
}
if (ret >= 0) {
encoderStream = audioFilter_->getOutputParams();
JAMI_DBG() << "Recorder output: " << encoderStream;
} else {
if (ret < 0) {
JAMI_ERR() << "Failed to initialize audio filter";
return;
}
return encoderStream;
// setup output filter
if (!audioFilter_)
return;
MediaStream secondaryFilter = audioFilter_->getOutputParams();
secondaryFilter.name = "input";
if (outputAudioFilter_) {
outputAudioFilter_->flush();
outputAudioFilter_.reset();
}
outputAudioFilter_.reset(new MediaFilter);
ret = outputAudioFilter_
->initialize("[input]aformat=sample_fmts=s16:sample_rates=48000:channel_layouts=stereo",
{secondaryFilter});
if (ret < 0) {
JAMI_ERR() << "Failed to initialize output audio filter";
}
return;
}
std::string
@ -600,12 +700,15 @@ MediaRecorder::flush()
if (videoFilter_) {
std::lock_guard<std::mutex> lk(mutexFilterVideo_);
videoFilter_->flush();
outputVideoFilter_->flush();
}
if (audioFilter_) {
std::lock_guard<std::mutex> lk(mutexFilterAudio_);
audioFilter_->flush();
outputAudioFilter_->flush();
}
if (encoder_)
encoder_->flush();
}
@ -616,11 +719,11 @@ MediaRecorder::reset()
std::lock_guard<std::mutex> lk(mutexFrameBuff_);
frameBuff_.clear();
}
streams_.clear();
videoIdx_ = audioIdx_ = -1;
audioOnly_ = false;
videoFilter_.reset();
audioFilter_.reset();
outputAudioFilter_.reset();
outputVideoFilter_.reset();
encoder_.reset();
}

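With the recorder output filters now rebuilt on every addStream()/removeStream() (see setupVideoOutput()/setupAudioOutput() above), a media source is expected to stop delivering frames before the stream is removed, as the removeStream() documentation in the header diff below notes. A minimal usage sketch against a MediaRecorder rec; source here is a placeholder for any frame observable (in this patch: AudioInput, AudioReceiveThread, VideoInput or VideoReceiveThread):

// Add: register the input and wire the observer to the source.
if (auto ob = rec->addStream(ms))
    source->attach(ob);               // frames start flowing into onFrame()

// Remove: detach first, then drop the recorder input.
if (auto ob = rec->getStream(ms.name)) {
    source->detach(ob);               // stop delivering frames
    rec->removeStream(ms);            // rebuilds the output filter without this stream
}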

@ -96,6 +96,13 @@ public:
*/
Observer<std::shared_ptr<MediaFrame>>* addStream(const MediaStream& ms);
/**
* @brief Removes a stream from the recorder.
*
* The caller must also detach the stream's observer from the media source.
*/
void removeStream(const MediaStream& ms);
/**
* @brief Gets the stream observer.
*
@ -128,10 +135,11 @@ private:
void reset();
int initRecord();
MediaStream setupVideoOutput();
void setupVideoOutput();
std::string buildVideoFilter(const std::vector<MediaStream>& peers,
const MediaStream& local) const;
MediaStream setupAudioOutput();
void setupAudioOutput();
std::mutex mutexStreamSetup_;
std::string buildAudioFilter(const std::vector<MediaStream>& peers,
const MediaStream& local) const;
@ -148,15 +156,18 @@ private:
std::string description_;
std::unique_ptr<MediaEncoder> encoder_;
std::mutex encoderMtx_;
std::unique_ptr<MediaFilter> outputVideoFilter_;
std::unique_ptr<MediaFilter> outputAudioFilter_;
std::unique_ptr<MediaFilter> videoFilter_;
std::unique_ptr<MediaFilter> audioFilter_;
bool hasAudio_ {false};
bool hasVideo_ {false};
int videoIdx_ = -1;
int audioIdx_ = -1;
bool isRecording_ = false;
bool audioOnly_ = false;
int lastVideoPts_ = 0;
std::condition_variable cv_;
std::atomic_bool interrupted_ {false};


@ -37,7 +37,7 @@ public:
virtual bool isPeerRecording() const { return peerRecording_; }
virtual void peerMuted(bool muted) = 0;
virtual void peerMuted(bool muted, int streamIdx) = 0;
virtual bool isPeerMuted() const { return peerMuted_; }


@ -114,8 +114,6 @@ Recordable::stopRecording()
recorder_->stopRecording();
recording_ = false;
// new recorder since this one may still be recording
recorder_ = std::make_shared<MediaRecorder>();
}
bool


@ -71,8 +71,8 @@ public:
onSuccessfulSetup_ = cb;
}
virtual void initRecorder(std::shared_ptr<MediaRecorder>& rec) = 0;
virtual void deinitRecorder(std::shared_ptr<MediaRecorder>& rec) = 0;
virtual void initRecorder() = 0;
virtual void deinitRecorder() = 0;
std::shared_ptr<AccountCodecInfo> getCodec() const { return send_.codec; }
const IpAddr& getSendAddr() const { return send_.addr; };
const IpAddr& getRecvAddr() const { return receive_.addr; };
@ -89,7 +89,7 @@ protected:
MediaDescription send_;
MediaDescription receive_;
uint16_t mtu_;
std::shared_ptr<MediaRecorder> recorder_;
std::function<void(MediaType, bool)> onSuccessfulSetup_;
std::string getRemoteRtpUri() const { return "rtp://" + send_.addr.toString(true); }


@ -261,6 +261,18 @@ VideoInput::configureFilePlayback(const std::string&,
sink_->setFrameSize(decoder_->getWidth(), decoder_->getHeight());
}
void
VideoInput::setRecorderCallback(
const std::function<void(const MediaStream& ms)>& cb)
{
recorderCallback_ = cb;
if (decoder_)
decoder_->setContextCallback([this]() {
if (recorderCallback_)
recorderCallback_(getInfo());
});
}
void
VideoInput::createDecoder()
{
@ -346,10 +358,16 @@ VideoInput::createDecoder()
onSuccessfulSetup_(MEDIA_VIDEO, 0);
decoder_ = std::move(decoder);
foundDecOpts(decOpts_);
/* Signal the client about readable sink */
sink_->setFrameSize(decoder_->getWidth(), decoder_->getHeight());
decoder_->setContextCallback([this]() {
if (recorderCallback_)
recorderCallback_(getInfo());
});
}
void


@ -53,7 +53,7 @@ class SinkClient;
enum class VideoInputMode { ManagedByClient, ManagedByDaemon, Undefined };
class VideoInput : public VideoGenerator, public std::enable_shared_from_this<VideoInput>
class VideoInput : public VideoGenerator
{
public:
VideoInput(VideoInputMode inputMode = VideoInputMode::Undefined,
@ -87,6 +87,8 @@ public:
void setupSink();
void stopSink();
void setRecorderCallback(const std::function<void(const MediaStream& ms)>& cb);
#if VIDEO_CLIENT_INPUT
/*
* these functions are used to pass buffer from/to the daemon
@ -169,6 +171,7 @@ private:
std::atomic_bool paused_ {true};
std::function<void(MediaType, bool)> onSuccessfulSetup_;
std::function<void(const MediaStream& ms)> recorderCallback_;
};
} // namespace video


@ -103,6 +103,10 @@ VideoReceiveThread::setup()
displayMatrix.release());
publishFrame(std::static_pointer_cast<VideoFrame>(frame));
}));
videoDecoder_->setContextCallback([this]() {
if (recorderCallback_)
recorderCallback_(getInfo());
});
videoDecoder_->setResolutionChangedCallback([this](int width, int height) {
dstWidth_ = width;
dstHeight_ = height;
@ -188,6 +192,18 @@ VideoReceiveThread::addIOContext(SocketPair& socketPair)
demuxContext_.reset(socketPair.createIOContext(mtu_));
}
void
VideoReceiveThread::setRecorderCallback(
const std::function<void(const MediaStream& ms)>& cb)
{
recorderCallback_ = cb;
if (videoDecoder_)
videoDecoder_->setContextCallback([this]() {
if (recorderCallback_)
recorderCallback_(getInfo());
});
}
void
VideoReceiveThread::decodeFrame()
{


@ -86,6 +86,9 @@ public:
onSuccessfulSetup_ = cb;
}
void setRecorderCallback(
const std::function<void(const MediaStream& ms)>& cb);
private:
NON_COPYABLE(VideoReceiveThread);
@ -123,6 +126,7 @@ private:
std::function<void(void)> keyFrameRequestCallback_;
std::function<void(MediaType, bool)> onSuccessfulSetup_;
std::function<void(const MediaStream& ms)> recorderCallback_;
};
} // namespace video


@ -61,12 +61,14 @@ constexpr auto DELAY_AFTER_REMB_DEC = std::chrono::milliseconds(500);
VideoRtpSession::VideoRtpSession(const string& callId,
const string& streamId,
const DeviceParams& localVideoParams)
const DeviceParams& localVideoParams,
const std::shared_ptr<MediaRecorder>& rec)
: RtpSession(callId, streamId, MediaType::MEDIA_VIDEO)
, localVideoParams_(localVideoParams)
, videoBitrateInfo_ {}
, rtcpCheckerThread_([] { return true; }, [this] { processRtcpChecker(); }, [] {})
{
recorder_ = rec;
setupVideoBitrateInfo(); // reset bitrate
cc = std::make_unique<CongestionControl>();
JAMI_DBG("[%p] Video RTP session created for call %s", this, callId_.c_str());
@ -74,6 +76,7 @@ VideoRtpSession::VideoRtpSession(const string& callId,
VideoRtpSession::~VideoRtpSession()
{
deinitRecorder();
stop();
JAMI_DBG("[%p] Video RTP session destroyed", this);
}
@ -128,6 +131,10 @@ VideoRtpSession::startSender()
auto input = getVideoInput(input_);
videoLocal_ = input;
if (input) {
videoLocal_->setRecorderCallback(
[this](const MediaStream& ms) {
attachLocalRecorder(ms);
});
auto newParams = input->getParams();
try {
if (newParams.valid()
@ -272,6 +279,8 @@ VideoRtpSession::startReceiver()
if (activeStream)
videoMixer_->setActiveStream(streamId_);
}
receiveThread_->setRecorderCallback(
[this](const MediaStream& ms) { attachRemoteRecorder(ms); });
} else {
JAMI_DBG("[%p] Video receiver disabled", this);
@ -316,6 +325,12 @@ VideoRtpSession::stopReceiver()
if (socketPair_)
socketPair_->setReadBlockingMode(false);
auto ms = receiveThread_->getInfo();
if (auto ob = recorder_->getStream(ms.name)) {
receiveThread_->detach(ob);
recorder_->removeStream(ms);
}
receiveThread_->stopLoop();
receiveThread_->stopSink();
}
@ -417,6 +432,13 @@ VideoRtpSession::setMuted(bool mute, Direction dir)
}
if ((send_.onHold = mute)) {
if (videoLocal_) {
auto ms = videoLocal_->getInfo();
if (auto ob = recorder_->getStream(ms.name)) {
videoLocal_->detach(ob);
recorder_->removeStream(ms);
}
}
stopSender();
} else {
restartSender();
@ -431,6 +453,13 @@ VideoRtpSession::setMuted(bool mute, Direction dir)
}
if ((receive_.onHold = mute)) {
if (receiveThread_) {
auto ms = receiveThread_->getInfo();
if (auto ob = recorder_->getStream(ms.name)) {
receiveThread_->detach(ob);
recorder_->removeStream(ms);
}
}
stopReceiver();
} else {
startReceiver();
@ -738,35 +767,57 @@ VideoRtpSession::processRtcpChecker()
}
void
VideoRtpSession::initRecorder(std::shared_ptr<MediaRecorder>& rec)
VideoRtpSession::attachRemoteRecorder(const MediaStream& ms)
{
if (receiveThread_) {
if (auto ob = rec->addStream(receiveThread_->getInfo())) {
if (!recorder_ || !receiveThread_)
return;
if (auto ob = recorder_->addStream(ms)) {
receiveThread_->attach(ob);
}
}
if (Manager::instance().videoPreferences.getRecordPreview()) {
if (auto input = std::static_pointer_cast<VideoInput>(videoLocal_)) {
if (auto ob = rec->addStream(input->getInfo())) {
input->attach(ob);
}
}
}
}
void
VideoRtpSession::deinitRecorder(std::shared_ptr<MediaRecorder>& rec)
VideoRtpSession::attachLocalRecorder(const MediaStream& ms)
{
if (!rec)
if (!recorder_ || !videoLocal_ || !Manager::instance().videoPreferences.getRecordPreview())
return;
if (auto ob = recorder_->addStream(ms)) {
videoLocal_->attach(ob);
}
}
void
VideoRtpSession::initRecorder()
{
if (!recorder_)
return;
if (receiveThread_) {
if (auto ob = rec->getStream(receiveThread_->getInfo().name)) {
receiveThread_->setRecorderCallback(
[this](const MediaStream& ms) { attachRemoteRecorder(ms); });
}
if (videoLocal_ && !send_.onHold) {
videoLocal_->setRecorderCallback(
[this](const MediaStream& ms) { attachLocalRecorder(ms); });
}
}
void
VideoRtpSession::deinitRecorder()
{
if (!recorder_)
return;
if (receiveThread_) {
auto ms = receiveThread_->getInfo();
if (auto ob = recorder_->getStream(ms.name)) {
receiveThread_->detach(ob);
recorder_->removeStream(ms);
}
}
if (auto input = std::static_pointer_cast<VideoInput>(videoLocal_)) {
if (auto ob = rec->getStream(input->getInfo().name)) {
input->detach(ob);
if (videoLocal_) {
auto ms = videoLocal_->getInfo();
if (auto ob = recorder_->getStream(ms.name)) {
videoLocal_->detach(ob);
recorder_->removeStream(ms);
}
}
}


@ -70,7 +70,10 @@ class VideoRtpSession : public RtpSession
public:
using BaseType = RtpSession;
VideoRtpSession(const std::string& callId, const std::string& streamId, const DeviceParams& localVideoParams);
VideoRtpSession(const std::string& callId,
const std::string& streamId,
const DeviceParams& localVideoParams,
const std::shared_ptr<MediaRecorder>& rec);
~VideoRtpSession();
void setRequestKeyFrameCallback(std::function<void(void)> cb);
@ -97,8 +100,8 @@ public:
void exitConference();
void setChangeOrientationCallback(std::function<void(int)> cb);
void initRecorder(std::shared_ptr<MediaRecorder>& rec) override;
void deinitRecorder(std::shared_ptr<MediaRecorder>& rec) override;
void initRecorder() override;
void deinitRecorder() override;
const VideoBitrateInfo& getVideoBitrateInfo();
@ -175,6 +178,9 @@ private:
std::function<void(void)> cbKeyFrameRequest_;
std::atomic<int> rotation_ {0};
void attachRemoteRecorder(const MediaStream& ms);
void attachLocalRecorder(const MediaStream& ms);
};
} // namespace video


@ -185,13 +185,14 @@ SIPCall::createRtpSession(RtpStream& stream)
// To get audio_0 ; video_0
auto streamId = sip_utils::streamId(id_, stream.mediaAttribute_->label_);
if (stream.mediaAttribute_->type_ == MediaType::MEDIA_AUDIO) {
stream.rtpSession_ = std::make_shared<AudioRtpSession>(id_, streamId);
stream.rtpSession_ = std::make_shared<AudioRtpSession>(id_, streamId, recorder_);
}
#ifdef ENABLE_VIDEO
else if (stream.mediaAttribute_->type_ == MediaType::MEDIA_VIDEO) {
stream.rtpSession_ = std::make_shared<video::VideoRtpSession>(id_,
streamId,
getVideoSettings());
getVideoSettings(),
recorder_);
std::static_pointer_cast<video::VideoRtpSession>(stream.rtpSession_)->setRotation(rotation_);
}
#endif
@ -236,7 +237,7 @@ SIPCall::configureRtpSession(const std::shared_ptr<RtpSession>& rtpSession,
rtpSession->setSuccessfulSetupCb([w = weak()](MediaType type, bool isRemote) {
if (auto thisPtr = w.lock())
thisPtr->rtpSetupSuccess(type, isRemote);
thisPtr->rtpSetupSuccess();
});
if (localMedia.type == MediaType::MEDIA_AUDIO) {
@ -1472,7 +1473,6 @@ SIPCall::switchInput(const std::string& source)
}
if (isRec) {
readyToRecord_ = false;
resetMediaReady();
pendingRecord_ = true;
}
}
@ -2017,7 +2017,11 @@ SIPCall::hasVideo() const
{
#ifdef ENABLE_VIDEO
std::function<bool(const RtpStream& stream)> videoCheck = [](auto const& stream) {
return stream.mediaAttribute_->type_ == MediaType::MEDIA_VIDEO;
bool validVideo = stream.mediaAttribute_
&& stream.mediaAttribute_->hasValidVideo();
bool validRemoteVideo = stream.remoteMediaAttribute_
&& stream.remoteMediaAttribute_->hasValidVideo();
return validVideo || validRemoteVideo;
};
const auto iter = std::find_if(rtpStreams_.begin(), rtpStreams_.end(), videoCheck);
@ -2153,7 +2157,6 @@ SIPCall::startAllMedia()
// reset
readyToRecord_ = false;
resetMediaReady();
for (auto iter = rtpStreams_.begin(); iter != rtpStreams_.end(); iter++) {
if (not iter->mediaAttribute_) {
@ -2219,9 +2222,6 @@ void
SIPCall::stopAllMedia()
{
JAMI_DBG("[call:%s] Stopping all media", getCallId().c_str());
deinitRecorder();
if (Call::isRecording())
stopRecording(); // if call stops, finish recording
#ifdef ENABLE_VIDEO
{
@ -2270,11 +2270,11 @@ SIPCall::updateRemoteMedia()
auto const& remoteMedia = rtpStream.remoteMediaAttribute_ = std::make_shared<MediaAttribute>(
remoteMediaList[idx]);
if (remoteMedia->type_ == MediaType::MEDIA_VIDEO) {
rtpStream.rtpSession_->setMuted(remoteMedia->muted_, RtpSession::Direction::RECV);
JAMI_DEBUG("[call:{:s}] Remote media @ {:d}: {:s}",
getCallId(),
idx,
remoteMedia->toString());
rtpStream.rtpSession_->setMuted(remoteMedia->muted_, RtpSession::Direction::RECV);
// Request a key-frame if we are un-muting the video
if (not remoteMedia->muted_)
requestKeyframe(findRtpStreamIndex(remoteMedia->label_));
@ -2650,6 +2650,19 @@ SIPCall::reportMediaNegotiationStatus()
callId,
libjami::Media::MediaNegotiationStatusEvents::NEGOTIATION_SUCCESS,
currentMediaList());
auto previousState = isAudioOnly_;
auto newState = !hasVideo();
if (previousState != newState && Call::isRecording()) {
deinitRecorder();
toggleRecording();
pendingRecord_ = true;
}
isAudioOnly_ = newState;
if (pendingRecord_ && readyToRecord_) {
toggleRecording();
}
}
void
@ -3259,10 +3272,9 @@ SIPCall::toggleRecording()
peerUri_);
recorder_->setMetadata(title, ""); // use default description
for (const auto& rtpSession : getRtpSessionList())
rtpSession->initRecorder(recorder_);
rtpSession->initRecorder();
} else {
updateRecState(false);
deinitRecorder();
}
pendingRecord_ = false;
auto state = Call::toggleRecording();
@ -3275,7 +3287,7 @@ void
SIPCall::deinitRecorder()
{
for (const auto& rtpSession : getRtpSessionList())
rtpSession->deinitRecorder(recorder_);
rtpSession->deinitRecorder();
}
void
@ -3541,25 +3553,21 @@ SIPCall::newIceSocket(unsigned compId)
}
void
SIPCall::rtpSetupSuccess(MediaType type, bool isRemote)
SIPCall::rtpSetupSuccess()
{
std::lock_guard<std::mutex> lk {setupSuccessMutex_};
if (type == MEDIA_AUDIO) {
if (isRemote)
mediaReady_.at("a:remote") = true;
else
mediaReady_.at("a:local") = true;
} else {
if (isRemote)
mediaReady_.at("v:remote") = true;
else
mediaReady_.at("v:local") = true;
}
isAudioOnly_ = !hasVideo();
#ifdef ENABLE_VIDEO
readyToRecord_ = true; // We're ready to record whenever a stream is ready
#endif
auto previousState = isAudioOnly_;
auto newState = !hasVideo();
if (previousState != newState && Call::isRecording()) {
deinitRecorder();
toggleRecording();
pendingRecord_ = true;
}
isAudioOnly_ = newState;
if (pendingRecord_ && readyToRecord_)
toggleRecording();
@ -3583,13 +3591,23 @@ SIPCall::peerRecording(bool state)
}
void
SIPCall::peerMuted(bool muted)
SIPCall::peerMuted(bool muted, int streamIdx)
{
if (muted) {
JAMI_WARN("Peer muted");
} else {
JAMI_WARN("Peer un-muted");
}
if (streamIdx == -1) {
for (const auto& audioRtp : getRtpSessionList(MediaType::MEDIA_AUDIO))
audioRtp->setMuted(muted, RtpSession::Direction::RECV);
} else if (streamIdx > -1 && streamIdx < static_cast<int>(rtpStreams_.size())) {
auto& stream = rtpStreams_[streamIdx];
if (stream.rtpSession_ && stream.rtpSession_->getMediaType() == MediaType::MEDIA_AUDIO)
stream.rtpSession_->setMuted(muted, RtpSession::Direction::RECV);
}
peerMuted_ = muted;
if (auto conf = conf_.lock())
conf->updateMuted();
@ -3608,11 +3626,4 @@ SIPCall::peerVoice(bool voice)
}
}
void
SIPCall::resetMediaReady()
{
for (auto& m : mediaReady_)
m.second = false;
}
} // namespace jami

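When media renegotiation flips the call between audio-only and audio+video, the ongoing recording cannot simply continue (the container changes, e.g. .ogg vs .webm in the tests below), so SIPCall now restarts it. A condensed sketch of that logic as it appears in reportMediaNegotiationStatus() and rtpSetupSuccess() above (names follow the patch):

// If the audio-only state changed while recording, finish the current file
// and schedule a new recording once the renegotiated streams are ready.
auto newState = !hasVideo();
if (isAudioOnly_ != newState && Call::isRecording()) {
    deinitRecorder();      // detach and remove all recorder streams
    toggleRecording();     // close the current recording file
    pendingRecord_ = true; // restart as soon as readyToRecord_ flips back
}
isAudioOnly_ = newState;
if (pendingRecord_ && readyToRecord_)
    toggleRecording();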

@ -170,7 +170,7 @@ public:
// Override PeerRecorder
void peerRecording(bool state) override;
void peerMuted(bool state) override;
void peerMuted(bool state, int streamIdx) override;
void peerVoice(bool state) override;
// end override PeerRecorder
@ -347,7 +347,7 @@ private:
void deinitRecorder();
void rtpSetupSuccess(MediaType type, bool isRemote);
void rtpSetupSuccess();
void setupVoiceCallback(const std::shared_ptr<RtpSession>& rtpSession);
@ -518,12 +518,6 @@ private:
std::atomic_bool waitForIceInit_ {false};
std::map<const std::string, bool> mediaReady_ {{"a:local", false},
{"a:remote", false},
{"v:local", false},
{"v:remote", false}};
void resetMediaReady();
void detachAudioFromConference();
std::mutex setupSuccessMutex_;


@ -1239,7 +1239,7 @@ handleMediaControl(SIPCall& call, pjsip_msg_body* body)
if (matched_pattern.ready() && !matched_pattern.empty() && matched_pattern[1].matched) {
try {
bool state = std::stoi(matched_pattern[1]);
call.peerMuted(state);
call.peerMuted(state, streamIdx);
} catch (const std::exception& e) {
JAMI_WARN("Error parsing state remote mute: %s", e.what());
}


@ -31,6 +31,7 @@
#include "jamidht/jamiaccount.h"
#include "manager.h"
#include "media_const.h"
#include "client/videomanager.h"
#include "common.h"
@ -47,6 +48,7 @@ struct CallData
std::string mediaStatus {};
std::string device {};
std::string hostState {};
bool changeRequested = false;
void reset()
{
@ -83,6 +85,8 @@ public:
std::unique_lock<std::mutex> lk {mtx};
std::condition_variable cv;
std::string videoPath = std::filesystem::absolute("media/test_video_file.mp4").string();
private:
void registerSignalHandlers();
void testRecordCall();
@ -146,6 +150,14 @@ RecorderTest::registerSignalHandlers()
}
cv.notify_one();
}));
confHandlers.insert(libjami::exportable_callback<libjami::CallSignal::MediaChangeRequested>(
[=](const std::string& accountId,
const std::string& callId,
const std::vector<std::map<std::string, std::string>>&) {
if (accountId == bobId && bobCall.callId == callId) {
bobCall.changeRequested = true;
}
}));
confHandlers.insert(
libjami::exportable_callback<libjami::CallSignal::StateChange>([=](const std::string& accountId,
const std::string& callId,
@ -191,36 +203,82 @@ RecorderTest::testRecordCall()
= {{libjami::Media::MediaAttributeKey::MEDIA_TYPE, libjami::Media::MediaAttributeValue::AUDIO},
{libjami::Media::MediaAttributeKey::ENABLED, TRUE_STR},
{libjami::Media::MediaAttributeKey::MUTED, FALSE_STR},
{libjami::Media::MediaAttributeKey::LABEL, "audio_0"},
{libjami::Media::MediaAttributeKey::SOURCE, ""}};
std::map<std::string, std::string> mediaAttributeV
= {{libjami::Media::MediaAttributeKey::MEDIA_TYPE, libjami::Media::MediaAttributeValue::VIDEO},
{libjami::Media::MediaAttributeKey::ENABLED, TRUE_STR},
{libjami::Media::MediaAttributeKey::MUTED, FALSE_STR},
{libjami::Media::MediaAttributeKey::SOURCE, ""}};
{libjami::Media::MediaAttributeKey::LABEL, "video_0"},
{libjami::Media::MediaAttributeKey::SOURCE, "file://" + videoPath}};
mediaList.emplace_back(mediaAttributeA);
mediaList.emplace_back(mediaAttributeV);
auto callId = libjami::placeCallWithMedia(aliceId, bobUri, mediaList);
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return !bobCall.callId.empty(); }));
Manager::instance().answerCall(bobId, bobCall.callId);
libjami::acceptWithMedia(bobId, bobCall.callId, mediaList);
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] {
return bobCall.mediaStatus
== libjami::Media::MediaNegotiationStatusEvents::NEGOTIATION_SUCCESS;
}));
// give time to start camera
std::this_thread::sleep_for(5s);
// Start recorder
recordedFile.clear();
CPPUNIT_ASSERT(!libjami::getIsRecording(aliceId, callId));
libjami::toggleRecording(aliceId, callId);
// Stop recorder after a few seconds
std::this_thread::sleep_for(5s);
CPPUNIT_ASSERT(libjami::getIsRecording(aliceId, callId));
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return recordedFile.empty(); }));
// add local video
{
auto newMediaList = mediaList;
newMediaList.emplace_back(mediaAttributeV);
// Request Media Change
libjami::requestMediaChange(aliceId, callId, newMediaList);
CPPUNIT_ASSERT(cv.wait_for(lk, 10s, [&] { return bobCall.changeRequested; }));
// Answer the change request
bobCall.mediaStatus = "";
libjami::answerMediaChangeRequest(bobId, bobCall.callId, newMediaList);
bobCall.changeRequested = false;
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] {
return bobCall.mediaStatus
== libjami::Media::MediaNegotiationStatusEvents::NEGOTIATION_SUCCESS;
}));
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return !recordedFile.empty() && recordedFile.find(".ogg") != std::string::npos; }));
recordedFile = "";
// give time to start camera
std::this_thread::sleep_for(10s);
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return recordedFile.empty(); }));
}
// mute local video
{
mediaAttributeV[libjami::Media::MediaAttributeKey::MUTED] = TRUE_STR;
auto newMediaList = mediaList;
newMediaList.emplace_back(mediaAttributeV);
// Mute Bob video
libjami::requestMediaChange(aliceId, callId, newMediaList);
std::this_thread::sleep_for(5s);
libjami::requestMediaChange(bobId, bobCall.callId, newMediaList);
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return !recordedFile.empty() && recordedFile.find(".webm") != std::string::npos; }));
recordedFile = "";
std::this_thread::sleep_for(10s);
}
// Stop recorder after a few seconds
libjami::toggleRecording(aliceId, callId);
CPPUNIT_ASSERT(!libjami::getIsRecording(aliceId, callId));
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return !recordedFile.empty(); }));
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return !recordedFile.empty() && recordedFile.find(".ogg") != std::string::npos; }));
Manager::instance().hangupCall(aliceId, callId);
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return bobCall.state == "OVER"; }));
@ -237,16 +295,18 @@ RecorderTest::testRecordAudioOnlyCall()
auto bobUri = bobAccount->getUsername();
JAMI_INFO("Start call between Alice and Bob");
// Audio only call
std::vector<std::map<std::string, std::string>> mediaList;
std::map<std::string, std::string> mediaAttribute
= {{libjami::Media::MediaAttributeKey::MEDIA_TYPE, libjami::Media::MediaAttributeValue::AUDIO},
{libjami::Media::MediaAttributeKey::ENABLED, TRUE_STR},
{libjami::Media::MediaAttributeKey::MUTED, FALSE_STR},
{libjami::Media::MediaAttributeKey::LABEL, "audio_0"},
{libjami::Media::MediaAttributeKey::SOURCE, ""}};
mediaList.emplace_back(mediaAttribute);
auto callId = libjami::placeCallWithMedia(aliceId, bobUri, mediaList);
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return !bobCall.callId.empty(); }));
Manager::instance().answerCall(bobId, bobCall.callId);
libjami::acceptWithMedia(bobId, bobCall.callId, mediaList);
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] {
return bobCall.mediaStatus
== libjami::Media::MediaNegotiationStatusEvents::NEGOTIATION_SUCCESS;
@ -255,10 +315,10 @@ RecorderTest::testRecordAudioOnlyCall()
// Start recorder
recordedFile.clear();
libjami::toggleRecording(aliceId, callId);
// Stop recorder
std::this_thread::sleep_for(5s);
CPPUNIT_ASSERT(libjami::getIsRecording(aliceId, callId));
// Toggle recording
libjami::toggleRecording(aliceId, callId);
CPPUNIT_ASSERT(!libjami::getIsRecording(aliceId, callId));
@ -290,11 +350,13 @@ RecorderTest::testRecordCallOnePersonRdv()
recordedFile.clear();
JAMI_INFO("Start call between Alice and Bob");
// Audio only call
std::vector<std::map<std::string, std::string>> mediaList;
std::map<std::string, std::string> mediaAttributeA
= {{libjami::Media::MediaAttributeKey::MEDIA_TYPE, libjami::Media::MediaAttributeValue::AUDIO},
{libjami::Media::MediaAttributeKey::ENABLED, TRUE_STR},
{libjami::Media::MediaAttributeKey::MUTED, FALSE_STR},
{libjami::Media::MediaAttributeKey::LABEL, "audio_0"},
{libjami::Media::MediaAttributeKey::SOURCE, ""}};
mediaList.emplace_back(mediaAttributeA);
auto callId = libjami::placeCallWithMedia(aliceId, bobUri, mediaList);
@ -306,18 +368,19 @@ RecorderTest::testRecordCallOnePersonRdv()
CPPUNIT_ASSERT(!libjami::getIsRecording(aliceId, callId));
libjami::toggleRecording(aliceId, callId);
// Stop recorder after a few seconds
std::this_thread::sleep_for(5s);
CPPUNIT_ASSERT(libjami::getIsRecording(aliceId, callId));
// Stop recorder
libjami::toggleRecording(aliceId, callId);
CPPUNIT_ASSERT(!libjami::getIsRecording(aliceId, callId));
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return !recordedFile.empty(); }));
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return !recordedFile.empty() && recordedFile.find(".ogg") != std::string::npos; }));
Manager::instance().hangupCall(aliceId, callId);
CPPUNIT_ASSERT(
cv.wait_for(lk, 20s, [&] { return bobCall.state == "OVER" && !recordedFile.empty(); }));
cv.wait_for(lk, 20s, [&] { return bobCall.state == "OVER"; }));
JAMI_INFO("End testRecordCallOnePersonRdv");
}
@ -336,34 +399,37 @@ RecorderTest::testStopCallWhileRecording()
= {{libjami::Media::MediaAttributeKey::MEDIA_TYPE, libjami::Media::MediaAttributeValue::AUDIO},
{libjami::Media::MediaAttributeKey::ENABLED, TRUE_STR},
{libjami::Media::MediaAttributeKey::MUTED, FALSE_STR},
{libjami::Media::MediaAttributeKey::LABEL, "audio_0"},
{libjami::Media::MediaAttributeKey::SOURCE, ""}};
std::map<std::string, std::string> mediaAttributeV
= {{libjami::Media::MediaAttributeKey::MEDIA_TYPE, libjami::Media::MediaAttributeValue::VIDEO},
{libjami::Media::MediaAttributeKey::ENABLED, TRUE_STR},
{libjami::Media::MediaAttributeKey::MUTED, FALSE_STR},
{libjami::Media::MediaAttributeKey::SOURCE, ""}};
{libjami::Media::MediaAttributeKey::LABEL, "video_0"},
{libjami::Media::MediaAttributeKey::SOURCE, "file://" + videoPath}};
mediaList.emplace_back(mediaAttributeA);
mediaList.emplace_back(mediaAttributeV);
auto callId = libjami::placeCallWithMedia(aliceId, bobUri, mediaList);
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return !bobCall.callId.empty(); }));
Manager::instance().answerCall(bobId, bobCall.callId);
libjami::acceptWithMedia(bobId, bobCall.callId, mediaList);
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] {
return bobCall.mediaStatus
== libjami::Media::MediaNegotiationStatusEvents::NEGOTIATION_SUCCESS;
}));
// give time to start camera
std::this_thread::sleep_for(5s);
// Start recorder
recordedFile.clear();
libjami::toggleRecording(aliceId, callId);
std::this_thread::sleep_for(10s);
CPPUNIT_ASSERT(libjami::getIsRecording(aliceId, callId));
// Hangup call
std::this_thread::sleep_for(5s);
CPPUNIT_ASSERT(libjami::getIsRecording(aliceId, callId));
Manager::instance().hangupCall(aliceId, callId);
CPPUNIT_ASSERT(
cv.wait_for(lk, 20s, [&] { return bobCall.state == "OVER" && !recordedFile.empty(); }));
cv.wait_for(lk, 20s, [&] { return bobCall.state == "OVER" && !recordedFile.empty() && recordedFile.find(".webm") != std::string::npos; }));
JAMI_INFO("End testStopCallWhileRecording");
}
@ -385,11 +451,19 @@ RecorderTest::testDaemonPreference()
= {{libjami::Media::MediaAttributeKey::MEDIA_TYPE, libjami::Media::MediaAttributeValue::AUDIO},
{libjami::Media::MediaAttributeKey::ENABLED, TRUE_STR},
{libjami::Media::MediaAttributeKey::MUTED, FALSE_STR},
{libjami::Media::MediaAttributeKey::LABEL, "audio_0"},
{libjami::Media::MediaAttributeKey::SOURCE, ""}};
std::map<std::string, std::string> mediaAttributeV
= {{libjami::Media::MediaAttributeKey::MEDIA_TYPE, libjami::Media::MediaAttributeValue::VIDEO},
{libjami::Media::MediaAttributeKey::ENABLED, TRUE_STR},
{libjami::Media::MediaAttributeKey::MUTED, FALSE_STR},
{libjami::Media::MediaAttributeKey::LABEL, "video_0"},
{libjami::Media::MediaAttributeKey::SOURCE, "file://" + videoPath}};
mediaList.emplace_back(mediaAttributeA);
mediaList.emplace_back(mediaAttributeV);
auto callId = libjami::placeCallWithMedia(aliceId, bobUri, mediaList);
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] { return !bobCall.callId.empty(); }));
Manager::instance().answerCall(bobId, bobCall.callId);
libjami::acceptWithMedia(bobId, bobCall.callId, mediaList);
CPPUNIT_ASSERT(cv.wait_for(lk, 20s, [&] {
return bobCall.mediaStatus
== libjami::Media::MediaNegotiationStatusEvents::NEGOTIATION_SUCCESS;
@ -398,10 +472,11 @@ RecorderTest::testDaemonPreference()
// Let record some seconds
std::this_thread::sleep_for(5s);
CPPUNIT_ASSERT(libjami::getIsRecording(aliceId, callId));
std::this_thread::sleep_for(10s);
Manager::instance().hangupCall(aliceId, callId);
CPPUNIT_ASSERT(
cv.wait_for(lk, 20s, [&] { return bobCall.state == "OVER" && !recordedFile.empty(); }));
cv.wait_for(lk, 20s, [&] { return bobCall.state == "OVER" && !recordedFile.empty() && recordedFile.find(".webm") != std::string::npos; }));
JAMI_INFO("End testDaemonPreference");
}