diff --git a/erizo/src/erizo/media/ExternalOutput.cpp b/erizo/src/erizo/media/ExternalOutput.cpp
index 20deabe3d..be08e342d 100644
--- a/erizo/src/erizo/media/ExternalOutput.cpp
+++ b/erizo/src/erizo/media/ExternalOutput.cpp
@@ -23,16 +23,18 @@ namespace erizo {
 DEFINE_LOGGER(ExternalOutput, "media.ExternalOutput");
 
 ExternalOutput::ExternalOutput(std::shared_ptr worker, const std::string& output_url,
                                const std::vector rtp_mappings,
-                               const std::vector ext_mappings)
+                               const std::vector ext_mappings, bool hasAudio, bool hasVideo)
   : worker_{worker}, pipeline_{Pipeline::create()}, audio_queue_{5.0, 10.0}, video_queue_{5.0, 10.0},
     inited_{false}, video_stream_{nullptr}, audio_stream_{nullptr}, video_source_ssrc_{0},
     first_video_timestamp_{-1}, first_audio_timestamp_{-1},
     first_data_received_{}, video_offset_ms_{-1}, audio_offset_ms_{-1},
-    need_to_send_fir_{true}, rtp_mappings_{rtp_mappings}, video_codec_{AV_CODEC_ID_NONE},
-    audio_codec_{AV_CODEC_ID_NONE}, pipeline_initialized_{false}, ext_processor_{ext_mappings} {
+    need_to_send_fir_{true}, rtp_mappings_{rtp_mappings}, hasAudio_{hasAudio}, hasVideo_{hasVideo},
+    video_codec_{AV_CODEC_ID_NONE}, audio_codec_{AV_CODEC_ID_NONE},
+    pipeline_initialized_{false}, ext_processor_{ext_mappings}
+    {
   ELOG_DEBUG("Creating output to %s", output_url.c_str());
-
+  ELOG_DEBUG("Has audio %d has video %d", hasAudio, hasVideo);
   // TODO(pedro): these should really only be called once per application run
   av_register_all();
   avcodec_register_all();
@@ -88,7 +90,6 @@ bool ExternalOutput::init() {
   return true;
 }
 
-
 ExternalOutput::~ExternalOutput() {
   ELOG_DEBUG("Destructing");
 }
@@ -365,15 +366,24 @@ int ExternalOutput::deliverEvent_(MediaEventPtr event) {
 }
 
 bool ExternalOutput::initContext() {
-  if (video_codec_ != AV_CODEC ID_NONE &&
-      audio_codec_ != AV_CODEC_ID_NONE &&
-      video_stream_ == nullptr &&
-      audio_stream_ == nullptr) {
+  bool init_video = false;
+  bool init_audio = false;
+
+  if (hasVideo_ && video_codec_ == AV_CODEC_ID_NONE) {
+    return false;
+  }
+
+  if (hasAudio_ && audio_codec_ == AV_CODEC_ID_NONE) {
+    return false;
+  }
+
+  if (hasVideo_ && video_stream_ == nullptr) {
     AVCodec* video_codec = avcodec_find_encoder(video_codec_);
     if (video_codec == nullptr) {
       ELOG_ERROR("Could not find video codec");
       return false;
     }
+    init_video = true;
     need_to_send_fir_ = true;
     video_queue_.setTimebase(video_map_.clock_rate);
     video_stream_ = avformat_new_stream(context_, video_codec);
@@ -390,13 +400,16 @@ bool ExternalOutput::initContext() {
       video_stream_->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
     }
     context_->oformat->flags |= AVFMT_VARIABLE_FPS;
+    context_->streams[0] = video_stream_;
+  }
 
+  if (hasAudio_ && audio_stream_ == nullptr) {
     AVCodec* audio_codec = avcodec_find_encoder(audio_codec_);
     if (audio_codec == nullptr) {
       ELOG_ERROR("Could not find audio codec");
       return false;
     }
-
+    init_audio = true;
     audio_stream_ = avformat_new_stream(context_, audio_codec);
     audio_stream_->id = 1;
     audio_stream_->codec->codec_id = audio_codec_;
@@ -407,8 +420,18 @@ bool ExternalOutput::initContext() {
       audio_stream_->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
     }
 
-    context_->streams[0] = video_stream_;
+    if (!hasVideo_) {
+      // To avoid the following matroska errors, we add CODEC_FLAG_GLOBAL_HEADER...
+      // - Codec for stream 0 does not use global headers but container format requires global headers
+      // - Only audio, video, and subtitles are supported for Matroska.
+      video_stream_ = avformat_new_stream(context_, nullptr);
+      video_stream_->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
+      context_->streams[0] = video_stream_;
+    }
     context_->streams[1] = audio_stream_;
+  }
+
+  if ( init_audio || init_video ) {
     if (avio_open(&context_->pb, context_->filename, AVIO_FLAG_WRITE) < 0) {
       ELOG_ERROR("Error opening output file");
       return false;
@@ -435,11 +458,6 @@ void ExternalOutput::queueData(char* buffer, int length, packetType type) {
 
   if (first_data_received_ == time_point()) {
     first_data_received_ = clock::now();
-    if (getAudioSinkSSRC() == 0) {
-      ELOG_DEBUG("No audio detected");
-      audio_map_ = RtpMap{0, "PCMU", 8000, AUDIO_TYPE, 1};
-      audio_codec_ = AV_CODEC_ID_PCM_MULAW;
-    }
   }
   if (need_to_send_fir_ && video_source_ssrc_) {
     sendFirPacket();
diff --git a/erizo/src/erizo/media/ExternalOutput.h b/erizo/src/erizo/media/ExternalOutput.h
index d994304b6..63768e153 100644
--- a/erizo/src/erizo/media/ExternalOutput.h
+++ b/erizo/src/erizo/media/ExternalOutput.h
@@ -38,7 +38,8 @@ class ExternalOutput : public MediaSink, public RawDataReceiver, public Feedback
  public:
   explicit ExternalOutput(std::shared_ptr worker, const std::string& output_url,
                           const std::vector rtp_mappings,
-                          const std::vector ext_mappings);
+                          const std::vector ext_mappings,
+                          bool hasAudio, bool hasVideo);
   virtual ~ExternalOutput();
   bool init();
   void receiveRawData(const RawDataPacket& packet) override;
@@ -65,7 +66,6 @@ class ExternalOutput : public MediaSink, public RawDataReceiver, public Feedback
   boost::condition_variable cond_;
   AVStream *video_stream_, *audio_stream_;
   AVFormatContext *context_;
-
   uint32_t video_source_ssrc_;
   std::unique_ptr depacketizer_;
@@ -104,6 +104,8 @@ class ExternalOutput : public MediaSink, public RawDataReceiver, public Feedback
   // so the second scheme seems not applicable. Too bad.
   bool need_to_send_fir_;
   std::vector rtp_mappings_;
+  bool hasAudio_;
+  bool hasVideo_;
   enum AVCodecID video_codec_;
   enum AVCodecID audio_codec_;
   std::map video_maps_;
diff --git a/erizoAPI/ExternalOutput.cc b/erizoAPI/ExternalOutput.cc
index 70bd9db94..e6fd36cd1 100644
--- a/erizoAPI/ExternalOutput.cc
+++ b/erizoAPI/ExternalOutput.cc
@@ -129,10 +129,14 @@ NAN_METHOD(ExternalOutput::New) {
       ext_mappings.push_back({value++, *ext_map_it});
     }
   }
+
+  bool hasAudio = Nan::To<bool>((info[3])).FromJust();
+  bool hasVideo = Nan::To<bool>((info[4])).FromJust();
+
   std::shared_ptr worker = thread_pool->me->getLessUsedWorker();
 
   ExternalOutput* obj = new ExternalOutput();
-  obj->me = std::make_shared<erizo::ExternalOutput>(worker, url, rtp_mappings, ext_mappings);
+  obj->me = std::make_shared<erizo::ExternalOutput>(worker, url, rtp_mappings, ext_mappings, hasAudio, hasVideo);
   obj->Wrap(info.This());
   info.GetReturnValue().Set(info.This());
@@ -159,3 +163,6 @@ NAN_METHOD(ExternalOutput::init) {
   int r = me->init();
   info.GetReturnValue().Set(Nan::New(r));
 }
+
+
+
diff --git a/erizo_controller/erizoController/models/Client.js b/erizo_controller/erizoController/models/Client.js
index c252783dd..5240a10b8 100644
--- a/erizo_controller/erizoController/models/Client.js
+++ b/erizo_controller/erizoController/models/Client.js
@@ -568,7 +568,11 @@ class Client extends events.EventEmitter {
       return;
     }
     if (stream.hasAudio() || stream.hasVideo() || stream.hasScreen()) {
-      const mediaOptions = { mediaConfiguration: this.token.mediaConfiguration };
+      const mediaOptions = { mediaConfiguration:
+        this.token.mediaConfiguration,
+        hasAudio: stream.hasAudio(),
+        hasVideo: stream.hasVideo() || stream.hasScreen(),
+      };
       stream.addExternalOutputSubscriber(url);
       stream.updateExternalOutputSubscriberState(url, StreamStates.SUBSCRIBER_CREATED);
       this.room.controller.addExternalOutput(streamId, url, mediaOptions, () => {
@@ -613,7 +617,7 @@ class Client extends events.EventEmitter {
     this.room.streamManager.forEachPublishedStream((stream) => {
       if (stream.hasExternalOutputSubscriber(url)) {
         stream.removeExternalOutputSubscriber(url);
-        this.room.controller.removeExternalOutput(options.id, url, callback);
+        this.room.controller.removeExternalOutput(stream.id, url, callback);
         removed = true;
       }
     });
diff --git a/erizo_controller/erizoJS/models/Publisher.js b/erizo_controller/erizoJS/models/Publisher.js
index 810cea6aa..ed4418926 100644
--- a/erizo_controller/erizoJS/models/Publisher.js
+++ b/erizo_controller/erizoJS/models/Publisher.js
@@ -98,11 +98,15 @@ class Source extends NodeClass {
 
   addExternalOutput(url, options) {
     const eoId = `${url}_${this.streamId}`;
+    const hasVideo = options.hasVideo === null || options.hasVideo;
+    const hasAudio = options.hasAudio === null || options.hasAudio;
+
     log.info(`message: Adding ExternalOutput, id: ${eoId}, url: ${url},`,
       logger.objectToLog(this.options), logger.objectToLog(this.options.metadata));
     const externalOutput = new erizo.ExternalOutput(this.threadPool, url,
-      Helpers.getMediaConfiguration(options.mediaConfiguration));
+      Helpers.getMediaConfiguration(options.mediaConfiguration), hasAudio, hasVideo);
     externalOutput.id = eoId;
+    externalOutput.init();
     this.muxer.addExternalOutput(externalOutput, url);
     this.externalOutputs[url] = externalOutput;
diff --git a/extras/basic_example/public/index.html b/extras/basic_example/public/index.html
index f0aab9580..42d290acc 100644
--- a/extras/basic_example/public/index.html
+++ b/extras/basic_example/public/index.html
@@ -9,7 +9,6 @@
-
diff --git a/extras/basic_example/public/script.js b/extras/basic_example/public/script.js
index 1bf94c406..6356d89c3 100644
--- a/extras/basic_example/public/script.js
+++ b/extras/basic_example/public/script.js
@@ -7,8 +7,6 @@ const serverUrl = '/';
 let localStream;
 let room;
-let recording = false;
-let recordingId = '';
 let localStreamIndex = 0;
 const localStreams = new Map();
 const configFlags = {
@@ -76,11 +74,40 @@ const createPublisherContainer = (stream, index) => {
     document.getElementById('videoContainer').removeChild(container);
   };
 
+  const stopRecordButton = document.createElement('button');
+  stopRecordButton.textContent = 'Stop record';
+  stopRecordButton.setAttribute('style', 'float:left;');
+  stopRecordButton.setAttribute('hidden', 'true');
+
+  const recordButton = document.createElement('button');
+  recordButton.textContent = 'Record';
+  recordButton.setAttribute('style', 'float:left;');
+
+  let recordId;
+  recordButton.onclick = () => {
+    console.log(stream);
+    room.startRecording(stream, (id) => {
+      recordId = id;
+    });
+    recordButton.hidden = true;
+    stopRecordButton.hidden = false;
+  };
+
+  stopRecordButton.onclick = () => {
+    console.log(stream);
+    room.stopRecording(recordId);
+    recordButton.hidden = false;
+    stopRecordButton.hidden = true;
+  };
+
+
   const div = document.createElement('div');
   div.setAttribute('style', 'width: 320px; height: 240px; float:left');
   div.setAttribute('id', `myVideo${index}`);
   container.appendChild(div);
   container.appendChild(unpublishButton);
+  container.appendChild(recordButton);
+  container.appendChild(stopRecordButton);
   document.getElementById('videoContainer').appendChild(container);
 };
@@ -111,24 +138,6 @@ const testConnection = () => {
   window.location = '/connection_test.html';
 };
 
-
-// eslint-disable-next-line no-unused-vars
-function startRecording() {
-  if (room !== undefined) {
-    if (!recording) {
-      room.startRecording(localStream, (id) => {
-        recording = true;
-        recordingId = id;
-        window.recordingId = recordingId;
-      });
-    } else {
-      room.stopRecording(recordingId);
-      recording = false;
-    }
-    window.recording = recording;
-  }
-}
-
 let slideShowMode = false;
 
 // eslint-disable-next-line no-unused-vars
@@ -178,7 +187,6 @@ const startBasicExample = () => {
    document.getElementById('publishOnlyAudio').disabled = false;
    document.getElementById('startWarning').hidden = true;
    document.getElementById('startButton').hidden = true;
-    recording = false;
    console.log('Selected Room', configFlags.room, 'of type', configFlags.type);
    const config = { audio: true, video: !configFlags.onlyAudio,
@@ -291,7 +299,6 @@ const startBasicExample = () => {
       localStream.setAttributes({ type: 'publisher' });
     }
     subscribeToStreams(streams);
-    document.getElementById('recordButton').disabled = false;
   });
 
   room.addEventListener('stream-removed', (streamEvent) => {
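
A minimal usage sketch of the per-stream recording flow wired up by this patch, for reference only. It assumes an already-connected Erizo room and a published stream (for example an audio-only stream published with { audio: true, video: false }); room.startRecording and room.stopRecording are used exactly as in the basic_example changes above, while the helper names startStreamRecording and stopStreamRecording are hypothetical.

// Start/stop a recording for one published stream.
// Assumes `room` is connected and `stream` has already been published.
let recordingId;

const startStreamRecording = (room, stream) => {
  // The controller derives hasAudio/hasVideo from the stream itself (Client.js above),
  // so an audio-only stream now produces an audio-only recording.
  room.startRecording(stream, (id) => {
    recordingId = id; // keep the id so the recording can be stopped later
  });
};

const stopStreamRecording = (room) => {
  room.stopRecording(recordingId);
};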