From c950d44eddfbcd238327f6cc1357b253ff688933 Mon Sep 17 00:00:00 2001 From: Romain Bouqueau Date: Tue, 19 Apr 2016 13:25:49 +0200 Subject: [PATCH] logs and exception are prefixed by the module name - fix 53 --- src/apps/dashcastx/pipeliner.cpp | 16 +-- src/apps/player/pipeliner.cpp | 6 +- src/lib_media/common/libav.cpp | 16 +-- src/lib_media/common/pcm.hpp | 10 +- src/lib_media/decode/jpegturbo_decode.cpp | 5 +- src/lib_media/decode/libav_decode.cpp | 13 ++- src/lib_media/demux/gpac_demux_mp4_full.cpp | 11 +-- src/lib_media/demux/gpac_demux_mp4_simple.cpp | 7 +- src/lib_media/demux/libav_demux.cpp | 19 ++-- src/lib_media/encode/jpegturbo_encode.cpp | 3 +- src/lib_media/encode/libav_encode.cpp | 33 +++---- src/lib_media/in/file.cpp | 5 +- src/lib_media/in/sound_generator.cpp | 1 - src/lib_media/in/video_generator.cpp | 1 - src/lib_media/mux/gpac_mux_m2ts.cpp | 2 +- src/lib_media/mux/gpac_mux_mp4.cpp | 97 +++++++++---------- src/lib_media/mux/libav_mux.cpp | 21 ++-- src/lib_media/out/file.cpp | 3 +- src/lib_media/out/null.cpp | 1 - src/lib_media/out/print.cpp | 1 - src/lib_media/render/sdl_audio.cpp | 15 ++- src/lib_media/render/sdl_common.cpp | 5 +- src/lib_media/render/sdl_video.cpp | 11 +-- src/lib_media/stream/apple_hls.cpp | 3 +- src/lib_media/stream/mpeg_dash.cpp | 13 ++- src/lib_media/transform/audio_convert.cpp | 5 +- src/lib_media/transform/restamp.cpp | 4 +- src/lib_media/transform/video_convert.cpp | 7 +- src/lib_media/utils/comparator.cpp | 14 +-- src/lib_modules/core/error.hpp | 30 ++++++ src/lib_modules/core/log.hpp | 17 ++++ src/lib_modules/core/metadata.hpp | 8 +- src/lib_modules/core/module.hpp | 5 +- src/lib_modules/core/output.hpp | 2 +- src/lib_modules/modules.vcxproj | 2 + src/lib_modules/modules.vcxproj.filters | 6 ++ src/lib_modules/utils/helper.hpp | 8 +- src/lib_modules/utils/pipeline.cpp | 16 +-- src/lib_utils/log.cpp | 4 +- src/lib_utils/log.hpp | 17 ++-- 40 files changed, 249 insertions(+), 214 deletions(-) create mode 100644 src/lib_modules/core/error.hpp create mode 100644 src/lib_modules/core/log.hpp diff --git a/src/apps/dashcastx/pipeliner.cpp b/src/apps/dashcastx/pipeliner.cpp index 117fe056..095c9a24 100644 --- a/src/apps/dashcastx/pipeliner.cpp +++ b/src/apps/dashcastx/pipeliner.cpp @@ -12,17 +12,17 @@ namespace { Encode::LibavEncode* createEncoder(std::shared_ptr metadata, const dashcastXOptions &opt, size_t i) { auto const codecType = metadata->getStreamType(); if (codecType == VIDEO_PKT) { - Log::msg(Log::Info, "[Encoder] Found video stream"); + Log::msg(Info, "[Encoder] Found video stream"); Encode::LibavEncodeParams p; p.isLowLatency = opt.isLive; p.res = opt.v[i].res; p.bitrate_v = opt.v[i].bitrate; return new Encode::LibavEncode(Encode::LibavEncode::Video, p); } else if (codecType == AUDIO_PKT) { - Log::msg(Log::Info, "[Encoder] Found audio stream"); + Log::msg(Info, "[Encoder] Found audio stream"); return new Encode::LibavEncode(Encode::LibavEncode::Audio); } else { - Log::msg(Log::Info, "[Encoder] Found unknown stream"); + Log::msg(Info, "[Encoder] Found unknown stream"); return nullptr; } } @@ -30,15 +30,15 @@ Encode::LibavEncode* createEncoder(std::shared_ptr metadata, co ModuleS* createConverter(std::shared_ptr metadata, const Resolution &dstRes) { auto const codecType = metadata->getStreamType(); if (codecType == VIDEO_PKT) { - Log::msg(Log::Info, "[Converter] Found video stream"); + Log::msg(Info, "[Converter] Found video stream"); auto dstFormat = PictureFormat(dstRes, YUV420P); return new Transform::VideoConvert(dstFormat); } else if 
(codecType == AUDIO_PKT) { - Log::msg(Log::Info, "[Converter] Found audio stream"); + Log::msg(Info, "[Converter] Found audio stream"); auto format = PcmFormat(44100, 2, AudioLayout::Stereo, AudioSampleFormat::F32, AudioStruct::Planar); return new Transform::AudioConvert(format); } else { - Log::msg(Log::Info, "[Converter] Found unknown stream"); + Log::msg(Info, "[Converter] Found unknown stream"); return nullptr; } } @@ -55,14 +55,14 @@ void declarePipeline(Pipeline &pipeline, const dashcastXOptions &opt) { const bool transcode = opt.v.size() > 0 ? true : false; if (!transcode) { - Log::msg(Log::Warning, "[DashcastX] No transcode. Make passthru."); + Log::msg(Warning, "[DashcastX] No transcode. Make passthru."); } int numDashInputs = 0; for (size_t i = 0; i < demux->getNumOutputs(); ++i) { auto const metadata = getMetadataFromOutput(demux->getOutput(i)); if (!metadata) { - Log::msg(Log::Warning, "[DashcastX] Unknown metadata for stream %s. Ignoring.", i); + Log::msg(Warning, "[DashcastX] Unknown metadata for stream %s. Ignoring.", i); break; } diff --git a/src/apps/player/pipeliner.cpp b/src/apps/player/pipeliner.cpp index 57adb0f7..20a4e30b 100644 --- a/src/apps/player/pipeliner.cpp +++ b/src/apps/player/pipeliner.cpp @@ -9,13 +9,13 @@ using namespace Pipelines; namespace { ModuleS* createRenderer(int codecType) { if (codecType == VIDEO_PKT) { - Log::msg(Log::Info, "Found video stream"); + Log::msg(Info, "Found video stream"); return new Render::SDLVideo(); } else if (codecType == AUDIO_PKT) { - Log::msg(Log::Info, "Found audio stream"); + Log::msg(Info, "Found audio stream"); return new Render::SDLAudio(); } else { - Log::msg(Log::Info, "Found unknown stream"); + Log::msg(Info, "Found unknown stream"); return new Out::Null; } } diff --git a/src/lib_media/common/libav.cpp b/src/lib_media/common/libav.cpp index 35a81bb9..f5425f33 100644 --- a/src/lib_media/common/libav.cpp +++ b/src/lib_media/common/libav.cpp @@ -12,24 +12,24 @@ extern "C" { } namespace { -Log::Level avLogLevel(int level) { +Level avLogLevel(int level) { switch (level) { case AV_LOG_QUIET: case AV_LOG_PANIC: case AV_LOG_FATAL: case AV_LOG_ERROR: - return Log::Warning; + return Warning; case AV_LOG_WARNING: case AV_LOG_INFO: - return Log::Info; + return Info; case AV_LOG_VERBOSE: - return Log::Debug; + return Debug; case AV_LOG_DEBUG: case AV_LOG_TRACE: - return Log::Quiet; + return Quiet; default: assert(0); - return Log::Debug; + return Debug; } } @@ -221,7 +221,7 @@ DataAVPacket::DataAVPacket(size_t size) } DataAVPacket::~DataAVPacket() { - Log::msg(Log::Debug, "Freeing %s, pts=%s", this, pkt->pts); + Log::msg(Debug, "Freeing %s, pts=%s", this, pkt->pts); } uint8_t* DataAVPacket::data() { @@ -258,7 +258,7 @@ void buildAVDictionary(const std::string &moduleName, AVDictionary **dict, const char *tokval = nullptr; while (tok && (tokval = strtok(nullptr, "- "))) { if (av_dict_set(dict, tok, tokval, 0) < 0) { - Log::msg(Log::Warning, "[%s] unknown %s option \"%s\" with value \"%s\"", moduleName.c_str(), type, tok, tokval); + Log::msg(Warning, "[%s] unknown %s option \"%s\" with value \"%s\"", moduleName.c_str(), type, tok, tokval); } tok = strtok(nullptr, "- "); } diff --git a/src/lib_media/common/pcm.hpp b/src/lib_media/common/pcm.hpp index 918d2f4f..7642cb1a 100644 --- a/src/lib_media/common/pcm.hpp +++ b/src/lib_media/common/pcm.hpp @@ -69,23 +69,23 @@ class PcmFormat { bool operator==(const PcmFormat& other) const { if (other.sampleRate != sampleRate) { - Log::msg(Log::Info, "[Audio] Incompatible configuration: sample 
rate is %s, expect %s.", other.sampleRate, sampleRate); + Log::msg(Info, "[Audio] Incompatible configuration: sample rate is %s, expect %s.", other.sampleRate, sampleRate); return false; } if (other.numChannels != numChannels) { - Log::msg(Log::Info, "[Audio] Incompatible configuration: channel number is %s, expect %s.", other.numChannels, numChannels); + Log::msg(Info, "[Audio] Incompatible configuration: channel number is %s, expect %s.", other.numChannels, numChannels); return false; } if (other.layout != layout) { - Log::msg(Log::Info, "[Audio] Incompatible configuration: layout is %s, expect %s.", other.layout, layout); + Log::msg(Info, "[Audio] Incompatible configuration: layout is %s, expect %s.", other.layout, layout); return false; } if (other.sampleFormat != sampleFormat) { - Log::msg(Log::Info, "[Audio] Incompatible configuration: sample format is %s, expect %s.", other.sampleFormat, sampleFormat); + Log::msg(Info, "[Audio] Incompatible configuration: sample format is %s, expect %s.", other.sampleFormat, sampleFormat); return false; } if (other.numPlanes != numPlanes) { - Log::msg(Log::Info, "[Audio] Incompatible configuration: plane number is %s, expect %s.", other.numPlanes, numPlanes); + Log::msg(Info, "[Audio] Incompatible configuration: plane number is %s, expect %s.", other.numPlanes, numPlanes); return false; } diff --git a/src/lib_media/decode/jpegturbo_decode.cpp b/src/lib_media/decode/jpegturbo_decode.cpp index f36b730f..832cac0b 100644 --- a/src/lib_media/decode/jpegturbo_decode.cpp +++ b/src/lib_media/decode/jpegturbo_decode.cpp @@ -1,6 +1,5 @@ #include "jpegturbo_decode.hpp" #include "../common/libav.hpp" -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" extern "C" { #include @@ -63,12 +62,12 @@ void JPEGTurboDecode::process(Data data_) { int w, h, jpegSubsamp; auto jpegBuf = data->data(); if (tjDecompressHeader2(jtHandle->get(), (unsigned char*)jpegBuf, (unsigned long)data->size(), &w, &h, &jpegSubsamp) < 0) { - Log::msg(Log::Warning, "[jpegturbo_decode] error encountered while decompressing header."); + log(Warning, "error encountered while decompressing header."); return; } auto out = DataPicture::create(output, Resolution(w, h), RGB24); if (tjDecompress2(jtHandle->get(), (unsigned char*)jpegBuf, (unsigned long)data->size(), out->data(), w, 0/*pitch*/, h, pixelFmt, TJFLAG_FASTDCT) < 0) { - Log::msg(Log::Warning, "[jpegturbo_decode] error encountered while decompressing frame."); + log(Warning, "error encountered while decompressing frame."); return; } ensureMetadata(w, h, pixelFmt); diff --git a/src/lib_media/decode/libav_decode.cpp b/src/lib_media/decode/libav_decode.cpp index 075a29bd..38641a7a 100644 --- a/src/lib_media/decode/libav_decode.cpp +++ b/src/lib_media/decode/libav_decode.cpp @@ -1,6 +1,5 @@ #include "libav_decode.hpp" #include "../common/pcm.hpp" -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "lib_ffpp/ffpp.hpp" #include @@ -22,19 +21,19 @@ LibavDecode::LibavDecode(const MetadataPktLibav &metadata) switch (codecCtx->codec_type) { case AVMEDIA_TYPE_VIDEO: break; case AVMEDIA_TYPE_AUDIO: break; - default: throw std::runtime_error(format("[LibavDecode] codec_type %s not supported. Must be audio or video.", codecCtx->codec_type)); + default: throw error(format("codec_type %s not supported. 
Must be audio or video.", codecCtx->codec_type)); } //find an appropriate decode auto codec = avcodec_find_decoder(codecCtx->codec_id); if (!codec) - throw std::runtime_error(format("[LibavDecode] Decoder not found for codecID(%s).", codecCtx->codec_id)); + throw error(format("Decoder not found for codecID(%s).", codecCtx->codec_id)); //force single threaded as h264 probing seems to miss SPS/PPS and seek fails silently ffpp::Dict dict; dict.set("threads", "1"); if (avcodec_open2(codecCtx, codec, &dict) < 0) { - throw std::runtime_error("[LibavDecode] Couldn't open stream."); + throw error("Couldn't open stream."); } switch (codecCtx->codec_type) { @@ -51,7 +50,7 @@ LibavDecode::LibavDecode(const MetadataPktLibav &metadata) break; } default: - throw std::runtime_error("[LibavDecode] Invalid output type."); + throw error("Invalid output type."); } } @@ -64,7 +63,7 @@ bool LibavDecode::processAudio(const DataAVPacket *data) { AVPacket *pkt = data->getPacket(); int gotFrame; if (avcodec_decode_audio4(codecCtx, avFrame->get(), &gotFrame, pkt) < 0) { - Log::msg(Log::Warning, "[LibavDecode] Error encoutered while decoding audio."); + log(Warning, "Error encoutered while decoding audio."); return false; } if (gotFrame) { @@ -111,7 +110,7 @@ bool LibavDecode::processVideo(const DataAVPacket *data) { AVPacket *pkt = data->getPacket(); int gotPicture; if (avcodec_decode_video2(codecCtx, avFrame->get(), &gotPicture, pkt) < 0) { - Log::msg(Log::Warning, "[LibavDecode] Error encoutered while decoding video."); + log(Warning, "Error encoutered while decoding video."); return false; } if (gotPicture) { diff --git a/src/lib_media/demux/gpac_demux_mp4_full.cpp b/src/lib_media/demux/gpac_demux_mp4_full.cpp index 52a85ea4..43bd702d 100644 --- a/src/lib_media/demux/gpac_demux_mp4_full.cpp +++ b/src/lib_media/demux/gpac_demux_mp4_full.cpp @@ -1,5 +1,4 @@ #include "gpac_demux_mp4_full.hpp" -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include #include @@ -47,7 +46,7 @@ bool GPACDemuxMP4Full::openData() { GF_ISOFile *movie; GF_Err e = gf_isom_open_progressive(reader->dataUrl.c_str(), 0, 0, &movie, &missingBytes); if ((e != GF_OK && e != GF_ISOM_INCOMPLETE_FILE) || reader->movie) { - Log::msg(Log::Warning, "Error opening fragmented mp4 in progressive mode: %s (missing %s bytes)", + log(Warning, "Error opening fragmented mp4 in progressive mode: %s (missing %s bytes)", gf_error_to_string(e), missingBytes); return false; @@ -74,7 +73,7 @@ bool GPACDemuxMP4Full::processSample() { newSampleCount = reader->movie->getSampleCount(reader->trackNumber); if (newSampleCount > reader->sampleCount) { /* New samples have been added to the file */ - Log::msg(Log::Info, "Found %s new samples (total: %s)", + log(Info, "Found %s new samples (total: %s)", newSampleCount - reader->sampleCount, newSampleCount); if (reader->sampleCount == 0) { @@ -99,7 +98,7 @@ bool GPACDemuxMP4Full::processSample() { reader->samplesProcessed++; /*here we dump some sample info: samp->data, samp->dataLength, samp->isRAP, samp->DTS, samp->CTS_Offset */ - Log::msg(Log::Debug, + log(Debug, "Found sample #%s(#%s) of length %s , RAP: %s, DTS : %s, CTS : %s", reader->sampleIndex, reader->samplesProcessed, @@ -120,7 +119,7 @@ bool GPACDemuxMP4Full::processSample() { u64 newBufferStart = 0; u64 missingBytes; - Log::msg(Log::Debug, "Releasing unnecessary buffers"); + log(Debug, "Releasing unnecessary buffers"); /* release internal structures associated with the samples read so far */ reader->movie->resetTables(true); @@ -146,7 +145,7 @@ bool 
GPACDemuxMP4Full::processSample() { return true; } catch(gpacpp::Error const& e) { - Log::msg(Log::Warning, "Could not get sample: %s", gf_error_to_string(e.error_)); + log(Warning, "Could not get sample: %s", gf_error_to_string(e.error_)); return false; } } diff --git a/src/lib_media/demux/gpac_demux_mp4_simple.cpp b/src/lib_media/demux/gpac_demux_mp4_simple.cpp index 70e9230e..2c3b747b 100644 --- a/src/lib_media/demux/gpac_demux_mp4_simple.cpp +++ b/src/lib_media/demux/gpac_demux_mp4_simple.cpp @@ -1,5 +1,4 @@ #include "gpac_demux_mp4_simple.hpp" -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "lib_gpacpp/gpacpp.hpp" @@ -28,7 +27,7 @@ GPACDemuxMP4Simple::GPACDemuxMP4Simple(std::string const& path) u64 missingBytes; GF_Err e = gf_isom_open_progressive(path.c_str(), 0, 0, &movie, &missingBytes); if ((e != GF_OK && e != GF_ISOM_INCOMPLETE_FILE) || movie == nullptr) { - throw std::runtime_error(format("Could not open file %s for reading (%s).", path, gf_error_to_string(e))); + throw error(format("Could not open file %s for reading (%s).", path, gf_error_to_string(e))); } reader->init(movie); output = addOutput(new OutputDefault); @@ -44,7 +43,7 @@ void GPACDemuxMP4Simple::process(Data /*data*/) { std::unique_ptr ISOSample; ISOSample = reader->movie->getSample(reader->trackNumber, reader->sampleIndex, sampleDescriptionIndex); - Log::msg(Log::Debug, "Found sample #%s/%s of length %s, RAP %s, DTS: %s, CTS: %s", + log(Debug, "Found sample #%s/%s of length %s, RAP %s, DTS: %s, CTS: %s", reader->sampleIndex, reader->sampleCount, ISOSample->dataLength, @@ -59,7 +58,7 @@ void GPACDemuxMP4Simple::process(Data /*data*/) { } catch (gpacpp::Error const& err) { if (err.error_ == GF_ISOM_INCOMPLETE_FILE) { u64 missingBytes = reader->movie->getMissingBytes(reader->trackNumber); - Log::msg(Log::Error, "Missing %s bytes on input file", missingBytes); + log(Error, "Missing %s bytes on input file", missingBytes); } else { return; } diff --git a/src/lib_media/demux/libav_demux.cpp b/src/lib_media/demux/libav_demux.cpp index 2e61a4a0..5dbba3c0 100644 --- a/src/lib_media/demux/libav_demux.cpp +++ b/src/lib_media/demux/libav_demux.cpp @@ -1,7 +1,6 @@ #include "libav_demux.hpp" #include "../transform/restamp.hpp" #include "../common/libav.hpp" -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "lib_ffpp/ffpp.hpp" #include @@ -45,11 +44,11 @@ bool isRaw(AVCodecContext *codecCtx) { namespace Demux { void LibavDemux::webcamList() { - Log::msg(Log::Warning, "[LibavDemux] Webcam list:"); + log(Warning, "Webcam list:"); ffpp::Dict dict; - buildAVDictionary("[LibavDemux]", &dict, "-list_devices true", "format"); + buildAVDictionary(typeid(*this).name(), &dict, "-list_devices true", "format"); avformat_open_input(&m_formatCtx, "video=dummy:audio=dummy", av_find_input_format(webcamFormat()), &dict); - Log::msg(Log::Warning, "\n[LibavDemux] Webcam example: webcam:video=\"Integrated Webcam\":audio=\"Microphone (Realtek High Defini\""); + log(Warning, "Webcam example: webcam:video=\"Integrated Webcam\":audio=\"Microphone (Realtek High Defini\""); } bool LibavDemux::webcamOpen(const std::string &options) { @@ -61,14 +60,14 @@ bool LibavDemux::webcamOpen(const std::string &options) { LibavDemux::LibavDemux(const std::string &url) { if (!(m_formatCtx = avformat_alloc_context())) - throw std::runtime_error("[LibavDemux] Can't allocate format context"); + throw error("Can't allocate format context"); const std::string device = url.substr(0, url.find(":")); if (device == "webcam") { if (url 
== device || !webcamOpen(url.substr(url.find(":") + 1))) { webcamList(); if (m_formatCtx) avformat_close_input(&m_formatCtx); - throw std::runtime_error("Webcam init failed."); + throw error("Webcam init failed."); } restamp = uptr(new Transform::Restamp(Transform::Restamp::ClockSystem)); /*some webcams timestamps don't start at 0 (based on UTC)*/ } else { @@ -77,13 +76,13 @@ LibavDemux::LibavDemux(const std::string &url) { dict.set("analyzeduration", "100M"); if (avformat_open_input(&m_formatCtx, url.c_str(), nullptr, &dict)) { if (m_formatCtx) avformat_close_input(&m_formatCtx); - throw std::runtime_error(format("[LibavDemux] Error when opening input '%s'", url)); + throw error(format("Error when opening input '%s'", url)); } //if you don't call you may miss the first frames if (avformat_find_stream_info(m_formatCtx, nullptr) < 0) { avformat_close_input(&m_formatCtx); - throw std::runtime_error("[LibavDemux] Couldn't get additional video stream info"); + throw error("Couldn't get additional video stream info"); } restamp = uptr(new Transform::Restamp(Transform::Restamp::Reset)); @@ -132,7 +131,7 @@ void LibavDemux::process(Data data) { if (status < 0) { if (status == (int)AVERROR_EOF || (m_formatCtx->pb && m_formatCtx->pb->eof_reached)) { } else if (m_formatCtx->pb && m_formatCtx->pb->error) { - Log::msg(Log::Warning, "[LibavDemux] Stream contains an irrecoverable error - leaving"); + log(Warning, "Stream contains an irrecoverable error - leaving"); } return; } @@ -142,7 +141,7 @@ void LibavDemux::process(Data data) { outputs[pkt->stream_index]->emit(out); } - Log::msg(Log::Info, "[LibavDemux] Exit from an external event."); + log(Info, "Exit from an external event."); } } diff --git a/src/lib_media/encode/jpegturbo_encode.cpp b/src/lib_media/encode/jpegturbo_encode.cpp index 2b418ddc..b7402810 100644 --- a/src/lib_media/encode/jpegturbo_encode.cpp +++ b/src/lib_media/encode/jpegturbo_encode.cpp @@ -1,5 +1,4 @@ #include "jpegturbo_encode.hpp" -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" extern "C" { #include @@ -45,7 +44,7 @@ void JPEGTurboEncode::process(Data data_) { auto jpegBuf = data->data(); unsigned long jpegSize; if (tjCompress2(jtHandle->get(), (unsigned char*)jpegBuf, w, 0/*pitch*/, h, TJPF_RGB, &buf, &jpegSize, TJSAMP_420, JPEGQuality, TJFLAG_FASTDCT) < 0) { - Log::msg(Log::Warning, "[jpegturbo_encode] error encountered while compressing."); + log(Warning, "error encountered while compressing."); return; } out->resize(jpegSize); diff --git a/src/lib_media/encode/libav_encode.cpp b/src/lib_media/encode/libav_encode.cpp index bcda18e5..a664ee6b 100644 --- a/src/lib_media/encode/libav_encode.cpp +++ b/src/lib_media/encode/libav_encode.cpp @@ -1,5 +1,4 @@ #include "libav_encode.hpp" -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "../common/pcm.hpp" #include @@ -30,7 +29,7 @@ void fps2NumDen(const double fps, int &num, int &den) { } else { num = (int)fps; den = 1; - Log::msg(Log::Warning, "[libav_encode] Frame rate '%s' was not recognized. Truncating to '%s'.", fps, num); + Log::msg(Warning, "Frame rate '%s' was not recognized. Truncating to '%s'.", fps, num); } } @@ -58,29 +57,29 @@ LibavEncode::LibavEncode(Type type, const LibavEncodeParams ¶ms) codecName = "acodec"; break; default: - throw std::runtime_error("Unknown encoder type. Failed."); + throw error("Unknown encoder type. 
Failed."); } /* parse the codec optionsDict */ ffpp::Dict codecDict; - buildAVDictionary("[libav_encode]", &codecDict, codecOptions.c_str(), "codec"); + buildAVDictionary(typeid(*this).name(), &codecDict, codecOptions.c_str(), "codec"); codecDict.set("threads", "auto"); /* parse other optionsDict*/ ffpp::Dict generalDict; - buildAVDictionary("[libav_encode]", &generalDict, generalOptions.c_str(), "other"); + buildAVDictionary(typeid(*this).name(), &generalDict, generalOptions.c_str(), "other"); /* find the encoder */ auto entry = generalDict.get(codecName); if(!entry) - throw std::runtime_error("Could not get codecName."); + throw error("Could not get codecName."); AVCodec *codec = avcodec_find_encoder_by_name(entry->value); if (!codec) - throw std::runtime_error(format("[libav_encode] codec '%s' not found, disable output.", entry->value)); + throw error(format("codec '%s' not found, disable output.", entry->value)); codecCtx = avcodec_alloc_context3(codec); if (!codecCtx) - throw std::runtime_error("[libav_encode] could not allocate the codec context."); + throw error("could not allocate the codec context."); /* parameters */ switch (type) { @@ -110,7 +109,7 @@ LibavEncode::LibavEncode(Type type, const LibavEncodeParams ¶ms) /* user extra params */ std::string extraParams; if (Parse::populateString("LibavOutputWriter", config, "extra_params", extraParams, false) == Parse::PopulateResult_Ok) { - Log::msg(Log::Debug, "[libav_encode] extra_params : " << extraParams.c_str()); + log(Debug, "extra_params : " << extraParams.c_str()); std::vector paramList; Util::split(extraParams.c_str(), ',', ¶mList); auto param = paramList.begin(); @@ -118,9 +117,9 @@ LibavEncode::LibavEncode(Type type, const LibavEncodeParams ¶ms) std::vector paramValue; Util::split(param->c_str(), '=', ¶mValue); if (paramValue.size() != 2) { - Log::msg(Log::Warning, "[libav_encode] extra_params : wrong param (" << paramValue.size() << " value detected, 2 expected) in " << param->c_str()); + log(Warning, "extra_params : wrong param (" << paramValue.size() << " value detected, 2 expected) in " << param->c_str()); } else { - Log::msg(Log::Debug, "[libav_encode] extra_params : detected param " << paramValue[0].c_str() << " with value " << paramValue[1].c_str() << " [" << param->c_str() << "]"); + log(Debug, "extra_params : detected param " << paramValue[0].c_str() << " with value " << paramValue[1].c_str() << " [" << param->c_str() << "]"); av_dict_set(&codecDict, paramValue[0].c_str(), paramValue[1].c_str(), 0); } } @@ -130,7 +129,7 @@ LibavEncode::LibavEncode(Type type, const LibavEncodeParams ¶ms) /* open it */ codecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER; //gives access to the extradata (e.g. H264 SPS/PPS, etc.) 
if (avcodec_open2(codecCtx, codec, &codecDict) < 0) - throw std::runtime_error("[libav_encode] could not open codec, disable output."); + throw error("could not open codec, disable output."); /* check all optionsDict have been consumed */ auto opt = stringDup(codecOptions.c_str()); @@ -139,7 +138,7 @@ LibavEncode::LibavEncode(Type type, const LibavEncodeParams ¶ms) AVDictionaryEntry *avde = nullptr; avde = codecDict.get(tok, avde); if (avde) { - Log::msg(Log::Warning, "[libav_encode] codec option \"%s\", value \"%s\" was ignored.", avde->key, avde->value); + log(Warning, "codec option \"%s\", value \"%s\" was ignored.", avde->key, avde->value); } tok = strtok(nullptr, "- "); } @@ -197,7 +196,7 @@ bool LibavEncode::processAudio(const DataPcm *data) { int gotPkt = 0; if (avcodec_encode_audio2(codecCtx, pkt, f, &gotPkt)) { - Log::msg(Log::Warning, "[libav_encode] error encountered while encoding audio frame %s.", frameNum); + log(Warning, "error encountered while encoding audio frame %s.", frameNum); return false; } if (gotPkt) { @@ -206,7 +205,7 @@ bool LibavEncode::processAudio(const DataPcm *data) { if (times.tryPop(time)) { out->setTime(time); } else { - Log::msg(Log::Warning, "[libav_encode] error encountered: more output packets than input. Discard", frameNum); + log(Warning, "error encountered: more output packets than input. Discard", frameNum); return false; } assert(pkt->size); @@ -237,7 +236,7 @@ bool LibavEncode::processVideo(const DataPicture *pic) { int gotPkt = 0; if (avcodec_encode_video2(codecCtx, pkt, f ? f->get() : nullptr, &gotPkt)) { - Log::msg(Log::Warning, "[libav_encode] error encountered while encoding video frame %s.", frameNum); + log(Warning, "error encountered while encoding video frame %s.", frameNum); return false; } else { if (gotPkt) { @@ -267,7 +266,7 @@ void LibavEncode::process(Data data) { case AVMEDIA_TYPE_AUDIO: { const auto pcmData = safe_cast(data); if (pcmData->getFormat() != *pcmFormat) - throw std::runtime_error("[LibavEncode] Incompatible audio data"); + throw error("Incompatible audio data"); processAudio(pcmData.get()); break; } diff --git a/src/lib_media/in/file.cpp b/src/lib_media/in/file.cpp index a7944ec4..4b604362 100644 --- a/src/lib_media/in/file.cpp +++ b/src/lib_media/in/file.cpp @@ -1,4 +1,3 @@ -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "file.hpp" @@ -10,13 +9,13 @@ namespace In { File::File(std::string const& fn) { file = fopen(fn.c_str(), "rb"); if (!file) - throw std::runtime_error(format("Can't open file for reading: %s", fn)); + throw error(format("Can't open file for reading: %s", fn)); fseek(file, 0, SEEK_END); auto size = ftell(file); fseek(file, 0, SEEK_SET); if (size > IOSIZE) - Log::msg(Log::Info, "File %s size is %s, will be sent by %s bytes chunks. Check the downstream modules are able to agregate data frames.", fn, size, IOSIZE); + log(Info, "File %s size is %s, will be sent by %s bytes chunks. 
Check the downstream modules are able to agregate data frames.", fn, size, IOSIZE); output = addOutput(new OutputDefault); } diff --git a/src/lib_media/in/sound_generator.cpp b/src/lib_media/in/sound_generator.cpp index 52bf90ee..aa48336d 100644 --- a/src/lib_media/in/sound_generator.cpp +++ b/src/lib_media/in/sound_generator.cpp @@ -1,4 +1,3 @@ -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "sound_generator.hpp" #include diff --git a/src/lib_media/in/video_generator.cpp b/src/lib_media/in/video_generator.cpp index d93c083a..c3efe19f 100644 --- a/src/lib_media/in/video_generator.cpp +++ b/src/lib_media/in/video_generator.cpp @@ -1,4 +1,3 @@ -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "video_generator.hpp" #include "../common/pcm.hpp" diff --git a/src/lib_media/mux/gpac_mux_m2ts.cpp b/src/lib_media/mux/gpac_mux_m2ts.cpp index d756fa68..2b84c08c 100644 --- a/src/lib_media/mux/gpac_mux_m2ts.cpp +++ b/src/lib_media/mux/gpac_mux_m2ts.cpp @@ -27,7 +27,7 @@ void GPACMuxMPEG2TS::declareStream(Data data) { auto input = addInput(new Input(this)); input->setMetadata(new MetadataPktLibavAudio(metadata2->getAVCodecContext())); } else { - throw std::runtime_error("[GPACMuxMPEG2TS] Stream creation failed: unknown type."); + throw error("Stream creation failed: unknown type."); } } diff --git a/src/lib_media/mux/gpac_mux_mp4.cpp b/src/lib_media/mux/gpac_mux_mp4.cpp index bbde4388..4fe1ed10 100644 --- a/src/lib_media/mux/gpac_mux_mp4.cpp +++ b/src/lib_media/mux/gpac_mux_mp4.cpp @@ -1,5 +1,4 @@ #include "gpac_mux_mp4.hpp" -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" extern "C" { @@ -25,7 +24,7 @@ static GF_Err avc_import_ffextradata(const u8 *extradata, const u64 extradataSiz AVCState avc; GF_BitStream *bs; if (!extradata || !extradataSize) { - Log::msg(Log::Warning, "No initial SPS/PPS provided."); + Log::msg(Warning, "No initial SPS/PPS provided."); return GF_OK; } bs = gf_bs_new((const char*)extradata, extradataSize, GF_BITSTREAM_READ); @@ -340,13 +339,13 @@ GPACMuxMP4::GPACMuxMP4(const std::string &baseName, uint64_t chunkDurationInMs, : m_useFragments(useSegments), m_useSegments(useSegments), m_chunkDuration(timescaleToClock(chunkDurationInMs, 1000)) { if (m_chunkDuration == 0) { - Log::msg(Log::Debug, "[GPAC Mux] Configuration: single file."); + log(Debug, "Configuration: single file."); assert(!useSegments); } else { if (useSegments) - Log::msg(Log::Debug, "[GPAC Mux] Configuration: segmented."); + log(Debug, "Configuration: segmented."); else - Log::msg(Log::Info, "[GPAC Mux] Configuration: chunks (independent ISOBMF files, not segmented)."); + log(Info, "Configuration: chunks (independent ISOBMF files, not segmented)."); } std::stringstream fileName; @@ -354,16 +353,16 @@ GPACMuxMP4::GPACMuxMP4(const std::string &baseName, uint64_t chunkDurationInMs, fileName << ".mp4"; if (baseName == "") { - throw std::runtime_error("[GPACMuxMP4] Unsupported memory output"); //open in memory - apparently we have to use the gmem:// protocol + throw error("Unsupported memory output"); //open in memory - apparently we have to use the gmem:// protocol } else { m_iso = gf_isom_open(fileName.str().c_str(), GF_ISOM_OPEN_WRITE, nullptr); if (!m_iso) - throw std::runtime_error(format("[GPACMuxMP4] Cannot open iso file %s")); + throw error(format("Cannot open iso file %s")); } GF_Err e = gf_isom_set_storage_mode(m_iso, GF_ISOM_STORE_INTERLEAVED); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Cannot make iso file %s interleaved", 
fileName.str())); + throw error(format("Cannot make iso file %s interleaved", fileName.str())); output = addOutput(new OutputDataDefault); } @@ -378,12 +377,12 @@ void GPACMuxMP4::closeSegment(bool isLastSeg) { GF_Err e = gf_isom_close_segment(m_iso, 0, 0, 0, 0, 0, GF_FALSE, (Bool)isLastSeg, GF_4CC('e', 'o', 'd', 's'), nullptr, nullptr); #endif if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] %s: gf_isom_close_segment", gf_error_to_string(e))); + throw error(format("%s: gf_isom_close_segment", gf_error_to_string(e))); m_lastChunkSize = gf_isom_get_file_size(m_iso); sendOutput(); - Log::msg(Log::Info, "[GPACMuxMP4] Segment %s completed (size %s) (startsWithSAP=%s).", m_chunkName, m_lastChunkSize, m_chunkStartsWithRAP); + log(Info, "Segment %s completed (size %s) (startsWithSAP=%s)", m_chunkName, m_lastChunkSize, m_chunkStartsWithRAP); } } @@ -398,7 +397,7 @@ GPACMuxMP4::~GPACMuxMP4() { GF_Err e; e = gf_isom_close(m_iso); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] %s: gf_isom_close", gf_error_to_string(e))); + throw error(format("%s: gf_isom_close", gf_error_to_string(e))); } void GPACMuxMP4::setupFragments() { @@ -407,7 +406,7 @@ void GPACMuxMP4::setupFragments() { if (m_useFragments) { e = gf_isom_setup_track_fragment(m_iso, m_trackId, 1, 1, 0, 0, 0, 0); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Cannot setup track as fragmented: %s", gf_error_to_string(e))); + throw error(format("Cannot setup track as fragmented: %s", gf_error_to_string(e))); } //gf_isom_add_track_to_root_od(video_output_file->isof, 1); @@ -416,32 +415,32 @@ void GPACMuxMP4::setupFragments() { if (m_useSegments) { e = gf_isom_finalize_for_fragment(m_iso, 1); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Cannot prepare track for movie fragmentation: %s", gf_error_to_string(e))); + throw error(format("Cannot prepare track for movie fragmentation: %s", gf_error_to_string(e))); std::stringstream ss; ss << gf_isom_get_filename(m_iso) << "_" << m_chunkNum+1; m_chunkName = ss.str(); e = gf_isom_start_segment(m_iso, (char*)m_chunkName.c_str(), GF_TRUE); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Impossible to start segment %s (%s): %s", m_chunkNum, m_chunkName, gf_error_to_string(e))); + throw error(format("Impossible to start segment %s (%s): %s", m_chunkNum, m_chunkName, gf_error_to_string(e))); } else { e = gf_isom_finalize_for_fragment(m_iso, 0); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Cannot prepare track for movie fragmentation: %s", gf_error_to_string(e))); + throw error(format("Cannot prepare track for movie fragmentation: %s", gf_error_to_string(e))); } e = gf_isom_start_fragment(m_iso, GF_TRUE); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Impossible to create the moof: %s", gf_error_to_string(e))); + throw error(format("Impossible to create the moof: %s", gf_error_to_string(e))); e = gf_isom_set_traf_base_media_decode_time(m_iso, m_trackId, m_DTS); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Impossible to create TFDT %s: %s", gf_net_get_ntp_ts(), gf_error_to_string(e))); + throw error(format("Impossible to create TFDT %s: %s", gf_net_get_ntp_ts(), gf_error_to_string(e))); #ifndef CHROME_DASHJS_2_0_COMPAT e = gf_isom_set_fragment_reference_time(m_iso, m_trackId, gf_net_get_ntp_ts(), 0); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Impossible to create UTC marquer: %s", gf_error_to_string(e))); + throw error(format("Impossible to create UTC 
marquer: %s", gf_error_to_string(e))); #endif } } @@ -453,7 +452,7 @@ void GPACMuxMP4::declareStreamAudio(std::shared_ptr GF_ESD *esd = gf_odf_desc_esd_new(2); if (!esd) - throw std::runtime_error("[GPACMuxMP4] Cannot create GF_ESD"); + throw error(format("Cannot create GF_ESD")); esd->decoderConfig = (GF_DecoderConfig *)gf_odf_desc_new(GF_ODF_DCD_TAG); esd->slConfig = (GF_SLConfig *)gf_odf_desc_new(GF_ODF_SLC_TAG); @@ -484,7 +483,7 @@ void GPACMuxMP4::declareStreamAudio(std::shared_ptr assert(e == GF_OK);*/ } else { if (metadata->getCodecName() != "mp2") { - Log::msg(Log::Warning, "Unlisted codec, setting GPAC_OTI_AUDIO_MPEG1 descriptor."); + log(Warning, "Unlisted codec, setting GPAC_OTI_AUDIO_MPEG1 descriptor."); } esd->decoderConfig->objectTypeIndication = GPAC_OTI_AUDIO_MPEG1; esd->decoderConfig->bufferSizeDB = 20; @@ -504,30 +503,30 @@ void GPACMuxMP4::declareStreamAudio(std::shared_ptr } trackNum = gf_isom_new_track(m_iso, esd->ESID, GF_ISOM_MEDIA_AUDIO, metadata->getSampleRate()); - Log::msg(Log::Warning, "TimeScale: %s", metadata->getSampleRate()); + log(Warning, "TimeScale: %s", metadata->getSampleRate()); if (!trackNum) - throw std::runtime_error("[GPACMuxMP4] Cannot create new track"); + throw error(format("Cannot create new track")); m_trackId = gf_isom_get_track_id(m_iso, trackNum); e = gf_isom_set_track_enabled(m_iso, trackNum, GF_TRUE); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] gf_isom_set_track_enabled: %s", gf_error_to_string(e))); + throw error(format("gf_isom_set_track_enabled: %s", gf_error_to_string(e))); e = gf_isom_new_mpeg4_description(m_iso, trackNum, esd, nullptr, nullptr, &di); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] gf_isom_new_mpeg4_description: %s", gf_error_to_string(e))); + throw error(format("gf_isom_new_mpeg4_description: %s", gf_error_to_string(e))); gf_odf_desc_del((GF_Descriptor *)esd); esd = nullptr; e = gf_isom_set_audio_info(m_iso, trackNum, di, metadata->getSampleRate(), metadata->getNumChannels(), metadata->getBitsPerSample()); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] gf_isom_set_audio_info: %s", gf_error_to_string(e))); + throw error(format("gf_isom_set_audio_info: %s", gf_error_to_string(e))); e = gf_isom_set_pl_indication(m_iso, GF_ISOM_PL_AUDIO, acfg.audioPL); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Container format import failed: %s", gf_error_to_string(e))); + throw error(format("Container format import failed: %s", gf_error_to_string(e))); sampleRate = metadata->getSampleRate(); @@ -540,12 +539,12 @@ void GPACMuxMP4::declareStreamAudio(std::shared_ptr void GPACMuxMP4::declareStreamVideo(std::shared_ptr metadata) { u32 trackNum = gf_isom_new_track(m_iso, 0, GF_ISOM_MEDIA_VISUAL, metadata->getTimeScale() * TIMESCALE_MUL); if (!trackNum) - throw std::runtime_error("[GPACMuxMP4] Cannot create new track"); + throw error(format("Cannot create new track")); m_trackId = gf_isom_get_track_id(m_iso, trackNum); GF_Err e = gf_isom_set_track_enabled(m_iso, trackNum, GF_TRUE); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Cannot enable track: %s", gf_error_to_string(e))); + throw error(format("Cannot enable track: %s", gf_error_to_string(e))); const uint8_t *extradata; size_t extradataSize; @@ -555,29 +554,29 @@ void GPACMuxMP4::declareStreamVideo(std::shared_ptr if (metadata->getAVCodecContext()->codec_id == AV_CODEC_ID_H264) { GF_AVCConfig *avccfg = gf_odf_avc_cfg_new(); if (!avccfg) - throw std::runtime_error("[GPACMuxMP4] Container format 
import failed (AVC)"); + throw error(format("Container format import failed (AVC)")); e = avc_import_ffextradata(extradata, extradataSize, avccfg); if (e == GF_OK) { e = gf_isom_avc_config_new(m_iso, trackNum, avccfg, nullptr, nullptr, &di); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Cannot create AVC config: %s", gf_error_to_string(e))); + throw error(format("Cannot create AVC config: %s", gf_error_to_string(e))); gf_odf_avc_cfg_del(avccfg); } } else if (metadata->getAVCodecContext()->codec_id == AV_CODEC_ID_H265) { GF_HEVCConfig *hevccfg = gf_odf_hevc_cfg_new(); if (!hevccfg) - throw std::runtime_error("[GPACMuxMP4] Container format import failed (HEVC)"); + throw error(format("Container format import failed (HEVC)")); e = hevc_import_ffextradata(extradata, extradataSize, hevccfg); if (e == GF_OK) { e = gf_isom_hevc_config_new(m_iso, trackNum, hevccfg, nullptr, nullptr, &di); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Cannot create AVC config: %s", gf_error_to_string(e))); + throw error(format("Cannot create AVC config: %s", gf_error_to_string(e))); gf_odf_hevc_cfg_del(hevccfg); } } else { - throw std::runtime_error("[GPACMuxMP4] Unknown codec"); + throw error(format("Unknown codec")); } if (e) { if (e == GF_NON_COMPLIANT_BITSTREAM) { @@ -594,11 +593,11 @@ void GPACMuxMP4::declareStreamVideo(std::shared_ptr e = gf_isom_new_mpeg4_description(m_iso, trackNum, esd, nullptr, nullptr, &di); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Cannot create MPEG-4 config: %s", gf_error_to_string(e))); + throw error(format("Cannot create MPEG-4 config: %s", gf_error_to_string(e))); gf_odf_desc_del((GF_Descriptor*)esd); isAnnexB = false; } else { - throw std::runtime_error("[GPACMuxMP4] Container format import failed"); + throw error(format("Container format import failed")); } } @@ -613,7 +612,7 @@ void GPACMuxMP4::declareStreamVideo(std::shared_ptr if (m_useSegments) { e = gf_isom_avc_set_inband_config(m_iso, trackNum, di); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Cannot set inband PPS/SPS for AVC track: %s", gf_error_to_string(e))); + throw error(format("Cannot set inband PPS/SPS for AVC track: %s", gf_error_to_string(e))); } #endif @@ -630,7 +629,7 @@ void GPACMuxMP4::declareStream(Data data) { } else if (auto audio = std::dynamic_pointer_cast(metadata)) { declareStreamAudio(audio); } else { - throw std::runtime_error("[GPACMuxMP4] Stream creation failed: unknown type."); + throw error(format("Stream creation failed: unknown type.")); } } @@ -640,7 +639,7 @@ void GPACMuxMP4::sendOutput() { switch (gf_isom_get_media_type(m_iso, gf_isom_get_track_by_id(m_iso, m_trackId))) { case GF_ISOM_MEDIA_VISUAL: streamType = VIDEO_PKT; mimeType = "video/mp4"; break; case GF_ISOM_MEDIA_AUDIO: streamType = AUDIO_PKT; mimeType = "audio/mp4"; break; - default: throw std::runtime_error("[GPACMuxMP4] Segment contains neither audio nor video"); + default: throw error(format("Segment contains neither audio nor video")); } Bool isInband = #ifdef AVC_INBAND_CONFIG @@ -650,7 +649,7 @@ void GPACMuxMP4::sendOutput() { #endif char codecName[256]; //FIXME: security issue on the GPAC API GF_Err e = gf_media_get_rfc_6381_codec_name(m_iso, gf_isom_get_track_by_id(m_iso, m_trackId), codecName, isInband, GF_FALSE); - if (e) throw std::runtime_error("[GPACMuxMP4] Could not compute codec name (RFC 6381)"); + if (e) throw error(format("Could not compute codec name (RFC 6381)")); auto out = output->getBuffer(0); auto metadata = 
std::make_shared(m_chunkName, streamType, mimeType, gf_strdup(codecName), m_curFragDur, m_lastChunkSize, m_chunkStartsWithRAP); @@ -659,7 +658,7 @@ void GPACMuxMP4::sendOutput() { switch (gf_isom_get_media_type(m_iso, gf_isom_get_track_by_id(m_iso, m_trackId))) { case GF_ISOM_MEDIA_VISUAL: metadata->resolution[0] = resolution[0]; metadata->resolution[1] = resolution[1]; break; case GF_ISOM_MEDIA_AUDIO: metadata->sampleRate = sampleRate; break; - default: throw std::runtime_error("[GPACMuxMP4] Segment contains neither audio nor video"); + default: throw error(format("Segment contains neither audio nor video")); } out->setTime(m_DTS, mediaTimescale); output->emit(out); @@ -678,28 +677,28 @@ void GPACMuxMP4::addSample(gpacpp::IsoSample &sample, const uint64_t dataDuratio if (m_useSegments) { m_chunkNum++; m_chunkStartsWithRAP = sample.IsRAP == RAP; - Log::msg(Log::Warning, "%s", m_chunkStartsWithRAP); + log(Warning, "%s", m_chunkStartsWithRAP); std::stringstream ss; ss << gf_isom_get_filename(m_iso) << "_" << m_chunkNum+1; m_chunkName = ss.str(); e = gf_isom_start_segment(m_iso, (char*)m_chunkName.c_str(), GF_TRUE); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Impossible to start the segment %s (%s): %s", m_chunkNum, m_chunkName, gf_error_to_string(e))); + throw error(format("Impossible to start the segment %s (%s): %s", m_chunkNum, m_chunkName, gf_error_to_string(e))); } e = gf_isom_start_fragment(m_iso, GF_TRUE); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Impossible to start the fragment: %s", gf_error_to_string(e))); + throw error(format("Impossible to start the fragment: %s", gf_error_to_string(e))); e = gf_isom_set_traf_base_media_decode_time(m_iso, m_trackId, sample.DTS); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Impossible to create TFDT %s: %s", gf_net_get_ntp_ts(), gf_error_to_string(e))); + throw error(format("Impossible to create TFDT %s: %s", gf_net_get_ntp_ts(), gf_error_to_string(e))); #ifndef CHROME_DASHJS_2_0_COMPAT e = gf_isom_set_fragment_reference_time(m_iso, m_trackId, gf_net_get_ntp_ts(), sample.DTS + sample.CTS_Offset); if (e != GF_OK) - throw std::runtime_error(format("[GPACMuxMP4] Impossible to set the UTC marquer %s: %s", m_chunkNum, gf_error_to_string(e))); + throw error(format("Impossible to set the UTC marquer %s: %s", m_chunkNum, gf_error_to_string(e))); #endif const u64 oneFragDurInTimescale = clockToTimescale(m_chunkDuration, mediaTimescale); @@ -708,13 +707,13 @@ void GPACMuxMP4::addSample(gpacpp::IsoSample &sample, const uint64_t dataDuratio e = gf_isom_fragment_add_sample(m_iso, m_trackId, &sample, 1, (u32)dataDurationInTs, 0, 0, GF_FALSE); if (e != GF_OK) { - Log::msg(Log::Error, "%s: gf_isom_fragment_add_sample", gf_error_to_string(e)); + log(Error, "%s: gf_isom_fragment_add_sample", gf_error_to_string(e)); return; } } else { GF_Err e = gf_isom_add_sample(m_iso, m_trackId, 1, &sample); if (e != GF_OK) { - Log::msg(Log::Error, "%s: gf_isom_add_sample", gf_error_to_string(e)); + log(Error, "%s: gf_isom_add_sample", gf_error_to_string(e)); return; } } @@ -747,7 +746,7 @@ void GPACMuxMP4::process() { sample.dataLength = bufLen; sample.setDataOwnership(false); } else { - Log::msg(Log::Warning, "[GPACMuxMP4] only audio or video supported yet"); + log(Warning, "only audio or video supported yet"); return; } @@ -766,7 +765,7 @@ void GPACMuxMP4::process() { if (dataDurationInTs <= 0) { dataDurationInTs = 1; } - Log::msg(Log::Debug, "[GPACMuxMP4] VFR: adding sample with duration %ss", dataDurationInTs / 
(double)mediaTimescale); + log(Debug, "VFR: adding sample with duration %ss", dataDurationInTs / (double)mediaTimescale); } #else /*wait to have two samples - FIXME: should be in a separate class + mast segment is never processed (should be in flush())*/ diff --git a/src/lib_media/mux/libav_mux.cpp b/src/lib_media/mux/libav_mux.cpp index c3e7b3ee..6d3f6d49 100644 --- a/src/lib_media/mux/libav_mux.cpp +++ b/src/lib_media/mux/libav_mux.cpp @@ -1,5 +1,4 @@ #include "libav_mux.hpp" -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "../common/libav.hpp" #include @@ -26,20 +25,20 @@ LibavMux::LibavMux(const std::string &baseName) /* parse the format optionsDict */ std::string optionsStr = "-format mp4"; AVDictionary *optionsDict = nullptr; - buildAVDictionary("[libav_mux]", &optionsDict, optionsStr.c_str(), "format"); + buildAVDictionary(typeid(*this).name(), &optionsDict, optionsStr.c_str(), "format"); /* setup container */ AVOutputFormat *of = av_guess_format(av_dict_get(optionsDict, "format", nullptr, 0)->value, nullptr, nullptr); if (!of) { av_dict_free(&optionsDict); - throw std::runtime_error("[libav_mux] couldn't guess container from file extension"); + throw error("couldn't guess container from file extension"); } av_dict_free(&optionsDict); /* output format context */ m_formatCtx = avformat_alloc_context(); if (!m_formatCtx) - throw std::runtime_error("[libav_mux] format context couldn't be allocated."); + throw error("format context couldn't be allocated."); m_formatCtx->oformat = of; std::stringstream fileName; @@ -53,7 +52,7 @@ LibavMux::LibavMux(const std::string &baseName) if (!(m_formatCtx->flags & AVFMT_NOFILE)) { if (avio_open(&m_formatCtx->pb, fileName.str().c_str(), AVIO_FLAG_READ_WRITE) < 0) { avformat_free_context(m_formatCtx); - throw std::runtime_error(format("[libav_mux] could not open %s, disable output.", baseName)); + throw error(format("could not open %s, disable output.", baseName)); } strncpy(m_formatCtx->filename, fileName.str().c_str(), sizeof(m_formatCtx->filename)); } @@ -76,7 +75,7 @@ void LibavMux::declareStream(Data data) { if (auto metadata = std::dynamic_pointer_cast(metadata_)) { AVStream *avStream = avformat_new_stream(m_formatCtx, metadata->getAVCodecContext()->codec); if (!avStream) - throw std::runtime_error("[LibavMux] Stream creation failed (1)."); + throw error("Stream creation failed (1)."); m_formatCtx->streams[0]->codec->time_base = metadata->getAVCodecContext()->time_base; //FIXME: [0]: not a mux yet... 
m_formatCtx->streams[0]->codec->width = metadata->getAVCodecContext()->width; @@ -89,23 +88,23 @@ void LibavMux::declareStream(Data data) { } else if (auto metadata2 = std::dynamic_pointer_cast(metadata_)) { AVStream *avStream = avformat_new_stream(m_formatCtx, metadata2->getAVCodecContext()->codec); if (!avStream) - throw std::runtime_error("[LibavMux] Stream creation failed (2)."); + throw error("Stream creation failed (2)."); m_formatCtx->streams[0]->codec->sample_rate = metadata2->getAVCodecContext()->sample_rate; auto input = addInput(new Input(this)); input->setMetadata(new MetadataPktLibavAudio(metadata2->getAVCodecContext())); } else { - throw std::runtime_error("[LibavMux] Stream creation failed: unknown type."); + throw error("Stream creation failed: unknown type."); } } void LibavMux::ensureHeader() { if (!m_headerWritten) { if (avformat_write_header(m_formatCtx, nullptr) != 0) { - Log::msg(Log::Warning, "[libav_mux] fatal error: can't write the container header"); + log(Warning, "fatal error: can't write the container header"); for (unsigned i = 0; i < m_formatCtx->nb_streams; i++) { if (m_formatCtx->streams[i]->codec && m_formatCtx->streams[i]->codec->codec) { - Log::msg(Log::Debug, "[libav_mux] codec[%s] is \"%s\" (%s)", i, m_formatCtx->streams[i]->codec->codec->name, m_formatCtx->streams[i]->codec->codec->long_name); + log(Debug, "codec[%s] is \"%s\" (%s)", i, m_formatCtx->streams[i]->codec->codec->name, m_formatCtx->streams[i]->codec->codec->long_name); } } } else { @@ -134,7 +133,7 @@ void LibavMux::process() { /* write the compressed frame to the container output file */ pkt->stream_index = avStream->index; if (av_interleaved_write_frame(m_formatCtx, pkt) != 0) { - Log::msg(Log::Warning, "[libav_mux] can't write video frame."); + log(Warning, "can't write video frame."); return; } } diff --git a/src/lib_media/out/file.cpp b/src/lib_media/out/file.cpp index 86bd3471..44b0b91e 100644 --- a/src/lib_media/out/file.cpp +++ b/src/lib_media/out/file.cpp @@ -1,4 +1,3 @@ -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "file.hpp" @@ -8,7 +7,7 @@ namespace Out { File::File(std::string const& path) { file = fopen(path.c_str(), "wb"); if (!file) - throw std::runtime_error(format("Can't open file for writing: %s", path)); + throw error(format("Can't open file for writing: %s", path)); addInput(new Input(this)); } diff --git a/src/lib_media/out/null.cpp b/src/lib_media/out/null.cpp index f6affe8b..acb12dd0 100644 --- a/src/lib_media/out/null.cpp +++ b/src/lib_media/out/null.cpp @@ -1,5 +1,4 @@ #include "null.hpp" -#include "lib_utils/log.hpp" namespace Modules { diff --git a/src/lib_media/out/print.cpp b/src/lib_media/out/print.cpp index 69cfdfe5..566d1935 100644 --- a/src/lib_media/out/print.cpp +++ b/src/lib_media/out/print.cpp @@ -1,5 +1,4 @@ #include "print.hpp" -#include "lib_utils/log.hpp" namespace Modules { diff --git a/src/lib_media/render/sdl_audio.cpp b/src/lib_media/render/sdl_audio.cpp index cc832062..4842291e 100644 --- a/src/lib_media/render/sdl_audio.cpp +++ b/src/lib_media/render/sdl_audio.cpp @@ -1,6 +1,5 @@ #include "sdl_audio.hpp" #include "render_common.hpp" -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "lib_modules/utils/stranded_pool_executor.hpp" #include "SDL2/SDL.h" @@ -31,7 +30,7 @@ namespace Render { bool SDLAudio::reconfigure(PcmFormat const * const pcmData) { if (pcmData->numPlanes > 1) { - Log::msg(Log::Warning, "[SDLAudio render] Support for planar audio is buggy. 
Please set an audio converter."); + log(Warning, "Support for planar audio is buggy. Please set an audio converter."); return false; } @@ -44,12 +43,12 @@ bool SDLAudio::reconfigure(PcmFormat const * const pcmData) { SDL_CloseAudio(); if (SDL_OpenAudio(&audioSpec, &realSpec) < 0) { - Log::msg(Log::Warning, "[SDLAudio render] Couldn't open audio: %s", SDL_GetError()); + log(Warning, "Couldn't open audio: %s", SDL_GetError()); return false; } m_Latency = timescaleToClock((uint64_t)realSpec.samples, realSpec.freq); - Log::msg(Log::Info, "[SDLAudio render] %s Hz %s ms", realSpec.freq, m_Latency * 1000.0f / IClock::Rate); + log(Info, "%s Hz %s ms", realSpec.freq, m_Latency * 1000.0f / IClock::Rate); pcmFormat = uptr(new PcmFormat(*pcmData)); @@ -62,7 +61,7 @@ SDLAudio::SDLAudio(IClock* clock) : m_clock(clock), pcmFormat(new PcmFormat(44100, AudioLayout::Stereo, AudioSampleFormat::S16, AudioStruct::Interleaved)), m_converter(new Transform::AudioConvert(*pcmFormat)), m_FifoTime(0) { if (!reconfigure(pcmFormat.get())) - throw std::runtime_error("Audio output creation failed"); + throw error("Audio output creation failed"); auto input = addInput(new Input(this)); input->setMetadata(new MetadataRawAudio); @@ -113,13 +112,13 @@ void SDLAudio::fillAudio(uint8_t *stream, int len) { if (relativeSamplePosition < -audioJitterTolerance) { auto const numSamplesToDrop = std::min(fifoSamplesToRead(), -relativeSamplePosition); - Log::msg(Log::Warning, "[SDLAudio render] must drop fifo data (%s ms)", numSamplesToDrop * 1000.0f / pcmFormat->sampleRate); + log(Warning, "must drop fifo data (%s ms)", numSamplesToDrop * 1000.0f / pcmFormat->sampleRate); fifoConsumeSamples((size_t)numSamplesToDrop); } if (relativeSamplePosition > audioJitterTolerance) { auto const numSilenceSamples = std::min(numSamplesToProduce, relativeSamplePosition); - Log::msg(Log::Warning, "[SDLAudio render] insert silence (%s ms)", numSilenceSamples * 1000.0f / pcmFormat->sampleRate); + log(Warning, "insert silence (%s ms)", numSilenceSamples * 1000.0f / pcmFormat->sampleRate); silenceSamples(stream, (size_t)numSilenceSamples); numSamplesToProduce -= numSilenceSamples; } @@ -132,7 +131,7 @@ void SDLAudio::fillAudio(uint8_t *stream, int len) { } if (numSamplesToProduce > 0) { - Log::msg(Log::Warning, "[SDLAudio render] underflow"); + log(Warning, "underflow"); silenceSamples(stream, (size_t)numSamplesToProduce); } } diff --git a/src/lib_media/render/sdl_common.cpp b/src/lib_media/render/sdl_common.cpp index 350f0a7e..32ea08a2 100644 --- a/src/lib_media/render/sdl_common.cpp +++ b/src/lib_media/render/sdl_common.cpp @@ -1,4 +1,3 @@ -#include "lib_utils/log.hpp" #include "SDL2/SDL.h" #ifdef __linux__ @@ -13,10 +12,10 @@ class SdlInit { sigaction(SIGINT, nullptr, &action); #endif if (SDL_InitSubSystem(SDL_INIT_AUDIO | SDL_INIT_NOPARACHUTE) == -1) - throw std::runtime_error(format("[SDLAudio render] Couldn't initialize: %s", SDL_GetError())); + throw std::runtime_error(format("Couldn't initialize: %s", SDL_GetError())); if (SDL_InitSubSystem(SDL_INIT_VIDEO | SDL_INIT_NOPARACHUTE) == -1) - throw std::runtime_error(format("[SDLVideo render] Couldn't initialize: %s", SDL_GetError())); + throw std::runtime_error(format("Couldn't initialize: %s", SDL_GetError())); #ifdef __linux__ sigaction(SIGINT, &action, nullptr); diff --git a/src/lib_media/render/sdl_video.cpp b/src/lib_media/render/sdl_video.cpp index 90c2de75..913018c1 100644 --- a/src/lib_media/render/sdl_video.cpp +++ b/src/lib_media/render/sdl_video.cpp @@ -1,5 +1,4 @@ #include "sdl_video.hpp" 
-#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "SDL2/SDL.h" #include "render_common.hpp" @@ -13,7 +12,7 @@ Uint32 pixelFormat2SDLFormat(const Modules::PixelFormat format) { case YUV420P: return SDL_PIXELFORMAT_IYUV; case YUYV422: return SDL_PIXELFORMAT_YUY2; case RGB24: return SDL_PIXELFORMAT_RGB24; - default: throw std::runtime_error("[SDLVideo] Pixel format not supported."); + default: throw std::runtime_error("Pixel format not supported."); } } } @@ -30,12 +29,12 @@ void SDLVideo::doRender() { pictureFormat.format = YUV420P; window = SDL_CreateWindow("Signals SDLVideo renderer", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, pictureFormat.res.width, pictureFormat.res.height, SDL_WINDOW_SHOWN | SDL_WINDOW_RESIZABLE); if (!window) - throw std::runtime_error(format("[SDLVideo render] Couldn't set create window: %s", SDL_GetError())); + throw error(format("Couldn't set create window: %s", SDL_GetError())); renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC); if (!renderer) { SDL_DestroyWindow(window); - throw std::runtime_error(format("[SDLVideo render] Couldn't set create renderer: %s", SDL_GetError())); + throw error(format("Couldn't set create renderer: %s", SDL_GetError())); } m_dataQueue.push(nullptr); //unlock the constructor @@ -104,14 +103,14 @@ bool SDLVideo::processOneFrame(Data data) { } void SDLVideo::createTexture() { - Log::msg(Log::Info, format("[SDLVideo render] %sx%s", pictureFormat.res.width, pictureFormat.res.height)); + log(Info, format("%sx%s", pictureFormat.res.width, pictureFormat.res.height)); if (texture) SDL_DestroyTexture(texture); texture = SDL_CreateTexture(renderer, pixelFormat2SDLFormat(pictureFormat.format), SDL_TEXTUREACCESS_STATIC, pictureFormat.res.width, pictureFormat.res.height); if (!texture) - throw std::runtime_error(format("[SDLVideo render] Couldn't set create texture: %s", SDL_GetError())); + throw error(format("Couldn't set create texture: %s", SDL_GetError())); displayrect->x = 0; displayrect->y = 0; diff --git a/src/lib_media/stream/apple_hls.cpp b/src/lib_media/stream/apple_hls.cpp index ed3d7c87..101d2dba 100644 --- a/src/lib_media/stream/apple_hls.cpp +++ b/src/lib_media/stream/apple_hls.cpp @@ -1,6 +1,5 @@ #include "apple_hls.hpp" #include "lib_modules/core/clock.hpp" -#include "lib_utils/log.hpp" #include "../common/libav.hpp" @@ -50,7 +49,7 @@ void Apple_HLS::HLSThread() { if (type == Live) { auto dur = std::chrono::milliseconds(nextInMs); - Log::msg(Log::Info, "[Apple_HLS] Going to sleep for %s ms.", std::chrono::duration_cast(dur).count()); + log(Info, "Going to sleep for %s ms.", std::chrono::duration_cast(dur).count()); std::this_thread::sleep_for(dur); } } diff --git a/src/lib_media/stream/mpeg_dash.cpp b/src/lib_media/stream/mpeg_dash.cpp index f6451978..a0e777ee 100644 --- a/src/lib_media/stream/mpeg_dash.cpp +++ b/src/lib_media/stream/mpeg_dash.cpp @@ -1,6 +1,5 @@ #include "mpeg_dash.hpp" #include "lib_modules/core/clock.hpp" -#include "lib_utils/log.hpp" #include "lib_utils/tools.hpp" #include "../out/file.hpp" #include "../common/libav.hpp" @@ -55,7 +54,7 @@ MPEG_DASH::~MPEG_DASH() { //needed because of the use of system time for live - otherwise awake on data as for any multi-input module //TODO: add clock to the scheduler, see #14 void MPEG_DASH::DASHThread() { - Log::msg(Log::Info, "[MPEG_DASH] start processing at UTC: %s.", gf_net_get_utc()); + log(Info, "start processing at UTC: %s.", gf_net_get_utc()); Data data; for (;;) { @@ -68,7 +67,7 @@ void 
@@ -68,7 +67,7 @@ void MPEG_DASH::DASHThread() {
             } else {
                 qualities[i].meta = safe_cast(data->getMetadata());
                 if (!qualities[i].meta)
-                    throw std::runtime_error(format("[MPEG_DASH] Unknown data received on input %s", i).c_str());
+                    throw error(format("Unknown data received on input %s", i).c_str());
                 auto const numSeg = totalDurationInMs / segDurationInMs;
                 qualities[i].bitrate_in_bps = (qualities[i].meta->getSize() * 8 + qualities[i].bitrate_in_bps * numSeg) / (numSeg + 1);
             }
@@ -79,13 +78,13 @@ void MPEG_DASH::DASHThread() {
         generateMPD();
         if (type == Live) {
             if (!mpd->write(mpdPath))
-                Log::msg(Log::Warning, "[MPEG_DASH] Can't write MPD at %s (1). Check you have sufficient rights.", mpdPath);
+                log(Warning, "Can't write MPD at %s (1). Check you have sufficient rights.", mpdPath);
         }
-        Log::msg(Log::Info, "[MPEG_DASH] Processes segment (total processed: %ss, UTC: %s (deltaAST=%s).", (double)totalDurationInMs / 1000, gf_net_get_utc(), gf_net_get_utc() - mpd->mpd->availabilityStartTime);
+        log(Info, "Processed segment (total processed: %ss, UTC: %s, deltaAST=%s).", (double)totalDurationInMs / 1000, gf_net_get_utc(), gf_net_get_utc() - mpd->mpd->availabilityStartTime);
 
         if (type == Live) {
             auto dur = std::chrono::milliseconds(mpd->mpd->availabilityStartTime + totalDurationInMs - gf_net_get_utc());
-            Log::msg(Log::Info, "[MPEG_DASH] Going to sleep for %s ms.", std::chrono::duration_cast<std::chrono::milliseconds>(dur).count());
+            log(Info, "Going to sleep for %s ms.", std::chrono::duration_cast<std::chrono::milliseconds>(dur).count());
             std::this_thread::sleep_for(dur);
         }
     }
@@ -96,7 +95,7 @@ void MPEG_DASH::DASHThread() {
     mpd->mpd->media_presentation_duration = totalDurationInMs;
     generateMPD();
     if (!mpd->write(mpdPath))
-        Log::msg(Log::Warning, "[MPEG_DASH] Can't write MPD at %s (2). Check you have sufficient rights.", mpdPath);
+        log(Warning, "Can't write MPD at %s (2). Check you have sufficient rights.", mpdPath);
 }
 
 void MPEG_DASH::process() {
diff --git a/src/lib_media/transform/audio_convert.cpp b/src/lib_media/transform/audio_convert.cpp
index 932b120c..e6bf7dd4 100644
--- a/src/lib_media/transform/audio_convert.cpp
+++ b/src/lib_media/transform/audio_convert.cpp
@@ -1,4 +1,3 @@
-#include "lib_utils/log.hpp"
 #include "lib_utils/tools.hpp"
 #include "audio_convert.hpp"
 #include "lib_ffpp/ffpp.hpp"
@@ -63,10 +62,10 @@ void AudioConvert::process(Data data) {
     if (audioData) {
         if (audioData->getFormat() != srcPcmFormat) {
             if (autoConfigure) {
-                Log::msg(Log::Info, "[AudioConvert] Incompatible input audio data. Reconfiguring.");
+                log(Info, "Incompatible input audio data. Reconfiguring.");
                 reconfigure(audioData->getFormat());
             } else {
-                throw std::runtime_error("[AudioConvert] Incompatible input audio data.");
+                throw error("Incompatible input audio data.");
             }
             accumulatedTimeInDstSR = clockToTimescale(data->getTime(), srcPcmFormat.sampleRate);
         }
diff --git a/src/lib_media/transform/restamp.cpp b/src/lib_media/transform/restamp.cpp
index 559dfbc6..dcb0641a 100644
--- a/src/lib_media/transform/restamp.cpp
+++ b/src/lib_media/transform/restamp.cpp
@@ -33,10 +33,10 @@ namespace Modules {
         }
         break;
     default:
-        throw std::runtime_error("[Restamp] Unknown mode");
+        throw error("Unknown mode");
     }
 
-    Log::msg(Log::Debug, "[Restamp] %s -> %sms", (double)data->getTime() / IClock::Rate, (double)(time + offset) / IClock::Rate);
+    log(Debug, "%ss -> %ss", (double)data->getTime() / IClock::Rate, (double)(time + offset) / IClock::Rate);
     const_cast(data.get())->setTime(time + offset); //FIXME: we should have input&output on the same allocator
     getOutput(0)->emit(data);
 }
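The Restamp trace above divides timestamps by IClock::Rate, so the values it prints are seconds, not clock ticks. A worked sketch, assuming IClock::Rate is the 180 kHz clock suggested by the durationIn180k fields elsewhere in this patch (ClockRate is a stand-in name):

    #include <cstdint>
    #include <cstdio>

    int main() {
        uint64_t const ClockRate = 180000; // stand-in for IClock::Rate
        uint64_t const time = 90000;       // incoming timestamp: 0.5 s worth of ticks
        int64_t const offset = 180000;     // restamp shifts by exactly one second
        std::printf("%.3fs -> %.3fs\n", (double)time / ClockRate, (double)(time + offset) / ClockRate);
        return 0;                          // prints "0.500s -> 1.500s"
    }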
diff --git a/src/lib_media/transform/video_convert.cpp b/src/lib_media/transform/video_convert.cpp
index 3dac8c3c..6bbebfd8 100644
--- a/src/lib_media/transform/video_convert.cpp
+++ b/src/lib_media/transform/video_convert.cpp
@@ -1,4 +1,3 @@
-#include "lib_utils/log.hpp"
 #include "lib_utils/tools.hpp"
 #include "video_convert.hpp"
 #include "lib_ffpp/ffpp.hpp"
@@ -28,7 +27,7 @@ void VideoConvert::reconfigure(const PictureFormat &format) {
         dstFormat.res.width, dstFormat.res.height, libavPixFmtConvert(dstFormat.format),
         SWS_BILINEAR, nullptr, nullptr, nullptr);
     if (!m_SwContext)
-        throw std::runtime_error("[VideoConvert] Impossible to set up video converter.");
+        throw error("Impossible to set up video converter.");
     srcFormat = format;
 }
 
@@ -40,7 +39,7 @@ void VideoConvert::process(Data data) {
     auto videoData = safe_cast(data);
     if (videoData->getFormat() != srcFormat) {
         if (m_SwContext)
-            Log::msg(Log::Info, "[VideoConvert] Incompatible input video data. Reconfiguring.");
+            log(Info, "Incompatible input video data. Reconfiguring.");
         reconfigure(videoData->getFormat());
     }
 
@@ -67,7 +66,7 @@ void VideoConvert::process(Data data) {
         break;
     }
     default:
-        throw std::runtime_error("[VideoConvert] Destination colorspace not supported.");
+        throw error("Destination colorspace not supported.");
     }
     sws_scale(m_SwContext, srcSlice, srcStride, 0, srcFormat.res.height, pDst, dstStride);
diff --git a/src/lib_media/utils/comparator.cpp b/src/lib_media/utils/comparator.cpp
index c464a04d..77f9dd51 100644
--- a/src/lib_media/utils/comparator.cpp
+++ b/src/lib_media/utils/comparator.cpp
@@ -8,7 +8,7 @@ namespace Utils {
 
 void IComparator::process(Data data) {
     if (data != nullptr)
-        throw std::runtime_error("[Comparator] data not expected");
+        throw error("data not expected");
 
     for (;;) {
         Data aData, bData;
@@ -16,14 +16,14 @@ void IComparator::process(Data data) {
         auto b = other.tryPop(bData);
         if (!a || !b) {
             if (a || b)
-                throw std::runtime_error("[Comparator] not the same number of samples");
-            Log::msg(Log::Info, "[Comparator] end of process");
+                throw error("not the same number of samples");
+            log(Info, "end of process");
             break;
         }
         auto res = compare(aData, bData);
         if (!res)
-            Log::msg(Log::Info, "[Comparator] comparison failed");
+            log(Info, "comparison failed");
     }
 }
 
@@ -46,12 +46,12 @@ bool PcmComparator::compare(Data data1, Data data2) const {
     auto pcm1 = safe_cast(data1);
     auto pcm2 = safe_cast(data2);
     if (pcm1->getFormat() != pcm2->getFormat())
-        throw std::runtime_error("[PcmComparator] Incompatible audio data");
+        throw std::runtime_error("Incompatible audio data");
     auto const size1 = pcm1->size();
     auto const size2 = pcm2->size();
     if (size1 != size2)
-        Log::msg(Log::Warning, "[PcmComparator] Sample sizes are different, comparing the overlap.");
+        Log::msg(Warning, "Sample sizes are different, comparing the overlap.");
     const DataPcm *data;
     if (size1 < size2)
         data = pcm1.get();
@@ -61,7 +61,7 @@ bool PcmComparator::compare(Data data1, Data data2) const {
     for (size_t planeIdx = 0; planeIdx < data->getFormat().numPlanes; ++planeIdx) {
         for (size_t i = 0; i < data->getPlaneSize(planeIdx); ++i) {
             if (abs(pcm1->getPlane(planeIdx)[i] - pcm2->getPlane(planeIdx)[i]) > tolerance) {
-                throw std::runtime_error(format("[PcmComparator] Samples are different at plane %s, index %s.", planeIdx, i));
+                throw std::runtime_error(format("Samples are different at plane %s, index %s.", planeIdx, i));
                 return false;
             }
         }
diff --git a/src/lib_modules/core/error.hpp b/src/lib_modules/core/error.hpp
new file mode 100644
index 00000000..03038291
--- /dev/null
+++ b/src/lib_modules/core/error.hpp
@@ -0,0 +1,30 @@
+#pragma once
+
+#include "lib_utils/format.hpp"
+#include <exception>
+#include <string>
+
+
+namespace Modules {
+
+class Exception : public std::exception {
+public:
+    Exception(std::string const &msg) throw() : msg(msg) {}
+    ~Exception() throw() {}
+
+    char const* what() const throw() {
+        return msg.c_str();
+    }
+
+private:
+    Exception& operator= (const Exception&) = delete;
+    std::string msg;
+};
+
+struct IError {
+    virtual std::exception error(std::string const &msg) {
+        throw Exception(format("[%s] %s", typeid(*this).name(), msg));
+    }
+};
+
+}
diff --git a/src/lib_modules/core/log.hpp b/src/lib_modules/core/log.hpp
new file mode 100644
index 00000000..e04c037a
--- /dev/null
+++ b/src/lib_modules/core/log.hpp
@@ -0,0 +1,17 @@
+#pragma once
+
+#include "lib_utils/log.hpp"
+
+
+namespace Modules {
+
+struct ILog {
+    virtual ~ILog() noexcept(false) {}
+
+    template<typename... Arguments>
+    void log(Level level, const std::string& fmt, Arguments... args) {
+        Log::msg(level, format("[%s] %s", typeid(*this).name(), format(fmt, args...)));
+    }
+};
+
+}
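Taken together, error.hpp and log.hpp give every class that mixes them in a log() and an error() that prepend the concrete type name. A minimal usage sketch (MyModule and its messages are illustrative, not part of this patch; it assumes the Module base below pulls in both mixins); note that the "[...]" prefix is whatever typeid(*this).name() returns, which on some compilers (e.g. GCC) is a mangled name rather than a plain "MyModule":

    class MyModule : public Modules::Module {
    public:
        void process(Modules::Data data) override {
            if (!data)
                throw error("null data received"); // error() itself throws Modules::Exception("[<type name>] null data received")
            log(Info, "data received at time %s", data->getTime());
        }
        void flush() override {}
    };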
diff --git a/src/lib_modules/core/metadata.hpp b/src/lib_modules/core/metadata.hpp
index aaef958a..e1e19e4e 100644
--- a/src/lib_modules/core/metadata.hpp
+++ b/src/lib_modules/core/metadata.hpp
@@ -45,7 +45,7 @@ static bool operator==(const IMetadata &left, const IMetadata &right) {
 
 class MetadataFile : public IMetadata {
 public:
-    MetadataFile(const std::string filename, StreamType streamType, const std::string mimeType, const std::string codecName, uint64_t durationIn180k, uint64_t filesize, bool startsWithRAP)
+    MetadataFile(const std::string& filename, StreamType streamType, const std::string& mimeType, const std::string& codecName, uint64_t durationIn180k, uint64_t filesize, bool startsWithRAP)
         : streamType(streamType), filename(filename), mimeType(mimeType), codecName(codecName), durationIn180k(durationIn180k), filesize(filesize), startsWithRAP(startsWithRAP) {
     }
     std::string getFilename() const {
@@ -138,14 +138,14 @@ class MetadataCap : public IMetadataCap {
         } else if (metadata != m_metadata) {
             if (m_metadata) {
                 if (*m_metadata == *metadata) {
-                    Log::msg(Log::Debug, "Output: metadata not equal but comparable by value. Updating.");
+                    Log::msg(Debug, "Output: metadata not equal but comparable by value. Updating.");
                     m_metadata = metadata;
                 } else {
-                    Log::msg(Log::Info, "Metadata update from data not supported yet: output pin and data won't carry the same metadata.");
+                    Log::msg(Info, "Metadata update from data not supported yet: output pin and data won't carry the same metadata.");
                 }
                 return true;
             }
-            Log::msg(Log::Info, "Output: metadata transported by data changed. Updating.");
+            Log::msg(Info, "Output: metadata transported by data changed. Updating.");
             if (m_metadata && (metadata->getStreamType() != m_metadata->getStreamType()))
                 throw std::runtime_error(format("Metadata update: incompatible types %s for data and %s for attached", metadata->getStreamType(), m_metadata->getStreamType()));
             m_metadata = metadata;
diff --git a/src/lib_modules/core/module.hpp b/src/lib_modules/core/module.hpp
index c05a6647..fa1bd418 100644
--- a/src/lib_modules/core/module.hpp
+++ b/src/lib_modules/core/module.hpp
@@ -1,9 +1,12 @@
 #pragma once
 
 #include "data.hpp"
+#include "error.hpp"
 #include "input.hpp"
+#include "log.hpp"
 #include "output.hpp"
 #include
+#include
 #include
 
@@ -15,7 +18,7 @@ struct IModule {
     virtual void flush() = 0;
 };
 
-class Module : public IModule, public InputCap, public OutputCap {
+class Module : public IModule, public ILog, public IError, public InputCap, public OutputCap {
 public:
     Module() = default;
     virtual ~Module() noexcept(false) {}
diff --git a/src/lib_modules/core/output.hpp b/src/lib_modules/core/output.hpp
index b73f7282..b68b8b8f 100644
--- a/src/lib_modules/core/output.hpp
+++ b/src/lib_modules/core/output.hpp
@@ -38,7 +38,7 @@ class OutputT : public IOutput, public MetadataCap {
         updateMetadata(data);
         size_t numReceivers = signal.emit(data);
         if (numReceivers == 0)
-            Log::msg(Log::Debug, "emit(): Output had no receiver");
+            Log::msg(Debug, "emit(): Output had no receiver");
         return numReceivers;
     }
 
diff --git a/src/lib_modules/modules.vcxproj b/src/lib_modules/modules.vcxproj
index 1a41e40d..61c03b51 100644
--- a/src/lib_modules/modules.vcxproj
+++ b/src/lib_modules/modules.vcxproj
@@ -22,7 +22,9 @@
+    <ClInclude Include="core\error.hpp" />
+    <ClInclude Include="core\log.hpp" />
 
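Only classes that derive from Modules::Module (and hence from ILog/IError) get the new helpers. Free functions such as ConnectOutputToInput in helper.hpp, plain helpers such as SdlInit, and const methods such as PcmComparator::compare still go through Log::msg / std::runtime_error, which is why those call sites keep or simply drop their hand-written prefixes instead of gaining automatic ones. A small illustration of the const-method case (Example is a made-up type; assumes lib_modules/core/log.hpp and lib_utils/log.hpp are included):

    struct Example : Modules::ILog {
        bool check() const {
            // log(Warning, "...");   // would not compile: ILog::log() is a non-const member
            Log::msg(Warning, "prefix must be written by hand in const code");
            return true;
        }
    };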
diff --git a/src/lib_modules/modules.vcxproj.filters b/src/lib_modules/modules.vcxproj.filters
index 7eba7402..9e3e2340 100644
--- a/src/lib_modules/modules.vcxproj.filters
+++ b/src/lib_modules/modules.vcxproj.filters
@@ -40,6 +40,12 @@
       <Filter>core</Filter>
+    <ClInclude Include="core\error.hpp">
+      <Filter>core</Filter>
+    </ClInclude>
+    <ClInclude Include="core\log.hpp">
+      <Filter>core</Filter>
+    </ClInclude>
 
diff --git a/src/lib_modules/utils/helper.hpp b/src/lib_modules/utils/helper.hpp
index 0b3a17ef..8d39be9b 100644
--- a/src/lib_modules/utils/helper.hpp
+++ b/src/lib_modules/utils/helper.hpp
@@ -21,18 +21,18 @@ size_t ConnectOutputToInput(IOutput* prev, ModuleType* next, IProcessExecutor& e
     if (prevMetadata && nextMetadata) {
         if (prevMetadata->getStreamType() != next->getMetadata()->getStreamType())
             throw std::runtime_error("Module connection: incompatible types");
-        Log::msg(Log::Info, "--------- Connect: metadata OK");
+        Log::msg(Info, "--------- Connect: metadata OK");
     } else {
         if (prevMetadata && !nextMetadata) {
#if 0 //rely on data to propagate type instead of inputs or outputs - this way sent data type is on the output, processed data is on the input
             next->setMetadata(prevMetadata);
-            Log::msg(Log::Info, "--------- Connect: metadata Propagate to next");
+            Log::msg(Info, "--------- Connect: metadata Propagate to next");
#endif
         } else if (!prevMetadata && nextMetadata) {
             safe_cast(prev)->setMetadata(nextMetadata);
-            Log::msg(Log::Info, "--------- Connect: metadata Propagate to prev (backward)");
+            Log::msg(Info, "--------- Connect: metadata Propagate to prev (backward)");
         } else {
-            Log::msg(Log::Info, "--------- Connect: no metadata");
+            Log::msg(Info, "--------- Connect: no metadata");
         }
     }
 
diff --git a/src/lib_modules/utils/pipeline.cpp b/src/lib_modules/utils/pipeline.cpp
index bf537888..ac389730 100644
--- a/src/lib_modules/utils/pipeline.cpp
+++ b/src/lib_modules/utils/pipeline.cpp
@@ -19,10 +19,10 @@ class PipelinedInput : public IInput {
     /* direct call: receiving nullptr stops the execution */
     virtual void process(Data data) override {
         if (data) {
-            Log::msg(Log::Debug, format("Module %s: dispatch data for time %s", typeid(notify).name(), data->getTime() / (double)IClock::Rate));
+            Log::msg(Debug, format("Module %s: dispatch data for time %s", typeid(notify).name(), data->getTime() / (double)IClock::Rate));
             delegate->process(data);
         } else {
-            Log::msg(Log::Debug, format("Module %s: notify finished.", typeid(notify).name()));
+            Log::msg(Debug, format("Module %s: notify finished.", typeid(notify).name()));
             notify->finished();
         }
     }
@@ -93,7 +93,7 @@ void PipelinedModule::connect(IOutput *output, size_t inputIdx) {
 }
 
 void PipelinedModule::dispatch(Data data) {
-    Log::msg(Log::Debug, format("Module %s: dispatch data", typeid(delegate).name()));
+    Log::msg(Debug, format("Module %s: dispatch data", typeid(delegate).name()));
 
     if (isSource()) {
         assert(data == nullptr);
@@ -144,25 +144,25 @@ void Pipeline::connect(IPipelineModule *prev, size_t outputIdx, IPipelineModule
 }
 
 void Pipeline::start() {
-    Log::msg(Log::Info, "Pipeline: starting");
+    Log::msg(Info, "Pipeline: starting");
     for (auto &m : modules) {
         if (m->isSource())
             m->dispatch(nullptr);
     }
-    Log::msg(Log::Info, "Pipeline: started");
+    Log::msg(Info, "Pipeline: started");
 }
 
 void Pipeline::waitForCompletion() {
-    Log::msg(Log::Info, "Pipeline: waiting for completion (remaning: %s)", (int)numRemainingNotifications);
+    Log::msg(Info, "Pipeline: waiting for completion (remaining: %s)", (int)numRemainingNotifications);
     std::unique_lock<std::mutex> lock(mutex);
     while (numRemainingNotifications > 0) {
         condition.wait(lock);
     }
-    Log::msg(Log::Info, "Pipeline: completed");
+    Log::msg(Info, "Pipeline: completed");
 }
 
 void Pipeline::exitSync() {
-    Log::msg(Log::Warning, format("Pipeline: asked to exit now."));
+    Log::msg(Warning, format("Pipeline: asked to exit now."));
     for (auto &m : modules) {
         if (m->isSource())
             m->dispatch(nullptr);
diff --git a/src/lib_utils/log.cpp b/src/lib_utils/log.cpp
index e58a520b..4eb4bd8e 100644
--- a/src/lib_utils/log.cpp
+++ b/src/lib_utils/log.cpp
@@ -18,7 +18,7 @@ static WORD console_attr_ori = 0;
 #define RESET "\x1b[0m"
 #endif /*_WIN32*/
 
-Log::Level Log::globalLogLevel = Log::Info;
+Level Log::globalLogLevel = Info;
 
 namespace {
 
@@ -85,6 +85,6 @@ void Log::setLevel(Level level) {
     globalLogLevel = level;
 }
 
-Log::Level Log::getLevel() {
+Level Log::getLevel() {
     return globalLogLevel;
 }
diff --git a/src/lib_utils/log.hpp b/src/lib_utils/log.hpp
index 4fc54215..9dfb8fd3 100644
--- a/src/lib_utils/log.hpp
+++ b/src/lib_utils/log.hpp
@@ -3,17 +3,16 @@
 #include "format.hpp"
 #include
 
+enum Level {
+    Quiet = -1,
+    Error = 0,
+    Warning,
+    Info,
+    Debug
+};
+
 class Log {
 public:
-    enum Level {
-        Quiet = -1,
-        Error = 0,
-        Warning,
-        Info,
-        Debug
-    };
-
     template<typename... Arguments>
     static void msg(Level level, const std::string& fmt, Arguments... args) {
         if ((level != Quiet) && (level <= globalLogLevel)) {
@@ -23,7 +22,7 @@ class Log {
     }
 
     void setLevel(Level level);
-    Log::Level getLevel();
+    Level getLevel();
 
 private:
     Log();