diff --git a/src/modules/ffmpeg/CMakeLists.txt b/src/modules/ffmpeg/CMakeLists.txt
index 0283d1205e..be2146be80 100644
--- a/src/modules/ffmpeg/CMakeLists.txt
+++ b/src/modules/ffmpeg/CMakeLists.txt
@@ -5,6 +5,7 @@ set(SOURCES
     producer/av_producer.cpp
     producer/av_input.cpp
     util/av_util.cpp
+    util/audio_resampler.cpp

    producer/ffmpeg_producer.cpp
     consumer/ffmpeg_consumer.cpp
@@ -15,6 +16,7 @@ set(HEADERS
     producer/av_producer.h
     producer/av_input.h
     util/av_util.h
+    util/audio_resampler.h

     producer/ffmpeg_producer.h
     consumer/ffmpeg_consumer.h
diff --git a/src/modules/ffmpeg/util/audio_resampler.cpp b/src/modules/ffmpeg/util/audio_resampler.cpp
new file mode 100644
index 0000000000..e5d52ddc88
--- /dev/null
+++ b/src/modules/ffmpeg/util/audio_resampler.cpp
@@ -0,0 +1,38 @@
+#include "audio_resampler.h"
+#include "av_assert.h"
+
+extern "C" {
+#include <libavutil/channel_layout.h>
+#include <libswresample/swresample.h>
+}
+
+namespace caspar::ffmpeg {
+
+AudioResampler::AudioResampler(int64_t sample_rate, AVSampleFormat in_sample_fmt)
+    : ctx(std::shared_ptr<SwrContext>(swr_alloc_set_opts(nullptr,
+                                                         AV_CH_LAYOUT_7POINT1,
+                                                         AV_SAMPLE_FMT_S32,
+                                                         sample_rate,
+                                                         AV_CH_LAYOUT_7POINT1,
+                                                         in_sample_fmt,
+                                                         sample_rate,
+                                                         0,
+                                                         nullptr),
+                                      [](SwrContext* ptr) { swr_free(&ptr); }))
+{
+    if (!ctx)
+        FF_RET(AVERROR(ENOMEM), "swr_alloc_set_opts");
+
+    FF_RET(swr_init(ctx.get()), "swr_init");
+}
+
+caspar::array<int32_t> AudioResampler::convert(int frames, const void** src)
+{
+    auto result = caspar::array<int32_t>(frames * 8 * sizeof(int32_t));
+    auto ptr    = result.data();
+    swr_convert(ctx.get(), (uint8_t**)&ptr, frames, reinterpret_cast<const uint8_t**>(src), frames);
+
+    return result;
+}
+
+}; // namespace caspar::ffmpeg
\ No newline at end of file
diff --git a/src/modules/ffmpeg/util/audio_resampler.h b/src/modules/ffmpeg/util/audio_resampler.h
new file mode 100644
index 0000000000..3f91b44cef
--- /dev/null
+++ b/src/modules/ffmpeg/util/audio_resampler.h
@@ -0,0 +1,27 @@
+#pragma once
+
+#include <common/array.h>
+#include <memory>
+
+extern "C" {
+#include <libavutil/samplefmt.h>
+}
+
+struct SwrContext;
+
+namespace caspar::ffmpeg {
+
+class AudioResampler
+{
+    std::shared_ptr<SwrContext> ctx;
+
+  public:
+    AudioResampler(int64_t sample_rate, AVSampleFormat in_sample_fmt);
+
+    AudioResampler(const AudioResampler&) = delete;
+    AudioResampler& operator=(const AudioResampler&) = delete;
+
+    caspar::array<int32_t> convert(int frames, const void** src);
+};
+
+}; // namespace caspar::ffmpeg
\ No newline at end of file
diff --git a/src/modules/html/CMakeLists.txt b/src/modules/html/CMakeLists.txt
index c155dc6c0b..3f2afe42ba 100644
--- a/src/modules/html/CMakeLists.txt
+++ b/src/modules/html/CMakeLists.txt
@@ -25,7 +25,9 @@ target_include_directories(html PRIVATE
     ..
     ../..
     ${CEF_INCLUDE_PATH}
-    )
+    ${FFMPEG_INCLUDE_PATH}
+)
+target_link_libraries(html ffmpeg)

 set_target_properties(html PROPERTIES FOLDER modules)
 source_group(sources\\producer producer/*)
diff --git a/src/modules/html/producer/html_producer.cpp b/src/modules/html/producer/html_producer.cpp
index 2ad42d07be..f5b42bd61b 100644
--- a/src/modules/html/producer/html_producer.cpp
+++ b/src/modules/html/producer/html_producer.cpp
@@ -60,13 +60,87 @@
 #include
 #include

+#include "../../ffmpeg/util/audio_resampler.h"
+
 #include "../html.h"

 namespace caspar { namespace html {

+inline std::int_least64_t now()
+{
+    return std::chrono::duration_cast<std::chrono::milliseconds>(
+               std::chrono::high_resolution_clock::now().time_since_epoch())
+        .count();
+}
+
+struct presentation_frame
+{
+    std::int_least64_t timestamp = now();
+    core::draw_frame   frame     = core::draw_frame::empty();
+    bool               has_video = false;
+    bool               has_audio = false;
+
+    explicit presentation_frame(core::draw_frame video = {})
+    {
+        if (video) {
+            frame     = std::move(video);
+            has_video = true;
+        }
+    }
+
+    presentation_frame(presentation_frame&& other) noexcept
+        : timestamp(other.timestamp)
+        , frame(std::move(other.frame))
+        , has_video(other.has_video)
+        , has_audio(other.has_audio)
+    {
+    }
+
+    presentation_frame(const presentation_frame&) = delete;
+    presentation_frame& operator=(const presentation_frame&) = delete;
+
+    presentation_frame& operator=(presentation_frame&& rhs)
+    {
+        timestamp = rhs.timestamp;
+        frame     = std::move(rhs.frame);
+        has_video = rhs.has_video;
+        has_audio = rhs.has_audio;
+        return *this;
+    }
+
+    ~presentation_frame() {}
+
+    void add_audio(core::mutable_frame audio)
+    {
+        if (has_audio)
+            return;
+        has_audio = true;
+
+        if (frame) {
+            frame = core::draw_frame::over(frame, core::draw_frame(std::move(audio)));
+        } else {
+            frame = core::draw_frame(std::move(audio));
+        }
+    }
+
+    void add_video(core::draw_frame video)
+    {
+        if (has_video)
+            return;
+        has_video = true;
+
+        if (frame) {
+            frame = core::draw_frame::over(frame, std::move(video));
+        } else {
+            frame = std::move(video);
+        }
+    }
+};
+
 class html_client
     : public CefClient
     , public CefRenderHandler
+    , public CefAudioHandler
     , public CefLifeSpanHandler
     , public CefLoadHandler
     , public CefDisplayHandler
@@ -80,15 +150,18 @@ class html_client
     caspar::timer paint_timer_;
     caspar::timer test_timer_;

-    spl::shared_ptr<core::frame_factory>                        frame_factory_;
-    core::video_format_desc                                     format_desc_;
-    bool                                                        gpu_enabled_;
-    tbb::concurrent_queue<std::wstring>                         javascript_before_load_;
-    std::atomic<bool>                                           loaded_;
-    std::queue<std::pair<std::int_least64_t, core::draw_frame>> frames_;
-    mutable std::mutex                                          frames_mutex_;
-    const size_t                                                frames_max_size_ = 4;
-    std::atomic<bool>                                           closing_;
+    spl::shared_ptr<core::frame_factory> frame_factory_;
+    core::video_format_desc              format_desc_;
+    bool                                 gpu_enabled_;
+    tbb::concurrent_queue<std::wstring>  javascript_before_load_;
+    std::atomic<bool>                    loaded_;
+    std::queue<presentation_frame>       frames_;
+    core::draw_frame                     last_generated_frame_;
+    mutable std::mutex                   frames_mutex_;
+    const size_t                         frames_max_size_ = 4;
+    std::atomic<bool>                    closing_;
+
+    std::unique_ptr<ffmpeg::AudioResampler> audioResampler_;

     core::draw_frame   last_frame_;
     std::int_least64_t last_frame_time_;
@@ -167,15 +240,15 @@ class html_client
            // Check if the sole buffered frame is too young to have a partner field generated (with a tolerance)
            auto time_per_frame           = (1000 * 1.5) / format_desc_.fps;
-            auto front_frame_is_too_young = (now_time - frames_.front().first) < time_per_frame;
+            auto front_frame_is_too_young = (now_time - frames_.front().timestamp) < time_per_frame;

            if (follows_gap_in_frames && front_frame_is_too_young) {
                return false;
            }
        }

-        last_frame_time_ = frames_.front().first;
-        last_frame_      = std::move(frames_.front().second);
+        last_frame_time_ = frames_.front().timestamp;
+        last_frame_      = std::move(frames_.front().frame);
        frames_.pop();

        graph_->set_value("buffered-frames", (double)frames_.size() / frames_max_size_);

@@ -190,12 +263,13 @@ class html_client
    {
        if (!try_pop(field)) {
            graph_->set_tag(diagnostics::tag_severity::SILENT, "late-frame");
+            return core::draw_frame::still(last_frame_);
+        } else {
+            return last_frame_;
        }
-
-        return last_frame_;
    }

-    core::draw_frame last_frame() const { return last_frame_; }
+    core::draw_frame last_frame() const { return core::draw_frame::still(last_frame_); }

    bool is_ready() const
    {
@@ -245,13 +319,6 @@ class html_client
    }

  private:
-    std::int_least64_t now()
-    {
-        return std::chrono::duration_cast<std::chrono::milliseconds>(
-                   std::chrono::high_resolution_clock::now().time_since_epoch())
-            .count();
-    }
-
    void GetViewRect(CefRefPtr<CefBrowser> browser, CefRect& rect) override
    {
        CASPAR_ASSERT(CefCurrentlyOn(TID_UI));
@@ -302,7 +369,10 @@ class html_client
        {
            std::lock_guard<std::mutex> lock(frames_mutex_);

-            frames_.push(std::make_pair(now(), core::draw_frame(std::move(frame))));
+            core::draw_frame new_frame = core::draw_frame(std::move(frame));
+            last_generated_frame_      = new_frame;
+
+            frames_.push(presentation_frame(std::move(new_frame)));
            while (frames_.size() > 4) {
                frames_.pop();
                graph_->set_tag(diagnostics::tag_severity::WARNING, "dropped-frame");
@@ -353,6 +423,8 @@ class html_client

    CefRefPtr<CefRenderHandler> GetRenderHandler() override { return this; }

+    CefRefPtr<CefAudioHandler> GetAudioHandler() override { return this; }
+
    CefRefPtr<CefLifeSpanHandler> GetLifeSpanHandler() override { return this; }

    CefRefPtr<CefLoadHandler> GetLoadHandler() override { return this; }
@@ -378,7 +450,7 @@ class html_client
        {
            std::lock_guard<std::mutex> lock(frames_mutex_);

-            frames_.push(std::make_pair(now(), core::draw_frame::empty()));
+            frames_.push(presentation_frame());
        }

        {
@@ -399,6 +471,51 @@ class html_client
        return false;
    }

+    bool GetAudioParameters(CefRefPtr<CefBrowser> browser, CefAudioParameters& params) override
+    {
+        params.channel_layout    = CEF_CHANNEL_LAYOUT_7_1;
+        params.sample_rate       = format_desc_.audio_sample_rate;
+        params.frames_per_buffer = format_desc_.audio_cadence[0];
+        return format_desc_.audio_cadence.size() == 1; // TODO - handle 59.94
+    }
+
+    void OnAudioStreamStarted(CefRefPtr<CefBrowser> browser, const CefAudioParameters& params, int channels) override
+    {
+        audioResampler_ = std::make_unique<ffmpeg::AudioResampler>(params.sample_rate, AV_SAMPLE_FMT_FLTP);
+    }
+    void OnAudioStreamPacket(CefRefPtr<CefBrowser> browser, const float** data, int samples, int64_t pts) override
+    {
+        if (!audioResampler_)
+            return;
+
+        auto audio       = audioResampler_->convert(samples, reinterpret_cast<const void**>(data));
+        auto audio_frame = core::mutable_frame(this, {}, std::move(audio), core::pixel_format_desc());
+
+        {
+            std::lock_guard<std::mutex> lock(frames_mutex_);
+            if (frames_.empty()) {
+                presentation_frame wrapped_frame(last_generated_frame_);
+                wrapped_frame.add_audio(std::move(audio_frame));
+
+                frames_.push(std::move(wrapped_frame));
+            } else {
+                if (!frames_.back().has_audio) {
+                    frames_.back().add_audio(std::move(audio_frame));
+                } else {
+                    presentation_frame wrapped_frame(last_generated_frame_);
+                    wrapped_frame.add_audio(std::move(audio_frame));
+                    frames_.push(std::move(wrapped_frame));
+                }
+            }
+        }
+    }
+    void OnAudioStreamStopped(CefRefPtr<CefBrowser> browser) override { audioResampler_ = nullptr; }
+    void OnAudioStreamError(CefRefPtr<CefBrowser> browser, const CefString& message) override
+    {
+        CASPAR_LOG(info) << "[html_producer] OnAudioStreamError: \"" << message.ToString() << "\"";
+        audioResampler_ = nullptr;
+    }
+
    void do_execute_javascript(const std::wstring& javascript)
    {
        html::begin_invoke([=] {