From 5da5938053edb9a2f122e44b19a5fa280787a5d3 Mon Sep 17 00:00:00 2001
From: melpon
Date: Tue, 23 Jan 2024 15:44:09 +0900
Subject: [PATCH] =?UTF-8?q?m120.6099.1=20=E3=81=AB=E5=AF=BE=E5=BF=9C?=
 =?UTF-8?q?=E3=81=99=E3=82=8B?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 run.py                                      |  1 +
 winuwp/001-audio.patch                      | 11 +--
 winuwp/002-capturer.patch                   | 46 ++++++-----
 winuwp/003-build.patch                      | 71 ++++++----------
 winuwp/004-nogeneric.patch                  | 41 ++++++----
 winuwp/008-streamer.patch                   | 80 +++++++++++++++++++
 .../winuwp/decoder/h264_decoder_mf_impl.cc  |  8 +-
 .../winuwp/encoder/h264_encoder_mf_impl.cc  |  2 +-
 8 files changed, 165 insertions(+), 95 deletions(-)
 create mode 100644 winuwp/008-streamer.patch

diff --git a/run.py b/run.py
index 7ce410cd..7c605f11 100644
--- a/run.py
+++ b/run.py
@@ -285,6 +285,7 @@ def get_depot_tools(source_dir, fetch=False):
     '005-h264.patch',
     '006-logging.patch',
     '007-internal-error.patch',
+    '008-streamer.patch',
 ]
 WINUWP_ADDITIONAL_DIRS = [
     ['modules', 'audio_device', 'winuwp'],
diff --git a/winuwp/001-audio.patch b/winuwp/001-audio.patch
index 2b81246a..98cbcdbd 100644
--- a/winuwp/001-audio.patch
+++ b/winuwp/001-audio.patch
@@ -50,7 +50,7 @@ index d441479b33..2925586b0e 100644
     # Required for the built-in WASAPI AEC.
     "dmoguids.lib",
 diff --git a/modules/audio_device/audio_device_impl.cc b/modules/audio_device/audio_device_impl.cc
-index 01a8a25c06..98eba26d0e 100644
+index 80ed928933..50bf744032 100644
 --- a/modules/audio_device/audio_device_impl.cc
 +++ b/modules/audio_device/audio_device_impl.cc
 @@ -22,8 +22,12 @@
 +#endif
  #elif defined(WEBRTC_ANDROID)
  #include
- #if defined(WEBRTC_AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
+ #include "sdk/android/native_api/audio_device_module/audio_device_android.h"
 diff --git a/build/config/win/BUILD.gn b/build/config/win/BUILD.gn
 index 1e76a54cc..b254b3ac8 100644
 --- a/build/config/win/BUILD.gn
 +++ b/build/config/win/BUILD.gn
-@@ -245,7 +245,16 @@ config("runtime_library") {
+@@ -245,7 +245,11 @@ config("runtime_library") {
    } else {
      defines += [ "WINAPI_FAMILY=WINAPI_FAMILY_DESKTOP_APP" ]
    }
 +    "/await",
 +    "/EHsc",  # Required for having a wrapper using the WinRT/C++ projection.
 +  ]
-+  if (is_clang) {
-+    cflags += [
-+      "-Wno-microsoft-template",  # The audio module uses some templates Microsoft style.
-+    ]
-+  }
   # This warning is given because the linker cannot tell the difference
   # between consuming WinRT APIs versus authoring WinRT within static
diff --git a/winuwp/002-capturer.patch b/winuwp/002-capturer.patch
index 4d80cc99..ad13e014 100644
--- a/winuwp/002-capturer.patch
+++ b/winuwp/002-capturer.patch
@@ -227,7 +227,7 @@ index 63534600a9..7d9a0a83aa 100644
  }
  bool operator==(const VideoCaptureCapability& other) const {
 diff --git a/modules/video_capture/video_capture_impl.cc b/modules/video_capture/video_capture_impl.cc
-index d539b38264..390fa4c123 100644
+index 428253bf23..eea8ebd2a9 100644
 --- a/modules/video_capture/video_capture_impl.cc
 +++ b/modules/video_capture/video_capture_impl.cc
 @@ -14,6 +14,7 @@
  #include "api/video/video_frame_buffer.h"
  #include "common_video/libyuv/include/webrtc_libyuv.h"
  #include "modules/video_capture/video_capture_config.h"
-@@ -133,9 +134,17 @@ void VideoCaptureImpl::DeliverRawFrame(uint8_t* videoFrame,
+@@ -139,9 +139,17 @@ void VideoCaptureImpl::DeliverRawFrame(uint8_t* videoFrame,
  }
  int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
@@ -253,19 +253,25 @@ index d539b38264..390fa4c123 100644
 +    return IncomingFrameNV12(videoFrame, stride_y, plane_uv, stride_uv, videoFrameLength, frameInfo, captureTime);
 +  }
 +
 +  RTC_CHECK_RUNS_SERIALIZED(&capture_checker_);
   MutexLock lock(&api_lock_);
-  const int32_t width = frameInfo.width;
-@@ -148,16 +156,8 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
+@@ -155,22 +163,8 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
     return 0;
   }
-  // Not encoded, convert to I420.
-  if (frameInfo.videoType != VideoType::kMJPEG) {
-    // Allow buffers larger than expected. On linux gstreamer allocates buffers
-    // page-aligned and v4l2loopback passes us the buffer size verbatim which
-    // for most cases is larger than expected.
-    // See https://github.com/umlaeute/v4l2loopback/issues/190.
-    if (auto size = CalcBufferSize(frameInfo.videoType, width, abs(height));
-        videoFrameLength < size) {
-      RTC_LOG(LS_ERROR) << "Wrong incoming frame length. Expected " << size
-                        << ", Got " << videoFrameLength << ".";
-      return -1;
-    }
-  }
-
-  int stride_y = width;
@@ -275,7 +281,7 @@ index d539b38264..390fa4c123 100644
   int target_width = width;
   int target_height = abs(height);
-@@ -177,7 +177,7 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
+@@ -187,7 +181,7 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
   // In Windows, the image starts bottom left, instead of top left.
   // Setting a negative source height, inverts the image (within LibYuv).
rtc::scoped_refptr buffer = I420Buffer::Create( @@ -283,8 +289,8 @@ index d539b38264..390fa4c123 100644 + target_width, target_height, dst_stride_y, dst_stride_uv, dst_stride_uv); libyuv::RotationMode rotation_mode = libyuv::kRotate0; - if (apply_rotation) { -@@ -198,10 +198,11 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, + if (apply_rotation_) { +@@ -208,10 +202,11 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, } const int conversionResult = libyuv::ConvertToI420( @@ -299,8 +305,8 @@ index d539b38264..390fa4c123 100644 + 0, // No Cropping width, height, target_width, target_height, rotation_mode, ConvertVideoType(frameInfo.videoType)); - if (conversionResult < 0) { -@@ -224,6 +225,40 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, + if (conversionResult != 0) { +@@ -234,6 +229,40 @@ int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame, return 0; } @@ -340,7 +346,7 @@ index d539b38264..390fa4c123 100644 + int32_t VideoCaptureImpl::StartCapture( const VideoCaptureCapability& capability) { - _requestedCapability = capability; + RTC_DCHECK_RUN_ON(&api_checker_); diff --git a/modules/video_capture/video_capture_impl.h b/modules/video_capture/video_capture_impl.h index 1f7aa89883..c186806ae5 100644 --- a/modules/video_capture/video_capture_impl.h @@ -424,13 +430,13 @@ index 481326c1d2..d299c7e175 100644 return nullptr; } diff --git a/webrtc.gni b/webrtc.gni -index 6e81fc4285..eb89c658fe 100644 +index 173d66c791..f101888462 100644 --- a/webrtc.gni +++ b/webrtc.gni -@@ -240,6 +240,15 @@ declare_args() { - # "warn": RTC_LOGs a message with LS_WARNING severity if the field trial - # hasn't been registered. - rtc_strict_field_trials = "" +@@ -260,6 +260,15 @@ declare_args() { + # WebRTC does not declare its public dependencies. See webrtc:8603. Instead + # WebRTC is using a global common dependencies. + rtc_common_public_deps = [] # no-presubmit-check TODO(webrtc:8603) + + # Defines which API should be used by the video capture module on Windows. 
+ # The following are the current options: diff --git a/winuwp/003-build.patch b/winuwp/003-build.patch index d8a3ba84..8719a14f 100644 --- a/winuwp/003-build.patch +++ b/winuwp/003-build.patch @@ -15,24 +15,6 @@ index d5289b85d7..f5be6ebfbd 100644 "api/video_codecs:video_encoder_factory_template_libvpx_vp8_adapter", "api/video_codecs:video_encoder_factory_template_libvpx_vp9_adapter", "api/video_codecs:video_encoder_factory_template_open_h264_adapter", -diff --git a/rtc_base/BUILD.gn b/rtc_base/BUILD.gn -index e74b85fcbf..dec9736fe5 100644 ---- a/rtc_base/BUILD.gn -+++ b/rtc_base/BUILD.gn -@@ -1039,6 +1039,13 @@ if (is_win) { - "win32_window.h", - ] - -+ if (current_os == "winuwp") { -+ sources -= [ -+ "win32_window.cc", -+ "win32_window.h", -+ ] -+ } -+ - deps = [ - ":byte_order", - ":checks", diff --git a/rtc_base/file_rotating_stream.cc b/rtc_base/file_rotating_stream.cc index c529b5b1b4..515646b03b 100644 --- a/rtc_base/file_rotating_stream.cc @@ -98,19 +80,6 @@ index fe8ec1afdc..403c8a0de2 100644 #define WEBRTC_DENORMAL_DISABLER_SUPPORTED #endif -diff --git a/third_party/boringssl/BUILD.gn b/third_party/boringssl/BUILD.gn -index efcbdf378b6..c0f70952bd5 100644 ---- a/third_party/boringssl/BUILD.gn -+++ b/third_party/boringssl/BUILD.gn -@@ -96,7 +96,7 @@ if (is_win && !is_msan && current_cpu != "arm64") { - sources += crypto_sources_linux_aarch64 - } else if (is_apple) { - sources += crypto_sources_apple_aarch64 -- } else if (is_win) { -+ } else if (is_win && current_os != "winuwp") { - sources += crypto_sources_win_aarch64 - } else { - public_configs = [ ":no_asm_config" ] diff --git a/third_party/crc32c/BUILD.gn b/third_party/crc32c/BUILD.gn index 38024bbd8d0..a40772c21e1 100644 --- a/third_party/crc32c/BUILD.gn @@ -146,22 +115,32 @@ index 758aca15407..60ef0074839 100644 } else { # This should only match windows. os_category = current_os } -diff --git a/third_party/libvpx/source/libvpx/vpx_dsp/arm/fdct4x4_neon.c b/third_party/libvpx/source/libvpx/vpx_dsp/arm/fdct4x4_neon.c -index 3b9196fae..a53c0f672 100644 ---- a/third_party/libvpx/source/libvpx/vpx_dsp/arm/fdct4x4_neon.c -+++ b/third_party/libvpx/source/libvpx/vpx_dsp/arm/fdct4x4_neon.c -@@ -52,7 +52,11 @@ void vpx_fdct4x4_neon(const int16_t *input, tran_low_t *final_output, - - void vpx_highbd_fdct4x4_neon(const int16_t *input, tran_low_t *final_output, - int stride) { -+#if defined(_WIN32) -+ const int32x4_t const_1000 = { .n128_u32 = {1, 0, 0, 0} }; -+#else - static const int32x4_t const_1000 = { 1, 0, 0, 0 }; -+#endif - const int32x4_t const_one = vdupq_n_s32(1); +@@ -506,8 +508,10 @@ static_library("libvpx") { + deps += [ ":libvpx_intrinsics_neon" ] + } + if (current_cpu == "arm64") { +- deps += [ ":libvpx_intrinsics_neon_dotprod" ] +- deps += [ ":libvpx_intrinsics_neon_i8mm" ] ++ if (current_os != "winuwp") { ++ deps += [ ":libvpx_intrinsics_neon_dotprod" ] ++ deps += [ ":libvpx_intrinsics_neon_i8mm" ] ++ } + } + if (is_android) { + deps += [ "//third_party/cpu_features:ndk_compat" ] +diff --git a/third_party/boringssl/BUILD.gn b/third_party/boringssl/BUILD.gn +index 321a2dc2eb3..5b2045d8875 100644 +--- a/third_party/boringssl/BUILD.gn ++++ b/third_party/boringssl/BUILD.gn +@@ -118,7 +118,7 @@ if (enable_rust_boringssl) { + } + } - // input[M * stride] * 16 +-if (is_msan) { ++if (is_msan || current_os == "winuwp") { + # MSan instrumentation is incompatible with assembly optimizations. + # BoringSSL's GAS-compatible assembly knows how to detect MSan, but the NASM + # assembly does not, so we check for MSan explicitly. 
diff --git a/third_party/pffft/BUILD.gn b/third_party/pffft/BUILD.gn index 0a1cf7a1f5e..7c32b610ffe 100644 --- a/third_party/pffft/BUILD.gn diff --git a/winuwp/004-nogeneric.patch b/winuwp/004-nogeneric.patch index 9b0c7180..c74f14d3 100644 --- a/winuwp/004-nogeneric.patch +++ b/winuwp/004-nogeneric.patch @@ -1,8 +1,8 @@ diff --git a/call/rtp_payload_params.cc b/call/rtp_payload_params.cc -index 5eff91fa5c..c036d27e78 100644 +index 4b63ebefb3..03bf130079 100644 --- a/call/rtp_payload_params.cc +++ b/call/rtp_payload_params.cc -@@ -262,7 +262,7 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info, +@@ -310,7 +310,7 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info, chains_calculator_.Reset( codec_specific_info->generic_frame_info->part_of_chain); } @@ -11,7 +11,7 @@ index 5eff91fa5c..c036d27e78 100644 *codec_specific_info->generic_frame_info, frame_id); return; } -@@ -302,7 +302,7 @@ void RtpPayloadParams::GenericToGeneric(int64_t shared_frame_id, +@@ -417,7 +417,7 @@ void RtpPayloadParams::GenericToGeneric(int64_t shared_frame_id, bool is_keyframe, RTPVideoHeader* rtp_video_header) { RTPVideoHeader::GenericDescriptorInfo& generic = @@ -20,7 +20,7 @@ index 5eff91fa5c..c036d27e78 100644 generic.frame_id = shared_frame_id; generic.decode_target_indications.push_back(DecodeTargetIndication::kSwitch); -@@ -335,7 +335,7 @@ void RtpPayloadParams::H264ToGeneric(const CodecSpecificInfoH264& h264_info, +@@ -450,7 +450,7 @@ void RtpPayloadParams::H264ToGeneric(const CodecSpecificInfoH264& h264_info, } RTPVideoHeader::GenericDescriptorInfo& generic = @@ -29,7 +29,7 @@ index 5eff91fa5c..c036d27e78 100644 generic.frame_id = shared_frame_id; generic.temporal_index = temporal_index; -@@ -392,7 +392,7 @@ void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info, +@@ -507,7 +507,7 @@ void RtpPayloadParams::Vp8ToGeneric(const CodecSpecificInfoVP8& vp8_info, } RTPVideoHeader::GenericDescriptorInfo& generic = @@ -38,7 +38,7 @@ index 5eff91fa5c..c036d27e78 100644 generic.frame_id = shared_frame_id; generic.spatial_index = spatial_index; -@@ -465,7 +465,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, +@@ -576,7 +576,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, } RTPVideoHeader::GenericDescriptorInfo& result = @@ -47,6 +47,15 @@ index 5eff91fa5c..c036d27e78 100644 result.frame_id = shared_frame_id; result.spatial_index = spatial_index; +@@ -641,7 +641,7 @@ void RtpPayloadParams::Vp9ToGeneric(const CodecSpecificInfoVP9& vp9_info, + // non-layerd streams. + if (spatial_index > 0 || temporal_index > 0) { + // Prefer to generate no generic layering than an inconsistent one. 
+- rtp_video_header.generic.reset(); ++ rtp_video_header.generic_.reset(); + return; + } + diff --git a/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc b/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc index f4525f0db1..eadbae52eb 100644 --- a/modules/rtp_rtcp/source/rtp_descriptor_authentication.cc @@ -66,11 +75,11 @@ index f4525f0db1..eadbae52eb 100644 if (descriptor.spatial_index < 0 || descriptor.temporal_index < 0 || descriptor.spatial_index >= diff --git a/modules/rtp_rtcp/source/rtp_sender_video.cc b/modules/rtp_rtcp/source/rtp_sender_video.cc -index c863db4ccf..a3fcb32ad2 100644 +index ebca7aaa75..ce5e224a9f 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video.cc -@@ -130,9 +130,9 @@ absl::optional LoadVideoPlayoutDelayOverride( - bool PacketWillLikelyBeRequestedForRestransmitionIfLost( +@@ -121,9 +121,9 @@ absl::optional LoadVideoPlayoutDelayOverride( + bool PacketWillLikelyBeRequestedForRestransmissionIfLost( const RTPVideoHeader& video_header) { return IsBaseLayer(video_header) && - !(video_header.generic.has_value() @@ -81,7 +90,7 @@ index c863db4ccf..a3fcb32ad2 100644 DecodeTargetIndication::kDiscardable) : false); } -@@ -365,26 +365,26 @@ void RTPSenderVideo::AddRtpHeaderExtensions(const RTPVideoHeader& video_header, +@@ -358,26 +358,26 @@ void RTPSenderVideo::AddRtpHeaderExtensions(const RTPVideoHeader& video_header, *video_header.absolute_capture_time); } @@ -116,7 +125,7 @@ index c863db4ccf..a3fcb32ad2 100644 RTC_DCHECK_EQ( descriptor.frame_dependencies.decode_target_indications.size(), video_structure_->num_decode_targets); -@@ -399,7 +399,7 @@ void RTPSenderVideo::AddRtpHeaderExtensions(const RTPVideoHeader& video_header, +@@ -392,7 +392,7 @@ void RTPSenderVideo::AddRtpHeaderExtensions(const RTPVideoHeader& video_header, // layers when inter layer dependency is not used, i.e. S structures. // Distinguish these two cases by checking if there are any dependencies. if (video_header.frame_type == VideoFrameType::kVideoFrameKey && @@ -125,7 +134,7 @@ index c863db4ccf..a3fcb32ad2 100644 // To avoid extra structure copy, temporary share ownership of the // video_structure with the dependency descriptor. 
descriptor.attached_structure = -@@ -422,17 +422,17 @@ void RTPSenderVideo::AddRtpHeaderExtensions(const RTPVideoHeader& video_header, +@@ -415,17 +415,17 @@ void RTPSenderVideo::AddRtpHeaderExtensions(const RTPVideoHeader& video_header, if (first_packet) { generic_descriptor.SetFrameId( @@ -138,9 +147,9 @@ index c863db4ccf..a3fcb32ad2 100644 + video_header.generic_->frame_id - dep); } -- uint8_t spatial_bimask = 1 << video_header.generic->spatial_index; -+ uint8_t spatial_bimask = 1 << video_header.generic_->spatial_index; - generic_descriptor.SetSpatialLayersBitmask(spatial_bimask); +- uint8_t spatial_bitmask = 1 << video_header.generic->spatial_index; ++ uint8_t spatial_bitmask = 1 << video_header.generic_->spatial_index; + generic_descriptor.SetSpatialLayersBitmask(spatial_bitmask); generic_descriptor.SetTemporalLayer( - video_header.generic->temporal_index); @@ -148,7 +157,7 @@ index c863db4ccf..a3fcb32ad2 100644 if (video_header.frame_type == VideoFrameType::kVideoFrameKey) { generic_descriptor.SetResolution(video_header.width, -@@ -523,12 +523,12 @@ bool RTPSenderVideo::SendVideo( +@@ -504,12 +504,12 @@ bool RTPSenderVideo::SendVideo(int payload_type, } } diff --git a/winuwp/008-streamer.patch b/winuwp/008-streamer.patch new file mode 100644 index 00000000..5baeeafd --- /dev/null +++ b/winuwp/008-streamer.patch @@ -0,0 +1,80 @@ +diff --git a/rtc_base/checks.h b/rtc_base/checks.h +index 99fee97d0a..4806805f71 100644 +--- a/rtc_base/checks.h ++++ b/rtc_base/checks.h +@@ -229,19 +229,17 @@ template <> + class LogStreamer<> final { + public: + template ())), + absl::enable_if_t::value || + std::is_enum::value>* = nullptr> +- RTC_FORCE_INLINE LogStreamer operator<<(U arg) const { +- return LogStreamer(MakeVal(arg), this); ++ RTC_FORCE_INLINE auto operator<<(U arg) const { ++ return LogStreamer(MakeVal(arg), this); + } + + template ())), + absl::enable_if_t::value && + !std::is_enum::value>* = nullptr> +- RTC_FORCE_INLINE LogStreamer operator<<(const U& arg) const { +- return LogStreamer(MakeVal(arg), this); ++ RTC_FORCE_INLINE auto operator<<(const U& arg) const { ++ return LogStreamer(MakeVal(arg), this); + } + + #if RTC_CHECK_MSG_ENABLED +diff --git a/rtc_base/logging.h b/rtc_base/logging.h +index b171cfe11e..a6d465b825 100644 +--- a/rtc_base/logging.h ++++ b/rtc_base/logging.h +@@ -376,20 +376,9 @@ class LogStreamer; + template <> + class LogStreamer<> final { + public: +- template ())), +- absl::enable_if_t::value || +- std::is_enum::value>* = nullptr> +- RTC_FORCE_INLINE LogStreamer operator<<(U arg) const { +- return LogStreamer(MakeVal(arg), this); +- } +- +- template ())), +- absl::enable_if_t::value && +- !std::is_enum::value>* = nullptr> +- RTC_FORCE_INLINE LogStreamer operator<<(const U& arg) const { +- return LogStreamer(MakeVal(arg), this); ++ template ++ RTC_FORCE_INLINE auto operator<<(const U& arg) const { ++ return LogStreamer(MakeVal(arg), this); + } + + template +@@ -407,20 +396,9 @@ class LogStreamer final { + RTC_FORCE_INLINE LogStreamer(T arg, const LogStreamer* prior) + : arg_(arg), prior_(prior) {} + +- template ())), +- absl::enable_if_t::value || +- std::is_enum::value>* = nullptr> +- RTC_FORCE_INLINE LogStreamer operator<<(U arg) const { +- return LogStreamer(MakeVal(arg), this); +- } +- +- template ())), +- absl::enable_if_t::value && +- !std::is_enum::value>* = nullptr> +- RTC_FORCE_INLINE LogStreamer operator<<(const U& arg) const { +- return LogStreamer(MakeVal(arg), this); ++ template ++ RTC_FORCE_INLINE auto operator<<(const U& arg) const { ++ return 
LogStreamer(MakeVal(arg), this);
+ }
+
+ template
diff --git a/winuwp/modules/video_coding/codecs/h264/winuwp/decoder/h264_decoder_mf_impl.cc b/winuwp/modules/video_coding/codecs/h264/winuwp/decoder/h264_decoder_mf_impl.cc
index e94e91bf..056c47da 100644
--- a/winuwp/modules/video_coding/codecs/h264/winuwp/decoder/h264_decoder_mf_impl.cc
+++ b/winuwp/modules/video_coding/codecs/h264/winuwp/decoder/h264_decoder_mf_impl.cc
@@ -413,12 +413,12 @@ HRESULT H264DecoderMFImpl::EnqueueFrame(const EncodedImage& input_image,
   int64_t sample_time_ms;
   if (first_frame_rtp_ == 0) {
-    first_frame_rtp_ = input_image.Timestamp();
+    first_frame_rtp_ = input_image.RtpTimestamp();
     sample_time_ms = 0;
   } else {
     // Convert from 90 khz, rounding to nearest ms.
     sample_time_ms =
-        (static_cast<int64_t>(input_image.Timestamp()) - first_frame_rtp_) /
+        (static_cast<int64_t>(input_image.RtpTimestamp()) - first_frame_rtp_) /
         90.0 + 0.5f;
   }
@@ -503,7 +503,7 @@ int H264DecoderMFImpl::Decode(const EncodedImage& input_image,
     return WEBRTC_VIDEO_CODEC_ERROR;
   // Flush any decoded samples resulting from new frame, invoking callback
-  hr = FlushFrames(input_image.Timestamp(), input_image.ntp_time_ms_);
+  hr = FlushFrames(input_image.RtpTimestamp(), input_image.ntp_time_ms_);
   if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
     // Output media type is no longer suitable. Reconfigure and retry.
@@ -520,7 +520,7 @@ int H264DecoderMFImpl::Decode(const EncodedImage& input_image,
     width_.reset();
     height_.reset();
-    hr = FlushFrames(input_image.Timestamp(), input_image.ntp_time_ms_);
+    hr = FlushFrames(input_image.RtpTimestamp(), input_image.ntp_time_ms_);
   }
   if (SUCCEEDED(hr) || hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
diff --git a/winuwp/modules/video_coding/codecs/h264/winuwp/encoder/h264_encoder_mf_impl.cc b/winuwp/modules/video_coding/codecs/h264/winuwp/encoder/h264_encoder_mf_impl.cc
index 7ba029fd..0150e8d1 100644
--- a/winuwp/modules/video_coding/codecs/h264/winuwp/encoder/h264_encoder_mf_impl.cc
+++ b/winuwp/modules/video_coding/codecs/h264/winuwp/encoder/h264_encoder_mf_impl.cc
@@ -622,7 +622,7 @@ void H264EncoderMFImpl::OnH264Encoded(ComPtr<IMFSample> sample) {
     }
   }
-  encodedImage.SetTimestamp(frameAttributes.timestamp);
+  encodedImage.SetRtpTimestamp(frameAttributes.timestamp);
   encodedImage.ntp_time_ms_ = frameAttributes.ntpTime;
   encodedImage.capture_time_ms_ = frameAttributes.captureRenderTime;
   encodedImage._encodedWidth = frameAttributes.frameWidth;