Add RtpVideoSender::SendVideoLayersAllocation

This adds a method to allow VideoLayersAllocation to be sent using the header extension RtpVideoLayersAllocationExtension.

Bug: webrtc:12000
Change-Id: Iafdc1e16911c57ca55d7cc0559a0b45774211e92
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/187495
Commit-Queue: Per Kjellander <perkj@webrtc.org>
Reviewed-by: Danil Chapovalov <danilchap@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#32397}
Author:    Per Kjellander
Date:      2020-10-14 08:41:52 +02:00
Committed: Commit Bot
Parent:    8171580dce
Commit:    4f350ba76c
6 changed files with 259 additions and 19 deletions
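
For context, a minimal caller-side sketch of how the new setter is intended to be used. It relies only on types, fields and methods that this change adds or exercises; BuildAllocation() and OnBitrateAllocationUpdated() are hypothetical helper names, and the extension still has to be registered on the RTP module (the unit tests below register it via RtpVideoLayersAllocationExtension::kUri).

// Caller-side sketch only; not part of this CL. BuildAllocation() and
// OnBitrateAllocationUpdated() are hypothetical helpers.
#include "api/units/data_rate.h"
#include "api/video/video_layers_allocation.h"
#include "modules/rtp_rtcp/source/rtp_sender_video.h"

webrtc::VideoLayersAllocation BuildAllocation() {
  webrtc::VideoLayersAllocation allocation;
  webrtc::VideoLayersAllocation::SpatialLayer layer;
  layer.width = 640;
  layer.height = 360;
  layer.target_bitrate_per_temporal_layer.push_back(
      webrtc::DataRate::KilobitsPerSec(500));
  allocation.resolution_and_frame_rate_is_valid = true;
  allocation.active_spatial_layers.push_back(layer);
  return allocation;
}

void OnBitrateAllocationUpdated(webrtc::RTPSenderVideo& rtp_sender_video) {
  // Sent in full with the next key frame; sent once more, without resolution
  // and frame rate, on a following non-discardable delta frame.
  rtp_sender_video.SetVideoLayersAllocation(BuildAllocation());
}

The full/partial split in the sketch mirrors the AddRtpHeaderExtensions() logic in the first file below.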

View File

@@ -34,6 +34,7 @@
 #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
 #include "modules/rtp_rtcp/source/rtp_header_extensions.h"
 #include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
+#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h"
 #include "modules/rtp_rtcp/source/time_util.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/experiments/field_trial_parser.h"
@@ -129,6 +130,18 @@ absl::optional<VideoPlayoutDelay> LoadVideoPlayoutDelayOverride(
                    : absl::nullopt;
 }
 
+// Some packets can be skipped and the stream can still be decoded. Those
+// packets are less likely to be retransmitted if they are lost.
+bool PacketWillLikelyBeRequestedForRestransmitionIfLost(
+    const RTPVideoHeader& video_header) {
+  return IsBaseLayer(video_header) &&
+         !(video_header.generic.has_value()
+               ? absl::c_linear_search(
+                     video_header.generic->decode_target_indications,
+                     DecodeTargetIndication::kDiscardable)
+               : false);
+}
+
 }  // namespace
 
 RTPSenderVideo::RTPSenderVideo(const Config& config)
@@ -140,6 +153,7 @@ RTPSenderVideo::RTPSenderVideo(const Config& config)
           : (kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers)),
       last_rotation_(kVideoRotation_0),
       transmit_color_space_next_frame_(false),
+      send_allocation_(false),
       current_playout_delay_{-1, -1},
       playout_delay_pending_(false),
       forced_playout_delay_(LoadVideoPlayoutDelayOverride(config.field_trials)),
@@ -223,11 +237,15 @@ void RTPSenderVideo::SetVideoStructure(
     frame_transformer_delegate_->SetVideoStructureUnderLock(video_structure);
     return;
   }
-  // Lock is being held by SetVideoStructure() caller.
-  SetVideoStructureUnderLock(video_structure);
+  SetVideoStructureInternal(video_structure);
 }
 
-void RTPSenderVideo::SetVideoStructureUnderLock(
+void RTPSenderVideo::SetVideoStructureAfterTransformation(
+    const FrameDependencyStructure* video_structure) {
+  SetVideoStructureInternal(video_structure);
+}
+
+void RTPSenderVideo::SetVideoStructureInternal(
     const FrameDependencyStructure* video_structure) {
   RTC_DCHECK_RUNS_SERIALIZED(&send_checker_);
   if (video_structure == nullptr) {
@@ -257,6 +275,28 @@ void RTPSenderVideo::SetVideoStructureUnderLock(
   video_structure_->structure_id = structure_id;
 }
 
+void RTPSenderVideo::SetVideoLayersAllocation(
+    VideoLayersAllocation allocation) {
+  if (frame_transformer_delegate_) {
+    frame_transformer_delegate_->SetVideoLayersAllocationUnderLock(
+        std::move(allocation));
+    return;
+  }
+  SetVideoLayersAllocationInternal(std::move(allocation));
+}
+
+void RTPSenderVideo::SetVideoLayersAllocationAfterTransformation(
+    VideoLayersAllocation allocation) {
+  SetVideoLayersAllocationInternal(std::move(allocation));
+}
+
+void RTPSenderVideo::SetVideoLayersAllocationInternal(
+    VideoLayersAllocation allocation) {
+  RTC_DCHECK_RUNS_SERIALIZED(&send_checker_);
+  allocation_ = std::move(allocation);
+  send_allocation_ = true;
+}
+
 void RTPSenderVideo::AddRtpHeaderExtensions(
     const RTPVideoHeader& video_header,
     const absl::optional<AbsoluteCaptureTime>& absolute_capture_time,
@@ -387,6 +427,18 @@ void RTPSenderVideo::AddRtpHeaderExtensions(
           generic_descriptor);
     }
   }
+
+  if (first_packet && send_allocation_) {
+    if (video_header.frame_type == VideoFrameType::kVideoFrameKey) {
+      packet->SetExtension<RtpVideoLayersAllocationExtension>(
+          allocation_.value());
+    } else if (PacketWillLikelyBeRequestedForRestransmitionIfLost(
+                   video_header)) {
+      VideoLayersAllocation allocation = allocation_.value();
+      allocation.resolution_and_frame_rate_is_valid = false;
+      packet->SetExtension<RtpVideoLayersAllocationExtension>(allocation);
+    }
+  }
 }
 
 bool RTPSenderVideo::SendVideo(
@@ -417,11 +469,16 @@ bool RTPSenderVideo::SendVideo(
   }
 
   MaybeUpdateCurrentPlayoutDelay(video_header);
-  if (video_header.frame_type == VideoFrameType::kVideoFrameKey &&
-      !IsNoopDelay(current_playout_delay_)) {
-    // Force playout delay on key-frames, if set.
-    playout_delay_pending_ = true;
+  if (video_header.frame_type == VideoFrameType::kVideoFrameKey) {
+    if (!IsNoopDelay(current_playout_delay_)) {
+      // Force playout delay on key-frames, if set.
+      playout_delay_pending_ = true;
+    }
+    if (allocation_) {
+      // Send the bitrate allocation on every key frame.
+      send_allocation_ = true;
+    }
   }
 
   if (video_structure_ != nullptr && video_header.generic) {
     active_decode_targets_tracker_.OnFrame(
@@ -638,15 +695,11 @@ bool RTPSenderVideo::SendVideo(
   }
 
   if (video_header.frame_type == VideoFrameType::kVideoFrameKey ||
-      (IsBaseLayer(video_header) &&
-       !(video_header.generic.has_value()
-             ? absl::c_linear_search(
-                   video_header.generic->decode_target_indications,
-                   DecodeTargetIndication::kDiscardable)
-             : false))) {
-    // This frame has guaranteed delivery, no need to populate playout
+      PacketWillLikelyBeRequestedForRestransmitionIfLost(video_header)) {
+    // This frame will likely be delivered, no need to populate playout
     // delay extensions until it changes again.
     playout_delay_pending_ = false;
+    send_allocation_ = false;
   }
 
   TRACE_EVENT_ASYNC_END1("webrtc", "Video", capture_time_ms, "timestamp",

View File

@@ -24,6 +24,7 @@
 #include "api/transport/rtp/dependency_descriptor.h"
 #include "api/video/video_codec_type.h"
 #include "api/video/video_frame_type.h"
+#include "api/video/video_layers_allocation.h"
 #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
 #include "modules/rtp_rtcp/source/absolute_capture_time_sender.h"
 #include "modules/rtp_rtcp/source/active_decode_targets_helper.h"
@@ -117,9 +118,22 @@ class RTPSenderVideo {
   // All calls to SendVideo after this call must use video_header compatible
   // with the video_structure.
   void SetVideoStructure(const FrameDependencyStructure* video_structure);
-  void SetVideoStructureUnderLock(
+  // Should only be used by a RTPSenderVideoFrameTransformerDelegate and exists
+  // to ensure correct synchronization.
+  void SetVideoStructureAfterTransformation(
       const FrameDependencyStructure* video_structure);
 
+  // Sets the current active VideoLayersAllocation. The allocation will be sent
+  // using the RTP video layers allocation extension. It is sent in full on
+  // every key frame, and once per call to this method, without resolution and
+  // frame rate, on a non-discardable delta frame.
+  void SetVideoLayersAllocation(VideoLayersAllocation allocation);
+  // Should only be used by a RTPSenderVideoFrameTransformerDelegate and exists
+  // to ensure correct synchronization.
+  void SetVideoLayersAllocationAfterTransformation(
+      VideoLayersAllocation allocation);
+
   // Returns the current packetization overhead rate, in bps. Note that this is
   // the payload overhead, eg the VP8 payload headers, not the RTP headers
   // or extension/
@@ -145,6 +159,10 @@ class RTPSenderVideo {
     int64_t last_frame_time_ms;
   };
 
+  void SetVideoStructureInternal(
+      const FrameDependencyStructure* video_structure);
+  void SetVideoLayersAllocationInternal(VideoLayersAllocation allocation);
+
   void AddRtpHeaderExtensions(
       const RTPVideoHeader& video_header,
       const absl::optional<AbsoluteCaptureTime>& absolute_capture_time,
@@ -181,10 +199,14 @@ class RTPSenderVideo {
   bool transmit_color_space_next_frame_ RTC_GUARDED_BY(send_checker_);
   std::unique_ptr<FrameDependencyStructure> video_structure_
       RTC_GUARDED_BY(send_checker_);
+  absl::optional<VideoLayersAllocation> allocation_
+      RTC_GUARDED_BY(send_checker_);
+  // Flag indicating if we should send |allocation_|.
+  bool send_allocation_ RTC_GUARDED_BY(send_checker_);
 
   // Current target playout delay.
   VideoPlayoutDelay current_playout_delay_ RTC_GUARDED_BY(send_checker_);
-  // Flag indicating if we need to propagate |current_playout_delay_| in order
+  // Flag indicating if we need to send |current_playout_delay_| in order
   // to guarantee it gets delivered.
   bool playout_delay_pending_;
   // Set by the field trial WebRTC-ForceSendPlayoutDelay to override the playout

View File

@@ -162,7 +162,14 @@ void RTPSenderVideoFrameTransformerDelegate::SetVideoStructureUnderLock(
     const FrameDependencyStructure* video_structure) {
   MutexLock lock(&sender_lock_);
   RTC_CHECK(sender_);
-  sender_->SetVideoStructureUnderLock(video_structure);
+  sender_->SetVideoStructureAfterTransformation(video_structure);
+}
+
+void RTPSenderVideoFrameTransformerDelegate::SetVideoLayersAllocationUnderLock(
+    VideoLayersAllocation allocation) {
+  MutexLock lock(&sender_lock_);
+  RTC_CHECK(sender_);
+  sender_->SetVideoLayersAllocationAfterTransformation(std::move(allocation));
 }
 
 void RTPSenderVideoFrameTransformerDelegate::Reset() {

View File

@@ -16,6 +16,7 @@
 #include "api/frame_transformer_interface.h"
 #include "api/scoped_refptr.h"
 #include "api/task_queue/task_queue_base.h"
+#include "api/video/video_layers_allocation.h"
 #include "rtc_base/synchronization/mutex.h"
 
 namespace webrtc {
@@ -51,10 +52,16 @@ class RTPSenderVideoFrameTransformerDelegate : public TransformedFrameCallback {
   // Delegates the call to RTPSendVideo::SendVideo on the |encoder_queue_|.
   void SendVideo(std::unique_ptr<TransformableFrameInterface> frame) const;
 
-  // Delegates the call to RTPSendVideo::SendVideo under |sender_lock_|.
+  // Delegates the call to RTPSendVideo::SetVideoStructureAfterTransformation
+  // under |sender_lock_|.
   void SetVideoStructureUnderLock(
       const FrameDependencyStructure* video_structure);
 
+  // Delegates the call to
+  // RTPSendVideo::SetVideoLayersAllocationAfterTransformation under
+  // |sender_lock_|.
+  void SetVideoLayersAllocationUnderLock(VideoLayersAllocation allocation);
+
   // Unregisters and releases the |frame_transformer_| reference, and resets
   // |sender_| under lock. Called from RTPSenderVideo destructor to prevent the
   // |sender_| to dangle.

View File

@@ -33,6 +33,7 @@
 #include "modules/rtp_rtcp/source/rtp_header_extensions.h"
 #include "modules/rtp_rtcp/source/rtp_packet_received.h"
 #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
+#include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h"
 #include "modules/rtp_rtcp/source/time_util.h"
 #include "rtc_base/arraysize.h"
 #include "rtc_base/rate_limiter.h"
@@ -66,7 +67,8 @@ enum : int {  // The first valid value is 1.
   kVideoRotationExtensionId,
   kVideoTimingExtensionId,
   kAbsoluteCaptureTimeExtensionId,
-  kPlayoutDelayExtensionId
+  kPlayoutDelayExtensionId,
+  kVideoLayersAllocationExtensionId,
 };
 
 constexpr int kPayload = 100;
@@ -98,6 +100,8 @@ class LoopbackTransportTest : public webrtc::Transport {
         kAbsoluteCaptureTimeExtensionId);
     receivers_extensions_.Register<PlayoutDelayLimits>(
         kPlayoutDelayExtensionId);
+    receivers_extensions_.Register<RtpVideoLayersAllocationExtension>(
+        kVideoLayersAllocationExtensionId);
   }
 
   bool SendRtp(const uint8_t* data,
@@ -821,6 +825,152 @@ TEST_P(RtpSenderVideoTest,
   UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(1);
 }
 
+TEST_P(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) {
+  const size_t kFrameSize = 100;
+  uint8_t kFrame[kFrameSize];
+  rtp_module_->RegisterRtpHeaderExtension(
+      RtpVideoLayersAllocationExtension::kUri,
+      kVideoLayersAllocationExtensionId);
+
+  VideoLayersAllocation allocation;
+  VideoLayersAllocation::SpatialLayer layer;
+  layer.width = 360;
+  layer.height = 180;
+  layer.target_bitrate_per_temporal_layer.push_back(
+      DataRate::KilobitsPerSec(50));
+  allocation.resolution_and_frame_rate_is_valid = true;
+  allocation.active_spatial_layers.push_back(layer);
+  rtp_sender_video_->SetVideoLayersAllocation(allocation);
+
+  RTPVideoHeader hdr;
+  hdr.frame_type = VideoFrameType::kVideoFrameKey;
+  rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+                               kDefaultExpectedRetransmissionTimeMs);
+
+  VideoLayersAllocation sent_allocation;
+  EXPECT_TRUE(
+      transport_.last_sent_packet()
+          .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation));
+  EXPECT_THAT(sent_allocation.active_spatial_layers, ElementsAre(layer));
+
+  // The next key frame also has the allocation.
+  rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+                               kDefaultExpectedRetransmissionTimeMs);
+  EXPECT_TRUE(
+      transport_.last_sent_packet()
+          .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation));
+}
+
+TEST_P(RtpSenderVideoTest,
+       VideoLayersAllocationWithoutResolutionSentOnDeltaFrames) {
+  const size_t kFrameSize = 100;
+  uint8_t kFrame[kFrameSize];
+  rtp_module_->RegisterRtpHeaderExtension(
+      RtpVideoLayersAllocationExtension::kUri,
+      kVideoLayersAllocationExtensionId);
+
+  VideoLayersAllocation allocation;
+  VideoLayersAllocation::SpatialLayer layer;
+  layer.width = 360;
+  layer.height = 180;
+  allocation.resolution_and_frame_rate_is_valid = true;
+  layer.target_bitrate_per_temporal_layer.push_back(
+      DataRate::KilobitsPerSec(50));
+  allocation.active_spatial_layers.push_back(layer);
+  rtp_sender_video_->SetVideoLayersAllocation(allocation);
+
+  RTPVideoHeader hdr;
+  hdr.frame_type = VideoFrameType::kVideoFrameDelta;
+  rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+                               kDefaultExpectedRetransmissionTimeMs);
+
+  VideoLayersAllocation sent_allocation;
+  EXPECT_TRUE(
+      transport_.last_sent_packet()
+          .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation));
+  ASSERT_THAT(sent_allocation.active_spatial_layers, SizeIs(1));
+  EXPECT_FALSE(sent_allocation.resolution_and_frame_rate_is_valid);
+  EXPECT_THAT(sent_allocation.active_spatial_layers[0]
+                  .target_bitrate_per_temporal_layer,
+              SizeIs(1));
+}
+
+TEST_P(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) {
+  const size_t kFrameSize = 100;
+  uint8_t kFrame[kFrameSize];
+  rtp_module_->RegisterRtpHeaderExtension(
+      RtpVideoLayersAllocationExtension::kUri,
+      kVideoLayersAllocationExtensionId);
+
+  VideoLayersAllocation allocation;
+  VideoLayersAllocation::SpatialLayer layer;
+  layer.target_bitrate_per_temporal_layer.push_back(
+      DataRate::KilobitsPerSec(50));
+  allocation.active_spatial_layers.push_back(layer);
+  rtp_sender_video_->SetVideoLayersAllocation(allocation);
+
+  RTPVideoHeader hdr;
+  hdr.frame_type = VideoFrameType::kVideoFrameDelta;
+  rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+                               kDefaultExpectedRetransmissionTimeMs);
+
+  VideoLayersAllocation sent_allocation;
+  EXPECT_TRUE(
+      transport_.last_sent_packet()
+          .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation));
+  EXPECT_THAT(sent_allocation.active_spatial_layers, SizeIs(1));
+
+  // VideoLayersAllocation is not sent on the next delta frame.
+  rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+                               kDefaultExpectedRetransmissionTimeMs);
+  EXPECT_FALSE(transport_.last_sent_packet()
+                   .HasExtension<RtpVideoLayersAllocationExtension>());
+
+  // Update the allocation. VideoLayersAllocation should be sent on the next
+  // frame.
+  rtp_sender_video_->SetVideoLayersAllocation(allocation);
+  rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+                               kDefaultExpectedRetransmissionTimeMs);
+  EXPECT_TRUE(
+      transport_.last_sent_packet()
+          .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation));
+}
+
+TEST_P(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) {
+  const size_t kFrameSize = 100;
+  uint8_t kFrame[kFrameSize];
+  rtp_module_->RegisterRtpHeaderExtension(
+      RtpVideoLayersAllocationExtension::kUri,
+      kVideoLayersAllocationExtensionId);
+
+  VideoLayersAllocation allocation;
+  VideoLayersAllocation::SpatialLayer layer;
+  layer.target_bitrate_per_temporal_layer.push_back(
+      DataRate::KilobitsPerSec(50));
+  allocation.active_spatial_layers.push_back(layer);
+  rtp_sender_video_->SetVideoLayersAllocation(allocation);
+
+  RTPVideoHeader hdr;
+  hdr.frame_type = VideoFrameType::kVideoFrameDelta;
+  hdr.codec = VideoCodecType::kVideoCodecVP8;
+  auto& vp8_header = hdr.video_type_header.emplace<RTPVideoHeaderVP8>();
+  vp8_header.temporalIdx = 1;
+  rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+                               kDefaultExpectedRetransmissionTimeMs);
+
+  VideoLayersAllocation sent_allocation;
+  EXPECT_FALSE(
+      transport_.last_sent_packet()
+          .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation));
+
+  // Send a delta frame on tl0.
+  vp8_header.temporalIdx = 0;
+  rtp_sender_video_->SendVideo(kPayload, kType, kTimestamp, 0, kFrame, hdr,
+                               kDefaultExpectedRetransmissionTimeMs);
+  EXPECT_TRUE(
+      transport_.last_sent_packet()
+          .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation));
+}
+
 TEST_P(RtpSenderVideoTest, AbsoluteCaptureTime) {
   constexpr int64_t kAbsoluteCaptureTimestampMs = 12345678;
   uint8_t kFrame[kMaxPacketLength];

View File

@@ -148,6 +148,7 @@ bool RtpVideoLayersAllocationExtension::Parse(
   rtc::BitBuffer reader(data.data(), data.size());
   if (!allocation)
     return false;
+  allocation->active_spatial_layers.clear();
   uint32_t val;
 
   // NS:
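
The active_spatial_layers.clear() added above keeps Parse() correct when a caller reuses one VideoLayersAllocation object as the output for several packets. A rough sketch of that pattern, where first_payload and second_payload are placeholders for the raw extension data of two received packets:

// Sketch only: illustrates why Parse() clears active_spatial_layers.
webrtc::VideoLayersAllocation allocation;
webrtc::RtpVideoLayersAllocationExtension::Parse(first_payload, &allocation);
// Without the clear() above, the layers parsed from first_payload would still
// be in |allocation| here, and the second call would append to them.
webrtc::RtpVideoLayersAllocationExtension::Parse(second_payload, &allocation);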