WebRtc_Word32 => int32_t in video_coding/

BUG=314

Review URL: https://webrtc-codereview.appspot.com/1203008

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3778 4adac7df-926f-26a2-2b94-8c16560cd09d
pbos@webrtc.org
2013-04-08 11:13:29 +00:00
parent 2f44673d66
commit 034f004a4f
21 changed files with 183 additions and 197 deletions
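
The change is mechanical: each legacy WebRtc_* alias is replaced by the <stdint.h> type it already denotes, so widths and signedness are untouched. A rough sketch of the assumed mapping for the aliases that appear in this commit (illustrative only; the authoritative definitions live in webrtc/typedefs.h):

#include <stdint.h>

// Assumed equivalents of the retired aliases -- shown for orientation,
// not copied verbatim from webrtc/typedefs.h.
typedef int32_t  WebRtc_Word32;   // replaced by int32_t
typedef uint8_t  WebRtc_UWord8;   // replaced by uint8_t
typedef uint32_t WebRtc_UWord32;  // replaced by uint32_t
typedef uint64_t WebRtc_UWord64;  // replaced by uint64_t

Because every alias resolves to exactly one standard type, the hunks below are one-for-one textual substitutions; the only non-trivial edits are re-wrapped lines where the shorter type name changed line lengths.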


@@ -45,7 +45,7 @@ int PacketManipulatorImpl::ManipulatePackets(
   packet_reader_->InitializeReading(encoded_image->_buffer,
       encoded_image->_length,
       config_.packet_size_in_bytes);
-  WebRtc_UWord8* packet = NULL;
+  uint8_t* packet = NULL;
   int nbr_bytes_to_read;
   // keep track of if we've lost any packets, since then we shall loose
   // the remains of the current frame:


@@ -61,7 +61,7 @@ class PacketManipulatorTest: public PacketRelatedTest {
   void VerifyPacketLoss(int expected_nbr_packets_dropped,
       int actual_nbr_packets_dropped,
       int expected_packet_data_length,
-      WebRtc_UWord8* expected_packet_data,
+      uint8_t* expected_packet_data,
       EncodedImage& actual_image) {
     EXPECT_EQ(expected_nbr_packets_dropped, actual_nbr_packets_dropped);
     EXPECT_EQ(expected_packet_data_length, static_cast<int>(image_._length));
@@ -121,8 +121,8 @@ TEST_F(PacketManipulatorTest, BurstDropNinePackets) {
   // Create a longer packet data structure (10 packets)
   const int kNbrPackets = 10;
   const int kDataLength = kPacketSizeInBytes * kNbrPackets;
-  WebRtc_UWord8 data[kDataLength];
-  WebRtc_UWord8* data_pointer = data;
+  uint8_t data[kDataLength];
+  uint8_t* data_pointer = data;
   // Fill with 0s, 1s and so on to be able to easily verify which were dropped:
   for (int i = 0; i < kNbrPackets; ++i) {
     memset(data_pointer + i * kPacketSizeInBytes, i, kPacketSizeInBytes);


@@ -61,8 +61,8 @@ bool VideoProcessorImpl::Init() {
   // Initialize data structures used by the encoder/decoder APIs
   size_t frame_length_in_bytes = frame_reader_->FrameLength();
-  source_buffer_ = new WebRtc_UWord8[frame_length_in_bytes];
-  last_successful_frame_buffer_ = new WebRtc_UWord8[frame_length_in_bytes];
+  source_buffer_ = new uint8_t[frame_length_in_bytes];
+  last_successful_frame_buffer_ = new uint8_t[frame_length_in_bytes];
   // Set fixed properties common for all frames.
   // To keep track of spatial resize actions by encoder.
   last_encoder_frame_width_ = config_.codec_settings->width;
@@ -71,7 +71,7 @@ bool VideoProcessorImpl::Init() {
   // Setup required callbacks for the encoder/decoder:
   encode_callback_ = new VideoProcessorEncodeCompleteCallback(this);
   decode_callback_ = new VideoProcessorDecodeCompleteCallback(this);
-  WebRtc_Word32 register_result =
+  int32_t register_result =
       encoder_->RegisterEncodeCompleteCallback(encode_callback_);
   if (register_result != WEBRTC_VIDEO_CODEC_OK) {
     fprintf(stderr, "Failed to register encode complete callback, return code: "
@@ -85,11 +85,11 @@ bool VideoProcessorImpl::Init() {
     return false;
   }
   // Init the encoder and decoder
-  WebRtc_UWord32 nbr_of_cores = 1;
+  uint32_t nbr_of_cores = 1;
   if (!config_.use_single_core) {
     nbr_of_cores = CpuInfo::DetectNumberOfCores();
   }
-  WebRtc_Word32 init_result =
+  int32_t init_result =
       encoder_->InitEncode(config_.codec_settings, nbr_of_cores,
           config_.networking_config.max_payload_size_in_bytes);
   if (init_result != WEBRTC_VIDEO_CODEC_OK) {
@@ -192,8 +192,7 @@ bool VideoProcessorImpl::ProcessFrame(int frame_number) {
   // For dropped frames, we regard them as zero size encoded frames.
   encoded_frame_size_ = 0;
-  WebRtc_Word32 encode_result = encoder_->Encode(source_frame_, NULL,
-      &frame_types);
+  int32_t encode_result = encoder_->Encode(source_frame_, NULL, &frame_types);
   if (encode_result != WEBRTC_VIDEO_CODEC_OK) {
     fprintf(stderr, "Failed to encode frame %d, return code: %d\n",
@@ -264,8 +263,8 @@ void VideoProcessorImpl::FrameEncoded(EncodedImage* encoded_image) {
   decode_start_ = TickTime::Now();
   // TODO(kjellander): Pass fragmentation header to the decoder when
   // CL 172001 has been submitted and PacketManipulator supports this.
-  WebRtc_Word32 decode_result = decoder_->Decode(*encoded_image,
-      last_frame_missing_, NULL);
+  int32_t decode_result = decoder_->Decode(*encoded_image, last_frame_missing_,
+      NULL);
   stat.decode_return_code = decode_result;
   if (decode_result != WEBRTC_VIDEO_CODEC_OK) {
     // Write the last successful frame the output file to avoid getting it out
@@ -342,7 +341,7 @@ void VideoProcessorImpl::FrameDecoded(const I420VideoFrame& image) {
 int VideoProcessorImpl::GetElapsedTimeMicroseconds(
     const webrtc::TickTime& start, const webrtc::TickTime& stop) {
-  WebRtc_UWord64 encode_time = (stop - start).Microseconds();
+  uint64_t encode_time = (stop - start).Microseconds();
   assert(encode_time <
       static_cast<unsigned int>(std::numeric_limits<int>::max()));
   return static_cast<int>(encode_time);
@@ -379,7 +378,7 @@ const char* VideoCodecTypeToStr(webrtc::VideoCodecType e) {
 }
 // Callbacks
-WebRtc_Word32
+int32_t
 VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
     EncodedImage& encoded_image,
     const webrtc::CodecSpecificInfo* codec_specific_info,
@@ -387,7 +386,7 @@ VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
   video_processor_->FrameEncoded(&encoded_image);  // Forward to parent class.
   return 0;
 }
-WebRtc_Word32
+int32_t
 VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
     I420VideoFrame& image) {
   video_processor_->FrameDecoded(image);  // forward to parent class


@@ -200,10 +200,10 @@ class VideoProcessorImpl : public VideoProcessor {
   EncodedImageCallback* encode_callback_;
   DecodedImageCallback* decode_callback_;
   // Buffer used for reading the source video file:
-  WebRtc_UWord8* source_buffer_;
+  uint8_t* source_buffer_;
   // Keep track of the last successful frame, since we need to write that
   // when decoding fails:
-  WebRtc_UWord8* last_successful_frame_buffer_;
+  uint8_t* last_successful_frame_buffer_;
   webrtc::I420VideoFrame source_frame_;
   // To keep track of if we have excluded the first key frame from packet loss:
   bool first_key_frame_has_been_excluded_;
@@ -231,10 +231,9 @@ class VideoProcessorImpl : public VideoProcessor {
   explicit VideoProcessorEncodeCompleteCallback(VideoProcessorImpl* vp)
       : video_processor_(vp) {
   }
-  WebRtc_Word32 Encoded(
-      webrtc::EncodedImage& encoded_image,
-      const webrtc::CodecSpecificInfo* codec_specific_info = NULL,
-      const webrtc::RTPFragmentationHeader* fragmentation = NULL);
+  int32_t Encoded(webrtc::EncodedImage& encoded_image,
+      const webrtc::CodecSpecificInfo* codec_specific_info = NULL,
+      const webrtc::RTPFragmentationHeader* fragmentation = NULL);
  private:
   VideoProcessorImpl* video_processor_;
@@ -247,7 +246,7 @@ class VideoProcessorImpl : public VideoProcessor {
   explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
       : video_processor_(vp) {
   }
-  WebRtc_Word32 Decoded(webrtc::I420VideoFrame& image);
+  int32_t Decoded(webrtc::I420VideoFrame& image);
  private:
   VideoProcessorImpl* video_processor_;