Stop using LOG macros in favor of RTC_ prefixed macros.

This CL has been generated with the following script:

for m in PLOG \
  LOG_TAG \
  LOG_GLEM \
  LOG_GLE_EX \
  LOG_GLE \
  LAST_SYSTEM_ERROR \
  LOG_ERRNO_EX \
  LOG_ERRNO \
  LOG_ERR_EX \
  LOG_ERR \
  LOG_V \
  LOG_F \
  LOG_T_F \
  LOG_E \
  LOG_T \
  LOG_CHECK_LEVEL_V \
  LOG_CHECK_LEVEL \
  LOG
do
  git grep -l $m | xargs sed -i "s,\b$m\b,RTC_$m,g"
done
git checkout rtc_base/logging.h
git cl format
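
For context, the \b word boundaries in the sed expression keep the
substitution from touching identifiers that merely contain one of the macro
names, so an already-converted call site is not turned into RTC_RTC_LOG.
A minimal sketch of that behavior, assuming GNU sed and a made-up input
line (BSD/macOS sed spells word boundaries differently):

  $ echo 'RTC_LOG(LS_INFO) << x; LOG(LS_INFO) << y;' | sed 's,\bLOG\b,RTC_LOG,g'
  RTC_LOG(LS_INFO) << x; RTC_LOG(LS_INFO) << y;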

Bug: webrtc:8452
Change-Id: I1a53ef3e0a5ef6e244e62b2e012b864914784600
Reviewed-on: https://webrtc-review.googlesource.com/21325
Reviewed-by: Niels Moller <nisse@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20617}
commit 675513b96a
parent 34fa309129
Author: Mirko Bonadei (2017-11-09 11:09:25 +01:00), committed by Commit Bot
407 changed files with 5753 additions and 5371 deletions


@@ -48,7 +48,7 @@ void decompressionOutputCallback(void *decoder,
std::unique_ptr<RTCFrameDecodeParams> decodeParams(
reinterpret_cast<RTCFrameDecodeParams *>(params));
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to decode frame. Status: " << status;
+RTC_LOG(LS_ERROR) << "Failed to decode frame. Status: " << status;
return;
}
// TODO(tkchin): Handle CVO properly.
@@ -126,7 +126,7 @@ void decompressionOutputCallback(void *decoder,
// This can happen after backgrounding. We need to wait for the next
// sps/pps before we can resume so we request a keyframe by returning an
// error.
-LOG(LS_WARNING) << "Missing video format. Frame with sps/pps required.";
+RTC_LOG(LS_WARNING) << "Missing video format. Frame with sps/pps required.";
return WEBRTC_VIDEO_CODEC_ERROR;
}
CMSampleBufferRef sampleBuffer = nullptr;
@@ -153,7 +153,7 @@ void decompressionOutputCallback(void *decoder,
#endif
CFRelease(sampleBuffer);
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to decode frame with code: " << status;
+RTC_LOG(LS_ERROR) << "Failed to decode frame with code: " << status;
return WEBRTC_VIDEO_CODEC_ERROR;
}
return WEBRTC_VIDEO_CODEC_OK;


@@ -100,7 +100,7 @@ bool CopyVideoFrameToNV12PixelBuffer(id<RTCI420Buffer> frameBuffer, CVPixelBuffe
CVReturn cvRet = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
if (cvRet != kCVReturnSuccess) {
-LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
+RTC_LOG(LS_ERROR) << "Failed to lock base address: " << cvRet;
return false;
}
uint8_t *dstY = reinterpret_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
@@ -122,7 +122,7 @@ bool CopyVideoFrameToNV12PixelBuffer(id<RTCI420Buffer> frameBuffer, CVPixelBuffe
frameBuffer.height);
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
if (ret) {
-LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
+RTC_LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
return false;
}
return true;
@@ -130,13 +130,13 @@ bool CopyVideoFrameToNV12PixelBuffer(id<RTCI420Buffer> frameBuffer, CVPixelBuffe
CVPixelBufferRef CreatePixelBuffer(CVPixelBufferPoolRef pixel_buffer_pool) {
if (!pixel_buffer_pool) {
-LOG(LS_ERROR) << "Failed to get pixel buffer pool.";
+RTC_LOG(LS_ERROR) << "Failed to get pixel buffer pool.";
return nullptr;
}
CVPixelBufferRef pixel_buffer;
CVReturn ret = CVPixelBufferPoolCreatePixelBuffer(nullptr, pixel_buffer_pool, &pixel_buffer);
if (ret != kCVReturnSuccess) {
-LOG(LS_ERROR) << "Failed to create pixel buffer: " << ret;
+RTC_LOG(LS_ERROR) << "Failed to create pixel buffer: " << ret;
// We probably want to drop frames here, since failure probably means
// that the pool is empty.
return nullptr;
@@ -306,7 +306,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
webrtc::Clock::GetRealTimeClock(), .5, .95));
_packetizationMode = RTCH264PacketizationModeNonInterleaved;
_profile = ExtractProfile([codecInfo nativeSdpVideoFormat]);
-LOG(LS_INFO) << "Using profile " << CFStringToString(_profile);
+RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(_profile);
RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]);
#if defined(WEBRTC_IOS)
@@ -405,7 +405,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
}
RTC_DCHECK(pixelBuffer);
if (!CopyVideoFrameToNV12PixelBuffer([frame.buffer toI420], pixelBuffer)) {
-LOG(LS_ERROR) << "Failed to copy frame data.";
+RTC_LOG(LS_ERROR) << "Failed to copy frame data.";
CVBufferRelease(pixelBuffer);
return WEBRTC_VIDEO_CODEC_ERROR;
}
@@ -456,7 +456,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
CVBufferRelease(pixelBuffer);
}
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to encode frame with code: " << status;
+RTC_LOG(LS_ERROR) << "Failed to encode frame with code: " << status;
return WEBRTC_VIDEO_CODEC_ERROR;
}
return WEBRTC_VIDEO_CODEC_OK;
@@ -496,7 +496,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
// Resetting the session when this happens fixes the issue.
// In addition we request a keyframe so video can recover quickly.
resetCompressionSession = YES;
-LOG(LS_INFO) << "Resetting compression session due to invalid pool.";
+RTC_LOG(LS_INFO) << "Resetting compression session due to invalid pool.";
}
#endif
@@ -523,7 +523,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
if (![compressionSessionPixelFormats
containsObject:[NSNumber numberWithLong:framePixelFormat]]) {
resetCompressionSession = YES;
-LOG(LS_INFO) << "Resetting compression session due to non-matching pixel format.";
+RTC_LOG(LS_INFO) << "Resetting compression session due to non-matching pixel format.";
}
}
@@ -591,7 +591,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
encoder_specs = nullptr;
}
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to create compression session: " << status;
+RTC_LOG(LS_ERROR) << "Failed to create compression session: " << status;
return WEBRTC_VIDEO_CODEC_ERROR;
}
#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
@@ -601,9 +601,9 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
nullptr,
&hwaccl_enabled);
if (status == noErr && (CFBooleanGetValue(hwaccl_enabled))) {
-LOG(LS_INFO) << "Compression session created with hw accl enabled";
+RTC_LOG(LS_INFO) << "Compression session created with hw accl enabled";
} else {
-LOG(LS_INFO) << "Compression session created with hw accl disabled";
+RTC_LOG(LS_INFO) << "Compression session created with hw accl disabled";
}
#endif
[self configureCompressionSession];
@@ -674,7 +674,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
CFRelease(dataRateLimits);
}
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to set data rate limit";
+RTC_LOG(LS_ERROR) << "Failed to set data rate limit";
}
_encoderBitrateBps = bitrateBps;
@@ -691,11 +691,11 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
timestamp:(uint32_t)timestamp
rotation:(RTCVideoRotation)rotation {
if (status != noErr) {
-LOG(LS_ERROR) << "H264 encode failed.";
+RTC_LOG(LS_ERROR) << "H264 encode failed.";
return;
}
if (infoFlags & kVTEncodeInfo_FrameDropped) {
-LOG(LS_INFO) << "H264 encode dropped frame.";
+RTC_LOG(LS_INFO) << "H264 encode dropped frame.";
return;
}
@@ -708,7 +708,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
}
if (isKeyframe) {
-LOG(LS_INFO) << "Generated keyframe";
+RTC_LOG(LS_INFO) << "Generated keyframe";
}
// Convert the sample buffer into a buffer suitable for RTP packetization.
@@ -745,7 +745,7 @@ CFStringRef ExtractProfile(webrtc::SdpVideoFormat videoFormat) {
BOOL res = _callback(frame, codecSpecificInfo, header);
if (!res) {
-LOG(LS_ERROR) << "Encode callback failed";
+RTC_LOG(LS_ERROR) << "Encode callback failed";
return;
}
_bitrateAdjuster->Update(frame.buffer.length);


@@ -44,8 +44,8 @@ void SetVTSessionProperty(VTSessionRef session,
CFRelease(cfNum);
if (status != noErr) {
std::string key_string = CFStringToString(key);
-LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
-<< " to " << value << ": " << status;
+RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+<< " to " << value << ": " << status;
}
}
@@ -60,8 +60,8 @@ void SetVTSessionProperty(VTSessionRef session,
CFRelease(cfNum);
if (status != noErr) {
std::string key_string = CFStringToString(key);
-LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
-<< " to " << value << ": " << status;
+RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+<< " to " << value << ": " << status;
}
}
@@ -71,8 +71,8 @@ void SetVTSessionProperty(VTSessionRef session, CFStringRef key, bool value) {
OSStatus status = VTSessionSetProperty(session, key, cf_bool);
if (status != noErr) {
std::string key_string = CFStringToString(key);
-LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
-<< " to " << value << ": " << status;
+RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+<< " to " << value << ": " << status;
}
}
@@ -84,7 +84,7 @@ void SetVTSessionProperty(VTSessionRef session,
if (status != noErr) {
std::string key_string = CFStringToString(key);
std::string val_string = CFStringToString(value);
-LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
-<< " to " << val_string << ": " << status;
+RTC_LOG(LS_ERROR) << "VTSessionSetProperty failed to set: " << key_string
+<< " to " << val_string << ": " << status;
}
}


@@ -42,7 +42,7 @@ bool H264CMSampleBufferToAnnexBBuffer(
CMVideoFormatDescriptionRef description =
CMSampleBufferGetFormatDescription(avcc_sample_buffer);
if (description == nullptr) {
-LOG(LS_ERROR) << "Failed to get sample buffer's description.";
+RTC_LOG(LS_ERROR) << "Failed to get sample buffer's description.";
return false;
}
@@ -52,7 +52,7 @@ bool H264CMSampleBufferToAnnexBBuffer(
OSStatus status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
description, 0, nullptr, nullptr, &param_set_count, &nalu_header_size);
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to get parameter set.";
+RTC_LOG(LS_ERROR) << "Failed to get parameter set.";
return false;
}
RTC_CHECK_EQ(nalu_header_size, kAvccHeaderByteSize);
@@ -73,7 +73,7 @@ bool H264CMSampleBufferToAnnexBBuffer(
status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
description, i, &param_set, &param_set_size, nullptr, nullptr);
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to get parameter set.";
+RTC_LOG(LS_ERROR) << "Failed to get parameter set.";
return false;
}
// Update buffer.
@@ -91,7 +91,7 @@ bool H264CMSampleBufferToAnnexBBuffer(
CMBlockBufferRef block_buffer =
CMSampleBufferGetDataBuffer(avcc_sample_buffer);
if (block_buffer == nullptr) {
-LOG(LS_ERROR) << "Failed to get sample buffer's block buffer.";
+RTC_LOG(LS_ERROR) << "Failed to get sample buffer's block buffer.";
return false;
}
CMBlockBufferRef contiguous_buffer = nullptr;
@@ -100,8 +100,8 @@ bool H264CMSampleBufferToAnnexBBuffer(
status = CMBlockBufferCreateContiguous(
nullptr, block_buffer, nullptr, nullptr, 0, 0, 0, &contiguous_buffer);
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: "
-<< status;
+RTC_LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: "
+<< status;
return false;
}
} else {
@@ -117,7 +117,7 @@ bool H264CMSampleBufferToAnnexBBuffer(
status = CMBlockBufferGetDataPointer(contiguous_buffer, 0, nullptr, nullptr,
&data_ptr);
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to get block buffer data.";
+RTC_LOG(LS_ERROR) << "Failed to get block buffer data.";
CFRelease(contiguous_buffer);
return false;
}
@@ -173,11 +173,11 @@ bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
const uint8_t* data = nullptr;
size_t data_len = 0;
if (!reader.ReadNalu(&data, &data_len)) {
-LOG(LS_ERROR) << "Failed to read SPS";
+RTC_LOG(LS_ERROR) << "Failed to read SPS";
return false;
}
if (!reader.ReadNalu(&data, &data_len)) {
-LOG(LS_ERROR) << "Failed to read PPS";
+RTC_LOG(LS_ERROR) << "Failed to read PPS";
return false;
}
}
@@ -190,7 +190,7 @@ bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
reader.BytesRemaining(), kCMBlockBufferAssureMemoryNowFlag,
&block_buffer);
if (status != kCMBlockBufferNoErr) {
-LOG(LS_ERROR) << "Failed to create block buffer.";
+RTC_LOG(LS_ERROR) << "Failed to create block buffer.";
return false;
}
@@ -200,8 +200,8 @@ bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
status = CMBlockBufferCreateContiguous(
nullptr, block_buffer, nullptr, nullptr, 0, 0, 0, &contiguous_buffer);
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: "
-<< status;
+RTC_LOG(LS_ERROR) << "Failed to flatten non-contiguous block buffer: "
+<< status;
CFRelease(block_buffer);
return false;
}
@@ -216,7 +216,7 @@ bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
status = CMBlockBufferGetDataPointer(contiguous_buffer, 0, nullptr,
&block_buffer_size, &data_ptr);
if (status != kCMBlockBufferNoErr) {
-LOG(LS_ERROR) << "Failed to get block buffer data pointer.";
+RTC_LOG(LS_ERROR) << "Failed to get block buffer data pointer.";
CFRelease(contiguous_buffer);
return false;
}
@@ -238,7 +238,7 @@ bool H264AnnexBBufferToCMSampleBuffer(const uint8_t* annexb_buffer,
nullptr, video_format, 1, 0, nullptr, 0,
nullptr, out_sample_buffer);
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to create sample buffer.";
+RTC_LOG(LS_ERROR) << "Failed to create sample buffer.";
CFRelease(contiguous_buffer);
return false;
}
@@ -284,23 +284,23 @@ CMVideoFormatDescriptionRef CreateVideoFormatDescription(
// Skip AUD.
if (ParseNaluType(annexb_buffer[4]) == kAud) {
if (!reader.ReadNalu(&param_set_ptrs[0], &param_set_sizes[0])) {
-LOG(LS_ERROR) << "Failed to read AUD";
+RTC_LOG(LS_ERROR) << "Failed to read AUD";
return nullptr;
}
}
if (!reader.ReadNalu(&param_set_ptrs[0], &param_set_sizes[0])) {
-LOG(LS_ERROR) << "Failed to read SPS";
+RTC_LOG(LS_ERROR) << "Failed to read SPS";
return nullptr;
}
if (!reader.ReadNalu(&param_set_ptrs[1], &param_set_sizes[1])) {
-LOG(LS_ERROR) << "Failed to read PPS";
+RTC_LOG(LS_ERROR) << "Failed to read PPS";
return nullptr;
}
status = CMVideoFormatDescriptionCreateFromH264ParameterSets(
kCFAllocatorDefault, 2, param_set_ptrs, param_set_sizes, 4,
&description);
if (status != noErr) {
-LOG(LS_ERROR) << "Failed to create video format description.";
+RTC_LOG(LS_ERROR) << "Failed to create video format description.";
return nullptr;
}
return description;