Remove the VPM denoiser.
The VPM denoiser gives poor results, is slow, and has not been used in practice; we use the VP8 denoiser instead. Testing it also takes up a lot of runtime on linux_memcheck (about 4 minutes), which we can do without.

BUG=
R=stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/16069004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6688 4adac7df-926f-26a2-2b94-8c16560cd09d
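For reference, the filter being deleted below (denoising.cc) is a temporal running-average filter in Q8 fixed point: each luma pixel carries a first moment (running mean) and a second moment, and a pixel is replaced by its mean only when both its temporal variance and its current deviation from the mean fall below a threshold. The sketch below restates that update in isolation, with the spatial/temporal sub-sampling folded away (all sub-sampling constants in the removed file are 0, so every pixel is updated on every frame); the function and parameter names here are illustrative only and are not part of the WebRTC API.

#include <cstddef>
#include <cstdint>
#include <vector>

// Q8 constants from the removed denoising.cc. They sum to 256, so each
// update is an exponential moving average: y = (179 * y + 77 * x) / 256.
const uint32_t kFiltParam = 179;    // (Q8) weight of the old estimate.
const uint32_t kFiltParamRec = 77;  // (Q8) weight of the new sample.
const int64_t kThreshold = 19200;   // (Q8) de-noising threshold.

// Illustrative restatement of VPMDenoising::ProcessFrame for one luma plane.
// moment1/moment2 persist across frames and hold the Q8 running mean and the
// Q8 running mean of the squared sample. Returns the number of pixels changed.
int DenoiseLumaPlane(uint8_t* luma, int width, int height,
                     std::vector<uint32_t>* moment1,
                     std::vector<uint32_t>* moment2) {
  const size_t size = static_cast<size_t>(width) * height;
  if (moment1->size() != size) moment1->assign(size, 0);
  if (moment2->size() != size) moment2->assign(size, 0);

  int num_pixels_changed = 0;
  for (size_t i = 0; i < size; ++i) {
    const uint32_t x = luma[i];
    // Recursive Q8 updates; the intermediate products are Q16 and are
    // shifted back down to Q8, exactly as in the removed code.
    (*moment1)[i] =
        ((*moment1)[i] * kFiltParam + ((kFiltParamRec * x) << 8)) >> 8;
    (*moment2)[i] =
        ((*moment2)[i] * kFiltParam + ((kFiltParamRec * x * x) << 8)) >> 8;

    // Temporal variance E[x^2] - E[x]^2 and deviation from the mean, both Q8.
    const int64_t mean = (*moment1)[i];
    const int64_t var =
        static_cast<int64_t>((*moment2)[i]) - ((mean * mean) >> 8);
    const int64_t diff = (static_cast<int64_t>(x) << 8) - mean;

    // Replace the pixel by its running mean only when the area is static:
    // recent activity (variance) and the current deviation are both small.
    if (var < kThreshold && ((diff * diff) >> 8) < kThreshold) {
      luma[i] = static_cast<uint8_t>(mean >> 8);
      ++num_pixels_changed;
    }
  }
  return num_pixels_changed;
}

With the threshold of 19200 (Q8), a pixel is overwritten only while its temporal variance stays below 75 gray-level-squared units and its deviation from the running mean stays below roughly 8.7 gray levels, i.e. only in essentially static areas.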
@@ -23,7 +23,6 @@ LOCAL_SRC_FILES := \
    color_enhancement.cc \
    content_analysis.cc \
    deflickering.cc \
    denoising.cc \
    frame_preprocessor.cc \
    spatial_resampler.cc \
    video_decimator.cc \
@@ -1,156 +0,0 @@
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_processing/main/source/denoising.h"

#include <string.h>

namespace webrtc {
// Down-sampling in time (unit: number of frames)
enum { kSubsamplingTime = 0 };
// Sub-sampling in width (unit: power of 2)
enum { kSubsamplingWidth = 0 };
// Sub-sampling in height (unit: power of 2)
enum { kSubsamplingHeight = 0 };
// (Q8) De-noising filter parameter
enum { kDenoiseFiltParam = 179 };
// (Q8) 1 - filter parameter
enum { kDenoiseFiltParamRec = 77 };
// (Q8) De-noising threshold level
enum { kDenoiseThreshold = 19200 };

VPMDenoising::VPMDenoising()
    : id_(0),
      moment1_(NULL),
      moment2_(NULL) {
  Reset();
}

VPMDenoising::~VPMDenoising() {
  if (moment1_) {
    delete [] moment1_;
    moment1_ = NULL;
  }

  if (moment2_) {
    delete [] moment2_;
    moment2_ = NULL;
  }
}

int32_t VPMDenoising::ChangeUniqueId(const int32_t id) {
  id_ = id;
  return VPM_OK;
}

void VPMDenoising::Reset() {
  frame_size_ = 0;
  denoise_frame_cnt_ = 0;

  if (moment1_) {
    delete [] moment1_;
    moment1_ = NULL;
  }

  if (moment2_) {
    delete [] moment2_;
    moment2_ = NULL;
  }
}

int32_t VPMDenoising::ProcessFrame(I420VideoFrame* frame) {
  assert(frame);
  int32_t thevar;
  int k;
  int jsub, ksub;
  int32_t diff0;
  uint32_t tmp_moment1;
  uint32_t tmp_moment2;
  uint32_t tmp;
  int32_t num_pixels_changed = 0;

  if (frame->IsZeroSize()) {
    return VPM_GENERAL_ERROR;
  }

  int width = frame->width();
  int height = frame->height();

  /* Size of luminance component */
  const uint32_t y_size = height * width;

  /* Initialization */
  if (y_size != frame_size_) {
    delete [] moment1_;
    moment1_ = NULL;

    delete [] moment2_;
    moment2_ = NULL;
  }
  frame_size_ = y_size;

  if (!moment1_) {
    moment1_ = new uint32_t[y_size];
    memset(moment1_, 0, sizeof(uint32_t)*y_size);
  }

  if (!moment2_) {
    moment2_ = new uint32_t[y_size];
    memset(moment2_, 0, sizeof(uint32_t)*y_size);
  }

  /* Apply de-noising on each pixel, but update variance sub-sampled */
  uint8_t* buffer = frame->buffer(kYPlane);
  for (int i = 0; i < height; i++) {  // Collect over height
    k = i * width;
    ksub = ((i >> kSubsamplingHeight) << kSubsamplingHeight) * width;
    for (int j = 0; j < width; j++) {  // Collect over width
      jsub = ((j >> kSubsamplingWidth) << kSubsamplingWidth);
      /* Update mean value for every pixel and every frame */
      tmp_moment1 = moment1_[k + j];
      tmp_moment1 *= kDenoiseFiltParam;  // Q16
      tmp_moment1 += ((kDenoiseFiltParamRec * ((uint32_t)buffer[k + j])) << 8);
      tmp_moment1 >>= 8;  // Q8
      moment1_[k + j] = tmp_moment1;

      tmp_moment2 = moment2_[ksub + jsub];
      if ((ksub == k) && (jsub == j) && (denoise_frame_cnt_ == 0)) {
        tmp = ((uint32_t)buffer[k + j] *
               (uint32_t)buffer[k + j]);
        tmp_moment2 *= kDenoiseFiltParam;  // Q16
        tmp_moment2 += ((kDenoiseFiltParamRec * tmp) << 8);
        tmp_moment2 >>= 8;  // Q8
      }
      moment2_[k + j] = tmp_moment2;
      /* Current event = deviation from mean value */
      diff0 = ((int32_t)buffer[k + j] << 8) - moment1_[k + j];
      /* Recent events = variance (variations over time) */
      thevar = moment2_[k + j];
      thevar -= ((moment1_[k + j] * moment1_[k + j]) >> 8);
      // De-noising criteria, i.e., when should we replace a pixel by its mean.
      // 1) recent events are minor.
      // 2) current events are minor.
      if ((thevar < kDenoiseThreshold)
          && ((diff0 * diff0 >> 8) < kDenoiseThreshold)) {
        // Replace with mean.
        buffer[k + j] = (uint8_t)(moment1_[k + j] >> 8);
        num_pixels_changed++;
      }
    }
  }

  denoise_frame_cnt_++;
  if (denoise_frame_cnt_ > kSubsamplingTime)
    denoise_frame_cnt_ = 0;

  return num_pixels_changed;
}

}  // namespace
@@ -1,42 +0,0 @@
/*
 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DENOISING_H_
#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DENOISING_H_

#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/typedefs.h"

namespace webrtc {

class VPMDenoising {
 public:
  VPMDenoising();
  ~VPMDenoising();

  int32_t ChangeUniqueId(int32_t id);

  void Reset();

  int32_t ProcessFrame(I420VideoFrame* frame);

 private:
  int32_t id_;

  uint32_t* moment1_;  // (Q8) First order moment (mean).
  uint32_t* moment2_;  // (Q8) Second order moment.
  uint32_t frame_size_;  // Size (# of pixels) of frame.
  int denoise_frame_cnt_;  // Counter for subsampling in time.
};

}  // namespace webrtc

#endif  // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DENOISING_H_
@@ -31,8 +31,6 @@
        'content_analysis.h',
        'deflickering.cc',
        'deflickering.h',
        'denoising.cc',
        'denoising.h',
        'frame_preprocessor.cc',
        'frame_preprocessor.h',
        'spatial_resampler.cc',
@@ -51,7 +51,6 @@ int32_t VideoProcessingModuleImpl::ChangeUniqueId(const int32_t id) {
  id_ = id;
  brightness_detection_.ChangeUniqueId(id);
  deflickering_.ChangeUniqueId(id);
  denoising_.ChangeUniqueId(id);
  frame_pre_processor_.ChangeUniqueId(id);
  return VPM_OK;
}
@@ -66,7 +65,6 @@ VideoProcessingModuleImpl::VideoProcessingModuleImpl(const int32_t id)
      mutex_(*CriticalSectionWrapper::CreateCriticalSection()) {
  brightness_detection_.ChangeUniqueId(id);
  deflickering_.ChangeUniqueId(id);
  denoising_.ChangeUniqueId(id);
  frame_pre_processor_.ChangeUniqueId(id);
}
@@ -77,7 +75,6 @@ VideoProcessingModuleImpl::~VideoProcessingModuleImpl() {
void VideoProcessingModuleImpl::Reset() {
  CriticalSectionScoped mutex(&mutex_);
  deflickering_.Reset();
  denoising_.Reset();
  brightness_detection_.Reset();
  frame_pre_processor_.Reset();
}
@@ -146,11 +143,6 @@ int32_t VideoProcessingModuleImpl::Deflickering(I420VideoFrame* frame,
  return deflickering_.ProcessFrame(frame, stats);
}

int32_t VideoProcessingModuleImpl::Denoising(I420VideoFrame* frame) {
  CriticalSectionScoped mutex(&mutex_);
  return denoising_.ProcessFrame(frame);
}

int32_t VideoProcessingModuleImpl::BrightnessDetection(
    const I420VideoFrame& frame,
    const FrameStats& stats) {
@@ -16,7 +16,6 @@
#include "webrtc/modules/video_processing/main/source/brightness_detection.h"
#include "webrtc/modules/video_processing/main/source/color_enhancement.h"
#include "webrtc/modules/video_processing/main/source/deflickering.h"
#include "webrtc/modules/video_processing/main/source/denoising.h"
#include "webrtc/modules/video_processing/main/source/frame_preprocessor.h"

namespace webrtc {
@@ -36,8 +35,6 @@ class VideoProcessingModuleImpl : public VideoProcessingModule {

  virtual int32_t Deflickering(I420VideoFrame* frame, FrameStats* stats);

  virtual int32_t Denoising(I420VideoFrame* frame);

  virtual int32_t BrightnessDetection(const I420VideoFrame& frame,
                                      const FrameStats& stats);

@@ -74,7 +71,6 @@ class VideoProcessingModuleImpl : public VideoProcessingModule {
  int32_t id_;
  CriticalSectionWrapper& mutex_;
  VPMDeflickering deflickering_;
  VPMDenoising denoising_;
  VPMBrightnessDetection brightness_detection_;
  VPMFramePreprocessor frame_pre_processor_;
};