Moving src/webrtc into src/.

In order to eliminate the WebRTC Subtree mirror in Chromium, 
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
This commit is contained in:
Mirko Bonadei
2017-09-15 06:15:48 +02:00
committed by Commit Bot
parent 6674846b4a
commit bb547203bf
4576 changed files with 1092 additions and 1196 deletions

View File

@ -0,0 +1,162 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h"

#include <string.h>

#include <memory>

#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
#include "webrtc/modules/audio_coding/codecs/legacy_encoded_audio_frame.h"
#include "webrtc/rtc_base/checks.h"
namespace webrtc {
// Allocates and initializes the underlying C G.722 decoder instance.
// NOTE(review): the return values of WebRtcG722_CreateDecoder/DecoderInit
// are ignored here, unlike the encoder side which RTC_CHECKs its
// create/init calls — confirm this is intentional.
AudioDecoderG722Impl::AudioDecoderG722Impl() {
  WebRtcG722_CreateDecoder(&dec_state_);
  WebRtcG722_DecoderInit(dec_state_);
}

AudioDecoderG722Impl::~AudioDecoderG722Impl() {
  WebRtcG722_FreeDecoder(dec_state_);
}

// Packet loss concealment is not supported by this decoder.
bool AudioDecoderG722Impl::HasDecodePlc() const {
  return false;
}
// Decodes a mono G.722 payload.
//
// |encoded|/|encoded_len| is the G.722 bitstream; |decoded| receives the
// 16 kHz PCM output; |speech_type| reports the classification produced by
// the underlying decoder (initialized to "speech").
// Returns the sample count reported by WebRtcG722_Decode.
int AudioDecoderG722Impl::DecodeInternal(const uint8_t* encoded,
                                         size_t encoded_len,
                                         int sample_rate_hz,
                                         int16_t* decoded,
                                         SpeechType* speech_type) {
  RTC_DCHECK_EQ(SampleRateHz(), sample_rate_hz);
  int16_t temp_type = 1;  // Default is speech.
  size_t ret =
      WebRtcG722_Decode(dec_state_, encoded, encoded_len, decoded, &temp_type);
  *speech_type = ConvertSpeechType(temp_type);
  return static_cast<int>(ret);
}
// Re-initializes the decoder state, dropping accumulated signal history.
void AudioDecoderG722Impl::Reset() {
  WebRtcG722_DecoderInit(dec_state_);
}

// Splits |payload| into legacy frames. The constants are presumably
// 8 encoded bytes per ms and 16 RTP timestamps per ms for mono G.722 —
// TODO confirm against LegacyEncodedAudioFrame::SplitBySamples.
std::vector<AudioDecoder::ParseResult> AudioDecoderG722Impl::ParsePayload(
    rtc::Buffer&& payload,
    uint32_t timestamp) {
  return LegacyEncodedAudioFrame::SplitBySamples(this, std::move(payload),
                                                 timestamp, 8, 16);
}

// Returns the duration, in samples, of a packet of |encoded_len| bytes.
int AudioDecoderG722Impl::PacketDuration(const uint8_t* encoded,
                                         size_t encoded_len) const {
  // 1/2 encoded byte per sample per channel.
  return static_cast<int>(2 * encoded_len / Channels());
}

// G.722 decodes to 16 kHz output (the RTP clock rate is 8 kHz, see the
// encoder's RtpTimestampRateHz()).
int AudioDecoderG722Impl::SampleRateHz() const {
  return 16000;
}

size_t AudioDecoderG722Impl::Channels() const {
  return 1;
}
// Creates and initializes one underlying C G.722 decoder per channel.
// NOTE(review): create/init return values are ignored, as in the mono
// decoder — confirm this is intentional.
AudioDecoderG722StereoImpl::AudioDecoderG722StereoImpl() {
  WebRtcG722_CreateDecoder(&dec_state_left_);
  WebRtcG722_CreateDecoder(&dec_state_right_);
  WebRtcG722_DecoderInit(dec_state_left_);
  WebRtcG722_DecoderInit(dec_state_right_);
}

AudioDecoderG722StereoImpl::~AudioDecoderG722StereoImpl() {
  WebRtcG722_FreeDecoder(dec_state_left_);
  WebRtcG722_FreeDecoder(dec_state_right_);
}
// Decodes a stereo (nibble-interleaved) G.722 payload.
//
// The payload is first de-interleaved into a left half and a right half,
// each half is decoded by its own G.722 instance, and the two mono outputs
// are then interleaved in place into |decoded|. If the two channel decoders
// report different lengths, the output is left non-interleaved and only the
// right channel's sample count is returned (pre-existing behavior).
int AudioDecoderG722StereoImpl::DecodeInternal(const uint8_t* encoded,
                                               size_t encoded_len,
                                               int sample_rate_hz,
                                               int16_t* decoded,
                                               SpeechType* speech_type) {
  RTC_DCHECK_EQ(SampleRateHz(), sample_rate_hz);
  int16_t temp_type = 1;  // Default is speech.
  // De-interleave the bit-stream into two separate payloads. Held by a
  // unique_ptr (instead of raw new/delete) so the buffer is released on
  // every exit path.
  std::unique_ptr<uint8_t[]> encoded_deinterleaved(new uint8_t[encoded_len]);
  SplitStereoPacket(encoded, encoded_len, encoded_deinterleaved.get());

  // Decode left and right.
  size_t decoded_len =
      WebRtcG722_Decode(dec_state_left_, encoded_deinterleaved.get(),
                        encoded_len / 2, decoded, &temp_type);
  size_t ret = WebRtcG722_Decode(
      dec_state_right_, &encoded_deinterleaved[encoded_len / 2],
      encoded_len / 2, &decoded[decoded_len], &temp_type);
  if (ret == decoded_len) {
    ret += decoded_len;  // Return total number of samples.
    // Interleave output: rotate each right-channel sample into place
    // between the left-channel samples.
    for (size_t k = ret / 2; k < ret; k++) {
      int16_t temp = decoded[k];
      memmove(&decoded[2 * k - ret + 2], &decoded[2 * k - ret + 1],
              (ret - k - 1) * sizeof(int16_t));
      decoded[2 * k - ret + 1] = temp;
    }
  }
  *speech_type = ConvertSpeechType(temp_type);
  return static_cast<int>(ret);
}
// Same 16 kHz output rate as the mono decoder.
int AudioDecoderG722StereoImpl::SampleRateHz() const {
  return 16000;
}

size_t AudioDecoderG722StereoImpl::Channels() const {
  return 2;
}

// Re-initializes both channels' decoder states.
void AudioDecoderG722StereoImpl::Reset() {
  WebRtcG722_DecoderInit(dec_state_left_);
  WebRtcG722_DecoderInit(dec_state_right_);
}

// Splits |payload| into legacy frames. Presumably 2 * 8 encoded bytes per
// ms (two channels) and 16 RTP timestamps per ms — TODO confirm against
// LegacyEncodedAudioFrame::SplitBySamples.
std::vector<AudioDecoder::ParseResult> AudioDecoderG722StereoImpl::ParsePayload(
    rtc::Buffer&& payload,
    uint32_t timestamp) {
  return LegacyEncodedAudioFrame::SplitBySamples(this, std::move(payload),
                                                 timestamp, 2 * 8, 16);
}
// Split the stereo packet and place left and right channel after each other
// in the output array.
void AudioDecoderG722StereoImpl::SplitStereoPacket(
    const uint8_t* encoded,
    size_t encoded_len,
    uint8_t* encoded_deinterleaved) {
  // Each input byte holds one left and one right sample, 4 bits each:
  // |l1 r1| |l2 r2| ..., where "lx" is 4 bits representing left sample
  // number x, "rx" the matching right sample, and |...| one byte. Repack so
  // that all left-channel bytes |l1 l2| |l3 l4| ... come first, followed by
  // all right-channel bytes |r1 r2| |r3 r4| ..., in a single O(n) pass.
  // (The previous implementation reordered the bytes with repeated memmove
  // rotations, which was O(n^2) in the payload size.)
  //
  // |encoded| and |encoded_deinterleaved| must not overlap. A trailing odd
  // byte, if any, is ignored, matching the pairwise processing above.
  const size_t half = encoded_len / 2;
  for (size_t i = 0; i + 1 < encoded_len; i += 2) {
    const size_t pair = i / 2;
    // Left channel: the high nibbles of the two input bytes.
    encoded_deinterleaved[pair] =
        static_cast<uint8_t>((encoded[i] & 0xF0) | (encoded[i + 1] >> 4));
    // Right channel: the low nibbles, packed most-significant first.
    encoded_deinterleaved[half + pair] = static_cast<uint8_t>(
        ((encoded[i] & 0x0F) << 4) | (encoded[i + 1] & 0x0F));
  }
}
} // namespace webrtc

View File

@ -0,0 +1,79 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_
#include "webrtc/api/audio_codecs/audio_decoder.h"
#include "webrtc/rtc_base/constructormagic.h"
typedef struct WebRtcG722DecInst G722DecInst;
namespace webrtc {
// Mono G.722 decoder. Wraps the C G.722 decoder (G722DecInst) behind the
// AudioDecoder interface. Output is 16 kHz PCM; packet loss concealment is
// not supported (HasDecodePlc() returns false).
class AudioDecoderG722Impl final : public AudioDecoder {
 public:
  AudioDecoderG722Impl();
  ~AudioDecoderG722Impl() override;
  bool HasDecodePlc() const override;
  void Reset() override;
  std::vector<ParseResult> ParsePayload(rtc::Buffer&& payload,
                                        uint32_t timestamp) override;
  int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override;
  int SampleRateHz() const override;
  size_t Channels() const override;

 protected:
  int DecodeInternal(const uint8_t* encoded,
                     size_t encoded_len,
                     int sample_rate_hz,
                     int16_t* decoded,
                     SpeechType* speech_type) override;

 private:
  // State of the underlying C decoder; created in the constructor and
  // freed in the destructor.
  G722DecInst* dec_state_;

  RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722Impl);
};
// Stereo G.722 decoder: one C decoder instance per channel, fed with the
// de-interleaved halves of the payload; produces interleaved L/R PCM.
class AudioDecoderG722StereoImpl final : public AudioDecoder {
 public:
  AudioDecoderG722StereoImpl();
  ~AudioDecoderG722StereoImpl() override;
  void Reset() override;
  std::vector<ParseResult> ParsePayload(rtc::Buffer&& payload,
                                        uint32_t timestamp) override;
  int SampleRateHz() const override;
  size_t Channels() const override;

 protected:
  int DecodeInternal(const uint8_t* encoded,
                     size_t encoded_len,
                     int sample_rate_hz,
                     int16_t* decoded,
                     SpeechType* speech_type) override;

 private:
  // Splits the stereo-interleaved payload in |encoded| into separate payloads
  // for left and right channels. The separated payloads are written to
  // |encoded_deinterleaved|, which must hold at least |encoded_len| samples.
  // The left channel starts at offset 0, while the right channel starts at
  // offset encoded_len / 2 into |encoded_deinterleaved|.
  void SplitStereoPacket(const uint8_t* encoded,
                         size_t encoded_len,
                         uint8_t* encoded_deinterleaved);

  // One decoder state per channel, created/freed in the ctor/dtor.
  G722DecInst* dec_state_left_;
  G722DecInst* dec_state_right_;

  RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722StereoImpl);
};
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_

View File

@ -0,0 +1,162 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h"
#include <algorithm>
#include <limits>
#include "webrtc/common_types.h"
#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/safe_conversions.h"
namespace webrtc {
namespace {

// G.722 always codes a 16 kHz input stream.
const size_t kSampleRateHz = 16000;

// Translates a legacy CodecInst into an AudioEncoderG722Config.
// |pacsize| is in samples at 16 kHz, so dividing by 16 yields milliseconds.
AudioEncoderG722Config CreateConfig(const CodecInst& codec_inst) {
  AudioEncoderG722Config config;
  config.num_channels = rtc::dchecked_cast<int>(codec_inst.channels);
  config.frame_size_ms = codec_inst.pacsize / 16;
  return config;
}

}  // namespace
// Constructs an encoder for |config.num_channels| channels, packetizing
// |config.frame_size_ms| of audio per emitted packet. CHECK-fails if the
// config is invalid.
AudioEncoderG722Impl::AudioEncoderG722Impl(const AudioEncoderG722Config& config,
                                           int payload_type)
    : num_channels_(config.num_channels),
      payload_type_(payload_type),
      num_10ms_frames_per_packet_(
          static_cast<size_t>(config.frame_size_ms / 10)),
      num_10ms_frames_buffered_(0),
      first_timestamp_in_buffer_(0),
      encoders_(new EncoderState[num_channels_]),
      // Scratch space: two nibbles (one byte's worth) per channel.
      interleave_buffer_(2 * num_channels_) {
  RTC_CHECK(config.IsOk());
  const size_t samples_per_channel =
      kSampleRateHz / 100 * num_10ms_frames_per_packet_;
  for (size_t i = 0; i < num_channels_; ++i) {
    encoders_[i].speech_buffer.reset(new int16_t[samples_per_channel]);
    // G.722 produces half a byte of output per input sample.
    encoders_[i].encoded_buffer.SetSize(samples_per_channel / 2);
  }
  Reset();
}

// Legacy constructor: derives the config from a CodecInst.
AudioEncoderG722Impl::AudioEncoderG722Impl(const CodecInst& codec_inst)
    : AudioEncoderG722Impl(CreateConfig(codec_inst), codec_inst.pltype) {}

AudioEncoderG722Impl::~AudioEncoderG722Impl() = default;
int AudioEncoderG722Impl::SampleRateHz() const {
  return kSampleRateHz;
}

size_t AudioEncoderG722Impl::NumChannels() const {
  return num_channels_;
}

int AudioEncoderG722Impl::RtpTimestampRateHz() const {
  // The RTP timestamp rate for G.722 is 8000 Hz, even though it is a 16 kHz
  // codec.
  return kSampleRateHz / 2;
}

// Packet size is fixed at construction, so next == max.
size_t AudioEncoderG722Impl::Num10MsFramesInNextPacket() const {
  return num_10ms_frames_per_packet_;
}

size_t AudioEncoderG722Impl::Max10MsFramesInAPacket() const {
  return num_10ms_frames_per_packet_;
}

int AudioEncoderG722Impl::GetTargetBitrate() const {
  // 4 bits/sample, 16000 samples/s/channel.
  return static_cast<int>(64000 * NumChannels());
}

// Drops any buffered input and re-initializes each channel's C encoder.
void AudioEncoderG722Impl::Reset() {
  num_10ms_frames_buffered_ = 0;
  for (size_t i = 0; i < num_channels_; ++i)
    RTC_CHECK_EQ(0, WebRtcG722_EncoderInit(encoders_[i].encoder));
}
// Buffers 10 ms of input per call; once a full packet's worth of audio has
// accumulated, encodes every channel and appends one nibble-interleaved
// payload to |encoded|. The RTP timestamp of the first buffered frame
// becomes the packet's timestamp. Returns an empty EncodedInfo while still
// buffering.
AudioEncoder::EncodedInfo AudioEncoderG722Impl::EncodeImpl(
    uint32_t rtp_timestamp,
    rtc::ArrayView<const int16_t> audio,
    rtc::Buffer* encoded) {
  if (num_10ms_frames_buffered_ == 0)
    first_timestamp_in_buffer_ = rtp_timestamp;

  // Deinterleave samples and save them in each channel's buffer.
  const size_t start = kSampleRateHz / 100 * num_10ms_frames_buffered_;
  for (size_t i = 0; i < kSampleRateHz / 100; ++i)
    for (size_t j = 0; j < num_channels_; ++j)
      encoders_[j].speech_buffer[start + i] = audio[i * num_channels_ + j];

  // If we don't yet have enough samples for a packet, we're done for now.
  if (++num_10ms_frames_buffered_ < num_10ms_frames_per_packet_) {
    return EncodedInfo();
  }

  // Encode each channel separately.
  RTC_CHECK_EQ(num_10ms_frames_buffered_, num_10ms_frames_per_packet_);
  num_10ms_frames_buffered_ = 0;
  const size_t samples_per_channel = SamplesPerChannel();
  for (size_t i = 0; i < num_channels_; ++i) {
    const size_t bytes_encoded = WebRtcG722_Encode(
        encoders_[i].encoder, encoders_[i].speech_buffer.get(),
        samples_per_channel, encoders_[i].encoded_buffer.data());
    // G.722 emits exactly half a byte per input sample.
    RTC_CHECK_EQ(bytes_encoded, samples_per_channel / 2);
  }

  const size_t bytes_to_encode = samples_per_channel / 2 * num_channels_;
  EncodedInfo info;
  info.encoded_bytes = encoded->AppendData(
      bytes_to_encode, [&] (rtc::ArrayView<uint8_t> encoded) {
        // Interleave the encoded bytes of the different channels. Each separate
        // channel and the interleaved stream encodes two samples per byte, most
        // significant half first.
        for (size_t i = 0; i < samples_per_channel / 2; ++i) {
          for (size_t j = 0; j < num_channels_; ++j) {
            uint8_t two_samples = encoders_[j].encoded_buffer.data()[i];
            interleave_buffer_.data()[j] = two_samples >> 4;
            interleave_buffer_.data()[num_channels_ + j] = two_samples & 0xf;
          }
          for (size_t j = 0; j < num_channels_; ++j)
            encoded[i * num_channels_ + j] =
                interleave_buffer_.data()[2 * j] << 4 |
                interleave_buffer_.data()[2 * j + 1];
        }
        return bytes_to_encode;
      });
  info.encoded_timestamp = first_timestamp_in_buffer_;
  info.payload_type = payload_type_;
  info.encoder_type = CodecType::kG722;
  return info;
}
// Allocates the per-channel C encoder instance; CHECK-fails on error.
AudioEncoderG722Impl::EncoderState::EncoderState() {
  RTC_CHECK_EQ(0, WebRtcG722_CreateEncoder(&encoder));
}

AudioEncoderG722Impl::EncoderState::~EncoderState() {
  RTC_CHECK_EQ(0, WebRtcG722_FreeEncoder(encoder));
}

// Number of input samples per channel in one full packet.
size_t AudioEncoderG722Impl::SamplesPerChannel() const {
  return kSampleRateHz / 100 * num_10ms_frames_per_packet_;
}
} // namespace webrtc

View File

@ -0,0 +1,68 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_ENCODER_G722_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_ENCODER_G722_H_
#include <memory>
#include "webrtc/api/audio_codecs/audio_encoder.h"
#include "webrtc/api/audio_codecs/g722/audio_encoder_g722_config.h"
#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
#include "webrtc/rtc_base/buffer.h"
#include "webrtc/rtc_base/constructormagic.h"
namespace webrtc {
struct CodecInst;
// AudioEncoder implementation for G.722 (one or more channels). Input is
// 16 kHz PCM; each channel is encoded by its own C G.722 encoder instance,
// and the per-channel bitstreams are nibble-interleaved into one payload.
class AudioEncoderG722Impl final : public AudioEncoder {
 public:
  AudioEncoderG722Impl(const AudioEncoderG722Config& config, int payload_type);
  explicit AudioEncoderG722Impl(const CodecInst& codec_inst);
  ~AudioEncoderG722Impl() override;

  int SampleRateHz() const override;
  size_t NumChannels() const override;
  // Note: G.722's RTP clock runs at 8000 Hz even though the codec is 16 kHz.
  int RtpTimestampRateHz() const override;
  size_t Num10MsFramesInNextPacket() const override;
  size_t Max10MsFramesInAPacket() const override;
  int GetTargetBitrate() const override;
  void Reset() override;

 protected:
  EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
                         rtc::ArrayView<const int16_t> audio,
                         rtc::Buffer* encoded) override;

 private:
  // The encoder state for one channel.
  struct EncoderState {
    G722EncInst* encoder;
    std::unique_ptr<int16_t[]> speech_buffer;  // Queued up for encoding.
    rtc::Buffer encoded_buffer;                // Already encoded.
    EncoderState();
    ~EncoderState();
  };

  // Number of input samples per channel in a full packet.
  size_t SamplesPerChannel() const;

  const size_t num_channels_;
  const int payload_type_;
  const size_t num_10ms_frames_per_packet_;
  // How many 10 ms frames are currently buffered toward the next packet.
  size_t num_10ms_frames_buffered_;
  uint32_t first_timestamp_in_buffer_;
  const std::unique_ptr<EncoderState[]> encoders_;
  // Scratch space for nibble-interleaving one byte per channel.
  rtc::Buffer interleave_buffer_;

  RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderG722Impl);
};
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_AUDIO_ENCODER_G722_H_

View File

@ -0,0 +1,400 @@
/*
* SpanDSP - a series of DSP components for telephony
*
* g722_decode.c - The ITU G.722 codec, decode part.
*
* Written by Steve Underwood <steveu@coppice.org>
*
* Copyright (C) 2005 Steve Underwood
*
* Despite my general liking of the GPL, I place my own contributions
* to this code in the public domain for the benefit of all mankind -
* even the slimy ones who might try to proprietize my work and use it
* to my detriment.
*
* Based in part on a single channel G.722 codec which is:
*
* Copyright (c) CMU 1993
* Computer Science, Speech Group
* Chengxiang Lu and Alex Hauptmann
*
* $Id: g722_decode.c,v 1.15 2006/07/07 16:37:49 steveu Exp $
*
* Modifications for WebRtc, 2011/04/28, by tlegrand:
* -Removed usage of inttypes.h and tgmath.h
* -Changed to use WebRtc types
* -Changed __inline__ to __inline
* -Added saturation check on output
*/
/*! \file */
#include <memory.h>
#include <stdio.h>
#include <stdlib.h>
#include "g722_enc_dec.h"
#include "webrtc/typedefs.h"
#if !defined(FALSE)
#define FALSE 0
#endif
#if !defined(TRUE)
#define TRUE (!FALSE)
#endif
/* Clamps a 32-bit value to the 16-bit sample range. */
static __inline int16_t saturate(int32_t amp)
{
    /* Narrowing round-trip: if the value survives a cast to 16 bits
       unchanged, it is already in range (the common, non-clipping case). */
    const int16_t narrowed = (int16_t) amp;

    if (amp == narrowed)
        return narrowed;
    /* Out of range: clip to the nearer 16-bit bound. */
    return (amp > WEBRTC_INT16_MAX) ? WEBRTC_INT16_MAX : WEBRTC_INT16_MIN;
}
/*- End of function --------------------------------------------------------*/
/* Block 4 of the G.722 algorithm: updates one band's adaptive predictor
   (two-pole and six-zero sections) with the newly reconstructed difference
   signal |d|. Mirrors block4() in g722_encode.c; the two must stay
   numerically identical for encoder and decoder to track each other. */
static void block4(G722DecoderState *s, int band, int d);

static void block4(G722DecoderState *s, int band, int d)
{
    int wd1;
    int wd2;
    int wd3;
    int i;

    /* Block 4, RECONS */
    s->band[band].d[0] = d;
    s->band[band].r[0] = saturate(s->band[band].s + d);
    /* Block 4, PARREC */
    s->band[band].p[0] = saturate(s->band[band].sz + d);
    /* Block 4, UPPOL2 */
    for (i = 0; i < 3; i++)
        s->band[band].sg[i] = s->band[band].p[i] >> 15;
    wd1 = saturate(s->band[band].a[1] * 4);
    wd2 = (s->band[band].sg[0] == s->band[band].sg[1]) ? -wd1 : wd1;
    if (wd2 > 32767)
        wd2 = 32767;
    wd3 = (s->band[band].sg[0] == s->band[band].sg[2]) ? 128 : -128;
    wd3 += (wd2 >> 7);
    wd3 += (s->band[band].a[2]*32512) >> 15;
    if (wd3 > 12288)
        wd3 = 12288;
    else if (wd3 < -12288)
        wd3 = -12288;
    s->band[band].ap[2] = wd3;
    /* Block 4, UPPOL1 */
    s->band[band].sg[0] = s->band[band].p[0] >> 15;
    s->band[band].sg[1] = s->band[band].p[1] >> 15;
    wd1 = (s->band[band].sg[0] == s->band[band].sg[1]) ? 192 : -192;
    wd2 = (s->band[band].a[1]*32640) >> 15;
    s->band[band].ap[1] = saturate(wd1 + wd2);
    wd3 = saturate(15360 - s->band[band].ap[2]);
    if (s->band[band].ap[1] > wd3)
        s->band[band].ap[1] = wd3;
    else if (s->band[band].ap[1] < -wd3)
        s->band[band].ap[1] = -wd3;
    /* Block 4, UPZERO */
    wd1 = (d == 0) ? 0 : 128;
    s->band[band].sg[0] = d >> 15;
    for (i = 1; i < 7; i++)
    {
        s->band[band].sg[i] = s->band[band].d[i] >> 15;
        wd2 = (s->band[band].sg[i] == s->band[band].sg[0]) ? wd1 : -wd1;
        wd3 = (s->band[band].b[i]*32640) >> 15;
        s->band[band].bp[i] = saturate(wd2 + wd3);
    }
    /* Block 4, DELAYA: shift the delay lines one step. */
    for (i = 6; i > 0; i--)
    {
        s->band[band].d[i] = s->band[band].d[i - 1];
        s->band[band].b[i] = s->band[band].bp[i];
    }
    for (i = 2; i > 0; i--)
    {
        s->band[band].r[i] = s->band[band].r[i - 1];
        s->band[band].p[i] = s->band[band].p[i - 1];
        s->band[band].a[i] = s->band[band].ap[i];
    }
    /* Block 4, FILTEP: pole-section prediction. */
    wd1 = saturate(s->band[band].r[1] + s->band[band].r[1]);
    wd1 = (s->band[band].a[1]*wd1) >> 15;
    wd2 = saturate(s->band[band].r[2] + s->band[band].r[2]);
    wd2 = (s->band[band].a[2]*wd2) >> 15;
    s->band[band].sp = saturate(wd1 + wd2);
    /* Block 4, FILTEZ: zero-section prediction. */
    s->band[band].sz = 0;
    for (i = 6; i > 0; i--)
    {
        wd1 = saturate(s->band[band].d[i] + s->band[band].d[i]);
        s->band[band].sz += (s->band[band].b[i]*wd1) >> 15;
    }
    s->band[band].sz = saturate(s->band[band].sz);
    /* Block 4, PREDIC: combined prediction. */
    s->band[band].s = saturate(s->band[band].sp + s->band[band].sz);
}
/*- End of function --------------------------------------------------------*/
/* Initializes (and if |s| is NULL, allocates) a G.722 decoder state.
   |rate| selects 48000/56000 bps (6/7 bits per sample); any other value
   means 64000 bps (8 bits). |options| is a bitmask of G722_SAMPLE_RATE_8000
   and G722_PACKED. Returns NULL if allocation fails.
   Fix: the malloc result was previously passed to memset without a NULL
   check (potential NULL dereference); now checked, matching
   WebRtc_g722_encode_init. */
G722DecoderState* WebRtc_g722_decode_init(G722DecoderState* s,
                                          int rate,
                                          int options) {
    if (s == NULL)
    {
        if ((s = (G722DecoderState *) malloc(sizeof(*s))) == NULL)
            return NULL;
    }
    memset(s, 0, sizeof(*s));
    if (rate == 48000)
        s->bits_per_sample = 6;
    else if (rate == 56000)
        s->bits_per_sample = 7;
    else
        s->bits_per_sample = 8;
    if ((options & G722_SAMPLE_RATE_8000))
        s->eight_k = TRUE;
    /* Packed mode only applies when code words are narrower than a byte. */
    if ((options & G722_PACKED) && s->bits_per_sample != 8)
        s->packed = TRUE;
    else
        s->packed = FALSE;
    /* Initial quantizer scale factors per G.722. */
    s->band[0].det = 32;
    s->band[1].det = 8;
    return s;
}
/*- End of function --------------------------------------------------------*/
/* Frees a decoder state. NOTE(review): frees |s| unconditionally, so a
   caller that passed its own non-heap state to decode_init must not call
   this — confirm callers always use the allocating path. Always returns 0. */
int WebRtc_g722_decode_release(G722DecoderState *s)
{
    free(s);
    return 0;
}
/*- End of function --------------------------------------------------------*/
/* Decodes |len| bytes of G.722 data into 16-bit PCM in |amp|.
   Returns the number of samples produced: two per code word in wideband
   mode (QMF synthesis of low and high sub-bands), one per code word in
   8 kHz (eight_k) mode. Block names in the comments refer to the
   corresponding blocks of the ITU-T G.722 algorithm description. */
size_t WebRtc_g722_decode(G722DecoderState *s, int16_t amp[],
                          const uint8_t g722_data[], size_t len)
{
    /* Adaptation and inverse-quantizer tables from ITU-T G.722. */
    static const int wl[8] = {-60, -30, 58, 172, 334, 538, 1198, 3042 };
    static const int rl42[16] = {0, 7, 6, 5, 4, 3, 2, 1,
                                 7, 6, 5, 4, 3, 2, 1, 0 };
    static const int ilb[32] =
    {
        2048, 2093, 2139, 2186, 2233, 2282, 2332,
        2383, 2435, 2489, 2543, 2599, 2656, 2714,
        2774, 2834, 2896, 2960, 3025, 3091, 3158,
        3228, 3298, 3371, 3444, 3520, 3597, 3676,
        3756, 3838, 3922, 4008
    };
    static const int wh[3] = {0, -214, 798};
    static const int rh2[4] = {2, 1, 2, 1};
    static const int qm2[4] = {-7408, -1616, 7408, 1616};
    static const int qm4[16] =
    {
             0, -20456, -12896,  -8968,
         -6288,  -4240,  -2584,  -1200,
         20456,  12896,   8968,   6288,
          4240,   2584,   1200,      0
    };
    static const int qm5[32] =
    {
          -280,   -280, -23352, -17560,
        -14120, -11664,  -9752,  -8184,
         -6864,  -5712,  -4696,  -3784,
         -2960,  -2208,  -1520,   -880,
         23352,  17560,  14120,  11664,
          9752,   8184,   6864,   5712,
          4696,   3784,   2960,   2208,
          1520,    880,    280,   -280
    };
    static const int qm6[64] =
    {
          -136,   -136,   -136,   -136,
        -24808, -21904, -19008, -16704,
        -14984, -13512, -12280, -11192,
        -10232,  -9360,  -8576,  -7856,
         -7192,  -6576,  -6000,  -5456,
         -4944,  -4464,  -4008,  -3576,
         -3168,  -2776,  -2400,  -2032,
         -1688,  -1360,  -1040,   -728,
         24808,  21904,  19008,  16704,
         14984,  13512,  12280,  11192,
         10232,   9360,   8576,   7856,
          7192,   6576,   6000,   5456,
          4944,   4464,   4008,   3576,
          3168,   2776,   2400,   2032,
          1688,   1360,   1040,    728,
           432,    136,   -432,   -136
    };
    static const int qmf_coeffs[12] =
    {
        3, -11, 12, 32, -210, 951, 3876, -805, 362, -156, 53, -11,
    };

    int dlowt;
    int rlow;
    int ihigh;
    int dhigh;
    int rhigh;
    int xout1;
    int xout2;
    int wd1;
    int wd2;
    int wd3;
    int code;
    size_t outlen;
    int i;
    size_t j;

    outlen = 0;
    rhigh = 0;
    for (j = 0; j < len; )
    {
        if (s->packed)
        {
            /* Unpack the code bits */
            if (s->in_bits < s->bits_per_sample)
            {
                s->in_buffer |= (g722_data[j++] << s->in_bits);
                s->in_bits += 8;
            }
            code = s->in_buffer & ((1 << s->bits_per_sample) - 1);
            s->in_buffer >>= s->bits_per_sample;
            s->in_bits -= s->bits_per_sample;
        }
        else
        {
            code = g722_data[j++];
        }
        /* Split the code word into the low-band index |wd1| and the
           high-band index |ihigh|, and look up the low-band quantizer
           output |wd2| for the configured bit rate. */
        switch (s->bits_per_sample)
        {
        default:
        case 8:
            wd1 = code & 0x3F;
            ihigh = (code >> 6) & 0x03;
            wd2 = qm6[wd1];
            wd1 >>= 2;
            break;
        case 7:
            wd1 = code & 0x1F;
            ihigh = (code >> 5) & 0x03;
            wd2 = qm5[wd1];
            wd1 >>= 1;
            break;
        case 6:
            wd1 = code & 0x0F;
            ihigh = (code >> 4) & 0x03;
            wd2 = qm4[wd1];
            break;
        }
        /* Block 5L, LOW BAND INVQBL */
        wd2 = (s->band[0].det*wd2) >> 15;
        /* Block 5L, RECONS */
        rlow = s->band[0].s + wd2;
        /* Block 6L, LIMIT */
        if (rlow > 16383)
            rlow = 16383;
        else if (rlow < -16384)
            rlow = -16384;
        /* Block 2L, INVQAL */
        wd2 = qm4[wd1];
        dlowt = (s->band[0].det*wd2) >> 15;
        /* Block 3L, LOGSCL */
        wd2 = rl42[wd1];
        wd1 = (s->band[0].nb*127) >> 7;
        wd1 += wl[wd2];
        if (wd1 < 0)
            wd1 = 0;
        else if (wd1 > 18432)
            wd1 = 18432;
        s->band[0].nb = wd1;
        /* Block 3L, SCALEL */
        wd1 = (s->band[0].nb >> 6) & 31;
        wd2 = 8 - (s->band[0].nb >> 11);
        wd3 = (wd2 < 0) ? (ilb[wd1] << -wd2) : (ilb[wd1] >> wd2);
        s->band[0].det = wd3 << 2;
        block4(s, 0, dlowt);
        if (!s->eight_k)
        {
            /* Block 2H, INVQAH */
            wd2 = qm2[ihigh];
            dhigh = (s->band[1].det*wd2) >> 15;
            /* Block 5H, RECONS */
            rhigh = dhigh + s->band[1].s;
            /* Block 6H, LIMIT */
            if (rhigh > 16383)
                rhigh = 16383;
            else if (rhigh < -16384)
                rhigh = -16384;
            /* Block 2H, INVQAH */
            wd2 = rh2[ihigh];
            wd1 = (s->band[1].nb*127) >> 7;
            wd1 += wh[wd2];
            if (wd1 < 0)
                wd1 = 0;
            else if (wd1 > 22528)
                wd1 = 22528;
            s->band[1].nb = wd1;
            /* Block 3H, SCALEH */
            wd1 = (s->band[1].nb >> 6) & 31;
            wd2 = 10 - (s->band[1].nb >> 11);
            wd3 = (wd2 < 0) ? (ilb[wd1] << -wd2) : (ilb[wd1] >> wd2);
            s->band[1].det = wd3 << 2;
            block4(s, 1, dhigh);
        }
        if (s->itu_test_mode)
        {
            /* ITU test mode: emit the sub-band signals directly, without
               the band-split (QMF) filters. */
            amp[outlen++] = (int16_t) (rlow << 1);
            amp[outlen++] = (int16_t) (rhigh << 1);
        }
        else
        {
            if (s->eight_k)
            {
                amp[outlen++] = (int16_t) (rlow << 1);
            }
            else
            {
                /* Apply the receive QMF */
                for (i = 0; i < 22; i++)
                    s->x[i] = s->x[i + 2];
                s->x[22] = rlow + rhigh;
                s->x[23] = rlow - rhigh;
                xout1 = 0;
                xout2 = 0;
                for (i = 0; i < 12; i++)
                {
                    xout2 += s->x[2*i]*qmf_coeffs[i];
                    xout1 += s->x[2*i + 1]*qmf_coeffs[11 - i];
                }
                /* We shift by 12 to allow for the QMF filters (DC gain = 4096), less 1
                   to allow for the 15 bit input to the G.722 algorithm. */
                /* WebRtc, tlegrand: added saturation */
                amp[outlen++] = saturate(xout1 >> 11);
                amp[outlen++] = saturate(xout2 >> 11);
            }
        }
    }
    return outlen;
}
/*- End of function --------------------------------------------------------*/
/*- End of file ------------------------------------------------------------*/

View File

@ -0,0 +1,160 @@
/*
* SpanDSP - a series of DSP components for telephony
*
* g722.h - The ITU G.722 codec.
*
* Written by Steve Underwood <steveu@coppice.org>
*
* Copyright (C) 2005 Steve Underwood
*
* Despite my general liking of the GPL, I place my own contributions
* to this code in the public domain for the benefit of all mankind -
* even the slimy ones who might try to proprietize my work and use it
* to my detriment.
*
* Based on a single channel G.722 codec which is:
*
***** Copyright (c) CMU 1993 *****
* Computer Science, Speech Group
* Chengxiang Lu and Alex Hauptmann
*
* $Id: g722.h,v 1.10 2006/06/16 12:45:53 steveu Exp $
*
* Modifications for WebRtc, 2011/04/28, by tlegrand:
* -Changed to use WebRtc types
* -Added new defines for minimum and maximum values of short int
*/
/*! \file */
#if !defined(_G722_ENC_DEC_H_)
#define _G722_ENC_DEC_H_
#include "webrtc/typedefs.h"
/*! \page g722_page G.722 encoding and decoding
\section g722_page_sec_1 What does it do?
The G.722 module is a bit exact implementation of the ITU G.722 specification for all three
specified bit rates - 64000bps, 56000bps and 48000bps. It passes the ITU tests.
To allow fast and flexible interworking with narrow band telephony, the encoder and decoder
support an option for the linear audio to be an 8k samples/second stream. In this mode the
codec is considerably faster, and still fully compatible with wideband terminals using G.722.
\section g722_page_sec_2 How does it work?
???.
*/
/* Saturation limits for 16-bit PCM samples. NOTE(review): WEBRTC_INT16_MIN
   is unparenthesized; harmless in current uses (plain returns/comparisons),
   but parenthesizing would be safer in larger expressions. */
#define WEBRTC_INT16_MAX 32767
#define WEBRTC_INT16_MIN -32768

/* Option bit-flags accepted by the encode/decode init functions. */
enum
{
    G722_SAMPLE_RATE_8000 = 0x0001,
    G722_PACKED = 0x0002
};
/* Per-instance encoder state. NOTE(review): field-for-field identical to
   G722DecoderState below — presumably kept as two types for API clarity. */
typedef struct
{
    /*! TRUE if the operating in the special ITU test mode, with the band split filters
             disabled. */
    int itu_test_mode;
    /*! TRUE if the G.722 data is packed */
    int packed;
    /*! TRUE if encode from 8k samples/second */
    int eight_k;
    /*! 6 for 48000kbps, 7 for 56000kbps, or 8 for 64000kbps. */
    int bits_per_sample;

    /*! Signal history for the QMF */
    int x[24];

    /* Per-band (0 = low, 1 = high) adaptive predictor and quantizer state,
       updated by block4(). */
    struct
    {
        int s;
        int sp;
        int sz;
        int r[3];
        int a[3];
        int ap[3];
        int p[3];
        int d[7];
        int b[7];
        int bp[7];
        int sg[7];
        int nb;
        int det;
    } band[2];

    /* Bit-packing accumulators used in packed (non-8-bit) mode. */
    unsigned int in_buffer;
    int in_bits;
    unsigned int out_buffer;
    int out_bits;
} G722EncoderState;
/* Per-instance decoder state. NOTE(review): field-for-field identical to
   G722EncoderState above. */
typedef struct
{
    /*! TRUE if the operating in the special ITU test mode, with the band split filters
             disabled. */
    int itu_test_mode;
    /*! TRUE if the G.722 data is packed */
    int packed;
    /*! TRUE if decode to 8k samples/second */
    int eight_k;
    /*! 6 for 48000kbps, 7 for 56000kbps, or 8 for 64000kbps. */
    int bits_per_sample;

    /*! Signal history for the QMF */
    int x[24];

    /* Per-band (0 = low, 1 = high) adaptive predictor and quantizer state,
       updated by block4(). */
    struct
    {
        int s;
        int sp;
        int sz;
        int r[3];
        int a[3];
        int ap[3];
        int p[3];
        int d[7];
        int b[7];
        int bp[7];
        int sg[7];
        int nb;
        int det;
    } band[2];

    /* Bit-unpacking accumulators used in packed (non-8-bit) mode. */
    unsigned int in_buffer;
    int in_bits;
    unsigned int out_buffer;
    int out_bits;
} G722DecoderState;
#ifdef __cplusplus
extern "C" {
#endif
G722EncoderState* WebRtc_g722_encode_init(G722EncoderState* s,
int rate,
int options);
int WebRtc_g722_encode_release(G722EncoderState *s);
size_t WebRtc_g722_encode(G722EncoderState *s,
uint8_t g722_data[],
const int16_t amp[],
size_t len);
G722DecoderState* WebRtc_g722_decode_init(G722DecoderState* s,
int rate,
int options);
int WebRtc_g722_decode_release(G722DecoderState *s);
size_t WebRtc_g722_decode(G722DecoderState *s,
int16_t amp[],
const uint8_t g722_data[],
size_t len);
#ifdef __cplusplus
}
#endif
#endif

View File

@ -0,0 +1,430 @@
/*
* SpanDSP - a series of DSP components for telephony
*
* g722_encode.c - The ITU G.722 codec, encode part.
*
* Written by Steve Underwood <steveu@coppice.org>
*
* Copyright (C) 2005 Steve Underwood
*
* All rights reserved.
*
* Despite my general liking of the GPL, I place my own contributions
* to this code in the public domain for the benefit of all mankind -
* even the slimy ones who might try to proprietize my work and use it
* to my detriment.
*
* Based on a single channel 64kbps only G.722 codec which is:
*
***** Copyright (c) CMU 1993 *****
* Computer Science, Speech Group
* Chengxiang Lu and Alex Hauptmann
*
* $Id: g722_encode.c,v 1.14 2006/07/07 16:37:49 steveu Exp $
*
* Modifications for WebRtc, 2011/04/28, by tlegrand:
* -Removed usage of inttypes.h and tgmath.h
* -Changed to use WebRtc types
* -Added option to run encoder bitexact with ITU-T reference implementation
*/
/*! \file */
#include <memory.h>
#include <stdio.h>
#include <stdlib.h>
#include "g722_enc_dec.h"
#include "webrtc/typedefs.h"
#if !defined(FALSE)
#define FALSE 0
#endif
#if !defined(TRUE)
#define TRUE (!FALSE)
#endif
/* Clamps a 32-bit value to the 16-bit sample range. */
static __inline int16_t saturate(int32_t amp)
{
    /* Narrowing round-trip: if the value survives a cast to 16 bits
       unchanged, it is already in range (the common, non-clipping case). */
    const int16_t narrowed = (int16_t) amp;

    if (amp == narrowed)
        return narrowed;
    /* Out of range: clip to the nearer 16-bit bound. */
    return (amp > WEBRTC_INT16_MAX) ? WEBRTC_INT16_MAX : WEBRTC_INT16_MIN;
}
}
/*- End of function --------------------------------------------------------*/
/* Block 4 of the G.722 algorithm: updates one band's adaptive predictor
   (two-pole and six-zero sections) with the new difference signal |d|.
   Must stay numerically identical to block4() in g722_decode.c.
   Fix: compute a[1]*4 with a multiply instead of `<< 2` — left-shifting a
   negative signed int is undefined behavior in C, and the decoder-side
   block4 already uses the multiply form. The result is identical for all
   in-range values. */
static void block4(G722EncoderState *s, int band, int d)
{
    int wd1;
    int wd2;
    int wd3;
    int i;

    /* Block 4, RECONS */
    s->band[band].d[0] = d;
    s->band[band].r[0] = saturate(s->band[band].s + d);
    /* Block 4, PARREC */
    s->band[band].p[0] = saturate(s->band[band].sz + d);
    /* Block 4, UPPOL2 */
    for (i = 0; i < 3; i++)
        s->band[band].sg[i] = s->band[band].p[i] >> 15;
    wd1 = saturate(s->band[band].a[1] * 4);
    wd2 = (s->band[band].sg[0] == s->band[band].sg[1]) ? -wd1 : wd1;
    if (wd2 > 32767)
        wd2 = 32767;
    wd3 = (wd2 >> 7) + ((s->band[band].sg[0] == s->band[band].sg[2]) ? 128 : -128);
    wd3 += (s->band[band].a[2]*32512) >> 15;
    if (wd3 > 12288)
        wd3 = 12288;
    else if (wd3 < -12288)
        wd3 = -12288;
    s->band[band].ap[2] = wd3;
    /* Block 4, UPPOL1 */
    s->band[band].sg[0] = s->band[band].p[0] >> 15;
    s->band[band].sg[1] = s->band[band].p[1] >> 15;
    wd1 = (s->band[band].sg[0] == s->band[band].sg[1]) ? 192 : -192;
    wd2 = (s->band[band].a[1]*32640) >> 15;
    s->band[band].ap[1] = saturate(wd1 + wd2);
    wd3 = saturate(15360 - s->band[band].ap[2]);
    if (s->band[band].ap[1] > wd3)
        s->band[band].ap[1] = wd3;
    else if (s->band[band].ap[1] < -wd3)
        s->band[band].ap[1] = -wd3;
    /* Block 4, UPZERO */
    wd1 = (d == 0) ? 0 : 128;
    s->band[band].sg[0] = d >> 15;
    for (i = 1; i < 7; i++)
    {
        s->band[band].sg[i] = s->band[band].d[i] >> 15;
        wd2 = (s->band[band].sg[i] == s->band[band].sg[0]) ? wd1 : -wd1;
        wd3 = (s->band[band].b[i]*32640) >> 15;
        s->band[band].bp[i] = saturate(wd2 + wd3);
    }
    /* Block 4, DELAYA: shift the delay lines one step. */
    for (i = 6; i > 0; i--)
    {
        s->band[band].d[i] = s->band[band].d[i - 1];
        s->band[band].b[i] = s->band[band].bp[i];
    }
    for (i = 2; i > 0; i--)
    {
        s->band[band].r[i] = s->band[band].r[i - 1];
        s->band[band].p[i] = s->band[band].p[i - 1];
        s->band[band].a[i] = s->band[band].ap[i];
    }
    /* Block 4, FILTEP: pole-section prediction. */
    wd1 = saturate(s->band[band].r[1] + s->band[band].r[1]);
    wd1 = (s->band[band].a[1]*wd1) >> 15;
    wd2 = saturate(s->band[band].r[2] + s->band[band].r[2]);
    wd2 = (s->band[band].a[2]*wd2) >> 15;
    s->band[band].sp = saturate(wd1 + wd2);
    /* Block 4, FILTEZ: zero-section prediction. */
    s->band[band].sz = 0;
    for (i = 6; i > 0; i--)
    {
        wd1 = saturate(s->band[band].d[i] + s->band[band].d[i]);
        s->band[band].sz += (s->band[band].b[i]*wd1) >> 15;
    }
    s->band[band].sz = saturate(s->band[band].sz);
    /* Block 4, PREDIC: combined prediction. */
    s->band[band].s = saturate(s->band[band].sp + s->band[band].sz);
}
/*- End of function --------------------------------------------------------*/
/* Initializes (and if |s| is NULL, allocates) a G.722 encoder state.
   |rate| selects 48000/56000 bps (6/7 bits per sample); any other value
   means 64000 bps (8 bits). |options| is a bitmask of G722_SAMPLE_RATE_8000
   and G722_PACKED. Returns NULL if allocation fails. */
G722EncoderState* WebRtc_g722_encode_init(G722EncoderState* s,
                                          int rate,
                                          int options) {
    if (s == NULL)
    {
        if ((s = (G722EncoderState *) malloc(sizeof(*s))) == NULL)
            return NULL;
    }
    memset(s, 0, sizeof(*s));
    if (rate == 48000)
        s->bits_per_sample = 6;
    else if (rate == 56000)
        s->bits_per_sample = 7;
    else
        s->bits_per_sample = 8;
    if ((options & G722_SAMPLE_RATE_8000))
        s->eight_k = TRUE;
    /* Packed mode only applies when code words are narrower than a byte. */
    if ((options & G722_PACKED) && s->bits_per_sample != 8)
        s->packed = TRUE;
    else
        s->packed = FALSE;
    /* Initial quantizer scale factors per G.722. */
    s->band[0].det = 32;
    s->band[1].det = 8;
    return s;
}
/*- End of function --------------------------------------------------------*/
/* Release an encoder state obtained from WebRtc_g722_encode_init().
 * Frees the state unconditionally, so it must only be called with a
 * heap-allocated (or NULL) pointer. Always returns 0. */
int WebRtc_g722_encode_release(G722EncoderState *s)
{
    free(s);
    return 0;
}
/*- End of function --------------------------------------------------------*/
/* WebRtc, tlegrand:
* Only define the following if bit-exactness with reference implementation
* is needed. Will only have any effect if input signal is saturated.
*/
//#define RUN_LIKE_REFERENCE_G722
#ifdef RUN_LIKE_REFERENCE_G722
/* Clamp a sample to the 15-bit range [-16384, 16383] used by the
 * reference G.722 implementation. */
int16_t limitValues (int16_t rl)
{
    if (rl > 16383)
        return 16383;
    if (rl < -16384)
        return -16384;
    return rl;
}
#endif
/* Encode a block of PCM samples to G.722.
 *
 * Inputs:
 *   s    - encoder state, set up by WebRtc_g722_encode_init()
 *   amp  - input PCM samples (wideband, or 8 kHz narrowband when the state
 *          was initialized with G722_SAMPLE_RATE_8000)
 *   len  - number of samples in amp[]
 * Output:
 *   g722_data - receives the coded bytes
 *
 * Returns the number of bytes written to g722_data.
 *
 * The structure follows ITU-T Rec. G.722: a transmit QMF splits the input
 * into a low and a high band, which are ADPCM coded with 6 and 2 bits per
 * sample respectively.  The "Block nL/nH" comments refer to the block
 * diagram in the recommendation.
 *
 * NOTE(review): in the wideband (QMF) path two samples are consumed per
 * loop iteration, so len is assumed to be even — an odd len would read one
 * sample past the end of amp[].  TODO confirm callers guarantee this.
 */
size_t WebRtc_g722_encode(G722EncoderState *s, uint8_t g722_data[],
                          const int16_t amp[], size_t len)
{
    /* Low band 6-bit quantizer decision levels (scaled by det at runtime). */
    static const int q6[32] =
    {
        0, 35, 72, 110, 150, 190, 233, 276,
        323, 370, 422, 473, 530, 587, 650, 714,
        786, 858, 940, 1023, 1121, 1219, 1339, 1458,
        1612, 1765, 1980, 2195, 2557, 2919, 0, 0
    };
    /* Low band code words for negative (iln) and positive (ilp) error
       signals. */
    static const int iln[32] =
    {
        0, 63, 62, 31, 30, 29, 28, 27,
        26, 25, 24, 23, 22, 21, 20, 19,
        18, 17, 16, 15, 14, 13, 12, 11,
        10, 9, 8, 7, 6, 5, 4, 0
    };
    static const int ilp[32] =
    {
        0, 61, 60, 59, 58, 57, 56, 55,
        54, 53, 52, 51, 50, 49, 48, 47,
        46, 45, 44, 43, 42, 41, 40, 39,
        38, 37, 36, 35, 34, 33, 32, 0
    };
    /* Low band log scale factor adaptation increments, indexed via rl42. */
    static const int wl[8] =
    {
        -60, -30, 58, 172, 334, 538, 1198, 3042
    };
    static const int rl42[16] =
    {
        0, 7, 6, 5, 4, 3, 2, 1, 7, 6, 5, 4, 3, 2, 1, 0
    };
    /* Inverse log lookup used by the SCALEL/SCALEH blocks. */
    static const int ilb[32] =
    {
        2048, 2093, 2139, 2186, 2233, 2282, 2332,
        2383, 2435, 2489, 2543, 2599, 2656, 2714,
        2774, 2834, 2896, 2960, 3025, 3091, 3158,
        3228, 3298, 3371, 3444, 3520, 3597, 3676,
        3756, 3838, 3922, 4008
    };
    /* Inverse quantizer outputs: 4-bit low band (qm4), 2-bit high band
       (qm2). */
    static const int qm4[16] =
    {
        0, -20456, -12896, -8968,
        -6288, -4240, -2584, -1200,
        20456, 12896, 8968, 6288,
        4240, 2584, 1200, 0
    };
    static const int qm2[4] =
    {
        -7408, -1616, 7408, 1616
    };
    /* Transmit QMF filter coefficients (DC gain = 4096). */
    static const int qmf_coeffs[12] =
    {
        3, -11, 12, 32, -210, 951, 3876, -805, 362, -156, 53, -11,
    };
    /* High band code words for negative (ihn) and positive (ihp) error
       signals, plus the high band scale factor adaptation tables. */
    static const int ihn[3] = {0, 1, 0};
    static const int ihp[3] = {0, 3, 2};
    static const int wh[3] = {0, -214, 798};
    static const int rh2[4] = {2, 1, 2, 1};

    int dlow;
    int dhigh;
    int el;
    int wd;
    int wd1;
    int ril;
    int wd2;
    int il4;
    int ih2;
    int wd3;
    int eh;
    int mih;
    int i;
    size_t j;
    /* Low and high band PCM from the QMF */
    int xlow;
    int xhigh;
    size_t g722_bytes;
    /* Even and odd tap accumulators */
    int sumeven;
    int sumodd;
    int ihigh;
    int ilow;
    int code;

    g722_bytes = 0;
    xhigh = 0;
    for (j = 0; j < len; )
    {
        if (s->itu_test_mode)
        {
            /* ITU test mode: feed the same half-scale sample to both bands. */
            xlow =
            xhigh = amp[j++] >> 1;
        }
        else
        {
            if (s->eight_k)
            {
                /* We shift by 1 to allow for the 15 bit input to the G.722 algorithm. */
                xlow = amp[j++] >> 1;
            }
            else
            {
                /* Apply the transmit QMF */
                /* Shuffle the buffer down */
                for (i = 0; i < 22; i++)
                    s->x[i] = s->x[i + 2];
                s->x[22] = amp[j++];
                s->x[23] = amp[j++];

                /* Discard every other QMF output */
                sumeven = 0;
                sumodd = 0;
                for (i = 0; i < 12; i++)
                {
                    sumodd += s->x[2*i]*qmf_coeffs[i];
                    sumeven += s->x[2*i + 1]*qmf_coeffs[11 - i];
                }
                /* We shift by 12 to allow for the QMF filters (DC gain = 4096), plus 1
                   to allow for us summing two filters, plus 1 to allow for the 15 bit
                   input to the G.722 algorithm. */
                xlow = (sumeven + sumodd) >> 14;
                xhigh = (sumeven - sumodd) >> 14;

#ifdef RUN_LIKE_REFERENCE_G722
                /* The following lines are only used to verify bit-exactness
                 * with reference implementation of G.722. Higher precision
                 * is achieved without limiting the values.
                 */
                xlow = limitValues(xlow);
                xhigh = limitValues(xhigh);
#endif
            }
        }
        /* Block 1L, SUBTRA */
        el = saturate(xlow - s->band[0].s);

        /* Block 1L, QUANTL */
        wd = (el >= 0) ? el : -(el + 1);

        /* Find the first decision level above the magnitude of the error. */
        for (i = 1; i < 30; i++)
        {
            wd1 = (q6[i]*s->band[0].det) >> 12;
            if (wd < wd1)
                break;
        }
        ilow = (el < 0) ? iln[i] : ilp[i];

        /* Block 2L, INVQAL */
        ril = ilow >> 2;
        wd2 = qm4[ril];
        dlow = (s->band[0].det*wd2) >> 15;

        /* Block 3L, LOGSCL */
        il4 = rl42[ril];
        wd = (s->band[0].nb*127) >> 7;
        s->band[0].nb = wd + wl[il4];
        /* Keep the log scale factor within its defined range. */
        if (s->band[0].nb < 0)
            s->band[0].nb = 0;
        else if (s->band[0].nb > 18432)
            s->band[0].nb = 18432;

        /* Block 3L, SCALEL */
        wd1 = (s->band[0].nb >> 6) & 31;
        wd2 = 8 - (s->band[0].nb >> 11);
        wd3 = (wd2 < 0) ? (ilb[wd1] << -wd2) : (ilb[wd1] >> wd2);
        s->band[0].det = wd3 << 2;

        /* Block 4L: update the low band predictor. */
        block4(s, 0, dlow);

        if (s->eight_k)
        {
            /* Just leave the high bits as zero */
            code = (0xC0 | ilow) >> (8 - s->bits_per_sample);
        }
        else
        {
            /* Block 1H, SUBTRA */
            eh = saturate(xhigh - s->band[1].s);

            /* Block 1H, QUANTH */
            wd = (eh >= 0) ? eh : -(eh + 1);
            wd1 = (564*s->band[1].det) >> 12;
            mih = (wd >= wd1) ? 2 : 1;
            ihigh = (eh < 0) ? ihn[mih] : ihp[mih];

            /* Block 2H, INVQAH */
            wd2 = qm2[ihigh];
            dhigh = (s->band[1].det*wd2) >> 15;

            /* Block 3H, LOGSCH */
            ih2 = rh2[ihigh];
            wd = (s->band[1].nb*127) >> 7;
            s->band[1].nb = wd + wh[ih2];
            if (s->band[1].nb < 0)
                s->band[1].nb = 0;
            else if (s->band[1].nb > 22528)
                s->band[1].nb = 22528;

            /* Block 3H, SCALEH */
            wd1 = (s->band[1].nb >> 6) & 31;
            wd2 = 10 - (s->band[1].nb >> 11);
            wd3 = (wd2 < 0) ? (ilb[wd1] << -wd2) : (ilb[wd1] >> wd2);
            s->band[1].det = wd3 << 2;

            /* Block 4H: update the high band predictor. */
            block4(s, 1, dhigh);

            /* Merge the 2 high band bits and 6 low band bits, dropping low
               order bits as needed for 56/48 kbps operation. */
            code = ((ihigh << 6) | ilow) >> (8 - s->bits_per_sample);
        }

        if (s->packed)
        {
            /* Pack the code bits */
            s->out_buffer |= (code << s->out_bits);
            s->out_bits += s->bits_per_sample;
            if (s->out_bits >= 8)
            {
                g722_data[g722_bytes++] = (uint8_t) (s->out_buffer & 0xFF);
                s->out_bits -= 8;
                s->out_buffer >>= 8;
            }
        }
        else
        {
            g722_data[g722_bytes++] = (uint8_t) code;
        }
    }
    return g722_bytes;
}
/*- End of function --------------------------------------------------------*/
/*- End of file ------------------------------------------------------------*/

View File

@ -0,0 +1,107 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdlib.h>
#include <string.h>
#include "g722_enc_dec.h"
#include "g722_interface.h"
#include "webrtc/typedefs.h"
// Allocate the G.722 encoder state and hand it back through the out
// parameter. Returns 0 on success, -1 if the allocation failed.
int16_t WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst)
{
  *G722enc_inst = (G722EncInst*) malloc(sizeof(G722EncoderState));
  return (*G722enc_inst == NULL) ? -1 : 0;
}
// Reset an existing G.722 encoder instance.
// The underlying encoder is configured for 64 kbps, wideband mode (2).
// Returns 0 on success, -1 on error.
int16_t WebRtcG722_EncoderInit(G722EncInst *G722enc_inst)
{
  if (WebRtc_g722_encode_init((G722EncoderState*) G722enc_inst, 64000, 2) ==
      NULL) {
    return -1;
  }
  return 0;
}
// Release an encoder state allocated by WebRtcG722_CreateEncoder().
// Returns the result of WebRtc_g722_encode_release(), which is always 0.
int WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst)
{
  // Free encoder memory
  return WebRtc_g722_encode_release((G722EncoderState*) G722enc_inst);
}
// Encode `len` speech samples from speechIn into `encoded`.
// Thin wrapper that forwards straight to the underlying G.722 encoder and
// returns the number of coded bytes produced.
size_t WebRtcG722_Encode(G722EncInst *G722enc_inst,
                         const int16_t* speechIn,
                         size_t len,
                         uint8_t* encoded)
{
  return WebRtc_g722_encode((G722EncoderState*) G722enc_inst,
                            (unsigned char*) encoded, speechIn, len);
}
// Allocate the G.722 decoder state and hand it back through the out
// parameter. Returns 0 on success, -1 if the allocation failed.
int16_t WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst)
{
  *G722dec_inst = (G722DecInst*) malloc(sizeof(G722DecoderState));
  return (*G722dec_inst == NULL) ? -1 : 0;
}
// Reset a G.722 decoder instance.
// The underlying decoder is configured for 64 kbps, wideband mode (2),
// matching the encoder setup in WebRtcG722_EncoderInit().
void WebRtcG722_DecoderInit(G722DecInst* inst) {
  WebRtc_g722_decode_init((G722DecoderState*)inst, 64000, 2);
}
// Release a decoder state allocated by WebRtcG722_CreateDecoder().
int WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst)
{
  // Free decoder memory
  return WebRtc_g722_decode_release((G722DecoderState*) G722dec_inst);
}
// Decode a G.722 bitstream.
//
// Input:
//   - G722dec_inst : decoder state
//   - encoded      : G.722 coded bytes
//   - len          : number of bytes in `encoded`
// Output:
//   - decoded      : decoded PCM samples
//   - speechType   : always set to G722_WEBRTC_SPEECH, since G.722 has no
//                    DTX/CNG scheme of its own
//
// Returns the number of samples written to `decoded`.
size_t WebRtcG722_Decode(G722DecInst *G722dec_inst,
                         const uint8_t *encoded,
                         size_t len,
                         int16_t *decoded,
                         int16_t *speechType)
{
  // Decode the G.722 encoded stream
  *speechType=G722_WEBRTC_SPEECH;
  return WebRtc_g722_decode((G722DecoderState*) G722dec_inst, decoded,
                            encoded, len);
}
// Copy the codec version string into versionStr.
//
// Input:
//   - len : size of the versionStr buffer in bytes
// Output:
//   - versionStr : receives the NUL-terminated version string
//
// Returns 0 on success, -1 if versionStr is NULL or len is too small to
// hold the string and its terminator.
//
// Fix: the original compared `strlen(version) < (unsigned int)len`, so a
// negative len converted to a huge unsigned value, passed the check, and
// let strcpy() overflow the caller's buffer. Negative/zero lengths are now
// rejected before any comparison against the string length.
int16_t WebRtcG722_Version(char *versionStr, short len)
{
  static const char version[] = "2.0.0\n";
  if (versionStr == NULL || len <= 0 || (size_t) len <= strlen(version))
  {
    return -1;
  }
  strcpy(versionStr, version);
  return 0;
}

View File

@ -0,0 +1,182 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_G722_INTERFACE_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_G722_INTERFACE_H_
#include "webrtc/typedefs.h"
/*
* Solution to support multiple instances
*/
typedef struct WebRtcG722EncInst G722EncInst;
typedef struct WebRtcG722DecInst G722DecInst;
/*
* Comfort noise constants
*/
#define G722_WEBRTC_SPEECH 1
#define G722_WEBRTC_CNG 2
#ifdef __cplusplus
extern "C" {
#endif
/****************************************************************************
* WebRtcG722_CreateEncoder(...)
*
* Create memory used for G722 encoder
*
* Input:
* - G722enc_inst : G722 instance for encoder
*
* Return value : 0 - Ok
* -1 - Error
*/
int16_t WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst);
/****************************************************************************
* WebRtcG722_EncoderInit(...)
*
* This function initializes a G722 instance
*
* Input:
 *      - G722enc_inst      : G722 instance, i.e. the instance that should
 *                            be initialized
*
* Return value : 0 - Ok
* -1 - Error
*/
int16_t WebRtcG722_EncoderInit(G722EncInst *G722enc_inst);
/****************************************************************************
* WebRtcG722_FreeEncoder(...)
*
* Free the memory used for G722 encoder
*
* Input:
* - G722enc_inst : G722 instance for encoder
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst);
/****************************************************************************
* WebRtcG722_Encode(...)
*
 * This function encodes a frame of speech into G722 coded data.
*
* Input:
* - G722enc_inst : G722 instance, i.e. the user that should encode
* a packet
* - speechIn : Input speech vector
* - len : Samples in speechIn
*
* Output:
* - encoded : The encoded data vector
*
* Return value : Length (in bytes) of coded data
*/
size_t WebRtcG722_Encode(G722EncInst* G722enc_inst,
const int16_t* speechIn,
size_t len,
uint8_t* encoded);
/****************************************************************************
* WebRtcG722_CreateDecoder(...)
*
 * Create memory used for G722 decoder
*
* Input:
* - G722dec_inst : G722 instance for decoder
*
* Return value : 0 - Ok
* -1 - Error
*/
int16_t WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst);
/****************************************************************************
* WebRtcG722_DecoderInit(...)
*
* This function initializes a G722 instance
*
* Input:
* - inst : G722 instance
*/
void WebRtcG722_DecoderInit(G722DecInst* inst);
/****************************************************************************
* WebRtcG722_FreeDecoder(...)
*
* Free the memory used for G722 decoder
*
* Input:
* - G722dec_inst : G722 instance for decoder
*
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst);
/****************************************************************************
* WebRtcG722_Decode(...)
*
 * This function decodes a packet with G722 frame(s). Output speech length
* will be a multiple of 80 samples (80*frames/packet).
*
* Input:
* - G722dec_inst : G722 instance, i.e. the user that should decode
* a packet
* - encoded : Encoded G722 frame(s)
* - len : Bytes in encoded vector
*
* Output:
* - decoded : The decoded vector
* - speechType : 1 normal, 2 CNG (Since G722 does not have its own
* DTX/CNG scheme it should always return 1)
*
* Return value : Samples in decoded vector
*/
size_t WebRtcG722_Decode(G722DecInst *G722dec_inst,
const uint8_t* encoded,
size_t len,
int16_t *decoded,
int16_t *speechType);
/****************************************************************************
* WebRtcG722_Version(...)
*
* Get a string with the current version of the codec
*/
int16_t WebRtcG722_Version(char *versionStr, short len);
#ifdef __cplusplus
}
#endif
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_G722_G722_INTERFACE_H_ */

View File

@ -0,0 +1,158 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* testG722.cpp : Defines the entry point for the console application.
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "webrtc/typedefs.h"
/* include API */
#include "webrtc/modules/audio_coding/codecs/g722/g722_interface.h"
/* Runtime statistics */
#include <time.h>
#define CLOCKS_PER_SEC_G722 100000
// Forward declaration
typedef struct WebRtcG722EncInst G722EncInst;
typedef struct WebRtcG722DecInst G722DecInst;
/* function for reading audio data from PCM file */
/* Read one frame of `length` 16-bit PCM samples from `inp` into `data`.
 * A short read means end of file: the unread tail of the frame is
 * zero-filled and true is returned. A complete read returns false. */
bool readframe(int16_t *data, FILE *inp, size_t length)
{
    const size_t samples_read = fread(data, sizeof(int16_t), length, inp);
    if (samples_read < length)
    {
        /* Partial frame: pad with silence and signal end of file. */
        memset(data + samples_read, 0,
               (length - samples_read) * sizeof(int16_t));
        return true;
    }
    return false;
}
/* Console test application: reads 16 kHz PCM speech from a file, runs it
 * through the G.722 encoder and decoder, writes the bitstream and the
 * decoded speech to files, and prints simple runtime statistics.
 *
 * Fixes: argv paths were copied with unbounded sscanf("%s") into 60/40
 * byte buffers (overflow on long paths); the frame length was only checked
 * for being non-negative, although the processing buffers hold at most 960
 * samples and the wideband encoder consumes samples in pairs. */
int main(int argc, char* argv[])
{
    char inname[60], outbit[40], outname[40];
    FILE *inp, *outbitp, *outp;
    int framecnt;
    bool endfile;

    size_t framelength = 160;

    G722EncInst *G722enc_inst;
    G722DecInst *G722dec_inst;

    /* Runtime statistics */
    double starttime;
    double runtime = 0;
    double length_file;

    size_t stream_len = 0;
    int16_t shortdata[960];
    int16_t decoded[960];
    uint8_t streamdata[80 * 6];
    int16_t speechType[1];

    /* handling wrong input arguments in the command line */
    if (argc!=5) {
        printf("\n\nWrong number of arguments or flag values.\n\n");

        printf("\n");
        printf("Usage:\n\n");
        printf("./testG722.exe framelength infile outbitfile outspeechfile \n\n");
        printf("with:\n");
        printf("framelength : Framelength in samples.\n\n");
        printf("infile : Normal speech input file\n\n");
        printf("outbitfile : Bitstream output file\n\n");
        printf("outspeechfile: Speech output file\n\n");
        exit(0);
    }

    /* Get frame length. The local buffers hold at most 960 samples and the
       encoder consumes samples two at a time in wideband mode, so reject
       lengths that are negative, odd, or larger than the buffers. */
    int framelength_int = atoi(argv[1]);
    if (framelength_int < 0 || framelength_int > 960 ||
        (framelength_int % 2) != 0) {
        printf(" G.722: Invalid framelength %d.\n", framelength_int);
        exit(1);
    }
    framelength = static_cast<size_t>(framelength_int);

    /* Get Input and Output files. Copy with an explicit bound so overlong
       paths cannot overflow the fixed-size name buffers. */
    snprintf(inname, sizeof(inname), "%s", argv[2]);
    snprintf(outbit, sizeof(outbit), "%s", argv[3]);
    snprintf(outname, sizeof(outname), "%s", argv[4]);

    if ((inp = fopen(inname,"rb")) == NULL) {
        printf(" G.722: Cannot read file %s.\n", inname);
        exit(1);
    }
    if ((outbitp = fopen(outbit,"wb")) == NULL) {
        printf(" G.722: Cannot write file %s.\n", outbit);
        exit(1);
    }
    if ((outp = fopen(outname,"wb")) == NULL) {
        printf(" G.722: Cannot write file %s.\n", outname);
        exit(1);
    }
    printf("\nInput:%s\nOutput bitstream:%s\nOutput:%s\n", inname, outbit, outname);

    /* Create and init */
    WebRtcG722_CreateEncoder((G722EncInst **)&G722enc_inst);
    WebRtcG722_CreateDecoder((G722DecInst **)&G722dec_inst);
    WebRtcG722_EncoderInit((G722EncInst *)G722enc_inst);
    WebRtcG722_DecoderInit((G722DecInst *)G722dec_inst);

    /* Initialize encoder and decoder */
    framecnt = 0;
    endfile = false;
    while (!endfile) {
        framecnt++;

        /* Read speech block */
        endfile = readframe(shortdata, inp, framelength);

        /* Start clock before call to encoder and decoder */
        starttime = clock()/(double)CLOCKS_PER_SEC_G722;

        /* G.722 encoding + decoding */
        stream_len = WebRtcG722_Encode((G722EncInst *)G722enc_inst, shortdata, framelength, streamdata);
        WebRtcG722_Decode(G722dec_inst, streamdata, stream_len, decoded,
                          speechType);

        /* Stop clock after call to encoder and decoder */
        runtime += (double)((clock()/(double)CLOCKS_PER_SEC_G722)-starttime);

        /* Write coded bits to file */
        if (fwrite(streamdata, sizeof(short), stream_len / 2, outbitp) !=
            stream_len / 2) {
            return -1;
        }
        /* Write coded speech to file */
        if (fwrite(decoded, sizeof(short), framelength, outp) !=
            framelength) {
            return -1;
        }
    }

    WebRtcG722_FreeEncoder((G722EncInst *)G722enc_inst);
    WebRtcG722_FreeDecoder((G722DecInst *)G722dec_inst);

    length_file = ((double)framecnt*(double)framelength/16000);
    printf("\n\nLength of speech file: %.1f s\n", length_file);
    printf("Time to run G.722: %.2f s (%.2f %% of realtime)\n\n", runtime, (100*runtime/length_file));
    printf("---------------------END----------------------\n");

    fclose(inp);
    fclose(outbitp);
    fclose(outp);

    return 0;
}