* Update the session layer to p4 37930

* Update the peerconnection_client to keep it in sync with the corresponding updates on the libjingle side.
Review URL: http://webrtc-codereview.appspot.com/29008

git-svn-id: http://webrtc.googlecode.com/svn/trunk@34 4adac7df-926f-26a2-2b94-8c16560cd09d
ronghuawu@google.com
2011-06-01 18:50:40 +00:00
parent c16e32d346
commit e6988b9de5
16 changed files with 451 additions and 1700 deletions
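
The gist of the change, before the per-file diffs: PeerConnection's video rendering moves from the window/region overload of SetVideoRenderer to renderer objects, and the WebRtcChannelManager wrapper is dropped in favor of cricket::ChannelManager directly. A rough before/after call-site sketch, assuming the talk/app/peerconnection.h declarations in this change (the webrtc:: namespace qualification is assumed; pc, hwnd, remote_stream_id and the renderer pointers are illustrative placeholders, not code from the CL):

#include "talk/app/peerconnection.h"

// Hypothetical helper showing the post-CL renderer wiring.
void AttachRenderers(webrtc::PeerConnection* pc,
                     const std::string& remote_stream_id,
                     cricket::VideoRenderer* local_renderer,
                     cricket::VideoRenderer* remote_renderer) {
  // Pre-CL, rendering was addressed by channel id and a native window region,
  // with channel_id == -1 meaning the local preview:
  //   pc->SetVideoRenderer(-1, hwnd, 0, 0.7f, 0.7f, 0.95f, 0.95f);
  //   pc->SetVideoRenderer(channel_id, hwnd, 1, 0.0f, 0.0f, 1.0f, 1.0f);
  // Post-CL, PeerConnection takes cricket::VideoRenderer objects instead:
  pc->SetLocalVideoRenderer(local_renderer);
  pc->SetVideoRenderer(remote_stream_id, remote_renderer);
}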


@@ -115,7 +115,7 @@ bool PeerConnection::Init() {
cricket::PORTALLOCATOR_DISABLE_RELAY);
// create channel manager
channel_manager_.reset(new WebRtcChannelManager(media_thread_.get()));
channel_manager_.reset(new cricket::ChannelManager(media_thread_.get()));
//start the media thread
media_thread_->SetPriority(talk_base::PRIORITY_HIGH);
@@ -244,24 +244,22 @@ bool PeerConnection::SetAudioDevice(const std::string& wave_in_device,
return channel_manager_->SetAudioOptions(wave_in_device, wave_out_device, opts);
}
bool PeerConnection::SetLocalVideoRenderer(cricket::VideoRenderer* renderer) {
return channel_manager_->SetLocalRenderer(renderer);
}
bool PeerConnection::SetVideoRenderer(const std::string& stream_id,
cricket::VideoRenderer* renderer) {
ASSERT(session_ != NULL);
return session_->SetVideoRenderer(stream_id, renderer);
}
bool PeerConnection::SetVideoRenderer(const std::string& stream_id,
ExternalRenderer* external_renderer) {
ASSERT(session_ != NULL);
return session_->SetVideoRenderer(stream_id, external_renderer);
}
bool PeerConnection::SetVideoRenderer(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom) {
ASSERT(session_ != NULL);
return session_->SetVideoRenderer(channel_id, window, zOrder, left, top,
right, bottom);
}
bool PeerConnection::SetVideoCapture(const std::string& cam_device) {
return channel_manager_->SetVideoOptions(cam_device);
}


@@ -10,7 +10,7 @@
#include "talk/base/thread.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/basicpacketsocketfactory.h"
#include "talk/app/webrtcchannelmanager.h"
#include "talk/session/phone/channelmanager.h"
namespace Json {
class Value;
@@ -18,6 +18,8 @@ class Value;
namespace cricket {
class BasicPortAllocator;
class ChannelManager;
class VideoRenderer;
}
#ifdef PLATFORM_CHROMIUM
@@ -76,16 +78,13 @@ class PeerConnection : public sigslot::has_slots<> {
bool SetAudioDevice(const std::string& wave_in_device,
const std::string& wave_out_device, int opts);
// Set the video renderer
bool SetLocalVideoRenderer(cricket::VideoRenderer* renderer);
bool SetVideoRenderer(const std::string& stream_id,
cricket::VideoRenderer* renderer);
bool SetVideoRenderer(const std::string& stream_id,
ExternalRenderer* external_renderer);
// Set channel_id to -1 for the local preview
bool SetVideoRenderer(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom);
// Set video capture device
// For Chromium the cam_device should use the capture session id.
// For standalone app, cam_device is the camera name. It will try to
@@ -120,7 +119,7 @@ class PeerConnection : public sigslot::has_slots<> {
std::string config_;
talk_base::scoped_ptr<talk_base::Thread> media_thread_;
talk_base::scoped_ptr<WebRtcChannelManager> channel_manager_;
talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
talk_base::scoped_ptr<talk_base::NetworkManager> network_manager_;
talk_base::scoped_ptr<cricket::BasicPortAllocator> port_allocator_;
talk_base::scoped_ptr<talk_base::BasicPacketSocketFactory> socket_factory_;


@@ -20,6 +20,7 @@
#include "talk/app/peerconnection.h"
#include "talk/app/session_test/main_wnd.h"
#include "talk/base/logging.h"
#include "talk/session/phone/videorendererfactory.h"
static const char kAudioLabel[] = "audio_label";
static const char kVideoLabel[] = "video_label";
@@ -217,6 +218,7 @@ class PeerConnectionClient : public sigslot::has_slots<> {
}
void OnConnect(talk_base::AsyncSocket* socket) {
ASSERT(!onconnect_data_.empty());
int sent = socket->Send(onconnect_data_.c_str(), onconnect_data_.length());
ASSERT(sent == onconnect_data_.length());
onconnect_data_.clear();
@@ -385,8 +387,10 @@ class PeerConnectionClient : public sigslot::has_slots<> {
notification_data_.clear();
}
if (hanging_get_.GetState() == talk_base::Socket::CS_CLOSED)
if (hanging_get_.GetState() == talk_base::Socket::CS_CLOSED &&
state_ == CONNECTED) {
hanging_get_.Connect(server_address_);
}
}
// Parses a single line entry in the form "<name>,<id>,<connected>"
@@ -446,7 +450,9 @@ class PeerConnectionClient : public sigslot::has_slots<> {
void OnClose(talk_base::AsyncSocket* socket, int err) {
LOG(INFO) << __FUNCTION__;
socket->Close();
if (err != WSAECONNREFUSED) {
if (socket == &hanging_get_) {
if (state_ == CONNECTED) {
@@ -553,6 +559,7 @@ class ConnectionObserver
void DeletePeerConnection() {
peer_connection_.reset();
handshake_ = NONE;
}
void StartCaptureDevice() {
@@ -561,8 +568,11 @@ class ConnectionObserver
main_wnd_->SwitchToStreamingUI();
if (peer_connection_->SetVideoCapture("")) {
peer_connection_->SetVideoRenderer(-1, main_wnd_->handle(), 0,
0.7f, 0.7f, 0.95f, 0.95f);
if (!local_renderer_.get()) {
local_renderer_.reset(
cricket::VideoRendererFactory::CreateGuiVideoRenderer(176, 144));
}
peer_connection_->SetLocalVideoRenderer(local_renderer_.get());
} else {
ASSERT(false);
}
@@ -612,8 +622,12 @@ class ConnectionObserver
video_channel_ = channel_id;
waiting_for_video_ = false;
LOG(INFO) << "Setting video renderer for channel: " << channel_id;
bool ok = peer_connection_->SetVideoRenderer(channel_id,
main_wnd_->handle(), 1, 0.0f, 0.0f, 1.0f, 1.0f);
if (!remote_renderer_.get()) {
remote_renderer_.reset(
cricket::VideoRendererFactory::CreateGuiVideoRenderer(352, 288));
}
bool ok = peer_connection_->SetVideoRenderer(stream_id,
remote_renderer_.get());
ASSERT(ok);
} else {
ASSERT(audio_channel_ == -1);
@@ -774,7 +788,6 @@ class ConnectionObserver
LOG(INFO) << "PEER_CONNECTION_CLOSED";
DeletePeerConnection();
::InvalidateRect(main_wnd_->handle(), NULL, TRUE);
handshake_ = NONE;
waiting_for_audio_ = false;
waiting_for_video_ = false;
peer_id_ = -1;
@@ -790,7 +803,12 @@ class ConnectionObserver
DisconnectFromServer();
}
} else if (msg == SEND_MESSAGE_TO_PEER) {
client_->SendToPeer(peer_id_, *reinterpret_cast<std::string*>(lp));
bool ok = client_->SendToPeer(peer_id_,
*reinterpret_cast<std::string*>(lp));
if (!ok) {
LOG(LS_ERROR) << "SendToPeer failed";
DisconnectFromServer();
}
} else {
ret = false;
}
@@ -808,6 +826,8 @@ class ConnectionObserver
MainWnd* main_wnd_;
int video_channel_;
int audio_channel_;
scoped_ptr<cricket::VideoRenderer> local_renderer_;
scoped_ptr<cricket::VideoRenderer> remote_renderer_;
};
int PASCAL wWinMain(HINSTANCE instance, HINSTANCE prev_instance,

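
In the client above, the window-region calls are replaced by GUI renderer objects created through cricket::VideoRendererFactory and kept in scoped_ptr members for the lifetime of the call. A minimal sketch of that creation step, assuming the factory header added above (the 176x144 and 352x288 sizes mirror the values the client uses; ownership staying with the caller is inferred from the scoped_ptr members, not stated in the CL):

#include "talk/base/scoped_ptr.h"
#include "talk/session/phone/videorendererfactory.h"

// Hypothetical helper: create the local-preview and remote-view renderers the
// way the sample client now does, handing ownership back to the caller.
void CreateGuiRenderers(
    talk_base::scoped_ptr<cricket::VideoRenderer>* local_renderer,
    talk_base::scoped_ptr<cricket::VideoRenderer>* remote_renderer) {
  local_renderer->reset(
      cricket::VideoRendererFactory::CreateGuiVideoRenderer(176, 144));  // QCIF preview
  remote_renderer->reset(
      cricket::VideoRendererFactory::CreateGuiVideoRenderer(352, 288));  // CIF remote view
}

The resulting renderers are then passed to PeerConnection::SetLocalVideoRenderer and PeerConnection::SetVideoRenderer(stream_id, renderer), as shown in the first file of this change.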

@@ -31,6 +31,7 @@
#include "talk/base/common.h"
#include "common_types.h"
#include "modules/interface/module_common_types.h"
#include "video_engine/main/interface/vie_base.h"
#include "video_engine/main/interface/vie_capture.h"
#include "video_engine/main/interface/vie_codec.h"


@@ -14,6 +14,7 @@
#include "talk/app/voicemediaengine.h"
#include "modules/video_capture/main/interface/video_capture.h"
#include "vplib.h"
#ifndef ARRAYSIZE
#define ARRAYSIZE(a) (sizeof(a) / sizeof((a)[0]))
@@ -25,6 +26,213 @@ static const int kDefaultLogSeverity = 3;
static const int kStartVideoBitrate = 300;
static const int kMaxVideoBitrate = 1000;
CricketWebRTCVideoFrame::CricketWebRTCVideoFrame() {
}
CricketWebRTCVideoFrame::~CricketWebRTCVideoFrame() {
// TODO(ronghuawu): should CricketWebRTCVideoFrame own the buffer?
WebRtc_UWord8* newMemory = NULL;
WebRtc_UWord32 newLength = 0;
WebRtc_UWord32 newSize = 0;
video_frame_.Swap(newMemory, newLength, newSize);
}
void CricketWebRTCVideoFrame::Attach(unsigned char* buffer, int bufferSize,
int w, int h) {
WebRtc_UWord8* newMemory = buffer;
WebRtc_UWord32 newLength = bufferSize;
WebRtc_UWord32 newSize = bufferSize;
video_frame_.Swap(newMemory, newLength, newSize);
video_frame_.SetWidth(w);
video_frame_.SetHeight(h);
}
size_t CricketWebRTCVideoFrame::GetWidth() const {
return video_frame_.Width();
}
size_t CricketWebRTCVideoFrame::GetHeight() const {
return video_frame_.Height();
}
const uint8* CricketWebRTCVideoFrame::GetYPlane() const {
WebRtc_UWord8* buffer = video_frame_.Buffer();
return buffer;
}
const uint8* CricketWebRTCVideoFrame::GetUPlane() const {
WebRtc_UWord8* buffer = video_frame_.Buffer();
if (buffer)
buffer += (video_frame_.Width() * video_frame_.Height());
return buffer;
}
const uint8* CricketWebRTCVideoFrame::GetVPlane() const {
WebRtc_UWord8* buffer = video_frame_.Buffer();
if (buffer)
buffer += (video_frame_.Width() * video_frame_.Height() * 5 / 4);
return buffer;
}
uint8* CricketWebRTCVideoFrame::GetYPlane() {
WebRtc_UWord8* buffer = video_frame_.Buffer();
return buffer;
}
uint8* CricketWebRTCVideoFrame::GetUPlane() {
WebRtc_UWord8* buffer = video_frame_.Buffer();
if (buffer)
buffer += (video_frame_.Width() * video_frame_.Height());
return buffer;
}
uint8* CricketWebRTCVideoFrame::GetVPlane() {
WebRtc_UWord8* buffer = video_frame_.Buffer();
if (buffer)
buffer += (video_frame_.Width() * video_frame_.Height() * 5 / 4);
return buffer;
}
cricket::VideoFrame* CricketWebRTCVideoFrame::Copy() const {
WebRtc_UWord8* buffer = video_frame_.Buffer();
if (buffer) {
int new_buffer_size = video_frame_.Length();
unsigned char* new_buffer = new unsigned char[new_buffer_size];
memcpy(new_buffer, buffer, new_buffer_size);
CricketWebRTCVideoFrame* copy = new CricketWebRTCVideoFrame();
copy->Attach(new_buffer, new_buffer_size,
video_frame_.Width(), video_frame_.Height());
copy->SetTimeStamp(video_frame_.TimeStamp());
copy->SetElapsedTime(elapsed_time_);
return copy;
}
return NULL;
}
size_t CricketWebRTCVideoFrame::CopyToBuffer(
uint8* buffer, size_t size) const {
if (!video_frame_.Buffer()) {
return 0;
}
size_t needed = video_frame_.Length();
if (needed <= size) {
memcpy(buffer, video_frame_.Buffer(), needed);
}
return needed;
}
size_t CricketWebRTCVideoFrame::ConvertToRgbBuffer(uint32 to_fourcc,
uint8* buffer,
size_t size,
size_t pitch_rgb) const {
if (!video_frame_.Buffer()) {
return 0;
}
size_t width = video_frame_.Width();
size_t height = video_frame_.Height();
// See http://www.virtualdub.org/blog/pivot/entry.php?id=190 for a good
// explanation of pitch and why this is the amount of space we need.
size_t needed = pitch_rgb * (height - 1) + 4 * width;
if (needed > size) {
LOG(LS_WARNING) << "RGB buffer is not large enough";
return needed;
}
VideoType outgoingVideoType = kUnknown;
switch (to_fourcc) {
case cricket::FOURCC_ARGB:
outgoingVideoType = kARGB;
break;
default:
LOG(LS_WARNING) << "RGB type not supported: " << to_fourcc;
break;
}
if (outgoingVideoType != kUnknown)
ConvertFromI420(outgoingVideoType, video_frame_.Buffer(),
width, height, buffer);
return needed;
}
// TODO(ronghuawu): Implement StretchToPlanes
void CricketWebRTCVideoFrame::StretchToPlanes(
uint8* y, uint8* u, uint8* v,
int32 dst_pitch_y, int32 dst_pitch_u, int32 dst_pitch_v,
size_t width, size_t height, bool interpolate, bool crop) const {
}
size_t CricketWebRTCVideoFrame::StretchToBuffer(size_t w, size_t h,
uint8* buffer, size_t size,
bool interpolate,
bool crop) const {
if (!video_frame_.Buffer()) {
return 0;
}
size_t needed = video_frame_.Length();
if (needed <= size) {
uint8* bufy = buffer;
uint8* bufu = bufy + w * h;
uint8* bufv = bufu + ((w + 1) >> 1) * ((h + 1) >> 1);
StretchToPlanes(bufy, bufu, bufv, w, (w + 1) >> 1, (w + 1) >> 1, w, h,
interpolate, crop);
}
return needed;
}
void CricketWebRTCVideoFrame::StretchToFrame(cricket::VideoFrame *target,
bool interpolate, bool crop) const {
if (!target) return;
StretchToPlanes(target->GetYPlane(),
target->GetUPlane(),
target->GetVPlane(),
target->GetYPitch(),
target->GetUPitch(),
target->GetVPitch(),
target->GetWidth(),
target->GetHeight(),
interpolate, crop);
target->SetElapsedTime(GetElapsedTime());
target->SetTimeStamp(GetTimeStamp());
}
cricket::VideoFrame* CricketWebRTCVideoFrame::Stretch(size_t w, size_t h,
bool interpolate, bool crop) const {
// TODO(ronghuawu): implement
CricketWebRTCVideoFrame* frame = new CricketWebRTCVideoFrame();
return frame;
}
CricketWebRTCVideoRenderer::CricketWebRTCVideoRenderer
(cricket::VideoRenderer* renderer)
:renderer_(renderer) {
}
CricketWebRTCVideoRenderer::~CricketWebRTCVideoRenderer() {
}
int CricketWebRTCVideoRenderer::FrameSizeChange(unsigned int width,
unsigned int height,
unsigned int numberOfStreams) {
ASSERT(renderer_ != NULL);
width_ = width;
height_ = height;
number_of_streams_ = numberOfStreams;
return renderer_->SetSize(width_, height_, 0) ? 0 : -1;
}
int CricketWebRTCVideoRenderer::DeliverFrame(unsigned char* buffer,
int bufferSize) {
ASSERT(renderer_ != NULL);
video_frame_.Attach(buffer, bufferSize, width_, height_);
return renderer_->RenderFrame(&video_frame_) ? 0 : -1;
}
const RtcVideoEngine::VideoCodecPref RtcVideoEngine::kVideoCodecPrefs[] = {
{"VP8", 104, 0},
{"H264", 105, 1}
@@ -231,29 +439,23 @@ bool RtcVideoEngine::SetCaptureDevice(const cricket::Device* cam) {
return (capture_id_ != -1);
}
bool RtcVideoEngine::SetVideoRenderer(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom) {
int ret;
if (channel_id == -1)
channel_id = capture_id_;
ret = video_engine_->render()->AddRenderer(
channel_id, window, zOrder, left, top, right, bottom);
if (ret !=0 )
return false;
ret = video_engine_->render()->StartRender(channel_id);
if (ret !=0 )
return false;
return true;
}
bool RtcVideoEngine::SetLocalRenderer(cricket::VideoRenderer* renderer) {
LOG(LS_WARNING) << "Not required call SetLocalRenderer for webrtc";
return false;
if (!local_renderer_.get()) {
local_renderer_.reset(new CricketWebRTCVideoRenderer(renderer));
} else {
// Renderer already set
return true;
}
int ret;
ret = video_engine_->render()->AddRenderer(capture_id_,
kVideoI420,
local_renderer_.get());
if (ret != 0)
return false;
ret = video_engine_->render()->StartRender(capture_id_);
return (ret == 0);
}
cricket::CaptureResult RtcVideoEngine::SetCapture(bool capture) {
@@ -565,7 +767,22 @@ bool RtcVideoMediaChannel::RemoveStream(uint32 ssrc) {
bool RtcVideoMediaChannel::SetRenderer(
uint32 ssrc, cricket::VideoRenderer* renderer) {
return false;
if (!remote_renderer_.get()) {
remote_renderer_.reset(new CricketWebRTCVideoRenderer(renderer));
} else {
// Renderer already set
return true;
}
int ret;
ret = engine_->video_engine()->render()->AddRenderer(video_channel_,
kVideoI420,
remote_renderer_.get());
if (ret != 0)
return false;
ret = engine_->video_engine()->render()->StartRender(video_channel_);
return (ret == 0);
}
bool RtcVideoMediaChannel::SetExternalRenderer(uint32 ssrc, void* renderer)
@@ -575,12 +792,11 @@ bool RtcVideoMediaChannel::SetExternalRenderer(uint32 ssrc, void* renderer)
video_channel_,
kVideoI420,
static_cast<ExternalRenderer*>(renderer));
if (ret !=0 )
if (ret != 0)
return false;
ret = engine_->video_engine()->render()->StartRender(video_channel_);
if (ret !=0 )
return false;
return true;
return (ret == 0);
}
bool RtcVideoMediaChannel::GetStats(cricket::VideoMediaInfo* info) {

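
The plane accessors and StretchToBuffer in the file above rely on the standard contiguous I420 layout: the U plane starts at width*height, the V plane at width*height*5/4, and the chroma pitches are width/2. The same reasoning gives ConvertToRgbBuffer its required size of pitch_rgb*(height-1) + 4*width for a 4-byte-per-pixel destination: a full pitch for every row except the last. A small self-contained check of the I420 arithmetic, independent of the WebRTC types:

#include <cassert>
#include <cstddef>

// Offsets of the U/V planes of a contiguous I420 frame, matching the
// assumptions in CricketWebRTCVideoFrame::GetUPlane()/GetVPlane() above.
struct I420Layout {
  size_t u_offset;
  size_t v_offset;
  size_t total_size;
};

I420Layout ComputeI420Layout(size_t width, size_t height) {
  const size_t y_size = width * height;
  const size_t chroma_size = ((width + 1) / 2) * ((height + 1) / 2);  // 2x2 subsampled
  I420Layout layout;
  layout.u_offset = y_size;                      // U follows Y
  layout.v_offset = y_size + chroma_size;        // V follows U: 5/4 * w * h for even sizes
  layout.total_size = y_size + 2 * chroma_size;  // 3/2 * w * h for even sizes
  return layout;
}

int main() {
  const I420Layout cif = ComputeI420Layout(352, 288);
  assert(cif.u_offset == 352 * 288);
  assert(cif.v_offset == 352 * 288 * 5 / 4);
  assert(cif.total_size == 352 * 288 * 3 / 2);
  return 0;
}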

@@ -49,6 +49,74 @@ class RtcVideoMediaChannel;
class RtcVoiceEngine;
class ExternalRenderer;
// CricketWebRTCVideoFrame only supports I420
class CricketWebRTCVideoFrame : public cricket::VideoFrame {
public:
CricketWebRTCVideoFrame();
~CricketWebRTCVideoFrame();
void Attach(unsigned char* buffer, int bufferSize, int w, int h);
virtual size_t GetWidth() const;
virtual size_t GetHeight() const;
virtual const uint8* GetYPlane() const;
virtual const uint8* GetUPlane() const;
virtual const uint8* GetVPlane() const;
virtual uint8* GetYPlane();
virtual uint8* GetUPlane();
virtual uint8* GetVPlane();
virtual int32 GetYPitch() const { return video_frame_.Width(); }
virtual int32 GetUPitch() const { return video_frame_.Width() / 2; }
virtual int32 GetVPitch() const { return video_frame_.Width() / 2; }
virtual size_t GetPixelWidth() const { return 1; }
virtual size_t GetPixelHeight() const { return 1; }
virtual int64 GetElapsedTime() const { return elapsed_time_; }
virtual int64 GetTimeStamp() const { return video_frame_.TimeStamp(); }
virtual void SetElapsedTime(int64 elapsed_time) {
elapsed_time_ = elapsed_time;
}
virtual void SetTimeStamp(int64 time_stamp) {
video_frame_.SetTimeStamp(time_stamp);
}
virtual VideoFrame* Copy() const;
virtual size_t CopyToBuffer(uint8* buffer, size_t size) const;
virtual size_t ConvertToRgbBuffer(uint32 to_fourcc, uint8* buffer,
size_t size, size_t pitch_rgb) const;
virtual void StretchToPlanes(uint8* y, uint8* u, uint8* v,
int32 pitchY, int32 pitchU, int32 pitchV,
size_t width, size_t height,
bool interpolate, bool crop) const;
virtual size_t StretchToBuffer(size_t w, size_t h, uint8* buffer, size_t size,
bool interpolate, bool crop) const;
virtual void StretchToFrame(VideoFrame* target, bool interpolate,
bool crop) const;
virtual VideoFrame* Stretch(size_t w, size_t h, bool interpolate,
bool crop) const;
private:
webrtc::VideoFrame video_frame_;
int64 elapsed_time_;
};
class CricketWebRTCVideoRenderer : public ExternalRenderer {
public:
CricketWebRTCVideoRenderer(cricket::VideoRenderer* renderer);
virtual int FrameSizeChange(unsigned int width, unsigned int height,
unsigned int numberOfStreams);
virtual int DeliverFrame(unsigned char* buffer, int bufferSize);
virtual ~CricketWebRTCVideoRenderer();
private:
cricket::VideoRenderer* renderer_;
CricketWebRTCVideoFrame video_frame_;
unsigned int width_;
unsigned int height_;
unsigned int number_of_streams_;
};
class RtcVideoEngine : public ViEBaseObserver, public TraceCallback {
public:
RtcVideoEngine();
@@ -72,13 +140,6 @@ class RtcVideoEngine : public ViEBaseObserver, public TraceCallback {
bool SetOptions(int options);
//TODO - need to change this interface for webrtc
bool SetCaptureDevice(const cricket::Device* device);
bool SetVideoRenderer(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom);
bool SetLocalRenderer(cricket::VideoRenderer* renderer);
cricket::CaptureResult SetCapture(bool capture);
const std::vector<cricket::VideoCodec>& codecs() const;
@@ -133,6 +194,7 @@ class RtcVideoEngine : public ViEBaseObserver, public TraceCallback {
cricket::VideoEncoderConfig default_encoder_config_;
cricket::VideoCodec default_codec_;
bool capture_started_;
talk_base::scoped_ptr<CricketWebRTCVideoRenderer> local_renderer_;
};
class RtcVideoMediaChannel: public cricket::VideoMediaChannel,
@@ -188,6 +250,7 @@ class RtcVideoMediaChannel: public cricket::VideoMediaChannel,
bool sending_;
bool render_started_;
webrtc::VideoCodec send_codec_;
talk_base::scoped_ptr<CricketWebRTCVideoRenderer> remote_renderer_;
};
}


@@ -1,137 +0,0 @@
// Copyright 2011 Google Inc. All Rights Reserved.
// Author: mallinath@google.com (Mallinath Bareddy)
#include "talk/app/webrtcchannelmanager.h"
namespace webrtc {
struct VideoCaptureDeviceParams : public talk_base::MessageData {
VideoCaptureDeviceParams(const std::string& cam_device)
: cam_device(cam_device),
result(false) {}
const std::string cam_device;
bool result;
};
struct RenderParams : public talk_base::MessageData {
RenderParams(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom)
:channel_id(channel_id)
,window(window)
,zOrder(zOrder)
,left(left)
,top(top)
,right(right)
,bottom(bottom) {}
int channel_id;
void* window;
unsigned int zOrder;
float left;
float top;
float right;
float bottom;
bool result;
};
bool WebRtcChannelManager::Init() {
return MaybeInit();
}
cricket::VoiceChannel* WebRtcChannelManager::CreateVoiceChannel(
cricket::BaseSession* s, const std::string& content_name, bool rtcp) {
return (MaybeInit()) ?
ChannelManager::CreateVoiceChannel(s, content_name, rtcp) : NULL;
}
cricket::VideoChannel* WebRtcChannelManager::CreateVideoChannel(
cricket::BaseSession* s, const std::string& content_name, bool rtcp,
cricket::VoiceChannel* vc) {
return (MaybeInit()) ?
ChannelManager::CreateVideoChannel(s, content_name, rtcp, vc) : NULL;
}
cricket::Soundclip* WebRtcChannelManager::CreateSoundclip() {
return (MaybeInit()) ? ChannelManager::CreateSoundclip() : NULL;
}
void WebRtcChannelManager::DestroyVoiceChannel(cricket::VoiceChannel* vc) {
ChannelManager::DestroyVoiceChannel(vc);
MaybeTerm();
}
void WebRtcChannelManager::DestroyVideoChannel(cricket::VideoChannel* vc) {
ChannelManager::DestroyVideoChannel(vc);
MaybeTerm();
}
void WebRtcChannelManager::DestroySoundclip(cricket::Soundclip* s) {
ChannelManager::DestroySoundclip(s);
MaybeTerm();
}
bool WebRtcChannelManager::MaybeInit() {
bool ret = initialized();
if (!ret) {
ret = ChannelManager::Init();
}
return ret;
}
void WebRtcChannelManager::MaybeTerm() {
if (initialized() && !has_channels()) {
Terminate();
}
}
bool WebRtcChannelManager::SetVideoRenderer(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom) {
if (MaybeInit()) {
RenderParams params(channel_id, window, zOrder, left, top, right, bottom);
return cricket::ChannelManager::Send(MSG_SETRTC_VIDEORENDERER, &params);
} else {
return false;
}
}
void WebRtcChannelManager::SetVideoRenderer_w(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom) {
ASSERT(worker_thread() == talk_base::Thread::Current());
ASSERT(initialized());
media_engine()->SetVideoRenderer(channel_id, window, zOrder, left, top, right, bottom);
}
void WebRtcChannelManager::OnMessage(talk_base::Message *message) {
talk_base::MessageData* data = message->pdata;
switch(message->message_id) {
case MSG_SETRTC_VIDEORENDERER: {
RenderParams* p = static_cast<RenderParams*>(data);
SetVideoRenderer_w(p->channel_id,
p->window,
p->zOrder,
p->left,
p->top,
p->right,
p->bottom);
break;
}
default: {
ChannelManager::OnMessage(message);
}
}
}
} // namespace webrtc


@@ -1,68 +0,0 @@
// Copyright 2011 Google Inc. All Rights Reserved.
// Author: mallinath@google.com (Mallinath Bareddy)
#ifndef TALK_APP_WEBRTC_WEBRTCCHANNELMANAGER_H_
#define TALK_APP_WEBRTC_WEBRTCCHANNELMANAGER_H_
#include "talk/session/phone/channelmanager.h"
namespace webrtc {
class AudioDeviceModule;
enum {
MSG_SETRTC_VIDEORENDERER = 21, // Set internal video renderer
};
// WebRtcChannelManager automatically takes care of initializing the underlying
// cricket::ChannelManager and terminating it when it is no longer needed.
class WebRtcChannelManager : public cricket::ChannelManager {
public:
WebRtcChannelManager(talk_base::Thread* worker_thread)
: ChannelManager(worker_thread) {
}
WebRtcChannelManager(cricket::MediaEngine* me, cricket::DeviceManager* dm,
talk_base::Thread* worker_thread)
: ChannelManager(me, dm, worker_thread) {
}
bool Init();
cricket::VoiceChannel* CreateVoiceChannel(
cricket::BaseSession* s, const std::string& content_name, bool rtcp);
cricket::VideoChannel* CreateVideoChannel(
cricket::BaseSession* s, const std::string& content_name, bool rtcp,
cricket::VoiceChannel* vc);
cricket::Soundclip* CreateSoundclip();
void DestroyVoiceChannel(cricket::VoiceChannel* vc);
void DestroyVideoChannel(cricket::VideoChannel* vc);
void DestroySoundclip(cricket::Soundclip* s);
bool SetVideoRenderer(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom);
private:
bool MaybeInit();
void MaybeTerm();
void SetExternalAdm_w(AudioDeviceModule* external_adm);
void SetVideoRenderer_w(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom);
void OnMessage(talk_base::Message *message);
};
} // namespace webrtc
#endif /* TALK_APP_WEBRTC_WEBRTCCHANNELMANAGER_H_ */


@@ -29,20 +29,19 @@
#include <string>
#include <vector>
#include "talk/app/pc_transport_impl.h"
#include "talk/app/peerconnection.h"
#include "talk/app/webrtc_json.h"
#include "talk/base/common.h"
#include "talk/base/json.h"
#include "talk/base/scoped_ptr.h"
#include "talk/p2p/base/constants.h"
#include "talk/p2p/base/sessiondescription.h"
#include "talk/p2p/base/p2ptransport.h"
#include "talk/session/phone/mediasessionclient.h"
#include "talk/session/phone/channel.h"
#include "talk/session/phone/voicechannel.h"
#include "talk/session/phone/channelmanager.h"
#include "talk/app/webrtc_json.h"
#include "talk/app/webrtcchannelmanager.h"
#include "talk/app/peerconnection.h"
#include "talk/app/pc_transport_impl.h"
#include "talk/session/phone/mediasessionclient.h"
#include "talk/session/phone/voicechannel.h"
using namespace cricket;
@@ -55,7 +54,7 @@ enum {
MSG_RTC_SETVIDEOCAPTURE = 4,
MSG_RTC_CANDIDATETIMEOUT = 5,
MSG_RTC_SETEXTERNALRENDERER = 6,
MSG_RTC_SETRENDERER = 7,
MSG_RTC_SETCRICKETRENDERER = 7,
MSG_RTC_CHANNELENABLE = 8,
MSG_RTC_SIGNALONWRITABLESTATE = 9,
MSG_RTC_DESTROYVOICECHANNEL = 10,
@@ -107,29 +106,15 @@ struct ExternalRenderParams : public talk_base::MessageData {
bool result;
};
struct RenderParams : public talk_base::MessageData {
RenderParams(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom)
:channel_id(channel_id)
,window(window)
,zOrder(zOrder)
,left(left)
,top(top)
,right(right)
,bottom(bottom) {}
struct CricketRenderParams : public talk_base::MessageData {
CricketRenderParams(const std::string& stream_id,
cricket::VideoRenderer* renderer)
: stream_id(stream_id),
renderer(renderer),
result(false) {}
int channel_id;
void* window;
unsigned int zOrder;
float left;
float top;
float right;
float bottom;
const std::string stream_id;
cricket::VideoRenderer* renderer;
bool result;
};
@@ -159,7 +144,7 @@ WebRTCSessionImpl::WebRTCSessionImpl(
const std::string& id,
const std::string& direction,
cricket::PortAllocator* allocator,
WebRtcChannelManager* channelmgr,
cricket::ChannelManager* channelmgr,
PeerConnection* connection,
talk_base::Thread* signaling_thread)
: WebRTCSession(id, direction, allocator, connection, signaling_thread),
@@ -203,7 +188,7 @@ bool WebRTCSessionImpl::CreateVoiceChannel(const std::string& stream_id) {
this, &WebRTCSessionImpl::OnVoiceChannelCreated);
signaling_thread_->Post(this, MSG_RTC_CREATEAUDIOCHANNEL,
new CreateChannelParams(stream_id, false, NULL));
new CreateChannelParams(stream_id, true, NULL));
return true;
}
@@ -255,7 +240,7 @@ bool WebRTCSessionImpl::CreateVideoChannel(const std::string& stream_id) {
this, &WebRTCSessionImpl::OnVideoChannelCreated);
signaling_thread_->Post(this, MSG_RTC_CREATEVIDEOCHANNEL,
new CreateChannelParams(stream_id, false, NULL));
new CreateChannelParams(stream_id, true, NULL));
return true;
}
@@ -303,6 +288,33 @@ void WebRTCSessionImpl::OnVideoChannelCreated(
}
}
bool WebRTCSessionImpl::SetVideoRenderer(const std::string& stream_id,
cricket::VideoRenderer* renderer) {
if(signaling_thread_ != talk_base::Thread::Current()) {
signaling_thread_->Post(this, MSG_RTC_SETCRICKETRENDERER,
new CricketRenderParams(stream_id, renderer),
true);
return true;
}
ASSERT(signaling_thread_ == talk_base::Thread::Current());
bool ret = false;
StreamMap::iterator iter;
for (iter = streams_.begin(); iter != streams_.end(); ++iter) {
StreamInfo* stream_info = (*iter);
if (stream_info->stream_id.compare(stream_id) == 0) {
ASSERT(stream_info->channel != NULL);
ASSERT(stream_info->video);
cricket::VideoChannel* channel = static_cast<cricket::VideoChannel*>(
stream_info->channel);
ret = channel->SetRenderer(0, renderer);
break;
}
}
return ret;
}
bool WebRTCSessionImpl::SetVideoRenderer(const std::string& stream_id,
ExternalRenderer* external_renderer) {
if(signaling_thread_ != talk_base::Thread::Current()) {
@@ -330,30 +342,6 @@ bool WebRTCSessionImpl::SetVideoRenderer(const std::string& stream_id,
return ret;
}
bool WebRTCSessionImpl::SetVideoRenderer(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom) {
signaling_thread_->Post(this, MSG_RTC_SETRENDERER,
new RenderParams(channel_id, window, zOrder, left, top, right, bottom),
true);
return true;
}
bool WebRTCSessionImpl::SetVideoRenderer_w(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom) {
ASSERT(signaling_thread_ == talk_base::Thread::Current());
return channel_manager_->SetVideoRenderer(channel_id, window, zOrder, left, top, right, bottom);
}
void WebRTCSessionImpl::OnMessage(talk_base::Message* message) {
using talk_base::TypedMessageData;
talk_base::MessageData* data = message->pdata;
@@ -420,31 +408,25 @@ void WebRTCSessionImpl::OnMessage(talk_base::Message* message) {
break;
}
case MSG_RTC_SETVIDEOCAPTURE : {
CaptureParams* p = static_cast<CaptureParams*>(data);
CaptureParams* p = reinterpret_cast<CaptureParams*>(data);
p->result = SetVideoCapture_w(p->capture);
delete p;
break;
}
case MSG_RTC_SETEXTERNALRENDERER : {
ExternalRenderParams* p = static_cast<ExternalRenderParams*> (data);
ExternalRenderParams* p = reinterpret_cast<ExternalRenderParams*>(data);
p->result = SetVideoRenderer(p->stream_id, p->external_renderer);
delete p;
break;
}
case MSG_RTC_SETRENDERER : {
RenderParams* p = static_cast<RenderParams*> (data);
p->result = SetVideoRenderer_w(p->channel_id,
p->window,
p->zOrder,
p->left,
p->top,
p->right,
p->bottom);
case MSG_RTC_SETCRICKETRENDERER : {
CricketRenderParams* p = reinterpret_cast<CricketRenderParams*>(data);
p->result = SetVideoRenderer(p->stream_id, p->renderer);
delete p;
break;
}
case MSG_RTC_CHANNELENABLE : {
ChannelEnableParams* p = static_cast<ChannelEnableParams*> (data);
ChannelEnableParams* p = reinterpret_cast<ChannelEnableParams*>(data);
ChannelEnable_w(p->channel, p->enable);
delete p;
break;
@@ -707,8 +689,6 @@ void WebRTCSessionImpl::DestroyChannel(
break;
}
}
ASSERT(found);
}
void WebRTCSessionImpl::DestroyVoiceChannel_w(

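
The new SetVideoRenderer(stream_id, renderer) above follows the session's usual marshaling pattern: when called off the signaling thread it posts a MessageData carrying its arguments and re-enters itself from OnMessage. A stripped-down sketch of that pattern, under the assumption that talk_base::Thread::Post and talk_base::MessageHandler behave as in libjingle (the Worker and WorkParams names and the MSG_DO_WORK id are hypothetical):

#include "talk/base/messagehandler.h"
#include "talk/base/thread.h"

enum { MSG_DO_WORK = 1 };  // hypothetical message id

// Payload carried across threads, mirroring CricketRenderParams above.
struct WorkParams : public talk_base::MessageData {
  explicit WorkParams(int value) : value(value), result(false) {}
  int value;
  bool result;
};

class Worker : public talk_base::MessageHandler {
 public:
  explicit Worker(talk_base::Thread* signaling_thread)
      : signaling_thread_(signaling_thread) {}

  bool DoWork(int value) {
    if (talk_base::Thread::Current() != signaling_thread_) {
      // Marshal onto the signaling thread; the posted message re-enters DoWork.
      signaling_thread_->Post(this, MSG_DO_WORK, new WorkParams(value));
      return true;
    }
    // Already on the signaling thread: do the real work here.
    return value >= 0;
  }

  virtual void OnMessage(talk_base::Message* msg) {
    if (msg->message_id == MSG_DO_WORK) {
      WorkParams* p = static_cast<WorkParams*>(msg->pdata);
      p->result = DoWork(p->value);
      delete p;
    }
  }

 private:
  talk_base::Thread* signaling_thread_;
};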

@@ -79,7 +79,6 @@ typedef std::vector<cricket::VideoCodec> VideoCodecs;
class ExternalRenderer;
class PeerConnection;
class WebRtcChannelManager;
class WebRTCSessionImpl: public WebRTCSession {
@@ -88,7 +87,7 @@ class WebRTCSessionImpl: public WebRTCSession {
WebRTCSessionImpl(const std::string& id,
const std::string& direction,
cricket::PortAllocator* allocator,
WebRtcChannelManager* channelmgr,
cricket::ChannelManager* channelmgr,
PeerConnection* connection,
talk_base::Thread* signaling_thread);
@@ -130,15 +129,11 @@ class WebRTCSessionImpl: public WebRTCSession {
void OnStateChange(P2PTransportClass::State state,
cricket::TransportChannel* channel);
void OnMessageReceived(const char* data, size_t data_size);
bool SetVideoRenderer(const std::string& stream_id,
cricket::VideoRenderer* renderer);
bool SetVideoRenderer(const std::string& stream_id,
ExternalRenderer* external_renderer);
bool SetVideoRenderer(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom);
sigslot::signal2<cricket::VideoChannel*, std::string&> SignalVideoChannel;
sigslot::signal2<cricket::VoiceChannel*, std::string&> SignalVoiceChannel;
sigslot::signal1<WebRTCSessionImpl*> SignalOnRemoveStream;
@@ -155,13 +150,6 @@ class WebRTCSessionImpl: public WebRTCSession {
}
private:
bool SetVideoRenderer_w(int channel_id,
void* window,
unsigned int zOrder,
float left,
float top,
float right,
float bottom);
void ChannelEnable_w(cricket::BaseChannel* channel, bool enable);
void OnVoiceChannelError(cricket::VoiceChannel* voice_channel, uint32 ssrc,
@@ -232,7 +220,7 @@ class WebRTCSessionImpl: public WebRTCSession {
void SendLocalDescription_w();
WebRtcChannelManager* channel_manager_;
cricket::ChannelManager* channel_manager_;
std::vector<StreamInfo*> streams_;
TransportChannelMap transport_channels_;
bool all_writable_;