WebRtc_Word32 -> int32_t in video_render/

BUG=314

Review URL: https://webrtc-codereview.appspot.com/1304006

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3810 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: pbos@webrtc.org
Date: 2013-04-10 08:09:04 +00:00
Parent: b7192b8247
Commit: ddf94e71e5
39 changed files with 1273 additions and 1332 deletions
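The renames in this change follow a fixed mapping from the legacy WebRtc_* typedefs to the <stdint.h> fixed-width types: WebRtc_Word32 -> int32_t, WebRtc_Word64 -> int64_t, WebRtc_UWord8 -> uint8_t, WebRtc_UWord32 -> uint32_t, WebRtc_UWord64 -> uint64_t. A minimal sketch of why the rename is behavior-preserving, assuming the legacy names are still defined as aliases in webrtc/typedefs.h (header path assumed, not part of this change):

// Compiles only if each legacy typedef is an alias of the corresponding
// fixed-width type, i.e. the renames in the hunks below are purely textual.
#include <stdint.h>
#include <type_traits>
#include "webrtc/typedefs.h"  // assumed location of the legacy typedefs

static_assert(std::is_same<WebRtc_Word32, int32_t>::value, "Word32");
static_assert(std::is_same<WebRtc_Word64, int64_t>::value, "Word64");
static_assert(std::is_same<WebRtc_UWord8, uint8_t>::value, "UWord8");
static_assert(std::is_same<WebRtc_UWord32, uint32_t>::value, "UWord32");
static_assert(std::is_same<WebRtc_UWord64, uint64_t>::value, "UWord64");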

View File

@@ -30,7 +30,7 @@ namespace webrtc {
JavaVM* VideoRenderAndroid::g_jvm = NULL;
#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
WebRtc_Word32 SetRenderAndroidVM(void* javaVM) {
int32_t SetRenderAndroidVM(void* javaVM) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
VideoRenderAndroid::g_jvm = (JavaVM*)javaVM;
return 0;
@@ -38,7 +38,7 @@ WebRtc_Word32 SetRenderAndroidVM(void* javaVM) {
#endif
VideoRenderAndroid::VideoRenderAndroid(
const WebRtc_Word32 id,
const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool /*fullscreen*/):
@@ -71,20 +71,20 @@ VideoRenderAndroid::~VideoRenderAndroid() {
delete &_critSect;
}
WebRtc_Word32 VideoRenderAndroid::ChangeUniqueId(const WebRtc_Word32 id) {
int32_t VideoRenderAndroid::ChangeUniqueId(const int32_t id) {
CriticalSectionScoped cs(&_critSect);
_id = id;
return 0;
}
WebRtc_Word32 VideoRenderAndroid::ChangeWindow(void* /*window*/) {
int32_t VideoRenderAndroid::ChangeWindow(void* /*window*/) {
return -1;
}
VideoRenderCallback*
VideoRenderAndroid::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
VideoRenderAndroid::AddIncomingRenderStream(const uint32_t streamId,
const uint32_t zOrder,
const float left, const float top,
const float right,
const float bottom) {
@@ -114,8 +114,8 @@ VideoRenderAndroid::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
return renderStream;
}
WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream(
const WebRtc_UWord32 streamId) {
int32_t VideoRenderAndroid::DeleteIncomingRenderStream(
const uint32_t streamId) {
CriticalSectionScoped cs(&_critSect);
MapItem* item = _streamsMap.Find(streamId);
@@ -131,9 +131,9 @@ WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream(
return 0;
}
WebRtc_Word32 VideoRenderAndroid::GetIncomingRenderStreamProperties(
const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
int32_t VideoRenderAndroid::GetIncomingRenderStreamProperties(
const uint32_t streamId,
uint32_t& zOrder,
float& left,
float& top,
float& right,
@@ -141,7 +141,7 @@ WebRtc_Word32 VideoRenderAndroid::GetIncomingRenderStreamProperties(
return -1;
}
WebRtc_Word32 VideoRenderAndroid::StartRender() {
int32_t VideoRenderAndroid::StartRender() {
CriticalSectionScoped cs(&_critSect);
if (_javaRenderThread) {
@@ -174,7 +174,7 @@ WebRtc_Word32 VideoRenderAndroid::StartRender() {
return 0;
}
WebRtc_Word32 VideoRenderAndroid::StopRender() {
int32_t VideoRenderAndroid::StopRender() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
{
CriticalSectionScoped cs(&_critSect);
@@ -268,31 +268,31 @@ bool VideoRenderAndroid::FullScreen() {
return false;
}
WebRtc_Word32 VideoRenderAndroid::GetGraphicsMemory(
WebRtc_UWord64& /*totalGraphicsMemory*/,
WebRtc_UWord64& /*availableGraphicsMemory*/) const {
int32_t VideoRenderAndroid::GetGraphicsMemory(
uint64_t& /*totalGraphicsMemory*/,
uint64_t& /*availableGraphicsMemory*/) const {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::GetScreenResolution(
WebRtc_UWord32& /*screenWidth*/,
WebRtc_UWord32& /*screenHeight*/) const {
int32_t VideoRenderAndroid::GetScreenResolution(
uint32_t& /*screenWidth*/,
uint32_t& /*screenHeight*/) const {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_UWord32 VideoRenderAndroid::RenderFrameRate(
const WebRtc_UWord32 /*streamId*/) {
uint32_t VideoRenderAndroid::RenderFrameRate(
const uint32_t /*streamId*/) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetStreamCropping(
const WebRtc_UWord32 /*streamId*/,
int32_t VideoRenderAndroid::SetStreamCropping(
const uint32_t /*streamId*/,
const float /*left*/,
const float /*top*/,
const float /*right*/,
@@ -302,14 +302,14 @@ WebRtc_Word32 VideoRenderAndroid::SetStreamCropping(
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetTransparentBackground(const bool enable) {
int32_t VideoRenderAndroid::SetTransparentBackground(const bool enable) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::ConfigureRenderer(
const WebRtc_UWord32 streamId,
int32_t VideoRenderAndroid::ConfigureRenderer(
const uint32_t streamId,
const unsigned int zOrder,
const float left,
const float top,
@@ -320,12 +320,12 @@ WebRtc_Word32 VideoRenderAndroid::ConfigureRenderer(
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetText(
const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
int32_t VideoRenderAndroid::SetText(
const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float rigth, const float bottom) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
@@ -333,12 +333,12 @@ WebRtc_Word32 VideoRenderAndroid::SetText(
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey,
const float left, const float top,
const float right,
const float bottom) {
int32_t VideoRenderAndroid::SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey,
const float left, const float top,
const float right,
const float bottom) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;

View File

@@ -38,37 +38,37 @@ class AndroidStream : public VideoRenderCallback {
class VideoRenderAndroid: IVideoRender {
public:
VideoRenderAndroid(const WebRtc_Word32 id,
VideoRenderAndroid(const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
virtual ~VideoRenderAndroid();
virtual WebRtc_Word32 Init()=0;
virtual int32_t Init()=0;
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual int32_t ChangeUniqueId(const int32_t id);
virtual WebRtc_Word32 ChangeWindow(void* window);
virtual int32_t ChangeWindow(void* window);
virtual VideoRenderCallback* AddIncomingRenderStream(
const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const uint32_t streamId,
const uint32_t zOrder,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 DeleteIncomingRenderStream(
const WebRtc_UWord32 streamId);
virtual int32_t DeleteIncomingRenderStream(
const uint32_t streamId);
virtual WebRtc_Word32 GetIncomingRenderStreamProperties(
const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
virtual int32_t GetIncomingRenderStreamProperties(
const uint32_t streamId,
uint32_t& zOrder,
float& left, float& top,
float& right, float& bottom) const;
virtual WebRtc_Word32 StartRender();
virtual int32_t StartRender();
virtual WebRtc_Word32 StopRender();
virtual int32_t StopRender();
virtual void ReDraw();
@@ -80,55 +80,53 @@ class VideoRenderAndroid: IVideoRender {
virtual bool FullScreen();
virtual WebRtc_Word32 GetGraphicsMemory(
WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const;
virtual int32_t GetGraphicsMemory(
uint64_t& totalGraphicsMemory,
uint64_t& availableGraphicsMemory) const;
virtual WebRtc_Word32 GetScreenResolution(
WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const;
virtual int32_t GetScreenResolution(
uint32_t& screenWidth,
uint32_t& screenHeight) const;
virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
virtual uint32_t RenderFrameRate(const uint32_t streamId);
virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
const float left, const float top,
const float right,
const float bottom);
virtual int32_t SetStreamCropping(const uint32_t streamId,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual int32_t SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left, const float top,
const float right,
const float bottom);
virtual int32_t ConfigureRenderer(const uint32_t streamId,
const unsigned int zOrder,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float rigth, const float bottom);
virtual int32_t SetText(const uint8_t textId,
const uint8_t* text,
const int32_t textLength,
const uint32_t textColorRef,
const uint32_t backgroundColorRef,
const float left, const float top,
const float rigth, const float bottom);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
virtual int32_t SetBitmap(const void* bitMap,
const uint8_t pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
static JavaVM* g_jvm;
protected:
virtual AndroidStream* CreateAndroidRenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer) = 0;
WebRtc_Word32 _id;
int32_t _id;
CriticalSectionWrapper& _critSect;
VideoRenderType _renderType;
jobject _ptrWindow;
@@ -143,7 +141,7 @@ class VideoRenderAndroid: IVideoRender {
bool _javaShutDownFlag;
EventWrapper& _javaShutdownEvent;
EventWrapper& _javaRenderEvent;
WebRtc_Word64 _lastJavaRenderEvent;
int64_t _lastJavaRenderEvent;
JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
ThreadWrapper* _javaRenderThread;
};
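Because the old names alias the same underlying types, code built against these interfaces keeps the same effective signatures after the rename. A minimal sketch of a VideoRenderCallback implementation written with the updated spelling (the class, its members, and the include path are illustrative, not part of this change):

// Hypothetical frame sink implementing the same interface as the module's
// own channels (e.g. AndroidSurfaceViewChannel in the hunks above).
#include <stdint.h>
#include "webrtc/modules/video_render/include/video_render_defines.h"  // assumed path

class LoggingRenderCallback : public webrtc::VideoRenderCallback {
 public:
  // Same signature whether spelled with uint32_t or the old WebRtc_UWord32.
  virtual int32_t RenderFrame(const uint32_t streamId,
                              webrtc::I420VideoFrame& videoFrame) {
    lastStreamId_ = streamId;  // record which stream delivered a frame
    return 0;                  // 0 on success, matching the module convention
  }

 private:
  uint32_t lastStreamId_ = 0;
};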

View File

@@ -25,7 +25,7 @@
namespace webrtc {
AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
const WebRtc_Word32 id,
const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
@@ -129,7 +129,7 @@ AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() {
}
}
WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init() {
int32_t AndroidNativeOpenGl2Renderer::Init() {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
if (!g_jvm) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
@@ -211,8 +211,8 @@ WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init() {
}
AndroidStream*
AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,
@@ -232,7 +232,7 @@ AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
}
AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
WebRtc_UWord32 streamId,
uint32_t streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,jobject javaRenderObj):
_id(streamId),
@@ -279,11 +279,11 @@ AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
}
}
WebRtc_Word32 AndroidNativeOpenGl2Channel::Init(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: AndroidNativeOpenGl2Channel", __FUNCTION__);
@@ -380,8 +380,8 @@ WebRtc_Word32 AndroidNativeOpenGl2Channel::Init(WebRtc_Word32 zOrder,
return 0;
}
WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame(
const WebRtc_UWord32 /*streamId*/,
int32_t AndroidNativeOpenGl2Channel::RenderFrame(
const uint32_t /*streamId*/,
I420VideoFrame& videoFrame) {
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();

View File

@@ -24,20 +24,17 @@ class CriticalSectionWrapper;
class AndroidNativeOpenGl2Channel: public AndroidStream {
public:
AndroidNativeOpenGl2Channel(
WebRtc_UWord32 streamId,
uint32_t streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,jobject javaRenderObj);
~AndroidNativeOpenGl2Channel();
WebRtc_Word32 Init(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
int32_t Init(int32_t zOrder, const float left, const float top,
const float right, const float bottom);
//Implement VideoRenderCallback
virtual WebRtc_Word32 RenderFrame(
const WebRtc_UWord32 streamId,
virtual int32_t RenderFrame(
const uint32_t streamId,
I420VideoFrame& videoFrame);
//Implements AndroidStream
@@ -54,7 +51,7 @@ class AndroidNativeOpenGl2Channel: public AndroidStream {
static void DrawNativeStatic(JNIEnv * env,jobject, jlong context);
void DrawNative();
WebRtc_UWord32 _id;
uint32_t _id;
CriticalSectionWrapper& _renderCritSect;
I420VideoFrame _bufferToRender;
@@ -71,7 +68,7 @@ class AndroidNativeOpenGl2Channel: public AndroidStream {
class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid {
public:
AndroidNativeOpenGl2Renderer(const WebRtc_Word32 id,
AndroidNativeOpenGl2Renderer(const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
@@ -79,10 +76,10 @@ class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid {
~AndroidNativeOpenGl2Renderer();
static bool UseOpenGL2(void* window);
WebRtc_Word32 Init();
int32_t Init();
virtual AndroidStream* CreateAndroidRenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,

View File

@@ -26,7 +26,7 @@
namespace webrtc {
AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(
const WebRtc_Word32 id,
const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
@@ -77,7 +77,7 @@ AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() {
}
}
WebRtc_Word32 AndroidSurfaceViewRenderer::Init() {
int32_t AndroidSurfaceViewRenderer::Init() {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
if (!g_jvm) {
WEBRTC_TRACE(kTraceError,
@@ -200,8 +200,8 @@ WebRtc_Word32 AndroidSurfaceViewRenderer::Init() {
AndroidStream*
AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,
@@ -223,7 +223,7 @@ AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(
}
AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(
WebRtc_UWord32 streamId,
uint32_t streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,
jobject javaRenderObj) :
@@ -284,8 +284,8 @@ AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() {
}
}
WebRtc_Word32 AndroidSurfaceViewChannel::Init(
WebRtc_Word32 /*zOrder*/,
int32_t AndroidSurfaceViewChannel::Init(
int32_t /*zOrder*/,
const float left,
const float top,
const float right,
@@ -410,8 +410,8 @@ WebRtc_Word32 AndroidSurfaceViewChannel::Init(
}
WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame(
const WebRtc_UWord32 /*streamId*/,
int32_t AndroidSurfaceViewChannel::RenderFrame(
const uint32_t /*streamId*/,
I420VideoFrame& videoFrame) {
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();

View File

@@ -22,27 +22,24 @@ class CriticalSectionWrapper;
class AndroidSurfaceViewChannel : public AndroidStream {
public:
AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,
AndroidSurfaceViewChannel(uint32_t streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,
jobject javaRenderObj);
~AndroidSurfaceViewChannel();
WebRtc_Word32 Init(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
int32_t Init(int32_t zOrder, const float left, const float top,
const float right, const float bottom);
//Implement VideoRenderCallback
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
I420VideoFrame& videoFrame);
virtual int32_t RenderFrame(const uint32_t streamId,
I420VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);
private:
WebRtc_UWord32 _id;
uint32_t _id;
CriticalSectionWrapper& _renderCritSect;
I420VideoFrame _bufferToRender;
@@ -62,15 +59,15 @@ class AndroidSurfaceViewChannel : public AndroidStream {
class AndroidSurfaceViewRenderer : private VideoRenderAndroid {
public:
AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
AndroidSurfaceViewRenderer(const int32_t id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
~AndroidSurfaceViewRenderer();
WebRtc_Word32 Init();
int32_t Init();
virtual AndroidStream* CreateAndroidRenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
int32_t streamId,
int32_t zOrder,
const float left,
const float top,
const float right,

View File

@@ -68,7 +68,7 @@ const char VideoRenderOpenGles20::g_fragmentShader[] = {
" gl_FragColor=vec4(r,g,b,1.0);\n"
"}\n" };
VideoRenderOpenGles20::VideoRenderOpenGles20(WebRtc_Word32 id) :
VideoRenderOpenGles20::VideoRenderOpenGles20(int32_t id) :
_id(id),
_textureWidth(-1),
_textureHeight(-1) {
@@ -88,8 +88,7 @@ VideoRenderOpenGles20::VideoRenderOpenGles20(WebRtc_Word32 id) :
VideoRenderOpenGles20::~VideoRenderOpenGles20() {
}
WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
WebRtc_Word32 height) {
int32_t VideoRenderOpenGles20::Setup(int32_t width, int32_t height) {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: width %d, height %d", __FUNCTION__, (int) width,
(int) height);
@@ -174,11 +173,11 @@ WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
// SetCoordinates
// Sets the coordinates where the stream shall be rendered.
// Values must be between 0 and 1.
WebRtc_Word32 VideoRenderOpenGles20::SetCoordinates(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom) {
int32_t VideoRenderOpenGles20::SetCoordinates(int32_t zOrder,
const float left,
const float top,
const float right,
const float bottom) {
if ((top > 1 || top < 0) || (right > 1 || right < 0) ||
(bottom > 1 || bottom < 0) || (left > 1 || left < 0)) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
@@ -215,8 +214,7 @@ WebRtc_Word32 VideoRenderOpenGles20::SetCoordinates(WebRtc_Word32 zOrder,
return 0;
}
WebRtc_Word32 VideoRenderOpenGles20::Render(const I420VideoFrame&
frameToRender) {
int32_t VideoRenderOpenGles20::Render(const I420VideoFrame& frameToRender) {
if (frameToRender.IsZeroSize()) {
return -1;
@@ -361,7 +359,7 @@ void VideoRenderOpenGles20::SetupTextures(const I420VideoFrame& frameToRender) {
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
const WebRtc_UWord8* uComponent = frameToRender.buffer(kUPlane);
const uint8_t* uComponent = frameToRender.buffer(kUPlane);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent);
@@ -374,7 +372,7 @@ void VideoRenderOpenGles20::SetupTextures(const I420VideoFrame& frameToRender) {
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
const WebRtc_UWord8* vComponent = frameToRender.buffer(kVPlane);
const uint8_t* vComponent = frameToRender.buffer(kVPlane);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent);
checkGlError("SetupTextures");

View File

@@ -21,16 +21,13 @@ namespace webrtc
class VideoRenderOpenGles20 {
public:
VideoRenderOpenGles20(WebRtc_Word32 id);
VideoRenderOpenGles20(int32_t id);
~VideoRenderOpenGles20();
WebRtc_Word32 Setup(WebRtc_Word32 widht, WebRtc_Word32 height);
WebRtc_Word32 Render(const I420VideoFrame& frameToRender);
WebRtc_Word32 SetCoordinates(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
int32_t Setup(int32_t widht, int32_t height);
int32_t Render(const I420VideoFrame& frameToRender);
int32_t SetCoordinates(int32_t zOrder, const float left, const float top,
const float right, const float bottom);
private:
void printGLString(const char *name, GLenum s);
@@ -41,7 +38,7 @@ class VideoRenderOpenGles20 {
void SetupTextures(const I420VideoFrame& frameToRender);
void UpdateTextures(const I420VideoFrame& frameToRender);
WebRtc_Word32 _id;
int32_t _id;
GLuint _textureIds[3]; // Texture id of Y,U and V texture.
GLuint _program;
GLuint _vPositionHandle;