diff --git a/src/modules/video_capture/main/source/Linux/device_info_linux.cc b/src/modules/video_capture/main/source/Linux/device_info_linux.cc
index 842e51f103..c67885ffe1 100644
--- a/src/modules/video_capture/main/source/Linux/device_info_linux.cc
+++ b/src/modules/video_capture/main/source/Linux/device_info_linux.cc
@@ -260,8 +260,11 @@ WebRtc_Word32 DeviceInfoLinux::FillCapabilityMap(int fd)
     video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
     video_fmt.fmt.pix.sizeimage = 0;
 
-    int totalFmts = 2;
-    unsigned int videoFormats[] = { V4L2_PIX_FMT_YUV420, V4L2_PIX_FMT_YUYV };
+    int totalFmts = 3;
+    unsigned int videoFormats[] = {
+        V4L2_PIX_FMT_MJPEG,
+        V4L2_PIX_FMT_YUV420,
+        V4L2_PIX_FMT_YUYV };
 
     int sizes = 13;
     unsigned int size[][2] = { { 128, 96 }, { 160, 120 }, { 176, 144 },
@@ -292,10 +295,14 @@ WebRtc_Word32 DeviceInfoLinux::FillCapabilityMap(int fd)
                     {
                         cap->rawType = kVideoYUY2;
                     }
+                    else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG)
+                    {
+                        cap->rawType = kVideoMJPEG;
+                    }
 
                     // get fps of current camera mode
                     // V4l2 does not have a stable method of knowing so we just guess.
-                    if(cap->width>=800)
+                    if(cap->width >= 800 && cap->rawType != kVideoMJPEG)
                     {
                         cap->maxFPS = 15;
                     }
diff --git a/src/modules/video_capture/main/source/Linux/video_capture_linux.cc b/src/modules/video_capture/main/source/Linux/video_capture_linux.cc
index 198f3f0820..8468d49c92 100644
--- a/src/modules/video_capture/main/source/Linux/video_capture_linux.cc
+++ b/src/modules/video_capture/main/source/Linux/video_capture_linux.cc
@@ -27,6 +27,8 @@
 #include "critical_section_wrapper.h"
 #include "video_capture_linux.h"
 
+// #define WEBRTC_MJPEG
+
 namespace webrtc
 {
 namespace videocapturemodule
@@ -150,8 +152,25 @@ WebRtc_Word32 VideoCaptureModuleV4L2::StartCapture(
         return -1;
    }
 
-    int nFormats = 2;
-    unsigned int fmts[2] = { V4L2_PIX_FMT_YUV420, V4L2_PIX_FMT_YUYV };
+    // Supported video formats in preferred order.
+#ifndef WEBRTC_MJPEG
+    const int nFormats = 2;
+    unsigned int fmts[nFormats] = { V4L2_PIX_FMT_YUV420, V4L2_PIX_FMT_YUYV };
+#else
+    // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
+    // I420 otherwise.
+    const int nFormats = 3;
+    unsigned int fmts[nFormats];
+    if (capability.width > 640 || capability.height > 480) {
+        fmts[0] = V4L2_PIX_FMT_MJPEG;
+        fmts[1] = V4L2_PIX_FMT_YUV420;
+        fmts[2] = V4L2_PIX_FMT_YUYV;
+    } else {
+        fmts[0] = V4L2_PIX_FMT_YUV420;
+        fmts[1] = V4L2_PIX_FMT_YUYV;
+        fmts[2] = V4L2_PIX_FMT_MJPEG;
+    }
+#endif
 
     struct v4l2_format video_fmt;
     memset(&video_fmt, 0, sizeof(struct v4l2_format));
@@ -180,10 +199,13 @@ WebRtc_Word32 VideoCaptureModuleV4L2::StartCapture(
                    "no supporting video formats found");
         return -1;
     }
+
     if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
         _captureVideoType = kVideoYUY2;
-    else
+    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
         _captureVideoType = kVideoI420;
+    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG)
+        _captureVideoType = kVideoMJPEG;
 
     //set format and frame size now
     if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0)
@@ -197,10 +219,11 @@ WebRtc_Word32 VideoCaptureModuleV4L2::StartCapture(
     _currentWidth = video_fmt.fmt.pix.width;
     _currentHeight = video_fmt.fmt.pix.height;
     _captureDelay = 120;
-    if(_currentWidth >= 800)
+    // No way of knowing frame rate, make a guess.
+    if(_currentWidth >= 800 && _captureVideoType != kVideoMJPEG)
         _currentFrameRate = 15;
     else
-        _currentFrameRate = 30; // No way of knowing on Linux.
+        _currentFrameRate = 30;
 
     if (!AllocateVideoBuffers())
     {
diff --git a/src/modules/video_capture/main/source/video_capture_impl.cc b/src/modules/video_capture/main/source/video_capture_impl.cc
index 6574d3ead7..59b0dac470 100644
--- a/src/modules/video_capture/main/source/video_capture_impl.cc
+++ b/src/modules/video_capture/main/source/video_capture_impl.cc
@@ -247,10 +247,11 @@ WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame,
     return 0;
 }
 
-WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
-                                              WebRtc_Word32 videoFrameLength,
-                                              const VideoCaptureCapability& frameInfo,
-                                              WebRtc_Word64 captureTime/*=0*/)
+WebRtc_Word32 VideoCaptureImpl::IncomingFrame(
+    WebRtc_UWord8* videoFrame,
+    WebRtc_Word32 videoFrameLength,
+    const VideoCaptureCapability& frameInfo,
+    WebRtc_Word64 captureTime/*=0*/)
 {
     WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideoCapture, _id,
                "IncomingFrame width %d, height %d", (int) frameInfo.width,
@@ -263,19 +264,21 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
     const WebRtc_Word32 width = frameInfo.width;
     const WebRtc_Word32 height = frameInfo.height;
 
-    if (frameInfo.codecType == kVideoCodecUnknown) // None encoded. Convert to I420.
+    if (frameInfo.codecType == kVideoCodecUnknown)
     {
+        // Not encoded, convert to I420.
         const VideoType commonVideoType =
-            RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
-        int size = CalcBufferSize(commonVideoType, width, height);
-        if (size != videoFrameLength)
+            RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
+
+        if (frameInfo.rawType != kVideoMJPEG &&
+            CalcBufferSize(commonVideoType, width, height) != videoFrameLength)
         {
             WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                       "Wrong incoming frame length.");
+                         "Wrong incoming frame length.");
             return -1;
         }
 
-        // Allocate I420 buffer
+        // Allocate I420 buffer.
         int requiredLength = CalcBufferSize(kI420, width, height);
         _captureFrame.VerifyAndAllocate(requiredLength);
         if (!_captureFrame.Buffer())
@@ -286,7 +289,8 @@ WebRtc_Word32 VideoCaptureImpl::IncomingFrame(WebRtc_UWord8* videoFrame,
         }
         memset(_captureFrame.Buffer(), 0, _captureFrame.Size());
 
-        int dstStride = width; // Keeping stride = width for I420 destination.
+        // Keeping stride = width for I420 destination.
+        int dstStride = width;
         const int conversionResult = ConvertToI420(commonVideoType,
                                                    videoFrame,
                                                    0, 0,  // No cropping
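For reviewers, here is a minimal standalone sketch of the V4L2 format negotiation that the patched StartCapture() performs: build a preference-ordered pixel-format list (MJPEG first when the requested size is larger than VGA, raw formats first otherwise), probe each entry, and commit the first one the driver accepts. The device path, the fixed 1280x720 request, and the use of VIDIOC_TRY_FMT for probing are illustrative assumptions; the real code works on the already-opened _deviceFd, and only the lines in the hunks above are part of this change.

#include <fcntl.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>
#include <cstdio>
#include <cstring>

int main() {
    // Hypothetical device node; WebRTC opens the device during enumeration.
    int fd = open("/dev/video0", O_RDWR);
    if (fd < 0) {
        perror("open");
        return -1;
    }

    // Requested capture size; anything above VGA makes the patched code
    // put MJPEG first in the preference list.
    const unsigned int width = 1280;
    const unsigned int height = 720;

    unsigned int fmts[3];
    if (width > 640 || height > 480) {
        fmts[0] = V4L2_PIX_FMT_MJPEG;
        fmts[1] = V4L2_PIX_FMT_YUV420;
        fmts[2] = V4L2_PIX_FMT_YUYV;
    } else {
        fmts[0] = V4L2_PIX_FMT_YUV420;
        fmts[1] = V4L2_PIX_FMT_YUYV;
        fmts[2] = V4L2_PIX_FMT_MJPEG;
    }

    struct v4l2_format video_fmt;
    memset(&video_fmt, 0, sizeof(video_fmt));
    video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    video_fmt.fmt.pix.width = width;
    video_fmt.fmt.pix.height = height;

    // Walk the list and take the first pixel format the driver accepts.
    bool found = false;
    for (int i = 0; i < 3; ++i) {
        video_fmt.fmt.pix.pixelformat = fmts[i];
        if (ioctl(fd, VIDIOC_TRY_FMT, &video_fmt) == 0) {
            found = true;
            break;
        }
    }
    if (!found) {
        fprintf(stderr, "no supported pixel format found\n");
        close(fd);
        return -1;
    }

    // Commit the negotiated format; the driver may still adjust the size.
    if (ioctl(fd, VIDIOC_S_FMT, &video_fmt) < 0) {
        perror("VIDIOC_S_FMT");
        close(fd);
        return -1;
    }

    printf("negotiated %ux%u, fourcc 0x%08x\n",
           video_fmt.fmt.pix.width, video_fmt.fmt.pix.height,
           video_fmt.fmt.pix.pixelformat);
    close(fd);
    return 0;
}

The same distinction explains the IncomingFrame() change: for raw formats the frame length is fully determined by width, height and pixel format, so it can be validated against CalcBufferSize(), whereas an MJPEG frame is compressed and its size varies from frame to frame, which is why the patch skips the length check for kVideoMJPEG before handing the buffer to ConvertToI420().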