#include "videocapture_test.h"
#include "ui_videocapture_test.h"
#include <QDebug>
#include <QMessageBox>
#include "interface.h"
#define CLEAR(x) memset(&(x), 0, sizeof(x))
QString dev_name = "/dev/video0";
// Construct the capture widget: open and configure the V4L2 device,
// start streaming, and wire up the refresh timer and error reporting.
VideoCapture_Test::VideoCapture_Test(QWidget *parent) :
QWidget(parent),
ui(new Ui::VideoCapture_Test)
{
    ui->setupUi(this);
    open_flag = 0;
    // Start from a known state: p/len are filled in by get_frame() later.
    p = NULL;
    len = 0;
    init_Video();
    start_capturing();
    // BUG FIX: the original did `new QImage(p, 640, 480, Format_RGB888)`
    // with `p` uninitialized (its malloc at the old line above is commented
    // out) — undefined behavior if the image is ever painted before the
    // first captured frame. VideoUpdate() replaces the image contents via
    // loadFromData() anyway, so an owned blank image is the safe equivalent.
    frame = new QImage(640, 480, QImage::Format_RGB888);
    frame->fill(0);
    ui->VideoLab->setStyleSheet("background-color: rgb(0, 0, 0);");
    timer1 = new QTimer(this);
    // Timer drives the frame refresh; it is started elsewhere (e.g. when
    // capture is opened) — NOTE(review): confirm timer1->start() is called.
    connect(timer1, SIGNAL(timeout()), this, SLOT(VideoUpdate()));
    connect(this, SIGNAL(display_error(QString)), this, SLOT(error_display(QString)));
}
// Timer slot: dequeue one captured frame, decode it into `frame`, and
// paint it on the video label. Re-queues the driver buffer when done.
void VideoCapture_Test::VideoUpdate()
{
    // BUG FIX: the original ignored get_frame()'s return value and fed a
    // possibly stale/NULL `p` to loadFromData on DQBUF failure.
    if (-1 == get_frame((void **)&p, &len))
        return;  // EAGAIN/EIO/DQBUF error: skip this tick, try again later
    // BUG FIX: pass the real byte count instead of the hard-coded
    // 640*480*2 — MJPEG frames are compressed and vary in size, so a fixed
    // length reads past the valid data into stale buffer bytes.
    frame->loadFromData((uchar *)p, (int)len);
    ui->VideoLab->setPixmap(QPixmap::fromImage(*frame, Qt::AutoColor));
    ui->VideoLab->update();
    unget_frame();  // hand the buffer back to the driver's queue
}
// Open and configure the V4L2 capture device named by the global
// `dev_name`: verify capture + streaming capability, reset cropping,
// negotiate 640x480 MJPEG, and set up memory-mapped buffers.
// Returns 0 on success, -1 on failure (an error is emitted via
// display_error in every failure path).
int VideoCapture_Test::init_Video()
{
    v4l2_capability cap;
    v4l2_cropcap cropcap;
    v4l2_crop crop;
    v4l2_format fmt;

    fd = open(dev_name.toStdString().c_str(), O_RDWR/*|O_NONBLOCK*/, 0);
    if (-1 == fd)
    {
        emit display_error(tr("open: %1").arg(QString(strerror(errno))));
        return -1;
    }

    // Query device capabilities.
    if (-1 == ioctl(fd, VIDIOC_QUERYCAP, &cap))
    {
        if (EINVAL == errno)
        {
            emit display_error(tr("%1 is no V4l2 device").arg(dev_name));
        }
        else
        {
            emit display_error(tr("VIDIOC_QUERYCAP: %1").arg(QString(strerror(errno))));
        }
        return -1;
    }
    // Must be a video capture device...
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        emit display_error(tr("%1 is no video capture device").arg(dev_name));
        return -1;
    }
    // ...that supports streaming (mmap) i/o.
    if (!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        emit display_error(tr("%1 does not support streaming i/o").arg(dev_name));
        return -1;
    }

    // Reset the crop rectangle to the driver default.
    // BUG FIX: the original returned -1 when VIDIOC_CROPCAP failed (or when
    // VIDIOC_S_CROP failed with errno != EINVAL), which aborted init on the
    // many webcam drivers that simply do not implement cropping. The
    // canonical V4L2 capture example ignores errors here, so we do too.
    CLEAR(cropcap);
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (0 == ioctl(fd, VIDIOC_CROPCAP, &cropcap))
    {
        CLEAR(crop);
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect;  // default capture rectangle
        if (-1 == ioctl(fd, VIDIOC_S_CROP, &crop))
        {
            // Cropping rejected or unsupported — not fatal, keep defaults.
        }
    }
    // else: CROPCAP itself unsupported — ignore and continue.

    // Negotiate the capture format: 640x480, compressed MJPEG, interlaced.
    CLEAR(fmt);
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = 640;
    fmt.fmt.pix.height = 480;
    // fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;  // YUV 4:2:2 alternative
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_MJPEG;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;  // interlaced scan
    if (-1 == ioctl(fd, VIDIOC_S_FMT, &fmt))
    {
        // BUG FIX: the original format string "VIDIOC_S_FMT" had no %1
        // placeholder, so the strerror() text passed to .arg() was dropped.
        emit display_error(tr("VIDIOC_S_FMT: %1").arg(QString(strerror(errno))));
        return -1;
    }

    // Map the driver's capture buffers into our address space.
    if (-1 == init_mmap())
    {
        return -1;
    }
    return 0;
}
//初始化mmap
int VideoCapture_Test::init_mmap()
{
v4l2_requestbuffers req;
CLEAR(req);
req.count = 4;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if(-1 == ioctl(fd, VIDIOC_REQBUFS, &req))
{ //请求buf
if(EINVAL == errno)
{
emit display_error(tr("%1 does not support memory mapping").arg(dev_name));
return -1;
}
else
{
emit display_error(tr("VIDIOC_REQBUFS %1").arg(QString(strerror(errno))));
return -1;
}
}
if(req.count < 2)
{
emit display_error(tr("Insufficient buffer memory on %1").arg(dev_name));
return -1;
}
buffers = (buffer*)calloc(req.count, sizeof(*buffers));//分配内存大小
if(!buffers)
{
emit display_error(tr("out of memory"));
return -1;
}
for(n_buffers = 0; n_buffers < req.count; ++n_buffers)
{
v4l2_buffer buf;
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
if(-1 == ioctl(fd, VIDIOC_QUERYBUF, &buf))
{
//获取buf信息起始位置,长度等
emit display_error(tr("VIDIOC_QUERYBUF: %1").arg(QString(strerror(errno))));
return -1;
}
buffers[n_buffers].length = buf.length;
buffers[n_buffers].start =
mmap(NULL, // start anywhere
buf.length,
PROT_READ | PROT_WRITE,
MAP_SHARED,
fd, buf.m.offset);//映射
if(MAP_FAILED == buffers[n_buffers].start)
{
emit display_error(tr("mmap %1").arg(QString(strerror(errno))));
return -1;
}
}
qDebug()<<"n_buffers is "<<n_buffers;
return 0;
}
//开始捕获视频
int VideoCapture_Test::start_capturing()
{
unsigned int i;
for(i = 0; i < n_buffers; ++i)
{
v4l2_buffer buf;
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory =V4L2_MEMORY_MMAP;
buf.index = i;
// fprintf(stderr, "n_buffers: %d\n", i);
if(-1 == ioctl(fd, VIDIOC_QBUF, &buf))
{ //把buf排成一列
emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno))));
return -1;
}
}
v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(-1 == ioctl(fd, VIDIOC_STREAMON, &type))
{
emit display_error(tr("VIDIOC_STREAMON: %1").arg(QString(strerror(errno))));
return -1;
}
return 0;
}
//获取一帧图像
int VideoCapture_Test::get_frame(void **frame_buf, size_t* len)
{
v4l2_buffer queue_buf;
CLEAR(queue_buf);
queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
queue_buf.memory = V4L2_MEMORY_MMAP;
if(-1 == ioctl(fd, VIDIOC_DQBUF, &queue_buf))
{ //从队列中取出一个buf
switch(errno)
{
case EAGAIN:
// perror("dqbuf");
return -1;
case EIO:
return -1 ;
default:
emit display_error(tr("VIDIOC_DQBUF: %1").arg(QString(strerror(errno))));
return -1;
}
}
*frame_b