A Hands-On Mini Project: Embedded Linux Image Capture and Transmission
Project overview
This hands-on project is built around the embedded Linux V4L2 capture framework and breaks down into the following parts (a sketch of the main loop that ties them together follows the list):
- V4L2 video capture
- IPU colour-space conversion (transcoding)
- framebuffer display
- transmission over a simple custom UDP protocol
- host-side (PC) software written in Qt
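Before walking through each module, here is a minimal sketch of how the pieces fit together. Every function name comes from the modules below; main() itself, the frame buffer, the loop bound, and the 640x480 RGB24 sizing are my assumptions rather than code from the original post:

/* Sketch of the whole pipeline; function names are from the modules below,
 * everything else (main, sizes, loop bound) is assumed. */
#include "includes.h"

int main(void)
{
	static unsigned char rgbFrame[640 * 480 * 3];   /* one RGB24 frame, 921600 bytes */
	void *yuvFrame = NULL;                          /* points into a mmap'ed V4L2 buffer */
	size_t yuvLen = 0;
	int i;

	initVideo();             /* V4L2: open, configure 640x480 YUYV, mmap buffers */
	initIPU();               /* IPU: allocate and map hardware conversion buffers */
	InitDisOnFrameBuffer();  /* framebuffer: open the fb device and mmap it */
	initUDPTrans();          /* UDP: create the socket, fill in the server address */

	startCapture();
	for (i = 0; i < 1000; i++) {                     /* arbitrary bound for the sketch */
		if (getFrame(&yuvFrame, &yuvLen) < 0)    /* dequeue a filled YUYV buffer */
			continue;
		IPUConvent(yuvFrame, rgbFrame);          /* hardware YUYV -> RGB */
		backFrame();                             /* hand the buffer back to the driver */
		DisOnFrameBuffer(rgbFrame);              /* local preview */
		sendUDPdata(rgbFrame, sizeof(rgbFrame)); /* stream to the Qt host */
	}
	stopCapture();

	freeBuffers();
	closeVideo();
	closeIPU();
	CloseDisOnFrameBuffer();
	closeUDPtrans();
	return 0;
}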
First, the capture part.
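Each module begins with #include "includes.h", a shared project header the post never shows (the real one lives in the GitHub repo linked at the end). A minimal reconstruction of what it must provide — all macro values are my assumptions except BLOCKSIZE, which the UDP code pins at 512 bytes:

/* includes.h -- hypothetical reconstruction; values marked "assumed" are guesses. */
#ifndef INCLUDES_H
#define INCLUDES_H

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <strings.h>
#include <errno.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <sys/socket.h>
#include <arpa/inet.h>
#include <netinet/in.h>
#include <linux/videodev2.h>
#include <linux/fb.h>
#include <linux/ipu.h>               /* i.MX IPU user API: struct ipu_task, IPU_ALLOC, ... */

#define CLEAR(x)    memset(&(x), 0, sizeof(x))

#define USB_VIDEO   "/dev/video0"    /* assumed camera node */
#define IPUDEV      "/dev/mxc_ipu"   /* assumed i.MX IPU node */
#define DISON_FB0   "/dev/fb0"       /* assumed framebuffer node */

#define SEVER_IP    "192.168.1.100"  /* assumed host address (identifier spelled as in the post) */
#define SERVER_PORT 8888             /* assumed port */
#define MAXLINE     1024             /* assumed receive buffer size */
#define BLOCKSIZE   512              /* fixed by the UDP code: 1800 x 512 B = one 640x480 RGB24 frame */

struct cam_buffer {
	void   *start;                   /* mmap'ed V4L2 buffer */
	size_t  length;
};

struct udptransbuf {
	char flag;                       /* block sequence flag: 'a' + index, wraps */
	char buf[BLOCKSIZE];
};

unsigned int fmt_to_bpp(unsigned int pixelformat);   /* helper from the i.MX IPU test utilities */
/* ...plus prototypes for the module functions (initBuffers, getFrame, backFrame, ...) */

#endif

With that in place, the capture module follows.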
#include "includes.h" int fd_cam;
struct cam_buffer *buffers=NULL;
unsigned int n_buffers=;
int frameIndex=; void initVideo()
{
	int ret;
	struct v4l2_capability cam_cap;    // device capability information
	struct v4l2_cropcap cam_cropcap;   // capture/cropping capabilities
	struct v4l2_fmtdesc cam_fmtdesc;   // enumerate supported formats: VIDIOC_ENUM_FMT
	struct v4l2_crop cam_crop;         // image cropping/scaling
	struct v4l2_format cam_format;     // video standard, frame format, etc.

	/* Open the device */
	fd_cam = open(USB_VIDEO, O_RDWR);
	if (fd_cam < 0)
		printf("Can't open video device\n");

	/* VIDIOC_QUERYCAP: query the camera's basic information */
	ret = ioctl(fd_cam, VIDIOC_QUERYCAP, &cam_cap);
	if (ret < 0) {
		printf("Can't get device information: VIDIOCGCAP\n");
	}
	printf("Driver Name:%s\nCard Name:%s\nBus info:%s\nDriver Version:%u.%u.%u\n",
		cam_cap.driver, cam_cap.card, cam_cap.bus_info,
		(cam_cap.version >> 16) & 0xFF, (cam_cap.version >> 8) & 0xFF,
		cam_cap.version & 0xFF);

	/* VIDIOC_ENUM_FMT: enumerate every pixel format the camera supports */
	cam_fmtdesc.index = 0;
	cam_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	printf("Support format:\n");
	while (ioctl(fd_cam, VIDIOC_ENUM_FMT, &cam_fmtdesc) != -1) {
		printf("\t%d.%s\n", cam_fmtdesc.index + 1, cam_fmtdesc.description);
		cam_fmtdesc.index++;
	}

	/* VIDIOC_CROPCAP: query the capture (cropping) capabilities */
	cam_cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (0 == ioctl(fd_cam, VIDIOC_CROPCAP, &cam_cropcap)) {
		printf("Default rec:\n\tleft:%d\n\ttop:%d\n\twidth:%d\n\theight:%d\n",
			cam_cropcap.defrect.left, cam_cropcap.defrect.top,
			cam_cropcap.defrect.width, cam_cropcap.defrect.height);
		/* VIDIOC_S_CROP: set the capture window */
		cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		cam_crop.c = cam_cropcap.defrect;   // default capture window
		if (-1 == ioctl(fd_cam, VIDIOC_S_CROP, &cam_crop)) {
			//printf("Can't set crop para\n");
		}
	}
	else {
		printf("Can't get cropcap para\n");
	}

	/* VIDIOC_S_FMT: set the frame format */
	cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	cam_format.fmt.pix.width = 640;    // 640x480, the size assumed by the rest of the pipeline
	cam_format.fmt.pix.height = 480;
	cam_format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;   // must be a format the camera actually supports
	cam_format.fmt.pix.field = V4L2_FIELD_INTERLACED;
	ret = ioctl(fd_cam, VIDIOC_S_FMT, &cam_format);
	if (ret < 0) {
		printf("Can't set frame information\n");
	}

	/* VIDIOC_G_FMT: read back the frame format actually in effect */
	cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	ret = ioctl(fd_cam, VIDIOC_G_FMT, &cam_format);
	if (ret < 0) {
		printf("Can't get frame information\n");
	}
	printf("Current data format information:\n\twidth:%d\n\theight:%d\n",
		cam_format.fmt.pix.width, cam_format.fmt.pix.height);

	ret = initBuffers();
	if (ret < 0) {
		printf("Buffers init error\n");
		//exit(-1);
	}
}

void closeVideo()
{
	//stopCapture();
	//freeBuffers();
	close(fd_cam);
}

int initBuffers()
{
	int ret;
	/* VIDIOC_REQBUFS: request frame buffers from the driver */
	struct v4l2_requestbuffers req;
	CLEAR(req);
	req.count = 4;   // number of buffers to request; the original value was lost, 4 is a typical choice
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	ret = ioctl(fd_cam, VIDIOC_REQBUFS, &req);
	if (ret < 0) {
		printf("Request frame buffers failed\n");
		return -1;
	}
	if (req.count < 1) {
		printf("Request frame buffers while insufficient buffer memory\n");
		return -1;
	}
	buffers = (struct cam_buffer *)calloc(req.count, sizeof(*buffers));
	if (!buffers) {
		printf("Out of memory\n");
		return -1;
	}
	for (n_buffers = 0; n_buffers < req.count; n_buffers++) {
		struct v4l2_buffer buf;
		CLEAR(buf);
		// VIDIOC_QUERYBUF: query buffer n_buffers for its offset and size
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = n_buffers;
		ret = ioctl(fd_cam, VIDIOC_QUERYBUF, &buf);
		if (ret < 0) {
			printf("VIDIOC_QUERYBUF %d failed\n", n_buffers);
			return -1;
		}
		buffers[n_buffers].length = buf.length;
		// map the driver's buffer into user space
		buffers[n_buffers].start = mmap(NULL,   // let the kernel pick the address
			buf.length,
			PROT_READ | PROT_WRITE,
			MAP_SHARED,
			fd_cam, buf.m.offset);
		if (MAP_FAILED == buffers[n_buffers].start) {
			printf("mmap buffer%d failed\n", n_buffers);
			return -1;
		}
	}
	return 0;
}
int startCapture()
{
	unsigned int i;
	// queue every buffer, then start streaming
	for (i = 0; i < n_buffers; i++) {
		struct v4l2_buffer buf;
		CLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		// fprintf(stderr, "n_buffers: %d\n", i);
		if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &buf)) {
			printf("VIDIOC_QBUF buffer%d failed\n", i);
			return -1;
		}
	}
	enum v4l2_buf_type type;
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (-1 == ioctl(fd_cam, VIDIOC_STREAMON, &type)) {
		printf("VIDIOC_STREAMON error\n");
		return -1;
	}
	return 0;
}
int stopCapture()
{
	enum v4l2_buf_type type;
	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (-1 == ioctl(fd_cam, VIDIOC_STREAMOFF, &type)) {
		printf("VIDIOC_STREAMOFF error\n");
		return -1;
	}
	return 0;
}
int freeBuffers()
{
	unsigned int i;
	for (i = 0; i < n_buffers; ++i) {
		if (-1 == munmap(buffers[i].start, buffers[i].length)) {
			printf("munmap buffer%d failed\n", i);
			return -1;
		}
	}
	free(buffers);
	return 0;
}
int getFrame(void **frame_buf, size_t *len)
{
	struct v4l2_buffer queue_buf;
	CLEAR(queue_buf);
	queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	queue_buf.memory = V4L2_MEMORY_MMAP;
	// dequeue a filled buffer; blocks until one is ready
	if (-1 == ioctl(fd_cam, VIDIOC_DQBUF, &queue_buf)) {
		printf("VIDIOC_DQBUF error\n");
		return -1;
	}
	printf("queue_buf.index=%d\n", queue_buf.index);
	//pthread_rwlock_wrlock(&rwlock);
	*frame_buf = buffers[queue_buf.index].start;
	*len = buffers[queue_buf.index].length;
	frameIndex = queue_buf.index;
	//pthread_rwlock_unlock(&rwlock);
	return 0;
}
int backFrame()
{
	if (frameIndex != -1) {
		struct v4l2_buffer queue_buf;
		CLEAR(queue_buf);
		queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		queue_buf.memory = V4L2_MEMORY_MMAP;
		queue_buf.index = frameIndex;
		// re-queue the buffer so the driver can fill it again
		if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &queue_buf)) {
			printf("VIDIOC_QBUF error\n");
			return -1;
		}
		return 0;
	}
	return -1;
}

/* Software path: convert YUYV to RGB24 */
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
	unsigned int in, out = 0;
	unsigned int pixel_16;
	unsigned char pixel_24[3];
	unsigned int pixel32;
	int y0, u, y1, v;
	struct timeval starttime, endtime;
	gettimeofday(&starttime, NULL);
	/* YUYV packs two pixels into four bytes: Y0 U Y1 V */
	for (in = 0; in < width * height * 2; in += 4) {
		pixel_16 =
			yuv[in + 3] << 24 |
			yuv[in + 2] << 16 |
			yuv[in + 1] << 8 |
			yuv[in + 0];
		y0 = (pixel_16 & 0x000000ff);
		u  = (pixel_16 & 0x0000ff00) >> 8;
		y1 = (pixel_16 & 0x00ff0000) >> 16;
		v  = (pixel_16 & 0xff000000) >> 24;
		pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
		pixel_24[0] = (pixel32 & 0x000000ff);
		pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
		pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
		//pthread_rwlock_wrlock(&rwlock);
		rgb[out++] = pixel_24[0];
		rgb[out++] = pixel_24[1];
		rgb[out++] = pixel_24[2];
		//pthread_rwlock_unlock(&rwlock);
		pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
		pixel_24[0] = (pixel32 & 0x000000ff);
		pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
		pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
		//pthread_rwlock_wrlock(&rwlock);
		rgb[out++] = pixel_24[0];
		rgb[out++] = pixel_24[1];
		rgb[out++] = pixel_24[2];
		//pthread_rwlock_unlock(&rwlock);
	}
	gettimeofday(&endtime, NULL);
	double timeuse = 1000000 * (endtime.tv_sec - starttime.tv_sec)
			+ endtime.tv_usec - starttime.tv_usec;
	timeuse /= 1000;   // divide by 1000 for ms, 1000000 for s, 1 for µs
	printf("yuv2rgb use %f ms\n", timeuse);
	return 0;
}
int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
	unsigned int pixel32 = 0;
	unsigned char *pixel = (unsigned char *)&pixel32;
	int r, g, b;
	r = y + (1.370705 * (v - 128));
	g = y - (0.698001 * (v - 128)) - (0.337633 * (u - 128));
	b = y + (1.732446 * (u - 128));
	if (r > 255) r = 255;
	if (g > 255) g = 255;
	if (b > 255) b = 255;
	if (r < 0) r = 0;
	if (g < 0) g = 0;
	if (b < 0) b = 0;
	pixel[0] = r * 220 / 256;   // scale down slightly, as in the widely copied reference version
	pixel[1] = g * 220 / 256;
	pixel[2] = b * 220 / 256;
	return pixel32;
}
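To wire this converter into the pipeline it helps to work out the buffer sizes once: YUYV stores two pixels in every four bytes (2 bytes per pixel), while RGB24 needs three bytes per pixel. A minimal sketch, assuming the 640x480 frame size used throughout (the CAM_* macros and the wrapper function are mine, not from the post):

/* Sketch: one frame through the software converter (640x480 assumed). */
#define CAM_WIDTH  640
#define CAM_HEIGHT 480

static unsigned char rgb24[CAM_WIDTH * CAM_HEIGHT * 3];   /* 3 B/pixel: 921600 bytes out */

void captureOneFrameSW(void)
{
	void *yuyv = NULL;       /* 2 B/pixel: 614400 bytes, mapped by V4L2 */
	size_t yuyvLen = 0;

	if (getFrame(&yuyv, &yuyvLen) == 0) {
		convert_yuv_to_rgb_buffer((unsigned char *)yuyv, rgb24, CAM_WIDTH, CAM_HEIGHT);
		backFrame();     /* hand the buffer back before the next frame */
	}
}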
Next, the IPU part. On the i.MX platform the IPU (Image Processing Unit) can do the YUYV-to-RGB conversion in hardware, replacing the software converter above.
#include "includes.h" int fd_ipu=;
struct ipu_task taskCam;
struct timeval begintime, endtime;
unsigned int ipuOutputSize=,ipuInputSize=;
void *inbuf=NULL;
void *outbuf=NULL;
/***************与软件解码对应的IPU解码**************************/
void initIPU()
{
	int ret;
	CLEAR(taskCam);
	// Input image size and format
	taskCam.input.width = 640;
	taskCam.input.height = 480;
	taskCam.input.format = v4l2_fourcc('Y', 'U', 'Y', 'V');
	// taskCam.input.crop.pos.x = 0;
	// taskCam.input.crop.pos.y = 0;
	// taskCam.input.crop.w = 0;
	// taskCam.input.crop.h = 0;

	// Output image size and format
	taskCam.output.width = 640;
	taskCam.output.height = 480;
	taskCam.output.format = v4l2_fourcc('B', 'G', 'R', '3');   // 24-bit BGR; the last fourcc char was lost in the post, '3' matches the 3-byte pixels used below
	// taskCam.output.crop.pos.x = 300;
	// taskCam.output.crop.pos.y = 300;
	// taskCam.output.crop.w = 300;
	// taskCam.output.crop.h = 300;

	// Open the IPU device
	fd_ipu = open(IPUDEV, O_RDWR, 0);
	if (fd_ipu < 0) {
		printf("open ipu dev fail\n");
	}

	// IPU_ALLOC takes the buffer size in .paddr and returns the physical address in the same field
	ipuOutputSize = taskCam.output.paddr = taskCam.output.width * taskCam.output.height
			* fmt_to_bpp(taskCam.output.format) / 8;
	printf("ipuOutputSize=%d\n", ipuOutputSize);
	ret = ioctl(fd_ipu, IPU_ALLOC, &taskCam.output.paddr);
	if (ret < 0) {
		printf("ioctl IPU_ALLOC fail\n");
	}
	outbuf = mmap(0, ipuOutputSize, PROT_READ | PROT_WRITE,
			MAP_SHARED, fd_ipu, taskCam.output.paddr);
	if (!outbuf) {
		printf("mmap ipu output image fail\n");
	}

	ipuInputSize = taskCam.input.paddr = taskCam.input.width * taskCam.input.height
			* fmt_to_bpp(taskCam.input.format) / 8;
	printf("ipuInputSize=%d\n", ipuInputSize);
	ret = ioctl(fd_ipu, IPU_ALLOC, &taskCam.input.paddr);
	if (ret < 0) {
		printf("ioctl IPU_ALLOC fail: (errno = %d)\n", errno);
	}
	inbuf = mmap(0, ipuInputSize, PROT_READ | PROT_WRITE,
			MAP_SHARED, fd_ipu, taskCam.input.paddr);
	if (!inbuf) {
		printf("mmap ipu input image fail\n");
	}
}
void IPUConvent(void *in, void *out)
{
	int ret;
	memcpy(inbuf, in, ipuInputSize);
	gettimeofday(&begintime, NULL);
	// Perform the colour-space conversion in hardware
	ret = ioctl(fd_ipu, IPU_QUEUE_TASK, &taskCam);
	if (ret < 0) {
		printf("ioctl IPU_QUEUE_TASK fail %x\n", ret);
	}
	gettimeofday(&endtime, NULL);
	double timeuse = 1000000 * (endtime.tv_sec - begintime.tv_sec)
			+ endtime.tv_usec - begintime.tv_usec;
	timeuse /= 1000;   // milliseconds, for direct comparison with the software path
	printf("yuv2rgb use %f ms\n", timeuse);
	memcpy(out, outbuf, ipuOutputSize);
}
void closeIPU()
{
	if (outbuf) munmap(outbuf, ipuOutputSize);   // the original unmapped rgbFrame here; outbuf is the mapping created in initIPU
	if (inbuf) munmap(inbuf, ipuInputSize);
	if (taskCam.input.paddr)
		ioctl(fd_ipu, IPU_FREE, &taskCam.input.paddr);
	if (taskCam.output.paddr)                    // also release the output buffer, which the original leaked
		ioctl(fd_ipu, IPU_FREE, &taskCam.output.paddr);
	close(fd_ipu);
}
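For comparison with the software path, here is a minimal sketch of driving one frame through the IPU; the wrapper function and the rgbFrame sizing are assumptions based on the 640x480 BGR3 output configured in initIPU:

/* Sketch: one frame through the IPU (sizes assume the 640x480 setup above). */
static unsigned char rgbFrame[640 * 480 * 3];

void captureOneFrameIPU(void)
{
	void *yuvFrame = NULL;
	size_t len = 0;

	if (getFrame(&yuvFrame, &len) == 0) {    /* YUYV frame from V4L2 */
		IPUConvent(yuvFrame, rgbFrame);  /* hardware colour-space conversion */
		backFrame();
	}
}

Both converters print a "yuv2rgb use ... ms" line, so the hardware and software paths can be timed against each other directly.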
Then the framebuffer display.
#include "includes.h" int fd_fb0;
long int screensize = ;
char *fb_buf = ;
struct fb_var_screeninfo vinfo;
struct fb_fix_screeninfo finfo; void InitDisOnFrameBuffer()
{
// Open the file for reading and writing
fd_fb0 = open(DISON_FB0, O_RDWR);
if (!fd_fb0) {
printf("Error: cannot open framebuffer device.\n");
exit();
}
printf("The framebuffer device was opened successfully.\n"); // Get fixed screen information
if (ioctl(fd_fb0, FBIOGET_FSCREENINFO, &finfo)) {
printf("Error reading fixed information.\n");
exit();
} // Get variable screen information
if (ioctl(fd_fb0, FBIOGET_VSCREENINFO, &vinfo)) {
printf("Error reading variable information.\n");
exit();
}
printf("%dx%d, %dbpp\n", vinfo.xres, vinfo.yres, vinfo.bits_per_pixel ); // Figure out the size of the screen in bytes
screensize = vinfo.xres * vinfo.yres * vinfo.bits_per_pixel / ;
printf("screensize=%d\n",screensize); // Map the device to memory
fb_buf = (char *)mmap(, screensize, PROT_READ | PROT_WRITE, MAP_SHARED,
fd_fb0, );
if ((int)fb_buf == -) {
printf("Error: failed to map framebuffer device to memory.\n");
exit();
}
printf("The framebuffer device was mapped to memory successfully.\n");
}
void DisOnFrameBuffer(unsigned char *frame)
{
	//memcpy(fb_buf, frame, 640 * 480 * 3 * sizeof(char));
	int x = 0, y = 0;
	long int location = 0;
	// Figure out where in memory to put each pixel
	for (y = 0; y < 480; y++)
		for (x = 0; x < 640; x++) {
			location = (x + vinfo.xoffset) * (vinfo.bits_per_pixel / 8) +
				(y + vinfo.yoffset) * finfo.line_length;
			if (vinfo.bits_per_pixel == 32) {
				// 32bpp, BGRA byte order
				*(fb_buf + location)     = *frame; frame++;   // blue
				*(fb_buf + location + 1) = *frame; frame++;   // green
				*(fb_buf + location + 2) = *frame; frame++;   // red
				*(fb_buf + location + 3) = 0;                 // no transparency
			}
			else {   // assume 16bpp: this branch draws a test gradient, not the frame
				int b = 10;
				int g = (x - 100) / 6;        // a little green
				int r = 31 - (y - 100) / 16;  // a lot of red
				unsigned short int t = r << 11 | g << 5 | b;
				*((unsigned short int *)(fb_buf + location)) = t;
			}
		}
}
void CloseDisOnFrameBuffer()
{
	munmap(fb_buf, screensize);
	close(fd_fb0);
}
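Note that the 16bpp branch above is carried over from the classic fbdev example and paints a fixed red/green gradient rather than the camera image. If the panel really runs at 16bpp, a hedged sketch of packing the 3-byte BGR pixels into RGB565 — intended as a drop-in replacement for the body of that else branch — might look like this:

/* Sketch: pack one BGR24 pixel (as produced by the BGR3 output) into RGB565. */
unsigned char b = frame[0], g = frame[1], r = frame[2];
unsigned short t = ((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3);
*((unsigned short *)(fb_buf + location)) = t;
frame += 3;   /* advance to the next source pixel */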
Finally, the UDP part.
#include "includes.h" struct sockaddr_in serveraddr;
int confd; char udpRecbuf[MAXLINE]; void initUDPTrans()
{
//1.创建一个socket
confd=socket(AF_INET,SOCK_DGRAM,);
//2.初始化服务器地址
bzero(&serveraddr,sizeof(serveraddr));
serveraddr.sin_family=AF_INET;
//
inet_pton(AF_INET,SEVER_IP,&serveraddr.sin_addr.s_addr);
serveraddr.sin_port =htons(SERVER_PORT);
}
void sendUDPdata(void *datas, unsigned int size)
{
	ssize_t len;   // must be signed for the < 0 check below to work
	size_t i;
	char tempflag;
	struct udptransbuf data;
	// split one 640x480 RGB24 frame (921600 bytes) into 1800 blocks of 512 bytes
	for (i = 0; i < 1800; i++) {
		memcpy(data.buf, (char *)datas + i * BLOCKSIZE, BLOCKSIZE);
		// for (j = 0; j < BLOCKSIZE; j++)
		//	data.buf[j] = ((unsigned char *)datas)[i * BLOCKSIZE + j];
		if (i == 0) {
			tempflag = 'a';
			data.flag = tempflag;
		}
		else {
			tempflag++;   // a one-byte flag wraps every 256 blocks, so it only gives local ordering
			data.flag = tempflag;
		}
		// 3. send the block to the server
		len = sendto(confd, (void *)&data, sizeof(data), 0,
				(struct sockaddr *)&serveraddr, sizeof(serveraddr));
		if (len < 0)
			printf("UDP send failed\n");
	}
	//char udpSendbuf[MAXLINE] = "125wwew3332354#@$#";
}
void recUDPdata(char *udpRecbuf)
{
	ssize_t len;
	// the original passed sizeof(udpRecbuf), which for an array parameter is only the pointer size; pass the real capacity
	len = recvfrom(confd, udpRecbuf, MAXLINE, 0, NULL, NULL);
	if (len > 0)
		write(STDOUT_FILENO, udpRecbuf, len);   // the original wrote to STDIN_FILENO
}
void closeUDPtrans()
{
	close(confd);
}
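The Qt host program lives in the repo rather than in this post, but the receive side of the protocol is easy to sketch: each datagram is one struct udptransbuf (a flag byte plus 512 payload bytes), and a full 640x480 RGB24 frame is 1800 datagrams. A minimal C sketch of the reassembly (the function and FRAME_BLOCKS are mine), with the caveat that the one-byte flag wraps every 256 blocks and so can only detect local loss or reordering:

/* Sketch: reassemble one 1800-block frame on the receiving side (assumed helper). */
#define FRAME_BLOCKS 1800

int recvFrame(int sockfd, unsigned char *frame /* FRAME_BLOCKS * BLOCKSIZE bytes */)
{
	struct udptransbuf pkt;
	char expect = 'a';
	int i;

	for (i = 0; i < FRAME_BLOCKS; i++) {
		if (recvfrom(sockfd, &pkt, sizeof(pkt), 0, NULL, NULL) < 0)
			return -1;
		if (pkt.flag != expect)   /* a block was lost or reordered; drop the frame */
			return -1;
		memcpy(frame + i * BLOCKSIZE, pkt.buf, BLOCKSIZE);
		expect++;                 /* wraps exactly like the sender's flag */
	}
	return 0;
}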
The complete project:
https://github.com/tla001/CapTrans