Linux Camera Driver Study Notes (6): UVC - Basic Framework Code Analysis
This is a simplified driver written by imitating the kernel's built-in UVC (USB Video Class) driver, for learning purposes only. In real projects you should use the in-kernel driver whenever possible; writing your own driver is only necessary when the in-kernel one does not support your hardware.
The code follows below. The configuration parameters must be adjusted to match your own device (read the descriptors with `lsusb -v`).
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/usb.h>
#include <linux/videodev2.h>
#include <linux/vmalloc.h>
#include <linux/wait.h>
#include <linux/mm.h>
#include <asm/atomic.h>
#include <asm/unaligned.h>
#include <linux/slab.h>
#include <media/v4l2-common.h>
#include <media/v4l2-ioctl.h>
#include <media/videobuf-core.h>

#include "uvcvideo.h"

#define sheldon_UVC_URBS 3

/* Values for bmHeaderInfo (Video and Still Image Payload Headers, 2.4.3.3) */
#define UVC_STREAM_EOH (1 << 7)
#define UVC_STREAM_ERR (1 << 6)
#define UVC_STREAM_STI (1 << 5)
#define UVC_STREAM_RES (1 << 4)
#define UVC_STREAM_SCR (1 << 3)
#define UVC_STREAM_PTS (1 << 2)
#define UVC_STREAM_EOF (1 << 1)
#define UVC_STREAM_FID (1 << 0)

struct sheldon_uvc_streaming_control {
    __u16 bmHint;
    __u8  bFormatIndex;
    __u8  bFrameIndex;
    __u32 dwFrameInterval;
    __u16 wKeyFrameRate;
    __u16 wPFrameRate;
    __u16 wCompQuality;
    __u16 wCompWindowSize;
    __u16 wDelay;
    __u32 dwMaxVideoFrameSize;
    __u32 dwMaxPayloadTransferSize;
    __u32 dwClockFrequency;
    __u8  bmFramingInfo;
    __u8  bPreferedVersion;
    __u8  bMinVersion;
    __u8  bMaxVersion;
};

/* Modeled on the code under drivers/media/video/uvc */
struct frame_desc {
    int width;
    int height;
};

/* Structures modeled on uvc_video_queue */
struct sheldon_uvc_buffer {
    struct v4l2_buffer buf;
    int state;
    int vma_use_count;          /* non-zero once the buffer has been mmap'ed */
    wait_queue_head_t wait;     /* a process waiting to read this buffer sleeps here until data arrives */
    struct list_head stream;
    struct list_head irq;
}; struct sheldon_uvc_queue {
    void *mem;
    int count;
    int buf_size;
    /* up to 32 buffers/URBs, matching the in-kernel uvc driver's limit */
    struct sheldon_uvc_buffer buffer[32];

    struct urb *urb[32];
    char *urb_buffer[32];
    dma_addr_t urb_dma[32];
    unsigned int urb_size;

    struct list_head mainqueue;  /* buffers for the application to consume */
    struct list_head irqqueue;   /* buffers for the URB completion handler to fill */
};

static struct sheldon_uvc_queue sheldon_uvc_queue;

static struct video_device *sheldon_uvc_vdev;
static struct usb_device *sheldon_uvc_udev;
/* The values below are device specific and were determined by hand from "lsusb -v";
 * the numbers shown here are examples and must be replaced for your own camera. */
static int sheldon_uvc_bEndpointAddress = 0x82;          /* ISO IN endpoint address */
static int sheldon_uvc_streaming_intf;
static int sheldon_uvc_control_intf;
static int sheldon_uvc_streaming_bAlternateSetting = 6;  /* see the bandwidth note in streamon */
static struct v4l2_format sheldon_uvc_format;
static struct frame_desc framdesc[] = {{640, 480}, {352, 288}, {320, 240}};  /* resolutions from the frame descriptors, e.g. {640, 480}, {352, 288}, {320, 240}, {176, 144}, {160, 120} */
static int frame_idx = 0;            /* index into framdesc[] */
static int bBitsPerPixel = 16;       /* lsusb -v -d 0x1e4e: "bBitsPerPixel" (16 for YUYV) */
static int uvc_version = 0x0100;     /* lsusb -v -d 0x1e4e: bcdUVC */
static int wMaxPacketSize = 128;     /* wMaxPacketSize of the ISO endpoint in the chosen altsetting */
static int ProcessingUnitID = 3;     /* bUnitID of the Processing Unit (device specific) */

static struct sheldon_uvc_streaming_control sheldon_uvc_params;

/* A2: modeled on uvc_v4l2_do_ioctl */

/* sheldonUV_vidioc_querycap: identifies this as a video capture device */
static int sheldonUV_vidioc_querycap(struct file *file, void *priv,
struct v4l2_capability *cap)
{
    memset(cap, 0, sizeof *cap);
    strcpy(cap->driver, "sheldonUV");
    strcpy(cap->card, "sheldonUV");
    cap->version = 0x0001;
    cap->capabilities = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING;

    return 0;
}

/* A3: enumerate the supported pixel formats
 * Reference: the uvc_fmts array
 */
static int sheldonUV_vidioc_enum_fmt_vid_cap(struct file *file, void *priv,
struct v4l2_fmtdesc *f)
{
    /* Inspecting the descriptors by hand shows this camera supports only one format */
    if (f->index >= 1)
        return -EINVAL;

    /* Which format?
     * The VideoStreaming interface format descriptor gives the GUID
     * "59 55 59 32 00 00 10 00 80 00 00 aa 00 38 9b 71", i.e. 'Y''U''Y''2',
     * which corresponds to V4L2_PIX_FMT_YUYV.
     */
    strcpy(f->description, "4:2:2, packed, YUYV");
    f->pixelformat = V4L2_PIX_FMT_YUYV;

    return 0;
}

/* A4: return the currently selected format */
static int sheldonUV_vidioc_get_fmt_vid_cap(struct file *file, void *priv,
struct v4l2_format *f)
{
    memcpy(f, &sheldon_uvc_format, sizeof(sheldon_uvc_format));
    return 0;
}

/* A5: test whether a given format is supported; width and height are forced to the
 * currently selected frame descriptor
 * Reference: uvc_v4l2_try_format
 *            sheldon_vivi_vidioc_try_fmt_vid_cap
 */
static int sheldonUV_vidioc_try_fmt_vid_cap(struct file *file, void *priv,
struct v4l2_format *f)
{
    if (f->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
    {
        return -EINVAL;
    }

    if (f->fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV)
        return -EINVAL;

    /* Adjust the format's width and height and compute bytesperline and sizeimage.
     * The supported resolutions were determined by inspecting the frame descriptors by hand.
     */
    f->fmt.pix.width  = framdesc[frame_idx].width;
    f->fmt.pix.height = framdesc[frame_idx].height;
    f->fmt.pix.bytesperline = (f->fmt.pix.width * bBitsPerPixel) >> 3;
    f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;

    return 0;
}

/* A6: if the format is supported, apply it - reference: vivi_vidioc_s_fmt_vid_cap */
static int sheldonUV_vidioc_set_fmt_vid_cap(struct file *file, void *priv,
struct v4l2_format *f)
{
    int ret = sheldonUV_vidioc_try_fmt_vid_cap(file, NULL, f);
    if (ret < 0)
        return ret;

    memcpy(&sheldon_uvc_format, f, sizeof(sheldon_uvc_format));

    return 0;
}

static int sheldon_uvc_free_buffers(void)
{
    if (sheldon_uvc_queue.mem)
    {
        vfree(sheldon_uvc_queue.mem);
        memset(&sheldon_uvc_queue, 0, sizeof(sheldon_uvc_queue));
        sheldon_uvc_queue.mem = NULL;
    }
    return 0;
}

/* A7: the application calls this ioctl to ask the driver to allocate several buffers;
 * the application will read the video data from these buffers.
 * Reference: uvc_alloc_buffers
 */
static int sheldonUV_vidioc_reqbufs(struct file *file, void *priv,
struct v4l2_requestbuffers *p)
{
    int nbuffers = p->count;
    int bufsize = PAGE_ALIGN(sheldon_uvc_format.fmt.pix.sizeimage);
    unsigned int i;
    void *mem = NULL;
    int ret;

    if ((ret = sheldon_uvc_free_buffers()) < 0)
        goto done;

    /* Bail out if no buffers should be allocated. */
    if (nbuffers == 0)
        goto done;

    /* Decrement the number of buffers until allocation succeeds. */
    for (; nbuffers > 0; --nbuffers) {
        mem = vmalloc_32(nbuffers * bufsize);
        if (mem != NULL)
            break;
    }

    if (mem == NULL) {
        ret = -ENOMEM;
        goto done;
    }

    /* The buffers are allocated once, as a single contiguous block */
    memset(&sheldon_uvc_queue, 0, sizeof(sheldon_uvc_queue));

    INIT_LIST_HEAD(&sheldon_uvc_queue.mainqueue);
    INIT_LIST_HEAD(&sheldon_uvc_queue.irqqueue);

    for (i = 0; i < nbuffers; ++i) {
        sheldon_uvc_queue.buffer[i].buf.index = i;
        sheldon_uvc_queue.buffer[i].buf.m.offset = i * bufsize;
        sheldon_uvc_queue.buffer[i].buf.length = sheldon_uvc_format.fmt.pix.sizeimage;
        sheldon_uvc_queue.buffer[i].buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        sheldon_uvc_queue.buffer[i].buf.sequence = 0;
        sheldon_uvc_queue.buffer[i].buf.field = V4L2_FIELD_NONE;
        sheldon_uvc_queue.buffer[i].buf.memory = V4L2_MEMORY_MMAP;
        sheldon_uvc_queue.buffer[i].buf.flags = 0;
        sheldon_uvc_queue.buffer[i].state = VIDEOBUF_IDLE;  /* idle once allocated */
        init_waitqueue_head(&sheldon_uvc_queue.buffer[i].wait);
    }

    sheldon_uvc_queue.mem = mem;
    sheldon_uvc_queue.count = nbuffers;
    sheldon_uvc_queue.buf_size = bufsize;
    ret = nbuffers;

done:
    return ret;
}

/* A8: query the state of a buffer and get its offset so the application can call mmap */
static int sheldonUV_vidioc_querybuf(struct file *file, void *priv, struct v4l2_buffer *v4l2_buf)
{
    int ret = 0;

    if (v4l2_buf->index >= sheldon_uvc_queue.count) {
        ret = -EINVAL;
        goto done;
    }

    memcpy(v4l2_buf, &sheldon_uvc_queue.buffer[v4l2_buf->index].buf, sizeof(*v4l2_buf));

    /* update the flags */
    if (sheldon_uvc_queue.buffer[v4l2_buf->index].vma_use_count)
        v4l2_buf->flags |= V4L2_BUF_FLAG_MAPPED;

    switch (sheldon_uvc_queue.buffer[v4l2_buf->index].state) {
    case VIDEOBUF_ERROR:
    case VIDEOBUF_DONE:
        v4l2_buf->flags |= V4L2_BUF_FLAG_DONE;
        break;
    case VIDEOBUF_QUEUED:
    case VIDEOBUF_ACTIVE:
        v4l2_buf->flags |= V4L2_BUF_FLAG_QUEUED;
        break;
    case VIDEOBUF_IDLE:
    default:
        break;
    }

done:
    return ret;
}

/* A10: put a buffer onto the queues; the low-level URB handler will fill the
 * buffers on the irq queue with data.
 * Reference: uvc_queue_buffer
 */
static int sheldonUV_vidioc_qbuf(struct file *file, void *priv, struct v4l2_buffer *v4l2_buf)
{
    struct sheldon_uvc_buffer *buf;

    /* 0. The v4l2_buf passed in by the application may be invalid, so check it */
    if (v4l2_buf->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
        v4l2_buf->memory != V4L2_MEMORY_MMAP) {
        return -EINVAL;
    }

    if (v4l2_buf->index >= sheldon_uvc_queue.count) {
        return -EINVAL;
    }

    buf = &sheldon_uvc_queue.buffer[v4l2_buf->index];
    if (buf->state != VIDEOBUF_IDLE) {
        return -EINVAL;
    }

    /* 1. Update the state */
    buf->state = VIDEOBUF_QUEUED;
    buf->buf.bytesused = 0;

    /* 2. Put the buffer onto two queues */
    /* Queue 1: used by the application.
     * An empty buffer is placed on the mainqueue; once it holds data,
     * the application takes it back off the mainqueue.
     */
    list_add_tail(&buf->stream, &sheldon_uvc_queue.mainqueue);

    /* Queue 2: used by the data-producing code.
     * When data arrives, the first buffer on the irqqueue is taken and filled.
     */
    list_add_tail(&buf->irq, &sheldon_uvc_queue.irqqueue);

    return 0;
}

static void sheldon_uvc_print_streaming_params(struct sheldon_uvc_streaming_control *ctrl)
{
printk("video params:\n");
printk("bmHint = %d\n", ctrl->bmHint);
printk("bFormatIndex = %d\n", ctrl->bFormatIndex);
printk("bFrameIndex = %d\n", ctrl->bFrameIndex);
printk("dwFrameInterval = %d\n", ctrl->dwFrameInterval);
printk("wKeyFrameRate = %d\n", ctrl->wKeyFrameRate);
printk("wPFrameRate = %d\n", ctrl->wPFrameRate);
printk("wCompQuality = %d\n", ctrl->wCompQuality);
printk("wCompWindowSize = %d\n", ctrl->wCompWindowSize);
printk("wDelay = %d\n", ctrl->wDelay);
printk("dwMaxVideoFrameSize = %d\n", ctrl->dwMaxVideoFrameSize);
printk("dwMaxPayloadTransferSize = %d\n", ctrl->dwMaxPayloadTransferSize);
printk("dwClockFrequency = %d\n", ctrl->dwClockFrequency);
printk("bmFramingInfo = %d\n", ctrl->bmFramingInfo);
printk("bPreferedVersion = %d\n", ctrl->bPreferedVersion);
printk("bMinVersion = %d\n", ctrl->bMinVersion);
printk("bMinVersion = %d\n", ctrl->bMinVersion);
} /* 参考: uvc_get_video_ctrl
(ret = uvc_get_video_ctrl(video, probe, 1, GET_CUR))
static int uvc_get_video_ctrl(struct uvc_video_device *video,
struct uvc_streaming_control *ctrl, int probe, __u8 query)
*/
static int sheldon_uvc_get_streaming_params(struct sheldon_uvc_streaming_control *ctrl)
{
    __u8 *data;
    __u16 size;
    int ret;
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;

    size = uvc_version >= 0x0110 ? 34 : 26;
    data = kmalloc(size, GFP_KERNEL);
    if (data == NULL)
        return -ENOMEM;

    pipe = (GET_CUR & 0x80) ? usb_rcvctrlpipe(sheldon_uvc_udev, 0)
                            : usb_sndctrlpipe(sheldon_uvc_udev, 0);
    type |= (GET_CUR & 0x80) ? USB_DIR_IN : USB_DIR_OUT;

    ret = usb_control_msg(sheldon_uvc_udev, pipe, GET_CUR, type, VS_PROBE_CONTROL << 8,
                          0 << 8 | sheldon_uvc_streaming_intf, data, size, 5000);
    if (ret < 0)
        goto done;

    ctrl->bmHint = le16_to_cpup((__le16 *)&data[0]);
    ctrl->bFormatIndex = data[2];
    ctrl->bFrameIndex = data[3];
    ctrl->dwFrameInterval = le32_to_cpup((__le32 *)&data[4]);
    ctrl->wKeyFrameRate = le16_to_cpup((__le16 *)&data[8]);
    ctrl->wPFrameRate = le16_to_cpup((__le16 *)&data[10]);
    ctrl->wCompQuality = le16_to_cpup((__le16 *)&data[12]);
    ctrl->wCompWindowSize = le16_to_cpup((__le16 *)&data[14]);
    ctrl->wDelay = le16_to_cpup((__le16 *)&data[16]);
    ctrl->dwMaxVideoFrameSize = get_unaligned_le32(&data[18]);
    ctrl->dwMaxPayloadTransferSize = get_unaligned_le32(&data[22]);

    if (size == 34) {
        ctrl->dwClockFrequency = get_unaligned_le32(&data[26]);
        ctrl->bmFramingInfo = data[30];
        ctrl->bPreferedVersion = data[31];
        ctrl->bMinVersion = data[32];
        ctrl->bMaxVersion = data[33];
    } else {
        //ctrl->dwClockFrequency = video->dev->clock_frequency;
        ctrl->bmFramingInfo = 0;
        ctrl->bPreferedVersion = 0;
        ctrl->bMinVersion = 0;
        ctrl->bMaxVersion = 0;
    }

done:
    kfree(data);

    return (ret < 0) ? ret : 0;
}

/* Reference: uvc_v4l2_try_format / uvc_probe_video
* uvc_set_video_ctrl(video, probe, 1)
*/
static int sheldon_uvc_try_streaming_params(struct sheldon_uvc_streaming_control *ctrl)
{
    __u8 *data;
    __u16 size;
    int ret;
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;

    memset(ctrl, 0, sizeof *ctrl);

    ctrl->bmHint = 1;                    /* keep dwFrameInterval fixed */
    ctrl->bFormatIndex = 1;
    ctrl->bFrameIndex = frame_idx + 1;   /* bFrameIndex starts at 1 */
    ctrl->dwFrameInterval = 333333;      /* in 100 ns units, i.e. 30 fps */

    size = uvc_version >= 0x0110 ? 34 : 26;
    data = kzalloc(size, GFP_KERNEL);
    if (data == NULL)
        return -ENOMEM;

    *(__le16 *)&data[0] = cpu_to_le16(ctrl->bmHint);
    data[2] = ctrl->bFormatIndex;
    data[3] = ctrl->bFrameIndex;
    *(__le32 *)&data[4] = cpu_to_le32(ctrl->dwFrameInterval);
    *(__le16 *)&data[8] = cpu_to_le16(ctrl->wKeyFrameRate);
    *(__le16 *)&data[10] = cpu_to_le16(ctrl->wPFrameRate);
    *(__le16 *)&data[12] = cpu_to_le16(ctrl->wCompQuality);
    *(__le16 *)&data[14] = cpu_to_le16(ctrl->wCompWindowSize);
    *(__le16 *)&data[16] = cpu_to_le16(ctrl->wDelay);
    put_unaligned_le32(ctrl->dwMaxVideoFrameSize, &data[18]);
    put_unaligned_le32(ctrl->dwMaxPayloadTransferSize, &data[22]);

    if (size == 34) {
        put_unaligned_le32(ctrl->dwClockFrequency, &data[26]);
        data[30] = ctrl->bmFramingInfo;
        data[31] = ctrl->bPreferedVersion;
        data[32] = ctrl->bMinVersion;
        data[33] = ctrl->bMaxVersion;
    }

    pipe = (SET_CUR & 0x80) ? usb_rcvctrlpipe(sheldon_uvc_udev, 0)
                            : usb_sndctrlpipe(sheldon_uvc_udev, 0);
    type |= (SET_CUR & 0x80) ? USB_DIR_IN : USB_DIR_OUT;

    ret = usb_control_msg(sheldon_uvc_udev, pipe, SET_CUR, type, VS_PROBE_CONTROL << 8,
                          0 << 8 | sheldon_uvc_streaming_intf, data, size, 5000);

    kfree(data);

    return (ret < 0) ? ret : 0;
}

/* Reference: uvc_v4l2_try_format / uvc_probe_video
* uvc_set_video_ctrl(video, probe, 1)
*/
static int sheldon_uvc_set_streaming_params(struct sheldon_uvc_streaming_control *ctrl)
{
    __u8 *data;
    __u16 size;
    int ret;
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;

    size = uvc_version >= 0x0110 ? 34 : 26;
    data = kzalloc(size, GFP_KERNEL);
    if (data == NULL)
        return -ENOMEM;

    *(__le16 *)&data[0] = cpu_to_le16(ctrl->bmHint);
    data[2] = ctrl->bFormatIndex;
    data[3] = ctrl->bFrameIndex;
    *(__le32 *)&data[4] = cpu_to_le32(ctrl->dwFrameInterval);
    *(__le16 *)&data[8] = cpu_to_le16(ctrl->wKeyFrameRate);
    *(__le16 *)&data[10] = cpu_to_le16(ctrl->wPFrameRate);
    *(__le16 *)&data[12] = cpu_to_le16(ctrl->wCompQuality);
    *(__le16 *)&data[14] = cpu_to_le16(ctrl->wCompWindowSize);
    *(__le16 *)&data[16] = cpu_to_le16(ctrl->wDelay);
    put_unaligned_le32(ctrl->dwMaxVideoFrameSize, &data[18]);
    put_unaligned_le32(ctrl->dwMaxPayloadTransferSize, &data[22]);

    if (size == 34) {
        put_unaligned_le32(ctrl->dwClockFrequency, &data[26]);
        data[30] = ctrl->bmFramingInfo;
        data[31] = ctrl->bPreferedVersion;
        data[32] = ctrl->bMinVersion;
        data[33] = ctrl->bMaxVersion;
    }

    pipe = (SET_CUR & 0x80) ? usb_rcvctrlpipe(sheldon_uvc_udev, 0)
                            : usb_sndctrlpipe(sheldon_uvc_udev, 0);
    type |= (SET_CUR & 0x80) ? USB_DIR_IN : USB_DIR_OUT;

    ret = usb_control_msg(sheldon_uvc_udev, pipe, SET_CUR, type, VS_COMMIT_CONTROL << 8,
                          0 << 8 | sheldon_uvc_streaming_intf, data, size, 5000);

    kfree(data);

    return (ret < 0) ? ret : 0;
}

static void sheldon_uvc_uninit_urbs(void)
{
    int i;

    for (i = 0; i < sheldon_UVC_URBS; ++i) {
        if (sheldon_uvc_queue.urb_buffer[i])
        {
            usb_buffer_free(sheldon_uvc_udev, sheldon_uvc_queue.urb_size, sheldon_uvc_queue.urb_buffer[i], sheldon_uvc_queue.urb_dma[i]);
            sheldon_uvc_queue.urb_buffer[i] = NULL;
        }

        if (sheldon_uvc_queue.urb[i])
        {
            usb_free_urb(sheldon_uvc_queue.urb[i]);
            sheldon_uvc_queue.urb[i] = NULL;
        }
    }
}

/* Reference: uvc_video_complete / uvc_video_decode_isoc */
static void sheldon_uvc_video_complete(struct urb *urb)
{
    u8 *src;
    u8 *dest;
    int ret, i;
    int len;
    int maxlen;
    int nbytes;
    struct sheldon_uvc_buffer *buf;

    switch (urb->status) {
    case 0:
        break;

    default:
        printk("Non-zero status (%d) in video "
            "completion handler.\n", urb->status);
        return;
    }

    /* Take the first buffer off the irqqueue */
    if (!list_empty(&sheldon_uvc_queue.irqqueue))  /* only if the queue is not empty */
    {
        buf = list_first_entry(&sheldon_uvc_queue.irqqueue, struct sheldon_uvc_buffer, irq);

        for (i = 0; i < urb->number_of_packets; ++i) {
            if (urb->iso_frame_desc[i].status < 0) {
                printk("USB isochronous frame "
                    "lost (%d).\n", urb->iso_frame_desc[i].status);
                continue;
            }

            src  = urb->transfer_buffer + urb->iso_frame_desc[i].offset;              /* source */
            dest = sheldon_uvc_queue.mem + buf->buf.m.offset + buf->buf.bytesused;    /* destination */
            len  = urb->iso_frame_desc[i].actual_length;                              /* length of the whole payload */

            /* Check that the payload is valid.
             * Payload header layout:
             *   data[0] : header length
             *   data[1] : header info / error status
             */
            if (len < 2 || src[0] < 2 || src[0] > len)
                continue;

            /* Skip payloads marked with the error bit ("error frames"). */
            if (src[1] & UVC_STREAM_ERR) {
                printk("Dropping payload (error bit set).\n");
                continue;
            }

            /* data length with the header removed */
            len -= src[0];

            /* how much room is left in the buffer */
            maxlen = buf->buf.length - buf->buf.bytesused;
            nbytes = min(len, maxlen);

            /* copy the data */
            memcpy(dest, src + src[0], nbytes);
            buf->buf.bytesused += nbytes;

            /* has the whole frame been received? */
            if (len > maxlen) {
                buf->state = VIDEOBUF_DONE;
            }

            /* Mark the buffer as done if the EOF marker is set. */
            if (src[1] & UVC_STREAM_EOF && buf->buf.bytesused != 0) {
                printk("Frame complete (EOF found).\n");
                if (len == 0)
                    printk("EOF in empty payload.\n");
                buf->state = VIDEOBUF_DONE;
            }
        }

        /* Once a whole frame has been received:
         * remove the buffer from the irqqueue and
         * wake up any process waiting for data.
         */
        if (buf->state == VIDEOBUF_DONE ||
            buf->state == VIDEOBUF_ERROR)
        {
            list_del(&buf->irq);
            wake_up(&buf->wait);
        }
    }

    /* Resubmit the URB */
    if ((ret = usb_submit_urb(urb, GFP_ATOMIC)) < 0) {
        printk("Failed to resubmit video URB (%d).\n", ret);
    }
}

/* Reference: uvc_init_video_isoc */
static int sheldon_uvc_alloc_init_urbs(void)
{
    u16 psize;
    u32 size;
    int npackets;
    int i;
    int j;

    struct urb *urb;

    psize = wMaxPacketSize;                          /* max bytes the ISO endpoint can transfer per (micro)frame */
    size = sheldon_uvc_params.dwMaxVideoFrameSize;   /* max size of one video frame */
    npackets = DIV_ROUND_UP(size, psize);
    if (npackets > 32)
        npackets = 32;

    size = sheldon_uvc_queue.urb_size = psize * npackets;

    for (i = 0; i < sheldon_UVC_URBS; ++i) {
        /* 1. Allocate the USB transfer buffers */
        sheldon_uvc_queue.urb_buffer[i] = usb_buffer_alloc(
            sheldon_uvc_udev, size,
            GFP_KERNEL | __GFP_NOWARN, &sheldon_uvc_queue.urb_dma[i]);  /* urb_dma[i] receives the DMA (bus) address */

        /* 2. Allocate the URBs */
        sheldon_uvc_queue.urb[i] = usb_alloc_urb(npackets, GFP_KERNEL);

        if (!sheldon_uvc_queue.urb_buffer[i] || !sheldon_uvc_queue.urb[i])
        {
            sheldon_uvc_uninit_urbs();
            return -ENOMEM;
        }
    }

    /* 3. Set up the URBs */
    for (i = 0; i < sheldon_UVC_URBS; ++i) {
        urb = sheldon_uvc_queue.urb[i];

        urb->dev = sheldon_uvc_udev;
        urb->context = NULL;
        urb->pipe = usb_rcvisocpipe(sheldon_uvc_udev, sheldon_uvc_bEndpointAddress);
        urb->transfer_flags = URB_ISO_ASAP | URB_NO_TRANSFER_DMA_MAP;
        urb->interval = 1;
        urb->transfer_buffer = sheldon_uvc_queue.urb_buffer[i];   /* the buffer allocated above */
        urb->transfer_dma = sheldon_uvc_queue.urb_dma[i];         /* its DMA address */
        urb->complete = sheldon_uvc_video_complete;   /* completion handler, called when data arrives */
        urb->number_of_packets = npackets;            /* number of isochronous packets */
        urb->transfer_buffer_length = size;           /* total transfer size */

        for (j = 0; j < npackets; ++j) {
            urb->iso_frame_desc[j].offset = j * psize;   /* where each packet's data is stored */
            urb->iso_frame_desc[j].length = psize;
        }
    }

    return 0;
}

/* Start the video stream
 * Reference: uvc_video_enable(video, 1):
* uvc_commit_video
* uvc_init_video
*/
static int sheldonUV_vidioc_streamon(struct file *file, void *priv, enum v4l2_buf_type i)
{
    int ret;

    /* 1. Send the streaming parameters to the USB camera: which format to use and
     *    which frame (resolution) within that format.
     *    Reference: uvc_set_video_ctrl / uvc_get_video_ctrl
     *    1.1 Build the data packet from a uvc_streaming_control structure
     *        (either filled in by hand, or read back from the device and then modified)
     *    1.2 Send the packet with usb_control_msg
     */

    /* a. try the parameters */
    ret = sheldon_uvc_try_streaming_params(&sheldon_uvc_params);
    printk("sheldon_uvc_try_streaming_params ret = %d\n", ret);

    /* b. read the parameters back */
    ret = sheldon_uvc_get_streaming_params(&sheldon_uvc_params);
    printk("sheldon_uvc_get_streaming_params ret = %d\n", ret);

    /* c. commit the parameters */
    ret = sheldon_uvc_set_streaming_params(&sheldon_uvc_params);
    printk("sheldon_uvc_set_streaming_params ret = %d\n", ret);

    sheldon_uvc_print_streaming_params(&sheldon_uvc_params);

    /* d. Select the alternate setting of the VideoStreaming interface
     *    d.1 determine the required bandwidth from sheldon_uvc_params
     *    d.2 find the altsetting whose endpoint wMaxPacketSize can carry that bandwidth
     */
    /* Determined by hand:
     * bandwidth = sheldon_uvc_params.dwMaxPayloadTransferSize = 64
     * From the output of "lsusb -v -d 0x1e4e:":
     *   wMaxPacketSize    0x0080  1x 128 bytes
     *   bAlternateSetting 6
     */
    usb_set_interface(sheldon_uvc_udev, sheldon_uvc_streaming_intf, sheldon_uvc_streaming_bAlternateSetting);

    /* 2. Allocate and set up the URBs */
    ret = sheldon_uvc_alloc_init_urbs();
    if (ret)
        printk("sheldon_uvc_alloc_init_urbs err : ret = %d\n", ret);

    /* 3. Submit the URBs to start receiving data */
    for (i = 0; i < sheldon_UVC_URBS; ++i) {
        if ((ret = usb_submit_urb(sheldon_uvc_queue.urb[i], GFP_KERNEL)) < 0) {
            printk("Failed to submit URB %u (%d).\n", i, ret);
            sheldon_uvc_uninit_urbs();
            return ret;
        }
    }

    return 0;
}

/* Dequeue a buffer: once data is ready, the application takes the filled buffer off the mainqueue */
static int sheldonUV_vidioc_dqbuf(struct file *file, void *priv, struct v4l2_buffer *v4l2_buf)
{
    /* Once the application sees that data is ready, it takes the buffer off the mainqueue */
    struct sheldon_uvc_buffer *buf;
    int ret = 0;

    if (list_empty(&sheldon_uvc_queue.mainqueue)) {
        ret = -EINVAL;
        goto done;
    }

    buf = list_first_entry(&sheldon_uvc_queue.mainqueue, struct sheldon_uvc_buffer, stream);

    switch (buf->state) {
    case VIDEOBUF_ERROR:
        ret = -EIO;
        /* fall through */
    case VIDEOBUF_DONE:
        buf->state = VIDEOBUF_IDLE;
        break;

    case VIDEOBUF_IDLE:
    case VIDEOBUF_QUEUED:
    case VIDEOBUF_ACTIVE:
    default:
        ret = -EINVAL;
        goto done;
    }

    list_del(&buf->stream);

done:
    return ret;
}

/*
 * A14: the buffers were already mmap'ed, so the application can read the data directly
 * A15: the application calls sheldonUV_vidioc_qbuf again to requeue the buffer
 * A16: poll...
 */

/* A17: stop - turn the video stream off
 * Reference: uvc_video_enable(video, 0)
 */
static int sheldonUV_vidioc_streamoff(struct file *file, void *priv, enum v4l2_buf_type t)
{
    struct urb *urb;
    unsigned int i;

    /* 1. kill the URBs */
    for (i = 0; i < sheldon_UVC_URBS; ++i) {
        if ((urb = sheldon_uvc_queue.urb[i]) == NULL)
            continue;
        usb_kill_urb(urb);
    }

    /* 2. free the URBs */
    sheldon_uvc_uninit_urbs();

    /* 3. switch the VideoStreaming interface back to altsetting 0 */
    usb_set_interface(sheldon_uvc_udev, sheldon_uvc_streaming_intf, 0);

    return 0;
}

/* [The following functions implement control (property) handling] */

/*
*Extract the bit string specified by mapping->offset and mapping->size
* from the little-endian data stored at 'data' and return the result as
* a signed 32bit integer. Sign extension will be performed if the mapping
* references a signed data type.
*/
static __s32 sheldonUV_get_le_value(const __u8 *data)
{
    int bits = 16;      /* brightness is a 2-byte (16-bit) value */
    int offset = 0;
    __s32 value = 0;
    __u8 mask;

    data += offset / 8;
    offset &= 7;
    mask = ((1LL << bits) - 1) << offset;

    for (; bits > 0; data++) {
        __u8 byte = *data & mask;
        value |= offset > 0 ? (byte >> offset) : (byte << (-offset));
        bits -= 8 - (offset > 0 ? offset : 0);
        offset -= 8;
        mask = (1 << bits) - 1;
    }

    /* Sign-extend the value if needed. */
    value |= -(value & (1 << (16 - 1)));

    return value;
}

/* Set the bit string specified by mapping->offset and mapping->size
* in the little-endian data stored at 'data' to the value 'value'.
*/
static void sheldonUV_set_le_value(__s32 value, __u8 *data)
{
    int bits = 16;      /* brightness is a 2-byte (16-bit) value */
    int offset = 0;
    __u8 mask;

    data += offset / 8;
    offset &= 7;

    for (; bits > 0; data++) {
        mask = ((1LL << bits) - 1) << offset;
        *data = (*data & ~mask) | ((value << offset) & mask);
        value >>= offset ? offset : 8;
        bits -= 8 - offset;
        offset = 0;
    }
}

/* Reference: uvc_query_v4l2_ctrl - the VIDIOC_QUERYCTRL ioctl asks whether a control is supported */
int sheldonUV_vidioc_queryctrl (struct file *file, void *fh,
struct v4l2_queryctrl *ctrl)
{
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;
    int ret;
    u8 data[2];

    if (ctrl->id != V4L2_CID_BRIGHTNESS)
        return -EINVAL;

    memset(ctrl, 0, sizeof *ctrl);
    ctrl->id = V4L2_CID_BRIGHTNESS;
    ctrl->type = V4L2_CTRL_TYPE_INTEGER;
    strcpy(ctrl->name, "sheldonUV_BRIGHTNESS");
    ctrl->flags = 0;

    pipe = usb_rcvctrlpipe(sheldon_uvc_udev, 0);
    type |= USB_DIR_IN;

    /* Issue USB control transfers to read these values from the device */
    ret = usb_control_msg(sheldon_uvc_udev, pipe, GET_MIN, type, PU_BRIGHTNESS_CONTROL << 8,
                          ProcessingUnitID << 8 | sheldon_uvc_control_intf, data, 2, 5000);
    if (ret != 2)
        return -EIO;
    ctrl->minimum = sheldonUV_get_le_value(data);   /* Note signedness */

    ret = usb_control_msg(sheldon_uvc_udev, pipe, GET_MAX, type, PU_BRIGHTNESS_CONTROL << 8,
                          ProcessingUnitID << 8 | sheldon_uvc_control_intf, data, 2, 5000);
    if (ret != 2)
        return -EIO;
    ctrl->maximum = sheldonUV_get_le_value(data);   /* Note signedness */

    ret = usb_control_msg(sheldon_uvc_udev, pipe, GET_RES, type, PU_BRIGHTNESS_CONTROL << 8,
                          ProcessingUnitID << 8 | sheldon_uvc_control_intf, data, 2, 5000);
    if (ret != 2)
        return -EIO;
    ctrl->step = sheldonUV_get_le_value(data);      /* Note signedness */

    ret = usb_control_msg(sheldon_uvc_udev, pipe, GET_DEF, type, PU_BRIGHTNESS_CONTROL << 8,
                          ProcessingUnitID << 8 | sheldon_uvc_control_intf, data, 2, 5000);
    if (ret != 2)
        return -EIO;
    ctrl->default_value = sheldonUV_get_le_value(data);   /* Note signedness */

    printk("Brightness: min = %d, max = %d, step = %d, default = %d\n",
           ctrl->minimum, ctrl->maximum, ctrl->step, ctrl->default_value);

    return 0;
}

/* Reference: uvc_ctrl_get - read a control's current value */
int sheldonUV_vidioc_g_ctrl (struct file *file, void *fh,
struct v4l2_control *ctrl)
{
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;
    int ret;
    u8 data[2];

    if (ctrl->id != V4L2_CID_BRIGHTNESS)
        return -EINVAL;

    pipe = usb_rcvctrlpipe(sheldon_uvc_udev, 0);
    type |= USB_DIR_IN;

    ret = usb_control_msg(sheldon_uvc_udev, pipe, GET_CUR, type, PU_BRIGHTNESS_CONTROL << 8,
                          ProcessingUnitID << 8 | sheldon_uvc_control_intf, data, 2, 5000);
    if (ret != 2)
        return -EIO;
    ctrl->value = sheldonUV_get_le_value(data);   /* Note signedness */

    return 0;
}

/* Reference: uvc_ctrl_set / uvc_ctrl_commit - set a control's value */
int sheldonUV_vidioc_s_ctrl (struct file *file, void *fh,
struct v4l2_control *ctrl)
{
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;
    int ret;
    u8 data[2];

    if (ctrl->id != V4L2_CID_BRIGHTNESS)
        return -EINVAL;

    sheldonUV_set_le_value(ctrl->value, data);

    pipe = usb_sndctrlpipe(sheldon_uvc_udev, 0);
    type |= USB_DIR_OUT;

    ret = usb_control_msg(sheldon_uvc_udev, pipe, SET_CUR, type, PU_BRIGHTNESS_CONTROL << 8,
                          ProcessingUnitID << 8 | sheldon_uvc_control_intf, data, 2, 5000);
    if (ret != 2)
        return -EIO;

    return 0;
}

static const struct v4l2_ioctl_ops sheldonUV_ioctl_ops = {
    /* identifies this as a camera (capture) device */
    .vidioc_querycap = sheldonUV_vidioc_querycap,

    /* enumerate / get / try / set the capture data format */
    .vidioc_enum_fmt_vid_cap = sheldonUV_vidioc_enum_fmt_vid_cap,
    .vidioc_g_fmt_vid_cap = sheldonUV_vidioc_get_fmt_vid_cap,
    .vidioc_try_fmt_vid_cap = sheldonUV_vidioc_try_fmt_vid_cap,
    .vidioc_s_fmt_vid_cap = sheldonUV_vidioc_set_fmt_vid_cap,

    /* buffer handling: request / query / queue / dequeue */
    .vidioc_reqbufs = sheldonUV_vidioc_reqbufs,
    .vidioc_querybuf = sheldonUV_vidioc_querybuf,
    .vidioc_qbuf = sheldonUV_vidioc_qbuf,
    .vidioc_dqbuf = sheldonUV_vidioc_dqbuf,

    /* query / get / set controls */
    .vidioc_queryctrl = sheldonUV_vidioc_queryctrl,
    .vidioc_g_ctrl = sheldonUV_vidioc_g_ctrl,
    .vidioc_s_ctrl = sheldonUV_vidioc_s_ctrl,

    /* start / stop streaming */
    .vidioc_streamon = sheldonUV_vidioc_streamon,
    .vidioc_streamoff = sheldonUV_vidioc_streamoff,
};

static int sheldonUV_open(struct file *file)
{
    /* Queue operation 2: initialization (not used in this simplified driver)
     * videobuf_queue_vmalloc_init(&sheldonUV_vb_vidqueue, &sheldonUV_video_qops,
     *     NULL, &sheldonUV_queue_slock, V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_FIELD_INTERLACED,
     *     sizeof(struct videobuf_buffer), NULL);  // the second-to-last argument is the buffer header size
     * sheldonUV_timer.expires = jiffies + 1;
     * add_timer(&sheldonUV_timer);
     */
    return 0;
}

static void sheldon_uvc_vm_open(struct vm_area_struct *vma)
{
    struct sheldon_uvc_buffer *buffer = vma->vm_private_data;
    buffer->vma_use_count++;
}

static void sheldon_uvc_vm_close(struct vm_area_struct *vma)
{
    struct sheldon_uvc_buffer *buffer = vma->vm_private_data;
    buffer->vma_use_count--;
}

static struct vm_operations_struct sheldon_uvc_vm_ops = {
    .open = sheldon_uvc_vm_open,
    .close = sheldon_uvc_vm_close,
};

/* Map the buffers into the application's address space so the app can access them directly
 * Reference: uvc_v4l2_mmap
*/
static int sheldonUV_mmap(struct file *file, struct vm_area_struct *vma)
{
    struct sheldon_uvc_buffer *buffer;
    struct page *page;
    unsigned long addr, start, size;
    unsigned int i;
    int ret = 0;

    start = vma->vm_start;
    size = vma->vm_end - vma->vm_start;

    /* When the application calls mmap it passes an offset;
     * use that offset to find the corresponding buffer.
     */
    for (i = 0; i < sheldon_uvc_queue.count; ++i) {
        buffer = &sheldon_uvc_queue.buffer[i];
        if ((buffer->buf.m.offset >> PAGE_SHIFT) == vma->vm_pgoff)
            break;
    }

    if (i == sheldon_uvc_queue.count || size != sheldon_uvc_queue.buf_size) {
        ret = -EINVAL;
        goto done;
    }

    /*
     * VM_IO marks the area as being an mmaped region for I/O to a
     * device. It also prevents the region from being core dumped.
     */
    vma->vm_flags |= VM_IO;

    /* Find the page structure for each page of the buffer from its kernel virtual address */
    addr = (unsigned long)sheldon_uvc_queue.mem + buffer->buf.m.offset;
    while (size > 0) {
        page = vmalloc_to_page((void *)addr);

        /* map this page at the application's virtual address */
        if ((ret = vm_insert_page(vma, start, page)) < 0)
            goto done;

        start += PAGE_SIZE;
        addr += PAGE_SIZE;
        size -= PAGE_SIZE;
    }

    vma->vm_ops = &sheldon_uvc_vm_ops;
    vma->vm_private_data = buffer;
    sheldon_uvc_vm_open(vma);

done:
    return ret;
}

/* The application calls poll/select to find out whether buffer data is ready */
static unsigned int sheldonUV_poll(struct file *file, struct poll_table_struct *wait)
{
    struct sheldon_uvc_buffer *buf;
    unsigned int mask = 0;

    /* Look at the first buffer on the mainqueue;
     * check its state and, if data is not ready, sleep on its wait queue.
     */
    if (list_empty(&sheldon_uvc_queue.mainqueue)) {
        mask |= POLLERR;
        goto done;
    }

    buf = list_first_entry(&sheldon_uvc_queue.mainqueue, struct sheldon_uvc_buffer, stream);

    poll_wait(file, &buf->wait, wait);
    if (buf->state == VIDEOBUF_DONE ||
        buf->state == VIDEOBUF_ERROR)
        mask |= POLLIN | POLLRDNORM;

done:
    return mask;
}

static int sheldonUV_close(struct file *file)
{
    //del_timer(&sheldonUV_timer);
    //videobuf_stop(&sheldonUV_vb_vidqueue);
    //videobuf_mmap_free(&sheldonUV_vb_vidqueue);

    return 0;
}

static const struct v4l2_file_operations sheldonUV_fops = {
    .owner = THIS_MODULE,
    .open = sheldonUV_open,
    .release = sheldonUV_close,
    .mmap = sheldonUV_mmap,
    .ioctl = video_ioctl2,   /* V4L2 ioctl handler -> sheldonUV_ioctl_ops */
    .poll = sheldonUV_poll,
};

static void sheldonUV_release(struct video_device *vdev)
{
}

/* probe handler, called when a matching USB device is found */
static int sheldon_uvc_probe(struct usb_interface *intf, const struct usb_device_id *id)
{
    static int cnt = 0;

    /* Get the usb_device from the interface; it contains the device descriptor */
    struct usb_device *dev = interface_to_usbdev(intf);

    /* device descriptor */
    struct usb_device_descriptor *descriptor = &dev->descriptor;

    /* configuration descriptor information from the usb_device */
    struct usb_host_config *host_config;
    struct usb_config_descriptor *config;

    /* interface association descriptor (IAD) */
    struct usb_interface_assoc_descriptor *assoc_desc;

    /* interface descriptor */
    struct usb_interface_descriptor *interface;

    /* endpoint descriptor */
    struct usb_endpoint_descriptor *endpoint;

    //struct usb_interface_descriptor *idesc;

    int i, j, k, l, m;
    unsigned char *buffer;
    int buflen;
    int desc_len;
    //int desc_cnt;

    sheldon_uvc_udev = dev;

    printk("sheldon_uvc_probe : cnt = %d\n", cnt++);

    /* The driver is probed once for the VideoControl interface and once more for
     * the VideoStreaming interface. */
    if (cnt == 1)
    {
        sheldon_uvc_control_intf = intf->cur_altsetting->desc.bInterfaceNumber;
    }
    else if (cnt == 2)
    {
        sheldon_uvc_streaming_intf = intf->cur_altsetting->desc.bInterfaceNumber;
    }

    if (cnt == 2)
    {
        /* 1. Allocate a video_device structure */
        sheldon_uvc_vdev = video_device_alloc();

        /* 2. Set it up */
        /* 2.1 */
        sheldon_uvc_vdev->release = sheldonUV_release;
        /* 2.2 */
        sheldon_uvc_vdev->fops = &sheldonUV_fops;
        /* 2.3 */
        sheldon_uvc_vdev->ioctl_ops = &sheldonUV_ioctl_ops;

        /* 3. Register it */
        video_register_device(sheldon_uvc_vdev, VFL_TYPE_GRABBER, -1);
    }

    return 0;
}

/* disconnect handler, called when the device is unplugged */
static void sheldon_uvc_disconnect(struct usb_interface *intf)
{
    static int cnt = 0;

    printk("sheldon_uvc_disconnect : cnt = %d\n", cnt++);

    /* Only unregister once, on the first disconnect call */
    if (cnt == 1)
    {
        video_unregister_device(sheldon_uvc_vdev);
        video_device_release(sheldon_uvc_vdev);
    }
}

/* table of supported devices */
static struct usb_device_id sheldon_uvc_ids[] = {
    /* Generic USB Video Class */
    { USB_INTERFACE_INFO(USB_CLASS_VIDEO, 1, 0) },  /* 1 - VideoControl interface */
    { USB_INTERFACE_INFO(USB_CLASS_VIDEO, 2, 0) },  /* 2 - VideoStreaming interface (grouped under interface 1) */
    {}
};

/* 1. Allocate a usb_driver structure */
/* 2. Fill it in */
static struct usb_driver sheldon_uvc_driver = {
    .name = "sheldon_UV",
    .probe = sheldon_uvc_probe,
    .disconnect = sheldon_uvc_disconnect,
    .id_table = sheldon_uvc_ids,
};

static int sheldon_uvc_init(void)
{
    /* 3. Register the driver */
    printk("sheldon_uvc_init ~\n");
    usb_register(&sheldon_uvc_driver);
    return 0;
}

static void sheldon_uvc_exit(void)
{
    printk("sheldon_uvc_exit ~\n");
    usb_deregister(&sheldon_uvc_driver);
}

module_init(sheldon_uvc_init);
module_exit(sheldon_uvc_exit);
MODULE_LICENSE("GPL");