* NEW: Initial support for V4L2 devices using the V4L2 interface
* NEW: Support for devices with YUYV and YUV420[p] outputs
* BUG: V4L2 streams not yet implemented
* BUG: V4L2 tuner not yet implemented
* OPT: V4L driver auto-selects V1/V2 depending on device capabilities
  Version 2 will be used if possible, but V1 can be forced in VideoDevice()



git-svn-id: svn://localhost/gambas/trunk@1805 867c0c6c-44f3-4631-809d-bfa615b0a4ec
This commit is contained in:
Oddjobz 2009-01-20 15:38:08 +00:00
parent 6700b4a603
commit 492bd2a0c8
5 changed files with 1540 additions and 100 deletions

232
gb.v4l/src/CConverters.c Normal file
View file

@ -0,0 +1,232 @@
/*
Collection of conversion routines from various sources ..
All provided under GPL or "as-is" licenses.
*/
#include <stdio.h>
#include <stdlib.h>
/*
** convert_rgb_to_yuv_pixel:
**  Convert one RGB pixel to YUV and pack the result as
**  0x00VVUUYY (Y in bits 0-7, U in bits 8-15, V in bits 16-23).
**
**  r, g, b : colour components, expected range 0-255
**  returns the packed YUV value
**
**  FIX: the original stored y/u/v through an unsigned char* alias of
**  the return value; that byte layout only matches the &0xff / >>8 /
**  >>16 extraction in convert_rgb_to_yuv_buffer() on little-endian
**  hosts. Packing with explicit shifts is identical on little-endian
**  and correct on big-endian machines too.
*/
int convert_rgb_to_yuv_pixel(int r, int g, int b)
{
	int y, u, v;

	/* BT.601-style coefficients; the -128/+128 offsets cancel for Y
	   (its coefficients sum to 1.0) and centre U/V around 128. */
	y = 0.299 * (r - 128) + 0.587 * (g - 128) + 0.114 * (b - 128) + 128;
	u = - 0.147 * (r - 128) - 0.289 * (g - 128) + 0.436 * (b - 128) + 128;
	v = 0.615 * (r - 128) - 0.515 * (g - 128) - 0.100 * (b - 128) + 128;

	/* Clamp to the 8-bit range */
	if (y > 255) y = 255;
	if (u > 255) u = 255;
	if (v > 255) v = 255;
	if (y < 0) y = 0;
	if (u < 0) u = 0;
	if (v < 0) v = 0;

	/* Pack Y|U|V into the low three bytes, endian-independent */
	return y | (u << 8) | (v << 16);
}
/*
** convert_rgb_to_yuv_buffer:
**  Convert a packed 24-bit RGB buffer into YUYV (YUV 4:2:2).
**  Pixels are processed two at a time: each pair produces the four
**  bytes Y0 U Y1 V, with the U and V of the two pixels averaged.
**  Always returns 0.
*/
int convert_rgb_to_yuv_buffer(unsigned char *rgb, unsigned char *yuv, unsigned int width, unsigned int height)
{
	unsigned int src;
	unsigned int dst = 0;
	unsigned int packed;
	int ya, ua, va;
	int yb, ub, vb;
	const unsigned int total = width * height * 3;

	for (src = 0; src < total; src += 6) {
		/* First pixel of the pair */
		packed = convert_rgb_to_yuv_pixel(rgb[src], rgb[src + 1], rgb[src + 2]);
		ya = packed & 0x000000ff;
		ua = (packed >> 8) & 0xff;
		va = (packed >> 16) & 0xff;
		/* Second pixel of the pair */
		packed = convert_rgb_to_yuv_pixel(rgb[src + 3], rgb[src + 4], rgb[src + 5]);
		yb = packed & 0x000000ff;
		ub = (packed >> 8) & 0xff;
		vb = (packed >> 16) & 0xff;
		/* Emit Y0 U Y1 V, averaging the chroma of both pixels */
		yuv[dst++] = ya;
		yuv[dst++] = (ua + ub) / 2;
		yuv[dst++] = yb;
		yuv[dst++] = (va + vb) / 2;
	}
	return 0;
}
/*
** convert_yuv_to_rgb_pixel:
**  Convert one YUV sample to RGB and pack the result as
**  0x00BBGGRR (R in bits 0-7, G in bits 8-15, B in bits 16-23).
**
**  y, u, v : components, expected range 0-255
**  returns the packed RGB value
**
**  Each channel is scaled by 220/256 before packing (slightly
**  compressed output range, kept from the original code).
**
**  FIX: the original stored the channels through an unsigned char*
**  alias of the return value, a little-endian-only layout; the
**  callers unpack with &0xff / >>8 / >>16, so explicit shift
**  packing is identical on little-endian and portable elsewhere.
*/
int convert_yuv_to_rgb_pixel(int y, int u, int v)
{
	int r, g, b;

	r = y + (1.370705 * (v - 128));
	g = y - (0.698001 * (v - 128)) - (0.337633 * (u - 128));
	b = y + (1.732446 * (u - 128));

	/* Clamp to the 8-bit range */
	if (r > 255) r = 255;
	if (g > 255) g = 255;
	if (b > 255) b = 255;
	if (r < 0) r = 0;
	if (g < 0) g = 0;
	if (b < 0) b = 0;

	/* Scale each channel by 220/256 and pack, endian-independent */
	return (r * 220 / 256) | ((g * 220 / 256) << 8) | ((b * 220 / 256) << 16);
}
/*
** convert_yuv_to_rgb_buffer:
**  Convert a YUYV (YUV 4:2:2) buffer into packed 24-bit RGB.
**  Each 4-byte group Y0 U Y1 V yields two RGB pixels sharing the
**  same chroma pair. Always returns 0.
**
**  FIX: the original packed the four source bytes into an int with
**  "yuv[in + 3] << 24"; the byte is promoted to (signed) int, so the
**  shift overflows when the byte is >= 0x80 - undefined behavior in
**  C. The intermediate packing was also redundant: the bytes are now
**  read directly, which extracts exactly the same y0/u/y1/v values.
*/
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
	unsigned int in, out = 0;
	unsigned int pixel32;
	int y0, u, y1, v;

	for (in = 0; in < width * height * 2; in += 4) {
		/* YUYV layout: Y0 U Y1 V */
		y0 = yuv[in + 0];
		u  = yuv[in + 1];
		y1 = yuv[in + 2];
		v  = yuv[in + 3];

		/* First pixel */
		pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
		rgb[out++] = pixel32 & 0x000000ff;
		rgb[out++] = (pixel32 & 0x0000ff00) >> 8;
		rgb[out++] = (pixel32 & 0x00ff0000) >> 16;

		/* Second pixel, same U/V */
		pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
		rgb[out++] = pixel32 & 0x000000ff;
		rgb[out++] = (pixel32 & 0x0000ff00) >> 8;
		rgb[out++] = (pixel32 & 0x00ff0000) >> 16;
	}
	return 0;
}
static inline void move_420_block (int yTL, int yTR, int yBL, int yBR, int u,
int v, int rowPixels, unsigned char *rgb,
int bits);
#define LIMIT(x) ((x)>0xffffff?0xff: ((x)<=0xffff?0:((x)>>16)))
/*
** yuv420p_to_rgb:
**  Expand a planar YUV 4:2:0 image into a packed RGB buffer.
**
**  image  : source - Y plane followed by the U and V planes
**  image2 : destination buffer
**  x, y   : frame width and height in pixels
**  z      : destination bytes per pixel (3 = 24 bit, 2 = 16 bit)
**
**  Works on 2x2 pixel quads: four luma samples share one U and one
**  V sample; move_420_block() writes each quad (two rows at once).
*/
void
yuv420p_to_rgb (unsigned char *image, unsigned char *image2, int x, int y, int z)
{
	const int pixels = x * y;
	const int bpp = z;                   /* destination bytes per pixel */
	int row, col;
	int yTL, yTR, yBL, yBR, cu, cv;
	unsigned char *py = image;           /* Y plane */
	unsigned char *pu = py + pixels;     /* U plane (quarter size) */
	unsigned char *pv = pu + pixels / 4; /* V plane (quarter size) */

	for (row = 0; row <= y - 2; row += 2) {
		for (col = 0; col <= x - 2; col += 2) {
			/* the four luma samples of this 2x2 quad */
			yTL = py[0];
			yTR = py[1];
			yBL = py[x];
			yBR = py[x + 1];
			/* one shared chroma pair, centred on 0 */
			cu = (*pu++) - 128;
			cv = (*pv++) - 128;
			move_420_block (yTL, yTR, yBL, yBR, cu, cv, x, image2, bpp * 8);
			py += 2;
			image2 += 2 * bpp;
		}
		py += x;           /* skip the quad's second source row */
		image2 += x * bpp; /* and the matching destination row */
	}
}
#ifndef LIMIT
#define LIMIT(x) ((x)>0xffffff?0xff: ((x)<=0xffff?0:((x)>>16)))
#endif
/*
** move_420_block:
**  Write one 2x2 quad of pixels into the RGB destination.
**
**  yTL..yBR  : the four luma samples (top-left .. bottom-right)
**  u, v      : shared chroma, already centred on 0
**  rowPixels : destination width in pixels (to reach the next row)
**  rgb       : destination, positioned at the quad's top-left pixel
**  bits      : 24 for byte triples, 16 for 5-6-5 packed output
**
**  Fixed-point YUV->RGB: the scale constants are the conversion
**  coefficients times 65536; LIMIT() clamps and shifts back to 8
**  bits. The r/b assignments are deliberately swapped relative to
**  the usual formulas - the destination byte order is B,G,R.
**
**  FIX: removed a leftover "if (1) { ... } else { ... }" whose else
**  branch (the unswapped assignments) was unreachable dead code.
*/
void move_420_block (int yTL, int yTR, int yBL, int yBR, int u, int v,
	int rowPixels, unsigned char *rgb, int bits)
{
	const int rvScale = 91881;
	const int guScale = -22553;
	const int gvScale = -46801;
	const int buScale = 116129;
	const int yScale = 65536;
	int r, g, b;

	g = guScale * u + gvScale * v;
	/* r/b intentionally swapped: output is written in BGR order */
	r = buScale * u;
	b = rvScale * v;

	yTL *= yScale;
	yTR *= yScale;
	yBL *= yScale;
	yBR *= yScale;

	if (bits == 24) {
		/* Write out top two pixels */
		rgb[0] = LIMIT (b + yTL);
		rgb[1] = LIMIT (g + yTL);
		rgb[2] = LIMIT (r + yTL);
		rgb[3] = LIMIT (b + yTR);
		rgb[4] = LIMIT (g + yTR);
		rgb[5] = LIMIT (r + yTR);
		/* Skip down to next line to write out bottom two pixels */
		rgb += 3 * rowPixels;
		rgb[0] = LIMIT (b + yBL);
		rgb[1] = LIMIT (g + yBL);
		rgb[2] = LIMIT (r + yBL);
		rgb[3] = LIMIT (b + yBR);
		rgb[4] = LIMIT (g + yBR);
		rgb[5] = LIMIT (r + yBR);
	} else if (bits == 16) {
		/* Write out top two pixels (5-6-5, low byte first) */
		rgb[0] = ((LIMIT (b + yTL) >> 3) & 0x1F)
			| ((LIMIT (g + yTL) << 3) & 0xE0);
		rgb[1] = ((LIMIT (g + yTL) >> 5) & 0x07)
			| (LIMIT (r + yTL) & 0xF8);
		rgb[2] = ((LIMIT (b + yTR) >> 3) & 0x1F)
			| ((LIMIT (g + yTR) << 3) & 0xE0);
		rgb[3] = ((LIMIT (g + yTR) >> 5) & 0x07)
			| (LIMIT (r + yTR) & 0xF8);
		/* Skip down to next line to write out bottom two pixels */
		rgb += 2 * rowPixels;
		rgb[0] = ((LIMIT (b + yBL) >> 3) & 0x1F)
			| ((LIMIT (g + yBL) << 3) & 0xE0);
		rgb[1] = ((LIMIT (g + yBL) >> 5) & 0x07)
			| (LIMIT (r + yBL) & 0xF8);
		rgb[2] = ((LIMIT (b + yBR) >> 3) & 0x1F)
			| ((LIMIT (g + yBR) << 3) & 0xE0);
		rgb[3] = ((LIMIT (g + yBR) >> 5) & 0x07)
			| (LIMIT (r + yBR) & 0xF8);
	}
}

View file

@ -88,6 +88,8 @@ GB_STREAM_DESC VideoStream = {
handle: Video_stream_handle
};
extern int gv4l2_debug_mode; // ++
/***********************************************************************************
Camera setup
@ -143,8 +145,12 @@ int vd_get_capabilities(video_device_t *vd)
}
int vd_setup_capture_mode(video_device_t *vd)
// -- int vd_setup_capture_mode(video_device_t *vd)
int vd_setup_capture_mode(CWEBCAM * _object) // ++
{
video_device_t *vd = DEVICE;
if (!vd_get_capabilities(vd)) return 0;
// See if we can use mmap (to avoid copying data around)
@ -174,7 +180,9 @@ int vd_setup_capture_mode(video_device_t *vd)
}
if (vd->frame_buffer) GB.Free(POINTER(&vd->frame_buffer));
GB.Alloc (POINTER(&vd->frame_buffer),vd->buffer_size);
if (THIS->frame) GB.Free(POINTER(&THIS->frame)); // ++
GB.Alloc(POINTER(&vd->frame_buffer),vd->buffer_size);
GB.Alloc(POINTER(&THIS->frame),vd->height * vd->width * 4); // ++
return 1;
}
@ -190,6 +198,12 @@ int vd_setup_capture_mode(video_device_t *vd)
vd->vmmap.frame = 0; // Start at frame 0
vd->vmmap.width = vd->width;
vd->vmmap.height = vd->height;
if (THIS->frame) GB.Free(POINTER(&THIS->frame)); // ++
GB.Alloc(&THIS->frame, vd->height * vd->width * 4); // ++
ioctl(vd->dev, VIDIOCGPICT, &vd->videopict); //++ Recover camera palette
vd->vmmap.format = vd->videopict.palette; //++ Save for future ref
return 1;
}
@ -224,7 +238,7 @@ void put_image_jpeg (char *image, int width, int height, int quality, int frame,
struct jpeg_error_mgr jerr;
char *line;
line = malloc (width * 3);
GB.Alloc( POINTER(&line) ,width * 3);
if (!line)
return;
cjpeg.err = jpeg_std_error(&jerr);
@ -254,7 +268,7 @@ void put_image_jpeg (char *image, int width, int height, int quality, int frame,
}
jpeg_finish_compress (&cjpeg);
jpeg_destroy_compress (&cjpeg);
free (line);
GB.Free( POINTER(&line) );
}
@ -358,33 +372,39 @@ void put_image_ppm (char *image, int width, int height, int binary, int frame,FI
unsigned char * vd_get_image(video_device_t *vd)
//unsigned char * vd_get_image(video_device_t *vd)
unsigned char * vd_get_image(CWEBCAM * _object)
{
int len;
video_device_t *vd;
vd = DEVICE;
if (vd->use_mmap) {
if (!vd->capturing) {
int i;
// Queue requests to capture successive frames
for (i = 0; i < vd->vmbuf.frames; ++i) {
vd->vmmap.frame = i;
if (vd_ioctl(vd, VIDIOCMCAPTURE, &vd->vmmap)) return 0;
if(vd_ioctl(vd, VIDIOCMCAPTURE, &vd->vmmap))
return 0;
}
// And start reading from zero
vd->vmmap.frame = 0;
vd->capturing = 1;
}
// VIDIOCSYNC causes the driver to block until the specified
// frame is completely received
if (ioctl(vd->dev, VIDIOCSYNC, &vd->vmmap.frame)) return 0;
gv4l1_process_image (THIS,vd->frame_buffer + vd->vmbuf.offsets[vd->vmmap.frame]);
//vd_post_process(vd,vd->frame_buffer + vd->vmbuf.offsets[vd->vmmap.frame]);
return THIS->frame;
// Return the buffer, cause it should contain an image
return vd->frame_buffer + vd->vmbuf.offsets[vd->vmmap.frame];
//return vd->frame_buffer + vd->vmbuf.offsets[vd->vmmap.frame];
}
// Otherwise, we have to read the right number of bytes
@ -426,7 +446,8 @@ int fill_buffer(void *_object)
char *buf;
int w,h;
buf=(char*)vd_get_image(DEVICE);
// -- buf=(char*)vd_get_image(DEVICE);
buf=(char*)vd_get_image(THIS); // ++
if (!buf) return -1;
w=DEVICE->vmmap.width;
h=DEVICE->vmmap.height;
@ -545,108 +566,287 @@ int Video_stream_handle(GB_STREAM *stream)
************************************************************************************/
int CWEBCAM_check(void *_object)
{
if (!DEVICE) return TRUE;
//if((!DEVICE)&&(!THIS->is_v4l2)) return TRUE;
if(!THIS->device) return TRUE; // ++ V4L2
return FALSE;
}
BEGIN_PROPERTY(CWEBCAM_bright)
BEGIN_PROPERTY(CWEBCAM_contrast)
vd_ioctl (DEVICE, VIDIOCGPICT, &DEVICE->videopict);
if (READ_PROPERTY)
{
GB.ReturnInteger(DEVICE->videopict.brightness);
if( !THIS->is_v4l2 ) {
vd_ioctl (DEVICE, VIDIOCGPICT, &DEVICE->videopict);
if (READ_PROPERTY)
{
GB.ReturnInteger(DEVICE->videopict.contrast);
return;
}
DEVICE->videopict.contrast=VPROP(GB_INTEGER);
vd_ioctl (DEVICE, VIDIOCSPICT, &DEVICE->videopict);
return;
}
DEVICE->videopict.brightness=VPROP(GB_INTEGER);
vd_ioctl (DEVICE, VIDIOCSPICT, &DEVICE->videopict);
if (READ_PROPERTY)
GB.ReturnInteger(gv4l2_contrast(THIS,-1));
else gv4l2_contrast(THIS,VPROP(GB_INTEGER));
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_contrast)
BEGIN_PROPERTY(CWEBCAM_contrast_max)
vd_ioctl (DEVICE, VIDIOCGPICT, &DEVICE->videopict);
if (READ_PROPERTY)
{
GB.ReturnInteger(DEVICE->videopict.contrast);
return;
}
if( !THIS->is_v4l2 )
GB.ReturnInteger(65535);
else GB.ReturnInteger(THIS->contrast_max);
DEVICE->videopict.contrast=VPROP(GB_INTEGER);
vd_ioctl (DEVICE, VIDIOCSPICT, &DEVICE->videopict);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_contrast_min)
if( !THIS->is_v4l2 )
GB.ReturnInteger(65535);
else GB.ReturnInteger(THIS->contrast_min);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_colour)
vd_ioctl (DEVICE, VIDIOCGPICT, &DEVICE->videopict);
if (READ_PROPERTY)
{
GB.ReturnInteger(DEVICE->videopict.colour);
if( !THIS->is_v4l2 ) {
vd_ioctl (DEVICE, VIDIOCGPICT, &DEVICE->videopict);
if (READ_PROPERTY)
{
GB.ReturnInteger(DEVICE->videopict.colour);
return;
}
DEVICE->videopict.colour=VPROP(GB_INTEGER);
vd_ioctl (DEVICE, VIDIOCSPICT, &DEVICE->videopict);
return;
}
if (READ_PROPERTY)
GB.ReturnInteger(gv4l2_color(THIS,-1));
else gv4l2_color(THIS,VPROP(GB_INTEGER));
DEVICE->videopict.colour=VPROP(GB_INTEGER);
vd_ioctl (DEVICE, VIDIOCSPICT, &DEVICE->videopict);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_color_max)
if( !THIS->is_v4l2 )
GB.ReturnInteger(65535);
else GB.ReturnInteger(THIS->color_max);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_color_min)
if( !THIS->is_v4l2 )
GB.ReturnInteger(65535);
else GB.ReturnInteger(THIS->color_min);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_whiteness)
vd_ioctl (DEVICE, VIDIOCGPICT, &DEVICE->videopict);
if (READ_PROPERTY)
{
GB.ReturnInteger(DEVICE->videopict.whiteness>>8);
if( !THIS->is_v4l2 ) {
vd_ioctl (DEVICE, VIDIOCGPICT, &DEVICE->videopict);
if (READ_PROPERTY)
{
GB.ReturnInteger(DEVICE->videopict.whiteness>>8);
return;
}
DEVICE->videopict.whiteness=VPROP(GB_INTEGER);
vd_ioctl (DEVICE, VIDIOCSPICT, &DEVICE->videopict);
return;
}
if (READ_PROPERTY)
GB.ReturnInteger(gv4l2_whiteness(THIS,-1));
else gv4l2_whiteness(THIS,VPROP(GB_INTEGER));
DEVICE->videopict.whiteness=VPROP(GB_INTEGER);
vd_ioctl (DEVICE, VIDIOCSPICT, &DEVICE->videopict);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_whiteness_max)
if( !THIS->is_v4l2 )
GB.ReturnInteger(65535);
else GB.ReturnInteger(THIS->whiteness_max);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_whiteness_min)
if( !THIS->is_v4l2 )
GB.ReturnInteger(65535);
else GB.ReturnInteger(THIS->whiteness_min);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_hue)
vd_ioctl (DEVICE, VIDIOCGPICT, &DEVICE->videopict);
if (READ_PROPERTY)
{
GB.ReturnInteger(DEVICE->videopict.hue>>8);
if( !THIS->is_v4l2 ) {
vd_ioctl (DEVICE, VIDIOCGPICT, &DEVICE->videopict);
if (READ_PROPERTY)
{
GB.ReturnInteger(DEVICE->videopict.hue>>8);
return;
}
DEVICE->videopict.hue=VPROP(GB_INTEGER);
vd_ioctl (DEVICE, VIDIOCSPICT, &DEVICE->videopict);
return;
}
DEVICE->videopict.hue=VPROP(GB_INTEGER);
vd_ioctl (DEVICE, VIDIOCSPICT, &DEVICE->videopict);
if (READ_PROPERTY)
GB.ReturnInteger(gv4l2_hue(THIS,-1));
else gv4l2_hue(THIS,VPROP(GB_INTEGER));
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_hue_max)
if( !THIS->is_v4l2 )
GB.ReturnInteger(65535);
else GB.ReturnInteger(THIS->hue_max);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_hue_min)
if( !THIS->is_v4l2 )
GB.ReturnInteger(65535);
else GB.ReturnInteger(THIS->hue_min);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_bright)
if( !THIS->is_v4l2 ) {
vd_ioctl (DEVICE, VIDIOCGPICT, &DEVICE->videopict);
if (READ_PROPERTY)
{
GB.ReturnInteger(DEVICE->videopict.brightness);
return;
}
DEVICE->videopict.brightness=VPROP(GB_INTEGER);
vd_ioctl (DEVICE, VIDIOCSPICT, &DEVICE->videopict);
return;
}
if (READ_PROPERTY)
GB.ReturnInteger(gv4l2_brightness(THIS,-1));
else gv4l2_brightness(THIS,VPROP(GB_INTEGER));
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_bright_max)
if( !THIS->is_v4l2 )
GB.ReturnInteger(65535);
else GB.ReturnInteger(THIS->bright_max);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_bright_min)
if( !THIS->is_v4l2 )
GB.ReturnInteger(65535);
else GB.ReturnInteger(THIS->bright_min);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_width)
GB.ReturnInteger(DEVICE->width);
if( THIS->is_v4l2 ) // ++
GB.ReturnInteger(THIS->fmt.fmt.pix.width);
else GB.ReturnInteger(DEVICE->width);
END_PROPERTY
BEGIN_PROPERTY(CWEBCAM_height)
GB.ReturnInteger(DEVICE->height);
if( THIS->is_v4l2 ) // ++
GB.ReturnInteger(THIS->fmt.fmt.pix.height);
else GB.ReturnInteger(DEVICE->height);
END_PROPERTY
BEGIN_METHOD (CWEBCAM_new,GB_STRING Device;)
//
//=============================================================================
//
// _new( device name)
//
// Default constructor
//
BEGIN_METHOD (CWEBCAM_new,GB_STRING Device; GB_INTEGER Compat;)
int mydev;
struct video_tuner vtuner;
VIDEO_STREAM *str;
mydev=open (GB.FileName(STRING(Device),LENGTH(Device)),O_RDWR);
if (mydev==-1)
{
// ++ V4L2
//
// Open the device
//
mydev = gv4l2_open_device(STRING(Device));
if( mydev == -1) {
GB.Error("Unable to open device");
return;
}
THIS->io = mydev;
if(MISSING(Compat))
THIS->is_v4l2 = gv4l2_available( THIS );
else
switch( VARG(Compat) ) {
case gv4l2_V4L:
THIS->is_v4l2 = 0;
break;
case gv4l2_V4L2:
THIS->is_v4l2 = 1;
break;
default:
close(mydev);
GB.Error("Invalid compatibility flag");
}
GB.Alloc(POINTER(&THIS->device),sizeof(char)*(LENGTH(Device)+1)); // ++
strcpy(THIS->device,STRING(Device)); // ++
if( THIS->is_v4l2 ) {
//
gv4l2_debug("Device is V4L2!");
//
// Initialise the device
//
if( !gv4l2_init_device(THIS,DEF_WIDTH,DEF_HEIGHT) ) {
close(mydev);
GB.Error("Unable to initialise the device");
return;
}
//
THIS->stream.desc=&VideoStream;
str=(VIDEO_STREAM*)POINTER(&THIS->stream);
str->handle=(void*)THIS;
//
gv4l2_start_capture(THIS);
return;
}
gv4l2_debug("Device is V4L!");
// mydev=open (GB.FileName(STRING(Device),LENGTH(Device)),O_RDWR);
// if (mydev==-1)
// {
// GB.Error("Unable to open device");
// return;
//}
// -- V4L2
DEVICE=vd_setup(DEF_WIDTH,DEF_HEIGHT,DEF_DEPTH,mydev);
if (!vd_setup_capture_mode(DEVICE))
//-- if (!vd_setup_capture_mode(DEVICE))
if (!vd_setup_capture_mode(THIS)) // ++
{
close(mydev);
GB.Free(POINTER(&DEVICE));
@ -655,8 +855,9 @@ BEGIN_METHOD (CWEBCAM_new,GB_STRING Device;)
}
vd_setup_video_source(DEVICE,IN_DEFAULT,NORM_DEFAULT);
GB.Alloc(POINTER(&THIS->device),sizeof(char)*(LENGTH(Device)+1));
strcpy(THIS->device,STRING(Device));
// -- GB.Alloc(POINTER(&THIS->device),sizeof(char)*(LENGTH(Device)+1));
// -- strcpy(THIS->device,STRING(Device));
if (vd_ioctl (DEVICE, VIDIOCGTUNER, &vtuner)) DEVICE->Freq2=1;
@ -665,10 +866,29 @@ BEGIN_METHOD (CWEBCAM_new,GB_STRING Device;)
str->handle=(void*)THIS;
END_METHOD
//
//=============================================================================
//
// _free()
//
// Default destructor
//
BEGIN_METHOD_VOID(CWEBCAM_free)
if (THIS->device) GB.Free(POINTER(&THIS->device));
// ++ V4L2
if (THIS->device) GB.Free(POINTER(&THIS->device)); // ++
if (THIS->frame) GB.Free(POINTER(&THIS->frame)); // ++
if( THIS->is_v4l2 ) {
gv4l2_stop_capture( THIS );
gv4l2_uninit_device( THIS );
gv4l2_close_device( THIS->io );
return;
}
// --if (THIS->device) GB.Free(POINTER(&THIS->device));
// -- V4L2
if (THIS->membuf) GB.Free(POINTER(&THIS->membuf));
if (DEVICE)
@ -678,6 +898,9 @@ BEGIN_METHOD_VOID(CWEBCAM_free)
}
END_METHOD
//
//=============================================================================
//
BEGIN_METHOD(CWEBCAM_size,GB_INTEGER Width; GB_INTEGER Height;)
@ -689,6 +912,13 @@ BEGIN_METHOD(CWEBCAM_size,GB_INTEGER Width; GB_INTEGER Height;)
int channel;
int colour,hue,whiteness,contrast,brightness;
// ++ V4L2
if( THIS->is_v4l2 ) {
gv4l2_resize( THIS , VARG(Width) , VARG(Height) );
return;
}
// -- V4L2
if (h<DEVICE->vcap.minheight) h=DEVICE->vcap.minheight;
if (h>DEVICE->vcap.maxheight) h=DEVICE->vcap.maxheight;
if (w<DEVICE->vcap.minwidth) w=DEVICE->vcap.minwidth;
@ -718,7 +948,8 @@ BEGIN_METHOD(CWEBCAM_size,GB_INTEGER Width; GB_INTEGER Height;)
}
DEVICE=vd_setup(w,h,DEF_DEPTH,mydev);
if (!vd_setup_capture_mode(DEVICE))
//-- if (!vd_setup_capture_mode(DEVICE))
if (!vd_setup_capture_mode(THIS)) // ++
{
close(mydev);
GB.Free(POINTER(&DEVICE));
@ -743,6 +974,11 @@ BEGIN_PROPERTY(CWEBCAM_source)
long Source=0,Norm=0;
if( THIS->is_v4l2 ) {
gv4l2_debug("'Source' not currently implemented for V4L2");
return;
}
if (READ_PROPERTY)
{
if (!vd_ioctl(DEVICE, VIDIOCGCHAN, &DEVICE->vchan))
@ -788,14 +1024,86 @@ BEGIN_PROPERTY(CWEBCAM_source)
vd_setup_video_source(DEVICE,Source,Norm);
END_METHOD
//
//=============================================================================
//
// CWEBCAM_debug()
//
BEGIN_PROPERTY(CWEBCAM_debug)
if (READ_PROPERTY)
{
GB.ReturnInteger( gv4l2_debug_mode );
return;
}
gv4l2_debug_mode = VPROP(GB_INTEGER);
END_PROPERTY
//
//=============================================================================
//
// cwebcam_image
//
// Raw "get_image" routine that can be used elsewhere regardless of the
// version of V4L2 in play. Necessary refactoring I'm afraid ...
//
// Grab one frame and record its dimensions in THIS->w / THIS->h,
// using whichever API (V4L1 or V4L2) the device was opened with.
// Returns 1 on success, 0 on failure.
int cwebcam_image(CWEBCAM * _object)
{
if( THIS->is_v4l2 ) {
// V4L2 path: gv4l2_read_frame() captures the frame; dimensions
// come from the negotiated format.
if( !gv4l2_read_frame( THIS )) return 0;
THIS->w=THIS->fmt.fmt.pix.width;
THIS->h=THIS->fmt.fmt.pix.height;
}
else
{
// V4L1 path: vd_get_image() captures (and converts) the frame;
// dimensions come from the mmap capture settings.
if( !vd_get_image( THIS )) return 0;
THIS->w = DEVICE->vmmap.width;
THIS->h = DEVICE->vmmap.height;
vd_image_done(DEVICE);
}
return 1;
}
//
// CWEBCAM_image()
//
// Hopefully you will agree, that not only is the raw _image routine
// required, but the resulting code is much nicer .. :)
//
BEGIN_PROPERTY(CWEBCAM_image)
GB_IMAGE ret=NULL;
if( !cwebcam_image(THIS) ) {
GB.Error("Unable to get image");
GB.ReturnNull();
return;
}
GB.ReturnObject(IMAGE.Create(THIS->w,THIS->h,GB_IMAGE_BGR,THIS->frame));
/*
// Ok, this lot has been refactored, sorry
// Once I got to "save" it became more efficient ..
// -- GB_IMAGE ret=NULL;
unsigned char *buf;
int w, h;
buf = (unsigned char*)vd_get_image(DEVICE);
// ++ V4L2
if( THIS->is_v4l2 ) {
if( !gv4l2_read_frame( THIS ))
{
GB.Error("Unable to get image");
GB.ReturnNull();
return;
}
w=THIS->fmt.fmt.pix.width;
h=THIS->fmt.fmt.pix.height;
GB.ReturnObject(IMAGE.Create(w, h, GB_IMAGE_BGR, THIS->frame));
return;
}
// -- V4L2
// -- buf = (unsigned char*)vd_get_image(DEVICE);
buf = (unsigned char*)vd_get_image(THIS); // ++
if (!buf)
{
GB.Error("Unable to get image");
@ -808,20 +1116,20 @@ BEGIN_PROPERTY(CWEBCAM_image)
vd_image_done(DEVICE);
GB.ReturnObject(IMAGE.Create(w, h, GB_IMAGE_BGR, buf));
*/
END_PROPERTY
BEGIN_METHOD(CWEBCAM_save,GB_STRING File; GB_INTEGER Quality;)
char *File;
char *ext=NULL;
long b;
FILE *fd;
char *buf;
// -- char *buf;
int format=2;
int quality=80;
int w,h;
// -- int w,h;
File=GB.FileName(STRING(File),LENGTH(File));
@ -838,19 +1146,6 @@ BEGIN_METHOD(CWEBCAM_save,GB_STRING File; GB_INTEGER Quality;)
if (quality>100) quality=100;
}
/*if (!MISSING(Format))
{
switch(VARG(Format))
{
case 1:
case 2:
case 3: format=VARG(Format); break;
default : GB.Error("Unknown format"); return;
}
}
else
{*/
format = 0;
for (b=strlen(File)-1;b>=0;b--)
@ -866,18 +1161,21 @@ BEGIN_METHOD(CWEBCAM_save,GB_STRING File; GB_INTEGER Quality;)
if (!format)
{
GB.Error("Unknown format");
GB.Error("Unknown format (jpeg|jpg|png|ppm");
return;
}
fd=fopen(File, "w");
if (!fd)
{
GB.Error("Unable to open file for writting");
return;
}
buf=(char*)vd_get_image(DEVICE);
/* V4L2 Refactoring
// -- buf=(char*)vd_get_image(DEVICE);
buf=(char*)vd_get_image(THIS); // ++
if (!buf)
{
fclose(fd);
@ -894,9 +1192,34 @@ BEGIN_METHOD(CWEBCAM_save,GB_STRING File; GB_INTEGER Quality;)
case 2: put_image_png (buf,w,h,0,fd); break;
case 3: put_image_jpeg (buf,w,h,quality,0,fd); break;
}
*/
//
// V4L2 ++
//
if( !cwebcam_image(THIS) ) {
fclose(fd);
GB.Error("Unable to get image");
return;
}
switch(format)
{
case 1:
put_image_ppm (THIS->frame,THIS->w,THIS->h,quality,0,fd);
break;
case 2:
put_image_png (THIS->frame,THIS->w,THIS->h,0,fd);
break;
case 3:
put_image_jpeg(THIS->frame,THIS->w,THIS->h,quality,0,fd);
break;
}
//
// V4L2 --
//
fclose(fd);
vd_image_done(DEVICE);
// -- (Ooops!) vd_image_done(DEVICE);
END_METHOD
@ -922,20 +1245,25 @@ void return_array(char *array,long mmax)
BEGIN_PROPERTY(CFEATURES_name)
return_array(DEVICE->vcap.name,32);
if( THIS->is_v4l2 )
GB.ReturnNewString(THIS->device,strlen(THIS->device));
else return_array(DEVICE->vcap.name,32);
END_PROPERTY
BEGIN_PROPERTY(CFEATURES_driver)
struct v4l2_capability vcap;
int dev;
if ( vd_ioctl(DEVICE,VIDIOC_QUERYCAP,&vcap)!=0 )
if( THIS->is_v4l2 )
dev = THIS->io;
else dev = DEVICE->dev;
if ( ioctl(dev,VIDIOC_QUERYCAP,&vcap)!=0 )
{
GB.ReturnNull();
return;
}
return_array((char*)vcap.driver,16);
@ -945,7 +1273,13 @@ BEGIN_PROPERTY(CFEATURES_bus)
struct v4l2_capability vcap;
if ( vd_ioctl(DEVICE,VIDIOC_QUERYCAP,&vcap)!=0 )
int dev;
if( THIS->is_v4l2 )
dev = THIS->io;
else dev = DEVICE->dev;
if ( ioctl(dev,VIDIOC_QUERYCAP,&vcap)!=0 )
{
GB.ReturnNull();
return;
@ -956,12 +1290,41 @@ BEGIN_PROPERTY(CFEATURES_bus)
END_PROPERTY
BEGIN_PROPERTY(CFEATURES_card)
struct v4l2_capability vcap;
int dev;
if( THIS->is_v4l2 ) {
return_array((char*)THIS->cap.card,32);
return;
}
dev = DEVICE->dev;
if ( ioctl(dev,VIDIOC_QUERYCAP,&vcap)!=0 )
{
GB.ReturnNull();
return;
}
return_array((char*)vcap.driver,16);
END_PROPERTY
BEGIN_PROPERTY(CFEATURES_version)
char arr[12];
struct v4l2_capability vcap;
if ( vd_ioctl(DEVICE,VIDIOC_QUERYCAP,&vcap)!=0 )
int dev;
if( THIS->is_v4l2 )
dev = THIS->io;
else dev = DEVICE->dev;
if ( ioctl(dev,VIDIOC_QUERYCAP,&vcap)!=0 )
{
GB.ReturnNull();
return;
@ -973,26 +1336,48 @@ BEGIN_PROPERTY(CFEATURES_version)
END_PROPERTY
BEGIN_PROPERTY(CFEATURES_maxWidth)
if( THIS->is_v4l2 ) { // ++ V4L2
gv4l2_debug("maxWidth not implemented in V4l2");
GB.ReturnInteger(1024);
return; // ++ V4L2
}
GB.ReturnInteger(DEVICE->vcap.maxwidth);
END_PROPERTY
BEGIN_PROPERTY(CFEATURES_minWidth)
if( THIS->is_v4l2 ) { // ++ V4L2
gv4l2_debug("minWidth not implemented in V4l2");
GB.ReturnInteger(0);
return; // ++ V4L2
}
GB.ReturnInteger(DEVICE->vcap.minwidth);
END_PROPERTY
BEGIN_PROPERTY(CFEATURES_maxHeight)
if( THIS->is_v4l2 ) { // ++ V4L2
gv4l2_debug("maxHeight not implemented in V4l2");
GB.ReturnInteger(768);
return; // ++ V4L2
}
GB.ReturnInteger(DEVICE->vcap.maxheight);
END_PROPERTY
BEGIN_PROPERTY(CFEATURES_minHeight)
if( THIS->is_v4l2 ) { // ++ V4L2
gv4l2_debug("minHeight not implemented in V4l2");
GB.ReturnInteger(0);
return; // ++ V4L2
}
GB.ReturnInteger(DEVICE->vcap.minheight);
END_PROPERTY
@ -1007,7 +1392,12 @@ BEGIN_PROPERTY(CTUNER_name)
struct video_tuner vtuner;
long bucle,mmax=32;
char * tuner = "'tuner' not currently implemented on V4L2";
if( THIS->is_v4l2 ) {
GB.ReturnNewString(tuner,strlen(tuner));
return;
}
if (vd_ioctl (DEVICE, VIDIOCGTUNER, &vtuner)!=0)
{
@ -1029,6 +1419,11 @@ BEGIN_PROPERTY(CTUNER_signal)
struct video_tuner vtuner;
if( THIS->is_v4l2 ) {
GB.ReturnInteger(0);
return;
}
if (vd_ioctl (DEVICE, VIDIOCGTUNER, &vtuner)!=0)
{
GB.ReturnInteger(0);
@ -1043,6 +1438,11 @@ BEGIN_PROPERTY(CTUNER_low)
struct video_tuner vtuner;
struct v4l2_frequency vfreq;
if( THIS->is_v4l2 ) {
GB.ReturnBoolean(0);
return;
}
if (DEVICE->Freq2)
{
if (READ_PROPERTY)
@ -1073,6 +1473,10 @@ BEGIN_PROPERTY(CTUNER_frequency)
struct video_tuner vtuner;
struct v4l2_frequency vfreq;
if( THIS->is_v4l2 ) {
GB.ReturnInteger(0);
return;
}
if (DEVICE->Freq2)
{
@ -1118,13 +1522,13 @@ GB_DESC CFeaturesDesc[] =
GB_PROPERTY_READ("Name","s",CFEATURES_name),
GB_PROPERTY_READ("Driver","s",CFEATURES_driver),
GB_PROPERTY_READ("Bus","s",CFEATURES_bus),
GB_PROPERTY_READ("Card","s",CFEATURES_card), // ++ V4L2
GB_PROPERTY_READ("Version","s",CFEATURES_version),
GB_PROPERTY_READ("MaxWidth","i",CFEATURES_maxWidth),
GB_PROPERTY_READ("MinWidth","i",CFEATURES_minWidth),
GB_PROPERTY_READ("MaxHeight","i",CFEATURES_maxHeight),
GB_PROPERTY_READ("MinHeight","i",CFEATURES_minHeight),
GB_END_DECLARE
};
@ -1150,10 +1554,6 @@ GB_DESC CWebcamDesc[] =
GB_HOOK_CHECK(CWEBCAM_check),
//GB_CONSTANT("Jpeg","i",FMT_JPEG),
//GB_CONSTANT("Ppm","i",FMT_PPM),
//GB_CONSTANT("Png","i",FMT_PNG),
GB_CONSTANT("Hz","i",1),
GB_CONSTANT("Khz","i",0),
@ -1166,24 +1566,38 @@ GB_DESC CWebcamDesc[] =
GB_CONSTANT("Composite1","i",1), //IN_COMPOSITE1),
GB_CONSTANT("Composite2","i",2), //IN_COMPOSITE2),
GB_CONSTANT("SVideo","i",3), //IN_SVIDEO),
GB_CONSTANT("V4L","i",1), // ++ force V4L1
GB_CONSTANT("V4L2","i",2), // ++ force V4L2
GB_METHOD("_new",NULL,CWEBCAM_new,"(Device)s"),
GB_METHOD("_new",NULL,CWEBCAM_new,"(Device)s[(V4L|V4L2)i]"),
GB_METHOD("_free",NULL,CWEBCAM_free,NULL),
GB_PROPERTY_SELF("Tuner",".VideoDeviceTuner"),
GB_PROPERTY_SELF("Features",".VideoDeviceFeatures"),
GB_PROPERTY_SELF("Tuner",".VideoDeviceTuner"),
GB_PROPERTY("Source","i",CWEBCAM_source),
GB_PROPERTY_READ("Width","i",CWEBCAM_width),
GB_PROPERTY_READ("Height","i",CWEBCAM_height),
GB_PROPERTY("Source","i",CWEBCAM_source),
GB_PROPERTY("Bright","i",CWEBCAM_bright),
GB_PROPERTY("Contrast","i",CWEBCAM_contrast),
GB_PROPERTY("Contrast_Max","i",CWEBCAM_contrast_max),
GB_PROPERTY("Contrast_Min","i",CWEBCAM_contrast_min),
GB_PROPERTY("Color","i",CWEBCAM_colour),
GB_PROPERTY("Color_Max","i",CWEBCAM_color_max),
GB_PROPERTY("Color_Min","i",CWEBCAM_color_min),
GB_PROPERTY("Whiteness","i",CWEBCAM_whiteness),
GB_PROPERTY("Whiteness_Max","i",CWEBCAM_whiteness_max),
GB_PROPERTY("Whiteness_Min","i",CWEBCAM_whiteness_min),
GB_PROPERTY("Bright","i",CWEBCAM_bright),
GB_PROPERTY("Bright_Max","i",CWEBCAM_bright_max),
GB_PROPERTY("Bright_Min","i",CWEBCAM_bright_min),
GB_PROPERTY("Hue","i",CWEBCAM_hue),
GB_PROPERTY("Hue_Max","i",CWEBCAM_hue_max),
GB_PROPERTY("Hue_Min","i",CWEBCAM_hue_min),
GB_PROPERTY("Image","Image",CWEBCAM_image),
GB_PROPERTY("Debug","i",CWEBCAM_debug),
GB_METHOD("Resize",NULL,CWEBCAM_size,"(Width)i(Height)i"),
GB_METHOD("Save",NULL,CWEBCAM_save,"(File)s[(Quality)i]"),

View file

@ -51,7 +51,11 @@ extern GB_STREAM_DESC VideoStream;
#define THIS ((CWEBCAM *)_object)
#define DEVICE (THIS->dev)
// ++ V4L2
#define MCLEAR(x) memset (&(x), 0, sizeof (x))
#define gv4l2_V4L 1
#define gv4l2_V4L2 2
// --
#endif
typedef struct video_device {
@ -83,6 +87,15 @@ typedef struct
void *handle;
} VIDEO_STREAM;
// ++ V4L2
typedef struct gv4l2_buffer
{
void* start;
size_t length;
} gv4l2_buffer_t;
//--
typedef struct
{
GB_BASE ob;
@ -94,6 +107,39 @@ typedef struct
long gotframe;
long posframe;
// ++ YUYV->RGB conversion
void* frame; // "current" frame buffer
//--
// ++ V4L2
//
// There is some duplication here but we really don't want to use
// the v4l video_device_t structure ...
//
struct v4l2_capability cap;
struct v4l2_cropcap cropcap;
struct v4l2_crop crop;
struct v4l2_format fmt;
struct gv4l2_buffer* buffers;
//
int is_v4l2; // which version is this dev
int io; // raw device handle for V2
int use_mmap; // is MMAP available
int buffer_count; // number of buffers
int w,h; // "current" dimensions
//
int bright_max;
int hue_max;
int contrast_max;
int whiteness_max;
int color_max;
//
int bright_min;
int hue_min;
int contrast_min;
int whiteness_min;
int color_min;
// --
} CWEBCAM;
@ -108,4 +154,31 @@ int Video_stream_flush(GB_STREAM *stream);
int Video_stream_close(GB_STREAM *stream);
int Video_stream_handle(GB_STREAM *stream);
// ++ YUYV->RGB conversion
int convert_yuv_to_rgb_buffer(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height);
void yuv420p_to_rgb (unsigned char *image, unsigned char *image2, int x, int y, int z);
// --
// ++ V4L2
int gv4l2_available(CWEBCAM * _object);
void gv4l2_debug( char *s );
int gv4l2_xioctl( int fd,int request,void * arg);
int gv4l2_open_device( char* name );
void gv4l2_close_device( int id );
int gv4l2_init_device(CWEBCAM * _object , int width , int height );
int gv4l2_start_capture(CWEBCAM * _object);
int gv4l2_stop_capture(CWEBCAM * _object);
void gv4l2_uninit_device(CWEBCAM * _object);
void gv4l1_process_image (CWEBCAM * _object, void *start);
void gv4l2_process_image (CWEBCAM * _object, void *start);
int gv4l2_read_frame( CWEBCAM * _object );
int gv4l2_resize( CWEBCAM * _object , int width , int height );
int gv4l2_hue( CWEBCAM * _object , int hue );
int gv4l2_brightness( CWEBCAM * _object , int hue );
int gv4l2_contrast( CWEBCAM * _object , int value );
int gv4l2_color( CWEBCAM * _object , int value );
int gv4l2_whiteness( CWEBCAM * _object , int value );
// -- V4L2
#endif

View file

@ -8,7 +8,7 @@ gblib_LTLIBRARIES = gb.v4l.la
gb_v4l_la_LIBADD = @V4L_LIB@
gb_v4l_la_LDFLAGS = -module @LD_FLAGS@
gb_v4l_la_SOURCES = main.h main.c CWebcam.h CWebcam.c
gb_v4l_la_SOURCES = main.h main.c CWebcam.h CWebcam.c gv4l2.c CConverters.c

721
gb.v4l/src/gv4l2.c Normal file
View file

@ -0,0 +1,721 @@
/***************************************************************************
CWebcam.c
V4L2 interface for Gambas
(C) 2009 Gareth Bult <gareth@encryptec.net>
Based on the pre-existing Gambas v4l module and V4L2 examples
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 1, or (at your option)
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
***************************************************************************/
#define __CWEBCAM_C
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <asm/types.h>
#ifdef HAVE_STDLIB_H
#undef HAVE_STDLIB_H
#endif
#include "main.h"
#include "CWebcam.h"
//
int gv4l2_debug_mode = 1;
//
//=============================================================================
//
// gv4l2_available()
//
// Test for V4L2 availability: probe the descriptor with
// VIDIOC_QUERYCAP and return non-zero when the device answers it.
// NOTE(review): a 256-byte scratch buffer stands in for a
// struct v4l2_capability here - large enough in practice, but a
// typed struct would be cleaner; confirm before changing.
//
int gv4l2_available(CWEBCAM * _object)
{
	char scratch[256];

	if (ioctl(THIS->io, VIDIOC_QUERYCAP, scratch) == -1)
		return 0;
	return 1;
}
//=============================================================================
//
// v4l2_debug( string )
//
// Debugging routine for V4L2: prints the message and the current errno
// to stdout, but only while the module-level gv4l2_debug_mode flag is set.
//
void gv4l2_debug( char *s )
{
	if (gv4l2_debug_mode) {
		printf("gambas v4l2: %s [%d]\n", s, errno);
		fflush(stdout);
	}
}
//=============================================================================
//
// xioctl( fd,request,arg )
//
// Local swapper for ioctl: transparently retries whenever the call is
// interrupted by a signal (EINTR) and returns the final ioctl status.
//
int gv4l2_xioctl( int fd,int request,void * arg)
{
	int status;

	for (;;) {
		status = ioctl(fd, request, arg);
		if (status != -1 || errno != EINTR)
			return status;
	}
}
//
//=============================================================================
//
// gv4l2_camera_setup( THIS, id, min, max )
//
// Query the driver for the valid range of control "id" (hue, brightness,
// etc.) and store it in *min / *max. If the driver does not recognise
// the control, the caller's values are left untouched.
//
void gv4l2_camera_setup( CWEBCAM * _object , int id , int * min, int * max)
{
	extern int gv4l2_debug_mode;	// module debug flag (defined above)
	struct v4l2_queryctrl query;
	memset (&query, 0, sizeof (query));
	query.id = id;
	// Control not supported => leave the caller's defaults alone
	if(gv4l2_xioctl(THIS->io,VIDIOC_QUERYCTRL,&query)==-1)return;
	// BUGFIX: trace the discovered range only when debugging is enabled;
	// previously this printed unconditionally on every setup call.
	if( gv4l2_debug_mode ) {
		printf("Name=%s,Min=%d,Max=%d,Value=%d\n",
			query.name,
			query.minimum,
			query.maximum,
			query.default_value);
		fflush(stdout);
	}
	*max = query.maximum;
	*min = query.minimum;
}
//
// Get or set a single camera control. Passing value == -1 reads the
// current setting (VIDIOC_G_CTRL); any other value writes it
// (VIDIOC_S_CTRL). Returns the control value, or -1 on ioctl failure.
//
int gv4l2_camera_get( CWEBCAM * _object , int id , int value )
{
	struct v4l2_control control;

	memset(&control, 0, sizeof control);
	control.id = id;
	control.value = value;
	if (gv4l2_xioctl(THIS->io,
			value == -1 ? VIDIOC_G_CTRL : VIDIOC_S_CTRL,
			&control) == -1)
		return -1;
	return control.value;
}
//
// Cache the driver's hue range in THIS->hue_min / THIS->hue_max.
void gv4l2_hue_setup( CWEBCAM * _object )
{
gv4l2_camera_setup( THIS ,
V4L2_CID_HUE , &THIS->hue_min , &THIS->hue_max );
}
//
// Read (value == -1) or write the hue control; returns -1 on failure.
int gv4l2_hue( CWEBCAM * _object , int value )
{
return gv4l2_camera_get( THIS, V4L2_CID_HUE , value );
}
//
//=============================================================================
//
// gv4l2_brightness( THIS, value )
//
// Cache the driver's brightness range in THIS->bright_min / bright_max.
void gv4l2_brightness_setup( CWEBCAM * _object )
{
gv4l2_camera_setup( THIS,
V4L2_CID_BRIGHTNESS , &THIS->bright_min , &THIS->bright_max );
}
// Read (value == -1) or write the brightness control; -1 on failure.
int gv4l2_brightness( CWEBCAM * _object , int value )
{
return gv4l2_camera_get( THIS, V4L2_CID_BRIGHTNESS , value );
}
//
//=============================================================================
//
// gv4l2_contrast( THIS, value )
//
// Cache the driver's contrast range in THIS->contrast_min / contrast_max.
void gv4l2_contrast_setup( CWEBCAM * _object )
{
gv4l2_camera_setup( THIS ,
V4L2_CID_CONTRAST, &THIS->contrast_min , &THIS->contrast_max);
}
// Read (value == -1) or write the contrast control; -1 on failure.
int gv4l2_contrast( CWEBCAM * _object , int value )
{
return gv4l2_camera_get( THIS, V4L2_CID_CONTRAST , value );
}
//
//=============================================================================
//
// gv4l2_color( THIS, value )
//
// Cache the driver's saturation ("color") range in THIS->color_min / _max.
void gv4l2_color_setup( CWEBCAM * _object )
{
gv4l2_camera_setup( THIS ,
V4L2_CID_SATURATION, &THIS->color_min, &THIS->color_max );
}
// Read (value == -1) or write the saturation control; -1 on failure.
int gv4l2_color( CWEBCAM * _object , int value )
{
return gv4l2_camera_get( THIS, V4L2_CID_SATURATION , value );
}
//
//=============================================================================
//
// gv4l2_whiteness( THIS, value )
//
// Cache the driver's whiteness range in THIS->whiteness_min / _max.
// NOTE(review): V4L2_CID_WHITENESS is historically an alias for
// V4L2_CID_GAMMA in videodev2.h - confirm this is the intended control.
void gv4l2_whiteness_setup( CWEBCAM * _object )
{
gv4l2_camera_setup( THIS ,
V4L2_CID_WHITENESS , &THIS->whiteness_min,&THIS->whiteness_max);
}
//
// Read (value == -1) or write the whiteness control; -1 on failure.
int gv4l2_whiteness( CWEBCAM * _object , int value )
{
return gv4l2_camera_get( THIS, V4L2_CID_WHITENESS , value );
}
//
//
//=============================================================================
//
// v4l2_open_device( device_name )
//
// Open the raw device (typically /dev/video?) , note that we're not
// using non-blocking mode as (a) it's not needed given we're recovering
// specific frames and (b) cameras are often "not ready" so it would
// require retries under Gambas.
//
// Returns an open file descriptor, or -1 on any failure.
//
// FIXME:: what happens when you unplug a camera when active ??
//
int gv4l2_open_device( char* name )
{
	struct stat file_info;
	//
	// See if the file is there ...
	//
	if( stat(name,&file_info) == -1 ) {
		gv4l2_debug("failed to stat device");
		return -1;
	}
	//
	// Make sure it's a character device (/dev/video?)
	//
	// BUGFIX: this path previously returned "status", which was 0 after
	// a successful stat() - indistinguishable from a valid descriptor.
	//
	if( !S_ISCHR(file_info.st_mode) ) {
		gv4l2_debug("not a character device");
		return -1;
	}
	//
	// Finally, try to open the file ..
	//
	return open( name,O_RDWR /* |O_NONBLOCK */ ,0 );
}
//============================================================================
//
// v4l2_close_device( id )
//
// Close the device; the only failure mode worth reporting is a bad close.
//
void gv4l2_close_device( int id )
{
	int status = close( id );

	if( status == -1 )
		gv4l2_debug("error closing device");
}
//============================================================================
//
// v4l2_init_device( THIS , Width , Height )
//
// Initialise the device and associated data structures, this is the most
// complex operation in the code and has to cope with its own MMAP
// handling whereas V4L did a lot of this for us.
//
// Steps: verify capture capability, pick MMAP vs READ I/O, reset
// cropping, negotiate the frame format, allocate the RGB frame buffer,
// discover control ranges, then set up either one READ buffer or a set
// of memory-mapped driver buffers. Returns 1 on success, 0 on failure.
//
// FIXME:: test the READ interface, I only use MMAP cameras ...
//
int gv4l2_init_device(CWEBCAM * _object , int width , int height )
{
	unsigned int min;
	static unsigned int n_buffers = 0;
	if ( gv4l2_xioctl (THIS->io, VIDIOC_QUERYCAP, &THIS->cap) == -1 ) {
		gv4l2_debug("VIDIOC_QUERYCAP error");
		return 0;
	}
	if (!(THIS->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
		gv4l2_debug("not video capture device");
		return 0;
	}
	//
	// We need to choose which IO method to use, we'll try MMAP and
	// if that fails, fall back to READ
	//
	if (!(THIS->cap.capabilities & V4L2_CAP_STREAMING)) {
		//
		// No MMAP support!
		//
		THIS->use_mmap = 0;
		if (!(THIS->cap.capabilities & V4L2_CAP_READWRITE)) {
			gv4l2_debug("device does not support mmap or read");
			return 0;
		}
	} else THIS->use_mmap = 1;
	// Reset cropping to the driver's default rectangle (non-fatal)
	MCLEAR(THIS->cropcap);
	THIS->cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (!gv4l2_xioctl (THIS->io, VIDIOC_CROPCAP, &THIS->cropcap)) {
		THIS->crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		THIS->crop.c = THIS->cropcap.defrect;
		if ( gv4l2_xioctl (THIS->io, VIDIOC_S_CROP, &THIS->crop) == -1 )
		{
			if( errno == EINVAL ) {
				gv4l2_debug("cropping not supported");
			}
		}
	}
	// Start from the device's current format, then request our size
	MCLEAR(THIS->fmt);
	THIS->fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if( gv4l2_xioctl( THIS->io, VIDIOC_G_FMT, &THIS->fmt ) == -1 ) {
		gv4l2_debug("Unable to get Video formats");
		return 0;
	}
	THIS->fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	THIS->fmt.fmt.pix.width = width;
	THIS->fmt.fmt.pix.height = height;
	THIS->fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
	//
	// Camera format should be picked up from VIDIOC_G_FMT above
	// FIXME:: do cameras support multiple formats and so we want
	// to be able to pick the format??
	//
	//THIS->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	if ( gv4l2_xioctl ( THIS->io, VIDIOC_S_FMT, &THIS->fmt) == -1) {
		gv4l2_debug("VIDIOC_S_FMT, unable to set format");
		return 0;
	}
	// THIS->fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	// gv4l2_xioctl ( THIS->io, VIDIOC_S_FMT, &THIS->fmt);
	/* Note VIDIOC_S_FMT may change width and height. */
	/* Buggy driver paranoia. */
	min = THIS->fmt.fmt.pix.width * 2;
	if (THIS->fmt.fmt.pix.bytesperline < min)
		THIS->fmt.fmt.pix.bytesperline = min;
	min = THIS->fmt.fmt.pix.bytesperline * THIS->fmt.fmt.pix.height;
	if (THIS->fmt.fmt.pix.sizeimage < min)
		THIS->fmt.fmt.pix.sizeimage = min;
	// RGB frame buffer (4 bytes/pixel) filled by the process_image code
	GB.Alloc(&THIS->frame,THIS->fmt.fmt.pix.width*THIS->fmt.fmt.pix.height*4);
	// Discover the valid ranges for the standard picture controls
	gv4l2_brightness_setup( THIS );
	gv4l2_contrast_setup( THIS );
	gv4l2_color_setup( THIS );
	gv4l2_whiteness_setup( THIS );
	gv4l2_hue_setup( THIS );
	if( !THIS->use_mmap ) {
		// READ interface: one buffer large enough for a full image
		GB.Alloc( POINTER(&THIS->buffers) ,sizeof(*THIS->buffers));
		if( !THIS->buffers ) {
			gv4l2_debug("Failed to allocate buffer space");
			return 0;
		}
		THIS->buffers[0].length = THIS->fmt.fmt.pix.sizeimage;
		GB.Alloc( POINTER(&THIS->buffers[0].start),THIS->fmt.fmt.pix.sizeimage);
		if( !THIS->buffers[0].start ) {
			gv4l2_debug("Failed to allocate buffer space");
			return 0;
		}
		return 1;
	}
	// We don't support USERPTR in Gambas (!)
	// So now we initialise MMAP
	//
	struct v4l2_requestbuffers req;
	MCLEAR(req);
	req.count = 4;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	if ( gv4l2_xioctl (THIS->io, VIDIOC_REQBUFS, &req) == -1 ) {
		gv4l2_debug("mmap not supported or error");
		return 0;
	}
	if (req.count < 2) {
		// BUGFIX: message typo was "insifficient"
		gv4l2_debug("insufficient memory for mmap");
		return 0;
	}
	GB.Alloc ( POINTER(&THIS->buffers),req.count * sizeof (*THIS->buffers));
	if (!THIS->buffers) {
		gv4l2_debug("insufficient memory for mmap");
		return 0;
	}
	THIS->buffer_count = req.count;
	// Query and map each of the driver's buffers into our address space
	for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
		struct v4l2_buffer buf;
		MCLEAR(buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = n_buffers;
		if( gv4l2_xioctl (THIS->io, VIDIOC_QUERYBUF, &buf) == -1 ) {
			gv4l2_debug("VIDIOC_QUERYBUF");
			return 0;
		}
		THIS->buffers[n_buffers].length = buf.length;
		THIS->buffers[n_buffers].start =
			mmap (NULL /* start anywhere */,
				buf.length,
				PROT_READ | PROT_WRITE /* required */,
				MAP_SHARED /* recommended */,
				THIS->io, buf.m.offset);
		if (MAP_FAILED == THIS->buffers[n_buffers].start) {
			gv4l2_debug("mmap failed");
			return 0;
		}
	}
	return 1;
}
//=============================================================================
//
// v4l2_start_capture()
//
// Start capture mode, this should turn on the little green light on your
// camera. For the MMAP interface every buffer is queued with the driver
// and streaming is switched on; the READ interface needs no setup.
// Returns 1 on success, 0 on any ioctl failure.
//
int gv4l2_start_capture(CWEBCAM * _object)
{
	int index;
	enum v4l2_buf_type capture = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	gv4l2_debug("Capture ON");
	//
	// Nothing to do unless we're using MMAP
	//
	if( !THIS->use_mmap ) return 1;
	//
	// Hand every buffer back to the driver, ready for filling
	//
	for( index = 0; index < THIS->buffer_count; index++ ) {
		struct v4l2_buffer buf;
		MCLEAR (buf);
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = index;
		if( gv4l2_xioctl( THIS->io, VIDIOC_QBUF, &buf) == -1 ) {
			gv4l2_debug("VIDIOC_QBUF error starting capture");
			return 0;
		}
	}
	if( gv4l2_xioctl( THIS->io, VIDIOC_STREAMON, &capture) == -1 ) {
		gv4l2_debug("VIDIOC_STREAMON error starting capture");
		return 0;
	}
	return 1;
}
//=============================================================================
//
// v4l2_stop_capture()
//
// Stop Capturing on device (turn little green light off!)
// Only the MMAP/streaming interface needs an explicit VIDIOC_STREAMOFF.
// Returns 1 on success, 0 if the ioctl fails.
//
int gv4l2_stop_capture(CWEBCAM * _object)
{
	enum v4l2_buf_type capture = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	if( !THIS->use_mmap ) return 1;
	if( gv4l2_xioctl( THIS->io, VIDIOC_STREAMOFF, &capture) == -1 ) {
		gv4l2_debug("VIDIOC_STREAMOFF error");
		return 0;
	}
	return 1;
}
//=============================================================================
//
// gv4l2_uninit_device(THIS)
//
// Uninitialise the device and free all the associated memory: the RGB
// frame buffer, plus either the single READ buffer or every MMAP'd
// driver buffer, and finally the buffer table itself.
//
void gv4l2_uninit_device(CWEBCAM * _object)
{
	unsigned int index;

	GB.Free( POINTER(&THIS->frame) );
	if( !THIS->use_mmap ) {
		// READ interface: one heap buffer + the buffer table
		GB.Free ( POINTER(&THIS->buffers[0].start));
		GB.Free ( POINTER(&THIS->buffers));
		return;
	}
	// MMAP interface: unmap each driver buffer before freeing the table
	for( index = 0; index < THIS->buffer_count; index++ ) {
		if( munmap(THIS->buffers[index].start, THIS->buffers[index].length) == -1 )
			gv4l2_debug("MUNMAP Error");
	}
	GB.Free ( POINTER(&THIS->buffers));
}
//=============================================================================
//
// gv4l2_process_image(THIS,start)
//
// Process the image found in start and dump the resulting RGB frame into
// our local frame buffer (THIS->frame). Width, Height and Image size can
// all be found in THIS->fmt.fmt
//
// FIXME:: there are lots of formats, I can *only* test YUYV.
// I'm *assuming* RGB32 is "raw" mode (no conversion)
// Do "other" RGB formats work without conversion?
// What other conversion routines do we need?
// Will BM be moving any/all of these to Image/Picture objects?
//
// V4L1 variant: convert the captured frame in "start" into RGB data in
// THIS->frame. Only YUV420[P] and YUYV are converted; RGB32 is copied
// raw; any other palette is logged and copied verbatim.
void gv4l1_process_image (CWEBCAM * _object, void *start)
{
	int format,w,h;
	long size;
	format = THIS->dev->videopict.palette;
	w = THIS->dev->width;
	h = THIS->dev->height;
	size = THIS->dev->buffer_size;
	switch(format)
	{
		case VIDEO_PALETTE_YUV411P: gv4l2_debug("YUV411P"); break;
		case VIDEO_PALETTE_YUV420P:
			/* fallthrough - both 4:2:0 palettes share one converter */
		case VIDEO_PALETTE_YUV420:
			yuv420p_to_rgb (start,THIS->frame,w,h,3);
			return;
		case VIDEO_PALETTE_YUYV:
			convert_yuv_to_rgb_buffer(start,THIS->frame,w,h);
			return;
		case VIDEO_PALETTE_GREY: gv4l2_debug("GREY"); break;
		case VIDEO_PALETTE_HI240: gv4l2_debug("HI240"); break;
		// BUGFIX: debug message previously read "RGB5656"
		case VIDEO_PALETTE_RGB565: gv4l2_debug("RGB565"); break;
		case VIDEO_PALETTE_RGB24: gv4l2_debug("RGB24"); break;
		case VIDEO_PALETTE_RGB32: /* DEFAULT */ break;
		case VIDEO_PALETTE_RGB555: gv4l2_debug("RGB555"); break;
		case VIDEO_PALETTE_UYVY: gv4l2_debug("UYVY"); break;
		case VIDEO_PALETTE_YUV411: gv4l2_debug("YUV411"); break;
		case VIDEO_PALETTE_RAW: gv4l2_debug("RAW"); break;
		case VIDEO_PALETTE_YUV422P: gv4l2_debug("YUV422P"); break;
		case VIDEO_PALETTE_YUV410P: gv4l2_debug("YUV410P"); break;
		case VIDEO_PALETTE_COMPONENT: gv4l2_debug("COMPONENT");break;
		default:
			gv4l2_debug("Frame in unknown format");
			break;
	}
	// Unconverted palettes (and RGB32) are copied straight through
	memcpy(THIS->frame,start,size);
}
//
// v4l2 version (!)
//
// Convert the captured frame in "start" into RGB data in THIS->frame.
// Only YUV420 and YUYV are converted; RGB32 is copied raw; every other
// pixel format is logged and copied verbatim (FIXME: more converters).
//
void gv4l2_process_image (CWEBCAM * _object, void *start)
{
	int format,w,h;
	long size;
	format = THIS->fmt.fmt.pix.pixelformat;
	w = THIS->fmt.fmt.pix.width;
	h = THIS->fmt.fmt.pix.height;
	size = THIS->fmt.fmt.pix.sizeimage;
	switch(format)
	{
		case V4L2_PIX_FMT_RGB332: gv4l2_debug("RGB332"); break;
		case V4L2_PIX_FMT_RGB444: gv4l2_debug("RGB444"); break;
		case V4L2_PIX_FMT_RGB555: gv4l2_debug("RGB555"); break;
		// BUGFIX: messages previously read "YRGB565" / "YRGB555X"
		case V4L2_PIX_FMT_RGB565: gv4l2_debug("RGB565"); break;
		case V4L2_PIX_FMT_RGB555X: gv4l2_debug("RGB555X"); break;
		case V4L2_PIX_FMT_RGB565X: gv4l2_debug("RGB565X"); break;
		case V4L2_PIX_FMT_BGR24: gv4l2_debug("BGR24"); break;
		case V4L2_PIX_FMT_RGB24: gv4l2_debug("RGB24"); break;
		case V4L2_PIX_FMT_BGR32: gv4l2_debug("BGR32"); break;
		case V4L2_PIX_FMT_RGB32: /* DEFAULT - NO CONV */ break;
		case V4L2_PIX_FMT_GREY: gv4l2_debug("GREY"); break;
		case V4L2_PIX_FMT_Y16: gv4l2_debug("Y16"); break;
		case V4L2_PIX_FMT_PAL8: gv4l2_debug("PAL8"); break;
		case V4L2_PIX_FMT_YVU410: gv4l2_debug("YVU410"); break;
		case V4L2_PIX_FMT_YVU420: gv4l2_debug("YVU420"); break;
		case V4L2_PIX_FMT_YUV420:
			yuv420p_to_rgb (start,THIS->frame,w,h,3);
			return;
		case V4L2_PIX_FMT_YUYV:
			convert_yuv_to_rgb_buffer(start,THIS->frame,w,h);
			return;
		case V4L2_PIX_FMT_UYVY: gv4l2_debug("UYVY"); break;
		case V4L2_PIX_FMT_YUV422P: gv4l2_debug("YUV422P"); break;
		case V4L2_PIX_FMT_YUV411P: gv4l2_debug("YUV411P"); break;
		case V4L2_PIX_FMT_Y41P: gv4l2_debug("Y41P"); break;
		case V4L2_PIX_FMT_YUV444: gv4l2_debug("YUV444"); break;
		case V4L2_PIX_FMT_YUV555: gv4l2_debug("YUV555"); break;
		case V4L2_PIX_FMT_YUV565: gv4l2_debug("YUV565"); break;
		case V4L2_PIX_FMT_YUV32: gv4l2_debug("YUV32"); break;
		case V4L2_PIX_FMT_NV12: gv4l2_debug("NV12"); break;
		case V4L2_PIX_FMT_NV21: gv4l2_debug("NV21"); break;
		case V4L2_PIX_FMT_YUV410: gv4l2_debug("YUV410"); break;
		case V4L2_PIX_FMT_YYUV: gv4l2_debug("YYUV"); break;
		case V4L2_PIX_FMT_HI240: gv4l2_debug("HI240"); break;
		case V4L2_PIX_FMT_HM12: gv4l2_debug("HM12"); break;
		case V4L2_PIX_FMT_SBGGR8: gv4l2_debug("SBGGR8"); break;
		// BUGFIX: message previously read "SBGRG8"
		case V4L2_PIX_FMT_SGBRG8: gv4l2_debug("SGBRG8"); break;
		case V4L2_PIX_FMT_SBGGR16: gv4l2_debug("SBGGR16"); break;
		case V4L2_PIX_FMT_MJPEG: gv4l2_debug("MJPEG"); break;
		case V4L2_PIX_FMT_JPEG: gv4l2_debug("JPEG"); break;
		case V4L2_PIX_FMT_DV: gv4l2_debug("DV"); break;
		case V4L2_PIX_FMT_MPEG: gv4l2_debug("MPEG"); break;
		case V4L2_PIX_FMT_WNVA: gv4l2_debug("WNVA"); break;
		case V4L2_PIX_FMT_SN9C10X: gv4l2_debug("SN9C10X"); break;
		case V4L2_PIX_FMT_PWC1: gv4l2_debug("PWC1"); break;
		case V4L2_PIX_FMT_PWC2: gv4l2_debug("PWC2"); break;
		case V4L2_PIX_FMT_ET61X251: gv4l2_debug("ET61X251");break;
		case V4L2_PIX_FMT_SPCA501: gv4l2_debug("SPCA501"); break;
		case V4L2_PIX_FMT_SPCA505: gv4l2_debug("SPCA505"); break;
		case V4L2_PIX_FMT_SPCA508: gv4l2_debug("SPCA508"); break;
		case V4L2_PIX_FMT_SPCA561: gv4l2_debug("SPCA561"); break;
		case V4L2_PIX_FMT_PAC207: gv4l2_debug("PAC207"); break;
		case V4L2_PIX_FMT_PJPG: gv4l2_debug("PJPG"); break;
		case V4L2_PIX_FMT_YVYU: gv4l2_debug("YVYU"); break;
		default:
			gv4l2_debug("Frame in unknown format");
			break;
	}
	// Unconverted formats (and RGB32) are copied straight through
	memcpy(THIS->frame,start,size);
}
//=============================================================================
//
// gv4l2_read_frame( THIS )
//
// Read a frame from the camera / video device and convert it into
// THIS->frame. Returns 1 when a frame was processed, 0 otherwise.
//
// FIXME:: test non mmap mode!
//
int gv4l2_read_frame( CWEBCAM * _object )
{
	struct v4l2_buffer buf;
	if( !THIS->use_mmap ) {
		gv4l2_debug("Using READ interface");
		if( read (THIS->io, THIS->buffers[0].start, THIS->buffers[0].length) == -1) {
			switch (errno) {
				case EAGAIN:
					return 0;
				case EIO:
					/* Could ignore EIO, see spec. */
					break;
				default:
					// BUGFIX: previously fell through and
					// processed a buffer that was never filled
					gv4l2_debug("READ ERROR");
					return 0;
			}
		}
		gv4l2_process_image (THIS,THIS->buffers[0].start);
		return 1;
	}
	//
	// This is the MMAP based read code
	//
	MCLEAR (buf);
	buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	buf.memory = V4L2_MEMORY_MMAP;
	if( gv4l2_xioctl( THIS->io, VIDIOC_DQBUF, &buf) == -1 ) {
		gv4l2_debug("DQBUF Error");
		switch (errno) {
			case EAGAIN:
				gv4l2_debug("EAGAIN");
				return 0;
			case EIO:
				/* Could ignore EIO, see spec. */
				break;
			default:
				// BUGFIX: previously continued with an
				// undefined buf.index after a failed dequeue
				gv4l2_debug("VIDIOC_DQBUF READ ERROR");
				return 0;
		}
	}
	assert (buf.index < THIS->buffer_count);
	gv4l2_process_image (THIS,THIS->buffers[buf.index].start);
	if( gv4l2_xioctl( THIS->io, VIDIOC_QBUF, &buf) == -1 ) {
		gv4l2_debug("VIDIOC_QBUF READ ERROR");
		return 0;
	}
	return 1;
}
//=============================================================================
//
// gv4l2_resize( THIS , Width , Height )
//
// Resize the display.
// Going to cheat a little here, easy way is to completely deactivate
// and let it start up with a new width and height .. :)
// Returns 1 on success; raises a Gambas error and returns 0 on failure.
//
int gv4l2_resize( CWEBCAM * _object , int width , int height )
{
	if(! gv4l2_stop_capture( THIS ) ) {
		GB.Error("Failed to stop capturing on device");
		return 0;
	}
	gv4l2_uninit_device( THIS );
	//
	// See no reason to close it too ...
	//
	if( !gv4l2_init_device(THIS , width , height ) ) {
		GB.Error("Unable to initialise the device");
		return 0;
	}
	//
	// BUGFIX: the restart status was previously ignored (see FIXME in
	// gv4l2_start_capture); report the failure to the caller instead.
	//
	if( !gv4l2_start_capture( THIS ) ) {
		GB.Error("Failed to restart capturing on device");
		return 0;
	}
	return 1;
}