source: flair-src/trunk/lib/FlairSensorActuator/src/V4LCamera.cpp@ 347

Last change on this file since 347 was 347, checked in by Sanahuja Guillaume, 4 years ago

modif v4l

1// %flair:license{
2// This file is part of the Flair framework distributed under the
3// CECILL-C License, Version 1.0.
4// %flair:license}
5// created: 2014/07/17
6// filename: V4LCamera.cpp
7//
8// author: Guillaume Sanahuja
9// Copyright Heudiasyc UMR UTC/CNRS 7253
10//
11// version: $Id: $
12//
13// purpose: base class for V4l camera
14//
15//
16/*********************************************************************/
17
18#include "V4LCamera.h"
19#include <GroupBox.h>
20#include <DoubleSpinBox.h>
21#include <CheckBox.h>
22#include <Label.h>
23#include <Image.h>
24#include <FrameworkManager.h>
25#include <fcntl.h>
26#include <linux/videodev2.h>
27#include <sys/ioctl.h>
28#include <unistd.h>
29#include <cstring>
30#include <sys/mman.h>
31#include <VisionFilter.h>
32
33#define DEFAULT_V4L_BUFFERS 4
34
35#define CLEAR(x) memset (&(x), 0, sizeof (x))
36
37using std::string;
38using namespace flair::core;
39using namespace flair::gui;
40
41namespace flair {
42namespace sensor {
43
44V4LCamera::V4LCamera(string name,uint8_t camera_index, uint16_t width, uint16_t height,
45 Image::Type::Format format, uint8_t priority)
46 : Thread(getFrameworkManager(), name, priority),
47 Camera(name, width, height, format) {
48
49 string deviceName="/dev/video"+std::to_string(camera_index);
50 device = open(deviceName.c_str(), O_RDWR | O_NONBLOCK);
51 if (device == -1) {
52 Thread::Err("Cannot open %s\n",deviceName.c_str());
53 } else {
54 Printf("V4LCamera %s, opened %s\n",name.c_str(),deviceName.c_str());
55 }
56
57 /* without memset some fields aren't initialized, so fill the structure with zeros to start clean */
58 memset(&capture,0,sizeof(CvCaptureCAM_V4L));
59 n_buffers = 0;
60
61
62 if(format == Image::Type::Format::UYVY) {
63 if(init(width,height,V4L2_PIX_FMT_UYVY) == -1) {
64 Thread::Err("initialisation failed\n");
65 }
66 } else if (format == Image::Type::Format::YUYV) {
67 if(init(width,height,V4L2_PIX_FMT_YUYV) == -1) {
68 Thread::Err("initialisation failed\n");
69 }
70 } else {
71 Thread::Err("format not supported\n");
72 }
73
74 /* This is just a technicality, but all buffers must be queued before
75 streaming starts, so fill them all up here (see the V4L2 capture HowTo). */
76
77 allocBuffers();
78
79 for (capture.bufferIndex = 0;capture.bufferIndex < ((int)capture.req.count);++capture.bufferIndex) {
80 struct v4l2_buffer buf;
81
82 CLEAR (buf);
83
84 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
85 buf.memory = V4L2_MEMORY_MMAP;
86 buf.index = (unsigned long)capture.bufferIndex;
87
88 if (-1 == xioctl (device, VIDIOC_QBUF, &buf)) {
89 Thread::Err("VIDIOC_QBUF error\n");
90 }
91 }
92
93 /* enable the streaming */
94 capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
95 if (-1 == xioctl (device, VIDIOC_STREAMON,&capture.type)) {
96 /* error enabling the stream */
97 Thread::Err("VIDIOC_STREAMON error\n");
98 }
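// note: at this point all mmap'ed buffers are queued and VIDIOC_STREAMON has
// started the capture stream; filled buffers are dequeued later by
// cvGrabFrame()/read_frame_v4l2() from the acquisition thread in Run().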
99
100 // ground station
101 gain = new DoubleSpinBox(GetGroupBox()->NewRow(), "gain:", 0, 1, 0.1);
102 exposure = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "exposure:", 0,1, 0.1);
103 bright = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "bright:", 0, 1, 0.1);
104 contrast = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "contrast:", 0,1, 0.1);
105 hue = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "hue:", 0, 1, 0.1);
106 sharpness = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "sharpness:", 0, 1, 0.1);
107 sat = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "saturation:", 0, 1,0.1);
108 autogain = new CheckBox(GetGroupBox()->NewRow(), "autogain:");
109 autoexposure = new CheckBox(GetGroupBox()->LastRowLastCol(), "autoexposure:");
110 awb = new CheckBox(GetGroupBox()->LastRowLastCol(), "awb:");
111 fps = new Label(GetGroupBox()->NewRow(), "fps");
112
113 hasProblems=false;
114}
115
116V4LCamera::~V4LCamera() {
117 for (int n_buffers = 0; n_buffers < nbBuffers; n_buffers++) {
118 //FreeFunction((char*)buffers[n_buffers].start);
119 }
120 SafeStop();
121 Join();
122}
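// note: buffer memory is not released here (the FreeFunction() call in the
// loop above is commented out); the destructor only stops and joins the
// acquisition thread.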
123
124int V4LCamera::init(int width, int height,unsigned long colorspace) {
125 CLEAR (capture.cap);
126 if(-1 == xioctl(device, VIDIOC_QUERYCAP, &capture.cap)) {
127 return -1;
128 } else {
129 CLEAR (capture.capability);
130 capture.capability.type = capture.cap.capabilities;
131
132 /* Query the currently selected video input */
133 if (-1 == xioctl(device, VIDIOC_G_INPUT, &capture.capability.channels)) {
134 return -1;
135 }
136 }
137
138 if ((capture.cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
139 Thread::Err("device is unable to capture video memory.\n");
140 return -1;
141 }
142
143 /* Find Window info */
144 CLEAR (capture.form);
145 capture.form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
146
147 if (-1 == xioctl (device, VIDIOC_G_FMT, &capture.form)) {
148 Thread::Err("Could not obtain specifics of capture window.\n");
149 return -1;
150 }
151
152 setVideoSize(width, height,colorspace);
153
154 unsigned int min;
155
156 /* Buggy driver paranoia. */
157 min = capture.form.fmt.pix.width * 2;
158 if (capture.form.fmt.pix.bytesperline < min) {
159 capture.form.fmt.pix.bytesperline = min;
160 }
161
162 min = capture.form.fmt.pix.bytesperline * capture.form.fmt.pix.height;
163 if (capture.form.fmt.pix.sizeimage < min) {
164 capture.form.fmt.pix.sizeimage = min;
165 }
166
167 return 1;
168}
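// note: init() returns 1 on success and -1 on failure; the bytesperline and
// sizeimage clamps above work around drivers that report values smaller than
// the minimum implied by the negotiated width and height.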
169
170int V4LCamera::setVideoSize(int width, int height,unsigned long colorspace) {
171
172 CLEAR (capture.crop);
173 capture.crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
174 capture.crop.c.left = 0;
175 capture.crop.c.top = 0;
176 capture.crop.c.height = height*24;
177 capture.crop.c.width = width*24;
178
179 /* set the crop area, but don't exit if the device doesn't support cropping */
180 xioctl (device, VIDIOC_S_CROP, &capture.crop);
181
182 CLEAR (capture.form);
183 capture.form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
184
185 /* read the current setting */
186 xioctl (device, VIDIOC_G_FMT, &capture.form);
187
188 /* set the values we want to change */
189 capture.form.fmt.pix.width = width;
190 capture.form.fmt.pix.height = height;
191 capture.form.fmt.win.chromakey = 0;
192 capture.form.fmt.win.field = V4L2_FIELD_ANY;
193 capture.form.fmt.win.clips = 0;
194 capture.form.fmt.win.clipcount = 0;
195 capture.form.fmt.pix.field = V4L2_FIELD_ANY;
196 capture.form.fmt.pix.pixelformat = colorspace;
197
198 /* ask the device to change the size*/
199 if (-1 == xioctl (device, VIDIOC_S_FMT, &capture.form)) {
200 Thread::Err("Could not set specifics of capture window.\n");
201 return -1;
202 }
203
204 /* Get window info again, to get the real value */
205 if (-1 == xioctl (device, VIDIOC_G_FMT, &capture.form)) {
206 Thread::Err("Could not obtain specifics of capture window.\n");
207 return -1;
208 }
209 return 0;
210
211}
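// note: VIDIOC_S_FMT lets the driver adjust the requested size and pixel
// format, so the final VIDIOC_G_FMT reads back the values actually in effect.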
212
213/*
214V4LCamera::V4LCamera(string name,
215 uint8_t camera_index, uint16_t width, uint16_t height,
216 Image::Type::Format format, uint8_t priority)
217 : Thread(getFrameworkManager(), name, priority),
218 Camera(name, width, height, format) {
219
220 string deviceName="/dev/video"+std::to_string(camera_index);
221 device = open(deviceName.c_str(), O_RDWR | O_NONBLOCK);
222 if (device == -1) {
223 Thread::Err("Cannot open %s\n",deviceName.c_str());
224 } else {
225 Printf("V4LCamera %s, opened %s\n",name.c_str(),deviceName.c_str());
226 }
227
228 struct v4l2_capability cap;
229 memset(&cap, 0, sizeof (v4l2_capability));
230 if (xioctl (device, VIDIOC_QUERYCAP, &cap)==-1) {
231 Thread::Err("VIDIOC_QUERYCAP xioctl\n");
232 }
233 if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
234 Thread::Err("device is unable to capture video memory.\n");
235 }
236
237 //get v4l2_format
238 struct v4l2_format form;
239 form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
240 if(xioctl (device, VIDIOC_G_FMT,&form)==-1) {
241 Thread::Err("VIDIOC_G_FMT xioctl\n");
242 }
243
244 //set width, height and format
245 if (format == Image::Type::Format::UYVY) {
246 form.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
247 } else if (format == Image::Type::Format::YUYV) {
248 form.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
249 } else {
250 Thread::Err("format not supported\n");
251 }
252
253 form.fmt.pix.width = width;
254 form.fmt.pix.height = height;
255 form.fmt.win.chromakey = 0;
256 form.fmt.win.field = V4L2_FIELD_ANY;
257 form.fmt.win.clips = 0;
258 form.fmt.win.clipcount = 0;
259 form.fmt.pix.field = V4L2_FIELD_ANY;
260 if(xioctl (device, VIDIOC_S_FMT,&form)==-1) {
261 Thread::Err("VIDIOC_S_FMT xioctl\n");
262 }
263
264 //alloc and queue bufs
265 AllocBuffers();
266 for (int bufferIndex = 0; bufferIndex < nbBuffers;++bufferIndex) {
267 QueueBuffer(bufferIndex);
268 }
269
270 // enable the streaming
271 v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
272 if (xioctl (device, VIDIOC_STREAMON,&type)==-1) {
273 Thread::Err("VIDIOC_STREAMON xioctl\n");
274 }
275
276 // skip first frame. it is often bad -- this goes unnoticed in traditional apps,
277 // but could be fatal if bad jpeg is enabled
278 bufferIndex=-1;
279 GrabFrame();
280
281 // ground station
282 gain = new DoubleSpinBox(GetGroupBox()->NewRow(), "gain:", 0, 1, 0.1);
283 exposure = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "exposure:", 0,1, 0.1);
284 bright = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "bright:", 0, 1, 0.1);
285 contrast = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "contrast:", 0,1, 0.1);
286 hue = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "hue:", 0, 1, 0.1);
287 sharpness = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "sharpness:", 0, 1, 0.1);
288 sat = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "saturation:", 0, 1,0.1);
289 autogain = new CheckBox(GetGroupBox()->NewRow(), "autogain:");
290 autoexposure = new CheckBox(GetGroupBox()->LastRowLastCol(), "autoexposure:");
291 awb = new CheckBox(GetGroupBox()->LastRowLastCol(), "awb:");
292 fps = new Label(GetGroupBox()->NewRow(), "fps");
293
294 hasProblems=false;
295}
296
297V4LCamera::~V4LCamera() {
298 for (int n_buffers = 0; n_buffers < nbBuffers; n_buffers++) {
299 FreeFunction((char*)buffers[n_buffers].start);
300 }
301 SafeStop();
302 Join();
303}
304
305void V4LCamera::Run(void) {
306 Time cam_time, new_time, fpsNow, fpsPrev;
307 int fpsCounter = 0;
308
309 // init image old
310 GrabFrame();
311 cam_time = GetTime();
312 fpsPrev = cam_time;
313
314 while (!ToBeStopped()) {
315 //check for ps3eye disconnection in hds uav
316 if(hasProblems==false) {
317 struct v4l2_format form;
318 form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
319 xioctl(device, VIDIOC_G_FMT,&form);
320 if(xioctl (device, VIDIOC_G_FMT,&form)<0) {
321 Thread::Warn("camera disconnected\n");
322 hasProblems=true;
323 }
324 }
325
326 // fps counter
327 fpsCounter++;
328 if (GetTime() > (fpsPrev + 5 * (Time)1000000000)) {
329 // every 5 seconds
330 fpsNow = GetTime();
331 fps->SetText("fps: %.1f",
332 fpsCounter / ((float)(fpsNow - fpsPrev) / 1000000000.));
333 fpsCounter = 0;
334 fpsPrev = fpsNow;
335 }
336
337 // cam properties
338 if (gain->ValueChanged() == true && autogain->Value() == false)
339 SetGain(gain->Value());
340 if (exposure->ValueChanged() == true && autoexposure->Value() == false)
341 SetExposure(exposure->Value());
342 if (bright->ValueChanged() == true)
343 SetBrightness(bright->Value());
344 if (sat->ValueChanged() == true)
345 SetSaturation(sat->Value());
346 if (contrast->ValueChanged() == true)
347 SetContrast(contrast->Value());
348 if (hue->ValueChanged() == true)
349 SetHue(hue->Value());
350 if (sharpness->ValueChanged() == true)
351 SetProperty(V4L2_CID_SHARPNESS, sharpness->Value());
352 if (autogain->ValueChanged() == true) {
353 if (autogain->Value() == true) {
354 gain->setEnabled(false);
355 } else {
356 gain->setEnabled(true);
357 SetGain(gain->Value());
358 }
359 SetAutoGain(autogain->Value());
360 }
361 if (autoexposure->ValueChanged() == true) {
362 if (autoexposure->Value() == true) {
363 exposure->setEnabled(false);
364 } else {
365 exposure->setEnabled(true);
366 SetExposure(exposure->Value());
367 }
368 SetAutoExposure(autoexposure->Value());
369 }
370 if (awb->ValueChanged() == true)
371 SetProperty(V4L2_CID_AUTO_WHITE_BALANCE, awb->Value());
372
373 // get picture
374 GrabFrame();
375 new_time = GetTime();
376
377 //check for ps3eye disconnection in hds uav
378 if(new_time-cam_time>100*1000*1000) {
379 Thread::Warn("time delta too large\n");
380 hasProblems=true;
381 }
382
383 output->GetMutex();
384 output->buffer=(char*)buffers[bufferIndex].start;
385 output->ReleaseMutex();
386
387 output->SetDataTime(cam_time);
388 ProcessUpdate(output);
389
390 cam_time = new_time;
391 }
392
393 close(device);
394}
395*/
396int V4LCamera::allocBuffers() {
397 CLEAR (capture.req);
398
399 unsigned int buffer_number = DEFAULT_V4L_BUFFERS;
400
401 try_again:
402
403 capture.req.count = buffer_number;
404 capture.req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
405 capture.req.memory = V4L2_MEMORY_MMAP;
406
407 if (-1 == xioctl (device, VIDIOC_REQBUFS, &capture.req)) {
408 if (EINVAL == errno) {
409 Thread::Warn("does not support memory mapping\n");
410 } else {
411 perror ("VIDIOC_REQBUFS");
412 }
413 /* free capture, and returns an error code */
414 return -1;
415 }
416
417 if (capture.req.count < buffer_number) {
418 if (buffer_number == 1) {
419 Thread::Warn("Insufficient buffer memory\n");
420 /* free capture, and returns an error code */
421 return -1;
422 } else {
423 buffer_number--;
424 Thread::Warn("Insufficient buffer memory -- decreasing buffers\n");
425 goto try_again;
426 }
427 }
428
429 for (n_buffers = 0; n_buffers < capture.req.count; ++n_buffers) {
430 struct v4l2_buffer buf;
431 CLEAR (buf);
432
433 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
434 buf.memory = V4L2_MEMORY_MMAP;
435 buf.index = n_buffers;
436
437 if (-1 == xioctl (device, VIDIOC_QUERYBUF, &buf)) {
438 Thread::Warn ("VIDIOC_QUERYBUF error\n");
439 /* free capture, and returns an error code */
440 return -1;
441 }
442
443 capture.buffers[n_buffers].length = buf.length;
444 capture.buffers[n_buffers].start =
445 mmap (NULL /* start anywhere */,
446 buf.length,
447 PROT_READ | PROT_WRITE /* required */,
448 MAP_SHARED /* recommended */,
449 device, buf.m.offset);
450
451 if (MAP_FAILED == capture.buffers[n_buffers].start) {
452 Thread::Warn("mmap error\n");
453 /* free capture, and returns an error code */
454 return -1;
455 }
456 }
457
458 /* Set up Image data */
459 /*
460 cvInitImageHeader( &capture.frame,
461 cvSize( capture.captureWindow.width,
462 capture.captureWindow.height ),
463 IPL_DEPTH_8U, 3, IPL_ORIGIN_TL, 4 );*/
464 /* Allocate space for RGBA data */
465 capture.imageSize=capture.form.fmt.pix.width*capture.form.fmt.pix.height*2;
466 capture.imageData = AllocFunction(capture.imageSize);
467 Printf("cmem allocated %i at %x\n",capture.imageSize,capture.imageData);
468
469 return 1;
470};
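// note: imageSize assumes a packed 16-bit-per-pixel format (UYVY/YUYV use
// 2 bytes per pixel), matching the formats accepted in the constructor;
// imageData is allocated with AllocFunction(), presumably the contiguous
// memory allocator declared in VisionFilter.h (included above).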
471
472
473int V4LCamera::cvGrabFrame(void) {
474 unsigned int count;
475
476 count = 1;
477
478 while (count-- > 0) {
479 for (;;) {
480 fd_set fds;
481 struct timeval tv;
482 int r;
483
484 FD_ZERO (&fds);
485 FD_SET (device, &fds);
486
487 /* Timeout. */
488 tv.tv_sec = 2;
489 tv.tv_usec = 0;
490
491 r = select (device+1, &fds, NULL, NULL, &tv);
492
493 if (-1 == r) {
494 if (EINTR == errno)
495 continue;
496
497 perror ("select");
498 }
499
500 if (0 == r) {
501 fprintf (stderr, "select timeout\n");
502
503 /* end the infinite loop */
504 break;
505 }
506
507 if (read_frame_v4l2 (&capture))
508 break;
509 }
510 }
511 return(1);
512}
513
514int V4LCamera::read_frame_v4l2(CvCaptureCAM_V4L* capture) {
515 struct v4l2_buffer buf;
516
517 CLEAR (buf);
518
519 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
520 buf.memory = V4L2_MEMORY_MMAP;
521
522 if (-1 == xioctl (device, VIDIOC_DQBUF, &buf)) {
523 switch (errno) {
524 case EAGAIN:
525 return 0;
526
527 case EIO:
528 /* Could ignore EIO, see spec. */
529
530 /* fall through */
531
532 default:
533 /* display the error and stop processing */
534 perror ("VIDIOC_DQBUF");
535 return 1;
536 }
537 }
538
539 if(buf.index >= capture->req.count) {
540 Thread::Err("buf.index >= capture->req.count\n");
541 }
542
543
544 capture->bufferIndex = buf.index;
545
546
547 if (-1 == xioctl (device, VIDIOC_QBUF, &buf))
548 perror ("VIDIOC_QBUF");
549
550 return 1;
551}
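// note: read_frame_v4l2() dequeues one filled buffer (VIDIOC_DQBUF), records
// its index in capture.bufferIndex so cvRetrieveRawFrame() can copy it, then
// immediately requeues the buffer (VIDIOC_QBUF) for the driver to refill.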
552
553void V4LCamera::Run(void) {
554 Time cam_time, new_time, fpsNow, fpsPrev;
555 //IplImage *img; // raw image
556 int fpsCounter = 0;
557
558 // init image old
559 if (!cvGrabFrame()) {
560 Printf("Could not grab a frame\n");
561 }
562 cam_time = GetTime();
563 fpsPrev = cam_time;
564
565 while (!ToBeStopped()) {
566 // fps counter
567 fpsCounter++;
568 if (fpsCounter == 100) {
569 fpsNow = GetTime();
570 fps->SetText("fps: %.1f",
571 fpsCounter / ((float)(fpsNow - fpsPrev) / 1000000000.));
572 fpsCounter = 0;
573 fpsPrev = fpsNow;
574 }
575
576 // cam properties
577 if (gain->ValueChanged() == true && autogain->Value() == false)
578 SetGain(gain->Value());
579 if (exposure->ValueChanged() == true && autoexposure->Value() == false)
580 SetExposure(exposure->Value());
581 if (bright->ValueChanged() == true)
582 SetBrightness(bright->Value());
583 if (sat->ValueChanged() == true)
584 SetSaturation(sat->Value());
585 if (contrast->ValueChanged() == true)
586 SetContrast(contrast->Value());
587 if (hue->ValueChanged() == true)
588 SetHue(hue->Value());
589 //if (sharpness->ValueChanged() == true)
590 // cvSetCaptureProperty(capture, CV_CAP_PROP_SHARPNESS, sharpness->Value());
591 if (autogain->ValueChanged() == true) {
592 if (autogain->Value() == true) {
593 gain->setEnabled(false);
594 } else {
595 gain->setEnabled(true);
596 SetGain(gain->Value());
597 }
598 SetAutoGain(autogain->Value());
599 }
600 if (autoexposure->ValueChanged() == true) {
601 if (autoexposure->Value() == true) {
602 exposure->setEnabled(false);
603 } else {
604 exposure->setEnabled(true);
605 SetExposure(exposure->Value());
606 }
607 SetAutoExposure(autoexposure->Value());
608 }
609 //if (awb->ValueChanged() == true)
610 // cvSetCaptureProperty(capture, CV_CAP_PROP_AWB, awb->Value());
611
612 // cam pictures
613 cvRetrieveRawFrame(&capture);
614 if (!cvGrabFrame()) {
615 Printf("Could not grab a frame\n");
616 }
617 new_time = GetTime();
618
619 //check for ps3eye disconnection in hds uav
620 if(new_time-cam_time>100*1000*1000) {
621 Thread::Warn("time delta too large\n");
622 hasProblems=true;
623 }
624
625 output->GetMutex();
626
627 output->buffer=capture.imageData;
628 output->ReleaseMutex();
629
630 output->SetDataTime(cam_time);
631 ProcessUpdate(output);
632 cam_time = new_time;
633 }
634
635 //cvReleaseCapture(&capture);
636}
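// note: the acquisition loop above applies GUI property changes, copies the
// current frame into output->buffer under the output mutex, timestamps it and
// calls ProcessUpdate(); the fps label is refreshed every 100 frames.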
637
638void V4LCamera::cvRetrieveRawFrame( CvCaptureCAM_V4L* capture) {
639
640
641
642
643 /* Now get what has already been captured as a IplImage return */
644
645 /* First, reallocate imageData if the frame size changed */
646
647
648/*
649 if(((unsigned long)capture->frame.width != capture->form.fmt.pix.width)
650 || ((unsigned long)capture->frame.height != capture->form.fmt.pix.height)) {
651 if (PALETTE_YUYV == 1 || PALETTE_UYVY == 1)
652 {
653 cvFree(&capture->frame.imageData);
654 cvInitImageHeader( &capture->frame,
655 cvSize( capture->form.fmt.pix.width,
656 capture->form.fmt.pix.height ),
657 IPL_DEPTH_8U,2, IPL_ORIGIN_TL, 4 );
658 capture->frame.imageData = (char *)cvAlloc(capture->frame.imageSize);
659 }else
660 {
661 fprintf( stderr,
662 "HIGHGUI ERROR: V4L: raw output not supported for this palette\n");
663 }
664
665 }
666
667 */
668
669
670
671 memcpy((char *)capture->imageData,(char *)capture->buffers[capture->bufferIndex].start,capture->imageSize);
672
673
674
675
676}
677int V4LCamera::QueueBuffer(int index) {
678 struct v4l2_buffer buf;
679 if(index>=0 && index<nbBuffers) {
680 memset(&buf, 0, sizeof (v4l2_buffer));
681 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
682 buf.memory = V4L2_MEMORY_USERPTR;//V4L2_MEMORY_MMAP;
683 buf.index = (unsigned long)index;
684 buf.m.userptr=(unsigned long)(buffers[index].start);
685 buf.length=buffers[index].length;
686
687 int ret=xioctl (device, VIDIOC_QBUF, &buf);
688 if (ret==-1) {
689 Thread::Err("VIDIOC_QBUF xioctl %s\n",strerror(errno));
690 return -1;
691 }
692 }
693 return 0;
694}
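// note: QueueBuffer() hands a user-space buffer (V4L2_MEMORY_USERPTR) back to
// the driver for refilling; the buffers themselves are allocated in
// AllocBuffers().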
695
696int V4LCamera::GrabFrame(void) {
697 //queue previous buffer
698 if(QueueBuffer(bufferIndex)<0) return -1;
699
700 fd_set fds;
701 struct timeval tv;
702 FD_ZERO (&fds);
703 FD_SET (device, &fds);
704
705 tv.tv_sec = 0;
706 tv.tv_usec = 100000;
707
708 int r = select (device+1, &fds, NULL, NULL, &tv);
709
710 if (-1 == r) {
711 char errorMsg[256];
712 Thread::Err("select (%s)\n", strerror_r(errno, errorMsg, sizeof(errorMsg)));
713 return -1;
714 }
715
716 if (0 == r) {
717 Thread::Err("select timeout\n");
718 return -1;
719 }
720
721 struct v4l2_buffer buf;
722 memset(&buf, 0, sizeof (v4l2_buffer));
723 buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
724 buf.memory = V4L2_MEMORY_USERPTR;//V4L2_MEMORY_MMAP;
725
726 //get last captured image
727 int prevDQbuf=-1;
728 for(int i=0;i<4;i++) {
729 if (xioctl (device, VIDIOC_DQBUF, &buf)==-1) {
730 if (errno==EAGAIN) {
731 break;
732 } else {
733 Thread::Err("VIDIOC_DQBUF xioctl\n");
734 return -1;
735 }
736 } else {
737 if(prevDQbuf!=-1) {
738 QueueBuffer(prevDQbuf);
739 }
740 for (int n_buffers = 0; n_buffers < nbBuffers; n_buffers++) {
741 if((void*)(buf.m.userptr)==buffers[n_buffers].start) {
742 prevDQbuf=n_buffers;
743 bufferIndex=n_buffers;
744 break;
745 }
746 }
747 }
748 }
749
750 return 1;
751}
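// note: GrabFrame() requeues the previously used buffer, waits up to 100 ms
// for new data with select(), then drains up to 4 VIDIOC_DQBUF calls (until
// EAGAIN) so that bufferIndex points to the most recently captured frame; any
// older dequeued frames are requeued immediately.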
752
753int V4LCamera::AllocBuffers(void) {
754 struct v4l2_requestbuffers requestbuffers;
755 memset(&requestbuffers, 0, sizeof (v4l2_requestbuffers));
756
757 unsigned int buffer_number = DEFAULT_V4L_BUFFERS;
758
759 try_again:
760
761 requestbuffers.count = buffer_number;
762 requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
763 requestbuffers.memory = V4L2_MEMORY_USERPTR;//V4L2_MEMORY_MMAP;
764
765 if (xioctl (device, VIDIOC_REQBUFS, &requestbuffers)==-1) {
766 if (errno==EINVAL) {
767 Thread::Err("VIDIOC_REQBUFS user memory not supported\n");
768 } else {
769 Thread::Err ("VIDIOC_REQBUFS xioctl\n");
770 }
771 return -1;
772 }
773
774 nbBuffers=DEFAULT_V4L_BUFFERS;
775 for (int n_buffers = 0; n_buffers < nbBuffers; n_buffers++) {
776 buffers[n_buffers].length = output->GetDataType().GetSize();
777 buffers[n_buffers].start =AllocFunction(output->GetDataType().GetSize());
778 }
779
780 return 1;
781};
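// note: with V4L2_MEMORY_USERPTR the driver writes directly into the memory
// allocated here by AllocFunction(); each buffer is sized to
// output->GetDataType().GetSize(), i.e. the byte size of one output image.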
782
783bool V4LCamera::HasProblems(void) {
784 return hasProblems;
785}
786
787void V4LCamera::SetAutoGain(bool value) {
788 SetProperty(V4L2_CID_AUTOGAIN, value);
789}
790
791void V4LCamera::SetAutoExposure(bool value) {
792 Thread::Warn("not implemented\n");
793}
794
795void V4LCamera::SetGain(float value) {
796 SetProperty(V4L2_CID_GAIN, value);
797}
798
799void V4LCamera::SetExposure(float value) {
800 SetProperty(V4L2_CID_EXPOSURE, value);
801}
802
803void V4LCamera::SetBrightness(float value) {
804 SetProperty(V4L2_CID_BRIGHTNESS, value);
805}
806
807void V4LCamera::SetSaturation(float value) {
808 SetProperty(V4L2_CID_SATURATION, value);
809}
810
811void V4LCamera::SetHue(float value) {
812 SetProperty(V4L2_CID_HUE, value);
813}
814
815void V4LCamera::SetContrast(float value) {
816 SetProperty(V4L2_CID_CONTRAST, value);
817}
818
819float V4LCamera::GetProperty(int property) {
820 //get min and max value
821 struct v4l2_queryctrl queryctrl;
822 queryctrl.id = property;
823 if(xioctl (device, VIDIOC_QUERYCTRL,&queryctrl)==-1) return -1;
824 int min = queryctrl.minimum;
825 int max = queryctrl.maximum;
826
827 //set value
828 struct v4l2_control control;
829 memset (&control, 0, sizeof (v4l2_control));
830 control.id = property;
831 if(xioctl (device,VIDIOC_G_CTRL, &control)==-1) return -1;
832
833 return ((float)control.value - min) / (float)(max - min);
834}
835
836void V4LCamera::SetProperty(int property,float value) {
837 //get min and max value
838 struct v4l2_queryctrl queryctrl;
839 queryctrl.id = property;
840 if(xioctl (device, VIDIOC_QUERYCTRL,&queryctrl)==-1) {
841 Thread::Warn("prop %x, VIDIOC_QUERYCTRL failed\n",property);
842 }
843 int min = queryctrl.minimum;
844 int max = queryctrl.maximum;
845
846 //set value
847 struct v4l2_control control;
848 memset (&control, 0, sizeof (v4l2_control));
849 control.id = property;
850 control.value = (int)(value * (max - min) + min);
851 if(xioctl (device,VIDIOC_S_CTRL, &control)==-1) {
852 Thread::Warn("prop %x, VIDIOC_S_CTRL failed\n",property);
853 }
854}
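// note: GetProperty()/SetProperty() map the driver's [minimum, maximum]
// control range to a normalized [0, 1] value; e.g. with minimum=0 and
// maximum=255, SetProperty(id, 0.5) writes control.value = 127.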
855
856int V4LCamera::xioctl( int fd, int request, void *arg) {
857 int r;
858
859 do r = ioctl (fd, request, arg);
860 while (-1 == r && EINTR == errno);
861
862 return r;
863}
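// note: xioctl() retries ioctl() as long as it fails with EINTR, so callers
// don't have to handle interrupted system calls themselves.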
864
865} // end namespace sensor
866} // end namespace flair