// %flair:license{
// This file is part of the Flair framework distributed under the
// CECILL-C License, Version 1.0.
// %flair:license}
// created: 2014/07/17
// filename: V4LCamera.cpp
//
// author: Guillaume Sanahuja
// Copyright Heudiasyc UMR UTC/CNRS 7253
//
// version: $Id: $
//
// purpose: base class for V4L camera
//
//
/*********************************************************************/
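
// Example usage (illustrative sketch, not part of the original file; the
// constructor arguments below are made-up values and it assumes the
// framework's usual flair::core::Thread::Start() entry point):
//
//   V4LCamera *cam = new V4LCamera("cam0", 0, 640, 480,
//                                  Image::Type::Format::YUYV, 6);
//   cam->Start();  // Run() then grabs frames and calls ProcessUpdate()
//                  // on the output image for downstream filters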

#include "V4LCamera.h"
#include <GroupBox.h>
#include <DoubleSpinBox.h>
#include <CheckBox.h>
#include <Label.h>
#include <Image.h>
#include <FrameworkManager.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/select.h>
#include <unistd.h>
#include <cerrno>
#include <cstdio>
#include <cstring>
#include <sys/mman.h>
#include <VisionFilter.h>

#define DEFAULT_V4L_BUFFERS 4

using std::string;
using namespace flair::core;
using namespace flair::gui;

namespace flair {
namespace sensor {

V4LCamera::V4LCamera(string name, uint8_t camera_index, uint16_t width, uint16_t height,
                     Image::Type::Format format, uint8_t priority)
    : Thread(getFrameworkManager(), name, priority),
      Camera(name, width, height, format) {

  string deviceName = "/dev/video" + std::to_string(camera_index);
  device = open(deviceName.c_str(), O_RDWR | O_NONBLOCK);
  if (device == -1) {
    Thread::Err("Cannot open %s\n", deviceName.c_str());
  } else {
    Printf("V4LCamera %s, opened %s\n", name.c_str(), deviceName.c_str());
  }

  if (format == Image::Type::Format::UYVY) {
    if (init(width, height, V4L2_PIX_FMT_UYVY) == -1) {
      Thread::Err("initialisation failed\n");
    }
  } else if (format == Image::Type::Format::YUYV) {
    if (init(width, height, V4L2_PIX_FMT_YUYV) == -1) {
      Thread::Err("initialisation failed\n");
    }
  } else {
    Thread::Err("format not supported\n");
  }

  /* All buffers must be queued before streaming is started, so fill the
     queue up front (see the V4L2 capture HowTo). */

  allocBuffers();

  for (int i = 0; i < nbBuffers; i++) {
    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof(v4l2_buffer));

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = (unsigned long)i;

    if (-1 == xioctl(device, VIDIOC_QBUF, &buf)) {
      Thread::Err("VIDIOC_QBUF error\n");
    }
  }

  /* enable the streaming */
  v4l2_buf_type type;
  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (-1 == xioctl(device, VIDIOC_STREAMON, &type)) {
    Thread::Err("VIDIOC_STREAMON error\n");
  }

  // ground station
  gain = new DoubleSpinBox(GetGroupBox()->NewRow(), "gain:", 0, 1, 0.1);
  exposure = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "exposure:", 0, 1, 0.1);
  bright = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "bright:", 0, 1, 0.1);
  contrast = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "contrast:", 0, 1, 0.1);
  hue = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "hue:", 0, 1, 0.1);
  sharpness = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "sharpness:", 0, 1, 0.1);
  sat = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "saturation:", 0, 1, 0.1);
  autogain = new CheckBox(GetGroupBox()->NewRow(), "autogain:");
  autoexposure = new CheckBox(GetGroupBox()->LastRowLastCol(), "autoexposure:");
  awb = new CheckBox(GetGroupBox()->LastRowLastCol(), "awb:");
  fps = new Label(GetGroupBox()->NewRow(), "fps");

  hasProblems = false;
}

V4LCamera::~V4LCamera() {
  for (int i = 0; i < nbBuffers; i++) {
    //FreeFunction((char*)buffers[i].start);
  }
  SafeStop();
  Join();
  close(device);
}

int V4LCamera::init(int width, int height, unsigned long colorspace) {
  struct v4l2_capability cap;
  memset(&cap, 0, sizeof(v4l2_capability));

  if (-1 == xioctl(device, VIDIOC_QUERYCAP, &cap)) {
    return -1;
  }

  if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
    Thread::Err("device does not support video capture\n");
    return -1;
  }

  struct v4l2_format form;
  memset(&form, 0, sizeof(v4l2_format));
  form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  /* read the current setting */
  if (-1 == xioctl(device, VIDIOC_G_FMT, &form)) {
    Thread::Err("Could not obtain specifics of capture window.\n");
    return -1;
  }

  /* set the values we want to change */
  form.fmt.pix.width = width;
  form.fmt.pix.height = height;
  form.fmt.pix.field = V4L2_FIELD_ANY;
  form.fmt.pix.pixelformat = colorspace;

  /* ask the device to change the size */
  if (-1 == xioctl(device, VIDIOC_S_FMT, &form)) {
    Thread::Err("Could not set specifics of capture window.\n");
    return -1;
  }

  /* get window info again, to get the real value */
  if (-1 == xioctl(device, VIDIOC_G_FMT, &form)) {
    Thread::Err("Could not obtain specifics of capture window.\n");
    return -1;
  }

  return 0;
}

/*
void V4LCamera::Run(void) {
  Time cam_time, new_time, fpsNow, fpsPrev;
  int fpsCounter = 0;

  // init image old
  GrabFrame();
  cam_time = GetTime();
  fpsPrev = cam_time;

  while (!ToBeStopped()) {
    // check for ps3eye disconnection in hds uav
    if (hasProblems == false) {
      struct v4l2_format form;
      form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      xioctl(device, VIDIOC_G_FMT, &form);
      if (xioctl(device, VIDIOC_G_FMT, &form) < 0) {
        Thread::Warn("camera disconnected\n");
        hasProblems = true;
      }
    }

    // fps counter
    fpsCounter++;
    if (GetTime() > (fpsPrev + 5 * (Time)1000000000)) {
      // every 5 seconds
      fpsNow = GetTime();
      fps->SetText("fps: %.1f",
                   fpsCounter / ((float)(fpsNow - fpsPrev) / 1000000000.));
      fpsCounter = 0;
      fpsPrev = fpsNow;
    }

    // cam properties
    if (gain->ValueChanged() == true && autogain->Value() == false)
      SetGain(gain->Value());
    if (exposure->ValueChanged() == true && autoexposure->Value() == false)
      SetExposure(exposure->Value());
    if (bright->ValueChanged() == true)
      SetBrightness(bright->Value());
    if (sat->ValueChanged() == true)
      SetSaturation(sat->Value());
    if (contrast->ValueChanged() == true)
      SetContrast(contrast->Value());
    if (hue->ValueChanged() == true)
      SetHue(hue->Value());
    if (sharpness->ValueChanged() == true)
      SetProperty(V4L2_CID_SHARPNESS, sharpness->Value());
    if (autogain->ValueChanged() == true) {
      if (autogain->Value() == true) {
        gain->setEnabled(false);
      } else {
        gain->setEnabled(true);
        SetGain(gain->Value());
      }
      SetAutoGain(autogain->Value());
    }
    if (autoexposure->ValueChanged() == true) {
      if (autoexposure->Value() == true) {
        exposure->setEnabled(false);
      } else {
        exposure->setEnabled(true);
        SetExposure(exposure->Value());
      }
      SetAutoExposure(autoexposure->Value());
    }
    if (awb->ValueChanged() == true)
      SetProperty(V4L2_CID_AUTO_WHITE_BALANCE, awb->Value());

    // get picture
    GrabFrame();
    new_time = GetTime();

    // check for ps3eye disconnection in hds uav
    if (new_time - cam_time > 100 * 1000 * 1000) {
      Thread::Warn("delta too large\n");
      hasProblems = true;
    }

    output->GetMutex();
    output->buffer = (char *)buffers[bufferIndex].start;
    output->ReleaseMutex();

    output->SetDataTime(cam_time);
    ProcessUpdate(output);

    cam_time = new_time;
  }

  close(device);
}
*/
int V4LCamera::allocBuffers() {
  struct v4l2_requestbuffers req;
  memset(&req, 0, sizeof(v4l2_requestbuffers));
  nbBuffers = DEFAULT_V4L_BUFFERS;

try_again:

  req.count = nbBuffers;
  req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  req.memory = V4L2_MEMORY_MMAP;

  if (-1 == xioctl(device, VIDIOC_REQBUFS, &req)) {
    if (EINVAL == errno) {
      Thread::Err("camera does not support memory mapping\n");
    } else {
      Thread::Err("VIDIOC_REQBUFS failed\n");
    }
    return -1;
  }

  if (req.count < nbBuffers) {
    if (nbBuffers == 1) {
      Thread::Err("Insufficient buffer memory\n");
      return -1;
    } else {
      nbBuffers--;
      Thread::Warn("Insufficient buffer memory -- decreasing buffers to %i\n", nbBuffers);
      goto try_again;
    }
  }

  for (int i = 0; i < req.count; i++) {
    struct v4l2_buffer buf;
    memset(&buf, 0, sizeof(v4l2_buffer));

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = i;

    if (-1 == xioctl(device, VIDIOC_QUERYBUF, &buf)) {
      Thread::Err("VIDIOC_QUERYBUF error\n");
      return -1;
    }

    if (output->GetDataType().GetSize() != buf.length) {
      Thread::Err("buf size is not as expected %i/%i\n", buf.length, output->GetDataType().GetSize());
      return -1;
    }

    buffers[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, device, buf.m.offset);

    if (MAP_FAILED == buffers[i]) {
      Thread::Err("mmap error\n");
      return -1;
    }
  }

  // allocate output data
  imageData = AllocFunction(output->GetDataType().GetSize());
  Printf("cmem allocated %i at %p\n", output->GetDataType().GetSize(), imageData);

  return 1;
}

int V4LCamera::AllocBuffers(void) {
  struct v4l2_requestbuffers requestbuffers;
  memset(&requestbuffers, 0, sizeof(v4l2_requestbuffers));

  unsigned int buffer_number = DEFAULT_V4L_BUFFERS;

try_again:

  requestbuffers.count = buffer_number;
  requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  requestbuffers.memory = V4L2_MEMORY_USERPTR;

  if (xioctl(device, VIDIOC_REQBUFS, &requestbuffers) == -1) {
    if (errno == EINVAL) {
      Thread::Err("VIDIOC_REQBUFS user memory not supported\n");
    } else {
      Thread::Err("VIDIOC_REQBUFS xioctl\n");
    }
    return -1;
  }

  nbBuffers = DEFAULT_V4L_BUFFERS;
  for (int i = 0; i < nbBuffers; i++) {
    buffers[i] = AllocFunction(output->GetDataType().GetSize());
  }

  return 1;
}

int V4LCamera::cvGrabFrame(void) {
  unsigned int count;

  count = 1;

  while (count-- > 0) {
    for (;;) {
      fd_set fds;
      struct timeval tv;
      int r;

      FD_ZERO(&fds);
      FD_SET(device, &fds);

      /* Timeout. */
      tv.tv_sec = 2;
      tv.tv_usec = 0;

      r = select(device + 1, &fds, NULL, NULL, &tv);

      if (-1 == r) {
        if (EINTR == errno)
          continue;

        perror("select");
      }

      if (0 == r) {
        fprintf(stderr, "select timeout\n");

        /* end the infinite loop */
        break;
      }

      if (read_frame_v4l2())
        break;
    }
  }
  return 1;
}

int V4LCamera::read_frame_v4l2(void) {
  struct v4l2_buffer buf;
  memset(&buf, 0, sizeof(v4l2_buffer));

  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  buf.memory = V4L2_MEMORY_MMAP;

  if (-1 == xioctl(device, VIDIOC_DQBUF, &buf)) {
    switch (errno) {
    case EAGAIN:
      return 0;

    case EIO:
      /* Could ignore EIO, see spec. */

      /* fall through */

    default:
      /* display the error and stop processing */
      perror("VIDIOC_DQBUF");
      return 1;
    }
  }

  if (buf.index >= nbBuffers) {
    Thread::Err("buf.index >= nbBuffers\n");
  }

  bufferIndex = buf.index;

  if (-1 == xioctl(device, VIDIOC_QBUF, &buf))
    perror("VIDIOC_QBUF");

  return 1;
}

void V4LCamera::Run(void) {
  Time cam_time, new_time, fpsNow, fpsPrev;
  //IplImage *img; // raw image
  int fpsCounter = 0;

  // init image old
  if (!cvGrabFrame()) {
    Printf("Could not grab a frame\n");
  }
  cam_time = GetTime();
  fpsPrev = cam_time;

  while (!ToBeStopped()) {
    // fps counter
    fpsCounter++;
    if (fpsCounter == 100) {
      fpsNow = GetTime();
      fps->SetText("fps: %.1f",
                   fpsCounter / ((float)(fpsNow - fpsPrev) / 1000000000.));
      fpsCounter = 0;
      fpsPrev = fpsNow;
    }

    // cam properties
    if (gain->ValueChanged() == true && autogain->Value() == false)
      SetGain(gain->Value());
    if (exposure->ValueChanged() == true && autoexposure->Value() == false)
      SetExposure(exposure->Value());
    if (bright->ValueChanged() == true)
      SetBrightness(bright->Value());
    if (sat->ValueChanged() == true)
      SetSaturation(sat->Value());
    if (contrast->ValueChanged() == true)
      SetContrast(contrast->Value());
    if (hue->ValueChanged() == true)
      SetHue(hue->Value());
    //if (sharpness->ValueChanged() == true)
    //  cvSetCaptureProperty(capture, CV_CAP_PROP_SHARPNESS, sharpness->Value());
    if (autogain->ValueChanged() == true) {
      if (autogain->Value() == true) {
        gain->setEnabled(false);
      } else {
        gain->setEnabled(true);
        SetGain(gain->Value());
      }
      SetAutoGain(autogain->Value());
    }
    if (autoexposure->ValueChanged() == true) {
      if (autoexposure->Value() == true) {
        exposure->setEnabled(false);
      } else {
        exposure->setEnabled(true);
        SetExposure(exposure->Value());
      }
      SetAutoExposure(autoexposure->Value());
    }
    //if (awb->ValueChanged() == true)
    //  cvSetCaptureProperty(capture, CV_CAP_PROP_AWB, awb->Value());

    // cam pictures
    cvRetrieveRawFrame();
    if (!cvGrabFrame()) {
      Printf("Could not grab a frame\n");
    }
    new_time = GetTime();

    // check for ps3eye disconnection in hds uav
    if (new_time - cam_time > 100 * 1000 * 1000) {
      Thread::Warn("delta too large\n");
      hasProblems = true;
    }

    output->GetMutex();

    output->buffer = imageData;
    output->ReleaseMutex();

    output->SetDataTime(cam_time);
    ProcessUpdate(output);
    cam_time = new_time;
  }
}

void V4LCamera::cvRetrieveRawFrame(void) {
  memcpy(imageData, (char *)buffers[bufferIndex], output->GetDataType().GetSize());
}

int V4LCamera::QueueBuffer(int index) {
  struct v4l2_buffer buf;
  if (index >= 0 && index < nbBuffers) {
    memset(&buf, 0, sizeof(v4l2_buffer));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_USERPTR; //V4L2_MEMORY_MMAP;
    buf.index = (unsigned long)index;
    buf.m.userptr = (unsigned long)(buffers[index]);
    buf.length = output->GetDataType().GetSize();

    int ret = xioctl(device, VIDIOC_QBUF, &buf);
    if (ret == -1) {
      Thread::Err("VIDIOC_QBUF xioctl %s\n", strerror(errno));
      return -1;
    }
  }
  return 0;
}

int V4LCamera::GrabFrame(void) {
  // queue previous buffer
  if (QueueBuffer(bufferIndex) < 0) return -1;

  fd_set fds;
  struct timeval tv;
  FD_ZERO(&fds);
  FD_SET(device, &fds);

  tv.tv_sec = 0;
  tv.tv_usec = 100000;

  int r = select(device + 1, &fds, NULL, NULL, &tv);

  if (-1 == r) {
    char errorMsg[256];
    Thread::Err("select (%s)\n", strerror_r(errno, errorMsg, sizeof(errorMsg)));
    return -1;
  }

  if (0 == r) {
    Thread::Err("select timeout\n");
    return -1;
  }

  struct v4l2_buffer buf;
  memset(&buf, 0, sizeof(v4l2_buffer));
  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  buf.memory = V4L2_MEMORY_USERPTR; //V4L2_MEMORY_MMAP;

  // get last captured image
  int prevDQbuf = -1;
  for (int i = 0; i < 4; i++) {
    if (xioctl(device, VIDIOC_DQBUF, &buf) == -1) {
      if (errno == EAGAIN) {
        break;
      } else {
        Thread::Err("VIDIOC_DQBUF xioctl\n");
        return -1;
      }
    } else {
      if (prevDQbuf != -1) {
        QueueBuffer(prevDQbuf);
      }
      for (int j = 0; j < nbBuffers; j++) {
        if ((void *)(buf.m.userptr) == buffers[j]) {
          prevDQbuf = j;
          bufferIndex = j;
          break;
        }
      }
    }
  }

  return 1;
}

bool V4LCamera::HasProblems(void) {
  return hasProblems;
}

void V4LCamera::SetAutoGain(bool value) {
  SetProperty(V4L2_CID_AUTOGAIN, value);
}

void V4LCamera::SetAutoExposure(bool value) {
  Thread::Warn("not implemented\n");
}

void V4LCamera::SetGain(float value) {
  SetProperty(V4L2_CID_GAIN, value);
}

void V4LCamera::SetExposure(float value) {
  SetProperty(V4L2_CID_EXPOSURE, value);
}

void V4LCamera::SetBrightness(float value) {
  SetProperty(V4L2_CID_BRIGHTNESS, value);
}

void V4LCamera::SetSaturation(float value) {
  SetProperty(V4L2_CID_SATURATION, value);
}

void V4LCamera::SetHue(float value) {
  SetProperty(V4L2_CID_HUE, value);
}

void V4LCamera::SetContrast(float value) {
  SetProperty(V4L2_CID_CONTRAST, value);
}

float V4LCamera::GetProperty(int property) {
  // get min and max value
  struct v4l2_queryctrl queryctrl;
  queryctrl.id = property;
  if (xioctl(device, VIDIOC_QUERYCTRL, &queryctrl) == -1) return -1;
  int min = queryctrl.minimum;
  int max = queryctrl.maximum;

  // get current value
  struct v4l2_control control;
  memset(&control, 0, sizeof(v4l2_control));
  control.id = property;
  if (xioctl(device, VIDIOC_G_CTRL, &control) == -1) return -1;

  return ((float)control.value - min + 1) / (max - min);
}

void V4LCamera::SetProperty(int property, float value) {
  // get min and max value
  struct v4l2_queryctrl queryctrl;
  queryctrl.id = property;
  if (xioctl(device, VIDIOC_QUERYCTRL, &queryctrl) == -1) {
    Thread::Warn("prop %x, VIDIOC_QUERYCTRL failed\n", property);
  }
  int min = queryctrl.minimum;
  int max = queryctrl.maximum;

  // set value
  struct v4l2_control control;
  memset(&control, 0, sizeof(v4l2_control));
  control.id = property;
  control.value = (int)(value * (max - min) + min);
  if (xioctl(device, VIDIOC_S_CTRL, &control) == -1) {
    Thread::Warn("prop %x, VIDIOC_S_CTRL failed\n", property);
  }
}
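// Worked example with assumed driver ranges (illustrative only, not from the
// original source): for a control reporting min=0 and max=255,
// SetProperty(id, 0.5f) gives control.value = (int)(0.5 * (255 - 0) + 0) = 127,
// i.e. the normalized 0..1 setting maps to roughly mid-range.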

int V4LCamera::xioctl(int fd, int request, void *arg) {
  int r;

  do r = ioctl(fd, request, arg);
  while (-1 == r && EINTR == errno);

  return r;
}

} // end namespace sensor
} // end namespace flair