source: flair-src/trunk/lib/FlairSensorActuator/src/V4LCamera.cpp@ 332

Last change on this file since 332 was 330, checked in by Sanahuja Guillaume, 5 years ago

use less bandwidth in vprnlite

File size: 13.0 KB
// %flair:license{
// This file is part of the Flair framework distributed under the
// CECILL-C License, Version 1.0.
// %flair:license}
// created: 2014/07/17
// filename: V4LCamera.cpp
//
// author: Guillaume Sanahuja
// Copyright Heudiasyc UMR UTC/CNRS 7253
//
// version: $Id: $
//
// purpose: base class for V4l camera
//
//
/*********************************************************************/

#include "V4LCamera.h"
#include <GroupBox.h>
#include <DoubleSpinBox.h>
#include <CheckBox.h>
#include <Label.h>
#include <cvimage.h>
#include <FrameworkManager.h>
#include <fcntl.h>
#include <linux/videodev2.h>

#include <sys/ioctl.h>
#include <unistd.h>
#include <cstring>
#include <cerrno>       // errno, EAGAIN, EINTR, EINVAL used below
#include <sys/select.h> // select()/fd_set used in GrabFrame()
#include <sys/mman.h>

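// number of memory-mapped capture buffers requested from the driver;
// AllocBuffers() falls back to fewer if the driver cannot provide this many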
#define DEFAULT_V4L_BUFFERS 4

using std::string;
using namespace flair::core;
using namespace flair::gui;

namespace flair {
namespace sensor {

V4LCamera::V4LCamera(string name,
                     uint8_t camera_index, uint16_t width, uint16_t height,
                     cvimage::Type::Format format, uint8_t priority)
    : Thread(getFrameworkManager(), name, priority),
      Camera(name, width, height, format) {

  string deviceName="/dev/video"+std::to_string(camera_index);
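  // O_NONBLOCK: VIDIOC_DQBUF returns EAGAIN instead of blocking, so GrabFrame()
  // waits for data with select() instead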
  device = open(deviceName.c_str(), O_RDWR | O_NONBLOCK);
  if (device == -1) {
    Thread::Err("Cannot open %s\n", deviceName.c_str());
  }

  //get v4l2_format
  struct v4l2_format form;
  form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  xioctl (device, VIDIOC_G_FMT,&form);

  //set width, height and format
  if (format == cvimage::Type::Format::UYVY) {
    form.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
  } else if (format == cvimage::Type::Format::YUYV) {
    form.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
  } else {
    Thread::Err("format not supported\n");
  }

  form.fmt.pix.width = width;
  form.fmt.pix.height = height;
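  // note: fmt.pix and fmt.win share the same union storage in v4l2_format;
  // the fmt.win assignments below appear to be leftovers from older
  // V4L1-style capture code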
  form.fmt.win.chromakey = 0;
  form.fmt.win.field = V4L2_FIELD_ANY;
  form.fmt.win.clips = 0;
  form.fmt.win.clipcount = 0;
  form.fmt.pix.field = V4L2_FIELD_ANY;
  xioctl (device, VIDIOC_S_FMT, &form);
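  // note: the driver may adjust width/height/pixelformat during VIDIOC_S_FMT;
  // a stricter implementation would re-read form.fmt.pix here and check that
  // the negotiated format matches what was requested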

  /* All buffers must be queued before streaming is started, so that the
     driver always has somewhere to write (see the V4L2 capture HOWTO). */

  AllocBuffers();

  for (int bufferIndex = 0; bufferIndex < ((int)requestbuffers.count);++bufferIndex) {
    struct v4l2_buffer buf;

    memset(&buf, 0, sizeof (v4l2_buffer));

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = (unsigned long)bufferIndex;

    if (-1 == xioctl (device, VIDIOC_QBUF, &buf)) {
      Thread::Err("VIDIOC_QBUF xioctl\n");
      break;
    }
  }

  // enable the streaming
  v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (-1 == xioctl (device, VIDIOC_STREAMON,&type)) {
    Thread::Err("VIDIOC_STREAMON xioctl\n");
  }

  // skip the first frame: it is often bad -- this goes unnoticed in traditional
  // apps, but could be fatal if bad jpeg is enabled
  GrabFrame();

  // ground station
  gain = new DoubleSpinBox(GetGroupBox()->NewRow(), "gain:", 0, 1, 0.1);
  exposure = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "exposure:", 0,
                               1, 0.1);
  bright =
      new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "bright:", 0, 1, 0.1);
  contrast = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "contrast:", 0,
                               1, 0.1);
  hue = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "hue:", 0, 1, 0.1);
  sharpness = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "sharpness:",
                                0, 1, 0.1);
  sat = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "saturation:", 0, 1,
                          0.1);
  autogain = new CheckBox(GetGroupBox()->NewRow(), "autogain:");
  autoexposure = new CheckBox(GetGroupBox()->LastRowLastCol(), "autoexposure:");
  awb = new CheckBox(GetGroupBox()->LastRowLastCol(), "awb:");
  fps = new Label(GetGroupBox()->NewRow(), "fps");

  hasProblems=false;
}

V4LCamera::~V4LCamera() {
  SafeStop();
  Join();
}

void V4LCamera::Run(void) {
  Time cam_time, new_time, fpsNow, fpsPrev;
  char* buffer; // raw image
  int fpsCounter = 0;

  // grab an initial frame and initialize the timestamps
  GrabFrame();
  cam_time = GetTime();
  fpsPrev = cam_time;

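  // acquisition loop: watch for device problems, refresh the fps label,
  // push GUI property changes to the driver, then publish the latest frame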
  while (!ToBeStopped()) {
    // check for PS3 Eye disconnection on the HDS UAV
    if(hasProblems==false) {
      struct v4l2_format form;
      form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      if(xioctl (device, VIDIOC_G_FMT,&form)<0) {
        Thread::Warn("camera disconnected\n");
        hasProblems=true;
      }
    }

    // fps counter
    fpsCounter++;
    if (GetTime() > (fpsPrev + 5 * (Time)1000000000)) {
      // every 5 seconds
      fpsNow = GetTime();
      fps->SetText("fps: %.1f",
                   fpsCounter / ((float)(fpsNow - fpsPrev) / 1000000000.));
      fpsCounter = 0;
      fpsPrev = fpsNow;
    }

    // cam properties
    if (gain->ValueChanged() == true && autogain->Value() == false)
      SetGain(gain->Value());
    if (exposure->ValueChanged() == true && autoexposure->Value() == false)
      SetExposure(exposure->Value());
    if (bright->ValueChanged() == true)
      SetBrightness(bright->Value());
    if (sat->ValueChanged() == true)
      SetSaturation(sat->Value());
    if (contrast->ValueChanged() == true)
      SetContrast(contrast->Value());
    if (hue->ValueChanged() == true)
      SetHue(hue->Value());
    if (sharpness->ValueChanged() == true)
      SetProperty(V4L2_CID_SHARPNESS, sharpness->Value());
    if (autogain->ValueChanged() == true) {
      if (autogain->Value() == true) {
        gain->setEnabled(false);
      } else {
        gain->setEnabled(true);
        SetGain(gain->Value());
      }
      SetAutoGain(autogain->Value());
    }
    if (autoexposure->ValueChanged() == true) {
      if (autoexposure->Value() == true) {
        exposure->setEnabled(false);
      } else {
        exposure->setEnabled(true);
        SetExposure(exposure->Value());
      }
      SetAutoExposure(autoexposure->Value());
    }
    if (awb->ValueChanged() == true)
      SetProperty(V4L2_CID_AUTO_WHITE_BALANCE, awb->Value());

    // cam pictures
    buffer = RetrieveRawFrame();
    GrabFrame();
    new_time = GetTime();

    // check for PS3 Eye disconnection on the HDS UAV
    if(new_time-cam_time>100*1000*1000) {
      Thread::Warn("time delta too large\n");
      hasProblems=true;
    }

    output->GetMutex();
    output->buffer = buffer;
    output->ReleaseMutex();

    output->SetDataTime(cam_time);
    ProcessUpdate(output);

    cam_time = new_time;
  }

  close(device);
}

void V4LCamera::GrabFrame(void) {
  unsigned int count;

  count = 1;

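  // count is fixed to 1, so the outer loop runs once; the inner loop waits on
  // select() (2 s timeout) until read_frame_v4l2() dequeues a frame or an
  // error/timeout occurs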
  while (count-- > 0) {
    for (;;) {
      fd_set fds;
      struct timeval tv;
      int r;

      FD_ZERO (&fds);
      FD_SET (device, &fds);

      /* Timeout. */
      tv.tv_sec = 2;
      tv.tv_usec = 0;

      r = select (device+1, &fds, NULL, NULL, &tv);

      if (-1 == r) {
        if (EINTR == errno) continue;
        Thread::Err("select\n");
      }

      if (0 == r) {
        Thread::Err("select timeout\n");
        /* end the infinite loop */
        break;
      }

      if (read_frame_v4l2 ()) break;
    }
  }
}

int V4LCamera::read_frame_v4l2(void) {
  struct v4l2_buffer buf;
  memset(&buf, 0, sizeof (v4l2_buffer));

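  // dequeue the next filled buffer, remember its index for RetrieveRawFrame(),
  // then hand the buffer straight back to the driver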
  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  buf.memory = V4L2_MEMORY_MMAP;

  if (-1 == xioctl (device, VIDIOC_DQBUF, &buf)) {
    switch (errno) {
    case EAGAIN:
      return 0;

    case EIO:
      /* Could ignore EIO, see spec. */

      /* fall through */

    default:
      /* display the error and stop processing */
      Thread::Err("VIDIOC_DQBUF xioctl\n");
      return 1;
    }
  }

  if(buf.index >= requestbuffers.count) {
    Thread::Err("buf.index >= requestbuffers.count\n");
  }

#ifdef USE_TEMP_BUFFER
  memcpy(capture->buffers[MAX_V4L_BUFFERS].start,
         capture->buffers[buf.index].start,
         capture->buffers[MAX_V4L_BUFFERS].length );
  capture->bufferIndex = MAX_V4L_BUFFERS;
  //printf("got data in buff %d, len=%d, flags=0x%X, seq=%d, used=%d)\n",
  //       buf.index, buf.length, buf.flags, buf.sequence, buf.bytesused);
#else
  bufferIndex = buf.index;
#endif

  if (-1 == xioctl (device, VIDIOC_QBUF, &buf)) {
    Thread::Err ("VIDIOC_QBUF xioctl\n");
  }

  return 1;
}

int V4LCamera::AllocBuffers(void) {
  memset(&requestbuffers, 0, sizeof (v4l2_requestbuffers));

  unsigned int buffer_number = DEFAULT_V4L_BUFFERS;

try_again:

  requestbuffers.count = buffer_number;
  requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  requestbuffers.memory = V4L2_MEMORY_MMAP;

  if (-1 == xioctl (device, VIDIOC_REQBUFS, &requestbuffers)) {
    if (EINVAL == errno) {
      Thread::Err("memory mapping is not supported\n");
    } else {
      Thread::Err ("VIDIOC_REQBUFS xioctl\n");
    }
    return -1;
  }

  if (requestbuffers.count < buffer_number) {
    if (buffer_number == 1) {
      Thread::Err("Insufficient buffer memory\n");
      return -1;
    } else {
      buffer_number--;
      Thread::Warn ("Insufficient buffer memory, decreasing buffers\n");
      goto try_again;
    }
  }

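  // query each buffer allocated by the driver and map it into our address
  // space; buffers[] keeps the mappings for the lifetime of the capture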
  for (int n_buffers = 0; n_buffers < requestbuffers.count; ++n_buffers) {
    struct v4l2_buffer buf;

    memset(&buf, 0, sizeof (v4l2_buffer));

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = n_buffers;

    if (-1 == xioctl (device, VIDIOC_QUERYBUF, &buf)) {
      Thread::Err("VIDIOC_QUERYBUF xioctl\n");
      return -1;
    }

    buffers[n_buffers].length = buf.length;
    buffers[n_buffers].start =
      mmap (NULL /* start anywhere */,
            buf.length,
            PROT_READ | PROT_WRITE /* required */,
            MAP_SHARED /* recommended */,
            device, buf.m.offset);

    if (MAP_FAILED == buffers[n_buffers].start) {
      Thread::Err("mmap\n");
      return -1;
    }
  }

  // todo: check this allocation, is there no double buffering?
  // can the image be initialized in the camera constructor?

  output->buffer=output->allocFunction(output->dataType.GetSize());
  return 1;
}

char *V4LCamera::RetrieveRawFrame(void) {

  /* [FD] this really belongs here */
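  // VIDIOCSYNC and mmaps[] belong to the legacy V4L1 API; this call appears to
  // be a leftover from the original V4L1 capture path rather than part of the
  // V4L2 MMAP streaming used above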
  if (ioctl(device, VIDIOCSYNC, &mmaps[bufferIndex].frame) == -1) {
    Thread::Err("Could not SYNC to video stream. %s\n", strerror(errno));
  }

  /* Now get what has already been captured as an IplImage return */
  if (output->dataType.GetFormat() == cvimage::Type::Format::YUYV || output->dataType.GetFormat() == cvimage::Type::Format::UYVY) {
#ifdef USE_TEMP_BUFFER
    capture->frame.imageData=(char*)capture->buffers[capture->bufferIndex].start;
#else
    Printf("frame is not allocated\n");
    memcpy((char *)frame,(char *)buffers[bufferIndex].start,output->GetDataType().GetSize());
#endif
  } else {
    Thread::Err("palette %d not supported for raw output\n",output->dataType.GetFormat());
  }

  return(frame);
}

bool V4LCamera::HasProblems(void) {
  return hasProblems;
}

void V4LCamera::SetAutoGain(bool value) {
  SetProperty(V4L2_CID_AUTOGAIN, value);
}

void V4LCamera::SetAutoExposure(bool value) {
  Thread::Warn("not implemented in opencv\n");
}

void V4LCamera::SetGain(float value) {
  SetProperty(V4L2_CID_GAIN, value);
}

void V4LCamera::SetExposure(float value) {
  SetProperty(V4L2_CID_EXPOSURE, value);
}

void V4LCamera::SetBrightness(float value) {
  SetProperty(V4L2_CID_BRIGHTNESS, value);
}

void V4LCamera::SetSaturation(float value) {
  SetProperty(V4L2_CID_SATURATION, value);
}

void V4LCamera::SetHue(float value) {
  SetProperty(V4L2_CID_HUE, value);
}

void V4LCamera::SetContrast(float value) {
  SetProperty(V4L2_CID_CONTRAST, value);
}

float V4LCamera::GetProperty(int property) {
  //get min and max value
  struct v4l2_queryctrl queryctrl;
  queryctrl.id = property;
  if(xioctl (device, VIDIOC_QUERYCTRL,&queryctrl)==-1) return -1;
  int min = queryctrl.minimum;
  int max = queryctrl.maximum;

  //get current value
  struct v4l2_control control;
  memset (&control, 0, sizeof (v4l2_control));
  control.id = property;
  if(xioctl (device,VIDIOC_G_CTRL, &control)==-1) return -1;

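  // note: the "+ 1" below makes the reading slightly asymmetric with
  // SetProperty(): a value written as 0.5 on a 0..255 control (127) reads
  // back as 128/255, i.e. about 0.502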
  return ((float)control.value - min + 1) / (max - min);
}

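// SetProperty maps a normalized value in [0,1] onto the control's [min,max]
// range reported by VIDIOC_QUERYCTRL; e.g. 0.5 on a 0..255 control gives
// control.value = (int)(0.5 * 255 + 0) = 127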
void V4LCamera::SetProperty(int property,float value) {
  //get min and max value
  struct v4l2_queryctrl queryctrl;
  queryctrl.id = property;
  xioctl (device, VIDIOC_QUERYCTRL,&queryctrl);
  int min = queryctrl.minimum;
  int max = queryctrl.maximum;

  //set value
  struct v4l2_control control;
  memset (&control, 0, sizeof (v4l2_control));
  control.id = property;
  control.value = (int)(value * (max - min) + min);
  xioctl (device,VIDIOC_S_CTRL, &control);
}

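// ioctl wrapper that retries the request as long as it is interrupted by a
// signal (EINTR)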
int V4LCamera::xioctl( int fd, int request, void *arg) {
  int r;

  do r = ioctl (fd, request, arg);
  while (-1 == r && EINTR == errno);

  return r;
}

} // end namespace sensor
} // end namespace flair