// %flair:license{
// This file is part of the Flair framework distributed under the
// CECILL-C License, Version 1.0.
// %flair:license}
// created: 2014/07/17
// filename: V4LCamera.cpp
//
// author: Guillaume Sanahuja
// Copyright Heudiasyc UMR UTC/CNRS 7253
//
// version: $Id: $
//
// purpose: base class for V4L camera
//
//
/*********************************************************************/

#include "V4LCamera.h"
#include <GroupBox.h>
#include <DoubleSpinBox.h>
#include <CheckBox.h>
#include <Label.h>
#include <Image.h>
#include <FrameworkManager.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/select.h> // select(), fd_set, timeval (used by GrabFrame)
#include <unistd.h>
#include <cstring>
#include <cerrno>       // errno, EAGAIN, EINTR
#include <sys/mman.h>
#include <VisionFilter.h>

#define DEFAULT_V4L_BUFFERS 4

using std::string;
using namespace flair::core;
using namespace flair::gui;

namespace flair {
namespace sensor {

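// Typical use (illustrative sketch; the instance name and the Start() call are
// assumptions about the surrounding application, Start() being expected from the
// flair::core::Thread base class):
//   V4LCamera *cam = new V4LCamera("cam0", 0, 640, 480, Image::Type::Format::YUYV, 6);
//   cam->Start();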
V4LCamera::V4LCamera(string name,
                     uint8_t camera_index, uint16_t width, uint16_t height,
                     Image::Type::Format format, uint8_t priority)
    : Thread(getFrameworkManager(), name, priority),
      Camera(name, width, height, format) {

  string deviceName="/dev/video"+std::to_string(camera_index);
  device = open(deviceName.c_str(), O_RDWR | O_NONBLOCK);
  if (device == -1) {
    Thread::Err("Cannot open %s\n",deviceName.c_str());
  } else {
    Printf("V4LCamera %s, opened %s\n",name.c_str(),deviceName.c_str());
  }

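  // V4L2 user-pointer streaming setup: query the device capabilities, negotiate
  // pixel format and image size (VIDIOC_G_FMT/VIDIOC_S_FMT), request user-pointer
  // buffers (VIDIOC_REQBUFS), queue them all (VIDIOC_QBUF), then start streaming
  // (VIDIOC_STREAMON).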
  struct v4l2_capability cap;
  memset(&cap, 0, sizeof (v4l2_capability));
  if (xioctl (device, VIDIOC_QUERYCAP, &cap)==-1) {
    Thread::Err("VIDIOC_QUERYCAP xioctl\n");
  }
  if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
    Thread::Err("device does not support video capture\n");
  }

  //get the current v4l2_format
  struct v4l2_format form;
  memset(&form, 0, sizeof (v4l2_format));
  form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if(xioctl (device, VIDIOC_G_FMT,&form)==-1) {
    Thread::Err("VIDIOC_G_FMT xioctl\n");
  }

  //set width, height and format; fmt is a union, so only the pix member is
  //filled for V4L2_BUF_TYPE_VIDEO_CAPTURE
  if (format == Image::Type::Format::UYVY) {
    form.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
  } else if (format == Image::Type::Format::YUYV) {
    form.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
  } else {
    Thread::Err("format not supported\n");
  }

  form.fmt.pix.width = width;
  form.fmt.pix.height = height;
  form.fmt.pix.field = V4L2_FIELD_ANY;
  if(xioctl (device, VIDIOC_S_FMT,&form)==-1) {
    Thread::Err("VIDIOC_S_FMT xioctl\n");
  }

  //alloc and queue bufs
  AllocBuffers();
  for (int bufferIndex = 0; bufferIndex < nbBuffers; ++bufferIndex) {
    QueueBuffer(bufferIndex);
  }

  // enable streaming
  v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (xioctl (device, VIDIOC_STREAMON,&type)==-1) {
    Thread::Err("VIDIOC_STREAMON xioctl\n");
  }

  // skip the first frame, it is often bad -- this is unnoticed in traditional
  // apps, but could be fatal if bad jpeg is enabled
  bufferIndex=-1;
  GrabFrame();

  // ground station settings; scalar properties are normalized in [0,1]
  gain = new DoubleSpinBox(GetGroupBox()->NewRow(), "gain:", 0, 1, 0.1);
  exposure = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "exposure:", 0, 1, 0.1);
  bright = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "bright:", 0, 1, 0.1);
  contrast = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "contrast:", 0, 1, 0.1);
  hue = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "hue:", 0, 1, 0.1);
  sharpness = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "sharpness:", 0, 1, 0.1);
  sat = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "saturation:", 0, 1, 0.1);
  autogain = new CheckBox(GetGroupBox()->NewRow(), "autogain:");
  autoexposure = new CheckBox(GetGroupBox()->LastRowLastCol(), "autoexposure:");
  awb = new CheckBox(GetGroupBox()->LastRowLastCol(), "awb:");
  fps = new Label(GetGroupBox()->NewRow(), "fps");

  hasProblems=false;
}

V4LCamera::~V4LCamera() {
  // stop and join the acquisition thread before releasing the buffers it uses
  SafeStop();
  Join();
  for (int n_buffers = 0; n_buffers < nbBuffers; n_buffers++) {
    FreeFunction((char*)buffers[n_buffers].start);
  }
}

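// Acquisition thread: pushes GUI property changes to the driver, grabs the most
// recent frame, watches for camera disconnection (failing VIDIOC_G_FMT or an
// abnormally long frame interval) and publishes each captured buffer on the
// Camera output.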
void V4LCamera::Run(void) {
  Time cam_time, new_time, fpsNow, fpsPrev;
  int fpsCounter = 0;

  // grab a first image to initialize timings
  GrabFrame();
  cam_time = GetTime();
  fpsPrev = cam_time;

  while (!ToBeStopped()) {
    //check for ps3eye disconnection in hds uav
    if(hasProblems==false) {
      struct v4l2_format form;
      memset(&form, 0, sizeof (v4l2_format));
      form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      // a failing VIDIOC_G_FMT means the camera is gone
      if(xioctl (device, VIDIOC_G_FMT,&form)<0) {
        Thread::Warn("camera disconnected\n");
        hasProblems=true;
      }
    }

    // fps counter
    fpsCounter++;
    if (GetTime() > (fpsPrev + 5 * (Time)1000000000)) {
      // update the displayed fps every 5 seconds
      fpsNow = GetTime();
      fps->SetText("fps: %.1f",
                   fpsCounter / ((float)(fpsNow - fpsPrev) / 1000000000.));
      fpsCounter = 0;
      fpsPrev = fpsNow;
    }

    // push GUI-side property changes to the camera; manual gain/exposure are
    // ignored while the corresponding auto mode is enabled
    if (gain->ValueChanged() == true && autogain->Value() == false)
      SetGain(gain->Value());
    if (exposure->ValueChanged() == true && autoexposure->Value() == false)
      SetExposure(exposure->Value());
    if (bright->ValueChanged() == true)
      SetBrightness(bright->Value());
    if (sat->ValueChanged() == true)
      SetSaturation(sat->Value());
    if (contrast->ValueChanged() == true)
      SetContrast(contrast->Value());
    if (hue->ValueChanged() == true)
      SetHue(hue->Value());
    if (sharpness->ValueChanged() == true)
      SetProperty(V4L2_CID_SHARPNESS, sharpness->Value());
    if (autogain->ValueChanged() == true) {
      if (autogain->Value() == true) {
        gain->setEnabled(false);
      } else {
        gain->setEnabled(true);
        SetGain(gain->Value());
      }
      SetAutoGain(autogain->Value());
    }
    if (autoexposure->ValueChanged() == true) {
      if (autoexposure->Value() == true) {
        exposure->setEnabled(false);
      } else {
        exposure->setEnabled(true);
        SetExposure(exposure->Value());
      }
      SetAutoExposure(autoexposure->Value());
    }
    if (awb->ValueChanged() == true)
      SetProperty(V4L2_CID_AUTO_WHITE_BALANCE, awb->Value());

    // get picture
    GrabFrame();
    new_time = GetTime();

    //check for ps3eye disconnection in hds uav
    if(new_time-cam_time>100*1000*1000) {
      Thread::Warn("frame interval too long\n");
      hasProblems=true;
    }

    // publish the newly captured buffer
    output->GetMutex();
    output->buffer=(char*)buffers[bufferIndex].start;
    output->ReleaseMutex();

    output->SetDataTime(cam_time);
    ProcessUpdate(output);

    cam_time = new_time;
  }

  close(device);
}

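// Give the buffer of given index back to the driver so it can be filled with a
// coming frame (VIDIOC_QBUF with V4L2_MEMORY_USERPTR). A negative index is
// ignored; this is used for the very first grab, when no buffer has been
// dequeued yet.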
int V4LCamera::QueueBuffer(int index) {
  struct v4l2_buffer buf;
  if(index>=0 && index<nbBuffers) {
    memset(&buf, 0, sizeof (v4l2_buffer));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_USERPTR;//V4L2_MEMORY_MMAP;
    buf.index = (unsigned long)index;
    buf.m.userptr=(unsigned long)(buffers[index].start);
    buf.length=buffers[index].length;

    int ret=xioctl (device, VIDIOC_QBUF, &buf);
    if (ret==-1) {
      // on failure, xioctl returns -1 and sets errno
      Thread::Err("VIDIOC_QBUF xioctl %s\n",strerror(errno));
      return -1;
    }
  }
  return 0;
}

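// Grab the most recent frame: re-queue the previously delivered buffer, wait up
// to 100ms for the device with select(), then dequeue every filled buffer,
// giving all but the newest one back to the driver. On success bufferIndex
// designates the newest image in buffers[].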
int V4LCamera::GrabFrame(void) {
  //queue previous buffer
  if(QueueBuffer(bufferIndex)<0) return -1;

  fd_set fds;
  struct timeval tv;
  FD_ZERO (&fds);
  FD_SET (device, &fds);

  // wait up to 100ms for a frame to become available
  tv.tv_sec = 0;
  tv.tv_usec = 100000;

  int r = select (device+1, &fds, NULL, NULL, &tv);

  if (-1 == r) {
    char errorMsg[256];
    Thread::Err("select (%s)\n", strerror_r(errno, errorMsg, sizeof(errorMsg)));
    return -1;
  }

  if (0 == r) {
    Thread::Err("select timeout\n");
    return -1;
  }

  struct v4l2_buffer buf;
  memset(&buf, 0, sizeof (v4l2_buffer));
  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  buf.memory = V4L2_MEMORY_USERPTR;//V4L2_MEMORY_MMAP;

  //drain the driver queue, keeping only the last captured image
  int prevDQbuf=-1;
  for(int i=0;i<nbBuffers;i++) {
    if (xioctl (device, VIDIOC_DQBUF, &buf)==-1) {
      if (errno==EAGAIN) {
        break;
      } else {
        Thread::Err("VIDIOC_DQBUF xioctl\n");
        return -1;
      }
    } else {
      // an older dequeued buffer is no longer needed, give it back to the driver
      if(prevDQbuf!=-1) {
        QueueBuffer(prevDQbuf);
      }
      for (int n_buffers = 0; n_buffers < nbBuffers; n_buffers++) {
        if((void*)(buf.m.userptr)==buffers[n_buffers].start) {
          prevDQbuf=n_buffers;
          bufferIndex=n_buffers;
          break;
        }
      }
    }
  }

  return 1;
}

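// Allocate the user-space frame buffers handed to the driver (V4L2_MEMORY_USERPTR).
// VIDIOC_REQBUFS only declares how many buffers will be used; the memory itself
// comes from AllocFunction() and must hold one full image
// (output->GetDataType().GetSize() bytes).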
int V4LCamera::AllocBuffers(void) {
  struct v4l2_requestbuffers requestbuffers;
  memset(&requestbuffers, 0, sizeof (v4l2_requestbuffers));

  unsigned int buffer_number = DEFAULT_V4L_BUFFERS;

  requestbuffers.count = buffer_number;
  requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  requestbuffers.memory = V4L2_MEMORY_USERPTR;//V4L2_MEMORY_MMAP;

  if (xioctl (device, VIDIOC_REQBUFS, &requestbuffers)==-1) {
    if (errno==EINVAL) {
      Thread::Err("VIDIOC_REQBUFS user memory not supported\n");
    } else {
      Thread::Err ("VIDIOC_REQBUFS xioctl\n");
    }
    return -1;
  }

  nbBuffers=DEFAULT_V4L_BUFFERS;
  for (int n_buffers = 0; n_buffers < nbBuffers; n_buffers++) {
    buffers[n_buffers].length = output->GetDataType().GetSize();
    buffers[n_buffers].start =AllocFunction(output->GetDataType().GetSize());
  }

  return 1;
}

bool V4LCamera::HasProblems(void) {
  return hasProblems;
}

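// The setters below are thin wrappers mapping each camera setting to its V4L2
// control id; scalar values are expected in [0,1] and rescaled by SetProperty().
// SetAutoExposure is currently a stub.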
void V4LCamera::SetAutoGain(bool value) {
  SetProperty(V4L2_CID_AUTOGAIN, value);
}

void V4LCamera::SetAutoExposure(bool value) {
  Thread::Warn("not implemented\n");
}

void V4LCamera::SetGain(float value) {
  SetProperty(V4L2_CID_GAIN, value);
}

void V4LCamera::SetExposure(float value) {
  SetProperty(V4L2_CID_EXPOSURE, value);
}

void V4LCamera::SetBrightness(float value) {
  SetProperty(V4L2_CID_BRIGHTNESS, value);
}

void V4LCamera::SetSaturation(float value) {
  SetProperty(V4L2_CID_SATURATION, value);
}

void V4LCamera::SetHue(float value) {
  SetProperty(V4L2_CID_HUE, value);
}

void V4LCamera::SetContrast(float value) {
  SetProperty(V4L2_CID_CONTRAST, value);
}

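// Generic control access: VIDIOC_QUERYCTRL gives the driver's [minimum, maximum]
// range for a control, which is used to convert between the normalized [0,1]
// values handled by this class and the raw integer values used by the driver.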
float V4LCamera::GetProperty(int property) {
  //get the control's range
  struct v4l2_queryctrl queryctrl;
  memset (&queryctrl, 0, sizeof (v4l2_queryctrl));
  queryctrl.id = property;
  if(xioctl (device, VIDIOC_QUERYCTRL,&queryctrl)==-1) return -1;
  int min = queryctrl.minimum;
  int max = queryctrl.maximum;

  //get the raw value and normalize it to [0,1] (inverse of SetProperty)
  struct v4l2_control control;
  memset (&control, 0, sizeof (v4l2_control));
  control.id = property;
  if(xioctl (device,VIDIOC_G_CTRL, &control)==-1) return -1;

  return ((float)control.value - min) / (max - min);
}

void V4LCamera::SetProperty(int property,float value) {
  //get the control's range
  struct v4l2_queryctrl queryctrl;
  memset (&queryctrl, 0, sizeof (v4l2_queryctrl));
  queryctrl.id = property;
  xioctl (device, VIDIOC_QUERYCTRL,&queryctrl);
  int min = queryctrl.minimum;
  int max = queryctrl.maximum;

  //set the value, rescaling the normalized [0,1] input to the driver range
  struct v4l2_control control;
  memset (&control, 0, sizeof (v4l2_control));
  control.id = property;
  control.value = (int)(value * (max - min) + min);
  xioctl (device,VIDIOC_S_CTRL, &control);
}

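// ioctl wrapper retrying the call as long as it is interrupted by a signal (EINTR).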
int V4LCamera::xioctl( int fd, int request, void *arg) {
  int r;

  do r = ioctl (fd, request, arg);
  while (-1 == r && EINTR == errno);

  return r;
}

} // end namespace sensor
} // end namespace flair