// %flair:license{ // This file is part of the Flair framework distributed under the // CECILL-C License, Version 1.0. // %flair:license} // created: 2014/07/17 // filename: V4LCamera.cpp // // author: Guillaume Sanahuja // Copyright Heudiasyc UMR UTC/CNRS 7253 // // version: $Id: $ // // purpose: base class for V4l camera // // /*********************************************************************/ #include "V4LCamera.h" #include #include #include #include #include #include #include #include #include #include #include #include #include #define DEFAULT_V4L_BUFFERS 4 using std::string; using namespace flair::core; using namespace flair::gui; namespace flair { namespace sensor { V4LCamera::V4LCamera(string name,uint8_t camera_index, uint16_t width, uint16_t height, Image::Type::Format format, uint8_t priority) : Thread(getFrameworkManager(), name, priority), Camera(name, width, height, format) { string deviceName="/dev/video"+std::to_string(camera_index); device = open(deviceName.c_str(), O_RDWR | O_NONBLOCK); if (device == -1) { Thread::Err("Cannot open %s\n",deviceName.c_str()); } else { Printf("V4LCamera %s, opened %s\n",name.c_str(),deviceName.c_str()); } if(format == Image::Type::Format::UYVY) { if(init(width,height,V4L2_PIX_FMT_UYVY) == -1) { Thread::Err("initialisation failed\n"); } } else if (format == Image::Type::Format::YUYV) { if(init(width,height,V4L2_PIX_FMT_YUYV) == -1) { Thread::Err("initialisation failed\n"); } } else { Thread::Err("format not supported\n"); } /* This is just a technicality, but all buffers must be filled up before any staggered SYNC is applied. SO, filler up. (see V4L HowTo) */ allocBuffers(); for (int i=0;i < nbBuffers;i++) { struct v4l2_buffer buf; memset(&buf, 0, sizeof (v4l2_buffer)); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; buf.index = (unsigned long)i; if (-1 == xioctl (device, VIDIOC_QBUF, &buf)) { Thread::Err("VIDIOC_QBUF error\n"); } } /* enable the streaming */ v4l2_buf_type type; type = V4L2_BUF_TYPE_VIDEO_CAPTURE; if (-1 == xioctl (device, VIDIOC_STREAMON,&type)) { Thread::Err("VIDIOC_STREAMON error\n"); } // ground station gain = new DoubleSpinBox(GetGroupBox()->NewRow(), "gain:", 0, 1, 0.1); exposure = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "exposure:", 0,1, 0.1); bright = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "bright:", 0, 1, 0.1); contrast = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "contrast:", 0,1, 0.1); hue = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "hue:", 0, 1, 0.1); sharpness = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "sharpness:", 0, 1, 0.1); sat = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "saturation:", 0, 1,0.1); autogain = new CheckBox(GetGroupBox()->NewRow(), "autogain:"); autoexposure = new CheckBox(GetGroupBox()->LastRowLastCol(), "autoexposure:"); awb = new CheckBox(GetGroupBox()->LastRowLastCol(), "awb:"); fps = new Label(GetGroupBox()->NewRow(), "fps"); hasProblems=false; } V4LCamera::~V4LCamera() { for (int i = 0; i < nbBuffers; i++) { //FreeFunction((char*)buffers[i].start); } SafeStop(); Join(); close(device); } int V4LCamera::init(int width, int height,unsigned long colorspace) { struct v4l2_capability cap; memset(&cap, 0, sizeof (v4l2_capability)); if(-1 == xioctl(device, VIDIOC_QUERYCAP, &cap)) { return -1; } if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) { Thread::Err("device is unable to capture video memory.\n"); return -1; } struct v4l2_format form; memset(&form, 0, sizeof (v4l2_format)); form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; /* read the current setting */ if(-1 == xioctl(device, VIDIOC_G_FMT, &form)) { Thread::Err("Could not obtain specifics of capture window.\n"); return -1; } /* set the values we want to change */ form.fmt.pix.width = width; form.fmt.pix.height = height; form.fmt.win.chromakey = 0; form.fmt.win.field = V4L2_FIELD_ANY; form.fmt.win.clips = 0; form.fmt.win.clipcount = 0; form.fmt.pix.field = V4L2_FIELD_ANY; form.fmt.pix.pixelformat = colorspace; /* ask the device to change the size*/ if(-1 == xioctl (device, VIDIOC_S_FMT, &form)) { Thread::Err("Could not set specifics of capture window.\n"); return -1; } /* Get window info again, to get the real value */ if(-1 == xioctl (device, VIDIOC_G_FMT, &form)) { Thread::Err("Could not obtain specifics of capture window.\n"); return -1; } return 0; } /* void V4LCamera::Run(void) { Time cam_time, new_time, fpsNow, fpsPrev; int fpsCounter = 0; // init image old GrabFrame(); cam_time = GetTime(); fpsPrev = cam_time; while (!ToBeStopped()) { //check for ps3eye deconnection in hds uav if(hasProblems==false) { struct v4l2_format form; form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; xioctl(device, VIDIOC_G_FMT,&form); if(xioctl (device, VIDIOC_G_FMT,&form)<0) { Thread::Warn("camera disconnected\n"); hasProblems=true; } } // fps counter fpsCounter++; if (GetTime() > (fpsPrev + 5 * (Time)1000000000)) { // every 5 secondes fpsNow = GetTime(); fps->SetText("fps: %.1f", fpsCounter / ((float)(fpsNow - fpsPrev) / 1000000000.)); fpsCounter = 0; fpsPrev = fpsNow; } // cam properties if (gain->ValueChanged() == true && autogain->Value() == false) SetGain(gain->Value()); if (exposure->ValueChanged() == true && autoexposure->Value() == false) SetExposure(exposure->Value()); if (bright->ValueChanged() == true) SetBrightness(bright->Value()); if (sat->ValueChanged() == true) SetSaturation(sat->Value()); if (contrast->ValueChanged() == true) SetContrast(contrast->Value()); if (hue->ValueChanged() == true) SetHue(hue->Value()); if (sharpness->ValueChanged() == true) SetProperty(V4L2_CID_SHARPNESS, sharpness->Value()); if (autogain->ValueChanged() == true) { if (autogain->Value() == true) { gain->setEnabled(false); } else { gain->setEnabled(true); SetGain(gain->Value()); } SetAutoGain(autogain->Value()); } if (autoexposure->ValueChanged() == true) { if (autoexposure->Value() == true) { exposure->setEnabled(false); } else { exposure->setEnabled(true); SetExposure(exposure->Value()); } SetAutoExposure(autoexposure->Value()); } if (awb->ValueChanged() == true) SetProperty(V4L2_CID_AUTO_WHITE_BALANCE, awb->Value()); // get picture GrabFrame(); new_time = GetTime(); //check for ps3eye deconnection in hds uav if(new_time-cam_time>100*1000*1000) { Thread::Warn("delta trop grand\n"); hasProblems=true; } output->GetMutex(); output->buffer=(char*)buffers[bufferIndex].start; output->ReleaseMutex(); output->SetDataTime(cam_time); ProcessUpdate(output); cam_time = new_time; } close(device); } */ int V4LCamera::allocBuffers() { struct v4l2_requestbuffers req; memset(&req, 0, sizeof (v4l2_requestbuffers)); nbBuffers=DEFAULT_V4L_BUFFERS; try_again: req.count = nbBuffers; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.memory = V4L2_MEMORY_MMAP; if(-1 == xioctl(device, VIDIOC_REQBUFS, &req)) { if (EINVAL == errno) { Thread::Err("camera does not support memory mapping\n"); } else { Thread::Err("VIDIOC_REQBUFS failed\n"); } return -1; } if(req.count < nbBuffers) { if (nbBuffers == 1) { Thread::Err("Insufficient buffer memory\n"); return -1; } else { nbBuffers--; Thread::Warn("Insufficient buffer memory -- decreaseing buffers to %i\n",nbBuffers); goto try_again; } } for(int i=0; iGetDataType().GetSize()!=buf.length) { Thread::Err("buf size is not as exepcted %i/%i\n",buf.length,output->GetDataType().GetSize()); return -1; } buffers[i]=mmap(NULL,buf.length,PROT_READ | PROT_WRITE,MAP_SHARED,device, buf.m.offset); if(MAP_FAILED == buffers[i]) { Thread::Err("mmap error\n"); return -1; } } //allocate output data imageData = AllocFunction(output->GetDataType().GetSize()); Printf("cmem allocated %i at %x\n",output->GetDataType().GetSize(),imageData); return 1; }; int V4LCamera::AllocBuffers(void) { struct v4l2_requestbuffers requestbuffers; memset(&requestbuffers, 0, sizeof (v4l2_requestbuffers)); unsigned int buffer_number = DEFAULT_V4L_BUFFERS; try_again: requestbuffers.count = buffer_number; requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; requestbuffers.memory = V4L2_MEMORY_USERPTR; if (xioctl (device, VIDIOC_REQBUFS, &requestbuffers)==-1) { if (errno==EINVAL) { Thread::Err("VIDIOC_REQBUFS user memory not supported\n"); } else { Thread::Err ("VIDIOC_REQBUFS xioctl\n"); } return -1; } nbBuffers=DEFAULT_V4L_BUFFERS; for (int i=0; iGetDataType().GetSize()); } return 1; }; int V4LCamera::cvGrabFrame(void) { unsigned int count; count = 1; while (count-- > 0) { for (;;) { fd_set fds; struct timeval tv; int r; FD_ZERO (&fds); FD_SET (device, &fds); /* Timeout. */ tv.tv_sec = 2; tv.tv_usec = 0; r = select (device+1, &fds, NULL, NULL, &tv); if (-1 == r) { if (EINTR == errno) continue; perror ("select"); } if (0 == r) { fprintf (stderr, "select timeout\n"); /* end the infinite loop */ break; } if (read_frame_v4l2 ()) break; } } return(1); } int V4LCamera::read_frame_v4l2(void) { struct v4l2_buffer buf; memset(&buf, 0, sizeof (v4l2_buffer)); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_MMAP; if (-1 == xioctl (device, VIDIOC_DQBUF, &buf)) { switch (errno) { case EAGAIN: return 0; case EIO: /* Could ignore EIO, see spec. */ /* fall through */ default: /* display the error and stop processing */ perror ("VIDIOC_DQBUF"); return 1; } } if(buf.index >= nbBuffers) { Thread::Err("buf.index >= capture->req.count\n"); } bufferIndex = buf.index; if (-1 == xioctl (device, VIDIOC_QBUF, &buf)) perror ("VIDIOC_QBUF"); return 1; } void V4LCamera::Run(void) { Time cam_time, new_time, fpsNow, fpsPrev; //IplImage *img; // raw image int fpsCounter = 0; // init image old if (!cvGrabFrame()) { Printf("Could not grab a frame\n"); } cam_time = GetTime(); fpsPrev = cam_time; while (!ToBeStopped()) { // fps counter fpsCounter++; if (fpsCounter == 100) { fpsNow = GetTime(); fps->SetText("fps: %.1f", fpsCounter / ((float)(fpsNow - fpsPrev) / 1000000000.)); fpsCounter = 0; fpsPrev = fpsNow; } // cam properties if (gain->ValueChanged() == true && autogain->Value() == false) SetGain(gain->Value()); if (exposure->ValueChanged() == true && autoexposure->Value() == false) SetExposure(exposure->Value()); if (bright->ValueChanged() == true) SetBrightness(bright->Value()); if (sat->ValueChanged() == true) SetSaturation(sat->Value()); if (contrast->ValueChanged() == true) SetContrast(contrast->Value()); if (hue->ValueChanged() == true) SetHue(hue->Value()); //if (sharpness->ValueChanged() == true) // cvSetCaptureProperty(capture, CV_CAP_PROP_SHARPNESS, sharpness->Value()); if (autogain->ValueChanged() == true) { if (autogain->Value() == true) { gain->setEnabled(false); } else { gain->setEnabled(true); SetGain(gain->Value()); } SetAutoGain(autogain->Value()); } if (autoexposure->ValueChanged() == true) { if (autoexposure->Value() == true) { exposure->setEnabled(false); } else { exposure->setEnabled(true); SetExposure(exposure->Value()); } SetAutoExposure(autoexposure->Value()); } //if (awb->ValueChanged() == true) // cvSetCaptureProperty(capture, CV_CAP_PROP_AWB, awb->Value()); // cam pictures cvRetrieveRawFrame(); if (!cvGrabFrame()) { Printf("Could not grab a frame\n"); } new_time = GetTime(); //check for ps3eye deconnection in hds uav if(new_time-cam_time>100*1000*1000) { Thread::Warn("delta trop grand\n"); hasProblems=true; } output->GetMutex(); output->buffer=imageData; output->ReleaseMutex(); output->SetDataTime(cam_time); ProcessUpdate(output); cam_time = new_time; } } void V4LCamera::cvRetrieveRawFrame(void) { memcpy(imageData,(char *)buffers[bufferIndex],output->GetDataType().GetSize()); } int V4LCamera::QueueBuffer(int index) { struct v4l2_buffer buf; if(index>=0 && indexGetDataType().GetSize(); int ret=xioctl (device, VIDIOC_QBUF, &buf); if (ret==-1) { Thread::Err("VIDIOC_QBUF xioctl %s\n",strerror(-ret)); return -1; } } return 0; } int V4LCamera::GrabFrame(void) { //queue previous buffer if(QueueBuffer(bufferIndex)<0) return -1; fd_set fds; struct timeval tv; FD_ZERO (&fds); FD_SET (device, &fds); tv.tv_sec = 0; tv.tv_usec = 100000; int r = select (device+1, &fds, NULL, NULL, &tv); if (-1 == r) { char errorMsg[256]; Thread::Err("select (%s)\n", strerror_r(-r, errorMsg, sizeof(errorMsg))); return -1; } if (0 == r) { Thread::Err("select timeout\n"); return -1; } struct v4l2_buffer buf; memset(&buf, 0, sizeof (v4l2_buffer)); buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; buf.memory = V4L2_MEMORY_USERPTR;//V4L2_MEMORY_MMAP; //get last captured image int prevDQbuf=-1; for(int i=0;i<4;i++) { if (xioctl (device, VIDIOC_DQBUF, &buf)==-1) { if (errno==EAGAIN) { break; } else { Thread::Err("VIDIOC_DQBUF xioctl\n"); return -1; } } else { if(prevDQbuf!=-1) { QueueBuffer(prevDQbuf); } for (int i=0; i