Changeset 338 in flair-src for trunk/lib/FlairSensorActuator


Ignore:
Timestamp:
Oct 17, 2019, 2:49:35 PM (2 years ago)
Author:
Sanahuja Guillaume
Message:

remove opencv dep

Location:
trunk
Files:
8 edited

Legend:

Unmodified
Added
Removed
  • trunk

  • trunk/lib/FlairSensorActuator/CMakeLists.txt

    r302 r338  
    88        ${CMAKE_CURRENT_SOURCE_DIR}/../FlairCore/src
    99        ${CMAKE_CURRENT_SOURCE_DIR}/../FlairVisionFilter/src
    10         ${CMAKE_SYSROOT}/usr/include/opencv1
    1110        ${CMAKE_SYSROOT}/usr/include/vrpn
    1211)
  • trunk/lib/FlairSensorActuator/src/Camera.cpp

    r330 r338  
    2424#include <DataPlot1D.h>
    2525#include <Picture.h>
    26 #include <VisionFilter.h>
    27 #include <highgui.h>
    2826#include <fstream>
    2927
     
    3634
    3735Camera::Camera(string name, uint16_t width,
    38                uint16_t height, cvimage::Type::Format format)
     36               uint16_t height, Image::Type::Format format)
    3937    : IODevice(getFrameworkManager(), name) {
    4038  plot_tab = NULL;
     
    4240
    4341  // do not allocate imagedata, allocation is done by the camera
    44   output = new cvimage((IODevice *)this, width, height, format, "out", false);
     42  output = new Image((IODevice *)this, width, height, format, "out", false);
    4543       
    4644  // station sol
     
    7674                case LogFormat::RAW:
    7775                        AddDataToLog(output);
    78                         Warn("raw log of cvimage is not yet implemented\n");
     76                        Warn("raw log of Image is not yet implemented\n");
    7977                        break;
    8078                case LogFormat::JPG:
    81                                 Warn("logging cvimage to jpeg\n");
     79                                Warn("logging Image to jpeg\n");
    8280                                Warn("jpeg are not included in classical dbt file, as dbt does not handle variable length\n");
    8381                        break;
     
    9593GridLayout *Camera::GetLayout(void) const { return setup_layout; }
    9694
    97 void Camera::UseDefaultPlot(const core::cvimage *image) {
     95void Camera::UseDefaultPlot(const core::Image *image) {
    9896  if (tab == NULL) {
    9997    Err("not applicable for simulation part.\n");
     
    113111}
    114112
    115 core::cvimage *Camera::Output(void) { return output; }
     113core::Image *Camera::Output(void) { return output; }
    116114
    117115void Camera::ProcessUpdate(core::io_data* data) {
     
    121119                if(logFormat==LogFormat::JPG) {
    122120                        data->GetMutex();
    123       //IplImage *img=((cvimage*)data)->img;
    124       const cvimage* input = dynamic_cast<const cvimage*>(data);
     121      //IplImage *img=((Image*)data)->img;
     122      const Image* input = dynamic_cast<const Image*>(data);
    125123      if (!input) {
    126           Warn("casting %s to cvimage failed\n",data->ObjectName().c_str());
     124          Warn("casting %s to Image failed\n",data->ObjectName().c_str());
    127125          return;
    128126      }
     
    130128     
    131129                        string filename=getFrameworkManager()->GetLogPath()+"/"+ObjectName()+"_"+std::to_string(data->DataTime())+".jpg";
    132                         switch(((cvimage*)data)->GetDataType().GetFormat()) {
    133                                 case cvimage::Type::Format::Gray:
     130                        switch(((Image*)data)->GetDataType().GetFormat()) {
     131                                case Image::Type::Format::Gray:
    134132                                        saveToJpeg(img,filename,PictureFormat_t::Gray,PictureFormat_t::Gray);
    135133                                        break;
    136                                 case cvimage::Type::Format::BGR:
     134                                case Image::Type::Format::BGR:
    137135                                        saveToJpeg(img,filename,PictureFormat_t::RGB,PictureFormat_t::RGB);
    138136                                        break;
    139                                 case cvimage::Type::Format::UYVY:
     137                                case Image::Type::Format::UYVY:
    140138                                        saveToJpeg(img,filename,PictureFormat_t::YUV_422ile,PictureFormat_t::YUV_422p);
    141139                                        break;
     
    160158                output->GetMutex();
    161159                if(extension=="jpg") {
    162                         if(output->GetDataType().GetFormat()==cvimage::Type::Format::Gray) saveToJpeg(output->img,filename,PictureFormat_t::Gray,PictureFormat_t::Gray);
    163                         if(output->GetDataType().GetFormat()==cvimage::Type::Format::BGR) saveToJpeg(output->img,filename,PictureFormat_t::RGB,PictureFormat_t::RGB);
    164                         if(output->GetDataType().GetFormat()==cvimage::Type::Format::UYVY) saveToJpeg(output->img,filename,PictureFormat_t::YUV_422ile,PictureFormat_t::YUV_422p);
     160                        if(output->GetDataType().GetFormat()==Image::Type::Format::Gray) saveToJpeg(output->img,filename,PictureFormat_t::Gray,PictureFormat_t::Gray);
     161                        if(output->GetDataType().GetFormat()==Image::Type::Format::BGR) saveToJpeg(output->img,filename,PictureFormat_t::RGB,PictureFormat_t::RGB);
     162                        if(output->GetDataType().GetFormat()==Image::Type::Format::UYVY) saveToJpeg(output->img,filename,PictureFormat_t::YUV_422ile,PictureFormat_t::YUV_422p);
    165163                } else {
    166164                        cvSaveImage(filename.c_str(),output->img);
  • trunk/lib/FlairSensorActuator/src/Camera.h

    r137 r338  
    1616#include <IODevice.h>
    1717#include <stdint.h>
    18 #include <cvimage.h>
     18#include <Image.h>
    1919
    2020namespace flair {
     
    5151  */
    5252  Camera(std::string name, uint16_t width,
    53          uint16_t height, core::cvimage::Type::Format format);
     53         uint16_t height, core::Image::Type::Format format);
    5454
    5555  /*!
     
    7575  * \param image image to display
    7676  */
    77   void UseDefaultPlot(const core::cvimage *image);
     77  void UseDefaultPlot(const core::Image *image);
    7878
    7979  /*!
     
    126126 * \return the output matrix
    127127 */
    128   core::cvimage *Output(void);
     128  core::Image *Output(void);
    129129
    130130  core::DataType const &GetOutputDataType() const;
     
    159159  gui::GroupBox *GetGroupBox(void) const;
    160160
    161   core::cvimage *output;
     161  core::Image *output;
    162162
    163163private:
  • trunk/lib/FlairSensorActuator/src/Ps3Eye.cpp

    r268 r338  
    2828               uint8_t priority)
    2929    : V4LCamera( name, camera_index, 320, 240,
    30                 cvimage::Type::Format::YUYV, priority) {
     30                Image::Type::Format::YUYV, priority) {
    3131  SetIsReady(true);                 
    3232}
  • trunk/lib/FlairSensorActuator/src/SimulatedCamera.cpp

    r330 r338  
    3636                       uint32_t modelId,uint32_t deviceId, uint8_t priority)
    3737    : Thread(getFrameworkManager(), name, priority),
    38       Camera(name, width, height, cvimage::Type::Format::BGR) {
     38      Camera(name, width, height, Image::Type::Format::BGR) {
    3939
    4040  buf_size = width * height * channels+sizeof(Time);
  • trunk/lib/FlairSensorActuator/src/V4LCamera.cpp

    r330 r338  
    2121#include <CheckBox.h>
    2222#include <Label.h>
    23 #include <cvimage.h>
     23#include <Image.h>
    2424#include <FrameworkManager.h>
    2525#include <fcntl.h>
    2626#include <linux/videodev2.h>
    27 
    2827#include <sys/ioctl.h>
    2928#include <unistd.h>
    3029#include <cstring>
    3130#include <sys/mman.h>
    32 
     31#include <VisionFilter.h>
    3332
    3433#define DEFAULT_V4L_BUFFERS 4
     
    4342V4LCamera::V4LCamera(string name,
    4443                     uint8_t camera_index, uint16_t width, uint16_t height,
    45                      cvimage::Type::Format format, uint8_t priority)
     44                     Image::Type::Format format, uint8_t priority)
    4645    : Thread(getFrameworkManager(), name, priority),
    4746      Camera(name, width, height, format) {
    4847 
    49   string deviceName="/dev/video"+std::to_string(camera_index);
    50   device = open(deviceName.c_str(), O_RDWR | O_NONBLOCK);
    51   if (device == -1) {
    52      Thread::Err("Cannot open %s\n");
    53   }
     48    string deviceName="/dev/video"+std::to_string(camera_index);
     49    device = open(deviceName.c_str(), O_RDWR | O_NONBLOCK);
     50    if (device == -1) {
     51        Thread::Err("Cannot open %s\n",deviceName.c_str());
     52    } else {
     53        Printf("V4LCamera %s, opened %s\n",name.c_str(),deviceName.c_str());
     54    }
     55
     56    struct v4l2_capability cap;
     57    memset(&cap, 0, sizeof (v4l2_capability));
     58    if (xioctl (device, VIDIOC_QUERYCAP, &cap)==-1) {
     59        Thread::Err("VIDIOC_QUERYCAP xioctl\n");
     60    }
     61    if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
     62        Thread::Err("device is unable to capture video memory.\n");
     63    }
     64
     65    //get v4l2_format
     66    struct v4l2_format form;
     67    form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
     68    if(xioctl (device, VIDIOC_G_FMT,&form)==-1) {
     69        Thread::Err("VIDIOC_G_FMT xioctl\n");
     70    }
     71 
     72    //set width, height and format
     73    if (format == Image::Type::Format::UYVY) {
     74        form.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
     75    } else if (format == Image::Type::Format::YUYV) {
     76        form.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
     77    } else {
     78        Thread::Err("format not supported\n");
     79    }
     80 
     81    form.fmt.pix.width = width;
     82    form.fmt.pix.height = height;
     83    form.fmt.win.chromakey = 0;
     84    form.fmt.win.field = V4L2_FIELD_ANY;
     85    form.fmt.win.clips = 0;
     86    form.fmt.win.clipcount = 0;
     87    form.fmt.pix.field = V4L2_FIELD_ANY;
     88    if(xioctl (device, VIDIOC_S_FMT,&form)==-1) {
     89        Thread::Err("VIDIOC_S_FMT xioctl\n");
     90    }
     91 
     92    //alloc and queue bufs
     93    AllocBuffers();
     94    for (int bufferIndex = 0; bufferIndex < nbBuffers;++bufferIndex) {
     95        QueueBuffer(bufferIndex);
     96    }
    5497   
    55   //get v4l2_format
    56   struct v4l2_format form;
    57   form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    58   xioctl (device, VIDIOC_G_FMT,&form);
    59  
    60   //set width, height and format
    61   if (format == cvimage::Type::Format::UYVY) {
    62     form.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
    63   } else if (format == cvimage::Type::Format::YUYV) {
    64     form.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    65   } else {
    66     Thread::Err("format not supported\n");
    67   }
    68  
    69   form.fmt.pix.width = width;
    70   form.fmt.pix.height = height;
    71   form.fmt.win.chromakey = 0;
    72   form.fmt.win.field = V4L2_FIELD_ANY;
    73   form.fmt.win.clips = 0;
    74   form.fmt.win.clipcount = 0;
    75   form.fmt.pix.field = V4L2_FIELD_ANY;
    76   xioctl (device, VIDIOC_S_FMT, &form);
    77  
    78   /* This is just a technicality, but all buffers must be filled up before any
    79    staggered SYNC is applied.  SO, filler up. (see V4L HowTo) */
    80 
    81   AllocBuffers();
    82 
    83   for (int bufferIndex = 0; bufferIndex < ((int)requestbuffers.count);++bufferIndex) {
    84     struct v4l2_buffer buf;
    85 
    86     memset(&buf, 0, sizeof (v4l2_buffer));
    87 
    88     buf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    89     buf.memory      = V4L2_MEMORY_MMAP;
    90     buf.index       = (unsigned long)bufferIndex;
    91 
    92     if (-1 == xioctl (device, VIDIOC_QBUF, &buf)) {
    93         Thread::Err("VIDIOC_QBUF xioctl\n");
    94         break;
    95     }
    96   }
    97 
    98   // enable the streaming
    99   v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    100   if (-1 == xioctl (device, VIDIOC_STREAMON,&type)) {
    101       Thread::Err("VIDIOC_STREAMON xioctl\n");
    102   }
    103 
    104 
    105   // skip first frame. it is often bad -- this is unnotied in traditional apps,
    106   //  but could be fatal if bad jpeg is enabled
    107   GrabFrame();
    108    
    109    
    110    
    111   // station sol
    112   gain = new DoubleSpinBox(GetGroupBox()->NewRow(), "gain:", 0, 1, 0.1);
    113   exposure = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "exposure:", 0,
    114                                1, 0.1);
    115   bright =
    116       new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "bright:", 0, 1, 0.1);
    117   contrast = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "contrast:", 0,
    118                                1, 0.1);
    119   hue = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "hue:", 0, 1, 0.1);
    120   sharpness = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "sharpness:",
    121                                 0, 1, 0.1);
    122   sat = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "saturation:", 0, 1,
    123                           0.1);
    124   autogain = new CheckBox(GetGroupBox()->NewRow(), "autogain:");
    125   autoexposure = new CheckBox(GetGroupBox()->LastRowLastCol(), "autoexposure:");
    126   awb = new CheckBox(GetGroupBox()->LastRowLastCol(), "awb:");
    127   fps = new Label(GetGroupBox()->NewRow(), "fps");
    128  
    129   hasProblems=false;
     98    // enable the streaming
     99    v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
     100    if (xioctl (device, VIDIOC_STREAMON,&type)==-1) {
     101        Thread::Err("VIDIOC_STREAMON xioctl\n");
     102    }
     103
     104    // skip first frame. it is often bad -- this is unnoticed in traditional apps,
     105    //  but could be fatal if bad jpeg is enabled
     106    bufferIndex=-1;
     107    GrabFrame();
     108
     109    // ground station
     110    gain = new DoubleSpinBox(GetGroupBox()->NewRow(), "gain:", 0, 1, 0.1);
     111    exposure = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "exposure:", 0,1, 0.1);
     112    bright = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "bright:", 0, 1, 0.1);
     113    contrast = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "contrast:", 0,1, 0.1);
     114    hue = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "hue:", 0, 1, 0.1);
     115    sharpness = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "sharpness:", 0, 1, 0.1);
     116    sat = new DoubleSpinBox(GetGroupBox()->LastRowLastCol(), "saturation:", 0, 1,0.1);
     117    autogain = new CheckBox(GetGroupBox()->NewRow(), "autogain:");
     118    autoexposure = new CheckBox(GetGroupBox()->LastRowLastCol(), "autoexposure:");
     119    awb = new CheckBox(GetGroupBox()->LastRowLastCol(), "awb:");
     120    fps = new Label(GetGroupBox()->NewRow(), "fps");
     121
     122    hasProblems=false;
    130123}
    131124
    132125V4LCamera::~V4LCamera() {
    133   SafeStop();
    134   Join();
     126    for (int n_buffers = 0; n_buffers < nbBuffers; n_buffers++) {
     127       FreeFunction((char*)buffers[n_buffers].start);
     128   }
     129    SafeStop();
     130    Join();
    135131}
    136132
    137133void V4LCamera::Run(void) {
    138134  Time cam_time, new_time, fpsNow, fpsPrev;
    139   char* buffer; // raw image
    140135  int fpsCounter = 0;
    141136
     
    160155    fpsCounter++;
    161156    if (GetTime() > (fpsPrev + 5 * (Time)1000000000)) {
    162       // toute les 5 secondes
     157      // every 5 seconds
    163158      fpsNow = GetTime();
    164159      fps->SetText("fps: %.1f",
     
    204199      SetProperty(V4L2_CID_AUTO_WHITE_BALANCE, awb->Value());
    205200
    206     // cam pictures
    207     buffer = RetrieveRawFrame();
     201    // get picture
    208202    GrabFrame();
    209203    new_time = GetTime();
     
    216210
    217211    output->GetMutex();
    218     output->buffer = buffer;
     212    output->buffer=(char*)buffers[bufferIndex].start;
    219213    output->ReleaseMutex();
    220214
     
    228222}
    229223
    230 void V4LCamera::GrabFrame(void) {
    231     unsigned int count;
    232 
    233     count = 1;
    234 
    235     while (count-- > 0) {
    236         for (;;) {
    237             fd_set fds;
    238             struct timeval tv;
    239             int r;
    240 
    241             FD_ZERO (&fds);
    242             FD_SET (device, &fds);
    243 
    244             /* Timeout. */
    245             tv.tv_sec = 2;
    246             tv.tv_usec = 0;
    247 
    248             r = select (device+1, &fds, NULL, NULL, &tv);
    249 
    250             if (-1 == r) {
    251                 if (EINTR == errno) continue;
    252                 Thread::Err("select\n");
     224int V4LCamera::QueueBuffer(int index) {
     225    struct v4l2_buffer buf;
     226    if(index>=0 && index<nbBuffers) {
     227        memset(&buf, 0, sizeof (v4l2_buffer));
     228        buf.type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
     229        buf.memory      = V4L2_MEMORY_USERPTR;//V4L2_MEMORY_MMAP;
     230        buf.index       = (unsigned long)index;
     231        buf.m.userptr=(unsigned long)(buffers[index].start);
     232        buf.length=buffers[index].length;
     233       
     234        int ret=xioctl (device, VIDIOC_QBUF, &buf);
     235        if (ret==-1) {
     236            Thread::Err("VIDIOC_QBUF xioctl %s\n",strerror(-ret));
     237            return -1;
     238        }
     239    }
     240    return 0;
     241}
     242
     243int V4LCamera::GrabFrame(void) {
     244    //queue previous buffer
     245    if(QueueBuffer(bufferIndex)<0) return -1;
     246   
     247    fd_set fds;
     248    struct timeval tv;
     249    FD_ZERO (&fds);
     250    FD_SET (device, &fds);
     251
     252    tv.tv_sec = 0;
     253    tv.tv_usec = 100000;
     254
     255    int r = select (device+1, &fds, NULL, NULL, &tv);
     256
     257    if (-1 == r) {
     258        char errorMsg[256];
     259        Thread::Err("select (%s)\n", strerror_r(-r, errorMsg, sizeof(errorMsg)));
     260        return -1;
     261    }
     262
     263    if (0 == r) {
     264        Thread::Err("select timeout\n");
     265        return -1;
     266    }
     267
     268    struct v4l2_buffer buf;
     269    memset(&buf, 0, sizeof (v4l2_buffer));
     270    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
     271    buf.memory = V4L2_MEMORY_USERPTR;//V4L2_MEMORY_MMAP;
     272
     273    //get last captured image
     274    int prevDQbuf=-1;
     275    for(int i=0;i<4;i++) {
     276        if (xioctl (device, VIDIOC_DQBUF, &buf)==-1) {
     277            if (errno==EAGAIN) {
     278                break;
     279            } else {
     280                Thread::Err("VIDIOC_DQBUF xioctl\n");
     281                return -1;
    253282            }
    254 
    255             if (0 == r) {
    256                 Thread::Err("select timeout\n");
    257                 /* end the infinite loop */
    258                 break;
    259             }
    260 
    261             if (read_frame_v4l2 ()) break;
    262         }
    263     }
    264 }
    265 
    266 int V4LCamera::read_frame_v4l2(void) {
    267   struct v4l2_buffer buf;
    268   memset(&buf, 0, sizeof (v4l2_buffer));
    269  
    270 
    271     buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    272     buf.memory = V4L2_MEMORY_MMAP;
    273 
    274     if (-1 == xioctl (device, VIDIOC_DQBUF, &buf)) {
    275         switch (errno) {
    276         case EAGAIN:
    277             return 0;
    278 
    279         case EIO:
    280             /* Could ignore EIO, see spec. */
    281 
    282             /* fall through */
    283 
    284         default:
    285             /* display the error and stop processing */
    286             Thread::Err("VIDIOC_DQBUF xioctl\n");
    287             return 1;
    288         }
    289    }
    290 
    291    if(buf.index >= requestbuffers.count) {
    292      Thread::Err("buf.index >= requestbuffers.count\n");
    293    }
     283       } else {
     284           if(prevDQbuf!=-1) {
     285               QueueBuffer(prevDQbuf);
     286           }
     287           for (int n_buffers = 0; n_buffers < nbBuffers; n_buffers++) {
     288               if((void*)(buf.m.userptr)==buffers[n_buffers].start) {
     289                   prevDQbuf=n_buffers;
     290                   bufferIndex=n_buffers;
     291                   break;
     292               }
     293           }
     294       }
     295    }
    294296   
    295 #ifdef USE_TEMP_BUFFER
    296    memcpy(capture->buffers[MAX_V4L_BUFFERS].start,
    297           capture->buffers[buf.index].start,
    298           capture->buffers[MAX_V4L_BUFFERS].length );
    299    capture->bufferIndex = MAX_V4L_BUFFERS;
    300    //printf("got data in buff %d, len=%d, flags=0x%X, seq=%d, used=%d)\n",
    301    //     buf.index, buf.length, buf.flags, buf.sequence, buf.bytesused);
    302 #else
    303    bufferIndex = buf.index;
    304 #endif
    305 
    306    if (-1 == xioctl (device, VIDIOC_QBUF, &buf)) {
    307        Thread::Err ("VIDIOC_QBUF xioctl\n");
    308    }
    309 
    310297   return 1;
    311298}
    312299
    313300int V4LCamera::AllocBuffers(void) {
     301    struct v4l2_requestbuffers requestbuffers;
    314302   memset(&requestbuffers, 0, sizeof (v4l2_requestbuffers));
    315303   
     
    320308   requestbuffers.count = buffer_number;
    321309   requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    322    requestbuffers.memory = V4L2_MEMORY_MMAP;
    323 
    324    if (-1 == xioctl (device, VIDIOC_REQBUFS, &requestbuffers)) {
    325        if (EINVAL == errno) {
    326          Thread::Err("not support memory mapping not supportted\n");
     310   requestbuffers.memory = V4L2_MEMORY_USERPTR;//V4L2_MEMORY_MMAP;
     311
     312   if (xioctl (device, VIDIOC_REQBUFS, &requestbuffers)==-1) {
     313       if (errno==EINVAL) {
     314         Thread::Err("VIDIOC_REQBUFS user memory not supported\n");
    327315       } else {
    328316         Thread::Err ("VIDIOC_REQBUFS xioctl\n");
     
    330318       return -1;
    331319   }
    332 
    333    if (requestbuffers.count < buffer_number) {
    334        if (buffer_number == 1) {
    335         Thread::Err("Insufficient buffer memory\n");
    336         return -1;
    337        } else {
    338         buffer_number--;
    339         Thread::Warn ("Insufficient buffer memory, decreasing buffers\n");
    340         goto try_again;
    341        }
     320   
     321   nbBuffers=DEFAULT_V4L_BUFFERS;
     322   for (int n_buffers = 0; n_buffers < nbBuffers; n_buffers++) {
     323       buffers[n_buffers].length = output->GetDataType().GetSize();
     324       buffers[n_buffers].start =AllocFunction(output->GetDataType().GetSize());
    342325   }
    343326
    344    for (int n_buffers = 0; n_buffers < requestbuffers.count; ++n_buffers) {
    345        struct v4l2_buffer buf;
    346 
    347        memset(&buf, 0, sizeof (v4l2_buffer));
    348 
    349        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    350        buf.memory = V4L2_MEMORY_MMAP;
    351        buf.index = n_buffers;
    352 
    353        if (-1 == xioctl (device, VIDIOC_QUERYBUF, &buf)) {
    354            Thread::Err("VIDIOC_QUERYBUF xioctl\n");
    355            return -1;
    356        }
    357 
    358        buffers[n_buffers].length = buf.length;
    359        buffers[n_buffers].start =
    360          mmap (NULL /* start anywhere */,
    361                buf.length,
    362                PROT_READ | PROT_WRITE /* required */,
    363                MAP_SHARED /* recommended */,
    364                device, buf.m.offset);
    365 
    366        if (MAP_FAILED == buffers[n_buffers].start) {
    367            Thread::Err("mmap\n");
    368            return -1;
    369        }
    370    }
    371 
    372   //todo: verifier cette alloc, pas de double buffeinrg?
    373   //peut on initialiser l'image dans le constrcteur de la camera?
    374    
    375    output->buffer=output->allocFunction(output->dataType.GetSize());
    376327   return 1;
    377328};
    378329
    379 char *V4LCamera::RetrieveRawFrame(void) {
    380  
    381   /* [FD] this really belongs here */
    382   if (ioctl(device, VIDIOCSYNC, &mmaps[bufferIndex].frame) == -1) {
    383     Thread::Err("Could not SYNC to video stream. %s\n", strerror(errno));
    384   }
    385 
    386   /* Now get what has already been captured as a IplImage return */
    387   if (output->dataType.GetFormat() == cvimage::Type::Format::YUYV || output->dataType.GetFormat() == cvimage::Type::Format::UYVY) {
    388     #ifdef USE_TEMP_BUFFER
    389     capture->frame.imageData=(char*)capture->buffers[capture->bufferIndex].start;
    390     #else
    391 Printf("frame is not allocated\n");
    392     memcpy((char *)frame,(char *)buffers[bufferIndex].start,output->GetDataType().GetSize());
    393     #endif
    394   } else {
    395     Thread::Err("palette %d not supported for raw output\n",output->dataType.GetFormat());
    396   }
    397 
    398   return(frame);
    399 }
    400 
    401330bool V4LCamera::HasProblems(void) {
    402331  return hasProblems;
     
    408337
    409338void V4LCamera::SetAutoExposure(bool value) {
    410   Thread::Warn("not implemented in opencv\n");
     339  Thread::Warn("not implemented\n");
    411340}
    412341
  • trunk/lib/FlairSensorActuator/src/V4LCamera.h

    r330 r338  
    2323namespace flair {
    2424namespace core {
    25 class cvimage;
     25class Image;
    2626}
    2727namespace gui {
     
    5555  V4LCamera(std::string name,
    5656            uint8_t camera_index, uint16_t width, uint16_t height,
    57             core::cvimage::Type::Format format, uint8_t priority);
     57            core::Image::Type::Format format, uint8_t priority);
    5858
    5959  /*!
     
    151151  void SetProperty(int property,float value);
    152152  float GetProperty(int property);
    153   void GrabFrame(void);
    154   int read_frame_v4l2(void);
    155   char *RetrieveRawFrame(void);
     153  int GrabFrame(void);
    156154  int AllocBuffers(void);
     155  int QueueBuffer(int index);
    157156  struct video_mmap *mmaps;
    158157  int bufferIndex;
    159   struct v4l2_requestbuffers requestbuffers;
    160158  struct buffer {
    161159    void *  start;
    162160    size_t  length;
    163161  };
    164   buffer buffers[MAX_V4L_BUFFERS + 1];
     162  buffer buffers[MAX_V4L_BUFFERS];
    165163  char* frame;
     164  int nbBuffers;
    166165};
    167166} // end namespace sensor
Note: See TracChangeset for help on using the changeset viewer.