Changeset 330 in flair-src for trunk/lib/FlairSensorActuator/src
- Timestamp: Sep 25, 2019, 3:29:26 PM
- Location: trunk/lib/FlairSensorActuator/src
- Files: 11 edited
Legend: unmodified lines are shown for context; removed lines are prefixed with -, added lines with +.
trunk/lib/FlairSensorActuator/src/Camera.cpp
r214 → r330

  void Camera::ProcessUpdate(core::io_data* data) {
    if(getFrameworkManager()->IsLogging() && getFrameworkManager()->IsDeviceLogged(this)) {
+     Printf("todo logging and jpeg without opencv\n");
+     /*
      if(logFormat==LogFormat::JPG) {
        data->GetMutex();
…
      }
      data->ReleaseMutex();
-   }
+   }*/
    }
    IODevice::ProcessUpdate(data);
…
    if(filename=="") filename="./"+ObjectName()+"_"+std::to_string(GetTime())+".jpg";
    string::size_type idx = filename.rfind('.');
+   Printf("todo SavePictureToFile without opencv\n");/*
    if(idx != string::npos) {
      Printf("saving %s\n", filename.c_str());
…
    } else {
      Warn("saving %s no file extension!\n", filename.c_str());
-   }
+   }*/
  }

  void Camera::SaveRawPictureToFile(string filename) const {
-   Printf("saving %s, size %i\n", filename.c_str(), output->img->imageSize);
+   Printf("saving %s, size %i\n", filename.c_str(), output->GetDataType().GetSize());
    std::ofstream pFile;
    pFile.open(filename);
    output->GetMutex();
-   pFile.write(output->img->imageData, output->img->imageSize);
+   pFile.write(output->buffer, output->GetDataType().GetSize());
    output->ReleaseMutex();
trunk/lib/FlairSensorActuator/src/SimulatedCamera.cpp
r286 → r330

  #include <SharedMem.h>
  #include <sstream>
+ #include <string.h>

  using std::string;
…
  shmemReadBuf=(char*)malloc(buf_size);
- output->img->imageData = shmemReadBuf;
+ output->buffer = shmemReadBuf;

  shmem = new SharedMem((Thread *)this,ShMemName(modelId, deviceId), buf_size, SharedMem::Type::producerConsumer);
trunk/lib/FlairSensorActuator/src/SimulatedCamera.h
r286 → r330

  #include <Camera.h>
  #include <Thread.h>
- #include <cxcore.h>

  namespace flair {
trunk/lib/FlairSensorActuator/src/V4LCamera.cpp
r307 → r330

  #include <cvimage.h>
  #include <FrameworkManager.h>
+ #include <fcntl.h>
  #include <linux/videodev2.h>
+
+ #include <sys/ioctl.h>
+ #include <unistd.h>
+ #include <cstring>
+ #include <sys/mman.h>
+
+
+ #define DEFAULT_V4L_BUFFERS 4

  using std::string;
…
      : Thread(getFrameworkManager(), name, priority),
        Camera(name, width, height, format) {
-   capture = cvCaptureFromCAM(camera_index);
-   if (capture < 0)
-     Thread::Err("cvCaptureFromCAM error\n");
-
-   if (cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_WIDTH, width)<0)
-     Thread::Err("cvSetCaptureProperty error\n");
-   if (cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_HEIGHT, height)<0)
-     Thread::Err("cvSetCaptureProperty error\n");
-
+   string deviceName="/dev/video"+std::to_string(camera_index);
+   device = open(deviceName.c_str(), O_RDWR | O_NONBLOCK);
+   if (device == -1) {
+     Thread::Err("Cannot open %s\n");
+   }
+
+   //get v4l2_format
+   struct v4l2_format form;
+   form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+   xioctl (device, VIDIOC_G_FMT,&form);
+
+   //set width, height and format
    if (format == cvimage::Type::Format::UYVY) {
-     if (cvSetCaptureProperty(capture, CV_CAP_PROP_FORMAT, V4L2_PIX_FMT_UYVY)<0)
-       Thread::Err("cvSetCaptureProperty error\n");
+     form.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
    } else if (format == cvimage::Type::Format::YUYV) {
-     if (cvSetCaptureProperty(capture, CV_CAP_PROP_FORMAT, V4L2_PIX_FMT_YUYV) <
-         0)
-       Thread::Err("cvSetCaptureProperty error\n");
+     form.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    } else {
      Thread::Err("format not supported\n");
    }

+   form.fmt.pix.width = width;
+   form.fmt.pix.height = height;
+   form.fmt.win.chromakey = 0;
+   form.fmt.win.field = V4L2_FIELD_ANY;
+   form.fmt.win.clips = 0;
+   form.fmt.win.clipcount = 0;
+   form.fmt.pix.field = V4L2_FIELD_ANY;
+   xioctl (device, VIDIOC_S_FMT, &form);
+
+   /* This is just a technicality, but all buffers must be filled up before any
+      staggered SYNC is applied. SO, filler up. (see V4L HowTo) */
+
+   AllocBuffers();
+
+   for (int bufferIndex = 0; bufferIndex < ((int)requestbuffers.count); ++bufferIndex) {
+     struct v4l2_buffer buf;
+
+     memset(&buf, 0, sizeof (v4l2_buffer));
+
+     buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+     buf.memory = V4L2_MEMORY_MMAP;
+     buf.index = (unsigned long)bufferIndex;
+
+     if (-1 == xioctl (device, VIDIOC_QBUF, &buf)) {
+       Thread::Err("VIDIOC_QBUF xioctl\n");
+       break;
+     }
+   }
+
+   // enable the streaming
+   v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+   if (-1 == xioctl (device, VIDIOC_STREAMON,&type)) {
+     Thread::Err("VIDIOC_STREAMON xioctl\n");
+   }
+
+   // skip first frame. it is often bad -- this is unnotied in traditional apps,
+   // but could be fatal if bad jpeg is enabled
+   GrabFrame();
+
    // station sol
    gain = new DoubleSpinBox(GetGroupBox()->NewRow(), "gain:", 0, 1, 0.1);
…
  void V4LCamera::Run(void) {
    Time cam_time, new_time, fpsNow, fpsPrev;
-   IplImage *img; // raw image
+   char* buffer; // raw image
    int fpsCounter = 0;

    // init image old
-   if (!cvGrabFrame(capture)) {
-     Printf("Could not grab a frame\n");
-   }
+   GrabFrame();
    cam_time = GetTime();
    fpsPrev = cam_time;
…
    while (!ToBeStopped()) {
      //check for ps3eye deconnection in hds uav
-     if(cvGetCaptureProperty(capture, CV_CAP_PROP_FRAME_WIDTH)<0) {
-       Thread::Warn("camera disconnected\n");
-       hasProblems=true;
+     if(hasProblems==false) {
+       struct v4l2_format form;
+       form.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+       xioctl(device, VIDIOC_G_FMT,&form);
+       if(xioctl (device, VIDIOC_G_FMT,&form)<0) {
+         Thread::Warn("camera disconnected\n");
+         hasProblems=true;
+       }
      }
…
        SetHue(hue->Value());
      if (sharpness->ValueChanged() == true)
-       cvSetCaptureProperty(capture, CV_CAP_PROP_SHARPNESS, sharpness->Value());
+       SetProperty(V4L2_CID_SHARPNESS, sharpness->Value());
      if (autogain->ValueChanged() == true) {
        if (autogain->Value() == true) {
…
      }
      if (awb->ValueChanged() == true)
-       cvSetCaptureProperty(capture, CV_CAP_PROP_AWB, awb->Value());
+       SetProperty(V4L2_CID_AUTO_WHITE_BALANCE, awb->Value());

      // cam pictures
-     img = cvRetrieveRawFrame(capture);
-     if (!cvGrabFrame(capture)) {
-       Printf("Could not grab a frame\n");
-     }
+     buffer = RetrieveRawFrame();
+     GrabFrame();
      new_time = GetTime();
…
      output->GetMutex();
-     output->img->imageData = img->imageData;
+     output->buffer = buffer;
      output->ReleaseMutex();
…
    }

-   cvReleaseCapture(&capture);
+   close(device);
+ }
+
+ void V4LCamera::GrabFrame(void) {
+   unsigned int count;
+
+   count = 1;
+
+   while (count-- > 0) {
+     for (;;) {
+       fd_set fds;
+       struct timeval tv;
+       int r;
+
+       FD_ZERO (&fds);
+       FD_SET (device, &fds);
+
+       /* Timeout. */
+       tv.tv_sec = 2;
+       tv.tv_usec = 0;
+
+       r = select (device+1, &fds, NULL, NULL, &tv);
+
+       if (-1 == r) {
+         if (EINTR == errno) continue;
+         Thread::Err("select\n");
+       }
+
+       if (0 == r) {
+         Thread::Err("select timeout\n");
+         /* end the infinite loop */
+         break;
+       }
+
+       if (read_frame_v4l2 ()) break;
+     }
+   }
+ }
+
+ int V4LCamera::read_frame_v4l2(void) {
+   struct v4l2_buffer buf;
+   memset(&buf, 0, sizeof (v4l2_buffer));
+
+   buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+   buf.memory = V4L2_MEMORY_MMAP;
+
+   if (-1 == xioctl (device, VIDIOC_DQBUF, &buf)) {
+     switch (errno) {
+     case EAGAIN:
+       return 0;
+
+     case EIO:
+       /* Could ignore EIO, see spec. */
+
+       /* fall through */
+
+     default:
+       /* display the error and stop processing */
+       Thread::Err("VIDIOC_DQBUF xioctl\n");
+       return 1;
+     }
+   }
+
+   if(buf.index >= requestbuffers.count) {
+     Thread::Err("buf.index >= requestbuffers.count\n");
+   }
+
+ #ifdef USE_TEMP_BUFFER
+   memcpy(capture->buffers[MAX_V4L_BUFFERS].start,
+          capture->buffers[buf.index].start,
+          capture->buffers[MAX_V4L_BUFFERS].length );
+   capture->bufferIndex = MAX_V4L_BUFFERS;
+   //printf("got data in buff %d, len=%d, flags=0x%X, seq=%d, used=%d)\n",
+   //       buf.index, buf.length, buf.flags, buf.sequence, buf.bytesused);
+ #else
+   bufferIndex = buf.index;
+ #endif
+
+   if (-1 == xioctl (device, VIDIOC_QBUF, &buf)) {
+     Thread::Err ("VIDIOC_QBUF xioctl\n");
+   }
+
+   return 1;
+ }
+
+ int V4LCamera::AllocBuffers(void) {
+   memset(&requestbuffers, 0, sizeof (v4l2_requestbuffers));
+
+   unsigned int buffer_number = DEFAULT_V4L_BUFFERS;
+
+ try_again:
+
+   requestbuffers.count = buffer_number;
+   requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+   requestbuffers.memory = V4L2_MEMORY_MMAP;
+
+   if (-1 == xioctl (device, VIDIOC_REQBUFS, &requestbuffers)) {
+     if (EINVAL == errno) {
+       Thread::Err("not support memory mapping not supportted\n");
+     } else {
+       Thread::Err ("VIDIOC_REQBUFS xioctl\n");
+     }
+     return -1;
+   }
+
+   if (requestbuffers.count < buffer_number) {
+     if (buffer_number == 1) {
+       Thread::Err("Insufficient buffer memory\n");
+       return -1;
+     } else {
+       buffer_number--;
+       Thread::Warn ("Insufficient buffer memory, decreasing buffers\n");
+       goto try_again;
+     }
+   }
+
+   for (int n_buffers = 0; n_buffers < requestbuffers.count; ++n_buffers) {
+     struct v4l2_buffer buf;
+
+     memset(&buf, 0, sizeof (v4l2_buffer));
+
+     buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+     buf.memory = V4L2_MEMORY_MMAP;
+     buf.index = n_buffers;
+
+     if (-1 == xioctl (device, VIDIOC_QUERYBUF, &buf)) {
+       Thread::Err("VIDIOC_QUERYBUF xioctl\n");
+       return -1;
+     }
+
+     buffers[n_buffers].length = buf.length;
+     buffers[n_buffers].start =
+         mmap (NULL /* start anywhere */,
+               buf.length,
+               PROT_READ | PROT_WRITE /* required */,
+               MAP_SHARED /* recommended */,
+               device, buf.m.offset);
+
+     if (MAP_FAILED == buffers[n_buffers].start) {
+       Thread::Err("mmap\n");
+       return -1;
+     }
+   }
+
+   //todo: verifier cette alloc, pas de double buffeinrg?
+   //peut on initialiser l'image dans le constrcteur de la camera?
+
+   output->buffer=output->allocFunction(output->dataType.GetSize());
+   return 1;
+ };
+
+ char *V4LCamera::RetrieveRawFrame(void) {
+
+   /* [FD] this really belongs here */
+   if (ioctl(device, VIDIOCSYNC, &mmaps[bufferIndex].frame) == -1) {
+     Thread::Err("Could not SYNC to video stream. %s\n", strerror(errno));
+   }
+
+   /* Now get what has already been captured as a IplImage return */
+   if (output->dataType.GetFormat() == cvimage::Type::Format::YUYV || output->dataType.GetFormat() == cvimage::Type::Format::UYVY) {
+ #ifdef USE_TEMP_BUFFER
+     capture->frame.imageData=(char*)capture->buffers[capture->bufferIndex].start;
+ #else
+     Printf("frame is not allocated\n");
+     memcpy((char *)frame,(char *)buffers[bufferIndex].start,output->GetDataType().GetSize());
+ #endif
+   } else {
+     Thread::Err("palette %d not supported for raw output\n",output->dataType.GetFormat());
+   }
+
+   return(frame);
  }
…
  void V4LCamera::SetAutoGain(bool value) {
-   cvSetCaptureProperty(capture, CV_CAP_PROP_AUTOGAIN, value);
+   SetProperty(V4L2_CID_AUTOGAIN, value);
  }
…
  void V4LCamera::SetGain(float value) {
-   cvSetCaptureProperty(capture, CV_CAP_PROP_GAIN, value);
+   SetProperty(V4L2_CID_GAIN, value);
  }

  void V4LCamera::SetExposure(float value) {
-   cvSetCaptureProperty(capture, CV_CAP_PROP_EXPOSURE, value);
+   SetProperty(V4L2_CID_EXPOSURE, value);
  }

  void V4LCamera::SetBrightness(float value) {
-   cvSetCaptureProperty(capture, CV_CAP_PROP_BRIGHTNESS, value);
+   SetProperty(V4L2_CID_BRIGHTNESS, value);
  }

  void V4LCamera::SetSaturation(float value) {
-   cvSetCaptureProperty(capture, CV_CAP_PROP_SATURATION, value);
+   SetProperty(V4L2_CID_SATURATION, value);
  }

  void V4LCamera::SetHue(float value) {
-   cvSetCaptureProperty(capture, CV_CAP_PROP_HUE, value);
+   SetProperty(V4L2_CID_HUE, value);
  }

  void V4LCamera::SetContrast(float value) {
-   cvSetCaptureProperty(capture, CV_CAP_PROP_CONTRAST, value);
+   SetProperty(V4L2_CID_CONTRAST, value);
  }
+
+ float V4LCamera::GetProperty(int property) {
+   //get min and max value
+   struct v4l2_queryctrl queryctrl;
+   queryctrl.id = property;
+   if(xioctl (device, VIDIOC_QUERYCTRL,&queryctrl)==-1) return -1;
+   int min = queryctrl.minimum;
+   int max = queryctrl.maximum;
+
+   //set value
+   struct v4l2_control control;
+   memset (&control, 0, sizeof (v4l2_control));
+   control.id = property;
+   if(xioctl (device,VIDIOC_G_CTRL, &control)==-1) return -1;
+
+   return ((float)control.value - min + 1) / (max - min);
+ }
+
+ void V4LCamera::SetProperty(int property,float value) {
+   //get min and max value
+   struct v4l2_queryctrl queryctrl;
+   queryctrl.id = property;
+   xioctl (device, VIDIOC_QUERYCTRL,&queryctrl);
+   int min = queryctrl.minimum;
+   int max = queryctrl.maximum;
+
+   //set value
+   struct v4l2_control control;
+   memset (&control, 0, sizeof (v4l2_control));
+   control.id = property;
+   control.value = (int)(value * (max - min) + min);
+   xioctl (device,VIDIOC_S_CTRL, &control);
+ }
+
+ int V4LCamera::xioctl( int fd, int request, void *arg) {
+   int r;
+
+   do r = ioctl (fd, request, arg);
+   while (-1 == r && EINTR == errno);
+
+   return r;
+ }
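For readers unfamiliar with the API this changeset switches to: the new constructor, AllocBuffers(), GrabFrame() and read_frame_v4l2() follow the standard V4L2 memory-mapped streaming sequence (VIDIOC_S_FMT, VIDIOC_REQBUFS, VIDIOC_QUERYBUF + mmap, VIDIOC_QBUF, VIDIOC_STREAMON, then a DQBUF/QBUF loop). Below is a minimal, self-contained sketch of that sequence, independent of the Flair classes; the device node, resolution, pixel format and buffer count are arbitrary choices, not values taken from the changeset.

#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <linux/videodev2.h>
#include <cstdio>

int main() {
  int fd = open("/dev/video0", O_RDWR);               // assumed device node
  if (fd < 0) { perror("open"); return 1; }

  v4l2_format fmt{};                                   // negotiate 640x480 YUYV
  fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  fmt.fmt.pix.width = 640;
  fmt.fmt.pix.height = 480;
  fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
  if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0) { perror("VIDIOC_S_FMT"); return 1; }

  v4l2_requestbuffers req{};                           // ask the driver for 4 mmap'ed buffers
  req.count = 4;
  req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  req.memory = V4L2_MEMORY_MMAP;
  if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0) { perror("VIDIOC_REQBUFS"); return 1; }
  if (req.count > 4) req.count = 4;                    // keep the bookkeeping arrays small

  void *start[4]; size_t length[4];
  for (unsigned i = 0; i < req.count; i++) {           // map each buffer and queue it
    v4l2_buffer buf{};
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = i;
    if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0) { perror("VIDIOC_QUERYBUF"); return 1; }
    length[i] = buf.length;
    start[i] = mmap(nullptr, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
    if (start[i] == MAP_FAILED) { perror("mmap"); return 1; }
    ioctl(fd, VIDIOC_QBUF, &buf);
  }

  v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;    // start streaming
  ioctl(fd, VIDIOC_STREAMON, &type);

  for (int frame = 0; frame < 10; frame++) {           // dequeue, use, re-queue
    v4l2_buffer buf{};
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0) { perror("VIDIOC_DQBUF"); break; }
    printf("frame %d: %u bytes in buffer %u\n", frame, buf.bytesused, buf.index);
    // pixel data lives at start[buf.index], buf.bytesused bytes long
    ioctl(fd, VIDIOC_QBUF, &buf);
  }

  ioctl(fd, VIDIOC_STREAMOFF, &type);
  for (unsigned i = 0; i < req.count; i++) munmap(start[i], length[i]);
  close(fd);
  return 0;
}

Because V4LCamera opens the device with O_NONBLOCK, its GrabFrame() wraps the dequeue in a select() with a two-second timeout; the blocking sketch above can simply let VIDIOC_DQBUF wait.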
trunk/lib/FlairSensorActuator/src/V4LCamera.h
r165 → r330

  #include <Thread.h>
  #include "Camera.h"
- #include <highgui.h>
+ //todo use pimpl and remove this
+ #include <linux/videodev2.h>
+ #include <libv4l1-videodev.h>
+ #define MAX_V4L_BUFFERS 10

  namespace flair {
…
     */
    void Run(void);
-
-   CvCapture *capture;
-
+
+   int device;
    gui::Tab *sensor_tab;
    gui::DoubleSpinBox *bright, *exposure, *gain, *contrast, *hue, *sharpness, *sat;
…
    gui::Label *fps;
    bool hasProblems;
+   static int xioctl( int fd, int request, void *arg);
+   void SetProperty(int property,float value);
+   float GetProperty(int property);
+   void GrabFrame(void);
+   int read_frame_v4l2(void);
+   char *RetrieveRawFrame(void);
+   int AllocBuffers(void);
+   struct video_mmap *mmaps;
+   int bufferIndex;
+   struct v4l2_requestbuffers requestbuffers;
+   struct buffer {
+     void * start;
+     size_t length;
+   };
+   buffer buffers[MAX_V4L_BUFFERS + 1];
+   char* frame;
  };
  } // end namespace sensor
trunk/lib/FlairSensorActuator/src/VrpnClient.cpp
r309 → r330

  VrpnClient::VrpnClient(string name,
-                        string address, uint8_t priority)
+                        string address, uint8_t priority,ConnectionType_t connectionType)
      : Thread(getFrameworkManager(), name, priority) {
    if (singleton != NULL) {
…
    }

-   pimpl_ = new VrpnClient_impl(this, name, address);
+   pimpl_ = new VrpnClient_impl(this, name, address,connectionType);
  }

-
- VrpnClient::VrpnClient(std::string name,
-                        uint16_t port, uint8_t priority)
-     : Thread(getFrameworkManager(), name, priority) {
-   if (singleton != NULL) {
-     SimpleWarn("VrpnClient should be instanced only one time!\n");
-     SimpleWarn("Next calls to GetVrpnClient() will return the first created VrpnClient (%s)\n",singleton->ObjectName().c_str());
-   } else {
-     singleton = this;
-   }
-
-   pimpl_ = new VrpnClient_impl(this, name, port);
- }
-
  VrpnClient::VrpnClient(string name,
                         SerialPort *serialport, uint16_t us_period,
trunk/lib/FlairSensorActuator/src/VrpnClient.h
r309 → r330

  public:
+   typedef enum { Vrpn, VrpnLite, Xbee } ConnectionType_t;
+
    /*!
     * \brief Constructor
     *
-    * Construct a VrpnClient. Connection is done by IP to a vrpn server.
+    * Construct a VrpnClient. Connection is done by IP to a vrpn server or vrpnlite server (see tools/VrpnLite in flair-src)
     *
     * \param name name
     * \param address server address
     * \param priority priority of the Thread
+    * \param connection connection type: Vrpn or VrpnLite
     */
    VrpnClient(std::string name,
-              std::string address, uint8_t priority);
-
-   /*!
-    * \brief Constructor
-    *
-    * Construct a VrpnClient. Connection is done by IP to a vrpn-lite server (see tools/VrpnLite in flair-src)
-    *
-    * \param name name
-    * \param port server port
-    * \param priority priority of the Thread
-    */
-   VrpnClient(std::string name,
-              uint16_t port, uint8_t priority);
+              std::string address, uint8_t priority,ConnectionType_t connectionType=Vrpn);

    /*!
…
    gui::TabWidget *GetTabWidget(void) const;

-   typedef enum { Vrpn, VrpnLite, Xbee } ConnectionType_t;
-
    ConnectionType_t ConnectionType(void) const;
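With the port-based constructor gone, both the classic VRPN connection and the lite one now go through the single address-based constructor, selected by the new ConnectionType_t argument. A hypothetical usage sketch follows; the server addresses, thread priority and object name are illustrative, the surrounding FrameworkManager setup is omitted, and the VrpnObject arguments simply follow the documentation in VrpnObject.h below.

#include <VrpnClient.h>
#include <VrpnObject.h>

using namespace flair::sensor;

// classic VRPN server; connectionType defaults to Vrpn
VrpnClient *client = new VrpnClient("vrpn", "192.168.1.10:3883", 80);

// ...or a VRPN-lite server (see tools/VrpnLite in flair-src):
// VrpnClient *client = new VrpnClient("vrpn", "192.168.1.10:9000", 80, VrpnClient::VrpnLite);

// trackables are declared the same way in both modes
VrpnObject *target = new VrpnObject("target", client->GetTabWidget());
client->Start();

GetTabWidget() and the ConnectionType_t values come straight from the header above; everything else in the sketch is assumed.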
trunk/lib/FlairSensorActuator/src/VrpnClient_impl.cpp
r309 → r330

  VrpnClient_impl::VrpnClient_impl(VrpnClient *self, string name,
-                                  std::string address) {
+                                  std::string address,flair::sensor::VrpnClient::ConnectionType_t connectionType) {
    this->self = self;
    this->address = address;
+   this->connectionType = connectionType;
    isConnected=false;
-   connectionType=VrpnClient::Vrpn;
-
-   connection = vrpn_get_connection_by_name(address.c_str());
+
+   if(connectionType==VrpnClient::Vrpn) {
+     connection = vrpn_get_connection_by_name(address.c_str());
+     Printf("Connecting to VRPN server on %s\n",address.c_str());
+   } else if(connectionType==VrpnClient::VrpnLite) {
+     dataSocket =new UdpSocket(getFrameworkManager(), "data_socket", address);
+     Printf("Connecting to VRPN-lite server on %s\n",address.c_str());
+   } else {
+     self->Err("Bad connection type, try using naother constructor\n");
+   }

    CommonConstructor(name);
-
-   Printf("Connecting to VRPN server on %s\n",address.c_str());
- }
-
- VrpnClient_impl::VrpnClient_impl(VrpnClient *self, string name,
-                                  uint16_t port) {
-   this->self = self;
-   isConnected=false;
-   connectionType=VrpnClient::VrpnLite;
-
-   dataSocket =new UdpSocket(getFrameworkManager(), "data_socket", port);
-
-   CommonConstructor(name);
-
-   Printf("Connecting to VRPN-lite server on port %i\n",port);
- }
+ }
+
+

  VrpnClient_impl::VrpnClient_impl(VrpnClient *self, string name,
…
      trackables.push_back(obj);
      mutex->ReleaseMutex();
+   } else if (connectionType==VrpnClient::VrpnLite) {
+     if(liteObjects.size()<0xffff) {
+       liteObject_t tmp;
+       tmp.vrpnobject = obj;
+       tmp.id = liteObjects.size();
+       mutex->GetMutex();
+       liteObjects.push_back(tmp);
+       mutex->ReleaseMutex();
+       //Printf("%i %s\n",tmp.id,obj->self->ObjectName().c_str());
+
+       char char_array[obj->self->ObjectName().length() + 2];//id coded on 16bits
+       strcpy(char_array, obj->self->ObjectName().c_str());
+       uint16_t* idPtr=(uint16_t*)&char_array[obj->self->ObjectName().length()];
+       *idPtr=tmp.id;
+       dataSocket->HostToNetwork((char*)idPtr,sizeof(uint16_t));
+       dataSocket->SendMessage(char_array,obj->self->ObjectName().length() + 2);
+     } else {
+       self->Warn("too much trackables for vrpnlite connection, not adding %s\n",obj->self->ObjectName().c_str());
+     }
    } else {
-     self->Warn("AddTrackable called but not in vrpn mode\n");
+     self->Warn("AddTrackable called but not in vrpn mode nor in vrpnlite mode\n");
    }
  }
…
      mutex->ReleaseMutex();
    } else {
-     self->Warn("AddTrackable called but not in vrpnlite nor in xbee mode\n");
+     self->Warn("AddTrackable called but not in xbee mode\n");
    }
  }
…
    mutex->GetMutex();
    if (connectionType==VrpnClient::Vrpn) {
-     for (vector<VrpnObject_impl *>::iterator it = trackables.begin();
-          it < trackables.end(); it++) {
+     for (vector<VrpnObject_impl *>::iterator it = trackables.begin();it < trackables.end(); it++) {
        if (*it == obj) {
          trackables.erase(it);
…
    }
    if (connectionType==VrpnClient::VrpnLite || connectionType==VrpnClient::Xbee) {
-     for (vector<liteObject_t>::iterator it = liteObjects.begin();
-          it < liteObjects.end(); it++) {
+     for (vector<liteObject_t>::iterator it = liteObjects.begin();it < liteObjects.end(); it++) {
        if ((*it).vrpnobject == obj) {
          liteObjects.erase(it);
…
        //printf("%lld\n",GetTime()/(1000*1000));
        mutex->GetMutex();
-       for (unsigned int i = 0; i < trackables.size(); i++)
-         trackables.at(i)->tracker->mainloop();
+       for (unsigned int i = 0; i < trackables.size(); i++) {
+         // Printf("tracker %i\n",i);
+         trackables.at(i)->tracker->mainloop();
+         //Printf("tracker %i ok\n",i);
+       }
        mutex->ReleaseMutex();
      } else {
…
      }
    }else if(connectionType==VrpnClient::VrpnLite) {
-     vrpn_TRACKERCB t;
-     float pos[3];
-     float quat[4];
+     mutex->GetMutex();
+
+     int16_t pos[3];
+     int16_t quat[4];
      Time time;
-     uint8_t id;
-     char datas[sizeof(id) + sizeof(pos)+sizeof(quat)+ sizeof(time)];
+     char datas[liteObjects.size()*(sizeof(pos)+sizeof(quat))+ sizeof(time)];
+     char *datasPtr=datas;
+
      int rcv=dataSocket->RecvMessage(datas,sizeof(datas),50*1000*1000);
-     if(rcv!=sizeof(datas)) continue;
-     id = datas[0];
-     memcpy(pos, datas+sizeof(id), sizeof(pos));
-     memcpy(quat, datas +sizeof(id)+ sizeof(pos), sizeof(quat));
-     memcpy(&time, datas+sizeof(id) + sizeof(pos)+sizeof(quat), sizeof(time));
-
-     for(int i=0;i<3;i++) dataSocket->NetworkToHost((char*)(&pos[i]),sizeof(pos[i]));
-     for(int i=0;i<4;i++) dataSocket->NetworkToHost((char*)(&quat[i]),sizeof(quat[i]));
+     if(rcv!=sizeof(datas)) {
+       if(rcv>0) Printf("discarding message (size %i/%i)\n",rcv,sizeof(datas));
+       mutex->ReleaseMutex();
+       continue;
+     }
+
+     memcpy(&time, datasPtr+sizeof(datas)-sizeof(time), sizeof(time));
      dataSocket->NetworkToHost((char*)(&time),sizeof(time));
-
-     mutex->GetMutex();
-     if (id < liteObjects.size()) {
-       for (int i = 0; i < 3; i++) t.pos[i] = pos[i];
+
+     for (vector<liteObject_t>::iterator it = liteObjects.begin();it < liteObjects.end(); it++) {
+       memcpy(pos, datasPtr, sizeof(pos));
+       datasPtr+=sizeof(pos);
+       memcpy(quat,datasPtr, sizeof(quat));
+       datasPtr+=sizeof(quat);
+
+       for(int i=0;i<3;i++) dataSocket->NetworkToHost((char*)(&pos[i]),sizeof(pos[i]));
+       for(int i=0;i<4;i++) dataSocket->NetworkToHost((char*)(&quat[i]),sizeof(quat[i]));
+
+       vrpn_TRACKERCB t;
+       for (int i = 0; i < 3; i++) t.pos[i] = ConvertPosition(pos[i]);
        // warning: t.quat is defined as (qx,qy,qz,qw), which is different from
        // flair::core::Quaternion
-       t.quat[0] = quat[1];
-       t.quat[1] = quat[2];
-       t.quat[2] = quat[3];
-       t.quat[3] = quat[0];
+       t.quat[0] = ConvertQuaternion(quat[1]);
+       t.quat[1] = ConvertQuaternion(quat[2]);
+       t.quat[2] = ConvertQuaternion(quat[3]);
+       t.quat[3] = ConvertQuaternion(quat[0]);
        t.msg_time.tv_sec=time/((Time)1000000000);
        t.msg_time.tv_usec=(time%((Time)1000000000))/((Time)1000);
        //Printf("%i %lld %lld %lld\n",id,time,t.msg_time.tv_sec,t.msg_time.tv_usec);
-       VrpnObject_impl::handle_pos(liteObjects.at(id).vrpnobject, t);
-     }
+       VrpnObject_impl::handle_pos((void*)(it->vrpnobject), t);
+     }
+
      mutex->ReleaseMutex();
    }
  }
  }
+
+ float VrpnClient_impl::ConvertPosition(int16_t value) const {
+   return (float)value/1000.;
+ }
+
+ float VrpnClient_impl::ConvertQuaternion(int16_t value) const {
+   return (float)value/32767.;
+ }
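The rewritten VRPN-lite receive path no longer reads one small packet per trackable identified by a one-byte id; it now expects a single datagram carrying every registered trackable, in registration order, followed by one shared timestamp. The layout implied by Run(), ConvertPosition() and ConvertQuaternion() is sketched below; the field names are descriptive only, and the millimetre and 1/32767 scalings are inferred from the conversion helpers.

// One VRPN-lite datagram, every field in network byte order:
//
//   repeated once per registered trackable, in the order AddTrackable() was called:
//     int16_t pos[3];   // position, presumably millimetres: metres = value / 1000.0f
//     int16_t quat[4];  // quaternion as (qw, qx, qy, qz), scaled: component = value / 32767.0f
//
//   Time timestamp;     // one capture time in nanoseconds, shared by all trackables
//
// Total size = nbTrackables * 7 * sizeof(int16_t) + sizeof(Time),
// which is exactly the receive buffer Run() allocates; datagrams of any
// other size are discarded.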
trunk/lib/FlairSensorActuator/src/VrpnObject.h
r309 → r330

   * \brief Constructor
   *
-  * Construct a VrpnObject. Connection is done by IP. (vrpn)
+  * Construct a VrpnObject. Connection is done by IP with vrpn or vrpnlite (see tools/VrpnLite in flair-src)
   *
   * \param name VRPN object name, should be the same as defined in the server
…
   * \brief Constructor
   *
-  * Construct a VrpnObject. Connection is done by xbee or vrpnlite (see tools/VrpnLite in flair-src)
+  * Construct a VrpnObject. Connection is done by xbee
   *
   * \param name name
-  * \param id VRPN object id, should be the same as defined in the xbee bridge or vrpnlite tool
+  * \param id VRPN object id, should be the same as defined in the xbee bridge
   * \param tab Tab for the user interface
   * \param client VrpnClient of the connection, if unspecified, use the default one
trunk/lib/FlairSensorActuator/src/VrpnObject_impl.cpp
r318 → r330

    self->Err("erreur aucun identifiant specifie pour la connexion Xbee\n");
  }
- if (id == -1 && GetVrpnClient()->ConnectionType()==VrpnClient::VrpnLite) {
-   self->Err("erreur aucun identifiant specifie pour la connexion VrpnLite\n");
+ if (id != -1 && GetVrpnClient()->ConnectionType()==VrpnClient::VrpnLite) {
+   self->Err("identifiant pour la connexion ignore car inutile en mode VrpnLite\n");
  }
  if (id != -1 && GetVrpnClient()->ConnectionType()==VrpnClient::Vrpn) {
…
    parent->pimpl_->AddTrackable(this);
  } else if(GetVrpnClient()->ConnectionType()==VrpnClient::VrpnLite){
-   parent->pimpl_->AddTrackable(this, id);
+   parent->pimpl_->AddTrackable(this);
  }
…
  caller->output->SetDataTime(time,deltaTime);
+ //Printf("%s data filled\n",caller->self->ObjectName().c_str());
  caller->output->ReleaseMutex();
…
  caller->state->SetValueNoMutex(2, 0, Euler::ToDegree(euler.yaw));
  caller->state->ReleaseMutex();
-
+ //Printf("%s process\n",caller->self->ObjectName().c_str());
  caller->self->ProcessUpdate(caller->output);
- }
+ //Printf("%s process ok\n",caller->self->ObjectName().c_str());
+ }
trunk/lib/FlairSensorActuator/src/unexported/VrpnClient_impl.h
r309 → r330

  public:
    VrpnClient_impl(flair::sensor::VrpnClient *self, std::string name,
-                   std::string address);
+                   std::string address,flair::sensor::VrpnClient::ConnectionType_t connectionType);
    VrpnClient_impl(flair::sensor::VrpnClient *self, std::string name,
                    flair::core::SerialPort *serialport, uint16_t us_period);
-   VrpnClient_impl(flair::sensor::VrpnClient *self, std::string name,
-                   uint16_t port);
    ~VrpnClient_impl();
    void AddTrackable(VrpnObject_impl *obj); // normal
…
  private:
    void CommonConstructor(std::string name);
+   float ConvertPosition(int16_t value) const;
+   float ConvertQuaternion(int16_t value) const;
    flair::sensor::VrpnClient *self;
    flair::core::Mutex *mutex;