/*********************************************************************
// created:     2006/06/08 - 21:47
// filename:    Camera1394.cpp
//
// author:      Gerald Dherbomez
//              Copyright Heudiasyc UMR UTC/CNRS 6599
//
// version:     $Id$
//
// purpose:     Definition of the Camera1394 class
//
// todo:
*********************************************************************/
#include "Camera1394.h"
// NOTE(review): the two bare #include directives below lost their header
// names (extraction damage) — restore the original system/Qt headers.
#include
#include
#include "kernel/ComponentFactory.h"
#include "kernel/DbiteException.h"
#include "kernel/DbiteFileTypes.h"
#include "kernel/Log.h"
#include "PacpusTools/ShMem.h"

using namespace pacpus;
using namespace std;

DECLARE_STATIC_LOGGER("pacpus.base.Camera1394");

/// Construct the factory
ComponentFactory sFactory("Camera1394");

/// Constructor.
/// Registers the component under 'name' and puts every runtime resource
/// into a known-empty state so the destructor is always safe to run.
Camera1394::Camera1394(QString name)
    : ComponentBase(name)
{
    image_ = NULL;
    qimage_ = NULL;
    // fixed: theCamera was left uninitialized; the destructor dereferences
    // it, which was undefined behavior when startActivity() never ran
    theCamera = NULL;
    acquiring_ = FALSE;
    recording_ = FALSE;
    tr_ = 0;
}

/// Destructor.
/// Stops a running acquisition (if any) and releases the camera object
/// and both image buffers.
Camera1394::~Camera1394()
{
    // fixed: only touch the camera if initCamera() actually created it
    if (theCamera) {
        // verify that the camera has been initialized
        if (theCamera->m_cameraInitialized)
            if (theCamera->StopImageAcquisition() != CAM_SUCCESS)
                qFatal("Problem Stopping Image Acquisition");
        delete theCamera;
        theCamera = NULL;
    }
    delete[] image_;
    image_ = NULL;
    delete qimage_;
    qimage_ = NULL;
}

//////////////////////////////////////////////////////////////////////////
// Configure the component
// Parameters are:
// - node: the number of the camera in the driver list. Normally, node 0
//   is for the first camera connected, node is incremented for others
// - format / mode / framerate: IIDC video format, mode and framerate codes
// - recording / recordFormat: enable on-disk recording and its image format
// - reverseImage: mirror the image in both directions
// - display / zoom: viewer options
//////////////////////////////////////////////////////////////////////////
ComponentBase::COMPONENT_CONFIGURATION Camera1394::configureComponent(XmlComponentConfig config)
{
    cameraNode_ = param.getProperty("node").toInt();
    videoFormat_ = param.getProperty("format").toInt();
    videoMode_ = param.getProperty("mode").toInt();
    frameRate_ = param.getProperty("framerate").toInt();
    recording_ = (param.getProperty("recording") == "true");
    recordFormat_ = param.getProperty("recordFormat");
    reverseImage_ = (param.getProperty("reverseImage") == "true");
    displaying_ = (param.getProperty("display") == "true");
    zoom_ = param.getProperty("zoom").toInt();
    // NOTE(review): imageMutex_ is never deleted anywhere in this file —
    // leaks one QMutex per configure; consider making it a plain member
    imageMutex_ = new QMutex;
    /* if (displaying_) {
        viewer_ = new ImageViewer;
        viewer_->setMutex(imageMutex_);
        viewer_->show();
        viewer_->setWindowTitle(componentName);
        connect(this, SIGNAL( newImage(QImage*) ),
                viewer_, SLOT( display(QImage*) ) );
    }*/
    return ComponentBase::CONFIGURED_OK;
}

//////////////////////////////////////////////////////////////////////////
// Stop the acquisition of data
//////////////////////////////////////////////////////////////////////////
void Camera1394::stopActivity()
{
    stopAcquiring();
    // wait the thread termination
    if (!wait(1000)) {
        LOG_ERROR(componentName << ":The thread doesn't respond for 1 second, it has been terminated");
        terminate();
    }
    file_.close();
}

//////////////////////////////////////////////////////////////////////////
// Start the camera
// Set all video parameters (resolution, mode and framerate)
//////////////////////////////////////////////////////////////////////////
void Camera1394::startActivity()
{
    try {
        QString dbtFileName_ = componentName + ".dbt";
        file_.open(dbtFileName_.toStdString(), WriteMode, FILE_JPEG,
                   MAX_CHAR_DIRECTORY_NAME + MAX_CHAR_PICTURE_NAME);
    } catch (DbiteException & e) {
        cerr << "error opening dbt file: " << e.what() << endl;
        return;
    }
    initCamera();
    // the driver exposes a capability matrix; refuse unsupported combos
    if (theCamera->m_videoFlags[videoFormat_][videoMode_][frameRate_]) {
        // set the camera parameters
        theCamera->SetVideoFormat(videoFormat_);
        theCamera->SetVideoMode(videoMode_);
        theCamera->SetVideoFrameRate(frameRate_);
    } else
        qFatal("Video format, mode or rate are bad !");
    theCamera->StartImageAcquisition();
    // switch some controls in Automatic mode
    theCamera->m_controlWhiteBalance.SetAutoMode(TRUE);
    theCamera->m_controlShutter.SetAutoMode(TRUE);
    theCamera->m_controlGain.SetAutoMode(TRUE);
theCamera->m_controlIris.SetAutoMode(TRUE); theCamera->SetZoom(zoom_); // alloc images if (!image_) image_ = new unsigned char [ theCamera->m_width * theCamera->m_height * 3]; if (!qimage_) qimage_ = new QImage( theCamera->m_width , theCamera->m_height, QImage::Format_RGB32 ); startAcquiring(); start(); } ////////////////////////////////////////////////////////////////////////// // Initialize the C1394Camera Driver // Verify the presence of a compatible camera and select it // If it doesn't work, please have a look on the Windows driver in the // hardware manager ////////////////////////////////////////////////////////////////////////// bool Camera1394::initCamera() { theCamera = new C1394Camera; qDebug("Verifying camera presence..."); if(theCamera->CheckLink() != CAM_SUCCESS) qFatal("No camera detected !"); qDebug("Selecting Camera..."); switch (theCamera->SelectCamera(cameraNode_)) { case CAM_SUCCESS : qDebug("The camera has been selected and verified"); break; case CAM_ERROR_PARAM_OUT_OF_RANGE : qFatal("The node value is invalid !"); case CAM_ERROR : qFatal("No camera available !"); default : qFatal("Problem with SelectCamera() method"); } qDebug("Initializing Camera..."); if (theCamera->InitCamera() != CAM_SUCCESS) qFatal("Error during camera initialization !"); qDebug("Checking Feature Presence..."); theCamera->InquireControlRegisters(); qDebug("Checking Feature Status..."); theCamera->StatusControlRegisters(); qDebug("Camera Initialized"); char commandline[64]; sprintf(commandline,"mkdir %s\n",componentName.toLatin1().data()); system(commandline); puts(commandline); return TRUE; } bool Camera1394::recordImage(QString format) { ++imageCounter_; format = format.toLower(); if (format == "jpeg") { format = "jpg"; } bool formatSupported = false; QImageWriter imageWriter; if (imageWriter.supportedImageFormats().contains(format.toLatin1())) { formatSupported = true; } if (!formatSupported) { LOG_ERROR("Format" << format << "not supported for recording"); } else { // 
write in the dbt file sprintf( imageName_, componentName.toLatin1().data() ); sprintf( imageName_, "%s/image%d.%s", imageName_, imageCounter_, format.toLatin1().data() ); imageWriter.setFileName(imageName_); imageWriter.setFormat( format.toLatin1() ); imageWriter.write(*qimage_); // record the data in a dbt file. The dbt data is only the structure AlascaXT // scan data are recording in the alasca_data.utc file try { file_.writeRecord(time_, tr_, reinterpret_cast(&imageName_), MAX_CHAR_DIRECTORY_NAME + MAX_CHAR_PICTURE_NAME); } catch (DbiteException & e) { cerr << "error writing data: " << e.what() << endl; return false; } } return formatSupported; } //#define MEASURE_TIME ////////////////////////////////////////////////////////////////////////// // Main loop for acquisition ////////////////////////////////////////////////////////////////////////// void Camera1394::run() { bool firstTime = true; ShMem * shMem; imageCounter_ = 0; time_ = road_time(); road_time_t t; while (acquiring_) { if (theCamera->AcquireImage()) qFatal("Problem Acquiring Image !"); prevTime_ = time_; time_ = road_time(); // verify that the acquisition period is good relatively to the selected format if ( isPeriodGood((int)(time_ - prevTime_)) ) setState(MONITOR_OK); else setState(MONITOR_NOK); t = road_time(); if ( !fillRGB32Image(qimage_->bits()) ) continue; #ifdef MEASURE_TIME qDebug() << componentName << "duration conversion: " << (int)(road_time() - t) ; #endif imageMutex_->lock(); t = road_time(); if (reverseImage_) *qimage_ = qimage_->mirrored(true,true); // send image in shared memory if (firstTime) { // shMem = new ShMem("IMAGE", qimage_->numBytes()); firstTime = false; } assert(shMem); // shMem->write(qimage_->bits(), qimage_->numBytes()); #ifdef MEASURE_TIME qDebug() << componentName << "duration mirror: " << (int)(road_time() - t) ; #endif imageMutex_->unlock(); emit newImage(qimage_); t = road_time(); if (recording_) { recordImage(recordFormat_); } #ifdef MEASURE_TIME qDebug() << 
componentName << "duration record: " << (int)(road_time() - t) ;
#endif
    } // END while (acquiring_)

    // Acquisition loop has ended: shut the camera down and report state.
    LOG_INFO(":Stopping the camera");
    if (theCamera->StopImageAcquisition() != CAM_SUCCESS)
        LOG_FATAL("Problem Stopping Image Acquisition");
    // NOTE(review): the 'new ShMem' inside the loop is commented out, so
    // shMem is never assigned in this function — this delete runs on an
    // uninitialized pointer whenever the loop executed at least once; verify.
    if (!firstTime)
        delete shMem;
    setState(STOPPED);
}

//////////////////////////////////////////////////////////////////////////
// return the actual period in µs corresponding to the framerate code
// (IIDC framerate codes 0..5 map to 1.875 .. 60 fps); returns 0 and logs
// a warning for an unknown code
//////////////////////////////////////////////////////////////////////////
double Camera1394::getPeriod()
{
    switch(frameRate_) {
    case 0: // 1.875 fps
        return 1.0e6/1.875;
        break;
    case 1: // 3.75 fps
        return 1.0e6/3.75;
        break;
    case 2: // 7.5 fps
        return 1.0e6/7.5;
        break;
    case 3: // 15 fps
        return 1.0e6/15;
        break;
    case 4: // 30 fps
        return 1.0e6/30;
        break;
    case 5: // 60 fps
        return 1.0e6/60;
        break;
    default:
        LOG_WARN("Unknown framerate. The component " << componentName << " may encounter a problem !");
        return 0;
    }
}

//////////////////////////////////////////////////////////////////////////
// retrn true if the period T corresponds to the framerate (at +/-20%)
// T is given in microseconds
//////////////////////////////////////////////////////////////////////////
bool Camera1394::isPeriodGood(int T)
{
    // NOTE(review): getPeriod() returns double; the float narrows the value,
    // harmless at these magnitudes but worth confirming
    float period = getPeriod();
    if ( (T < period*0.8) || (T > period*1.2) )
        return false;
    else
        return true;
}

//////////////////////////////////////////////////////////////////////////
// useful macro to keep 'a' between 0 and 255
//////////////////////////////////////////////////////////////////////////
#define CLAMP_TO_UCHAR(a) (unsigned char)((a) < 0 ? 0 : ((a) > 255 ? 255 : (a)))

//////////////////////////////////////////////////////////////////////////
// Convert the image from YUV444 to RGB32
// The image is stored using a 32-bit RGB format (0xffRRGGBB)
//////////////////////////////////////////////////////////////////////////
void Camera1394::YUV444toRGB32(unsigned char* buffer)
{
    long Y, U, V, deltaG;
    unsigned char *srcptr, *srcend, *destptr;
    // single-stage idiotproofing
    if(theCamera->m_pData == NULL || buffer == NULL)
        return;
    srcptr = theCamera->m_pData;
    srcend = srcptr + (theCamera->m_width * theCamera->m_height * 3);
    destptr = buffer;
    // data pattern: UYV
    // unroll it to 4 pixels/round
    while(srcptr < srcend) {
        // pixel 1
        U = (*srcptr++) - 128;
        Y = (*srcptr++);
        V = (*srcptr++) - 128;
        // fixed-point green delta with rounding before the >>16
        deltaG = (12727 * U + 33384 * V);
        deltaG += (deltaG > 0 ? 32768 : -32768);
        deltaG >>= 16;
        *destptr++ = CLAMP_TO_UCHAR( Y + U );
        *destptr++ = CLAMP_TO_UCHAR( Y - deltaG );
        *destptr++ = CLAMP_TO_UCHAR( Y + V );
        destptr++;//A
        // pixel 2
        U = (*srcptr++) - 128;
        Y = (*srcptr++);
        V = (*srcptr++) - 128;
        deltaG = (12727 * U + 33384 * V);
        deltaG += (deltaG > 0 ? 32768 : -32768);
        deltaG >>= 16;
        *destptr++ = CLAMP_TO_UCHAR( Y + U );
        *destptr++ = CLAMP_TO_UCHAR( Y - deltaG );
        *destptr++ = CLAMP_TO_UCHAR( Y + V );
        destptr++;//A
        // pixel 3
        U = (*srcptr++) - 128;
        Y = (*srcptr++);
        V = (*srcptr++) - 128;
        deltaG = (12727 * U + 33384 * V);
        deltaG += (deltaG > 0 ? 32768 : -32768);
        deltaG >>= 16;
        *destptr++ = CLAMP_TO_UCHAR( Y + U );
        *destptr++ = CLAMP_TO_UCHAR( Y - deltaG );
        *destptr++ = CLAMP_TO_UCHAR( Y + V );
        destptr++;//A
        // pixel 4
        U = (*srcptr++) - 128;
        Y = (*srcptr++);
        V = (*srcptr++) - 128;
        deltaG = (12727 * U + 33384 * V);
        deltaG += (deltaG > 0 ? 32768 : -32768);
        deltaG >>= 16;
        *destptr++ = CLAMP_TO_UCHAR( Y + U );
        *destptr++ = CLAMP_TO_UCHAR( Y - deltaG );
        *destptr++ = CLAMP_TO_UCHAR( Y + V );
        destptr++;//A
    }
}

//////////////////////////////////////////////////////////////////////////
// Convert the image from YUV422 to RGB32
// The image is stored using a 32-bit RGB format (0xffRRGGBB)
//////////////////////////////////////////////////////////////////////////
void Camera1394::YUV422toRGB32(unsigned char *buffer)
{
    long Y, U, V, deltaG;
    unsigned char *srcptr, *srcend, *destptr;
    // NOTE(review): pointers are computed from m_pData before the NULL
    // check below — pointer arithmetic on NULL, verify ordering upstream
    srcptr = theCamera->m_pData;
    srcend = srcptr + ((theCamera->m_width * theCamera->m_height) << 1);
    destptr = buffer;
    // single-stage idiotproofing
    if(theCamera->m_pData == NULL || buffer == NULL) {
        return;
    }
    // data pattern: UYVY (one U/V pair shared by two Y samples)
    while(srcptr < srcend) {
        U = *srcptr;
        U -= 128;
        V = *(srcptr+2);
        V -= 128;
        deltaG = (12727 * U + 33384 * V);
        deltaG += (deltaG > 0 ? 32768 : -32768);
        deltaG >>= 16;
        Y = *(srcptr + 1);
        *destptr++ = CLAMP_TO_UCHAR( Y + U );
        *destptr++ = CLAMP_TO_UCHAR( Y - deltaG );
        *destptr++ = CLAMP_TO_UCHAR( Y + V );
        destptr++;//A
        Y = *(srcptr + 3);
        *destptr++ = CLAMP_TO_UCHAR( Y + U );
        *destptr++ = CLAMP_TO_UCHAR( Y - deltaG );
        *destptr++ = CLAMP_TO_UCHAR( Y + V );
        destptr++;//A
        srcptr += 4;
        // twice in the same loop... just like halving the loop overhead
        U = (*srcptr) - 128;
        V = (*(srcptr+2)) - 128;
        deltaG = (12727 * U + 33384 * V);
        deltaG += (deltaG > 0 ? 32768 : -32768);
        deltaG >>= 16;
        Y = *(srcptr + 1);
        *destptr++ = CLAMP_TO_UCHAR( Y + U );
        *destptr++ = CLAMP_TO_UCHAR( Y - deltaG );
        *destptr++ = CLAMP_TO_UCHAR( Y + V );
        destptr++;//A
        Y = *(srcptr + 3);
        *destptr++ = CLAMP_TO_UCHAR( Y + U );
        *destptr++ = CLAMP_TO_UCHAR( Y - deltaG );
        *destptr++ = CLAMP_TO_UCHAR( Y + V );
        destptr++;//A
        srcptr += 4;
    }
}

//////////////////////////////////////////////////////////////////////////
// Convert the image from YUV411 to RGB32
// The image is stored using a 32-bit RGB format (0xffRRGGBB)
//////////////////////////////////////////////////////////////////////////
void Camera1394::YUV411toRGB32(unsigned char *buffer)
{
    long Y, U, V, deltaG;
    unsigned char *srcptr, *srcend, *destptr;
    // single-stage idiotproofing
    if(theCamera->m_pData == NULL || buffer == NULL)
        return;
    // data pattern: UYYVYY (one U/V pair shared by four Y samples)
    srcptr = theCamera->m_pData;
    srcend = srcptr + ((theCamera->m_width * theCamera->m_height * 3) >> 1);
    destptr = buffer;
    while(srcptr < srcend) {
        U = (*srcptr) - 128;
        V = (*(srcptr+3)) - 128;
        deltaG = (12727 * U + 33384 * V);
        deltaG += (deltaG > 0 ?
32768 : -32768); deltaG >>= 16; Y = *(srcptr + 1); *destptr++ = CLAMP_TO_UCHAR( Y + U ); *destptr++ = CLAMP_TO_UCHAR( Y - deltaG ); *destptr++ = CLAMP_TO_UCHAR( Y + V ); destptr++;//A Y = *(srcptr + 2); *destptr++ = CLAMP_TO_UCHAR( Y + U ); *destptr++ = CLAMP_TO_UCHAR( Y - deltaG ); *destptr++ = CLAMP_TO_UCHAR( Y + V ); destptr++;//A Y = *(srcptr + 4); *destptr++ = CLAMP_TO_UCHAR( Y + U ); *destptr++ = CLAMP_TO_UCHAR( Y - deltaG ); *destptr++ = CLAMP_TO_UCHAR( Y + V ); destptr++;//A Y = *(srcptr + 5); *destptr++ = CLAMP_TO_UCHAR( Y + U ); *destptr++ = CLAMP_TO_UCHAR( Y - deltaG ); *destptr++ = CLAMP_TO_UCHAR( Y + V ); destptr++;//A srcptr += 6; } } ////////////////////////////////////////////////////////////////////////// // Convert the image from Y to RGB32 // The image is stored using a 32-bit RGB format (0xffRRGGBB) ////////////////////////////////////////////////////////////////////////// void Camera1394::YtoRGB32(unsigned char * buffer) { unsigned char *srcptr, *srcend, *destptr; srcptr = theCamera->m_pData; srcend = srcptr + (theCamera->m_width * theCamera->m_height); destptr = buffer; // single-stage idiotproofing if(theCamera->m_pData == NULL || buffer == NULL) return; // just Y's (monochrome) // unroll it to 4 per cycle while(srcptr < srcend) { *destptr++ = *srcptr; *destptr++ = *srcptr; *destptr++ = *srcptr; destptr++;//A srcptr++; *destptr++ = *srcptr; *destptr++ = *srcptr; *destptr++ = *srcptr; destptr++;//A srcptr++; *destptr++ = *srcptr; *destptr++ = *srcptr; *destptr++ = *srcptr; destptr++;//A srcptr++; *destptr++ = *srcptr; *destptr++ = *srcptr; *destptr++ = *srcptr; destptr++;//A srcptr++; } } ////////////////////////////////////////////////////////////////////////// // Convert the image from Y16 to RGB32 // The image is stored using a 32-bit RGB format (0xffRRGGBB) // 16-bit monochrome (new to spec 1.3) // the first of each pair of bytes is the high byte, so just copy those to RGB 
////////////////////////////////////////////////////////////////////////// void Camera1394::Y16toRGB32(unsigned char *buffer) { unsigned char *srcptr, *srcend, *destptr; srcptr = theCamera->m_pData; srcend = srcptr + 2 * (theCamera->m_width * theCamera->m_height); destptr = buffer; // single-stage idiotproofing if(theCamera->m_pData == NULL || buffer == NULL) return; // just Y's (monochrome, 16-bit little endian) // unroll it to 4 per cycle while(srcptr < srcend) { *destptr++ = *srcptr; *destptr++ = *srcptr; *destptr++ = *srcptr; destptr++;//A srcptr += 2; *destptr++ = *srcptr; *destptr++ = *srcptr; *destptr++ = *srcptr; destptr++;//A srcptr += 2; *destptr++ = *srcptr; *destptr++ = *srcptr; *destptr++ = *srcptr; destptr++;//A srcptr += 2; *destptr++ = *srcptr; *destptr++ = *srcptr; *destptr++ = *srcptr; destptr++;//A srcptr += 2; } } ////////////////////////////////////////////////////////////////////////// // Convert the image from RGB16 to RGB32 // The image is stored using a 32-bit RGB format (0xffRRGGBB) // 16-bit RGB (new to spec 1.3) // the first of each pair of bytes is the high byte, so just copy those to RGB // near duplicate of Y16toRGB ////////////////////////////////////////////////////////////////////////// void Camera1394::RGB16toRGB32(unsigned char * buffer) { unsigned char *srcptr, *srcend, *destptr; srcptr = theCamera->m_pData; srcend = srcptr + 6 * (theCamera->m_width * theCamera->m_height); destptr = buffer; // single-stage idiotproofing if(theCamera->m_pData == NULL || buffer == NULL) return; // R,G,B are 16-bit source, chop of the top 8 and feed // unroll it to 3 per cycle while(srcptr < srcend) { *destptr++ = *srcptr; *destptr++ = *srcptr; *destptr++ = *srcptr; destptr++;//A srcptr += 2; *destptr++ = *srcptr; *destptr++ = *srcptr; *destptr++ = *srcptr; destptr++;//A srcptr += 2; *destptr++ = *srcptr; *destptr++ = *srcptr; *destptr++ = *srcptr; destptr++;//A srcptr += 2; } } 
////////////////////////////////////////////////////////////////////////// // Convert the image from RGB24 to RGB32 // The image is stored using a 32-bit RGB format (0xffRRGGBB) ////////////////////////////////////////////////////////////////////////// void Camera1394::RGB24toRGB32(unsigned char * buffer) { unsigned char *srcptr, *srcend, *destptr; srcptr = theCamera->m_pData; srcend = srcptr + 3 * (theCamera->m_width * theCamera->m_height); destptr = buffer; // single-stage idiotproofing if(theCamera->m_pData == NULL || buffer == NULL) return; // unroll it to 3 per cycle while(srcptr < srcend) { *destptr++ = *(srcptr+2); //B *destptr++ = *(srcptr+1); //G *destptr++ = *srcptr; //R destptr++;//A srcptr += 3; *destptr++ = *(srcptr+2); *destptr++ = *(srcptr+1); *destptr++ = *srcptr; destptr++;//A srcptr += 3; *destptr++ = *(srcptr+2); *destptr++ = *(srcptr+1); *destptr++ = *srcptr; destptr++;//A srcptr += 3; } } bool Camera1394::fillRGB32Image(unsigned char * buffer) { if(buffer == NULL) return false; bool retValue = true; switch(videoFormat_) { case 0: switch(videoMode_) { case 0: // 160x120 YUV444 YUV444toRGB32(buffer); break; case 1: // 320x240 YUV222 YUV422toRGB32(buffer); break; case 2: // 640x480 YUV411 YUV411toRGB32(buffer); break; case 3: // 640x480 YUV422 YUV422toRGB32(buffer); break; case 4: // 640x480 RGB RGB24toRGB32(buffer); //memcpy(buffer,theCamera->m_pData,640 * 480 * 3); break; case 5: // 640x480 MONO YtoRGB32(buffer); break; case 6: // 640x480 MONO16 Y16toRGB32(buffer); break; default: retValue = false; qWarning("Current video mode is not supported for display and recording"); break; } break; case 1: switch(videoMode_) { case 0: // 800x600 YUV422 YUV422toRGB32(buffer); break; case 1: // 800x600 RGB memcpy(buffer,theCamera->m_pData,800 * 600 * 3); break; case 2: // 800x600 MONO YtoRGB32(buffer); break; case 3: // 1024x768 YUV422 YUV422toRGB32(buffer); break; case 4: // 1024x768 RGB memcpy(buffer,theCamera->m_pData,1024 * 768 * 3); break; case 5: // 
1024x768 MONO YtoRGB32(buffer); break; case 6: // 800x600 MONO16 case 7: // 1024x768 MONO16 Y16toRGB32(buffer); break; default: retValue = false; qWarning("Current video mode is not supported for display and recording"); break; } break; case 2: switch(videoMode_) { case 0: // 1280x960 YUV422 YUV422toRGB32(buffer); break; case 1: // 1280x960 RGB memcpy(buffer,theCamera->m_pData,1280 * 960 * 3); break; case 2: // 1280x960 MONO YtoRGB32(buffer); break; case 3: // 1600x1200 YUV422 YUV422toRGB32(buffer); break; case 4: // 1600x1200 RGB memcpy(buffer,theCamera->m_pData,1600 * 1200 * 3); break; case 5: // 1600x1200 MONO YtoRGB32(buffer); break; case 6: // 1280x1024 MONO16 case 7: // 1600x1200 MONO16 Y16toRGB32(buffer); break; default: retValue = false; qWarning("Current video mode is not supported for display and recording"); break; } break; case 7: switch(theCamera->m_controlSize.m_colorCode) { case 0: // Mono8 YtoRGB32(buffer); break; case 1: // YUV 411 YUV411toRGB32(buffer); break; case 2: // YUV 422 YUV422toRGB32(buffer); break; case 3: // YUV 444 YUV444toRGB32(buffer); break; case 4: // RGB8 memcpy(buffer,theCamera->m_pData,1280 * 960 * 3); break; case 5: // Mono16 Y16toRGB32(buffer); break; case 6: // RGB16 RGB16toRGB32(buffer); break; default: // unsupported retValue = false; qWarning("Current video mode is not supported for display and recording"); break; } break; default: retValue = false; qWarning("Current video mode is not supported for display and recording"); break; } return retValue; }