00001 #include "Shared/RobotInfo.h"
00002 #if defined(TGT_HAS_CAMERA) || defined(TGT_HAS_WEBCAM)
00003
00004 #include "Behaviors/Controller.h"
00005 #include "DualCoding/ShapeEllipse.h"
00006 #include "DualCoding/ShapePolygon.h"
00007 #include "Crew/MapBuilder.h"
00008 #include "DualCoding/ShapeLine.h"
00009 #include "DualCoding/Point.h"
00010 #include "DualCoding/PolygonData.h"
00011 #include "DualCoding/SketchSpace.h"
00012 #include "DualCoding/VRmixin.h"
00013 #include "Events/EventRouter.h"
00014 #include "Events/FilterBankEvent.h"
00015 #include "Motion/Kinematics.h"
00016 #include "Shared/ProjectInterface.h"
00017 #include "Shared/debuget.h"
00018 #include "Vision/Graphics.h"
00019 #include "Vision/RawCameraGenerator.h"
00020 #include "Vision/JPEGGenerator.h"
00021 #include "Wireless/Wireless.h"
00022
00023 #include "RawCam.h"
00024
00025 using namespace DualCoding;
00026
00027 REGISTER_BEHAVIOR_MENU_OPT(RawCam,"TekkotsuMon",BEH_NONEXCLUSIVE);
00028
00029 RawCam* RawCam::theOne=NULL;
00030
00031 RawCam::RawCam()
00032 : CameraStreamBehavior("RawCam",visRaw),
00033 visRaw(NULL), packet(NULL), cur(NULL), avail(0), max_buf(0), lastProcessedTime(0) {
00034 ASSERT(theOne==NULL,"there was already a RawCam running!");
00035 theOne=this;
00036 }
00037
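// Open the streaming server and subscribe to the frame-completion events
// that may need to be forwarded: raw camera frames plus color and grayscale
// JPEG frames (which of these actually get sent depends on the rawcam
// encoding and compression settings).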
void RawCam::doStart() {
  BehaviorBase::doStart();
  setupServer();
  erouter->addListener(this, EventBase::visRawCameraEGID,
                       ProjectInterface::visRawCameraSID,
                       EventBase::deactivateETID);
  erouter->addListener(this, EventBase::visJPEGEGID,
                       ProjectInterface::visColorJPEGSID,
                       EventBase::deactivateETID);
  erouter->addListener(this, EventBase::visJPEGEGID,
                       ProjectInterface::visGrayscaleJPEGSID,
                       EventBase::deactivateETID);
}

void RawCam::doStop() {
  erouter->removeListener(this);
  closeServer();
  BehaviorBase::doStop();
}

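// Handle an incoming event: non-image events are passed through to
// CameraStreamBehavior, image events are throttled by
// config->vision.rawcam.interval and then serialized to the GUI according
// to the configured encoding.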
void RawCam::doEvent() {
  if(!wireless->isConnected(visRaw->sock))
    return;
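  // If the configured transport no longer matches the open socket
  // (0 = UDP, 1 = TCP; see setupServer()), rebuild the server with the
  // new transport before doing anything else.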
  if((config->vision.rawcam.transport==0 && visRaw->getTransport()==Socket::SOCK_STREAM)
     || (config->vision.rawcam.transport==1 && visRaw->getTransport()==Socket::SOCK_DGRAM)) {
    closeServer();
    setupServer();
    return;
  }
  try {
    const FilterBankEvent* fbke=dynamic_cast<const FilterBankEvent*>(event);
    if(fbke==NULL) {
      CameraStreamBehavior::doEvent();
      return;
    }
    if((get_time() - lastProcessedTime) < config->vision.rawcam.interval) {
      return;
    }

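    // Overlay any shapes registered via VRmixin::drawShapes onto the raw
    // frame before it is serialized, then encode as full color or as a
    // single channel depending on the rawcam.encoding setting.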
    if ( fbke->getGeneratorID() == EventBase::visRawCameraEGID && ! VRmixin::drawShapes.empty() )
      drawShapesIntoBuffer(*fbke);

    if(config->vision.rawcam.encoding==Config::vision_config::ENCODE_COLOR) {
      if(!writeColor(*fbke)) {
        if(packet) {
          // serialization failed mid-packet; rewind cur to the start of the
          // buffer so closePacket() sends nothing for this frame
          cur=packet;
          closePacket();
        }
      }
    } else if(config->vision.rawcam.encoding==Config::vision_config::ENCODE_SINGLE_CHANNEL) {
      if(!writeSingleChannel(*fbke)) {
        if(packet) {
          // same recovery as above: discard the partial packet
          cur=packet;
          closePacket();
        }
      }
    } else {
      serr->printf("%s: Bad rawcam.encoding setting\n",getName().c_str());
    }
  } catch(...) {
    if(packet) {
      // discard any partially written packet before propagating the exception
      cur=packet;
      closePacket();
    }
    serr->printf("%s: exception generated during image serialization, stopping stream.\n",getName().c_str());
    stop();
    throw;
  }
}

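// Report which resolution layer a given channel will be transmitted from,
// or -1U if that channel is not sent under the current settings.  Higher
// layer indices are higher resolution, hence numLayers-1-skip.  When JPEG
// compression is active and the Y and U/V layers are only one level apart,
// a single interleaved color JPEG at the higher-resolution layer carries
// all three channels, so that layer is reported for both.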
unsigned int RawCam::getSourceLayer(unsigned int chan, unsigned int numLayers) {
  if(config->vision.rawcam.encoding==Config::vision_config::ENCODE_SINGLE_CHANNEL) {
    if(config->vision.rawcam.channel!=(int)chan)
      return -1U;
    return numLayers-1-config->vision.rawcam.y_skip;
  }
  switch(chan) {
  case RawCameraGenerator::CHAN_Y:
    if(config->vision.rawcam.compression==Config::vision_config::RawCamConfig::COMPRESS_JPEG) {
      if(config->vision.rawcam.y_skip-config->vision.rawcam.uv_skip == 1)
        return numLayers-1-config->vision.rawcam.uv_skip;
    }
    return numLayers-1-config->vision.rawcam.y_skip;
  case RawCameraGenerator::CHAN_U:
  case RawCameraGenerator::CHAN_V:
    if(config->vision.rawcam.compression==Config::vision_config::RawCamConfig::COMPRESS_JPEG) {
      if(config->vision.rawcam.uv_skip-config->vision.rawcam.y_skip == 1)
        return numLayers-1-config->vision.rawcam.y_skip;
    }
    return numLayers-1-config->vision.rawcam.uv_skip;
  default:
    return -1U;
  }
}

unsigned int RawCam::getSourceYLayer(unsigned int numLayers) {
  return getSourceLayer(RawCameraGenerator::CHAN_Y,numLayers);
}

unsigned int RawCam::getSourceULayer(unsigned int numLayers) {
  return getSourceLayer(RawCameraGenerator::CHAN_U,numLayers);
}

unsigned int RawCam::getSourceVLayer(unsigned int numLayers) {
  return getSourceLayer(RawCameraGenerator::CHAN_V,numLayers);
}

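// Project the shapes in VRmixin::drawShapes into the camera image so the
// GUI shows them overlaid on the raw frame.  Shapes in world (allocentric)
// or body (egocentric) coordinates are first transformed into the camera
// frame; any point that lands behind the camera (z < 0) aborts drawing for
// the rest of this frame.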
void RawCam::drawShapesIntoBuffer(const FilterBankEvent &fbke) {
  unsigned int layer = fbke.getNumLayers() - 1 - config->vision.rawcam.y_skip;
  unsigned int chanY = RawCameraGenerator::CHAN_Y;
  unsigned int chanU = RawCameraGenerator::CHAN_U;
  unsigned int chanV = RawCameraGenerator::CHAN_V;

  Graphics g(*fbke.getSource(), layer, chanY, chanU, chanV);
  const fmat::Transform baseToCam = kine->baseToLink(CameraFrameOffset);
  const fmat::Transform worldToCam = baseToCam * VRmixin::mapBuilder->worldToLocalMatrix;
  const unsigned int halfWidth = VRmixin::camSkS.getWidth() / 2;
  const unsigned int halfHeight = VRmixin::camSkS.getHeight() / 2;

  for ( std::vector<ShapeRoot>::const_iterator it = VRmixin::drawShapes.begin();
        it != VRmixin::drawShapes.end(); ++it ) {
    if ( ! it->isValid() ) continue;
    g.setColor((*it)->getColor());

    switch ( (*it)->getType() ) {

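    // Points: project the centroid and draw a small plus sign (the pixel
    // plus its four neighbors) so it remains visible at camera resolution.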
    case pointDataType: {
      const Shape<PointData> &aPoint = ShapeRootTypeConst(*it,PointData);
      Point center(aPoint->getCentroid());
      if ( (*it)->getRefFrameType() == allocentric )
        center.applyTransform(worldToCam);
      else if ( (*it)->getRefFrameType() == egocentric )
        center.applyTransform(baseToCam);
      if ( center.coordZ() < 0 )
        return;
      float camXnorm, camYnorm;
      config->vision.computePixelCorrected(center.coordX(), center.coordY(), center.coordZ(), camXnorm, camYnorm);
      int px = int((camXnorm + 1) * halfWidth);
      int py = int((camYnorm + 1) * halfHeight);
      g.drawPoint(px, py);
      g.drawPoint(px+1, py);
      g.drawPoint(px, py+1);
      g.drawPoint(px-1, py);
      g.drawPoint(px, py-1);
      break;
    }

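    // Lines: project both endpoints and draw the segment three times with
    // one-pixel offsets to thicken it.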
    case lineDataType: {
      const Shape<LineData> &aLine = ShapeRootTypeConst(*it,LineData);
      Point endPt1(aLine->end1Pt());
      Point endPt2(aLine->end2Pt());
      if ( aLine->getRefFrameType() == allocentric ) {
        endPt1.applyTransform(worldToCam);
        endPt2.applyTransform(worldToCam);
      } else if ( aLine->getRefFrameType() == egocentric ) {
        endPt1.applyTransform(baseToCam);
        endPt2.applyTransform(baseToCam);
      }
      if ( endPt1.coordZ() < 0 || endPt2.coordZ() < 0 )
        return;
      float camXnorm, camYnorm, camXnorm2, camYnorm2;
      config->vision.computePixelCorrected(endPt1.coordX(), endPt1.coordY(), endPt1.coordZ(), camXnorm, camYnorm);
      config->vision.computePixelCorrected(endPt2.coordX(), endPt2.coordY(), endPt2.coordZ(), camXnorm2, camYnorm2);
      int px = int((camXnorm + 1) * halfWidth);
      int py = int((camYnorm + 1) * halfHeight);
      int qx = int((camXnorm2 + 1) * halfWidth);
      int qy = int((camYnorm2 + 1) * halfHeight);
      g.drawLine(px, py, qx, qy);
      g.drawLine(px+1, py, qx+1, qy);
      g.drawLine(px, py+1, qx, qy+1);
      break;
    }

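    // Polygons: project every vertex, then connect consecutive vertices
    // with thickened segments; the last vertex is not automatically
    // reconnected to the first.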
    case polygonDataType: {
      const Shape<PolygonData> &aPolygon = ShapeRootTypeConst(*it,PolygonData);
      std::vector<Point> vertices = aPolygon->getVertices();
      unsigned int numVertices = vertices.size();
      for (unsigned int i = 0; i < numVertices; i++) {
        if ( vertices[i].getRefFrameType() == allocentric )
          vertices[i].applyTransform(worldToCam);
        else if ( vertices[i].getRefFrameType() == egocentric )
          vertices[i].applyTransform(baseToCam);
        if ( vertices[i].coordZ() < 0 )
          return;
      }
      int px[numVertices], py[numVertices];
      for (unsigned int i = 0; i < numVertices; i++) {
        float camXnorm, camYnorm;
        config->vision.computePixelCorrected(vertices[i].coordX(), vertices[i].coordY(), vertices[i].coordZ(), camXnorm, camYnorm);
        px[i] = int((camXnorm + 1) * halfWidth);
        py[i] = int((camYnorm + 1) * halfHeight);
      }
      for (unsigned int i = 0; i < numVertices-1; i++) {
        g.drawLine(px[i], py[i], px[i+1], py[i+1]);
        g.drawLine(px[i]+1, py[i], px[i+1]+1, py[i+1]);
        g.drawLine(px[i], py[i]+1, px[i+1], py[i+1]+1);
      }
      break;
    }

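    // Ellipses: rather than projecting the full outline, the center and the
    // endpoints of the two axes are projected individually, and the ellipse
    // is approximated by four quarter-ellipse arcs whose radii are the
    // projected half-axis lengths.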
    case ellipseDataType: {
      const Shape<EllipseData> &aEllipse = ShapeRootTypeConst(*it,EllipseData);
      float semiMaj = aEllipse->getSemimajor();
      float semiMin = aEllipse->getSemiminor();
      AngPi theta = aEllipse->getOrientation();
      float cosT = cos(theta);
      float sinT = sin(theta);

      Point centerPt = aEllipse->centerPt();
      Point semimajorPt1 = centerPt + Point(semiMaj*cosT, semiMaj*sinT);
      Point semimajorPt2 = centerPt - Point(semiMaj*cosT, semiMaj*sinT);
      Point semiminorPt1 = centerPt + Point(semiMin*sinT, semiMin*cosT);
      Point semiminorPt2 = centerPt - Point(semiMin*sinT, semiMin*cosT);

      if ( aEllipse->getRefFrameType() == allocentric ) {
        centerPt.applyTransform(worldToCam);
        semimajorPt1.applyTransform(worldToCam);
        semimajorPt2.applyTransform(worldToCam);
        semiminorPt1.applyTransform(worldToCam);
        semiminorPt2.applyTransform(worldToCam);
      } else if ( aEllipse->getRefFrameType() == egocentric ) {
        centerPt.applyTransform(baseToCam);
        semimajorPt1.applyTransform(baseToCam);
        semimajorPt2.applyTransform(baseToCam);
        semiminorPt1.applyTransform(baseToCam);
        semiminorPt2.applyTransform(baseToCam);
      }

      if ( semimajorPt1.coordZ() < 0 || semimajorPt2.coordZ() < 0 ||
           semiminorPt1.coordZ() < 0 || semiminorPt2.coordZ() < 0 )
        return;

      float camXnorm, camYnorm, major1Xnorm, major1Ynorm, major2Xnorm, major2Ynorm, minor1Xnorm, minor1Ynorm, minor2Xnorm, minor2Ynorm;
      config->vision.computePixelCorrected(centerPt.coordX(), centerPt.coordY(), centerPt.coordZ(), camXnorm, camYnorm);
      config->vision.computePixelCorrected(semimajorPt1.coordX(), semimajorPt1.coordY(), semimajorPt1.coordZ(), major1Xnorm, major1Ynorm);
      config->vision.computePixelCorrected(semimajorPt2.coordX(), semimajorPt2.coordY(), semimajorPt2.coordZ(), major2Xnorm, major2Ynorm);
      config->vision.computePixelCorrected(semiminorPt1.coordX(), semiminorPt1.coordY(), semiminorPt1.coordZ(), minor1Xnorm, minor1Ynorm);
      config->vision.computePixelCorrected(semiminorPt2.coordX(), semiminorPt2.coordY(), semiminorPt2.coordZ(), minor2Xnorm, minor2Ynorm);

      float px = (camXnorm + 1) * halfWidth;
      float py = (camYnorm + 1) * halfHeight;
      float major1x = (major1Xnorm + 1) * halfWidth;
      float major2x = (major2Xnorm + 1) * halfWidth;
      float major1y = (major1Ynorm + 1) * halfHeight;
      float major2y = (major2Ynorm + 1) * halfHeight;
      float minor1x = (minor1Xnorm + 1) * halfWidth;
      float minor2x = (minor2Xnorm + 1) * halfWidth;
      float minor1y = (minor1Ynorm + 1) * halfHeight;
      float minor2y = (minor2Ynorm + 1) * halfHeight;
      float major1Len = sqrt((px-major1x)*(px-major1x) + (py-major1y)*(py-major1y));
      float major2Len = sqrt((px-major2x)*(px-major2x) + (py-major2y)*(py-major2y));
      float minor1Len = sqrt((px-minor1x)*(px-minor1x) + (py-minor1y)*(py-minor1y));
      float minor2Len = sqrt((px-minor2x)*(px-minor2x) + (py-minor2y)*(py-minor2y));
      AngPi newOrient = atan2(major1y-py, major1x-px);
      int pxi = round(px);
      int pyi = round(py);

      // one quarter arc for each combination of projected half-axis lengths
      g.drawQuarterEllipse(pxi, pyi, -major1Len, -minor1Len, newOrient);
      g.drawQuarterEllipse(pxi, pyi, major2Len, minor2Len, newOrient);
      g.drawQuarterEllipse(pxi, pyi, major2Len, -minor1Len, newOrient);
      g.drawQuarterEllipse(pxi, pyi, -major1Len, minor2Len, newOrient);
      break;
    }

    default:
      std::cout << "Don't know how to draw " << *it << " into camera buffer." << std::endl;
    }
  }
}

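// Shut down the streaming server: tell the GUI the stream is closing,
// stop the listen daemon, and close the socket.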
void RawCam::closeServer() {
  if(wireless->isConnected(visRaw->sock))
    sendCloseConnectionPacket();
  Controller::closeGUI("RawVisionGUI");
  // stop the daemon (auto re-listen) behavior before closing the socket
  wireless->setDaemon(visRaw,false);
  wireless->close(visRaw->sock);
}

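// Create the server socket on the configured port using the configured
// transport (0 = UDP, 1 = TCP) and ask the Controller to launch the
// RawVisionGUI on the desktop side.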
void RawCam::setupServer() {
  std::vector<std::string> args;
  args.push_back("raw");
  char port[50];
  snprintf(port,50,"%d",*config->vision.rawcam.port);
  args.push_back(port);
  if(config->vision.rawcam.transport==0) {
    max_buf=UDP_WIRELESS_BUFFER_SIZE;
    visRaw=wireless->socket(Socket::SOCK_DGRAM, 1024, max_buf);
    args.push_back("udp");
  } else if(config->vision.rawcam.transport==1) {
    max_buf=TCP_WIRELESS_BUFFER_SIZE;
    visRaw=wireless->socket(Socket::SOCK_STREAM, 1024, max_buf);
    args.push_back("tcp");
  } else {
    serr->printf("ERROR: Invalid Config::vision.rawcam.transport: %d\n",*config->vision.rawcam.transport);
    return;
  }
  wireless->setDaemon(visRaw,true);
  wireless->setReceiver(visRaw,networkCallback);
  wireless->listen(visRaw,config->vision.rawcam.port);

  Controller::loadGUI("org.tekkotsu.mon.VisionGUI","RawVisionGUI",*config->vision.rawcam.port,args);
}

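// Begin a new image packet in the socket's write buffer and write the
// "TekkotsuImage" header (encoding, compression, image dimensions,
// timestamp, frame number).  Returns true only if this call opened the
// packet; returns false if a packet is already open (in which case callers
// simply append to it) or if the header could not be written (in which case
// 'cur' is left NULL, which callers test before writing image data).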
bool RawCam::openPacket(FilterBankGenerator& fbkgen, unsigned int time, unsigned int layer) {
  if(packet!=NULL)
    return false;

  avail=max_buf-1;
  ASSERT(cur==NULL,"cur non-NULL");
  cur=NULL;
  char * buf=packet=(char*)visRaw->getWriteBuffer(avail);
  ASSERT(packet!=NULL,"dropped frame, network bandwidth is saturated (reduce frame rate or size)");
  if(packet==NULL)
    return false;

  if(!LoadSave::encodeInc("TekkotsuImage",buf,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;
  if(!LoadSave::encodeInc(*config->vision.rawcam.encoding,buf,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;
  if(!LoadSave::encodeInc(*config->vision.rawcam.compression,buf,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;
  if(!LoadSave::encodeInc(fbkgen.getWidth(layer),buf,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;
  if(!LoadSave::encodeInc(fbkgen.getHeight(layer),buf,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;
  if(!LoadSave::encodeInc(time,buf,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;
  if(!LoadSave::encodeInc(fbkgen.getFrameNumber(),buf,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;

  cur=buf;
  return true;
}

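// Serialize a full-color frame.  When rawcam.channel is -1 a placeholder
// "blank" Y channel is sent along with real U and V data; otherwise the Y
// and U/V images are packed into the packet, either as one interleaved
// image or as separate per-channel images depending on the layer spacing
// and compression.  Returning true does not necessarily mean data was sent;
// it may just mean this event was not the one this configuration forwards.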
bool RawCam::writeColor(const FilterBankEvent& e) {
  FilterBankGenerator& fbkgen=*e.getSource();

  unsigned int y_layer=fbkgen.getNumLayers()-1-config->vision.rawcam.y_skip;
  unsigned int uv_layer=fbkgen.getNumLayers()-1-config->vision.rawcam.uv_skip;

  if(config->vision.rawcam.channel==-1) {
    if((e.getGeneratorID()==EventBase::visRawCameraEGID && config->vision.rawcam.compression==Config::vision_config::RawCamConfig::COMPRESS_NONE)
       || (e.getGeneratorID()==EventBase::visJPEGEGID && config->vision.rawcam.compression==Config::vision_config::RawCamConfig::COMPRESS_JPEG)) {
      if(const JPEGGenerator* jgen=dynamic_cast<const JPEGGenerator*>(&fbkgen))
        if(jgen->getCurrentSourceFormat()==JPEGGenerator::SRC_COLOR)
          return true;
      openPacket(fbkgen,e.getTimeStamp(),uv_layer);
      if(cur==NULL)
        return false;

      if(!LoadSave::encodeInc("FbkImage",cur,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;
      if(!LoadSave::encodeInc(0,cur,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;
      if(!LoadSave::encodeInc(0,cur,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;
      if(!LoadSave::encodeInc(-1,cur,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;
      if(!LoadSave::encodeInc(RawCameraGenerator::CHAN_Y,cur,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;
      if(!LoadSave::encodeInc("blank",cur,avail,"ran out of space %s:%u\n",__FILE__,__LINE__)) return false;

      fbkgen.selectSaveImage(uv_layer,RawCameraGenerator::CHAN_U);
      if(!LoadSave::checkInc(fbkgen.saveBuffer(cur,avail),cur,avail,"image size too large -- may need to set Config::vision.rawcam.transport to TCP and reopen raw cam")) return false;

      fbkgen.selectSaveImage(uv_layer,RawCameraGenerator::CHAN_V);
      if(!LoadSave::checkInc(fbkgen.saveBuffer(cur,avail),cur,avail,"image size too large -- may need to set Config::vision.rawcam.transport to TCP and reopen raw cam")) return false;

      closePacket();
    }
    return true;
  }

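  // Normal color path: pick the layer each channel comes from.  If the Y
  // and U/V layers are within one level of each other, a single interleaved
  // image at the larger layer can cover all three channels; otherwise the Y
  // image and the U/V pair are written separately (and, with JPEG
  // compression, may arrive in different events, so the packet is only
  // closed once both halves have been appended).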
  unsigned int big_layer=y_layer;
  unsigned int small_layer=uv_layer;
  if(y_layer<uv_layer) {
    big_layer=uv_layer;
    small_layer=y_layer;
  }
  if(const JPEGGenerator* jgen=dynamic_cast<const JPEGGenerator*>(&fbkgen)) {
    if(config->vision.rawcam.compression!=Config::vision_config::RawCamConfig::COMPRESS_JPEG)
      return true;
    if(jgen->getCurrentSourceFormat()==JPEGGenerator::SRC_COLOR && big_layer-small_layer<2) {
      openPacket(fbkgen,e.getTimeStamp(),big_layer);
      if(cur==NULL)
        return false;

      fbkgen.selectSaveImage(big_layer,0);
      if(!LoadSave::checkInc(fbkgen.saveBuffer(cur,avail),cur,avail,"image size too large -- may need to set Config::vision.rawcam.transport to TCP and reopen raw cam")) return false;

      closePacket();
    } else if(jgen->getCurrentSourceFormat()==JPEGGenerator::SRC_GRAYSCALE && big_layer-small_layer>=2) {
      bool opened=openPacket(fbkgen,e.getTimeStamp(),big_layer);
      if(cur==NULL)
        return false;

      if(big_layer==y_layer) {
        fbkgen.selectSaveImage(y_layer,RawCameraGenerator::CHAN_Y);
        if(!LoadSave::checkInc(fbkgen.saveBuffer(cur,avail),cur,avail,"image size too large -- may need to set Config::vision.rawcam.transport to TCP and reopen raw cam")) return false;
      } else {
        fbkgen.selectSaveImage(uv_layer,RawCameraGenerator::CHAN_U);
        if(!LoadSave::checkInc(fbkgen.saveBuffer(cur,avail),cur,avail,"image size too large -- may need to set Config::vision.rawcam.transport to TCP and reopen raw cam")) return false;
        fbkgen.selectSaveImage(uv_layer,RawCameraGenerator::CHAN_V);
        if(!LoadSave::checkInc(fbkgen.saveBuffer(cur,avail),cur,avail,"image size too large -- may need to set Config::vision.rawcam.transport to TCP and reopen raw cam")) return false;
      }

      if(!opened)
        closePacket();
    }
  } else {
    bool opened=false;

    if(config->vision.rawcam.compression==Config::vision_config::RawCamConfig::COMPRESS_NONE || (big_layer-small_layer>=2 && big_layer==uv_layer)) {
      opened=openPacket(fbkgen,e.getTimeStamp(),big_layer);
      if(cur==NULL)
        return false;
      fbkgen.selectSaveImage(y_layer,RawCameraGenerator::CHAN_Y);
      if(!LoadSave::checkInc(fbkgen.saveBuffer(cur,avail),cur,avail,"image size too large -- may need to set Config::vision.rawcam.transport to TCP and reopen raw cam")) return false;
    }

    if(config->vision.rawcam.compression==Config::vision_config::RawCamConfig::COMPRESS_NONE || (big_layer-small_layer>=2 && big_layer==y_layer)) {
      opened=openPacket(fbkgen,e.getTimeStamp(),big_layer);
      if(cur==NULL)
        return false;
      fbkgen.selectSaveImage(uv_layer,RawCameraGenerator::CHAN_U);
      if(!LoadSave::checkInc(fbkgen.saveBuffer(cur,avail),cur,avail,"image size too large -- may need to set Config::vision.rawcam.transport to TCP and reopen raw cam")) return false;
      fbkgen.selectSaveImage(uv_layer,RawCameraGenerator::CHAN_V);
      if(!LoadSave::checkInc(fbkgen.saveBuffer(cur,avail),cur,avail,"image size too large -- may need to set Config::vision.rawcam.transport to TCP and reopen raw cam")) return false;
    }

    if(config->vision.rawcam.compression==Config::vision_config::RawCamConfig::COMPRESS_NONE || !opened)
      closePacket();
  }

  return true;
}

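// Serialize a single-channel frame (the channel selected by
// config->vision.rawcam.channel), either uncompressed from the raw camera
// generator or as a grayscale JPEG, depending on the compression setting.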
bool RawCam::writeSingleChannel(const FilterBankEvent& e) {
  FilterBankGenerator& fbkgen=*e.getSource();
  if( (config->vision.rawcam.compression==Config::vision_config::RawCamConfig::COMPRESS_NONE && e.getGeneratorID()==EventBase::visRawCameraEGID)
      || (config->vision.rawcam.compression==Config::vision_config::RawCamConfig::COMPRESS_JPEG && e.getGeneratorID()==EventBase::visJPEGEGID) )
  {
    if(const JPEGGenerator * jgen=dynamic_cast<const JPEGGenerator*>(&fbkgen))
      if(jgen->getCurrentSourceFormat()!=JPEGGenerator::SRC_GRAYSCALE)
        return true;
    unsigned int layer=fbkgen.getNumLayers()-1-config->vision.rawcam.y_skip;

    openPacket(fbkgen,e.getTimeStamp(),layer);
    if(cur==NULL)
      return false;

    fbkgen.selectSaveImage(layer,config->vision.rawcam.channel);
    if(!LoadSave::checkInc(fbkgen.saveBuffer(cur,avail),cur,avail,"image size too large -- may need to set Config::vision.rawcam.transport to TCP and reopen raw cam")) return false;

    closePacket();
  }
  return true;
}

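// Flush the current packet: send everything between 'packet' and 'cur'
// (a zero-length write if cur was rewound to packet), reset the buffer
// state, and record the send time used for interval throttling.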
void RawCam::closePacket() {
  if(packet==NULL)
    return;
  visRaw->write(cur-packet);
  packet=cur=NULL;
  avail=0;
  lastProcessedTime = get_time();
}

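// Notify the GUI that the connection is closing cleanly so it can stop
// waiting for further frames.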
bool RawCam::sendCloseConnectionPacket() {
  char msg[]="CloseConnection";
  unsigned int len=strlen(msg)+LoadSave::stringpad;
  char * buf = (char*)visRaw->getWriteBuffer(len);
  if(buf==NULL) {
    std::cerr << "Could not get buffer for closing packet" << std::endl;
    return false;
  }
  unsigned int used=LoadSave::encode(msg,buf,len);
  if(used==0)
    std::cerr << "Could not write close packet" << std::endl;
  visRaw->write(used);
  return true;
}

#endif