00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00052 #include <QVApplication>
00053 #include <QVDefaultGUI>
00054 #include <QVVideoReaderBlock>
00055 #include <QVImageCanvas>
00056 #include <QVNumericPlot>
00057 #include <QVProcessingBlock>
00058 #include <QVImage>
00059 #include <QVPolyline>
00060 #include <QVPolylineF>
00061 #include <QVMatrix>
00062
00063 #include <qvipp.h>
00064 #include <qvip.h>
00065 #include <qvprojective.h>
00066
00067 #include <GL/glut.h>
00068 #include <QVCameraPose>
00069 #include <QV3DModel>
00070
00071 #include <qv3dobjects.h>
00072
00073
00074 QList<QVPolyline> detectContours(const QVImage<uChar,1> &imageIn)
00075 {
00076 uChar otsu_th;
00077 ComputeThreshold_Otsu(imageIn,otsu_th);
00078
00079 return getConnectedSetBorderContoursThreshold(imageIn,otsu_th);
00080 }
00081
00082
00083 QVPolyline selectBestContour(const QList<QVPolyline> &contours, const int templateCount, const int cols, const int rows, const int minCount = 30, const double maxError = 5.0)
00084 {
00085 double bestError = 1e15;
00086 QVPolyline bestContour;
00087 foreach(QVPolyline contour, contours)
00088 {
00089
00090 if (contour.count() < minCount)
00091 continue;
00092
00093
00094
00095 foreach(QPoint point, contour)
00096 if(point.x()==0 or point.x()==cols-1 or point.y()==0 or point.y()==rows-1)
00097 continue;
00098
00099
00100 double contourError;
00101 QVPolyline contourReduced;
00102 IterativePointElimination(contour, contourReduced, maxError, false, true, &contourError);
00103
00104 if (contourReduced.count() != templateCount)
00105 continue;
00106
00107
00108 if (contourError >= bestError)
00109 continue;
00110
00111 bestContour = contour;
00112 bestError = contourError;
00113 }
00114 return bestContour;
00115 }
00116
00117
00118 QVMatrix matchContourWithTemplate(const QVPolyline &contour, const QVPolylineF &templateL, const double maxError = 0.1)
00119 {
00120
00121 QVPolyline contourReduced;
00122 IterativePointElimination(contour,contourReduced,templateL.length(),true,true);
00123
00124
00125
00126 if(contourReduced.length() != templateL.length())
00127 return QVMatrix();
00128
00129
00130 double bestError = 1e15;
00131 QVMatrix H;
00132 for(int j=0;j<templateL.length();j++)
00133 {
00134 QList< QPair<QPointF, QPointF> > matchings;
00135 for(int i=0;i<templateL.length();i++)
00136 matchings << QPointFMatching(contourReduced[(i+j)%templateL.length()], templateL[i]);
00137
00138 const QVMatrix temptativeH = computeProjectiveHomography(matchings);
00139
00140 double err = 0;
00141 for(int i=0;i<templateL.length();i++)
00142 err += norm2(applyHomography(temptativeH,contourReduced[(i+j)%templateL.length()]) - templateL[i]);
00143
00144 err /= double(templateL.count());
00145
00146 if(err < bestError)
00147 {
00148 bestError = err;
00149 H = temptativeH;
00150 }
00151 }
00152
00153
00154 if (bestError > maxError)
00155 return QVMatrix();
00156 else
00157 return H;
00158 }
00159
00160
// Processing block that locates the planar marker template in each input
// frame, estimates the camera focal length and pose from the resulting
// planar homography, and publishes both as output properties for the canvas.
class ARProcessingBlock : public QVProcessingBlock
{
	private:
		// Marker template polygon (an L-shaped hexagon), shifted in the
		// constructor so it is centered at the origin.
		QVPolylineF templateL;
		// Per-frame focal estimates (capped at 200 samples); their median is
		// used as a robust focal value.
		QList<double> focals;

	public:

		ARProcessingBlock(QString name): QVProcessingBlock(name)
		{
			// Input frame; declared with outputFlag too so it can be re-linked
			// to the display canvas.
			addProperty<QVImage<uChar,3> >("imageIn",inputFlag|outputFlag);

			// Outputs consumed by the 3D canvas.
			addProperty<QVMatrix>("Camera calibration matrix", outputFlag);
			addProperty<QVCameraPose>("Camera pose", outputFlag);

			// Build the L-shaped template: a unit square with its upper-right
			// quarter removed (6 vertices).
			templateL << QPointF(0.0,0.0)
				<< QPointF(1.0,0.0)
				<< QPointF(1.0,0.5)
				<< QPointF(0.5,0.5)
				<< QPointF(0.5,1.0)
				<< QPointF(0.0,1.0);

			// Center the template at the origin.
			for(int i = 0; i < templateL.count(); i++)
				templateL[i] = templateL[i] - QPointF(0.5, 0.5);
		};

		// Publish empty calibration/pose values, signalling the canvas that no
		// marker was found in the current frame.
		void hidePose()
		{
			setPropertyValue<QVMatrix>("Camera calibration matrix", QVMatrix());
			setPropertyValue<QVCameraPose>("Camera pose", QVCameraPose());
		}

		// Per-frame pipeline: detect contours, pick the marker candidate, match
		// it to the template, then estimate focal length and camera pose.
		void iterate()
		{
			// Read the input frame. NOTE(review): the RGB property is assigned
			// to a 1-channel QVImage, relying on an implicit gray-scale
			// conversion by QVImage — confirm against the QVision docs.
			const QVImage<uChar,1> imageIn = getPropertyValue<QVImage<uChar,3> >("imageIn");
			const int rows = imageIn.getRows(), cols = imageIn.getCols();
			timeFlag("Read input data");

			// Contours of the thresholded image.
			const QList<QVPolyline> contours = detectContours(imageIn);
			timeFlag("Detect contours");

			// Candidate contour for the marker.
			const QVPolyline bestContour = selectBestContour(contours, templateL.count(), cols, rows);
			timeFlag("Detect best contour");

			// Homography mapping the template onto the detected contour.
			const QVMatrix H = matchContourWithTemplate(bestContour, templateL, 0.05);

			// An empty matrix means no acceptable match: hide the 3D overlay.
			if (H == QVMatrix())
			{
				hidePose();
				return;
			}
			timeFlag("Find planar homography");

			// Focal length implied by the homography for this frame.
			const double actualEstimatedFocal = computeCameraFocalFromPlanarHomography(H, cols, rows, true);

			// Accumulate up to 200 valid samples; once full, the estimate is
			// frozen. NOTE(review): unqualified 'isnan' relies on a transitively
			// included <math.h>/<cmath> — confirm it resolves on all platforms.
			if ( focals.count() < 200 and not isnan(actualEstimatedFocal) )
				focals << actualEstimatedFocal;

			// No valid focal sample yet: cannot compute a pose.
			if (focals.count() == 0)
			{
				hidePose();
				return;
			}

			// Median of the accumulated samples, robust to per-frame outliers.
			const double focal = QVVector(focals).median();
			timeFlag("Estimate focal");

			// Intrinsic calibration (principal point at the image center) and
			// camera pose decomposed from the calibrated homography.
			const QVMatrix K = QVMatrix::cameraCalibrationMatrix(focal, 4.0 * rows / (3.0 * cols), cols/2.0, rows/2.0);
			const QVCameraPose cameraPose = getCameraPoseFromCalibratedHomography(K,H);
			timeFlag("Estimate camera pose");

			// Publish the results for the linked canvas.
			setPropertyValue<QVMatrix>("Camera calibration matrix", K);
			setPropertyValue<QVCameraPose>("Camera pose", cameraPose);
		}

};
00259
00260
00261 class ARCanvas : public QVImageCanvas
00262 {
00263 public:
00264 ARCanvas(QString name) : QVImageCanvas(name) { };
00265
00266 void custom_viewer_3D()
00267 {
00268 glEnable(GL_LIGHTING);
00269 glEnable(GL_LIGHT0);
00270 glEnable(GL_COLOR_MATERIAL);
00271 glEnable(GL_DEPTH_TEST);
00272 glClear(GL_DEPTH_BUFFER_BIT);
00273
00274
00275 glColor3ub(128,196,255);
00276
00277 glRotatef(90.0, 1.0, 0.0, 0.0);
00278 glTranslatef(0.0,0.20,0.0);
00279
00280 glutSolidTeapot(0.3);
00281 };
00282 };
00283
00284 int main(int argc,char **argv)
00285 {
00286
00287 QVApplication app(argc,argv);
00288 QVDefaultGUI gui;
00289
00290
00291 QVVideoReaderBlock camera("Camera");
00292
00293
00294 ARProcessingBlock processingBlock("My processing block");
00295 camera.linkProperty(&processingBlock,"imageIn");
00296
00297
00298 ARCanvas imageCanvas("augmented objects");
00299
00300 QV3DCoordinateCenter axis(1.0);
00301 imageCanvas.add3DModel(axis);
00302
00303 processingBlock.linkProperty("imageIn",&imageCanvas);
00304
00305
00306
00307 processingBlock.linkProperty("Camera calibration matrix", &imageCanvas);
00308 processingBlock.linkProperty("Camera pose", &imageCanvas);
00309
00310
00311 glutInit(&argc,argv);
00312
00313
00314 return app.exec();
00315 }
00316