[Artoolkit] kpmMatching & Tracking of nftSimple
1. kpmMatching thread
main() -->
loadNFTData() -->
trackingInitInit() -->
In static void *trackingInitMain( THREAD_HANDLE_T *threadHandle )
for(;;)
{
if( threadStartWait(threadHandle) < ) break; kpmMatching(kpmHandle, imagePtr);
trackingInitHandle->flag = ;
for( i = ; i < kpmResultNum; i++ )
{
if( kpmResult[i].camPoseF != ) continue;
ARLOGd("kpmGetPose OK.\n"); if( trackingInitHandle->flag == || err > kpmResult[i].error )
{ // Take the first or best result.
trackingInitHandle->flag = ;
trackingInitHandle->page = kpmResult[i].pageNo;
for (j = ; j < ; j++) for (k = ; k < ; k++) trackingInitHandle->trans[j][k] = kpmResult[i].camPose[j][k];
err = kpmResult[i].error;
}
} threadEndSignal(threadHandle); // --> Next
}
pthread_cond_signal 函数的作用是发送一个信号给另外一个正在处于阻塞等待状态的线程,使其脱离阻塞状态,继续执行。如果没有线程处在阻塞等待状态,pthread_cond_signal也会成功返回。
/*
 * Mark the worker attached to 'flag' as finished and wake the client.
 *
 * Called by a worker thread after it completes one unit of work. Under the
 * handle's mutex it sets endF = 1 ("worker completed") and clears busyF,
 * then signals cond2, on which the client blocks in threadEndWait().
 * pthread_cond_signal succeeds even when no thread is currently waiting.
 *
 * Returns 0 always.
 */
int threadEndSignal( THREAD_HANDLE_T *flag )
{
    pthread_mutex_lock(&(flag->mut));
    flag->endF  = 1;   // Worker completed (see struct _THREAD_HANDLE_T).
    flag->busyF = 0;   // Worker no longer busy.
    pthread_cond_signal(&(flag->cond2));
    pthread_mutex_unlock(&(flag->mut));
    return 0;
}
精华全在这里,地位相当于Android的NDK: "./ARToolKit5-bin-5.3.2r1-Linux-x86_64/lib/SRC/KPM/"
unsw@unsw-UX303UB$ pwd
/home/unsw/Android/ARToolKit5-bin-5.3.2r1-Linux-x86_64/lib/SRC/KPM
unsw@unsw-UX303UB$
unsw@unsw-UX303UB$
unsw@unsw-UX303UB$ _cmd-wc-cpp
./FreakMatcher/framework/logger.cpp
./FreakMatcher/framework/image.cpp
./FreakMatcher/framework/timers.cpp
./FreakMatcher/framework/date_time.cpp
./FreakMatcher/unsupported/test/polynomialsolver.cpp
./FreakMatcher/unsupported/test/alignedvector3.cpp
./FreakMatcher/unsupported/test/BVH.cpp
./FreakMatcher/unsupported/test/matrix_exponential.cpp
./FreakMatcher/unsupported/test/sparse_llt.cpp
./FreakMatcher/unsupported/test/matrix_function.cpp
./FreakMatcher/unsupported/test/sparse_ldlt.cpp
./FreakMatcher/unsupported/test/FFTW.cpp
./FreakMatcher/unsupported/test/NonLinearOptimization.cpp
./FreakMatcher/unsupported/test/NumericalDiff.cpp
./FreakMatcher/unsupported/test/mpreal/dlmalloc.c
./FreakMatcher/unsupported/test/mpreal/mpreal.cpp
./FreakMatcher/unsupported/test/polynomialutils.cpp
./FreakMatcher/unsupported/test/sparse_extra.cpp
./FreakMatcher/unsupported/test/openglsupport.cpp
./FreakMatcher/unsupported/test/FFT.cpp
./FreakMatcher/unsupported/test/autodiff.cpp
./FreakMatcher/unsupported/test/sparse_lu.cpp
./FreakMatcher/unsupported/test/forward_adolc.cpp
./FreakMatcher/unsupported/test/mpreal_support.cpp
./FreakMatcher/unsupported/doc/examples/BVH_Example.cpp
./FreakMatcher/unsupported/doc/examples/PolynomialSolver1.cpp
./FreakMatcher/unsupported/doc/examples/MatrixSine.cpp
./FreakMatcher/unsupported/doc/examples/MatrixExponential.cpp
./FreakMatcher/unsupported/doc/examples/FFT.cpp
./FreakMatcher/unsupported/doc/examples/MatrixFunction.cpp
./FreakMatcher/unsupported/doc/examples/MatrixSinh.cpp
./FreakMatcher/unsupported/doc/examples/PolynomialUtils1.cpp
./FreakMatcher/matchers/freak.cpp
./FreakMatcher/matchers/hough_similarity_voting.cpp
./FreakMatcher/matchers/feature_store_io.cpp
./FreakMatcher/matchers/visual_database_io.cpp
./FreakMatcher/detectors/gradients.cpp
./FreakMatcher/detectors/pyramid.cpp
./FreakMatcher/detectors/DoG_scale_invariant_detector.cpp
./FreakMatcher/detectors/orientation_assignment.cpp
./FreakMatcher/detectors/harris.cpp
./FreakMatcher/detectors/gaussian_scale_space_pyramid.cpp
./FreakMatcher/math/math_io.cpp
./FreakMatcher/facade/visual_database_facade.cpp
./kpmFopen.c
./kpmRefDataSet.cpp
./kpmHandle.cpp
./kpmResult.cpp
./kpmUtil.cpp
./kpmMatching.cpp
total
18173 total
kpmMatching.cpp
struct _KpmHandle {
#if !BINARY_FEATURE
SurfSubHandleT *surfHandle;
void *ann2;
#else
vision::VisualDatabaseFacade *freakMatcher;
#endif ARParamLT *cparamLT;
int poseMode;
int xsize, ysize;
AR_PIXEL_FORMAT pixFormat;
KPM_PROC_MODE procMode;
int detectedMaxFeature;
#if !BINARY_FEATURE
int surfThreadNum;
#endif KpmRefDataSet refDataSet;
KpmInputDataSet inDataSet;
#if !BINARY_FEATURE
KpmMatchResult preRANSAC;
KpmMatchResult aftRANSAC;
#endif #if !BINARY_FEATURE
KpmSkipRegionSet skipRegion;
#endif KpmResult *result;
int resultNum;
int pageIDs[DB_IMAGE_MAX];
};
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1 int kpmMatching( KpmHandle *kpmHandle, ARUint8 *inImage )
{
int xsize, ysize;
int xsize2, ysize2;
int procMode;
ARUint8 *inImageBW;
int i;
#if !BINARY_FEATURE
FeatureVector featureVector;
int *inlierIndex;
CorspMap preRANSAC;
int inlierNum;
CAnnMatch2 *ann2;
int *annMatch2;
int knn;
float h[][];
int j;
#endif
int ret; if (!kpmHandle || !inImage) {
ARLOGe("kpmMatching(): NULL kpmHandle/inImage.\n");
return -;
} xsize = kpmHandle->xsize;
ysize = kpmHandle->ysize;
procMode = kpmHandle->procMode; if (procMode == KpmProcFullSize && (kpmHandle->pixFormat == AR_PIXEL_FORMAT_MONO || kpmHandle->pixFormat == AR_PIXEL_FORMAT_420v || kpmHandle->pixFormat == AR_PIXEL_FORMAT_420f || kpmHandle->pixFormat == AR_PIXEL_FORMAT_NV21)) {
inImageBW = inImage;
} else {
inImageBW = kpmUtilGenBWImage( inImage, kpmHandle->pixFormat, xsize, ysize, procMode, &xsize2, &ysize2 );
if( inImageBW == NULL ) return -;
} #if BINARY_FEATURE
kpmHandle->freakMatcher->query(inImageBW, xsize ,ysize);
kpmHandle->inDataSet.num = (int)kpmHandle->freakMatcher->getQueryFeaturePoints().size();
#else
surfSubExtractFeaturePoint( kpmHandle->surfHandle, inImageBW, kpmHandle->skipRegion.region, kpmHandle->skipRegion.regionNum );
kpmHandle->skipRegion.regionNum = ;
kpmHandle->inDataSet.num = featureVector.num = surfSubGetFeaturePointNum( kpmHandle->surfHandle );
#endif if( kpmHandle->inDataSet.num != ) {
if( kpmHandle->inDataSet.coord != NULL ) free(kpmHandle->inDataSet.coord);
#if !BINARY_FEATURE
if( kpmHandle->preRANSAC.match != NULL ) free(kpmHandle->preRANSAC.match);
if( kpmHandle->aftRANSAC.match != NULL ) free(kpmHandle->aftRANSAC.match);
#endif
arMalloc( kpmHandle->inDataSet.coord, KpmCoord2D, kpmHandle->inDataSet.num );
#if !BINARY_FEATURE
arMalloc( kpmHandle->preRANSAC.match, KpmMatchData, kpmHandle->inDataSet.num );
arMalloc( kpmHandle->aftRANSAC.match, KpmMatchData, kpmHandle->inDataSet.num );
#endif
#if BINARY_FEATURE
#else
arMalloc( featureVector.sf, SurfFeature, kpmHandle->inDataSet.num );
arMalloc( preRANSAC.mp, MatchPoint, kpmHandle->inDataSet.num );
arMalloc( inlierIndex, int, kpmHandle->inDataSet.num ); knn = 1;
arMalloc( annMatch2, int, kpmHandle->inDataSet.num*knn);
#endif #if BINARY_FEATURE
const std::vector<vision::FeaturePoint>& points = kpmHandle->freakMatcher->getQueryFeaturePoints();
//const std::vector<unsigned char>& descriptors = kpmHandle->freakMatcher->getQueryDescriptors();
#endif
if( procMode == KpmProcFullSize ) {
for( i = ; i < kpmHandle->inDataSet.num; i++ ) { // 对每个keypoint遍历,要做什么?提取特征向量 #if BINARY_FEATURE
float x = points[i].x, y = points[i].y;
#else
float x, y, *desc;
surfSubGetFeaturePosition( kpmHandle->surfHandle, i, &x, &y );
desc = surfSubGetFeatureDescPtr( kpmHandle->surfHandle, i );
for( j = ; j < SURF_SUB_DIMENSION; j++ ) {
featureVector.sf[i].v[j] = desc[j];
}
featureVector.sf[i].l = surfSubGetFeatureSign( kpmHandle->surfHandle, i );
#endif
if( kpmHandle->cparamLT != NULL ) {
arParamObserv2IdealLTf( &(kpmHandle->cparamLT->paramLTf), x, y, &(kpmHandle->inDataSet.coord[i].x), &(kpmHandle->inDataSet.coord[i].y) );
}
else {
kpmHandle->inDataSet.coord[i].x = x;
kpmHandle->inDataSet.coord[i].y = y;
}
}
}
else if( procMode == KpmProcTwoThirdSize ) {
for( i = ; i < kpmHandle->inDataSet.num; i++ ) {
#if BINARY_FEATURE
float x = points[i].x, y = points[i].y;
#else
float x, y, *desc;
surfSubGetFeaturePosition( kpmHandle->surfHandle, i, &x, &y );
desc = surfSubGetFeatureDescPtr( kpmHandle->surfHandle, i );
for( j = ; j < SURF_SUB_DIMENSION; j++ ) {
featureVector.sf[i].v[j] = desc[j];
}
featureVector.sf[i].l = surfSubGetFeatureSign( kpmHandle->surfHandle, i );
#endif
if( kpmHandle->cparamLT != NULL ) {
arParamObserv2IdealLTf( &(kpmHandle->cparamLT->paramLTf), x*1.5f, y*1.5f, &(kpmHandle->inDataSet.coord[i].x), &(kpmHandle->inDataSet.coord[i].y) );
}
else {
kpmHandle->inDataSet.coord[i].x = x*1.5f;
kpmHandle->inDataSet.coord[i].y = y*1.5f;
}
}
}
else if( procMode == KpmProcHalfSize ) {
for( i = ; i < kpmHandle->inDataSet.num; i++ ) {
#if BINARY_FEATURE
float x = points[i].x, y = points[i].y;
#else
float x, y, *desc;
surfSubGetFeaturePosition( kpmHandle->surfHandle, i, &x, &y );
desc = surfSubGetFeatureDescPtr( kpmHandle->surfHandle, i );
for( j = ; j < SURF_SUB_DIMENSION; j++ ) {
featureVector.sf[i].v[j] = desc[j];
}
featureVector.sf[i].l = surfSubGetFeatureSign( kpmHandle->surfHandle, i );
#endif
if( kpmHandle->cparamLT != NULL ) {
arParamObserv2IdealLTf( &(kpmHandle->cparamLT->paramLTf), x*2.0f, y*2.0f, &(kpmHandle->inDataSet.coord[i].x), &(kpmHandle->inDataSet.coord[i].y) );
}
else {
kpmHandle->inDataSet.coord[i].x = x*2.0f;
kpmHandle->inDataSet.coord[i].y = y*2.0f;
}
}
}
else if( procMode == KpmProcOneThirdSize ) {
for( i = ; i < kpmHandle->inDataSet.num; i++ ) {
#if BINARY_FEATURE
float x = points[i].x, y = points[i].y;
#else
float x, y, *desc;
surfSubGetFeaturePosition( kpmHandle->surfHandle, i, &x, &y );
desc = surfSubGetFeatureDescPtr( kpmHandle->surfHandle, i );
for( j = ; j < SURF_SUB_DIMENSION; j++ ) {
featureVector.sf[i].v[j] = desc[j];
}
featureVector.sf[i].l = surfSubGetFeatureSign( kpmHandle->surfHandle, i );
#endif
if( kpmHandle->cparamLT != NULL ) {
arParamObserv2IdealLTf( &(kpmHandle->cparamLT->paramLTf), x*3.0f, y*3.0f, &(kpmHandle->inDataSet.coord[i].x), &(kpmHandle->inDataSet.coord[i].y) );
}
else {
kpmHandle->inDataSet.coord[i].x = x*3.0f;
kpmHandle->inDataSet.coord[i].y = y*3.0f;
}
}
}
else { // procMode == KpmProcQuatSize
for( i = ; i < kpmHandle->inDataSet.num; i++ ) {
#if BINARY_FEATURE
float x = points[i].x, y = points[i].y;
#else
float x, y, *desc;
surfSubGetFeaturePosition( kpmHandle->surfHandle, i, &x, &y );
desc = surfSubGetFeatureDescPtr( kpmHandle->surfHandle, i );
for( j = ; j < SURF_SUB_DIMENSION; j++ ) {
featureVector.sf[i].v[j] = desc[j];
}
featureVector.sf[i].l = surfSubGetFeatureSign( kpmHandle->surfHandle, i );
#endif
if( kpmHandle->cparamLT != NULL ) {
arParamObserv2IdealLTf( &(kpmHandle->cparamLT->paramLTf), x*4.0f, y*4.0f, &(kpmHandle->inDataSet.coord[i].x), &(kpmHandle->inDataSet.coord[i].y) );
}
else {
kpmHandle->inDataSet.coord[i].x = x*4.0f;
kpmHandle->inDataSet.coord[i].y = y*4.0f;
}
}
} #if !BINARY_FEATURE
ann2 = (CAnnMatch2*)kpmHandle->ann2;
ann2->Match(&featureVector, knn, annMatch2); // knn = 1 就等于不聚类咯
for(int pageLoop = ; pageLoop < kpmHandle->resultNum; pageLoop++ ) {
kpmHandle->preRANSAC.num = ;
kpmHandle->aftRANSAC.num = ; kpmHandle->result[pageLoop].pageNo = kpmHandle->refDataSet.pageInfo[pageLoop].pageNo;
kpmHandle->result[pageLoop].camPoseF = -;
if( kpmHandle->result[pageLoop].skipF ) continue; int featureNum = ;
int *annMatch2Ptr = annMatch2;
int pageNo = kpmHandle->refDataSet.pageInfo[pageLoop].pageNo;
for( i = ; i < kpmHandle->inDataSet.num; i++ ) {
for( j = ; j < knn; j++ ) {
if( *annMatch2Ptr >= && kpmHandle->refDataSet.refPoint[*annMatch2Ptr].pageNo == pageNo ) {
kpmHandle->preRANSAC.match[featureNum].inIndex = i;
kpmHandle->preRANSAC.match[featureNum].refIndex = *annMatch2Ptr;
preRANSAC.mp[featureNum].x1 = kpmHandle->inDataSet.coord[i].x;
preRANSAC.mp[featureNum].y1 = kpmHandle->inDataSet.coord[i].y;
preRANSAC.mp[featureNum].x2 = kpmHandle->refDataSet.refPoint[*annMatch2Ptr].coord3D.x;
preRANSAC.mp[featureNum].y2 = kpmHandle->refDataSet.refPoint[*annMatch2Ptr].coord3D.y;
featureNum++;
annMatch2Ptr += knn-j;
break;
}
annMatch2Ptr++;
}
}
//printf("Page[%d] %d\n", pageLoop, featureNum);
preRANSAC.num = featureNum;
if( featureNum < ) continue; if( kpmRansacHomograhyEstimation(&preRANSAC, inlierIndex, &inlierNum, h) < ) { //不知从哪个库引来的函数
inlierNum = ;
}
//printf(" --> page[%d] %d pre:%3d, aft:%3d\n", pageLoop, kpmHandle->inDataSet.num, preRANSAC.num, inlierNum);
if( inlierNum < ) continue;
// 最终有效的特征匹配点要大于等于6个
kpmHandle->preRANSAC.num = preRANSAC.num;
kpmHandle->aftRANSAC.num = inlierNum;
for( i = ; i < inlierNum; i++ ) {
kpmHandle->aftRANSAC.match[i].inIndex = kpmHandle->preRANSAC.match[inlierIndex[i]].inIndex;
kpmHandle->aftRANSAC.match[i].refIndex = kpmHandle->preRANSAC.match[inlierIndex[i]].refIndex;
}
//printf(" ---> %d %d %d\n", kpmHandle->inDataSet.num, kpmHandle->preRANSAC.num, kpmHandle->aftRANSAC.num);
if( kpmHandle->poseMode == KpmPose6DOF ) {
//printf("----- Page %d ------\n", pageLoop);
ret = kpmUtilGetPose(kpmHandle->cparamLT, &(kpmHandle->aftRANSAC), &(kpmHandle->refDataSet), &(kpmHandle->inDataSet),
kpmHandle->result[pageLoop].camPose, &(kpmHandle->result[pageLoop].error) );
ARLOGi("Pose - %s",arrayToString2(kpmHandle->result[pageLoop].camPose).c_str());
//printf("----- End. ------\n");
}
else {
ret = kpmUtilGetPoseHomography(&(kpmHandle->aftRANSAC), &(kpmHandle->refDataSet), &(kpmHandle->inDataSet),
kpmHandle->result[pageLoop].camPose, &(kpmHandle->result[pageLoop].error) );
}
if( ret == ) {
kpmHandle->result[pageLoop].camPoseF = ;
kpmHandle->result[pageLoop].inlierNum = inlierNum;
ARLOGi("Page[%d] pre:%3d, aft:%3d, error = %f\n", pageLoop, preRANSAC.num, inlierNum, kpmHandle->result[pageLoop].error);
}
}
free(annMatch2);
#else
for (int pageLoop = ; pageLoop < kpmHandle->resultNum; pageLoop++) { kpmHandle->result[pageLoop].pageNo = kpmHandle->refDataSet.pageInfo[pageLoop].pageNo;
kpmHandle->result[pageLoop].camPoseF = -;
if( kpmHandle->result[pageLoop].skipF ) continue; const vision::matches_t& matches = kpmHandle->freakMatcher->inliers();
int matched_image_id = kpmHandle->freakMatcher->matchedId();
if (matched_image_id < ) continue; ret = kpmUtilGetPose_binary(kpmHandle->cparamLT,
matches ,
kpmHandle->freakMatcher->get3DFeaturePoints(matched_image_id),
kpmHandle->freakMatcher->getQueryFeaturePoints(),
kpmHandle->result[pageLoop].camPose,
&(kpmHandle->result[pageLoop].error) );
//ARLOGi("Pose (freak) - %s",arrayToString2(kpmHandle->result[pageLoop].camPose).c_str());
if( ret == ) {
kpmHandle->result[pageLoop].camPoseF = ;
kpmHandle->result[pageLoop].inlierNum = (int)matches.size();
kpmHandle->result[pageLoop].pageNo = kpmHandle->pageIDs[matched_image_id];
ARLOGi("Page[%d] pre:%3d, aft:%3d, error = %f\n", pageLoop, (int)matches.size(), (int)matches.size(), kpmHandle->result[pageLoop].error);
}
}
#endif
#if !BINARY_FEATURE
free(featureVector.sf);
free(preRANSAC.mp);
free(inlierIndex);
#endif
}
else {
#if !BINARY_FEATURE
kpmHandle->preRANSAC.num = ;
kpmHandle->aftRANSAC.num = ;
#endif
for( i = ; i < kpmHandle->resultNum; i++ ) {
kpmHandle->result[i].camPoseF = -;
}
} for( i = ; i < kpmHandle->resultNum; i++ ) kpmHandle->result[i].skipF = ; if (inImageBW != inImage) free( inImageBW ); return ;
}
Therefore, kpmHandle ends up holding the estimated camera pose (camPose) for each matched page.
2. main thread
static void mainLoop(void):上述 camPose 由 (2) 接收。接下来再分析 track object 是怎么一回事。
主线程:
主线程:
/*
 * One iteration of NFT surface tracking: select template features, match
 * them against the camera frame on worker threads, then refine the camera
 * transform 'trans' by ICP (6DOF mode) or homography fitting.
 *
 * Returns 0 on success with *err set to the final fitting error and 'trans'
 * updated; negative on failure (bad args, no prior initial transform, too
 * few matched features, or error above trackingThresh at every outlier
 * rejection level — in the failure cases contNum is reset so that
 * ar2SetInitTrans() must be called again).
 *
 * NOTE(review): numeric literals were lost in extraction and restored from
 * the ARToolKit5 AR2 sources (lib/SRC/AR2/tracking.c); the exact negative
 * return codes (-2/-3/-4) should be confirmed against that file.
 */
int ar2Tracking( AR2HandleT *ar2Handle, AR2SurfaceSetT *surfaceSet, ARUint8 *dataPtr, float trans[3][4], float *err )
{
    AR2TemplateCandidateT  *candidatePtr;
    AR2TemplateCandidateT  *cp[AR2_THREAD_MAX];
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    float                   aveBlur;
#endif
    int                     num, num2;
    int                     i, j, k;

    if (!ar2Handle || !surfaceSet || !dataPtr || !trans || !err) return (-1);

    if( surfaceSet->contNum <= 0 ) {
        ARLOGd("ar2Tracking() error: ar2SetInitTrans() must be called first.\n");
        return -2;
    }

    *err = 0.0F;

    // Compose the world transform of each sub-surface with up to three
    // previous camera transforms (used to extrapolate search positions).
    for( i = 0; i < surfaceSet->num; i++ ) {
        arUtilMatMulf( (const float (*)[4])surfaceSet->trans1, (const float (*)[4])surfaceSet->surface[i].trans, ar2Handle->wtrans1[i] );
        if( surfaceSet->contNum > 1 ) arUtilMatMulf( (const float (*)[4])surfaceSet->trans2, (const float (*)[4])surfaceSet->surface[i].trans, ar2Handle->wtrans2[i] );
        if( surfaceSet->contNum > 2 ) arUtilMatMulf( (const float (*)[4])surfaceSet->trans3, (const float (*)[4])surfaceSet->surface[i].trans, ar2Handle->wtrans3[i] );
    }

    // Gather candidate features currently visible in the camera frame.
    if( ar2Handle->trackingMode == AR2_TRACKING_6DOF ) {
        extractVisibleFeatures(ar2Handle->cparamLT, ar2Handle->wtrans1, surfaceSet, ar2Handle->candidate, ar2Handle->candidate2);
    }
    else {
        extractVisibleFeaturesHomography(ar2Handle->xsize, ar2Handle->ysize, ar2Handle->wtrans1, surfaceSet, ar2Handle->candidate, ar2Handle->candidate2);
    }

    candidatePtr = ar2Handle->candidate;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    aveBlur = 0.0F;
#endif
    i   = 0;   // Counts up to searchFeatureNum.
    num = 0;   // Count of successfully matched features.
    while( i < ar2Handle->searchFeatureNum ) {
        num2 = num;
        // Dispatch one template-matching job per worker thread.
        for( j = 0; j < ar2Handle->threadNum; j++ ) {
            if( i == ar2Handle->searchFeatureNum ) break;
            k = ar2SelectTemplate( candidatePtr, surfaceSet->prevFeature, num2, ar2Handle->pos, ar2Handle->xsize, ar2Handle->ysize );
            if( k < 0 ) {
                if( candidatePtr == ar2Handle->candidate ) {
                    // Fall back to the secondary candidate list.
                    candidatePtr = ar2Handle->candidate2;
                    k = ar2SelectTemplate( candidatePtr, surfaceSet->prevFeature, num2, ar2Handle->pos, ar2Handle->xsize, ar2Handle->ysize );
                    if( k < 0 ) break; // PRL 2012-05-15: Give up if we can't select template from alternate candidate either.
                }
                else break;
            }
            cp[j] = &(candidatePtr[k]);
            ar2Handle->pos[num2][0] = candidatePtr[k].sx;
            ar2Handle->pos[num2][1] = candidatePtr[k].sy;
            ar2Handle->arg[j].ar2Handle  = ar2Handle;
            ar2Handle->arg[j].surfaceSet = surfaceSet;
            ar2Handle->arg[j].candidate  = &(candidatePtr[k]);
            ar2Handle->arg[j].dataPtr    = dataPtr;
            // Wake worker j (cond1); it runs ar2Tracking2dSub on this candidate.
            threadStartSignal( ar2Handle->threadHandle[j] );
            num2++;
            if( num2 == 5 ) num2 = num;   // NOTE(review): constant restored as 5 — confirm against tracking.c.
            i++;
        }
        k = j;   // Number of jobs actually dispatched this round.
        if( k == 0 ) break;
        // Collect results (cond2) and keep matches above the similarity threshold.
        for( j = 0; j < k; j++ ) {
            threadEndWait( ar2Handle->threadHandle[j] );
            if( ar2Handle->arg[j].ret == 0 && ar2Handle->arg[j].result.sim > ar2Handle->simThresh ) {
                if( ar2Handle->trackingMode == AR2_TRACKING_6DOF ) {
#ifdef ARDOUBLE_IS_FLOAT
                    arParamObserv2Ideal(ar2Handle->cparamLT->param.dist_factor,
                                        ar2Handle->arg[j].result.pos2d[0], ar2Handle->arg[j].result.pos2d[1],
                                        &ar2Handle->pos2d[num][0], &ar2Handle->pos2d[num][1], ar2Handle->cparamLT->param.dist_function_version);
#else
                    ARdouble  pos2d0, pos2d1;
                    arParamObserv2Ideal(ar2Handle->cparamLT->param.dist_factor,
                                        (ARdouble)(ar2Handle->arg[j].result.pos2d[0]), (ARdouble)(ar2Handle->arg[j].result.pos2d[1]),
                                        &pos2d0, &pos2d1, ar2Handle->cparamLT->param.dist_function_version);
                    ar2Handle->pos2d[num][0] = (float)pos2d0;
                    ar2Handle->pos2d[num][1] = (float)pos2d1;
#endif
                }
                else {
                    ar2Handle->pos2d[num][0] = ar2Handle->arg[j].result.pos2d[0];
                    ar2Handle->pos2d[num][1] = ar2Handle->arg[j].result.pos2d[1];
                }
                ar2Handle->pos3d[num][0] = ar2Handle->arg[j].result.pos3d[0];
                ar2Handle->pos3d[num][1] = ar2Handle->arg[j].result.pos3d[1];
                ar2Handle->pos3d[num][2] = ar2Handle->arg[j].result.pos3d[2];
                ar2Handle->pos[num][0]   = cp[j]->sx;
                ar2Handle->pos[num][1]   = cp[j]->sy;
                ar2Handle->usedFeature[num].snum  = cp[j]->snum;
                ar2Handle->usedFeature[num].level = cp[j]->level;
                ar2Handle->usedFeature[num].num   = cp[j]->num;
                ar2Handle->usedFeature[num].flag  = 0;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
                aveBlur += ar2Handle->arg[j].result.blurLevel;
#endif
                num++;
            }
        }
    }

    // Remember this frame's matched features; list is flag==-1 terminated.
    for( i = 0; i < num; i++ ) {
        surfaceSet->prevFeature[i] = ar2Handle->usedFeature[i];
    }
    surfaceSet->prevFeature[num].flag = -1;
    //ARLOG("------\nNum = %d\n", num);

    if( ar2Handle->trackingMode == AR2_TRACKING_6DOF ) {
        if( num < 4 ) {
            surfaceSet->contNum = 0;
            return -3;
        }
        // ICP pose refinement with progressively more aggressive outlier rejection.
        *err = ar2GetTransMat( ar2Handle->icpHandle, surfaceSet->trans1, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 0 );
        //ARLOG("outlier 0%%: err = %f, num = %d\n", *err, num);
        if( *err > ar2Handle->trackingThresh ) {
            icpSetInlierProbability( ar2Handle->icpHandle, 0.8F );
            *err = ar2GetTransMat( ar2Handle->icpHandle, trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1 );
            if( *err > ar2Handle->trackingThresh ) {
                icpSetInlierProbability( ar2Handle->icpHandle, 0.6F );
                *err = ar2GetTransMat( ar2Handle->icpHandle, trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1 );
                if( *err > ar2Handle->trackingThresh ) {
                    icpSetInlierProbability( ar2Handle->icpHandle, 0.4F );
                    *err = ar2GetTransMat( ar2Handle->icpHandle, trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1 );
                    if( *err > ar2Handle->trackingThresh ) {
                        icpSetInlierProbability( ar2Handle->icpHandle, 0.0F );
                        *err = ar2GetTransMat( ar2Handle->icpHandle, trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1 );
                        if( *err > ar2Handle->trackingThresh ) {
                            // Tracking lost: require re-initialisation.
                            surfaceSet->contNum = 0;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
                            if( ar2Handle->blurMethod == AR2_ADAPTIVE_BLUR ) ar2Handle->blurLevel = AR2_DEFAULT_BLUR_LEVEL; // Reset the blurLevel.
#endif
                            return -4;
                        }
                    }
                }
            }
        }
    }
    else {
        if( num < 4 ) {
            surfaceSet->contNum = 0;
            return -3;
        }
        *err = ar2GetTransMatHomography( surfaceSet->trans1, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 0, 1.0F );
        if( *err > ar2Handle->trackingThresh ) {
            *err = ar2GetTransMatHomography( trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1, 0.8F );
            if( *err > ar2Handle->trackingThresh ) {
                *err = ar2GetTransMatHomography( trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1, 0.6F );
                if( *err > ar2Handle->trackingThresh ) {
                    *err = ar2GetTransMatHomography( trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1, 0.4F );
                    if( *err > ar2Handle->trackingThresh ) {
                        *err = ar2GetTransMatHomography( trans, ar2Handle->pos2d, ar2Handle->pos3d, num, trans, 1, 0.0F );
                        if( *err > ar2Handle->trackingThresh ) {
                            surfaceSet->contNum = 0;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
                            if( ar2Handle->blurMethod == AR2_ADAPTIVE_BLUR ) ar2Handle->blurLevel = AR2_DEFAULT_BLUR_LEVEL; // Reset the blurLevel.
#endif
                            return -4;
                        }
                    }
                }
            }
        }
    }

#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    if( ar2Handle->blurMethod == AR2_ADAPTIVE_BLUR ) {
        // NOTE(review): constants in this adaptive-blur update were lost in
        // extraction; values below are a plausible reconstruction — confirm.
        aveBlur = aveBlur/num + 0.5F;
        ar2Handle->blurLevel += (int)aveBlur - 1;
        if( ar2Handle->blurLevel < 1 ) ar2Handle->blurLevel = 1;
        if( ar2Handle->blurLevel >= AR2_BLUR_IMAGE_MAX-1 ) ar2Handle->blurLevel = AR2_BLUR_IMAGE_MAX-2;
    }
#endif

    // Shift the transform history: trans3 <- trans2 <- trans1 <- trans.
    surfaceSet->contNum++;
    for( j = 0; j < 3; j++ ) {
        for( i = 0; i < 4; i++ ) surfaceSet->trans3[j][i] = surfaceSet->trans2[j][i];
    }
    for( j = 0; j < 3; j++ ) {
        for( i = 0; i < 4; i++ ) surfaceSet->trans2[j][i] = surfaceSet->trans1[j][i];
    }
    for( j = 0; j < 3; j++ ) {
        for( i = 0; i < 4; i++ ) surfaceSet->trans1[j][i] = trans[j][i];
    }
    return 0;
}
3. tracking thread
main() -->
initNFT() -->
ar2CreateHandle() -->
// Handle shared between a client and one worker thread. The client sets
// startF and signals cond1; the worker sets endF/busyF and signals cond2.
// All flag accesses are protected by 'mut'.
struct _THREAD_HANDLE_T {
int ID; // Worker thread identifier (index assigned at threadInit()).
int startF; // 0 = no request pending, 1 = start please, 2 = quit please.
int endF; // 0 = worker not started or worker running, 1 = worker completed, 2 = worker will quit (exit).
int busyF; // 0 = worker not started or worker ended, 1 = worker busy.
//pthread_t thread;
pthread_mutex_t mut; // Guards startF/endF/busyF and both condition variables.
pthread_cond_t cond1; // Signals from client that startF has changed.
pthread_cond_t cond2; // Signals from worker that endF has changed.
void *arg; // Opaque per-worker argument passed to the worker function.
};
// lib/src/ar2/Handle.c
// lib/src/ar2/Handle.c
/*
 * Allocate and initialise an AR2 tracking handle: set default search and
 * template parameters, clamp the worker-thread count, and spawn one
 * tracking worker thread (ar2Tracking2d) per thread slot.
 *
 * NOTE(review): numeric literals restored after extraction loss.
 */
static AR2HandleT *ar2CreateHandleSub( int pixFormat, int xsize, int ysize, int threadNum )
{
    AR2HandleT   *ar2Handle;
    int           i;

    arMalloc( ar2Handle, AR2HandleT, 1 );

    ar2Handle->pixFormat = pixFormat;
    ar2Handle->xsize     = xsize;
    ar2Handle->ysize     = ysize;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    ar2Handle->blurMethod = AR2_DEFAULT_BLUR_METHOD;
    ar2Handle->blurLevel  = AR2_DEFAULT_BLUR_LEVEL;
#endif
    ar2Handle->searchSize    = AR2_DEFAULT_SEARCH_SIZE;
    ar2Handle->templateSize1 = AR2_DEFAULT_TS1;
    ar2Handle->templateSize2 = AR2_DEFAULT_TS2;
    ar2Handle->searchFeatureNum = AR2_DEFAULT_SEARCH_FEATURE_NUM;
    if( ar2Handle->searchFeatureNum > AR2_SEARCH_FEATURE_MAX ) {
        ar2Handle->searchFeatureNum = AR2_SEARCH_FEATURE_MAX;
    }
    ar2Handle->simThresh      = AR2_DEFAULT_SIM_THRESH;
    ar2Handle->trackingThresh = AR2_DEFAULT_TRACKING_THRESH;

    // Resolve the worker count: default = one per CPU, clamped to [1, AR2_THREAD_MAX].
    if( threadNum == AR2_TRACKING_DEFAULT_THREAD_NUM ) {
        threadNum = threadGetCPU();
    }
    if( threadNum < 1 ) {
        threadNum = 1;
    }
    if( threadNum > AR2_THREAD_MAX ) {
        threadNum = AR2_THREAD_MAX;
    }
    ar2Handle->threadNum = threadNum;
    ARLOGi("Tracking thread = %d\n", threadNum);

    // One dedicated worker thread per slot, each running ar2Tracking2d.
    for( i = 0; i < ar2Handle->threadNum; i++ ) {
        arMalloc( ar2Handle->arg[i].mfImage, ARUint8, xsize*ysize );
        ar2Handle->arg[i].templ = NULL;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
        ar2Handle->arg[i].templ2 = NULL;
#endif
        ar2Handle->threadHandle[i] = threadInit(i, &(ar2Handle->arg[i]), ar2Tracking2d);
    }
    return ar2Handle;
}
/*
 * Worker-thread entry point for template tracking. Blocks in
 * threadStartWait() until the client dispatches a candidate via
 * threadStartSignal(), runs ar2Tracking2dSub on it, stores the outcome in
 * arg->ret / arg->result, and signals completion with threadEndSignal().
 * Loops until threadStartWait() reports a quit request (negative return).
 */
void *ar2Tracking2d( THREAD_HANDLE_T *threadHandle )
{
    AR2Tracking2DParamT  *arg;
    int                   ID;

    arg = (AR2Tracking2DParamT *)threadGetArg(threadHandle);
    ID  = threadGetID(threadHandle);

    ARLOGi("Start tracking_thread #%d.\n", ID);
    for(;;) {
        if( threadStartWait(threadHandle) < 0 ) break;
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE  /* adaptive-template variant carries a second template */
        arg->ret = ar2Tracking2dSub( arg->ar2Handle, arg->surfaceSet, arg->candidate,
                                     arg->dataPtr, arg->mfImage, &(arg->templ), &(arg->templ2), &(arg->result) );
#else
        arg->ret = ar2Tracking2dSub( arg->ar2Handle, arg->surfaceSet, arg->candidate,
                                     arg->dataPtr, arg->mfImage, &(arg->templ), &(arg->result) );
#endif
        threadEndSignal(threadHandle);
    }
    ARLOGi("End tracking_thread #%d.\n", ID);
    return NULL;
}
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
static int ar2Tracking2dSub ( AR2HandleT *handle, AR2SurfaceSetT *surfaceSet, AR2TemplateCandidateT *candidate,
                              ARUint8 *dataPtr, ARUint8 *mfImage, AR2TemplateT **templ,
                              AR2Template2T **templ2, AR2Tracking2DResultT *result )
#else
static int ar2Tracking2dSub ( AR2HandleT *handle, AR2SurfaceSetT *surfaceSet, AR2TemplateCandidateT *candidate,
                              ARUint8 *dataPtr, ARUint8 *mfImage, AR2TemplateT **templ,
                              AR2Tracking2DResultT *result )
#endif
/*
 * Track a single candidate feature: build (or reuse) its image template,
 * predict the search window from up to three previous camera transforms,
 * run template matching, and on success fill result->pos2d (screen) and
 * result->pos3d (surface-local 3D via the surface transform).
 *
 * Returns 0 on success, negative on failure (template rejected for low
 * variance, or no sufficiently similar match found).
 *
 * NOTE(review): numeric literals were lost in extraction and restored from
 * the ARToolKit5 AR2 sources; confirm against lib/SRC/AR2/tracking2d.c.
 */
{
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    AR2Template2T  *templ2;
#endif
    int             snum, level, fnum;
    int             search[3][2];   // Up to 3 predicted screen positions.
    int             bx, by;

    snum  = candidate->snum;    // Surface index.
    level = candidate->level;   // Pyramid level of the feature.
    fnum  = candidate->num;     // Feature index within the level.

    if( *templ == NULL ) *templ = ar2GenTemplate( handle->templateSize1, handle->templateSize2 );
#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    if( *templ2 == NULL ) *templ2 = ar2GenTemplate2( handle->templateSize1, handle->templateSize2 );
#endif

#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    if( handle->blurMethod == AR2_CONSTANT_BLUR ) {
        if( ar2SetTemplateSub( handle->cparamLT,
                               (const float (*)[4])handle->wtrans1[snum],
                               surfaceSet->surface[snum].imageSet,
                               &(surfaceSet->surface[snum].featureSet->list[level]),
                               fnum,
                               handle->blurLevel,
                               *templ ) < 0 ) return -1;
        // Reject templates with too little intensity variance to match reliably.
        if( (*templ)->vlen * (*templ)->vlen
          < ((*templ)->xts1+(*templ)->xts2+1) * ((*templ)->yts1+(*templ)->yts2+1)
            * AR2_DEFAULT_TRACKING_SD_THRESH * AR2_DEFAULT_TRACKING_SD_THRESH ) {
            return -1;
        }
    }
    else {
        if( ar2SetTemplate2Sub( handle->cparamLT,
                                (const float (*)[4])handle->wtrans1[snum],
                                surfaceSet->surface[snum].imageSet,
                                &(surfaceSet->surface[snum].featureSet->list[level]),
                                fnum,
                                handle->blurLevel,
                                *templ2 ) < 0 ) return -1;
        // NOTE(review): vlen index lost in extraction; 1 (nominal blur level)
        // is a guess — confirm against the original source.
        if( (*templ2)->vlen[1] * (*templ2)->vlen[1]
          < ((*templ2)->xts1+(*templ2)->xts2+1) * ((*templ2)->yts1+(*templ2)->yts2+1)
            * AR2_DEFAULT_TRACKING_SD_THRESH * AR2_DEFAULT_TRACKING_SD_THRESH ) {
            return -1;
        }
    }
#else
    if( ar2SetTemplateSub( handle->cparamLT,
                           (const float (*)[4])handle->wtrans1[snum],
                           surfaceSet->surface[snum].imageSet,
                           &(surfaceSet->surface[snum].featureSet->list[level]),
                           fnum,
                           *templ ) < 0 ) return -1;
    // Reject templates with too little intensity variance to match reliably.
    if( (*templ)->vlen * (*templ)->vlen
      < ((*templ)->xts1 + (*templ)->xts2 + 1) * ((*templ)->yts1 + (*templ)->yts2 + 1)
        * AR2_DEFAULT_TRACKING_SD_THRESH * AR2_DEFAULT_TRACKING_SD_THRESH ) {
        return -1;
    }
#endif

    // Get the screen coordinates for up to three previous positions of this feature into search[][].
    if( surfaceSet->contNum == 1 ) {
        ar2GetSearchPoint( handle->cparamLT,
                           (const float (*)[4])handle->wtrans1[snum], NULL, NULL,
                           &(surfaceSet->surface[snum].featureSet->list[level].coord[fnum]),
                           search );
    }
    else if( surfaceSet->contNum == 2 ) {
        ar2GetSearchPoint( handle->cparamLT,
                           (const float (*)[4])handle->wtrans1[snum],
                           (const float (*)[4])handle->wtrans2[snum], NULL,
                           &(surfaceSet->surface[snum].featureSet->list[level].coord[fnum]),
                           search );
    }
    else {
        ar2GetSearchPoint( handle->cparamLT,
                           (const float (*)[4])handle->wtrans1[snum],
                           (const float (*)[4])handle->wtrans2[snum],
                           (const float (*)[4])handle->wtrans3[snum],
                           &(surfaceSet->surface[snum].featureSet->list[level].coord[fnum]),
                           search );
    }

#if AR2_CAPABLE_ADAPTIVE_TEMPLATE
    if( handle->blurMethod == AR2_CONSTANT_BLUR ) {
        if( ar2GetBestMatching( dataPtr,
                                mfImage,
                                handle->xsize,
                                handle->ysize,
                                handle->pixFormat,
                                *templ,
                                handle->searchSize,
                                handle->searchSize,
                                search,
                                &bx, &by,
                                &(result->sim)) < 0 ) {
            return -1;
        }
        result->blurLevel = handle->blurLevel;
    }
    else {
        if( ar2GetBestMatching2( dataPtr,
                                 mfImage,
                                 handle->xsize,
                                 handle->ysize,
                                 handle->pixFormat,
                                 *templ2,
                                 handle->searchSize,
                                 handle->searchSize,
                                 search,
                                 &bx, &by,
                                 &(result->sim),
                                 &(result->blurLevel)) < 0 ) {
            return -1;
        }
    }
#else
    if( ar2GetBestMatching( dataPtr,
                            mfImage,
                            handle->xsize,
                            handle->ysize,
                            handle->pixFormat,
                            *templ,
                            handle->searchSize,
                            handle->searchSize,
                            search,
                            &bx, &by,
                            &(result->sim)) < 0 ) {
        return -1;
    }
#endif

    // Best match position on screen.
    result->pos2d[0] = (float)bx;
    result->pos2d[1] = (float)by;
    // Map the feature's surface coordinates (mx, my) through the surface's
    // 3x4 transform (columns 0, 1 and the translation column 3).
    result->pos3d[0] = surfaceSet->surface[snum].trans[0][0] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].mx
                     + surfaceSet->surface[snum].trans[0][1] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].my
                     + surfaceSet->surface[snum].trans[0][3];
    result->pos3d[1] = surfaceSet->surface[snum].trans[1][0] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].mx
                     + surfaceSet->surface[snum].trans[1][1] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].my
                     + surfaceSet->surface[snum].trans[1][3];
    result->pos3d[2] = surfaceSet->surface[snum].trans[2][0] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].mx
                     + surfaceSet->surface[snum].trans[2][1] * surfaceSet->surface[snum].featureSet->list[level].coord[fnum].my
                     + surfaceSet->surface[snum].trans[2][3];
    return 0;
}
tracking提取keypoint没看出来在哪里处理?
[Artoolkit] kpmMatching & Tracking of nftSimple的更多相关文章
- [Artoolkit] Framework Analysis of nftSimple
What is nftSimple? Loads NFT dataset names from a configuration file. The example uses the “Pinball. ...
- [Artoolkit] Android Sample of nftSimple
结合:[Artoolkit] ARToolKit's SDK Structure on Android 重难点:aRBaseLib/, nftSimple/, libcpufeatures.a aRB ...
- 本人AI知识体系导航 - AI menu
Relevant Readable Links Name Interesting topic Comment Edwin Chen 非参贝叶斯 徐亦达老板 Dirichlet Process 学习 ...
- [Artoolkit] Marker of nftSimple
重点看:markers.dat 的解析原理 1. int main(int argc, char** argv) { ]; const char *cparam_name = "Data2/ ...
- [Artoolkit] ARSimpleNativeCarsProj for Multi Markers Tracking
效果简直了,但代码架构有点坑,慢慢道来. libc++_shared.so应该是c++的库:libARWrapperNativeCaresExample.so也有对应的c++文件:那么,libARWr ...
- [Artoolkit] ARToolKit's SDK Structure on Android
Most applications on Android are developed in Java, and Android provides a rich framework of classes ...
- 【AR实验室】ARToolKit之制作自己的Marker/NFT
0x00 - 前言 看过example后,就会想自己动动手,这里改改那里修修.我们先试着添加自己喜欢的marker/nft进行识别. 比如我做了一个法拉利的marker: 还有网上找了一个法拉利log ...
- 【AR实验室】ARToolKit之Example篇
0x00 - 前言 PS : 我突然意识到ARToolKit本质可能就是一个可以实时求解相机内外参的解决方案. 拿到一个新的SDK,90%的人应该都会先跑一下Example.拿到ARToolKit的S ...
- 【AR实验室】ARToolKit之概述篇
0x00 - 前言 我从去年就开始对AR(Augmented Reality)技术比较关注,但是去年AR行业一直处于偶尔发声的状态,丝毫没有其"异姓同名"的兄弟VR(Virtual ...
随机推荐
- Windows10系统重置网络设置
使用Windows10系统户很可能会遇到网络异常,连接不上网的情况? 如此,简易方法可以尝试下.重置网络,教程如下: 1.//按下WIN+X(或右键点击开始按钮),然后选择“命令提示符(管理员)”; ...
- Oracle中使用透明网关链接到Sqlserver(转)
测试环境介绍 1.ORACLEServer Database version:10.2.0 IP:192.168.1.5 ORACLE_HOME:D:\oracle\product\10.2.0\ ...
- 继承之super关键字的使用
一.super关键字: 在对象的内部使用,可代表父类对象. 1. 访问父类的属性:super.age 2. 访问父类的方法:super.eat() 例: package 关键字extends; pub ...
- 使用 IntraWeb (30) - TIWAppInfo、TIWMimeTypes、TIWAppCache
TIWAppInfo //IntraWeb 12.2.15 开始使用 TIWAppInfo 来获取应用的相关信息, 和 IWServerController.WebApplication 的某些属性有 ...
- 各种Oracle索引类型介绍
逻辑上:Single column 单行索引Concatenated 多行索引Unique 唯一索引NonUnique 非唯一索引Function-based函数索引Domain 域索引 物理上:Pa ...
- android: 发送自定义广播
5.3.1 发送标准广播 在发送广播之前,我们还是需要先定义一个广播接收器来准备接收此广播才行,不然发 出去也是白发.因此新建一个 MyBroadcastReceiver 继承自 Broadca ...
- VS2013中Python学习笔记[环境搭建]
前言 Python是一个高层次的结合了解释性.编译性.互动性和面向对象的脚本语言. Python的设计具有很强的可读性,相比其他语言经常使用英文关键字,其他语言的一些标点符号,它具有比其他语言更有特色 ...
- 如何在windows2008/2012上安装启明星系统。
启明星系统提供多种安装方式.安装包里自带了setup.exe.每个程序的 install下有在线安装(例如请假应用程序为book,则默认为 http://localhost/book/install ...
- UVA11137 Ingenuous Cubrency 完全背包 递推式子
做数论都做傻了,这道题目 有推荐,当时的分类放在了递推里面,然后我就不停的去推啊推啊,后来推出来了,可是小一点的数 输出答案都没问题,大一点的数 输出答案就是错的,实在是不知道为什么,后来又不停的看, ...
- OS面试题(转载)
转载自:http://placement.freshersworld.com/power-preparation/technical-interview-preparation/os-intervie ...