2012-03-23 103 views
2

我想实现一个简单的应用程序(修改sample2),它显示了SIFT,SURF,BRIEF和ORB的功能。用户可以简单地比较旋转或尺度不变性或速度。但我发现失败,我无法处理,所以我转而寻求你的帮助。 当我尝试使用SIFT或SURF我总是在线的异常当我尝试匹配:matcherBruteForce.match(descriptorFrame, matches);用模式识别算法SURF,SIFT在OpenCV for ANDROID

我有一个类似的AR应用和使用这些设置这是工作,所以我不能揣摩出我米犯错。我试图将变量“matcherBruteForce”设置为BRUTEFORCE,BRUTEFORCE_L1,BRUTEFORCE_SL2事件为BRUTEFORCE_HAMMING。但我总是得到同样的例外:

SIFT:

CvException [org.opencv.core.CvException: /home/andreyk/OpenCV2/trunk/opencv_2.3.1.b2/modules/features2d/include/opencv2/features2d/features2d.hpp:2455: error: (-215) DataType<ValueType>::type == matcher.trainDescCollection[iIdx].type() || matcher.trainDescCollection[iIdx].empty() in function static void cv::BruteForceMatcher<Distance>::commonKnnMatchImpl(cv::BruteForceMatcher<Distance>&, const cv::Mat&, std::vector<std::vector<cv::DMatch> >&, int, const std::vector<cv::Mat>&, bool) [with Distance = cv::SL2<float>] 
] 

SURF:

CvException [org.opencv.core.CvException: /home/andreyk/OpenCV2/trunk/opencv_2.3.1.b2/modules/features2d/include/opencv2/features2d/features2d.hpp:2455: error: (-215) DataType<ValueType>::type == matcher.trainDescCollection[iIdx].type() || matcher.trainDescCollection[iIdx].empty() in function static void cv::BruteForceMatcher<Distance>::commonKnnMatchImpl(cv::BruteForceMatcher<Distance>&, const cv::Mat&, std::vector<std::vector<cv::DMatch> >&, int, const std::vector<cv::Mat>&, bool) [with Distance = cv::SL2<float>] 
] 

任何帮助赞赏

全班同学:

package sk.bolyos.opencv; 

import java.util.Vector; 

import org.opencv.features2d.DMatch; 
import org.opencv.features2d.DescriptorExtractor; 
import org.opencv.features2d.DescriptorMatcher; 
import org.opencv.features2d.FeatureDetector; 
import org.opencv.features2d.Features2d; 
import org.opencv.features2d.KeyPoint; 
import org.opencv.highgui.VideoCapture; 
import org.opencv.android.Utils; 
import org.opencv.core.Mat; 
import org.opencv.core.Size; 
import org.opencv.imgproc.Imgproc; 
import org.opencv.highgui.Highgui; 

import sk.bolyos.svk.*; 

import android.content.Context; 
import android.graphics.Bitmap; 
import android.util.Log; 
import android.view.SurfaceHolder; 




public class MyView extends CvViewBase { 

    private static final int BOUNDARY = 35; 

    private Mat mRgba; 
    private Mat mGray; 
    private Mat mIntermediateMat; 
    private Mat mLogoMilka1,mLogoMilka2,mLogoMilka3,mLogoMilka4; 
    ///////////////////DETECTORS 
    FeatureDetector siftDetector = FeatureDetector.create(FeatureDetector.SIFT); 
    FeatureDetector surfDetector = FeatureDetector.create(FeatureDetector.SURF); 
    FeatureDetector fastDetector = FeatureDetector.create(FeatureDetector.FAST); 
    FeatureDetector orbDetector = FeatureDetector.create(FeatureDetector.ORB); 
    ///////////////////DESCRIPTORS 
    DescriptorExtractor siftDescriptor = DescriptorExtractor.create(DescriptorExtractor.SIFT); 
    DescriptorExtractor surfDescriptor = DescriptorExtractor.create(DescriptorExtractor.SURF); 
    DescriptorExtractor briefDescriptor = DescriptorExtractor.create(DescriptorExtractor.BRIEF); 
    DescriptorExtractor orbDescriptor = DescriptorExtractor.create(DescriptorExtractor.ORB); 
    ///////////////////DATABASE 
    Vector<KeyPoint> vectorMilka1 = new Vector<KeyPoint>(); 
    Vector<KeyPoint> vectorMilka2 = new Vector<KeyPoint>(); 
    Vector<KeyPoint> vectorMilka3 = new Vector<KeyPoint>(); 
    Vector<KeyPoint> vectorMilka4 = new Vector<KeyPoint>(); 
    Mat descriptorMilka1 = new Mat(); 
    Mat descriptorMilka2 = new Mat(); 
    Mat descriptorMilka3 = new Mat(); 
    Mat descriptorMilka4 = new Mat(); 
    ///////////////////VIDEO 
    Vector<KeyPoint> vectorFrame = new Vector<KeyPoint>(); 
    Mat descriptorFrame = new Mat(); 

    DescriptorMatcher matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT); 
    DescriptorMatcher matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2); 
    Vector<DMatch> matches = new Vector<DMatch>(); 
    Vector<Mat> siftDescriptors = new Vector<Mat>(); 
    Vector<Mat> surfDescriptors = new Vector<Mat>(); 
    Vector<Mat> briefDescriptors = new Vector<Mat>(); 
    Vector<Mat> orbDescriptors = new Vector<Mat>(); 

    public MyView(Context context) { 
     super(context); 
     // TODO Auto-generated constructor stub 
     try{ 
      /* 
      if (mLogoMilka1 == null){ 
       mLogoMilka1 = new Mat(); 
       mLogoMilka1 = Utils.loadResource(getContext(), R.drawable.milkalogo); 
       fillDB(mLogoMilka1,vectorMilka1,descriptorMilka1); 
      } 
      if (mLogoMilka2 == null){ 
       mLogoMilka2 = new Mat(); 
       mLogoMilka2 = Utils.loadResource(getContext(), R.drawable.milkalogom); 
       fillDB(mLogoMilka2,vectorMilka2,descriptorMilka2); 
      } 
      if (mLogoMilka3 == null){ 
       mLogoMilka3 = new Mat(); 
       mLogoMilka3 = Utils.loadResource(getContext(), R.drawable.milkalogol); 
       fillDB(mLogoMilka3,vectorMilka3,descriptorMilka3); 
      }*/ 
      if (mLogoMilka4 == null){ 
       mLogoMilka4 = new Mat(); 
       mLogoMilka4 = Utils.loadResource(getContext(), R.drawable.milkalogolc); 
       fillDB(mLogoMilka4,vectorMilka4,descriptorMilka4); 
      } 

     }catch(Exception e){ 
      Log.e("SVK APPLICATION", "in MyView constructor "+e.toString()); 
     } 
    } 

    public void fillDB(Mat mLogo,Vector<KeyPoint> vector,Mat descriptor){ 

     //SIFT 
     siftDetector.detect(mLogo, vector); 
     siftDescriptor.compute(mLogo, vector, descriptor); 
     siftDescriptors.add(descriptor); 
     //SURF 
     surfDetector.detect(mLogo, vector); 
     surfDescriptor.compute(mLogo, vector, descriptor); 
     surfDescriptors.add(descriptor); 
     //FAST+BRIEF 
     fastDetector.detect(mLogo, vector); 
     briefDescriptor.compute(mLogo, vector, descriptor); 
     briefDescriptors.add(descriptor); 
     //ORB 
     orbDetector.detect(mLogo, vector); 
     orbDescriptor.compute(mLogo, vector, descriptor); 
     orbDescriptors.add(descriptor); 

    } 


    @Override 
    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { 
     super.surfaceChanged(_holder, format, width, height); 

     synchronized (this) { 
      // initialize Mats before usage 
      mGray = new Mat(); 
      mRgba = new Mat(); 
      mIntermediateMat = new Mat(); 
      matches = new Vector<DMatch>(); 
      vectorFrame = new Vector<KeyPoint>(); 
      descriptorFrame = new Mat(); 
     } 
    } 

    @Override 
    protected Bitmap processFrame(VideoCapture capture) { 
     // TODO Auto-generated method stub 
     switch (SVKApplikaciaActivity.viewMode) { 
     case SVKApplikaciaActivity.VIEW_MODE_SIFT: 
      //TODO SIFT 
      try{ 
       //matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE); 
       //matcherBruteForce.clear(); 
       matcherBruteForce.add(siftDescriptors); 
       matcherBruteForce.train();// proba 

       capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); 
       Imgproc.resize(mGray, mGray,new Size(480,320)); 
       siftDetector.detect(mGray, vectorFrame); 
       siftDescriptor.compute(mGray, vectorFrame, descriptorFrame); 

       matcherBruteForce.match(descriptorFrame, matches); 
       Vector<DMatch> matchesXXX = new Vector<DMatch>(); 
       for (DMatch t : matches) 
        if(t.distance<BOUNDARY) 
         matchesXXX.add(t); 
       Mat nGray = new Mat(); 
       Mat nLogo = new Mat(); 
       Mat nRgba = new Mat(); 
       Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3); 
       Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3); 
       Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba); 
       Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4); 
      }catch(Exception e){ 
       Log.e("SVK APPLICATION","in SIFT "+ e.toString()); 
      } 
      break; 
     case SVKApplikaciaActivity.VIEW_MODE_SURF: 
      //TODO SURF 
      try{ 
       //matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE); 
       //matcherBruteForce.clear(); 
       matcherBruteForce.add(surfDescriptors); 
       matcherBruteForce.train();// proba 

       capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); 
       Imgproc.resize(mGray, mGray,new Size(480,320)); 
       surfDetector.detect(mGray, vectorFrame); 
       surfDescriptor.compute(mGray, vectorFrame, descriptorFrame); 

       matcherBruteForce.match(descriptorFrame, matches); 
       Vector<DMatch> matchesXXX = new Vector<DMatch>(); 
       for (DMatch t : matches) 
        if(t.distance<BOUNDARY) 
         matchesXXX.add(t); 
       Mat nGray = new Mat(); 
       Mat nLogo = new Mat(); 
       Mat nRgba = new Mat(); 
       Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3); 
       Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3); 
       Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba); 
       Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4); 
      }catch(Exception e){ 
       Log.e("SVK APPLICATION","in Surf "+ e.toString()); 
      } 
      break; 
     case SVKApplikaciaActivity.VIEW_MODE_BRIEF: 
      //TODO BRIEF 
      try{ 
       matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT); 
       matcherHamming.add(briefDescriptors); 
       matcherHamming.train();// proba 

       capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); 
       Imgproc.resize(mGray, mGray,new Size(480,320)); 
       fastDetector.detect(mGray, vectorFrame); 
       briefDescriptor.compute(mGray, vectorFrame, descriptorFrame); 

       matcherHamming.match(descriptorFrame, matches); 
       Vector<DMatch> matchesXXX = new Vector<DMatch>(); 
       for (DMatch t : matches) 
        if(t.distance<BOUNDARY) 
         matchesXXX.add(t); 
       Mat nGray = new Mat(); 
       Mat nLogo = new Mat(); 
       Mat nRgba = new Mat(); 
       Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3); 
       Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3); 
       Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba); 
       Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4); 
      }catch(Exception e){ 
       Log.e("SVK APPLICATION","in Brief "+ e.toString()); 
      } 
      break; 
     case SVKApplikaciaActivity.VIEW_MODE_ORB: 
      //TODO ORB 
      try{ 
       matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT); 
       matcherHamming.add(orbDescriptors); 
       matcherHamming.train();// proba 

       capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); 
       Imgproc.resize(mGray, mGray,new Size(480,320)); 
       orbDetector.detect(mGray, vectorFrame); 
       orbDescriptor.compute(mGray, vectorFrame, descriptorFrame); 

       matcherHamming.match(descriptorFrame, matches); 
       Vector<DMatch> matchesXXX = new Vector<DMatch>(); 
       for (DMatch t : matches) 
        if(t.distance<BOUNDARY) 
         matchesXXX.add(t); 
       Mat nGray = new Mat(); 
       Mat nLogo = new Mat(); 
       Mat nRgba = new Mat(); 
       Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3); 
       Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3); 
       Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba); 
       Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4); 
       }catch(Exception e){ 
        Log.e("SVK APPLICATION","in ORB "+ e.toString()); 
       } 
      break; 
     case SVKApplikaciaActivity.VIEW_MODE_AR: 
      //TODO AR 
      break;  

     } 

     Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888); 

     if (Utils.matToBitmap(mRgba, bmp)) 
      return bmp; 

     bmp.recycle(); 

     return null; 
    } 

    @Override 
    public void run() { 
     super.run(); 

     synchronized (this) { 
      // Explicitly deallocate Mats 
      if (mRgba != null) 
       mRgba.release(); 
      if (mGray != null) 
       mGray.release(); 
      if (mIntermediateMat != null) 
       mIntermediateMat.release(); 

      mRgba = null; 
      mGray = null; 
      mIntermediateMat = null; 
     } 
    } 

} 

回答

3

我想想我知道这个问题。您正在使用的匹配器无法应用于SIFT和SURF描述符。如果你必须使用一个DescriptorMatcher与筛或冲浪,你必须将其设置为这样

DescriptorMatcher matcherBruteForce=DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2); 

由于SURF和SIFT接受FloatBased描述符独家,它会返回一个错误,如果你通过了DescriptorMatcher设置为海明它。

请注意,在您的代码中,您有两个DescriptorMatchers,一个设置为BRUTEFORCE.SL2,另一个设置为HAMMING。确保将正确的一个即BRUTEFORCE.SL2传递给SIFT或SURF。

它不过最好使用基于FLANN的匹配对SIFT或SURF,因为他们从中提取更多数量的关键点相比,ORB和FLANN适合于大型成套关键点了解更多关于在这里 http://computer-vision-talks.com/2011/07/comparison-of-the-opencvs-feature-detection-algorithms-ii/

这里http://opencv.willowgarage.com/documentation/cpp/flann_fast_approximate_nearest_neighbor_search.html

更新: 有可能使用L2或L1距离来匹配uchar描述符。如果您通过DescriptorMatcher设置为BRUTEFORCE,它可能也适用于ORB(虽然效果不佳)

+0

我已经解决了原因,它不是您的答案,因为bruteforce匹配程序已经是SL2。原因是我没有设置单独的筛选orb简要描述符。但是谢谢 – Csabi 2012-04-13 14:30:11

+0

是的..我也意识到了。但是,你的意思是,你没有设置独立的筛选/ orb描述符(请输入代码示例),你做了什么改变? – 2012-04-23 09:28:43

+0

我没有使用fillDB函数,我对所有ORB/BRIEF/SURF/SIFT使用单独的描述符 – Csabi 2012-04-23 19:52:21

1

你确定你的尺码vectorFrame不等于零? 我想我有同样的问题..你的问题会在检测algoritm,我认为它返回一个零vectorFrame当你的图像的颜色代码是不正确的

只是把Log.e("SVK APPLICATION","vectorFrame size = "+ vectorFrame.size());地方