android - Pattern recognition algorithms SURF and SIFT throw an exception in OpenCV for Android

Tags: android opencv sift surf

I want to implement a simple application (a modification of sample2) that demonstrates what SIFT, SURF, BRIEF and ORB do, so that a user can easily compare rotation invariance, scale invariance, or speed. But it fails and I cannot work out why, so I am asking for your help. Whenever I try to use SIFT or SURF, I get an exception at the line where I try to match: matcherBruteForce.match(descriptorFrame, matches);

I have a similar AR application that works with these settings, so I cannot figure out where I made a mistake. I tried setting the variable "matcherBruteForce" to BRUTEFORCE, BRUTEFORCE_L1, BRUTEFORCE_SL2, even BRUTEFORCE_HAMMING, but I always get the same exception:

SIFT:

CvException [org.opencv.core.CvException: /home/andreyk/OpenCV2/trunk/opencv_2.3.1.b2/modules/features2d/include/opencv2/features2d/features2d.hpp:2455: error: (-215) DataType<ValueType>::type == matcher.trainDescCollection[iIdx].type() || matcher.trainDescCollection[iIdx].empty() in function static void cv::BruteForceMatcher<Distance>::commonKnnMatchImpl(cv::BruteForceMatcher<Distance>&, const cv::Mat&, std::vector<std::vector<cv::DMatch> >&, int, const std::vector<cv::Mat>&, bool) [with Distance = cv::SL2<float>]
]

SURF:

CvException [org.opencv.core.CvException: /home/andreyk/OpenCV2/trunk/opencv_2.3.1.b2/modules/features2d/include/opencv2/features2d/features2d.hpp:2455: error: (-215) DataType<ValueType>::type == matcher.trainDescCollection[iIdx].type() || matcher.trainDescCollection[iIdx].empty() in function static void cv::BruteForceMatcher<Distance>::commonKnnMatchImpl(cv::BruteForceMatcher<Distance>&, const cv::Mat&, std::vector<std::vector<cv::DMatch> >&, int, const std::vector<cv::Mat>&, bool) [with Distance = cv::SL2<float>]
]

Any help is appreciated.

The full class:

package sk.bolyos.opencv;

import java.util.Vector;

import org.opencv.features2d.DMatch;
import org.opencv.features2d.DescriptorExtractor;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.FeatureDetector;
import org.opencv.features2d.Features2d;
import org.opencv.features2d.KeyPoint;
import org.opencv.highgui.VideoCapture;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.highgui.Highgui;

import sk.bolyos.svk.*;

import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.view.SurfaceHolder;




public class MyView extends CvViewBase {

    private static final int BOUNDARY = 35;

    private Mat mRgba;
    private Mat mGray;
    private Mat mIntermediateMat;
    private Mat mLogoMilka1,mLogoMilka2,mLogoMilka3,mLogoMilka4;
    ///////////////////DETECTORS
    FeatureDetector siftDetector = FeatureDetector.create(FeatureDetector.SIFT);
    FeatureDetector surfDetector = FeatureDetector.create(FeatureDetector.SURF);
    FeatureDetector fastDetector = FeatureDetector.create(FeatureDetector.FAST);
    FeatureDetector orbDetector = FeatureDetector.create(FeatureDetector.ORB);
    ///////////////////DESCRIPTORS
    DescriptorExtractor siftDescriptor = DescriptorExtractor.create(DescriptorExtractor.SIFT);
    DescriptorExtractor surfDescriptor = DescriptorExtractor.create(DescriptorExtractor.SURF);
    DescriptorExtractor briefDescriptor = DescriptorExtractor.create(DescriptorExtractor.BRIEF);
    DescriptorExtractor orbDescriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    ///////////////////DATABASE
    Vector<KeyPoint> vectorMilka1 = new Vector<KeyPoint>();
    Vector<KeyPoint> vectorMilka2 = new Vector<KeyPoint>();
    Vector<KeyPoint> vectorMilka3 = new Vector<KeyPoint>();
    Vector<KeyPoint> vectorMilka4 = new Vector<KeyPoint>();
    Mat descriptorMilka1 = new Mat();
    Mat descriptorMilka2 = new Mat();
    Mat descriptorMilka3 = new Mat(); 
    Mat descriptorMilka4 = new Mat();
    ///////////////////VIDEO
    Vector<KeyPoint> vectorFrame = new Vector<KeyPoint>();
    Mat descriptorFrame = new Mat();

    DescriptorMatcher matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    DescriptorMatcher matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2);
    Vector<DMatch> matches = new Vector<DMatch>();
    Vector<Mat> siftDescriptors = new Vector<Mat>();
    Vector<Mat> surfDescriptors = new Vector<Mat>();
    Vector<Mat> briefDescriptors = new Vector<Mat>();
    Vector<Mat> orbDescriptors = new Vector<Mat>();

    public MyView(Context context) {
        super(context);
        // TODO Auto-generated constructor stub
        try{
            /*
            if (mLogoMilka1 == null){
                mLogoMilka1 = new Mat();
                mLogoMilka1 = Utils.loadResource(getContext(), R.drawable.milkalogo);
                fillDB(mLogoMilka1,vectorMilka1,descriptorMilka1);
            }
            if (mLogoMilka2 == null){
                mLogoMilka2 = new Mat();
                mLogoMilka2 = Utils.loadResource(getContext(), R.drawable.milkalogom);
                fillDB(mLogoMilka2,vectorMilka2,descriptorMilka2);
            }
            if (mLogoMilka3 == null){
                mLogoMilka3 = new Mat();
                mLogoMilka3 = Utils.loadResource(getContext(), R.drawable.milkalogol);
                fillDB(mLogoMilka3,vectorMilka3,descriptorMilka3);
            }*/
            if (mLogoMilka4 == null){
                mLogoMilka4 = new Mat();
                mLogoMilka4 = Utils.loadResource(getContext(), R.drawable.milkalogolc);
                fillDB(mLogoMilka4,vectorMilka4,descriptorMilka4);
            }

        }catch(Exception e){
            Log.e( "SVK APPLICATION", "in MyView constructor "+e.toString());
        }
    }

    public void fillDB(Mat mLogo,Vector<KeyPoint> vector,Mat descriptor){

      //SIFT 
        siftDetector.detect( mLogo, vector );
        siftDescriptor.compute(mLogo, vector, descriptor);
        siftDescriptors.add(descriptor);
      //SURF 
        surfDetector.detect( mLogo, vector );
        surfDescriptor.compute(mLogo, vector, descriptor);
        surfDescriptors.add(descriptor);
      //FAST+BRIEF 
        fastDetector.detect( mLogo, vector );
        briefDescriptor.compute(mLogo, vector, descriptor);
        briefDescriptors.add(descriptor);
      //ORB 
        orbDetector.detect( mLogo, vector );
        orbDescriptor.compute(mLogo, vector, descriptor);
        orbDescriptors.add(descriptor);

    }


    @Override
    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
        super.surfaceChanged(_holder, format, width, height);

        synchronized (this) {
            // initialize Mats before usage
            mGray = new Mat();
            mRgba = new Mat();
            mIntermediateMat = new Mat();
            matches = new Vector<DMatch>();
            vectorFrame = new Vector<KeyPoint>();
            descriptorFrame = new Mat(); 
        }
    }

    @Override
    protected Bitmap processFrame(VideoCapture capture) {
        // TODO Auto-generated method stub
        switch (SVKApplikaciaActivity.viewMode) {
        case SVKApplikaciaActivity.VIEW_MODE_SIFT:
            //TODO SIFT
            try{
                //matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
                //matcherBruteForce.clear();
                matcherBruteForce.add(siftDescriptors);
                matcherBruteForce.train();// proba

                capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
                Imgproc.resize(mGray, mGray,new Size(480,320)); 
                siftDetector.detect( mGray, vectorFrame );
                siftDescriptor.compute(mGray, vectorFrame, descriptorFrame);

                matcherBruteForce.match(descriptorFrame, matches);  
                Vector<DMatch> matchesXXX = new Vector<DMatch>();
                for (DMatch t : matches)
                    if(t.distance<BOUNDARY)
                        matchesXXX.add(t);
                Mat nGray = new Mat();
                Mat nLogo = new Mat();
                Mat nRgba = new Mat();
                Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3);
                Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3);
                Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba);
                Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4);
            }catch(Exception e){
                Log.e( "SVK APPLICATION","in SIFT "+ e.toString());
            }
            break;
        case SVKApplikaciaActivity.VIEW_MODE_SURF:
            //TODO SURF
            try{
                //matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
                //matcherBruteForce.clear();
                matcherBruteForce.add(surfDescriptors);
                matcherBruteForce.train();// proba

                capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
                Imgproc.resize(mGray, mGray,new Size(480,320)); 
                surfDetector.detect( mGray, vectorFrame );
                surfDescriptor.compute(mGray, vectorFrame, descriptorFrame);

                matcherBruteForce.match(descriptorFrame, matches);  
                Vector<DMatch> matchesXXX = new Vector<DMatch>();
                for (DMatch t : matches)
                    if(t.distance<BOUNDARY)
                        matchesXXX.add(t);
                Mat nGray = new Mat();
                Mat nLogo = new Mat();
                Mat nRgba = new Mat();
                Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3);
                Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3);
                Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba);
                Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4);
            }catch(Exception e){
                Log.e( "SVK APPLICATION","in Surf "+ e.toString());
            }
            break;
        case SVKApplikaciaActivity.VIEW_MODE_BRIEF:
            //TODO BRIEF
            try{
                matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
                matcherHamming.add(briefDescriptors);
                matcherHamming.train();// proba

                capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
                Imgproc.resize(mGray, mGray,new Size(480,320)); 
                fastDetector.detect( mGray, vectorFrame );
                briefDescriptor.compute(mGray, vectorFrame, descriptorFrame);

                matcherHamming.match(descriptorFrame, matches); 
                Vector<DMatch> matchesXXX = new Vector<DMatch>();
                for (DMatch t : matches)
                    if(t.distance<BOUNDARY)
                        matchesXXX.add(t);
                Mat nGray = new Mat();
                Mat nLogo = new Mat();
                Mat nRgba = new Mat();
                Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3);
                Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3);
                Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba);
                Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4);
            }catch(Exception e){
                Log.e( "SVK APPLICATION","in Brief "+ e.toString());
            }
            break;
        case SVKApplikaciaActivity.VIEW_MODE_ORB:
            //TODO ORB
            try{
                matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
                matcherHamming.add(orbDescriptors);
                matcherHamming.train();// proba

                capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
                Imgproc.resize(mGray, mGray,new Size(480,320)); 
                orbDetector.detect( mGray, vectorFrame );
                orbDescriptor.compute(mGray, vectorFrame, descriptorFrame);

                matcherHamming.match(descriptorFrame, matches); 
                Vector<DMatch> matchesXXX = new Vector<DMatch>();
                for (DMatch t : matches)
                    if(t.distance<BOUNDARY)
                        matchesXXX.add(t);
                Mat nGray = new Mat();
                Mat nLogo = new Mat();
                Mat nRgba = new Mat();
                Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3);
                Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3);
                Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba);
                Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4);
                }catch(Exception e){
                    Log.e( "SVK APPLICATION","in ORB "+ e.toString());
                }
            break;  
        case SVKApplikaciaActivity.VIEW_MODE_AR:
            //TODO AR
            break;    

        }

        Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

        if (Utils.matToBitmap(mRgba, bmp))
            return bmp;

        bmp.recycle();

        return null;
    }

    @Override
    public void run() {
        super.run();

        synchronized (this) {
            // Explicitly deallocate Mats
            if (mRgba != null)
                mRgba.release();
            if (mGray != null)
                mGray.release();
            if (mIntermediateMat != null)
                mIntermediateMat.release();

            mRgba = null;
            mGray = null;
            mIntermediateMat = null;
        }
    }

}

Best answer

I think I know what the problem is. The matcher you are using cannot be applied to SIFT and SURF descriptors. If you want to use a DescriptorMatcher with SIFT or SURF, you have to create it like this:

DescriptorMatcher matcherBruteForce=DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2);

Since SURF and SIFT produce float-based descriptors, passing them a DescriptorMatcher set to HAMMING will raise this error.

Note that your code has two DescriptorMatchers, one set to BRUTEFORCE_SL2 and the other to HAMMING. Make sure you pass the correct one (i.e. the BRUTEFORCE_SL2 matcher) to SIFT or SURF.
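For reference, a minimal sketch of pairing each descriptor type with a compatible matcher, reusing the variable names from the question's class and assuming the same OpenCV 2.3.x Android Java API:

// Float descriptors (SIFT, SURF) -> an L2-family brute-force matcher
DescriptorMatcher floatMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2);
floatMatcher.add(siftDescriptors);   // Vector<Mat> of CV_32F training descriptors
floatMatcher.train();
floatMatcher.match(descriptorFrame, matches);

// Binary descriptors (BRIEF, ORB) -> a Hamming brute-force matcher
DescriptorMatcher binaryMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
binaryMatcher.add(orbDescriptors);   // Vector<Mat> of CV_8U training descriptors
binaryMatcher.train();
binaryMatcher.match(descriptorFrame, matches);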

However, it is better to use a FLANN-based matcher with SIFT or SURF, because they extract far more keypoints than ORB and FLANN is well suited to large keypoint sets. Read more here: http://computer-vision-talks.com/2011/07/comparison-of-the-opencvs-feature-detection-algorithms-ii/

and here: http://opencv.willowgarage.com/documentation/cpp/flann_fast_approximate_nearest_neighbor_search.html
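A sketch of the FLANN-based variant, again reusing the question's fields; the default FLANN matcher works with float (CV_32F) descriptors, so it fits SIFT and SURF but not BRIEF or ORB:

DescriptorMatcher flannMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
flannMatcher.add(siftDescriptors);   // SIFT/SURF descriptors are CV_32F
flannMatcher.train();
flannMatcher.match(descriptorFrame, matches);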

Update: it is possible to use L2 or L1 distance to match uchar descriptors. If you set the DescriptorMatcher to BRUTEFORCE, it may also work for ORB (though with poor results).
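For illustration, what that update describes would look like the sketch below; whether it actually runs depends on the OpenCV version, and Hamming distance remains the better metric for binary descriptors:

// Plain L2 brute-force matcher applied to uchar (ORB) descriptors, per the update above.
DescriptorMatcher l2Matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE);
l2Matcher.add(orbDescriptors);
l2Matcher.train();
l2Matcher.match(descriptorFrame, matches);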

Regarding this question about the SURF and SIFT pattern recognition algorithms throwing exceptions in OpenCV for Android, a similar question was found on Stack Overflow: https://stackoverflow.com/questions/9836651/
