android - Drawing lines or rectangles with JNI OpenCV on Android

Tags: android c++ opencv android-ndk java-native-interface

I am having a lot of trouble with the drawing functions in my OpenCV Android JNI application. I track objects in my native function and pass the vector of rectangles back to Java through a Mat, then draw the rectangles on the RGBA Mat in Java, and everything works fine. Now I want to draw those rectangles directly in native code, inside my function, before returning to Java, but it does not work. I have tried drawing all sorts of things from JNI and still nothing shows up: the drawing calls do not seem to change the Mat that comes back to Java. Any help would be appreciated.

Here is my native code:

JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect
(JNIEnv * jenv, jclass, jlong thiz, jlong imageGray, jlong faces)
{
    LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect enter");
    try
    {
        vector<Rect> RectFaces;
        ((DetectionBasedTracker*)thiz)->process(*((Mat*)imageGray));
        ((DetectionBasedTracker*)thiz)->getObjects(RectFaces);
        for(int i=0; i<RectFaces.size(); i++)
        {
            Point p,k;
            p.x = ((Rect)RectFaces[i]).x;
            p.y = ((Rect)RectFaces[i]).y;
            k.x = ((Rect)RectFaces[i]).x + ((Rect)RectFaces[i]).width;
            k.y = ((Rect)RectFaces[i]).y + ((Rect)RectFaces[i]).height;
            rectangle(*((Mat*)imageGray), p, k,Scalar(0,255,255, 255), -1, 8);
        }
        vector_Rect_to_Mat(RectFaces, *((Mat*)faces));
    }
    catch(cv::Exception& e)
    {
        LOGD("nativeCreateObject caught cv::Exception: %s", e.what());
        jclass je = jenv->FindClass("org/opencv/core/CvException");
        if(!je)
            je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, e.what());
    }
    catch (...)
    {
        LOGD("nativeDetect caught unknown exception");
        jclass je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
    }
    LOGD("Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect exit");
}

And the Java code that returns the RGBA Mat:

public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

        mRgba = inputFrame.rgba();
        mGray = inputFrame.gray();
        //Imgproc.Canny(mGray, mCanny, CANNY_MIN_TRESHOLD, CANNY_MAX_TRESHOLD);
        Point center = new Point(mRgba.width() / 2, mRgba.height() / 2);
        //must be 1 channels 8 bit!
        /*//do Hough transform to find lines
        double rho = 1;
        double theta = Math.PI/180;
        Imgproc.HoughLinesP(mCanny, mLines, rho, theta, HOUGH_TRESHOLD, HOUGH_MIN_LINE_LENGTH, HOUGH_MAX_LINE_GAP);*/

        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
            mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
        }

        MatOfRect faces = new MatOfRect();

        if (mDetectorType == JAVA_DETECTOR) {
            if (mJavaDetector != null)
                mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2,
                        2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
                        new Size());
        } else if (mDetectorType == NATIVE_DETECTOR) {
            if (mNativeDetector != null)
                mNativeDetector.detect(mGray, faces);
                //mNativeDetector.findLines(mGray, mCanny);
        } else {
            Log.e(TAG, "Detection method is not selected!");
        }

        /*Rect[] facesArray = faces.toArray();
        for (int i = 0; i < facesArray.length; i++) {
            if (center.x > facesArray[i].tl().x
                    && center.x < facesArray[i].br().x) {
                if (center.y > facesArray[i].tl().y
                        && center.y < facesArray[i].br().y) {
                    Core.rectangle(mRgba, facesArray[i].tl(),
                            facesArray[i].br(), CAR_RECT_COLOR_RED, 3);
                } else {
                    Core.rectangle(mRgba, facesArray[i].tl(),
                            facesArray[i].br(), CAR_RECT_COLOR_YELLOW, 3);
                }
            } else {
                Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(),
                        CAR_RECT_COLOR_YELLOW, 3);
            }
        }*/

        Point p1 = new Point(mRgba.width() / 2, 0);
        Point p2 = new Point(mRgba.width() / 2, mRgba.height());

        Point p3 = new Point(0, mRgba.height() / 2);
        Point p4 = new Point(mRgba.width(), mRgba.height() / 2);

        Core.line(mRgba, p3, p4, AXIS_COLOR);
        Core.line(mRgba, p1, p2, AXIS_COLOR);


        return mRgba;
        //return mLines;
    }

Best Answer

Why are you passing the grayscale image to the function? If you want something to show up on screen, you should pass the RGBA data, because that is the Mat that onCameraFrame returns for display.
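One way to do that is to hand the RGBA frame to the native function as well and draw on it there, while still running detection on the grayscale image. This is only a minimal sketch under that assumption: the extra imageRgba parameter and the adjusted Java signature are illustrative and are not part of the original sample.

// Assumes the Java declaration is changed to something like
//   private static native void nativeDetect(long thiz, long inputGray, long inputRgba, long faces);
// and the caller passes mRgba.getNativeObjAddr() as inputRgba.
JNIEXPORT void JNICALL Java_org_opencv_samples_facedetect_DetectionBasedTracker_nativeDetect
(JNIEnv * jenv, jclass, jlong thiz, jlong imageGray, jlong imageRgba, jlong faces)
{
    try
    {
        vector<Rect> RectFaces;
        // Detection still runs on the grayscale image.
        ((DetectionBasedTracker*)thiz)->process(*((Mat*)imageGray));
        ((DetectionBasedTracker*)thiz)->getObjects(RectFaces);

        // Draw on the RGBA Mat that is actually displayed, not on the gray one.
        Mat& rgba = *((Mat*)imageRgba);
        for (size_t i = 0; i < RectFaces.size(); i++)
        {
            // 3-pixel yellow outline (RGBA order) around each detected object.
            rectangle(rgba, RectFaces[i].tl(), RectFaces[i].br(),
                      Scalar(255, 255, 0, 255), 3, 8);
        }
        vector_Rect_to_Mat(RectFaces, *((Mat*)faces));
    }
    catch (cv::Exception& e)
    {
        jclass je = jenv->FindClass("org/opencv/core/CvException");
        if (!je)
            je = jenv->FindClass("java/lang/Exception");
        jenv->ThrowNew(je, e.what());
    }
}

On the Java side the call would then become something like mNativeDetector.detect(mGray, mRgba, faces) after the wrapper gains the matching parameter. Alternatively, keep the native signature unchanged and simply draw the returned faces on mRgba inside onCameraFrame, which is what the commented-out Core.rectangle block in the question already does.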

Regarding android - Drawing lines or rectangles with JNI OpenCV on Android, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/20875959/
