Android: using OpenCV VideoCapture in service

Posted 2020-07-22 16:12

Question:

I'm using a service that is started when the Android device boots, because I don't need a visible activity. That part works fine so far (the boot wiring is sketched below). But now I'm trying to open the camera (in MyService.onStart) and do some basic image processing. As I understand it, the standard Android Camera class needs a surface for the video preview, which is why I want to use OpenCV's VideoCapture instead.
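For context, the boot wiring is the usual BOOT_COMPLETED receiver that starts the service. A minimal sketch follows; the class name is illustrative, and the receiver additionally needs a <receiver> entry plus the RECEIVE_BOOT_COMPLETED permission in AndroidManifest.xml:

package com.example.boot;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;

// Illustrative receiver: starts MyService once the device finishes booting.
public final class BootReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) {
            // Start the background service; no Activity is involved.
            context.startService(new Intent(context, MyService.class));
        }
    }
}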

But when the service creates the VideoCapture, I get this error:

No implementation found for native Lorg/opencv/highgui/VideoCapture;.n_VideoCapture:(I)J

I'm wondering if this is because I don't have the following line, which the OpenCV examples call from their main Activity. The question is how to integrate this into my service, and when to initialize the VideoCapture member.

OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback);

Here's my code so far. Most of the OpenCV code is taken from OpenCV's NativeCameraView and CameraBridgeViewBase classes.

package com.example.boot;

import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

import android.app.Service;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;

public final class MyService extends Service
{
    private static final String TAG = "MyService";
    private boolean mStopThread;
    private Thread mThread;
    private VideoCapture mCamera;
    private int mFrameWidth;
    private int mFrameHeight;
    private int mCameraIndex = -1;
    private Bitmap mCacheBitmap;

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onDestroy() {

        this.disconnectCamera();

        Toast.makeText(this, "service stopped", Toast.LENGTH_LONG).show();
        Log.d(TAG, "onDestroy");
        super.onDestroy();
    }

    // Note: Service.onStart has been deprecated in favour of onStartCommand
    // since API level 5; it is kept here as in the original code.
    @Override
    public void onStart(Intent intent, int startid)
    {
        Log.d(TAG, "service.onStart: begin");

        try
        {
            if (!connectCamera(640, 480))
                Log.e(TAG, "Could not connect camera");
            else
                Log.d(TAG, "Camera successfully connected");
        }
        catch(Exception e)
        {
            Log.e(TAG, "MyServer.connectCamera throws an exception: " + e.getMessage());
        }

        Toast.makeText(this, "service started", Toast.LENGTH_LONG).show();
        Log.d(TAG, "service.onStart: end");
    }

    private boolean connectCamera(int width, int height) {
        /* First step - initialize camera connection */
        if (!initializeCamera(width, height))
            return false;

        /* Now we can start the update thread */
        mThread = new Thread(new CameraWorker());
        mThread.start();

        return true;
    }

    private boolean initializeCamera(int width, int height) {
        synchronized (this) {

            // CV_CAP_ANDROID selects the default camera; adding an index
            // selects a specific device camera.
            if (mCameraIndex == -1)
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
            else
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID + mCameraIndex);

            // 'new' never returns null in Java, so the meaningful failure
            // check is whether the capture actually opened.
            if (!mCamera.isOpened())
                return false;

            //java.util.List<Size> sizes = mCamera.getSupportedPreviewSizes();

            /* Request the desired capture frame size */
            Size frameSize = new Size(width, height);

            mFrameWidth = (int)frameSize.width;
            mFrameHeight = (int)frameSize.height;

            allocateCache();

            mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameSize.width);
            mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, frameSize.height);
        }

        Log.i(TAG, "Selected camera frame size = (" + mFrameWidth + ", " + mFrameHeight + ")");

        return true;
    }

    protected void allocateCache()
    {
        // Pre-allocate the Bitmap that camera frames are converted into.
        mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
    }

    private void releaseCamera() {
        synchronized (this) {
            if (mCamera != null) {
                mCamera.release();
            }
        }
    }

    private void disconnectCamera() {
        /* 1. Stop the thread that is updating the frames.
         * 2. Stop the camera and release it.
         */
        try {
            mStopThread = true;
            // Guard against the case where connectCamera never succeeded
            // and the worker thread was never created.
            if (mThread != null)
                mThread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } finally {
            mThread = null;
            mStopThread = false;
        }
        }

        /* Now release camera */
        releaseCamera();
    }

    protected void deliverAndDrawFrame(NativeCameraFrame frame)
    {
        Mat modified = frame.rgba();

        // Convert the frame into the cached Bitmap. In CameraBridgeViewBase
        // this flag gates drawing to a surface; the service has no surface,
        // so it only records whether the conversion succeeded.
        boolean bmpValid = true;
        if (modified != null) {
            try {
                Utils.matToBitmap(modified, mCacheBitmap);
            } catch(Exception e) {
                Log.e(TAG, "Mat type: " + modified);
                Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
                Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
                bmpValid = false;
            }
        }
    }

    private class NativeCameraFrame 
    {
        // Retrieve the last grabbed frame in RGBA format.
        public Mat rgba() {
            mCapture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            return mRgba;
        }

        // Retrieve the last grabbed frame as greyscale.
        public Mat gray() {
            mCapture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
            return mGray;
        }

        public NativeCameraFrame(VideoCapture capture) {
            mCapture = capture;
            mGray = new Mat();
            mRgba = new Mat();
        }

        private VideoCapture mCapture;
        private Mat mRgba;
        private Mat mGray;
    }

    private class CameraWorker implements Runnable
    {
        @Override
        public void run()
        {
            // Grab frames in a loop until the service asks the thread to stop.
            do
            {
                if (!mCamera.grab()) {
                    Log.e(TAG, "Camera frame grab failed");
                    break;
                }

                deliverAndDrawFrame(new NativeCameraFrame(mCamera));

            } while (!mStopThread);
        }
    }
}

Answer 1:

The line you mention (initAsync) is what actually loads the native OpenCV libraries, via the OpenCV Manager app. Your UnsatisfiedLinkError means that the native code was never loaded. Loading has to happen before any other OpenCV call, so it should be the very first thing you do, i.e. at the beginning of onStart(). Because the load is asynchronous, the camera should only be connected from the callback's onManagerConnected(), once it reports success.
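Something along these lines should work. This is a minimal, untested sketch that reuses the connectCamera() and TAG from your code, following the standard BaseLoaderCallback pattern from the OpenCV samples:

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        if (status == LoaderCallbackInterface.SUCCESS) {
            // The native libraries are loaded; it is now safe to create
            // the VideoCapture.
            if (!connectCamera(640, 480))
                Log.e(TAG, "Could not connect camera");
        } else {
            super.onManagerConnected(status);
        }
    }
};

@Override
public void onStart(Intent intent, int startid) {
    // initAsync only kicks off the load and returns immediately; do not
    // touch any OpenCV class until onManagerConnected reports SUCCESS.
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback);
}

Note that initAsync goes through the separate OpenCV Manager app, and if it is not installed the user is prompted to install it, which is awkward for a headless boot service. In that case, bundling the native libraries with your app and using static initialization (as described in the OpenCV Android docs) is a possible alternative.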