Android:在服务中使用OpenCV VideoCapture

Android: using OpenCV VideoCapture in service

我正在使用Android设备启动时启动的服务。 这是因为我不需要可见的活动。 到目前为止工作正常。 但是现在我试图打开相机(在MyService.onStart中)并进行一些基本的图像处理。 我了解默认的Android相机类需要一个用于视频预览的表面。 这就是为什么我要使用OpenCV的VideoCapture。

但是我得到这个错误:

No implementation found for native
Lorg/opencv/highgui/VideoCapture;.n_VideoCapture:(I)J

我想知道是否是因为我没有使用主Activity的OpenCV示例中使用的以下行。 问题是,如何将其集成到我的服务中以及何时初始化VideoCapture成员。

OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback);

到目前为止,这是我的代码。 大多数OpenCV代码取自OpenCV的NativeCameraView和CameraBridgeViewBase

package com.example.boot;

import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

import android.app.Service;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;

public final class MyService extends Service
{
    private static final String TAG ="MyService";
    private boolean mStopThread;
    private Thread mThread;
    private VideoCapture mCamera;
    private int mFrameWidth;
    private int mFrameHeight;
    private int mCameraIndex = -1;
    private Bitmap mCacheBitmap;

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    public void onDestroy() {

        this.disconnectCamera();

        Toast.makeText(this,"service stopped", Toast.LENGTH_LONG).show();
        Log.d(TAG,"onDestroy");
    }

    @Override
    public void onStart(Intent intent, int startid)
    {          
        Log.d(TAG,"service.onStart: begin");

        try
        {
            if (!connectCamera(640, 480))
                Log.e(TAG,"Could not connect camera");
            else
                Log.d(TAG,"Camera successfully connected");
        }
        catch(Exception e)
        {
            Log.e(TAG,"MyServer.connectCamera throws an exception:" + e.getMessage());
        }

        Toast.makeText(this,"service started", Toast.LENGTH_LONG).show();
        Log.d(TAG,"service.onStart: end");
    }

    private boolean connectCamera(int width, int height) {
        /* First step - initialize camera connection */
        if (!initializeCamera(width, height))
            return false;

        /* now we can start update thread */
        mThread = new Thread(new CameraWorker());
        mThread.start();

        return true;
    }

    private boolean initializeCamera(int width, int height) {
        synchronized (this) {

            if (mCameraIndex == -1)
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
            else
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID + mCameraIndex);

            if (mCamera == null)
                return false;

            if (mCamera.isOpened() == false)
                return false;

            //java.util.List<Size> sizes = mCamera.getSupportedPreviewSizes();

            /* Select the size that fits surface considering maximum size allowed */
            Size frameSize = new Size(width, height);

            mFrameWidth = (int)frameSize.width;
            mFrameHeight = (int)frameSize.height;

            AllocateCache();

            mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameSize.width);
            mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, frameSize.height);
        }

        Log.i(TAG,"Selected camera frame size = (" + mFrameWidth +"," + mFrameHeight +")");

        return true;
    }

    protected void AllocateCache()
    {
        mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
    }

    private void releaseCamera() {
        synchronized (this) {
            if (mCamera != null) {
                mCamera.release();
            }
        }
    }

    private void disconnectCamera() {
        /* 1. We need to stop thread which updating the frames
         * 2. Stop camera and release it
         */
        try {
            mStopThread = true;
            mThread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } finally {
            mThread =  null;
            mStopThread = false;
        }

        /* Now release camera */
        releaseCamera();
    }

    protected void deliverAndDrawFrame(NativeCameraFrame frame)
    {
        Mat modified = frame.rgba();

        boolean bmpValid = true;
        if (modified != null) {
            try {
                Utils.matToBitmap(modified, mCacheBitmap);
            } catch(Exception e) {
                Log.e(TAG,"Mat type:" + modified);
                Log.e(TAG,"Bitmap type:" + mCacheBitmap.getWidth() +"*" + mCacheBitmap.getHeight());
                Log.e(TAG,"Utils.matToBitmap() throws an exception:" + e.getMessage());
                bmpValid = false;
            }
        }
    }    

    private class NativeCameraFrame
    {
        public Mat rgba() {
            mCapture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            return mRgba;
        }

        public Mat gray() {
            mCapture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
            return mGray;
        }

        public NativeCameraFrame(VideoCapture capture) {
            mCapture = capture;
            mGray = new Mat();
            mRgba = new Mat();
        }

        private VideoCapture mCapture;
        private Mat mRgba;
        private Mat mGray;
    };

    private class CameraWorker implements Runnable
    {
        public void run()
        {
            do
            {
                if (!mCamera.grab()) {
                    Log.e(TAG,"Camera frame grab failed");
                    break;
                }

                deliverAndDrawFrame(new NativeCameraFrame(mCamera));

            } while (!mStopThread);
        }
    }
}

您提到的行(initAsync)实际上用于加载OpenCV管理器。 那应该是您要做的第一件事,因此它可能应该出现在onStart()的开头。