android - How do I implement a gyroscope sensor in Android?

Tags: android, sensors, gyroscope

I am trying to write the simplest possible gyroscope implementation (it should only log the screen's orientation when it changes). Can someone provide a simple example?


This is what I am trying at the moment:

public class LessonFiveGLSurfaceView extends GLSurfaceView implements SensorEventListener
{
    private LessonFiveRenderer mRenderer;

    public LessonFiveGLSurfaceView(Context context)
    {
        super(context);
        System.out.println("test");
    }

    @Override
    public void onSensorChanged(SensorEvent event)
    {
        // output the roll, pitch and yaw values
        System.out.println("Orientation X (Roll) :" + Float.toString(event.values[2]) + "\n" +
                "Orientation Y (Pitch) :" + Float.toString(event.values[1]) + "\n" +
                "Orientation Z (Yaw) :" + Float.toString(event.values[0]));
    }

But I get the error: "The type LessonFiveGLSurfaceView must implement the inherited abstract method SensorEventListener.onAccuracyChanged(Sensor, int)".
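
The error just means that SensorEventListener declares a second callback that the class does not override yet. A minimal sketch of the missing method (keeping the rest of the class as it is) would be:

@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
    // No-op: the override is required by the SensorEventListener interface,
    // even if accuracy changes are ignored.
}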

Best Answer

Here is a class I came up with that abstracts the use of the orientation sensors on Android. It smooths the input data a little and outputs the correct orientation for both tablets and phones, which do not share the same natural orientation (phones default to portrait mode, while tablets default to landscape):

import android.content.Context;
import android.content.res.Configuration;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.WindowManager;

/**
 * Uses the sensor API to determine the phone's orientation.
 * By registering for events from the accelerometer and the magnetometer (compass),
 * a rotation matrix is computed. This matrix can be used to rotate an
 * OpenGL scene.
 */
public class PhoneGyroscope implements SensorEventListener {
private static final String TAG = PhoneGyroscope.class.getSimpleName();
private SensorManager mSensorManager;
private WindowManager mWindowManager;
private float[] mAccelGravityData = new float[3];
private float[] mGeomagneticData = new float[3];
private float[] mRotationMatrix = new float[16];
private float[] bufferedAccelGData = new float[3];
private float[] bufferedMagnetData = new float[3];

public PhoneGyroscope(Context context) {
    mSensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
    mWindowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
}

public void start() {
    mSensorManager.registerListener(this, mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), SensorManager.SENSOR_DELAY_GAME );
    mSensorManager.registerListener(this, mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD), SensorManager.SENSOR_DELAY_GAME );
}

public void stop() {
    mSensorManager.unregisterListener(this);
}

private void loadNewSensorData(SensorEvent event) {
    final int type = event.sensor.getType();
    if (type == Sensor.TYPE_ACCELEROMETER) {
        //Smoothing the sensor data a bit
        mAccelGravityData[0]=(mAccelGravityData[0]*2+event.values[0])*0.33334f;
        mAccelGravityData[1]=(mAccelGravityData[1]*2+event.values[1])*0.33334f;
        mAccelGravityData[2]=(mAccelGravityData[2]*2+event.values[2])*0.33334f;
    }
    if (type == Sensor.TYPE_MAGNETIC_FIELD) {
        //Smoothing the sensor data a bit
        mGeomagneticData[0]=(mGeomagneticData[0]*1+event.values[0])*0.5f;
        mGeomagneticData[1]=(mGeomagneticData[1]*1+event.values[1])*0.5f;
        mGeomagneticData[2]=(mGeomagneticData[2]*1+event.values[2])*0.5f;

        float x = mGeomagneticData[0];
        float y = mGeomagneticData[1];
        float z = mGeomagneticData[2];
        double field = Math.sqrt(x*x+y*y+z*z);
        if (field < 25 || field > 65) {
            // Earth's magnetic field is normally about 25-65 microtesla; values outside
            // this range suggest the magnetometer needs recalibration.
            Log.e(TAG, "loadNewSensorData : wrong magnetic data, need a recalibration, field = " + field);
        }
    }
}


/*
 * Weighted root-mean-square low-pass filter: each new sample is blended into the
 * running buffer (with weight 1/(1 + buffer) in the squared domain). The +200
 * offset shifts the values into a positive range before squaring, so the sign is
 * not lost, and is subtracted again at the end.
 */
private void rootMeanSquareBuffer(float[] target, float[] values) {

    final float amplification = 200.0f;
    final float buffer = 20.0f;

    target[0] += amplification;
    target[1] += amplification;
    target[2] += amplification;
    values[0] += amplification;
    values[1] += amplification;
    values[2] += amplification;

    target[0] = (float) (Math
            .sqrt((target[0] * target[0] * buffer + values[0] * values[0])
                    / (1 + buffer)));
    target[1] = (float) (Math
            .sqrt((target[1] * target[1] * buffer + values[1] * values[1])
                    / (1 + buffer)));
    target[2] = (float) (Math
            .sqrt((target[2] * target[2] * buffer + values[2] * values[2])
                    / (1 + buffer)));

    target[0] -= amplification;
    target[1] -= amplification;
    target[2] -= amplification;
    values[0] -= amplification;
    values[1] -= amplification;
    values[2] -= amplification;
}


/*
 * Tablets have LANDSCAPE as their default orientation (so the screen rotation is 0 or 180
 * while in LANDSCAPE), whereas smartphones default to PORTRAIT.
 * The following code is used to tell tablets and smartphones apart:
 */
public static int getScreenOrientation(Display display){
    int orientation;

    if(display.getWidth()==display.getHeight()){
        orientation = Configuration.ORIENTATION_SQUARE;
    }else{ // if the width is less than the height it is portrait
        if(display.getWidth() < display.getHeight()){
            orientation = Configuration.ORIENTATION_PORTRAIT;
        }else{ // otherwise it is landscape
            orientation = Configuration.ORIENTATION_LANDSCAPE;
        }
    }   
    return orientation;
}
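
/*
 * Note: Display.getWidth()/getHeight() and Configuration.ORIENTATION_SQUARE are
 * deprecated on newer API levels. A hypothetical alternative (not part of the
 * original answer) is to read the orientation directly from the resources
 * configuration, given a Context:
 *
 *     int orientation = context.getResources().getConfiguration().orientation;
 */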

private void debugSensorData(SensorEvent event) {
    StringBuilder builder = new StringBuilder();
    builder.append("--- SENSOR ---");
    builder.append("\nName: ");
    Sensor sensor = event.sensor;
    builder.append(sensor.getName());
    builder.append("\nType: ");
    builder.append(sensor.getType());
    builder.append("\nVendor: ");
    builder.append(sensor.getVendor());
    builder.append("\nVersion: ");
    builder.append(sensor.getVersion());
    builder.append("\nMaximum Range: ");
    builder.append(sensor.getMaximumRange());
    builder.append("\nPower: ");
    builder.append(sensor.getPower());
    builder.append("\nResolution: ");
    builder.append(sensor.getResolution());

    builder.append("\n\n--- EVENT ---");
    builder.append("\nAccuracy: ");
    builder.append(event.accuracy);
    builder.append("\nTimestamp: ");
    builder.append(event.timestamp);
    builder.append("\nValues:\n");
    for (int i = 0; i < event.values.length; i++) {
        // ...
        builder.append("   [");
        builder.append(i);
        builder.append("] = ");
        builder.append(event.values[i]);
        builder.append("\n");
    }

    Log.d(TAG, builder.toString());
}

@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
    // Nothing to do here, but SensorEventListener requires the method to be implemented.
}

/* Sensor processing / rotation matrix
 * Each time a sensor update happens, the onSensorChanged method is called.
 * This is where we receive the raw sensor data.
 * First we take the accelerometer and magnetometer data and smooth it to reduce jitter.
 * From there we can call getRotationMatrix with the smoothed accelerometer and magnetometer data.
 * The rotation matrix it outputs is mapped so that the Y axis points out of the top of the phone:
 * when the phone lies flat on a table facing north, it reads {0,0,0}.
 * We need it to read {0,0,0} when the phone points north while standing vertically, so we simply
 * remap the coordinate system so that the X axis is negated.
 * The following code shows how this is achieved.
 */
@Override
public void onSensorChanged(SensorEvent event) {

    if (event.accuracy == SensorManager.SENSOR_STATUS_UNRELIABLE) {
        return;
    }

    loadNewSensorData(event);
    int type=event.sensor.getType();

    if (mAccelGravityData != null && mGeomagneticData != null) {

        if ((type==Sensor.TYPE_MAGNETIC_FIELD) || (type==Sensor.TYPE_ACCELEROMETER)) {
            rootMeanSquareBuffer(bufferedAccelGData, mAccelGravityData);
            rootMeanSquareBuffer(bufferedMagnetData, mGeomagneticData);
            if (SensorManager.getRotationMatrix(mRotationMatrix, null, bufferedAccelGData, bufferedMagnetData)){

                Display display = mWindowManager.getDefaultDisplay();
                int orientation = getScreenOrientation(display);
                int rotation = display.getRotation();

                boolean dontRemapCoordinates  = (orientation == Configuration.ORIENTATION_LANDSCAPE && rotation == Surface.ROTATION_0) ||
                        (orientation == Configuration.ORIENTATION_LANDSCAPE && rotation == Surface.ROTATION_180) ||
                        (orientation == Configuration.ORIENTATION_PORTRAIT && rotation == Surface.ROTATION_90) ||
                        (orientation == Configuration.ORIENTATION_PORTRAIT && rotation == Surface.ROTATION_270);

                if (!dontRemapCoordinates) {
                    SensorManager.remapCoordinateSystem(
                            mRotationMatrix,
                            SensorManager.AXIS_Y,
                            SensorManager.AXIS_MINUS_X,
                            mRotationMatrix);
                }
                debugSensorData(event);
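                // Hypothetical addition (not in the original answer): extract the
                // azimuth/pitch/roll angles, in radians, from the rotation matrix.
                // This is the orientation the question wants to log.
                float[] orientationAngles = new float[3];
                SensorManager.getOrientation(mRotationMatrix, orientationAngles);
                Log.d(TAG, "azimuth=" + orientationAngles[0]
                        + " pitch=" + orientationAngles[1]
                        + " roll=" + orientationAngles[2]);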
            }
        }       
    }

}
}
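
A minimal usage sketch (the Activity name is hypothetical; the original answer does not show how the class is wired up) registers the listeners only while the Activity is in the foreground:

import android.app.Activity;
import android.os.Bundle;

public class SensorDemoActivity extends Activity {
    private PhoneGyroscope mGyro;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mGyro = new PhoneGyroscope(this);
    }

    @Override
    protected void onResume() {
        super.onResume();
        mGyro.start();  // register the accelerometer and magnetometer listeners
    }

    @Override
    protected void onPause() {
        mGyro.stop();   // always unregister to avoid draining the battery
        super.onPause();
    }
}

To hand the computed angles to the rest of the app, one would typically add a getter for the rotation matrix or invoke a callback from onSensorChanged.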

A similar question about "android - How do I implement a gyroscope sensor in Android?" can be found on Stack Overflow: https://stackoverflow.com/questions/17776051/
