
Managing a camera preview SurfaceView across the entire Activity lifecycle?

I am trying to put the CameraPreview defined in the Android Developer Guide onto the ApiDemos/OS/Sensors app screen. Half the time it works as expected. But every other time I restart the test app (after pausing it by pressing the device's Home icon), the following exception is logged:

06-13 14:10:17.369: D/SENSORS_TEST(11888): supported preview width x height: 640 x 480
06-13 14:10:17.369: D/SENSORS_TEST(11888): supported preview width x height: 320 x 240
06-13 14:10:17.369: D/SENSORS_TEST(11888): supported preview width x height: 176 x 144
06-13 14:10:17.600: D/dalvikvm(11888): GC_FOR_ALLOC freed 56K, 3% free 9091K/9347K, paused 22ms
06-13 14:10:17.600: I/dalvikvm-heap(11888): Grow heap (frag case) to 9.610MB for 695056-byte allocation
06-13 14:10:17.631: D/dalvikvm(11888): GC_CONCURRENT freed 1K, 3% free 9768K/10055K, paused 2ms+2ms
06-13 14:10:31.510: D/AndroidRuntime(11888): Shutting down VM
06-13 14:10:31.510: W/dalvikvm(11888): threadid=1: thread exiting with uncaught exception (group=0x40a351f8)
06-13 14:10:31.518: E/AndroidRuntime(11888): FATAL EXCEPTION: main
06-13 14:10:31.518: E/AndroidRuntime(11888): java.lang.RuntimeException: Method called after release()
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.hardware.Camera.setPreviewDisplay(Native Method)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.hardware.Camera.setPreviewDisplay(Camera.java:405)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at com.example.sensor.Sensors10Activity$CameraPreview.surfaceCreated(Sensors10Activity.java:221)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.SurfaceView.updateWindow(SurfaceView.java:533)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.SurfaceView.onWindowVisibilityChanged(SurfaceView.java:226)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.View.dispatchWindowVisibilityChanged(View.java:5839)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:945)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:945)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:945)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewGroup.dispatchWindowVisibilityChanged(ViewGroup.java:945)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:965)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.view.ViewRootImpl.handleMessage(ViewRootImpl.java:2442)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.os.Handler.dispatchMessage(Handler.java:99)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.os.Looper.loop(Looper.java:137)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at android.app.ActivityThread.main(ActivityThread.java:4424)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at java.lang.reflect.Method.invokeNative(Native Method)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at java.lang.reflect.Method.invoke(Method.java:511)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:784)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:551)
06-13 14:10:31.518: E/AndroidRuntime(11888):    at dalvik.system.NativeStart.main(Native Method)
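One reading of the trace: when the activity restarts, the CameraPreview that is still attached to the window holds a reference to the Camera that onPause() released (the replacement created in onResume() below is never added to the layout), so surfaceCreated() calls setPreviewDisplay() on a dead handle. A minimal defensive sketch, not from the posted code and with setCamera() as a hypothetical helper:

    // A minimal defensive sketch (not from the posted code): skip the preview
    // setup when the Camera reference has been cleared, and let the activity
    // clear or replace it through the hypothetical setCamera() helper, e.g.
    // mCameraPreview.setCamera(null) in onPause() and
    // mCameraPreview.setCamera(mCamera) in onResume().
    public void surfaceCreated(SurfaceHolder holder) {
     if (mCamera == null) {
      Log.d(TAG, "surfaceCreated: camera released, skipping preview setup");
      return;
     }
     try {
      mCamera.setPreviewDisplay(holder);
      mCamera.startPreview();
     } catch (IOException e) {
      Log.d(TAG, "Error setting camera preview: " + e.getMessage());
     }
    }

    public void setCamera(Camera camera) {
     mCamera = camera;
    }

With a guard like that, onResume() could hand the existing preview a freshly opened Camera instead of constructing a new CameraPreview that never gets attached to the layout.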

Below is the code, most of which comes straight from the developer guide's camera example and the ApiDemos/OS/Sensors sample. Does anyone see what should be done differently in the Activity lifecycle to avoid the exception at line 221 (marked in the code below)?

Thanks in advance for taking a look,
Greg

public class Sensors10Activity extends Activity { 
private final String TAG = "SENSORS_TEST"; 
private SensorManager mSensorManager; 
private GraphView mGraphView; 
private Camera mCamera; 
private CameraPreview mCameraPreview; 

public class GraphView extends View implements SensorEventListener { 
    private Bitmap mBitmap; 
    private Paint mPaint = new Paint(); 
    private Canvas mCanvas = new Canvas(); 
    private Path mPath = new Path(); 
    private RectF mRect = new RectF(); 
    private float mLastValues[] = new float[3*2]; 
    private float mOrientationValues[] = new float[3]; 
    private int  mColors[] = new int[3*2]; 
    private float mLastX; 
    private float mScale[] = new float[2]; 
    private float mYOffset; 
    private float mMaxX; 
    private float mSpeed = 1.0f; 
    private float mWidth; 
    private float mHeight; 
    public GraphView(Context context) { 
     super(context); 
     mColors[0] = Color.argb(192, 255, 64, 64); 
     mColors[1] = Color.argb(192, 64, 128, 64); 
     mColors[2] = Color.argb(192, 64, 64, 255); 
     mColors[3] = Color.argb(192, 64, 255, 255); 
     mColors[4] = Color.argb(192, 128, 64, 128); 
     mColors[5] = Color.argb(192, 255, 255, 64); 

     mPaint.setFlags(Paint.ANTI_ALIAS_FLAG); 
     mRect.set(-0.5f, -0.5f, 0.5f, 0.5f); 
     mPath.arcTo(mRect, 0, 180); 
    } 

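    // Rebuild the backing bitmap and per-axis scale factors whenever the view is resized.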
    @Override 
    protected void onSizeChanged(int w, int h, int oldw, int oldh) { 
     mBitmap = Bitmap.createBitmap(w, h, Bitmap.Config.RGB_565); 
     mCanvas.setBitmap(mBitmap); 
     mCanvas.drawColor(0xFFFFFFFF); 
     mYOffset = h * 0.5f; 
     mScale[0] = - (h * 0.5f * (1.0f/(SensorManager.STANDARD_GRAVITY * 2))); 
     mScale[1] = - (h * 0.5f * (1.0f/(SensorManager.MAGNETIC_FIELD_EARTH_MAX))); 
     mWidth = w; 
     mHeight = h; 
     if (mWidth < mHeight) { 
      mMaxX = w; 
     } else { 
      mMaxX = w-50; 
     } 
     mLastX = mMaxX; 
     super.onSizeChanged(w, h, oldw, oldh); 
    } 

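    // Draw the cached sensor trace (resetting the grid when the trace wraps),
    // then overlay one dial per orientation axis.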
    @Override 
    protected void onDraw(Canvas canvas) { 
     synchronized (this) { 
      if (mBitmap != null) { 
       final Paint paint = mPaint; 
       final Path path = mPath; 
       final int outer = 0xFFC0C0C0; 
       final int inner = 0xFFff7010; 

       if (mLastX >= mMaxX) { 
        mLastX = 0; 
        final Canvas cachedCanvas = mCanvas; 
        final float yoffset = mYOffset; 
        final float maxx = mMaxX; 
        final float oneG = SensorManager.STANDARD_GRAVITY * mScale[0]; 
        paint.setColor(0xFFAAAAAA); 
        cachedCanvas.drawColor(0xFFFFFFFF); 
        cachedCanvas.drawLine(0, yoffset,  maxx, yoffset,  paint); 
        cachedCanvas.drawLine(0, yoffset+oneG, maxx, yoffset+oneG, paint); 
        cachedCanvas.drawLine(0, yoffset-oneG, maxx, yoffset-oneG, paint); 
       } 
       canvas.drawBitmap(mBitmap, 0, 0, null); 

       float[] values = mOrientationValues; 
       if (mWidth < mHeight) { 
        float w0 = mWidth * 0.333333f; 
        float w = w0 - 32; 
        float x = w0*0.5f; 
        for (int i=0 ; i<3 ; i++) { 
         canvas.save(Canvas.MATRIX_SAVE_FLAG); 
         canvas.translate(x, w*0.5f + 4.0f); 
         canvas.save(Canvas.MATRIX_SAVE_FLAG); 
         paint.setColor(outer); 
         canvas.scale(w, w); 
         canvas.drawOval(mRect, paint); 
         canvas.restore(); 
         canvas.scale(w-5, w-5); 
         paint.setColor(inner); 
         canvas.rotate(-values[i]); 
         canvas.drawPath(path, paint); 
         canvas.restore(); 
         x += w0; 
        } 
       } else { 
        float h0 = mHeight * 0.333333f; 
        float h = h0 - 32; 
        float y = h0*0.5f; 
        for (int i=0 ; i<3 ; i++) { 
         canvas.save(Canvas.MATRIX_SAVE_FLAG); 
         canvas.translate(mWidth - (h*0.5f + 4.0f), y); 
         canvas.save(Canvas.MATRIX_SAVE_FLAG); 
         paint.setColor(outer); 
         canvas.scale(h, h); 
         canvas.drawOval(mRect, paint); 
         canvas.restore(); 
         canvas.scale(h-5, h-5); 
         paint.setColor(inner); 
         canvas.rotate(-values[i]); 
         canvas.drawPath(path, paint); 
         canvas.restore(); 
         y += h0; 
        } 
       } 
      } 
     } 
    } 

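    // Record the latest orientation values, or append one line segment per axis
    // to the accelerometer/magnetometer trace, then schedule a redraw.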
    public void onSensorChanged(SensorEvent event) { 
     //Log.d(TAG, "sensor: " + sensor + ", x: " + values[0] + ", y: " + values[1] + ", z: " + values[2]); 
     synchronized (this) { 
      if (mBitmap != null) { 
       final Canvas canvas = mCanvas; 
       final Paint paint = mPaint; 
       if (event.sensor.getType() == Sensor.TYPE_ORIENTATION) { 
        for (int i=0 ; i<3 ; i++) { 
         mOrientationValues[i] = event.values[i]; 
        } 
       } else { 
        float deltaX = mSpeed; 
        float newX = mLastX + deltaX; 

        int j = (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) ? 1 : 0; 
        for (int i=0 ; i<3 ; i++) { 
         int k = i+j*3; 
         final float v = mYOffset + event.values[i] * mScale[j]; 
         paint.setColor(mColors[k]); 
         canvas.drawLine(mLastX, mLastValues[k], newX, v, paint); 
         mLastValues[k] = v; 
        } 
        if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) 
         mLastX += mSpeed; 
       } 
       invalidate(); 
      } 
     } 
    } 

    public void onAccuracyChanged(Sensor sensor, int accuracy) { 
    } 
} 

public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback { 
    private SurfaceHolder mHolder; 
    private Camera mCamera; 

    public CameraPreview(Context context, Camera camera) { 
     super(context); 
     mCamera = camera; 

     // Install a SurfaceHolder.Callback so we get notified when the 
     // underlying surface is created and destroyed. 
     mHolder = getHolder(); 
     mHolder.addCallback(this); 
     // deprecated setting, but required on Android versions prior to 3.0 
     mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); 
    } 

    public void surfaceCreated(SurfaceHolder holder) { 
     // The Surface has been created, now tell the camera where to draw the preview. 
     try { 
      mCamera.setPreviewDisplay(holder); // !!! LINE 221: exception occurs here. 
      mCamera.startPreview(); 
     } catch (IOException e) { 
      Log.d(TAG, "Error setting camera preview: " + e.getMessage()); 
     } 
    } 

    public void surfaceDestroyed(SurfaceHolder holder) { 
     // empty. Take care of releasing the Camera preview in your activity. 
    } 

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { 
     // If your preview can change or rotate, take care of those events here. 
     // Make sure to stop the preview before resizing or reformatting it. 

     if (mHolder.getSurface() == null){ 
      // preview surface does not exist 
      return; 
     } 

     // stop preview before making changes 
     try { 
      mCamera.stopPreview(); 
     } catch (Exception e){ 
      // ignore: tried to stop a non-existent preview 
     } 

     // set preview size and make any resize, rotate or 
     // reformatting changes here 

     // start preview with new settings 
     try { 
      mCamera.setPreviewDisplay(mHolder); 
      mCamera.startPreview(); 

     } catch (Exception e){ 
      Log.d(TAG, "Error starting camera preview: " + e.getMessage()); 
     } 
    } 
} 

////////////// 
// LIFE CYCLE 
// 

/** 
* Initialization of the Activity after it is first created. Must at least 
* call {@link android.app.Activity#setContentView setContentView()} to 
* describe what is to be displayed in the screen. 
*/ 
@Override 
protected void onCreate(Bundle savedInstanceState) { 
    // Be sure to call the super class. 
    super.onCreate(savedInstanceState); 

    mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); 
    mCamera = Camera.open(1); 
    mCameraPreview = new CameraPreview(this, mCamera); 
    mGraphView = new GraphView(this); 
    // setContentView(mGraphView); 

    // Create RelativeLayout for layout root. 
    RelativeLayout relativeLayout = new RelativeLayout(this); 
    RelativeLayout.LayoutParams rlp = new RelativeLayout.LayoutParams(
      RelativeLayout.LayoutParams.FILL_PARENT, 
      RelativeLayout.LayoutParams.FILL_PARENT); 

    // Add GraphView to layout. 
    RelativeLayout.LayoutParams lpGraph = new RelativeLayout.LayoutParams(
      RelativeLayout.LayoutParams.FILL_PARENT, 
      RelativeLayout.LayoutParams.FILL_PARENT); 
    mGraphView.setLayoutParams(lpGraph); 
    relativeLayout.addView(mGraphView); 

    // Add SurfaceView to layout. 
    List<Camera.Size> ls = mCamera.getParameters().getSupportedPreviewSizes(); 
    int n = ls.size(); 
    int widthMin = 10000; 
    int imin = -1; 
    for (int i=0; i<n; i++) { 
     Log.d(TAG, "supported preview width x height: " + ls.get(i).width + " x " + ls.get(i).height); 
     if (widthMin > ls.get(i).width) { 
      widthMin = ls.get(i).width; 
      imin = i; 
     } 
    } 
    if (imin >= 0) { 
     RelativeLayout.LayoutParams lpSurface = new RelativeLayout.LayoutParams(
       ls.get(imin).width, ls.get(imin).height); 
     lpSurface.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM); 
     lpSurface.addRule(RelativeLayout.CENTER_HORIZONTAL); 
     mCameraPreview.setLayoutParams(lpSurface); 
     relativeLayout.addView(mCameraPreview); 
    } 

    // Provide Android framework with layout root. 
    setContentView(relativeLayout, rlp); 
} 

@Override 
protected void onResume() { 
    super.onResume(); 
    mSensorManager.registerListener(mGraphView, 
      mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), 
      SensorManager.SENSOR_DELAY_FASTEST); 
    mSensorManager.registerListener(mGraphView, 
      mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD), 
      SensorManager.SENSOR_DELAY_FASTEST); 
    mSensorManager.registerListener(mGraphView, 
      mSensorManager.getDefaultSensor(Sensor.TYPE_ORIENTATION), 
      SensorManager.SENSOR_DELAY_FASTEST); 

    if (mCamera == null) 
     mCamera = Camera.open(1); 
    if (mCameraPreview == null) 
     mCameraPreview = new CameraPreview(this, mCamera); 
} 


@Override 
protected void onPause() { 
    super.onPause(); 
    if (mCamera != null) { 
     mCamera.stopPreview(); 
     mCamera.release();  // release the camera for other applications 
     mCamera = null; 
    } 
    if (mCameraPreview != null) { 
     mCameraPreview = null; 
    } 
} 

@Override 
protected void onStop() { 
    mSensorManager.unregisterListener(mGraphView); 
    super.onStop(); 
} 

}

Answer

After several hours of trial and error, the lifecycle handlers below seem to work: they survive the pause caused by the power button, the stop caused by the Home icon, and the destroy caused by the Back icon, and the camera is released in onPause() so that it becomes available to the system's camera app. (mLayoutRoot and mCameraPreviewSize are new activity fields; keeping the layout root and the chosen preview size lets the CameraPreview be removed and re-added across pause/resume.)

@Override 
protected void onCreate(Bundle savedInstanceState) { 
    // Be sure to call the super class. 
    super.onCreate(savedInstanceState); 

    mSensorManager = (SensorManager) getSystemService(SENSOR_SERVICE); 
    mCamera = Camera.open(1); 
    mCameraPreview = new CameraPreview(this, mCamera); 
    mGraphView = new GraphView(this); 
    // setContentView(mGraphView); 

    // Create RelativeLayout for layout root. 
    mLayoutRoot = new RelativeLayout(this); 
    RelativeLayout.LayoutParams rlp = new RelativeLayout.LayoutParams(
      RelativeLayout.LayoutParams.FILL_PARENT, 
      RelativeLayout.LayoutParams.FILL_PARENT); 

    // Add GraphView to layout. 
    RelativeLayout.LayoutParams lpGraph = new RelativeLayout.LayoutParams(
      RelativeLayout.LayoutParams.FILL_PARENT, 
      RelativeLayout.LayoutParams.FILL_PARENT); 
    mGraphView.setLayoutParams(lpGraph); 
    mLayoutRoot.addView(mGraphView); 

    // Add SurfaceView to layout. 
    List<Camera.Size> ls = mCamera.getParameters().getSupportedPreviewSizes(); 
    int n = ls.size(); 
    int widthMin = 10000; 
    int imin = -1; 
    for (int i=0; i<n; i++) { 
     Log.d(TAG, "supported preview width x height: " + ls.get(i).width + " x " + ls.get(i).height); 
     if (widthMin > ls.get(i).width) { 
      widthMin = ls.get(i).width; 
      mCameraPreviewSize = ls.get(i); 
      imin = i; 
     } 
    } 
    if (imin >= 0) { 
     RelativeLayout.LayoutParams lpSurface = new RelativeLayout.LayoutParams(
       ls.get(imin).width, ls.get(imin).height); 
     lpSurface.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM); 
     lpSurface.addRule(RelativeLayout.CENTER_HORIZONTAL); 
     mCameraPreview.setLayoutParams(lpSurface); 
     mLayoutRoot.addView(mCameraPreview); 
    } 

    // Provide Android framework with layout root. 
    setContentView(mLayoutRoot, rlp); 
    Log.d(TAG, "onCreate OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview); 
} 

@Override 
protected void onStart() { 
    super.onStart(); 
    Log.d(TAG, "onStart OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview); 
} 

@Override 
protected void onResume() { 
    super.onResume(); 
    mSensorManager.registerListener(mGraphView, 
      mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER), 
      SensorManager.SENSOR_DELAY_FASTEST); 
    mSensorManager.registerListener(mGraphView, 
      mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD), 
      SensorManager.SENSOR_DELAY_FASTEST); 
    mSensorManager.registerListener(mGraphView, 
      mSensorManager.getDefaultSensor(Sensor.TYPE_ORIENTATION), 
      SensorManager.SENSOR_DELAY_FASTEST); 

    if (mCamera == null) 
     mCamera = Camera.open(1); 
    if (mCameraPreview == null) { 
     mCameraPreview = new CameraPreview(this, mCamera); 
     RelativeLayout.LayoutParams lpCameraPreview = new RelativeLayout.LayoutParams(
       mCameraPreviewSize.width, mCameraPreviewSize.height); 
     lpCameraPreview.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM); 
     lpCameraPreview.addRule(RelativeLayout.CENTER_HORIZONTAL); 
     mCameraPreview.setLayoutParams(lpCameraPreview); 
     mLayoutRoot.addView(mCameraPreview); 
    } 
    Log.d(TAG, "onResume OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview); 
} 


@Override 
protected void onPause() { 
    if (mCamera != null) { 
     mCamera.stopPreview(); 
     mCamera.release();  // release the camera for other applications 
     mCamera = null; 
    } 
    if (mCameraPreview != null) { 
     mLayoutRoot.removeView(mCameraPreview); 
     mCameraPreview = null; 
    } 
    super.onPause(); 
    Log.d(TAG, "onPause OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview); 
} 

@Override 
protected void onStop() { 
    mSensorManager.unregisterListener(mGraphView); 
    super.onStop(); 
    Log.d(TAG, "onStop OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview); 
} 

@Override 
protected void onDestroy() { 
    super.onDestroy(); 
    Log.d(TAG, "onDestroy OUT mCamera, mCameraPreview: " + mCamera + ", " + mCameraPreview); 
} 

The lifecycle handlers in the ApiDemos/Graphics/CameraPreview app are simpler than the ones I wrote, because the ApiDemos handlers do not destroy the camera preview object in onPause() (which may be the better approach). – gregS
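For comparison, here is a minimal sketch of that simpler pattern, assuming the CameraPreview view stays in the layout for the life of the activity and only the Camera object is released and reacquired (a sketch of the idea, not the verbatim ApiDemos source):

@Override 
protected void onResume() { 
    super.onResume(); 
    if (mCamera == null) { 
     mCamera = Camera.open(1); 
     try { 
      // Reattach the freshly opened camera to the surface the
      // SurfaceView has kept across pause/resume.
      mCamera.setPreviewDisplay(mCameraPreview.getHolder()); 
      mCamera.startPreview(); 
     } catch (IOException e) { 
      Log.d(TAG, "Error restarting camera preview: " + e.getMessage()); 
     } 
    } 
} 

@Override 
protected void onPause() { 
    super.onPause(); 
    if (mCamera != null) { 
     mCamera.stopPreview(); 
     mCamera.release();  // release the camera for other applications 
     mCamera = null; 
    } 
} 

The SurfaceHolder callbacks still have to cooperate: the surface itself can be torn down while the activity is stopped, so surfaceCreated() must skip its setup when mCamera is null (see the guard sketched under the stack trace above).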
