2015-10-15 73 views
0

嗨,我已经这样做了4天,但我无法解决它。我有一个类是相机预览类,我在类中创建了一个获取位图函数,我想在另一个类中调用此函数来获取位图。Android Camera SurfaceView创建位图

这是我调用获取位图功能的代码:

public class Stage extends GLSurfaceView {

    private float w, h;                    // logical (world) size used for the ortho projection
    private int screenWidth, screenHeight; // physical surface size in pixels
    private String img;
    private boolean SC;                    // "screenshot requested" flag, consumed by onDrawFrame
    private Bitmap screen, imgB;
    private boolean c;                     // true -> the screenshot should come from the camera
    MyRenderer mRenderer;
    // Camera preview view whose frames we grab as Bitmaps.
    CameraSurfaceView csv;

    public Stage(Context context, AttributeSet attrs) {
        super(context, attrs);
        setEGLConfigChooser(8, 8, 8, 8, 0, 0);
        getHolder().setFormat(PixelFormat.TRANSPARENT);
        setZOrderOnTop(true);
        mRenderer = new MyRenderer();
        setRenderer(mRenderer);
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

        // NOTE(review): merely constructing the view is not enough for the
        // camera to deliver frames -- the CameraSurfaceView must also be
        // attached to the window (e.g. via addContentView) so its surface
        // callbacks fire and the preview actually starts.
        csv = new CameraSurfaceView(context);
    }

    public class MyRenderer implements GLSurfaceView.Renderer {

        /** Requests a screenshot on the next rendered frame. */
        public void setSC(boolean yn) {
            SC = yn;
            requestRender();
        }

        /** Selects whether the screenshot is taken from the camera preview. */
        public void setC(boolean y) {
            c = y;
        }

        public final void onDrawFrame(GL10 gl) {
            gl.glClear(GLES10.GL_COLOR_BUFFER_BIT);

            if (SC) {
                if (c) {
                    // Latest camera preview frame. Frame delivery is
                    // asynchronous, so this may still be null.
                    imgB = csv.getCameraBitmap();
                }

                // FIX: the original called imgB.compress() unconditionally and
                // crashed with a NullPointerException whenever no camera frame
                // had been delivered yet (the usual symptom of a
                // CameraSurfaceView that was never attached to the window).
                if (imgB != null) {
                    String filePath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/OpenGL";
                    File dir = new File(filePath);
                    if (!dir.exists()) {
                        dir.mkdirs();
                    }
                    String stamp = new SimpleDateFormat("yyyyMMddHHmmss", java.util.Locale.getDefault()).format(new Date());
                    File file = new File(filePath, stamp + ".png");
                    FileOutputStream fOut = null;
                    try {
                        fOut = new FileOutputStream(file);
                        imgB.compress(Bitmap.CompressFormat.PNG, 85, fOut);
                        fOut.flush();
                    } catch (Exception e) {
                        e.printStackTrace();
                    } finally {
                        // FIX: close in finally -- the original leaked the
                        // stream whenever compress()/flush() threw.
                        if (fOut != null) {
                            try {
                                fOut.close();
                            } catch (Exception ignored) {
                                // Nothing useful to do if close fails.
                            }
                        }
                    }
                }

                SC = false;
            }
        }

        public final void onSurfaceChanged(GL10 gl, int width, int height) {
            gl.glClearColor(0, 0, 0, 0);

            // Keep the shorter world axis fixed at 600 units and scale the
            // other one to preserve the surface's aspect ratio.
            if (width > height) {
                h = 600;
                w = width * h / height;
            } else {
                w = 600;
                h = height * w / width;
            }
            screenWidth = width;
            screenHeight = height;

            gl.glViewport(0, 0, screenWidth, screenHeight);
            gl.glMatrixMode(GL10.GL_PROJECTION);
            gl.glLoadIdentity();
            // Top-left origin with y growing downwards (2D pixel-style coords).
            gl.glOrthof(0, w, h, 0, -1, 1);
            gl.glMatrixMode(GL10.GL_MODELVIEW);
            gl.glLoadIdentity();
        }

        public final void onSurfaceCreated(GL10 gl, EGLConfig config) {
            // Set up alpha blending (premultiplied-alpha style blend func).
            gl.glEnable(GL10.GL_ALPHA_TEST);
            gl.glEnable(GL10.GL_BLEND);
            gl.glBlendFunc(GL10.GL_ONE, GL10.GL_ONE_MINUS_SRC_ALPHA);

            // Pure 2D rendering -- depth testing is unnecessary.
            gl.glDisable(GL10.GL_DEPTH_TEST);

            // Enable vertex arrays (used to draw primitives).
            gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);

            // Enable texture coordinate arrays.
            gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

            // `tex` is declared elsewhere in the project; (re)load textures
            // here because the GL context may have been recreated.
            tex.load(getContext());
        }
    }
}

这是我的相机类:

public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback { 
private static final String TAG = "CameraSurfaceView"; 

private SurfaceHolder mSurfaceHolder; 
private Camera mCamera = null; 
private Bitmap mBitmap; 
private Context mContext; 
private Camera.Parameters mParameters; 
private byte[] byteArray; 
private List<Camera.Size> mSupportedPreviewSizes; 
private Camera.Size mPreviewSize; 
Bitmap cameraBitmap; 

public CameraSurfaceView (Context context) { 
    this(context, null); 
} 

public CameraSurfaceView (Context context, AttributeSet attrs) { 
    this(context, attrs, 0); 
} 

public CameraSurfaceView (Context context, AttributeSet attrs, int defStyle) { 
    super(context, attrs, defStyle); 
    mContext = context; 

    try { 
     mSurfaceHolder = getHolder(); 
     mSurfaceHolder.addCallback(this); 
     mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); 

    } catch (Exception e) { 
     e.printStackTrace(); 
    } 
} 

@Override 
public void surfaceCreated(final SurfaceHolder surfaceHolder) { 
    if (mCamera == null) { 
     try { 
      mCamera = Camera.open(); 
     } catch (RuntimeException ignored) { 
     } 
    } 

    try { 
     if (mCamera != null) { 
      WindowManager winManager = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE); 
      mCamera.setPreviewDisplay(mSurfaceHolder); 
     } 
    } catch (Exception e) { 
     if (mCamera != null) 
      mCamera.release(); 
     mCamera = null; 
    } 

    if (mCamera == null) { 
     return; 
    } else { 
     mCamera.setPreviewCallback(new Camera.PreviewCallback() { 
      @Override 
      public void onPreviewFrame(byte[] bytes, Camera camera) { 
       if (mParameters == null) 
       { 
        return; 
       } 
       byteArray = bytes; 
       cameraBitmap = getBitmap(); 
      } 
     }); 
    } 

    setWillNotDraw(false); 
} 

@Override 
public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) { 
    try { 
     mParameters = mCamera.getParameters(); 

     List<Camera.Size> cameraSize = mParameters.getSupportedPreviewSizes(); 
     mPreviewSize = cameraSize.get(0); 

     for (Camera.Size s : cameraSize) { 
      if ((s.width * s.height) > (mPreviewSize.width * mPreviewSize.height)) { 
       mPreviewSize = s; 
      } 
     } 

     mParameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height); 
     mCamera.setParameters(mParameters); 
     mCamera.startPreview(); 

    } catch (Exception e) { 
     if (mCamera != null) { 
      mCamera.release(); 
      mCamera = null; 
     } 
    } 
} 


@Override 
public void surfaceDestroyed(SurfaceHolder surfaceHolder) { 
    if (mCamera != null) { 
     mCamera.setPreviewCallback(null); 
     mCamera.stopPreview(); 
     mCamera.release(); 
     mCamera = null; 
    } 
} 

public Bitmap getBitmap() { 
    try { 
     if (mParameters == null) 
      return null; 

     if (mPreviewSize == null) 
      return null; 

     int format = mParameters.getPreviewFormat(); 
     YuvImage yuvImage = new YuvImage(byteArray, format, mPreviewSize.width, mPreviewSize.height, null); 
     ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); 

     Rect rect = new Rect(0, 0, mPreviewSize.width, mPreviewSize.height); 

     yuvImage.compressToJpeg(rect, 75, byteArrayOutputStream); 
     BitmapFactory.Options options = new BitmapFactory.Options(); 
     options.inPurgeable = true; 
     options.inInputShareable = true; 
     mBitmap = BitmapFactory.decodeByteArray(byteArrayOutputStream.toByteArray(), 0, byteArrayOutputStream.size(), options); 

     byteArrayOutputStream.flush(); 
     byteArrayOutputStream.close(); 
    } catch (IOException ioe) { 
     ioe.printStackTrace(); 
    } 

    return mBitmap; 
} 

public Bitmap getCameraBitmap() { 
    return cameraBitmap; 
} 
} 

如能提供任何指导,将不胜感激~

回答

0

要开始工作相机,您必须显示CameraSurfaceView - 创建它“在空中”是不够的。请参阅最近的相关讨论:Take a photo using a service on OnePlus One - using WindowManager hack

你不需要takePicture()那部分,但等待预览帧被复制到byteArray同样是异步的。

底线是,您无法同步接收位图。您可以请求位图,并以回调形式交付,例如

CameraSurfaceView csv = new CameraSurfaceView(getContext());
// FIX: the Activity API is addContentView(View, ViewGroup.LayoutParams),
// not addContentsView() -- and it requires layout params.
((Activity) getContext()).addContentView(csv,
        new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
                ViewGroup.LayoutParams.MATCH_PARENT));
csv.requestBitmap(imageView);

,并在某处CameraSurfaceView.java

// Deliver each preview frame to the requested ImageView as soon as it arrives.
public void onPreviewFrame(byte[] bytes, Camera camera) {
    byteArray = bytes;
    Bitmap imgB = getBitmap();
    // FIX: ImageView's setter is setImageBitmap(), not setBitmap().
    imageView.setImageBitmap(imgB);
}
+0

我该如何把位图保存到变量中,以便在其他类中使用? –

+0

可以,保存之后你可以按需使用它——但你无法立刻拿到它。 –

+0

如何做到这一点?我按你上面说的调用了getBitmap函数,但仍然得到空的位图 –