/** * 手动对焦 * * @param focusAreas 对焦区域 * @return */ @SuppressLint("NewApi") @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) public boolean manualFocus(AutoFocusCallback cb, List<Area> focusAreas) { if (camera != null && focusAreas != null && mParameters != null && DeviceUtils.hasICS()) { try { camera.cancelAutoFocus(); // getMaxNumFocusAreas检测设备是否支持 if (mParameters.getMaxNumFocusAreas() > 0) { // mParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_MACRO);// // Macro(close-up) focus mode mParameters.setFocusAreas(focusAreas); } if (mParameters.getMaxNumMeteringAreas() > 0) mParameters.setMeteringAreas(focusAreas); mParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_MACRO); camera.setParameters(mParameters); camera.autoFocus(cb); return true; } catch (Exception e) { if (mOnErrorListener != null) { mOnErrorListener.onVideoError(MEDIA_ERROR_CAMERA_AUTO_FOCUS, 0); } if (e != null) Log.e("Yixia", "autoFocus", e); } } return false; }
/** * 手动聚焦 * @param point 触屏坐标 */ protected void onFocus(Point point,AutoFocusCallback callback){ Parameters parameters=mCamera.getParameters(); //不支持设置自定义聚焦,则使用自动聚焦,返回 if (parameters.getMaxNumFocusAreas()<=0) { mCamera.autoFocus(callback); return; } List<Area> areas=new ArrayList<Area>(); int left=point.x-300; int top=point.y-300; int right=point.x+300; int bottom=point.y+300; left=left<-1000?-1000:left; top=top<-1000?-1000:top; right=right>1000?1000:right; bottom=bottom>1000?1000:bottom; areas.add(new Area(new Rect(left,top,right,bottom), 100)); parameters.setFocusAreas(areas); try { //本人使用的小米手机在设置聚焦区域的时候经常会出异常,看日志发现是框架层的字符串转int的时候出错了, //目测是小米修改了框架层代码导致,在此try掉,对实际聚焦效果没影响 mCamera.setParameters(parameters); } catch (Exception e) { // TODO: handle exception e.printStackTrace(); } mCamera.autoFocus(callback); }
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) private void initializeFocusAreas(int x, int y) { if (mFocusArea == null) { mFocusArea = new ArrayList<Area>(); mFocusArea.add(new Area(new Rect(), 1)); } // Convert the coordinates to driver format. calculateTapArea(x, y, getAFRegionEdge(), mFocusArea.get(0).rect); }
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) private void initializeMeteringAreas(int x, int y) { if (mMeteringArea == null) { mMeteringArea = new ArrayList<Area>(); mMeteringArea.add(new Area(new Rect(), 1)); } // Convert the coordinates to driver format. calculateTapArea(x, y, getAERegionEdge(), mMeteringArea.get(0).rect); }
/**
 * Prepares the single reusable focus area and fills its rectangle from the
 * tap position, using a fixed region-edge scale of 1f.
 *
 * @param x tap x coordinate in view space
 * @param y tap y coordinate in view space
 */
// NOTE(review): the list is created as ArrayList<Object> and the element is
// cast back to Area on every use. If the mFocusArea field (declared outside
// this view) can be retyped to List<Area>, the unchecked cast goes away —
// verify the field declaration before changing.
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
private void initializeFocusAreas(int x, int y) {
    if (mFocusArea == null) {
        mFocusArea = new ArrayList<Object>();
        mFocusArea.add(new Area(new Rect(), 1));
    }
    // Convert the coordinates to driver format.
    calculateTapArea(x, y, 1f, ((Area) mFocusArea.get(0)).rect);
}
/**
 * Prepares the single reusable metering area and fills its rectangle from
 * the tap position, using a fixed region-edge scale of 1.5f.
 *
 * @param x tap x coordinate in view space
 * @param y tap y coordinate in view space
 */
// NOTE(review): the list is created as ArrayList<Object> and the element is
// cast back to Area on every use. If the mMeteringArea field (declared outside
// this view) can be retyped to List<Area>, the unchecked cast goes away —
// verify the field declaration before changing.
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
private void initializeMeteringAreas(int x, int y) {
    if (mMeteringArea == null) {
        mMeteringArea = new ArrayList<Object>();
        mMeteringArea.add(new Area(new Rect(), 1));
    }
    // Convert the coordinates to driver format.
    // AE area is bigger because exposure is sensitive and
    // easy to over- or underexposure if area is too small.
    calculateTapArea(x, y, 1.5f, ((Area) mMeteringArea.get(0)).rect);
}
@Override public void onFaceDetection(android.hardware.Camera.Face[] faces, Camera camera) { boolean isfacedet = StoredData.getBoolean(StoredData.M_FACETRACKING, false); // System.out.println("是否可以人脸识别:" + isfacedet); if (!isfacedet || (m_cameraIndex == m_camera_front)) { WiCameraActivity.m_fd_face.setVisibility(View.GONE); return; } if (m_isfacedetection) { WiCameraActivity.m_fd_face.setVisibility(View.VISIBLE); // TODO Auto-generated method stub // TODO Auto-generated method stub Rect[] rectarrayRects = new Rect[faces.length]; for (int i = 0; i < faces.length; i++) { rectarrayRects[i] = faces[i].rect; } // TODO Auto-generated method stub boolean isneedfocus = isNeedFocus(oldRect, rectarrayRects); WiCameraActivity.m_fd_face.setFaces(faces); if (isneedfocus) { if ((mFocusArea != null) && (FOCU_STATE == STATE_SUCCESS || FOCU_STATE == STATE_FAIL || FOCU_STATE == STATE_FOCUSING)) { mFocusArea = null; mMeteringArea = null; cameras.cancelAutoFocus(); FOCU_STATE = STATE_IDLE; } mFocusArea = new ArrayList<Camera.Area>(); mMeteringArea = new ArrayList<Camera.Area>(); if (cameras != null) { for (int i = 0; i < faces.length; i++) { mFocusArea.add(new Area(faces[i].rect, 100)); mMeteringArea.add(new Area(faces[i].rect, 100)); } } setArea(); } oldRect = rectarrayRects; } }
@Override public void onFaceDetection(android.hardware.Camera.Face[] faces, Camera camera) { boolean isfacedet = StoredData.getBoolean(StoredData.M_FACETRACKING, false); // System.out.println("是否可以人脸识别:" + isfacedet); if (!isfacedet || (m_cameraIndex == m_camera_front || Util.CAMERA_STATE == CAMERA_VIDEO || WiCameraActivity.isContinus || (!WiCameraActivity.isCameraOpen))) { WiCameraActivity.m_fd_face.setVisibility(View.GONE); return; } if (m_isfacedetection) { WiCameraActivity.m_fd_face.setVisibility(View.VISIBLE); // if (FOCU_STATE == STATE_FOCUSING) { // WiCameraActivity.m_fd_face.setVisibility(View.GONE); // return; // } // TODO Auto-generated method stub // TODO Auto-generated method stub Rect[] rectarrayRects = new Rect[faces.length]; for (int i = 0; i < faces.length; i++) { rectarrayRects[i] = faces[i].rect; } // TODO Auto-generated method stub boolean isneedfocus = isNeedFocus(oldRect, rectarrayRects); WiCameraActivity.m_fd_face.setFaces(faces); if (isneedfocus) { if ((mFocusArea != null) && (FOCU_STATE == STATE_SUCCESS || FOCU_STATE == STATE_FAIL)) { mFocusArea = null; mMeteringArea = null; cameras.cancelAutoFocus(); FOCU_STATE = STATE_IDLE; } mFocusArea = new ArrayList<Camera.Area>(); mMeteringArea = new ArrayList<Camera.Area>(); if (cameras != null) { for (int i = 0; i < faces.length; i++) { mFocusArea.add(new Area(faces[i].rect, 100)); mMeteringArea.add(new Area(faces[i].rect, 100)); } } // FOCU_STATE = STATE_FOCUSING; setArea(); } oldRect = rectarrayRects; } }
/**
 * Returns the current focus areas.
 *
 * @return the focus-area list; may be null when no areas have been set
 */
public List<Area> getFocusAreas() {
    return this.mFocusArea;
}
/**
 * Returns the current metering areas.
 *
 * @return the metering-area list; may be null when no areas have been set
 */
public List<Area> getMeteringAreas() {
    return this.mMeteringArea;
}