Java class android.hardware.camera2.CaptureResult example source code
Project: Android-Code-Demos
File: MyCamera2Fragment.java
private void checkState(CaptureResult result) {
    /*switch (mState) {
        case STATE_PREVIEW:
            // NOTHING
            break;
        case STATE_WAITING_CAPTURE:
            int afState = result.get(CaptureResult.CONTROL_AF_STATE);
            if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
                    CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState ||
                    CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED == afState ||
                    CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED == afState) {
                // do something like save picture
            }
            break;
    }*/
}
Project: Image-Detection-Samples
File: FDCamera2Presenter.java
private void detectFaces(CaptureResult captureResult) {
    Integer mode = captureResult.get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
    if (isViewAvailable() && mode != null) {
        android.hardware.camera2.params.Face[] faces = captureResult.get(CaptureResult.STATISTICS_FACES);
        if (faces != null) {
            Log.i(TAG, "faces : " + faces.length + " , mode : " + mode);
            for (android.hardware.camera2.params.Face face : faces) {
                Rect faceBounds = face.getBounds();
                // Once processed, the result is sent back to the View
                presenterView.onFaceDetected(mapCameraFaceToCanvas(faceBounds,
                        face.getLeftEyePosition(), face.getRightEyePosition()));
            }
        }
    }
}
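CaptureResult.STATISTICS_FACES is only populated when face detection has been requested on the capture request. Below is a minimal, hedged sketch of how the repeating preview request might be configured so that the callback above actually receives faces; the field names (mCharacteristics, mPreviewRequestBuilder, mCaptureSession, mCaptureCallback, mBackgroundHandler) are assumptions for illustration, not taken from the project above.
// Hedged sketch: request face statistics on the repeating preview request so that
// CaptureResult.STATISTICS_FACES is populated. Field names are illustrative only.
private void enableFaceDetection() throws CameraAccessException {
    int[] modes = mCharacteristics.get(
            CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
    int faceMode = CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF;
    if (modes != null) {
        for (int mode : modes) {
            // FULL also reports eye/mouth landmarks, which detectFaces() above uses.
            if (mode == CameraMetadata.STATISTICS_FACE_DETECT_MODE_FULL) {
                faceMode = mode;
                break;
            } else if (mode == CameraMetadata.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
                faceMode = mode;
            }
        }
    }
    mPreviewRequestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, faceMode);
    mCaptureSession.setRepeatingRequest(
            mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
}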
Project: Camera2
File: CaptureDataSerializer.java
/**
* Generate a human-readable string of the given capture request and write
* it to the given file.
*/
public static void toFile(String title, CameraMetadata<?> metadata, File file) {
    try {
        // Will append if the file already exists.
        FileWriter writer = new FileWriter(file, true);
        if (metadata instanceof CaptureRequest) {
            dumpMetadata(title, (CaptureRequest) metadata, writer);
        } else if (metadata instanceof CaptureResult) {
            dumpMetadata(title, (CaptureResult) metadata, writer);
        } else {
            writer.close();
            throw new IllegalArgumentException("Cannot generate debug data from type "
                    + metadata.getClass().getName());
        }
        writer.close();
    } catch (IOException ex) {
        Log.e(TAG, "Could not write capture data to file.", ex);
    }
}
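A possible call site, shown as a hedged sketch: dumping every completed result from a CaptureCallback for debugging. The debugFile path, the context reference, and the callback wiring are assumptions for illustration, not part of the Camera2 project itself.
// Hypothetical call site: dump each completed capture result for debugging.
final File debugFile = new File(context.getExternalFilesDir(null), "capture_dump.txt");
CameraCaptureSession.CaptureCallback debugCallback =
        new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session,
                    CaptureRequest request, TotalCaptureResult result) {
                CaptureDataSerializer.toFile(
                        "Frame " + result.getFrameNumber(), result, debugFile);
            }
        };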
Project: Camera2
File: AutoFocusHelper.java
/**
* Convert reported camera2 AF state to OneCamera AutoFocusState.
*/
public static OneCamera.AutoFocusState stateFromCamera2State(int state) {
    switch (state) {
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            return OneCamera.AutoFocusState.ACTIVE_SCAN;
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            return OneCamera.AutoFocusState.PASSIVE_SCAN;
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            return OneCamera.AutoFocusState.PASSIVE_FOCUSED;
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            return OneCamera.AutoFocusState.ACTIVE_FOCUSED;
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            return OneCamera.AutoFocusState.PASSIVE_UNFOCUSED;
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            return OneCamera.AutoFocusState.ACTIVE_UNFOCUSED;
        default:
            return OneCamera.AutoFocusState.INACTIVE;
    }
}
Project: Camera2
File: AutoFocusHelper.java
/**
* Utility function: converts CaptureResult.CONTROL_AF_STATE to String.
*/
private static String controlAFStateToString(int controlAFState) {
    switch (controlAFState) {
        case CaptureResult.CONTROL_AF_STATE_INACTIVE:
            return "inactive";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN:
            return "passive_scan";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED:
            return "passive_focused";
        case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN:
            return "active_scan";
        case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED:
            return "focus_locked";
        case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
            return "not_focus_locked";
        case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
            return "passive_unfocused";
        default:
            return "unknown";
    }
}
Project: Camera2
File: OneCameraZslImpl.java
@Override
public void onImageCaptured(Image image, TotalCaptureResult captureResult) {
    long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
    // We should only capture the image if it's more recent than the
    // latest one. Synchronization is necessary since this method is
    // called on {@link #mImageSaverThreadPool}.
    synchronized (mLastCapturedImageTimestamp) {
        if (timestamp > mLastCapturedImageTimestamp.get()) {
            mLastCapturedImageTimestamp.set(timestamp);
        } else {
            // There was a more recent (or identical) image which has
            // begun being saved, so abort.
            return;
        }
    }
    mReadyStateManager.setInput(ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);
    mSession.startEmpty();
    savePicture(image, mParams, mSession);
    mParams.callback.onPictureTaken(mSession);
    Log.v(TAG, "Image saved. Frame number = " + captureResult.getFrameNumber());
}
Project: android_camera2_api_video_app
File: Camera2VideoImageActivity.java
private void process(CaptureResult captureResult) {
    switch (mCaptureState) {
        case STATE_PREVIEW:
            // Do nothing
            break;
        case STATE_WAIT_LOCK:
            mCaptureState = STATE_PREVIEW;
            Integer afState = captureResult.get(CaptureResult.CONTROL_AF_STATE);
            if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED ||
                    afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
                Toast.makeText(getApplicationContext(), "AF Locked!", Toast.LENGTH_SHORT).show();
                startStillCaptureRequest();
            }
            break;
    }
}
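For context, STATE_WAIT_LOCK is typically entered by firing an autofocus trigger so that the callback above sees the resulting CONTROL_AF_STATE. A hedged sketch of such a lockFocus() helper follows; the field names (mCaptureRequestBuilder, mPreviewCaptureSession, mPreviewCaptureCallback, mBackgroundHandler) are assumptions and may differ from the project's actual code.
// Hedged sketch: fire an AF trigger, then let process() above wait for
// CONTROL_AF_STATE_FOCUSED_LOCKED or CONTROL_AF_STATE_NOT_FOCUSED_LOCKED.
private void lockFocus() {
    mCaptureState = STATE_WAIT_LOCK;
    mCaptureRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
            CameraMetadata.CONTROL_AF_TRIGGER_START);
    try {
        mPreviewCaptureSession.capture(mCaptureRequestBuilder.build(),
                mPreviewCaptureCallback, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}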
Project: Android-Code-Demos
File: MyCamera2Fragment.java
@Override
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
CaptureResult partialResult) {
Log.d(TAG, "mSessionCaptureCallback, onCaptureProgressed");
mSession = session;
checkState(partialResult);
}
Project: 361Camera
File: Camera2Fragment.java
private ImageSaver(Image image, File file, CaptureResult result,
CameraCharacteristics characteristics, Context context,
RefCountedAutoCloseable<ImageReader> reader) {
mImage = image;
mFile = file;
mCaptureResult = result;
mCharacteristics = characteristics;
mContext = context;
mReader = reader;
}
Project: OkayCamera-Android
File: ImageSaver.java
private ImageSaver(Image image, File file, CaptureResult result,
CameraCharacteristics characteristics, Context context,
RefCountedAutoCloseable<ImageReader> reader) {
mImage = image;
mFile = file;
mCaptureResult = result;
mCharacteristics = characteristics;
mContext = context;
mReader = reader;
}
Project: phonk
File: CameraNew2.java
public void qq() {
    mPreviewRequestBuilder.set(CaptureRequest.CONTROL_EFFECT_MODE, CaptureResult.CONTROL_EFFECT_MODE_MONO);
    mPreviewRequest = mPreviewRequestBuilder.build();
    try {
        mCaptureSession.setRepeatingRequest(mPreviewRequest, mCaptureCallback, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
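CONTROL_EFFECT_MODE_MONO is not guaranteed to be supported on every device. A hedged sketch of a guard using CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS; the mCharacteristics field is an assumption for illustration, the rest mirrors the snippet above.
// Hedged sketch: only apply the MONO effect if the device advertises it.
int[] effects = mCharacteristics.get(CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS);
boolean monoSupported = false;
if (effects != null) {
    for (int effect : effects) {
        if (effect == CameraMetadata.CONTROL_EFFECT_MODE_MONO) {
            monoSupported = true;
            break;
        }
    }
}
if (monoSupported) {
    qq(); // sets CONTROL_EFFECT_MODE_MONO and restarts the repeating preview request
}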
Project: Camera2App
File: BasicCamera.java
@Override
public void onCaptureProgressed(CameraCaptureSession session,
        CaptureRequest request,
        CaptureResult partialResult) {
    // Capture progress (invoked repeatedly as partial results arrive)
    process(partialResult);
}
Project: Camera2
File: AutoFocusHelper.java
/**
* Complain if CONTROL_AF_STATE is not present in result.
* Could indicate bug in API implementation.
*/
public static boolean checkControlAfState(CaptureResult result) {
    boolean missing = result.get(CaptureResult.CONTROL_AF_STATE) == null;
    if (missing) {
        // throw new IllegalStateException("CaptureResult missing CONTROL_AF_STATE.");
        Log.e(TAG, "\n!!!! TotalCaptureResult missing CONTROL_AF_STATE. !!!!\n ");
    }
    return !missing;
}
Project: Camera2
File: AutoFocusHelper.java
/**
* Complain if LENS_STATE is not present in result.
* Could indicate bug in API implementation.
*/
public static boolean checkLensState(CaptureResult result) {
    boolean missing = result.get(CaptureResult.LENS_STATE) == null;
    if (missing) {
        // throw new IllegalStateException("CaptureResult missing LENS_STATE.");
        Log.e(TAG, "\n!!!! TotalCaptureResult missing LENS_STATE. !!!!\n ");
    }
    return !missing;
}
Project: Camera2
File: AutoFocusHelper.java
public static void logExtraFocusInfo(CaptureResult result) {
    if (!checkControlAfState(result) || !checkLensState(result)) {
        return;
    }
    Object tag = result.getRequest().getTag();
    Log.v(TAG, String.format("af_state:%-17s lens_foc_dist:%.3f lens_state:%-10s %s",
            controlAFStateToString(result.get(CaptureResult.CONTROL_AF_STATE)),
            result.get(CaptureResult.LENS_FOCUS_DISTANCE),
            lensStateToString(result.get(CaptureResult.LENS_STATE)),
            (tag == null) ? "" : "[" + tag + "]"));
}
Project: Camera2
File: AutoFocusHelper.java
/**
* Utility function: converts CaptureResult.LENS_STATE to String.
*/
private static String lensStateToString(int lensState) {
    switch (lensState) {
        case CaptureResult.LENS_STATE_MOVING:
            return "moving";
        case CaptureResult.LENS_STATE_STATIONARY:
            return "stationary";
        default:
            return "unknown";
    }
}
Project: Camera2
File: ImageCaptureManager.java
@Override
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
        final CaptureResult partialResult) {
    long frameNumber = partialResult.getFrameNumber();
    // Update mMetadata for whichever keys are present, if this frame is
    // supplying newer values.
    for (final Key<?> key : partialResult.getKeys()) {
        Pair<Long, Object> oldEntry = mMetadata.get(key);
        final Object oldValue = (oldEntry != null) ? oldEntry.second : null;
        boolean newerValueAlreadyExists = oldEntry != null
                && frameNumber < oldEntry.first;
        if (newerValueAlreadyExists) {
            continue;
        }
        final Object newValue = partialResult.get(key);
        mMetadata.put(key, new Pair<Long, Object>(frameNumber, newValue));
        // If the value has changed, call the appropriate listeners, if
        // any exist.
        if (oldValue == newValue || !mMetadataChangeListeners.containsKey(key)) {
            continue;
        }
        for (final MetadataChangeListener listener : mMetadataChangeListeners.get(key)) {
            Log.v(TAG, "Dispatching to metadata change listener for key: " + key.toString());
            mListenerHandler.post(new Runnable() {
                @Override
                public void run() {
                    listener.onImageMetadataChange(key, oldValue, newValue, partialResult);
                }
            });
        }
    }
}
Project: Camera2
File: OneCameraImpl.java
/**
* This method takes appropriate action if camera2 AF state changes.
* <ol>
* <li>Reports changes in camera2 AF state to OneCamera.FocusStateListener.</li>
* <li>Takes a picture after the AF scan if mTakePictureWhenLensIsStopped is true.</li>
* </ol>
*/
private void autofocusStateChangeDispatcher(CaptureResult result) {
    if (result.getFrameNumber() < mLastControlAfStateFrameNumber ||
            result.get(CaptureResult.CONTROL_AF_STATE) == null) {
        return;
    }
    mLastControlAfStateFrameNumber = result.getFrameNumber();
    // Convert to OneCamera mode and state.
    AutoFocusState resultAFState = AutoFocusHelper
            .stateFromCamera2State(result.get(CaptureResult.CONTROL_AF_STATE));
    // TODO: Consider using LENS_STATE.
    boolean lensIsStopped = resultAFState == AutoFocusState.ACTIVE_FOCUSED ||
            resultAFState == AutoFocusState.ACTIVE_UNFOCUSED ||
            resultAFState == AutoFocusState.PASSIVE_FOCUSED ||
            resultAFState == AutoFocusState.PASSIVE_UNFOCUSED;
    if (mTakePictureWhenLensIsStopped && lensIsStopped) {
        // Take the shot.
        mCameraHandler.post(mTakePictureRunnable);
        mTakePictureWhenLensIsStopped = false;
    }
    // Report state change when AF state has changed.
    if (resultAFState != mLastResultAFState && mFocusStateListener != null) {
        mFocusStateListener.onFocusStatusUpdate(resultAFState, result.getFrameNumber());
    }
    mLastResultAFState = resultAFState;
}
Project: 365browser
File: VideoCaptureCamera2.java
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
    Log.d(TAG, "CrPreviewSessionListener.onConfigured");
    mPreviewSession = cameraCaptureSession;
    try {
        // This line triggers the preview. A |listener| is registered to receive the actual
        // capture result details. A CrImageReaderListener will be triggered every time a
        // downloaded image is ready. Since |handler| is null, we'll work on the current
        // Thread Looper.
        mPreviewSession.setRepeatingRequest(
                mPreviewRequest, new CameraCaptureSession.CaptureCallback() {
                    @Override
                    public void onCaptureCompleted(CameraCaptureSession session,
                            CaptureRequest request, TotalCaptureResult result) {
                        mLastExposureTimeNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
                    }
                }, null);
    } catch (CameraAccessException | SecurityException | IllegalStateException
            | IllegalArgumentException ex) {
        Log.e(TAG, "setRepeatingRequest: ", ex);
        return;
    }
    // Now wait for trigger on CrPreviewReaderListener.onImageAvailable();
    nativeOnStarted(mNativeVideoCaptureDeviceAndroid);
    changeCameraStateAndNotify(CameraState.STARTED);
}
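CaptureResult.SENSOR_EXPOSURE_TIME is reported in nanoseconds. A small hedged helper for turning a value like mLastExposureTimeNs into the usual shutter-speed notation; the helper is illustrative and not part of VideoCaptureCamera2 (it assumes java.util.Locale is imported).
// Illustrative helper, not part of VideoCaptureCamera2: format a nanosecond
// exposure time as a shutter-speed string.
private static String formatExposure(long exposureTimeNs) {
    if (exposureTimeNs <= 0) {
        return "unknown";
    }
    double seconds = exposureTimeNs / 1_000_000_000.0;
    if (seconds >= 1.0) {
        return String.format(Locale.US, "%.1f s", seconds);
    }
    return "1/" + Math.round(1.0 / seconds) + " s";
}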
Project: parkscan
File: CameraPreviewActivityFragment.java
private void process(CaptureResult result) {
    switch (mState) {
        case STATE_PREVIEW: {
            // We have nothing to do when the camera preview is working normally.
            break;
        }
    }
}
Project: material-camera
File: Camera2Fragment.java
@Override
public void onCaptureProgressed(
@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
Project: opencv-documentscanner-android
File: Camera2BasicFragment.java
private void process(CaptureResult result) {
    int afState = result.get(CaptureResult.CONTROL_AF_STATE);
    if (CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED == afState) {
        areWeFocused = true;
        getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                button.setBackgroundColor(getActivity().getResources().getColor(R.color.blue));
                button.setText("Focused");
            }
        });
    } else {
        areWeFocused = false;
        getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                button.setBackgroundColor(getActivity().getResources().getColor(R.color.colorAccent));
                button.setText("Not focused");
            }
        });
    }
    if (shouldCapture) {
        if (areWeFocused) {
            shouldCapture = false;
            captureStillPicture();
        }
    }
    // switch (mState) {
    //     case STATE_PREVIEW: {
    //         Log.d(TAG, "STATE_PREVIEW");
    //         // We have nothing to do when the camera preview is working normally.
    //         break;
    //     }
    //     case STATE_WAITING_LOCK: {
    //         Log.d(TAG, "STATE_WAITING_LOCK");
    //         Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
    //         if (afState == null) {
    //             captureStillPicture();
    //         } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
    //                 CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState ||
    //                 CaptureResult.CONTROL_AF_STATE_INACTIVE == afState /* add this */) {
    //             Log.d(TAG, "STATE_WAITING_LOCK222");
    //             // CONTROL_AE_STATE can be null on some devices
    //             Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
    //             if (aeState == null || aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
    //                 mState = STATE_PICTURE_TAKEN;
    //                 captureStillPicture();
    //             } else {
    //                 runPrecaptureSequence();
    //             }
    //         }
    //         break;
    //     }
    //     case STATE_WAITING_PRECAPTURE: {
    //         Log.d(TAG, "STATE_WAITING_PRECAPTURE");
    //         // CONTROL_AE_STATE can be null on some devices
    //         Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
    //         if (aeState == null ||
    //                 aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
    //                 aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
    //             mState = STATE_WAITING_NON_PRECAPTURE;
    //         }
    //         break;
    //     }
    //     case STATE_WAITING_NON_PRECAPTURE: {
    //         Log.d(TAG, "STATE_WAITING_NON_PRECAPTURE");
    //         // CONTROL_AE_STATE can be null on some devices
    //         Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
    //         if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
    //             mState = STATE_PICTURE_TAKEN;
    //             captureStillPicture();
    //         }
    //         break;
    //     }
    // }
}
Project: opencv-documentscanner-android
File: Camera2BasicFragment.java
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
Project: snu-artoon
File: CameraConnectionFragment.java
@Override
public void onCaptureProgressed(
final CameraCaptureSession session,
final CaptureRequest request,
final CaptureResult partialResult) {}
Project: CameraFragment
File: Camera2Manager.java
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
processCaptureResult(partialResult);
}
Project: TensorflowAndroidDemo
File: CameraConnectionFragment.java
@Override
public void onCaptureProgressed(
final CameraCaptureSession session,
final CaptureRequest request,
final CaptureResult partialResult) {}
Project: SIGHT-For-the-Blind
File: CameraHandler.java
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
Log.d(TAG, "Partial result");
}
Project: Android-Code-Demos
File: Camera2BasicFragment.java
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
Project: tensorflow-classifier-android
File: CameraConnectionFragment.java
@Override
public void onCaptureProgressed(
final CameraCaptureSession session,
final CaptureRequest request,
final CaptureResult partialResult) {}
Project: PXLSRT
File: Camera2.java
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
process(partialResult);
}
Project: Camera_Calibration_Android
File: Camera2BasicFragment.java
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
Project: fast_face_android
File: CameraConnectionFragment.java
@Override
public void onCaptureProgressed(
final CameraCaptureSession session,
final CaptureRequest request,
final CaptureResult partialResult) {
}
Project: Cam2Caption
File: Camera2BasicFragment.java
private void process(CaptureResult result) {
    switch (mState) {
        case STATE_PREVIEW: {
            // We have nothing to do when the camera preview is working normally.
            break;
        }
        /*
        case STATE_WAITING_LOCK: {
            Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
            if (afState == null) {
                //captureStillPicture();
            } else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
                    CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState) {
                // CONTROL_AE_STATE can be null on some devices
                Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
                if (aeState == null ||
                        aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
                    mState = STATE_PICTURE_TAKEN;
                    //captureStillPicture();
                } else {
                    //runPrecaptureSequence();
                }
            }
            break;
        }
        case STATE_WAITING_PRECAPTURE: {
            // CONTROL_AE_STATE can be null on some devices
            Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
            if (aeState == null ||
                    aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
                    aeState == CaptureRequest.CONTROL_AE_STATE_FLASH_REQUIRED) {
                mState = STATE_WAITING_NON_PRECAPTURE;
            }
            break;
        }
        case STATE_WAITING_NON_PRECAPTURE: {
            // CONTROL_AE_STATE can be null on some devices
            Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
            if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
                mState = STATE_PICTURE_TAKEN;
                //captureStillPicture();
            }
            break;
        }
        */
    }
}
Project: Cam2Caption
File: Camera2BasicFragment.java
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
Project: androidthings-imageclassifier
File: CameraHandler.java
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
Log.d(TAG, "Partial result");
}
Project: android-Camera2Basic-master
File: Camera2BasicFragment.java
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
Project: Camera2Vision
File: Camera2Source.java
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
process(partialResult);
}