Example source code for the Java class android.media.ImageReader
Project: ScreenRecordCaptureMaster
File: CaptureScreenImage.java
/**
* Take a screenshot when a new image becomes available.
*
* @param reader   the ImageReader that produced the frame
* @param path     output file path for the screenshot
* @param callback callback notified once the shot has been saved
*/
private void imageAvailable(ImageReader reader, String path, IRecordShotCallback callback) {
mImageReaderLock.lock();
try{
Image image = reader.acquireLatestImage();
if(image == null) return;
int width = image.getWidth();
int height = image.getHeight();
final Image.Plane[] planes = image.getPlanes();
final ByteBuffer buffer = planes[0].getBuffer();
int pixelStride = planes[0].getPixelStride();
int rowStride = planes[0].getRowStride();
int rowPadding = rowStride - pixelStride * width;
Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
image.close();
//release here, otherwise many more screenshots would keep being captured
release();
saveBitmap(path, bitmap, callback);
}finally {
mImageReaderLock.unlock();
}
}
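For reference, here is a minimal sketch of what a saveBitmap(path, bitmap, callback) helper like the one called above might do, assuming PNG output. This is an illustration, not the project's actual implementation, and the callback invocation is omitted because the IRecordShotCallback interface is not shown here. It additionally requires java.io.File, java.io.FileOutputStream and java.io.IOException.
private void saveBitmapSketch(String path, Bitmap bitmap) {
    File file = new File(path);
    try (FileOutputStream out = new FileOutputStream(file)) {
        // PNG is lossless, so the quality argument is ignored
        bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
        out.flush();
    } catch (IOException e) {
        Log.e("CaptureScreenImage", "failed to save screenshot to " + path, e);
    } finally {
        bitmap.recycle();
    }
}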
Project: ROLF-EV3
File: PCVideoStreamer.java
public void readyCamera() {
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
String pickedCamera = getCamera(manager);
if (ActivityCompat.checkSelfPermission(activity, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
// TODO: Consider calling
// ActivityCompat#requestPermissions
// here to request the missing permissions, and then overriding
// public void onRequestPermissionsResult(int requestCode, String[] permissions,
// int[] grantResults)
// to handle the case where the user grants the permission. See the documentation
// for ActivityCompat#requestPermissions for more details.
return;
}
manager.openCamera(pickedCamera, cameraStateCallback, null);
imageReader = ImageReader.newInstance(videoWidth, videoHeight, 0x00000001 /* PixelFormat.RGBA_8888; ImageFormat.YUV_420_888 would be 0x23 */, 2 /* images buffered */);
imageReader.setOnImageAvailableListener(onImageAvailableListener, null);
Log.i(TAG, "imageReader created");
} catch (CameraAccessException e){
Log.e(TAG, e.getMessage());
}
}
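getCamera(manager) is a project helper that is not shown above. A hedged sketch of what such a helper typically looks like follows: prefer the first back-facing camera and fall back to any camera. The method name and fallback behaviour are assumptions, not the project's code.
private String getCameraSketch(CameraManager manager) throws CameraAccessException {
    String fallback = null;
    for (String cameraId : manager.getCameraIdList()) {
        if (fallback == null) {
            fallback = cameraId;
        }
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
        if (facing != null && facing == CameraMetadata.LENS_FACING_BACK) {
            return cameraId;   // prefer the back-facing camera
        }
    }
    return fallback;           // no back-facing camera found
}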
Project: https-github.com-hyb1996-NoRootScriptDroid
File: ScreenCapturer.java
private void setImageListener(Handler handler) {
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
if (mImageAvailable) {
return;
}
mLatestImage = reader.acquireLatestImage();
if (mLatestImage != null) {
mImageAvailable = true;
synchronized (mImageWaitingLock) {
mImageWaitingLock.notify();
}
}
}
}, handler);
}
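The listener above stores a single frame and signals mImageWaitingLock. A hedged sketch of the consumer side that this design implies is shown below; the method name and the timeout handling are assumptions, not the project's code.
private Image waitForLatestImage(long timeoutMillis) throws InterruptedException {
    synchronized (mImageWaitingLock) {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (!mImageAvailable) {
            long remaining = deadline - System.currentTimeMillis();
            if (remaining <= 0) {
                return null;            // timed out without receiving a frame
            }
            mImageWaitingLock.wait(remaining);
        }
    }
    return mLatestImage;
}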
Project: Camera_Calibration_Android
File: Camera2BasicFragment.java
@Override
public void onImageAvailable(ImageReader reader) {
// mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage(), mFile, mImages));
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
mImages.add(ByteString.copyFrom(bytes));
showToast(String.valueOf(mImages.size()) + "/" + String.valueOf(mTotal), Toast.LENGTH_LONG);
if(mImages.size() >= mTotal) {
showToast(String.valueOf("Calibrating..."), Toast.LENGTH_LONG);
final Activity activity = getActivity();
if (activity != null) {
activity.runOnUiThread(new GrpcPostImageRunnable());
}
}
} finally {
if (image != null) {
image.close();
}
}
}
Project: Cam2Caption
File: Camera2BasicFragment.java
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] imageBytes = new byte[buffer.remaining()];
buffer.get(imageBytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
final String text = runModel(bitmap);
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
textView.setText(text);
}
});
} finally {
if (image != null) {
image.close();
}
}
}
Project: Bigbang
File: ScreenCapture.java
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
private void createVirtualEnvironment() {
dateFormat = new SimpleDateFormat("yyyy_MM_dd_hh_mm_ss");
strDate = dateFormat.format(new java.util.Date());
pathImage = Environment.getExternalStorageDirectory().getPath() + "/Pictures/";
nameImage = pathImage + strDate + ".png";
mMediaProjectionManager1 = (MediaProjectionManager) activity.getApplication().getSystemService(Context.MEDIA_PROJECTION_SERVICE);
mWindowManager1 = (WindowManager) activity.getApplication().getSystemService(Context.WINDOW_SERVICE);
windowWidth = ViewUtil.getScreenWidth(activity);
windowHeight = ViewUtil.getSceenHeight(activity);
metrics = new DisplayMetrics();
mWindowManager1.getDefaultDisplay().getMetrics(metrics);
mScreenDensity = metrics.densityDpi;
mImageReader = ImageReader.newInstance(windowWidth, windowHeight, 0x1, 2); // 0x1 = PixelFormat.RGBA_8888 (not ImageFormat.RGB_565)
LogUtil.d(TAG, "prepared the virtual environment");
}
Project: SmingZZick_App
File: ScreenCaptureService.java
/****************************************** Factoring Virtual Display creation ****************/
private void createVirtualDisplay() {
// get width and height
Point size = new Point();
mDisplay.getRealSize(size);
mWidth = size.x;
mHeight = size.y;
mDisplayMetrics = new DisplayMetrics();
mDisplay.getRealMetrics(mDisplayMetrics);
if (SmingManager.getInstance().isSaveScreenshot() == false) {
if (mWidth > SmingManager.getInstance().getSmingHalfWidth()) {
float downScale = (float) SmingManager.getInstance().getSmingHalfWidth() / mWidth;
mWidth *= downScale;
mHeight *= downScale;
mHeight += 2;
}
}
// start capture reader
mImageReader = ImageReader.newInstance(mWidth, mHeight, PixelFormat.RGBA_8888, 2);
mVirtualDisplay = sMediaProjection.createVirtualDisplay(SCREENCAP_NAME, mWidth, mHeight, mDensity, VIRTUAL_DISPLAY_FLAGS, mImageReader.getSurface(), null, mHandler);
mImageReader.setOnImageAvailableListener(new ImageAvailableListener(), mHandler);
}
Project: BWS-Android
File: FacialRecognitionFragment.java
/**
* lazily initialize ImageReader and select preview size
*/
private void setupPreviewSizeAndImageReader() {
if (previewSize == null) {
previewSize = cameraHelper.selectPreviewSize(openCamera);
}
if (imageReader == null) {
int maxImages = 2; // should be at least 2 according to ImageReader.acquireLatestImage() documentation
imageReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, maxImages);
imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image img = reader.acquireLatestImage();
if (img != null) {
// Make an in-memory copy of the image so the Image from the reader can be closed as soon as possible.
// This helps the thread running the preview stay up to date.
IntensityPlane imgCopy = IntensityPlane.extract(img);
img.close();
int imageRotation = cameraHelper.getImageRotation(openCamera, getRelativeDisplayRotation());
presenter.onImageCaptured(imgCopy, imageRotation);
}
}
}, null);
}
}
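IntensityPlane.extract(img) is a project type that copies the data out so the Image can be closed immediately. A hedged sketch of the core of such an extraction for YUV_420_888 input is given below; the helper name is an assumption.
private static byte[] copyIntensityPlane(Image img) {
    // Plane 0 of a YUV_420_888 image is the Y (intensity) plane.
    Image.Plane yPlane = img.getPlanes()[0];
    ByteBuffer buffer = yPlane.getBuffer();
    byte[] data = new byte[buffer.remaining()];
    buffer.get(data);   // copy, so the Image can be closed right away
    // note: rows may still be padded to getRowStride() bytes per row
    return data;
}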
Project: ScreenRecordCaptureMaster
File: CaptureScreenImage.java
public void initCapture(Context context, MediaProjection mediaProjection, final String path, final IRecordShotCallback callback){
int screenWidth = context.getResources().getDisplayMetrics().widthPixels;
int screenHeight = context.getResources().getDisplayMetrics().heightPixels;
// thread for image checking
mCheckThread = new HandlerThread("CheckHandler");
mCheckThread.start();
mCheckHandler = new Handler(mCheckThread.getLooper());
try {
mImageReader = ImageReader.newInstance(screenWidth, screenHeight, PixelFormat.RGBA_8888, 2);
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
imageAvailable(reader, path, callback);
}
}, mCheckHandler);
mSurface = mImageReader.getSurface();
}finally {
}
mVirtualDisplay = mediaProjection.createVirtualDisplay("mediaprojection", screenWidth, screenHeight,
1, DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, mSurface, null, null);
}
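A hedged sketch of how initCapture() might be wired up from the MediaProjection consent flow follows. The field and constant names (captureScreenImage, REQUEST_SCREEN_CAPTURE, callback) and the output path are assumptions for illustration only.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_SCREEN_CAPTURE && resultCode == Activity.RESULT_OK && data != null) {
        MediaProjectionManager mpm =
                (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
        MediaProjection projection = mpm.getMediaProjection(resultCode, data);
        captureScreenImage.initCapture(this, projection, getFilesDir() + "/screenshot.png", callback);
    }
}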
Project: AndroidScreenShot_SysApi
File: Shotter.java
public Shotter(Context context, Intent data) {
this.mRefContext = new SoftReference<>(context);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
mMediaProjection = getMediaProjectionManager().getMediaProjection(Activity.RESULT_OK,
data);
mImageReader = ImageReader.newInstance(
getScreenWidth(),
getScreenHeight(),
PixelFormat.RGBA_8888, // must match the format used when the buffer is processed below; RGB_565 causes compatibility problems on some devices
1);
}
}
Project: doorbell
File: DoorbellActivity.java
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
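// note: acquireLatestImage() can return null when no new frame is queued; robust code would check for null before calling getPlanes()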
// get image bytes
ByteBuffer imageBuf = image.getPlanes()[0].getBuffer();
final byte[] imageBytes = new byte[imageBuf.remaining()];
imageBuf.get(imageBytes);
image.close();
onPictureTaken(imageBytes);
}
Project: CameraCompat
File: Camera2PreviewFragment.java
@Override
public void onOpened(final CameraDevice cameraDevice, final ImageReader imageReader,
final Handler cameraHandler, final Rotation rotation, final boolean flipHorizontal,
final boolean flipVertical) {
if (!isResumed() || !CheckUtil.nonNull(mProcessorChain)) {
return;
}
mProcessorChain.onCameraOpened(rotation, flipHorizontal, flipVertical,
surfaceTexture -> {
// fix MX5 preview not show bug: http://stackoverflow.com/a/34337226/3077508
surfaceTexture.setDefaultBufferSize(mPreviewWidth, mPreviewHeight);
Surface surface = new Surface(surfaceTexture);
List<Surface> targets = Arrays.asList(surface, imageReader.getSurface());
mCamera2Helper.outputTargetChanged(targets);
startPreviewDirectly(cameraDevice, targets, mIsDefaultFlashOpen, cameraHandler);
});
}
Project: MoleMapperAndroid
File: LollipopCameraFragment.java
@Override
public void onImageAvailable(ImageReader reader)
{
Activity activity = getActivity();
if(null == mTextureView || null == mPreviewSize || null == activity)
{
return;
}
try
{
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraId);
mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage(),
getJpegOrientation(characteristics, rotation)));
}
catch(CameraAccessException e)
{
LogExt.e(getClass(), e);
}
}
Project: AndroidSnippets
File: CameraApi2ExampleActivity.java
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
try {
if (image.getFormat() == ImageFormat.JPEG) {
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
WindowManager windowManager = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
int rotation = windowManager.getDefaultDisplay().getRotation();
Bitmap rotated = ImageUtils.rotateBitmap(bitmap, rotation, mCamera2Engine.getSensorOrientation());
mImageView.setImageBitmap(rotated);
}
} finally {
image.close();
}
}
Project: SimpleSmsRemote
File: ImageUtils.java
/**
* Retrieve an ARGB_8888 Bitmap from the latest image of the given ImageReader.
*
* @param imageReader the image reader
* @return the decoded bitmap
*/
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
public static Bitmap GetBitmapFromImageReader(ImageReader imageReader) {
Bitmap bitmap;
//get image buffer
Image image = imageReader.acquireLatestImage();
final Image.Plane[] planes = image.getPlanes();
final ByteBuffer buffer = planes[0].getBuffer();
int pixelStride = planes[0].getPixelStride();
int rowStride = planes[0].getRowStride();
int rowPadding = rowStride - pixelStride * image.getWidth();
// create bitmap
bitmap = Bitmap.createBitmap(image.getWidth() + rowPadding / pixelStride, image.getHeight(), Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
image.close();
return bitmap;
}
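The bitmap returned above is width + rowPadding / pixelStride pixels wide, so it still contains the stride padding on its right edge. A hedged variant that crops it back to the true image size, mirroring the CaptureScreenImage example at the top of this page, is sketched below; the method name is an assumption.
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
public static Bitmap getCroppedBitmapFromImageReader(ImageReader imageReader) {
    Image image = imageReader.acquireLatestImage();
    if (image == null) {
        return null;
    }
    int width = image.getWidth();
    int height = image.getHeight();
    final Image.Plane plane = image.getPlanes()[0];
    final ByteBuffer buffer = plane.getBuffer();
    int pixelStride = plane.getPixelStride();
    int rowPadding = plane.getRowStride() - pixelStride * width;
    Bitmap padded = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
    padded.copyPixelsFromBuffer(buffer);
    image.close();
    // drop the padding columns so callers get exactly width x height pixels
    return Bitmap.createBitmap(padded, 0, 0, width, height);
}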
Project: Kaku
File: MainService.java
private void createVirtualDisplay(){
// display metrics
mWindowManager = (WindowManager) getSystemService(WINDOW_SERVICE);
DisplayMetrics metrics = getResources().getDisplayMetrics();
int mDensity = metrics.densityDpi;
mDisplay = mWindowManager.getDefaultDisplay();
// get width and height
mDisplay.getRealSize(mRealDisplaySize);
// start capture reader
Log.e(TAG, String.format("Starting Projection: %dx%d", mRealDisplaySize.x, mRealDisplaySize.y));
if (mVirtualDisplay != null){
mVirtualDisplay.release();
}
mImageReader = ImageReader.newInstance(mRealDisplaySize.x, mRealDisplaySize.y, PixelFormat.RGBA_8888, 2);
mVirtualDisplay = mMediaProjection.createVirtualDisplay(getClass().getName(), mRealDisplaySize.x, mRealDisplaySize.y, mDensity, VIRTUAL_DISPLAY_FLAGS, mImageReader.getSurface(), null, mHandler);
}
Project: Camera2
File: OneCameraImpl.java
/**
* Instantiates a new camera based on Camera 2 API.
*
* @param device The underlying Camera 2 device.
* @param characteristics The device's characteristics.
* @param pictureSize the size of the final image to be taken.
*/
OneCameraImpl(CameraDevice device, CameraCharacteristics characteristics, Size pictureSize) {
mDevice = device;
mCharacteristics = characteristics;
mFullSizeAspectRatio = calculateFullSizeAspectRatio(characteristics);
mCameraThread = new HandlerThread("OneCamera2");
mCameraThread.start();
mCameraHandler = new Handler(mCameraThread.getLooper());
mCaptureImageReader = ImageReader.newInstance(pictureSize.getWidth(),
pictureSize.getHeight(),
sCaptureImageFormat, 2);
mCaptureImageReader.setOnImageAvailableListener(mCaptureImageListener, mCameraHandler);
Log.d(TAG, "New Camera2 based OneCameraImpl created.");
}
Project: 365browser
File: VideoCaptureCamera2.java
@Override
public void onImageAvailable(ImageReader reader) {
try (Image image = reader.acquireLatestImage()) {
if (image == null) {
throw new IllegalStateException();
}
if (image.getFormat() != ImageFormat.JPEG) {
Log.e(TAG, "Unexpected image format: %d", image.getFormat());
throw new IllegalStateException();
}
final byte[] capturedData = readCapturedData(image);
nativeOnPhotoTaken(mNativeVideoCaptureDeviceAndroid, mCallbackId, capturedData);
} catch (IllegalStateException ex) {
notifyTakePhotoError(mCallbackId);
return;
}
if (createPreviewObjectsAndStartPreview()) return;
nativeOnError(mNativeVideoCaptureDeviceAndroid, "Error restarting preview");
}
Project: youtube_livestream
File: StreamerActivity.java
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image != null) {
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
if (rgbaData == null) {
rgbaData = new byte[width * height * 4];
}
buffer.get(rgbaData);
videoStreamingConnection.sendVideoFrame(rgbaData);
image.close();
}
}
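The single buffer.get(rgbaData) above assumes the RGBA plane has no row padding (rowStride == width * 4), which does not hold on every device. A hedged row-by-row variant is sketched below; the method name is an assumption.
private static void copyRgbaFrame(Image image, byte[] dst) {
    Image.Plane plane = image.getPlanes()[0];
    ByteBuffer src = plane.getBuffer();
    int width = image.getWidth();
    int height = image.getHeight();
    int rowStride = plane.getRowStride();
    int bytesPerRow = width * plane.getPixelStride();   // pixel stride is 4 for RGBA_8888
    int dstOffset = 0;
    for (int row = 0; row < height; row++) {
        src.position(row * rowStride);
        src.get(dst, dstOffset, bytesPerRow);
        dstOffset += bytesPerRow;
    }
}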
Project: https-github.com-hyb1996-NoRootScriptDroid
File: ScreenCapturer.java
private void initVirtualDisplay(MediaProjectionManager manager, Intent data, int screenWidth, int screenHeight, int screenDensity) {
mImageReader = ImageReader.newInstance(screenWidth, screenHeight, PixelFormat.RGBA_8888, 1);
mMediaProjection = manager.getMediaProjection(Activity.RESULT_OK, data);
mVirtualDisplay = mMediaProjection.createVirtualDisplay("screen-mirror",
screenWidth, screenHeight, screenDensity, DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
mImageReader.getSurface(), null, null);
}
Project: NotifyTools
File: CaptureHelper.java
/**
* Initialize the screen-capture pipeline:
* MediaProjectionManager -> MediaProjection -> VirtualDisplay
*/
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public void screenShotPrepare() {
initLooper();
// mediaProjection = MainApp.mediaProjection;
// SysUtils.hideStatusBar(this);
if(mediaProjection==null) {
log.d("screenShotPrepare mediaProjection null ");
return;
}
log.d("screenShotPrepare");
Display display = ((WindowManager)cxt.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
metrics = new DisplayMetrics();
display.getRealMetrics(metrics);
Point point = new Point();
display.getRealSize(point);
width = point.x;
height = point.y;
//route the screen contents into the Surface associated with the ImageReader
imageReader = ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, 1);
log.d("screenShotPrepare2 "+imageReader);
// imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
// @Override
// public void onImageAvailable(ImageReader reader) {
// log.d("onImageAvailable ");
// }
// },null);
virtualDisplay = mediaProjection.createVirtualDisplay("ScreenShotDemo",
width, height, metrics.densityDpi,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
imageReader.getSurface(),
null, null/*Handler*/);
}
Project: NotifyTools
File: JumpService.java
/**
* Initialize the screen-capture pipeline:
* MediaProjectionManager -> MediaProjection -> VirtualDisplay
*/
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
protected void screenShotPrepare() {
initLooper();
// mediaProjection = MainApp.mediaProjection;
// SysUtils.hideStatusBar(this);
if(mediaProjection==null) {
log.d("screenShotPrepare mediaProjection null ");
return;
}
log.d("screenShotPrepare");
Display display = ((WindowManager) getApplicationContext().getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
metrics = new DisplayMetrics();
display.getRealMetrics(metrics);
Point point = new Point();
display.getRealSize(point);
width = point.x;
height = point.y;
//route the screen contents into the Surface associated with the ImageReader
imageReader = ImageReader.newInstance(width, height, PixelFormat.RGBA_8888, 1);
log.d("screenShotPrepare2 "+imageReader);
// imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
// @Override
// public void onImageAvailable(ImageReader reader) {
// log.d("onImageAvailable ");
// }
// },null);
virtualDisplay = mediaProjection.createVirtualDisplay("ScreenShotDemo",
width, height, metrics.densityDpi,
DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
imageReader.getSurface(),
null, null/*Handler*/);
}
Project: CameraFragment
File: Camera2Manager.java
@Override
public void onImageAvailable(ImageReader imageReader) {
final File outputFile = outputPath;
backgroundHandler.post(new ImageSaver(imageReader.acquireNextImage(), outputFile, new ImageSaver.ImageSaverCallback() {
@Override
public void onSuccessFinish(final byte[] bytes) {
Log.d(TAG, "onPhotoSuccessFinish: ");
if (cameraPhotoListener != null) {
uiHandler.post(new Runnable() {
@Override
public void run() {
cameraPhotoListener.onPhotoTaken(bytes, outputPath, callback);
callback = null;
}
});
}
unlockFocus();
}
@Override
public void onError() {
Log.d(TAG, "onPhotoError: ");
uiHandler.post(new Runnable() {
@Override
public void run() {
cameraPhotoListener.onPhotoTakeError();
}
});
}
}));
}
Project: SIGHT-For-the-Blind
File: CameraHandler.java
/**
* Initialize the camera device
*/
public void initializeCamera(Context context,
Handler backgroundHandler,
ImageReader.OnImageAvailableListener imageAvailableListener) {
// Discover the camera instance
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
String[] camIds = {};
try {
camIds = manager.getCameraIdList();
} catch (CameraAccessException e) {
Log.d(TAG, "Cam access exception getting IDs", e);
}
if (camIds.length < 1) {
Log.d(TAG, "No cameras found");
return;
}
String id = camIds[0];
Log.d(TAG, "Using camera id " + id);
// Initialize the image processor
mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
ImageFormat.JPEG, MAX_IMAGES);
mImageReader.setOnImageAvailableListener(
imageAvailableListener, backgroundHandler);
// Open the camera resource
try {
manager.openCamera(id, mStateCallback, backgroundHandler);
} catch (CameraAccessException cae) {
Log.d(TAG, "Camera access exception", cae);
}
}
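mStateCallback is referenced above but not shown. A hedged sketch of a minimal CameraDevice.StateCallback follows; a production callback would typically also keep the opened CameraDevice and start a capture session, which is omitted here.
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
    @Override
    public void onOpened(CameraDevice cameraDevice) {
        Log.d(TAG, "Camera opened: " + cameraDevice.getId());
        // a real implementation would store the device and create a capture session here
    }
    @Override
    public void onDisconnected(CameraDevice cameraDevice) {
        cameraDevice.close();
    }
    @Override
    public void onError(CameraDevice cameraDevice, int error) {
        Log.e(TAG, "Camera device error: " + error);
        cameraDevice.close();
    }
};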
Project: SIGHT-For-the-Blind
File: ImageClassifierActivity.java
@Override
public void onImageAvailable(ImageReader reader) {
final Bitmap bitmap;
try (Image image = reader.acquireNextImage()) {
bitmap = mImagePreprocessor.preprocessImage(image);
}
runOnUiThread(new Runnable() {
@Override
public void run() {
mImage.setImageBitmap(bitmap);
}
});
final List<Classifier.Recognition> results = mTensorFlowClassifier.doRecognize(bitmap);
Log.d(TAG, "Got the following results from Tensorflow: " + results);
if (mTtsEngine != null) {
// speak out loud the result of the image recognition
mTtsSpeaker.speakResults(mTtsEngine, results);
} else {
// if there's no TTS, we don't need to wait until the utterance is spoken, so we
// mark ourselves as ready right away.
setReady(true);
}
runOnUiThread(new Runnable() {
@Override
public void run() {
for (int i = 0; i < mResultViews.length; i++) {
if (results.size() > i) {
Classifier.Recognition r = results.get(i);
mResultViews[i].setText(r.getTitle() + " : " + r.getConfidence().toString());
} else {
mResultViews[i].setText(null);
}
}
}
});
}
Project: heifreader
File: HeifReader.java
private static Bitmap renderHevcImageWithFormat(ByteBuffer bitstream, ImageInfo info, int imageFormat) throws FormatFallbackException {
try (ImageReader reader = ImageReader.newInstance(info.size.getWidth(), info.size.getHeight(), imageFormat, 1)) {
renderHevcImage(bitstream, info, reader.getSurface());
Image image = null;
try {
try {
image = reader.acquireNextImage();
} catch (UnsupportedOperationException ex) {
throw new FormatFallbackException(ex);
}
switch (image.getFormat()) {
case ImageFormat.YUV_420_888:
case ImageFormat.YV12:
return convertYuv420ToBitmap(image);
case ImageFormat.RGB_565:
return convertRgb565ToBitmap(image);
default:
throw new RuntimeException("unsupported image format(" + image.getFormat() + ")");
}
} finally {
if (image != null) {
image.close();
}
}
}
}
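A hedged sketch of how a caller might use FormatFallbackException to retry with a different format is shown below; the wrapper name and the fallback order are assumptions, not the library's actual code.
private static Bitmap renderHevcImageSketch(ByteBuffer bitstream, ImageInfo info) {
    try {
        // try to obtain a YUV image first
        return renderHevcImageWithFormat(bitstream, info, ImageFormat.YUV_420_888);
    } catch (FormatFallbackException ex) {
        // the decoder could not deliver YUV on this device; retry with RGB_565
        try {
            return renderHevcImageWithFormat(bitstream, info, ImageFormat.RGB_565);
        } catch (FormatFallbackException ex2) {
            throw new RuntimeException("no supported image format for HEVC rendering", ex2);
        }
    }
}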
Project: Eye
File: HomeActivity.java
/**
* Initialises the output surfaces for the camera's preview.
* There will be two output surfaces -
* 1) mSurfaceView : The surface to just show the preview frame.
* 2) mImageReader : The surface to get the actual pixel image
* data of the preview frame.
*/
private void setupOutputSurfaces() {
outputSurfaces = new ArrayList<>(2);
// For the live preview.
mSurfaceView.getHolder().setFixedSize(screenMaxX, screenMaxY);
outputSurfaces.add(mSurfaceView.getHolder().getSurface());
// For extracting the image.
mImageReader = ImageReader.newInstance(screenMaxX, screenMaxY,
ImageFormat.YUV_420_888, maxAcquired);
mImageReader.setOnImageAvailableListener(getImageAvailableListener(), null);
outputSurfaces.add(mImageReader.getSurface());
}
Project: PXLSRT
File: Camera2.java
@Override
public void onImageAvailable(ImageReader reader) {
try (Image image = reader.acquireNextImage()) {
Image.Plane[] planes = image.getPlanes();
if (planes.length > 0) {
ByteBuffer buffer = planes[0].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
mCallback.onPictureTaken(data);
}
}
}
Project: android-robocar
File: TensorFlowTrainer.java
@Override
public void onImageAvailable(final ImageReader reader) {
Timber.d("Image available.");
new Thread(new Runnable() {
@Override
public void run() {
File root = ImageSaver.getRoot(CameraOperator.ROBOCAR_FOLDER);
ImageSaver imageSaver = new ImageSaver(reader.acquireLatestImage(), root, getFilename());
imageSaver.run();
}
}).start();
}
Project: android-robocar
File: CameraDriver.java
@Override
public void onImageAvailable(final ImageReader reader) {
Timber.d("Picture is available.");
new Thread(new Runnable() {
@Override
public void run() {
File root = ImageSaver.getRoot(CameraOperator.ROBOCAR_FOLDER);
new ImageSaver(reader.acquireLatestImage(), root, PHOTO_FILENAME).run();
processPhoto(root, PHOTO_FILENAME);
}
}).start();
}
Project: androidthings-imageclassifier
File: CameraHandler.java
/**
* Initialize the camera device
*/
public void initializeCamera(Context context,
Handler backgroundHandler,
ImageReader.OnImageAvailableListener imageAvailableListener) {
// Discover the camera instance
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
String[] camIds = {};
try {
camIds = manager.getCameraIdList();
} catch (CameraAccessException e) {
Log.d(TAG, "Cam access exception getting IDs", e);
}
if (camIds.length < 1) {
Log.d(TAG, "No cameras found");
return;
}
String id = camIds[0];
Log.d(TAG, "Using camera id " + id);
// Initialize the image processor
mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
ImageFormat.JPEG, MAX_IMAGES);
mImageReader.setOnImageAvailableListener(
imageAvailableListener, backgroundHandler);
// Open the camera resource
try {
manager.openCamera(id, mStateCallback, backgroundHandler);
} catch (CameraAccessException cae) {
Log.d(TAG, "Camera access exception", cae);
}
}
Project: androidthings-imageclassifier
File: ImageClassifierActivity.java
/**
* Initialize the camera that will be used to capture images.
*/
private void initCamera() {
mImagePreprocessor = new ImagePreprocessor();
mCameraHandler = CameraHandler.getInstance();
Handler threadLooper = new Handler(getMainLooper());
mCameraHandler.initializeCamera(this, threadLooper,
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader imageReader) {
Bitmap bitmap = mImagePreprocessor.preprocessImage(imageReader.acquireNextImage());
onPhotoReady(bitmap);
}
});
}
Project: Camera2Vision
File: Camera2Source.java
@Override
public void onImageAvailable(ImageReader reader) {
Image mImage = reader.acquireNextImage();
if(mImage == null) {
return;
}
mFrameProcessor.setNextFrame(convertYUV420888ToNV21(mImage));
mImage.close();
}
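convertYUV420888ToNV21(mImage) is a project helper that is not shown above. Below is a hedged sketch of a straightforward YUV_420_888 to NV21 conversion; it is an illustration, not Camera2Source's actual implementation, and it assumes even width and height and that both chroma planes share the same strides.
private static byte[] yuv420888ToNv21(Image image) {
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] nv21 = new byte[width * height * 3 / 2];
    // Y plane: copy row by row to honour the row stride.
    Image.Plane yPlane = image.getPlanes()[0];
    ByteBuffer yBuffer = yPlane.getBuffer();
    int yRowStride = yPlane.getRowStride();
    int pos = 0;
    for (int row = 0; row < height; row++) {
        yBuffer.position(row * yRowStride);
        yBuffer.get(nv21, pos, width);
        pos += width;
    }
    // Chroma planes: NV21 stores interleaved V/U samples at quarter resolution.
    Image.Plane uPlane = image.getPlanes()[1];
    Image.Plane vPlane = image.getPlanes()[2];
    ByteBuffer uBuffer = uPlane.getBuffer();
    ByteBuffer vBuffer = vPlane.getBuffer();
    int uvRowStride = uPlane.getRowStride();
    int uvPixelStride = uPlane.getPixelStride();   // 1 for planar, 2 for semi-planar layouts
    for (int row = 0; row < height / 2; row++) {
        for (int col = 0; col < width / 2; col++) {
            int uvIndex = row * uvRowStride + col * uvPixelStride;
            nv21[pos++] = vBuffer.get(uvIndex);
            nv21[pos++] = uBuffer.get(uvIndex);
        }
    }
    return nv21;
}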
Project: sample-tensorflow-imageclassifier
File: CameraHandler.java
/**
* Initialize the camera device
*/
public void initializeCamera(Context context,
Handler backgroundHandler,
ImageReader.OnImageAvailableListener imageAvailableListener) {
// Discover the camera instance
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
String[] camIds = {};
try {
camIds = manager.getCameraIdList();
} catch (CameraAccessException e) {
Log.d(TAG, "Cam access exception getting IDs", e);
}
if (camIds.length < 1) {
Log.d(TAG, "No cameras found");
return;
}
String id = camIds[0];
Log.d(TAG, "Using camera id " + id);
// Initialize the image processor
mImageReader = ImageReader.newInstance(IMAGE_WIDTH, IMAGE_HEIGHT,
ImageFormat.JPEG, MAX_IMAGES);
mImageReader.setOnImageAvailableListener(
imageAvailableListener, backgroundHandler);
// Open the camera resource
try {
manager.openCamera(id, mStateCallback, backgroundHandler);
} catch (CameraAccessException cae) {
Log.d(TAG, "Camera access exception", cae);
}
}
Project: sample-tensorflow-imageclassifier
File: ImageClassifierActivity.java
@Override
public void onImageAvailable(ImageReader reader) {
final Bitmap bitmap;
try (Image image = reader.acquireNextImage()) {
bitmap = mImagePreprocessor.preprocessImage(image);
}
runOnUiThread(new Runnable() {
@Override
public void run() {
mImage.setImageBitmap(bitmap);
}
});
final List<Classifier.Recognition> results = mTensorFlowClassifier.doRecognize(bitmap);
Log.d(TAG, "Got the following results from Tensorflow: " + results);
if (mTtsEngine != null) {
// speak out loud the result of the image recognition
mTtsSpeaker.speakResults(mTtsEngine, results);
} else {
// if there's no TTS, we don't need to wait until the utterance is spoken, so we
// mark ourselves as ready right away.
setReady(true);
}
runOnUiThread(new Runnable() {
@Override
public void run() {
for (int i = 0; i < mResultViews.length; i++) {
if (results.size() > i) {
Classifier.Recognition r = results.get(i);
mResultViews[i].setText(r.getTitle() + " : " + r.getConfidence().toString());
} else {
mResultViews[i].setText(null);
}
}
}
});
}
Project: Auto.js
File: ScreenCapturer.java
private void initVirtualDisplay(MediaProjectionManager manager, Intent data, int screenWidth, int screenHeight, int screenDensity) {
mImageReader = ImageReader.newInstance(screenWidth, screenHeight, PixelFormat.RGBA_8888, 2);
mMediaProjection = manager.getMediaProjection(Activity.RESULT_OK, data);
mVirtualDisplay = mMediaProjection.createVirtualDisplay("screen-mirror",
screenWidth, screenHeight, screenDensity, DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
mImageReader.getSurface(), null, null);
}
Project: SCCameraView
File: Camera2View.java
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] imageData = new byte[buffer.remaining()];
buffer.get(imageData);
saveImage(imageData);
image.close();
}
Project: 361Camera
File: Camera2Fragment.java
private ImageSaver(Image image, File file, CaptureResult result,
CameraCharacteristics characteristics, Context context,
RefCountedAutoCloseable<ImageReader> reader) {
mImage = image;
mFile = file;
mCaptureResult = result;
mCharacteristics = characteristics;
mContext = context;
mReader = reader;
}