Java class android.media.Image code examples
Project: FamilyBond
File: ImageUtil.java
private static byte[] YUV_420_888toNV21(Image image) {
byte[] nv21;
ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
int ySize = yBuffer.remaining();
int uSize = uBuffer.remaining();
int vSize = vBuffer.remaining();
nv21 = new byte[ySize + uSize + vSize];
// NV21 stores chroma as interleaved VU, so the V plane is copied before the U plane (U and V are swapped)
yBuffer.get(nv21, 0, ySize);
vBuffer.get(nv21, ySize, vSize);
uBuffer.get(nv21, ySize + vSize, uSize);
return nv21;
}
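Note that this simple plane concatenation only yields valid NV21 when the chroma planes are interleaved with a pixel stride of 2, which is the case on most devices. The resulting array is usually handed to a JPEG encoder such as the NV21toJPEG helper referenced in the CameraView snippet further down; a minimal sketch of such a helper, built on android.graphics.YuvImage (the quality value of 100 is an arbitrary choice):
private static byte[] NV21toJPEG(byte[] nv21, int width, int height) {
    // Wrap the NV21 bytes and let the platform encoder produce a JPEG.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, out);
    return out.toByteArray();
}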
Project: ScreenRecordCaptureMaster
File: CaptureScreenImage.java
/**
 * Takes a screenshot when an image becomes available.
 *
 * @param reader   the ImageReader that produced the frame
 * @param path     file path to save the screenshot to
 * @param callback callback notified once the shot has been saved
 */
private void imageAvailable(ImageReader reader, String path, IRecordShotCallback callback) {
mImageReaderLock.lock();
try{
Image image = reader.acquireLatestImage();
if(image == null) return;
int width = image.getWidth();
int height = image.getHeight();
final Image.Plane[] planes = image.getPlanes();
final ByteBuffer buffer = planes[0].getBuffer();
int pixelStride = planes[0].getPixelStride();
int rowStride = planes[0].getRowStride();
int rowPadding = rowStride - pixelStride * width;
Bitmap bitmap = Bitmap.createBitmap(width + rowPadding / pixelStride, height, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
bitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height);
image.close();
//The capture must be released here, otherwise many more screenshots would keep being taken
release();
saveBitmap(path, bitmap, callback);
}finally {
mImageReaderLock.unlock();
}
}
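saveBitmap() is a project helper that is not shown here; a minimal sketch under the assumption that it simply writes the bitmap to the given path and then notifies the callback (the callback method name is hypothetical):
private void saveBitmap(String path, Bitmap bitmap, IRecordShotCallback callback) {
    try (FileOutputStream out = new FileOutputStream(path)) {
        // PNG keeps the screenshot lossless; JPEG would be smaller.
        bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
        if (callback != null) {
            callback.onShotSaved(path); // hypothetical callback method
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}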
Project: SIGHT-For-the-Blind
File: ImagePreprocessor.java
public Bitmap preprocessImage(final Image image) {
if (image == null) {
return null;
}
Assert.assertEquals("Invalid size width", rgbFrameBitmap.getWidth(), image.getWidth());
Assert.assertEquals("Invalid size height", rgbFrameBitmap.getHeight(), image.getHeight());
if (croppedBitmap != null && rgbFrameBitmap != null) {
ByteBuffer bb = image.getPlanes()[0].getBuffer();
rgbFrameBitmap = BitmapFactory.decodeStream(new ByteBufferBackedInputStream(bb));
Helper.cropAndRescaleBitmap(rgbFrameBitmap, croppedBitmap, 0);
}
image.close();
// For debugging
if (SAVE_PREVIEW_BITMAP) {
Helper.saveBitmap(croppedBitmap);
}
return croppedBitmap;
}
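ByteBufferBackedInputStream is not an Android SDK class; it simply exposes the JPEG plane's ByteBuffer as an InputStream so that BitmapFactory.decodeStream() can consume it. A minimal sketch of such a wrapper, assuming the usual implementation:
public class ByteBufferBackedInputStream extends InputStream {
    private final ByteBuffer buf;

    public ByteBufferBackedInputStream(ByteBuffer buf) {
        this.buf = buf;
    }

    @Override
    public int read() {
        // Return the next byte as an unsigned value, or -1 at end of buffer.
        return buf.hasRemaining() ? (buf.get() & 0xFF) : -1;
    }

    @Override
    public int read(byte[] bytes, int off, int len) {
        if (!buf.hasRemaining()) {
            return -1;
        }
        len = Math.min(len, buf.remaining());
        buf.get(bytes, off, len);
        return len;
    }
}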
Project: Camera_Calibration_Android
File: Camera2BasicFragment.java
@Override
public void onImageAvailable(ImageReader reader) {
// mBackgroundHandler.post(new ImageSaver(reader.acquireNextImage(), mFile, mImages));
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
mImages.add(ByteString.copyFrom(bytes));
showToast(String.valueOf(mImages.size()) + "/" + String.valueOf(mTotal), Toast.LENGTH_LONG);
if(mImages.size() >= mTotal) {
showToast(String.valueOf("Calibrating..."), Toast.LENGTH_LONG);
final Activity activity = getActivity();
if (activity != null) {
activity.runOnUiThread(new GrpcPostImageRunnable());
}
}
} finally {
if (image != null) {
image.close();
}
}
}
Project: Cam2Caption
File: Camera2BasicFragment.java
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] imageBytes = new byte[buffer.remaining()];
buffer.get(imageBytes);
Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
final String text = runModel(bitmap);
getActivity().runOnUiThread(new Runnable() {
@Override
public void run() {
textView.setText(text);
}
});
} finally {
if (image != null) {
image.close();
}
}
}
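BitmapFactory.decodeByteArray() only succeeds here because the ImageReader was created with ImageFormat.JPEG, so plane 0 contains a complete JPEG stream. A minimal sketch of such a reader setup (size, listener and handler names are placeholders):
ImageReader reader = ImageReader.newInstance(640, 480, ImageFormat.JPEG, 2 /* maxImages */);
reader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);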
Project: androidthings-imageclassifier
File: ImagePreprocessor.java
public Bitmap preprocessImage(final Image image) {
if (image == null) {
return null;
}
Assert.assertEquals("Invalid size width", rgbFrameBitmap.getWidth(), image.getWidth());
Assert.assertEquals("Invalid size height", rgbFrameBitmap.getHeight(), image.getHeight());
if (croppedBitmap != null && rgbFrameBitmap != null) {
ByteBuffer bb = image.getPlanes()[0].getBuffer();
rgbFrameBitmap = BitmapFactory.decodeStream(new ByteBufferBackedInputStream(bb));
Helper.cropAndRescaleBitmap(rgbFrameBitmap, croppedBitmap, 0);
}
image.close();
// For debugging
if (SAVE_PREVIEW_BITMAP) {
Helper.saveBitmap(croppedBitmap);
}
return croppedBitmap;
}
Project: pc-android-controller-android
File: ScreenCaptureSocket.java
/**
 * Starts a screen capture: acquires the latest image from the ImageReader.
 */
private void startCapture() {
Image image = null;
try {
try {
finalize();
} catch (Throwable throwable) {
throwable.printStackTrace();
}
image = mImageReader.acquireLatestImage();
} catch (Exception e) {
e.printStackTrace();
}
if (image == null) {
L.e(" 获取 image 为空 结束..");
return;
} else {
SaveTask mSaveTask = new SaveTask();
AsyncTaskCompat.executeParallel(mSaveTask, image);
}
}
Project: sample-tensorflow-imageclassifier
File: ImagePreprocessor.java
public Bitmap preprocessImage(final Image image) {
if (image == null) {
return null;
}
Assert.assertEquals("Invalid size width", rgbFrameBitmap.getWidth(), image.getWidth());
Assert.assertEquals("Invalid size height", rgbFrameBitmap.getHeight(), image.getHeight());
if (croppedBitmap != null && rgbFrameBitmap != null) {
ByteBuffer bb = image.getPlanes()[0].getBuffer();
rgbFrameBitmap = BitmapFactory.decodeStream(new ByteBufferBackedInputStream(bb));
Helper.cropAndRescaleBitmap(rgbFrameBitmap, croppedBitmap, 0);
}
image.close();
// For debugging
if (SAVE_PREVIEW_BITMAP) {
Helper.saveBitmap(croppedBitmap);
}
return croppedBitmap;
}
Project: BWS-Android
File: IntensityPlane.java
/**
 * Extracts the Y plane from a YUV_420_888 image and creates an IntensityPlane from it.
 * The actual plane data is copied into the new IntensityPlane object.
 *
 * @throws IllegalArgumentException if the provided image is not in the YUV_420_888 format
 */
@NonNull
public static IntensityPlane extract(@NonNull Image img) {
if (img.getFormat() != ImageFormat.YUV_420_888) {
throw new IllegalArgumentException("image format must be YUV_420_888");
}
Image.Plane[] planes = img.getPlanes();
ByteBuffer buffer = planes[0].getBuffer();
byte[] yPlane = new byte[buffer.remaining()];
buffer.get(yPlane);
int yRowStride = planes[0].getRowStride();
return new IntensityPlane(img.getWidth(), img.getHeight(), yPlane, yRowStride);
}
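The IntensityPlane value class itself is not shown in this snippet; a minimal sketch consistent with the factory method above (field names are assumptions):
public class IntensityPlane {
    public final int width;
    public final int height;
    public final byte[] plane;     // copy of the Y plane data
    public final int rowStride;

    IntensityPlane(int width, int height, byte[] plane, int rowStride) {
        this.width = width;
        this.height = height;
        this.plane = plane;
        this.rowStride = rowStride;
    }
}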
Project: BWS-Android
File: FacialRecognitionFragment.java
/**
* lazily initialize ImageReader and select preview size
*/
private void setupPreviewSizeAndImageReader() {
if (previewSize == null) {
previewSize = cameraHelper.selectPreviewSize(openCamera);
}
if (imageReader == null) {
int maxImages = 2; // should be at least 2 according to ImageReader.acquireLatestImage() documentation
imageReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, maxImages);
imageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image img = reader.acquireLatestImage();
if (img != null) {
// Make an in-memory copy of the image so the reader's image can be closed as soon as possible.
// This helps the thread running the preview stay up to date.
IntensityPlane imgCopy = IntensityPlane.extract(img);
img.close();
int imageRotation = cameraHelper.getImageRotation(openCamera, getRelativeDisplayRotation());
presenter.onImageCaptured(imgCopy, imageRotation);
}
}
}, null);
}
}
Project: Paper-Melody
File: CameraDebugActivity.java
public void processImage(Image image) {
// Process the image here; called on every video frame.
if (!Tap.readyForNextFrame()) {
return;
}
Mat mat = ImageUtil.imageToBgr(image);
long frameTime = System.currentTimeMillis();
long t1 = System.currentTimeMillis();
List<Point> notUsedWhenDebug = Tap.getAll(mat,
canvasCameraDebug.getHandContours(), canvasCameraDebug.getFingerTips());
long t2 = System.currentTimeMillis();
CanvasUtil.setScreenHeight(ViewUtil.getScreenHeight(this));
canvasCameraDebug.updateInfo(
t2 - t1, frameTime - lastFrameTime, Tap.getProcessInterval()
);
lastFrameTime = frameTime;
}
Project: Paper-Melody
File: ImageUtil.java
public static Mat yuvToBgr(Image image, Mat yuvMat) {
Mat bgrMat = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC4);
//Log.d("TESTCALL", bgrMat.rows() + " " + bgrMat.cols());
Imgproc.cvtColor(yuvMat, bgrMat, Imgproc.COLOR_YUV2BGR_I420);
//Log.d("TESTCALL", yuvMat.rows() + " " + yuvMat.cols());
return bgrMat;
}
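The imageToBgr() helper called in the CameraDebugActivity snippet above is not shown. A rough sketch of how the I420 Mat expected by yuvToBgr() might be assembled from an Image; this assumes tightly packed planes with a chroma pixel stride of 1, which not all devices deliver:
public static Mat imageToBgr(Image image) {
    int width = image.getWidth();
    int height = image.getHeight();
    byte[] i420 = new byte[width * height * 3 / 2];
    int offset = 0;
    for (Image.Plane plane : image.getPlanes()) {
        ByteBuffer buffer = plane.getBuffer();
        int length = Math.min(buffer.remaining(), i420.length - offset);
        buffer.get(i420, offset, length);
        offset += length;
    }
    // Single-channel Mat with Y, U and V stacked vertically, as COLOR_YUV2BGR_I420 expects.
    Mat yuvMat = new Mat(height * 3 / 2, width, CvType.CV_8UC1);
    yuvMat.put(0, 0, i420);
    return yuvToBgr(image, yuvMat);
}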
Project: android-things-drawbot
File: ImagePreprocessor.java
public Bitmap preprocessImage(final Image image) {
if (image == null) {
return null;
}
Assert.assertEquals("Invalid size width", rgbFrameBitmap.getWidth(), image.getWidth());
Assert.assertEquals("Invalid size height", rgbFrameBitmap.getHeight(), image.getHeight());
if (croppedBitmap != null && rgbFrameBitmap != null) {
ByteBuffer bb = image.getPlanes()[0].getBuffer();
rgbFrameBitmap = BitmapFactory.decodeStream(new ByteBufferBackedInputStream(bb));
Helper.cropAndRescaleBitmap(rgbFrameBitmap, croppedBitmap, 0);
}
image.close();
// For debugging
if (SAVE_PREVIEW_BITMAP) {
Helper.saveBitmap(mContext, croppedBitmap);
}
return croppedBitmap;
}
Project: doorbell
File: DoorbellActivity.java
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
// get image bytes
ByteBuffer imageBuf = image.getPlanes()[0].getBuffer();
final byte[] imageBytes = new byte[imageBuf.remaining()];
imageBuf.get(imageBytes);
image.close();
onPictureTaken(imageBytes);
}
Project: CameraCompat
File: DirectChain.java
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void sendNormalImage(Image image) {
if (mIsFrontCamera && mEnableMirror) {
if (mRotation == Rotation.ROTATION_90) {
RgbYuvConverter.image2yuvCropFlip(image, mVideoHeight, mGLYuvBuffer.array());
} else {
RgbYuvConverter.image2yuvCropRotateC180Flip(image, mVideoHeight,
mGLYuvBuffer.array());
}
} else {
if (mRotation == Rotation.ROTATION_90) {
RgbYuvConverter.image2yuvCropRotateC180(image, mVideoHeight, mGLYuvBuffer.array());
} else {
RgbYuvConverter.image2yuvCrop(image, mVideoHeight, mGLYuvBuffer.array());
}
}
mVideoCaptureCallback.onFrameData(mGLYuvBuffer.array(), image.getWidth(), mVideoHeight);
}
Project: CameraCompat
File: GPUImageChain.java
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
public void onFrameData(final Image image, final Runnable postProcessedTask) {
final int width = image.getWidth();
final int height = image.getHeight();
if (mGLRgbaBuffer == null) {
mGLRgbaBuffer = ByteBuffer.allocateDirect(width * height * 4);
}
if (mGLYuvBuffer == null) {
// 16 bytes alignment
int bufHeight = (width * mGLRender.getFrameWidth() / mGLRender.getFrameHeight())
& 0xfffffff0;
mGLYuvBuffer = ByteBuffer.allocateDirect(width * bufHeight * 3 / 2);
}
if (!mGLRender.isBusyDrawing()) {
RgbYuvConverter.image2rgba(image, mGLRgbaBuffer.array());
mGLRender.scheduleDrawFrame(mGLRgbaBuffer, width, height, () -> {
if (!mGLRender.isEnableFilter() && !mGLRender.isPaused()) {
sendNormalImage(image);
}
postProcessedTask.run();
});
} else {
postProcessedTask.run();
}
}
Project: CameraCompat
File: GPUImageChain.java
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
private void sendNormalImage(Image image) {
if (mIsFrontCamera && mEnableMirror) {
if (mGLRender.getRotation() == Rotation.ROTATION_90) {
RgbYuvConverter.image2yuvCropFlip(image, mGLRender.getVideoHeight(),
mGLYuvBuffer.array());
} else {
RgbYuvConverter.image2yuvCropRotateC180Flip(image, mGLRender.getVideoHeight(),
mGLYuvBuffer.array());
}
} else {
if (mGLRender.getRotation() == Rotation.ROTATION_90) {
RgbYuvConverter.image2yuvCropRotateC180(image, mGLRender.getVideoHeight(),
mGLYuvBuffer.array());
} else {
RgbYuvConverter.image2yuvCrop(image, mGLRender.getVideoHeight(),
mGLYuvBuffer.array());
}
}
mVideoCaptureCallback.onFrameData(mGLYuvBuffer.array(), image.getWidth(),
mGLRender.getVideoHeight());
}
Project: CameraView
File: PictureSession.java
private static byte[] toByteArray(Image image) {
byte[] data = null;
if (image.getFormat() == ImageFormat.JPEG) {
Image.Plane[] planes = image.getPlanes();
ByteBuffer buffer = planes[0].getBuffer();
data = new byte[buffer.capacity()];
buffer.get(data);
return data;
} else if (image.getFormat() == ImageFormat.YUV_420_888) {
data = NV21toJPEG(
YUV_420_888toNV21(image),
image.getWidth(), image.getHeight());
} else {
Log.w(TAG, "Unrecognized image format: " + image.getFormat());
}
return data;
}
Project: AndroidSnippets
File: CameraApi2ExampleActivity.java
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
try {
if (image.getFormat() == ImageFormat.JPEG) {
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] data = new byte[buffer.remaining()];
buffer.get(data);
Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
WindowManager windowManager = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
int rotation = windowManager.getDefaultDisplay().getRotation();
Bitmap rotated = ImageUtils.rotateBitmap(bitmap, rotation, mCamera2Engine.getSensorOrientation());
mImageView.setImageBitmap(rotated);
}
} finally {
image.close();
}
}
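ImageUtils.rotateBitmap() is a project helper rather than a framework API. A minimal sketch that turns the display rotation and the sensor orientation into a Matrix rotation (the exact angle arithmetic here is an assumption):
public static Bitmap rotateBitmap(Bitmap source, int displayRotation, int sensorOrientation) {
    // Surface.ROTATION_* constants are 0..3; convert to degrees first.
    int displayDegrees = displayRotation * 90;
    int degrees = (sensorOrientation - displayDegrees + 360) % 360;
    if (degrees == 0) {
        return source;
    }
    Matrix matrix = new Matrix();
    matrix.postRotate(degrees);
    return Bitmap.createBitmap(source, 0, 0, source.getWidth(), source.getHeight(), matrix, true);
}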
Project: SimpleSmsRemote
File: ImageUtils.java
/**
 * Retrieves an ARGB_8888 Bitmap from the latest image of the given ImageReader.
 *
 * @param imageReader the image reader to read from
 * @return the decoded bitmap
 */
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
public static Bitmap GetBitmapFromImageReader(ImageReader imageReader) {
Bitmap bitmap;
//get image buffer
Image image = imageReader.acquireLatestImage();
final Image.Plane[] planes = image.getPlanes();
final ByteBuffer buffer = planes[0].getBuffer();
int pixelStride = planes[0].getPixelStride();
int rowStride = planes[0].getRowStride();
int rowPadding = rowStride - pixelStride * image.getWidth();
// create bitmap
bitmap = Bitmap.createBitmap(image.getWidth() + rowPadding / pixelStride, image.getHeight(), Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(buffer);
image.close();
return bitmap;
}
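Unlike the CaptureScreenImage snippet near the top, this helper returns the bitmap with the row-padding columns still attached. If an exactly sized bitmap is needed, the dimensions can be captured before image.close() and the padding cropped off, e.g.:
int width = image.getWidth();    // read before image.close()
int height = image.getHeight();
bitmap.copyPixelsFromBuffer(buffer);
image.close();
return Bitmap.createBitmap(bitmap, 0, 0, width, height);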
Project: DeviceConnect-Android
File: HostDeviceScreenCast.java
private Bitmap decodeToBitmap(final Image img) {
Image.Plane[] planes = img.getPlanes();
if (planes[0].getBuffer() == null) {
return null;
}
int width = img.getWidth();
int height = img.getHeight();
int pixelStride = planes[0].getPixelStride();
int rowStride = planes[0].getRowStride();
int rowPadding = rowStride - pixelStride * width;
Bitmap bitmap = Bitmap.createBitmap(
width + rowPadding / pixelStride, height,
Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(planes[0].getBuffer());
img.close();
return Bitmap.createBitmap(bitmap, 0, 0, width, height, null, true);
}
Project: Camera2
File: OneCameraZslImpl.java
@Override
public void onImageCaptured(Image image, TotalCaptureResult
captureResult) {
long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
// We should only capture the image if it's more recent than the
// latest one. Synchronization is necessary since this method is
// called on {@link #mImageSaverThreadPool}.
synchronized (mLastCapturedImageTimestamp) {
if (timestamp > mLastCapturedImageTimestamp.get()) {
mLastCapturedImageTimestamp.set(timestamp);
} else {
// There was a more recent (or identical) image which has
// begun being saved, so abort.
return;
}
}
mReadyStateManager.setInput(
ReadyStateRequirement.CAPTURE_NOT_IN_PROGRESS, true);
mSession.startEmpty();
savePicture(image, mParams, mSession);
mParams.callback.onPictureTaken(mSession);
Log.v(TAG, "Image saved. Frame number = " + captureResult.getFrameNumber());
}
Project: FastBarcodeScanner
File: FastBarcodeScanner.java
private void onSingleBarcodeFound(Barcode bc, Image source, BarcodeDetectedListener listener, Handler callbackHandler) {
if (bc == null || bc.contents == null) {
Log.v(TAG, "Found barcode: " + null);
mNoBarcodeCount++;
if (mLastReportedBarcode != null && mNoBarcodeCount >= NO_BARCODE_IGNORE_LIMIT) {
mLastReportedBarcode = null;
_onSingleBarcode(null, null, source, listener, callbackHandler);
}
} else {
Log.v(TAG, "Found barcode: " + bc.contents);
mNoBarcodeCount = 0;
if (!bc.contents.equals(mLastReportedBarcode)) {
mLastReportedBarcode = bc.contents;
_onSingleBarcode(mLastReportedBarcode, bc.points, source, listener, callbackHandler);
}
}
}
Project: FastBarcodeScanner
File: FastBarcodeScanner.java
private void _onSingleBarcode(String barcode, Point[] points, final Image source, final BarcodeDetectedListener listener, Handler callbackHandler) {
if (listener != null) {
final BarcodeInfo bc = new BarcodeInfo(barcode, points);
final byte[] serialized = (source == null) ? null : ImageDecoder.Serialize(source);
final int width = (source == null) ? 0 : source.getWidth();
final int height = (source == null) ? 0 : source.getHeight();
final int format = (source == null) ? ImageFormat.UNKNOWN : source.getFormat();
callbackHandler.post(
new Runnable() {
@Override
public void run() {
listener.onSingleBarcodeAvailable(bc, serialized, format, width, height);
}
}
);
}
}
Project: FastBarcodeScanner
File: FastBarcodeScanner.java
private void onMultipleBarcodesFound(Barcode[] bcs, Image source, MultipleBarcodesDetectedListener listener, Handler callbackHandler) {
if (bcs == null) {
Log.v(TAG, "Found 0 barcodes");
mNoBarcodeCount++;
if (mLastReportedMultiBarcode != null && mNoBarcodeCount >= NO_BARCODE_IGNORE_LIMIT) {
mLastReportedMultiBarcode = null;
_onMultipleBarcodes(mLastReportedMultiBarcode, source, listener, callbackHandler);
}
} else {
Log.v(TAG, "Found " + bcs.length + " barcodes");
mNoBarcodeCount = 0;
if (!_equals(bcs, mLastReportedMultiBarcode)) {
mLastReportedMultiBarcode = bcs;
_onMultipleBarcodes(mLastReportedMultiBarcode, source, listener, callbackHandler);
}
}
}
Project: FastBarcodeScanner
File: FastBarcodeScanner.java
private void _onMultipleBarcodes(final Barcode[] barcodes, final Image source, final MultipleBarcodesDetectedListener listener, Handler callbackHandler) {
if (listener != null) {
final byte[] serialized = (source == null) ? null : ImageDecoder.Serialize(source);
final int width = (source == null) ? 0 : source.getWidth();
final int height = (source == null) ? 0 : source.getHeight();
final int format = (source == null) ? ImageFormat.UNKNOWN : source.getFormat();
callbackHandler.post(
new Runnable() {
@Override
public void run() {
listener.onMultipleBarcodeAvailable(_convert(barcodes), serialized, format, width, height);
}
}
);
}
}
Project: FastBarcodeScanner
File: ImageDecoder.java
public static byte[] Serialize(Image image)
{
if (image==null)
return null;
Image.Plane[] planes = image.getPlanes();
// NV21 expects planes in order YVU, not YUV:
if (image.getFormat() == ImageFormat.YUV_420_888)
planes = new Image.Plane[] {planes[0], planes[2], planes[1]};
byte[] serializeBytes = new byte[getSerializedSize(image)];
int nextFree = 0;
for (Image.Plane plane: planes)
{
ByteBuffer buffer = plane.getBuffer();
buffer.position(0);
int nBytes = buffer.remaining();
plane.getBuffer().get(serializeBytes, nextFree, nBytes);
nextFree += nBytes;
}
return serializeBytes;
}
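getSerializedSize() is not shown here; a plausible sketch, assuming the plane buffers have not been read yet, simply sums their sizes:
private static int getSerializedSize(Image image) {
    int size = 0;
    for (Image.Plane plane : image.getPlanes()) {
        size += plane.getBuffer().remaining();
    }
    return size;
}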
Project: 365browser
File: VideoCaptureCamera2.java
@Override
public void onImageAvailable(ImageReader reader) {
try (Image image = reader.acquireLatestImage()) {
if (image == null) {
throw new IllegalStateException();
}
if (image.getFormat() != ImageFormat.JPEG) {
Log.e(TAG, "Unexpected image format: %d", image.getFormat());
throw new IllegalStateException();
}
final byte[] capturedData = readCapturedData(image);
nativeOnPhotoTaken(mNativeVideoCaptureDeviceAndroid, mCallbackId, capturedData);
} catch (IllegalStateException ex) {
notifyTakePhotoError(mCallbackId);
return;
}
if (createPreviewObjectsAndStartPreview()) return;
nativeOnError(mNativeVideoCaptureDeviceAndroid, "Error restarting preview");
}
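readCapturedData() is a Chromium helper that is not shown here; a minimal sketch, assuming it just copies the single JPEG plane into a byte array:
private static byte[] readCapturedData(Image image) {
    ByteBuffer buffer = image.getPlanes()[0].getBuffer();
    byte[] data = new byte[buffer.remaining()];
    buffer.get(data);
    return data;
}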
Project: youtube_livestream
File: StreamerActivity.java
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image != null) {
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
if (rgbaData == null) {
rgbaData = new byte[width * height * 4];
}
buffer.get(rgbaData);
videoStreamingConnection.sendVideoFrame(rgbaData);
image.close();
}
}
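Note that buffer.get(rgbaData) assumes the RGBA buffer has no row padding (rowStride == width * 4); on devices where that does not hold, the frame would have to be copied row by row, e.g.:
Image.Plane plane = image.getPlanes()[0];
ByteBuffer buffer = plane.getBuffer();
int rowStride = plane.getRowStride();
int pixelStride = plane.getPixelStride(); // 4 bytes per pixel for RGBA_8888
for (int row = 0; row < height; row++) {
    buffer.position(row * rowStride);
    buffer.get(rgbaData, row * width * pixelStride, width * pixelStride);
}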
Project: https-github.com-hyb1996-NoRootScriptDroid
File: ColorFinder.java
public Point[] findAllColors(Image image, ColorDetector detector, Rect rect, int threadCount) {
List<Point> result = new Vector<>();
ColorIterator[] iterators = divide(image, rect, threadCount);
for (int i = 1; i < threadCount; i++) {
mThreadPoolExecutor.execute(new FindAllColorsRunnable(result, iterators[i], detector));
}
new FindAllColorsRunnable(result, iterators[0], detector).run();
Point[] points = new Point[result.size()];
for (int i = 0; i < points.length; i++) {
points[i] = scalePoint(result.get(i), image.getWidth(), image.getHeight());
}
return points;
}
Project: https-github.com-hyb1996-NoRootScriptDroid
File: ColorFinder.java
public Point findColorConcurrently(Image image, ColorDetector detector, Rect rect, int threadCount) {
if (threadCount <= 1) {
return findColor(image, detector, rect);
}
VolatileBox<Point> result = new VolatileBox<>();
ColorIterator[] iterators = divide(image, rect, threadCount);
for (int i = 1; i < threadCount; i++) {
mThreadPoolExecutor.execute(new FindColorRunnable(result, iterators[i], detector));
}
new FindColorRunnable(result, iterators[0], detector).run();
return scalePoint(result.get(), image.getWidth(), image.getHeight());
}
Project: https-github.com-hyb1996-NoRootScriptDroid
File: ColorFinder.java
protected ColorIterator[] divide(Image image, Rect rect, int count) {
Rect[] subAreas = divideIntoSubAreas(rect, count);
int centerY = rect.centerY();
ColorIterator[] iterators = new ColorIterator[count];
for (int i = 1; i < subAreas.length; i++) {
Rect subArea = subAreas[i];
if (subArea.top > centerY) {
iterators[i] = new ColorIterator.SequentialIterator(image, subArea, true);
} else {
iterators[i] = new ColorIterator.SequentialIterator(image, subArea, true);
}
}
iterators[0] = new ColorIterator.SequentialIterator(image, subAreas[0], false);
return iterators;
}
Project: https-github.com-hyb1996-NoRootScriptDroid
File: ColorIterator.java
public ImageColorIterator(Image image, Rect area, boolean duplicateBuffer) {
Image.Plane plane = image.getPlanes()[0];
if (duplicateBuffer) {
mByteBuffer = plane.getBuffer().duplicate();
} else {
mByteBuffer = plane.getBuffer();
}
mIterateArea = area;
}
Project: https-github.com-hyb1996-NoRootScriptDroid
File: ColorIterator.java
public SequentialIterator(Image image, Rect area, boolean duplicateBuffer) {
super(image, area, duplicateBuffer);
Image.Plane plane = image.getPlanes()[0];
int pixelStride = plane.getPixelStride();
mRowStride = plane.getRowStride();
mWidth = area.width();
mHeight = area.height();
int rowPadding = mRowStride - pixelStride * image.getWidth();
mSkipPerRow = rowPadding + (image.getWidth() - mWidth) * pixelStride;
int offset = mIterateArea.top * mRowStride + mIterateArea.left * pixelStride;
mByteBuffer.position(offset);
}
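For reference, reading a single pixel out of the RGBA_8888 plane that these iterators walk over could look like the sketch below (the actual ColorIterator accessors are not shown in this listing):
private static int pixelAt(ByteBuffer buffer, int x, int y, int pixelStride, int rowStride) {
    int offset = y * rowStride + x * pixelStride;
    int r = buffer.get(offset) & 0xFF;
    int g = buffer.get(offset + 1) & 0xFF;
    int b = buffer.get(offset + 2) & 0xFF;
    int a = buffer.get(offset + 3) & 0xFF;
    return Color.argb(a, r, g, b); // android.graphics.Color
}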
Project: https-github.com-hyb1996-NoRootScriptDroid
File: ColorIterator.java
public CentralSpiralIterator(Image image, Rect area, boolean duplicateBuffer) {
super(image, area, duplicateBuffer);
Image.Plane plane = image.getPlanes()[0];
mPixelStride = mNextStepSkip = plane.getPixelStride();
mRowStride = plane.getRowStride();
mByteBuffer.position(area.centerX() * mPixelStride + area.centerY() * mRowStride);
}
Project: https-github.com-hyb1996-NoRootScriptDroid
File: ScreenCapturer.java
@Nullable
public Image capture() {
if (!mImageAvailable) {
waitForImageAvailable();
return mLatestImage;
}
if (mLatestImage != null) {
tryClose(mLatestImage);
}
mLatestImage = mImageReader.acquireLatestImage();
return mLatestImage;
}
Project: https-github.com-hyb1996-NoRootScriptDroid
File: ScreenCapturer.java
private void tryClose(Image image) {
try {
image.close();
} catch (Exception ignored) {
}
}
Project: https-github.com-hyb1996-NoRootScriptDroid
File: ConcurrentColorIterator.java
public ConcurrentImageColorIterator(Image image, Rect area) {
Image.Plane plane = image.getPlanes()[0];
mByteBuffer = plane.getBuffer();
mImageWidth = image.getWidth();
mImageHeight = image.getHeight();
mIterateArea = area;
mAreaWidth = area.width();
mAreaHeight = area.height();
}