protected void setFragment() {
    final Fragment fragment =
            CameraConnectionFragment.newInstance(
                    new CameraConnectionFragment.ConnectionCallback() {
                        @Override
                        public void onPreviewSizeChosen(final Size size, final int rotation) {
                            CameraActivity.this.onPreviewSizeChosen(size, rotation);
                        }
                    },
                    this,
                    getLayoutId(),
                    getDesiredPreviewFrameSize());

    getFragmentManager()
            .beginTransaction()
            .replace(R.id.container, fragment)
            .commit();
}
/**
 * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
 * width and height are at least as large as the respective requested values, and whose aspect
 * ratio matches with the specified value.
 *
 * @param choices     The list of sizes that the camera supports for the intended output class
 * @param width       The minimum desired width
 * @param height      The minimum desired height
 * @param aspectRatio The aspect ratio
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
    // Collect the supported resolutions that are at least as big as the preview Surface
    List<Size> bigEnough = new ArrayList<Size>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        if (option.getHeight() == option.getWidth() * h / w
                && option.getWidth() >= width
                && option.getHeight() >= height) {
            bigEnough.add(option);
        }
    }

    // Pick the smallest of those, assuming we found any
    if (bigEnough.size() > 0) {
        return Collections.min(bigEnough, new CompareSizesByArea());
    } else {
        Log.e(TAG, "Couldn't find any suitable preview size");
        return choices[0];
    }
}
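// Many of the snippets in this collection compare candidate sizes with a CompareSizesByArea
// comparator that is not itself shown. A minimal sketch, matching the comparator used in the
// Android Camera2 samples (an assumption here, not taken from any of the snippets in this file):
static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Cast to long to guard against overflow when multiplying large widths and heights.
        return Long.signum(
                (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
    }
}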
/**
 * Initialize HeifReader module.
 *
 * @param context Context.
 */
public static void initialize(Context context) {
    mRenderScript = RenderScript.create(context);
    mCacheDir = context.getCacheDir();

    // find best HEVC decoder
    mDecoderName = null;
    mDecoderSupportedSize = new Size(0, 0);
    int numCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < numCodecs; i++) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
        if (codecInfo.isEncoder()) {
            continue;
        }
        for (String type : codecInfo.getSupportedTypes()) {
            if (type.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
                MediaCodecInfo.CodecCapabilities cap =
                        codecInfo.getCapabilitiesForType(MediaFormat.MIMETYPE_VIDEO_HEVC);
                MediaCodecInfo.VideoCapabilities vcap = cap.getVideoCapabilities();
                Size supportedSize = new Size(
                        vcap.getSupportedWidths().getUpper(), vcap.getSupportedHeights().getUpper());
                Log.d(TAG, "HEVC decoder=\"" + codecInfo.getName() + "\""
                        + " supported-size=" + supportedSize
                        + " color-formats=" + Arrays.toString(cap.colorFormats));
                if (mDecoderSupportedSize.getWidth() * mDecoderSupportedSize.getHeight()
                        < supportedSize.getWidth() * supportedSize.getHeight()) {
                    mDecoderName = codecInfo.getName();
                    mDecoderSupportedSize = supportedSize;
                }
            }
        }
    }
    if (mDecoderName == null) {
        throw new RuntimeException("no HEVC decoding support");
    }
    Log.i(TAG, "HEVC decoder=\"" + mDecoderName + "\" supported-size=" + mDecoderSupportedSize);
}
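// Hypothetical usage of the initializer above: the module is typically set up once, for example
// from Application.onCreate(), before any HEIF decoding is attempted. Note that initialize()
// throws a RuntimeException when the device has no HEVC decoder. MyApplication is an assumed name.
public class MyApplication extends Application {
    @Override
    public void onCreate() {
        super.onCreate();
        HeifReader.initialize(this);
    }
}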
private void startPreview() {
    try {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap configMap =
                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        Size previewSize = Util.getPreferredPreviewSize(
                configMap.getOutputSizes(ImageFormat.JPEG),
                textureView.getWidth(), textureView.getHeight());

        surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
        Surface surface = new Surface(surfaceTexture);

        captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        captureBuilder.addTarget(surface);
        cameraDevice.createCaptureSession(
                Arrays.asList(surface), captureSessionCallback, backgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
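// Util.getPreferredPreviewSize() is referenced above but not included in this collection. A
// minimal sketch of what such a helper might do, assuming it simply returns the largest output
// size that fits inside the view (an illustration, not the project's actual Util class):
public static Size getPreferredPreviewSize(Size[] sizes, int viewWidth, int viewHeight) {
    List<Size> fitting = new ArrayList<>();
    for (Size size : sizes) {
        if (size.getWidth() <= viewWidth && size.getHeight() <= viewHeight) {
            fitting.add(size);
        }
    }
    if (!fitting.isEmpty()) {
        return Collections.max(fitting, new CompareSizesByArea());
    }
    return sizes[0]; // nothing fits; fall back to the first reported size
}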
/**
 * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
 * width and height are at least {@code MINIMUM_PREVIEW_SIZE}. Note that the requested width,
 * height and aspect ratio are not otherwise used by this implementation.
 *
 * @param choices     The list of sizes that the camera supports for the intended output class
 * @param width       The minimum desired width
 * @param height      The minimum desired height
 * @param aspectRatio The aspect ratio
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
@SuppressLint("LongLogTag")
@DebugLog
private static Size chooseOptimalSize(
        final Size[] choices, final int width, final int height, final Size aspectRatio) {
    // Collect the supported resolutions that are at least as big as the preview Surface
    final List<Size> bigEnough = new ArrayList<Size>();
    for (final Size option : choices) {
        if (option.getHeight() >= MINIMUM_PREVIEW_SIZE && option.getWidth() >= MINIMUM_PREVIEW_SIZE) {
            Timber.tag(TAG).i("Adding size: " + option.getWidth() + "x" + option.getHeight());
            bigEnough.add(option);
        } else {
            Timber.tag(TAG).i("Not adding size: " + option.getWidth() + "x" + option.getHeight());
        }
    }

    // Pick the smallest of those, assuming we found any
    if (bigEnough.size() > 0) {
        final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
        Timber.tag(TAG).i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
        return chosenSize;
    } else {
        Timber.tag(TAG).e("Couldn't find any suitable preview size");
        return choices[0];
    }
}
/**
 * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
 * width and height are at least as large as the respective requested values, and whose aspect
 * ratio matches with the specified value.
 *
 * @param choices     The list of sizes that the camera supports for the intended output class
 * @param width       The minimum desired width
 * @param height      The minimum desired height
 * @param aspectRatio The aspect ratio
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
private static Size chooseOptimalSize(Size[] choices, int width, int height, Size aspectRatio) {
    // Collect the supported resolutions that are at least as big as the preview Surface
    List<Size> bigEnough = new ArrayList<>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        if (option.getHeight() == option.getWidth() * h / w
                && option.getWidth() >= width
                && option.getHeight() >= height) {
            bigEnough.add(option);
        }
    }

    // Pick the smallest of those, assuming we found any
    if (bigEnough.size() > 0) {
        return Collections.min(bigEnough, new CompareSizesByArea());
    } else {
        Log.e(TAG, "Couldn't find any suitable preview size");
        return choices[0];
    }
}
public void runCameraLiveView() {
    final Fragment cameraView =
            CameraConnectionFragment.newInstance(
                    new CameraConnectionFragment.ConnectionCallback() {
                        @Override
                        public void onPreviewSizeChosen(Size size, int rotation) {
                            MainActivity.this.onPreviewSizeChosen(size);
                        }
                    },
                    this,
                    R.layout.placerecognizer_ui,
                    new Size(mImageClassifier.getImageWidth(), mImageClassifier.getImageHeight()));

    getFragmentManager().beginTransaction()
            .replace(R.id.container, cameraView)
            .commit();
}
public void onPreviewSizeChosen(final Size size, final int rotation) {
    previewWidth = size.getWidth();
    previewHeight = size.getHeight();

    final Display display = getWindowManager().getDefaultDisplay();
    final int screenOrientation = display.getRotation();

    LOGGER.i("Sensor orientation: %d, Screen orientation: %d", rotation, screenOrientation);
    sensorOrientation = rotation + screenOrientation;

    LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
    rgbBytes = new int[previewWidth * previewHeight];
    rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
    croppedBitmap = Bitmap.createBitmap(INPUT_SIZE, INPUT_SIZE, Bitmap.Config.ARGB_8888);

    frameToCropTransform = ImageUtils.getTransformationMatrix(
            previewWidth, previewHeight, INPUT_SIZE, INPUT_SIZE, sensorOrientation, MAINTAIN_ASPECT);

    Matrix cropToFrameTransform = new Matrix();
    frameToCropTransform.invert(cropToFrameTransform);

    yuvBytes = new byte[3][];
}
private static Size chooseOptimalSize(
        final Size[] choices, final int width, final int height, final Size aspectRatio) {
    // Collect the supported resolutions that are at least as big as the preview Surface
    final List<Size> bigEnough = new ArrayList<>();
    for (final Size option : choices) {
        if (option.getHeight() >= MINIMUM_PREVIEW_SIZE && option.getWidth() >= MINIMUM_PREVIEW_SIZE) {
            Log.i(TAG, "Adding size: " + option.getWidth() + "x" + option.getHeight());
            bigEnough.add(option);
        } else {
            Log.i(TAG, "Not adding size: " + option.getWidth() + "x" + option.getHeight());
        }
    }

    // Pick the smallest of those, assuming we found any
    if (bigEnough.size() > 0) {
        final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
        Log.i(TAG, "Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
        return chosenSize;
    } else {
        Log.e(TAG, "Couldn't find any suitable preview size");
        return choices[0];
    }
}
@Override
public void collectRatioSizes() {
    ratioSizeList.clear();

    CameraCharacteristics characteristics;
    StreamConfigurationMap map = null;
    try {
        characteristics = ((CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE))
                .getCameraCharacteristics(Integer.toString(Integer.parseInt(getCameraId())));
        map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
    // Bail out if the characteristics could not be read; otherwise the call below would throw a
    // NullPointerException.
    if (map == null) {
        return;
    }

    Size[] outputSizes = map.getOutputSizes(SurfaceTexture.class);
    if (outputSizes != null) {
        List<Double> ratioList = new ArrayList<>();
        for (Size size : outputSizes) {
            double ratio = (double) size.getWidth() / (double) size.getHeight();
            if (!ratioList.contains(ratio)) {
                ratioList.add(ratio);
                ratioSizeList.add(new AspectRatio(ratio, size.getWidth(), size.getHeight()));
            }
        }
    }
}
/**
 * Configures the ProportionalTextureView to respect the aspect ratio of the image and to use an
 * appropriate buffer size.
 */
@VisibleForTesting
void configureTextureView(@NonNull ProportionalTextureView textureView,
        @ConfigurationOrientation int deviceOrientation,
        @SurfaceRotation int relativeDisplayRotation,
        @NonNull Size previewSize) {
    switch (deviceOrientation) {
        case Configuration.ORIENTATION_PORTRAIT:
            // swap values because preview sizes are always landscape
            textureView.setAspectRatio(
                    previewSize.getHeight(), previewSize.getWidth(), relativeDisplayRotation);
            break;
        case Configuration.ORIENTATION_LANDSCAPE:
            textureView.setAspectRatio(
                    previewSize.getWidth(), previewSize.getHeight(), relativeDisplayRotation);
            break;
    }

    // Use a buffer size matching the preview, which is more memory efficient.
    SurfaceTexture surface = textureView.getSurfaceTexture();
    if (surface != null) {
        surface.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
    } else {
        throw new CameraException("surface texture not attached to view");
    }
}
/**
 * Chooses an appropriate size for the images within the preview stream.
 *
 * @param camera camera to get available preview sizes
 * @return the preview size to use
 * @throws CameraException if the preview size could not be determined
 */
@NonNull
public Size selectPreviewSize(@NonNull CameraDevice camera) {
    Size[] previewSizes = cameraCharacteristicsHelper.getPreviewOutputSizes(camera.getId());
    if (previewSizes == null || previewSizes.length == 0) {
        throw new CameraException("camera did not provide any preview size");
    }

    // Preferably the preview has a size of 640x480: small enough to save bandwidth while still
    // being large enough for the backend to accept.
    for (Size imageSize : previewSizes) {
        if (imageSize.getWidth() == 640 && imageSize.getHeight() == 480) {
            return imageSize;
        }
    }

    // Fall back to the first size, which might not be optimal.
    log.w("preferred preview size of 640x480 is not available, using %s", previewSizes[0]);
    return previewSizes[0];
}
@Before
public void setUp() throws Exception {
    surfaces = asList(surface1, surface2);

    when(manager.getCameraIdList()).thenReturn(new String[]{CAMERA_ID_1, CAMERA_ID_2});
    when(characteristicsHelper.getLensFacing(CAMERA_ID_1))
            .thenReturn(CameraCharacteristics.LENS_FACING_BACK);
    when(characteristicsHelper.getLensFacing(CAMERA_ID_2))
            .thenReturn(CameraCharacteristics.LENS_FACING_FRONT);
    when(characteristicsHelper.getSensorOrientation(CAMERA_ID_1)).thenReturn(SENSOR_ROTATION);
    when(characteristicsHelper.getPreviewOutputSizes(CAMERA_ID_1))
            .thenReturn(new Size[]{PREVIEWSIZE_1, PREVIEWSIZE_640x480, PREVIEWSIZE_2});

    when(camera.getId()).thenReturn(CAMERA_ID_1);
    when(camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW)).thenReturn(captureRequestBuilder);
    when(textureView.getSurfaceTexture()).thenReturn(surfaceTexture);
}
/**
 * Chooses the largest video size that matches {@code mAspectRatio}. Also, we don't use sizes
 * larger than 1080p, since MediaRecorder cannot handle such a high-resolution video.
 *
 * @param choices The list of available sizes
 * @return The video size
 */
Size chooseVideoSize(Size[] choices) {
    List<Size> videoSizes = Arrays.asList(choices);
    List<Size> supportedVideoSizes = new ArrayList<>();
    Collections.sort(videoSizes, new CompareSizesByArea());

    // Walk the sizes from largest to smallest, remembering every size within the preview limits
    // and returning the first (i.e. largest) one that matches the requested aspect ratio.
    for (int i = videoSizes.size() - 1; i >= 0; i--) {
        if (videoSizes.get(i).getWidth() <= MAX_PREVIEW_WIDTH
                && videoSizes.get(i).getHeight() <= MAX_PREVIEW_HEIGHT) {
            supportedVideoSizes.add(videoSizes.get(i));
            if (videoSizes.get(i).getWidth()
                    == videoSizes.get(i).getHeight() * mAspectRatio.getX() / mAspectRatio.getY()) {
                return videoSizes.get(i);
            }
        }
    }
    return supportedVideoSizes.size() > 0
            ? supportedVideoSizes.get(0)
            : choices[choices.length - 1];
}
/**
 * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
 * is at least as large as the respective texture view size, and that is at most as large as the
 * respective max size, and whose aspect ratio matches with the specified value. If such size
 * doesn't exist, choose the largest one that is at most as large as the respective max size,
 * and whose aspect ratio matches with the specified value.
 *
 * @param choices           The list of sizes that the camera supports for the intended output class
 * @param textureViewWidth  The width of the texture view relative to sensor coordinate
 * @param textureViewHeight The height of the texture view relative to sensor coordinate
 * @param maxWidth          The maximum width that can be chosen
 * @param maxHeight         The maximum height that can be chosen
 * @param aspectRatio       The aspect ratio
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
        int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
    // Collect the supported resolutions that are at least as big as the preview Surface
    List<Size> bigEnough = new ArrayList<>();
    // Collect the supported resolutions that are smaller than the preview Surface
    List<Size> notBigEnough = new ArrayList<>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight
                && option.getHeight() == option.getWidth() * h / w) {
            if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
                bigEnough.add(option);
            } else {
                notBigEnough.add(option);
            }
        }
    }

    // Pick the smallest of those big enough. If there is no one big enough, pick the
    // largest of those not big enough.
    if (bigEnough.size() > 0) {
        return Collections.min(bigEnough, new CompareSizesByArea());
    } else if (notBigEnough.size() > 0) {
        return Collections.max(notBigEnough, new CompareSizesByArea());
    } else {
        Log.e(TAG, "Couldn't find any suitable preview size");
        return choices[0];
    }
}
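// A hypothetical call site for the method above, showing how a Camera2 preview setup usually
// feeds it: the candidate sizes come from the camera's StreamConfigurationMap, the aspect ratio
// from the largest JPEG size, and the max dimensions cap the preview at 1080p. The variable names
// (characteristics, textureView) are assumptions, not part of the snippet above.
StreamConfigurationMap map =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size largestJpeg = Collections.max(
        Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)), new CompareSizesByArea());
Size previewSize = chooseOptimalSize(
        map.getOutputSizes(SurfaceTexture.class),
        textureView.getWidth(), textureView.getHeight(),
        1920, 1080,      // cap the preview stream at 1080p
        largestJpeg);    // keep the preview aspect ratio consistent with the still capture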
public float[] getAngle() {
    // Physical sensor size, in millimetres.
    // SizeF holds a float width and height.
    SizeF physicalSize = mCameraCharacteristics.get(
            mCameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE);
    Log.d("CameraCharacteristics",
            "physical size: " + physicalSize.getWidth() + ", " + physicalSize.getHeight());

    // Available focal lengths.
    float[] focalLength = mCameraCharacteristics.get(
            mCameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS);

    // Pixel count of the full pixel array.
    // Size holds an int width and height.
    Size fullArraySize = mCameraCharacteristics.get(
            mCameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE);
    Log.d("CameraCharacteristics",
            "pixel array: " + fullArraySize.getWidth() + ", " + fullArraySize.getHeight());

    // Active array region (= crop region [0, 0, activeRect.width, activeRect.height]).
    Rect activeRect = mCameraCharacteristics.get(
            mCameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
    Log.d("CameraCharacteristics",
            "active array: " + activeRect.width() + ", " + activeRect.height());

    // Size of the output stream.
    Size outputStreamSize = new Size(mTextureView.getWidth(), mTextureView.getHeight());
    Log.d("CameraCharacteristics",
            "output stream: " + outputStreamSize.getWidth() + ", " + outputStreamSize.getHeight());

    // The image is cropped vertically when the output aspect ratio is larger than the crop
    // region's aspect ratio, and horizontally when it is smaller.
    /*
     * Field-of-view formula: angle = 2 * arctan( d / (2 * f) )
     * f = focal length, d = physical sensor size (width or height)
     */
    // The region actually shown on screen differs from the full physical sensor size; since that
    // correction is not obvious, the full sensor size is used here.
    float[] angle = new float[2];
    angle[0] = 2f * (float) Math.toDegrees(
            Math.atan(physicalSize.getWidth() / (2 * focalLength[0])));   // horizontal
    angle[1] = 2f * (float) Math.toDegrees(
            Math.atan(physicalSize.getHeight() / (2 * focalLength[0])));  // vertical
    Log.d("getAngle", angle[0] + ", " + angle[1] + ", ");
    return angle;
}
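// A quick numeric check of the FOV formula used above, with made-up but realistic values: a
// sensor about 5.6 mm wide behind a 4.25 mm lens gives roughly a 67-degree horizontal FOV.
double sensorWidthMm = 5.6;
double focalLengthMm = 4.25;
double horizontalFovDeg = 2 * Math.toDegrees(Math.atan(sensorWidthMm / (2 * focalLengthMm)));
// horizontalFovDeg evaluates to approximately 66.8 degrees here.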
public static CameraConnectionFragment newInstance(
        final ConnectionCallback callback,
        final OnImageAvailableListener imageListener,
        final int layout,
        final Size inputSize) {
    return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
}
protected void setFragment() {
    final Fragment fragment =
            CameraConnectionFragment.newInstance(
                    new CameraConnectionFragment.ConnectionCallback() {
                        @Override
                        public void onPreviewSizeChosen(final Size size, final int rotation) {
                            CameraActivity.this.onPreviewSizeChosen(size, rotation);
                        }
                    },
                    this,
                    takeSnapshot,
                    getLayoutId(),
                    getDesiredPreviewFrameSize());

    getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit();
}
private CameraConnectionFragment(
        final ConnectionCallback connectionCallback,
        final OnImageAvailableListener imageListener,
        final int layout,
        final Size inputSize) {
    this.cameraConnectionCallback = connectionCallback;
    this.imageListener = imageListener;
    this.layout = layout;
    this.inputSize = inputSize;
}
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
    final float textSizePx = TypedValue.applyDimension(
            TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
    borderedText = new BorderedText(textSizePx);
    borderedText.setTypeface(Typeface.MONOSPACE);

    inferenceInterface = new TensorFlowInferenceInterface(getAssets(), MODEL_FILE);

    previewWidth = size.getWidth();
    previewHeight = size.getHeight();

    final Display display = getWindowManager().getDefaultDisplay();
    final int screenOrientation = display.getRotation();

    LOGGER.i("Sensor orientation: %d, Screen orientation: %d", rotation, screenOrientation);
    sensorOrientation = rotation + screenOrientation;

    addCallback(
            new DrawCallback() {
                @Override
                public void drawCallback(final Canvas canvas) {
                    renderDebug(canvas);
                }
            });

    adapter = new ImageGridAdapter();
    grid = (GridView) findViewById(R.id.grid_layout);
    grid.setAdapter(adapter);
    grid.setOnTouchListener(gridTouchAdapter);
    setStyle(adapter.items[0], 1.0f);
}
/**
 * In this sample, we choose a video size with a 4:3 aspect ratio. Also, we don't use sizes
 * larger than 1080p, since MediaRecorder cannot handle such a high-resolution video.
 *
 * @param choices The list of available sizes
 * @return The video size
 */
private static Size chooseVideoSize(Size[] choices) {
    for (Size size : choices) {
        if (size.getWidth() == size.getHeight() * 4 / 3 && size.getWidth() <= 1080) {
            return size;
        }
    }
    Log.e(TAG, "Couldn't find any suitable video size");
    return choices[choices.length - 1];
}
/**
 * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
 * is at least as large as the respective texture view size, and that is at most as large as the
 * respective max size, and whose aspect ratio matches with the specified value. If such size
 * doesn't exist, choose the largest one that is at most as large as the respective max size,
 * and whose aspect ratio matches with the specified value.
 *
 * @param choices           The list of sizes that the camera supports for the intended output class
 * @param textureViewWidth  The width of the texture view relative to sensor coordinate
 * @param textureViewHeight The height of the texture view relative to sensor coordinate
 * @param maxWidth          The maximum width that can be chosen
 * @param maxHeight         The maximum height that can be chosen
 * @param aspectRatio       The aspect ratio
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
public static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
        int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
    // Collect the supported resolutions that are at least as big as the preview Surface
    List<Size> bigEnough = new ArrayList<>();
    // Collect the supported resolutions that are smaller than the preview Surface
    List<Size> notBigEnough = new ArrayList<>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight
                && option.getHeight() == option.getWidth() * h / w) {
            if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
                bigEnough.add(option);
            } else {
                notBigEnough.add(option);
            }
        }
    }

    // Pick the smallest of those big enough. If there is no one big enough, pick the
    // largest of those not big enough.
    if (bigEnough.size() > 0) {
        return Collections.min(bigEnough, new CompareSizesByArea());
    } else if (notBigEnough.size() > 0) {
        return Collections.max(notBigEnough, new CompareSizesByArea());
    } else {
        return choices[0];
    }
}
private Size getParticipantSize() {
    DisplayMetrics metrics = new DisplayMetrics();
    getWindowManager().getDefaultDisplay().getMetrics(metrics);
    int width = metrics.widthPixels;   // absolute width in pixels
    int height = metrics.heightPixels; // absolute height in pixels

    if (mParticipantsList.size() == 2) {
        return new Size(width, height / 2);
    } else {
        if (mParticipantsList.size() > 2) {
            return new Size(width / 2, height / 2);
        }
    }
    return new Size(width, height);
}
/**
 * Chooses the largest picture size that matches {@code mAspectRatio}.
 */
Size choosePictureSize(Size[] choices) {
    List<Size> pictureSizes = Arrays.asList(choices);
    Collections.sort(pictureSizes, new CompareSizesByArea());

    // Walk the sizes from largest to smallest and return the first one with the requested aspect
    // ratio; fall back to the largest size overall if none matches.
    int maxIndex = pictureSizes.size() - 1;
    for (int i = maxIndex; i >= 0; i--) {
        if (pictureSizes.get(i).getWidth()
                == pictureSizes.get(i).getHeight() * mAspectRatio.getX() / mAspectRatio.getY()) {
            return pictureSizes.get(i);
        }
    }
    return pictureSizes.get(maxIndex);
}
/**
 * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
 * width and height are at least as large as the minimum of both, or an exact match if possible.
 *
 * @param choices The list of sizes that the camera supports for the intended output class
 * @param width   The minimum desired width
 * @param height  The minimum desired height
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
private static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
    final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
    final Size desiredSize = new Size(width, height);

    // Collect the supported resolutions that are at least as big as the preview Surface
    boolean exactSizeFound = false;
    final List<Size> bigEnough = new ArrayList<Size>();
    final List<Size> tooSmall = new ArrayList<Size>();
    for (final Size option : choices) {
        if (option.equals(desiredSize)) {
            // Set the size but don't return yet so that remaining sizes will still be logged.
            exactSizeFound = true;
        }
        if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
            bigEnough.add(option);
        } else {
            tooSmall.add(option);
        }
    }

    if (exactSizeFound) {
        return desiredSize;
    }

    // Pick the smallest of those, assuming we found any
    if (bigEnough.size() > 0) {
        final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
        return chosenSize;
    } else {
        return choices[0];
    }
}
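// A hypothetical excerpt showing how this variant is typically driven inside the camera fragment
// (the names follow the TensorFlow Android demo and are assumptions about the surrounding class,
// not part of the snippet above):
StreamConfigurationMap configurationMap =
        characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
previewSize = chooseOptimalSize(
        configurationMap.getOutputSizes(SurfaceTexture.class),
        inputSize.getWidth(), inputSize.getHeight());
sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// Report the chosen size back to the hosting activity (see onPreviewSizeChosen above).
cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);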
/**
 * Once camera is open and output surfaces are ready, configure the RS processing
 * and the camera device inputs/outputs.
 */
private void setupProcessing(Size outputSize) {
    if (mRenderer == null) {
        mRenderer = createNewRendererForCurrentType(outputSize);
    }
    if (mPreviewSurface == null) {
        return;
    }

    mRenderer.setOutputSurface(mPreviewSurface);
    mProcessingNormalSurface = mRenderer.getInputSurface();

    List<Surface> cameraOutputSurfaces = new ArrayList<>();
    cameraOutputSurfaces.add(mProcessingNormalSurface);
    mCameraOps.setSurfaces(cameraOutputSurfaces);
}
@Override
public RsSurfaceRenderer createNewRendererForCurrentType(Size outputSize) {
    if (cameraPreviewRenderer == null) {
        cameraPreviewRenderer =
                new RsCameraPreviewRenderer(rs, outputSize.getWidth(), outputSize.getHeight());
        cameraPreviewRenderer.setDroppedFrameLogger(frameStats);
    }
    updateRsRenderer();
    return cameraPreviewRenderer;
}
@SuppressWarnings("NewApi") @Test public void testSerializeSize() { Size expectedValue = new Size(9, 5); mCoder.serialize(bundle(), randomKey(), expectedValue); assertEquals(expectedValue, bundle().getSize(randomKey())); }
@Config(minSdk = Build.VERSION_CODES.LOLLIPOP)
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Test
public void testCoderApi21NotNullAbove21() throws CoderNotFoundException {
    assertCoderNotNull(Size.class);
    assertCoderNotNull(SizeF.class);
}
@Config(maxSdk = Build.VERSION_CODES.KITKAT)
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
@Test(expected = CoderNotFoundException.class)
public void testCoderApi21ThrowsBelowApi21() throws CoderNotFoundException {
    assertCoderNotNull(Size.class);
    assertCoderNotNull(SizeF.class);
}
/**
 * Given {@code choices} of {@code Size}s supported by a camera, choose the smallest one that
 * is at least as large as the respective texture view size, and that is at most as large as the
 * respective max size, and whose aspect ratio matches with the specified value. If such size
 * doesn't exist, choose the largest one that is at most as large as the respective max size,
 * and whose aspect ratio matches with the specified value.
 *
 * @param choices           The list of sizes that the camera supports for the intended output class
 * @param textureViewWidth  The width of the texture view relative to sensor coordinate
 * @param textureViewHeight The height of the texture view relative to sensor coordinate
 * @param maxWidth          The maximum width that can be chosen
 * @param maxHeight         The maximum height that can be chosen
 * @param aspectRatio       The aspect ratio
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
public static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
        int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
    // Collect the supported resolutions that are at least as big as the preview Surface
    List<Size> bigEnough = new ArrayList<>();
    // Collect the supported resolutions that are smaller than the preview Surface
    List<Size> notBigEnough = new ArrayList<>();
    int w = aspectRatio.getWidth();
    int h = aspectRatio.getHeight();
    for (Size option : choices) {
        if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight
                && option.getHeight() == option.getWidth() * h / w) {
            if (option.getWidth() >= textureViewWidth && option.getHeight() >= textureViewHeight) {
                bigEnough.add(option);
            } else {
                notBigEnough.add(option);
            }
        }
    }

    // Pick the smallest of those big enough. If there is no one big enough, pick the
    // largest of those not big enough.
    if (bigEnough.size() > 0) {
        return Collections.min(bigEnough, new CompareSizesByArea());
    } else if (notBigEnough.size() > 0) {
        return Collections.max(notBigEnough, new CompareSizesByArea());
    } else {
        Log.e(TAG, "Couldn't find any suitable preview size");
        return choices[0];
    }
}
@Override
public void write(Bundle bundle, Object to, StateField field) throws IllegalAccessException {
    Field propertyField = field.getField();
    propertyField.setAccessible(true);
    bundle.putSize(field.getBundleKey(), (Size) propertyField.get(to));
}
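// A hypothetical read counterpart to write() above, restoring the Size through the same
// reflective StateField access; the method name and surrounding interface are assumptions about
// the library, not confirmed API.
public void read(Bundle bundle, Object to, StateField field) throws IllegalAccessException {
    Field propertyField = field.getField();
    propertyField.setAccessible(true);
    propertyField.set(to, bundle.getSize(field.getBundleKey()));
}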