Java class org.webrtc.MediaCodecVideoEncoder — example source snippets

Project: AndroidRTC    File: PeerConnectionClientTest.java
@Test
@SmallTest
public void testLoopbackVp8CaptureToTexture() throws InterruptedException {
  // Texture-based encoding needs MediaCodec surface input, added in API 19 (KITKAT).
  final boolean sdkTooOld = Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT;
  if (sdkTooOld) {
    Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19");
    return;
  }
  // TODO(perkj): If we can always capture to textures, there is no need to check if the
  // hardware encoder supports to encode from a texture.
  if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
    Log.i(TAG, "VP8 encode to textures is not supported.");
    return;
  }
  // Run a VP8 loopback call capturing into a texture and decoding back to a texture.
  doLoopbackTest(
      createParametersForVideoCall(VIDEO_CODEC_VP8),
      createCameraCapturer(true /* captureToTexture */),
      true /* decodeToTexture */);
}
Project: AndroidRTC    File: PeerConnectionClientTest.java
@Test
@SmallTest
public void testLoopbackH264CaptureToTexture() throws InterruptedException {
  // Texture-based encoding needs MediaCodec surface input, added in API 19 (KITKAT).
  final boolean sdkTooOld = Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT;
  if (sdkTooOld) {
    Log.i(TAG, "Encode to textures is not supported. Requires KITKAT");
    return;
  }
  // TODO(perkj): If we can always capture to textures, there is no need to check if the
  // hardware encoder supports to encode from a texture.
  if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) {
    Log.i(TAG, "H264 encode to textures is not supported.");
    return;
  }
  // Run an H264 loopback call capturing into a texture and decoding back to a texture.
  doLoopbackTest(
      createParametersForVideoCall(VIDEO_CODEC_H264),
      createCameraCapturer(true /* captureToTexture */),
      true /* decodeToTexture */);
}
Project: VideoCRE    File: Mp4Recorder.java
/**
 * Writes each encoded frame to the muxer as a sample. Codec-config frames
 * (flagged BUFFER_FLAG_CODEC_CONFIG) are skipped — presumably the muxer
 * receives that data via the track format instead; confirm against the
 * addTrack() call site.
 */
@Override
public void onEncodedFrame(final MediaCodecVideoEncoder.OutputBufferInfo frame,
        final MediaCodec.BufferInfo bufferInfo) {
    final boolean isCodecConfig =
            (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
    if (isCodecConfig) {
        return; // config data is not written as a media sample
    }
    mMediaMuxer.writeSampleData(mTrackIndex, frame.buffer(), bufferInfo);
}
Project: VideoCRE    File: HwAvcEncoder.java
/**
 * Creates the hardware AVC encoder wrapper. A dedicated handler thread is
 * spun up so all encoder interaction happens off the caller's thread;
 * encoded output is fanned out to the supplied callbacks.
 */
public HwAvcEncoder(final VideoConfig videoConfig, final MediaCodecCallback... callbacks) {
    mVideoConfig = videoConfig;
    mMediaCodecCallbacks = Arrays.asList(callbacks);
    mVideoEncoder = new MediaCodecVideoEncoder();
    // Dedicated thread + handler for all MediaCodec work.
    mMediaCodecThread = new HandlerThread("HwAvcEncoderThread");
    mMediaCodecThread.start();
    mMediaCodecHandler = new Handler(mMediaCodecThread.getLooper());
}
Project: VideoCRE    File: HwAvcEncoder.java
/**
 * Initializes the H264 encoder asynchronously on the codec thread, using the
 * constrained-baseline profile and the output dimensions/bitrate/fps from the
 * video config. The EGL context enables texture (surface) input.
 */
public void start(final EglBase eglBase) {
    final Runnable initTask = new Runnable() {
        @Override
        public void run() {
            mVideoEncoder.initEncode(
                    MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_H264,
                    MediaCodecVideoEncoder.H264Profile.CONSTRAINED_BASELINE.getValue(),
                    mVideoConfig.outputWidth(), mVideoConfig.outputHeight(),
                    mVideoConfig.outputBitrate(), mVideoConfig.fps(),
                    eglBase.getEglBaseContext(), HwAvcEncoder.this);
        }
    };
    mMediaCodecHandler.post(initTask);
}
Project: VideoCRE    File: HwAvcEncoder.java
/** Fans one encoded frame out to every registered callback, in registration order. */
@Override
public void onEncodedFrame(final MediaCodecVideoEncoder.OutputBufferInfo frame,
        final MediaCodec.BufferInfo bufferInfo) {
    for (final MediaCodecCallback callback : mMediaCodecCallbacks) {
        callback.onEncodedFrame(frame, bufferInfo);
    }
}
Project: webrtcpeer-android    File: MediaResourceManager.java
/**
 * Builds all media constraint sets from {@code peerConnectionParameters}:
 * peer connection, video (only when a camera is present and video is enabled),
 * audio, and SDP constraints. Re-initializes the corresponding fields, so it
 * must run before any of them are read.
 */
void createMediaConstraints() {
    createPeerConnectionConstraints();
    // Check if there is a camera on device and disable video call if not.
    if (numberOfCameras == 0) {
        Log.w(TAG, "No camera on device. Switch to audio only call.");
        videoCallEnabled = false;
    }
    // Create video constraints if video call is enabled.
    if (videoCallEnabled) {
        createVideoConstraints();
    }
    createAudioConstraints();
    createSdpConstraints();
}

/** Initializes {@code pcConstraints}; DTLS is enabled for normal calls, disabled for loopback. */
private void createPeerConnectionConstraints() {
    pcConstraints = new MediaConstraints();
    // Enable DTLS for normal calls and disable for loopback calls.
    final String dtlsEnabled = peerConnectionParameters.loopback ? "false" : "true";
    pcConstraints.optional.add(new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, dtlsEnabled));
    pcConstraints.optional.add(new MediaConstraints.KeyValuePair("internalSctpDataChannels", "true"));
}

/** Initializes {@code videoConstraints} with resolution and fps from the call parameters. */
private void createVideoConstraints() {
    videoConstraints = new MediaConstraints();
    int videoWidth = peerConnectionParameters.videoWidth;
    int videoHeight = peerConnectionParameters.videoHeight;
    // If VP8 HW video encoder is supported and video resolution is not
    // specified force it to HD.
    if ((videoWidth == 0 || videoHeight == 0) && peerConnectionParameters.videoCodecHwAcceleration && MediaCodecVideoEncoder.isVp8HwSupported()) {
        videoWidth = HD_VIDEO_WIDTH;
        videoHeight = HD_VIDEO_HEIGHT;
    }
    // Add video resolution constraints, clamped to the supported maximum.
    if (videoWidth > 0 && videoHeight > 0) {
        videoWidth = Math.min(videoWidth, MAX_VIDEO_WIDTH);
        videoHeight = Math.min(videoHeight, MAX_VIDEO_HEIGHT);
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(MIN_VIDEO_WIDTH_CONSTRAINT, Integer.toString(videoWidth)));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(MAX_VIDEO_WIDTH_CONSTRAINT, Integer.toString(videoWidth)));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(MIN_VIDEO_HEIGHT_CONSTRAINT, Integer.toString(videoHeight)));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(MAX_VIDEO_HEIGHT_CONSTRAINT, Integer.toString(videoHeight)));
    }
    // Add fps constraints, clamped to the supported maximum.
    int videoFps = peerConnectionParameters.videoFps;
    if (videoFps > 0) {
        videoFps = Math.min(videoFps, MAX_VIDEO_FPS);
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(MIN_VIDEO_FPS_CONSTRAINT, Integer.toString(videoFps)));
        videoConstraints.mandatory.add(new MediaConstraints.KeyValuePair(MAX_VIDEO_FPS_CONSTRAINT, Integer.toString(videoFps)));
    }
}

/** Initializes {@code audioConstraints}; processing is disabled for audio performance measurements. */
private void createAudioConstraints() {
    audioConstraints = new MediaConstraints();
    // added for audio performance measurements
    if (peerConnectionParameters.noAudioProcessing) {
        Log.d(TAG, "Disabling audio processing");
        audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
        audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
        audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
        audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
    }
}

/** Initializes {@code sdpMediaConstraints}; video is offered only for video or loopback calls. */
private void createSdpConstraints() {
    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    final String offerVideo = (videoCallEnabled || peerConnectionParameters.loopback) ? "true" : "false";
    sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", offerVideo));
    sdpMediaConstraints.optional.add(new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "true"));
    sdpMediaConstraints.optional.add(new MediaConstraints.KeyValuePair("internalSctpDataChannels", "true"));
}
Project: VideoCRE    File: MediaCodecCallback.java
/**
 * Called for each encoded output frame. The implementation must consume the
 * frame synchronously, i.e. before this method returns — presumably the
 * underlying output buffer is reused/released afterwards; confirm against the
 * encoder's buffer-release logic.
 *
 * @param frame encoder output wrapper; {@code frame.buffer()} exposes the encoded data
 * @param bufferInfo MediaCodec metadata for the frame (offset, size, presentation time, flags)
 */
void onEncodedFrame(MediaCodecVideoEncoder.OutputBufferInfo frame,
        MediaCodec.BufferInfo bufferInfo);