Java class com.google.api.client.http.InputStreamContent — example source code
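
Most of the project examples below follow the same pattern: wrap an InputStream in an InputStreamContent, set the length when it is known (which lets the media uploader choose a more efficient upload strategy), and pass it to a media-upload request such as a Cloud Storage objects().insert() call. The following is a minimal sketch of that pattern, not taken from any of the projects below; the bucket name is a placeholder and the authenticated Storage client setup is omitted.

import com.google.api.client.http.InputStreamContent;
import com.google.api.services.storage.Storage;
import com.google.api.services.storage.model.StorageObject;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

public class InputStreamContentSketch {

    /**
     * Uploads a local file to a Cloud Storage bucket.
     *
     * @param storage    an authenticated Storage client (setup omitted here)
     * @param bucketName the destination bucket (placeholder)
     * @param file       the local file to upload
     */
    static void uploadFile(Storage storage, String bucketName, File file) throws IOException {
        InputStreamContent content = new InputStreamContent(
                "application/octet-stream", new FileInputStream(file));
        // Setting the length up front lets the uploader skip buffering to determine the size.
        content.setLength(file.length());

        StorageObject metadata = new StorageObject().setName(file.getName());
        storage.objects().insert(bucketName, metadata, content).execute();
    }
}

When the length is not known in advance, setLength(-1) is used instead, as in the AbstractGoogleAsyncWriteChannel example further down.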

Project: coala    File: DealOAuth1Util.java
/**
 * Post an order using the token and secret token to authenticate in DEAL.
 * To demonstrate successful authentication a POST request is executed.
 * 
 * @param messageToBePosted the JSON string representing the order
 * @return the result of the post action
 * 
 * @throws Exception on an exception occurring
 */
public static String PostOrderWithTokensOnly(String messageToBePosted)
        throws Exception
{
    // utilize accessToken to access protected resource in DEAL
    HttpRequestFactory factory = TRANSPORT
            .createRequestFactory(getParameters());
    GenericUrl url = new GenericUrl(PROTECTED_ORDERS_URL);
    InputStream stream = new ByteArrayInputStream(
            messageToBePosted.getBytes());
    InputStreamContent content = new InputStreamContent(JSON_IDENTIFIER,
            stream);
    HttpRequest req = factory.buildPostRequest(url, content);

    HttpResponse resp = req.execute();
    String response = resp.parseAsString();

    // log the response
    if (resp.getStatusCode() != 200 && LOG.isInfoEnabled())
    {
        LOG.info("Response Status Code: " + resp.getStatusCode());
        LOG.info("Response body:" + response);
    }

    return response;
}
Project: policyscanner    File: GitGCSSyncApp.java
private void recursiveGCSSync(File repo, String prefix, Storage gcs) throws IOException {
  for (String name : repo.list()) {
    if (!name.equals(".git")) {
      File file = new File(repo.getPath(), name);
      if (file.isDirectory()) {
        recursiveGCSSync(file, prefix + file.getName() + "/", gcs);
      } else {
        InputStream inputStream = new FileInputStream(repo.getPath() + "/" + file.getName());
        InputStreamContent mediaContent = new InputStreamContent("text/plain", inputStream);
        mediaContent.setLength(file.length());
        StorageObject objectMetadata = new StorageObject()
            .setName(prefix + file.getName());
        gcs.objects().insert(bucket, objectMetadata, mediaContent).execute();
      }
    }
  }
}
Project: MCSFS    File: GCStore.java
/**
 * Uploads data to an object in a bucket.
 *
 * @param name the name of the destination object.
 * @param contentType the MIME type of the data.
 * @param file the file to upload.
 * @param bucketName the name of the bucket to create the object in.
 */
private void uploadFile(String name, String contentType, File file, String bucketName) throws IOException,
        GeneralSecurityException {
    InputStreamContent contentStream = new InputStreamContent(contentType, new FileInputStream(file));
    // Setting the length improves upload performance
    contentStream.setLength(file.length());
    StorageObject objectMetadata = new StorageObject()
            // Set the destination object name
            .setName(name)
            // Set the access control list to publicly read-only
            .setAcl(Arrays.asList(
                    new ObjectAccessControl().setEntity("allUsers").setRole("READER")));

    // Do the insert
    Storage client = StorageFactory.getService();
    Storage.Objects.Insert insertRequest = client.objects().insert(
            bucketName, objectMetadata, contentStream);

    insertRequest.execute();
    LogUtils.debug(LOG_TAG, "Successfully uploaded file to bucket.\nName: " + name + "\nBucket name: " +
            bucketName);
}
Project: simpleci    File: GoogleStorageCacheManager.java
@Override
public void uploadCache(JobOutputProcessor outputProcessor, String cachePath) {
    try {
            outputProcessor.output("Uploading cache file " + cacheFileName + " to google storage\n");

            Storage client = createClient();
            File uploadFile = new File(cachePath);
            InputStreamContent contentStream = new InputStreamContent(
                    null, new FileInputStream(uploadFile));
            contentStream.setLength(uploadFile.length());
            StorageObject objectMetadata = new StorageObject()
                    .setName(cacheFileName);

            Storage.Objects.Insert insertRequest = client.objects().insert(
                    settings.bucketName, objectMetadata, contentStream);

            insertRequest.execute();

            outputProcessor.output("Cache uploaded\n");
    } catch (GeneralSecurityException | IOException e) {
        outputProcessor.output("Error upload cache: " + e.getMessage() + "\n");
    }
}
Project: gcloud-storage-speedtest    File: StorageManager.java
/** Upload data. */
public static boolean putBytes(final String bucket, final String key, final byte[] bytes) {
    final InputStreamContent mediaContent =
        new InputStreamContent("application/octet-stream", new ByteArrayInputStream(bytes));
    mediaContent.setLength(bytes.length);

    try {
        final Storage.Objects.Insert insertObject =
            client.objects().insert(bucket, null, mediaContent);
        insertObject.setName(key);
        if (mediaContent.getLength() > 0
            && mediaContent.getLength() <= 2 * 1000 * 1000 /* 2MB */) {
            insertObject.getMediaHttpUploader().setDirectUploadEnabled(true);
        }
        insertObject.execute();
        return true;
    } catch (IOException e) {
        LOGGER.error("Error uploading data", e);
        return false;
    }
}
Project: java-docs-samples    File: StorageSample.java
/**
 * Uploads data to an object in a bucket.
 *
 * @param name the name of the destination object.
 * @param contentType the MIME type of the data.
 * @param file the file to upload.
 * @param bucketName the name of the bucket to create the object in.
 */
public static void uploadFile(
    String name, String contentType, File file, String bucketName)
    throws IOException, GeneralSecurityException {
  InputStreamContent contentStream = new InputStreamContent(
      contentType, new FileInputStream(file));
  // Setting the length improves upload performance
  contentStream.setLength(file.length());
  StorageObject objectMetadata = new StorageObject()
      // Set the destination object name
      .setName(name)
      // Set the access control list to publicly read-only
      .setAcl(Arrays.asList(
          new ObjectAccessControl().setEntity("allUsers").setRole("READER")));

  // Do the insert
  Storage client = StorageFactory.getService();
  Storage.Objects.Insert insertRequest = client.objects().insert(
      bucketName, objectMetadata, contentStream);

  insertRequest.execute();
}
Project: bigdata-interop    File: GoogleCloudStorageWriteChannel.java
@Override
public Insert createRequest(InputStreamContent inputStream) throws IOException {
  // Create object with the given name and metadata.
  StorageObject object =
      new StorageObject()
          .setMetadata(metadata)
          .setName(objectName);

  Insert insert = gcs.objects().insert(bucketName, object, inputStream);
  writeConditions.apply(insert);
  if (insert.getMediaHttpUploader() != null) {
    insert.getMediaHttpUploader().setDirectUploadEnabled(isDirectUploadEnabled());
    insert.getMediaHttpUploader().setProgressListener(
      new LoggingMediaHttpUploaderProgressListener(this.objectName, MIN_LOGGING_INTERVAL_MS));
  }
  insert.setName(objectName);
  return insert;
}
Project: loli.io    File: GDriveAPI.java
public static com.google.api.services.drive.model.File uploadFile(
        java.io.File file, Credential credential, String parent)
        throws IOException {
    com.google.api.services.drive.model.File fileMetadata = new com.google.api.services.drive.model.File();
    fileMetadata.setParents(Arrays.asList(new ParentReference()
            .setId(parent)));
    fileMetadata.setTitle(file.getName());
    InputStreamContent mediaContent = new InputStreamContent(
            "image/png", new BufferedInputStream(new FileInputStream(
                    file)));
    mediaContent.setLength(file.length());

    Drive.Files.Insert insert = drive.files().insert(fileMetadata,
            mediaContent);
    MediaHttpUploader uploader = insert.getMediaHttpUploader();
    uploader.setDirectUploadEnabled(true);
    return insert.execute();
}
Project: jdbox    File: DriveAdapter.java
public File createFile(File file, InputStream content) throws IOException {

    logger.debug("creating {}", file);

    com.google.api.services.drive.model.File gdFile = file.toGdFile();

    Drive.Files.Insert request;
    if (file.isDirectory()) {
        request = drive.files().insert(gdFile);
    } else {
        request = drive.files().insert(gdFile, new InputStreamContent(file.getMimeType(), content));
        request.getMediaHttpUploader().setDirectUploadEnabled(true);
    }

    return new File(request.execute());
}
Project: PicSync    File: DriveApi.java
public File uploadFile(String name, String mimeType, java.io.File mediaFile, boolean onWifi)
        throws IOException, JSONException {
    // File Metadata
    File fileMetadata = new File();
    fileMetadata.setTitle(name);
    fileMetadata.setMimeType(mimeType);

    // Set the parent folder.
    ParentReference uploadDir = new ParentReference();
    uploadDir.setId(findUploadDirectory().getId());
    fileMetadata.setParents(Arrays.asList(uploadDir));

    InputStreamContent mediaContent = new InputStreamContent(mimeType, new BufferedInputStream(
            new FileInputStream(mediaFile)));
    mediaContent.setLength(mediaFile.length());

    Drive.Files.Insert insert = drive.files().insert(fileMetadata, mediaContent);
    insert.getMediaHttpUploader().setProgressListener(new ProgressListener(mediaFile));
    insert.getMediaHttpUploader().setBackOffPolicyEnabled(true);
    int chunkSize = onWifi ? MediaHttpUploader.MINIMUM_CHUNK_SIZE * 2
            : MediaHttpUploader.MINIMUM_CHUNK_SIZE;
    insert.getMediaHttpUploader().setChunkSize(chunkSize);
    return insert.execute();

}
Project: simplejavayoutubeuploader    File: ThumbnailServiceImpl.java
@Override
public void upload(final File thumbnail, final String videoid, final Account account) throws FileNotFoundException, ThumbnailIOException {
    if (!thumbnail.exists()) {
        throw new FileNotFoundException(thumbnail.getName());
    }

    final YouTube youTube = youTubeProvider.setAccount(account).get();
    try (final BufferedInputStream bufferedInputStream = new BufferedInputStream(new FileInputStream(thumbnail))) {

        youTube.thumbnails()
                .set(videoid, new InputStreamContent("application/octet-stream", bufferedInputStream))
                .execute();
    } catch (final IOException e) {
        throw new ThumbnailIOException(e);
    }
}
Project: elasticsearch_my    File: GoogleCloudStorageBlobStore.java
/**
 * Writes a blob in the bucket.
 *
 * @param inputStream content of the blob to be written
 * @param blobSize    expected size of the blob to be written
 */
void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException {
    SocketAccess.doPrivilegedVoidIOException(() -> {
        InputStreamContent stream = new InputStreamContent(null, inputStream);
        stream.setLength(blobSize);

        Storage.Objects.Insert insert = client.objects().insert(bucket, null, stream);
        insert.setName(blobName);
        insert.execute();
    });
}
Project: financisto1-holo    File: Export.java
/**
 * Backup database to google docs
 * 
 * @param drive Google docs connection
 * @param targetFolder Google docs folder name
 * */
public String exportOnline(Drive drive, String targetFolder) throws Exception {
    // get folder first
    String folderId = GoogleDriveClient.getOrCreateDriveFolder(drive, targetFolder);
    if (folderId == null) {
        throw new ImportExportException(R.string.gdocs_folder_not_found);
    }

    // generation backup file
    String fileName = generateFilename();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    OutputStream out = new BufferedOutputStream(new GZIPOutputStream(outputStream));
    generateBackup(out);

    // transforming streams
    InputStream backup = new ByteArrayInputStream(outputStream.toByteArray());

    InputStreamContent mediaContent = new InputStreamContent(BACKUP_MIME_TYPE, new BufferedInputStream(backup));
    mediaContent.setLength(outputStream.size());
    // File's metadata.
    com.google.api.services.drive.model.File body = new com.google.api.services.drive.model.File();
    body.setTitle(fileName);
    body.setMimeType(BACKUP_MIME_TYPE);
    body.setFileSize((long) outputStream.size());
    List<ParentReference> parentReference = new ArrayList<ParentReference>();
    parentReference.add(new ParentReference().setId(folderId));
    body.setParents(parentReference);
    com.google.api.services.drive.model.File file = drive.files().insert(body, mediaContent).execute();

    return fileName;
}
Project: dataproc-java-submitter    File: Jobs.java
private String getStagedURI(Cluster cluster, String uri) {
  URI parsed = URI.create(uri);

  if (!isNullOrEmpty(parsed.getScheme()) && !"file".equals(parsed.getScheme())) {
    return uri;
  }

  // either a file URI or just a path, stage it to GCS
  File local = Paths.get(uri).toFile();
  String configBucket = cluster.getConfig().getConfigBucket();

  LOG.debug("Staging {} in GCS bucket {}", uri, configBucket);

  try {
    InputStreamContent content = new InputStreamContent(
        "application/octet-stream", new FileInputStream(local));
    content.setLength(local.length()); // docs say that setting length improves upload perf

    StorageObject object = new StorageObject()
        .setName(Paths.get(GCS_STAGING_PREFIX, local.getName()).toString());

    object = client.storage().objects()
        .insert(configBucket, object, content)
        .execute();
    return new URI("gs", object.getBucket(), "/" + object.getName(), null).toString();
  } catch (URISyntaxException | IOException e) {
    throw Throwables.propagate(e);
  }
}
Project: flowzr-android-black    File: Export.java
public String exportOnline(Drive drive, String targetFolder) throws Exception {
    // get folder first
    String folderId = GoogleDriveClient.getOrCreateDriveFolder(drive, targetFolder);
    if (folderId == null) {
        throw new ImportExportException(R.string.gdocs_folder_not_configured);
    }

    // generation backup file
    String fileName = generateFilename();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    OutputStream out = new BufferedOutputStream(new GZIPOutputStream(outputStream));
    generateBackup(out);

    // transforming streams
    InputStream backup = new ByteArrayInputStream(outputStream.toByteArray());

    InputStreamContent mediaContent = new InputStreamContent(BACKUP_MIME_TYPE, new BufferedInputStream(backup));
    mediaContent.setLength(outputStream.size());
    // File's metadata.
    com.google.api.services.drive.model.File body = new com.google.api.services.drive.model.File();
    body.setTitle(fileName);
    body.setMimeType(BACKUP_MIME_TYPE);
    body.setFileSize((long)outputStream.size());
    List<ParentReference> parentReference = new ArrayList<ParentReference>();
    parentReference.add(new ParentReference().setId(folderId));
    body.setParents(parentReference);
    com.google.api.services.drive.model.File file = drive.files().insert(body, mediaContent).execute();

    return fileName;
}
Project: ODK-Liberia    File: PicasaClient.java
public PhotoEntry executeInsertPhotoEntry(
    PicasaUrl albumFeedUrl, InputStreamContent content, String fileName) throws IOException {
  HttpRequest request = getRequestFactory().buildPostRequest(albumFeedUrl, content);
  HttpHeaders headers = new HttpHeaders();
  Atom.setSlugHeader(headers, fileName);
  request.setHeaders(headers);
  return execute(request).parseAs(PhotoEntry.class);
}
Project: java-asana    File: Attachments.java
/**
 * Upload a file and attach it to a task
 *
 * @param task        Globally unique identifier for the task.
 * @param fileContent Content of the file to be uploaded
 * @param fileName    Name of the file to be uploaded
 * @param fileType    MIME type of the file to be uploaded
 * @return Request object
 */
public ItemRequest<Attachment> createOnTask(String task, InputStream fileContent, String fileName, String fileType) {
    MultipartContent.Part part = new MultipartContent.Part()
            .setContent(new InputStreamContent(fileType, fileContent))
            .setHeaders(new HttpHeaders().set(
                    "Content-Disposition",
                    String.format("form-data; name=\"file\"; filename=\"%s\"", fileName) // TODO: escape fileName?
            ));
    MultipartContent content = new MultipartContent()
            .setMediaType(new HttpMediaType("multipart/form-data").setParameter("boundary", UUID.randomUUID().toString()))
            .addPart(part);

    String path = String.format("/tasks/%s/attachments", task);
    return new ItemRequest<Attachment>(this, Attachment.class, path, "POST")
            .data(content);
}
Project: providence    File: ProvidenceServletTest.java
private HttpResponse post(String contentType, String content) throws IOException {
    ByteArrayInputStream bais = new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8));
    HttpRequest request = factory().buildPostRequest(endpoint(), new InputStreamContent(contentType, bais))
                         .setThrowExceptionOnExecuteError(false);
    request.getHeaders().setContentType(contentType);
    request.getHeaders().setAccept("*/*");
    return request.execute();
}
Project: tech-gallery    File: StorageDAOImpl.java
@Override
public String insertImage(String name, InputStream stream)
    throws IOException, GeneralSecurityException {
  InputStreamContent contentStream = new InputStreamContent(IMAGE_FORMAT, stream);

  return StorageHandler.saveImage(name, contentStream);
}
Project: java-docs-samples    File: CustomerSuppliedEncryptionKeysSamples.java
/**
 * Uploads an object to GCS, to be stored with a customer-supplied key (CSEK). The upload may
 * continue in the background after this method returns. The caller of this method is responsible
 * for closing the input stream.
 *
 * @param storage A Storage object, ready for use
 * @param bucketName The name of the destination bucket
 * @param objectName The name of the destination object
 * @param data An InputStream containing the contents of the object to upload
 * @param base64CseKey An AES256 key, encoded as a base64 string.
 * @param base64CseKeyHash The SHA-256 hash of the above key, also encoded as a base64 string.
 * @throws IOException if there was some error uploading to GCS.
 */
public static void uploadObject(
    Storage storage,
    String bucketName,
    String objectName,
    InputStream data,
    String base64CseKey,
    String base64CseKeyHash)
    throws IOException {
  InputStreamContent mediaContent = new InputStreamContent("text/plain", data);
  Storage.Objects.Insert insertObject =
      storage.objects().insert(bucketName, null, mediaContent).setName(objectName);
  // The client library's default gzip setting may cause objects to be stored with gzip encoding,
  // which can be desirable in some circumstances but has some disadvantages as well, such as
  // making it difficult to read only a certain range of the original object.
  insertObject.getMediaHttpUploader().setDisableGZipContent(true);

  // Now set the CSEK headers
  final HttpHeaders httpHeaders = new HttpHeaders();
  httpHeaders.set("x-goog-encryption-algorithm", "AES256");
  httpHeaders.set("x-goog-encryption-key", base64CseKey);
  httpHeaders.set("x-goog-encryption-key-sha256", base64CseKeyHash);

  insertObject.setRequestHeaders(httpHeaders);

  try {
    insertObject.execute();
  } catch (GoogleJsonResponseException e) {
    System.out.println("Error uploading: " + e.getContent());
    System.exit(1);
  }
}
Project: digdag    File: BigQueryIT.java
@Test
public void testLoad()
        throws Exception
{
    assumeThat(GCS_TEST_BUCKET, not(isEmptyOrNullString()));

    // Create source data object
    String objectName = GCS_PREFIX + "test.csv";
    byte[] data = Joiner.on('\n').join("a,b", "c,d").getBytes(UTF_8);
    InputStreamContent content = new InputStreamContent("text/csv", new ByteArrayInputStream(data))
            .setLength(data.length);
    StorageObject metadata = new StorageObject().setName(objectName);
    gcs.objects()
            .insert(GCS_TEST_BUCKET, metadata, content)
            .execute();

    // Create output dataset
    String datasetId = BQ_TAG + "_load_test";
    Dataset dataset = new Dataset().setDatasetReference(new DatasetReference()
            .setProjectId(gcpProjectId)
            .setDatasetId(datasetId));
    bq.datasets().insert(gcpProjectId, dataset)
            .execute();

    // Run load
    String tableId = "data";
    addWorkflow(projectDir, "acceptance/bigquery/load.dig");
    Id attemptId = pushAndStart(server.endpoint(), projectDir, "load", ImmutableMap.of(
            "source_bucket", GCS_TEST_BUCKET,
            "source_object", objectName,
            "target_dataset", datasetId,
            "target_table", tableId,
            "outfile", outfile.toString()));
    expect(Duration.ofMinutes(5), attemptSuccess(server.endpoint(), attemptId));
    assertThat(Files.exists(outfile), is(true));

    // Check that destination table was created
    Table destinationTable = bq.tables().get(gcpProjectId, datasetId, tableId).execute();
    assertThat(destinationTable.getTableReference().getTableId(), is(tableId));
}
Project: abelana    File: CloudStorage.java
/**
 * Uploads an image to Google Cloud Storage.
 * @param url the upload url.
 * @param bitmap the image to upload.
 * @throws IOException if cannot upload the image.
 */
public static void uploadImage(String url, Bitmap bitmap)
        throws IOException {

    ByteArrayOutputStream bOS = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.JPEG, 100, bOS);
    byte[] bitmapData = bOS.toByteArray();

    InputStream stream = new ByteArrayInputStream(bitmapData);
    String contentType = URLConnection
            .guessContentTypeFromStream(stream);
    InputStreamContent content = new InputStreamContent(contentType,
            stream);

    MediaType MEDIA_TYPE_JPEG = MediaType.parse("image/jpeg");

    OkHttpClient client = new OkHttpClient();

    RequestBody requestBody = RequestBodyUtil.create(MEDIA_TYPE_JPEG,
            content.getInputStream());
    Request request = new Request.Builder()
            .url(url)
            .put(requestBody)
            .build();
    Response response = client.newCall(request).execute();
    if (!response.isSuccessful()) {
        throw new IOException("Unexpected code " + response);
    }
}
Project: heron    File: GcsController.java
StorageObject createStorageObject(String storageObjectName, File file) throws IOException {
  final InputStreamContent content =
      new InputStreamContent(MIME_TYPE, new FileInputStream(file));
  final Storage.Objects.Insert insertStorageObject =
      storage.objects()
          .insert(bucket, null, content)
          .setName(storageObjectName)
          .setPredefinedAcl(DEFAULT_PREDEFINED_ACL);

  // The media uploader gzips content by default, and alters the Content-Encoding accordingly.
  // GCS dutifully stores content as-uploaded. This line disables the media uploader behavior,
  // so the service stores exactly what is in the InputStream, without transformation.
  insertStorageObject.getMediaHttpUploader().setDisableGZipContent(true);
  return insertStorageObject.execute();
}
Project: reddcrawl    File: GoogleStorageJsonArchive.java
@Override
public void writeNodes(@Nonnull Multimap<String, JsonNode> nodesByArchiveName, @Nullable JsonArchiveEventHandler eventHandler) throws IOException {
    final DateTime currentTime = DateTime.now();
    final List<String> archivesUpdated = new ArrayList<String>();
    for (final String archiveName : nodesByArchiveName.keySet()) {
        //create a new buffer for this archive
        final ByteArrayOutputStream archiveOutputBuffer = new ByteArrayOutputStream();
        final String archiveObjectName = archiveName + "/stories-" + currentTime.getMillis() + ".json.gz";
        final OutputStreamWriter outputStreamWriter = new OutputStreamWriter(new GZIPOutputStream(archiveOutputBuffer), "UTF-8");

        final Iterator<JsonNode> nodeIterator = nodesByArchiveName.get(archiveName).iterator();
        while (nodeIterator.hasNext()) {
            final JsonNode nodeToWrite = nodeIterator.next();
            outputStreamWriter.write(OBJECT_MAPPER.writeValueAsString(nodeToWrite));
            outputStreamWriter.write('\n');
        }
        outputStreamWriter.close();

        final ByteArrayInputStream bufferInputStream = new ByteArrayInputStream(archiveOutputBuffer.toByteArray());
        final InputStreamContent googleInputStreamContent = new InputStreamContent("application/x-gzip", bufferInputStream);

        StorageObject uploadedStorageObject = this.googleStorage.objects()
                .insert(this.rootBucketName, new StorageObject().setName(archiveObjectName), googleInputStreamContent)
                .execute();

        if (uploadedStorageObject != null) {
            LOGGER.info("Wrote archive file " + rootBucketName + "/" + uploadedStorageObject.getName());
            if (eventHandler != null) {
                eventHandler.handleArchiveComplete(nodesByArchiveName.get(archiveName));
            }

            LOGGER.info("Recomposing archive " + rootBucketName + "/" + uploadedStorageObject.getName());
            recomposeArchive(archiveName, uploadedStorageObject);
        }
    }
}
Project: google-storage-plugin    File: AbstractUpload.java
/**
 * We need our own storage retry logic because we must recreate the
 * input stream for the media uploader.
 */
private void performUploadWithRetry(final Executor executor,
    final Storage service, final Bucket bucket, final StorageObject object,
    final FilePath include)
    throws ExecutorException, IOException, InterruptedException {
  Operation a = new Operation() {
    public void act()
        throws IOException, InterruptedException, ExecutorException {
      // Create the insertion operation with the decorated object and
      // an input stream of the file contents.
      Storage.Objects.Insert insertion =
          service.objects().insert(bucket.getName(), object,
              new InputStreamContent(
                  object.getContentType(), include.read()));

      // Make the operation non-resumable because we have seen a dramatic
      // (e.g. 1000x) speedup from this.
      MediaHttpUploader mediaUploader = insertion.getMediaHttpUploader();
      if (mediaUploader != null) {
        mediaUploader.setDirectUploadEnabled(true);
      }

      executor.execute(insertion);
    }
  };

  RetryStorageOperation
      .performRequestWithRetry(executor, a, module.getInsertRetryCount());
}
Project: bigdata-interop    File: BigQueryRecordWriter.java
@Override
public Insert createRequest(InputStreamContent inputStream) throws IOException {
  Insert insert = bigQueryHelper.getRawBigquery().jobs().insert(projectId, job, inputStream);
  insert.setProjectId(projectId);
  increment(Counter.JOBS_INSERTED);
  return insert;
}
Project: bigdata-interop    File: AbstractGoogleAsyncWriteChannel.java
/**
 * Initialize this channel object for writing.
 *
 * @throws IOException
 */
public void initialize() throws IOException {
  // Create a pipe such that its one end is connected to the input stream used by
  // the uploader and the other end is the write channel used by the caller.
  pipeSource = new PipedInputStream(pipeBufferSize);
  pipeSink = new PipedOutputStream(pipeSource);
  pipeSinkChannel = Channels.newChannel(pipeSink);

  // Connect pipe-source to the stream used by uploader.
  InputStreamContent objectContentStream =
      new InputStreamContent(contentType, pipeSource);
  // Indicate that we do not know length of file in advance.
  objectContentStream.setLength(-1);
  objectContentStream.setCloseInputStream(false);

  T request = createRequest(objectContentStream);
  request.setDisableGZipContent(true);

  // Set a larger backend upload-chunk granularity than system defaults.
  HttpHeaders headers = clientRequestHelper.getRequestHeaders(request);
  headers.set("X-Goog-Upload-Desired-Chunk-Granularity",
      Math.min(GCS_UPLOAD_GRANULARITY, uploadBufferSize));

  // Legacy check. Will be phased out.
  if (limitFileSizeTo250Gb) {
    headers.set("X-Goog-Upload-Max-Raw-Size", UPLOAD_MAX_SIZE);
  }

  // Change chunk size from default value (10MB) to one that yields higher performance.
  clientRequestHelper.setChunkSize(request, uploadBufferSize);

  // Given that the two ends of the pipe must operate asynchronous relative
  // to each other, we need to start the upload operation on a separate thread.
  uploadOperation = threadPool.submit(new UploadOperation(request, pipeSource));

  isInitialized = true;
}
Project: coala    File: DealOAuth1Util.java
/**
 * Put an order using the token and secret token to authenticate in DEAL. To
 * demonstrate successful authentication a PUT request is executed.
 * 
 * @param messageToBePosted the JSON string representing the order
 * @param identifier the identifier of the order to Put
 * 
 * @throws Exception on an exception occurring
 */
public static void PutOrderWithTokensOnly(String messageToBePosted,
        String identifier) throws Exception
{
    // utilize accessToken to access protected resource in DEAL
    HttpRequestFactory factory = TRANSPORT
            .createRequestFactory(getParameters());
    GenericUrl url = new GenericUrl(PROTECTED_ORDERS_URL + identifier);
    InputStream stream = new ByteArrayInputStream(
            messageToBePosted.getBytes());
    InputStreamContent content = new InputStreamContent(JSON_IDENTIFIER,
            stream);

    HttpRequest req = factory.buildPutRequest(url, content);

    req.getContent().writeTo(System.out);
    HttpResponse resp = req.execute();

    // Should we consider posting the data in case the put fails?

    // log the response
    if (LOG.isInfoEnabled())
    {
        LOG.info("Response Status Code: " + resp.getStatusCode());
        LOG.info("Response body:" + resp.parseAsString());
    }
}
Project: google-play-publisher    File: PublishHelper.java
private Apk uploadApk() throws PublishApkException {
    try {
        Apk apk = edits.apks()
                .upload(packageName, appEditId, new InputStreamContent(MIME_TYPE_APK, apkFilePath.read()))
                .execute();
        logger.println(String.format("Version code %d has been uploaded", apk.getVersionCode()));
        return apk;
    } catch (IOException e) {
        throw new PublishApkException("Failed to execute upload request", e);
    }
}
Project: Abelana-Android    File: AbelanaUpload.java
@Override
protected Void doInBackground(Void... params) {

    try {
        InputStreamContent mediaContent = new InputStreamContent( "application/octet-stream", file_to_copy);
        // Not strictly necessary, but allows optimization in the cloud.
        // mediaContent.setLength(OBJECT_SIZE);

        StorageObject objectMetadata = null;

        Storage.Objects.Insert insertObject =
                AbelanaThings.storage.objects().insert(BUCKET, objectMetadata, mediaContent);

        // If you don't provide metadata, you will have to specify the object
        // name by parameter. You will probably also want to ensure that your
        // default object ACLs (a bucket property) are set appropriately:
        // https://developers.google.com/storage/docs/json_api/v1/buckets#defaultObjectAcl
        insertObject.setName( fileName );

        insertObject.getMediaHttpUploader().setDisableGZipContent(true);
        // For small files, you may wish to call setDirectUploadEnabled(true), to
        // reduce the number of HTTP requests made to the server.
        if (mediaContent.getLength() > 0 && mediaContent.getLength() <= 2 * 1000 * 1000 /* 2MB */) {
            insertObject.getMediaHttpUploader().setDirectUploadEnabled(true);
        }
        insertObject.execute();

    } catch (IOException e) {
        e.printStackTrace();
    }

    return null;
}
Project: PicasaUploadSample    File: PicasaClient.java
public PhotoEntry executeInsertPhotoEntry(
    PicasaUrl albumFeedUrl, InputStreamContent content, String fileName) throws IOException {
  HttpRequest request = getRequestFactory().buildPostRequest(albumFeedUrl, content);
  HttpHeaders headers = new HttpHeaders();
  Atom.setSlugHeader(headers, fileName);
  request.setHeaders(headers);
  return execute(request).parseAs(PhotoEntry.class);
}
Project: PicasaUploadSample    File: PicasaUploadActivity.java
@SuppressWarnings("unused")
private PhotoEntry postPhoto(AlbumEntry album, Uri uri) throws IOException {
    String fileName = "test";
    InputStream iStream = getContentResolver().openInputStream(uri);
    InputStreamContent content = new InputStreamContent("image/jpeg", iStream);
    PhotoEntry photo =
        client.executeInsertPhotoEntry(new PicasaUrl(album.getFeedLink()), content, fileName);
    Log.i("TAG", "Image URL: " + photo.mediaGroup.content.url);
    return photo;
}
Project: PicasaUploadSample    File: PicasaUploadActivity.java
@SuppressWarnings("unused")
private PhotoEntry postPhotoWithMetaData(AlbumEntry album, Uri uri) throws IOException {
    // NOTE: this video is not included in the sample
    InputStream iStream = getContentResolver().openInputStream(uri);
    InputStreamContent imgContent = new InputStreamContent("image/jpeg", iStream);
    PhotoEntry photo = new PhotoEntry();
    photo.title = "未来へのキオクのテスト";
    photo.summary =  "未来へのキオクへの upload API のテストです。";
    GmlPoint point = GmlPoint.createLatLon(35.626446, 139.723444);
    PhotoEntry result = client.executeInsertPhotoEntryWithMetadata(
            photo, new PicasaUrl(album.getFeedLink()), imgContent, point);
    Log.i(TAG, "Image URL with Metadata: " + result.mediaGroup.content.url);
    return result;
}
Project: digdag    File: GcsWaitIT.java
protected void testGcsWait(String workflow, BiFunction<String, String, String> logNeedle)
        throws Exception
{
    String objectName = GCS_PREFIX + "data.csv";

    // Start waiting
    addWorkflow(projectDir, "acceptance/gcs_wait/" + workflow + ".dig");
    Id attemptId = pushAndStart(server.endpoint(), projectDir, workflow, ImmutableMap.of(
            "test_bucket", GCS_TEST_BUCKET,
            "test_object", objectName,
            "outfile", outfile.toString()));

    // Wait for gcs_wait polling to show up in logs
    expect(Duration.ofSeconds(30), () -> {
        String attemptLogs = TestUtils.getAttemptLogs(client, attemptId);
        return attemptLogs.contains(logNeedle.apply(GCS_TEST_BUCKET, objectName));
    });

    // Verify that the dependent task has not been executed
    assertThat(Files.exists(outfile), is(false));

    // Verify that the attempt is not yet done
    RestSessionAttempt attempt = client.getSessionAttempt(attemptId);
    assertThat(attempt.getDone(), is(false));

    // Create object
    byte[] data = "hello gcs!".getBytes(UTF_8);
    InputStreamContent content = new InputStreamContent("text/plain", new ByteArrayInputStream(data))
            .setLength(data.length);
    StorageObject metadata = new StorageObject().setName(objectName);
    gcs.objects()
            .insert(GCS_TEST_BUCKET, metadata, content)
            .execute();

    // Expect the attempt to finish and the dependent task to be executed
    expect(Duration.ofSeconds(300), attemptSuccess(server.endpoint(), attemptId));
    assertThat(Files.exists(outfile), is(true));

    JsonNode objectMetadata = MAPPER.readTree(Files.readAllBytes(outfile));
    int size = objectMetadata.get("metadata").get("size").asInt();
    assertThat(size, is(data.length));
}
Project: GoogleCloudStorage    File: CloudImageCRUD.java
/**
 * Attempts to insert the image in the given Bitmap into the given GoogleStorage, at the given
 * imageFullPath with the given format.
 *
 * All parameters are mandatory.
 * <p/>
 * NOTE: See full path explanation:
 * https://github.com/Mithrandir21/GoogleCloudStorage#object-full-path
 *
 * @param googleStorage
 * @param imageFullPath
 * @param image
 * @param format
 * @return
 * @throws IOException
 */
public static boolean insertCloudImage(GoogleStorage googleStorage, String imageFullPath, Bitmap image,
                                       MediaManipulation.SupportedImageFormats format)
        throws IOException
{
    if( googleStorage == null )
    {
        throw new IllegalArgumentException("Given GoogleStorage was null!");
    }

    if( (imageFullPath == null || imageFullPath.length() < 1) )
    {
        throw new IllegalArgumentException("Given imageFullPath was null or empty!");
    }

    if( image == null )
    {
        throw new IllegalArgumentException("Given image was null!");
    }

    if( format == null )
    {
        throw new IllegalArgumentException("Given format was null!");
    }


    Log.d(TAG, "Attempting upload " + imageFullPath);
    Bitmap.CompressFormat compressFormat = MediaManipulation.getCompressFormat(format);

    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    image.compress(compressFormat, 100, stream);
    byte[] byteArray = stream.toByteArray();

    ByteArrayInputStream bs = new ByteArrayInputStream(byteArray);
    Log.d(TAG, "Created InputStream for Bitmap.");

    InputStreamContent mediaContent = new InputStreamContent("image/" + format, bs);
    Log.d(TAG, "Created InputStreamContent for upload.");

    Storage storage = googleStorage.getStorage();

    StorageObject storageObject = new StorageObject();
    storageObject.setBucket(googleStorage.getBucketName());
    storageObject.setName(imageFullPath);
    Log.d(TAG, "Create StorageObject to be inserted.");

    Storage.Objects.Insert insert = storage.objects().insert(googleStorage.getBucketName(), storageObject, mediaContent);
    Log.d(TAG, "Create insert request with StorageObject and InputStreamContent.");

    insert.execute();
    Log.d(TAG, "Executed upload.");

    return true;
}
Project: jdbox    File: DriveAdapter.java
public File updateFileContent(File file, InputStream content) throws IOException {

    if (safe && file.getEtag() == null)
        throw new AssertionError("file.etag must not be null");

    logger.debug("updating content {}", file);

    Drive.Files.Update request = drive.files().update(
            file.getId(), new com.google.api.services.drive.model.File(), new InputStreamContent(null, content));
    request.getMediaHttpUploader().setDirectUploadEnabled(true);

    if (safe)
        request.getRequestHeaders().setIfMatch(file.getEtag());

    return new File(request.execute());
}
Project: BucketSyncer    File: GCS_MockTest.java
private void testSimpleCopyInternal(String key, String[] args, List<S3ObjectSummary> objectSummaries) throws Exception {
    main = new MirrorMain(args);
    main.parseArguments();
    main.setSourceClient(s3);
    main.setDestClient(gcs);
    MirrorOptions options = main.getOptions();
    MirrorContext context = new MirrorContext(options);
    main.setContext(context);
    MirrorMaster master = new MirrorMaster(s3, gcs, context);

    GoogleJsonResponseException e = PowerMockito.mock(GoogleJsonResponseException.class);
    PowerMockito.when(e.getStatusCode()).thenReturn(404);


    main.setMaster(master);

    PowerMockito.when(s3.listObjects(Mockito.any(ListObjectsRequest.class))).thenReturn(listing);
    PowerMockito.when(listing.getObjectSummaries()).thenReturn(objectSummaries);

    Storage.Objects objects = PowerMockito.mock(Storage.Objects.class);
    Storage.Objects.Get getObject = PowerMockito.mock(Storage.Objects.Get.class);
    PowerMockito.when(gcs.objects()).thenReturn(objects);
    PowerMockito.when(gcs.objects().get(DESTINATION, key)).thenReturn(getObject);
    PowerMockito.when(getObject.execute()).thenThrow(e);

    //mock object from S3
    S3Object s3Object = new S3Object();
    s3Object.setKey(key);
    String content = "S3Object_Content";
    InputStream inputStream = new ByteArrayInputStream(content.getBytes());
    s3Object.setObjectContent(inputStream);
    ObjectMetadata objectMetadata = new ObjectMetadata();
    objectMetadata.setContentType("text");
    objectMetadata.setContentLength(10);
    objectMetadata.setHeader(Headers.ETAG, "etag");

    s3Object.setObjectMetadata(objectMetadata);
    s3Object.setBucketName(SOURCE);

    //return ACL
    PowerMockito.when(s3.getObjectAcl(SOURCE, key)).thenReturn(objectAcl);

    PowerMockito.when(s3.getObjectMetadata(SOURCE, key)).thenReturn(objectMetadata);

    PowerMockito.when(s3.getObject(any(GetObjectRequest.class))).thenReturn(s3Object);


    Storage.Objects.Insert insertObject = PowerMockito.mock(Storage.Objects.Insert.class);
    PowerMockito.when(gcs.objects().insert(any(String.class), any(StorageObject.class), any(InputStreamContent.class))).thenReturn(insertObject);

    MediaHttpUploader mediaHttpUploader = PowerMockito.mock(MediaHttpUploader.class);
    PowerMockito.when(insertObject.getMediaHttpUploader()).thenReturn(mediaHttpUploader);
    PowerMockito.when(mediaHttpUploader.setProgressListener(any(MediaHttpUploaderProgressListener.class))).thenReturn(mediaHttpUploader);
    PowerMockito.when(mediaHttpUploader.setDisableGZipContent(any(boolean.class))).thenReturn(mediaHttpUploader);

    main.init();
    main.run();

    //number of copied files
    assertEquals(1, main.getContext().getStats().objectsCopied.get());

    //size of total copied files
    assertEquals(10, main.getContext().getStats().bytesCopied.get());
}
Project: BucketSyncer    File: S32GCSTestFile.java
public static S32GCSTestFile create(String key, Storage client, List<StorageAsset> stuffToCleanup, Copy copy, Clean clean) throws Exception {
    S32GCSTestFile s32GCSTestFile = new S32GCSTestFile();
    Storage.Objects.Insert insertObject = null;
    StorageObject objectMetadata = null;
    InputStream inputStream = new FileInputStream(s32GCSTestFile.file);
    Path source = Paths.get(s32GCSTestFile.file.getPath());
    String type = Files.probeContentType(source);
    InputStreamContent mediaContent = new InputStreamContent(type, inputStream);
    switch (clean) {
        case SOURCE:
            stuffToCleanup.add(new StorageAsset(S32GCSMirrorTest.SOURCE, key));
            break;
        case DEST:
            stuffToCleanup.add(new StorageAsset(S32GCSMirrorTest.DESTINATION, key));
            break;
        case SOURCE_AND_DEST:
            stuffToCleanup.add(new StorageAsset(S32GCSMirrorTest.SOURCE, key));
            stuffToCleanup.add(new StorageAsset(S32GCSMirrorTest.DESTINATION, key));
            break;
    }
    switch (copy) {
        case SOURCE:
            insertObject = client.objects().insert(S32GCSMirrorTest.SOURCE, objectMetadata, mediaContent);
            insertObject.setName(key);
            insertObject.execute();
            break;
        case DEST:
            insertObject = client.objects().insert(S32GCSMirrorTest.DESTINATION, objectMetadata, mediaContent);
            insertObject.setName(key);
            insertObject.execute();
            break;
        case SOURCE_AND_DEST:
            insertObject = client.objects().insert(S32GCSMirrorTest.SOURCE, objectMetadata, mediaContent);
            insertObject.setName(key);
            insertObject.execute();
            insertObject = client.objects().insert(S32GCSMirrorTest.DESTINATION, objectMetadata, mediaContent);
            insertObject.setName(key);
            insertObject.execute();
            break;
    }
    return s32GCSTestFile;
}
Project: cloudsync    File: RemoteGoogleDriveConnector.java
@Override
public void upload(final Handler handler, final Item item) throws CloudsyncException, FileIOException
{
    initService(handler);

    String title = handler.getLocalProcessedTitle(item);
    File parentDriveItem = null;
    File driveItem;
    int retryCount = 0;
    do
    {
        try
        {
            refreshCredential();
            parentDriveItem = _getDriveItem(item.getParent());
            final ParentReference parentReference = new ParentReference();
            parentReference.setId(parentDriveItem.getId());
            driveItem = new File();
            driveItem.setTitle(title);
            driveItem.setParents(Collections.singletonList(parentReference));
            final LocalStreamData data = _prepareDriveItem(driveItem, item, handler, true);
            if (data == null)
            {
                driveItem = service.files().insert(driveItem).execute();
            }
            else
            {
                final InputStreamContent params = new InputStreamContent(FILE, data.getStream());
                params.setLength(data.getLength());
                Insert inserter = service.files().insert(driveItem, params);
                MediaHttpUploader uploader = inserter.getMediaHttpUploader();
                prepareUploader(uploader, data.getLength());
                driveItem = inserter.execute();
            }
            if (driveItem == null)
            {
                throw new CloudsyncException("Couldn't create item '" + item.getPath() + "'");
            }
            _addToCache(driveItem, null);
            item.setRemoteIdentifier(driveItem.getId());
            return;
        }
        catch (final IOException e)
        {
            if (parentDriveItem != null)
            {
                for (int i = 0; i < MIN_SEARCH_RETRIES; i++)
                {
                    driveItem = _searchDriveItem(item.getParent(), title);
                    if (driveItem != null)
                    {
                        LOGGER.log(Level.WARNING, "Google Drive IOException: " + getExceptionMessage(e) + " - found partially uploaded item - try to update");

                        item.setRemoteIdentifier(driveItem.getId());
                        update(handler, item, true);
                        return;
                    }
                    LOGGER.log(Level.WARNING, "Google Drive IOException: " + getExceptionMessage(e) + " - item not uploaded - retry " + (i + 1) + "/" + MIN_SEARCH_RETRIES + " - wait "
                            + MIN_SEARCH_BREAK + " ms");
                    sleep(MIN_SEARCH_BREAK);
                }
            }
            retryCount = validateException("remote upload", item, e, retryCount);
            if( retryCount == -1 ) // ignore a failing item (workaround for now)
                return;
        }
    }
    while (true);
}