/**
 * Tests the write method of BigQueryRecordWriter without writing anything but throwing a 409
 * conflict from the job-insertion. Because the error extractor classifies the failure as
 * "item already exists", close() must swallow it and complete normally.
 */
@Test
public void testConflictExceptionOnCreate() throws IOException, GeneralSecurityException {
  // Stub the insert to fail with a 409 and classify it as a pre-existing-item conflict.
  IOException fakeConflictException = new IOException("fake 409 conflict");
  when(mockInsert.execute())
      .thenThrow(fakeConflictException);
  when(mockErrorExtractor.itemAlreadyExists(any(IOException.class)))
      .thenReturn(true);

  initializeRecordWriter();

  // Close the RecordWriter; the conflict is treated as benign, so no exception propagates.
  recordWriter.close(mockContext);

  // Check that the proper calls were sent to the BigQuery.
  verify(mockFactory).getBigQueryHelper(any(Configuration.class));
  verify(mockBigQuery, times(2)).jobs();
  verify(mockJobsGet, times(1)).execute();
  verify(mockBigQueryJobs, times(1)).get(eq(jobProjectId), eq(jobReference.getJobId()));
  verify(mockBigQueryJobs).insert(
      eq(jobProjectId), eq(getExpectedJob()), any(AbstractInputStreamContent.class));

  assertTrue(executorService.isShutdown());
}
/**
 * Tests the write method of BigQueryRecordWriter without writing anything but throwing an
 * unhandled exception from the job-insertion; the stored exception must surface as the cause
 * of the IOException thrown by close().
 */
@Test
public void testUnhandledExceptionOnCreate() throws IOException, GeneralSecurityException {
  // Stub the insert to fail, and make the extractor NOT classify it as a benign conflict.
  IOException fakeUnhandledException = new IOException("fake unhandled exception");
  when(mockInsert.execute())
      .thenThrow(fakeUnhandledException);
  when(mockErrorExtractor.itemAlreadyExists(any(IOException.class)))
      .thenReturn(false);

  initializeRecordWriter();

  // Close the RecordWriter; the stored exception finally propagates out.
  try {
    recordWriter.close(mockContext);
    fail("Expected IOException on close, got no exception.");
  } catch (IOException ioe) {
    assertEquals(fakeUnhandledException, ioe.getCause());
  }

  // Check that the proper calls were sent to the BigQuery.
  verify(mockFactory).getBigQueryHelper(any(Configuration.class));
  verify(mockBigQuery, times(1)).jobs();
  verify(mockBigQueryJobs).insert(
      eq(jobProjectId), eq(getExpectedJob()), any(AbstractInputStreamContent.class));

  assertTrue(executorService.isShutdown());
}
/**
 * Test successful operation of GoogleCloudStorage.createEmptyObject(1): the insert must carry
 * the requested object name and a zero-length payload, with gzip disabled and direct upload
 * enabled.
 */
@Test
public void testCreateEmptyObject() throws IOException {
  // Wire the mock Storage API so the insert request can be issued and later captured.
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(
          eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);

  gcs.createEmptyObject(new StorageResourceId(BUCKET_NAME, OBJECT_NAME));

  verify(mockStorage).objects();

  // Capture the metadata and payload actually passed to insert so they can be inspected.
  ArgumentCaptor<StorageObject> storageObjectCaptor =
      ArgumentCaptor.forClass(StorageObject.class);
  ArgumentCaptor<AbstractInputStreamContent> inputStreamCaptor =
      ArgumentCaptor.forClass(AbstractInputStreamContent.class);
  verify(mockStorageObjects).insert(
      eq(BUCKET_NAME), storageObjectCaptor.capture(), inputStreamCaptor.capture());

  verify(mockStorageObjectsInsert).setDisableGZipContent(eq(true));
  verify(mockClientRequestHelper).setDirectUploadEnabled(eq(mockStorageObjectsInsert), eq(true));
  verify(mockStorageObjectsInsert).execute();

  // An "empty object" must have the requested name and no content bytes.
  assertEquals(OBJECT_NAME, storageObjectCaptor.getValue().getName());
  assertEquals(0, inputStreamCaptor.getValue().getLength());
}
/**
 * Tests that createEmptyObjects() ignores a rate-limited insert failure when a follow-up GET
 * confirms the object actually exists with the expected (empty) metadata.
 */
@Test
public void testIgnoreExceptionsOnCreateEmptyObjects() throws IOException {
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(
          eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  // Insert fails, and the extractor classifies the failure as a rate-limit error.
  when(mockStorageObjectsInsert.execute())
      .thenThrow(new IOException("rateLimitExceeded"));
  when(mockErrorExtractor.rateLimited(any(IOException.class))).thenReturn(true);
  // The recovery path re-fetches the object to confirm it exists.
  when(mockStorageObjects.get(eq(BUCKET_NAME), eq(OBJECT_NAME)))
      .thenReturn(mockStorageObjectsGet);
  when(mockStorageObjectsGet.execute())
      .thenReturn(getStorageObjectForEmptyObjectWithMetadata(EMPTY_METADATA));

  gcs.createEmptyObjects(ImmutableList.of(new StorageResourceId(BUCKET_NAME, OBJECT_NAME)));

  // objects() is hit twice: once for the insert, once for the confirming get.
  verify(mockStorage, times(2)).objects();
  verify(mockStorageObjects).insert(
      eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
  verify(mockStorageObjectsInsert).setDisableGZipContent(eq(true));
  verify(mockClientRequestHelper).setDirectUploadEnabled(eq(mockStorageObjectsInsert), eq(true));
  verify(mockStorageObjectsInsert).execute();
  verify(mockErrorExtractor).rateLimited(any(IOException.class));
  verify(mockStorageObjects).get(eq(BUCKET_NAME), eq(OBJECT_NAME));
  verify(mockStorageObjectsGet).execute();
}
/**
 * Tests that createEmptyObjects() rethrows an insert failure that the error extractor does not
 * classify as rate-limited or as an internal server error.
 */
@Test
public void testIgnoreExceptionsOnCreateEmptyObjectsNonIgnorableException() throws IOException {
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(
          eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  // A "forbidden" failure is neither rate-limited nor a server error, so it is not ignorable.
  when(mockStorageObjectsInsert.execute())
      .thenThrow(new IOException("forbidden"));
  when(mockErrorExtractor.rateLimited(any(IOException.class))).thenReturn(false);
  when(mockErrorExtractor.isInternalServerError(any(IOException.class))).thenReturn(false);

  expectedException.expect(IOException.class);

  try {
    gcs.createEmptyObjects(ImmutableList.of(new StorageResourceId(BUCKET_NAME, OBJECT_NAME)));
  } finally {
    // Verifications run in a finally block because the call above is expected to throw.
    verify(mockStorage).objects();
    verify(mockStorageObjects).insert(
        eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
    verify(mockStorageObjectsInsert).setDisableGZipContent(eq(true));
    verify(mockClientRequestHelper).setDirectUploadEnabled(
        eq(mockStorageObjectsInsert), eq(true));
    verify(mockStorageObjectsInsert).execute();
    verify(mockErrorExtractor).rateLimited(any(IOException.class));
    verify(mockErrorExtractor).isInternalServerError(any(IOException.class));
  }
}
/** * The real Mirror API supports Multipart-bodies to attach images to cards, Emulator not, we will transformt the * card in one HTML TimeLineItem. https://github.com/Scarygami/mirror-api - REAMDME.md * * @param content The initial timeline item. * @param mediaContent The attachement image. * @return The initial timeline item modified to look like one HTML card with the atachement. * @throws IOException * @throws FileNotFoundException */ public static TimelineItem createTimeLineItemWithAtachement(TimelineItem content, AbstractInputStreamContent mediaContent) throws IOException, FileNotFoundException { // We are going to transform the card in one HTML card. if (content.getHtml() != null && content.getHtml().isEmpty()) { // If the card already contains html then we do nothing LOG.log(Level.WARNING, "Emulation limitation : Images are transformes in HTML card, you must choose, or HTML or Attachement. your attachement will be ignored. "); } else { // Store the image String attachementURL = storeAttachement(mediaContent); // Transform card in one HTML card String cardText = content.getText() != null ? content.getText() : ""; String html = "<article class=\"photo\"> <img src=" + attachementURL + " width=\"100%\" height=\"100%\"> <div class=\"photo-overlay\"></div> <section> <p class=\"text-auto-size\">" + cardText + "</p> </section></article>"; content.setHtml(html); } return content; }
/** * Transform attachement in one local URL * * @param mediaContent the attachement to transform into URL * @return The attachement return URL * @throws IOException * @throws FileNotFoundException */ public static String storeAttachement(com.google.api.client.http.AbstractInputStreamContent mediaContent) throws IOException, FileNotFoundException { // Write the file content File file = File.createTempFile("img", ".tmp"); FileOutputStream os = new FileOutputStream(file); mediaContent.writeTo(os); os.close(); String filename = file.getName(); file.deleteOnExit(); // Write the media type if (mediaContent.getType() != null) { File fileContentType = new File(file.getPath() + ".contenttype"); FileWriter fileWriter = new FileWriter(fileContentType, false); BufferedWriter bw = new BufferedWriter(fileWriter); bw.write(mediaContent.getType()); bw.close(); fileContentType.deleteOnExit(); } String imgSrc = "\"" + glasswareHost + "/attachements/" + filename + "\""; return imgSrc; }
/**
 * Snapshots the current user-profile data into a new timestamped Fusion Table: creates the
 * table, bulk-imports one row per known profile, then files the resulting Drive file into the
 * target folder.
 */
public void snapshotUserTable() throws IOException {
  DateTimeFormatter formatter = ISODateTimeFormat.dateTime();
  String snapshotTime = formatter.print(DateTime.now());

  // Describe the new table: one row per user, with a type column and a location column.
  Table userTable = new Table()
      .setName("User Profile Data " + snapshotTime)
      .setDescription("Locations of patients and volunteers as of " + snapshotTime)
      .setIsExportable(true)
      .setColumns(Arrays.asList(
          new Column().setName("User Type").setType("STRING"),
          new Column().setName("Location").setType("LOCATION")));
  userTable = fusiontables.table().insert(userTable).execute();

  // Serialize every known profile into the writer's import payload.
  FusionTableContentWriter writer = new FusionTableContentWriter(userTable);
  for (UserProfile profile : UserProfile.listAll()) {
    writer.writeRecord(profile.getType().name(), profile.getLocation());
  }
  AbstractInputStreamContent rows = writer.getInputStreamContent();
  fusiontables.table().importRows(userTable.getTableId(), rows).execute();

  // Locate the table's Drive file by its title and move it into the shared folder.
  File snapshotFile = findFileByTitle(userTable.getName());
  addToFolder(snapshotFile);
}
/**
 * Exports the given ride records into the supplied Fusion Table, writing one row per ride and
 * then bulk-importing all rows in a single importRows call.
 */
private void exportRideRecords(Table rideTable, Collection<RideRecord> rideRecords)
    throws IOException {
  FusionTableContentWriter writer = new FusionTableContentWriter(rideTable);
  for (RideRecord ride : rideRecords) {
    // One row per ride: locations, times, addresses, and trip metrics.
    writer.writeRecord(
        ride.getVolunteerLocation(),
        ride.getDepartureTime(),
        ride.getPatientLocation(),
        ride.getPickupTime(),
        ride.getAppointmentAddress(),
        ride.getAppointmentLocation(),
        ride.getAppointmentTime(),
        ride.getDistanceMiles(),
        ride.getTripMinutes());
  }
  AbstractInputStreamContent rows = writer.getInputStreamContent();
  fusiontables.table().importRows(rideTable.getTableId(), rows).execute();
}
/**
 * Launch a job, but do not wait for it to complete.
 *
 * <p>The job must not already have a status (i.e. it has not been launched before).
 *
 * @param job the job configuration to insert
 * @param data optional media payload uploaded alongside the job, or null for none
 * @return the inserted Job as returned by the BigQuery API
 * @throws BigqueryJobFailureException if the insert call fails with an IOException
 */
private Job launchJob(Job job, @Nullable AbstractInputStreamContent data) {
  verify(job.getStatus() == null);
  try {
    // With a payload we use the media-upload insert overload; without, the plain one.
    if (data != null) {
      return bigquery.jobs().insert(getProjectId(), job, data).execute();
    }
    return bigquery.jobs().insert(getProjectId(), job).execute();
  } catch (IOException e) {
    throw BigqueryJobFailureException.create(e);
  }
}
/**
 * Runs job and returns a future that yields {@code result} when {@code job} is completed.
 *
 * @param job the job to run to completion
 * @param result the value the returned future resolves to once the job finishes
 * @param data optional upload payload forwarded to runJob, or null
 */
private <T> ListenableFuture<T> runJobToCompletion(
    final Job job, final T result, @Nullable final AbstractInputStreamContent data) {
  return service.submit(
      () -> {
        // Block inside the executor until the job completes, then surface the caller's result.
        runJob(job, data);
        return result;
      });
}
/**
 * Uploads a resource to Google Drive inside the given destination folder, optionally
 * overwriting an existing file of the same name.
 *
 * <p>The content source is either a local file path or a byte array, checked in that order.
 * NOTE(review): if neither source is set, the method returns the un-created File metadata
 * (no id, never sent to Drive) — confirm whether callers guard against that case.
 *
 * @param parameters destination folder id, resource name, overwrite flag, and content source
 * @return the created Drive file with its id, parents, and name populated
 * @throws IOException if more than one file with the name exists, if the file exists and
 *     overwrite is disabled, or if any Drive call fails
 */
public File putResource(GoogleDrivePutParameters parameters) throws IOException {
  String folderId = parameters.getDestinationFolderId();
  File putFile = new File();
  putFile.setParents(Collections.singletonList(folderId));

  // Look for a non-trashed, non-folder file with the same name in the destination folder.
  Files.List fileRequest = drive.files().list()
      .setQ(format(QUERY_NOTTRASHED_NAME_NOTMIME_INPARENTS, parameters.getResourceName(),
          MIME_TYPE_FOLDER, folderId));
  LOG.debug("[putResource] `{}` Exists in `{}` ? with `{}`.", parameters.getResourceName(),
      parameters.getDestinationFolderId(), fileRequest.getQ());
  FileList existingFiles = fileRequest.execute();

  // More than one match is ambiguous: refuse rather than guess which one to replace.
  if (existingFiles.getFiles().size() > 1) {
    throw new IOException(
        messages.getMessage("error.file.more.than.one", parameters.getResourceName()));
  }
  if (existingFiles.getFiles().size() == 1) {
    if (!parameters.isOverwriteIfExist()) {
      throw new IOException(
          messages.getMessage("error.file.already.exist", parameters.getResourceName()));
    }
    // Overwrite is implemented as delete-then-create.
    LOG.debug("[putResource] {} will be overwritten...", parameters.getResourceName());
    drive.files().delete(existingFiles.getFiles().get(0).getId()).execute();
  }

  putFile.setName(parameters.getResourceName());
  // Fields we ask Drive to return on the created file.
  String metadata = "id,parents,name";
  if (!StringUtils.isEmpty(parameters.getFromLocalFilePath())) {
    // Reading content from local fileName
    FileContent fContent = new FileContent(null, new java.io.File(parameters.getFromLocalFilePath()));
    putFile = drive.files().create(putFile, fContent).setFields(metadata).execute();
  } else if (parameters.getFromBytes() != null) {
    // In-memory content provided directly as bytes.
    AbstractInputStreamContent content = new ByteArrayContent(null, parameters.getFromBytes());
    putFile = drive.files().create(putFile, content).setFields(metadata).execute();
  }
  return putFile;
}
/**
 * Prepares a GoogleDrivePutRuntime test: configures the put properties for a local-file
 * upload, spies the runtime so the real Drive service is replaced by the mock, and stubs the
 * Drive list/create chains to report "no existing file" and return a canned created file.
 */
@Before
public void setUp() throws Exception {
  super.setUp();
  properties = new GoogleDrivePutProperties("test");
  properties.connection.setupProperties();
  properties.connection.setupLayout();
  properties.schemaMain.setupProperties();
  properties.schemaMain.setupLayout();
  properties.setupProperties();
  properties.setupLayout();
  properties = (GoogleDrivePutProperties) setupConnectionWithAccessToken(properties);
  properties.uploadMode.setValue(UploadMode.UPLOAD_LOCAL_FILE);
  properties.fileName.setValue(FILE_PUT_NAME);
  // NOTE(review): hard-coded developer-machine path; presumably never read because the Drive
  // calls are mocked — confirm before relying on it.
  properties.localFilePath.setValue("c:/Users/undx/brasil.jpg");
  properties.overwrite.setValue(true);
  properties.destinationFolder.setValue("root");
  testRuntime = spy(GoogleDrivePutRuntime.class);
  doReturn(drive).when(testRuntime).getDriveService();
  // Any name lookup finds no pre-existing file...
  when(drive.files().list().setQ(anyString()).execute()).thenReturn(emptyFileList);
  // ...and any create() returns this canned file with a known id and parent.
  File putFile = new File();
  putFile.setId(PUT_FILE_ID);
  putFile.setParents(Collections.singletonList(PUT_FILE_PARENT_ID));
  when(drive.files().create(any(File.class), any(AbstractInputStreamContent.class)).setFields(anyString()).execute())
      .thenReturn(putFile);
}
/**
 * Prepares a GoogleDrivePutWriter test: configures the put properties for a local-file
 * upload, builds the sink / write operation / writer under test, and stubs the Drive
 * list/create chains to report "no existing file" and return a canned created file.
 */
@Before
public void setUp() throws Exception {
  super.setUp();
  properties = new GoogleDrivePutProperties("test");
  properties.connection.setupProperties();
  properties.connection.setupLayout();
  properties.schemaMain.setupProperties();
  properties.schemaMain.setupLayout();
  properties.setupProperties();
  properties.setupLayout();
  properties = (GoogleDrivePutProperties) setupConnectionWithAccessToken(properties);
  properties.uploadMode.setValue(UploadMode.UPLOAD_LOCAL_FILE);
  properties.fileName.setValue("GoogleDrive Put test BR");
  // NOTE(review): hard-coded developer-machine path; presumably never read because the Drive
  // calls are mocked — confirm before relying on it.
  properties.localFilePath.setValue("c:/Users/undx/brasil.jpg");
  properties.overwrite.setValue(true);
  properties.destinationFolder.setValue("root");
  sink.initialize(container, properties);
  wop = (GoogleDriveWriteOperation) sink.createWriteOperation();
  writer = new GoogleDrivePutWriter(wop, properties, container);
  // Any name lookup finds no pre-existing file...
  when(drive.files().list().setQ(anyString()).execute()).thenReturn(emptyFileList);
  // ...and any create() returns this canned file with a known id and parent.
  File putFile = new File();
  putFile.setId(PUT_FILE_ID);
  putFile.setParents(Collections.singletonList(PUT_FILE_PARENT_ID));
  when(drive.files().create(any(File.class), any(AbstractInputStreamContent.class)).setFields(anyString()).execute())
      .thenReturn(putFile);
}
/**
 * Prepares a GoogleDrive put test using a classpath resource as the local upload source, and
 * stubs the Drive list/create chains to report "no existing file" and return a canned created
 * file.
 */
@Before
public void setUp() throws Exception {
  super.setUp();
  properties = new GoogleDrivePutProperties("test");
  properties.connection.setupProperties();
  properties.connection.setupLayout();
  properties.schemaMain.setupProperties();
  properties.schemaMain.setupLayout();
  properties.setupProperties();
  properties.setupLayout();
  properties = (GoogleDrivePutProperties) setupConnectionWithAccessToken(properties);
  properties.uploadMode.setValue(UploadMode.UPLOAD_LOCAL_FILE);
  properties.fileName.setValue(FILE_PUT_NAME);
  // Use a file guaranteed to be on the test classpath as the upload source.
  properties.localFilePath
      .setValue(Paths.get(getClass().getClassLoader().getResource("service_account.json").toURI()).toString());
  properties.overwrite.setValue(true);
  properties.destinationFolder.setValue("root");
  // Any name lookup finds no pre-existing file...
  when(drive.files().list().setQ(anyString()).execute()).thenReturn(emptyFileList);
  // ...and any create() returns this canned file with a known id and parent.
  File putFile = new File();
  putFile.setId(PUT_FILE_ID);
  putFile.setParents(Collections.singletonList(PUT_FILE_PARENT_ID));
  when(drive.files().create(any(File.class), any(AbstractInputStreamContent.class)).setFields(anyString()).execute())
      .thenReturn(putFile);
}
/**
 * Tests that createEmptyObject() with metadata-requiring options ignores a rate-limited insert
 * failure when the confirming GET returns an object whose metadata matches the requested
 * options exactly.
 */
@Test
public void testIgnoreExceptionsOnCreateEmptyObject() throws IOException {
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(
          eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  // Insert fails with a failure classified as rate-limited.
  when(mockStorageObjectsInsert.execute())
      .thenThrow(new IOException("rateLimitExceeded"));
  when(mockErrorExtractor.rateLimited(any(IOException.class))).thenReturn(true);
  // The confirming re-fetch returns metadata identical to what the caller requests below.
  when(mockStorageObjects.get(eq(BUCKET_NAME), eq(OBJECT_NAME)))
      .thenReturn(mockStorageObjectsGet);
  when(mockStorageObjectsGet.execute())
      .thenReturn(getStorageObjectForEmptyObjectWithMetadata(
          ImmutableMap.<String, byte[]>of("foo", new byte[0])));

  gcs.createEmptyObject(
      new StorageResourceId(BUCKET_NAME, OBJECT_NAME),
      new CreateObjectOptions(true, ImmutableMap.<String, byte[]>of("foo", new byte[0])));

  // objects() is hit twice: once for the insert, once for the confirming get.
  verify(mockStorage, times(2)).objects();
  verify(mockStorageObjects).insert(
      eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
  verify(mockStorageObjectsInsert).setDisableGZipContent(eq(true));
  verify(mockClientRequestHelper).setDirectUploadEnabled(eq(mockStorageObjectsInsert), eq(true));
  verify(mockStorageObjectsInsert).execute();
  verify(mockErrorExtractor).rateLimited(any(IOException.class));
  verify(mockStorageObjects).get(eq(BUCKET_NAME), eq(OBJECT_NAME));
  verify(mockStorageObjectsGet).execute();
}
/**
 * Tests that createEmptyObject() rethrows the original rate-limit IOException when the
 * confirming GET returns an object whose metadata does NOT match the requested options
 * (fetched EMPTY_METADATA vs. requested {"foo": []}).
 */
@Test
public void testIgnoreExceptionsOnCreateEmptyObjectMismatchMetadata() throws IOException {
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(
          eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  when(mockStorageObjectsInsert.execute())
      .thenThrow(new IOException("rateLimitExceeded"));
  when(mockErrorExtractor.rateLimited(any(IOException.class))).thenReturn(true);
  // The re-fetched object has no metadata, which will not satisfy the requested options.
  when(mockStorageObjects.get(eq(BUCKET_NAME), eq(OBJECT_NAME)))
      .thenReturn(mockStorageObjectsGet);
  when(mockStorageObjectsGet.execute())
      .thenReturn(getStorageObjectForEmptyObjectWithMetadata(EMPTY_METADATA));

  // The original insert failure must surface, not a mismatch-specific error.
  expectedException.expect(IOException.class);
  expectedException.expectMessage("rateLimitExceeded");

  try {
    gcs.createEmptyObject(
        new StorageResourceId(BUCKET_NAME, OBJECT_NAME),
        new CreateObjectOptions(true, ImmutableMap.<String, byte[]>of("foo", new byte[0])));
  } finally {
    // Verifications run in a finally block because the call above is expected to throw.
    verify(mockStorage, times(2)).objects();
    verify(mockStorageObjects).insert(
        eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
    verify(mockStorageObjectsInsert).setDisableGZipContent(eq(true));
    verify(mockClientRequestHelper).setDirectUploadEnabled(
        eq(mockStorageObjectsInsert), eq(true));
    verify(mockStorageObjectsInsert).execute();
    verify(mockErrorExtractor).rateLimited(any(IOException.class));
    verify(mockStorageObjects).get(eq(BUCKET_NAME), eq(OBJECT_NAME));
    verify(mockStorageObjectsGet).execute();
  }
}
/**
 * Tests that a metadata mismatch on the confirming GET is irrelevant when the caller used the
 * default options (no required metadata): the rate-limited insert is still forgiven.
 */
@Test
public void testIgnoreExceptionsOnCreateEmptyObjectMismatchMetadataButOptionsHasNoMetadata()
    throws IOException {
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(
          eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  when(mockStorageObjectsInsert.execute())
      .thenThrow(new IOException("rateLimitExceeded"));
  when(mockErrorExtractor.rateLimited(any(IOException.class))).thenReturn(true);
  when(mockStorageObjects.get(eq(BUCKET_NAME), eq(OBJECT_NAME)))
      .thenReturn(mockStorageObjectsGet);
  when(mockStorageObjectsGet.execute())
      .thenReturn(getStorageObjectForEmptyObjectWithMetadata(
          ImmutableMap.<String, byte[]>of("foo", new byte[0])));

  // The fetch will "mismatch" with more metadata than our default EMPTY_METADATA used in the
  // default CreateObjectOptions, but we won't care because the metadata-check requirement
  // will be false, so the call will complete successfully.
  gcs.createEmptyObject(new StorageResourceId(BUCKET_NAME, OBJECT_NAME));

  // objects() is hit twice: once for the insert, once for the confirming get.
  verify(mockStorage, times(2)).objects();
  verify(mockStorageObjects).insert(
      eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
  verify(mockStorageObjectsInsert).setDisableGZipContent(eq(true));
  verify(mockClientRequestHelper).setDirectUploadEnabled(eq(mockStorageObjectsInsert), eq(true));
  verify(mockStorageObjectsInsert).execute();
  verify(mockErrorExtractor).rateLimited(any(IOException.class));
  verify(mockStorageObjects).get(eq(BUCKET_NAME), eq(OBJECT_NAME));
  verify(mockStorageObjectsGet).execute();
}
/**
 * Tests that createEmptyObjects() surfaces a combined "Multiple IOExceptions" failure when the
 * insert is rate-limited AND the confirming re-fetch itself throws.
 */
@Test
public void testIgnoreExceptionsOnCreateEmptyObjectsErrorOnRefetch() throws IOException {
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(
          eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  when(mockStorageObjectsInsert.execute())
      .thenThrow(new IOException("rateLimitExceeded"));
  when(mockErrorExtractor.rateLimited(any(IOException.class))).thenReturn(true);
  when(mockStorageObjects.get(eq(BUCKET_NAME), eq(OBJECT_NAME)))
      .thenReturn(mockStorageObjectsGet);
  // The recovery re-fetch blows up too, so the forgiveness path cannot complete.
  when(mockStorageObjectsGet.execute())
      .thenThrow(new RuntimeException("error while fetching"));

  expectedException.expect(IOException.class);
  expectedException.expectMessage("Multiple IOExceptions");

  try {
    gcs.createEmptyObjects(ImmutableList.of(new StorageResourceId(BUCKET_NAME, OBJECT_NAME)));
  } finally {
    // Verifications run in a finally block because the call above is expected to throw.
    verify(mockStorage, times(2)).objects();
    verify(mockStorageObjects).insert(
        eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
    verify(mockStorageObjectsInsert).setDisableGZipContent(eq(true));
    verify(mockClientRequestHelper).setDirectUploadEnabled(
        eq(mockStorageObjectsInsert), eq(true));
    verify(mockStorageObjectsInsert).execute();
    verify(mockErrorExtractor).rateLimited(any(IOException.class));
    verify(mockStorageObjects).get(eq(BUCKET_NAME), eq(OBJECT_NAME));
    verify(mockStorageObjectsGet).execute();
  }
}
/**
 * Tests that the rate-limited createEmptyObjects() recovery path retries the confirming GET
 * (sleeping between attempts) while the object is still reported not-found, and completes
 * successfully once the GET finally returns the object.
 */
@Test
public void testIgnoreExceptionsOnCreateEmptyObjectsWithMultipleRetries()
    throws IOException, InterruptedException {
  IOException notFoundException = new IOException("NotFound");
  IOException rateLimitException = new IOException("RateLimited");
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(
          eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  when(mockStorageObjectsInsert.execute())
      .thenThrow(rateLimitException);
  when(mockErrorExtractor.rateLimited(eq(rateLimitException))).thenReturn(true);
  when(mockErrorExtractor.itemNotFound(eq(notFoundException))).thenReturn(true);
  when(mockStorageObjects.get(eq(BUCKET_NAME), eq(OBJECT_NAME)))
      .thenReturn(mockStorageObjectsGet);
  // First two GETs report not-found (object not yet visible); the third succeeds.
  when(mockStorageObjectsGet.execute())
      .thenThrow(notFoundException)
      .thenThrow(notFoundException)
      .thenReturn(getStorageObjectForEmptyObjectWithMetadata(EMPTY_METADATA));

  gcs.createEmptyObjects(ImmutableList.of(new StorageResourceId(BUCKET_NAME, OBJECT_NAME)));

  verify(mockStorage, times(4)).objects(); // 1 insert, 3 gets
  verify(mockStorageObjects).insert(
      eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
  verify(mockStorageObjectsInsert).setDisableGZipContent(eq(true));
  verify(mockClientRequestHelper).setDirectUploadEnabled(eq(mockStorageObjectsInsert), eq(true));
  verify(mockStorageObjectsInsert).execute();
  verify(mockErrorExtractor).rateLimited(any(IOException.class));
  verify(mockErrorExtractor, times(2)).itemNotFound(eq(notFoundException));
  verify(mockStorageObjects, times(3)).get(eq(BUCKET_NAME), eq(OBJECT_NAME));
  verify(mockStorageObjectsGet, times(3)).execute();
  // One sleep between each failed GET and its retry.
  verify(mockSleeper, times(2)).sleep(any(Long.class));
}
/**
 * Inserts a photo with its metadata into the given album feed using a multipart POST: an Atom
 * metadata part followed by the raw image content.
 *
 * @param photo the photo metadata entry to upload
 * @param albumFeedUrl the album feed to post into
 * @param content the raw image bytes
 * @return the server's response parsed as a PhotoEntry
 * @throws IOException if the request fails
 */
public PhotoEntry executeInsertPhotoEntryWithMetadata(
    PhotoEntry photo, PicasaUrl albumFeedUrl, AbstractInputStreamContent content)
    throws IOException {
  HttpRequest request = getRequestFactory().buildPostRequest(albumFeedUrl, null);
  // Part 1 is the Atom XML describing the photo; part 2 is the image payload itself.
  AtomContent metadataPart = AtomContent.forEntry(DICTIONARY, photo);
  MultipartContent multipart =
      new MultipartContent().setContentParts(Arrays.asList(metadataPart, content));
  request.setContent(multipart);
  request.getHeaders().setMimeVersion("1.0");
  return execute(request).parseAs(PhotoEntry.class);
}
private YouTube.Videos.Insert createVideoInsert(YouTube youtube, AbstractInputStreamContent mediaContent, String title, String description) throws IOException { // Add extra information to the video before uploading. Video videoObjectDefiningMetadata = new Video(); /** * Set the video to public, so it is available to everyone (what most people want). This is * actually the default, but I wanted you to see what it looked like in case you need to set * it to "unlisted" or "private" via API. */ VideoStatus status = new VideoStatus(); status.setPrivacyStatus("public"); videoObjectDefiningMetadata.setStatus(status); // We set a majority of the metadata with the VideoSnippet object. VideoSnippet snippet = new VideoSnippet(); snippet.setTitle(title); snippet.setDescription(description); // Set completed snippet to the video object. videoObjectDefiningMetadata.setSnippet(snippet); /** * The upload command includes: 1. Information we want returned after file is successfully * uploaded. 2. Metadata we want associated with the uploaded video. 3. Video file itself. */ YouTube.Videos.Insert videoInsert = youtube.videos() .insert("snippet,statistics,status", videoObjectDefiningMetadata, mediaContent); // Set the upload type and add event listener. MediaHttpUploader uploader = videoInsert.getMediaHttpUploader(); /** * Sets whether direct media upload is enabled or disabled. True = whole media content is * uploaded in a single request. False (default) = resumable media upload protocol to upload * in data chunks. */ uploader.setDirectUploadEnabled(false); return videoInsert; }
/**
 * Uploads the configured APK to the Play developer console, assigns it to the alpha track,
 * and commits the edit. All configuration comes from {@code ApplicationConfig}.
 */
public static void main(String[] args) {
  try {
    Preconditions.checkArgument(!Strings.isNullOrEmpty(ApplicationConfig.PACKAGE_NAME),
        "ApplicationConfig.PACKAGE_NAME cannot be null or empty!");

    // Create the API service.
    AndroidPublisher service = AndroidPublisherHelper.init(
        ApplicationConfig.APPLICATION_NAME, ApplicationConfig.SERVICE_ACCOUNT_EMAIL);
    final Edits edits = service.edits();

    // Create a new edit to make changes to your listing.
    Insert editRequest = edits.insert(ApplicationConfig.PACKAGE_NAME, null /* no content */);
    AppEdit edit = editRequest.execute();
    final String editId = edit.getId();
    log.info(String.format("Created edit with id: %s", editId));

    // Upload new apk to developer console. The classpath resource is staged to a temp file
    // because the upload API needs a java.io.File.
    // NOTE(review): whether getTempFile() closes the stream is not visible here — confirm.
    final InputStream is =
        BasicUploadApk.class.getResourceAsStream(ApplicationConfig.APK_FILE_PATH);
    File apkTmpFile = AndroidPublisherHelper.getTempFile(is, "apk");
    final AbstractInputStreamContent apkFile =
        new FileContent(AndroidPublisherHelper.MIME_TYPE_APK, apkTmpFile);
    Upload uploadRequest = edits
        .apks()
        .upload(ApplicationConfig.PACKAGE_NAME, editId, apkFile);
    Apk apk = uploadRequest.execute();
    log.info(String.format("Version code %d has been uploaded", apk.getVersionCode()));

    // Assign apk to alpha track.
    List<Integer> apkVersionCodes = new ArrayList<>();
    apkVersionCodes.add(apk.getVersionCode());
    Update updateTrackRequest = edits
        .tracks()
        .update(ApplicationConfig.PACKAGE_NAME,
            editId,
            TRACK_ALPHA,
            new Track().setVersionCodes(apkVersionCodes));
    Track updatedTrack = updateTrackRequest.execute();
    log.info(String.format("Track %s has been updated.", updatedTrack.getTrack()));

    // Commit changes for edit.
    Commit commitRequest = edits.commit(ApplicationConfig.PACKAGE_NAME, editId);
    AppEdit appEdit = commitRequest.execute();
    log.info(String.format("App edit with id %s has been committed", appEdit.getId()));
  } catch (IOException | GeneralSecurityException ex) {
    log.error("Exception was thrown while uploading apk to alpha track", ex);
  }
}
/**
 * Tests the write method of BigQueryRecordWriter for a single write: the bytes handed to the
 * insert's upload stream must be exactly one JSON record.
 */
@Test
public void testSingleWrite() throws IOException, GeneralSecurityException {
  final ArgumentCaptor<AbstractInputStreamContent> inputStreamCaptor =
      ArgumentCaptor.forClass(AbstractInputStreamContent.class);
  // Buffer for the uploaded bytes; the latch keeps the mock upload blocked until the writer
  // side has finished writing, so the stream read sees the complete payload.
  final byte[] readData = new byte[4096];
  final CountDownLatch waitTillWritesAreDoneLatch = new CountDownLatch(1);
  when(mockInsert.execute())
      .thenAnswer(new Answer<Job>() {
        @Override
        public Job answer(InvocationOnMock unused) throws Throwable {
          // We want to make sure we have a consistent update of the read data which is
          // why we synchronize on it.
          synchronized (readData) {
            waitTillWritesAreDoneLatch.await();
            inputStreamCaptor.getValue().getInputStream()
                .read(readData, 0, (int) recordWriter.getBytesWritten());
          }
          return jobReturn;
        }
      });
  initializeRecordWriter();

  // Write the key, value pair.
  callWrite(recordWriter, 1);
  verify(mockBigQueryJobs).insert(
      eq(jobProjectId), eq(getExpectedJob()), inputStreamCaptor.capture());

  // The writes are now done, we can return from the execute method.
  // This is an issue with how PipedInputStream functions since it checks
  // to see if the sending and receiving threads are alive.
  waitTillWritesAreDoneLatch.countDown();

  // Close the RecordWriter.
  recordWriter.close(mockContext);

  // Check that the proper calls were sent to the BigQuery.
  verify(mockFactory).getBigQueryHelper(any(Configuration.class));
  verify(mockBigQuery, times(2)).jobs();
  verify(mockJobsGet, times(1)).execute();
  verify(mockBigQueryJobs, times(1)).get(eq(jobProjectId), eq(jobReference.getJobId()));

  assertTrue(executorService.isShutdown());

  // We want to make sure we have a consistent view of the read data which is
  // why we synchronize on it.
  synchronized (readData) {
    String readDataString = new String(
        Arrays.copyOfRange(readData, 0, (int) recordWriter.getBytesWritten()),
        StandardCharsets.UTF_8);
    assertEquals("{\"Name\":\"test name\",\"Number\":\"123\"}\n", readDataString);
  }
}
/**
 * Tests the write method of BigQueryRecordWriter when nothing is written: the upload stream
 * must carry zero bytes and close() must still complete and shut down the executor.
 */
@Test
public void testNoWrites() throws IOException, GeneralSecurityException {
  final ArgumentCaptor<AbstractInputStreamContent> inputStreamCaptor =
      ArgumentCaptor.forClass(AbstractInputStreamContent.class);
  // Buffer for the uploaded bytes; the latch keeps the mock upload blocked until the writer
  // side is done (here immediately, since nothing is written).
  final byte[] readData = new byte[4096];
  final CountDownLatch waitTillWritesAreDoneLatch = new CountDownLatch(1);
  when(mockInsert.execute())
      .thenAnswer(new Answer<Job>() {
        @Override
        public Job answer(InvocationOnMock unused) throws Throwable {
          // We want to make sure we have a consistent update of the read data which is
          // why we synchronize on it.
          synchronized (readData) {
            waitTillWritesAreDoneLatch.await();
            inputStreamCaptor.getValue().getInputStream()
                .read(readData, 0, (int) recordWriter.getBytesWritten());
          }
          return jobReturn;
        }
      });
  initializeRecordWriter();

  // No callWrite(): the insert is issued as part of initialization alone.
  verify(mockBigQueryJobs).insert(
      eq(jobProjectId), eq(getExpectedJob()), inputStreamCaptor.capture());

  // The writes are now done, we can return from the execute method.
  // This is an issue with how PipedInputStream functions since it checks
  // to see if the sending and receiving threads are alive.
  waitTillWritesAreDoneLatch.countDown();

  // Close the RecordWriter.
  recordWriter.close(mockContext);

  // Check that the proper calls were sent to the BigQuery.
  verify(mockFactory).getBigQueryHelper(any(Configuration.class));
  verify(mockBigQuery, times(2)).jobs();
  verify(mockJobsGet, times(1)).execute();
  verify(mockBigQueryJobs, times(1)).get(eq(jobProjectId), eq(jobReference.getJobId()));

  assertTrue(executorService.isShutdown());

  // We want to make sure we have a consistent view of the read data which is
  // why we synchronize on it.
  synchronized (readData) {
    String readDataString = new String(
        Arrays.copyOfRange(readData, 0, (int) recordWriter.getBytesWritten()),
        StandardCharsets.UTF_8);
    assertEquals("", readDataString);
  }
}
/**
 * Tests the write method of BigQueryRecordWriter for multiple writes.
 *
 * <p>Same latch-gated mock-insert pattern as {@code testSingleWrite}, but two records are
 * written, so the captured stream must contain two newline-delimited JSON rows.
 */
@Test
public void testMultipleWrites() throws IOException, GeneralSecurityException {
  final ArgumentCaptor<AbstractInputStreamContent> inputStreamCaptor =
      ArgumentCaptor.forClass(AbstractInputStreamContent.class);
  // Buffer the mocked insert fills with whatever bytes the writer streamed out.
  final byte[] readData = new byte[4096];
  // Keeps execute() blocked until both writes are done and the captor is populated.
  final CountDownLatch waitTillWritesAreDoneLatch = new CountDownLatch(1);
  when(mockInsert.execute())
      .thenAnswer(new Answer<Job>() {
        @Override
        public Job answer(InvocationOnMock unused) throws Throwable {
          // We want to make sure we have a consistent update of the read data which is
          // why we synchronize on it.
          synchronized (readData) {
            waitTillWritesAreDoneLatch.await();
            inputStreamCaptor.getValue().getInputStream()
                .read(readData, 0, (int) recordWriter.getBytesWritten());
          }
          return jobReturn;
        }
      });
  initializeRecordWriter();

  // Write the key, value pair.
  callWrite(recordWriter, 2);
  // Capture the streamed content so the answer above can read from it.
  verify(mockBigQueryJobs).insert(
      eq(jobProjectId), eq(getExpectedJob()), inputStreamCaptor.capture());

  // The writes are now done, we can return from the execute method.
  // This is an issue with how PipedInputStream functions since it checks
  // to see if the sending and receiving threads are alive.
  waitTillWritesAreDoneLatch.countDown();

  // Close the RecordWriter.
  recordWriter.close(mockContext);

  // Check that the proper calls were sent to the BigQuery.
  verify(mockFactory).getBigQueryHelper(any(Configuration.class));
  verify(mockBigQuery, times(2)).jobs();
  verify(mockJobsGet, times(1)).execute();
  verify(mockBigQueryJobs, times(1)).get(eq(jobProjectId), eq(jobReference.getJobId()));
  assertTrue(executorService.isShutdown());

  // We want to make sure we have a consistent view of the read data which is
  // why we synchronize on it.
  synchronized (readData) {
    String readDataString = new String(
        Arrays.copyOfRange(readData, 0, (int) recordWriter.getBytesWritten()),
        StandardCharsets.UTF_8);
    assertEquals(
        "{\"Name\":\"test name\",\"Number\":\"123\"}\n"
            + "{\"Name\":\"test name\",\"Number\":\"123\"}\n",
        readDataString);
  }
}
/**
 * Verifies error propagation for GoogleCloudStorage.create(2): an IOException thrown by the
 * JSON API insert call must surface as the cause of the IOException raised when the write
 * channel is closed.
 */
@Test
public void testCreateObjectApiIOException() throws IOException {
  // Stub the object-insertion chain before exercising the code under test.
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(
      eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  when(mockClientRequestHelper.getRequestHeaders(eq(mockStorageObjectsInsert)))
      .thenReturn(mockHeaders);

  // Arrange for execute() to fail with a non-conflict IOException.
  IOException injectedFailure = new IOException("Fake IOException");
  setupNonConflictedWrite(injectedFailure);

  WritableByteChannel writeChannel = gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME));
  assertTrue(writeChannel.isOpen());

  // Closing the channel flushes the upload, which is where the stubbed failure appears.
  IOException thrown = null;
  try {
    writeChannel.close();
  } catch (IOException ioe) {
    thrown = ioe;
  }
  if (thrown == null) {
    fail("Expected IOException");
  }
  assertEquals(injectedFailure, thrown.getCause());

  // Request-construction expectations.
  verify(mockStorage, times(3)).objects();
  verify(mockStorageObjects, times(2)).insert(
      eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
  verify(mockStorageObjectsInsert, times(1)).setName(eq(OBJECT_NAME));
  verify(mockStorageObjectsInsert, times(2)).setDisableGZipContent(eq(true));
  verify(mockStorageObjectsInsert, times(1)).setIfGenerationMatch(eq(0L));
  verify(mockStorageObjectsInsert, times(1)).setIfGenerationMatch(eq(1L));
  verify(mockStorageObjectsInsert, times(2)).execute();

  // Upload header and chunking configuration.
  verify(mockHeaders, times(1)).set(
      eq("X-Goog-Upload-Desired-Chunk-Granularity"),
      eq(AbstractGoogleAsyncWriteChannel.GCS_UPLOAD_GRANULARITY));
  verify(mockHeaders, times(0)).set(eq("X-Goog-Upload-Max-Raw-Size"), anyInt());
  verify(mockClientRequestHelper).getRequestHeaders(any(AbstractGoogleClientRequest.class));
  verify(mockClientRequestHelper).setChunkSize(any(Storage.Objects.Insert.class), anyInt());
  verify(mockClientRequestHelper).setDirectUploadEnabled(eq(mockStorageObjectsInsert), eq(true));

  // Conflict-detection and retry bookkeeping.
  verify(mockStorageObjects, times(1)).get(eq(BUCKET_NAME), eq(OBJECT_NAME));
  verify(mockStorageObjectsGet, times(1)).execute();
  verify(mockErrorExtractor).itemNotFound(any(IOException.class));
  verify(mockBackOffFactory).newBackOff();
  verify(mockBackOff).nextBackOffMillis();
}
/** * Test handling of various types of exceptions thrown during JSON API call for * GoogleCloudStorage.create(2). */ @Test public void testCreateObjectApiRuntimeException() throws IOException { // Prepare the mock return values before invoking the method being tested. when(mockStorage.objects()).thenReturn(mockStorageObjects); when(mockStorageObjects.insert( eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class))) .thenReturn(mockStorageObjectsInsert); when(mockClientRequestHelper.getRequestHeaders(eq(mockStorageObjectsInsert))) .thenReturn(mockHeaders); // Set up the mock Insert to throw an exception when execute() is called. RuntimeException fakeException = new RuntimeException("Fake exception"); setupNonConflictedWrite(fakeException); WritableByteChannel writeChannel = gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME)); assertTrue(writeChannel.isOpen()); try { writeChannel.close(); fail("Expected IOException"); } catch (IOException ioe) { assertEquals(fakeException, ioe.getCause()); } verify(mockStorageObjectsInsert, times(2)).execute(); verify(mockStorage, times(3)).objects(); verify(mockStorageObjects, times(2)).insert( eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)); verify(mockStorageObjects, times(1)).get(eq(BUCKET_NAME), eq(OBJECT_NAME)); verify(mockErrorExtractor, atLeastOnce()).itemNotFound(any(IOException.class)); verify(mockBackOffFactory, atLeastOnce()).newBackOff(); verify(mockBackOff, times(1)).nextBackOffMillis(); verify(mockStorageObjectsGet, times(1)).execute(); verify(mockStorageObjectsInsert, times(1)).setName(eq(OBJECT_NAME)); verify(mockStorageObjectsInsert, times(2)).setDisableGZipContent(eq(true)); verify(mockStorageObjects, times(1)).get(anyString(), anyString()); verify(mockHeaders, times(1)).set( eq("X-Goog-Upload-Desired-Chunk-Granularity"), eq(AbstractGoogleAsyncWriteChannel.GCS_UPLOAD_GRANULARITY)); verify(mockHeaders, times(0)).set(eq("X-Goog-Upload-Max-Raw-Size"), 
anyInt()); verify(mockClientRequestHelper).getRequestHeaders(any(AbstractGoogleClientRequest.class)); verify(mockClientRequestHelper).setChunkSize(any(Storage.Objects.Insert.class), anyInt()); verify(mockClientRequestHelper).setDirectUploadEnabled(eq(mockStorageObjectsInsert), eq(true)); verify(mockStorageObjectsInsert, times(2)).setIfGenerationMatch(anyLong()); }
/**
 * Test handling of various types of Errors thrown during JSON API call for
 * GoogleCloudStorage.create(2).
 *
 * <p>Unlike exceptions, an {@link Error} from the insert's execute() is expected to propagate
 * unchanged (not wrapped in an IOException) out of the channel close.
 */
@Test
public void testCreateObjectApiError() throws IOException {
  // Prepare the mock return values before invoking the method being tested.
  when(mockStorage.objects()).thenReturn(mockStorageObjects);

  // Set up the mock Insert to throw an exception when execute() is called.
  Error fakeError = new Error("Fake error");
  setupNonConflictedWrite(fakeError);

  when(mockStorageObjects.insert(
      eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  when(mockClientRequestHelper.getRequestHeaders(eq(mockStorageObjectsInsert)))
      .thenReturn(mockHeaders);

  WritableByteChannel writeChannel = gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME));
  assertTrue(writeChannel.isOpen());

  try {
    writeChannel.close();
    fail("Expected Error");
  } catch (Error error) {
    // Use assertEquals for consistency with the other tests in this file.
    assertEquals(fakeError, error);
  }

  verify(mockStorage, times(3)).objects();
  verify(mockStorageObjects, times(2)).insert(
      eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
  verify(mockStorageObjects).get(BUCKET_NAME, OBJECT_NAME);
  verify(mockStorageObjectsGet).execute();
  verify(mockStorageObjectsInsert, times(1)).setName(eq(OBJECT_NAME));
  verify(mockStorageObjectsInsert, times(2)).setDisableGZipContent(eq(true));
  verify(mockStorageObjectsInsert, times(1)).setIfGenerationMatch(eq(0L));
  verify(mockStorageObjectsInsert, times(1)).setIfGenerationMatch(eq(1L));
  verify(mockErrorExtractor, times(1)).itemNotFound(any(IOException.class));
  verify(mockBackOff, atLeastOnce()).nextBackOffMillis();
  verify(mockBackOffFactory, atLeastOnce()).newBackOff();
  verify(mockHeaders, times(1)).set(
      eq("X-Goog-Upload-Desired-Chunk-Granularity"),
      eq(AbstractGoogleAsyncWriteChannel.GCS_UPLOAD_GRANULARITY));
  verify(mockHeaders, times(0)).set(eq("X-Goog-Upload-Max-Raw-Size"), anyInt());
  verify(mockClientRequestHelper).getRequestHeaders(any(AbstractGoogleClientRequest.class));
  verify(mockClientRequestHelper).setChunkSize(any(Storage.Objects.Insert.class), anyInt());
  verify(mockClientRequestHelper).setDirectUploadEnabled(eq(mockStorageObjectsInsert), eq(true));
  verify(mockStorageObjectsInsert, times(2)).execute();
}
public static void main(String[] args) { try { Preconditions.checkArgument(!Strings.isNullOrEmpty(ApplicationConfig.PACKAGE_NAME), "ApplicationConfig.PACKAGE_NAME cannot be null or empty!"); // Create the API service. AndroidPublisher service = AndroidPublisherHelper.init(ApplicationConfig.APPLICATION_NAME, ApplicationConfig.SERVICE_ACCOUNT_EMAIL); final Edits edits = service.edits(); // Create a new edit to make changes to your listing. Insert editRequest = edits.insert(ApplicationConfig.PACKAGE_NAME, null /** no content */); AppEdit edit = editRequest.execute(); final String editId = edit.getId(); log.info(String.format("Created edit with id: %s", editId)); // Upload new apk to developer console final String apkPath = BasicUploadApk.class .getResource(ApplicationConfig.APK_FILE_PATH) .toURI().getPath(); final AbstractInputStreamContent apkFile = new FileContent(AndroidPublisherHelper.MIME_TYPE_APK, new File(apkPath)); Upload uploadRequest = edits .apks() .upload(ApplicationConfig.PACKAGE_NAME, editId, apkFile); Apk apk = uploadRequest.execute(); log.info(String.format("Version code %d has been uploaded", apk.getVersionCode())); // Assign apk to alpha track. List<Integer> apkVersionCodes = new ArrayList<Integer>(); apkVersionCodes.add(apk.getVersionCode()); Update updateTrackRequest = edits .tracks() .update(ApplicationConfig.PACKAGE_NAME, editId, TRACK_ALPHA, new Track().setVersionCodes(apkVersionCodes)); Track updatedTrack = updateTrackRequest.execute(); log.info(String.format("Track %s has been updated.", updatedTrack.getTrack())); // Commit changes for edit. Commit commitRequest = edits.commit(ApplicationConfig.PACKAGE_NAME, editId); AppEdit appEdit = commitRequest.execute(); log.info(String.format("App edit with id %s has been comitted", appEdit.getId())); } catch (IOException | URISyntaxException | GeneralSecurityException ex) { log.error("Excpetion was thrown while uploading apk to alpha track", ex); } }
public static void main(String[] args) { try { Preconditions.checkArgument(!Strings.isNullOrEmpty(ApplicationConfig.PACKAGE_NAME), "ApplicationConfig.PACKAGE_NAME cannot be null or empty!"); // Create the API service. AndroidPublisher service = AndroidPublisherHelper.init( ApplicationConfig.APPLICATION_NAME, ApplicationConfig.SERVICE_ACCOUNT_EMAIL); final Edits edits = service.edits(); // Create a new edit to make changes to your listing. Insert editRequest = edits .insert(ApplicationConfig.PACKAGE_NAME, null /** no content */); AppEdit edit = editRequest.execute(); final String editId = edit.getId(); log.info(String.format("Created edit with id: %s", editId)); // Upload new apk to developer console final String apkPath = BasicUploadApk.class .getResource(ApplicationConfig.APK_FILE_PATH) .toURI().getPath(); final AbstractInputStreamContent apkFile = new FileContent(AndroidPublisherHelper.MIME_TYPE_APK, new File(apkPath)); Upload uploadRequest = edits .apks() .upload(ApplicationConfig.PACKAGE_NAME, editId, apkFile); Apk apk = uploadRequest.execute(); log.info(String.format("Version code %d has been uploaded", apk.getVersionCode())); // Assign apk to alpha track. List<Integer> apkVersionCodes = new ArrayList<>(); apkVersionCodes.add(apk.getVersionCode()); Update updateTrackRequest = edits .tracks() .update(ApplicationConfig.PACKAGE_NAME, editId, TRACK_ALPHA, new Track().setVersionCodes(apkVersionCodes)); Track updatedTrack = updateTrackRequest.execute(); log.info(String.format("Track %s has been updated.", updatedTrack.getTrack())); // Commit changes for edit. Commit commitRequest = edits.commit(ApplicationConfig.PACKAGE_NAME, editId); AppEdit appEdit = commitRequest.execute(); log.info(String.format("App edit with id %s has been comitted", appEdit.getId())); } catch (IOException | URISyntaxException | GeneralSecurityException ex) { log.error("Excpetion was thrown while uploading apk to alpha track", ex); } }
/**
 * Uploads the configured APK to the beta track and updates its "recent changes" listing:
 * creates an edit, uploads the APK, assigns its version code to the beta track, sets the
 * US-English recent-changes text, and commits the edit. Failures are logged, not rethrown.
 *
 * @param args unused command-line arguments
 */
public static void main(String[] args) {
  try {
    Preconditions.checkArgument(!Strings.isNullOrEmpty(ApplicationConfig.PACKAGE_NAME),
        "ApplicationConfig.PACKAGE_NAME cannot be null or empty!");

    // Create the API service.
    AndroidPublisher service = AndroidPublisherHelper.init(
        ApplicationConfig.APPLICATION_NAME, ApplicationConfig.SERVICE_ACCOUNT_EMAIL);
    final Edits edits = service.edits();

    // Create a new edit to make changes.
    Insert editRequest = edits
        .insert(ApplicationConfig.PACKAGE_NAME, null /* no content */);
    AppEdit edit = editRequest.execute();
    final String editId = edit.getId();
    log.info(String.format("Created edit with id: %s", editId));

    // Upload new apk to developer console
    final String apkPath = UploadApkWithListing.class
        .getResource(ApplicationConfig.APK_FILE_PATH)
        .toURI().getPath();
    final AbstractInputStreamContent apkFile =
        new FileContent(AndroidPublisherHelper.MIME_TYPE_APK, new File(apkPath));
    Upload uploadRequest = edits
        .apks()
        .upload(ApplicationConfig.PACKAGE_NAME, editId, apkFile);
    Apk apk = uploadRequest.execute();
    log.info(String.format("Version code %d has been uploaded", apk.getVersionCode()));

    // Assign apk to beta track.
    List<Integer> apkVersionCodes = new ArrayList<>();
    apkVersionCodes.add(apk.getVersionCode());
    Update updateTrackRequest = edits
        .tracks()
        .update(ApplicationConfig.PACKAGE_NAME, editId, TRACK_BETA,
            new Track().setVersionCodes(apkVersionCodes));
    Track updatedTrack = updateTrackRequest.execute();
    log.info(String.format("Track %s has been updated.", updatedTrack.getTrack()));

    // Update recent changes field in apk listing.
    final ApkListing newApkListing = new ApkListing();
    newApkListing.setRecentChanges(APK_LISTING_RECENT_CHANGES_TEXT);
    Apklistings.Update updateRecentChangesRequest = edits
        .apklistings()
        .update(ApplicationConfig.PACKAGE_NAME, editId, apk.getVersionCode(),
            Locale.US.toString(), newApkListing);
    updateRecentChangesRequest.execute();
    log.info("Recent changes has been updated.");

    // Commit changes for edit.
    Commit commitRequest = edits.commit(ApplicationConfig.PACKAGE_NAME, editId);
    AppEdit appEdit = commitRequest.execute();
    log.info(String.format("App edit with id %s has been committed", appEdit.getId()));

  } catch (IOException | URISyntaxException | GeneralSecurityException ex) {
    log.error(
        "Exception was thrown while uploading apk and updating recent changes", ex);
  }
}
/**
 * Returns the buffered record bytes wrapped as stream-based upload content.
 *
 * <p>Encodes with an explicit UTF-8 charset; the no-arg {@code String.getBytes()} would depend
 * on the JVM's platform default charset and could corrupt non-ASCII record data.
 *
 * @return the accumulated writer contents as {@code application/octet-stream} content
 */
public AbstractInputStreamContent getInputStreamContent() {
  InputStream inputStream = new ByteArrayInputStream(
      stringWriter.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8));
  return new InputStreamContent("application/octet-stream", inputStream);
}
/**
 * Constructs a {@link MediaHttpUploader}.
 *
 * <p>The input stream obtained via {@link AbstractInputStreamContent#getInputStream} is closed
 * once the upload completes successfully. For resumable uploads with a known media content
 * length, a stream that does not report {@link InputStream#markSupported} is wrapped in a
 * {@link BufferedInputStream} so that {@link InputStream#mark} and {@link InputStream#reset}
 * are available for server-error recovery. When the content length is unknown, each chunk is
 * buffered in memory to detect the final chunk.
 *
 * @param mediaContent the input stream content of the media to be uploaded
 * @param transport the transport to use for requests
 * @param httpRequestInitializer the initializer to use when creating an {@link HttpRequest},
 *     or {@code null} for none
 */
public MediaHttpUploader(AbstractInputStreamContent mediaContent, HttpTransport transport,
    HttpRequestInitializer httpRequestInitializer) {
  this.mediaContent = Preconditions.checkNotNull(mediaContent);
  this.transport = Preconditions.checkNotNull(transport);
  // Build the request factory with the initializer only when one was supplied.
  if (httpRequestInitializer == null) {
    this.requestFactory = transport.createRequestFactory();
  } else {
    this.requestFactory = transport.createRequestFactory(httpRequestInitializer);
  }
}
/**
 * Launches a job and waits for it to complete, without inspecting job-level errors beyond the
 * completion check performed by {@code checkJob}.
 *
 * @param job the job to launch
 * @param data optional content to stream with the job, or {@code null} for none
 * @return the completed job
 * @throws BigqueryJobFailureException
 */
public Job runJob(Job job, @Nullable AbstractInputStreamContent data) {
  Job launched = launchJob(job, data);
  Job completed = waitForJob(launched);
  return checkJob(completed);
}