/**
 * Zips an entire directory specified by the path.
 *
 * @param sourceDirectory the directory to read from. This directory and all
 *     subdirectories will be added to the zip-file. The path within the zip
 *     file is relative to the directory given as parameter, not absolute.
 * @param zipFile the zip-file to write to.
 * @throws IOException the zipping failed, e.g. because the input was not
 *     readable.
 */
static void zipDirectory(File sourceDirectory, File zipFile) throws IOException {
  checkNotNull(sourceDirectory);
  checkNotNull(zipFile);
  checkArgument(
      sourceDirectory.isDirectory(),
      "%s is not a valid directory",
      sourceDirectory.getAbsolutePath());
  checkArgument(
      !zipFile.exists(),
      "%s already exists, files are not being overwritten",
      zipFile.getAbsolutePath());
  Closer closer = Closer.create();
  try {
    OutputStream outputStream =
        closer.register(new BufferedOutputStream(new FileOutputStream(zipFile)));
    zipDirectory(sourceDirectory, outputStream);
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
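// The stream-based overload that zipDirectory delegates to is not shown in the
// original; a minimal sketch under that assumption (the helper addToZip is
// hypothetical). Entry names are relativized against the source directory,
// matching the Javadoc contract above.
static void zipDirectory(File sourceDirectory, OutputStream outputStream) throws IOException {
  ZipOutputStream zipOut = new ZipOutputStream(outputStream);
  addToZip(sourceDirectory.toPath(), sourceDirectory, zipOut);
  zipOut.finish(); // flush the zip structure without closing the caller-owned stream
}

private static void addToZip(Path root, File directory, ZipOutputStream zipOut) throws IOException {
  // The caller has already verified that the root is a readable directory.
  for (File child : directory.listFiles()) {
    if (child.isDirectory()) {
      addToZip(root, child, zipOut);
    } else {
      zipOut.putNextEntry(new ZipEntry(root.relativize(child.toPath()).toString()));
      com.google.common.io.Files.asByteSource(child).copyTo(zipOut);
      zipOut.closeEntry();
    }
  }
}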
@Override
public boolean load(final BuildCacheKey key, final BuildCacheEntryReader reader)
    throws BuildCacheException {
  return persistentCache.useCache("load build cache entry", new Factory<Boolean>() {
    @Override
    public Boolean create() {
      File file = getFile(key.getHashCode());
      if (file.isFile()) {
        try {
          Closer closer = Closer.create();
          FileInputStream stream = closer.register(new FileInputStream(file));
          try {
            reader.readFrom(stream);
            return true;
          } finally {
            closer.close();
          }
        } catch (IOException ex) {
          throw new UncheckedIOException(ex);
        }
      }
      return false;
    }
  });
}
@Override
public void store(final BuildCacheKey key, final BuildCacheEntryWriter result)
    throws BuildCacheException {
  persistentCache.useCache("store build cache entry", new Runnable() {
    @Override
    public void run() {
      File file = getFile(key.getHashCode());
      try {
        Closer closer = Closer.create();
        OutputStream output = closer.register(new FileOutputStream(file));
        try {
          result.writeTo(output);
        } finally {
          closer.close();
        }
      } catch (IOException ex) {
        throw new UncheckedIOException(ex);
      }
    }
  });
}
/**
 * Write the dex program resources and proguard resource to {@code directory}.
 */
public void writeToDirectory(Path directory, OutputMode outputMode) throws IOException {
  if (outputMode == OutputMode.Indexed) {
    // Files.list returns a stream backed by an open directory handle; close it.
    try (Stream<Path> paths = Files.list(directory)) {
      for (Path path : paths.collect(Collectors.toList())) {
        if (isClassesDexFile(path)) {
          Files.delete(path);
        }
      }
    }
  }
  CopyOption[] options = new CopyOption[] {StandardCopyOption.REPLACE_EXISTING};
  try (Closer closer = Closer.create()) {
    List<Resource> dexProgramSources = getDexProgramResources();
    for (int i = 0; i < dexProgramSources.size(); i++) {
      Path filePath = directory.resolve(outputMode.getOutputPath(dexProgramSources.get(i), i));
      if (!Files.exists(filePath.getParent())) {
        Files.createDirectories(filePath.getParent());
      }
      Files.copy(closer.register(dexProgramSources.get(i).getStream()), filePath, options);
    }
  }
}
/**
 * Write the dex program resources to {@code archive} and the proguard resource as its sibling.
 */
public void writeToZip(Path archive, OutputMode outputMode) throws IOException {
  OpenOption[] options =
      new OpenOption[] {StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING};
  try (Closer closer = Closer.create()) {
    try (ZipOutputStream out = new ZipOutputStream(Files.newOutputStream(archive, options))) {
      List<Resource> dexProgramSources = getDexProgramResources();
      for (int i = 0; i < dexProgramSources.size(); i++) {
        ZipEntry zipEntry = new ZipEntry(outputMode.getOutputPath(dexProgramSources.get(i), i));
        byte[] bytes =
            ByteStreams.toByteArray(closer.register(dexProgramSources.get(i).getStream()));
        zipEntry.setSize(bytes.length);
        out.putNextEntry(zipEntry);
        out.write(bytes);
        out.closeEntry();
      }
    }
  }
}
public static void main(String[] args)
    throws IOException, ProguardRuleParserException, CompilationException, ExecutionException {
  Command.Builder builder = Command.parse(args);
  Command command = builder.build();
  if (command.isPrintHelp()) {
    System.out.println(Command.USAGE_MESSAGE);
    return;
  }
  AndroidApp app = command.getInputApp();
  Map<String, Integer> result = new HashMap<>();
  try (Closer closer = Closer.create()) {
    for (Resource resource : app.getDexProgramResources()) {
      for (Segment segment : DexFileReader.parseMapFrom(closer.register(resource.getStream()))) {
        int value = result.computeIfAbsent(segment.typeName(), (key) -> 0);
        result.put(segment.typeName(), value + segment.size());
      }
    }
  }
  System.out.println("Segments in dex application (name: size):");
  result.forEach((key, value) -> System.out.println(" - " + key + ": " + value));
}
@Test
public void buildAndTreeShakeFromDeployJar()
    throws ExecutionException, IOException, ProguardRuleParserException, CompilationException {
  int maxSize = 20000000;
  AndroidApp app = runAndCheckVerification(
      CompilerUnderTest.R8, CompilationMode.RELEASE, BASE + APK, null, BASE + PG_CONF,
      // Don't pass any inputs. The input will be read from the -injars in the Proguard
      // configuration file.
      null,
      ImmutableList.of());
  int bytes = 0;
  try (Closer closer = Closer.create()) {
    for (Resource dex : app.getDexProgramResources()) {
      bytes += ByteStreams.toByteArray(closer.register(dex.getStream())).length;
    }
  }
  assertTrue("Expected max size of " + maxSize + ", got " + bytes, bytes < maxSize);
}
public void assertIdenticalApplications(AndroidApp app1, AndroidApp app2, boolean write)
    throws IOException {
  try (Closer closer = Closer.create()) {
    if (write) {
      app1.writeToDirectory(temp.newFolder("app1").toPath(), OutputMode.Indexed);
      app2.writeToDirectory(temp.newFolder("app2").toPath(), OutputMode.Indexed);
    }
    List<Resource> files1 = app1.getDexProgramResources();
    List<Resource> files2 = app2.getDexProgramResources();
    assertEquals(files1.size(), files2.size());
    for (int index = 0; index < files1.size(); index++) {
      InputStream file1 = closer.register(files1.get(index).getStream());
      InputStream file2 = closer.register(files2.get(index).getStream());
      byte[] bytes1 = ByteStreams.toByteArray(file1);
      byte[] bytes2 = ByteStreams.toByteArray(file2);
      assertArrayEquals("File index " + index, bytes1, bytes2);
    }
  }
}
private static void writeSelfReferencingJarFile(File jarFile, String... entries)
    throws IOException {
  Manifest manifest = new Manifest();
  // Without version, the manifest is silently ignored. Ugh!
  manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
  manifest.getMainAttributes().put(Attributes.Name.CLASS_PATH, jarFile.getName());
  Closer closer = Closer.create();
  try {
    FileOutputStream fileOut = closer.register(new FileOutputStream(jarFile));
    // The manifest must be passed to the JarOutputStream; otherwise the Class-Path
    // attribute built above is never written and the jar is not self-referencing.
    JarOutputStream jarOut = closer.register(new JarOutputStream(fileOut, manifest));
    for (String entry : entries) {
      jarOut.putNextEntry(new ZipEntry(entry));
      Resources.copy(ClassPathTest.class.getResource(entry), jarOut);
      jarOut.closeEntry();
    }
  } catch (Throwable e) {
    throw closer.rethrow(e);
  } finally {
    closer.close();
  }
}
@Override
@NonNull
public Properties loadProperties(@NonNull File file) {
  Properties props = new Properties();
  Closer closer = Closer.create();
  try {
    FileInputStream fis = closer.register(new FileInputStream(file));
    props.load(fis);
  } catch (IOException ignore) {
    // Best effort: a missing or unreadable file yields empty Properties.
  } finally {
    try {
      closer.close();
    } catch (IOException e) {
      // Errors on close are ignored as well.
    }
  }
  return props;
}
@Before
public void setUp() {
  executor = createMock(DelayExecutor.class);
  clock = new FakeClock();
  stateManager = createMock(StateManager.class);
  storageUtil = new StorageTestUtil(this);
  storageUtil.expectOperations();
  shutdownCommand = createMock(Command.class);
  pruner = new TaskHistoryPruner(
      executor,
      stateManager,
      clock,
      new HistoryPrunnerSettings(ONE_DAY, ONE_MINUTE, PER_JOB_HISTORY),
      storageUtil.storage,
      new Lifecycle(shutdownCommand));
  closer = Closer.create();
}
private static DockerRunner createDockerRunner(
    String id,
    Environment environment,
    StateManager stateManager,
    ScheduledExecutorService scheduler,
    Stats stats,
    Debug debug) {
  final Config config = environment.config();
  final Closer closer = environment.closer();

  if (isDevMode(config)) {
    LOG.info("Creating LocalDockerRunner");
    return closer.register(DockerRunner.local(scheduler, stateManager));
  } else {
    final NamespacedKubernetesClient kubernetes = closer.register(getKubernetesClient(
        config, id, createGkeClient(), DefaultKubernetesClient::new));
    final ServiceAccountKeyManager serviceAccountKeyManager = createServiceAccountKeyManager();
    return closer.register(
        DockerRunner.kubernetes(kubernetes, stateManager, stats, serviceAccountKeyManager, debug));
  }
}
/**
 * Creates a session and ensures schema if configured. Closes the cluster and session if any
 * exception occurred.
 */
@Override
public Session create(Cassandra3Storage cassandra) {
  Closer closer = Closer.create();
  try {
    Cluster cluster = closer.register(buildCluster(cassandra));
    cluster.register(new QueryLogger.Builder().build());
    Session session;
    if (cassandra.ensureSchema) {
      session = closer.register(cluster.connect());
      Schema.ensureExists(cassandra.keyspace, session);
      session.execute("USE " + cassandra.keyspace);
    } else {
      session = cluster.connect(cassandra.keyspace);
    }
    initializeUDTs(session);
    return session;
  } catch (RuntimeException e) {
    try {
      closer.close();
    } catch (IOException ignored) {
    }
    throw e;
  }
}
/**
 * Creates a session and ensures schema if configured. Closes the cluster and session if any
 * exception occurred.
 */
@Override
public Session create(CassandraStorage cassandra) {
  Closer closer = Closer.create();
  try {
    Cluster cluster = closer.register(buildCluster(cassandra));
    cluster.register(new QueryLogger.Builder().build());
    if (cassandra.ensureSchema) {
      Session session = closer.register(cluster.connect());
      Schema.ensureExists(cassandra.keyspace, session);
      session.execute("USE " + cassandra.keyspace);
      return session;
    } else {
      return cluster.connect(cassandra.keyspace);
    }
  } catch (RuntimeException e) {
    try {
      closer.close();
    } catch (IOException ignored) {
    }
    throw e;
  }
}
/**
 * Compresses a string with Deflater.
 *
 * @param content the string to compress
 * @return the compressed bytes
 */
public static byte[] compress(String content) {
  Closer closer = Closer.create();
  ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
  Deflater deflater = new Deflater(Deflater.BEST_COMPRESSION, true);
  try {
    DeflaterOutputStream out =
        closer.register(new DeflaterOutputStream(byteArrayOutputStream, deflater));
    out.write(content.getBytes(Charsets.UTF_8));
  } catch (Exception e) {
    logger.error("compress error:", e);
  } finally {
    try {
      closer.close();
    } catch (Exception ex) {
      logger.info("compress error, closing the stream failed:", ex);
    }
  }
  deflater.end();
  return byteArrayOutputStream.toByteArray();
}
/**
 * Decompresses a raw-deflate stream with Inflater.
 *
 * @param inputStream the compressed input
 * @return the decompressed bytes, or {@code null} on error
 */
public static byte[] decompress(InputStream inputStream) {
  Closer closer = Closer.create();
  try {
    BufferedInputStream bufferInputStream = closer.register(
        new BufferedInputStream(new InflaterInputStream(inputStream, new Inflater(true))));
    // Alternatively, for gzip input: new BufferedInputStream(new GZIPInputStream(inputStream))
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    ByteStreams.copy(bufferInputStream, byteArrayOutputStream);
    return byteArrayOutputStream.toByteArray();
  } catch (Exception e) {
    logger.error("decompress error:", e);
  } finally {
    try {
      closer.close();
    } catch (Exception e1) {
      logger.error("decompress error, closing the stream failed", e1);
    }
  }
  return null;
}
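// Usage sketch for the compress/decompress pair above (hypothetical, not part of
// the original code). Both sides use the raw-deflate ("nowrap") format, so a
// round trip recovers the original text.
static void compressionRoundTrip() throws IOException {
  byte[] packed = compress("hello, closer");
  byte[] unpacked = decompress(new ByteArrayInputStream(packed));
  String roundTrip = new String(unpacked, Charsets.UTF_8); // "hello, closer"
}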
/**
 * Read data from the original input stream and pipe it to the compressing stream until fully read.
 */
private void streamAndCompressInput() {
  try {
    byte[] newline = "\n".getBytes(Charsets.UTF_8);
    while (!_closed && fetchNextRow()) {
      _rawOut.write(_buffer.array(), 0, _buffer.limit());
      _rawOut.write(newline);
    }
    _rawOut.close();
  } catch (Exception e) {
    try {
      Closer closer = Closer.create();
      closer.register(_rawOut);
      closer.register(_gzipIn);
      closer.close();
    } catch (IOException ignore) {
      // Ignore exceptions closing, don't mask the original exception.
    }
    if (!_closed) {
      _inputException = e instanceof IOException ? (IOException) e : new IOException(e);
    }
  }
}
@Override
public void close() throws IOException {
  // Goal: by the end of this function, both results and session are null and closed,
  // independent of what errors they throw or prior state.
  if (session == null) {
    // Only possible when previously closed, so we know that results is also null.
    return;
  }
  // Session does not implement Closeable -- it's AutoCloseable. So we can't register it with
  // the Closer, but we can use the Closer to simplify the error handling.
  try (Closer closer = Closer.create()) {
    if (results != null) {
      closer.register(results);
      results = null;
    }
    session.close();
  } finally {
    session = null;
  }
}
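// A sketch (not part of the original code) of how an AutoCloseable such as this
// Session could still be handed to a Closer: adapt it to Closeable, wrapping any
// checked exception in IOException since Closeable.close() may only throw that.
static Closeable asCloseable(AutoCloseable resource) {
  return () -> {
    try {
      resource.close();
    } catch (IOException | RuntimeException e) {
      throw e; // already acceptable for Closeable.close()
    } catch (Exception e) {
      throw new IOException(e);
    }
  };
}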
private int runProgram(PrintStream output, Map<Path, HashCode> digests)
    throws InterruptedException {
  AtomicBoolean failed = new AtomicBoolean();
  try (Closer closer = Closer.create()) {
    component
        .newActionComponentBuilder()
        .args(new ArrayList<>(arguments))
        .closer(closer)
        .inputDigests(digests)
        .output(output)
        .failed(failed)
        .build()
        .program()
        .run();
  } catch (Exception e) {
    if (Utilities.wasInterrupted(e)) {
      throw new InterruptedException();
    }
    output.printf(
        "ERROR: Program threw uncaught exception with args: %s%n",
        Joiner.on(' ').join(arguments));
    e.printStackTrace(output);
    return 1;
  }
  return failed.get() ? 1 : 0;
}
/**
 * Reads the set of service classes from a service file.
 *
 * @param input not {@code null}. Closed after use.
 * @return a not {@code null Set} of service class names.
 * @throws IOException
 */
static Set<String> readServiceFile(InputStream input) throws IOException {
  HashSet<String> serviceClasses = new HashSet<String>();
  Closer closer = Closer.create();
  try {
    // TODO(gak): use CharStreams
    BufferedReader r =
        closer.register(new BufferedReader(new InputStreamReader(input, Charsets.UTF_8)));
    String line;
    while ((line = r.readLine()) != null) {
      int commentStart = line.indexOf('#');
      if (commentStart >= 0) {
        line = line.substring(0, commentStart);
      }
      line = line.trim();
      if (!line.isEmpty()) {
        serviceClasses.add(line);
      }
    }
    return serviceClasses;
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
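// Usage sketch (hypothetical class names, not part of the original code): text
// after '#' is stripped and blank lines are ignored, mirroring the
// java.util.ServiceLoader file format.
static void readServiceFileExample() throws IOException {
  Set<String> services = readServiceFile(new ByteArrayInputStream(
      "com.example.FooImpl # default\n\ncom.example.BarImpl\n".getBytes(Charsets.UTF_8)));
  // services now contains "com.example.FooImpl" and "com.example.BarImpl".
}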
/**
 * Returns a URL to a local copy of the given resource, or null. There is
 * no filename conflict resolution.
 */
protected String addLocalResources(URL url) throws IOException {
  // Attempt to make local copy
  File resourceDir = computeResourceDir();
  if (resourceDir != null) {
    String base = url.getFile();
    base = base.substring(base.lastIndexOf('/') + 1);
    mNameToFile.put(base, new File(url.toExternalForm()));
    File target = new File(resourceDir, base);
    Closer closer = Closer.create();
    try {
      FileOutputStream output = closer.register(new FileOutputStream(target));
      InputStream input = closer.register(url.openStream());
      ByteStreams.copy(input, output);
    } catch (Throwable e) {
      // rethrow() always throws; the throw keyword makes that explicit to the compiler.
      throw closer.rethrow(e);
    } finally {
      closer.close();
    }
    return resourceDir.getName() + '/' + encodeUrl(base);
  }
  return null;
}
/**
 * We need real Jar contents as this test will actually run Gradle that will peek inside the
 * archive.
 */
@NotNull
private static byte[] createRealJarArchive() throws IOException {
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  Closer closer = Closer.create();
  Manifest manifest = new Manifest();
  manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
  JarOutputStream jar = closer.register(new JarOutputStream(buffer, manifest));
  try {
    jar.putNextEntry(new JarEntry("/dummy.txt"));
    jar.write(TOP_LEVEL_BUILD_GRADLE.getBytes());
  } catch (Throwable e) {
    throw closer.rethrow(e);
  } finally {
    closer.close();
  }
  // Read the buffer only after the jar stream has been closed and flushed.
  return buffer.toByteArray();
}
public static void main(String[] args) throws IOException {
  Closer closer = Closer.create();
  try {
    File destination = new File("src/main/resources/copy.txt");
    destination.deleteOnExit();
    BufferedReader reader = closer.register(
        new BufferedReader(new FileReader("src/main/resources/sampleTextFileOne.txt")));
    BufferedWriter writer = closer.register(new BufferedWriter(new FileWriter(destination)));
    String line;
    while ((line = reader.readLine()) != null) {
      writer.write(line);
      // readLine() strips the terminator, so write it back explicitly.
      writer.newLine();
    }
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
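// Since the goal above is a plain file copy, Guava can also do it byte-for-byte
// in a single call; a sketch of the equivalent, with no Closer needed:
public static void copyWithGuava() throws IOException {
  com.google.common.io.Files.copy(
      new File("src/main/resources/sampleTextFileOne.txt"),
      new File("src/main/resources/copy.txt"));
}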
/**
 * Deserialize a {@link Writable} object from a string.
 *
 * @param writableClass the {@link Writable} implementation class
 * @param serializedWritableStr the string containing a serialized {@link Writable} object
 * @param configuration a {@link Configuration} object containing Hadoop configuration properties
 * @return a {@link Writable} deserialized from the string
 * @throws IOException if there's something wrong with the deserialization
 */
public static Writable deserializeFromString(Class<? extends Writable> writableClass,
    String serializedWritableStr, Configuration configuration) throws IOException {
  Closer closer = Closer.create();
  try {
    byte[] writableBytes = BaseEncoding.base64().decode(serializedWritableStr);
    ByteArrayInputStream byteArrayInputStream =
        closer.register(new ByteArrayInputStream(writableBytes));
    DataInputStream dataInputStream = closer.register(new DataInputStream(byteArrayInputStream));
    Writable writable = ReflectionUtils.newInstance(writableClass, configuration);
    writable.readFields(dataInputStream);
    return writable;
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
/**
 * Convert a GenericRecord to a byte array.
 */
public static byte[] recordToByteArray(GenericRecord record) throws IOException {
  Closer closer = Closer.create();
  try {
    ByteArrayOutputStream out = closer.register(new ByteArrayOutputStream());
    Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<GenericRecord>(record.getSchema());
    writer.write(record, encoder);
    return out.toByteArray();
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
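// Hypothetical counterpart to recordToByteArray (not in the original code): read
// the bytes back into a GenericRecord, given the writer schema.
public static GenericRecord byteArrayToRecord(byte[] bytes, Schema schema) throws IOException {
  DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(schema);
  Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
  return reader.read(null, decoder);
}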
/**
 * Get the latest avro schema for a directory.
 *
 * @param directory the input dir that contains avro files
 * @param conf configuration
 * @param latest true to return the latest schema, false to return the oldest schema
 * @return the latest/oldest schema in the directory
 * @throws IOException
 */
public static Schema getDirectorySchema(Path directory, Configuration conf, boolean latest)
    throws IOException {
  Schema schema = null;
  Closer closer = Closer.create();
  try {
    List<FileStatus> files = getDirectorySchemaHelper(directory, FileSystem.get(conf));
    if (files == null || files.size() == 0) {
      LOG.warn("There is no previous avro file in the directory: " + directory);
    } else {
      FileStatus file = latest ? files.get(0) : files.get(files.size() - 1);
      LOG.info("Path to get the avro schema: " + file);
      FsInput fi = new FsInput(file.getPath(), conf);
      GenericDatumReader<GenericRecord> genReader = new GenericDatumReader<GenericRecord>();
      schema = closer.register(new DataFileReader<GenericRecord>(fi, genReader)).getSchema();
    }
  } catch (IOException ioe) {
    throw new IOException("Cannot get the schema for directory " + directory, ioe);
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
  return schema;
}
/**
 * Get token from the token sequence file.
 *
 * @param authPath
 * @param proxyUserName
 * @return Token for proxyUserName if it exists.
 * @throws IOException
 */
private Optional<Token> getTokenFromSeqFile(String authPath, String proxyUserName)
    throws IOException {
  Closer closer = Closer.create();
  try {
    FileSystem localFs = FileSystem.getLocal(new Configuration());
    SequenceFile.Reader tokenReader =
        closer.register(new SequenceFile.Reader(localFs, new Path(authPath), localFs.getConf()));
    Text key = new Text();
    Token value = new Token();
    while (tokenReader.next(key, value)) {
      LOG.info("Found token for " + key);
      if (key.toString().equals(proxyUserName)) {
        return Optional.of(value);
      }
    }
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
  return Optional.absent();
}
/**
 * Deserialize a list of {@link State} objects from a Hadoop {@link SequenceFile}.
 *
 * <p>
 * This method submits a task to deserialize the {@link State} objects and returns immediately
 * after the task is submitted.
 * </p>
 *
 * @param keyClass the {@link Class} object of the {@link Writable} key type
 * @param stateClass the {@link Class} object of the {@link State} class
 * @param inputFilePath the input {@link SequenceFile} to read from
 * @param states a {@link Collection} object to store the deserialized {@link State} objects
 * @param <T> the {@link State} object type
 */
public <T extends State> void deserializeFromSequenceFile(final Class<? extends Writable> keyClass,
    final Class<T> stateClass, final Path inputFilePath, final Collection<T> states) {
  this.futures.add(this.executor.submit(new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      Closer closer = Closer.create();
      try {
        @SuppressWarnings("deprecation")
        SequenceFile.Reader reader =
            closer.register(new SequenceFile.Reader(fs, inputFilePath, fs.getConf()));
        Writable key = keyClass.newInstance();
        T state = stateClass.newInstance();
        while (reader.next(key, state)) {
          states.add(state);
          state = stateClass.newInstance();
        }
      } catch (Throwable t) {
        throw closer.rethrow(t);
      } finally {
        closer.close();
      }
      return null;
    }
  }));
}
@Test
public void testSerializeToFile() throws IOException {
  Closer closer = Closer.create();
  try {
    ParallelRunner parallelRunner = closer.register(new ParallelRunner(2, this.fs));

    WorkUnit workUnit1 = WorkUnit.createEmpty();
    workUnit1.setProp("foo", "bar");
    workUnit1.setProp("a", 10);
    parallelRunner.serializeToFile(workUnit1, new Path(this.outputPath, "wu1"));

    WorkUnit workUnit2 = WorkUnit.createEmpty();
    workUnit2.setProp("foo", "baz");
    workUnit2.setProp("b", 20);
    parallelRunner.serializeToFile(workUnit2, new Path(this.outputPath, "wu2"));
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
@Test(dependsOnMethods = "testSerializeToFile") public void testDeserializeFromFile() throws IOException { WorkUnit workUnit1 = WorkUnit.createEmpty(); WorkUnit workUnit2 = WorkUnit.createEmpty(); Closer closer = Closer.create(); try { ParallelRunner parallelRunner = closer.register(new ParallelRunner(2, this.fs)); parallelRunner.deserializeFromFile(workUnit1, new Path(this.outputPath, "wu1")); parallelRunner.deserializeFromFile(workUnit2, new Path(this.outputPath, "wu2")); } catch (Throwable t) { throw closer.rethrow(t); } finally { closer.close(); } Assert.assertEquals(workUnit1.getPropertyNames().size(), 2); Assert.assertEquals(workUnit1.getProp("foo"), "bar"); Assert.assertEquals(workUnit1.getPropAsInt("a"), 10); Assert.assertEquals(workUnit2.getPropertyNames().size(), 2); Assert.assertEquals(workUnit2.getProp("foo"), "baz"); Assert.assertEquals(workUnit2.getPropAsInt("b"), 20); }
@Test(dependsOnMethods = "testSerializeToSequenceFile") public void testDeserializeFromSequenceFile() throws IOException { Queue<WorkUnitState> workUnitStates = Queues.newConcurrentLinkedQueue(); Closer closer = Closer.create(); try { ParallelRunner parallelRunner = closer.register(new ParallelRunner(2, this.fs)); parallelRunner.deserializeFromSequenceFile(Text.class, WorkUnitState.class, new Path(this.outputPath, "seq1"), workUnitStates); parallelRunner.deserializeFromSequenceFile(Text.class, WorkUnitState.class, new Path(this.outputPath, "seq2"), workUnitStates); } catch (Throwable t) { throw closer.rethrow(t); } finally { closer.close(); } Assert.assertEquals(workUnitStates.size(), 2); for (WorkUnitState workUnitState : workUnitStates) { TestWatermark watermark = new Gson().fromJson(workUnitState.getActualHighWatermark(), TestWatermark.class); Assert.assertTrue(watermark.getLongWatermark() == 10L || watermark.getLongWatermark() == 100L); } }
@Test
public void testGenerateDumpScript() throws IOException {
  Path dumpScript = new Path(TEST_DIR, SCRIPT_NAME);
  HeapDumpForTaskUtils.generateDumpScript(dumpScript, this.fs, "test.hprof", "chmod 777 ");
  Assert.assertEquals(true, this.fs.exists(dumpScript));
  Assert.assertEquals(true, this.fs.exists(new Path(dumpScript.getParent(), "dumps")));
  Closer closer = Closer.create();
  try {
    BufferedReader scriptReader =
        closer.register(new BufferedReader(new InputStreamReader(this.fs.open(dumpScript))));
    Assert.assertEquals("#!/bin/sh", scriptReader.readLine());
    Assert.assertEquals("if [ -n \"$HADOOP_PREFIX\" ]; then", scriptReader.readLine());
    Assert.assertEquals(" ${HADOOP_PREFIX}/bin/hadoop dfs -put test.hprof dumpScript/dumps/${PWD//\\//_}.hprof",
        scriptReader.readLine());
    Assert.assertEquals("else", scriptReader.readLine());
    Assert.assertEquals(" ${HADOOP_HOME}/bin/hadoop dfs -put test.hprof dumpScript/dumps/${PWD//\\//_}.hprof",
        scriptReader.readLine());
    Assert.assertEquals("fi", scriptReader.readLine());
  } catch (Throwable t) {
    // rethrow() always throws; the throw keyword makes that explicit to the compiler.
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
public KafkaExtractor(WorkUnitState state) {
  super(state);
  this.workUnitState = state;
  this.topicName = KafkaUtils.getTopicName(state);
  this.partitions = KafkaUtils.getPartitions(state);
  this.lowWatermark = GSON.fromJson(state.getWorkunit().getLowWatermark(), MultiLongWatermark.class);
  this.highWatermark =
      GSON.fromJson(state.getWorkunit().getExpectedHighWatermark(), MultiLongWatermark.class);
  this.nextWatermark = new MultiLongWatermark(this.lowWatermark);
  this.closer = Closer.create();
  this.kafkaWrapper = closer.register(KafkaWrapper.create(state));
  this.stopwatch = Stopwatch.createUnstarted();
  this.decodingErrorCount = Maps.newHashMap();
  this.avgMillisPerRecord = Maps.newHashMapWithExpectedSize(this.partitions.size());
  this.avgRecordSizes = Maps.newHashMapWithExpectedSize(this.partitions.size());
  this.errorPartitions = Sets.newHashSet();

  // The actual high watermark starts with the low watermark
  this.workUnitState.setActualHighWatermark(this.lowWatermark);
}
@Test
public void testGetWorkUnitsAndExtractor() throws IOException, DataRecordException {
  HadoopFileInputSource<String, Text, LongWritable, Text> fileInputSource =
      new TestHadoopFileInputSource();

  List<WorkUnit> workUnitList = fileInputSource.getWorkunits(this.sourceState);
  Assert.assertEquals(workUnitList.size(), 1);

  WorkUnitState workUnitState = new WorkUnitState(workUnitList.get(0));

  Closer closer = Closer.create();
  try {
    HadoopFileInputExtractor<String, Text, LongWritable, Text> extractor =
        (HadoopFileInputExtractor<String, Text, LongWritable, Text>)
            fileInputSource.getExtractor(workUnitState);
    Text text = extractor.readRecord(null);
    Assert.assertEquals(text.toString(), TEXT);
    Assert.assertNull(extractor.readRecord(null));
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
@Test
public void testGetWorkUnitsAndExtractor() throws IOException, DataRecordException {
  OldApiHadoopFileInputSource<String, Text, LongWritable, Text> fileInputSource =
      new TestHadoopFileInputSource();

  List<WorkUnit> workUnitList = fileInputSource.getWorkunits(this.sourceState);
  Assert.assertEquals(workUnitList.size(), 1);

  WorkUnitState workUnitState = new WorkUnitState(workUnitList.get(0));

  Closer closer = Closer.create();
  try {
    OldApiHadoopFileInputExtractor<String, Text, LongWritable, Text> extractor =
        (OldApiHadoopFileInputExtractor<String, Text, LongWritable, Text>)
            fileInputSource.getExtractor(workUnitState);
    Text text = extractor.readRecord(null);
    Assert.assertEquals(text.toString(), TEXT);
    Assert.assertNull(extractor.readRecord(null));
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
private void publishTaskData() throws IOException {
  Closer closer = Closer.create();
  try {
    @SuppressWarnings("unchecked")
    Class<? extends DataPublisher> dataPublisherClass =
        (Class<? extends DataPublisher>) Class.forName(this.taskState.getProp(
            ConfigurationKeys.DATA_PUBLISHER_TYPE, ConfigurationKeys.DEFAULT_DATA_PUBLISHER_TYPE));
    SingleTaskDataPublisher publisher =
        closer.register(SingleTaskDataPublisher.getInstance(dataPublisherClass, this.taskState));
    LOG.info("Publishing data from task " + this.taskId);
    publisher.publish(this.taskState);
  } catch (IOException e) {
    throw closer.rethrow(e);
  } catch (Throwable t) {
    LOG.error(String.format("To publish data in task, the publisher class (%s) must extend %s",
        ConfigurationKeys.DATA_PUBLISHER_TYPE, SingleTaskDataPublisher.class.getSimpleName()), t);
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
@Override
public D readRecord(@Deprecated D reuse) throws DataRecordException, IOException {
  Closer closer = Closer.create();
  try {
    if (closer.register(this.limiter.acquirePermits(1)) != null) {
      return this.extractor.readRecord(reuse);
    }
    return null;
  } catch (InterruptedException ie) {
    Thread.currentThread().interrupt();
    throw new IOException("Interrupted while trying to acquire the next permit", ie);
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}