private void validateFileCounters(Counters counter, long fileBytesRead,
    long fileBytesWritten, long mapOutputBytes,
    long mapOutputMaterializedBytes) {
  assertTrue(counter.findCounter(FileInputFormatCounter.BYTES_READ)
      .getValue() != 0);
  assertEquals(fileBytesRead,
      counter.findCounter(FileInputFormatCounter.BYTES_READ).getValue());

  assertTrue(counter.findCounter(FileOutputFormatCounter.BYTES_WRITTEN)
      .getValue() != 0);

  if (mapOutputBytes >= 0) {
    assertTrue(counter.findCounter(TaskCounter.MAP_OUTPUT_BYTES)
        .getValue() != 0);
  }
  if (mapOutputMaterializedBytes >= 0) {
    assertTrue(counter.findCounter(TaskCounter.MAP_OUTPUT_MATERIALIZED_BYTES)
        .getValue() != 0);
  }
}
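A hypothetical call site for this helper, shown only to illustrate the parameter convention; the job and variable names below are made up, and passing a negative value skips the corresponding map-output check:

// Hypothetical usage sketch: counters come from a finished job; inputSize is
// the known byte size of the test input. Negative values skip the
// MAP_OUTPUT_BYTES / MAP_OUTPUT_MATERIALIZED_BYTES checks.
Counters counters = runningJob.getCounters();
validateFileCounters(counters, inputSize, -1, -1, -1);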
@SuppressWarnings("unchecked") NewDirectOutputCollector(MRJobConfig jobContext, JobConf job, TaskUmbilicalProtocol umbilical, TaskReporter reporter) throws IOException, ClassNotFoundException, InterruptedException { this.reporter = reporter; mapOutputRecordCounter = reporter .getCounter(TaskCounter.MAP_OUTPUT_RECORDS); fileOutputByteCounter = reporter .getCounter(FileOutputFormatCounter.BYTES_WRITTEN); List<Statistics> matchedStats = null; if (outputFormat instanceof org.apache.hadoop.mapreduce.lib.output.FileOutputFormat) { matchedStats = getFsStatistics(org.apache.hadoop.mapreduce.lib.output.FileOutputFormat .getOutputPath(taskContext), taskContext.getConfiguration()); } fsStats = matchedStats; long bytesOutPrev = getOutputBytes(fsStats); out = outputFormat.getRecordWriter(taskContext); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); }
@SuppressWarnings("unchecked") public void init(MapOutputCollector.Context context ) throws IOException, ClassNotFoundException { this.reporter = context.getReporter(); JobConf job = context.getJobConf(); String finalName = getOutputName(getPartition()); FileSystem fs = FileSystem.get(job); OutputFormat<K, V> outputFormat = job.getOutputFormat(); mapOutputRecordCounter = reporter.getCounter(TaskCounter.MAP_OUTPUT_RECORDS); fileOutputByteCounter = reporter .getCounter(FileOutputFormatCounter.BYTES_WRITTEN); List<Statistics> matchedStats = null; if (outputFormat instanceof FileOutputFormat) { matchedStats = getFsStatistics(FileOutputFormat.getOutputPath(job), job); } fsStats = matchedStats; long bytesOutPrev = getOutputBytes(fsStats); out = job.getOutputFormat().getRecordWriter(fs, job, finalName, reporter); long bytesOutCurr = getOutputBytes(fsStats); fileOutputByteCounter.increment(bytesOutCurr - bytesOutPrev); }
@SuppressWarnings({ "deprecation" }) private static void initDepricatedMap() { depricatedCounterMap.put(FileInputFormat.Counter.class.getName(), FileInputFormatCounter.class.getName()); depricatedCounterMap.put(FileOutputFormat.Counter.class.getName(), FileOutputFormatCounter.class.getName()); depricatedCounterMap.put( org.apache.hadoop.mapreduce.lib.input.FileInputFormat.Counter.class .getName(), FileInputFormatCounter.class.getName()); depricatedCounterMap.put( org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.Counter.class .getName(), FileOutputFormatCounter.class.getName()); }