Example source code for the Java class org.apache.hadoop.mapred.Counters
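This page collects example usages of the old-API counter class org.apache.hadoop.mapred.Counters, grouped by project and source file. As a minimal orientation sketch (using only the Counters calls that appear in the snippets below; the group and counter names are illustrative), counters are created, incremented, and read like this:

Counters counters = new Counters();
counters.incrCounter("my-group", "my-counter", 3);  // creates the counter if absent, then adds 3
Counters.Counter c = counters.findCounter("my-group", "my-counter");
long value = c.getValue();                          // 3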
Project: hadoop
File: TestFetcher.java
@Before
@SuppressWarnings("unchecked") // mocked generics
public void setup() {
  LOG.info(">>>> " + name.getMethodName());
  job = new JobConf();
  job.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, false);
  jobWithRetry = new JobConf();
  jobWithRetry.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, true);
  id = TaskAttemptID.forName("attempt_0_1_r_1_1");
  ss = mock(ShuffleSchedulerImpl.class);
  mm = mock(MergeManagerImpl.class);
  r = mock(Reporter.class);
  metrics = mock(ShuffleClientMetrics.class);
  except = mock(ExceptionReporter.class);
  key = JobTokenSecretManager.createSecretKey(new byte[]{0,0,0,0});
  connection = mock(HttpURLConnection.class);
  allErrs = mock(Counters.Counter.class);
  when(r.getCounter(anyString(), anyString())).thenReturn(allErrs);
  ArrayList<TaskAttemptID> maps = new ArrayList<TaskAttemptID>(1);
  maps.add(map1ID);
  maps.add(map2ID);
  when(ss.getMapsForHost(host)).thenReturn(maps);
}
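In this test the mocked Reporter hands out allErrs for any group/name pair, which is the same Reporter.getCounter path a running task would use. A hedged sketch of a mapper incrementing a user counter through that API (old mapred interface; the class name is hypothetical, the group/counter names match the streaming test further down):

public class LineCountingMapper extends MapReduceBase
    implements Mapper<LongWritable, Text, Text, LongWritable> {
  @Override
  public void map(LongWritable offset, Text line,
      OutputCollector<Text, LongWritable> output, Reporter reporter)
      throws IOException {
    // Same call shape the test mocks: group name, then counter name.
    reporter.getCounter("UserCounters", "InputLines").increment(1);
    output.collect(line, new LongWritable(1));
  }
}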
Project: aliyun-oss-hadoop-fs
File: MergeManagerImpl.java
private void combineAndSpill(
    RawKeyValueIterator kvIter,
    Counters.Counter inCounter) throws IOException {
  JobConf job = jobConf;
  Reducer combiner = ReflectionUtils.newInstance(combinerClass, job);
  Class<K> keyClass = (Class<K>) job.getMapOutputKeyClass();
  Class<V> valClass = (Class<V>) job.getMapOutputValueClass();
  RawComparator<K> comparator =
      (RawComparator<K>) job.getCombinerKeyGroupingComparator();
  try {
    CombineValuesIterator values = new CombineValuesIterator(
        kvIter, comparator, keyClass, valClass, job, Reporter.NULL,
        inCounter);
    while (values.more()) {
      combiner.reduce(values.getKey(), values, combineCollector,
          Reporter.NULL);
      values.nextKey();
    }
  } finally {
    combiner.close();
  }
}
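How inCounter gets wired up is outside this snippet; judging by the COMBINE_INPUT_RECORDS assertions in the streaming tests further down, a plausible call site passes the task's combine-input counter. This is a hypothetical sketch, not code from MergeManagerImpl:

// Hypothetical wiring, assuming a Reporter is in scope:
Counters.Counter combineInputCounter =
    reporter.getCounter("org.apache.hadoop.mapred.Task$Counter",
        "COMBINE_INPUT_RECORDS");
combineAndSpill(kvIter, combineInputCounter);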
Project: aliyun-oss-hadoop-fs
File: TestFetcher.java
@Before
@SuppressWarnings("unchecked") // mocked generics
public void setup() {
  LOG.info(">>>> " + name.getMethodName());
  job = new JobConf();
  job.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, false);
  jobWithRetry = new JobConf();
  jobWithRetry.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, true);
  id = TaskAttemptID.forName("attempt_0_1_r_1_1");
  ss = mock(ShuffleSchedulerImpl.class);
  mm = mock(MergeManagerImpl.class);
  r = mock(Reporter.class);
  metrics = mock(ShuffleClientMetrics.class);
  except = mock(ExceptionReporter.class);
  key = JobTokenSecretManager.createSecretKey(new byte[]{0,0,0,0});
  connection = mock(HttpURLConnection.class);
  allErrs = mock(Counters.Counter.class);
  when(r.getCounter(anyString(), anyString())).thenReturn(allErrs);
  ArrayList<TaskAttemptID> maps = new ArrayList<TaskAttemptID>(1);
  maps.add(map1ID);
  maps.add(map2ID);
  when(ss.getMapsForHost(host)).thenReturn(maps);
}
Project: big-c
File: MergeManagerImpl.java
private void combineAndSpill(
    RawKeyValueIterator kvIter,
    Counters.Counter inCounter) throws IOException {
  JobConf job = jobConf;
  Reducer combiner = ReflectionUtils.newInstance(combinerClass, job);
  Class<K> keyClass = (Class<K>) job.getMapOutputKeyClass();
  Class<V> valClass = (Class<V>) job.getMapOutputValueClass();
  RawComparator<K> comparator =
      (RawComparator<K>) job.getCombinerKeyGroupingComparator();
  try {
    CombineValuesIterator values = new CombineValuesIterator(
        kvIter, comparator, keyClass, valClass, job, Reporter.NULL,
        inCounter);
    while (values.more()) {
      combiner.reduce(values.getKey(), values, combineCollector,
          Reporter.NULL);
      values.nextKey();
    }
  } finally {
    combiner.close();
  }
}
Project: big-c
File: TestFetcher.java
@Before
@SuppressWarnings("unchecked") // mocked generics
public void setup() {
  LOG.info(">>>> " + name.getMethodName());
  job = new JobConf();
  job.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, false);
  jobWithRetry = new JobConf();
  jobWithRetry.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, true);
  id = TaskAttemptID.forName("attempt_0_1_r_1_1");
  ss = mock(ShuffleSchedulerImpl.class);
  mm = mock(MergeManagerImpl.class);
  r = mock(Reporter.class);
  metrics = mock(ShuffleClientMetrics.class);
  except = mock(ExceptionReporter.class);
  key = JobTokenSecretManager.createSecretKey(new byte[]{0,0,0,0});
  connection = mock(HttpURLConnection.class);
  allErrs = mock(Counters.Counter.class);
  when(r.getCounter(anyString(), anyString())).thenReturn(allErrs);
  ArrayList<TaskAttemptID> maps = new ArrayList<TaskAttemptID>(1);
  maps.add(map1ID);
  maps.add(map2ID);
  when(ss.getMapsForHost(host)).thenReturn(maps);
}
Project: reair
File: MapRedStatsLogModule.java
/**
 * Converts Hadoop counters to a JSON representation.
 *
 * @param counters the Hadoop counters to convert
 * @return the JSON representation of the given counters
 *
 * @throws SerializationException if mapping the counters to JSON fails
 */
@VisibleForTesting
static String toJson(Counters counters) throws SerializationException {
  ArrayNode countersJsonNode = JsonNodeFactory.instance.arrayNode();
  ArrayNode groupsJsonNode = JsonNodeFactory.instance.arrayNode();
  for (Group group : counters) {
    for (Counters.Counter counter : group) {
      ObjectNode counterJsonNode = JsonNodeFactory.instance.objectNode();
      counterJsonNode.put("counterName", counter.getName())
          .put("value", counter.getValue());
      countersJsonNode.add(counterJsonNode);
    }
    ObjectNode groupJsonNode = JsonNodeFactory.instance.objectNode();
    groupJsonNode.put("groupName", group.getDisplayName())
        .put("counters", countersJsonNode);
    groupsJsonNode.add(groupJsonNode);
  }
  ObjectMapper mapper = new ObjectMapper();
  try {
    return mapper.writeValueAsString(groupsJsonNode);
  } catch (JsonProcessingException e) {
    throw new SerializationException(e);
  }
}
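Pairing this with the tests in MapRedStatsLogModuleTest below, a single-group, single-counter input serializes like so:

Counters counters = new Counters();
counters.incrCounter("SomeCounterGroupName", "SomeCounterName", 3);
String json = MapRedStatsLogModule.toJson(counters);
// [{"groupName":"SomeCounterGroupName","counters":[{"counterName":"SomeCounterName","value":3}]}]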
Project: hadoop-2.6.0-cdh5.4.3
File: MergeManagerImpl.java
private void combineAndSpill(
    RawKeyValueIterator kvIter,
    Counters.Counter inCounter) throws IOException {
  JobConf job = jobConf;
  Reducer combiner = ReflectionUtils.newInstance(combinerClass, job);
  Class<K> keyClass = (Class<K>) job.getMapOutputKeyClass();
  Class<V> valClass = (Class<V>) job.getMapOutputValueClass();
  RawComparator<K> comparator =
      (RawComparator<K>) job.getCombinerKeyGroupingComparator();
  try {
    CombineValuesIterator values = new CombineValuesIterator(
        kvIter, comparator, keyClass, valClass, job, Reporter.NULL,
        inCounter);
    while (values.more()) {
      combiner.reduce(values.getKey(), values, combineCollector,
          Reporter.NULL);
      values.nextKey();
    }
  } finally {
    combiner.close();
  }
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestFetcher.java
@Before
@SuppressWarnings("unchecked") // mocked generics
public void setup() {
  LOG.info(">>>> " + name.getMethodName());
  job = new JobConf();
  job.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, false);
  jobWithRetry = new JobConf();
  jobWithRetry.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, true);
  id = TaskAttemptID.forName("attempt_0_1_r_1_1");
  ss = mock(ShuffleSchedulerImpl.class);
  mm = mock(MergeManagerImpl.class);
  r = mock(Reporter.class);
  metrics = mock(ShuffleClientMetrics.class);
  except = mock(ExceptionReporter.class);
  key = JobTokenSecretManager.createSecretKey(new byte[]{0,0,0,0});
  connection = mock(HttpURLConnection.class);
  allErrs = mock(Counters.Counter.class);
  when(r.getCounter(anyString(), anyString())).thenReturn(allErrs);
  ArrayList<TaskAttemptID> maps = new ArrayList<TaskAttemptID>(1);
  maps.add(map1ID);
  maps.add(map2ID);
  when(ss.getMapsForHost(host)).thenReturn(maps);
}
Project: hops
File: MergeManagerImpl.java
private void combineAndSpill(
    RawKeyValueIterator kvIter,
    Counters.Counter inCounter) throws IOException {
  JobConf job = jobConf;
  Reducer combiner = ReflectionUtils.newInstance(combinerClass, job);
  Class<K> keyClass = (Class<K>) job.getMapOutputKeyClass();
  Class<V> valClass = (Class<V>) job.getMapOutputValueClass();
  RawComparator<K> comparator =
      (RawComparator<K>) job.getCombinerKeyGroupingComparator();
  try {
    CombineValuesIterator values = new CombineValuesIterator(
        kvIter, comparator, keyClass, valClass, job, Reporter.NULL,
        inCounter);
    while (values.more()) {
      combiner.reduce(values.getKey(), values, combineCollector,
          Reporter.NULL);
      values.nextKey();
    }
  } finally {
    combiner.close();
  }
}
Project: hiped2
File: JobHistoryHelper.java
public static String extractCounter(String counterFromHist,
                                    String... counterNames)
    throws ParseException {
  Counters counters =
      Counters.fromEscapedCompactString(counterFromHist);
  for (Counters.Group group : counters) {
    for (Counters.Counter counter : group) {
      for (String counterName : counterNames) {
        if (counterName.equals(counter.getName())) {
          return String.valueOf(counter.getCounter());
        }
      }
    }
  }
  return null;
}
Project: hiped2
File: ExtractJobMetrics.java
public static String extractCounter(String counterFromHist,
                                    String... counterNames)
    throws ParseException {
  Counters counters =
      Counters.fromEscapedCompactString(counterFromHist);
  for (Counters.Group group : counters) {
    for (Counters.Counter counter : group) {
      for (String counterName : counterNames) {
        if (counterName.equals(counter.getName())) {
          return String.valueOf(counter.getCounter());
        }
      }
    }
  }
  return "";
}
Project: hadoop-plus
File: MergeManagerImpl.java
private void combineAndSpill(
    RawKeyValueIterator kvIter,
    Counters.Counter inCounter) throws IOException {
  JobConf job = jobConf;
  Reducer combiner = ReflectionUtils.newInstance(combinerClass, job);
  Class<K> keyClass = (Class<K>) job.getMapOutputKeyClass();
  Class<V> valClass = (Class<V>) job.getMapOutputValueClass();
  RawComparator<K> comparator =
      (RawComparator<K>) job.getOutputKeyComparator();
  try {
    CombineValuesIterator values = new CombineValuesIterator(
        kvIter, comparator, keyClass, valClass, job, Reporter.NULL,
        inCounter);
    while (values.more()) {
      combiner.reduce(values.getKey(), values, combineCollector,
          Reporter.NULL);
      values.nextKey();
    }
  } finally {
    combiner.close();
  }
}
Project: hadoop-plus
File: TestFetcher.java
@Before
@SuppressWarnings("unchecked") // mocked generics
public void setup() {
  LOG.info(">>>> " + name.getMethodName());
  job = new JobConf();
  id = TaskAttemptID.forName("attempt_0_1_r_1_1");
  ss = mock(ShuffleSchedulerImpl.class);
  mm = mock(MergeManagerImpl.class);
  r = mock(Reporter.class);
  metrics = mock(ShuffleClientMetrics.class);
  except = mock(ExceptionReporter.class);
  key = JobTokenSecretManager.createSecretKey(new byte[]{0,0,0,0});
  connection = mock(HttpURLConnection.class);
  allErrs = mock(Counters.Counter.class);
  when(r.getCounter(anyString(), anyString())).thenReturn(allErrs);
  ArrayList<TaskAttemptID> maps = new ArrayList<TaskAttemptID>(1);
  maps.add(map1ID);
  maps.add(map2ID);
  when(ss.getMapsForHost(host)).thenReturn(maps);
}
Project: FlexMap
File: TestFetcher.java
@Before
@SuppressWarnings("unchecked") // mocked generics
public void setup() {
  LOG.info(">>>> " + name.getMethodName());
  job = new JobConf();
  job.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, false);
  jobWithRetry = new JobConf();
  jobWithRetry.setBoolean(MRJobConfig.SHUFFLE_FETCH_RETRY_ENABLED, true);
  id = TaskAttemptID.forName("attempt_0_1_r_1_1");
  ss = mock(ShuffleSchedulerImpl.class);
  mm = mock(MergeManagerImpl.class);
  r = mock(Reporter.class);
  metrics = mock(ShuffleClientMetrics.class);
  except = mock(ExceptionReporter.class);
  key = JobTokenSecretManager.createSecretKey(new byte[]{0,0,0,0});
  connection = mock(HttpURLConnection.class);
  allErrs = mock(Counters.Counter.class);
  when(r.getCounter(anyString(), anyString())).thenReturn(allErrs);
  ArrayList<TaskAttemptID> maps = new ArrayList<TaskAttemptID>(1);
  maps.add(map1ID);
  maps.add(map2ID);
  when(ss.getMapsForHost(host)).thenReturn(maps);
}
Project: hadoop
File: TestPipeApplication.java
public Counters.Counter getCounter(String group, String name) {
  Counters.Counter counter = null;
  if (counters != null) {
    counter = counters.findCounter(group, name);
    if (counter == null) {
      Group grp = counters.addGroup(group, group);
      counter = grp.addCounter(name, name, 10);
    }
  }
  return counter;
}
Project: hadoop
File: OutputHandler.java
public void incrementCounter(int id, long amount) throws IOException {
  if (id < registeredCounters.size()) {
    Counters.Counter counter = registeredCounters.get(id);
    counter.increment(amount);
  } else {
    throw new IOException("Invalid counter with id: " + id);
  }
}
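registeredCounters is presumably populated by a companion registration call from the pipes client before increments arrive by id. A minimal sketch of what that registration might look like, assuming registeredCounters is a Map<Integer, Counters.Counter> and a Reporter is in scope (hypothetical, not taken from this file):

// Hypothetical companion method: maps the client-chosen id to a live counter.
public void registerCounter(int id, String group, String name) throws IOException {
  Counters.Counter counter = reporter.getCounter(group, name);
  registeredCounters.put(id, counter);
}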
Project: hadoop
File: MergeManagerImpl.java
private void combineAndSpill(
    RawKeyValueIterator kvIter,
    Counters.Counter inCounter) throws IOException {
  JobConf job = jobConf;
  Reducer combiner = ReflectionUtils.newInstance(combinerClass, job);
  Class<K> keyClass = (Class<K>) job.getMapOutputKeyClass();
  Class<V> valClass = (Class<V>) job.getMapOutputValueClass();
  RawComparator<K> comparator =
      (RawComparator<K>) job.getCombinerKeyGroupingComparator();
  try {
    CombineValuesIterator values = new CombineValuesIterator(
        kvIter, comparator, keyClass, valClass, job, Reporter.NULL,
        inCounter);
    while (values.more()) {
      combiner.reduce(values.getKey(), values, combineCollector,
          Reporter.NULL);
      values.nextKey();
    }
  } finally {
    combiner.close();
  }
}
Project: hadoop
File: ShuffleSchedulerImpl.java
public ShuffleSchedulerImpl(JobConf job, TaskStatus status,
                            TaskAttemptID reduceId,
                            ExceptionReporter reporter,
                            Progress progress,
                            Counters.Counter shuffledMapsCounter,
                            Counters.Counter reduceShuffleBytes,
                            Counters.Counter failedShuffleCounter) {
  totalMaps = job.getNumMapTasks();
  abortFailureLimit = Math.max(30, totalMaps / 10);
  copyTimeTracker = new CopyTimeTracker();
  remainingMaps = totalMaps;
  finishedMaps = new boolean[remainingMaps];
  this.reporter = reporter;
  this.status = status;
  this.reduceId = reduceId;
  this.progress = progress;
  this.shuffledMapsCounter = shuffledMapsCounter;
  this.reduceShuffleBytes = reduceShuffleBytes;
  this.failedShuffleCounter = failedShuffleCounter;
  this.startTime = Time.monotonicNow();
  lastProgressTime = startTime;
  referee.start();
  this.maxFailedUniqueFetches = Math.min(totalMaps, 5);
  this.maxFetchFailuresBeforeReporting = job.getInt(
      MRJobConfig.SHUFFLE_FETCH_FAILURES, REPORT_FAILURE_LIMIT);
  this.reportReadErrorImmediately = job.getBoolean(
      MRJobConfig.SHUFFLE_NOTIFY_READERROR, true);
  this.maxDelay = job.getLong(MRJobConfig.MAX_SHUFFLE_FETCH_RETRY_DELAY,
      MRJobConfig.DEFAULT_MAX_SHUFFLE_FETCH_RETRY_DELAY);
  this.maxHostFailures = job.getInt(
      MRJobConfig.MAX_SHUFFLE_FETCH_HOST_FAILURES,
      MRJobConfig.DEFAULT_MAX_SHUFFLE_FETCH_HOST_FAILURES);
}
Project: hadoop
File: TestStreamingCounters.java
private void validateCounters() throws IOException {
  Counters counters = job.running_.getCounters();
  assertNotNull("Counters", counters);
  Group group = counters.getGroup("UserCounters");
  assertNotNull("Group", group);
  Counter counter = group.getCounterForName("InputLines");
  assertNotNull("Counter", counter);
  assertEquals(3, counter.getCounter());
}
Project: hadoop
File: TestStreamingCombiner.java
@Test
public void testCommandLine() throws Exception {
  super.testCommandLine();
  // validate combiner counters
  String counterGrp = "org.apache.hadoop.mapred.Task$Counter";
  Counters counters = job.running_.getCounters();
  assertTrue(counters.findCounter(
      counterGrp, "COMBINE_INPUT_RECORDS").getValue() != 0);
  assertTrue(counters.findCounter(
      counterGrp, "COMBINE_OUTPUT_RECORDS").getValue() != 0);
}
Project: aliyun-oss-hadoop-fs
File: TestPipeApplication.java
public Counters.Counter getCounter(String group, String name) {
  Counters.Counter counter = null;
  if (counters != null) {
    counter = counters.findCounter(group, name);
    if (counter == null) {
      Group grp = counters.addGroup(group, group);
      counter = grp.addCounter(name, name, 10);
    }
  }
  return counter;
}
Project: aliyun-oss-hadoop-fs
File: OutputHandler.java
public void incrementCounter(int id, long amount) throws IOException {
  if (id < registeredCounters.size()) {
    Counters.Counter counter = registeredCounters.get(id);
    counter.increment(amount);
  } else {
    throw new IOException("Invalid counter with id: " + id);
  }
}
Project: aliyun-oss-hadoop-fs
File: TaskCheckpointID.java
public TaskCheckpointID(FSCheckpointID rawId, List<Path> partialOutput,
                        Counters counters) {
  this.rawId = rawId;
  this.counters = counters;
  this.partialOutput = null == partialOutput
      ? new ArrayList<Path>()
      : partialOutput;
}
Project: aliyun-oss-hadoop-fs
File: ShuffleSchedulerImpl.java
public ShuffleSchedulerImpl(JobConf job, TaskStatus status,
                            TaskAttemptID reduceId,
                            ExceptionReporter reporter,
                            Progress progress,
                            Counters.Counter shuffledMapsCounter,
                            Counters.Counter reduceShuffleBytes,
                            Counters.Counter failedShuffleCounter) {
  totalMaps = job.getNumMapTasks();
  abortFailureLimit = Math.max(30, totalMaps / 10);
  copyTimeTracker = new CopyTimeTracker();
  remainingMaps = totalMaps;
  finishedMaps = new boolean[remainingMaps];
  this.reporter = reporter;
  this.status = status;
  this.reduceId = reduceId;
  this.progress = progress;
  this.shuffledMapsCounter = shuffledMapsCounter;
  this.reduceShuffleBytes = reduceShuffleBytes;
  this.failedShuffleCounter = failedShuffleCounter;
  this.startTime = Time.monotonicNow();
  lastProgressTime = startTime;
  referee.start();
  this.maxFailedUniqueFetches = Math.min(totalMaps, 5);
  this.maxFetchFailuresBeforeReporting = job.getInt(
      MRJobConfig.SHUFFLE_FETCH_FAILURES, REPORT_FAILURE_LIMIT);
  this.reportReadErrorImmediately = job.getBoolean(
      MRJobConfig.SHUFFLE_NOTIFY_READERROR, true);
  this.maxDelay = job.getLong(MRJobConfig.MAX_SHUFFLE_FETCH_RETRY_DELAY,
      MRJobConfig.DEFAULT_MAX_SHUFFLE_FETCH_RETRY_DELAY);
  this.maxHostFailures = job.getInt(
      MRJobConfig.MAX_SHUFFLE_FETCH_HOST_FAILURES,
      MRJobConfig.DEFAULT_MAX_SHUFFLE_FETCH_HOST_FAILURES);
}
Project: aliyun-oss-hadoop-fs
File: TestStreamingCounters.java
private void validateCounters() throws IOException {
  Counters counters = job.running_.getCounters();
  assertNotNull("Counters", counters);
  Group group = counters.getGroup("UserCounters");
  assertNotNull("Group", group);
  Counter counter = group.getCounterForName("InputLines");
  assertNotNull("Counter", counter);
  assertEquals(3, counter.getCounter());
}
Project: aliyun-oss-hadoop-fs
File: TestStreamingCombiner.java
@Test
public void testCommandLine() throws Exception {
  super.testCommandLine();
  // validate combiner counters
  String counterGrp = "org.apache.hadoop.mapred.Task$Counter";
  Counters counters = job.running_.getCounters();
  assertTrue(counters.findCounter(
      counterGrp, "COMBINE_INPUT_RECORDS").getValue() != 0);
  assertTrue(counters.findCounter(
      counterGrp, "COMBINE_OUTPUT_RECORDS").getValue() != 0);
}
Project: big-c
File: TestPipeApplication.java
public Counters.Counter getCounter(String group, String name) {
  Counters.Counter counter = null;
  if (counters != null) {
    counter = counters.findCounter(group, name);
    if (counter == null) {
      Group grp = counters.addGroup(group, group);
      counter = grp.addCounter(name, name, 10);
    }
  }
  return counter;
}
Project: big-c
File: OutputHandler.java
public void incrementCounter(int id, long amount) throws IOException {
  if (id < registeredCounters.size()) {
    Counters.Counter counter = registeredCounters.get(id);
    counter.increment(amount);
  } else {
    throw new IOException("Invalid counter with id: " + id);
  }
}
Project: big-c
File: ShuffleSchedulerImpl.java
public ShuffleSchedulerImpl(JobConf job, TaskStatus status,
                            TaskAttemptID reduceId,
                            ExceptionReporter reporter,
                            Progress progress,
                            Counters.Counter shuffledMapsCounter,
                            Counters.Counter reduceShuffleBytes,
                            Counters.Counter failedShuffleCounter) {
  totalMaps = job.getNumMapTasks();
  abortFailureLimit = Math.max(30, totalMaps / 10);
  copyTimeTracker = new CopyTimeTracker();
  remainingMaps = totalMaps;
  finishedMaps = new boolean[remainingMaps];
  this.reporter = reporter;
  this.status = status;
  this.reduceId = reduceId;
  this.progress = progress;
  this.shuffledMapsCounter = shuffledMapsCounter;
  this.reduceShuffleBytes = reduceShuffleBytes;
  this.failedShuffleCounter = failedShuffleCounter;
  this.startTime = Time.monotonicNow();
  lastProgressTime = startTime;
  referee.start();
  this.maxFailedUniqueFetches = Math.min(totalMaps, 5);
  this.maxFetchFailuresBeforeReporting = job.getInt(
      MRJobConfig.SHUFFLE_FETCH_FAILURES, REPORT_FAILURE_LIMIT);
  this.reportReadErrorImmediately = job.getBoolean(
      MRJobConfig.SHUFFLE_NOTIFY_READERROR, true);
  this.maxDelay = job.getLong(MRJobConfig.MAX_SHUFFLE_FETCH_RETRY_DELAY,
      MRJobConfig.DEFAULT_MAX_SHUFFLE_FETCH_RETRY_DELAY);
  this.maxHostFailures = job.getInt(
      MRJobConfig.MAX_SHUFFLE_FETCH_HOST_FAILURES,
      MRJobConfig.DEFAULT_MAX_SHUFFLE_FETCH_HOST_FAILURES);
}
Project: big-c
File: TestStreamingCounters.java
private void validateCounters() throws IOException {
  Counters counters = job.running_.getCounters();
  assertNotNull("Counters", counters);
  Group group = counters.getGroup("UserCounters");
  assertNotNull("Group", group);
  Counter counter = group.getCounterForName("InputLines");
  assertNotNull("Counter", counter);
  assertEquals(3, counter.getCounter());
}
Project: big-c
File: TestStreamingCombiner.java
@Test
public void testCommandLine() throws Exception {
  super.testCommandLine();
  // validate combiner counters
  String counterGrp = "org.apache.hadoop.mapred.Task$Counter";
  Counters counters = job.running_.getCounters();
  assertTrue(counters.findCounter(
      counterGrp, "COMBINE_INPUT_RECORDS").getValue() != 0);
  assertTrue(counters.findCounter(
      counterGrp, "COMBINE_OUTPUT_RECORDS").getValue() != 0);
}
Project: vespa
File: VespaStorageTest.java
private void assertAllDocumentsOk(String script, Configuration conf) throws Exception {
  PigServer ps = setup(script, conf);
  List<ExecJob> jobs = ps.executeBatch();
  PigStats stats = jobs.get(0).getStatistics();
  for (JobStats js : stats.getJobGraph()) {
    Counters hadoopCounters = ((MRJobStats) js).getHadoopCounters();
    assertNotNull(hadoopCounters);
    VespaCounters counters = VespaCounters.get(hadoopCounters);
    assertEquals(10, counters.getDocumentsSent());
    assertEquals(0, counters.getDocumentsFailed());
    assertEquals(10, counters.getDocumentsOk());
  }
}
Project: reair
File: MapRedStatsLogModuleTest.java
@Test
public void testOneGroupOneCounterToJson() throws SerializationException {
  Counters counters = new Counters();
  counters.incrCounter("SomeCounterGroupName", "SomeCounterName", 3);
  String json = MapRedStatsLogModule.toJson(counters);
  assertEquals(
      "[{\"groupName\":\"SomeCounterGroupName\",\"counters\":[{\"counterNa"
      + "me\":\"SomeCounterName\",\"value\":3}]}]",
      json);
}
Project: reair
File: MapRedStatsLogModuleTest.java
@Test
public void testOneGroupManyCountersToJson() throws SerializationException {
  Counters counters = new Counters();
  counters.incrCounter("SomeCounterGroupName", "SomeCounterName", 3);
  counters.incrCounter("SomeCounterGroupName", "AnotherCounterName", 4);
  counters.incrCounter("SomeCounterGroupName", "YetAnotherCounterName", 4);
  String json = MapRedStatsLogModule.toJson(counters);
  assertEquals(
      "[{\"groupName\":\"SomeCounterGroupName\",\"counters\":[{\"counterNam"
      + "e\":\"AnotherCounterName\",\"value\":4},{\"counterName\":\"SomeCount"
      + "erName\",\"value\":3},{\"counterName\":\"YetAnotherCounterName\",\"v"
      + "alue\":4}]}]",
      json);
}
Project: reair
File: MapRedStatsLogModuleTest.java
@Test
public void testManyGroupsManyCountersToJson()
    throws SerializationException {
  Counters counters = new Counters();
  counters.incrCounter("SomeCounterGroupName1", "SomeCounterName1", 3);
  counters.incrCounter("SomeCounterGroupName1", "SomeCounterName2", 4);
  counters.incrCounter("SomeCounterGroupName1", "SomeCounterName3", 5);
  counters.incrCounter("SomeCounterGroupName2", "SomeCounterName1", 6);
  counters.incrCounter("SomeCounterGroupName2", "SomeCounterName2", 7);
  counters.incrCounter("SomeCounterGroupName2", "SomeCounterName3", 8);
  counters.incrCounter("SomeCounterGroupName3", "SomeCounterName1", 9);
  counters.incrCounter("SomeCounterGroupName3", "SomeCounterName2", 10);
  counters.incrCounter("SomeCounterGroupName3", "SomeCounterName3", 11);
  String json = MapRedStatsLogModule.toJson(counters);
  assertEquals(
      "[{\"groupName\":\"SomeCounterGroupName1\",\"counters\":[{\"counterN"
      + "ame\":\"SomeCounterName1\",\"value\":3},{\"counterName\":\"SomeCount"
      + "erName2\",\"value\":4},{\"counterName\":\"SomeCounterName3\",\"value"
      + "\":5},{\"counterName\":\"SomeCounterName1\",\"value\":6},{\"counterN"
      + "ame\":\"SomeCounterName2\",\"value\":7},{\"counterName\":\"SomeCount"
      + "erName3\",\"value\":8},{\"counterName\":\"SomeCounterName1\",\"value"
      + "\":9},{\"counterName\":\"SomeCounterName2\",\"value\":10},{\"counter"
      + "Name\":\"SomeCounterName3\",\"value\":11}]},{\"groupName\":\"SomeCou"
      + "nterGroupName2\",\"counters\":[{\"counterName\":\"SomeCounterName1\""
      + ",\"value\":3},{\"counterName\":\"SomeCounterName2\",\"value\":4},{\""
      + "counterName\":\"SomeCounterName3\",\"value\":5},{\"counterName\":\"S"
      + "omeCounterName1\",\"value\":6},{\"counterName\":\"SomeCounterName2\""
      + ",\"value\":7},{\"counterName\":\"SomeCounterName3\",\"value\":8},{\""
      + "counterName\":\"SomeCounterName1\",\"value\":9},{\"counterName\":\"S"
      + "omeCounterName2\",\"value\":10},{\"counterName\":\"SomeCounterName3"
      + "\",\"value\":11}]},{\"groupName\":\"SomeCounterGroupName3\",\"counte"
      + "rs\":[{\"counterName\":\"SomeCounterName1\",\"value\":3},{\"counterN"
      + "ame\":\"SomeCounterName2\",\"value\":4},{\"counterName\":\"SomeCount"
      + "erName3\",\"value\":5},{\"counterName\":\"SomeCounterName1\",\"value"
      + "\":6},{\"counterName\":\"SomeCounterName2\",\"value\":7},{\"counterN"
      + "ame\":\"SomeCounterName3\",\"value\":8},{\"counterName\":\"SomeCount"
      + "erName1\",\"value\":9},{\"counterName\":\"SomeCounterName2\",\"value"
      + "\":10},{\"counterName\":\"SomeCounterName3\",\"value\":11}]}]",
      json);
}
Project: hadoop-2.6.0-cdh5.4.3
File: OutputHandler.java
public void incrementCounter(int id, long amount) throws IOException {
  if (id < registeredCounters.size()) {
    Counters.Counter counter = registeredCounters.get(id);
    counter.increment(amount);
  } else {
    throw new IOException("Invalid counter with id: " + id);
  }
}
Project: hops
File: OutputHandler.java
public void incrementCounter(int id, long amount) throws IOException {
  if (id < registeredCounters.size()) {
    Counters.Counter counter = registeredCounters.get(id);
    counter.increment(amount);
  } else {
    throw new IOException("Invalid counter with id: " + id);
  }
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestStreamingCounters.java
private void validateCounters() throws IOException {
  Counters counters = job.running_.getCounters();
  assertNotNull("Counters", counters);
  Group group = counters.getGroup("UserCounters");
  assertNotNull("Group", group);
  Counter counter = group.getCounterForName("InputLines");
  assertNotNull("Counter", counter);
  assertEquals(3, counter.getCounter());
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestStreamingCombiner.java
@Test
public void testCommandLine() throws Exception {
  super.testCommandLine();
  // validate combiner counters
  String counterGrp = "org.apache.hadoop.mapred.Task$Counter";
  Counters counters = job.running_.getCounters();
  assertTrue(counters.findCounter(
      counterGrp, "COMBINE_INPUT_RECORDS").getValue() != 0);
  assertTrue(counters.findCounter(
      counterGrp, "COMBINE_OUTPUT_RECORDS").getValue() != 0);
}
Project: hadoop-2.6.0-cdh5.4.3
File: HadoopJob.java
/**
 * Print this job's counters (for debugging purposes)
 */
void printCounters() {
  System.out.printf("New Job:\n");
  for (String groupName : counters.getGroupNames()) {
    Counters.Group group = counters.getGroup(groupName);
    System.out.printf("\t%s[%s]\n", groupName, group.getDisplayName());
    for (Counters.Counter counter : group) {
      System.out.printf("\t\t%s: %s\n", counter.getDisplayName(),
          counter.getCounter());
    }
  }
  System.out.printf("\n");
}