Java Class org.apache.hadoop.mapred.TaskReport Example Source Code
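The snippets below are collected from open-source projects that use this class. As a quick orientation before the project entries, here is a minimal sketch of the typical client-side usage; the class name TaskReportExample and method printMapTaskStates are illustrative, while JobClient, getMapTaskReports, and the TaskReport getters are the standard mapred API:

import java.io.IOException;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.TaskReport;

public class TaskReportExample {
  public static void printMapTaskStates(JobConf conf, JobID jobId) throws IOException {
    JobClient client = new JobClient(conf);
    // One TaskReport per map task of the job
    for (TaskReport report : client.getMapTaskReports(jobId)) {
      // getState() returns a status string like the ones asserted in the tests below
      System.out.println(report.getTaskID() + ": " + report.getState()
          + " (" + (report.getFinishTime() - report.getStartTime()) + " ms)");
    }
  }
}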
Project: hadoop
File: TestStreamingStatus.java
void validateTaskStatus(StreamJob job, TaskType type) throws IOException {
  // Map Task has 2 phases: map, sort
  // Reduce Task has 3 phases: copy, sort, reduce
  String finalPhaseInTask;
  TaskReport[] reports;
  if (type == TaskType.MAP) {
    reports = job.jc_.getMapTaskReports(job.jobId_);
    finalPhaseInTask = "sort";
  } else { // reduce task
    reports = job.jc_.getReduceTaskReports(job.jobId_);
    finalPhaseInTask = "reduce";
  }
  assertEquals(1, reports.length);
  assertEquals(expectedStatus + " > " + finalPhaseInTask,
      reports[0].getState());
}
Project: aliyun-oss-hadoop-fs
File: TestStreamingStatus.java
void validateTaskStatus(StreamJob job, TaskType type) throws IOException {
  // Map Task has 2 phases: map, sort
  // Reduce Task has 3 phases: copy, sort, reduce
  String finalPhaseInTask;
  TaskReport[] reports;
  if (type == TaskType.MAP) {
    reports = job.jc_.getMapTaskReports(job.jobId_);
    finalPhaseInTask = "sort";
  } else { // reduce task
    reports = job.jc_.getReduceTaskReports(job.jobId_);
    finalPhaseInTask = "reduce";
  }
  assertEquals(1, reports.length);
  assertEquals(expectedStatus + " > " + finalPhaseInTask,
      reports[0].getState());
}
Project: big-c
File: TestStreamingStatus.java
void validateTaskStatus(StreamJob job, TaskType type) throws IOException {
  // Map Task has 2 phases: map, sort
  // Reduce Task has 3 phases: copy, sort, reduce
  String finalPhaseInTask;
  TaskReport[] reports;
  if (type == TaskType.MAP) {
    reports = job.jc_.getMapTaskReports(job.jobId_);
    finalPhaseInTask = "sort";
  } else { // reduce task
    reports = job.jc_.getReduceTaskReports(job.jobId_);
    finalPhaseInTask = "reduce";
  }
  assertEquals(1, reports.length);
  assertEquals(expectedStatus + " > " + finalPhaseInTask,
      reports[0].getState());
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestStreamingStatus.java
void validateTaskStatus(StreamJob job, TaskType type) throws IOException {
  // Map Task has 2 phases: map, sort
  // Reduce Task has 3 phases: copy, sort, reduce
  String finalPhaseInTask;
  TaskReport[] reports;
  if (type == TaskType.MAP) {
    reports = job.jc_.getMapTaskReports(job.jobId_);
    finalPhaseInTask = "sort";
  } else { // reduce task
    reports = job.jc_.getReduceTaskReports(job.jobId_);
    finalPhaseInTask = "reduce";
  }
  assertEquals(1, reports.length);
  assertEquals(expectedStatus + " > " + finalPhaseInTask,
      reports[0].getState());
}
Project: hadoop-plus
File: TestStreamingStatus.java
void validateTaskStatus(StreamJob job, TaskType type) throws IOException {
  // Map Task has 2 phases: map, sort
  // Reduce Task has 3 phases: copy, sort, reduce
  String finalPhaseInTask;
  TaskReport[] reports;
  if (type == TaskType.MAP) {
    reports = job.jc_.getMapTaskReports(job.jobId_);
    finalPhaseInTask = "sort";
  } else { // reduce task
    reports = job.jc_.getReduceTaskReports(job.jobId_);
    finalPhaseInTask = "reduce";
  }
  assertEquals(1, reports.length);
  assertEquals(expectedStatus + " > " + finalPhaseInTask,
      reports[0].getState());
}
Project: hops
File: TestStreamingStatus.java
void validateTaskStatus(StreamJob job, TaskType type) throws IOException {
  // Map Task has 2 phases: map, sort
  // Reduce Task has 3 phases: copy, sort, reduce
  String finalPhaseInTask;
  TaskReport[] reports;
  if (type == TaskType.MAP) {
    reports = job.jc_.getMapTaskReports(job.jobId_);
    finalPhaseInTask = "sort";
  } else { // reduce task
    reports = job.jc_.getReduceTaskReports(job.jobId_);
    finalPhaseInTask = "reduce";
  }
  assertEquals(1, reports.length);
  assertEquals(expectedStatus + " > " + finalPhaseInTask,
      reports[0].getState());
}
Project: hadoop-TCP
File: TestStreamingStatus.java
void validateTaskStatus(StreamJob job, TaskType type) throws IOException {
  // Map Task has 2 phases: map, sort
  // Reduce Task has 3 phases: copy, sort, reduce
  String finalPhaseInTask;
  TaskReport[] reports;
  if (type == TaskType.MAP) {
    reports = job.jc_.getMapTaskReports(job.jobId_);
    finalPhaseInTask = "sort";
  } else { // reduce task
    reports = job.jc_.getReduceTaskReports(job.jobId_);
    finalPhaseInTask = "reduce";
  }
  assertEquals(1, reports.length);
  assertEquals(expectedStatus + " > " + finalPhaseInTask,
      reports[0].getState());
}
Project: spork-streaming
File: TestJobStats.java
@BeforeClass
public static void oneTimeSetup() throws Exception {
    // setting up TaskReport for map tasks
    for (int i = 0; i < mapTaskReports.length; i++) {
        mapTaskReports[i] = Mockito.mock(TaskReport.class);
        Mockito.when(mapTaskReports[i].getStartTime()).thenReturn(MAP_START_FINISH_TIME_DATA[i][0] * ONE_THOUSAND);
        Mockito.when(mapTaskReports[i].getFinishTime()).thenReturn(MAP_START_FINISH_TIME_DATA[i][1] * ONE_THOUSAND);
    }
    // setting up TaskReport for reduce tasks
    for (int i = 0; i < reduceTaskReports.length; i++) {
        reduceTaskReports[i] = Mockito.mock(TaskReport.class);
        Mockito.when(reduceTaskReports[i].getStartTime()).thenReturn(REDUCE_START_FINISH_TIME_DATA[i][0] * ONE_THOUSAND);
        Mockito.when(reduceTaskReports[i].getFinishTime()).thenReturn(REDUCE_START_FINISH_TIME_DATA[i][1] * ONE_THOUSAND);
    }
    StringBuilder sb = new StringBuilder();
    sb.append(jobID.toString()).append("\t");
    sb.append(mapTaskReports.length).append("\t");
    sb.append(reduceTaskReports.length).append("\t");
    sb.append("500\t100\t300\t300\t500\t100\t240\t200");
    ASSERT_STRING = sb.toString();
}
Project: spork
File: MRJobStats.java
private TaskStat getTaskStat(Iterator<TaskReport> tasks) {
    int size = 0;
    long max = 0;
    long min = Long.MAX_VALUE;
    long median = 0;
    long total = 0;
    List<Long> durations = new ArrayList<Long>();
    while (tasks.hasNext()) {
        TaskReport rpt = tasks.next();
        long duration = rpt.getFinishTime() - rpt.getStartTime();
        durations.add(duration);
        max = (duration > max) ? duration : max;
        min = (duration < min) ? duration : min;
        total += duration;
        size++;
    }
    long avg = total / size;
    median = calculateMedianValue(durations);
    return new TaskStat(size, max, min, avg, median);
}
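Note that total / size assumes at least one task report, and calculateMedianValue is not shown in this snippet. A minimal sketch of such a helper, under the assumption that it simply takes the median of the collected durations (the actual Pig implementation may differ):

// Hypothetical helper, not the actual Pig code: median of the task durations.
// Sorts the list in place; uses java.util.Collections and java.util.List.
private static long calculateMedianValue(List<Long> durations) {
    Collections.sort(durations);
    int middle = durations.size() / 2;
    return (durations.size() % 2 == 1)
        ? durations.get(middle)                                      // odd count: middle element
        : (durations.get(middle - 1) + durations.get(middle)) / 2;   // even count: mean of the two middle elements
}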
Project: spork
File: TestMRJobStats.java
@BeforeClass
public static void oneTimeSetup() throws Exception {
    // setting up TaskReport for map tasks
    for (int i = 0; i < mapTaskReports.length; i++) {
        mapTaskReports[i] = Mockito.mock(TaskReport.class);
        Mockito.when(mapTaskReports[i].getStartTime()).thenReturn(MAP_START_FINISH_TIME_DATA[i][0] * ONE_THOUSAND);
        Mockito.when(mapTaskReports[i].getFinishTime()).thenReturn(MAP_START_FINISH_TIME_DATA[i][1] * ONE_THOUSAND);
    }
    // setting up TaskReport for reduce tasks
    for (int i = 0; i < reduceTaskReports.length; i++) {
        reduceTaskReports[i] = Mockito.mock(TaskReport.class);
        Mockito.when(reduceTaskReports[i].getStartTime()).thenReturn(REDUCE_START_FINISH_TIME_DATA[i][0] * ONE_THOUSAND);
        Mockito.when(reduceTaskReports[i].getFinishTime()).thenReturn(REDUCE_START_FINISH_TIME_DATA[i][1] * ONE_THOUSAND);
    }
    StringBuilder sb = new StringBuilder();
    sb.append(jobID.toString()).append("\t");
    sb.append(mapTaskReports.length).append("\t");
    sb.append(reduceTaskReports.length).append("\t");
    sb.append("500\t100\t300\t300\t500\t100\t240\t200");
    ASSERT_STRING = sb.toString();
}
Project: spork
File: HadoopShims.java
public static Iterator<TaskReport> getTaskReports(Job job, TaskType type) throws IOException {
    if (job.getJobConf().getBoolean(PigConfiguration.PIG_NO_TASK_REPORT, false)) {
        LOG.info("TaskReports are disabled for job: " + job.getAssignedJobID());
        return null;
    }
    Cluster cluster = new Cluster(job.getJobConf());
    try {
        org.apache.hadoop.mapreduce.Job mrJob = cluster.getJob(job.getAssignedJobID());
        if (mrJob == null) { // In local mode, mrJob will be null
            mrJob = job.getJob();
        }
        org.apache.hadoop.mapreduce.TaskReport[] reports = mrJob.getTaskReports(type);
        return DowngradeHelper.downgradeTaskReports(reports);
    } catch (InterruptedException ir) {
        throw new IOException(ir);
    }
}
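This shim returns null when task reports are disabled via PIG_NO_TASK_REPORT, so callers have to guard against that. A hedged usage sketch, assuming job and type come from the surrounding Pig launcher context (it mirrors the computeTimeSpent helper shown further down):

// Sketch only: aggregate wall-clock time across the returned reports
Iterator<TaskReport> it = HadoopShims.getTaskReports(job, TaskType.MAP);
long timeSpent = 0;
if (it != null) { // null means reports were disabled or unavailable
    while (it.hasNext()) {
        TaskReport r = it.next();
        timeSpent += r.getFinishTime() - r.getStartTime();
    }
}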
Project: hardfs
File: TestStreamingStatus.java
void validateTaskStatus(StreamJob job, TaskType type) throws IOException {
  // Map Task has 2 phases: map, sort
  // Reduce Task has 3 phases: copy, sort, reduce
  String finalPhaseInTask;
  TaskReport[] reports;
  if (type == TaskType.MAP) {
    reports = job.jc_.getMapTaskReports(job.jobId_);
    finalPhaseInTask = "sort";
  } else { // reduce task
    reports = job.jc_.getReduceTaskReports(job.jobId_);
    finalPhaseInTask = "reduce";
  }
  assertEquals(1, reports.length);
  assertEquals(expectedStatus + " > " + finalPhaseInTask,
      reports[0].getState());
}
Project: hadoop-on-lustre2
File: TestStreamingStatus.java
void validateTaskStatus(StreamJob job, TaskType type) throws IOException {
  // Map Task has 2 phases: map, sort
  // Reduce Task has 3 phases: copy, sort, reduce
  String finalPhaseInTask;
  TaskReport[] reports;
  if (type == TaskType.MAP) {
    reports = job.jc_.getMapTaskReports(job.jobId_);
    finalPhaseInTask = "sort";
  } else { // reduce task
    reports = job.jc_.getReduceTaskReports(job.jobId_);
    finalPhaseInTask = "reduce";
  }
  assertEquals(1, reports.length);
  assertEquals(expectedStatus + " > " + finalPhaseInTask,
      reports[0].getState());
}
Project: azkaban-plugins
File: AbstractHadoopJob.java
private void updateMapReduceJobState(JobConf jobConf) {
  if (runningJob == null || visualizer == false) {
    return;
  }
  try {
    JobID jobId = runningJob.getID();
    TaskReport[] mapTaskReport = jobClient.getMapTaskReports(jobId);
    TaskReport[] reduceTaskReport = jobClient.getReduceTaskReports(jobId);
    mapReduceJobState =
        new MapReduceJobState(runningJob, mapTaskReport, reduceTaskReport);
    writeMapReduceJobState(jobConf);
  } catch (IOException e) {
    logger.error("Cannot update MapReduceJobState");
  }
}
Project: hanoi-hadoop-2.0.0-cdh
File: TestStreamingStatus.java
void validateTaskStatus(StreamJob job, TaskType type) throws IOException {
  // Map Task has 1 phase: map (note that from 0.21 onwards it has a sort phase too)
  // Reduce Task has 3 phases: copy, sort, reduce
  String finalPhaseInTask = null;
  TaskReport[] reports;
  if (type == TaskType.MAP) {
    reports = job.jc_.getMapTaskReports(job.jobId_);
  } else { // reduce task
    reports = job.jc_.getReduceTaskReports(job.jobId_);
    finalPhaseInTask = "reduce";
  }
  assertEquals(1, reports.length);
  assertEquals(expectedStatus +
      (finalPhaseInTask == null ? "" : " > " + finalPhaseInTask),
      reports[0].getState());
}
Project: mapreduce-fork
File: TestStreamingStatus.java
void validateTaskStatus(StreamJob job, TaskType type) throws IOException {
  // Map Task has 2 phases: map, sort
  // Reduce Task has 3 phases: copy, sort, reduce
  String finalPhaseInTask;
  TaskReport[] reports;
  if (type == TaskType.MAP) {
    reports = job.jc_.getMapTaskReports(job.jobId_);
    finalPhaseInTask = "sort";
  } else { // reduce task
    reports = job.jc_.getReduceTaskReports(job.jobId_);
    finalPhaseInTask = "reduce";
  }
  assertEquals(1, reports.length);
  assertEquals(expectedStatus + " > " + finalPhaseInTask,
      reports[0].getState());
}
Project: spork-streaming
File: Launcher.java
protected long computeTimeSpent(TaskReport[] taskReports) {
    long timeSpent = 0;
    for (TaskReport r : taskReports) {
        timeSpent += (r.getFinishTime() - r.getStartTime());
    }
    return timeSpent;
}
Project: spork
File: Launcher.java
protected long computeTimeSpent(Iterator<TaskReport> taskReports) {
    long timeSpent = 0;
    while (taskReports.hasNext()) {
        TaskReport r = taskReports.next();
        timeSpent += (r.getFinishTime() - r.getStartTime());
    }
    return timeSpent;
}
Project: spork
File: TestMRJobStats.java
@Test
public void testOneTaskReport() throws Exception {
    // setting up one map task report
    TaskReport[] mapTaskReports = new TaskReport[1];
    mapTaskReports[0] = Mockito.mock(TaskReport.class);
    Mockito.when(mapTaskReports[0].getStartTime()).thenReturn(300L * ONE_THOUSAND);
    Mockito.when(mapTaskReports[0].getFinishTime()).thenReturn(400L * ONE_THOUSAND);
    // setting up one reduce task report
    TaskReport[] reduceTaskReports = new TaskReport[1];
    reduceTaskReports[0] = Mockito.mock(TaskReport.class);
    Mockito.when(reduceTaskReports[0].getStartTime()).thenReturn(500L * ONE_THOUSAND);
    Mockito.when(reduceTaskReports[0].getFinishTime()).thenReturn(700L * ONE_THOUSAND);
    PigStats.JobGraph jobGraph = new PigStats.JobGraph();
    MRJobStats jobStats = createJobStats("JobStatsTest", jobGraph);
    getJobStatsMethod("setId", JobID.class).invoke(jobStats, jobID);
    jobStats.setSuccessful(true);
    getJobStatsMethod("addMapReduceStatistics", Iterator.class, Iterator.class)
        .invoke(jobStats, Arrays.asList(mapTaskReports).iterator(), Arrays.asList(reduceTaskReports).iterator());
    String msg = (String) getJobStatsMethod("getDisplayString")
        .invoke(jobStats);
    System.out.println(JobStats.SUCCESS_HEADER);
    System.out.println(msg);
    StringBuilder sb = new StringBuilder();
    sb.append(jobID.toString()).append("\t");
    sb.append(mapTaskReports.length).append("\t");
    sb.append(reduceTaskReports.length).append("\t");
    sb.append("100\t100\t100\t100\t200\t200\t200\t200");
    System.out.println("assert msg: " + sb.toString());
    assertTrue(msg.startsWith(sb.toString()));
}
Project: spork
File: HadoopShims.java
public static boolean isJobFailed(TaskReport report) {
    float successfulProgress = 1.0f;
    // if the progress reported is not 1.0f then the map or reduce
    // job failed
    // this comparison is in place for the backward compatibility
    // for Hadoop 0.20
    return report.getProgress() != successfulProgress;
}
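Comparing floats for exact equality like this is fragile; as the comment says, it is kept for Hadoop 0.20 compatibility. A later version of the same shim, shown further down, checks report.getCurrentStatus() == TIPStatus.FAILED instead.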
Project: spork
File: HadoopShims.java
public static Iterator<TaskReport> getTaskReports(Job job, TaskType type) throws IOException {
    if (job.getJobConf().getBoolean(PigConfiguration.PIG_NO_TASK_REPORT, false)) {
        LOG.info("TaskReports are disabled for job: " + job.getAssignedJobID());
        return null;
    }
    JobClient jobClient = job.getJobClient();
    TaskReport[] reports = null;
    if (type == TaskType.MAP) {
        reports = jobClient.getMapTaskReports(job.getAssignedJobID());
    } else {
        reports = jobClient.getReduceTaskReports(job.getAssignedJobID());
    }
    return reports == null ? null : Arrays.asList(reports).iterator();
}
Project: azkaban-plugins
File: AzkabanPigListener.java
@SuppressWarnings("deprecation")
private void addMapReduceJobState(PigJobDagNode node) {
  JobClient jobClient = PigStats.get().getJobClient();
  try {
    RunningJob runningJob = jobClient.getJob(node.getJobId());
    if (runningJob == null) {
      logger.warn("Couldn't find job status for jobId=" + node.getJobId());
      return;
    }
    JobID jobID = runningJob.getID();
    TaskReport[] mapTaskReport = jobClient.getMapTaskReports(jobID);
    TaskReport[] reduceTaskReport = jobClient.getReduceTaskReports(jobID);
    node.setMapReduceJobState(new MapReduceJobState(runningJob,
        mapTaskReport, reduceTaskReport));
    if (node.getJobConfiguration() == null) {
      Properties jobConfProperties = StatsUtils.getJobConf(runningJob);
      if (jobConfProperties != null && jobConfProperties.size() > 0) {
        node.setJobConfiguration(jobConfProperties);
      }
    }
  } catch (IOException e) {
    logger.error("Error getting job info.", e);
  }
}
Project: PonIC
File: Launcher.java
protected long computeTimeSpent(TaskReport[] mapReports) {
    long timeSpent = 0;
    for (TaskReport r : mapReports) {
        timeSpent += (r.getFinishTime() - r.getStartTime());
    }
    return timeSpent;
}
Project: sedge
File: Launcher.java
protected long computeTimeSpent(TaskReport[] mapReports) {
    long timeSpent = 0;
    for (TaskReport r : mapReports) {
        timeSpent += (r.getFinishTime() - r.getStartTime());
    }
    return timeSpent;
}
Project: hadoop
File: TestJobMonitorAndPrint.java
@Test
public void testJobMonitorAndPrint() throws Exception {
  JobStatus jobStatus_1 = new JobStatus(new JobID("job_000", 1), 1f, 0.1f,
      0.1f, 0f, State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  JobStatus jobStatus_2 = new JobStatus(new JobID("job_000", 1), 1f, 1f,
      1f, 1f, State.SUCCEEDED, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  doAnswer(
      new Answer<TaskCompletionEvent[]>() {
        @Override
        public TaskCompletionEvent[] answer(InvocationOnMock invocation)
            throws Throwable {
          return new TaskCompletionEvent[0];
        }
      }
  ).when(job).getTaskCompletionEvents(anyInt(), anyInt());
  doReturn(new TaskReport[5]).when(job).getTaskReports(isA(TaskType.class));
  when(clientProtocol.getJobStatus(any(JobID.class))).thenReturn(jobStatus_1, jobStatus_2);
  // setup the logger to capture all logs
  Layout layout =
      Logger.getRootLogger().getAppender("stdout").getLayout();
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  WriterAppender appender = new WriterAppender(layout, os);
  appender.setThreshold(Level.ALL);
  Logger qlogger = Logger.getLogger(Job.class);
  qlogger.addAppender(appender);
  job.monitorAndPrintJob();
  qlogger.removeAppender(appender);
  LineNumberReader r = new LineNumberReader(new StringReader(os.toString()));
  String line;
  boolean foundHundred = false;
  boolean foundComplete = false;
  boolean foundUber = false;
  String uberModeMatch = "uber mode : true";
  String progressMatch = "map 100% reduce 100%";
  String completionMatch = "completed successfully";
  while ((line = r.readLine()) != null) {
    if (line.contains(uberModeMatch)) {
      foundUber = true;
    }
    foundHundred = line.contains(progressMatch);
    if (foundHundred)
      break;
  }
  line = r.readLine();
  foundComplete = line.contains(completionMatch);
  assertTrue(foundUber);
  assertTrue(foundHundred);
  assertTrue(foundComplete);
  System.out.println("The output of job.toString() is : \n" + job.toString());
  assertTrue(job.toString().contains("Number of maps: 5\n"));
  assertTrue(job.toString().contains("Number of reduces: 5\n"));
}
Project: aliyun-oss-hadoop-fs
File: TestJobMonitorAndPrint.java
@Test
public void testJobMonitorAndPrint() throws Exception {
  JobStatus jobStatus_1 = new JobStatus(new JobID("job_000", 1), 1f, 0.1f,
      0.1f, 0f, State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  JobStatus jobStatus_2 = new JobStatus(new JobID("job_000", 1), 1f, 1f,
      1f, 1f, State.SUCCEEDED, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  doAnswer(
      new Answer<TaskCompletionEvent[]>() {
        @Override
        public TaskCompletionEvent[] answer(InvocationOnMock invocation)
            throws Throwable {
          return new TaskCompletionEvent[0];
        }
      }
  ).when(job).getTaskCompletionEvents(anyInt(), anyInt());
  doReturn(new TaskReport[5]).when(job).getTaskReports(isA(TaskType.class));
  when(clientProtocol.getJobStatus(any(JobID.class))).thenReturn(jobStatus_1, jobStatus_2);
  // setup the logger to capture all logs
  Layout layout =
      Logger.getRootLogger().getAppender("stdout").getLayout();
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  WriterAppender appender = new WriterAppender(layout, os);
  appender.setThreshold(Level.ALL);
  Logger qlogger = Logger.getLogger(Job.class);
  qlogger.addAppender(appender);
  job.monitorAndPrintJob();
  qlogger.removeAppender(appender);
  LineNumberReader r = new LineNumberReader(new StringReader(os.toString()));
  String line;
  boolean foundHundred = false;
  boolean foundComplete = false;
  boolean foundUber = false;
  String uberModeMatch = "uber mode : true";
  String progressMatch = "map 100% reduce 100%";
  String completionMatch = "completed successfully";
  while ((line = r.readLine()) != null) {
    if (line.contains(uberModeMatch)) {
      foundUber = true;
    }
    foundHundred = line.contains(progressMatch);
    if (foundHundred)
      break;
  }
  line = r.readLine();
  foundComplete = line.contains(completionMatch);
  assertTrue(foundUber);
  assertTrue(foundHundred);
  assertTrue(foundComplete);
  System.out.println("The output of job.toString() is : \n" + job.toString());
  assertTrue(job.toString().contains("Number of maps: 5\n"));
  assertTrue(job.toString().contains("Number of reduces: 5\n"));
}
Project: big-c
File: TestJobMonitorAndPrint.java
@Test
public void testJobMonitorAndPrint() throws Exception {
  JobStatus jobStatus_1 = new JobStatus(new JobID("job_000", 1), 1f, 0.1f,
      0.1f, 0f, State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  JobStatus jobStatus_2 = new JobStatus(new JobID("job_000", 1), 1f, 1f,
      1f, 1f, State.SUCCEEDED, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  doAnswer(
      new Answer<TaskCompletionEvent[]>() {
        @Override
        public TaskCompletionEvent[] answer(InvocationOnMock invocation)
            throws Throwable {
          return new TaskCompletionEvent[0];
        }
      }
  ).when(job).getTaskCompletionEvents(anyInt(), anyInt());
  doReturn(new TaskReport[5]).when(job).getTaskReports(isA(TaskType.class));
  when(clientProtocol.getJobStatus(any(JobID.class))).thenReturn(jobStatus_1, jobStatus_2);
  // setup the logger to capture all logs
  Layout layout =
      Logger.getRootLogger().getAppender("stdout").getLayout();
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  WriterAppender appender = new WriterAppender(layout, os);
  appender.setThreshold(Level.ALL);
  Logger qlogger = Logger.getLogger(Job.class);
  qlogger.addAppender(appender);
  job.monitorAndPrintJob();
  qlogger.removeAppender(appender);
  LineNumberReader r = new LineNumberReader(new StringReader(os.toString()));
  String line;
  boolean foundHundred = false;
  boolean foundComplete = false;
  boolean foundUber = false;
  String uberModeMatch = "uber mode : true";
  String progressMatch = "map 100% reduce 100%";
  String completionMatch = "completed successfully";
  while ((line = r.readLine()) != null) {
    if (line.contains(uberModeMatch)) {
      foundUber = true;
    }
    foundHundred = line.contains(progressMatch);
    if (foundHundred)
      break;
  }
  line = r.readLine();
  foundComplete = line.contains(completionMatch);
  assertTrue(foundUber);
  assertTrue(foundHundred);
  assertTrue(foundComplete);
  System.out.println("The output of job.toString() is : \n" + job.toString());
  assertTrue(job.toString().contains("Number of maps: 5\n"));
  assertTrue(job.toString().contains("Number of reduces: 5\n"));
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestJobMonitorAndPrint.java
@Test
public void testJobMonitorAndPrint() throws Exception {
  JobStatus jobStatus_1 = new JobStatus(new JobID("job_000", 1), 1f, 0.1f,
      0.1f, 0f, State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  JobStatus jobStatus_2 = new JobStatus(new JobID("job_000", 1), 1f, 1f,
      1f, 1f, State.SUCCEEDED, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  doAnswer(
      new Answer<TaskCompletionEvent[]>() {
        @Override
        public TaskCompletionEvent[] answer(InvocationOnMock invocation)
            throws Throwable {
          return new TaskCompletionEvent[0];
        }
      }
  ).when(job).getTaskCompletionEvents(anyInt(), anyInt());
  doReturn(new TaskReport[5]).when(job).getTaskReports(isA(TaskType.class));
  when(clientProtocol.getJobStatus(any(JobID.class))).thenReturn(jobStatus_1, jobStatus_2);
  // setup the logger to capture all logs
  Layout layout =
      Logger.getRootLogger().getAppender("stdout").getLayout();
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  WriterAppender appender = new WriterAppender(layout, os);
  appender.setThreshold(Level.ALL);
  Logger qlogger = Logger.getLogger(Job.class);
  qlogger.addAppender(appender);
  job.monitorAndPrintJob();
  qlogger.removeAppender(appender);
  LineNumberReader r = new LineNumberReader(new StringReader(os.toString()));
  String line;
  boolean foundHundred = false;
  boolean foundComplete = false;
  boolean foundUber = false;
  String uberModeMatch = "uber mode : true";
  String progressMatch = "map 100% reduce 100%";
  String completionMatch = "completed successfully";
  while ((line = r.readLine()) != null) {
    if (line.contains(uberModeMatch)) {
      foundUber = true;
    }
    foundHundred = line.contains(progressMatch);
    if (foundHundred)
      break;
  }
  line = r.readLine();
  foundComplete = line.contains(completionMatch);
  assertTrue(foundUber);
  assertTrue(foundHundred);
  assertTrue(foundComplete);
  System.out.println("The output of job.toString() is : \n" + job.toString());
  assertTrue(job.toString().contains("Number of maps: 5\n"));
  assertTrue(job.toString().contains("Number of reduces: 5\n"));
}
Project: hadoop-plus
File: TestJobMonitorAndPrint.java
@Test
public void testJobMonitorAndPrint() throws Exception {
  JobStatus jobStatus_1 = new JobStatus(new JobID("job_000", 1), 1f, 0.1f,
      0.1f, 0f, State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  JobStatus jobStatus_2 = new JobStatus(new JobID("job_000", 1), 1f, 1f,
      1f, 1f, State.SUCCEEDED, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  doAnswer(
      new Answer<TaskCompletionEvent[]>() {
        @Override
        public TaskCompletionEvent[] answer(InvocationOnMock invocation)
            throws Throwable {
          return new TaskCompletionEvent[0];
        }
      }
  ).when(job).getTaskCompletionEvents(anyInt(), anyInt());
  doReturn(new TaskReport[5]).when(job).getTaskReports(isA(TaskType.class));
  when(clientProtocol.getJobStatus(any(JobID.class))).thenReturn(jobStatus_1, jobStatus_2);
  // setup the logger to capture all logs
  Layout layout =
      Logger.getRootLogger().getAppender("stdout").getLayout();
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  WriterAppender appender = new WriterAppender(layout, os);
  appender.setThreshold(Level.ALL);
  Logger qlogger = Logger.getLogger(Job.class);
  qlogger.addAppender(appender);
  job.monitorAndPrintJob();
  qlogger.removeAppender(appender);
  LineNumberReader r = new LineNumberReader(new StringReader(os.toString()));
  String line;
  boolean foundHundred = false;
  boolean foundComplete = false;
  boolean foundUber = false;
  String uberModeMatch = "uber mode : true";
  String progressMatch = "map 100% reduce 100%";
  String completionMatch = "completed successfully";
  while ((line = r.readLine()) != null) {
    if (line.contains(uberModeMatch)) {
      foundUber = true;
    }
    foundHundred = line.contains(progressMatch);
    if (foundHundred)
      break;
  }
  line = r.readLine();
  foundComplete = line.contains(completionMatch);
  assertTrue(foundUber);
  assertTrue(foundHundred);
  assertTrue(foundComplete);
  System.out.println("The output of job.toString() is : \n" + job.toString());
  assertTrue(job.toString().contains("Number of maps: 5\n"));
  assertTrue(job.toString().contains("Number of reduces: 5\n"));
}
Project: FlexMap
File: TestJobMonitorAndPrint.java
@Test
public void testJobMonitorAndPrint() throws Exception {
  JobStatus jobStatus_1 = new JobStatus(new JobID("job_000", 1), 1f, 0.1f,
      0.1f, 0f, State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  JobStatus jobStatus_2 = new JobStatus(new JobID("job_000", 1), 1f, 1f,
      1f, 1f, State.SUCCEEDED, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  doAnswer(
      new Answer<TaskCompletionEvent[]>() {
        @Override
        public TaskCompletionEvent[] answer(InvocationOnMock invocation)
            throws Throwable {
          return new TaskCompletionEvent[0];
        }
      }
  ).when(job).getTaskCompletionEvents(anyInt(), anyInt());
  doReturn(new TaskReport[5]).when(job).getTaskReports(isA(TaskType.class));
  when(clientProtocol.getJobStatus(any(JobID.class))).thenReturn(jobStatus_1, jobStatus_2);
  // setup the logger to capture all logs
  Layout layout =
      Logger.getRootLogger().getAppender("stdout").getLayout();
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  WriterAppender appender = new WriterAppender(layout, os);
  appender.setThreshold(Level.ALL);
  Logger qlogger = Logger.getLogger(Job.class);
  qlogger.addAppender(appender);
  job.monitorAndPrintJob();
  qlogger.removeAppender(appender);
  LineNumberReader r = new LineNumberReader(new StringReader(os.toString()));
  String line;
  boolean foundHundred = false;
  boolean foundComplete = false;
  boolean foundUber = false;
  String uberModeMatch = "uber mode : true";
  String progressMatch = "map 100% reduce 100%";
  String completionMatch = "completed successfully";
  while ((line = r.readLine()) != null) {
    if (line.contains(uberModeMatch)) {
      foundUber = true;
    }
    foundHundred = line.contains(progressMatch);
    if (foundHundred)
      break;
  }
  line = r.readLine();
  foundComplete = line.contains(completionMatch);
  assertTrue(foundUber);
  assertTrue(foundHundred);
  assertTrue(foundComplete);
  System.out.println("The output of job.toString() is : \n" + job.toString());
  assertTrue(job.toString().contains("Number of maps: 5\n"));
  assertTrue(job.toString().contains("Number of reduces: 5\n"));
}
Project: hops
File: TestJobMonitorAndPrint.java
@Test
public void testJobMonitorAndPrint() throws Exception {
  JobStatus jobStatus_1 = new JobStatus(new JobID("job_000", 1), 1f, 0.1f,
      0.1f, 0f, State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  JobStatus jobStatus_2 = new JobStatus(new JobID("job_000", 1), 1f, 1f,
      1f, 1f, State.SUCCEEDED, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  doAnswer(
      new Answer<TaskCompletionEvent[]>() {
        @Override
        public TaskCompletionEvent[] answer(InvocationOnMock invocation)
            throws Throwable {
          return new TaskCompletionEvent[0];
        }
      }
  ).when(job).getTaskCompletionEvents(anyInt(), anyInt());
  doReturn(new TaskReport[5]).when(job).getTaskReports(isA(TaskType.class));
  when(clientProtocol.getJobStatus(any(JobID.class))).thenReturn(jobStatus_1, jobStatus_2);
  // setup the logger to capture all logs
  Layout layout =
      Logger.getRootLogger().getAppender("stdout").getLayout();
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  WriterAppender appender = new WriterAppender(layout, os);
  appender.setThreshold(Level.ALL);
  Logger qlogger = Logger.getLogger(Job.class);
  qlogger.addAppender(appender);
  job.monitorAndPrintJob();
  qlogger.removeAppender(appender);
  LineNumberReader r = new LineNumberReader(new StringReader(os.toString()));
  String line;
  boolean foundHundred = false;
  boolean foundComplete = false;
  boolean foundUber = false;
  String uberModeMatch = "uber mode : true";
  String progressMatch = "map 100% reduce 100%";
  String completionMatch = "completed successfully";
  while ((line = r.readLine()) != null) {
    if (line.contains(uberModeMatch)) {
      foundUber = true;
    }
    foundHundred = line.contains(progressMatch);
    if (foundHundred)
      break;
  }
  line = r.readLine();
  foundComplete = line.contains(completionMatch);
  assertTrue(foundUber);
  assertTrue(foundHundred);
  assertTrue(foundComplete);
  System.out.println("The output of job.toString() is : \n" + job.toString());
  assertTrue(job.toString().contains("Number of maps: 5\n"));
  assertTrue(job.toString().contains("Number of reduces: 5\n"));
}
Project: hadoop-TCP
File: TestJobMonitorAndPrint.java
@Test
public void testJobMonitorAndPrint() throws Exception {
  JobStatus jobStatus_1 = new JobStatus(new JobID("job_000", 1), 1f, 0.1f,
      0.1f, 0f, State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  JobStatus jobStatus_2 = new JobStatus(new JobID("job_000", 1), 1f, 1f,
      1f, 1f, State.SUCCEEDED, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  doAnswer(
      new Answer<TaskCompletionEvent[]>() {
        @Override
        public TaskCompletionEvent[] answer(InvocationOnMock invocation)
            throws Throwable {
          return new TaskCompletionEvent[0];
        }
      }
  ).when(job).getTaskCompletionEvents(anyInt(), anyInt());
  doReturn(new TaskReport[5]).when(job).getTaskReports(isA(TaskType.class));
  when(clientProtocol.getJobStatus(any(JobID.class))).thenReturn(jobStatus_1, jobStatus_2);
  // setup the logger to capture all logs
  Layout layout =
      Logger.getRootLogger().getAppender("stdout").getLayout();
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  WriterAppender appender = new WriterAppender(layout, os);
  appender.setThreshold(Level.ALL);
  Logger qlogger = Logger.getLogger(Job.class);
  qlogger.addAppender(appender);
  job.monitorAndPrintJob();
  qlogger.removeAppender(appender);
  LineNumberReader r = new LineNumberReader(new StringReader(os.toString()));
  String line;
  boolean foundHundred = false;
  boolean foundComplete = false;
  boolean foundUber = false;
  String uberModeMatch = "uber mode : true";
  String progressMatch = "map 100% reduce 100%";
  String completionMatch = "completed successfully";
  while ((line = r.readLine()) != null) {
    if (line.contains(uberModeMatch)) {
      foundUber = true;
    }
    foundHundred = line.contains(progressMatch);
    if (foundHundred)
      break;
  }
  line = r.readLine();
  foundComplete = line.contains(completionMatch);
  assertTrue(foundUber);
  assertTrue(foundHundred);
  assertTrue(foundComplete);
  System.out.println("The output of job.toString() is : \n" + job.toString());
  assertTrue(job.toString().contains("Number of maps: 5\n"));
  assertTrue(job.toString().contains("Number of reduces: 5\n"));
}
Project: spork-streaming
File: TestJobStats.java
@Test
public void testOneTaskReport() throws Exception {
    // setting up one map task report
    TaskReport[] mapTaskReports = new TaskReport[1];
    mapTaskReports[0] = Mockito.mock(TaskReport.class);
    Mockito.when(mapTaskReports[0].getStartTime()).thenReturn(300L * ONE_THOUSAND);
    Mockito.when(mapTaskReports[0].getFinishTime()).thenReturn(400L * ONE_THOUSAND);
    // setting up one reduce task report
    TaskReport[] reduceTaskReports = new TaskReport[1];
    reduceTaskReports[0] = Mockito.mock(TaskReport.class);
    Mockito.when(reduceTaskReports[0].getStartTime()).thenReturn(500L * ONE_THOUSAND);
    Mockito.when(reduceTaskReports[0].getFinishTime()).thenReturn(700L * ONE_THOUSAND);
    JobConf jobConf = new JobConf();
    JobClient jobClient = Mockito.mock(JobClient.class);
    Mockito.when(jobClient.getMapTaskReports(jobID)).thenReturn(mapTaskReports);
    Mockito.when(jobClient.getReduceTaskReports(jobID)).thenReturn(reduceTaskReports);
    PigStats.JobGraph jobGraph = new PigStats.JobGraph();
    JobStats jobStats = createJobStats("JobStatsTest", jobGraph);
    getJobStatsMethod("setId", JobID.class).invoke(jobStats, jobID);
    getJobStatsMethod("setSuccessful", boolean.class).invoke(jobStats, true);
    getJobStatsMethod("addMapReduceStatistics", JobClient.class, Configuration.class)
        .invoke(jobStats, jobClient, jobConf);
    String msg = (String) getJobStatsMethod("getDisplayString", boolean.class)
        .invoke(jobStats, false);
    System.out.println(JobStats.SUCCESS_HEADER);
    System.out.println(msg);
    StringBuilder sb = new StringBuilder();
    sb.append(jobID.toString()).append("\t");
    sb.append(mapTaskReports.length).append("\t");
    sb.append(reduceTaskReports.length).append("\t");
    sb.append("100\t100\t100\t100\t200\t200\t200\t200");
    System.out.println("assert msg: " + sb.toString());
    assertTrue(msg.startsWith(sb.toString()));
}
Project: spork
File: HadoopShims.java
public static boolean isJobFailed(TaskReport report) {
    return report.getCurrentStatus() == TIPStatus.FAILED;
}
Project: hardfs
File: TestJobMonitorAndPrint.java
@Test
public void testJobMonitorAndPrint() throws Exception {
  JobStatus jobStatus_1 = new JobStatus(new JobID("job_000", 1), 1f, 0.1f,
      0.1f, 0f, State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  JobStatus jobStatus_2 = new JobStatus(new JobID("job_000", 1), 1f, 1f,
      1f, 1f, State.SUCCEEDED, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  doAnswer(
      new Answer<TaskCompletionEvent[]>() {
        @Override
        public TaskCompletionEvent[] answer(InvocationOnMock invocation)
            throws Throwable {
          return new TaskCompletionEvent[0];
        }
      }
  ).when(job).getTaskCompletionEvents(anyInt(), anyInt());
  doReturn(new TaskReport[5]).when(job).getTaskReports(isA(TaskType.class));
  when(clientProtocol.getJobStatus(any(JobID.class))).thenReturn(jobStatus_1, jobStatus_2);
  // setup the logger to capture all logs
  Layout layout =
      Logger.getRootLogger().getAppender("stdout").getLayout();
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  WriterAppender appender = new WriterAppender(layout, os);
  appender.setThreshold(Level.ALL);
  Logger qlogger = Logger.getLogger(Job.class);
  qlogger.addAppender(appender);
  job.monitorAndPrintJob();
  qlogger.removeAppender(appender);
  LineNumberReader r = new LineNumberReader(new StringReader(os.toString()));
  String line;
  boolean foundHundred = false;
  boolean foundComplete = false;
  boolean foundUber = false;
  String uberModeMatch = "uber mode : true";
  String progressMatch = "map 100% reduce 100%";
  String completionMatch = "completed successfully";
  while ((line = r.readLine()) != null) {
    if (line.contains(uberModeMatch)) {
      foundUber = true;
    }
    foundHundred = line.contains(progressMatch);
    if (foundHundred)
      break;
  }
  line = r.readLine();
  foundComplete = line.contains(completionMatch);
  assertTrue(foundUber);
  assertTrue(foundHundred);
  assertTrue(foundComplete);
  System.out.println("The output of job.toString() is : \n" + job.toString());
  assertTrue(job.toString().contains("Number of maps: 5\n"));
  assertTrue(job.toString().contains("Number of reduces: 5\n"));
}
Project: hadoop-on-lustre2
File: TestJobMonitorAndPrint.java
@Test
public void testJobMonitorAndPrint() throws Exception {
  JobStatus jobStatus_1 = new JobStatus(new JobID("job_000", 1), 1f, 0.1f,
      0.1f, 0f, State.RUNNING, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  JobStatus jobStatus_2 = new JobStatus(new JobID("job_000", 1), 1f, 1f,
      1f, 1f, State.SUCCEEDED, JobPriority.HIGH, "tmp-user", "tmp-jobname",
      "tmp-queue", "tmp-jobfile", "tmp-url", true);
  doAnswer(
      new Answer<TaskCompletionEvent[]>() {
        @Override
        public TaskCompletionEvent[] answer(InvocationOnMock invocation)
            throws Throwable {
          return new TaskCompletionEvent[0];
        }
      }
  ).when(job).getTaskCompletionEvents(anyInt(), anyInt());
  doReturn(new TaskReport[5]).when(job).getTaskReports(isA(TaskType.class));
  when(clientProtocol.getJobStatus(any(JobID.class))).thenReturn(jobStatus_1, jobStatus_2);
  // setup the logger to capture all logs
  Layout layout =
      Logger.getRootLogger().getAppender("stdout").getLayout();
  ByteArrayOutputStream os = new ByteArrayOutputStream();
  WriterAppender appender = new WriterAppender(layout, os);
  appender.setThreshold(Level.ALL);
  Logger qlogger = Logger.getLogger(Job.class);
  qlogger.addAppender(appender);
  job.monitorAndPrintJob();
  qlogger.removeAppender(appender);
  LineNumberReader r = new LineNumberReader(new StringReader(os.toString()));
  String line;
  boolean foundHundred = false;
  boolean foundComplete = false;
  boolean foundUber = false;
  String uberModeMatch = "uber mode : true";
  String progressMatch = "map 100% reduce 100%";
  String completionMatch = "completed successfully";
  while ((line = r.readLine()) != null) {
    if (line.contains(uberModeMatch)) {
      foundUber = true;
    }
    foundHundred = line.contains(progressMatch);
    if (foundHundred)
      break;
  }
  line = r.readLine();
  foundComplete = line.contains(completionMatch);
  assertTrue(foundUber);
  assertTrue(foundHundred);
  assertTrue(foundComplete);
  System.out.println("The output of job.toString() is : \n" + job.toString());
  assertTrue(job.toString().contains("Number of maps: 5\n"));
  assertTrue(job.toString().contains("Number of reduces: 5\n"));
}