Usage examples for the Java class org.apache.hadoop.mapreduce.v2.app.job.Job
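The snippets below are collected from the Hadoop MapReduce sources and tests and show typical uses of the Job interface: enumerating jobs through an AppContext, walking a job's tasks and task attempts, and reading their reports and counters. As orientation, here is a minimal sketch of that traversal pattern, assuming an already-initialized AppContext (the tests below use mock implementations such as MockAppContext); it is an illustration written for this page, not code taken from the project.

import java.util.Map;

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;

// Minimal illustration (not part of Hadoop): walk every job known to the
// AppContext and print the state of each task and task attempt.
public class JobWalkExample {
  static void dumpJobs(AppContext appContext) {
    Map<JobId, Job> jobs = appContext.getAllJobs();
    for (Map.Entry<JobId, Job> jobEntry : jobs.entrySet()) {
      Job job = jobEntry.getValue();
      System.out.println("job " + jobEntry.getKey() + " state=" + job.getState());
      for (Map.Entry<TaskId, Task> taskEntry : job.getTasks().entrySet()) {
        Task task = taskEntry.getValue();
        System.out.println("  task " + taskEntry.getKey() + " state="
            + task.getReport().getTaskState());
        for (Map.Entry<TaskAttemptId, TaskAttempt> attemptEntry
            : task.getAttempts().entrySet()) {
          System.out.println("    attempt " + attemptEntry.getKey() + " state="
              + attemptEntry.getValue().getReport().getTaskAttemptState());
        }
      }
    }
  }
}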
Project: hadoop
File: TestHsWebServicesTasks.java
@Test
public void testTaskId() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("task");
verifyHsSingleTask(info, task);
}
}
}
Project: hadoop
File: TestAMWebServicesJobs.java
@Test
public void testJobCountersDefault() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("counters/").get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("jobCounters");
verifyAMJobCounters(info, jobsMap.get(id));
}
}
Project: hadoop
File: TestHsWebServicesJobs.java
@Test
public void testJobAttemptsSlash() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("jobattempts/")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("jobAttempts");
verifyHsJobAttempts(info, appContext.getJob(id));
}
}
Project: hadoop
File: TestAMWebServicesJobs.java
@Test
public void testJobsSlash() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs/").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject jobs = json.getJSONObject("jobs");
JSONArray arr = jobs.getJSONArray("job");
JSONObject info = arr.getJSONObject(0);
Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
verifyAMJob(info, job);
}
Project: hadoop
File: TestAMWebServicesAttempts.java
@Test
public void testTaskAttempts() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyAMTaskAttempts(json, task);
}
}
}
Project: hadoop
File: TestHsWebServicesJobs.java
@Test
public void testJobsSlash() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs/").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject jobs = json.getJSONObject("jobs");
JSONArray arr = jobs.getJSONArray("job");
assertEquals("incorrect number of elements", 1, arr.length());
JSONObject info = arr.getJSONObject(0);
Job job = appContext.getPartialJob(MRApps.toJobID(info.getString("id")));
VerifyJobsUtils.verifyHsJobPartial(info, job);
}
Project: hadoop
File: TestHsWebServicesJobs.java
@Test
public void testJobCountersSlash() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("counters/")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("jobCounters");
verifyHsJobCounters(info, appContext.getJob(id));
}
}
Project: hadoop
File: TestAMWebServicesJobs.java
@Test
public void testJobs() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject jobs = json.getJSONObject("jobs");
JSONArray arr = jobs.getJSONArray("job");
JSONObject info = arr.getJSONObject(0);
Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
verifyAMJob(info, job);
}
Project: hadoop
File: TestAMWebServicesJobs.java
@Test
public void testJobCountersXML() throws Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("counters")
.accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList info = dom.getElementsByTagName("jobCounters");
verifyAMJobCountersXML(info, jobsMap.get(id));
}
}
Project: hadoop
File: CachedHistoryStorage.java
@Override
public Map<JobId, Job> getAllPartialJobs() {
LOG.debug("Called getAllPartialJobs()");
SortedMap<JobId, Job> result = new TreeMap<JobId, Job>();
try {
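// Wrap each entry from the job history index in a lightweight PartialJob;
// the full job history file is not parsed here.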
for (HistoryFileInfo mi : hsManager.getAllFileInfo()) {
if (mi != null) {
JobId id = mi.getJobId();
result.put(id, new PartialJob(mi.getJobIndexInfo(), id));
}
}
} catch (IOException e) {
LOG.warn("Error trying to scan for all FileInfos", e);
throw new YarnRuntimeException(e);
}
return result;
}
Project: hadoop
File: AMWebServices.java
@GET
@Path("/jobs/{jobid}/tasks/{taskid}/attempts")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public TaskAttemptsInfo getJobTaskAttempts(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid, @PathParam("taskid") String tid) {
init();
TaskAttemptsInfo attempts = new TaskAttemptsInfo();
Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
Task task = getTaskFromTaskIdString(tid, job);
for (TaskAttempt ta : task.getAttempts().values()) {
if (ta != null) {
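// Reduce attempts are reported with ReduceTaskAttemptInfo (which carries the
// shuffle/merge timing fields); all other attempts use the generic TaskAttemptInfo.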
if (task.getType() == TaskType.REDUCE) {
attempts.add(new ReduceTaskAttemptInfo(ta, task.getType()));
} else {
attempts.add(new TaskAttemptInfo(ta, task.getType(), true));
}
}
}
return attempts;
}
Project: hadoop
File: TestFail.java
@Test
// All task attempts are timed out, leading to job failure
public void testTimedOutTask() throws Exception {
MRApp app = new TimeOutTaskMRApp(1, 0);
Configuration conf = new Configuration();
int maxAttempts = 2;
conf.setInt(MRJobConfig.MAP_MAX_ATTEMPTS, maxAttempts);
// disable uberization (requires entire job to be reattempted, so max for
// subtask attempts is overridden to 1)
conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
Job job = app.submit(conf);
app.waitForState(job, JobState.FAILED);
Map<TaskId,Task> tasks = job.getTasks();
Assert.assertEquals("Num tasks is not correct", 1, tasks.size());
Task task = tasks.values().iterator().next();
Assert.assertEquals("Task state not correct", TaskState.FAILED,
task.getReport().getTaskState());
Map<TaskAttemptId, TaskAttempt> attempts =
tasks.values().iterator().next().getAttempts();
Assert.assertEquals("Num attempts is not correct", maxAttempts,
attempts.size());
for (TaskAttempt attempt : attempts.values()) {
Assert.assertEquals("Attempt state not correct", TaskAttemptState.FAILED,
attempt.getReport().getTaskAttemptState());
}
}
Project: hadoop
File: TestAMWebServicesJobs.java
@Test
public void testJobIdXML() throws Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).accept(MediaType.APPLICATION_XML)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList job = dom.getElementsByTagName("job");
verifyAMJobXML(job, appContext);
}
}
Project: hadoop
File: TestHsWebServicesJobs.java
@Test
public void testJobId() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId)
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("job");
VerifyJobsUtils.verifyHsJob(info, appContext.getJob(id));
}
}
Project: hadoop
File: TestAMWebServicesJobs.java
@Test
public void testJobId() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("job");
verifyAMJob(info, jobsMap.get(id));
}
}
Project: hadoop
File: TestHsWebServicesJobs.java
@Test
public void testJobIdSlash() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId + "/")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("job");
VerifyJobsUtils.verifyHsJob(info, appContext.getJob(id));
}
}
Project: hadoop
File: TestTaskAttempt.java
private void testMRAppHistory(MRApp app) throws Exception {
Configuration conf = new Configuration();
Job job = app.submit(conf);
app.waitForState(job, JobState.FAILED);
Map<TaskId, Task> tasks = job.getTasks();
Assert.assertEquals("Num tasks is not correct", 1, tasks.size());
Task task = tasks.values().iterator().next();
Assert.assertEquals("Task state not correct", TaskState.FAILED, task
.getReport().getTaskState());
Map<TaskAttemptId, TaskAttempt> attempts = tasks.values().iterator().next()
.getAttempts();
Assert.assertEquals("Num attempts is not correct", 4, attempts.size());
Iterator<TaskAttempt> it = attempts.values().iterator();
TaskAttemptReport report = it.next().getReport();
Assert.assertEquals("Attempt state not correct", TaskAttemptState.FAILED,
report.getTaskAttemptState());
Assert.assertEquals("Diagnostic Information is not Correct",
"Test Diagnostic Event", report.getDiagnosticInfo());
report = it.next().getReport();
Assert.assertEquals("Attempt state not correct", TaskAttemptState.FAILED,
report.getTaskAttemptState());
}
Project: hadoop
File: TestHsWebServicesJobs.java
@Test
public void testJobIdXML() throws Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId)
.accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList job = dom.getElementsByTagName("job");
verifyHsJobXML(job, appContext);
}
}
Project: hadoop
File: TestHsWebServicesTasks.java
@Test
public void testTasksQueryReduce() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
String type = "r";
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks")
.queryParam("type", type).accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject tasks = json.getJSONObject("tasks");
JSONArray arr = tasks.getJSONArray("task");
assertEquals("incorrect number of elements", 1, arr.length());
verifyHsTask(arr, jobsMap.get(id), type);
}
}
Project: hadoop
File: TestHsWebServicesJobsQuery.java
@Test
public void testJobsQueryUser() throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").queryParam("user", "mock")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
System.out.println(json.toString());
assertEquals("incorrect number of elements", 1, json.length());
JSONObject jobs = json.getJSONObject("jobs");
JSONArray arr = jobs.getJSONArray("job");
assertEquals("incorrect number of elements", 3, arr.length());
// just verify one of them.
JSONObject info = arr.getJSONObject(0);
Job job = appContext.getPartialJob(MRApps.toJobID(info.getString("id")));
VerifyJobsUtils.verifyHsJobPartial(info, job);
}
Project: hadoop
File: TestHsWebServicesTasks.java
@Test
public void testTaskIdCountersSlash() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.path("counters/").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("jobTaskCounters");
verifyHsJobTaskCounters(info, task);
}
}
}
Project: hadoop
File: TestHsWebServicesTasks.java
public void verifyHsTaskXML(NodeList nodes, Job job) {
assertEquals("incorrect number of elements", 2, nodes.getLength());
for (Task task : job.getTasks().values()) {
TaskId id = task.getID();
String tid = MRApps.toString(id);
Boolean found = false;
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
if (tid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
found = true;
verifyHsSingleTaskXML(element, task);
}
}
assertTrue("task with id: " + tid + " not in web service output", found);
}
}
Project: hadoop
File: TestHsWebServicesJobs.java
@Test
public void testJobCountersDefault() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("counters/")
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("jobCounters");
verifyHsJobCounters(info, appContext.getJob(id));
}
}
Project: hadoop
File: TestRMContainerAllocator.java
private static AppContext createAppContext(
ApplicationAttemptId appAttemptId, Job job) {
AppContext context = mock(AppContext.class);
ApplicationId appId = appAttemptId.getApplicationId();
when(context.getApplicationID()).thenReturn(appId);
when(context.getApplicationAttemptId()).thenReturn(appAttemptId);
when(context.getJob(isA(JobId.class))).thenReturn(job);
when(context.getClusterInfo()).thenReturn(
new ClusterInfo(Resource.newInstance(10240, 1)));
when(context.getEventHandler()).thenReturn(new EventHandler() {
@Override
public void handle(Event event) {
// Only capture interesting events.
if (event instanceof TaskAttemptContainerAssignedEvent) {
events.add((TaskAttemptContainerAssignedEvent) event);
} else if (event instanceof TaskAttemptKillEvent) {
taskAttemptKillEvents.add((TaskAttemptKillEvent)event);
} else if (event instanceof JobUpdatedNodesEvent) {
jobUpdatedNodeEvents.add((JobUpdatedNodesEvent)event);
} else if (event instanceof JobEvent) {
jobEvents.add((JobEvent)event);
}
}
});
return context;
}
Project: hadoop
File: TestStagingCleanup.java
@Override
protected Job createJob(Configuration conf, JobStateInternal forcedState,
String diagnostic) {
UserGroupInformation currentUser = null;
try {
currentUser = UserGroupInformation.getCurrentUser();
} catch (IOException e) {
throw new YarnRuntimeException(e);
}
Job newJob = new TestJob(getJobId(), getAttemptID(), conf,
getDispatcher().getEventHandler(),
getTaskAttemptListener(), getContext().getClock(),
getCommitter(), isNewApiCommitter(),
currentUser.getUserName(), getContext(),
forcedState, diagnostic);
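// Make the test job visible through the AppContext and register the handler
// invoked when the job finishes.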
((AppContext) getContext()).getAllJobs().put(newJob.getID(), newJob);
getDispatcher().register(JobFinishEvent.Type.class,
createJobFinishEventHandler());
return newJob;
}
Project: hadoop
File: TestAMWebServicesAttempts.java
@Test
public void testTaskAttemptsDefault() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
verifyAMTaskAttempts(json, task);
}
}
}
Project: hadoop
File: TestAMWebServicesJobs.java
@Test
public void testJobAttemptsXML() throws Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1")
.path("mapreduce").path("jobs").path(jobId).path("jobattempts")
.accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList attempts = dom.getElementsByTagName("jobAttempts");
assertEquals("incorrect number of elements", 1, attempts.getLength());
NodeList info = dom.getElementsByTagName("jobAttempt");
verifyJobAttemptsXML(info, jobsMap.get(id));
}
}
Project: hadoop
File: TestAMWebApp.java
@Test public void testSingleTaskCounterView() {
AppContext appContext = new MockAppContext(0, 1, 1, 2);
Map<String, String> params = getTaskParams(appContext);
params.put(AMParams.COUNTER_GROUP,
"org.apache.hadoop.mapreduce.FileSystemCounter");
params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
// remove counters from one task attempt
// to test handling of missing counters
TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
Job job = appContext.getJob(taskID.getJobId());
Task task = job.getTask(taskID);
TaskAttempt attempt = task.getAttempts().values().iterator().next();
attempt.getReport().setCounters(null);
WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
appContext, params);
}
Project: hadoop
File: TestMRApp.java
@Test
public void testJobRebootOnLastRetryOnUnregistrationFailure()
throws Exception {
// set startCount to 2, since this is the last retry (equal to
// DEFAULT_MAX_AM_RETRY)
// the last parameter mocks the unregistration failure
MRApp app = new MRApp(1, 0, false, this.getClass().getName(), true, 2, false);
Configuration conf = new Configuration();
Job job = app.submit(conf);
app.waitForState(job, JobState.RUNNING);
Assert.assertEquals("Num tasks not correct", 1, job.getTasks().size());
Iterator<Task> it = job.getTasks().values().iterator();
Task task = it.next();
app.waitForState(task, TaskState.RUNNING);
// send a reboot event
app.getContext().getEventHandler().handle(new JobEvent(job.getID(),
JobEventType.JOB_AM_REBOOT));
app.waitForInternalState((JobImpl) job, JobStateInternal.REBOOT);
// return external state as RUNNING if this is the last retry while
// unregistration fails
app.waitForState(job, JobState.RUNNING);
}
Project: hadoop
File: TestAMWebServicesTasks.java
@Test
public void testTasks() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject tasks = json.getJSONObject("tasks");
JSONArray arr = tasks.getJSONArray("task");
assertEquals("incorrect number of elements", 2, arr.length());
verifyAMTask(arr, jobsMap.get(id), null);
}
}
Project: hadoop
File: TestAMWebServicesTasks.java
@Test
public void testTasksDefault() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks").get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject tasks = json.getJSONObject("tasks");
JSONArray arr = tasks.getJSONArray("task");
assertEquals("incorrect number of elements", 2, arr.length());
verifyAMTask(arr, jobsMap.get(id), null);
}
}
Project: hadoop
File: TestAMWebServicesTasks.java
@Test
public void testTasksSlash() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks/")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject tasks = json.getJSONObject("tasks");
JSONArray arr = tasks.getJSONArray("task");
assertEquals("incorrect number of elements", 2, arr.length());
verifyAMTask(arr, jobsMap.get(id), null);
}
}
Project: hadoop
File: TestAMWebServicesTasks.java
@Test
public void testTasksXML() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks")
.accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList tasks = dom.getElementsByTagName("tasks");
assertEquals("incorrect number of elements", 1, tasks.getLength());
NodeList task = dom.getElementsByTagName("task");
verifyAMTaskXML(task, jobsMap.get(id));
}
}
Project: hadoop
File: TestHsWebServicesJobs.java
@Test
public void testJobAttemptsXML() throws Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
ClientResponse response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("jobattempts")
.accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList attempts = dom.getElementsByTagName("jobAttempts");
assertEquals("incorrect number of elements", 1, attempts.getLength());
NodeList info = dom.getElementsByTagName("jobAttempt");
verifyHsJobAttemptsXML(info, appContext.getJob(id));
}
}
Project: hadoop
File: TestMRAppComponentDependencies.java
@Override
protected Job createJob(Configuration conf, JobStateInternal forcedState,
String diagnostic) {
UserGroupInformation currentUser = null;
try {
currentUser = UserGroupInformation.getCurrentUser();
} catch (IOException e) {
throw new YarnRuntimeException(e);
}
Job newJob =
new TestJob(getJobId(), getAttemptID(), conf, getDispatcher()
.getEventHandler(), getTaskAttemptListener(), getContext()
.getClock(), getCommitter(), isNewApiCommitter(),
currentUser.getUserName(), getContext(), forcedState, diagnostic);
((AppContext) getContext()).getAllJobs().put(newJob.getID(), newJob);
getDispatcher().register(JobFinishEvent.Type.class,
createJobFinishEventHandler());
return newJob;
}
Project: hadoop
File: TestAMWebServicesTasks.java
@Test
public void testTaskId() throws JSONException, Exception {
WebResource r = resource();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
.path("jobs").path(jobId).path("tasks").path(tid)
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("task");
verifyAMSingleTask(info, task);
}
}
}
Project: hadoop
File: TestAMWebServicesJobs.java
public void verifyJobAttemptsGeneric(Job job, String nodeHttpAddress,
String nodeId, int id, long startTime, String containerId, String logsLink) {
boolean attemptFound = false;
for (AMInfo amInfo : job.getAMInfos()) {
if (amInfo.getAppAttemptId().getAttemptId() == id) {
attemptFound = true;
String nmHost = amInfo.getNodeManagerHost();
int nmHttpPort = amInfo.getNodeManagerHttpPort();
int nmPort = amInfo.getNodeManagerPort();
WebServicesTestUtils.checkStringMatch("nodeHttpAddress", nmHost + ":"
+ nmHttpPort, nodeHttpAddress);
WebServicesTestUtils.checkStringMatch("nodeId",
NodeId.newInstance(nmHost, nmPort).toString(), nodeId);
assertTrue("startime not greater than 0", startTime > 0);
WebServicesTestUtils.checkStringMatch("containerId", amInfo
.getContainerId().toString(), containerId);
String localLogsLink = ujoin("node", "containerlogs", containerId,
job.getUserName());
assertTrue("logsLink", logsLink.contains(localLogsLink));
}
}
assertTrue("attempt: " + id + " was not found", attemptFound);
}
Project: hadoop
File: TestAMWebServicesTasks.java
public void verifyAMTask(JSONArray arr, Job job, String type)
throws JSONException {
for (Task task : job.getTasks().values()) {
TaskId id = task.getID();
String tid = MRApps.toString(id);
Boolean found = false;
if (type != null && task.getType() == MRApps.taskType(type)) {
for (int i = 0; i < arr.length(); i++) {
JSONObject info = arr.getJSONObject(i);
if (tid.matches(info.getString("id"))) {
found = true;
verifyAMSingleTask(info, task);
}
}
assertTrue("task with id: " + tid + " not in web service output", found);
}
}
}
Project: hadoop
File: TestAMWebServicesTasks.java
public void verifyAMTaskXML(NodeList nodes, Job job) {
assertEquals("incorrect number of elements", 2, nodes.getLength());
for (Task task : job.getTasks().values()) {
TaskId id = task.getID();
String tid = MRApps.toString(id);
Boolean found = false;
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
if (tid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
found = true;
verifyAMSingleTaskXML(element, task);
}
}
assertTrue("task with id: " + tid + " not in web service output", found);
}
}
Project: hadoop
File: HsAttemptsPage.java
@Override
protected Collection<TaskAttempt> getTaskAttempts() {
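// Narrow the job's task attempts to the task type and attempt state selected
// through the page's query parameters.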
List<TaskAttempt> fewTaskAttemps = new ArrayList<TaskAttempt>();
String taskTypeStr = $(TASK_TYPE);
TaskType taskType = MRApps.taskType(taskTypeStr);
String attemptStateStr = $(ATTEMPT_STATE);
TaskAttemptStateUI neededState = MRApps
.taskAttemptState(attemptStateStr);
Job j = app.getJob();
Map<TaskId, Task> tasks = j.getTasks(taskType);
for (Task task : tasks.values()) {
Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
for (TaskAttempt attempt : attempts.values()) {
if (neededState.correspondsTo(attempt.getState())) {
fewTaskAttemps.add(attempt);
}
}
}
return fewTaskAttemps;
}