Usage examples of the Java class org.apache.hadoop.mapreduce.v2.app.MockAppContext (source code)
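The snippets below are gathered from the MapReduce application-master and job-history web-app tests of several Hadoop-derived projects. Read off those call sites, MockAppContext is an AppContext implementation that fabricates jobs, tasks, and attempts for web-UI tests, and it is constructed in three ways on this page. The sketch that follows is a minimal illustration under those assumptions; the parameter names, the class MockAppContextSketch, and the confPath value are hypothetical, inferred from the examples rather than from MockAppContext's own documentation.

// A minimal sketch, assuming the hadoop-mapreduce-client-app test classes
// are on the classpath; parameter names are inferred from the call sites
// on this page, not taken from MockAppContext itself.
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockAppContext;

public class MockAppContextSketch {
  public static void main(String[] args) {
    // (appId, numJobs, numTasks, numAttempts): a populated mock context
    AppContext ctx = new MockAppContext(0, 1, 1, 2);

    // Same, plus a trailing flag that also generates failed/killed tasks,
    // as used by testJobCounterViewForKilledJob below
    AppContext withFailed = new MockAppContext(0, 1, 1, 1, true);

    // (appId, numJobs, numTasks, confPath): jobs backed by a written job
    // configuration file, as in the TestAMWebServicesJobConf snippets;
    // the path used here is hypothetical
    AppContext withConf =
        new MockAppContext(0, 2, 1, new Path("/tmp/test-conf/job.xml"));

    // The mock answers the usual AppContext queries the tests rely on
    System.out.println(ctx.getApplicationID());
    System.out.println(ctx.getAllJobs().size());
  }
}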
Project: hadoop
File: TestAMWebApp.java
@Test public void testSingleTaskCounterView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 2);
  Map<String, String> params = getTaskParams(appContext);
  params.put(AMParams.COUNTER_GROUP,
      "org.apache.hadoop.mapreduce.FileSystemCounter");
  params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
  // remove counters from one task attempt
  // to test handling of missing counters
  TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
  Job job = appContext.getJob(taskID.getJobId());
  Task task = job.getTask(taskID);
  TaskAttempt attempt = task.getAttempts().values().iterator().next();
  attempt.getReport().setCounters(null);
  WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
      appContext, params);
}
Project: hadoop
File: TestHSWebApp.java
@Test
public void testLogsView2() throws IOException {
  LOG.info("HsLogsPage with data");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write(
      "Aggregation is not enabled. Try the nodemanager at "
          + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}
Project: hadoop
File: TestHSWebApp.java
@Test
public void testLogsViewBadStartEnd() throws IOException {
  LOG.info("HsLogsPage with bad start/end params");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put("start", "foo");
  params.put("end", "bar");
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write("Invalid log start value: foo");
  verify(spyPw).write("Invalid log end value: bar");
}
Project: aliyun-oss-hadoop-fs
File: TestAMWebApp.java
@Test public void testSingleTaskCounterView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 2);
  Map<String, String> params = getTaskParams(appContext);
  params.put(AMParams.COUNTER_GROUP,
      "org.apache.hadoop.mapreduce.FileSystemCounter");
  params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
  // remove counters from one task attempt
  // to test handling of missing counters
  TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
  Job job = appContext.getJob(taskID.getJobId());
  Task task = job.getTask(taskID);
  TaskAttempt attempt = task.getAttempts().values().iterator().next();
  attempt.getReport().setCounters(null);
  WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
      appContext, params);
}
Project: aliyun-oss-hadoop-fs
File: TestHSWebApp.java
@Test
public void testLogsView2() throws IOException {
  LOG.info("HsLogsPage with data");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write(
      "Aggregation is not enabled. Try the nodemanager at "
          + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}
Project: aliyun-oss-hadoop-fs
File: TestHSWebApp.java
@Test
public void testLogsViewBadStartEnd() throws IOException {
  LOG.info("HsLogsPage with bad start/end params");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put("start", "foo");
  params.put("end", "bar");
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write("Invalid log start value: foo");
  verify(spyPw).write("Invalid log end value: bar");
}
Project: big-c
File: TestAMWebApp.java
@Test public void testSingleTaskCounterView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 2);
  Map<String, String> params = getTaskParams(appContext);
  params.put(AMParams.COUNTER_GROUP,
      "org.apache.hadoop.mapreduce.FileSystemCounter");
  params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
  // remove counters from one task attempt
  // to test handling of missing counters
  TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
  Job job = appContext.getJob(taskID.getJobId());
  Task task = job.getTask(taskID);
  TaskAttempt attempt = task.getAttempts().values().iterator().next();
  attempt.getReport().setCounters(null);
  WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
      appContext, params);
}
Project: big-c
File: TestHSWebApp.java
@Test
public void testLogsView2() throws IOException {
  LOG.info("HsLogsPage with data");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write(
      "Aggregation is not enabled. Try the nodemanager at "
          + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}
Project: big-c
File: TestHSWebApp.java
@Test
public void testLogsViewBadStartEnd() throws IOException {
  LOG.info("HsLogsPage with bad start/end params");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put("start", "foo");
  params.put("end", "bar");
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write("Invalid log start value: foo");
  verify(spyPw).write("Invalid log end value: bar");
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestAMWebApp.java
@Test public void testSingleTaskCounterView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 2);
  Map<String, String> params = getTaskParams(appContext);
  params.put(AMParams.COUNTER_GROUP,
      "org.apache.hadoop.mapreduce.FileSystemCounter");
  params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
  // remove counters from one task attempt
  // to test handling of missing counters
  TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
  Job job = appContext.getJob(taskID.getJobId());
  Task task = job.getTask(taskID);
  TaskAttempt attempt = task.getAttempts().values().iterator().next();
  attempt.getReport().setCounters(null);
  WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
      appContext, params);
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestHSWebApp.java
@Test
public void testLogsView2() throws IOException {
  LOG.info("HsLogsPage with data");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write(
      "Aggregation is not enabled. Try the nodemanager at "
          + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}
Project: hops
File: TestAMWebApp.java
@Test public void testSingleTaskCounterView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 2);
  Map<String, String> params = getTaskParams(appContext);
  params.put(AMParams.COUNTER_GROUP,
      "org.apache.hadoop.mapreduce.FileSystemCounter");
  params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
  // remove counters from one task attempt
  // to test handling of missing counters
  TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
  Job job = appContext.getJob(taskID.getJobId());
  Task task = job.getTask(taskID);
  TaskAttempt attempt = task.getAttempts().values().iterator().next();
  attempt.getReport().setCounters(null);
  WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
      appContext, params);
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestHSWebApp.java
@Test
public void testLogsViewBadStartEnd() throws IOException {
  LOG.info("HsLogsPage with bad start/end params");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put("start", "foo");
  params.put("end", "bar");
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write("Invalid log start value: foo");
  verify(spyPw).write("Invalid log end value: bar");
}
Project: hadoop-plus
File: TestAMWebApp.java
@Test public void testSingleTaskCounterView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 2);
  Map<String, String> params = getTaskParams(appContext);
  params.put(AMParams.COUNTER_GROUP,
      "org.apache.hadoop.mapreduce.FileSystemCounter");
  params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
  // remove counters from one task attempt
  // to test handling of missing counters
  TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
  Job job = appContext.getJob(taskID.getJobId());
  Task task = job.getTask(taskID);
  TaskAttempt attempt = task.getAttempts().values().iterator().next();
  attempt.getReport().setCounters(null);
  WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
      appContext, params);
}
Project: hops
File: TestHSWebApp.java
@Test
public void testLogsView2() throws IOException {
  LOG.info("HsLogsPage with data");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write(
      "Aggregation is not enabled. Try the nodemanager at "
          + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}
Project: hadoop-plus
File: TestHSWebApp.java
@Test
public void testLogsView2() throws IOException {
  LOG.info("HsLogsPage with data");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write(
      "Aggregation is not enabled. Try the nodemanager at "
          + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}
Project: hops
File: TestHSWebApp.java
@Test
public void testLogsViewBadStartEnd() throws IOException {
  LOG.info("HsLogsPage with bad start/end params");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put("start", "foo");
  params.put("end", "bar");
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write("Invalid log start value: foo");
  verify(spyPw).write("Invalid log end value: bar");
}
Project: FlexMap
File: TestAMWebApp.java
@Test public void testSingleTaskCounterView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 2);
  Map<String, String> params = getTaskParams(appContext);
  params.put(AMParams.COUNTER_GROUP,
      "org.apache.hadoop.mapreduce.FileSystemCounter");
  params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
  // remove counters from one task attempt
  // to test handling of missing counters
  TaskId taskID = MRApps.toTaskID(params.get(AMParams.TASK_ID));
  Job job = appContext.getJob(taskID.getJobId());
  Task task = job.getTask(taskID);
  TaskAttempt attempt = task.getAttempts().values().iterator().next();
  attempt.getReport().setCounters(null);
  WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
      appContext, params);
}
Project: FlexMap
File: TestHSWebApp.java
@Test
public void testLogsView2() throws IOException {
  LOG.info("HsLogsPage with data");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write(
      "Aggregation is not enabled. Try the nodemanager at "
          + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}
Project: FlexMap
File: TestHSWebApp.java
@Test
public void testLogsViewBadStartEnd() throws IOException {
  LOG.info("HsLogsPage with bad start/end params");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  params.put("start", "foo");
  params.put("end", "bar");
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params);
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write("Invalid log start value: foo");
  verify(spyPw).write("Invalid log end value: bar");
}
Project: hadoop
File: TestAMWebServicesTasks.java
@Override
protected void configureServlets() {
  appContext = new MockAppContext(0, 1, 2, 1);
  bind(JAXBContextResolver.class);
  bind(AMWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(AppContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);
  serve("/*").with(GuiceContainer.class);
}
Project: hadoop
File: TestAMWebServicesJobConf.java
@Override
protected void configureServlets() {
  Path confPath = new Path(testConfDir.toString(),
      MRJobConfig.JOB_CONF_FILE);
  Configuration config = new Configuration();
  FileSystem localFs;
  try {
    localFs = FileSystem.getLocal(config);
    confPath = localFs.makeQualified(confPath);
    OutputStream out = localFs.create(confPath);
    try {
      conf.writeXml(out);
    } finally {
      out.close();
    }
    if (!localFs.exists(confPath)) {
      fail("error creating config file: " + confPath);
    }
  } catch (IOException e) {
    fail("error creating config file: " + e.getMessage());
  }
  appContext = new MockAppContext(0, 2, 1, confPath);
  bind(JAXBContextResolver.class);
  bind(AMWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(AppContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);
  serve("/*").with(GuiceContainer.class);
}
Project: hadoop
File: TestAMWebServicesJobs.java
@Override
protected void configureServlets() {
  appContext = new MockAppContext(0, 1, 2, 1);
  bind(JAXBContextResolver.class);
  bind(AMWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(AppContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);
  serve("/*").with(GuiceContainer.class);
}
Project: hadoop
File: TestAMWebServices.java
@Override
protected void configureServlets() {
  appContext = new MockAppContext(0, 1, 1, 1);
  appContext.setBlacklistedNodes(Sets.newHashSet("badnode1", "badnode2"));
  bind(JAXBContextResolver.class);
  bind(AMWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(AppContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);
  serve("/*").with(GuiceContainer.class);
}
Project: hadoop
File: TestAMWebApp.java
@Test public void testAppControllerIndex() {
  AppContext ctx = new MockAppContext(0, 1, 1, 1);
  Injector injector = WebAppTests.createMockInjector(AppContext.class, ctx);
  AppController controller = injector.getInstance(AppController.class);
  controller.index();
  assertEquals(ctx.getApplicationID().toString(), controller.get(APP_ID, ""));
}
Project: hadoop
File: TestAMWebApp.java
@Test public void testTaskView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = getTaskParams(appContext);
  App app = new App(appContext);
  app.setJob(appContext.getAllJobs().values().iterator().next());
  app.setTask(app.getJob().getTasks().values().iterator().next());
  WebAppTests.testPage(TaskPage.class, App.class, app, params);
}
Project: hadoop
File: TestAMWebApp.java
@Test public void testSingleCounterView() {
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Job job = appContext.getAllJobs().values().iterator().next();
  // add a failed task to the job without any counters
  Task failedTask = MockJobs.newTask(job.getID(), 2, 1, true);
  Map<TaskId, Task> tasks = job.getTasks();
  tasks.put(failedTask.getID(), failedTask);
  Map<String, String> params = getJobParams(appContext);
  params.put(AMParams.COUNTER_GROUP,
      "org.apache.hadoop.mapreduce.FileSystemCounter");
  params.put(AMParams.COUNTER_NAME, "HDFS_WRITE_OPS");
  WebAppTests.testPage(SingleCounterPage.class, AppContext.class,
      appContext, params);
}
Project: hadoop
File: TestAMWebServicesAttempts.java
@Override
protected void configureServlets() {
  appContext = new MockAppContext(0, 1, 2, 1);
  bind(JAXBContextResolver.class);
  bind(AMWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(AppContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);
  serve("/*").with(GuiceContainer.class);
}
Project: hadoop
File: TestHSWebApp.java
@Test public void testAppControllerIndex() {
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Injector injector = WebAppTests.createMockInjector(AppContext.class, ctx);
  HsController controller = injector.getInstance(HsController.class);
  controller.index();
  assertEquals(ctx.getApplicationID().toString(), controller.get(APP_ID, ""));
}
Project: hadoop
File: TestHSWebApp.java
@Test
public void testTasksView() {
  LOG.info("HsTasksPage");
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
  WebAppTests.testPage(HsTasksPage.class, AppContext.class, appContext,
      params);
}
Project: hadoop
File: TestHSWebApp.java
@Test
public void testTaskView() {
  LOG.info("HsTaskPage");
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
  WebAppTests
      .testPage(HsTaskPage.class, AppContext.class, appContext, params);
}
Project: hadoop
File: TestHSWebApp.java
@Test public void testAttemptsWithJobView() {
  LOG.info("HsAttemptsPage with data");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  JobId id = ctx.getAllJobs().keySet().iterator().next();
  Map<String, String> params = new HashMap<String, String>();
  params.put(JOB_ID, id.toString());
  params.put(TASK_TYPE, "m");
  params.put(ATTEMPT_STATE, "SUCCESSFUL");
  WebAppTests.testPage(HsAttemptsPage.class, AppContext.class,
      ctx, params);
}
Project: hadoop
File: TestHSWebApp.java
@Test public void testAttemptsView() {
  LOG.info("HsAttemptsPage");
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
  WebAppTests.testPage(HsAttemptsPage.class, AppContext.class,
      appContext, params);
}
Project: hadoop
File: TestHSWebApp.java
@Test public void testJobCounterView() {
  LOG.info("JobCounterView");
  AppContext appContext = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = TestAMWebApp.getJobParams(appContext);
  WebAppTests.testPage(HsCountersPage.class, AppContext.class,
      appContext, params);
}
Project: hadoop
File: TestHSWebApp.java
@Test public void testJobCounterViewForKilledJob() {
  LOG.info("JobCounterViewForKilledJob");
  AppContext appContext = new MockAppContext(0, 1, 1, 1, true);
  Map<String, String> params = TestAMWebApp.getJobParams(appContext);
  WebAppTests.testPage(HsCountersPage.class, AppContext.class,
      appContext, params);
}
Project: hadoop
File: TestHSWebApp.java
@Test
public void testLogsView1() throws IOException {
  LOG.info("HsLogsPage");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class,
          new MockAppContext(0, 1, 1, 1));
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write("Cannot get container logs without a ContainerId");
  verify(spyPw).write("Cannot get container logs without a NodeId");
  verify(spyPw).write("Cannot get container logs without an app owner");
}
Project: hadoop
File: TestHSWebApp.java
@Test
public void testLogsViewSingle() throws IOException {
  LOG.info("HsLogsPage with params for single log and data limits");
  MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
  Map<String, String> params = new HashMap<String, String>();
  final Configuration conf = new YarnConfiguration();
  conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
  params.put("start", "-2048");
  params.put("end", "-1024");
  params.put(CONTAINER_LOG_TYPE, "syslog");
  params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
      .toString());
  params.put(NM_NODENAME,
      NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
  params.put(ENTITY_STRING, "container_10_0001_01_000001");
  params.put(APP_OWNER, "owner");
  Injector injector =
      WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, ctx,
          params, new AbstractModule() {
            @Override
            protected void configure() {
              bind(Configuration.class).toInstance(conf);
            }
          });
  PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
  verify(spyPw).write(
      "Logs not available for container_10_0001_01_000001."
          + " Aggregation may not be complete, "
          + "Check back later or try the nodemanager at "
          + MockJobs.NM_HOST + ":" + MockJobs.NM_PORT);
}
Project: aliyun-oss-hadoop-fs
File: TestAMWebServicesTasks.java
@Override
protected void configureServlets() {
  appContext = new MockAppContext(0, 1, 2, 1);
  bind(JAXBContextResolver.class);
  bind(AMWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(AppContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);
  serve("/*").with(GuiceContainer.class);
}
Project: aliyun-oss-hadoop-fs
File: TestAMWebServicesJobConf.java
@Override
protected void configureServlets() {
  Path confPath = new Path(testConfDir.toString(),
      MRJobConfig.JOB_CONF_FILE);
  Configuration config = new Configuration();
  FileSystem localFs;
  try {
    localFs = FileSystem.getLocal(config);
    confPath = localFs.makeQualified(confPath);
    OutputStream out = localFs.create(confPath);
    try {
      conf.writeXml(out);
    } finally {
      out.close();
    }
    if (!localFs.exists(confPath)) {
      fail("error creating config file: " + confPath);
    }
  } catch (IOException e) {
    fail("error creating config file: " + e.getMessage());
  }
  appContext = new MockAppContext(0, 2, 1, confPath);
  bind(JAXBContextResolver.class);
  bind(AMWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(AppContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);
  serve("/*").with(GuiceContainer.class);
}
Project: aliyun-oss-hadoop-fs
File: TestAMWebServicesJobs.java
@Override
protected void configureServlets() {
  appContext = new MockAppContext(0, 1, 2, 1);
  bind(JAXBContextResolver.class);
  bind(AMWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(AppContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);
  serve("/*").with(GuiceContainer.class);
}