Java class org.apache.hadoop.mapreduce.MapReduceTestUtil: example source code
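This page collects example usages of org.apache.hadoop.mapreduce.MapReduceTestUtil drawn from the test suites of several Hadoop-derived projects. As a minimal, self-contained sketch of the helpers that recur below (createJob, createFailJob, createCopyJob, readOutput, and the dummy-context factories), the following is illustrative only: the directory paths and the main() harness are assumptions, while the helper calls themselves mirror the excerpts on this page.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;

public class MapReduceTestUtilSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical local paths; the real tests derive these from a test root.
    Path inDir = new Path("/tmp/mrtestutil/input");
    Path outDir = new Path("/tmp/mrtestutil/output");
    // One map task, one reduce task; the String argument supplies the job
    // input (written under inDir before the Job is returned).
    Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, "a\nb\na\n");
    job.waitForCompletion(true);
    // readOutput concatenates the job's output files under outDir.
    System.out.println(MapReduceTestUtil.readOutput(outDir, conf));
  }
}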
Project: hadoop
File: TestJobOutputCommitter.java
private void testSuccessfulJob(String filename,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path outDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0);
  job.setOutputFormatClass(output);
  assertTrue("Job failed!", job.waitForCompletion(true));
  Path testFile = new Path(outDir, filename);
  assertTrue("Done file missing for job " + job.getJobID(), fs.exists(testFile));
  // check that none of the excluded files were created
  for (String ex : exclude) {
    Path file = new Path(outDir, ex);
    assertFalse("File " + file + " should not be present for successful job "
        + job.getJobID(), fs.exists(file));
  }
}
Project: hadoop
File: TestJobOutputCommitter.java
private void testFailedJob(String fileName,
    Class<? extends OutputFormat> output, String[] exclude) throws Exception {
  Path outDir = getNewOutputDir();
  Job job = MapReduceTestUtil.createFailJob(conf, outDir, inDir);
  job.setOutputFormatClass(output);
  assertFalse("Job did not fail!", job.waitForCompletion(true));
  if (fileName != null) {
    Path testFile = new Path(outDir, fileName);
    assertTrue("File " + testFile + " missing for failed job " + job.getJobID(),
        fs.exists(testFile));
  }
  // check that none of the excluded files were created
  for (String ex : exclude) {
    Path file = new Path(outDir, ex);
    assertFalse("File " + file + " should not be present for failed job "
        + job.getJobID(), fs.exists(file));
  }
}
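Both helpers above lean on fixture state of the enclosing TestJobOutputCommitter class: conf, fs, inDir, and getNewOutputDir() are fields and methods set up elsewhere in the test. createFailJob(conf, outDir, inDir) builds a job whose tasks always fail, so that the failure/abort path of the OutputFormat under test can be observed against the exclude list.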
Project: hadoop
File: TestChainErrors.java
/**
 * Tests one of the chained mappers throwing an exception.
 *
 * @throws Exception
 */
public void testChainFail() throws Exception {
  Configuration conf = createJobConf();
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");
  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);
  ChainMapper.addMapper(job, FailMap.class, LongWritable.class, Text.class,
      IntWritable.class, Text.class, null);
  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);
  job.waitForCompletion(true);
  assertFalse("Job did not fail", job.isSuccessful());
}
Project: hadoop
File: TestChainErrors.java
/**
 * Tests the reducer throwing an exception.
 *
 * @throws Exception
 */
public void testReducerFail() throws Exception {
  Configuration conf = createJobConf();
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");
  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);
  ChainReducer.setReducer(job, FailReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);
  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);
  job.waitForCompletion(true);
  assertFalse("Job did not fail", job.isSuccessful());
}
Project: hadoop
File: TestChainErrors.java
/**
 * Tests one of the chained mappers consuming all output.
 *
 * @throws Exception
 */
public void testChainMapNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 0, input);
  job.setJobName("chain");
  ChainMapper.addMapper(job, ConsumeMap.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);
  ChainMapper.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);
  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs don't match", expectedOutput,
      MapReduceTestUtil.readOutput(outDir, conf));
}
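The ConsumeMap referenced above is defined elsewhere in TestChainErrors; the sketch below is an illustrative stand-in, not the project's exact class. The point of such a stage is that it reads every record but never writes one, which is why expectedOutput is the empty string.

// Illustrative stand-in for TestChainErrors.ConsumeMap: consume the input,
// emit nothing, so nothing reaches the rest of the chain or the output.
public static class ConsumeMapSketch
    extends Mapper<IntWritable, Text, LongWritable, Text> {
  @Override
  protected void map(IntWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    // No context.write(...): the record is swallowed here.
  }
}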
Project: hadoop
File: TestChainErrors.java
/**
 * Tests the reducer consuming all output.
 *
 * @throws Exception
 */
public void testChainReduceNoOuptut() throws Exception {
  Configuration conf = createJobConf();
  String expectedOutput = "";
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");
  ChainMapper.addMapper(job, Mapper.class, IntWritable.class, Text.class,
      LongWritable.class, Text.class, null);
  ChainReducer.setReducer(job, ConsumeReduce.class, LongWritable.class,
      Text.class, LongWritable.class, Text.class, null);
  ChainReducer.addMapper(job, Mapper.class, LongWritable.class, Text.class,
      LongWritable.class, Text.class, null);
  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs don't match", expectedOutput,
      MapReduceTestUtil.readOutput(outDir, conf));
}
Project: hadoop
File: TestSingleElementChain.java
public void testNoChain() throws Exception {
  Path inDir = new Path(localPathRoot, "testing/chain/input");
  Path outDir = new Path(localPathRoot, "testing/chain/output");
  String input = "a\nb\na\n";
  String expectedOutput = "a\t2\nb\t1\n";
  Configuration conf = createJobConf();
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");
  ChainMapper.addMapper(job, TokenCounterMapper.class, Object.class,
      Text.class, Text.class, IntWritable.class, null);
  ChainReducer.setReducer(job, IntSumReducer.class, Text.class,
      IntWritable.class, Text.class, IntWritable.class, null);
  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs don't match", expectedOutput,
      MapReduceTestUtil.readOutput(outDir, conf));
}
Project: hadoop
File: TestMRKeyValueTextInputFormat.java
private static List<Text> readSplit(KeyValueTextInputFormat format,
    InputSplit split, Job job) throws IOException, InterruptedException {
  List<Text> result = new ArrayList<Text>();
  Configuration conf = job.getConfiguration();
  TaskAttemptContext context =
      MapReduceTestUtil.createDummyMapTaskAttemptContext(conf);
  // reuse the dummy context instead of creating a second one
  RecordReader<Text, Text> reader = format.createRecordReader(split, context);
  MapContext<Text, Text, Text, Text> mcontext =
      new MapContextImpl<Text, Text, Text, Text>(conf,
          context.getTaskAttemptID(), reader, null, null,
          MapReduceTestUtil.createDummyReporter(), split);
  reader.initialize(split, mcontext);
  while (reader.nextKeyValue()) {
    result.add(new Text(reader.getCurrentValue()));
  }
  reader.close();
  return result;
}
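A helper like readSplit is typically driven by asking the input format for a job's splits and reading each one back. The fragment below sketches that pattern in the same fragment style as the excerpts; the input path is a hypothetical placeholder, and it assumes the same imports as the excerpt plus FileInputFormat from org.apache.hadoop.mapreduce.lib.input.

Job job = Job.getInstance(new Configuration());
FileInputFormat.setInputPaths(job, new Path("/tmp/kv-input")); // hypothetical
KeyValueTextInputFormat format = new KeyValueTextInputFormat();
// getSplits takes a JobContext; Job implements that interface.
for (InputSplit split : format.getSplits(job)) {
  for (Text line : readSplit(format, split, job)) {
    System.out.println(line);
  }
}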
Project: hadoop
File: TestCombineTextInputFormat.java
private static List<Text> readSplit(InputFormat<LongWritable, Text> format,
    InputSplit split, Job job) throws IOException, InterruptedException {
  List<Text> result = new ArrayList<Text>();
  Configuration conf = job.getConfiguration();
  TaskAttemptContext context =
      MapReduceTestUtil.createDummyMapTaskAttemptContext(conf);
  // reuse the dummy context instead of creating a second one
  RecordReader<LongWritable, Text> reader =
      format.createRecordReader(split, context);
  MapContext<LongWritable, Text, LongWritable, Text> mcontext =
      new MapContextImpl<LongWritable, Text, LongWritable, Text>(conf,
          context.getTaskAttemptID(), reader, null, null,
          MapReduceTestUtil.createDummyReporter(), split);
  reader.initialize(split, mcontext);
  while (reader.nextKeyValue()) {
    result.add(new Text(reader.getCurrentValue()));
  }
  reader.close(); // the original omitted this close
  return result;
}
Project: hadoop
File: TestMapReduceJobControl.java
public void testJobControlWithFailJob() throws Exception {
  LOG.info("Starting testJobControlWithFailJob");
  Configuration conf = createJobConf();
  cleanupData(conf);
  // create a job that always fails
  Job job1 = MapReduceTestUtil.createFailJob(conf, outdir_1, indir);
  // create the job dependency graph
  JobControl theControl = createDependencies(conf, job1);
  // wait until all the jobs complete
  waitTillAllFinished(theControl);
  assertEquals(ControlledJob.State.FAILED, cjob1.getJobState());
  assertEquals(ControlledJob.State.SUCCESS, cjob2.getJobState());
  assertEquals(ControlledJob.State.DEPENDENT_FAILED, cjob3.getJobState());
  assertEquals(ControlledJob.State.DEPENDENT_FAILED, cjob4.getJobState());
  theControl.stop();
}
Project: hadoop
File: TestMapReduceJobControl.java
@Test(timeout = 30000)
public void testControlledJob() throws Exception {
  LOG.info("Starting testControlledJob");
  Configuration conf = createJobConf();
  cleanupData(conf);
  Job job1 = MapReduceTestUtil.createCopyJob(conf, outdir_1, indir);
  JobControl theControl = createDependencies(conf, job1);
  // poll until the first controlled job starts running
  while (cjob1.getJobState() != ControlledJob.State.RUNNING) {
    try {
      Thread.sleep(100);
    } catch (InterruptedException e) {
      break;
    }
  }
  Assert.assertNotNull(cjob1.getMapredJobId());
  // wait until all the jobs complete
  waitTillAllFinished(theControl);
  assertEquals("Some jobs failed", 0, theControl.getFailedJobList().size());
  theControl.stop();
}
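createDependencies and waitTillAllFinished are helpers of the test class. Assuming the standard JobControl API, the usual shape of such helpers is sketched below: JobControl implements Runnable, so it is run on its own thread and polled until allFinished() reports true. job1 here stands for the copy job from the excerpt; the group name is illustrative.

// Sketch of driving a JobControl (assumed to mirror what the helpers do
// internally); runs inside a method that declares throws Exception.
JobControl control = new JobControl("dependency-group");
control.addJob(new ControlledJob(job1, null)); // null: no dependencies
Thread runner = new Thread(control);
runner.setDaemon(true);
runner.start();
while (!control.allFinished()) {
  Thread.sleep(100);
}
control.stop();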
Project: ditb
File: TestWALRecordReader.java
/**
 * Creates a new reader from the split, and matches the edits against the passed columns.
 */
private void testSplit(InputSplit split, byte[]... columns) throws Exception {
  final WALRecordReader reader = getReader();
  reader.initialize(split, MapReduceTestUtil.createDummyMapTaskAttemptContext(conf));
  for (byte[] column : columns) {
    assertTrue(reader.nextKeyValue());
    Cell cell = reader.getCurrentValue().getCells().get(0);
    if (!Bytes.equals(column, cell.getQualifier())) {
      fail("expected [" + Bytes.toString(column) + "], actual ["
          + Bytes.toString(cell.getQualifier()) + "]");
    }
  }
  assertFalse(reader.nextKeyValue());
  reader.close();
}
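Note that this HBase-side test (ditb is an HBase derivative) reuses Hadoop's MapReduceTestUtil.createDummyMapTaskAttemptContext to initialize a WALRecordReader outside of a running MapReduce job, the same trick the input-format helpers above use to exercise a RecordReader in isolation.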
Projects: aliyun-oss-hadoop-fs / big-c / hadoop-2.6.0-cdh5.4.3 / hops
These projects contain the same snippets as above, essentially verbatim; the hops copies differ only in adding a JUnit @Test annotation to each test method.