Java class org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper example source code
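TokenCounterMapper is a predefined Mapper<Object, Text, Text, IntWritable> that ships with Hadoop: it splits each input value with a StringTokenizer and emits every token with a count of 1, so pairing it with IntSumReducer gives a ready-made word count. A condensed sketch of its map logic (a paraphrase, not the verbatim Hadoop source):

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Condensed paraphrase of what TokenCounterMapper does.
public class TokenCounterMapperSketch extends Mapper<Object, Text, Text, IntWritable> {
  private final static IntWritable one = new IntWritable(1);
  private final Text word = new Text();

  @Override
  public void map(Object key, Text value, Context context)
      throws IOException, InterruptedException {
    // Whitespace-tokenize the line and emit (token, 1) for each token.
    StringTokenizer itr = new StringTokenizer(value.toString());
    while (itr.hasMoreTokens()) {
      word.set(itr.nextToken());
      context.write(word, one);
    }
  }
}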
Project: hadoop
File: TestSingleElementChain.java
public void testNoChain() throws Exception {
  Path inDir = new Path(localPathRoot, "testing/chain/input");
  Path outDir = new Path(localPathRoot, "testing/chain/output");
  String input = "a\nb\na\n";
  String expectedOutput = "a\t2\nb\t1\n";

  Configuration conf = createJobConf();
  Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1, input);
  job.setJobName("chain");

  // Single-element chain: TokenCounterMapper is the only mapper in the chain.
  ChainMapper.addMapper(job, TokenCounterMapper.class, Object.class,
      Text.class, Text.class, IntWritable.class, null);
  ChainReducer.setReducer(job, IntSumReducer.class, Text.class,
      IntWritable.class, Text.class, IntWritable.class, null);

  job.waitForCompletion(true);
  assertTrue("Job failed", job.isSuccessful());
  assertEquals("Outputs doesn't match", expectedOutput, MapReduceTestUtil
      .readOutput(outDir, conf));
}
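With the input "a\nb\na\n", TokenCounterMapper emits (a,1), (b,1), (a,1) and IntSumReducer sums the counts per key, which is why the expected output is "a\t2\nb\t1\n". The trailing null arguments to addMapper and setReducer are the per-element Configuration objects, which this test does not need.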
Project: aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3, hadoop-plus, FlexMap, hops, hadoop-TCP, hardfs, hadoop-on-lustre2, mapreduce-fork
File: TestSingleElementChain.java
These Hadoop forks carry the same testNoChain() method verbatim; the hops copy differs only in adding a JUnit 4 @Test annotation.
Project: Data-Science-with-Hadoop
File: WordCountPredefined.java
public static void main(String[] args) throws Exception {
  Configuration conf = new Configuration();
  Job job = new Job(conf, "word count1");
  job.setJarByClass(WordCountPredefined.class);

  // Reuse Hadoop's predefined mapper/reducer instead of writing custom classes.
  job.setMapperClass(TokenCounterMapper.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);

  FileInputFormat.addInputPath(job, new Path(args[0]));
  FileOutputFormat.setOutputPath(job, new Path(args[1]));
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
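The Job(Configuration, String) constructor used above is deprecated as of Hadoop 2.x; the non-deprecated equivalent is the Job.getInstance factory method, which the IReS-Platform example below also uses:

  // Preferred on Hadoop 2.x and later; replaces "new Job(conf, ...)".
  Job job = Job.getInstance(conf, "word count1");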
Project: IReS-Platform
File: Main.java
public static void main(String[] args) throws Exception {
  String input = null, output = null;
  if (args.length != 2) {
    System.err.println("This Job takes exactly 2 arguments: input, output");
    System.exit(-1);
  } else {
    input = args[0];
    output = args[1];
  }

  Configuration conf = new Configuration();
  Job job = Job.getInstance(conf, "word count");
  job.setJarByClass(Main.class);
  job.setMapperClass(TokenCounterMapper.class);
  job.setCombinerClass(IntSumReducer.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);

  FileInputFormat.addInputPath(job, new Path(input));
  Path output_path = new Path(output);
  FileSystem.get(conf).delete(output_path, true); // remove previous output
  FileOutputFormat.setOutputPath(job, output_path);
  System.exit(job.waitForCompletion(true) ? 0 : 1);
}
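FileOutputFormat refuses to run a job whose output directory already exists, so deleting output_path up front lets the job be rerun without a manual cleanup step.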
Project: IReS-Platform
File: WordCountJob.java
@Override
public void JobConfig() {
  job.setJarByClass(Main.class);
  job.setMapperClass(TokenCounterMapper.class);
  job.setCombinerClass(IntSumReducer.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
}
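JobConfig() only configures a job field that is created elsewhere in the class, and that wiring is not part of this excerpt. A minimal hypothetical driver, assuming a constructor that accepts the Job (an assumption, not taken from the IReS-Platform source), might look like:

  // Hypothetical usage sketch; WordCountJob's real constructor and wiring are not shown in this excerpt.
  Configuration conf = new Configuration();
  Job job = Job.getInstance(conf, "word count");
  WordCountJob wordCountJob = new WordCountJob(job); // assumed constructor taking the Job
  wordCountJob.JobConfig();                          // applies the mapper/reducer settings above
  FileInputFormat.addInputPath(job, new Path(args[0]));
  FileOutputFormat.setOutputPath(job, new Path(args[1]));
  System.exit(job.waitForCompletion(true) ? 0 : 1);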
Project: Hadoop-Codes
File: countingDriver.java
public static void main(String[] args)
    throws IOException, InterruptedException, ClassNotFoundException {
  Configuration conf = new Configuration();
  Job job = Job.getInstance(conf, "countword");
  job.setMapperClass(TokenCounterMapper.class);
  job.setReducerClass(IntSumReducer.class);
  job.setOutputKeyClass(Text.class);
  job.setOutputValueClass(IntWritable.class);
  FileInputFormat.setInputPaths(job, new Path(args[0]));
  FileOutputFormat.setOutputPath(job, new Path(args[1]));
  if (!job.waitForCompletion(false)) {
    return;
  }
}
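Two things to note about this driver: waitForCompletion(false) suppresses client-side progress reporting, and returning from main on failure still exits the JVM with status 0, so callers cannot tell success from failure. It also omits setJarByClass, which is usually required for the mapper and reducer classes to be located when the job is submitted to a real cluster rather than run locally.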