Java class org.apache.hadoop.mapreduce.task.reduce.Shuffle — example source code
Project: hadoop
File: TestShufflePlugin.java
@Test
/**
 * A testing method instructing core hadoop to load an external ShuffleConsumerPlugin
 * as if it came from a 3rd party.
 */
public void testPluginAbility() {
  try {
    // configure the JobConf to use TestShuffleConsumerPlugin as the shuffle
    // consumer plugin (keyed by MRConfig.SHUFFLE_CONSUMER_PLUGIN)
    JobConf jobConf = new JobConf();
    jobConf.setClass(MRConfig.SHUFFLE_CONSUMER_PLUGIN,
                     TestShufflePlugin.TestShuffleConsumerPlugin.class,
                     ShuffleConsumerPlugin.class);

    ShuffleConsumerPlugin shuffleConsumerPlugin = null;
    Class<? extends ShuffleConsumerPlugin> clazz =
        jobConf.getClass(MRConfig.SHUFFLE_CONSUMER_PLUGIN, Shuffle.class, ShuffleConsumerPlugin.class);
    assertNotNull("Unable to get " + MRConfig.SHUFFLE_CONSUMER_PLUGIN, clazz);

    // load the 3rd party plugin through core's factory method
    shuffleConsumerPlugin = ReflectionUtils.newInstance(clazz, jobConf);
    assertNotNull("Unable to load " + MRConfig.SHUFFLE_CONSUMER_PLUGIN, shuffleConsumerPlugin);
  } catch (Exception e) {
    assertTrue("Threw exception:" + e, false);
  }
}
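Note that the test passes Shuffle.class (the built-in implementation) as the default when reading MRConfig.SHUFFLE_CONSUMER_PLUGIN back, so a third-party plugin only has to implement the ShuffleConsumerPlugin interface to be loadable the same way. Below is a minimal sketch of such a plugin, assuming the init/run/close contract of org.apache.hadoop.mapred.ShuffleConsumerPlugin; the class name MyShuffleConsumerPlugin and its trivial method bodies are illustrative and not part of the original page.

import java.io.IOException;

import org.apache.hadoop.mapred.RawKeyValueIterator;
import org.apache.hadoop.mapred.ShuffleConsumerPlugin;

// Illustrative third-party shuffle plugin; only the interface methods
// (init/run/close) come from the ShuffleConsumerPlugin contract.
public class MyShuffleConsumerPlugin<K, V> implements ShuffleConsumerPlugin<K, V> {

  private ShuffleConsumerPlugin.Context<K, V> context;

  @Override
  public void init(ShuffleConsumerPlugin.Context<K, V> context) {
    // Keep the context around: it carries the JobConf, task attempt id,
    // reporter and the other collaborators a real shuffle needs.
    this.context = context;
  }

  @Override
  public RawKeyValueIterator run() throws IOException, InterruptedException {
    // A real plugin would fetch the map outputs, merge them, and return an
    // iterator over the merged key/value pairs. Returning null keeps the
    // sketch minimal; it is not usable in an actual job.
    return null;
  }

  @Override
  public void close() {
    // Release any resources acquired in init()/run().
  }
}

Registering such a plugin for a job would then mirror what the test does: jobConf.setClass(MRConfig.SHUFFLE_CONSUMER_PLUGIN, MyShuffleConsumerPlugin.class, ShuffleConsumerPlugin.class).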
The same testPluginAbility() method appears verbatim in TestShufflePlugin.java in the following projects as well: aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3, hadoop-plus, FlexMap, hops, hadoop-TCP, hardfs, and hadoop-on-lustre2.