Java 类org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner 实例源码
项目:hadoop
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers.
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "mapreduce.map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapreduce.job.reduces=2",
      // Keep task files on failure for debugging.
      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:aliyun-oss-hadoop-fs
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers.
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "mapreduce.map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapreduce.job.reduces=2",
      // Keep task files on failure for debugging.
      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:big-c
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers.
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "mapreduce.map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapreduce.job.reduces=2",
      // Keep task files on failure for debugging.
      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hadoop-2.6.0-cdh5.4.3
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers.
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "mapreduce.map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapreduce.job.reduces=2",
      // Keep task files on failure for debugging.
      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hadoop-2.6.0-cdh5.4.3
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers (legacy pre-2.x key names).
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapred.reduce.tasks=2",
      // Keep task files on failure for debugging.
      "-jobconf", "keep.failed.task.files=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hadoop-EAR
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers (legacy pre-2.x key names).
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapred.reduce.tasks=2",
      // Keep task files on failure for debugging.
      "-jobconf", "keep.failed.task.files=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hadoop-EAR
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers (legacy pre-2.x key names).
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapred.reduce.tasks=2",
      // Keep task files on failure for debugging.
      "-jobconf", "keep.failed.task.files=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hadoop-plus
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers.
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "mapreduce.map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapreduce.job.reduces=2",
      // Keep task files on failure for debugging.
      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hops
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers.
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "mapreduce.map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapreduce.job.reduces=2",
      // Keep task files on failure for debugging.
      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hadoop-TCP
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers.
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "mapreduce.map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapreduce.job.reduces=2",
      // Keep task files on failure for debugging.
      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hadoop-on-lustre
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers (legacy pre-2.x key names).
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapred.reduce.tasks=2",
      // Keep task files on failure for debugging.
      "-jobconf", "keep.failed.task.files=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hardfs
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers.
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "mapreduce.map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapreduce.job.reduces=2",
      // Keep task files on failure for debugging.
      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hadoop-on-lustre2
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers.
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "mapreduce.map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapreduce.job.reduces=2",
      // Keep task files on failure for debugging.
      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:RDFS
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers (legacy pre-2.x key names).
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapred.reduce.tasks=2",
      // Keep task files on failure for debugging.
      "-jobconf", "keep.failed.task.files=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:RDFS
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers (legacy pre-2.x key names).
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapred.reduce.tasks=2",
      // Keep task files on failure for debugging.
      "-jobconf", "keep.failed.task.files=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hadoop-0.20
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers (legacy pre-2.x key names).
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapred.reduce.tasks=2",
      // Keep task files on failure for debugging.
      "-jobconf", "keep.failed.task.files=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hanoi-hadoop-2.0.0-cdh
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers (legacy pre-2.x key names).
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapred.reduce.tasks=2",
      // Keep task files on failure for debugging.
      "-jobconf", "keep.failed.task.files=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:mapreduce-fork
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers.
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "mapreduce.map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapreduce.job.reduces=2",
      // Keep task files on failure for debugging.
      "-jobconf", "mapreduce.task.files.preserve.failedtasks=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hortonworks-extension
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers (legacy pre-2.x key names).
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapred.reduce.tasks=2",
      // Keep task files on failure for debugging.
      "-jobconf", "keep.failed.task.files=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hortonworks-extension
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers (legacy pre-2.x key names).
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapred.reduce.tasks=2",
      // Keep task files on failure for debugging.
      "-jobconf", "keep.failed.task.files=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:hadoop-gpu
文件:TestStreamDataProtocol.java
/**
 * Streaming job arguments for this test: map output keys are the first two
 * '.'-separated fields, and {@link KeyFieldBasedPartitioner} partitions on
 * the first field only, across two reducers (legacy pre-2.x key names).
 */
protected String[] genArgs() {
  final String tmpDir = System.getProperty("test.build.data", "/tmp");
  final String[] args = {
      "-input", INPUT_FILE.getAbsolutePath(),
      "-output", OUTPUT_DIR.getAbsolutePath(),
      "-mapper", map,
      "-reducer", reduce,
      "-partitioner", KeyFieldBasedPartitioner.class.getCanonicalName(),
      "-jobconf", "stream.map.output.field.separator=.",
      "-jobconf", "stream.num.map.output.key.fields=2",
      "-jobconf", "map.output.key.field.separator=.",
      // Only the first key field decides the partition.
      "-jobconf", "num.key.fields.for.partition=1",
      "-jobconf", "mapred.reduce.tasks=2",
      // Keep task files on failure for debugging.
      "-jobconf", "keep.failed.task.files=true",
      "-jobconf", "stream.tmpdir=" + tmpDir
  };
  return args;
}
项目:mapreduce-fork
文件:TestKeyFieldBasedPartitioner.java
/**
 * Verify that key-field-based partitioning tolerates an empty key:
 * the empty key must land in partition 0.
 */
public void testEmptyKey() throws Exception {
  // Ask for far more key fields than an empty key can supply.
  JobConf conf = new JobConf();
  conf.setInt("num.key.fields.for.partition", 10);

  KeyFieldBasedPartitioner<Text, Text> partitioner =
      new KeyFieldBasedPartitioner<Text, Text>();
  partitioner.configure(conf);

  int partition = partitioner.getPartition(new Text(), new Text(), 10);
  assertEquals("Empty key should map to 0th partition", 0, partition);
}
项目:hadoop
文件:JobConf.java
/**
 * Get the {@link KeyFieldBasedPartitioner} options.
 *
 * @return the configured key-field spec, or {@code null} if unset
 */
public String getKeyFieldPartitionerOption() {
  final String keySpec = get(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS);
  return keySpec;
}
项目:aliyun-oss-hadoop-fs
文件:JobConf.java
/**
 * Get the {@link KeyFieldBasedPartitioner} options.
 *
 * @return the configured key-field spec, or {@code null} if unset
 */
public String getKeyFieldPartitionerOption() {
  final String keySpec = get(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS);
  return keySpec;
}
项目:big-c
文件:JobConf.java
/**
 * Get the {@link KeyFieldBasedPartitioner} options.
 *
 * @return the configured key-field spec, or {@code null} if unset
 */
public String getKeyFieldPartitionerOption() {
  final String keySpec = get(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS);
  return keySpec;
}
项目:hadoop-2.6.0-cdh5.4.3
文件:JobConf.java
/**
 * Get the {@link KeyFieldBasedPartitioner} options.
 *
 * @return the configured key-field spec, or {@code null} if unset
 */
public String getKeyFieldPartitionerOption() {
  final String keySpec = get(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS);
  return keySpec;
}
项目:hadoop-plus
文件:JobConf.java
/**
 * Get the {@link KeyFieldBasedPartitioner} options.
 *
 * @return the configured key-field spec, or {@code null} if unset
 */
public String getKeyFieldPartitionerOption() {
  final String keySpec = get(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS);
  return keySpec;
}
项目:FlexMap
文件:JobConf.java
/**
 * Get the {@link KeyFieldBasedPartitioner} options.
 *
 * @return the configured key-field spec, or {@code null} if unset
 */
public String getKeyFieldPartitionerOption() {
  final String keySpec = get(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS);
  return keySpec;
}
项目:hops
文件:JobConf.java
/**
 * Get the {@link KeyFieldBasedPartitioner} options.
 *
 * @return the configured key-field spec, or {@code null} if unset
 */
public String getKeyFieldPartitionerOption() {
  final String keySpec = get(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS);
  return keySpec;
}
项目:hadoop-TCP
文件:JobConf.java
/**
 * Get the {@link KeyFieldBasedPartitioner} options.
 *
 * @return the configured key-field spec, or {@code null} if unset
 */
public String getKeyFieldPartitionerOption() {
  final String keySpec = get(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS);
  return keySpec;
}
项目:hardfs
文件:JobConf.java
/**
 * Get the {@link KeyFieldBasedPartitioner} options.
 *
 * @return the configured key-field spec, or {@code null} if unset
 */
public String getKeyFieldPartitionerOption() {
  final String keySpec = get(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS);
  return keySpec;
}
项目:hadoop-on-lustre2
文件:JobConf.java
/**
 * Get the {@link KeyFieldBasedPartitioner} options.
 *
 * @return the configured key-field spec, or {@code null} if unset
 */
public String getKeyFieldPartitionerOption() {
  final String keySpec = get(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS);
  return keySpec;
}
项目:mapreduce-fork
文件:JobConf.java
/**
 * Get the {@link KeyFieldBasedPartitioner} options.
 *
 * @return the configured key-field spec, or {@code null} if unset
 */
public String getKeyFieldPartitionerOption() {
  final String keySpec = get(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS);
  return keySpec;
}
项目:hadoop
文件:JobConf.java
/**
 * Set the {@link KeyFieldBasedPartitioner} options used for
 * {@link Partitioner}, and install that partitioner on the job.
 *
 * @param keySpec the key specification of the form -k pos1[,pos2], where,
 * pos is of the form f[.c][opts], where f is the number
 * of the key field to use, and c is the number of the first character from
 * the beginning of the field. Fields and character posns are numbered
 * starting with 1; a character position of zero in pos2 indicates the
 * field's last character. If '.c' is omitted from pos1, it defaults to 1
 * (the beginning of the field); if omitted from pos2, it defaults to 0
 * (the end of the field).
 */
public void setKeyFieldPartitionerOptions(String keySpec) {
  // Record the key spec, then install the partitioner that consumes it;
  // the two configuration writes are independent, so order is immaterial.
  set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, keySpec);
  setPartitionerClass(KeyFieldBasedPartitioner.class);
}
项目:aliyun-oss-hadoop-fs
文件:JobConf.java
/**
 * Set the {@link KeyFieldBasedPartitioner} options used for
 * {@link Partitioner}, and install that partitioner on the job.
 *
 * @param keySpec the key specification of the form -k pos1[,pos2], where,
 * pos is of the form f[.c][opts], where f is the number
 * of the key field to use, and c is the number of the first character from
 * the beginning of the field. Fields and character posns are numbered
 * starting with 1; a character position of zero in pos2 indicates the
 * field's last character. If '.c' is omitted from pos1, it defaults to 1
 * (the beginning of the field); if omitted from pos2, it defaults to 0
 * (the end of the field).
 */
public void setKeyFieldPartitionerOptions(String keySpec) {
  // Record the key spec, then install the partitioner that consumes it;
  // the two configuration writes are independent, so order is immaterial.
  set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, keySpec);
  setPartitionerClass(KeyFieldBasedPartitioner.class);
}
项目:big-c
文件:JobConf.java
/**
 * Set the {@link KeyFieldBasedPartitioner} options used for
 * {@link Partitioner}, and install that partitioner on the job.
 *
 * @param keySpec the key specification of the form -k pos1[,pos2], where,
 * pos is of the form f[.c][opts], where f is the number
 * of the key field to use, and c is the number of the first character from
 * the beginning of the field. Fields and character posns are numbered
 * starting with 1; a character position of zero in pos2 indicates the
 * field's last character. If '.c' is omitted from pos1, it defaults to 1
 * (the beginning of the field); if omitted from pos2, it defaults to 0
 * (the end of the field).
 */
public void setKeyFieldPartitionerOptions(String keySpec) {
  // Record the key spec, then install the partitioner that consumes it;
  // the two configuration writes are independent, so order is immaterial.
  set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, keySpec);
  setPartitionerClass(KeyFieldBasedPartitioner.class);
}
项目:hadoop-2.6.0-cdh5.4.3
文件:JobConf.java
/**
 * Set the {@link KeyFieldBasedPartitioner} options used for
 * {@link Partitioner}, and install that partitioner on the job.
 *
 * @param keySpec the key specification of the form -k pos1[,pos2], where,
 * pos is of the form f[.c][opts], where f is the number
 * of the key field to use, and c is the number of the first character from
 * the beginning of the field. Fields and character posns are numbered
 * starting with 1; a character position of zero in pos2 indicates the
 * field's last character. If '.c' is omitted from pos1, it defaults to 1
 * (the beginning of the field); if omitted from pos2, it defaults to 0
 * (the end of the field).
 */
public void setKeyFieldPartitionerOptions(String keySpec) {
  // Record the key spec, then install the partitioner that consumes it;
  // the two configuration writes are independent, so order is immaterial.
  set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, keySpec);
  setPartitionerClass(KeyFieldBasedPartitioner.class);
}
项目:hadoop-2.6.0-cdh5.4.3
文件:JobConf.java
/**
 * Set the {@link KeyFieldBasedPartitioner} options used for
 * {@link Partitioner}, and install that partitioner on the job.
 *
 * @param keySpec the key specification of the form -k pos1[,pos2], where,
 * pos is of the form f[.c][opts], where f is the number
 * of the key field to use, and c is the number of the first character from
 * the beginning of the field. Fields and character posns are numbered
 * starting with 1; a character position of zero in pos2 indicates the
 * field's last character. If '.c' is omitted from pos1, it defaults to 1
 * (the beginning of the field); if omitted from pos2, it defaults to 0
 * (the end of the field).
 */
public void setKeyFieldPartitionerOptions(String keySpec) {
  // Record the key spec, then install the partitioner that consumes it;
  // the two configuration writes are independent, so order is immaterial.
  set("mapred.text.key.partitioner.options", keySpec);
  setPartitionerClass(KeyFieldBasedPartitioner.class);
}
项目:hadoop-EAR
文件:JobConf.java
/**
 * Set the {@link KeyFieldBasedPartitioner} options used for
 * {@link Partitioner}, and install that partitioner on the job.
 *
 * @param keySpec the key specification of the form -k pos1[,pos2], where,
 * pos is of the form f[.c][opts], where f is the number
 * of the key field to use, and c is the number of the first character from
 * the beginning of the field. Fields and character posns are numbered
 * starting with 1; a character position of zero in pos2 indicates the
 * field's last character. If '.c' is omitted from pos1, it defaults to 1
 * (the beginning of the field); if omitted from pos2, it defaults to 0
 * (the end of the field).
 */
public void setKeyFieldPartitionerOptions(String keySpec) {
  // Record the key spec, then install the partitioner that consumes it;
  // the two configuration writes are independent, so order is immaterial.
  set("mapred.text.key.partitioner.options", keySpec);
  setPartitionerClass(KeyFieldBasedPartitioner.class);
}
项目:hadoop-plus
文件:JobConf.java
/**
 * Set the {@link KeyFieldBasedPartitioner} options used for
 * {@link Partitioner}, and install that partitioner on the job.
 *
 * @param keySpec the key specification of the form -k pos1[,pos2], where,
 * pos is of the form f[.c][opts], where f is the number
 * of the key field to use, and c is the number of the first character from
 * the beginning of the field. Fields and character posns are numbered
 * starting with 1; a character position of zero in pos2 indicates the
 * field's last character. If '.c' is omitted from pos1, it defaults to 1
 * (the beginning of the field); if omitted from pos2, it defaults to 0
 * (the end of the field).
 */
public void setKeyFieldPartitionerOptions(String keySpec) {
  // Record the key spec, then install the partitioner that consumes it;
  // the two configuration writes are independent, so order is immaterial.
  set(KeyFieldBasedPartitioner.PARTITIONER_OPTIONS, keySpec);
  setPartitionerClass(KeyFieldBasedPartitioner.class);
}