Example source code for the Java class org.apache.hadoop.mapred.TaskAttemptContextImpl
Project: hazelcast-jet
File: WriteHdfsP.java
@Override @Nonnull
public List<Processor> get(int count) {
    return processorList = range(0, count).mapToObj(i -> {
        try {
            String uuid = context.jetInstance().getCluster().getLocalMember().getUuid();
            TaskAttemptID taskAttemptID = new TaskAttemptID("jet-node-" + uuid,
                    jobContext.getJobID().getId(), JOB_SETUP, i, 0);
            jobConf.set("mapred.task.id", taskAttemptID.toString());
            jobConf.setInt("mapred.task.partition", i);
            TaskAttemptContextImpl taskAttemptContext = new TaskAttemptContextImpl(jobConf, taskAttemptID);
            @SuppressWarnings("unchecked")
            OutputFormat<K, V> outFormat = jobConf.getOutputFormat();
            RecordWriter<K, V> recordWriter = outFormat.getRecordWriter(
                    null, jobConf, uuid + '-' + valueOf(i), Reporter.NULL);
            return new WriteHdfsP<>(
                    recordWriter, taskAttemptContext, outputCommitter, extractKeyFn, extractValueFn);
        } catch (IOException e) {
            throw new JetException(e);
        }
    }).collect(toList());
}
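For orientation before the remaining snippets: every example on this page follows the same basic construction, namely build a TaskAttemptID, record it in the JobConf, and wrap both in a TaskAttemptContextImpl. Below is a minimal, self-contained sketch of just that step; the identifiers ("local", job 1, task 0) are illustrative placeholders, not values from any project above.

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TaskAttemptContextImpl;
import org.apache.hadoop.mapred.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;

public class TaskAttemptContextSketch {
    public static void main(String[] args) {
        JobConf jobConf = new JobConf();
        // Hypothetical identifiers: jtIdentifier "local", job 1, map task 0, attempt 0.
        TaskAttemptID attemptID = new TaskAttemptID("local", 1, TaskType.MAP, 0, 0);
        // Old-API code conventionally mirrors the attempt id into the configuration,
        // as the snippets above do with "mapred.task.id".
        jobConf.set("mapred.task.id", attemptID.toString());
        TaskAttemptContextImpl taskAttemptContext = new TaskAttemptContextImpl(jobConf, attemptID);
        System.out.println(taskAttemptContext.getTaskAttemptID()); // attempt_local_0001_m_000000_0
    }
}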
Project: hadoop
File: TaskAttemptImpl.java
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
    TaskAttemptContext taskContext =
            new TaskAttemptContextImpl(taskAttempt.conf,
                    TypeConverter.fromYarn(taskAttempt.attemptId));
    taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
            taskAttempt.attemptId, taskContext));
}
Project: hadoop
File: TestMRCJCFileOutputCommitter.java
public void testAbort() throws IOException {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer
            .getTaskAttemptPath(tContext));
    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
            job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do abort
    committer.abortTask(tContext);
    File expectedFile = new File(new Path(committer
            .getTaskAttemptPath(tContext), file).toString());
    assertFalse("task temp dir still exists", expectedFile.exists());
    committer.abortJob(jContext, JobStatus.State.FAILED);
    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
            .toString());
    assertFalse("job temp dir " + expectedFile + " still exists", expectedFile.exists());
    assertEquals("Output directory not empty", 0, new File(outDir.toString())
            .listFiles().length);
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: aliyun-oss-hadoop-fs
File: TaskAttemptImpl.java
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
    TaskAttemptContext taskContext =
            new TaskAttemptContextImpl(taskAttempt.conf,
                    TypeConverter.fromYarn(taskAttempt.attemptId));
    taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
            taskAttempt.attemptId, taskContext));
}
Project: aliyun-oss-hadoop-fs
File: TestMRCJCFileOutputCommitter.java
public void testAbort() throws IOException {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer
            .getTaskAttemptPath(tContext));
    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
            job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do abort
    committer.abortTask(tContext);
    File expectedFile = new File(new Path(committer
            .getTaskAttemptPath(tContext), file).toString());
    assertFalse("task temp dir still exists", expectedFile.exists());
    committer.abortJob(jContext, JobStatus.State.FAILED);
    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
            .toString());
    assertFalse("job temp dir " + expectedFile + " still exists", expectedFile.exists());
    assertEquals("Output directory not empty", 0, new File(outDir.toString())
            .listFiles().length);
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: big-c
File: TaskAttemptImpl.java
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
    TaskAttemptContext taskContext =
            new TaskAttemptContextImpl(taskAttempt.conf,
                    TypeConverter.fromYarn(taskAttempt.attemptId));
    taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
            taskAttempt.attemptId, taskContext));
}
Project: big-c
File: TestMRCJCFileOutputCommitter.java
public void testAbort() throws IOException {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer
            .getTaskAttemptPath(tContext));
    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
            job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do abort
    committer.abortTask(tContext);
    File expectedFile = new File(new Path(committer
            .getTaskAttemptPath(tContext), file).toString());
    assertFalse("task temp dir still exists", expectedFile.exists());
    committer.abortJob(jContext, JobStatus.State.FAILED);
    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
            .toString());
    assertFalse("job temp dir " + expectedFile + " still exists", expectedFile.exists());
    assertEquals("Output directory not empty", 0, new File(outDir.toString())
            .listFiles().length);
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: flink
File: HadoopOutputFormatBase.java
/**
 * Creates the temporary output file for the Hadoop RecordWriter.
 * @param taskNumber The number of the parallel instance.
 * @param numTasks The number of parallel tasks.
 * @throws java.io.IOException
 */
@Override
public void open(int taskNumber, int numTasks) throws IOException {
    // enforce sequential open() calls
    synchronized (OPEN_MUTEX) {
        if (Integer.toString(taskNumber + 1).length() > 6) {
            throw new IOException("Task id too large.");
        }
        TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_"
                + String.format("%" + (6 - Integer.toString(taskNumber + 1).length()) + "s", " ").replace(" ", "0")
                + Integer.toString(taskNumber + 1)
                + "_0");
        this.jobConf.set("mapred.task.id", taskAttemptID.toString());
        this.jobConf.setInt("mapred.task.partition", taskNumber + 1);
        // for hadoop 2.2
        this.jobConf.set("mapreduce.task.attempt.id", taskAttemptID.toString());
        this.jobConf.setInt("mapreduce.task.partition", taskNumber + 1);
        this.context = new TaskAttemptContextImpl(this.jobConf, taskAttemptID);
        this.outputCommitter = this.jobConf.getOutputCommitter();
        JobContext jobContext = new JobContextImpl(this.jobConf, new JobID());
        this.outputCommitter.setupJob(jobContext);
        this.recordWriter = this.mapredOutputFormat.getRecordWriter(null, this.jobConf,
                Integer.toString(taskNumber + 1), new HadoopDummyProgressable());
    }
}
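As a concrete illustration (not part of the Flink source): for taskNumber 0 the padded concatenation above produces the string "attempt__0000_r_000001_0", that is, an attempt id with an empty jtIdentifier, job 0000, reduce task 1, attempt 0.

// Equivalent, for taskNumber == 0, to the string built in open() above.
TaskAttemptID id = TaskAttemptID.forName("attempt__0000_r_000001_0");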
Project: hadoop-2.6.0-cdh5.4.3
File: TaskAttemptImpl.java
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
    TaskAttemptContext taskContext =
            new TaskAttemptContextImpl(taskAttempt.conf,
                    TypeConverter.fromYarn(taskAttempt.attemptId));
    taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
            taskAttempt.attemptId, taskContext));
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestMRCJCFileOutputCommitter.java
public void testAbort() throws IOException {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer
            .getTaskAttemptPath(tContext));
    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
            job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do abort
    committer.abortTask(tContext);
    File expectedFile = new File(new Path(committer
            .getTaskAttemptPath(tContext), file).toString());
    assertFalse("task temp dir still exists", expectedFile.exists());
    committer.abortJob(jContext, JobStatus.State.FAILED);
    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
            .toString());
    assertFalse("job temp dir " + expectedFile + " still exists", expectedFile.exists());
    assertEquals("Output directory not empty", 0, new File(outDir.toString())
            .listFiles().length);
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: hadoop-plus
File: TaskAttemptImpl.java
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
    TaskAttemptContext taskContext =
            new TaskAttemptContextImpl(taskAttempt.conf,
                    TypeConverter.fromYarn(taskAttempt.attemptId));
    taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
            taskAttempt.attemptId, taskContext));
}
Project: hadoop-plus
File: TestFileOutputCommitter.java
public void testAbort() throws IOException {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer
            .getTaskAttemptPath(tContext));
    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
            job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do abort
    committer.abortTask(tContext);
    File expectedFile = new File(new Path(committer
            .getTaskAttemptPath(tContext), file).toString());
    assertFalse("task temp dir still exists", expectedFile.exists());
    committer.abortJob(jContext, JobStatus.State.FAILED);
    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
            .toString());
    assertFalse("job temp dir " + expectedFile + " still exists", expectedFile.exists());
    assertEquals("Output directory not empty", 0, new File(outDir.toString())
            .listFiles().length);
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: hazelcast-jet
File: WriteHdfsP.java
private WriteHdfsP(RecordWriter<K, V> recordWriter,
                   TaskAttemptContextImpl taskAttemptContext,
                   OutputCommitter outputCommitter,
                   DistributedFunction<? super T, K> extractKeyFn,
                   DistributedFunction<? super T, V> extractValueFn
) {
    this.recordWriter = recordWriter;
    this.taskAttemptContext = taskAttemptContext;
    this.outputCommitter = outputCommitter;
    this.extractKeyFn = extractKeyFn;
    this.extractValueFn = extractValueFn;
}
Project: FlexMap
File: TaskAttemptImpl.java
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
    TaskAttemptContext taskContext =
            new TaskAttemptContextImpl(taskAttempt.conf,
                    TypeConverter.fromYarn(taskAttempt.attemptId));
    taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
            taskAttempt.attemptId, taskContext));
}
Project: FlexMap
File: TestMRCJCFileOutputCommitter.java
public void testAbort() throws IOException {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer
            .getTaskAttemptPath(tContext));
    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
            job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do abort
    committer.abortTask(tContext);
    File expectedFile = new File(new Path(committer
            .getTaskAttemptPath(tContext), file).toString());
    assertFalse("task temp dir still exists", expectedFile.exists());
    committer.abortJob(jContext, JobStatus.State.FAILED);
    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
            .toString());
    assertFalse("job temp dir " + expectedFile + " still exists", expectedFile.exists());
    assertEquals("Output directory not empty", 0, new File(outDir.toString())
            .listFiles().length);
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: ignite
File: HadoopV1OutputCollector.java
/**
 * Commit task.
 *
 * @throws IOException If failed.
 */
public void commit() throws IOException {
    if (writer != null) {
        OutputCommitter outputCommitter = jobConf.getOutputCommitter();
        TaskAttemptContext taskCtx = new TaskAttemptContextImpl(jobConf, attempt);
        if (outputCommitter.needsTaskCommit(taskCtx))
            outputCommitter.commitTask(taskCtx);
    }
}
Project: ignite
File: HadoopV1OutputCollector.java
/**
 * Abort task.
 */
public void abort() {
    try {
        if (writer != null)
            jobConf.getOutputCommitter().abortTask(new TaskAttemptContextImpl(jobConf, attempt));
    }
    catch (IOException ignore) {
        // No-op.
    }
}
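Read together with commit() above, the collector follows the standard task-side committer protocol of the old mapred API: set the task up, write, then commit only when the committer asks for it, otherwise abort. A hedged sketch of that control flow; runTask and the elided write step are placeholders, not Ignite code.

import java.io.IOException;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCommitter;
import org.apache.hadoop.mapred.TaskAttemptContext;
import org.apache.hadoop.mapred.TaskAttemptContextImpl;
import org.apache.hadoop.mapred.TaskAttemptID;

public class TaskCommitSketch {
    static void runTask(JobConf jobConf, TaskAttemptID attempt) throws IOException {
        OutputCommitter committer = jobConf.getOutputCommitter();
        TaskAttemptContext taskCtx = new TaskAttemptContextImpl(jobConf, attempt);
        committer.setupTask(taskCtx);
        try {
            // ... write records through the RecordWriter here ...
            if (committer.needsTaskCommit(taskCtx))
                committer.commitTask(taskCtx); // promote this attempt's output
        } catch (IOException e) {
            committer.abortTask(taskCtx); // discard this attempt's output
            throw e;
        }
    }
}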
Project: hops
File: TaskAttemptImpl.java
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
    TaskAttemptContext taskContext =
            new TaskAttemptContextImpl(taskAttempt.conf,
                    TypeConverter.fromYarn(taskAttempt.attemptId));
    taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
            taskAttempt.attemptId, taskContext));
}
Project: hadoop-TCP
File: TaskAttemptImpl.java
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
    TaskAttemptContext taskContext =
            new TaskAttemptContextImpl(taskAttempt.conf,
                    TypeConverter.fromYarn(taskAttempt.attemptId));
    taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
            taskAttempt.attemptId, taskContext));
}
Project: hadoop-TCP
File: TestFileOutputCommitter.java
public void testAbort() throws IOException {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer
            .getTaskAttemptPath(tContext));
    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
            job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do abort
    committer.abortTask(tContext);
    File expectedFile = new File(new Path(committer
            .getTaskAttemptPath(tContext), file).toString());
    assertFalse("task temp dir still exists", expectedFile.exists());
    committer.abortJob(jContext, JobStatus.State.FAILED);
    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
            .toString());
    assertFalse("job temp dir " + expectedFile + " still exists", expectedFile.exists());
    assertEquals("Output directory not empty", 0, new File(outDir.toString())
            .listFiles().length);
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: hardfs
File: TaskAttemptImpl.java
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
    TaskAttemptContext taskContext =
            new TaskAttemptContextImpl(taskAttempt.conf,
                    TypeConverter.fromYarn(taskAttempt.attemptId));
    taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
            taskAttempt.attemptId, taskContext));
}
Project: hardfs
File: TestFileOutputCommitter.java
public void testAbort() throws IOException {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer
            .getTaskAttemptPath(tContext));
    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
            job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do abort
    committer.abortTask(tContext);
    File expectedFile = new File(new Path(committer
            .getTaskAttemptPath(tContext), file).toString());
    assertFalse("task temp dir still exists", expectedFile.exists());
    committer.abortJob(jContext, JobStatus.State.FAILED);
    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
            .toString());
    assertFalse("job temp dir " + expectedFile + " still exists", expectedFile.exists());
    assertEquals("Output directory not empty", 0, new File(outDir.toString())
            .listFiles().length);
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: hadoop-on-lustre2
File: TaskAttemptImpl.java
@SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
    TaskAttemptContext taskContext =
            new TaskAttemptContextImpl(taskAttempt.conf,
                    TypeConverter.fromYarn(taskAttempt.attemptId));
    taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
            taskAttempt.attemptId, taskContext));
}
Project: hadoop-on-lustre2
File: TestMRCJCFileOutputCommitter.java
public void testAbort() throws IOException {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer
            .getTaskAttemptPath(tContext));
    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
            job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do abort
    committer.abortTask(tContext);
    File expectedFile = new File(new Path(committer
            .getTaskAttemptPath(tContext), file).toString());
    assertFalse("task temp dir still exists", expectedFile.exists());
    committer.abortJob(jContext, JobStatus.State.FAILED);
    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
            .toString());
    assertFalse("job temp dir " + expectedFile + " still exists", expectedFile.exists());
    assertEquals("Output directory not empty", 0, new File(outDir.toString())
            .listFiles().length);
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: mapreduce-fork
File: TestFileOutputCommitter.java
public void testAbort() throws IOException {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer
            .getTempTaskOutputPath(tContext));
    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(localFs,
            job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do abort
    committer.abortTask(tContext);
    File expectedFile = new File(new Path(committer
            .getTempTaskOutputPath(tContext), file).toString());
    assertFalse("task temp dir still exists", expectedFile.exists());
    committer.abortJob(jContext, JobStatus.State.FAILED);
    expectedFile = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME)
            .toString());
    assertFalse("job temp dir still exists", expectedFile.exists());
    assertEquals("Output directory not empty", 0, new File(outDir.toString())
            .listFiles().length);
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: hadoop
File: TestMRCJCFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTaskAttemptPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do commit
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    // validate output
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
    FileUtil.fullyDelete(new File(outDir.toString()));
}
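The testAbort and testCommitter snippets call a writeOutput(...) helper defined elsewhere in the test class. A plausible reconstruction follows; it is an assumption, inferred from the expected output asserted above and from the inline write sequence in the hadoop-2.6.0-cdh5.4.3 TestFileOutputCommitter variant further down.

// Reconstructed helper; key1/key2/val1/val2 are assumed to be Text fields of the test class.
private void writeOutput(RecordWriter theRecordWriter, Reporter reporter)
        throws IOException {
    NullWritable nullWritable = NullWritable.get();
    try {
        theRecordWriter.write(key1, val1);
        theRecordWriter.write(null, nullWritable);
        theRecordWriter.write(null, val1);
        theRecordWriter.write(nullWritable, val2);
        theRecordWriter.write(key2, nullWritable);
        theRecordWriter.write(key1, null);
        theRecordWriter.write(null, null);
        theRecordWriter.write(key2, val2);
    } finally {
        theRecordWriter.close(reporter);
    }
}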
Project: aliyun-oss-hadoop-fs
File: TestMRCJCFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTaskAttemptPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do commit
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    // validate output
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: big-c
File: TestMRCJCFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTaskAttemptPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do commit
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    // validate output
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestMRCJCFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTaskAttemptPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do commit
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    // validate output
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    job.set("mapred.task.id", attempt);
    job.setOutputCommitter(FileOutputCommitter.class);
    FileOutputFormat.setOutputPath(job, outDir);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTempTaskOutputPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    Text key1 = new Text("key1");
    Text key2 = new Text("key2");
    Text val1 = new Text("val1");
    Text val2 = new Text("val2");
    NullWritable nullWritable = NullWritable.get();
    try {
        theRecordWriter.write(key1, val1);
        theRecordWriter.write(null, nullWritable);
        theRecordWriter.write(null, val1);
        theRecordWriter.write(nullWritable, val2);
        theRecordWriter.write(key2, nullWritable);
        theRecordWriter.write(key1, null);
        theRecordWriter.write(null, null);
        theRecordWriter.write(key2, val2);
    } finally {
        theRecordWriter.close(reporter);
    }
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
}
Project: hadoop-plus
File: TestFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTaskAttemptPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do commit
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    // validate output
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: FlexMap
File: TestMRCJCFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTaskAttemptPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do commit
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    // validate output
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: hadoop-TCP
File: TestFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTaskAttemptPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do commit
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    // validate output
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: hardfs
File: TestFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTaskAttemptPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do commit
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    // validate output
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: hadoop-on-lustre2
File: TestMRCJCFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTaskAttemptPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do commit
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    // validate output
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: hanoi-hadoop-2.0.0-cdh
File: TestFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    job.set("mapred.task.id", attempt);
    job.setOutputCommitter(FileOutputCommitter.class);
    FileOutputFormat.setOutputPath(job, outDir);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTempTaskOutputPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    Text key1 = new Text("key1");
    Text key2 = new Text("key2");
    Text val1 = new Text("val1");
    Text val2 = new Text("val2");
    NullWritable nullWritable = NullWritable.get();
    try {
        theRecordWriter.write(key1, val1);
        theRecordWriter.write(null, nullWritable);
        theRecordWriter.write(null, val1);
        theRecordWriter.write(nullWritable, val2);
        theRecordWriter.write(key2, nullWritable);
        theRecordWriter.write(key1, null);
        theRecordWriter.write(null, null);
        theRecordWriter.write(key2, val2);
    } finally {
        theRecordWriter.close(reporter);
    }
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
}
Project: mapreduce-fork
File: TestFileOutputCommitter.java
@SuppressWarnings("unchecked")
public void testCommitter() throws Exception {
    JobConf job = new JobConf();
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job,
            committer.getTempTaskOutputPath(tContext));
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";
    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = FileSystem.getLocal(job);
    TextOutputFormat theOutputFormat = new TextOutputFormat();
    RecordWriter theRecordWriter =
            theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);
    // do commit
    committer.commitTask(tContext);
    committer.commitJob(jContext);
    // validate output
    File expectedFile = new File(new Path(outDir, file).toString());
    StringBuffer expectedOutput = new StringBuffer();
    expectedOutput.append(key1).append('\t').append(val1).append("\n");
    expectedOutput.append(val1).append("\n");
    expectedOutput.append(val2).append("\n");
    expectedOutput.append(key2).append("\n");
    expectedOutput.append(key1).append("\n");
    expectedOutput.append(key2).append('\t').append(val2).append("\n");
    String output = UtilsForTests.slurp(expectedFile);
    assertEquals(output, expectedOutput.toString());
    FileUtil.fullyDelete(new File(outDir.toString()));
}
Project: ignite
File: HadoopV1OutputCollector.java
/**
 * Setup task.
 *
 * @throws IOException If failed.
 */
public void setup() throws IOException {
    if (writer != null)
        jobConf.getOutputCommitter().setupTask(new TaskAttemptContextImpl(jobConf, attempt));
}
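Finally, the job-side half of the protocol that the tests above exercise through setupJob, commitJob, and abortJob, as a hedged sketch: it mirrors the calls made in those snippets but is not taken from any of the listed projects, and the JobCommitSketch/runJob names are placeholders.

import java.io.IOException;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobContext;
import org.apache.hadoop.mapred.JobContextImpl;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.OutputCommitter;
import org.apache.hadoop.mapreduce.JobStatus;

public class JobCommitSketch {
    static void runJob(JobConf jobConf) throws IOException {
        JobContext jobContext = new JobContextImpl(jobConf, new JobID());
        OutputCommitter committer = jobConf.getOutputCommitter();
        committer.setupJob(jobContext);
        try {
            // ... run the tasks; each performs setupTask, then commitTask or abortTask ...
            committer.commitJob(jobContext);
        } catch (IOException e) {
            committer.abortJob(jobContext, JobStatus.State.FAILED);
            throw e;
        }
    }
}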