Java 类org.apache.hadoop.io.file.tfile.TFile.Reader 实例源码
项目:hadoop-oss
文件:TestTFile.java
/**
 * Writes records through a TFile Writer using the given compression codec,
 * reads them all back through an unsorted-file Scanner, then deletes the file.
 *
 * @param codec compression codec name passed to the Writer (e.g. "gz", "none")
 * @throws IOException on any filesystem or TFile error
 */
void unsortedWithSomeCodec(String codec) throws IOException {
  Path uTfile = new Path(ROOT, "unsorted.tfile");
  FSDataOutputStream fout = createFSOutput(uTfile);
  Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
  writeRecords(writer);
  writer.close();
  fout.close();
  // Reuse one opened stream for the Reader; the original opened the file a
  // second time and left the first stream unused until close.
  FSDataInputStream fin = fs.open(uTfile);
  Reader reader = new Reader(fin, fs.getFileStatus(uTfile).getLen(), conf);
  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(uTfile, true);
}
项目:hadoop-oss
文件:TestTFileStreams.java
/**
 * Verifies that reading a key with a negative buffer offset is rejected.
 */
@Test
public void testFailureNegativeOffset() throws IOException {
  if (skip)
    return;
  writeRecords(2, true, true);
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] buf = new byte[K];
  try {
    // A negative offset into the key buffer must throw.
    scanner.entry().getKey(buf, -1);
    fail("Failed to handle key negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    // Close in finally so resources are released even when the assertion
    // fails; the original left the finally block empty and closed outside it.
    scanner.close();
    reader.close();
  }
}
项目:hadoop-oss
文件:TestTFileByteArrays.java
/**
 * Verifies that a written metablock can be retrieved and that requesting a
 * non-existent metablock throws.
 */
@Test
public void testFailureGetNonExistentMetaBlock() throws IOException {
  if (skip)
    return;
  writer.append("keyX".getBytes(), "valueX".getBytes());
  // create a new metablock
  DataOutputStream outMeta =
      writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
  outMeta.write(123);
  outMeta.write("foo".getBytes());
  outMeta.close();
  closeOutput();
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  try {
    // The metablock written above must be retrievable.
    DataInputStream mb = reader.getMetaBlock("testX");
    Assert.assertNotNull(mb);
    mb.close();
    try {
      // A metablock that was never written must throw.
      reader.getMetaBlock("testY");
      Assert.fail("Error on handling non-existent metablocks.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    // Release the reader even when an assertion above fails.
    reader.close();
  }
}
项目:hadoop-oss
文件:TestTFileByteArrays.java
/**
 * Verifies that an entry's value can be read once but a second read of the
 * same entry is rejected.
 */
@Test
public void testFailureReadValueManyTimes() throws IOException {
  if (skip)
    return;
  writeRecords(5);
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
    try {
      // Reading the same entry's value a second time must throw.
      scanner.entry().getValue(vbuf);
      Assert.fail("Cannot get the value multiple times.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    // Release resources even when an assertion fails.
    scanner.close();
    reader.close();
  }
}
项目:hadoop-oss
文件:TestTFileByteArrays.java
/**
 * Verifies that opening a zero-length file as a TFile fails with
 * {@link EOFException}.
 */
@Test
public void testFailureOpenEmptyFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create an absolutely empty file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  out.close();
  try {
    Reader reader =
        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    // Should be unreachable; close the reader so it does not leak before
    // the assertion failure is reported.
    reader.close();
    Assert.fail("Error on handling empty files.");
  } catch (EOFException e) {
    // noop, expecting exceptions
  }
}
项目:hadoop-oss
文件:TestTFileByteArrays.java
/**
 * Verifies that opening a file of random bytes as a TFile fails with an
 * {@link IOException}.
 */
@Test
public void testFailureOpenRandomFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create a random file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  Random rand = new Random();
  byte[] buf = new byte[K];
  // fill with > 1MB data: (K + 2) chunks of K bytes each
  for (int nx = 0; nx < K + 2; nx++) {
    rand.nextBytes(buf);
    out.write(buf);
  }
  out.close();
  try {
    Reader reader =
        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    // Should be unreachable; close before reporting the failure.
    reader.close();
    Assert.fail("Error on handling random files.");
  } catch (IOException e) {
    // noop, expecting exceptions
  }
}
项目:hadoop-oss
文件:TestTFileByteArrays.java
/**
 * Verifies that {@code Scanner.lowerBound} rejects a negative key offset.
 */
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    // Close the scanner before its reader; the original closed the reader
    // first, which can break the scanner's cleanup.
    scanner.close();
    reader.close();
  }
  closeOutput();
}
项目:hadoop-oss
文件:TestTFileByteArrays.java
/**
 * Verifies that {@code Scanner.lowerBound} rejects a negative key length.
 */
@Test
public void testFailureNegativeLength_2() throws IOException {
  if (skip) {
    return;
  }
  closeOutput();
  final Reader tfileReader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  final Scanner boundScanner = tfileReader.createScanner();
  boolean rejected = false;
  try {
    boundScanner.lowerBound("keyX".getBytes(), 0, -1);
  } catch (Exception expected) {
    // expected: negative length must be refused
    rejected = true;
  } finally {
    boundScanner.close();
    tfileReader.close();
  }
  if (!rejected) {
    Assert.fail("Error on handling negative length.");
  }
  closeOutput();
}
项目:hadoop-oss
文件:TestTFileByteArrays.java
/**
 * Seeks to the record at {@code recordIndex} and checks that the value can be
 * read before the key on the same entry.
 *
 * @param recordIndex index of the sorted record to check
 * @throws IOException on any filesystem or TFile error
 */
private void readValueBeforeKey(int recordIndex)
    throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);
  try {
    // Read the value first ...
    byte[] valueBuffer = new byte[BUF_SIZE];
    int valueLength = scanner.entry().getValueLength();
    scanner.entry().getValue(valueBuffer);
    Assert.assertEquals(new String(valueBuffer, 0, valueLength),
        VALUE + recordIndex);
    // ... then the key of the very same entry.
    byte[] keyBuffer = new byte[BUF_SIZE];
    int keyLength = scanner.entry().getKeyLength();
    scanner.entry().getKey(keyBuffer);
    Assert.assertEquals(new String(keyBuffer, 0, keyLength),
        composeSortedKey(KEY, recordIndex));
  } finally {
    scanner.close();
    reader.close();
  }
}
项目:hadoop-oss
文件:TestTFileByteArrays.java
/**
 * Seeks to the record at {@code recordIndex} and checks that values can be
 * read without ever touching the keys, across an entry advance.
 *
 * @param recordIndex index of the sorted record to check
 * @throws IOException on any filesystem or TFile error
 */
private void readValueWithoutKey(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);
  try {
    byte[] vbuf1 = new byte[BUF_SIZE];
    int vlen1 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf1);
    Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);
    // If a following record exists, its value must also be readable
    // without reading the key first.
    if (scanner.advance() && !scanner.atEnd()) {
      byte[] vbuf2 = new byte[BUF_SIZE];
      int vlen2 = scanner.entry().getValueLength();
      scanner.entry().getValue(vbuf2);
      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
          + (recordIndex + 1));
    }
  } finally {
    // Release resources even when an assertion fails; the original closed
    // outside any finally and leaked on failure.
    scanner.close();
    reader.close();
  }
}
项目:hadoop-oss
文件:TestTFileUnsortedByteArrays.java
/**
 * Verifies that creating a key-range scanner on an unsorted TFile throws.
 */
@Test
public void testFailureScannerWithKeys() throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);
  try {
    // Key-range scanners are only meaningful on sorted TFiles.
    Scanner scanner =
        reader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
    // Unreachable unless the test is failing; avoid leaking the scanner.
    scanner.close();
    Assert
        .fail("Failed to catch creating scanner with keys on unsorted file.");
  }
  catch (RuntimeException e) {
    // expected: keyed scanner on an unsorted file must be rejected
  }
  finally {
    reader.close();
  }
}
项目:hadoop
文件:TestTFile.java
/**
 * Writes records through a TFile Writer using the given compression codec,
 * reads them all back through an unsorted-file Scanner, then deletes the file.
 *
 * @param codec compression codec name passed to the Writer (e.g. "gz", "none")
 * @throws IOException on any filesystem or TFile error
 */
void unsortedWithSomeCodec(String codec) throws IOException {
  Path uTfile = new Path(ROOT, "unsorted.tfile");
  FSDataOutputStream fout = createFSOutput(uTfile);
  Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
  writeRecords(writer);
  writer.close();
  fout.close();
  // Reuse one opened stream for the Reader; the original opened the file a
  // second time and left the first stream unused until close.
  FSDataInputStream fin = fs.open(uTfile);
  Reader reader = new Reader(fin, fs.getFileStatus(uTfile).getLen(), conf);
  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(uTfile, true);
}
项目:hadoop
文件:TestTFileStreams.java
/**
 * Verifies that reading a key with a negative buffer offset is rejected.
 */
public void testFailureNegativeOffset() throws IOException {
  if (skip)
    return;
  writeRecords(2, true, true);
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] buf = new byte[K];
  try {
    // A negative offset into the key buffer must throw.
    scanner.entry().getKey(buf, -1);
    Assert.fail("Failed to handle key negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    // Close in finally so resources are released even when the assertion
    // fails; the original left the finally block empty and closed outside it.
    scanner.close();
    reader.close();
  }
}
项目:hadoop
文件:TestTFileByteArrays.java
/**
 * Verifies that a written metablock can be retrieved and that requesting a
 * non-existent metablock throws.
 */
@Test
public void testFailureGetNonExistentMetaBlock() throws IOException {
  if (skip)
    return;
  writer.append("keyX".getBytes(), "valueX".getBytes());
  // create a new metablock
  DataOutputStream outMeta =
      writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
  outMeta.write(123);
  outMeta.write("foo".getBytes());
  outMeta.close();
  closeOutput();
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  try {
    // The metablock written above must be retrievable.
    DataInputStream mb = reader.getMetaBlock("testX");
    Assert.assertNotNull(mb);
    mb.close();
    try {
      // A metablock that was never written must throw.
      reader.getMetaBlock("testY");
      Assert.fail("Error on handling non-existent metablocks.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    // Release the reader even when an assertion above fails.
    reader.close();
  }
}
项目:hadoop
文件:TestTFileByteArrays.java
/**
 * Verifies that an entry's value can be read once but a second read of the
 * same entry is rejected.
 */
@Test
public void testFailureReadValueManyTimes() throws IOException {
  if (skip)
    return;
  writeRecords(5);
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
    try {
      // Reading the same entry's value a second time must throw.
      scanner.entry().getValue(vbuf);
      Assert.fail("Cannot get the value multiple times.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    // Release resources even when an assertion fails.
    scanner.close();
    reader.close();
  }
}
项目:hadoop
文件:TestTFileByteArrays.java
/**
 * Verifies that opening a zero-length file as a TFile fails with
 * {@link EOFException}.
 */
@Test
public void testFailureOpenEmptyFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create an absolutely empty file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  out.close();
  try {
    Reader reader =
        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    // Should be unreachable; close the reader so it does not leak before
    // the assertion failure is reported.
    reader.close();
    Assert.fail("Error on handling empty files.");
  } catch (EOFException e) {
    // noop, expecting exceptions
  }
}
项目:hadoop
文件:TestTFileByteArrays.java
/**
 * Verifies that opening a file of random bytes as a TFile fails with an
 * {@link IOException}.
 */
@Test
public void testFailureOpenRandomFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create a random file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  Random rand = new Random();
  byte[] buf = new byte[K];
  // fill with > 1MB data: (K + 2) chunks of K bytes each
  for (int nx = 0; nx < K + 2; nx++) {
    rand.nextBytes(buf);
    out.write(buf);
  }
  out.close();
  try {
    Reader reader =
        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    // Should be unreachable; close before reporting the failure.
    reader.close();
    Assert.fail("Error on handling random files.");
  } catch (IOException e) {
    // noop, expecting exceptions
  }
}
项目:hadoop
文件:TestTFileByteArrays.java
/**
 * Verifies that {@code Scanner.lowerBound} rejects a negative key offset.
 */
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    // Close the scanner before its reader; the original closed the reader
    // first, which can break the scanner's cleanup.
    scanner.close();
    reader.close();
  }
  closeOutput();
}
项目:hadoop
文件:TestTFileByteArrays.java
/**
 * Verifies that {@code Scanner.lowerBound} rejects a negative key length.
 */
@Test
public void testFailureNegativeLength_2() throws IOException {
  if (skip) {
    return;
  }
  closeOutput();
  final Reader tfileReader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  final Scanner boundScanner = tfileReader.createScanner();
  boolean rejected = false;
  try {
    boundScanner.lowerBound("keyX".getBytes(), 0, -1);
  } catch (Exception expected) {
    // expected: negative length must be refused
    rejected = true;
  } finally {
    boundScanner.close();
    tfileReader.close();
  }
  if (!rejected) {
    Assert.fail("Error on handling negative length.");
  }
  closeOutput();
}
项目:hadoop
文件:TestTFileByteArrays.java
/**
 * Seeks to the record at {@code recordIndex} and checks that the value can be
 * read before the key on the same entry.
 *
 * @param recordIndex index of the sorted record to check
 * @throws IOException on any filesystem or TFile error
 */
private void readValueBeforeKey(int recordIndex)
    throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);
  try {
    // Read the value first ...
    byte[] valueBuffer = new byte[BUF_SIZE];
    int valueLength = scanner.entry().getValueLength();
    scanner.entry().getValue(valueBuffer);
    Assert.assertEquals(new String(valueBuffer, 0, valueLength),
        VALUE + recordIndex);
    // ... then the key of the very same entry.
    byte[] keyBuffer = new byte[BUF_SIZE];
    int keyLength = scanner.entry().getKeyLength();
    scanner.entry().getKey(keyBuffer);
    Assert.assertEquals(new String(keyBuffer, 0, keyLength),
        composeSortedKey(KEY, recordIndex));
  } finally {
    scanner.close();
    reader.close();
  }
}
项目:hadoop
文件:TestTFileByteArrays.java
/**
 * Seeks to the record at {@code recordIndex} and checks that values can be
 * read without ever touching the keys, across an entry advance.
 *
 * @param recordIndex index of the sorted record to check
 * @throws IOException on any filesystem or TFile error
 */
private void readValueWithoutKey(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);
  try {
    byte[] vbuf1 = new byte[BUF_SIZE];
    int vlen1 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf1);
    Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);
    // If a following record exists, its value must also be readable
    // without reading the key first.
    if (scanner.advance() && !scanner.atEnd()) {
      byte[] vbuf2 = new byte[BUF_SIZE];
      int vlen2 = scanner.entry().getValueLength();
      scanner.entry().getValue(vbuf2);
      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
          + (recordIndex + 1));
    }
  } finally {
    // Release resources even when an assertion fails; the original closed
    // outside any finally and leaked on failure.
    scanner.close();
    reader.close();
  }
}
项目:hadoop
文件:TestTFileUnsortedByteArrays.java
/**
 * Verifies that creating a key-range scanner on an unsorted TFile throws.
 */
public void testFailureScannerWithKeys() throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);
  try {
    // Key-range scanners are only meaningful on sorted TFiles.
    Scanner scanner =
        reader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
    // Unreachable unless the test is failing; avoid leaking the scanner.
    scanner.close();
    Assert
        .fail("Failed to catch creating scanner with keys on unsorted file.");
  }
  catch (RuntimeException e) {
    // expected: keyed scanner on an unsorted file must be rejected
  }
  finally {
    reader.close();
  }
}
项目:aliyun-oss-hadoop-fs
文件:TestTFile.java
/**
 * Writes records through a TFile Writer using the given compression codec,
 * reads them all back through an unsorted-file Scanner, then deletes the file.
 *
 * @param codec compression codec name passed to the Writer (e.g. "gz", "none")
 * @throws IOException on any filesystem or TFile error
 */
void unsortedWithSomeCodec(String codec) throws IOException {
  Path uTfile = new Path(ROOT, "unsorted.tfile");
  FSDataOutputStream fout = createFSOutput(uTfile);
  Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
  writeRecords(writer);
  writer.close();
  fout.close();
  // Reuse one opened stream for the Reader; the original opened the file a
  // second time and left the first stream unused until close.
  FSDataInputStream fin = fs.open(uTfile);
  Reader reader = new Reader(fin, fs.getFileStatus(uTfile).getLen(), conf);
  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(uTfile, true);
}
项目:aliyun-oss-hadoop-fs
文件:TestTFileStreams.java
/**
 * Verifies that reading a key with a negative buffer offset is rejected.
 */
@Test
public void testFailureNegativeOffset() throws IOException {
  if (skip)
    return;
  writeRecords(2, true, true);
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] buf = new byte[K];
  try {
    // A negative offset into the key buffer must throw.
    scanner.entry().getKey(buf, -1);
    fail("Failed to handle key negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    // Close in finally so resources are released even when the assertion
    // fails; the original left the finally block empty and closed outside it.
    scanner.close();
    reader.close();
  }
}
项目:aliyun-oss-hadoop-fs
文件:TestTFileByteArrays.java
/**
 * Verifies that a written metablock can be retrieved and that requesting a
 * non-existent metablock throws.
 */
@Test
public void testFailureGetNonExistentMetaBlock() throws IOException {
  if (skip)
    return;
  writer.append("keyX".getBytes(), "valueX".getBytes());
  // create a new metablock
  DataOutputStream outMeta =
      writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
  outMeta.write(123);
  outMeta.write("foo".getBytes());
  outMeta.close();
  closeOutput();
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  try {
    // The metablock written above must be retrievable.
    DataInputStream mb = reader.getMetaBlock("testX");
    Assert.assertNotNull(mb);
    mb.close();
    try {
      // A metablock that was never written must throw.
      reader.getMetaBlock("testY");
      Assert.fail("Error on handling non-existent metablocks.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    // Release the reader even when an assertion above fails.
    reader.close();
  }
}
项目:aliyun-oss-hadoop-fs
文件:TestTFileByteArrays.java
/**
 * Verifies that an entry's value can be read once but a second read of the
 * same entry is rejected.
 */
@Test
public void testFailureReadValueManyTimes() throws IOException {
  if (skip)
    return;
  writeRecords(5);
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
    try {
      // Reading the same entry's value a second time must throw.
      scanner.entry().getValue(vbuf);
      Assert.fail("Cannot get the value multiple times.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    // Release resources even when an assertion fails.
    scanner.close();
    reader.close();
  }
}
项目:aliyun-oss-hadoop-fs
文件:TestTFileByteArrays.java
/**
 * Verifies that opening a zero-length file as a TFile fails with
 * {@link EOFException}.
 */
@Test
public void testFailureOpenEmptyFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create an absolutely empty file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  out.close();
  try {
    Reader reader =
        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    // Should be unreachable; close the reader so it does not leak before
    // the assertion failure is reported.
    reader.close();
    Assert.fail("Error on handling empty files.");
  } catch (EOFException e) {
    // noop, expecting exceptions
  }
}
项目:aliyun-oss-hadoop-fs
文件:TestTFileByteArrays.java
/**
 * Verifies that opening a file of random bytes as a TFile fails with an
 * {@link IOException}.
 */
@Test
public void testFailureOpenRandomFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create a random file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  Random rand = new Random();
  byte[] buf = new byte[K];
  // fill with > 1MB data: (K + 2) chunks of K bytes each
  for (int nx = 0; nx < K + 2; nx++) {
    rand.nextBytes(buf);
    out.write(buf);
  }
  out.close();
  try {
    Reader reader =
        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    // Should be unreachable; close before reporting the failure.
    reader.close();
    Assert.fail("Error on handling random files.");
  } catch (IOException e) {
    // noop, expecting exceptions
  }
}
项目:aliyun-oss-hadoop-fs
文件:TestTFileByteArrays.java
/**
 * Verifies that {@code Scanner.lowerBound} rejects a negative key offset.
 */
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    // Close the scanner before its reader; the original closed the reader
    // first, which can break the scanner's cleanup.
    scanner.close();
    reader.close();
  }
  closeOutput();
}
项目:aliyun-oss-hadoop-fs
文件:TestTFileByteArrays.java
/**
 * Verifies that {@code Scanner.lowerBound} rejects a negative key length.
 */
@Test
public void testFailureNegativeLength_2() throws IOException {
  if (skip) {
    return;
  }
  closeOutput();
  final Reader tfileReader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  final Scanner boundScanner = tfileReader.createScanner();
  boolean rejected = false;
  try {
    boundScanner.lowerBound("keyX".getBytes(), 0, -1);
  } catch (Exception expected) {
    // expected: negative length must be refused
    rejected = true;
  } finally {
    boundScanner.close();
    tfileReader.close();
  }
  if (!rejected) {
    Assert.fail("Error on handling negative length.");
  }
  closeOutput();
}
项目:aliyun-oss-hadoop-fs
文件:TestTFileByteArrays.java
/**
 * Seeks to the record at {@code recordIndex} and checks that the value can be
 * read before the key on the same entry.
 *
 * @param recordIndex index of the sorted record to check
 * @throws IOException on any filesystem or TFile error
 */
private void readValueBeforeKey(int recordIndex)
    throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);
  try {
    // Read the value first ...
    byte[] valueBuffer = new byte[BUF_SIZE];
    int valueLength = scanner.entry().getValueLength();
    scanner.entry().getValue(valueBuffer);
    Assert.assertEquals(new String(valueBuffer, 0, valueLength),
        VALUE + recordIndex);
    // ... then the key of the very same entry.
    byte[] keyBuffer = new byte[BUF_SIZE];
    int keyLength = scanner.entry().getKeyLength();
    scanner.entry().getKey(keyBuffer);
    Assert.assertEquals(new String(keyBuffer, 0, keyLength),
        composeSortedKey(KEY, recordIndex));
  } finally {
    scanner.close();
    reader.close();
  }
}
项目:aliyun-oss-hadoop-fs
文件:TestTFileByteArrays.java
/**
 * Seeks to the record at {@code recordIndex} and checks that values can be
 * read without ever touching the keys, across an entry advance.
 *
 * @param recordIndex index of the sorted record to check
 * @throws IOException on any filesystem or TFile error
 */
private void readValueWithoutKey(int recordIndex)
    throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex)
          .getBytes(), null);
  try {
    byte[] vbuf1 = new byte[BUF_SIZE];
    int vlen1 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf1);
    Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);
    // If a following record exists, its value must also be readable
    // without reading the key first.
    if (scanner.advance() && !scanner.atEnd()) {
      byte[] vbuf2 = new byte[BUF_SIZE];
      int vlen2 = scanner.entry().getValueLength();
      scanner.entry().getValue(vbuf2);
      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE
          + (recordIndex + 1));
    }
  } finally {
    // Release resources even when an assertion fails; the original closed
    // outside any finally and leaked on failure.
    scanner.close();
    reader.close();
  }
}
项目:aliyun-oss-hadoop-fs
文件:TestTFileUnsortedByteArrays.java
/**
 * Verifies that creating a key-range scanner on an unsorted TFile throws.
 */
@Test
public void testFailureScannerWithKeys() throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);
  try {
    // Key-range scanners are only meaningful on sorted TFiles.
    Scanner scanner =
        reader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
    // Unreachable unless the test is failing; avoid leaking the scanner.
    scanner.close();
    Assert
        .fail("Failed to catch creating scanner with keys on unsorted file.");
  }
  catch (RuntimeException e) {
    // expected: keyed scanner on an unsorted file must be rejected
  }
  finally {
    reader.close();
  }
}
项目:big-c
文件:TestTFile.java
/**
 * Writes records through a TFile Writer using the given compression codec,
 * reads them all back through an unsorted-file Scanner, then deletes the file.
 *
 * @param codec compression codec name passed to the Writer (e.g. "gz", "none")
 * @throws IOException on any filesystem or TFile error
 */
void unsortedWithSomeCodec(String codec) throws IOException {
  Path uTfile = new Path(ROOT, "unsorted.tfile");
  FSDataOutputStream fout = createFSOutput(uTfile);
  Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
  writeRecords(writer);
  writer.close();
  fout.close();
  // Reuse one opened stream for the Reader; the original opened the file a
  // second time and left the first stream unused until close.
  FSDataInputStream fin = fs.open(uTfile);
  Reader reader = new Reader(fin, fs.getFileStatus(uTfile).getLen(), conf);
  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(uTfile, true);
}
项目:big-c
文件:TestTFileStreams.java
/**
 * Verifies that reading a key with a negative buffer offset is rejected.
 */
public void testFailureNegativeOffset() throws IOException {
  if (skip)
    return;
  writeRecords(2, true, true);
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] buf = new byte[K];
  try {
    // A negative offset into the key buffer must throw.
    scanner.entry().getKey(buf, -1);
    Assert.fail("Failed to handle key negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    // Close in finally so resources are released even when the assertion
    // fails; the original left the finally block empty and closed outside it.
    scanner.close();
    reader.close();
  }
}
项目:big-c
文件:TestTFileByteArrays.java
/**
 * Verifies that a written metablock can be retrieved and that requesting a
 * non-existent metablock throws.
 */
@Test
public void testFailureGetNonExistentMetaBlock() throws IOException {
  if (skip)
    return;
  writer.append("keyX".getBytes(), "valueX".getBytes());
  // create a new metablock
  DataOutputStream outMeta =
      writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
  outMeta.write(123);
  outMeta.write("foo".getBytes());
  outMeta.close();
  closeOutput();
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  try {
    // The metablock written above must be retrievable.
    DataInputStream mb = reader.getMetaBlock("testX");
    Assert.assertNotNull(mb);
    mb.close();
    try {
      // A metablock that was never written must throw.
      reader.getMetaBlock("testY");
      Assert.fail("Error on handling non-existent metablocks.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    // Release the reader even when an assertion above fails.
    reader.close();
  }
}
项目:big-c
文件:TestTFileByteArrays.java
/**
 * Verifies that an entry's value can be read once but a second read of the
 * same entry is rejected.
 */
@Test
public void testFailureReadValueManyTimes() throws IOException {
  if (skip)
    return;
  writeRecords(5);
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
    try {
      // Reading the same entry's value a second time must throw.
      scanner.entry().getValue(vbuf);
      Assert.fail("Cannot get the value multiple times.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    // Release resources even when an assertion fails.
    scanner.close();
    reader.close();
  }
}
项目:big-c
文件:TestTFileByteArrays.java
/**
 * Verifies that opening a zero-length file as a TFile fails with
 * {@link EOFException}.
 */
@Test
public void testFailureOpenEmptyFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create an absolutely empty file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  out.close();
  try {
    Reader reader =
        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    // Should be unreachable; close the reader so it does not leak before
    // the assertion failure is reported.
    reader.close();
    Assert.fail("Error on handling empty files.");
  } catch (EOFException e) {
    // noop, expecting exceptions
  }
}
项目:big-c
文件:TestTFileByteArrays.java
/**
 * Verifies that opening a file of random bytes as a TFile fails with an
 * {@link IOException}.
 */
@Test
public void testFailureOpenRandomFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // create a random file
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  Random rand = new Random();
  byte[] buf = new byte[K];
  // fill with > 1MB data: (K + 2) chunks of K bytes each
  for (int nx = 0; nx < K + 2; nx++) {
    rand.nextBytes(buf);
    out.write(buf);
  }
  out.close();
  try {
    Reader reader =
        new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    // Should be unreachable; close before reporting the failure.
    reader.close();
    Assert.fail("Error on handling random files.");
  } catch (IOException e) {
    // noop, expecting exceptions
  }
}
项目:big-c
文件:TestTFileByteArrays.java
/**
 * Verifies that {@code Scanner.lowerBound} rejects a negative key offset.
 */
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    // Close the scanner before its reader; the original closed the reader
    // first, which can break the scanner's cleanup.
    scanner.close();
    reader.close();
  }
  closeOutput();
}