Java class org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater: example source code
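BuiltInZlibInflater is the pure-Java zlib Decompressor that Hadoop falls back to when the native zlib library is not loaded; it is a thin wrapper around java.util.zip.Inflater, and its compression-side counterpart is BuiltInZlibDeflater. The test snippets below exercise it either directly (paired with BuiltInZlibDeflater in CompressDecompressTester) or indirectly through ZlibFactory and the CodecPool. As a quick orientation before the per-project listings, here is a minimal round-trip sketch using the pair directly; it is illustrative only, and the class name, buffer size, and sample data are invented for this example rather than taken from the snippets.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater;

// Hypothetical demo class, not part of Hadoop.
public class BuiltInZlibRoundTrip {
  public static void main(String[] args) throws IOException {
    byte[] raw = "hello, zlib".getBytes(StandardCharsets.UTF_8);

    // Compress with the pure-Java deflater.
    Compressor compressor = new BuiltInZlibDeflater();
    compressor.setInput(raw, 0, raw.length);
    compressor.finish();
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    byte[] buf = new byte[64];
    while (!compressor.finished()) {
      int n = compressor.compress(buf, 0, buf.length);
      compressed.write(buf, 0, n);
    }
    compressor.end();

    // Decompress with the pure-Java inflater.
    Decompressor decompressor = new BuiltInZlibInflater();
    byte[] zipped = compressed.toByteArray();
    decompressor.setInput(zipped, 0, zipped.length);
    ByteArrayOutputStream restored = new ByteArrayOutputStream();
    while (!decompressor.finished()) {
      int n = decompressor.decompress(buf, 0, buf.length);
      restored.write(buf, 0, n);
    }
    decompressor.end();

    // Should print the original message.
    System.out.println(new String(restored.toByteArray(), StandardCharsets.UTF_8));
  }
}

In production code you would normally not construct these classes yourself; as the TestCodec examples show, ZlibFactory.getZlibDecompressor(conf) and CodecPool.getDecompressor(codec) hand back a BuiltInZlibInflater automatically when native zlib is unavailable.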
Project: hadoop-oss
File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    GenericTestUtils.assertExceptionContains(
        "testCompressorDecompressor error !!!", ex);
  }
}
Project: hadoop
File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
Project: aliyun-oss-hadoop-fs
File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
Project: big-c
File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
Project: hadoop-plus
File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
Project: hops
File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    GenericTestUtils.assertExceptionContains(
        "testCompressorDecompressor error !!!", ex);
  }
}
Project: hadoop-TCP
File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
Project: hardfs
File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
Project: hadoop-on-lustre2
File: TestCompressorDecompressor.java
@Test
public void testCompressorDecompressor() {
  // no more for this data
  int SIZE = 44 * 1024;
  byte[] rawData = generate(SIZE);
  try {
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new SnappyCompressor(), new SnappyDecompressor())
        .withCompressDecompressPair(new Lz4Compressor(), new Lz4Decompressor())
        .withCompressDecompressPair(new BuiltInZlibDeflater(), new BuiltInZlibInflater())
        .withTestCases(ImmutableSet.of(CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
            CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  } catch (Exception ex) {
    fail("testCompressorDecompressor error !!!" + ex);
  }
}
Project: hadoop-oss
File: TestCodec.java
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  ZlibFactory.setNativeZlibLoaded(false);
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hadoop
File: TestCodec.java
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hadoop
File: TestCodec.java
@Test
public void testGzipLongOverflow() throws IOException {
  LOG.info("testGzipLongOverflow");
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final int NBUF = 1024 * 4 + 1;
  final char[] buf = new char[1024 * 1024];
  for (int i = 0; i < buf.length; i++) buf[i] = '\0';
  for (int i = 0; i < NBUF; i++) {
    bw.write(buf);
  }
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  for (int j = 0; j < NBUF; j++) {
    int n = br.read(buf);
    assertEquals("got wrong read length!", n, buf.length);
    for (int i = 0; i < buf.length; i++)
      assertEquals("got wrong byte!", buf[i], '\0');
  }
  br.close();
}
Project: aliyun-oss-hadoop-fs
File: TestCodec.java
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  ZlibFactory.setNativeZlibLoaded(false);
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: big-c
File: TestCodec.java
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestCodec.java
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hadoop-plus
File: TestCodec.java
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hops
File: TestCodec.java
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  Path f = new Path(GenericTestUtils.getTempPath("testGzipCodecRead.txt.gz"));
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hadoop-TCP
File: TestCodec.java
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hadoop-on-lustre
File: TestCodec.java
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hardfs
File: TestCodec.java
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hadoop-on-lustre2
File: TestCodec.java
@Test
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hadoop-0.20
File: TestCodec.java
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hortonworks-extension
File: TestCodec.java
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hortonworks-extension
File: TestCodec.java
public void testGzipCodecRead() throws IOException {
  // Create a gzipped file and try to read it back, using a decompressor
  // from the CodecPool.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipCodecRead.txt.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final String msg = "This is the message in the file!";
  bw.write(msg);
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  String line = br.readLine();
  assertEquals("Didn't get the same message back!", msg, line);
  br.close();
}
Project: hadoop-oss
File: TestCodec.java
@Test
public void testGzipLongOverflow() throws IOException {
  LOG.info("testGzipLongOverflow");
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  ZlibFactory.setNativeZlibLoaded(false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final int NBUF = 1024 * 4 + 1;
  final char[] buf = new char[1024 * 1024];
  for (int i = 0; i < buf.length; i++) buf[i] = '\0';
  for (int i = 0; i < NBUF; i++) {
    bw.write(buf);
  }
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  for (int j = 0; j < NBUF; j++) {
    int n = br.read(buf);
    assertEquals("got wrong read length!", n, buf.length);
    for (int i = 0; i < buf.length; i++)
      assertEquals("got wrong byte!", buf[i], '\0');
  }
  br.close();
}
Project: hadoop-oss
File: TestCodec.java
@Test
public void testCodecPoolAndGzipDecompressor() {
  // BuiltInZlibInflater should not be used as the GzipCodec decompressor.
  // Assert that this is the case.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  ZlibFactory.setNativeZlibLoaded(false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // This should give us a BuiltInZlibInflater.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  // its createOutputStream() just wraps the existing stream in a
  // java.util.zip.GZIPOutputStream.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec",
      codec instanceof GzipCodec);
  // make sure we don't get a null decompressor
  Decompressor codecDecompressor = codec.createDecompressor();
  if (null == codecDecompressor) {
    fail("Got null codecDecompressor");
  }
  // Asking the CodecPool for a decompressor for GzipCodec
  // should not return null
  Decompressor poolDecompressor = CodecPool.getDecompressor(codec);
  if (null == poolDecompressor) {
    fail("Got null poolDecompressor");
  }
  // return a couple decompressors
  CodecPool.returnDecompressor(zlibDecompressor);
  CodecPool.returnDecompressor(poolDecompressor);
  Decompressor poolDecompressor2 = CodecPool.getDecompressor(codec);
  if (poolDecompressor.getClass() == BuiltInGzipDecompressor.class) {
    if (poolDecompressor == poolDecompressor2) {
      fail("Reused java gzip decompressor in pool");
    }
  } else {
    if (poolDecompressor != poolDecompressor2) {
      fail("Did not reuse native gzip decompressor in pool");
    }
  }
}
Project: hadoop
File: TestCodec.java
public void testCodecPoolAndGzipDecompressor() {
  // BuiltInZlibInflater should not be used as the GzipCodec decompressor.
  // Assert that this is the case.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // This should give us a BuiltInZlibInflater.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  // its createOutputStream() just wraps the existing stream in a
  // java.util.zip.GZIPOutputStream.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec",
      codec instanceof GzipCodec);
  // make sure we don't get a null decompressor
  Decompressor codecDecompressor = codec.createDecompressor();
  if (null == codecDecompressor) {
    fail("Got null codecDecompressor");
  }
  // Asking the CodecPool for a decompressor for GzipCodec
  // should not return null
  Decompressor poolDecompressor = CodecPool.getDecompressor(codec);
  if (null == poolDecompressor) {
    fail("Got null poolDecompressor");
  }
  // return a couple decompressors
  CodecPool.returnDecompressor(zlibDecompressor);
  CodecPool.returnDecompressor(poolDecompressor);
  Decompressor poolDecompressor2 = CodecPool.getDecompressor(codec);
  if (poolDecompressor.getClass() == BuiltInGzipDecompressor.class) {
    if (poolDecompressor == poolDecompressor2) {
      fail("Reused java gzip decompressor in pool");
    }
  } else {
    if (poolDecompressor != poolDecompressor2) {
      fail("Did not reuse native gzip decompressor in pool");
    }
  }
}
Project: aliyun-oss-hadoop-fs
File: TestCodec.java
@Test
public void testGzipLongOverflow() throws IOException {
  LOG.info("testGzipLongOverflow");
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  ZlibFactory.setNativeZlibLoaded(false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final int NBUF = 1024 * 4 + 1;
  final char[] buf = new char[1024 * 1024];
  for (int i = 0; i < buf.length; i++) buf[i] = '\0';
  for (int i = 0; i < NBUF; i++) {
    bw.write(buf);
  }
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  for (int j = 0; j < NBUF; j++) {
    int n = br.read(buf);
    assertEquals("got wrong read length!", n, buf.length);
    for (int i = 0; i < buf.length; i++)
      assertEquals("got wrong byte!", buf[i], '\0');
  }
  br.close();
}
Project: aliyun-oss-hadoop-fs
File: TestCodec.java
@Test
public void testCodecPoolAndGzipDecompressor() {
  // BuiltInZlibInflater should not be used as the GzipCodec decompressor.
  // Assert that this is the case.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  ZlibFactory.setNativeZlibLoaded(false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // This should give us a BuiltInZlibInflater.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  // its createOutputStream() just wraps the existing stream in a
  // java.util.zip.GZIPOutputStream.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec",
      codec instanceof GzipCodec);
  // make sure we don't get a null decompressor
  Decompressor codecDecompressor = codec.createDecompressor();
  if (null == codecDecompressor) {
    fail("Got null codecDecompressor");
  }
  // Asking the CodecPool for a decompressor for GzipCodec
  // should not return null
  Decompressor poolDecompressor = CodecPool.getDecompressor(codec);
  if (null == poolDecompressor) {
    fail("Got null poolDecompressor");
  }
  // return a couple decompressors
  CodecPool.returnDecompressor(zlibDecompressor);
  CodecPool.returnDecompressor(poolDecompressor);
  Decompressor poolDecompressor2 = CodecPool.getDecompressor(codec);
  if (poolDecompressor.getClass() == BuiltInGzipDecompressor.class) {
    if (poolDecompressor == poolDecompressor2) {
      fail("Reused java gzip decompressor in pool");
    }
  } else {
    if (poolDecompressor != poolDecompressor2) {
      fail("Did not reuse native gzip decompressor in pool");
    }
  }
}
Project: big-c
File: TestCodec.java
@Test
public void testGzipLongOverflow() throws IOException {
  LOG.info("testGzipLongOverflow");
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final int NBUF = 1024 * 4 + 1;
  final char[] buf = new char[1024 * 1024];
  for (int i = 0; i < buf.length; i++) buf[i] = '\0';
  for (int i = 0; i < NBUF; i++) {
    bw.write(buf);
  }
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  for (int j = 0; j < NBUF; j++) {
    int n = br.read(buf);
    assertEquals("got wrong read length!", n, buf.length);
    for (int i = 0; i < buf.length; i++)
      assertEquals("got wrong byte!", buf[i], '\0');
  }
  br.close();
}
Project: big-c
File: TestCodec.java
public void testCodecPoolAndGzipDecompressor() {
  // BuiltInZlibInflater should not be used as the GzipCodec decompressor.
  // Assert that this is the case.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // This should give us a BuiltInZlibInflater.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  // its createOutputStream() just wraps the existing stream in a
  // java.util.zip.GZIPOutputStream.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec",
      codec instanceof GzipCodec);
  // make sure we don't get a null decompressor
  Decompressor codecDecompressor = codec.createDecompressor();
  if (null == codecDecompressor) {
    fail("Got null codecDecompressor");
  }
  // Asking the CodecPool for a decompressor for GzipCodec
  // should not return null
  Decompressor poolDecompressor = CodecPool.getDecompressor(codec);
  if (null == poolDecompressor) {
    fail("Got null poolDecompressor");
  }
  // return a couple decompressors
  CodecPool.returnDecompressor(zlibDecompressor);
  CodecPool.returnDecompressor(poolDecompressor);
  Decompressor poolDecompressor2 = CodecPool.getDecompressor(codec);
  if (poolDecompressor.getClass() == BuiltInGzipDecompressor.class) {
    if (poolDecompressor == poolDecompressor2) {
      fail("Reused java gzip decompressor in pool");
    }
  } else {
    if (poolDecompressor != poolDecompressor2) {
      fail("Did not reuse native gzip decompressor in pool");
    }
  }
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestCodec.java
@Test
public void testGzipLongOverflow() throws IOException {
  LOG.info("testGzipLongOverflow");
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final int NBUF = 1024 * 4 + 1;
  final char[] buf = new char[1024 * 1024];
  for (int i = 0; i < buf.length; i++) buf[i] = '\0';
  for (int i = 0; i < NBUF; i++) {
    bw.write(buf);
  }
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  for (int j = 0; j < NBUF; j++) {
    int n = br.read(buf);
    assertEquals("got wrong read length!", n, buf.length);
    for (int i = 0; i < buf.length; i++)
      assertEquals("got wrong byte!", buf[i], '\0');
  }
  br.close();
}
Project: hadoop-2.6.0-cdh5.4.3
File: TestCodec.java
public void testCodecPoolAndGzipDecompressor() {
  // BuiltInZlibInflater should not be used as the GzipCodec decompressor.
  // Assert that this is the case.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // This should give us a BuiltInZlibInflater.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  // its createOutputStream() just wraps the existing stream in a
  // java.util.zip.GZIPOutputStream.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec",
      codec instanceof GzipCodec);
  // make sure we don't get a null decompressor
  Decompressor codecDecompressor = codec.createDecompressor();
  if (null == codecDecompressor) {
    fail("Got null codecDecompressor");
  }
  // Asking the CodecPool for a decompressor for GzipCodec
  // should not return null
  Decompressor poolDecompressor = CodecPool.getDecompressor(codec);
  if (null == poolDecompressor) {
    fail("Got null poolDecompressor");
  }
  // return a couple decompressors
  CodecPool.returnDecompressor(zlibDecompressor);
  CodecPool.returnDecompressor(poolDecompressor);
  Decompressor poolDecompressor2 = CodecPool.getDecompressor(codec);
  if (poolDecompressor.getClass() == BuiltInGzipDecompressor.class) {
    if (poolDecompressor == poolDecompressor2) {
      fail("Reused java gzip decompressor in pool");
    }
  } else {
    if (poolDecompressor != poolDecompressor2) {
      fail("Did not reuse native gzip decompressor in pool");
    }
  }
}
Project: hadoop-plus
File: TestCodec.java
@Test
public void testGzipLongOverflow() throws IOException {
  LOG.info("testGzipLongOverflow");
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final int NBUF = 1024 * 4 + 1;
  final char[] buf = new char[1024 * 1024];
  for (int i = 0; i < buf.length; i++) buf[i] = '\0';
  for (int i = 0; i < NBUF; i++) {
    bw.write(buf);
  }
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  for (int j = 0; j < NBUF; j++) {
    int n = br.read(buf);
    assertEquals("got wrong read length!", n, buf.length);
    for (int i = 0; i < buf.length; i++)
      assertEquals("got wrong byte!", buf[i], '\0');
  }
  br.close();
}
Project: hadoop-plus
File: TestCodec.java
public void testCodecPoolAndGzipDecompressor() {
  // BuiltInZlibInflater should not be used as the GzipCodec decompressor.
  // Assert that this is the case.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // This should give us a BuiltInZlibInflater.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  // its createOutputStream() just wraps the existing stream in a
  // java.util.zip.GZIPOutputStream.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec",
      codec instanceof GzipCodec);
  // make sure we don't get a null decompressor
  Decompressor codecDecompressor = codec.createDecompressor();
  if (null == codecDecompressor) {
    fail("Got null codecDecompressor");
  }
  // Asking the CodecPool for a decompressor for GzipCodec
  // should not return null
  Decompressor poolDecompressor = CodecPool.getDecompressor(codec);
  if (null == poolDecompressor) {
    fail("Got null poolDecompressor");
  }
  // return a couple decompressors
  CodecPool.returnDecompressor(zlibDecompressor);
  CodecPool.returnDecompressor(poolDecompressor);
  Decompressor poolDecompressor2 = CodecPool.getDecompressor(codec);
  if (poolDecompressor.getClass() == BuiltInGzipDecompressor.class) {
    if (poolDecompressor == poolDecompressor2) {
      fail("Reused java gzip decompressor in pool");
    }
  } else {
    if (poolDecompressor != poolDecompressor2) {
      fail("Did not reuse native gzip decompressor in pool");
    }
  }
}
Project: hops
File: TestCodec.java
@Test
public void testGzipLongOverflow() throws IOException {
  LOG.info("testGzipLongOverflow");
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  Path f = new Path(GenericTestUtils.getTempPath("testGzipLongOverflow.bin.gz"));
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final int NBUF = 1024 * 4 + 1;
  final char[] buf = new char[1024 * 1024];
  for (int i = 0; i < buf.length; i++) buf[i] = '\0';
  for (int i = 0; i < NBUF; i++) {
    bw.write(buf);
  }
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  for (int j = 0; j < NBUF; j++) {
    int n = br.read(buf);
    assertEquals("got wrong read length!", n, buf.length);
    for (int i = 0; i < buf.length; i++)
      assertEquals("got wrong byte!", buf[i], '\0');
  }
  br.close();
}
Project: hops
File: TestCodec.java
@Test
public void testCodecPoolAndGzipDecompressor() {
  // BuiltInZlibInflater should not be used as the GzipCodec decompressor.
  // Assert that this is the case.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // This should give us a BuiltInZlibInflater.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  // its createOutputStream() just wraps the existing stream in a
  // java.util.zip.GZIPOutputStream.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec",
      codec instanceof GzipCodec);
  // make sure we don't get a null decompressor
  Decompressor codecDecompressor = codec.createDecompressor();
  if (null == codecDecompressor) {
    fail("Got null codecDecompressor");
  }
  // Asking the CodecPool for a decompressor for GzipCodec
  // should not return null
  Decompressor poolDecompressor = CodecPool.getDecompressor(codec);
  if (null == poolDecompressor) {
    fail("Got null poolDecompressor");
  }
  // return a couple decompressors
  CodecPool.returnDecompressor(zlibDecompressor);
  CodecPool.returnDecompressor(poolDecompressor);
  Decompressor poolDecompressor2 = CodecPool.getDecompressor(codec);
  if (poolDecompressor.getClass() == BuiltInGzipDecompressor.class) {
    if (poolDecompressor == poolDecompressor2) {
      fail("Reused java gzip decompressor in pool");
    }
  } else {
    if (poolDecompressor != poolDecompressor2) {
      fail("Did not reuse native gzip decompressor in pool");
    }
  }
}
Project: hadoop-TCP
File: TestCodec.java
@Test
public void testGzipLongOverflow() throws IOException {
  LOG.info("testGzipLongOverflow");
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // Ensure that the CodecPool has a BuiltInZlibInflater in it.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  CodecPool.returnDecompressor(zlibDecompressor);
  // Now create a GZip text file.
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
  BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
      new GZIPOutputStream(new FileOutputStream(f.toString()))));
  final int NBUF = 1024 * 4 + 1;
  final char[] buf = new char[1024 * 1024];
  for (int i = 0; i < buf.length; i++) buf[i] = '\0';
  for (int i = 0; i < NBUF; i++) {
    bw.write(buf);
  }
  bw.close();
  // Now read it back, using the CodecPool to establish the
  // decompressor to use.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  FileSystem fs = FileSystem.getLocal(conf);
  InputStream is = fs.open(f);
  is = codec.createInputStream(is, decompressor);
  BufferedReader br = new BufferedReader(new InputStreamReader(is));
  for (int j = 0; j < NBUF; j++) {
    int n = br.read(buf);
    assertEquals("got wrong read length!", n, buf.length);
    for (int i = 0; i < buf.length; i++)
      assertEquals("got wrong byte!", buf[i], '\0');
  }
  br.close();
}
Project: hadoop-TCP
File: TestCodec.java
public void testCodecPoolAndGzipDecompressor() {
  // BuiltInZlibInflater should not be used as the GzipCodec decompressor.
  // Assert that this is the case.
  // Don't use native libs for this test.
  Configuration conf = new Configuration();
  conf.setBoolean("hadoop.native.lib", false);
  assertFalse("ZlibFactory is using native libs against request",
      ZlibFactory.isNativeZlibLoaded(conf));
  // This should give us a BuiltInZlibInflater.
  Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
  assertNotNull("zlibDecompressor is null!", zlibDecompressor);
  assertTrue("ZlibFactory returned unexpected inflator",
      zlibDecompressor instanceof BuiltInZlibInflater);
  // its createOutputStream() just wraps the existing stream in a
  // java.util.zip.GZIPOutputStream.
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
  assertTrue("Codec for .gz file is not GzipCodec",
      codec instanceof GzipCodec);
  // make sure we don't get a null decompressor
  Decompressor codecDecompressor = codec.createDecompressor();
  if (null == codecDecompressor) {
    fail("Got null codecDecompressor");
  }
  // Asking the CodecPool for a decompressor for GzipCodec
  // should not return null
  Decompressor poolDecompressor = CodecPool.getDecompressor(codec);
  if (null == poolDecompressor) {
    fail("Got null poolDecompressor");
  }
  // return a couple decompressors
  CodecPool.returnDecompressor(zlibDecompressor);
  CodecPool.returnDecompressor(poolDecompressor);
  Decompressor poolDecompressor2 = CodecPool.getDecompressor(codec);
  if (poolDecompressor.getClass() == BuiltInGzipDecompressor.class) {
    if (poolDecompressor == poolDecompressor2) {
      fail("Reused java gzip decompressor in pool");
    }
  } else {
    if (poolDecompressor != poolDecompressor2) {
      fail("Did not reuse native gzip decompressor in pool");
    }
  }
}