Skip to content

Commit 2f970f2

Browse files
update aircompressor to 3.5
1 parent 7190ab6 commit 2f970f2

3 files changed

Lines changed: 22 additions & 11 deletions

File tree

parquet-hadoop/pom.xml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -143,8 +143,8 @@
143143
</dependency>
144144
<dependency>
145145
<groupId>io.airlift</groupId>
146-
<artifactId>aircompressor</artifactId>
147-
<version>2.0.2</version>
146+
<artifactId>aircompressor-v3</artifactId>
147+
<version>3.5</version>
148148
</dependency>
149149
<dependency>
150150
<groupId>commons-pool</groupId>

parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/Lz4RawCompressor.java

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -18,23 +18,28 @@
1818
*/
1919
package org.apache.parquet.hadoop.codec;
2020

21-
import io.airlift.compress.lz4.Lz4Compressor;
21+
import io.airlift.compress.v3.lz4.Lz4Compressor;
2222
import java.io.IOException;
2323
import java.nio.ByteBuffer;
2424

2525
public class Lz4RawCompressor extends NonBlockedCompressor {
2626

27-
private Lz4Compressor compressor = new Lz4Compressor();
27+
private final Lz4Compressor compressor = Lz4Compressor.create();
2828

2929
@Override
3030
protected int maxCompressedLength(int byteSize) {
31-
return io.airlift.compress.lz4.Lz4RawCompressor.maxCompressedLength(byteSize);
31+
return compressor.maxCompressedLength(byteSize);
3232
}
3333

3434
@Override
3535
protected int compress(ByteBuffer uncompressed, ByteBuffer compressed) throws IOException {
36-
compressor.compress(uncompressed, compressed);
37-
int compressedSize = compressed.position();
36+
int inputLen = uncompressed.remaining();
37+
byte[] inputArr = new byte[inputLen];
38+
uncompressed.get(inputArr);
39+
int maxOut = compressor.maxCompressedLength(inputLen);
40+
byte[] outputArr = new byte[maxOut];
41+
int compressedSize = compressor.compress(inputArr, 0, inputLen, outputArr, 0, maxOut);
42+
compressed.put(outputArr, 0, compressedSize);
3843
compressed.limit(compressedSize);
3944
compressed.rewind();
4045
return compressedSize;

parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/Lz4RawDecompressor.java

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,14 +18,14 @@
1818
*/
1919
package org.apache.parquet.hadoop.codec;
2020

21-
import io.airlift.compress.lz4.Lz4Decompressor;
21+
import io.airlift.compress.v3.lz4.Lz4Decompressor;
2222
import java.io.IOException;
2323
import java.nio.ByteBuffer;
2424
import org.apache.hadoop.io.compress.DirectDecompressor;
2525

2626
public class Lz4RawDecompressor extends NonBlockedDecompressor implements DirectDecompressor {
2727

28-
private Lz4Decompressor decompressor = new Lz4Decompressor();
28+
private final Lz4Decompressor decompressor = Lz4Decompressor.create();
2929

3030
@Override
3131
protected int maxUncompressedLength(ByteBuffer compressed, int maxUncompressedLength) throws IOException {
@@ -36,8 +36,14 @@ protected int maxUncompressedLength(ByteBuffer compressed, int maxUncompressedLe
3636

3737
@Override
3838
protected int uncompress(ByteBuffer compressed, ByteBuffer uncompressed) throws IOException {
39-
decompressor.decompress(compressed, uncompressed);
40-
int uncompressedSize = uncompressed.position();
39+
int compressedLen = compressed.remaining();
40+
byte[] inputArr = new byte[compressedLen];
41+
compressed.get(inputArr);
42+
int maxOut = uncompressed.remaining();
43+
byte[] outputArr = new byte[maxOut];
44+
45+
int uncompressedSize = decompressor.decompress(inputArr, 0, compressedLen, outputArr, 0, maxOut);
46+
uncompressed.put(outputArr, 0, uncompressedSize);
4147
uncompressed.limit(uncompressedSize);
4248
uncompressed.rewind();
4349
return uncompressedSize;

0 commit comments

Comments (0)