Skip to content

Commit

Permalink
Merge pull request from GHSA-55g7-9cwv-5qfv
Browse files Browse the repository at this point in the history
* Validate chunk size to be within a configured maximum

* Add constructors to have max size configurable

* Code cleanup

* Use 512MB for consistency

---------

Co-authored-by: Taro L. Saito <leo@xerial.org>
  • Loading branch information
BD and xerial committed Sep 23, 2023
1 parent 49d7001 commit 9f8c3cf
Show file tree
Hide file tree
Showing 5 changed files with 60 additions and 3 deletions.
@@ -1,9 +1,9 @@
package org.xerial.snappy;

import java.io.OutputStream;

import org.xerial.snappy.buffer.CachedBufferAllocator;

import java.io.OutputStream;

public class SnappyHadoopCompatibleOutputStream extends SnappyOutputStream
{
public SnappyHadoopCompatibleOutputStream(OutputStream out)
Expand Down
23 changes: 23 additions & 0 deletions src/main/java/org/xerial/snappy/SnappyInputStream.java
Expand Up @@ -36,8 +36,11 @@
public class SnappyInputStream
extends InputStream
{
public static final int MAX_CHUNK_SIZE = 512 * 1024 * 1024; // 512 MiB

private boolean finishedReading = false;
protected final InputStream in;
private final int maxChunkSize;

private byte[] compressed;
private byte[] uncompressed;
Expand All @@ -55,6 +58,21 @@ public class SnappyInputStream
/**
 * Create a filter for reading compressed data as an uncompressed stream,
 * using the default maximum chunk size ({@link #MAX_CHUNK_SIZE}, 512 MiB).
 *
 * @param input stream holding snappy-framed compressed data
 * @throws IOException if reading the stream header fails
 */
public SnappyInputStream(InputStream input)
throws IOException
{
// Delegate to the configurable constructor with the default 512 MiB cap,
// which also eagerly reads and validates the stream header.
this(input, MAX_CHUNK_SIZE);
}


/**
 * Create a filter for reading compressed data as an uncompressed stream,
 * rejecting any chunk whose declared size exceeds {@code maxChunkSize}.
 * The cap guards against corrupted or malicious streams that declare a
 * huge chunk size to force an oversized buffer allocation.
 *
 * @param input stream holding snappy-framed compressed data
 * @param maxChunkSize maximum accepted chunk size in bytes; must be positive
 * @throws IOException if reading the stream header fails
 * @throws IllegalArgumentException if {@code maxChunkSize} is not positive
 */
public SnappyInputStream(InputStream input, int maxChunkSize)
throws IOException
{
    // A non-positive cap would silently reject every chunk later on;
    // fail fast at construction time instead.
    if (maxChunkSize <= 0) {
        throw new IllegalArgumentException(
                String.format("maxChunkSize must be positive: %,d", maxChunkSize));
    }
    this.maxChunkSize = maxChunkSize;
    this.in = input;
    readHeader();
}
Expand Down Expand Up @@ -422,6 +440,11 @@ protected boolean hasNextChunk()
throw new SnappyError(SnappyErrorCode.INVALID_CHUNK_SIZE, "chunkSize is too big or negative : " + chunkSize);
}

// chunkSize is big
if (chunkSize > maxChunkSize) {
throw new SnappyError(SnappyErrorCode.FAILED_TO_UNCOMPRESS, String.format("Received chunkSize %,d is greater than max configured chunk size %,d", chunkSize, maxChunkSize));
}

// extend the compressed data buffer size
if (compressed == null || chunkSize > compressed.length) {
// chunkSize exceeds limit
Expand Down
6 changes: 5 additions & 1 deletion src/main/java/org/xerial/snappy/SnappyOutputStream.java
Expand Up @@ -59,6 +59,7 @@
public class SnappyOutputStream
extends OutputStream
{
public static final int MAX_BLOCK_SIZE = 512 * 1024 * 1024; // 512 MiB
static final int MIN_BLOCK_SIZE = 1 * 1024;
static final int DEFAULT_BLOCK_SIZE = 32 * 1024; // Use 32kb for the default block size

Expand All @@ -84,7 +85,7 @@ public SnappyOutputStream(OutputStream out)
/**
* @param out
* @param blockSize byte size of the internal buffer size
* @throws IOException
* @throws IllegalArgumentException when blockSize is larger than 512 MiB
*/
public SnappyOutputStream(OutputStream out, int blockSize)
{
Expand All @@ -95,6 +96,9 @@ public SnappyOutputStream(OutputStream out, int blockSize, BufferAllocatorFactor
{
this.out = out;
this.blockSize = Math.max(MIN_BLOCK_SIZE, blockSize);
if (this.blockSize > MAX_BLOCK_SIZE){
throw new IllegalArgumentException(String.format("Provided chunk size %,d larger than max %,d", this.blockSize, MAX_BLOCK_SIZE));
}
int inputSize = blockSize;
int outputSize = SnappyCodec.HEADER_SIZE + 4 + Snappy.maxCompressedLength(blockSize);

Expand Down
12 changes: 12 additions & 0 deletions src/test/java/org/xerial/snappy/SnappyOutputStreamTest.java
Expand Up @@ -34,6 +34,7 @@
import java.nio.ByteOrder;

import org.junit.Test;
import org.junit.Assert;
import org.xerial.snappy.buffer.BufferAllocatorFactory;
import org.xerial.snappy.buffer.CachedBufferAllocator;
import org.xerial.snappy.buffer.DefaultBufferAllocator;
Expand Down Expand Up @@ -106,6 +107,17 @@ public void bufferSize()
is.close();
}

/**
 * A block size above SnappyOutputStream.MAX_BLOCK_SIZE (512 MiB) must be
 * rejected at construction time. The @Test(expected = ...) annotation
 * fails the test unless an IllegalArgumentException is thrown.
 */
@Test(expected = IllegalArgumentException.class)
public void invalidBlockSize()
    throws Exception
{
    ByteArrayOutputStream b = new ByteArrayOutputStream();
    // 1 GiB exceeds the 512 MiB cap, so the constructor must throw.
    new SnappyOutputStream(b, 1024 * 1024 * 1024);
}

@Test
public void smallWrites()
throws Exception
Expand Down
18 changes: 18 additions & 0 deletions src/test/java/org/xerial/snappy/SnappyTest.java
Expand Up @@ -379,6 +379,24 @@ public void isInvalidChunkLengthForSnappyInputStreamOutOfMemory()
}
}

/*
 Tests sad cases for the SnappyInputStream.read method
 - Expects a FAILED_TO_UNCOMPRESS SnappyError because the declared chunk
   size (0x7fffffff) exceeds the configured maximum chunk size
 - Input: valid stream header followed by chunk length bytes
   {-126, 'S', 'N', 'A', 'P', 'P', 'Y', 0, 0, 0, 0, 0, 0, 0, 0, 0, (byte) 0x7f, (byte) 0xff, (byte) 0xff, (byte) 0xff}
*/
@Test
public void isInvalidChunkLengthForSnappyInputStream()
        throws Exception {
    byte[] data = {-126, 'S', 'N', 'A', 'P', 'P', 'Y', 0, 0, 0, 0, 0, 0, 0, 0, 0, (byte) 0x7f, (byte) 0xff, (byte) 0xff, (byte) 0xff};
    SnappyInputStream in = new SnappyInputStream(new ByteArrayInputStream(data));
    byte[] out = new byte[50];
    try {
        in.read(out);
        // Without this, the test would silently pass when no error is raised.
        Assert.fail("expected SnappyError for a chunk size above the configured maximum");
    } catch (SnappyError error) {
        // JUnit convention: expected value first, actual second.
        Assert.assertEquals(SnappyErrorCode.FAILED_TO_UNCOMPRESS, error.errorCode);
    }
}

/*
Tests happy cases for BitShuffle.shuffle method
- double: 0, 10
Expand Down

0 comments on commit 9f8c3cf

Please sign in to comment.