Merge pull request #531 from carbon/master
Improve CQ3
adamhathcock committed Aug 1, 2020
2 parents 1391794 + 33ffcb9 commit f36167d
Showing 17 changed files with 98 additions and 86 deletions.
12 changes: 6 additions & 6 deletions src/SharpCompress/Common/GZip/GZipFilePart.cs
@@ -8,7 +8,7 @@

namespace SharpCompress.Common.GZip
{
internal class GZipFilePart : FilePart
internal sealed class GZipFilePart : FilePart
{
private string? _name;
private readonly Stream _stream;
@@ -40,8 +40,8 @@ internal override Stream GetRawStream()
private void ReadAndValidateGzipHeader(Stream stream)
{
// read the header on the first read
byte[] header = new byte[10];
int n = stream.Read(header, 0, header.Length);
Span<byte> header = stackalloc byte[10];
int n = stream.Read(header);

// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
@@ -59,14 +59,14 @@ private void ReadAndValidateGzipHeader(Stream stream)
throw new ZlibException("Bad GZIP header.");
}

int timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = stream.Read(header, 0, 2); // 2-byte length field
n = stream.Read(header.Slice(0, 2)); // 2-byte length field

Int16 extraLength = (Int16)(header[0] + header[1] * 256);
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];

if (!stream.ReadFully(extra))
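
The pattern this file settles on (a stack-allocated scratch buffer, the Span-based Stream.Read overload, and BinaryPrimitives instead of manual byte arithmetic) reads like this in isolation. A minimal sketch, not SharpCompress code: the type and method names are invented, and DateTime.UnixEpoch stands in for TarHeader.EPOCH.

using System;
using System.Buffers.Binary;
using System.IO;

internal static class GzipHeaderSketch
{
    // Reads the fixed 10-byte GZIP member header and extracts MTIME
    // (a little-endian Unix timestamp at offset 4).
    public static DateTime? TryReadModifiedTime(Stream stream)
    {
        Span<byte> header = stackalloc byte[10];   // scratch buffer on the stack, no GC allocation
        if (stream.Read(header) != header.Length)  // Stream.Read(Span<byte>), .NET Core 2.1+ / netstandard2.1
        {
            return null;                           // empty or truncated stream
        }
        if (header[0] != 0x1F || header[1] != 0x8B)
        {
            return null;                           // not a GZIP header
        }
        int seconds = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
        return DateTime.UnixEpoch.AddSeconds(seconds);
    }
}
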
6 changes: 3 additions & 3 deletions src/SharpCompress/Common/Rar/RarRijndael.cs
@@ -87,11 +87,11 @@ public static RarRijndael InitializeFrom(string password, byte[] salt)
return rijndael;
}

public byte[] ProcessBlock(byte[] cipherText)
public byte[] ProcessBlock(ReadOnlySpan<byte> cipherText)
{
var plainText = new byte[CRYPTO_BLOCK_SIZE];
Span<byte> plainText = stackalloc byte[CRYPTO_BLOCK_SIZE]; // 16 bytes
byte[] decryptedBytes = new byte[CRYPTO_BLOCK_SIZE];
_rijndael.ProcessBlock(cipherText, 0, plainText, 0);
_rijndael.ProcessBlock(cipherText, plainText);

for (int j = 0; j < CRYPTO_BLOCK_SIZE; j++)
{
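
The split above, plainText becoming a stackalloc span while decryptedBytes stays a heap array, follows from a quick escape check: a span over stack memory must not outlive the method, so only the buffer that is returned needs to be an array. A hedged sketch with invented names; the XOR step merely stands in for the real RAR block transform.

using System;

internal static class BlockBufferSketch
{
    private const int BLOCK_SIZE = 16;

    public static byte[] ProcessBlock(ReadOnlySpan<byte> cipherText, ReadOnlySpan<byte> previousBlock)
    {
        Span<byte> plain = stackalloc byte[BLOCK_SIZE]; // intermediate only: never leaves this method
        cipherText.CopyTo(plain);                       // stand-in for the real cipher's block transform

        byte[] output = new byte[BLOCK_SIZE];           // returned to the caller, so it must be a heap array
        for (int i = 0; i < BLOCK_SIZE; i++)
        {
            output[i] = (byte)(plain[i] ^ previousBlock[i]);
        }
        return output;
    }
}
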
4 changes: 2 additions & 2 deletions src/SharpCompress/Common/SevenZip/CMethodId.cs
@@ -1,6 +1,6 @@
namespace SharpCompress.Common.SevenZip
{
internal struct CMethodId
internal readonly struct CMethodId
{
public const ulong K_COPY_ID = 0;
public const ulong K_LZMA_ID = 0x030101;
@@ -26,7 +26,7 @@ public override int GetHashCode()

public override bool Equals(object obj)
{
return obj is CMethodId && (CMethodId)obj == this;
return obj is CMethodId other && Equals(other);
}

public bool Equals(CMethodId other)
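
The two changes here work as a pair: readonly struct tells the compiler the value never mutates (so it can skip defensive copies), and "obj is CMethodId other && Equals(other)" replaces the old test-then-cast with a single pattern match. The idiom in isolation, with MethodId as a hypothetical stand-in rather than the real CMethodId:

using System;

internal readonly struct MethodId : IEquatable<MethodId>
{
    public readonly ulong Id;

    public MethodId(ulong id) => Id = id;

    // One pattern match instead of "obj is MethodId && (MethodId)obj == this".
    public override bool Equals(object obj) => obj is MethodId other && Equals(other);

    public bool Equals(MethodId other) => Id == other.Id;

    public override int GetHashCode() => Id.GetHashCode();

    public static bool operator ==(MethodId left, MethodId right) => left.Equals(right);
    public static bool operator !=(MethodId left, MethodId right) => !left.Equals(right);
}
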
12 changes: 9 additions & 3 deletions src/SharpCompress/Common/Tar/Headers/TarHeader.cs
@@ -7,7 +7,7 @@

namespace SharpCompress.Common.Tar.Headers
{
internal class TarHeader
internal sealed class TarHeader
{
internal static readonly DateTime EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

@@ -260,10 +260,16 @@ private static long ReadAsciiInt64(byte[] buffer, int offset, int count)
return Convert.ToInt64(s);
}


private static readonly byte[] eightSpaces = {
(byte)' ', (byte)' ', (byte)' ', (byte)' ',
(byte)' ', (byte)' ', (byte)' ', (byte)' '
};

internal static int RecalculateChecksum(byte[] buf)
{
// Set default value for checksum. That is 8 spaces.
Encoding.UTF8.GetBytes("        ").CopyTo(buf, 148);
eightSpaces.CopyTo(buf, 148);

// Calculate checksum
int headerChecksum = 0;
@@ -276,7 +282,7 @@ internal static int RecalculateChecksum(byte[] buf)

internal static int RecalculateAltChecksum(byte[] buf)
{
Encoding.UTF8.GetBytes("        ").CopyTo(buf, 148);
eightSpaces.CopyTo(buf, 148);
int headerChecksum = 0;
foreach (byte b in buf)
{
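
The motivation for the new eightSpaces field is easiest to see side by side: the old call allocates a fresh 8-byte array (and runs a UTF-8 encode) on every checksum recalculation, while the cached array is created once. A small sketch with hypothetical helper names; offset 148 is where the 8-byte checksum field sits in a tar header block.

using System.Text;

internal static class ChecksumFieldSketch
{
    private static readonly byte[] EightSpaces =
        { (byte)' ', (byte)' ', (byte)' ', (byte)' ', (byte)' ', (byte)' ', (byte)' ', (byte)' ' };

    // Old form: one throwaway byte[8] per call.
    internal static void StampOld(byte[] header) =>
        Encoding.UTF8.GetBytes("        ").CopyTo(header, 148);

    // New form: reuses the single cached array for the lifetime of the process.
    internal static void StampNew(byte[] header) =>
        EightSpaces.CopyTo(header, 148);
}
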
32 changes: 12 additions & 20 deletions src/SharpCompress/Compressors/ADC/ADCBase.cs
@@ -52,32 +52,24 @@ private static int GetChunkType(byte byt)

private static int GetChunkSize(byte byt)
{
switch (GetChunkType(byt))
return GetChunkType(byt) switch
{
case PLAIN:
return (byt & 0x7F) + 1;
case TWO_BYTE:
return ((byt & 0x3F) >> 2) + 3;
case THREE_BYTE:
return (byt & 0x3F) + 4;
default:
return -1;
}
PLAIN => (byt & 0x7F) + 1,
TWO_BYTE => ((byt & 0x3F) >> 2) + 3,
THREE_BYTE => (byt & 0x3F) + 4,
_ => -1,
};
}

private static int GetOffset(ReadOnlySpan<byte> chunk)
{
switch (GetChunkType(chunk[0]))
return GetChunkType(chunk[0]) switch
{
case PLAIN:
return 0;
case TWO_BYTE:
return ((chunk[0] & 0x03) << 8) + chunk[1];
case THREE_BYTE:
return (chunk[1] << 8) + chunk[2];
default:
return -1;
}
PLAIN => 0,
TWO_BYTE => ((chunk[0] & 0x03) << 8) + chunk[1],
THREE_BYTE => (chunk[1] << 8) + chunk[2],
_ => -1,
};
}

/// <summary>
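
The switch expressions above are a pure form change: same arms, same results, but the whole mapping is a single expression that can be returned directly, with the underscore arm taking over from default. A self-contained version of the size mapping; the chunk-type constant values are illustrative, only their use comes from the diff.

internal static class AdcChunkSketch
{
    private const int PLAIN = 0;      // illustrative values; the real constants live in ADCBase
    private const int TWO_BYTE = 1;
    private const int THREE_BYTE = 2;

    // The mapping is one expression, so it fits an expression-bodied method.
    internal static int GetChunkSize(int chunkType, byte byt) => chunkType switch
    {
        PLAIN => (byt & 0x7F) + 1,
        TWO_BYTE => ((byt & 0x3F) >> 2) + 3,
        THREE_BYTE => (byt & 0x3F) + 4,
        _ => -1,
    };
}
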
4 changes: 2 additions & 2 deletions src/SharpCompress/Compressors/BZip2/CBZip2InputStream.cs
@@ -1,5 +1,6 @@
#nullable disable

using System;
using System.IO;

/*
@@ -835,7 +836,7 @@ cache misses.

private void SetupBlock()
{
int[] cftab = new int[257];
Span<int> cftab = stackalloc int[257];
char ch;

cftab[0] = 0;
@@ -854,7 +855,6 @@ private void SetupBlock()
tt[cftab[ch]] = i;
cftab[ch]++;
}
cftab = null;

tPos = tt[origPtr];

13 changes: 7 additions & 6 deletions src/SharpCompress/Compressors/BZip2/CBZip2OutputStream.cs
@@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;

/*
* Copyright 2001,2004-2005 The Apache Software Foundation
@@ -88,9 +89,9 @@ private void MakeMaps()
int nNodes, nHeap, n1, n2, i, j, k;
bool tooLong;

int[] heap = new int[BZip2Constants.MAX_ALPHA_SIZE + 2];
int[] weight = new int[BZip2Constants.MAX_ALPHA_SIZE * 2];
int[] parent = new int[BZip2Constants.MAX_ALPHA_SIZE * 2];
Span<int> heap = stackalloc int[BZip2Constants.MAX_ALPHA_SIZE + 2]; // 1040 bytes
Span<int> weight = stackalloc int[BZip2Constants.MAX_ALPHA_SIZE * 2]; // 1040 bytes
Span<int> parent = stackalloc int[BZip2Constants.MAX_ALPHA_SIZE * 2]; // 1040 bytes

for (i = 0; i < alphaSize; i++)
{
@@ -1328,8 +1329,8 @@ private void QSort3(int loSt, int hiSt, int dSt)
private void MainSort()
{
int i, j, ss, sb;
int[] runningOrder = new int[256];
int[] copy = new int[256];
Span<int> runningOrder = stackalloc int[256];
Span<int> copy = stackalloc int[256];
bool[] bigDone = new bool[256];
int c1, c2;
int numQSorted;
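
The byte-size comments added above matter because stackalloc memory comes out of the thread's stack (typically around 1 MB): a few fixed buffers of roughly 1 KB are fine, but for larger or runtime-sized buffers the usual guard is a size threshold with an ArrayPool fallback. A hedged sketch of that general pattern, not code from this PR:

using System;
using System.Buffers;

internal static class StackallocGuardSketch
{
    private const int StackAllocThreshold = 256; // element count: 256 ints is about 1 KB of stack

    internal static int CountDistinctSymbols(ReadOnlySpan<byte> symbols, int alphabetSize)
    {
        int[] rented = null;
        Span<int> counts = alphabetSize <= StackAllocThreshold
            ? stackalloc int[StackAllocThreshold]
            : (rented = ArrayPool<int>.Shared.Rent(alphabetSize));
        counts = counts.Slice(0, alphabetSize);
        counts.Clear(); // pooled (and stackalloc'ed) memory is not guaranteed to be zeroed

        try
        {
            int distinct = 0;
            foreach (byte s in symbols)
            {
                if (s < alphabetSize && counts[s]++ == 0)
                {
                    distinct++;
                }
            }
            return distinct;
        }
        finally
        {
            if (rented != null)
            {
                ArrayPool<int>.Shared.Return(rented);
            }
        }
    }
}
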
8 changes: 4 additions & 4 deletions src/SharpCompress/Compressors/Deflate/CRC32.cs
@@ -189,7 +189,7 @@ public void SlurpBlock(byte[] block, int offset, int count)

// pre-initialize the crc table for speed of lookup.

private uint gf2_matrix_times(uint[] matrix, uint vec)
private uint gf2_matrix_times(ReadOnlySpan<uint> matrix, uint vec)
{
uint sum = 0;
int i = 0;
@@ -205,7 +205,7 @@ private uint gf2_matrix_times(uint[] matrix, uint vec)
return sum;
}

private void gf2_matrix_square(uint[] square, uint[] mat)
private void gf2_matrix_square(Span<uint> square, Span<uint> mat)
{
for (int i = 0; i < 32; i++)
{
@@ -225,8 +225,8 @@ private void gf2_matrix_square(uint[] square, uint[] mat)
/// <param name="length">the length of data the CRC value was calculated on</param>
public void Combine(int crc, int length)
{
var even = new uint[32]; // even-power-of-two zeros operator
var odd = new uint[32]; // odd-power-of-two zeros operator
Span<uint> even = stackalloc uint[32]; // even-power-of-two zeros operator
Span<uint> odd = stackalloc uint[32]; // odd-power-of-two zeros operator

if (length == 0)
{
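
For readers of Combine: these helpers implement the zlib-style CRC combination, where appending a run of zero bytes is modelled as multiplying the 32-bit CRC, viewed as a bit vector, by a matrix over GF(2). In GF(2) addition is XOR, so "matrix times vector" is just XORing together the matrix rows selected by the set bits of the vector, and "squaring" applies the matrix to each of its own rows. A standalone sketch of the two span-based helpers:

using System;

internal static class Gf2Sketch
{
    // XOR together the rows of matrix picked out by the set bits of vec.
    internal static uint MatrixTimes(ReadOnlySpan<uint> matrix, uint vec)
    {
        uint sum = 0;
        int i = 0;
        while (vec != 0)
        {
            if ((vec & 1) != 0)
            {
                sum ^= matrix[i];   // row i contributes when bit i of vec is set
            }
            vec >>= 1;
            i++;
        }
        return sum;
    }

    // square = mat * mat: apply the operator to each of its own rows.
    internal static void MatrixSquare(Span<uint> square, ReadOnlySpan<uint> mat)
    {
        for (int i = 0; i < 32; i++)
        {
            square[i] = MatrixTimes(mat, mat[i]);
        }
    }
}
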
10 changes: 5 additions & 5 deletions src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs
@@ -413,8 +413,8 @@ private int _ReadAndValidateGzipHeader()
int totalBytesRead = 0;

// read the header on the first read
byte[] header = new byte[10];
int n = _stream.Read(header, 0, header.Length);
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);

// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
@@ -432,16 +432,16 @@
throw new ZlibException("Bad GZIP header.");
}

Int32 timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
totalBytesRead += n;
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = _stream.Read(header, 0, 2); // 2-byte length field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
totalBytesRead += n;

Int16 extraLength = (Int16)(header[0] + header[1] * 256);
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
n = _stream.Read(extra, 0, extra.Length);
if (n != extraLength)
5 changes: 3 additions & 2 deletions src/SharpCompress/Compressors/Deflate64/FastEncoderStatus.cs
@@ -2,6 +2,7 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Diagnostics;

namespace SharpCompress.Compressors.Deflate64
@@ -10,7 +11,7 @@ internal static class FastEncoderStatics
{
// static information for encoding, DO NOT MODIFY

internal static readonly byte[] FAST_ENCODER_TREE_STRUCTURE_DATA =
internal static ReadOnlySpan<byte> FAST_ENCODER_TREE_STRUCTURE_DATA => new byte[]
{
0xec,0xbd,0x07,0x60,0x1c,0x49,0x96,0x25,0x26,0x2f,0x6d,0xca,
0x7b,0x7f,0x4a,0xf5,0x4a,0xd7,0xe0,0x74,0xa1,0x08,0x80,0x60,
@@ -23,7 +24,7 @@ internal static class FastEncoderStatics
0x1f,0x3f
};

internal static readonly byte[] B_FINAL_FAST_ENCODER_TREE_STRUCTURE_DATA =
internal static ReadOnlySpan<byte> B_FINAL_FAST_ENCODER_TREE_STRUCTURE_DATA => new byte[]
{
0xed,0xbd,0x07,0x60,0x1c,0x49,0x96,0x25,0x26,0x2f,0x6d,0xca,
0x7b,0x7f,0x4a,0xf5,0x4a,0xd7,0xe0,0x74,0xa1,0x08,0x80,0x60,
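
Switching the static tables from static readonly byte[] fields to ReadOnlySpan<byte> properties is more than a signature change: when every element is a compile-time constant and the element type is a single byte, the C# compiler embeds the data in the assembly and the property returns a span straight over that data, so there is no array allocation, no static-constructor work, and no way for a caller to mutate the table. A minimal illustration with a hypothetical table:

using System;

internal static class StaticTableSketch
{
    // Allocated on the heap once at type initialization, and mutable by anyone holding the reference.
    internal static readonly byte[] TableAsArray = { 0x01, 0x02, 0x04, 0x08 };

    // Compiled to a span over data baked into the assembly: no allocation, read-only by construction.
    internal static ReadOnlySpan<byte> TableAsSpan => new byte[] { 0x01, 0x02, 0x04, 0x08 };
}
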
5 changes: 3 additions & 2 deletions src/SharpCompress/Compressors/Deflate64/HuffmanTree.cs
@@ -2,6 +2,7 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Diagnostics;
using System.IO;

@@ -118,14 +119,14 @@ private static byte[] GetStaticDistanceTreeLength()
// This algorithm is described in standard RFC 1951
private uint[] CalculateHuffmanCode()
{
uint[] bitLengthCount = new uint[17];
Span<uint> bitLengthCount = stackalloc uint[17];
foreach (int codeLength in _codeLengthArray)
{
bitLengthCount[codeLength]++;
}
bitLengthCount[0] = 0; // clear count for length 0

uint[] nextCode = new uint[17];
Span<uint> nextCode = stackalloc uint[17];
uint tempCode = 0;
for (int bits = 1; bits <= 16; bits++)
{
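
The two stackalloc'ed buffers are the working state of the canonical Huffman construction from RFC 1951, section 3.2.2: count how many codes there are of each bit length, derive the numerically smallest code of each length, then hand out consecutive codes to symbols in order. A self-contained sketch of that algorithm; the names are illustrative, not the HuffmanTree internals.

using System;

internal static class CanonicalHuffmanSketch
{
    internal static uint[] CalculateCodes(ReadOnlySpan<byte> codeLengths, int maxBits = 16)
    {
        // Step 1: count codes per bit length.
        Span<uint> bitLengthCount = stackalloc uint[maxBits + 1];
        bitLengthCount.Clear();                    // stackalloc contents are not guaranteed to be zeroed
        foreach (int len in codeLengths)
        {
            bitLengthCount[len]++;
        }
        bitLengthCount[0] = 0;                     // length 0 means "symbol unused"

        // Step 2: numerically smallest code for each bit length.
        Span<uint> nextCode = stackalloc uint[maxBits + 1];
        uint code = 0;
        for (int bits = 1; bits <= maxBits; bits++)
        {
            code = (code + bitLengthCount[bits - 1]) << 1;
            nextCode[bits] = code;
        }

        // Step 3: assign consecutive codes to symbols in order.
        var codes = new uint[codeLengths.Length];
        for (int i = 0; i < codeLengths.Length; i++)
        {
            int len = codeLengths[i];
            if (len > 0)
            {
                codes[i] = nextCode[len]++;
            }
        }
        return codes;
    }
}
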
5 changes: 3 additions & 2 deletions src/SharpCompress/Compressors/LZMA/LZipStream.cs
@@ -168,9 +168,10 @@ public static int ValidateAndReadSize(Stream stream)
{
throw new ArgumentNullException(nameof(stream));
}

// Read the header
byte[] header = new byte[6];
int n = stream.Read(header, 0, header.Length);
Span<byte> header = stackalloc byte[6];
int n = stream.Read(header);

// TODO: Handle reading only part of the header?

5 changes: 3 additions & 2 deletions src/SharpCompress/Compressors/Rar/UnpackV1/UnpackUtility.cs
@@ -1,4 +1,5 @@
using System;

using SharpCompress.Compressors.Rar.VM;

namespace SharpCompress.Compressors.Rar.UnpackV1
@@ -182,8 +183,8 @@ internal static int decodeNumber(this BitInput input, Decode.Decode dec)

internal static void makeDecodeTables(byte[] lenTab, int offset, Decode.Decode dec, int size)
{
int[] lenCount = new int[16];
int[] tmpPos = new int[16];
Span<int> lenCount = stackalloc int[16];
Span<int> tmpPos = stackalloc int[16];
int i;
long M, N;

5 changes: 3 additions & 2 deletions src/SharpCompress/Compressors/Rar/UnpackV2017/unpack_hpp.cs
@@ -12,6 +12,7 @@
using System.Collections.Generic;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
using System;

// TODO: REMOVE THIS... WIP
#pragma warning disable 169
@@ -271,8 +272,8 @@ internal partial class Unpack
byte *ReadBufMT;
#endif

private byte[] FilterSrcMemory = new byte[0];
private byte[] FilterDstMemory = new byte[0];
private byte[] FilterSrcMemory = Array.Empty<byte>();
private byte[] FilterDstMemory = Array.Empty<byte>();

// Filters code, one entry per filter.
private readonly List<UnpackFilter> Filters = new List<UnpackFilter>();
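
Array.Empty<T>() returns the same cached zero-length array on every call, so fields that start out "empty until first use" no longer pay one small allocation per instance. A tiny demonstration:

using System;

internal static class EmptyArraySketch
{
    internal static void Demo()
    {
        Console.WriteLine(ReferenceEquals(Array.Empty<byte>(), Array.Empty<byte>())); // True: one shared instance
        Console.WriteLine(ReferenceEquals(new byte[0], new byte[0]));                 // False: two allocations
    }
}
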
8 changes: 4 additions & 4 deletions src/SharpCompress/Crypto/IBlockCipher.cs
@@ -1,4 +1,6 @@
namespace SharpCompress.Crypto
using System;

namespace SharpCompress.Crypto
{
/// <remarks>Base interface for a symmetric key block cipher.</remarks>
public interface IBlockCipher
@@ -19,12 +21,10 @@ public interface IBlockCipher

/// <summary>Process a block.</summary>
/// <param name="inBuf">The input buffer.</param>
/// <param name="inOff">The offset into <paramref>inBuf</paramref> that the input block begins.</param>
/// <param name="outBuf">The output buffer.</param>
/// <param name="outOff">The offset into <paramref>outBuf</paramref> to write the output block.</param>
/// <exception cref="DataLengthException">If input block is wrong size, or outBuf too small.</exception>
/// <returns>The number of bytes processed and produced.</returns>
int ProcessBlock(byte[] inBuf, int inOff, byte[] outBuf, int outOff);
int ProcessBlock(ReadOnlySpan<byte> inBuf, Span<byte> outBuf);

/// <summary>
/// Reset the cipher to the same state as it was after the last init (if there was one).
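
Dropping the inOff/outOff parameters works because a span already carries its own offset and length: callers slice, and the buffers can come from arrays, stackalloc, or native memory alike. A toy cipher and caller showing the shape of the new signature (not an actual IBlockCipher implementation; the interface has more members than this hunk shows):

using System;

internal sealed class XorBlockCipher
{
    private readonly byte _key;

    public XorBlockCipher(byte key) => _key = key;

    // Same shape as the new ProcessBlock(ReadOnlySpan<byte>, Span<byte>).
    public int ProcessBlock(ReadOnlySpan<byte> inBuf, Span<byte> outBuf)
    {
        for (int i = 0; i < inBuf.Length; i++)
        {
            outBuf[i] = (byte)(inBuf[i] ^ _key);
        }
        return inBuf.Length;
    }
}

internal static class CipherCallerSketch
{
    // The old API took (buffer, offset) pairs; with spans the offset travels inside the slice.
    internal static void EncryptBlockInPlace(XorBlockCipher cipher, byte[] buffer, int offset, int blockSize)
    {
        Span<byte> block = buffer.AsSpan(offset, blockSize);
        cipher.ProcessBlock(block, block); // Span<byte> converts implicitly to ReadOnlySpan<byte>
    }
}
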
