Initial upload

2025-08-26 08:37:44 +08:00
commit 31d81b91b6
448 changed files with 80981 additions and 0 deletions


@@ -0,0 +1,604 @@
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// This is the Deflater class. The deflater class compresses input
/// with the deflate algorithm described in RFC 1951. It has several
/// compression levels and three different strategies described below.
///
/// This class is <i>not</i> thread safe. This is inherent in the API, due
/// to the split of deflate and setInput.
///
/// author of the original java version : Jochen Hoenicke
/// </summary>
public class Deflater
{
#region Deflater Documentation
/*
* The Deflater can do the following state transitions:
*
* (1) -> INIT_STATE   ----> INIT_FINISHING_STATE ---.
*        /  | (2)      (5)                          |
*       /   v          (5)                          |
*  (3)| SETDICT_STATE ---> SETDICT_FINISHING_STATE  |(3)
*      \   | (3)                 |        ,--------'
*       |  |                     | (3)   /
*       v  v          (5)        v      v
* (1) -> BUSY_STATE   ----> FINISHING_STATE
*                                | (6)
*                                v
*                           FINISHED_STATE
*    \_____________________________________/
*                    | (7)
*                    v
*               CLOSED_STATE
*
* (1) If we should produce a header we start in INIT_STATE, otherwise
* we start in BUSY_STATE.
* (2) A dictionary may be set only when we are in INIT_STATE, then
* we change the state as indicated.
* (3) Whether a dictionary is set or not, on the first call of deflate
* we change to BUSY_STATE.
* (4) -- intentionally left blank -- :)
* (5) FINISHING_STATE is entered when Finish() is called to indicate that
* there is no more input. The *_FINISHING_STATE variants indicate that
* the header hasn't been written yet.
* (6) FINISHED_STATE is entered, when everything has been flushed to the
* internal pending output buffer.
* (7) At any time the deflater may be closed, entering CLOSED_STATE.
*
*/
#endregion Deflater Documentation
#region Public Constants
/// <summary>
/// The best and slowest compression level. This tries to find very
/// long and distant string repetitions.
/// </summary>
public const int BEST_COMPRESSION = 9;
/// <summary>
/// The worst but fastest compression level.
/// </summary>
public const int BEST_SPEED = 1;
/// <summary>
/// The default compression level.
/// </summary>
public const int DEFAULT_COMPRESSION = -1;
/// <summary>
/// This level won't compress at all but output uncompressed blocks.
/// </summary>
public const int NO_COMPRESSION = 0;
/// <summary>
/// The compression method. This is the only method supported so far.
/// There is no need to use this constant at all.
/// </summary>
public const int DEFLATED = 8;
#endregion Public Constants
#region Public Enum
/// <summary>
/// Compression Level as an enum for safer use
/// </summary>
public enum CompressionLevel
{
/// <summary>
/// The best and slowest compression level. This tries to find very
/// long and distant string repetitions.
/// </summary>
BEST_COMPRESSION = Deflater.BEST_COMPRESSION,
/// <summary>
/// The worst but fastest compression level.
/// </summary>
BEST_SPEED = Deflater.BEST_SPEED,
/// <summary>
/// The default compression level.
/// </summary>
DEFAULT_COMPRESSION = Deflater.DEFAULT_COMPRESSION,
/// <summary>
/// This level won't compress at all but output uncompressed blocks.
/// </summary>
NO_COMPRESSION = Deflater.NO_COMPRESSION,
/// <summary>
/// The compression method. This is the only method supported so far.
/// There is no need to use this constant at all.
/// </summary>
DEFLATED = Deflater.DEFLATED
}
#endregion Public Enum
#region Local Constants
private const int IS_SETDICT = 0x01;
private const int IS_FLUSHING = 0x04;
private const int IS_FINISHING = 0x08;
private const int INIT_STATE = 0x00;
private const int SETDICT_STATE = 0x01;
// private static int INIT_FINISHING_STATE = 0x08;
// private static int SETDICT_FINISHING_STATE = 0x09;
private const int BUSY_STATE = 0x10;
private const int FLUSHING_STATE = 0x14;
private const int FINISHING_STATE = 0x1c;
private const int FINISHED_STATE = 0x1e;
private const int CLOSED_STATE = 0x7f;
#endregion Local Constants
#region Constructors
/// <summary>
/// Creates a new deflater with default compression level.
/// </summary>
public Deflater() : this(DEFAULT_COMPRESSION, false)
{
}
/// <summary>
/// Creates a new deflater with given compression level.
/// </summary>
/// <param name="level">
/// the compression level, a value between NO_COMPRESSION
/// and BEST_COMPRESSION, or DEFAULT_COMPRESSION.
/// </param>
/// <exception cref="System.ArgumentOutOfRangeException">if level is out of range.</exception>
public Deflater(int level) : this(level, false)
{
}
/// <summary>
/// Creates a new deflater with given compression level.
/// </summary>
/// <param name="level">
/// the compression level, a value between NO_COMPRESSION
/// and BEST_COMPRESSION.
/// </param>
/// <param name="noZlibHeaderOrFooter">
/// true, if we should suppress the Zlib/RFC1950 header at the
/// beginning and the adler checksum at the end of the output. This is
/// useful for the GZIP/PKZIP formats.
/// </param>
/// <exception cref="System.ArgumentOutOfRangeException">if level is out of range.</exception>
public Deflater(int level, bool noZlibHeaderOrFooter)
{
if (level == DEFAULT_COMPRESSION)
{
level = 6;
}
else if (level < NO_COMPRESSION || level > BEST_COMPRESSION)
{
throw new ArgumentOutOfRangeException(nameof(level));
}
pending = new DeflaterPending();
engine = new DeflaterEngine(pending, noZlibHeaderOrFooter);
this.noZlibHeaderOrFooter = noZlibHeaderOrFooter;
SetStrategy(DeflateStrategy.Default);
SetLevel(level);
Reset();
}
#endregion Constructors
/// <summary>
/// Resets the deflater. The deflater acts afterwards as if it was
/// just created with the same compression level and strategy as it
/// had before.
/// </summary>
public void Reset()
{
state = (noZlibHeaderOrFooter ? BUSY_STATE : INIT_STATE);
totalOut = 0;
pending.Reset();
engine.Reset();
}
/// <summary>
/// Gets the current adler checksum of the data that was processed so far.
/// </summary>
public int Adler
{
get
{
return engine.Adler;
}
}
/// <summary>
/// Gets the number of input bytes processed so far.
/// </summary>
public long TotalIn
{
get
{
return engine.TotalIn;
}
}
/// <summary>
/// Gets the number of output bytes so far.
/// </summary>
public long TotalOut
{
get
{
return totalOut;
}
}
/// <summary>
/// Flushes the current input block. Further calls to Deflate() will
/// produce enough output to inflate everything in the current input
/// block. This is not part of the JDK Deflater API; it is used by
/// DeflaterOutputStream to implement Flush().
/// </summary>
public void Flush()
{
state |= IS_FLUSHING;
}
/// <summary>
/// Finishes the deflater with the current input block. It is an error
/// to give more input after this method was called. This method must
/// be called to force all bytes to be flushed.
/// </summary>
public void Finish()
{
state |= (IS_FLUSHING | IS_FINISHING);
}
/// <summary>
/// Returns true if the stream was finished and no more output bytes
/// are available.
/// </summary>
public bool IsFinished
{
get
{
return (state == FINISHED_STATE) && pending.IsFlushed;
}
}
/// <summary>
/// Returns true if the input buffer is empty.
/// You should then call SetInput().
/// NOTE: This property can also return true when the stream
/// has been finished.
/// </summary>
public bool IsNeedingInput
{
get
{
return engine.NeedsInput();
}
}
/// <summary>
/// Sets the data which should be compressed next. This should only be
/// called when IsNeedingInput indicates that more input is needed.
/// Calling SetInput while previous input is still pending results in an
/// InvalidOperationException.
/// The given byte array should not be changed before IsNeedingInput returns
/// true again.
/// This call is equivalent to <code>SetInput(input, 0, input.Length)</code>.
/// </summary>
/// <param name="input">
/// the buffer containing the input data.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// if Finish() has already been called or previous input is still pending.
/// </exception>
public void SetInput(byte[] input)
{
SetInput(input, 0, input.Length);
}
/// <summary>
/// Sets the data which should be compressed next. This should only be
/// called when IsNeedingInput indicates that more input is needed.
/// The given byte array should not be changed before IsNeedingInput returns
/// true again.
/// </summary>
/// <param name="input">
/// the buffer containing the input data.
/// </param>
/// <param name="offset">
/// the start of the data.
/// </param>
/// <param name="count">
/// the number of data bytes of input.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// if the buffer was Finish()ed or if previous input is still pending.
/// </exception>
public void SetInput(byte[] input, int offset, int count)
{
if ((state & IS_FINISHING) != 0)
{
throw new InvalidOperationException("Finish() already called");
}
engine.SetInput(input, offset, count);
}
/// <summary>
/// Sets the compression level. There is no guarantee of the exact
/// position at which the change takes effect, but if you call this when
/// IsNeedingInput is true, the new level will apply somewhere near the
/// end of the input given so far.
/// </summary>
/// <param name="level">
/// the new compression level.
/// </param>
public void SetLevel(int level)
{
if (level == DEFAULT_COMPRESSION)
{
level = 6;
}
else if (level < NO_COMPRESSION || level > BEST_COMPRESSION)
{
throw new ArgumentOutOfRangeException(nameof(level));
}
if (this.level != level)
{
this.level = level;
engine.SetLevel(level);
}
}
/// <summary>
/// Get current compression level
/// </summary>
/// <returns>Returns the current compression level</returns>
public int GetLevel()
{
return level;
}
/// <summary>
/// Sets the compression strategy. Strategy is one of the
/// DeflateStrategy values: Default, Filtered or HuffmanOnly. For the exact
/// position where the strategy is changed, the same as for
/// SetLevel() applies.
/// </summary>
/// <param name="strategy">
/// The new compression strategy.
/// </param>
public void SetStrategy(DeflateStrategy strategy)
{
engine.Strategy = strategy;
}
/// <summary>
/// Deflates the current input block into the given array.
/// </summary>
/// <param name="output">
/// The buffer where compressed data is stored
/// </param>
/// <returns>
/// The number of compressed bytes added to the output, or 0 if either
/// IsNeedingInput or IsFinished is true, or the output length is zero.
/// </returns>
public int Deflate(byte[] output)
{
return Deflate(output, 0, output.Length);
}
/// <summary>
/// Deflates the current input block to the given array.
/// </summary>
/// <param name="output">
/// Buffer to store the compressed data.
/// </param>
/// <param name="offset">
/// Offset into the output array.
/// </param>
/// <param name="length">
/// The maximum number of bytes that may be stored.
/// </param>
/// <returns>
/// The number of compressed bytes added to the output, or 0 if either
/// IsNeedingInput or IsFinished is true, or length is zero.
/// </returns>
/// <exception cref="System.InvalidOperationException">
/// If Finish() was previously called.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// If offset or length don't match the array length.
/// </exception>
public int Deflate(byte[] output, int offset, int length)
{
int origLength = length;
if (state == CLOSED_STATE)
{
throw new InvalidOperationException("Deflater closed");
}
if (state < BUSY_STATE)
{
// output header
int header = (DEFLATED +
((DeflaterConstants.MAX_WBITS - 8) << 4)) << 8;
int level_flags = (level - 1) >> 1;
if (level_flags < 0 || level_flags > 3)
{
level_flags = 3;
}
header |= level_flags << 6;
if ((state & IS_SETDICT) != 0)
{
// Dictionary was set
header |= DeflaterConstants.PRESET_DICT;
}
header += 31 - (header % 31);
pending.WriteShortMSB(header);
if ((state & IS_SETDICT) != 0)
{
int chksum = engine.Adler;
engine.ResetAdler();
pending.WriteShortMSB(chksum >> 16);
pending.WriteShortMSB(chksum & 0xffff);
}
state = BUSY_STATE | (state & (IS_FLUSHING | IS_FINISHING));
}
for (; ; )
{
int count = pending.Flush(output, offset, length);
offset += count;
totalOut += count;
length -= count;
if (length == 0 || state == FINISHED_STATE)
{
break;
}
if (!engine.Deflate((state & IS_FLUSHING) != 0, (state & IS_FINISHING) != 0))
{
switch (state)
{
case BUSY_STATE:
// We need more input now
return origLength - length;
case FLUSHING_STATE:
if (level != NO_COMPRESSION)
{
/* We have to supply some lookahead. 8 bit lookahead
* is needed by the zlib inflater, and we must fill
* the next byte, so that all bits are flushed.
*/
int neededbits = 8 + ((-pending.BitCount) & 7);
while (neededbits > 0)
{
/* write a static tree block consisting solely of
* an EOF:
*/
pending.WriteBits(2, 10);
neededbits -= 10;
}
}
state = BUSY_STATE;
break;
case FINISHING_STATE:
pending.AlignToByte();
// Compressed data is complete. Write footer information if required.
if (!noZlibHeaderOrFooter)
{
int adler = engine.Adler;
pending.WriteShortMSB(adler >> 16);
pending.WriteShortMSB(adler & 0xffff);
}
state = FINISHED_STATE;
break;
}
}
}
return origLength - length;
}
/// <summary>
/// Sets the dictionary which should be used in the deflate process.
/// This call is equivalent to <code>SetDictionary(dictionary, 0, dictionary.Length)</code>.
/// </summary>
/// <param name="dictionary">
/// the dictionary.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// if SetInput() or Deflate() have already been called, or another dictionary was already set.
/// </exception>
public void SetDictionary(byte[] dictionary)
{
SetDictionary(dictionary, 0, dictionary.Length);
}
/// <summary>
/// Sets the dictionary which should be used in the deflate process.
/// The dictionary is a byte array containing strings that are
/// likely to occur in the data which should be compressed. The
/// dictionary is not stored in the compressed output, only a
/// checksum. To decompress the output you need to supply the same
/// dictionary again.
/// </summary>
/// <param name="dictionary">
/// The dictionary data
/// </param>
/// <param name="index">
/// The index where dictionary information commences.
/// </param>
/// <param name="count">
/// The number of bytes in the dictionary.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// If SetInput() or Deflate() have already been called, or another dictionary was already set.
/// </exception>
public void SetDictionary(byte[] dictionary, int index, int count)
{
if (state != INIT_STATE)
{
throw new InvalidOperationException();
}
state = SETDICT_STATE;
engine.SetDictionary(dictionary, index, count);
}
#region Instance Fields
/// <summary>
/// Compression level.
/// </summary>
private int level;
/// <summary>
/// If true no Zlib/RFC1950 headers or footers are generated
/// </summary>
private bool noZlibHeaderOrFooter;
/// <summary>
/// The current state.
/// </summary>
private int state;
/// <summary>
/// The total bytes of output written.
/// </summary>
private long totalOut;
/// <summary>
/// The pending output.
/// </summary>
private DeflaterPending pending;
/// <summary>
/// The deflater engine.
/// </summary>
private DeflaterEngine engine;
#endregion Instance Fields
}
}
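
The XML comments above describe the Deflater call contract: feed data with SetInput only while IsNeedingInput is true, drain compressed bytes with Deflate, then call Finish and keep draining until IsFinished reports true. Below is a minimal usage sketch; it is not part of this commit, and the buffer size and sample text are arbitrary illustrative choices.

using System;
using System.IO;
using System.Text;
using ICSharpCode.SharpZipLib.Zip.Compression;

internal static class DeflaterUsageSketch
{
    // Compresses a byte array with the API documented above: set the input,
    // call Finish() to signal that no more data follows, then drain Deflate()
    // into a small buffer until IsFinished becomes true.
    public static byte[] Compress(byte[] data)
    {
        var deflater = new Deflater(Deflater.BEST_COMPRESSION, noZlibHeaderOrFooter: false);
        var output = new MemoryStream();
        var buffer = new byte[1024]; // illustrative buffer size

        deflater.SetInput(data);
        deflater.Finish(); // no more input after this call

        while (!deflater.IsFinished)
        {
            int produced = deflater.Deflate(buffer);
            if (produced > 0)
            {
                output.Write(buffer, 0, produced);
            }
        }
        return output.ToArray();
    }

    public static void Main()
    {
        byte[] compressed = Compress(Encoding.UTF8.GetBytes("hello hello hello"));
        Console.WriteLine($"compressed to {compressed.Length} bytes");
    }
}

With the zlib header enabled, the two header bytes emitted by Deflate() can be checked by hand from the arithmetic in that method: 0x78 0x9C for the default level and 0x78 0xDA for BEST_COMPRESSION.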


@@ -0,0 +1,146 @@
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// This class contains constants used for deflation.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Naming", "CA1707:Identifiers should not contain underscores", Justification = "kept for backwards compatibility")]
public static class DeflaterConstants
{
/// <summary>
/// Set to true to enable debugging
/// </summary>
public const bool DEBUGGING = false;
/// <summary>
/// Written to Zip file to identify a stored block
/// </summary>
public const int STORED_BLOCK = 0;
/// <summary>
/// Identifies static tree in Zip file
/// </summary>
public const int STATIC_TREES = 1;
/// <summary>
/// Identifies dynamic tree in Zip file
/// </summary>
public const int DYN_TREES = 2;
/// <summary>
/// Header flag indicating a preset dictionary for deflation
/// </summary>
public const int PRESET_DICT = 0x20;
/// <summary>
/// Sets internal buffer sizes for Huffman encoding
/// </summary>
public const int DEFAULT_MEM_LEVEL = 8;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int MAX_MATCH = 258;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int MIN_MATCH = 3;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int MAX_WBITS = 15;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int WSIZE = 1 << MAX_WBITS;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int WMASK = WSIZE - 1;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int HASH_BITS = DEFAULT_MEM_LEVEL + 7;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int HASH_SIZE = 1 << HASH_BITS;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int HASH_MASK = HASH_SIZE - 1;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int HASH_SHIFT = (HASH_BITS + MIN_MATCH - 1) / MIN_MATCH;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int MIN_LOOKAHEAD = MAX_MATCH + MIN_MATCH + 1;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int MAX_DIST = WSIZE - MIN_LOOKAHEAD;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int PENDING_BUF_SIZE = 1 << (DEFAULT_MEM_LEVEL + 8);
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int MAX_BLOCK_SIZE = Math.Min(65535, PENDING_BUF_SIZE - 5);
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int DEFLATE_STORED = 0;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int DEFLATE_FAST = 1;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int DEFLATE_SLOW = 2;
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int[] GOOD_LENGTH = { 0, 4, 4, 4, 4, 8, 8, 8, 32, 32 };
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int[] MAX_LAZY = { 0, 4, 5, 6, 4, 16, 16, 32, 128, 258 };
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int[] NICE_LENGTH = { 0, 8, 16, 32, 16, 32, 128, 128, 258, 258 };
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int[] MAX_CHAIN = { 0, 4, 8, 32, 16, 32, 128, 256, 1024, 4096 };
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int[] COMPR_FUNC = { 0, 1, 1, 1, 1, 2, 2, 2, 2, 2 };
}
}
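
Most of the values in this file are derived from each other (window size from MAX_WBITS, hash parameters from DEFAULT_MEM_LEVEL, and so on). The short console sketch below is not part of this commit; it just prints a few of the derived constants, with the values expected from the definitions above noted in comments.

using System;
using ICSharpCode.SharpZipLib.Zip.Compression;

internal static class DeflaterConstantsSketch
{
    public static void Main()
    {
        // With MAX_WBITS = 15 the sliding window is 32 KiB.
        Console.WriteLine(DeflaterConstants.WSIZE);         // 32768
        // A match is at most MAX_MATCH (258) bytes long; MIN_LOOKAHEAD and
        // MAX_DIST follow from that and the window size.
        Console.WriteLine(DeflaterConstants.MIN_LOOKAHEAD); // 262 (258 + 3 + 1)
        Console.WriteLine(DeflaterConstants.MAX_DIST);      // 32506 (32768 - 262)
        // Hash table parameters used by the engine's 3-byte rolling hash.
        Console.WriteLine(DeflaterConstants.HASH_SIZE);     // 32768
        Console.WriteLine(DeflaterConstants.HASH_SHIFT);    // 5
    }
}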


@@ -0,0 +1,946 @@
using ICSharpCode.SharpZipLib.Checksum;
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// Strategies for deflater
/// </summary>
public enum DeflateStrategy
{
/// <summary>
/// The default strategy
/// </summary>
Default = 0,
/// <summary>
/// This strategy will only allow longer string repetitions. It is
/// useful for random data with a small character set.
/// </summary>
Filtered = 1,
/// <summary>
/// This strategy will not look for string repetitions at all. It
/// only encodes with Huffman trees (which means that more common
/// characters get a smaller encoding).
/// </summary>
HuffmanOnly = 2
}
// DEFLATE ALGORITHM:
//
// The uncompressed stream is inserted into the window array. When
// the window array is full the first half is thrown away and the
// second half is copied to the beginning.
//
// The head array is a hash table. Three characters build a hash value
// and that hash value points to the corresponding index in window of
// the last string with this hash. The prev array implements a
// linked list of matches with the same hash: prev[index & WMASK] points
// to the previous index with the same hash.
//
/// <summary>
/// Low level compression engine for deflate algorithm which uses a 32K sliding window
/// with secondary compression from Huffman/Shannon-Fano codes.
/// </summary>
public class DeflaterEngine
{
#region Constants
private const int TooFar = 4096;
#endregion Constants
#region Constructors
/// <summary>
/// Construct instance with pending buffer
/// Adler calculation will be performed
/// </summary>
/// <param name="pending">
/// Pending buffer to use
/// </param>
public DeflaterEngine(DeflaterPending pending)
: this (pending, false)
{
}
/// <summary>
/// Construct instance with pending buffer
/// </summary>
/// <param name="pending">
/// Pending buffer to use
/// </param>
/// <param name="noAdlerCalculation">
/// True to skip the Adler-32 checksum calculation
/// </param>
public DeflaterEngine(DeflaterPending pending, bool noAdlerCalculation)
{
this.pending = pending;
huffman = new DeflaterHuffman(pending);
if (!noAdlerCalculation)
adler = new Adler32();
window = new byte[2 * DeflaterConstants.WSIZE];
head = new short[DeflaterConstants.HASH_SIZE];
prev = new short[DeflaterConstants.WSIZE];
// We start at index 1, to avoid an implementation deficiency, that
// we cannot build a repeat pattern at index 0.
blockStart = strstart = 1;
}
#endregion Constructors
/// <summary>
/// Deflate drives actual compression of data
/// </summary>
/// <param name="flush">True to flush input buffers</param>
/// <param name="finish">Finish deflation with the current input.</param>
/// <returns>Returns true if progress has been made.</returns>
public bool Deflate(bool flush, bool finish)
{
bool progress;
do
{
FillWindow();
bool canFlush = flush && (inputOff == inputEnd);
#if DebugDeflation
if (DeflaterConstants.DEBUGGING) {
Console.WriteLine("window: [" + blockStart + "," + strstart + ","
+ lookahead + "], " + compressionFunction + "," + canFlush);
}
#endif
switch (compressionFunction)
{
case DeflaterConstants.DEFLATE_STORED:
progress = DeflateStored(canFlush, finish);
break;
case DeflaterConstants.DEFLATE_FAST:
progress = DeflateFast(canFlush, finish);
break;
case DeflaterConstants.DEFLATE_SLOW:
progress = DeflateSlow(canFlush, finish);
break;
default:
throw new InvalidOperationException("unknown compressionFunction");
}
} while (pending.IsFlushed && progress); // repeat while we have no pending output and progress was made
return progress;
}
/// <summary>
/// Sets input data to be deflated. Should only be called when <code>NeedsInput()</code>
/// returns true
/// </summary>
/// <param name="buffer">The buffer containing input data.</param>
/// <param name="offset">The offset of the first byte of data.</param>
/// <param name="count">The number of bytes of data to use as input.</param>
public void SetInput(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (inputOff < inputEnd)
{
throw new InvalidOperationException("Old input was not completely processed");
}
int end = offset + count;
/* We want to throw an ArgumentOutOfRangeException early. The
* check is very tricky: it also handles integer wrap around.
*/
if ((offset > end) || (end > buffer.Length))
{
throw new ArgumentOutOfRangeException(nameof(count));
}
inputBuf = buffer;
inputOff = offset;
inputEnd = end;
}
/// <summary>
/// Determines if more <see cref="SetInput">input</see> is needed.
/// </summary>
/// <returns>Return true if input is needed via <see cref="SetInput">SetInput</see></returns>
public bool NeedsInput()
{
return (inputEnd == inputOff);
}
/// <summary>
/// Set compression dictionary
/// </summary>
/// <param name="buffer">The buffer containing the dictionary data</param>
/// <param name="offset">The offset in the buffer for the first byte of data</param>
/// <param name="length">The length of the dictionary data.</param>
public void SetDictionary(byte[] buffer, int offset, int length)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (strstart != 1) )
{
throw new InvalidOperationException("strstart not 1");
}
#endif
adler?.Update(new ArraySegment<byte>(buffer, offset, length));
if (length < DeflaterConstants.MIN_MATCH)
{
return;
}
if (length > DeflaterConstants.MAX_DIST)
{
offset += length - DeflaterConstants.MAX_DIST;
length = DeflaterConstants.MAX_DIST;
}
System.Array.Copy(buffer, offset, window, strstart, length);
UpdateHash();
--length;
while (--length > 0)
{
InsertString();
strstart++;
}
strstart += 2;
blockStart = strstart;
}
/// <summary>
/// Reset internal state
/// </summary>
public void Reset()
{
huffman.Reset();
adler?.Reset();
blockStart = strstart = 1;
lookahead = 0;
totalIn = 0;
prevAvailable = false;
matchLen = DeflaterConstants.MIN_MATCH - 1;
for (int i = 0; i < DeflaterConstants.HASH_SIZE; i++)
{
head[i] = 0;
}
for (int i = 0; i < DeflaterConstants.WSIZE; i++)
{
prev[i] = 0;
}
}
/// <summary>
/// Reset Adler checksum
/// </summary>
public void ResetAdler()
{
adler?.Reset();
}
/// <summary>
/// Get current value of Adler checksum
/// </summary>
public int Adler
{
get
{
return (adler != null) ? unchecked((int)adler.Value) : 0;
}
}
/// <summary>
/// Total data processed
/// </summary>
public long TotalIn
{
get
{
return totalIn;
}
}
/// <summary>
/// Get/set the <see cref="DeflateStrategy">deflate strategy</see>
/// </summary>
public DeflateStrategy Strategy
{
get
{
return strategy;
}
set
{
strategy = value;
}
}
/// <summary>
/// Set the deflate level (0-9)
/// </summary>
/// <param name="level">The value to set the level to.</param>
public void SetLevel(int level)
{
if ((level < 0) || (level > 9))
{
throw new ArgumentOutOfRangeException(nameof(level));
}
goodLength = DeflaterConstants.GOOD_LENGTH[level];
max_lazy = DeflaterConstants.MAX_LAZY[level];
niceLength = DeflaterConstants.NICE_LENGTH[level];
max_chain = DeflaterConstants.MAX_CHAIN[level];
if (DeflaterConstants.COMPR_FUNC[level] != compressionFunction)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING) {
Console.WriteLine("Change from " + compressionFunction + " to "
+ DeflaterConstants.COMPR_FUNC[level]);
}
#endif
switch (compressionFunction)
{
case DeflaterConstants.DEFLATE_STORED:
if (strstart > blockStart)
{
huffman.FlushStoredBlock(window, blockStart,
strstart - blockStart, false);
blockStart = strstart;
}
UpdateHash();
break;
case DeflaterConstants.DEFLATE_FAST:
if (strstart > blockStart)
{
huffman.FlushBlock(window, blockStart, strstart - blockStart,
false);
blockStart = strstart;
}
break;
case DeflaterConstants.DEFLATE_SLOW:
if (prevAvailable)
{
huffman.TallyLit(window[strstart - 1] & 0xff);
}
if (strstart > blockStart)
{
huffman.FlushBlock(window, blockStart, strstart - blockStart, false);
blockStart = strstart;
}
prevAvailable = false;
matchLen = DeflaterConstants.MIN_MATCH - 1;
break;
}
compressionFunction = DeflaterConstants.COMPR_FUNC[level];
}
}
/// <summary>
/// Fill the window
/// </summary>
public void FillWindow()
{
/* If the window is almost full and there is insufficient lookahead,
* move the upper half to the lower one to make room in the upper half.
*/
if (strstart >= DeflaterConstants.WSIZE + DeflaterConstants.MAX_DIST)
{
SlideWindow();
}
/* If there is not enough lookahead, but still some input left,
* read in the input
*/
if (lookahead < DeflaterConstants.MIN_LOOKAHEAD && inputOff < inputEnd)
{
int more = 2 * DeflaterConstants.WSIZE - lookahead - strstart;
if (more > inputEnd - inputOff)
{
more = inputEnd - inputOff;
}
System.Array.Copy(inputBuf, inputOff, window, strstart + lookahead, more);
adler?.Update(new ArraySegment<byte>(inputBuf, inputOff, more));
inputOff += more;
totalIn += more;
lookahead += more;
}
if (lookahead >= DeflaterConstants.MIN_MATCH)
{
UpdateHash();
}
}
private void UpdateHash()
{
/*
if (DEBUGGING) {
Console.WriteLine("updateHash: "+strstart);
}
*/
ins_h = (window[strstart] << DeflaterConstants.HASH_SHIFT) ^ window[strstart + 1];
}
/// <summary>
/// Inserts the current string in the head hash and returns the previous
/// value for this hash.
/// </summary>
/// <returns>The previous hash value</returns>
private int InsertString()
{
short match;
int hash = ((ins_h << DeflaterConstants.HASH_SHIFT) ^ window[strstart + (DeflaterConstants.MIN_MATCH - 1)]) & DeflaterConstants.HASH_MASK;
#if DebugDeflation
if (DeflaterConstants.DEBUGGING)
{
if (hash != (((window[strstart] << (2*HASH_SHIFT)) ^
(window[strstart + 1] << HASH_SHIFT) ^
(window[strstart + 2])) & HASH_MASK)) {
throw new SharpZipBaseException("hash inconsistent: " + hash + "/"
+window[strstart] + ","
+window[strstart + 1] + ","
+window[strstart + 2] + "," + HASH_SHIFT);
}
}
#endif
prev[strstart & DeflaterConstants.WMASK] = match = head[hash];
head[hash] = unchecked((short)strstart);
ins_h = hash;
return match & 0xffff;
}
private void SlideWindow()
{
Array.Copy(window, DeflaterConstants.WSIZE, window, 0, DeflaterConstants.WSIZE);
matchStart -= DeflaterConstants.WSIZE;
strstart -= DeflaterConstants.WSIZE;
blockStart -= DeflaterConstants.WSIZE;
// Slide the hash table (could be avoided with 32 bit values
// at the expense of memory usage).
for (int i = 0; i < DeflaterConstants.HASH_SIZE; ++i)
{
int m = head[i] & 0xffff;
head[i] = (short)(m >= DeflaterConstants.WSIZE ? (m - DeflaterConstants.WSIZE) : 0);
}
// Slide the prev table.
for (int i = 0; i < DeflaterConstants.WSIZE; i++)
{
int m = prev[i] & 0xffff;
prev[i] = (short)(m >= DeflaterConstants.WSIZE ? (m - DeflaterConstants.WSIZE) : 0);
}
}
/// <summary>
/// Find the best (longest) string in the window matching the
/// string starting at strstart.
///
/// Preconditions:
/// <code>
/// strstart + DeflaterConstants.MAX_MATCH &lt;= window.length.</code>
/// </summary>
/// <param name="curMatch"></param>
/// <returns>True if a match greater than the minimum length is found</returns>
private bool FindLongestMatch(int curMatch)
{
int match;
int scan = strstart;
// scanMax is the highest position that we can look at
int scanMax = scan + Math.Min(DeflaterConstants.MAX_MATCH, lookahead) - 1;
int limit = Math.Max(scan - DeflaterConstants.MAX_DIST, 0);
byte[] window = this.window;
short[] prev = this.prev;
int chainLength = this.max_chain;
int niceLength = Math.Min(this.niceLength, lookahead);
matchLen = Math.Max(matchLen, DeflaterConstants.MIN_MATCH - 1);
if (scan + matchLen > scanMax) return false;
byte scan_end1 = window[scan + matchLen - 1];
byte scan_end = window[scan + matchLen];
// Do not waste too much time if we already have a good match:
if (matchLen >= this.goodLength) chainLength >>= 2;
do
{
match = curMatch;
scan = strstart;
if (window[match + matchLen] != scan_end
|| window[match + matchLen - 1] != scan_end1
|| window[match] != window[scan]
|| window[++match] != window[++scan])
{
continue;
}
// scan is set to strstart+1 and the comparison passed, so
// scanMax - scan is the maximum number of bytes we can compare.
// below we compare 8 bytes at a time, so first we compare
// (scanMax - scan) % 8 bytes, so the remainder is a multiple of 8
switch ((scanMax - scan) % 8)
{
case 1:
if (window[++scan] == window[++match]) break;
break;
case 2:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
case 3:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
case 4:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
case 5:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
case 6:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
case 7:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
}
if (window[scan] == window[match])
{
/* We check for insufficient lookahead only every 8th comparison;
* the 256th check will be made at strstart + 258 unless lookahead is
* exhausted first.
*/
do
{
if (scan == scanMax)
{
++scan; // advance to first position not matched
++match;
break;
}
}
while (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]);
}
if (scan - strstart > matchLen)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (ins_h == 0) )
Console.Error.WriteLine("Found match: " + curMatch + "-" + (scan - strstart));
#endif
matchStart = curMatch;
matchLen = scan - strstart;
if (matchLen >= niceLength)
break;
scan_end1 = window[scan - 1];
scan_end = window[scan];
}
} while ((curMatch = (prev[curMatch & DeflaterConstants.WMASK] & 0xffff)) > limit && 0 != --chainLength);
return matchLen >= DeflaterConstants.MIN_MATCH;
}
private bool DeflateStored(bool flush, bool finish)
{
if (!flush && (lookahead == 0))
{
return false;
}
strstart += lookahead;
lookahead = 0;
int storedLength = strstart - blockStart;
if ((storedLength >= DeflaterConstants.MAX_BLOCK_SIZE) || // Block is full
(blockStart < DeflaterConstants.WSIZE && storedLength >= DeflaterConstants.MAX_DIST) || // Block may move out of window
flush)
{
bool lastBlock = finish;
if (storedLength > DeflaterConstants.MAX_BLOCK_SIZE)
{
storedLength = DeflaterConstants.MAX_BLOCK_SIZE;
lastBlock = false;
}
#if DebugDeflation
if (DeflaterConstants.DEBUGGING)
{
Console.WriteLine("storedBlock[" + storedLength + "," + lastBlock + "]");
}
#endif
huffman.FlushStoredBlock(window, blockStart, storedLength, lastBlock);
blockStart += storedLength;
return !(lastBlock || storedLength == 0);
}
return true;
}
private bool DeflateFast(bool flush, bool finish)
{
if (lookahead < DeflaterConstants.MIN_LOOKAHEAD && !flush)
{
return false;
}
while (lookahead >= DeflaterConstants.MIN_LOOKAHEAD || flush)
{
if (lookahead == 0)
{
// We are flushing everything
huffman.FlushBlock(window, blockStart, strstart - blockStart, finish);
blockStart = strstart;
return false;
}
if (strstart > 2 * DeflaterConstants.WSIZE - DeflaterConstants.MIN_LOOKAHEAD)
{
/* slide window, as FindLongestMatch needs this.
* This should only happen when flushing and the window
* is almost full.
*/
SlideWindow();
}
int hashHead;
if (lookahead >= DeflaterConstants.MIN_MATCH &&
(hashHead = InsertString()) != 0 &&
strategy != DeflateStrategy.HuffmanOnly &&
strstart - hashHead <= DeflaterConstants.MAX_DIST &&
FindLongestMatch(hashHead))
{
// longestMatch sets matchStart and matchLen
#if DebugDeflation
if (DeflaterConstants.DEBUGGING)
{
for (int i = 0 ; i < matchLen; i++) {
if (window[strstart + i] != window[matchStart + i]) {
throw new SharpZipBaseException("Match failure");
}
}
}
#endif
bool full = huffman.TallyDist(strstart - matchStart, matchLen);
lookahead -= matchLen;
if (matchLen <= max_lazy && lookahead >= DeflaterConstants.MIN_MATCH)
{
while (--matchLen > 0)
{
++strstart;
InsertString();
}
++strstart;
}
else
{
strstart += matchLen;
if (lookahead >= DeflaterConstants.MIN_MATCH - 1)
{
UpdateHash();
}
}
matchLen = DeflaterConstants.MIN_MATCH - 1;
if (!full)
{
continue;
}
}
else
{
// No match found
huffman.TallyLit(window[strstart] & 0xff);
++strstart;
--lookahead;
}
if (huffman.IsFull())
{
bool lastBlock = finish && (lookahead == 0);
huffman.FlushBlock(window, blockStart, strstart - blockStart, lastBlock);
blockStart = strstart;
return !lastBlock;
}
}
return true;
}
private bool DeflateSlow(bool flush, bool finish)
{
if (lookahead < DeflaterConstants.MIN_LOOKAHEAD && !flush)
{
return false;
}
while (lookahead >= DeflaterConstants.MIN_LOOKAHEAD || flush)
{
if (lookahead == 0)
{
if (prevAvailable)
{
huffman.TallyLit(window[strstart - 1] & 0xff);
}
prevAvailable = false;
// We are flushing everything
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && !flush)
{
throw new SharpZipBaseException("Not flushing, but no lookahead");
}
#endif
huffman.FlushBlock(window, blockStart, strstart - blockStart,
finish);
blockStart = strstart;
return false;
}
if (strstart >= 2 * DeflaterConstants.WSIZE - DeflaterConstants.MIN_LOOKAHEAD)
{
/* slide window, as FindLongestMatch needs this.
* This should only happen when flushing and the window
* is almost full.
*/
SlideWindow();
}
int prevMatch = matchStart;
int prevLen = matchLen;
if (lookahead >= DeflaterConstants.MIN_MATCH)
{
int hashHead = InsertString();
if (strategy != DeflateStrategy.HuffmanOnly &&
hashHead != 0 &&
strstart - hashHead <= DeflaterConstants.MAX_DIST &&
FindLongestMatch(hashHead))
{
// longestMatch sets matchStart and matchLen
// Discard match if too small and too far away
if (matchLen <= 5 && (strategy == DeflateStrategy.Filtered || (matchLen == DeflaterConstants.MIN_MATCH && strstart - matchStart > TooFar)))
{
matchLen = DeflaterConstants.MIN_MATCH - 1;
}
}
}
// previous match was better
if ((prevLen >= DeflaterConstants.MIN_MATCH) && (matchLen <= prevLen))
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING)
{
for (int i = 0 ; i < matchLen; i++) {
if (window[strstart-1+i] != window[prevMatch + i])
throw new SharpZipBaseException();
}
}
#endif
huffman.TallyDist(strstart - 1 - prevMatch, prevLen);
prevLen -= 2;
do
{
strstart++;
lookahead--;
if (lookahead >= DeflaterConstants.MIN_MATCH)
{
InsertString();
}
} while (--prevLen > 0);
strstart++;
lookahead--;
prevAvailable = false;
matchLen = DeflaterConstants.MIN_MATCH - 1;
}
else
{
if (prevAvailable)
{
huffman.TallyLit(window[strstart - 1] & 0xff);
}
prevAvailable = true;
strstart++;
lookahead--;
}
if (huffman.IsFull())
{
int len = strstart - blockStart;
if (prevAvailable)
{
len--;
}
bool lastBlock = (finish && (lookahead == 0) && !prevAvailable);
huffman.FlushBlock(window, blockStart, len, lastBlock);
blockStart += len;
return !lastBlock;
}
}
return true;
}
#region Instance Fields
// Hash index of string to be inserted
private int ins_h;
/// <summary>
/// Hashtable, hashing three characters to an index for window, so
/// that window[index]..window[index+2] have this hash code.
/// Note that the array should really be unsigned short, so you need
/// to and the values with 0xffff.
/// </summary>
private short[] head;
/// <summary>
/// <code>prev[index &amp; WMASK]</code> points to the previous index that has the
/// same hash code as the string starting at index. This way
/// entries with the same hash code are in a linked list.
/// Note that the array should really be unsigned short, so you need
/// to and the values with 0xffff.
/// </summary>
private short[] prev;
private int matchStart;
// Length of best match
private int matchLen;
// Set if previous match exists
private bool prevAvailable;
private int blockStart;
/// <summary>
/// Points to the current character in the window.
/// </summary>
private int strstart;
/// <summary>
/// lookahead is the number of characters starting at strstart in
/// window that are valid.
/// So window[strstart] until window[strstart+lookahead-1] are valid
/// characters.
/// </summary>
private int lookahead;
/// <summary>
/// This array contains the part of the uncompressed stream that
/// is of relevance. The current character is indexed by strstart.
/// </summary>
private byte[] window;
private DeflateStrategy strategy;
private int max_chain, max_lazy, niceLength, goodLength;
/// <summary>
/// The current compression function.
/// </summary>
private int compressionFunction;
/// <summary>
/// The input data for compression.
/// </summary>
private byte[] inputBuf;
/// <summary>
/// The total bytes of input read.
/// </summary>
private long totalIn;
/// <summary>
/// The offset into inputBuf, where input data starts.
/// </summary>
private int inputOff;
/// <summary>
/// The end offset of the input data.
/// </summary>
private int inputEnd;
private DeflaterPending pending;
private DeflaterHuffman huffman;
/// <summary>
/// The adler checksum
/// </summary>
private Adler32 adler;
#endregion Instance Fields
}
}
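
The "DEFLATE ALGORITHM" comment near the top of this file describes the head/prev hash chains that InsertString builds and FindLongestMatch walks. Since head and prev are private, the sketch below (not part of this commit) re-implements the same idea in isolation: it uses the same 3-byte hash formula but plain int arrays and no window sliding, so it is a simplified illustration rather than the engine's actual code.

using System;
using System.Text;

internal static class HashChainSketch
{
    // Mirrors DeflaterConstants.HASH_SHIFT / HASH_MASK / WMASK for a 32 KiB window.
    private const int HashShift = 5;
    private const int HashMask = (1 << 15) - 1;
    private const int WMask = (1 << 15) - 1;

    // Head[hash] holds the most recent position whose next three bytes hash to
    // 'hash'; Prev[pos & WMask] links back to the previous position with the
    // same hash (0 means "no earlier match", as position 0 is never used).
    private static readonly int[] Head = new int[1 << 15];
    private static readonly int[] Prev = new int[1 << 15];

    private static int Hash(byte[] window, int pos) =>
        ((((window[pos] << HashShift) ^ window[pos + 1]) << HashShift) ^ window[pos + 2]) & HashMask;

    // Insert position 'pos' and return the previous position with the same hash.
    private static int Insert(byte[] window, int pos)
    {
        int h = Hash(window, pos);
        int match = Head[h];
        Prev[pos & WMask] = match;
        Head[h] = pos;
        return match;
    }

    public static void Main()
    {
        byte[] window = Encoding.ASCII.GetBytes("\0abcabcabc"); // index 0 unused
        for (int pos = 1; pos + 2 < window.Length; pos++)
        {
            int previous = Insert(window, pos);
            Console.WriteLine($"pos {pos}: trigram '{(char)window[pos]}{(char)window[pos + 1]}{(char)window[pos + 2]}' -> previous occurrence at {previous}");
        }
    }
}

Each repeated trigram chains back to its previous occurrence, which is the list FindLongestMatch follows via prev[curMatch & WMASK] until the distance limit or chain length is exhausted.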


@@ -0,0 +1,959 @@
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// This is the DeflaterHuffman class.
///
/// This class is <i>not</i> thread safe. This is inherent in the API, due
/// to the split of Deflate and SetInput.
///
/// author of the original java version : Jochen Hoenicke
/// </summary>
public class DeflaterHuffman
{
private const int BUFSIZE = 1 << (DeflaterConstants.DEFAULT_MEM_LEVEL + 6);
// Number of literal and length codes (256 literals + EOF + 29 length codes)
private const int LITERAL_NUM = 286;
// Number of distance codes
private const int DIST_NUM = 30;
// Number of codes used to transfer bit lengths
private const int BITLEN_NUM = 19;
// repeat previous bit length 3-6 times (2 bits of repeat count)
private const int REP_3_6 = 16;
// repeat a zero length 3-10 times (3 bits of repeat count)
private const int REP_3_10 = 17;
// repeat a zero length 11-138 times (7 bits of repeat count)
private const int REP_11_138 = 18;
private const int EOF_SYMBOL = 256;
// The lengths of the bit length codes are sent in order of decreasing
// probability, to avoid transmitting the lengths for unused bit length codes.
private static readonly int[] BL_ORDER = { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
private static readonly byte[] bit4Reverse = {
0,
8,
4,
12,
2,
10,
6,
14,
1,
9,
5,
13,
3,
11,
7,
15
};
private static short[] staticLCodes;
private static byte[] staticLLength;
private static short[] staticDCodes;
private static byte[] staticDLength;
private class Tree
{
#region Instance Fields
public short[] freqs;
public byte[] length;
public int minNumCodes;
public int numCodes;
private short[] codes;
private readonly int[] bl_counts;
private readonly int maxLength;
private DeflaterHuffman dh;
#endregion Instance Fields
#region Constructors
public Tree(DeflaterHuffman dh, int elems, int minCodes, int maxLength)
{
this.dh = dh;
this.minNumCodes = minCodes;
this.maxLength = maxLength;
freqs = new short[elems];
bl_counts = new int[maxLength];
}
#endregion Constructors
/// <summary>
/// Resets the internal state of the tree
/// </summary>
public void Reset()
{
for (int i = 0; i < freqs.Length; i++)
{
freqs[i] = 0;
}
codes = null;
length = null;
}
public void WriteSymbol(int code)
{
// if (DeflaterConstants.DEBUGGING) {
// freqs[code]--;
// // Console.Write("writeSymbol("+freqs.length+","+code+"): ");
// }
dh.pending.WriteBits(codes[code] & 0xffff, length[code]);
}
/// <summary>
/// Check that all frequencies are zero
/// </summary>
/// <exception cref="SharpZipBaseException">
/// At least one frequency is non-zero
/// </exception>
public void CheckEmpty()
{
bool empty = true;
for (int i = 0; i < freqs.Length; i++)
{
empty &= freqs[i] == 0;
}
if (!empty)
{
throw new SharpZipBaseException("!Empty");
}
}
/// <summary>
/// Set static codes and length
/// </summary>
/// <param name="staticCodes">new codes</param>
/// <param name="staticLengths">length for new codes</param>
public void SetStaticCodes(short[] staticCodes, byte[] staticLengths)
{
codes = staticCodes;
length = staticLengths;
}
/// <summary>
/// Build dynamic codes and lengths
/// </summary>
public void BuildCodes()
{
int numSymbols = freqs.Length;
int[] nextCode = new int[maxLength];
int code = 0;
codes = new short[freqs.Length];
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("buildCodes: "+freqs.Length);
// }
for (int bits = 0; bits < maxLength; bits++)
{
nextCode[bits] = code;
code += bl_counts[bits] << (15 - bits);
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("bits: " + ( bits + 1) + " count: " + bl_counts[bits]
// +" nextCode: "+code);
// }
}
#if DebugDeflation
if ( DeflaterConstants.DEBUGGING && (code != 65536) )
{
throw new SharpZipBaseException("Inconsistent bl_counts!");
}
#endif
for (int i = 0; i < numCodes; i++)
{
int bits = length[i];
if (bits > 0)
{
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("codes["+i+"] = rev(" + nextCode[bits-1]+"),
// +bits);
// }
codes[i] = BitReverse(nextCode[bits - 1]);
nextCode[bits - 1] += 1 << (16 - bits);
}
}
}
public void BuildTree()
{
int numSymbols = freqs.Length;
/* heap is a priority queue, sorted by frequency, least frequent
* nodes first. The heap is a binary tree, with the property, that
* the parent node is smaller than both child nodes. This assures
* that the smallest node is the first parent.
*
* The binary tree is encoded in an array: 0 is root node and
* the nodes 2*n+1, 2*n+2 are the child nodes of node n.
*/
int[] heap = new int[numSymbols];
int heapLen = 0;
int maxCode = 0;
for (int n = 0; n < numSymbols; n++)
{
int freq = freqs[n];
if (freq != 0)
{
// Insert n into heap
int pos = heapLen++;
int ppos;
while (pos > 0 && freqs[heap[ppos = (pos - 1) / 2]] > freq)
{
heap[pos] = heap[ppos];
pos = ppos;
}
heap[pos] = n;
maxCode = n;
}
}
/* We could encode a single literal with 0 bits but then we
* don't see the literals. Therefore we force at least two
* literals to avoid this case. We don't care about order in
* this case, both literals get a 1 bit code.
*/
while (heapLen < 2)
{
int node = maxCode < 2 ? ++maxCode : 0;
heap[heapLen++] = node;
}
numCodes = Math.Max(maxCode + 1, minNumCodes);
int numLeafs = heapLen;
int[] childs = new int[4 * heapLen - 2];
int[] values = new int[2 * heapLen - 1];
int numNodes = numLeafs;
for (int i = 0; i < heapLen; i++)
{
int node = heap[i];
childs[2 * i] = node;
childs[2 * i + 1] = -1;
values[i] = freqs[node] << 8;
heap[i] = i;
}
/* Construct the Huffman tree by repeatedly combining the least two
* frequent nodes.
*/
do
{
int first = heap[0];
int last = heap[--heapLen];
// Propagate the hole to the leafs of the heap
int ppos = 0;
int path = 1;
while (path < heapLen)
{
if (path + 1 < heapLen && values[heap[path]] > values[heap[path + 1]])
{
path++;
}
heap[ppos] = heap[path];
ppos = path;
path = path * 2 + 1;
}
/* Now propagate the last element down along path. Normally
* it shouldn't go too deep.
*/
int lastVal = values[last];
while ((path = ppos) > 0 && values[heap[ppos = (path - 1) / 2]] > lastVal)
{
heap[path] = heap[ppos];
}
heap[path] = last;
int second = heap[0];
// Create a new node father of first and second
last = numNodes++;
childs[2 * last] = first;
childs[2 * last + 1] = second;
int mindepth = Math.Min(values[first] & 0xff, values[second] & 0xff);
values[last] = lastVal = values[first] + values[second] - mindepth + 1;
// Again, propagate the hole to the leafs
ppos = 0;
path = 1;
while (path < heapLen)
{
if (path + 1 < heapLen && values[heap[path]] > values[heap[path + 1]])
{
path++;
}
heap[ppos] = heap[path];
ppos = path;
path = ppos * 2 + 1;
}
// Now propagate the new element down along path
while ((path = ppos) > 0 && values[heap[ppos = (path - 1) / 2]] > lastVal)
{
heap[path] = heap[ppos];
}
heap[path] = last;
} while (heapLen > 1);
if (heap[0] != childs.Length / 2 - 1)
{
throw new SharpZipBaseException("Heap invariant violated");
}
BuildLength(childs);
}
/// <summary>
/// Get encoded length
/// </summary>
/// <returns>Encoded length, the sum of frequencies * lengths</returns>
public int GetEncodedLength()
{
int len = 0;
for (int i = 0; i < freqs.Length; i++)
{
len += freqs[i] * length[i];
}
return len;
}
/// <summary>
/// Scan a literal or distance tree to determine the frequencies of the codes
/// in the bit length tree.
/// </summary>
public void CalcBLFreq(Tree blTree)
{
int max_count; /* max repeat count */
int min_count; /* min repeat count */
int count; /* repeat count of the current code */
int curlen = -1; /* length of current code */
int i = 0;
while (i < numCodes)
{
count = 1;
int nextlen = length[i];
if (nextlen == 0)
{
max_count = 138;
min_count = 3;
}
else
{
max_count = 6;
min_count = 3;
if (curlen != nextlen)
{
blTree.freqs[nextlen]++;
count = 0;
}
}
curlen = nextlen;
i++;
while (i < numCodes && curlen == length[i])
{
i++;
if (++count >= max_count)
{
break;
}
}
if (count < min_count)
{
blTree.freqs[curlen] += (short)count;
}
else if (curlen != 0)
{
blTree.freqs[REP_3_6]++;
}
else if (count <= 10)
{
blTree.freqs[REP_3_10]++;
}
else
{
blTree.freqs[REP_11_138]++;
}
}
}
/// <summary>
/// Write tree values
/// </summary>
/// <param name="blTree">Tree to write</param>
public void WriteTree(Tree blTree)
{
int max_count; // max repeat count
int min_count; // min repeat count
int count; // repeat count of the current code
int curlen = -1; // length of current code
int i = 0;
while (i < numCodes)
{
count = 1;
int nextlen = length[i];
if (nextlen == 0)
{
max_count = 138;
min_count = 3;
}
else
{
max_count = 6;
min_count = 3;
if (curlen != nextlen)
{
blTree.WriteSymbol(nextlen);
count = 0;
}
}
curlen = nextlen;
i++;
while (i < numCodes && curlen == length[i])
{
i++;
if (++count >= max_count)
{
break;
}
}
if (count < min_count)
{
while (count-- > 0)
{
blTree.WriteSymbol(curlen);
}
}
else if (curlen != 0)
{
blTree.WriteSymbol(REP_3_6);
dh.pending.WriteBits(count - 3, 2);
}
else if (count <= 10)
{
blTree.WriteSymbol(REP_3_10);
dh.pending.WriteBits(count - 3, 3);
}
else
{
blTree.WriteSymbol(REP_11_138);
dh.pending.WriteBits(count - 11, 7);
}
}
}
private void BuildLength(int[] childs)
{
this.length = new byte[freqs.Length];
int numNodes = childs.Length / 2;
int numLeafs = (numNodes + 1) / 2;
int overflow = 0;
for (int i = 0; i < maxLength; i++)
{
bl_counts[i] = 0;
}
// First calculate optimal bit lengths
int[] lengths = new int[numNodes];
lengths[numNodes - 1] = 0;
for (int i = numNodes - 1; i >= 0; i--)
{
if (childs[2 * i + 1] != -1)
{
int bitLength = lengths[i] + 1;
if (bitLength > maxLength)
{
bitLength = maxLength;
overflow++;
}
lengths[childs[2 * i]] = lengths[childs[2 * i + 1]] = bitLength;
}
else
{
// A leaf node
int bitLength = lengths[i];
bl_counts[bitLength - 1]++;
this.length[childs[2 * i]] = (byte)lengths[i];
}
}
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("Tree "+freqs.Length+" lengths:");
// for (int i=0; i < numLeafs; i++) {
// //Console.WriteLine("Node "+childs[2*i]+" freq: "+freqs[childs[2*i]]
// + " len: "+length[childs[2*i]]);
// }
// }
if (overflow == 0)
{
return;
}
int incrBitLen = maxLength - 1;
do
{
// Find the first bit length which could increase:
while (bl_counts[--incrBitLen] == 0)
{
}
// Move this node one down and remove a corresponding
// number of overflow nodes.
do
{
bl_counts[incrBitLen]--;
bl_counts[++incrBitLen]++;
overflow -= 1 << (maxLength - 1 - incrBitLen);
} while (overflow > 0 && incrBitLen < maxLength - 1);
} while (overflow > 0);
/* We may have overshot above. Move some nodes from maxLength to
* maxLength-1 in that case.
*/
bl_counts[maxLength - 1] += overflow;
bl_counts[maxLength - 2] -= overflow;
/* Now recompute all bit lengths, scanning in increasing
* frequency. It is simpler to reconstruct all lengths instead of
* fixing only the wrong ones. This idea is taken from 'ar'
* written by Haruhiko Okumura.
*
* The nodes were inserted with decreasing frequency into the childs
* array.
*/
int nodePtr = 2 * numLeafs;
for (int bits = maxLength; bits != 0; bits--)
{
int n = bl_counts[bits - 1];
while (n > 0)
{
int childPtr = 2 * childs[nodePtr++];
if (childs[childPtr + 1] == -1)
{
// We found another leaf
length[childs[childPtr]] = (byte)bits;
n--;
}
}
}
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("*** After overflow elimination. ***");
// for (int i=0; i < numLeafs; i++) {
// //Console.WriteLine("Node "+childs[2*i]+" freq: "+freqs[childs[2*i]]
// + " len: "+length[childs[2*i]]);
// }
// }
}
}
#region Instance Fields
/// <summary>
/// Pending buffer to use
/// </summary>
public DeflaterPending pending;
private Tree literalTree;
private Tree distTree;
private Tree blTree;
// Buffer for distances
private short[] d_buf;
private byte[] l_buf;
private int last_lit;
private int extra_bits;
#endregion Instance Fields
static DeflaterHuffman()
{
// See RFC 1951 3.2.6
// Literal codes
staticLCodes = new short[LITERAL_NUM];
staticLLength = new byte[LITERAL_NUM];
int i = 0;
while (i < 144)
{
staticLCodes[i] = BitReverse((0x030 + i) << 8);
staticLLength[i++] = 8;
}
while (i < 256)
{
staticLCodes[i] = BitReverse((0x190 - 144 + i) << 7);
staticLLength[i++] = 9;
}
while (i < 280)
{
staticLCodes[i] = BitReverse((0x000 - 256 + i) << 9);
staticLLength[i++] = 7;
}
while (i < LITERAL_NUM)
{
staticLCodes[i] = BitReverse((0x0c0 - 280 + i) << 8);
staticLLength[i++] = 8;
}
// Distance codes
staticDCodes = new short[DIST_NUM];
staticDLength = new byte[DIST_NUM];
for (i = 0; i < DIST_NUM; i++)
{
staticDCodes[i] = BitReverse(i << 11);
staticDLength[i] = 5;
}
}
/// <summary>
/// Construct instance with pending buffer
/// </summary>
/// <param name="pending">Pending buffer to use</param>
public DeflaterHuffman(DeflaterPending pending)
{
this.pending = pending;
literalTree = new Tree(this, LITERAL_NUM, 257, 15);
distTree = new Tree(this, DIST_NUM, 1, 15);
blTree = new Tree(this, BITLEN_NUM, 4, 7);
d_buf = new short[BUFSIZE];
l_buf = new byte[BUFSIZE];
}
/// <summary>
/// Reset internal state
/// </summary>
public void Reset()
{
last_lit = 0;
extra_bits = 0;
literalTree.Reset();
distTree.Reset();
blTree.Reset();
}
/// <summary>
/// Write all trees to pending buffer
/// </summary>
/// <param name="blTreeCodes">The number/rank of treecodes to send.</param>
public void SendAllTrees(int blTreeCodes)
{
blTree.BuildCodes();
literalTree.BuildCodes();
distTree.BuildCodes();
pending.WriteBits(literalTree.numCodes - 257, 5);
pending.WriteBits(distTree.numCodes - 1, 5);
pending.WriteBits(blTreeCodes - 4, 4);
for (int rank = 0; rank < blTreeCodes; rank++)
{
pending.WriteBits(blTree.length[BL_ORDER[rank]], 3);
}
literalTree.WriteTree(blTree);
distTree.WriteTree(blTree);
#if DebugDeflation
if (DeflaterConstants.DEBUGGING) {
blTree.CheckEmpty();
}
#endif
}
/// <summary>
/// Compress current buffer writing data to pending buffer
/// </summary>
public void CompressBlock()
{
for (int i = 0; i < last_lit; i++)
{
int litlen = l_buf[i] & 0xff;
int dist = d_buf[i];
if (dist-- != 0)
{
// if (DeflaterConstants.DEBUGGING) {
// Console.Write("["+(dist+1)+","+(litlen+3)+"]: ");
// }
int lc = Lcode(litlen);
literalTree.WriteSymbol(lc);
int bits = (lc - 261) / 4;
if (bits > 0 && bits <= 5)
{
pending.WriteBits(litlen & ((1 << bits) - 1), bits);
}
int dc = Dcode(dist);
distTree.WriteSymbol(dc);
bits = dc / 2 - 1;
if (bits > 0)
{
pending.WriteBits(dist & ((1 << bits) - 1), bits);
}
}
else
{
// if (DeflaterConstants.DEBUGGING) {
// if (litlen > 32 && litlen < 127) {
// Console.Write("("+(char)litlen+"): ");
// } else {
// Console.Write("{"+litlen+"}: ");
// }
// }
literalTree.WriteSymbol(litlen);
}
}
#if DebugDeflation
if (DeflaterConstants.DEBUGGING) {
Console.Write("EOF: ");
}
#endif
literalTree.WriteSymbol(EOF_SYMBOL);
#if DebugDeflation
if (DeflaterConstants.DEBUGGING) {
literalTree.CheckEmpty();
distTree.CheckEmpty();
}
#endif
}
/// <summary>
/// Flush block to output with no compression
/// </summary>
/// <param name="stored">Data to write</param>
/// <param name="storedOffset">Index of first byte to write</param>
/// <param name="storedLength">Count of bytes to write</param>
/// <param name="lastBlock">True if this is the last block</param>
public void FlushStoredBlock(byte[] stored, int storedOffset, int storedLength, bool lastBlock)
{
#if DebugDeflation
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("Flushing stored block "+ storedLength);
// }
#endif
pending.WriteBits((DeflaterConstants.STORED_BLOCK << 1) + (lastBlock ? 1 : 0), 3);
pending.AlignToByte();
pending.WriteShort(storedLength);
pending.WriteShort(~storedLength);
pending.WriteBlock(stored, storedOffset, storedLength);
Reset();
}
/// <summary>
/// Flush block to output with compression
/// </summary>
/// <param name="stored">Data to flush</param>
/// <param name="storedOffset">Index of first byte to flush</param>
/// <param name="storedLength">Count of bytes to flush</param>
/// <param name="lastBlock">True if this is the last block</param>
public void FlushBlock(byte[] stored, int storedOffset, int storedLength, bool lastBlock)
{
literalTree.freqs[EOF_SYMBOL]++;
// Build trees
literalTree.BuildTree();
distTree.BuildTree();
// Calculate bitlen frequency
literalTree.CalcBLFreq(blTree);
distTree.CalcBLFreq(blTree);
// Build bitlen tree
blTree.BuildTree();
int blTreeCodes = 4;
for (int i = 18; i > blTreeCodes; i--)
{
if (blTree.length[BL_ORDER[i]] > 0)
{
blTreeCodes = i + 1;
}
}
int opt_len = 14 + blTreeCodes * 3 + blTree.GetEncodedLength() +
literalTree.GetEncodedLength() + distTree.GetEncodedLength() +
extra_bits;
int static_len = extra_bits;
for (int i = 0; i < LITERAL_NUM; i++)
{
static_len += literalTree.freqs[i] * staticLLength[i];
}
for (int i = 0; i < DIST_NUM; i++)
{
static_len += distTree.freqs[i] * staticDLength[i];
}
if (opt_len >= static_len)
{
// Force static trees
opt_len = static_len;
}
if (storedOffset >= 0 && storedLength + 4 < opt_len >> 3)
{
// Store Block
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("Storing, since " + storedLength + " < " + opt_len
// + " <= " + static_len);
// }
FlushStoredBlock(stored, storedOffset, storedLength, lastBlock);
}
else if (opt_len == static_len)
{
// Encode with static tree
pending.WriteBits((DeflaterConstants.STATIC_TREES << 1) + (lastBlock ? 1 : 0), 3);
literalTree.SetStaticCodes(staticLCodes, staticLLength);
distTree.SetStaticCodes(staticDCodes, staticDLength);
CompressBlock();
Reset();
}
else
{
// Encode with dynamic tree
pending.WriteBits((DeflaterConstants.DYN_TREES << 1) + (lastBlock ? 1 : 0), 3);
SendAllTrees(blTreeCodes);
CompressBlock();
Reset();
}
}
/// <summary>
/// Get value indicating if internal buffer is full
/// </summary>
/// <returns>true if buffer is full</returns>
public bool IsFull()
{
return last_lit >= BUFSIZE;
}
/// <summary>
/// Add literal to buffer
/// </summary>
/// <param name="literal">Literal value to add to buffer.</param>
/// <returns>Value indicating internal buffer is full</returns>
public bool TallyLit(int literal)
{
// if (DeflaterConstants.DEBUGGING) {
// if (lit > 32 && lit < 127) {
// //Console.WriteLine("("+(char)lit+")");
// } else {
// //Console.WriteLine("{"+lit+"}");
// }
// }
d_buf[last_lit] = 0;
l_buf[last_lit++] = (byte)literal;
literalTree.freqs[literal]++;
return IsFull();
}
/// <summary>
/// Add distance code and length to literal and distance trees
/// </summary>
/// <param name="distance">Distance code</param>
/// <param name="length">Length</param>
/// <returns>Value indicating if internal buffer is full</returns>
public bool TallyDist(int distance, int length)
{
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("[" + distance + "," + length + "]");
// }
d_buf[last_lit] = (short)distance;
l_buf[last_lit++] = (byte)(length - 3);
int lc = Lcode(length - 3);
literalTree.freqs[lc]++;
if (lc >= 265 && lc < 285)
{
extra_bits += (lc - 261) / 4;
}
int dc = Dcode(distance - 1);
distTree.freqs[dc]++;
if (dc >= 4)
{
extra_bits += dc / 2 - 1;
}
return IsFull();
}
/// <summary>
/// Reverse the bits of a 16 bit value.
/// </summary>
/// <param name="toReverse">Value to reverse bits</param>
/// <returns>Value with bits reversed</returns>
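/// <example>
/// A small illustration (not part of the original source): the lowest bit of the input
/// maps to the highest bit of the 16 bit result.
/// <code>
/// short reversed = DeflaterHuffman.BitReverse(0x0001); // unchecked((short)0x8000)
/// </code>
/// </example>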
public static short BitReverse(int toReverse)
{
return (short)(bit4Reverse[toReverse & 0xF] << 12 |
bit4Reverse[(toReverse >> 4) & 0xF] << 8 |
bit4Reverse[(toReverse >> 8) & 0xF] << 4 |
bit4Reverse[toReverse >> 12]);
}
private static int Lcode(int length)
{
if (length == 255)
{
return 285;
}
int code = 257;
while (length >= 8)
{
code += 4;
length >>= 1;
}
return code + length;
}
private static int Dcode(int distance)
{
int code = 0;
while (distance >= 4)
{
code += 2;
distance >>= 1;
}
return code + distance;
}
}
}

View File

@@ -0,0 +1,17 @@
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// This class stores the pending output of the Deflater.
///
/// author of the original java version : Jochen Hoenicke
/// </summary>
public class DeflaterPending : PendingBuffer
{
/// <summary>
/// Construct instance with default buffer size
/// </summary>
public DeflaterPending() : base(DeflaterConstants.PENDING_BUF_SIZE)
{
}
}
}

View File

@@ -0,0 +1,887 @@
using ICSharpCode.SharpZipLib.Checksum;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// Inflater is used to decompress data that has been compressed according
/// to the "deflate" standard described in rfc1951.
///
/// By default Zlib (rfc1950) headers and footers are expected in the input.
/// You can use constructor <code> public Inflater(bool noHeader)</code> passing true
/// if there is no Zlib header information
///
/// The usage is as follows. First you have to set some input with
/// <code>SetInput()</code>, then call <code>Inflate()</code>. If inflate doesn't
/// produce any bytes there may be three reasons:
/// <ul>
/// <li>IsNeedingInput() returns true because the input buffer is empty.
/// You have to provide more input with <code>SetInput()</code>.
/// NOTE: IsNeedingInput() also returns true when the stream is finished.
/// </li>
/// <li>IsNeedingDictionary() returns true, you have to provide a preset
/// dictionary with <code>SetDictionary()</code>.</li>
/// <li>IsFinished returns true, the inflater has finished.</li>
/// </ul>
/// Once the first output byte is produced, a dictionary will not be
/// needed at a later stage.
///
/// author of the original java version : John Leuner, Jochen Hoenicke
/// </summary>
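/// <example>
/// A minimal usage sketch (not part of the original source). It assumes
/// <c>compressedData</c> holds a complete zlib (RFC 1950) wrapped deflate stream.
/// <code>
/// byte[] Decompress(byte[] compressedData)
/// {
///     var inflater = new Inflater();
///     inflater.SetInput(compressedData);
///     using (var result = new System.IO.MemoryStream())
///     {
///         byte[] outputBuffer = new byte[4096];
///         while (!inflater.IsFinished)
///         {
///             int produced = inflater.Inflate(outputBuffer);
///             if (produced <= 0)
///             {
///                 // No progress: more input or a preset dictionary would be needed here.
///                 break;
///             }
///             result.Write(outputBuffer, 0, produced);
///         }
///         return result.ToArray();
///     }
/// }
/// </code>
/// </example>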
public class Inflater
{
#region Constants/Readonly
/// <summary>
/// Copy lengths for literal codes 257..285
/// </summary>
private static readonly int[] CPLENS = {
3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258
};
/// <summary>
/// Extra bits for literal codes 257..285
/// </summary>
private static readonly int[] CPLEXT = {
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0
};
/// <summary>
/// Copy offsets for distance codes 0..29
/// </summary>
private static readonly int[] CPDIST = {
1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
8193, 12289, 16385, 24577
};
/// <summary>
/// Extra bits for distance codes
/// </summary>
private static readonly int[] CPDEXT = {
0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
12, 12, 13, 13
};
/// <summary>
/// These are the possible states for an inflater
/// </summary>
private const int DECODE_HEADER = 0;
private const int DECODE_DICT = 1;
private const int DECODE_BLOCKS = 2;
private const int DECODE_STORED_LEN1 = 3;
private const int DECODE_STORED_LEN2 = 4;
private const int DECODE_STORED = 5;
private const int DECODE_DYN_HEADER = 6;
private const int DECODE_HUFFMAN = 7;
private const int DECODE_HUFFMAN_LENBITS = 8;
private const int DECODE_HUFFMAN_DIST = 9;
private const int DECODE_HUFFMAN_DISTBITS = 10;
private const int DECODE_CHKSUM = 11;
private const int FINISHED = 12;
#endregion Constants/Readonly
#region Instance Fields
/// <summary>
/// This variable contains the current state.
/// </summary>
private int mode;
/// <summary>
/// The adler checksum of the dictionary or of the decompressed
/// stream, as it is written in the header or footer, respectively,
/// of the compressed stream.
/// Only valid if mode is DECODE_DICT or DECODE_CHKSUM.
/// </summary>
private int readAdler;
/// <summary>
/// The number of bits needed to complete the current state. This
/// is valid, if mode is DECODE_DICT, DECODE_CHKSUM,
/// DECODE_HUFFMAN_LENBITS or DECODE_HUFFMAN_DISTBITS.
/// </summary>
private int neededBits;
private int repLength;
private int repDist;
private int uncomprLen;
/// <summary>
/// True, if the last block flag was set in the last block of the
/// inflated stream. This means that the stream ends after the
/// current block.
/// </summary>
private bool isLastBlock;
/// <summary>
/// The total number of inflated bytes.
/// </summary>
private long totalOut;
/// <summary>
/// The total number of bytes set with setInput(). This is not the
/// value returned by the TotalIn property, since this also includes the
/// unprocessed input.
/// </summary>
private long totalIn;
/// <summary>
/// This variable stores the noHeader flag that was given to the constructor.
/// True means that the inflated stream doesn't contain a Zlib header or
/// footer.
/// </summary>
private bool noHeader;
private readonly StreamManipulator input;
private OutputWindow outputWindow;
private InflaterDynHeader dynHeader;
private InflaterHuffmanTree litlenTree, distTree;
private Adler32 adler;
#endregion Instance Fields
#region Constructors
/// <summary>
/// Creates a new inflater or RFC1951 decompressor
/// RFC1950/Zlib headers and footers will be expected in the input data
/// </summary>
public Inflater() : this(false)
{
}
/// <summary>
/// Creates a new inflater.
/// </summary>
/// <param name="noHeader">
/// True if no RFC1950/Zlib header and footer fields are expected in the input data
///
/// This is used for GZIPed/Zipped input.
///
/// For compatibility with
/// Sun JDK you should provide one byte of input more than needed in
/// this case.
/// </param>
public Inflater(bool noHeader)
{
this.noHeader = noHeader;
if (!noHeader)
this.adler = new Adler32();
input = new StreamManipulator();
outputWindow = new OutputWindow();
mode = noHeader ? DECODE_BLOCKS : DECODE_HEADER;
}
#endregion Constructors
/// <summary>
/// Resets the inflater so that a new stream can be decompressed. All
/// pending input and output will be discarded.
/// </summary>
public void Reset()
{
mode = noHeader ? DECODE_BLOCKS : DECODE_HEADER;
totalIn = 0;
totalOut = 0;
input.Reset();
outputWindow.Reset();
dynHeader = null;
litlenTree = null;
distTree = null;
isLastBlock = false;
adler?.Reset();
}
/// <summary>
/// Decodes a zlib/RFC1950 header.
/// </summary>
/// <returns>
/// False if more input is needed.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// The header is invalid.
/// </exception>
private bool DecodeHeader()
{
int header = input.PeekBits(16);
if (header < 0)
{
return false;
}
input.DropBits(16);
// The header is written in "wrong" byte order
header = ((header << 8) | (header >> 8)) & 0xffff;
if (header % 31 != 0)
{
throw new SharpZipBaseException("Header checksum illegal");
}
if ((header & 0x0f00) != (Deflater.DEFLATED << 8))
{
throw new SharpZipBaseException("Compression Method unknown");
}
/* Maximum size of the backwards window in bits.
* We currently ignore this, but we could use it to make the
* inflater window more space efficient. On the other hand the
* full window (15 bits) is needed most times, anyway.
int max_wbits = ((header & 0x7000) >> 12) + 8;
*/
if ((header & 0x0020) == 0)
{ // Dictionary flag?
mode = DECODE_BLOCKS;
}
else
{
mode = DECODE_DICT;
neededBits = 32;
}
return true;
}
/// <summary>
/// Decodes the dictionary checksum after the deflate header.
/// </summary>
/// <returns>
/// False if more input is needed.
/// </returns>
private bool DecodeDict()
{
while (neededBits > 0)
{
int dictByte = input.PeekBits(8);
if (dictByte < 0)
{
return false;
}
input.DropBits(8);
readAdler = (readAdler << 8) | dictByte;
neededBits -= 8;
}
return false;
}
/// <summary>
/// Decodes the huffman encoded symbols in the input stream.
/// </summary>
/// <returns>
/// false if more input is needed, true if output window is
/// full or the current block ends.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// if deflated stream is invalid.
/// </exception>
private bool DecodeHuffman()
{
int free = outputWindow.GetFreeSpace();
while (free >= 258)
{
int symbol;
switch (mode)
{
case DECODE_HUFFMAN:
// This is the inner loop so it is optimized a bit
while (((symbol = litlenTree.GetSymbol(input)) & ~0xff) == 0)
{
outputWindow.Write(symbol);
if (--free < 258)
{
return true;
}
}
if (symbol < 257)
{
if (symbol < 0)
{
return false;
}
else
{
// symbol == 256: end of block
distTree = null;
litlenTree = null;
mode = DECODE_BLOCKS;
return true;
}
}
try
{
repLength = CPLENS[symbol - 257];
neededBits = CPLEXT[symbol - 257];
}
catch (Exception)
{
throw new SharpZipBaseException("Illegal rep length code");
}
goto case DECODE_HUFFMAN_LENBITS; // fall through
case DECODE_HUFFMAN_LENBITS:
if (neededBits > 0)
{
mode = DECODE_HUFFMAN_LENBITS;
int i = input.PeekBits(neededBits);
if (i < 0)
{
return false;
}
input.DropBits(neededBits);
repLength += i;
}
mode = DECODE_HUFFMAN_DIST;
goto case DECODE_HUFFMAN_DIST; // fall through
case DECODE_HUFFMAN_DIST:
symbol = distTree.GetSymbol(input);
if (symbol < 0)
{
return false;
}
try
{
repDist = CPDIST[symbol];
neededBits = CPDEXT[symbol];
}
catch (Exception)
{
throw new SharpZipBaseException("Illegal rep dist code");
}
goto case DECODE_HUFFMAN_DISTBITS; // fall through
case DECODE_HUFFMAN_DISTBITS:
if (neededBits > 0)
{
mode = DECODE_HUFFMAN_DISTBITS;
int i = input.PeekBits(neededBits);
if (i < 0)
{
return false;
}
input.DropBits(neededBits);
repDist += i;
}
outputWindow.Repeat(repLength, repDist);
free -= repLength;
mode = DECODE_HUFFMAN;
break;
default:
throw new SharpZipBaseException("Inflater unknown mode");
}
}
return true;
}
/// <summary>
/// Decodes the adler checksum after the deflate stream.
/// </summary>
/// <returns>
/// false if more input is needed.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// If checksum doesn't match.
/// </exception>
private bool DecodeChksum()
{
while (neededBits > 0)
{
int chkByte = input.PeekBits(8);
if (chkByte < 0)
{
return false;
}
input.DropBits(8);
readAdler = (readAdler << 8) | chkByte;
neededBits -= 8;
}
if ((int)adler?.Value != readAdler)
{
throw new SharpZipBaseException("Adler chksum doesn't match: " + (int)adler?.Value + " vs. " + readAdler);
}
mode = FINISHED;
return false;
}
/// <summary>
/// Decodes the deflated stream.
/// </summary>
/// <returns>
/// false if more input is needed, or if finished.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// if deflated stream is invalid.
/// </exception>
private bool Decode()
{
switch (mode)
{
case DECODE_HEADER:
return DecodeHeader();
case DECODE_DICT:
return DecodeDict();
case DECODE_CHKSUM:
return DecodeChksum();
case DECODE_BLOCKS:
if (isLastBlock)
{
if (noHeader)
{
mode = FINISHED;
return false;
}
else
{
input.SkipToByteBoundary();
neededBits = 32;
mode = DECODE_CHKSUM;
return true;
}
}
int type = input.PeekBits(3);
if (type < 0)
{
return false;
}
input.DropBits(3);
isLastBlock |= (type & 1) != 0;
switch (type >> 1)
{
case DeflaterConstants.STORED_BLOCK:
input.SkipToByteBoundary();
mode = DECODE_STORED_LEN1;
break;
case DeflaterConstants.STATIC_TREES:
litlenTree = InflaterHuffmanTree.defLitLenTree;
distTree = InflaterHuffmanTree.defDistTree;
mode = DECODE_HUFFMAN;
break;
case DeflaterConstants.DYN_TREES:
dynHeader = new InflaterDynHeader(input);
mode = DECODE_DYN_HEADER;
break;
default:
throw new SharpZipBaseException("Unknown block type " + type);
}
return true;
case DECODE_STORED_LEN1:
{
if ((uncomprLen = input.PeekBits(16)) < 0)
{
return false;
}
input.DropBits(16);
mode = DECODE_STORED_LEN2;
}
goto case DECODE_STORED_LEN2; // fall through
case DECODE_STORED_LEN2:
{
int nlen = input.PeekBits(16);
if (nlen < 0)
{
return false;
}
input.DropBits(16);
if (nlen != (uncomprLen ^ 0xffff))
{
throw new SharpZipBaseException("broken uncompressed block");
}
mode = DECODE_STORED;
}
goto case DECODE_STORED; // fall through
case DECODE_STORED:
{
int more = outputWindow.CopyStored(input, uncomprLen);
uncomprLen -= more;
if (uncomprLen == 0)
{
mode = DECODE_BLOCKS;
return true;
}
return !input.IsNeedingInput;
}
case DECODE_DYN_HEADER:
if (!dynHeader.AttemptRead())
{
return false;
}
litlenTree = dynHeader.LiteralLengthTree;
distTree = dynHeader.DistanceTree;
mode = DECODE_HUFFMAN;
goto case DECODE_HUFFMAN; // fall through
case DECODE_HUFFMAN:
case DECODE_HUFFMAN_LENBITS:
case DECODE_HUFFMAN_DIST:
case DECODE_HUFFMAN_DISTBITS:
return DecodeHuffman();
case FINISHED:
return false;
default:
throw new SharpZipBaseException("Inflater.Decode unknown mode");
}
}
/// <summary>
/// Sets the preset dictionary. This should only be called if
/// IsNeedingDictionary returns true, and it should set the same
/// dictionary that was used for deflating. The Adler property
/// returns the checksum of the dictionary needed.
/// </summary>
/// <param name="buffer">
/// The dictionary.
/// </param>
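/// <example>
/// A minimal sketch (not part of the original source); <c>presetDictionary</c> is a
/// placeholder for the same dictionary bytes that were given to the deflater.
/// <code>
/// if (inflater.Inflate(outputBuffer) == 0 && inflater.IsNeedingDictionary)
/// {
///     inflater.SetDictionary(presetDictionary);
/// }
/// </code>
/// </example>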
public void SetDictionary(byte[] buffer)
{
SetDictionary(buffer, 0, buffer.Length);
}
/// <summary>
/// Sets the preset dictionary. This should only be called if
/// IsNeedingDictionary returns true, and it should set the same
/// dictionary that was used for deflating. The Adler property
/// returns the checksum of the dictionary needed.
/// </summary>
/// <param name="buffer">
/// The dictionary.
/// </param>
/// <param name="index">
/// The index into buffer where the dictionary starts.
/// </param>
/// <param name="count">
/// The number of bytes in the dictionary.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// No dictionary is needed.
/// </exception>
/// <exception cref="SharpZipBaseException">
/// The adler checksum for the buffer is invalid
/// </exception>
public void SetDictionary(byte[] buffer, int index, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (index < 0)
{
throw new ArgumentOutOfRangeException(nameof(index));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (!IsNeedingDictionary)
{
throw new InvalidOperationException("Dictionary is not needed");
}
adler?.Update(new ArraySegment<byte>(buffer, index, count));
if (adler != null && (int)adler.Value != readAdler)
{
throw new SharpZipBaseException("Wrong adler checksum");
}
adler?.Reset();
outputWindow.CopyDict(buffer, index, count);
mode = DECODE_BLOCKS;
}
/// <summary>
/// Sets the input. This should only be called if IsNeedingInput
/// returns true.
/// </summary>
/// <param name="buffer">
/// the input.
/// </param>
public void SetInput(byte[] buffer)
{
SetInput(buffer, 0, buffer.Length);
}
/// <summary>
/// Sets the input. This should only be called if IsNeedingInput
/// returns true.
/// </summary>
/// <param name="buffer">
/// The source of input data
/// </param>
/// <param name="index">
/// The index into buffer where the input starts.
/// </param>
/// <param name="count">
/// The number of bytes of input to use.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// No input is needed.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// The index and/or count are wrong.
/// </exception>
public void SetInput(byte[] buffer, int index, int count)
{
input.SetInput(buffer, index, count);
totalIn += (long)count;
}
/// <summary>
/// Inflates the compressed stream to the output buffer. If this
/// returns 0, you should check whether IsNeedingDictionary,
/// IsNeedingInput or IsFinished returns true, to determine why no
/// further output is produced.
/// </summary>
/// <param name="buffer">
/// the output buffer.
/// </param>
/// <returns>
/// The number of bytes written to the buffer, 0 if no further
/// output can be produced.
/// </returns>
/// <exception cref="System.ArgumentOutOfRangeException">
/// if buffer has length 0.
/// </exception>
/// <exception cref="System.FormatException">
/// if deflated stream is invalid.
/// </exception>
public int Inflate(byte[] buffer)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
return Inflate(buffer, 0, buffer.Length);
}
/// <summary>
/// Inflates the compressed stream to the output buffer. If this
/// returns 0, you should check whether IsNeedingDictionary,
/// IsNeedingInput or IsFinished returns true, to determine why no
/// further output is produced.
/// </summary>
/// <param name="buffer">
/// the output buffer.
/// </param>
/// <param name="offset">
/// the offset in buffer where storing starts.
/// </param>
/// <param name="count">
/// the maximum number of bytes to output.
/// </param>
/// <returns>
/// the number of bytes written to the buffer, 0 if no further output can be produced.
/// </returns>
/// <exception cref="System.ArgumentOutOfRangeException">
/// if count is less than 0.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// if the index and / or count are wrong.
/// </exception>
/// <exception cref="System.FormatException">
/// if deflated stream is invalid.
/// </exception>
public int Inflate(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), "count cannot be negative");
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset), "offset cannot be negative");
}
if (offset + count > buffer.Length)
{
throw new ArgumentException("count exceeds buffer bounds");
}
// Special case: count may be zero
if (count == 0)
{
if (!IsFinished)
{ // -jr- 08-Nov-2003 INFLATE_BUG fix..
Decode();
}
return 0;
}
int bytesCopied = 0;
do
{
if (mode != DECODE_CHKSUM)
{
/* Don't give away any output, if we are waiting for the
* checksum in the input stream.
*
* With this trick we have always:
* IsNeedingInput() and not IsFinished()
* implies more output can be produced.
*/
int more = outputWindow.CopyOutput(buffer, offset, count);
if (more > 0)
{
adler?.Update(new ArraySegment<byte>(buffer, offset, more));
offset += more;
bytesCopied += more;
totalOut += (long)more;
count -= more;
if (count == 0)
{
return bytesCopied;
}
}
}
} while (Decode() || ((outputWindow.GetAvailable() > 0) && (mode != DECODE_CHKSUM)));
return bytesCopied;
}
/// <summary>
/// Returns true if the input buffer is empty.
/// You should then call SetInput().
/// NOTE: This property also returns true when the stream is finished.
/// </summary>
public bool IsNeedingInput
{
get
{
return input.IsNeedingInput;
}
}
/// <summary>
/// Returns true if a preset dictionary is needed to inflate the input.
/// </summary>
public bool IsNeedingDictionary
{
get
{
return mode == DECODE_DICT && neededBits == 0;
}
}
/// <summary>
/// Returns true if the inflater has finished. This means that no
/// input is needed and no output can be produced.
/// </summary>
public bool IsFinished
{
get
{
return mode == FINISHED && outputWindow.GetAvailable() == 0;
}
}
/// <summary>
/// Gets the adler checksum. This is either the checksum of all
/// uncompressed bytes returned by Inflate(), or if IsNeedingDictionary
/// returns true (and thus no output was yet produced) this is the
/// adler checksum of the expected dictionary.
/// </summary>
/// <returns>
/// the adler checksum.
/// </returns>
public int Adler
{
get
{
if (IsNeedingDictionary)
{
return readAdler;
}
else if (adler != null)
{
return (int)adler.Value;
}
else
{
return 0;
}
}
}
/// <summary>
/// Gets the total number of output bytes returned by Inflate().
/// </summary>
/// <returns>
/// the total number of output bytes.
/// </returns>
public long TotalOut
{
get
{
return totalOut;
}
}
/// <summary>
/// Gets the total number of processed compressed input bytes.
/// </summary>
/// <returns>
/// The total number of bytes of processed input bytes.
/// </returns>
public long TotalIn
{
get
{
return totalIn - (long)RemainingInput;
}
}
/// <summary>
/// Gets the number of unprocessed input bytes. Useful, if the end of the
/// stream is reached and you want to further process the bytes after
/// the deflate stream.
/// </summary>
/// <returns>
/// The number of bytes of the input which have not been processed.
/// </returns>
public int RemainingInput
{
// TODO: This should be a long?
get
{
return input.AvailableBytes;
}
}
}
}

View File

@@ -0,0 +1,151 @@
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using System;
using System.Collections.Generic;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
internal class InflaterDynHeader
{
#region Constants
// maximum number of literal/length codes
private const int LITLEN_MAX = 286;
// maximum number of distance codes
private const int DIST_MAX = 30;
// maximum data code lengths to read
private const int CODELEN_MAX = LITLEN_MAX + DIST_MAX;
// maximum meta code length codes to read
private const int META_MAX = 19;
private static readonly int[] MetaCodeLengthIndex =
{ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
#endregion Constants
/// <summary>
/// Continue decoding header from <see cref="input"/> until more bits are needed or decoding has been completed
/// </summary>
/// <returns>Returns whether decoding could be completed</returns>
public bool AttemptRead()
=> !state.MoveNext() || state.Current;
public InflaterDynHeader(StreamManipulator input)
{
this.input = input;
stateMachine = CreateStateMachine();
state = stateMachine.GetEnumerator();
}
private IEnumerable<bool> CreateStateMachine()
{
// Read initial code length counts from header
while (!input.TryGetBits(5, ref litLenCodeCount, 257)) yield return false;
while (!input.TryGetBits(5, ref distanceCodeCount, 1)) yield return false;
while (!input.TryGetBits(4, ref metaCodeCount, 4)) yield return false;
var dataCodeCount = litLenCodeCount + distanceCodeCount;
if (litLenCodeCount > LITLEN_MAX) throw new ValueOutOfRangeException(nameof(litLenCodeCount));
if (distanceCodeCount > DIST_MAX) throw new ValueOutOfRangeException(nameof(distanceCodeCount));
if (metaCodeCount > META_MAX) throw new ValueOutOfRangeException(nameof(metaCodeCount));
// Load code lengths for the meta tree from the header bits
for (int i = 0; i < metaCodeCount; i++)
{
while (!input.TryGetBits(3, ref codeLengths, MetaCodeLengthIndex[i])) yield return false;
}
var metaCodeTree = new InflaterHuffmanTree(codeLengths);
// Decompress the meta tree symbols into the data table code lengths
int index = 0;
while (index < dataCodeCount)
{
byte codeLength;
int symbol;
while ((symbol = metaCodeTree.GetSymbol(input)) < 0) yield return false;
if (symbol < 16)
{
// append literal code length
codeLengths[index++] = (byte)symbol;
}
else
{
int repeatCount = 0;
if (symbol == 16) // Repeat last code length 3..6 times
{
if (index == 0)
throw new StreamDecodingException("Cannot repeat previous code length when no other code length has been read");
codeLength = codeLengths[index - 1];
// 2 bits + 3, [3..6]
while (!input.TryGetBits(2, ref repeatCount, 3)) yield return false;
}
else if (symbol == 17) // Repeat zero 3..10 times
{
codeLength = 0;
// 3 bits + 3, [3..10]
while (!input.TryGetBits(3, ref repeatCount, 3)) yield return false;
}
else // (symbol == 18), Repeat zero 11..138 times
{
codeLength = 0;
// 7 bits + 11, [11..138]
while (!input.TryGetBits(7, ref repeatCount, 11)) yield return false;
}
if (index + repeatCount > dataCodeCount)
throw new StreamDecodingException("Cannot repeat code lengths past total number of data code lengths");
while (repeatCount-- > 0)
codeLengths[index++] = codeLength;
}
}
if (codeLengths[256] == 0)
throw new StreamDecodingException("Inflater dynamic header end-of-block code missing");
litLenTree = new InflaterHuffmanTree(new ArraySegment<byte>(codeLengths, 0, litLenCodeCount));
distTree = new InflaterHuffmanTree(new ArraySegment<byte>(codeLengths, litLenCodeCount, distanceCodeCount));
yield return true;
}
/// <summary>
/// Get literal/length huffman tree, must not be used before <see cref="AttemptRead"/> has returned true
/// </summary>
/// <exception cref="StreamDecodingException">If hader has not been successfully read by the state machine</exception>
public InflaterHuffmanTree LiteralLengthTree
=> litLenTree ?? throw new StreamDecodingException("Header properties were accessed before header had been successfully read");
/// <summary>
/// Get distance huffman tree, must not be used before <see cref="AttemptRead"/> has returned true
/// </summary>
/// <exception cref="StreamDecodingException">If hader has not been successfully read by the state machine</exception>
public InflaterHuffmanTree DistanceTree
=> distTree ?? throw new StreamDecodingException("Header properties were accessed before header had been successfully read");
#region Instance Fields
private readonly StreamManipulator input;
private readonly IEnumerator<bool> state;
private readonly IEnumerable<bool> stateMachine;
private byte[] codeLengths = new byte[CODELEN_MAX];
private InflaterHuffmanTree litLenTree;
private InflaterHuffmanTree distTree;
private int litLenCodeCount, distanceCodeCount, metaCodeCount;
#endregion Instance Fields
}
}

View File

@@ -0,0 +1,237 @@
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using System;
using System.Collections.Generic;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// Huffman tree used for inflation
/// </summary>
public class InflaterHuffmanTree
{
#region Constants
private const int MAX_BITLEN = 15;
#endregion Constants
#region Instance Fields
private short[] tree;
#endregion Instance Fields
/// <summary>
/// Literal length tree
/// </summary>
public static InflaterHuffmanTree defLitLenTree;
/// <summary>
/// Distance tree
/// </summary>
public static InflaterHuffmanTree defDistTree;
static InflaterHuffmanTree()
{
try
{
byte[] codeLengths = new byte[288];
int i = 0;
while (i < 144)
{
codeLengths[i++] = 8;
}
while (i < 256)
{
codeLengths[i++] = 9;
}
while (i < 280)
{
codeLengths[i++] = 7;
}
while (i < 288)
{
codeLengths[i++] = 8;
}
defLitLenTree = new InflaterHuffmanTree(codeLengths);
codeLengths = new byte[32];
i = 0;
while (i < 32)
{
codeLengths[i++] = 5;
}
defDistTree = new InflaterHuffmanTree(codeLengths);
}
catch (Exception)
{
throw new SharpZipBaseException("InflaterHuffmanTree: static tree length illegal");
}
}
#region Constructors
/// <summary>
/// Constructs a Huffman tree from the array of code lengths.
/// </summary>
/// <param name = "codeLengths">
/// the array of code lengths
/// </param>
public InflaterHuffmanTree(IList<byte> codeLengths)
{
BuildTree(codeLengths);
}
#endregion Constructors
private void BuildTree(IList<byte> codeLengths)
{
int[] blCount = new int[MAX_BITLEN + 1];
int[] nextCode = new int[MAX_BITLEN + 1];
for (int i = 0; i < codeLengths.Count; i++)
{
int bits = codeLengths[i];
if (bits > 0)
{
blCount[bits]++;
}
}
int code = 0;
int treeSize = 512;
for (int bits = 1; bits <= MAX_BITLEN; bits++)
{
nextCode[bits] = code;
code += blCount[bits] << (16 - bits);
if (bits >= 10)
{
/* We need an extra table for bit lengths >= 10. */
int start = nextCode[bits] & 0x1ff80;
int end = code & 0x1ff80;
treeSize += (end - start) >> (16 - bits);
}
}
/* -jr comment this out! doesnt work for dynamic trees and pkzip 2.04g
if (code != 65536)
{
throw new SharpZipBaseException("Code lengths don't add up properly.");
}
*/
/* Now create and fill the extra tables from longest to shortest
* bit len. This way the sub trees will be aligned.
*/
tree = new short[treeSize];
int treePtr = 512;
for (int bits = MAX_BITLEN; bits >= 10; bits--)
{
int end = code & 0x1ff80;
code -= blCount[bits] << (16 - bits);
int start = code & 0x1ff80;
for (int i = start; i < end; i += 1 << 7)
{
tree[DeflaterHuffman.BitReverse(i)] = (short)((-treePtr << 4) | bits);
treePtr += 1 << (bits - 9);
}
}
for (int i = 0; i < codeLengths.Count; i++)
{
int bits = codeLengths[i];
if (bits == 0)
{
continue;
}
code = nextCode[bits];
int revcode = DeflaterHuffman.BitReverse(code);
if (bits <= 9)
{
do
{
tree[revcode] = (short)((i << 4) | bits);
revcode += 1 << bits;
} while (revcode < 512);
}
else
{
int subTree = tree[revcode & 511];
int treeLen = 1 << (subTree & 15);
subTree = -(subTree >> 4);
do
{
tree[subTree | (revcode >> 9)] = (short)((i << 4) | bits);
revcode += 1 << bits;
} while (revcode < treeLen);
}
nextCode[bits] = code + (1 << (16 - bits));
}
}
/// <summary>
/// Reads the next symbol from input. The symbol is encoded using the
/// huffman tree.
/// </summary>
/// <param name="input">
/// input the input source.
/// </param>
/// <returns>
/// the next symbol, or -1 if not enough input is available.
/// </returns>
public int GetSymbol(StreamManipulator input)
{
int lookahead, symbol;
if ((lookahead = input.PeekBits(9)) >= 0)
{
symbol = tree[lookahead];
int bitlen = symbol & 15;
if (symbol >= 0)
{
if (bitlen == 0)
{
throw new SharpZipBaseException("Encountered invalid codelength 0");
}
input.DropBits(bitlen);
return symbol >> 4;
}
int subtree = -(symbol >> 4);
if ((lookahead = input.PeekBits(bitlen)) >= 0)
{
symbol = tree[subtree | (lookahead >> 9)];
input.DropBits(symbol & 15);
return symbol >> 4;
}
else
{
int bits = input.AvailableBits;
lookahead = input.PeekBits(bits);
symbol = tree[subtree | (lookahead >> 9)];
if ((symbol & 15) <= bits)
{
input.DropBits(symbol & 15);
return symbol >> 4;
}
else
{
return -1;
}
}
}
else // Less than 9 bits
{
int bits = input.AvailableBits;
lookahead = input.PeekBits(bits);
symbol = tree[lookahead];
if (symbol >= 0 && (symbol & 15) <= bits)
{
input.DropBits(symbol & 15);
return symbol >> 4;
}
else
{
return -1;
}
}
}
}
}

View File

@@ -0,0 +1,268 @@
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// This class is a general purpose class for writing data to a buffer.
///
/// It allows you to write bits as well as bytes.
/// Based on DeflaterPending.java
///
/// author of the original java version : Jochen Hoenicke
/// </summary>
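/// <example>
/// A minimal sketch (not part of the original source) showing bit-level writes followed
/// by extraction of the buffered bytes.
/// <code>
/// var pending = new PendingBuffer(256);
/// pending.WriteBits(0x5, 3);   // write three bits
/// pending.WriteBits(0x7F, 7);  // write seven more bits
/// pending.AlignToByte();       // pad the partial byte with zero bits
/// byte[] written = pending.ToByteArray();
/// </code>
/// </example>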
public class PendingBuffer
{
#region Instance Fields
/// <summary>
/// Internal work buffer
/// </summary>
private readonly byte[] buffer;
private int start;
private int end;
private uint bits;
private int bitCount;
#endregion Instance Fields
#region Constructors
/// <summary>
/// construct instance using default buffer size of 4096
/// </summary>
public PendingBuffer() : this(4096)
{
}
/// <summary>
/// construct instance using specified buffer size
/// </summary>
/// <param name="bufferSize">
/// size to use for internal buffer
/// </param>
public PendingBuffer(int bufferSize)
{
buffer = new byte[bufferSize];
}
#endregion Constructors
/// <summary>
/// Clear internal state/buffers
/// </summary>
public void Reset()
{
start = end = bitCount = 0;
}
/// <summary>
/// Write a byte to buffer
/// </summary>
/// <param name="value">
/// The value to write
/// </param>
public void WriteByte(int value)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
buffer[end++] = unchecked((byte)value);
}
/// <summary>
/// Write a short value to buffer LSB first
/// </summary>
/// <param name="value">
/// The value to write.
/// </param>
public void WriteShort(int value)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
buffer[end++] = unchecked((byte)value);
buffer[end++] = unchecked((byte)(value >> 8));
}
/// <summary>
/// write an integer LSB first
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteInt(int value)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
buffer[end++] = unchecked((byte)value);
buffer[end++] = unchecked((byte)(value >> 8));
buffer[end++] = unchecked((byte)(value >> 16));
buffer[end++] = unchecked((byte)(value >> 24));
}
/// <summary>
/// Write a block of data to buffer
/// </summary>
/// <param name="block">data to write</param>
/// <param name="offset">offset of first byte to write</param>
/// <param name="length">number of bytes to write</param>
public void WriteBlock(byte[] block, int offset, int length)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
System.Array.Copy(block, offset, buffer, end, length);
end += length;
}
/// <summary>
/// The number of bits written to the buffer
/// </summary>
public int BitCount
{
get
{
return bitCount;
}
}
/// <summary>
/// Align internal buffer on a byte boundary
/// </summary>
public void AlignToByte()
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
if (bitCount > 0)
{
buffer[end++] = unchecked((byte)bits);
if (bitCount > 8)
{
buffer[end++] = unchecked((byte)(bits >> 8));
}
}
bits = 0;
bitCount = 0;
}
/// <summary>
/// Write bits to internal buffer
/// </summary>
/// <param name="b">source of bits</param>
/// <param name="count">number of bits to write</param>
public void WriteBits(int b, int count)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("writeBits("+b+","+count+")");
// }
#endif
bits |= (uint)(b << bitCount);
bitCount += count;
if (bitCount >= 16)
{
buffer[end++] = unchecked((byte)bits);
buffer[end++] = unchecked((byte)(bits >> 8));
bits >>= 16;
bitCount -= 16;
}
}
/// <summary>
/// Write a short value to internal buffer most significant byte first
/// </summary>
/// <param name="s">value to write</param>
public void WriteShortMSB(int s)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
buffer[end++] = unchecked((byte)(s >> 8));
buffer[end++] = unchecked((byte)s);
}
/// <summary>
/// Indicates if buffer has been flushed
/// </summary>
public bool IsFlushed
{
get
{
return end == 0;
}
}
/// <summary>
/// Flushes the pending buffer into the given output array. If the
/// output array is too small, only a partial flush is done.
/// </summary>
/// <param name="output">The output array.</param>
/// <param name="offset">The offset into output array.</param>
/// <param name="length">The maximum number of bytes to store.</param>
/// <returns>The number of bytes flushed.</returns>
public int Flush(byte[] output, int offset, int length)
{
if (bitCount >= 8)
{
buffer[end++] = unchecked((byte)bits);
bits >>= 8;
bitCount -= 8;
}
if (length > end - start)
{
length = end - start;
System.Array.Copy(buffer, start, output, offset, length);
start = 0;
end = 0;
}
else
{
System.Array.Copy(buffer, start, output, offset, length);
start += length;
}
return length;
}
/// <summary>
/// Convert internal buffer to byte array.
/// Buffer is empty on completion
/// </summary>
/// <returns>
/// The internal buffer contents converted to a byte array.
/// </returns>
public byte[] ToByteArray()
{
AlignToByte();
byte[] result = new byte[end - start];
System.Array.Copy(buffer, start, result, 0, result.Length);
start = 0;
end = 0;
return result;
}
}
}

View File

@@ -0,0 +1,438 @@
using ICSharpCode.SharpZipLib.Encryption;
using System;
using System.IO;
using System.Security.Cryptography;
namespace ICSharpCode.SharpZipLib.Zip.Compression.Streams
{
/// <summary>
/// A special stream deflating or compressing the bytes that are
/// written to it. It uses a Deflater to perform actual deflating.<br/>
/// Authors of the original java version : Tom Tromey, Jochen Hoenicke
/// </summary>
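/// <example>
/// A minimal usage sketch (not part of the original source); the file name and
/// <c>rawData</c> are placeholders.
/// <code>
/// void CompressToFile(byte[] rawData)
/// {
///     using (var fileStream = System.IO.File.Create("data.bin.deflated"))
///     using (var deflaterStream = new DeflaterOutputStream(fileStream))
///     {
///         deflaterStream.Write(rawData, 0, rawData.Length);
///         // Dispose calls Finish() and, as IsStreamOwner defaults to true, also closes fileStream.
///     }
/// }
/// </code>
/// </example>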
public class DeflaterOutputStream : Stream
{
#region Constructors
/// <summary>
/// Creates a new DeflaterOutputStream with a default Deflater and default buffer size.
/// </summary>
/// <param name="baseOutputStream">
/// the output stream where deflated output should be written.
/// </param>
public DeflaterOutputStream(Stream baseOutputStream)
: this(baseOutputStream, new Deflater(), 512)
{
}
/// <summary>
/// Creates a new DeflaterOutputStream with the given Deflater and
/// default buffer size.
/// </summary>
/// <param name="baseOutputStream">
/// the output stream where deflated output should be written.
/// </param>
/// <param name="deflater">
/// the underlying deflater.
/// </param>
public DeflaterOutputStream(Stream baseOutputStream, Deflater deflater)
: this(baseOutputStream, deflater, 512)
{
}
/// <summary>
/// Creates a new DeflaterOutputStream with the given Deflater and
/// buffer size.
/// </summary>
/// <param name="baseOutputStream">
/// The output stream where deflated output is written.
/// </param>
/// <param name="deflater">
/// The underlying deflater to use
/// </param>
/// <param name="bufferSize">
/// The buffer size in bytes to use when deflating (minimum value 512)
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// bufsize is less than or equal to zero.
/// </exception>
/// <exception cref="ArgumentException">
/// baseOutputStream does not support writing
/// </exception>
/// <exception cref="ArgumentNullException">
/// deflater instance is null
/// </exception>
public DeflaterOutputStream(Stream baseOutputStream, Deflater deflater, int bufferSize)
{
if (baseOutputStream == null)
{
throw new ArgumentNullException(nameof(baseOutputStream));
}
if (baseOutputStream.CanWrite == false)
{
throw new ArgumentException("Must support writing", nameof(baseOutputStream));
}
if (bufferSize < 512)
{
throw new ArgumentOutOfRangeException(nameof(bufferSize));
}
baseOutputStream_ = baseOutputStream;
buffer_ = new byte[bufferSize];
deflater_ = deflater ?? throw new ArgumentNullException(nameof(deflater));
}
#endregion Constructors
#region Public API
/// <summary>
/// Finishes the stream by calling Finish() on the deflater.
/// </summary>
/// <exception cref="SharpZipBaseException">
/// Not all input is deflated
/// </exception>
public virtual void Finish()
{
deflater_.Finish();
while (!deflater_.IsFinished)
{
int len = deflater_.Deflate(buffer_, 0, buffer_.Length);
if (len <= 0)
{
break;
}
if (cryptoTransform_ != null)
{
EncryptBlock(buffer_, 0, len);
}
baseOutputStream_.Write(buffer_, 0, len);
}
if (!deflater_.IsFinished)
{
throw new SharpZipBaseException("Can't deflate all input?");
}
baseOutputStream_.Flush();
if (cryptoTransform_ != null)
{
if (cryptoTransform_ is ZipAESTransform)
{
AESAuthCode = ((ZipAESTransform)cryptoTransform_).GetAuthCode();
}
cryptoTransform_.Dispose();
cryptoTransform_ = null;
}
}
/// <summary>
/// Gets or sets a flag indicating ownership of underlying stream.
/// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.
/// </summary>
/// <remarks>The default value is true.</remarks>
public bool IsStreamOwner { get; set; } = true;
/// <summary>
/// Allows the client to determine if an entry can be patched after it is added
/// </summary>
public bool CanPatchEntries
{
get
{
return baseOutputStream_.CanSeek;
}
}
#endregion Public API
#region Encryption
/// <summary>
/// The CryptoTransform currently being used to encrypt the compressed data.
/// </summary>
protected ICryptoTransform cryptoTransform_;
/// <summary>
/// Returns the 10 byte AUTH CODE to be appended immediately following the AES data stream.
/// </summary>
protected byte[] AESAuthCode;
/// <summary>
/// Encrypt a block of data
/// </summary>
/// <param name="buffer">
/// Data to encrypt. NOTE the original contents of the buffer are lost
/// </param>
/// <param name="offset">
/// Offset of first byte in buffer to encrypt
/// </param>
/// <param name="length">
/// Number of bytes in buffer to encrypt
/// </param>
protected void EncryptBlock(byte[] buffer, int offset, int length)
{
cryptoTransform_.TransformBlock(buffer, 0, length, buffer, 0);
}
#endregion Encryption
#region Deflation Support
/// <summary>
/// Deflates everything in the input buffers. This will call
/// <code>Deflate()</code> on the deflater until all bytes from the input buffers
/// are processed.
/// </summary>
protected void Deflate()
{
Deflate(false);
}
private void Deflate(bool flushing)
{
while (flushing || !deflater_.IsNeedingInput)
{
int deflateCount = deflater_.Deflate(buffer_, 0, buffer_.Length);
if (deflateCount <= 0)
{
break;
}
if (cryptoTransform_ != null)
{
EncryptBlock(buffer_, 0, deflateCount);
}
baseOutputStream_.Write(buffer_, 0, deflateCount);
}
if (!deflater_.IsNeedingInput)
{
throw new SharpZipBaseException("DeflaterOutputStream can't deflate all input?");
}
}
#endregion Deflation Support
#region Stream Overrides
/// <summary>
/// Gets value indicating stream can be read from
/// </summary>
public override bool CanRead
{
get
{
return false;
}
}
/// <summary>
/// Gets a value indicating if seeking is supported for this stream
/// This property always returns false
/// </summary>
public override bool CanSeek
{
get
{
return false;
}
}
/// <summary>
/// Get value indicating if this stream supports writing
/// </summary>
public override bool CanWrite
{
get
{
return baseOutputStream_.CanWrite;
}
}
/// <summary>
/// Get current length of stream
/// </summary>
public override long Length
{
get
{
return baseOutputStream_.Length;
}
}
/// <summary>
/// Gets the current position within the stream.
/// </summary>
/// <exception cref="NotSupportedException">Any attempt to set position</exception>
public override long Position
{
get
{
return baseOutputStream_.Position;
}
set
{
throw new NotSupportedException("Position property not supported");
}
}
/// <summary>
/// Sets the current position of this stream to the given value. Not supported by this class!
/// </summary>
/// <param name="offset">The offset relative to the <paramref name="origin"/> to seek.</param>
/// <param name="origin">The <see cref="SeekOrigin"/> to seek from.</param>
/// <returns>The new position in the stream.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException("DeflaterOutputStream Seek not supported");
}
/// <summary>
/// Sets the length of this stream to the given value. Not supported by this class!
/// </summary>
/// <param name="value">The new stream length.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void SetLength(long value)
{
throw new NotSupportedException("DeflaterOutputStream SetLength not supported");
}
/// <summary>
/// Read a byte from stream advancing position by one
/// </summary>
/// <returns>The byte read cast to an int. The value is -1 if at the end of the stream.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override int ReadByte()
{
throw new NotSupportedException("DeflaterOutputStream ReadByte not supported");
}
/// <summary>
/// Read a block of bytes from stream
/// </summary>
/// <param name="buffer">The buffer to store read data in.</param>
/// <param name="offset">The offset to start storing at.</param>
/// <param name="count">The maximum number of bytes to read.</param>
/// <returns>The actual number of bytes read. Zero if end of stream is detected.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override int Read(byte[] buffer, int offset, int count)
{
throw new NotSupportedException("DeflaterOutputStream Read not supported");
}
/// <summary>
/// Flushes the stream by calling <see cref="Flush">Flush</see> on the deflater and then
/// on the underlying stream. This ensures that all bytes are flushed.
/// </summary>
public override void Flush()
{
deflater_.Flush();
Deflate(true);
baseOutputStream_.Flush();
}
/// <summary>
/// Calls <see cref="Finish"/> and closes the underlying
/// stream when <see cref="IsStreamOwner"></see> is true.
/// </summary>
protected override void Dispose(bool disposing)
{
if (!isClosed_)
{
isClosed_ = true;
try
{
Finish();
if (cryptoTransform_ != null)
{
GetAuthCodeIfAES();
cryptoTransform_.Dispose();
cryptoTransform_ = null;
}
}
finally
{
if (IsStreamOwner)
{
baseOutputStream_.Dispose();
}
}
}
}
/// <summary>
/// Get the Auth code for AES encrypted entries
/// </summary>
protected void GetAuthCodeIfAES()
{
if (cryptoTransform_ is ZipAESTransform)
{
AESAuthCode = ((ZipAESTransform)cryptoTransform_).GetAuthCode();
}
}
/// <summary>
/// Writes a single byte to the compressed output stream.
/// </summary>
/// <param name="value">
/// The byte value.
/// </param>
public override void WriteByte(byte value)
{
byte[] b = new byte[1];
b[0] = value;
Write(b, 0, 1);
}
/// <summary>
/// Writes bytes from an array to the compressed stream.
/// </summary>
/// <param name="buffer">
/// The byte array
/// </param>
/// <param name="offset">
/// The offset into the byte array where to start.
/// </param>
/// <param name="count">
/// The number of bytes to write.
/// </param>
public override void Write(byte[] buffer, int offset, int count)
{
deflater_.SetInput(buffer, offset, count);
Deflate();
}
#endregion Stream Overrides
#region Instance Fields
/// <summary>
/// This buffer is used temporarily to retrieve the bytes from the
/// deflater and write them to the underlying output stream.
/// </summary>
private byte[] buffer_;
/// <summary>
/// The deflater which is used to deflate the stream.
/// </summary>
protected Deflater deflater_;
/// <summary>
/// Base stream the deflater depends on.
/// </summary>
protected Stream baseOutputStream_;
private bool isClosed_;
#endregion Instance Fields
}
}

View File

@@ -0,0 +1,713 @@
using System;
using System.IO;
using System.Security.Cryptography;
namespace ICSharpCode.SharpZipLib.Zip.Compression.Streams
{
/// <summary>
/// An input buffer customised for use by <see cref="InflaterInputStream"/>
/// </summary>
/// <remarks>
/// The buffer supports decryption of incoming data.
/// </remarks>
public class InflaterInputBuffer
{
#region Constructors
/// <summary>
/// Initialise a new instance of <see cref="InflaterInputBuffer"/> with a default buffer size
/// </summary>
/// <param name="stream">The stream to buffer.</param>
public InflaterInputBuffer(Stream stream) : this(stream, 4096)
{
}
/// <summary>
/// Initialise a new instance of <see cref="InflaterInputBuffer"/>
/// </summary>
/// <param name="stream">The stream to buffer.</param>
/// <param name="bufferSize">The size to use for the buffer</param>
/// <remarks>A minimum buffer size of 1KB is permitted. Lower sizes are treated as 1KB.</remarks>
public InflaterInputBuffer(Stream stream, int bufferSize)
{
inputStream = stream;
if (bufferSize < 1024)
{
bufferSize = 1024;
}
rawData = new byte[bufferSize];
clearText = rawData;
}
#endregion Constructors
/// <summary>
/// Get the number of bytes in <see cref="RawData"/>
/// </summary>
public int RawLength
{
get
{
return rawLength;
}
}
/// <summary>
/// Get the contents of the raw data buffer.
/// </summary>
/// <remarks>This may contain encrypted data.</remarks>
public byte[] RawData
{
get
{
return rawData;
}
}
/// <summary>
/// Get the number of useable bytes in <see cref="ClearText"/>
/// </summary>
public int ClearTextLength
{
get
{
return clearTextLength;
}
}
/// <summary>
/// Get the contents of the clear text buffer.
/// </summary>
public byte[] ClearText
{
get
{
return clearText;
}
}
/// <summary>
/// Get/set the number of bytes available
/// </summary>
public int Available
{
get { return available; }
set { available = value; }
}
/// <summary>
/// Call <see cref="Inflater.SetInput(byte[], int, int)"/> passing the current clear text buffer contents.
/// </summary>
/// <param name="inflater">The inflater to set input for.</param>
public void SetInflaterInput(Inflater inflater)
{
if (available > 0)
{
inflater.SetInput(clearText, clearTextLength - available, available);
available = 0;
}
}
/// <summary>
/// Fill the buffer from the underlying input stream.
/// </summary>
public void Fill()
{
rawLength = 0;
int toRead = rawData.Length;
while (toRead > 0 && inputStream.CanRead)
{
int count = inputStream.Read(rawData, rawLength, toRead);
if (count <= 0)
{
break;
}
rawLength += count;
toRead -= count;
}
if (cryptoTransform != null)
{
clearTextLength = cryptoTransform.TransformBlock(rawData, 0, rawLength, clearText, 0);
}
else
{
clearTextLength = rawLength;
}
available = clearTextLength;
}
/// <summary>
/// Read a buffer directly from the input stream
/// </summary>
/// <param name="buffer">The buffer to fill</param>
/// <returns>Returns the number of bytes read.</returns>
public int ReadRawBuffer(byte[] buffer)
{
return ReadRawBuffer(buffer, 0, buffer.Length);
}
/// <summary>
/// Read a buffer directly from the input stream
/// </summary>
/// <param name="outBuffer">The buffer to read into</param>
/// <param name="offset">The offset to start reading data into.</param>
/// <param name="length">The number of bytes to read.</param>
/// <returns>Returns the number of bytes read.</returns>
public int ReadRawBuffer(byte[] outBuffer, int offset, int length)
{
if (length < 0)
{
throw new ArgumentOutOfRangeException(nameof(length));
}
int currentOffset = offset;
int currentLength = length;
while (currentLength > 0)
{
if (available <= 0)
{
Fill();
if (available <= 0)
{
return 0;
}
}
int toCopy = Math.Min(currentLength, available);
System.Array.Copy(rawData, rawLength - (int)available, outBuffer, currentOffset, toCopy);
currentOffset += toCopy;
currentLength -= toCopy;
available -= toCopy;
}
return length;
}
/// <summary>
/// Read clear text data from the input stream.
/// </summary>
/// <param name="outBuffer">The buffer to add data to.</param>
/// <param name="offset">The offset to start adding data at.</param>
/// <param name="length">The number of bytes to read.</param>
/// <returns>Returns the number of bytes actually read.</returns>
public int ReadClearTextBuffer(byte[] outBuffer, int offset, int length)
{
if (length < 0)
{
throw new ArgumentOutOfRangeException(nameof(length));
}
int currentOffset = offset;
int currentLength = length;
while (currentLength > 0)
{
if (available <= 0)
{
Fill();
if (available <= 0)
{
return 0;
}
}
int toCopy = Math.Min(currentLength, available);
Array.Copy(clearText, clearTextLength - (int)available, outBuffer, currentOffset, toCopy);
currentOffset += toCopy;
currentLength -= toCopy;
available -= toCopy;
}
return length;
}
/// <summary>
/// Read a <see cref="byte"/> from the input stream.
/// </summary>
/// <returns>Returns the byte read.</returns>
public byte ReadLeByte()
{
if (available <= 0)
{
Fill();
if (available <= 0)
{
throw new ZipException("EOF in header");
}
}
byte result = rawData[rawLength - available];
available -= 1;
return result;
}
/// <summary>
/// Read an <see cref="short"/> in little endian byte order.
/// </summary>
/// <returns>The short value read, cast to an int.</returns>
public int ReadLeShort()
{
return ReadLeByte() | (ReadLeByte() << 8);
}
/// <summary>
/// Read an <see cref="int"/> in little endian byte order.
/// </summary>
/// <returns>The int value read.</returns>
public int ReadLeInt()
{
return ReadLeShort() | (ReadLeShort() << 16);
}
/// <summary>
/// Read a <see cref="long"/> in little endian byte order.
/// </summary>
/// <returns>The long value read.</returns>
public long ReadLeLong()
{
return (uint)ReadLeInt() | ((long)ReadLeInt() << 32);
}
/// <summary>
/// Get/set the <see cref="ICryptoTransform"/> to apply to any data.
/// </summary>
/// <remarks>Set this value to null to have no transform applied.</remarks>
public ICryptoTransform CryptoTransform
{
set
{
cryptoTransform = value;
if (cryptoTransform != null)
{
if (rawData == clearText)
{
if (internalClearText == null)
{
internalClearText = new byte[rawData.Length];
}
clearText = internalClearText;
}
clearTextLength = rawLength;
if (available > 0)
{
cryptoTransform.TransformBlock(rawData, rawLength - available, available, clearText, rawLength - available);
}
}
else
{
clearText = rawData;
clearTextLength = rawLength;
}
}
}
#region Instance Fields
private int rawLength;
private byte[] rawData;
private int clearTextLength;
private byte[] clearText;
private byte[] internalClearText;
private int available;
private ICryptoTransform cryptoTransform;
private Stream inputStream;
#endregion Instance Fields
}
/// <summary>
/// This filter stream is used to decompress data compressed using the "deflate"
/// format. The "deflate" format is described in RFC 1951.
///
/// This stream may form the basis for other decompression filters, such
/// as the <see cref="ICSharpCode.SharpZipLib.GZip.GZipInputStream">GZipInputStream</see>.
///
/// Author of the original java version : John Leuner.
/// </summary>
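/// <example>
/// A minimal usage sketch (not part of the original source); <c>path</c> points to a
/// zlib-wrapped deflate stream such as one produced by <see cref="DeflaterOutputStream"/>.
/// <code>
/// byte[] DecompressFile(string path)
/// {
///     using (var inflaterStream = new InflaterInputStream(System.IO.File.OpenRead(path)))
///     using (var result = new System.IO.MemoryStream())
///     {
///         byte[] buffer = new byte[4096];
///         int bytesRead;
///         while ((bytesRead = inflaterStream.Read(buffer, 0, buffer.Length)) > 0)
///         {
///             result.Write(buffer, 0, bytesRead);
///         }
///         return result.ToArray();
///     }
/// }
/// </code>
/// </example>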
public class InflaterInputStream : Stream
{
#region Constructors
/// <summary>
/// Create an InflaterInputStream with the default decompressor
/// and a default buffer size of 4KB.
/// </summary>
/// <param name = "baseInputStream">
/// The InputStream to read bytes from
/// </param>
public InflaterInputStream(Stream baseInputStream)
: this(baseInputStream, new Inflater(), 4096)
{
}
/// <summary>
/// Create an InflaterInputStream with the specified decompressor
/// and a default buffer size of 4KB.
/// </summary>
/// <param name = "baseInputStream">
/// The source of input data
/// </param>
/// <param name = "inf">
/// The decompressor used to decompress data read from baseInputStream
/// </param>
public InflaterInputStream(Stream baseInputStream, Inflater inf)
: this(baseInputStream, inf, 4096)
{
}
/// <summary>
/// Create an InflaterInputStream with the specified decompressor
/// and the specified buffer size.
/// </summary>
/// <param name = "baseInputStream">
/// The InputStream to read bytes from
/// </param>
/// <param name = "inflater">
/// The decompressor to use
/// </param>
/// <param name = "bufferSize">
/// Size of the buffer to use
/// </param>
public InflaterInputStream(Stream baseInputStream, Inflater inflater, int bufferSize)
{
if (baseInputStream == null)
{
throw new ArgumentNullException(nameof(baseInputStream));
}
if (inflater == null)
{
throw new ArgumentNullException(nameof(inflater));
}
if (bufferSize <= 0)
{
throw new ArgumentOutOfRangeException(nameof(bufferSize));
}
this.baseInputStream = baseInputStream;
this.inf = inflater;
inputBuffer = new InflaterInputBuffer(baseInputStream, bufferSize);
}
#endregion Constructors
/// <summary>
/// Gets or sets a flag indicating ownership of underlying stream.
/// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.
/// </summary>
/// <remarks>The default value is true.</remarks>
public bool IsStreamOwner { get; set; } = true;
/// <summary>
/// Skip specified number of bytes of uncompressed data
/// </summary>
/// <param name ="count">
/// Number of bytes to skip
/// </param>
/// <returns>
/// The number of bytes skipped, zero if the end of
/// stream has been reached
/// </returns>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="count">The number of bytes</paramref> to skip is less than or equal to zero.
/// </exception>
public long Skip(long count)
{
if (count <= 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
// v0.80 Skip by seeking if underlying stream supports it...
if (baseInputStream.CanSeek)
{
baseInputStream.Seek(count, SeekOrigin.Current);
return count;
}
else
{
int length = 2048;
if (count < length)
{
length = (int)count;
}
byte[] tmp = new byte[length];
int readCount = 1;
long toSkip = count;
while ((toSkip > 0) && (readCount > 0))
{
if (toSkip < length)
{
length = (int)toSkip;
}
readCount = baseInputStream.Read(tmp, 0, length);
toSkip -= readCount;
}
return count - toSkip;
}
}
/// <summary>
/// Clear any cryptographic state.
/// </summary>
protected void StopDecrypting()
{
inputBuffer.CryptoTransform = null;
}
/// <summary>
/// Returns 0 once the end of the stream (EOF) has been reached.
/// Otherwise returns 1.
/// </summary>
public virtual int Available
{
get
{
return inf.IsFinished ? 0 : 1;
}
}
/// <summary>
/// Fills the buffer with more data to decompress.
/// </summary>
/// <exception cref="SharpZipBaseException">
/// Stream ends early
/// </exception>
protected void Fill()
{
// Protect against redundant calls
if (inputBuffer.Available <= 0)
{
inputBuffer.Fill();
if (inputBuffer.Available <= 0)
{
throw new SharpZipBaseException("Unexpected EOF");
}
}
inputBuffer.SetInflaterInput(inf);
}
#region Stream Overrides
/// <summary>
/// Gets a value indicating whether the current stream supports reading
/// </summary>
public override bool CanRead
{
get
{
return baseInputStream.CanRead;
}
}
/// <summary>
/// Gets a value of false indicating seeking is not supported for this stream.
/// </summary>
public override bool CanSeek
{
get
{
return false;
}
}
/// <summary>
/// Gets a value of false indicating that this stream is not writeable.
/// </summary>
public override bool CanWrite
{
get
{
return false;
}
}
/// <summary>
/// A value representing the length of the stream in bytes.
/// </summary>
public override long Length
{
get
{
//return inputBuffer.RawLength;
throw new NotSupportedException("InflaterInputStream Length is not supported");
}
}
/// <summary>
/// The current position within the stream.
/// Throws a NotSupportedException when attempting to set the position
/// </summary>
/// <exception cref="NotSupportedException">Attempting to set the position</exception>
public override long Position
{
get
{
return baseInputStream.Position;
}
set
{
throw new NotSupportedException("InflaterInputStream Position not supported");
}
}
/// <summary>
/// Flushes the baseInputStream
/// </summary>
public override void Flush()
{
baseInputStream.Flush();
}
/// <summary>
/// Sets the position within the current stream
/// Always throws a NotSupportedException
/// </summary>
/// <param name="offset">The relative offset to seek to.</param>
/// <param name="origin">The <see cref="SeekOrigin"/> defining where to seek from.</param>
/// <returns>The new position in the stream.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException("Seek not supported");
}
/// <summary>
/// Set the length of the current stream
/// Always throws a NotSupportedException
/// </summary>
/// <param name="value">The new length value for the stream.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void SetLength(long value)
{
throw new NotSupportedException("InflaterInputStream SetLength not supported");
}
/// <summary>
/// Writes a sequence of bytes to stream and advances the current position
/// This method always throws a NotSupportedException
/// </summary>
/// <param name="buffer">The buffer containing data to write.</param>
/// <param name="offset">The offset of the first byte to write.</param>
/// <param name="count">The number of bytes to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException("InflaterInputStream Write not supported");
}
/// <summary>
/// Writes one byte to the current stream and advances the current position
/// Always throws a NotSupportedException
/// </summary>
/// <param name="value">The byte to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void WriteByte(byte value)
{
throw new NotSupportedException("InflaterInputStream WriteByte not supported");
}
/// <summary>
/// Closes the input stream. When <see cref="IsStreamOwner"></see>
/// is true the underlying stream is also closed.
/// </summary>
protected override void Dispose(bool disposing)
{
if (!isClosed)
{
isClosed = true;
if (IsStreamOwner)
{
baseInputStream.Dispose();
}
}
}
/// <summary>
/// Reads decompressed data into the provided buffer byte array
/// </summary>
/// <param name ="buffer">
/// The array to read and decompress data into
/// </param>
/// <param name ="offset">
/// The offset indicating where the data should be placed
/// </param>
/// <param name ="count">
/// The number of bytes to decompress
/// </param>
/// <returns>The number of bytes read. Zero signals the end of stream</returns>
/// <exception cref="SharpZipBaseException">
/// Inflater needs a dictionary
/// </exception>
public override int Read(byte[] buffer, int offset, int count)
{
if (inf.IsNeedingDictionary)
{
throw new SharpZipBaseException("Need a dictionary");
}
int remainingBytes = count;
while (true)
{
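// Inflate as much as possible, refilling the inflater's input whenever it runs dry.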
int bytesRead = inf.Inflate(buffer, offset, remainingBytes);
offset += bytesRead;
remainingBytes -= bytesRead;
if (remainingBytes == 0 || inf.IsFinished)
{
break;
}
if (inf.IsNeedingInput)
{
Fill();
}
else if (bytesRead == 0)
{
throw new ZipException("Invalid input data");
}
}
return count - remainingBytes;
}
#endregion Stream Overrides
#region Instance Fields
/// <summary>
/// Decompressor for this stream
/// </summary>
protected Inflater inf;
/// <summary>
/// <see cref="InflaterInputBuffer">Input buffer</see> for this stream.
/// </summary>
protected InflaterInputBuffer inputBuffer;
/// <summary>
/// Base stream the inflater reads from.
/// </summary>
private Stream baseInputStream;
/// <summary>
/// The compressed size
/// </summary>
protected long csize;
/// <summary>
/// Flag indicating whether this instance has been closed or not.
/// </summary>
private bool isClosed;
#endregion Instance Fields
}
}

View File

@@ -0,0 +1,220 @@
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression.Streams
{
/// <summary>
/// Contains the output from the Inflation process.
/// We need to have a window so that we can refer backwards into the output stream
/// to repeat stuff.<br/>
/// Author of the original java version : John Leuner
/// </summary>
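/// <example>
/// A minimal sketch of the window mechanics (values are illustrative only):
/// <code>
/// var window = new OutputWindow();
/// window.Write('a');
/// window.Write('b');
/// window.Repeat(2, 2);                          // appends "ab" again by copying from 2 bytes back
/// var output = new byte[4];
/// int copied = window.CopyOutput(output, 0, 4); // copied == 4, output now holds "abab"
/// </code>
/// </example>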
public class OutputWindow
{
#region Constants
private const int WindowSize = 1 << 15;
private const int WindowMask = WindowSize - 1;
#endregion Constants
#region Instance Fields
private byte[] window = new byte[WindowSize]; //The window is 2^15 bytes
private int windowEnd;
private int windowFilled;
#endregion Instance Fields
/// <summary>
/// Write a byte to this output window
/// </summary>
/// <param name="value">value to write</param>
/// <exception cref="InvalidOperationException">
/// if window is full
/// </exception>
public void Write(int value)
{
if (windowFilled++ == WindowSize)
{
throw new InvalidOperationException("Window full");
}
window[windowEnd++] = (byte)value;
windowEnd &= WindowMask;
}
private void SlowRepeat(int repStart, int length, int distance)
{
while (length-- > 0)
{
window[windowEnd++] = window[repStart++];
windowEnd &= WindowMask;
repStart &= WindowMask;
}
}
/// <summary>
/// Append a byte pattern already in the window itself
/// </summary>
/// <param name="length">length of pattern to copy</param>
/// <param name="distance">distance from end of window pattern occurs</param>
/// <exception cref="InvalidOperationException">
/// If the repeated data overflows the window
/// </exception>
public void Repeat(int length, int distance)
{
if ((windowFilled += length) > WindowSize)
{
throw new InvalidOperationException("Window full");
}
int repStart = (windowEnd - distance) & WindowMask;
int border = WindowSize - length;
if ((repStart <= border) && (windowEnd < border))
{
if (length <= distance)
{
System.Array.Copy(window, repStart, window, windowEnd, length);
windowEnd += length;
}
else
{
// We have to copy manually, since the repeat pattern overlaps.
while (length-- > 0)
{
window[windowEnd++] = window[repStart++];
}
}
}
else
{
SlowRepeat(repStart, length, distance);
}
}
/// <summary>
/// Copy from input manipulator to internal window
/// </summary>
/// <param name="input">source of data</param>
/// <param name="length">length of data to copy</param>
/// <returns>the number of bytes copied</returns>
public int CopyStored(StreamManipulator input, int length)
{
length = Math.Min(Math.Min(length, WindowSize - windowFilled), input.AvailableBytes);
int copied;
int tailLen = WindowSize - windowEnd;
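// The copy may wrap past the end of the circular window, so it is done in up to two pieces.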
if (length > tailLen)
{
copied = input.CopyBytes(window, windowEnd, tailLen);
if (copied == tailLen)
{
copied += input.CopyBytes(window, 0, length - tailLen);
}
}
else
{
copied = input.CopyBytes(window, windowEnd, length);
}
windowEnd = (windowEnd + copied) & WindowMask;
windowFilled += copied;
return copied;
}
/// <summary>
/// Copy dictionary to window
/// </summary>
/// <param name="dictionary">source dictionary</param>
/// <param name="offset">offset of start in source dictionary</param>
/// <param name="length">length of dictionary</param>
/// <exception cref="InvalidOperationException">
/// If window isn't empty
/// </exception>
public void CopyDict(byte[] dictionary, int offset, int length)
{
if (dictionary == null)
{
throw new ArgumentNullException(nameof(dictionary));
}
if (windowFilled > 0)
{
throw new InvalidOperationException();
}
if (length > WindowSize)
{
offset += length - WindowSize;
length = WindowSize;
}
System.Array.Copy(dictionary, offset, window, 0, length);
windowEnd = length & WindowMask;
}
/// <summary>
/// Get remaining unfilled space in window
/// </summary>
/// <returns>Number of bytes left in window</returns>
public int GetFreeSpace()
{
return WindowSize - windowFilled;
}
/// <summary>
/// Get bytes available for output in window
/// </summary>
/// <returns>Number of bytes filled</returns>
public int GetAvailable()
{
return windowFilled;
}
/// <summary>
/// Copy contents of window to output
/// </summary>
/// <param name="output">buffer to copy to</param>
/// <param name="offset">offset to start at</param>
/// <param name="len">number of bytes to count</param>
/// <returns>The number of bytes copied</returns>
/// <exception cref="InvalidOperationException">
/// If a window underflow occurs
/// </exception>
public int CopyOutput(byte[] output, int offset, int len)
{
int copyEnd = windowEnd;
if (len > windowFilled)
{
len = windowFilled;
}
else
{
copyEnd = (windowEnd - windowFilled + len) & WindowMask;
}
int copied = len;
int tailLen = len - copyEnd;
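// A positive tailLen means the region wraps: its first tailLen bytes sit at the end of the window.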
if (tailLen > 0)
{
System.Array.Copy(window, WindowSize - tailLen, output, offset, tailLen);
offset += tailLen;
len = copyEnd;
}
System.Array.Copy(window, copyEnd - len, output, offset, len);
windowFilled -= copied;
if (windowFilled < 0)
{
throw new InvalidOperationException();
}
return copied;
}
/// <summary>
/// Reset by clearing window so <see cref="GetAvailable">GetAvailable</see> returns 0
/// </summary>
public void Reset()
{
windowFilled = windowEnd = 0;
}
}
}

View File

@@ -0,0 +1,298 @@
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression.Streams
{
/// <summary>
/// This class allows us to retrieve a specified number of bits from
/// the input buffer, as well as copy big byte blocks.
///
/// It uses an int buffer to store up to 31 bits for direct
/// manipulation. This guarantees that we can get at least 16 bits,
/// but we only need at most 15, so this is all safe.
///
/// There are some optimizations in this class, for example, you must
/// never peek more than 8 bits more than needed, and you must first
/// peek bits before you may drop them. This is not a general purpose
/// class but optimized for the behaviour of the Inflater.
///
/// authors of the original java version : John Leuner, Jochen Hoenicke
/// </summary>
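/// <example>
/// A minimal sketch of the peek/drop pattern (the input bytes are illustrative only):
/// <code>
/// var input = new StreamManipulator();
/// input.SetInput(new byte[] { 0x5A, 0x3C }, 0, 2);
/// int threeBits = input.PeekBits(3);   // low three bits of 0x5A, i.e. 2
/// input.DropBits(3);
/// int nextFive = input.GetBits(5);     // next five bits
/// </code>
/// </example>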
public class StreamManipulator
{
/// <summary>
/// Get the next sequence of bits but don't increase input pointer. bitCount must be
/// less than or equal to 16, and if this call succeeds you must drop
/// at least bitCount - 8 bits in the next call.
/// </summary>
/// <param name="bitCount">The number of bits to peek.</param>
/// <returns>
/// the value of the bits, or -1 if not enough bits available.
/// </returns>
public int PeekBits(int bitCount)
{
if (bitsInBuffer_ < bitCount)
{
if (windowStart_ == windowEnd_)
{
return -1; // ok
}
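// Top up the bit buffer with the next two input bytes, least significant byte first.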
buffer_ |= (uint)((window_[windowStart_++] & 0xff |
(window_[windowStart_++] & 0xff) << 8) << bitsInBuffer_);
bitsInBuffer_ += 16;
}
return (int)(buffer_ & ((1 << bitCount) - 1));
}
/// <summary>
/// Tries to grab the next <paramref name="bitCount"/> bits from the input and
/// sets <paramref name="output"/> to the value, adding <paramref name="outputOffset"/>.
/// </summary>
/// <returns>true if enough bits could be read, otherwise false</returns>
public bool TryGetBits(int bitCount, ref int output, int outputOffset = 0)
{
var bits = PeekBits(bitCount);
if (bits < 0)
{
return false;
}
output = bits + outputOffset;
DropBits(bitCount);
return true;
}
/// <summary>
/// Tries to grab the next <paramref name="bitCount"/> bits from the input and
/// sets <paramref name="index"/> of <paramref name="array"/> to the value.
/// </summary>
/// <returns>true if enough bits could be read, otherwise false</returns>
public bool TryGetBits(int bitCount, ref byte[] array, int index)
{
var bits = PeekBits(bitCount);
if (bits < 0)
{
return false;
}
array[index] = (byte)bits;
DropBits(bitCount);
return true;
}
/// <summary>
/// Drops the next n bits from the input. You should have called PeekBits
/// with a bigger or equal n before, to make sure that enough bits are in
/// the bit buffer.
/// </summary>
/// <param name="bitCount">The number of bits to drop.</param>
public void DropBits(int bitCount)
{
buffer_ >>= bitCount;
bitsInBuffer_ -= bitCount;
}
/// <summary>
/// Gets the next n bits and increases input pointer. This is equivalent
/// to <see cref="PeekBits"/> followed by <see cref="DropBits"/>, except for correct error handling.
/// </summary>
/// <param name="bitCount">The number of bits to retrieve.</param>
/// <returns>
/// the value of the bits, or -1 if not enough bits available.
/// </returns>
public int GetBits(int bitCount)
{
int bits = PeekBits(bitCount);
if (bits >= 0)
{
DropBits(bitCount);
}
return bits;
}
/// <summary>
/// Gets the number of bits available in the bit buffer. This must only be
/// called when a previous PeekBits() returned -1.
/// </summary>
/// <returns>
/// the number of bits available.
/// </returns>
public int AvailableBits
{
get
{
return bitsInBuffer_;
}
}
/// <summary>
/// Gets the number of bytes available.
/// </summary>
/// <returns>
/// The number of bytes available.
/// </returns>
public int AvailableBytes
{
get
{
return windowEnd_ - windowStart_ + (bitsInBuffer_ >> 3);
}
}
/// <summary>
/// Skips to the next byte boundary.
/// </summary>
public void SkipToByteBoundary()
{
buffer_ >>= (bitsInBuffer_ & 7);
bitsInBuffer_ &= ~7;
}
/// <summary>
/// Returns true when SetInput can be called
/// </summary>
public bool IsNeedingInput
{
get
{
return windowStart_ == windowEnd_;
}
}
/// <summary>
/// Copies bytes from input buffer to output buffer starting
/// at output[offset]. You have to make sure that the buffer is
/// byte aligned. If not enough bytes are available, copies fewer
/// bytes.
/// </summary>
/// <param name="output">
/// The buffer to copy bytes to.
/// </param>
/// <param name="offset">
/// The offset in the buffer at which copying starts
/// </param>
/// <param name="length">
/// The length to copy, 0 is allowed.
/// </param>
/// <returns>
/// The number of bytes copied, 0 if no bytes were available.
/// </returns>
/// <exception cref="ArgumentOutOfRangeException">
/// Length is less than zero
/// </exception>
/// <exception cref="InvalidOperationException">
/// Bit buffer isn't byte aligned
/// </exception>
public int CopyBytes(byte[] output, int offset, int length)
{
if (length < 0)
{
throw new ArgumentOutOfRangeException(nameof(length));
}
if ((bitsInBuffer_ & 7) != 0)
{
// bits_in_buffer may only be 0 or a multiple of 8
throw new InvalidOperationException("Bit buffer is not byte aligned!");
}
int count = 0;
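// First drain any whole bytes still held in the bit buffer.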
while ((bitsInBuffer_ > 0) && (length > 0))
{
output[offset++] = (byte)buffer_;
buffer_ >>= 8;
bitsInBuffer_ -= 8;
length--;
count++;
}
if (length == 0)
{
return count;
}
int avail = windowEnd_ - windowStart_;
if (length > avail)
{
length = avail;
}
System.Array.Copy(window_, windowStart_, output, offset, length);
windowStart_ += length;
if (((windowStart_ - windowEnd_) & 1) != 0)
{
// We always want an even number of bytes in input, see peekBits
buffer_ = (uint)(window_[windowStart_++] & 0xff);
bitsInBuffer_ = 8;
}
return count + length;
}
/// <summary>
/// Resets state and empties internal buffers
/// </summary>
public void Reset()
{
buffer_ = 0;
windowStart_ = windowEnd_ = bitsInBuffer_ = 0;
}
/// <summary>
/// Add more input for consumption.
/// Only call when IsNeedingInput returns true
/// </summary>
/// <param name="buffer">data to be input</param>
/// <param name="offset">offset of first byte of input</param>
/// <param name="count">number of bytes of input to add.</param>
public void SetInput(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset), "Cannot be negative");
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), "Cannot be negative");
}
if (windowStart_ < windowEnd_)
{
throw new InvalidOperationException("Old input was not completely processed");
}
int end = offset + count;
// We want to throw an ArgumentOutOfRangeException early.
// Note the check also handles integer wrap around.
if ((offset > end) || (end > buffer.Length))
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if ((count & 1) != 0)
{
// We always want an even number of bytes in input, see PeekBits
buffer_ |= (uint)((buffer[offset++] & 0xff) << bitsInBuffer_);
bitsInBuffer_ += 8;
}
window_ = buffer;
windowStart_ = offset;
windowEnd_ = end;
}
#region Instance Fields
private byte[] window_;
private int windowStart_;
private int windowEnd_;
private uint buffer_;
private int bitsInBuffer_;
#endregion Instance Fields
}
}

View File

@@ -0,0 +1,975 @@
using ICSharpCode.SharpZipLib.Core;
using ICSharpCode.SharpZipLib.Zip.Compression;
using System;
using System.IO;
using static ICSharpCode.SharpZipLib.Zip.Compression.Deflater;
using static ICSharpCode.SharpZipLib.Zip.ZipEntryFactory;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// FastZipEvents supports all events applicable to <see cref="FastZip">FastZip</see> operations.
/// </summary>
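/// <example>
/// A minimal sketch of attaching a handler before running a FastZip operation
/// (the console output is illustrative only):
/// <code>
/// var events = new FastZipEvents();
/// events.ProcessFile = (sender, args) => Console.WriteLine("Processing " + args.Name);
/// var fastZip = new FastZip(events);
/// </code>
/// </example>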
public class FastZipEvents
{
/// <summary>
/// Delegate to invoke when processing directories.
/// </summary>
public event EventHandler<DirectoryEventArgs> ProcessDirectory;
/// <summary>
/// Delegate to invoke when processing files.
/// </summary>
public ProcessFileHandler ProcessFile;
/// <summary>
/// Delegate to invoke during processing of files.
/// </summary>
public ProgressHandler Progress;
/// <summary>
/// Delegate to invoke when processing for a file has been completed.
/// </summary>
public CompletedFileHandler CompletedFile;
/// <summary>
/// Delegate to invoke when processing directory failures.
/// </summary>
public DirectoryFailureHandler DirectoryFailure;
/// <summary>
/// Delegate to invoke when processing file failures.
/// </summary>
public FileFailureHandler FileFailure;
/// <summary>
/// Raise the <see cref="DirectoryFailure">directory failure</see> event.
/// </summary>
/// <param name="directory">The directory causing the failure.</param>
/// <param name="e">The exception for this event.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnDirectoryFailure(string directory, Exception e)
{
bool result = false;
DirectoryFailureHandler handler = DirectoryFailure;
if (handler != null)
{
var args = new ScanFailureEventArgs(directory, e);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="FileFailure"> file failure handler delegate</see>.
/// </summary>
/// <param name="file">The file causing the failure.</param>
/// <param name="e">The exception for this failure.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnFileFailure(string file, Exception e)
{
FileFailureHandler handler = FileFailure;
bool result = (handler != null);
if (result)
{
var args = new ScanFailureEventArgs(file, e);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="ProcessFile">ProcessFile delegate</see>.
/// </summary>
/// <param name="file">The file being processed.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnProcessFile(string file)
{
bool result = true;
ProcessFileHandler handler = ProcessFile;
if (handler != null)
{
var args = new ScanEventArgs(file);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="CompletedFile"/> delegate
/// </summary>
/// <param name="file">The file whose processing has been completed.</param>
/// <returns>A boolean indicating if execution should continue or not.</returns>
public bool OnCompletedFile(string file)
{
bool result = true;
CompletedFileHandler handler = CompletedFile;
if (handler != null)
{
var args = new ScanEventArgs(file);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Fires the <see cref="ProcessDirectory">process directory</see> delegate.
/// </summary>
/// <param name="directory">The directory being processed.</param>
/// <param name="hasMatchingFiles">Flag indicating if the directory has matching files as determined by the current filter.</param>
/// <returns>A <see cref="bool"/> of true if the operation should continue; false otherwise.</returns>
public bool OnProcessDirectory(string directory, bool hasMatchingFiles)
{
bool result = true;
EventHandler<DirectoryEventArgs> handler = ProcessDirectory;
if (handler != null)
{
var args = new DirectoryEventArgs(directory, hasMatchingFiles);
handler(this, args);
result = args.ContinueRunning;
}
return result;
}
/// <summary>
/// The minimum timespan between <see cref="Progress"/> events.
/// </summary>
/// <value>The minimum period of time between <see cref="Progress"/> events.</value>
/// <seealso cref="Progress"/>
/// <remarks>The default interval is three seconds.</remarks>
public TimeSpan ProgressInterval
{
get { return progressInterval_; }
set { progressInterval_ = value; }
}
#region Instance Fields
private TimeSpan progressInterval_ = TimeSpan.FromSeconds(3);
#endregion Instance Fields
}
/// <summary>
/// FastZip provides facilities for creating and extracting zip files.
/// </summary>
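/// <example>
/// A minimal sketch of creating and then extracting an archive
/// (paths are placeholders; a null filter is assumed to match all files):
/// <code>
/// var fastZip = new FastZip();
/// fastZip.CreateZip(@"C:\temp\archive.zip", @"C:\temp\source", true, null);
/// fastZip.ExtractZip(@"C:\temp\archive.zip", @"C:\temp\restored", null);
/// </code>
/// </example>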
public class FastZip
{
#region Enumerations
/// <summary>
/// Defines the desired handling when overwriting files during extraction.
/// </summary>
public enum Overwrite
{
/// <summary>
/// Prompt the user to confirm overwriting
/// </summary>
Prompt,
/// <summary>
/// Never overwrite files.
/// </summary>
Never,
/// <summary>
/// Always overwrite files.
/// </summary>
Always
}
#endregion Enumerations
#region Constructors
/// <summary>
/// Initialise a default instance of <see cref="FastZip"/>.
/// </summary>
public FastZip()
{
}
/// <summary>
/// Initialise a new instance of <see cref="FastZip"/> using the specified <see cref="TimeSetting"/>
/// </summary>
/// <param name="timeSetting">The <see cref="TimeSetting">time setting</see> to use when creating or extracting <see cref="ZipEntry">Zip entries</see>.</param>
/// <remarks>Using <see cref="TimeSetting.LastAccessTime">TimeSetting.LastAccessTime</see><see cref="TimeSetting.LastAccessTimeUtc">[Utc]</see> when
/// creating an archive will set the file time to the moment of reading.
/// </remarks>
public FastZip(TimeSetting timeSetting)
{
entryFactory_ = new ZipEntryFactory(timeSetting);
restoreDateTimeOnExtract_ = true;
}
/// <summary>
/// Initialise a new instance of <see cref="FastZip"/> using the specified <see cref="DateTime"/>
/// </summary>
/// <param name="time">The time to set all <see cref="ZipEntry.DateTime"/> values for created or extracted <see cref="ZipEntry">Zip Entries</see>.</param>
public FastZip(DateTime time)
{
entryFactory_ = new ZipEntryFactory(time);
restoreDateTimeOnExtract_ = true;
}
/// <summary>
/// Initialise a new instance of <see cref="FastZip"/>
/// </summary>
/// <param name="events">The <see cref="FastZipEvents">events</see> to use during operations.</param>
public FastZip(FastZipEvents events)
{
events_ = events;
}
#endregion Constructors
#region Properties
/// <summary>
/// Get/set a value indicating whether empty directories should be created.
/// </summary>
public bool CreateEmptyDirectories
{
get { return createEmptyDirectories_; }
set { createEmptyDirectories_ = value; }
}
/// <summary>
/// Get / set the password value.
/// </summary>
public string Password
{
get { return password_; }
set { password_ = value; }
}
/// <summary>
/// Get / set the method of encrypting entries.
/// </summary>
/// <remarks>
/// Only applies when <see cref="Password"/> is set.
/// Defaults to ZipCrypto for backwards compatibility purposes.
/// </remarks>
public ZipEncryptionMethod EntryEncryptionMethod { get; set; } = ZipEncryptionMethod.ZipCrypto;
/// <summary>
/// Get or set the <see cref="INameTransform"></see> active when creating Zip files.
/// </summary>
/// <seealso cref="EntryFactory"></seealso>
public INameTransform NameTransform
{
get { return entryFactory_.NameTransform; }
set
{
entryFactory_.NameTransform = value;
}
}
/// <summary>
/// Get or set the <see cref="IEntryFactory"></see> active when creating Zip files.
/// </summary>
public IEntryFactory EntryFactory
{
get { return entryFactory_; }
set
{
if (value == null)
{
entryFactory_ = new ZipEntryFactory();
}
else
{
entryFactory_ = value;
}
}
}
/// <summary>
/// Gets or sets the setting for <see cref="UseZip64">Zip64 handling when writing.</see>
/// </summary>
/// <remarks>
/// The default value is dynamic which is not backwards compatible with old
/// programs and can cause problems with XP's built-in compression which can't
/// read Zip64 archives. However it does avoid the situation where a large file
/// is added and cannot be completed correctly.
/// NOTE: Setting the size for entries before they are added is the best solution!
/// By default the EntryFactory used by FastZip will set the file size.
/// </remarks>
public UseZip64 UseZip64
{
get { return useZip64_; }
set { useZip64_ = value; }
}
/// <summary>
/// Get/set a value indicating whether file dates and times should
/// be restored when extracting files from an archive.
/// </summary>
/// <remarks>The default value is false.</remarks>
public bool RestoreDateTimeOnExtract
{
get
{
return restoreDateTimeOnExtract_;
}
set
{
restoreDateTimeOnExtract_ = value;
}
}
/// <summary>
/// Get/set a value indicating whether file attributes should
/// be restored during extract operations
/// </summary>
public bool RestoreAttributesOnExtract
{
get { return restoreAttributesOnExtract_; }
set { restoreAttributesOnExtract_ = value; }
}
/// <summary>
/// Get/set the Compression Level that will be used
/// when creating the zip
/// </summary>
public Deflater.CompressionLevel CompressionLevel
{
get { return compressionLevel_; }
set { compressionLevel_ = value; }
}
#endregion Properties
#region Delegates
/// <summary>
/// Delegate called when confirming overwriting of files.
/// </summary>
public delegate bool ConfirmOverwriteDelegate(string fileName);
#endregion Delegates
#region CreateZip
/// <summary>
/// Create a zip file.
/// </summary>
/// <param name="zipFileName">The name of the zip file to create.</param>
/// <param name="sourceDirectory">The directory to source files from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The <see cref="PathFilter">file filter</see> to apply.</param>
/// <param name="directoryFilter">The <see cref="PathFilter">directory filter</see> to apply.</param>
public void CreateZip(string zipFileName, string sourceDirectory,
bool recurse, string fileFilter, string directoryFilter)
{
CreateZip(File.Create(zipFileName), sourceDirectory, recurse, fileFilter, directoryFilter);
}
/// <summary>
/// Create a zip file/archive.
/// </summary>
/// <param name="zipFileName">The name of the zip file to create.</param>
/// <param name="sourceDirectory">The directory to obtain files and directories from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The file filter to apply.</param>
public void CreateZip(string zipFileName, string sourceDirectory, bool recurse, string fileFilter)
{
CreateZip(File.Create(zipFileName), sourceDirectory, recurse, fileFilter, null);
}
/// <summary>
/// Create a zip archive sending output to the <paramref name="outputStream"/> passed.
/// </summary>
/// <param name="outputStream">The stream to write archive data to.</param>
/// <param name="sourceDirectory">The directory to source files from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The <see cref="PathFilter">file filter</see> to apply.</param>
/// <param name="directoryFilter">The <see cref="PathFilter">directory filter</see> to apply.</param>
/// <remarks>The <paramref name="outputStream"/> is closed after creation.</remarks>
public void CreateZip(Stream outputStream, string sourceDirectory, bool recurse, string fileFilter, string directoryFilter)
{
CreateZip(outputStream, sourceDirectory, recurse, fileFilter, directoryFilter, false);
}
/// <summary>
/// Create a zip archive sending output to the <paramref name="outputStream"/> passed.
/// </summary>
/// <param name="outputStream">The stream to write archive data to.</param>
/// <param name="sourceDirectory">The directory to source files from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The <see cref="PathFilter">file filter</see> to apply.</param>
/// <param name="directoryFilter">The <see cref="PathFilter">directory filter</see> to apply.</param>
/// <param name="leaveOpen">true to leave <paramref name="outputStream"/> open after the zip has been created, false to dispose it.</param>
public void CreateZip(Stream outputStream, string sourceDirectory, bool recurse, string fileFilter, string directoryFilter, bool leaveOpen)
{
var scanner = new FileSystemScanner(fileFilter, directoryFilter);
CreateZip(outputStream, sourceDirectory, recurse, scanner, leaveOpen);
}
/// <summary>
/// Create a zip file.
/// </summary>
/// <param name="zipFileName">The name of the zip file to create.</param>
/// <param name="sourceDirectory">The directory to source files from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The <see cref="IScanFilter">file filter</see> to apply.</param>
/// <param name="directoryFilter">The <see cref="IScanFilter">directory filter</see> to apply.</param>
public void CreateZip(string zipFileName, string sourceDirectory,
bool recurse, IScanFilter fileFilter, IScanFilter directoryFilter)
{
CreateZip(File.Create(zipFileName), sourceDirectory, recurse, fileFilter, directoryFilter, false);
}
/// <summary>
/// Create a zip archive sending output to the <paramref name="outputStream"/> passed.
/// </summary>
/// <param name="outputStream">The stream to write archive data to.</param>
/// <param name="sourceDirectory">The directory to source files from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="fileFilter">The <see cref="IScanFilter">file filter</see> to apply.</param>
/// <param name="directoryFilter">The <see cref="IScanFilter">directory filter</see> to apply.</param>
/// <param name="leaveOpen">true to leave <paramref name="outputStream"/> open after the zip has been created, false to dispose it.</param>
public void CreateZip(Stream outputStream, string sourceDirectory, bool recurse, IScanFilter fileFilter, IScanFilter directoryFilter, bool leaveOpen = false)
{
var scanner = new FileSystemScanner(fileFilter, directoryFilter);
CreateZip(outputStream, sourceDirectory, recurse, scanner, leaveOpen);
}
/// <summary>
/// Create a zip archive sending output to the <paramref name="outputStream"/> passed.
/// </summary>
/// <param name="outputStream">The stream to write archive data to.</param>
/// <param name="sourceDirectory">The directory to source files from.</param>
/// <param name="recurse">True to recurse directories, false for no recursion.</param>
/// <param name="scanner">For performing the actual file system scan</param>
/// <param name="leaveOpen">true to leave <paramref name="outputStream"/> open after the zip has been created, false to dispose it.</param>
/// <remarks>The <paramref name="outputStream"/> is closed after creation unless <paramref name="leaveOpen"/> is true.</remarks>
private void CreateZip(Stream outputStream, string sourceDirectory, bool recurse, FileSystemScanner scanner, bool leaveOpen)
{
NameTransform = new ZipNameTransform(sourceDirectory);
sourceDirectory_ = sourceDirectory;
using (outputStream_ = new ZipOutputStream(outputStream))
{
outputStream_.SetLevel((int)CompressionLevel);
outputStream_.IsStreamOwner = !leaveOpen;
outputStream_.NameTransform = null; // all required transforms handled by us
if (false == string.IsNullOrEmpty(password_) && EntryEncryptionMethod != ZipEncryptionMethod.None)
{
outputStream_.Password = password_;
}
outputStream_.UseZip64 = UseZip64;
scanner.ProcessFile += ProcessFile;
if (this.CreateEmptyDirectories)
{
scanner.ProcessDirectory += ProcessDirectory;
}
if (events_ != null)
{
if (events_.FileFailure != null)
{
scanner.FileFailure += events_.FileFailure;
}
if (events_.DirectoryFailure != null)
{
scanner.DirectoryFailure += events_.DirectoryFailure;
}
}
scanner.Scan(sourceDirectory, recurse);
}
}
#endregion CreateZip
#region ExtractZip
/// <summary>
/// Extract the contents of a zip file.
/// </summary>
/// <param name="zipFileName">The zip file to extract from.</param>
/// <param name="targetDirectory">The directory to save extracted information in.</param>
/// <param name="fileFilter">A filter to apply to files.</param>
public void ExtractZip(string zipFileName, string targetDirectory, string fileFilter)
{
ExtractZip(zipFileName, targetDirectory, Overwrite.Always, null, fileFilter, null, restoreDateTimeOnExtract_);
}
/// <summary>
/// Extract the contents of a zip file.
/// </summary>
/// <param name="zipFileName">The zip file to extract from.</param>
/// <param name="targetDirectory">The directory to save extracted information in.</param>
/// <param name="overwrite">The style of <see cref="Overwrite">overwriting</see> to apply.</param>
/// <param name="confirmDelegate">A delegate to invoke when confirming overwriting.</param>
/// <param name="fileFilter">A filter to apply to files.</param>
/// <param name="directoryFilter">A filter to apply to directories.</param>
/// <param name="restoreDateTime">Flag indicating whether to restore the date and time for extracted files.</param>
/// <param name="allowParentTraversal">Allow parent directory traversal in file paths (e.g. ../file)</param>
public void ExtractZip(string zipFileName, string targetDirectory,
Overwrite overwrite, ConfirmOverwriteDelegate confirmDelegate,
string fileFilter, string directoryFilter, bool restoreDateTime, bool allowParentTraversal = false)
{
Stream inputStream = File.Open(zipFileName, FileMode.Open, FileAccess.Read, FileShare.Read);
ExtractZip(inputStream, targetDirectory, overwrite, confirmDelegate, fileFilter, directoryFilter, restoreDateTime, true, allowParentTraversal);
}
/// <summary>
/// Extract the contents of a zip file held in a stream.
/// </summary>
/// <param name="inputStream">The seekable input stream containing the zip to extract from.</param>
/// <param name="targetDirectory">The directory to save extracted information in.</param>
/// <param name="overwrite">The style of <see cref="Overwrite">overwriting</see> to apply.</param>
/// <param name="confirmDelegate">A delegate to invoke when confirming overwriting.</param>
/// <param name="fileFilter">A filter to apply to files.</param>
/// <param name="directoryFilter">A filter to apply to directories.</param>
/// <param name="restoreDateTime">Flag indicating whether to restore the date and time for extracted files.</param>
/// <param name="isStreamOwner">Flag indicating whether the inputStream will be closed by this method.</param>
/// <param name="allowParentTraversal">Allow parent directory traversal in file paths (e.g. ../file)</param>
public void ExtractZip(Stream inputStream, string targetDirectory,
Overwrite overwrite, ConfirmOverwriteDelegate confirmDelegate,
string fileFilter, string directoryFilter, bool restoreDateTime,
bool isStreamOwner, bool allowParentTraversal = false)
{
if ((overwrite == Overwrite.Prompt) && (confirmDelegate == null))
{
throw new ArgumentNullException(nameof(confirmDelegate));
}
continueRunning_ = true;
overwrite_ = overwrite;
confirmDelegate_ = confirmDelegate;
extractNameTransform_ = new WindowsNameTransform(targetDirectory, allowParentTraversal);
fileFilter_ = new NameFilter(fileFilter);
directoryFilter_ = new NameFilter(directoryFilter);
restoreDateTimeOnExtract_ = restoreDateTime;
using (zipFile_ = new ZipFile(inputStream, !isStreamOwner))
{
if (password_ != null)
{
zipFile_.Password = password_;
}
System.Collections.IEnumerator enumerator = zipFile_.GetEnumerator();
while (continueRunning_ && enumerator.MoveNext())
{
var entry = (ZipEntry)enumerator.Current;
if (entry.IsFile)
{
// TODO Path.GetDirectoryName can fail here on invalid characters.
if (directoryFilter_.IsMatch(Path.GetDirectoryName(entry.Name)) && fileFilter_.IsMatch(entry.Name))
{
ExtractEntry(entry);
}
}
else if (entry.IsDirectory)
{
if (directoryFilter_.IsMatch(entry.Name) && CreateEmptyDirectories)
{
ExtractEntry(entry);
}
}
else
{
// Do nothing for volume labels etc...
}
}
}
}
#endregion ExtractZip
#region Internal Processing
private void ProcessDirectory(object sender, DirectoryEventArgs e)
{
if (!e.HasMatchingFiles && CreateEmptyDirectories)
{
if (events_ != null)
{
events_.OnProcessDirectory(e.Name, e.HasMatchingFiles);
}
if (e.ContinueRunning)
{
if (e.Name != sourceDirectory_)
{
ZipEntry entry = entryFactory_.MakeDirectoryEntry(e.Name);
outputStream_.PutNextEntry(entry);
}
}
}
}
private void ProcessFile(object sender, ScanEventArgs e)
{
if ((events_ != null) && (events_.ProcessFile != null))
{
events_.ProcessFile(sender, e);
}
if (e.ContinueRunning)
{
try
{
// The open below is equivalent to OpenRead which guarantees that if opened the
// file will not be changed by subsequent openers, but precludes opening in some cases
// where it could succeed. i.e. the open may fail as it is already open for writing and the share mode should reflect that.
using (FileStream stream = File.Open(e.Name, FileMode.Open, FileAccess.Read, FileShare.Read))
{
ZipEntry entry = entryFactory_.MakeFileEntry(e.Name);
// Set up AES encryption for the entry if required.
ConfigureEntryEncryption(entry);
outputStream_.PutNextEntry(entry);
AddFileContents(e.Name, stream);
}
}
catch (Exception ex)
{
if (events_ != null)
{
continueRunning_ = events_.OnFileFailure(e.Name, ex);
}
else
{
continueRunning_ = false;
throw;
}
}
}
}
// Set up the encryption method to use for the specific entry.
private void ConfigureEntryEncryption(ZipEntry entry)
{
// Only alter the entry's options if AES isn't already enabled for it
// (it might have been set up by the entry factory, and if so we let that take precedence)
if (!string.IsNullOrEmpty(Password) && entry.AESEncryptionStrength == 0)
{
switch (EntryEncryptionMethod)
{
case ZipEncryptionMethod.AES128:
entry.AESKeySize = 128;
break;
case ZipEncryptionMethod.AES256:
entry.AESKeySize = 256;
break;
}
}
}
private void AddFileContents(string name, Stream stream)
{
if (stream == null)
{
throw new ArgumentNullException(nameof(stream));
}
if (buffer_ == null)
{
buffer_ = new byte[4096];
}
if ((events_ != null) && (events_.Progress != null))
{
StreamUtils.Copy(stream, outputStream_, buffer_,
events_.Progress, events_.ProgressInterval, this, name);
}
else
{
StreamUtils.Copy(stream, outputStream_, buffer_);
}
if (events_ != null)
{
continueRunning_ = events_.OnCompletedFile(name);
}
}
private void ExtractFileEntry(ZipEntry entry, string targetName)
{
bool proceed = true;
if (overwrite_ != Overwrite.Always)
{
if (File.Exists(targetName))
{
if ((overwrite_ == Overwrite.Prompt) && (confirmDelegate_ != null))
{
proceed = confirmDelegate_(targetName);
}
else
{
proceed = false;
}
}
}
if (proceed)
{
if (events_ != null)
{
continueRunning_ = events_.OnProcessFile(entry.Name);
}
if (continueRunning_)
{
try
{
using (FileStream outputStream = File.Create(targetName))
{
if (buffer_ == null)
{
buffer_ = new byte[4096];
}
using (var inputStream = zipFile_.GetInputStream(entry))
{
if ((events_ != null) && (events_.Progress != null))
{
StreamUtils.Copy(inputStream, outputStream, buffer_,
events_.Progress, events_.ProgressInterval, this, entry.Name, entry.Size);
}
else
{
StreamUtils.Copy(inputStream, outputStream, buffer_);
}
}
if (events_ != null)
{
continueRunning_ = events_.OnCompletedFile(entry.Name);
}
}
if (restoreDateTimeOnExtract_)
{
switch (entryFactory_.Setting)
{
case TimeSetting.CreateTime:
File.SetCreationTime(targetName, entry.DateTime);
break;
case TimeSetting.CreateTimeUtc:
File.SetCreationTimeUtc(targetName, entry.DateTime);
break;
case TimeSetting.LastAccessTime:
File.SetLastAccessTime(targetName, entry.DateTime);
break;
case TimeSetting.LastAccessTimeUtc:
File.SetLastAccessTimeUtc(targetName, entry.DateTime);
break;
case TimeSetting.LastWriteTime:
File.SetLastWriteTime(targetName, entry.DateTime);
break;
case TimeSetting.LastWriteTimeUtc:
File.SetLastWriteTimeUtc(targetName, entry.DateTime);
break;
case TimeSetting.Fixed:
File.SetLastWriteTime(targetName, entryFactory_.FixedDateTime);
break;
default:
throw new ZipException("Unhandled time setting in ExtractFileEntry");
}
}
if (RestoreAttributesOnExtract && entry.IsDOSEntry && (entry.ExternalFileAttributes != -1))
{
var fileAttributes = (FileAttributes)entry.ExternalFileAttributes;
// TODO: FastZip - Setting of other file attributes on extraction is a little trickier.
fileAttributes &= (FileAttributes.Archive | FileAttributes.Normal | FileAttributes.ReadOnly | FileAttributes.Hidden);
File.SetAttributes(targetName, fileAttributes);
}
}
catch (Exception ex)
{
if (events_ != null)
{
continueRunning_ = events_.OnFileFailure(targetName, ex);
}
else
{
continueRunning_ = false;
throw;
}
}
}
}
}
private void ExtractEntry(ZipEntry entry)
{
bool doExtraction = entry.IsCompressionMethodSupported();
string targetName = entry.Name;
if (doExtraction)
{
if (entry.IsFile)
{
targetName = extractNameTransform_.TransformFile(targetName);
}
else if (entry.IsDirectory)
{
targetName = extractNameTransform_.TransformDirectory(targetName);
}
doExtraction = !(string.IsNullOrEmpty(targetName));
}
// TODO: Fire delegate/throw exception when compression method not supported, or name is invalid?
string dirName = string.Empty;
if (doExtraction)
{
if (entry.IsDirectory)
{
dirName = targetName;
}
else
{
dirName = Path.GetDirectoryName(Path.GetFullPath(targetName));
}
}
if (doExtraction && !Directory.Exists(dirName))
{
if (!entry.IsDirectory || CreateEmptyDirectories)
{
try
{
continueRunning_ = events_?.OnProcessDirectory(dirName, true) ?? true;
if (continueRunning_)
{
Directory.CreateDirectory(dirName);
if (entry.IsDirectory && restoreDateTimeOnExtract_)
{
switch (entryFactory_.Setting)
{
case TimeSetting.CreateTime:
Directory.SetCreationTime(dirName, entry.DateTime);
break;
case TimeSetting.CreateTimeUtc:
Directory.SetCreationTimeUtc(dirName, entry.DateTime);
break;
case TimeSetting.LastAccessTime:
Directory.SetLastAccessTime(dirName, entry.DateTime);
break;
case TimeSetting.LastAccessTimeUtc:
Directory.SetLastAccessTimeUtc(dirName, entry.DateTime);
break;
case TimeSetting.LastWriteTime:
Directory.SetLastWriteTime(dirName, entry.DateTime);
break;
case TimeSetting.LastWriteTimeUtc:
Directory.SetLastWriteTimeUtc(dirName, entry.DateTime);
break;
case TimeSetting.Fixed:
Directory.SetLastWriteTime(dirName, entryFactory_.FixedDateTime);
break;
default:
throw new ZipException("Unhandled time setting in ExtractEntry");
}
}
}
else
{
doExtraction = false;
}
}
catch (Exception ex)
{
doExtraction = false;
if (events_ != null)
{
if (entry.IsDirectory)
{
continueRunning_ = events_.OnDirectoryFailure(targetName, ex);
}
else
{
continueRunning_ = events_.OnFileFailure(targetName, ex);
}
}
else
{
continueRunning_ = false;
throw;
}
}
}
}
if (doExtraction && entry.IsFile)
{
ExtractFileEntry(entry, targetName);
}
}
private static int MakeExternalAttributes(FileInfo info)
{
return (int)info.Attributes;
}
private static bool NameIsValid(string name)
{
return !string.IsNullOrEmpty(name) &&
(name.IndexOfAny(Path.GetInvalidPathChars()) < 0);
}
#endregion Internal Processing
#region Instance Fields
private bool continueRunning_;
private byte[] buffer_;
private ZipOutputStream outputStream_;
private ZipFile zipFile_;
private string sourceDirectory_;
private NameFilter fileFilter_;
private NameFilter directoryFilter_;
private Overwrite overwrite_;
private ConfirmOverwriteDelegate confirmDelegate_;
private bool restoreDateTimeOnExtract_;
private bool restoreAttributesOnExtract_;
private bool createEmptyDirectories_;
private FastZipEvents events_;
private IEntryFactory entryFactory_ = new ZipEntryFactory();
private INameTransform extractNameTransform_;
private UseZip64 useZip64_ = UseZip64.Dynamic;
private CompressionLevel compressionLevel_ = CompressionLevel.DEFAULT_COMPRESSION;
private string password_;
#endregion Instance Fields
}
}

View File

@@ -0,0 +1,67 @@
using System;
using ICSharpCode.SharpZipLib.Core;
using static ICSharpCode.SharpZipLib.Zip.ZipEntryFactory;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// Defines factory methods for creating new <see cref="ZipEntry"></see> values.
/// </summary>
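/// <example>
/// A minimal sketch, assuming <see cref="ZipEntryFactory"/> as the concrete implementation:
/// <code>
/// IEntryFactory factory = new ZipEntryFactory();
/// ZipEntry fileEntry = factory.MakeFileEntry("docs/readme.txt");
/// ZipEntry dirEntry = factory.MakeDirectoryEntry("docs");
/// </code>
/// </example>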
public interface IEntryFactory
{
/// <summary>
/// Create a <see cref="ZipEntry"/> for a file given its name
/// </summary>
/// <param name="fileName">The name of the file to create an entry for.</param>
/// <returns>Returns a <see cref="ZipEntry">file entry</see> based on the <paramref name="fileName"/> passed.</returns>
ZipEntry MakeFileEntry(string fileName);
/// <summary>
/// Create a <see cref="ZipEntry"/> for a file given its name
/// </summary>
/// <param name="fileName">The name of the file to create an entry for.</param>
/// <param name="useFileSystem">If true get details from the file system if the file exists.</param>
/// <returns>Returns a <see cref="ZipEntry">file entry</see> based on the <paramref name="fileName"/> passed.</returns>
ZipEntry MakeFileEntry(string fileName, bool useFileSystem);
/// <summary>
/// Create a <see cref="ZipEntry"/> for a file given its actual name and optional override name
/// </summary>
/// <param name="fileName">The name of the file to create an entry for.</param>
/// <param name="entryName">An alternative name to be used for the new entry. Null if not applicable.</param>
/// <param name="useFileSystem">If true get details from the file system if the file exists.</param>
/// <returns>Returns a <see cref="ZipEntry">file entry</see> based on the <paramref name="fileName"/> passed.</returns>
ZipEntry MakeFileEntry(string fileName, string entryName, bool useFileSystem);
/// <summary>
/// Create a <see cref="ZipEntry"/> for a directory given its name
/// </summary>
/// <param name="directoryName">The name of the directory to create an entry for.</param>
/// <returns>Returns a <see cref="ZipEntry">directory entry</see> based on the <paramref name="directoryName"/> passed.</returns>
ZipEntry MakeDirectoryEntry(string directoryName);
/// <summary>
/// Create a <see cref="ZipEntry"/> for a directory given its name
/// </summary>
/// <param name="directoryName">The name of the directory to create an entry for.</param>
/// <param name="useFileSystem">If true get details from the file system for this directory if it exists.</param>
/// <returns>Returns a <see cref="ZipEntry">directory entry</see> based on the <paramref name="directoryName"/> passed.</returns>
ZipEntry MakeDirectoryEntry(string directoryName, bool useFileSystem);
/// <summary>
/// Get/set the <see cref="INameTransform"></see> applicable.
/// </summary>
INameTransform NameTransform { get; set; }
/// <summary>
/// Get the <see cref="TimeSetting"/> in use.
/// </summary>
TimeSetting Setting { get; }
/// <summary>
/// Get the <see cref="DateTime"/> value to use when <see cref="Setting"/> is set to <see cref="TimeSetting.Fixed"/>,
/// or if not specified, the value of <see cref="DateTime.Now"/> when the class was initialized
/// </summary>
DateTime FixedDateTime { get; }
}
}

View File

@@ -0,0 +1,266 @@
using ICSharpCode.SharpZipLib.Core;
using System;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// WindowsNameTransform transforms <see cref="ZipFile"/> names to windows compatible ones.
/// </summary>
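/// <example>
/// A minimal sketch (the base directory is a placeholder):
/// <code>
/// var transform = new WindowsNameTransform(@"C:\extract");
/// string target = transform.TransformFile("docs/read*me.txt");
/// // target is rooted under C:\extract, '/' becomes '\' and '*' is replaced by '_'
/// </code>
/// </example>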
public class WindowsNameTransform : INameTransform
{
/// <summary>
/// The maximum windows path name permitted.
/// </summary>
/// <remarks>This may not be valid for all Windows systems - CE?, etc. - but I can't find the equivalent in the CLR.</remarks>
private const int MaxPath = 260;
private string _baseDirectory;
private bool _trimIncomingPaths;
private char _replacementChar = '_';
private bool _allowParentTraversal;
/// <summary>
/// In this case we need Windows' invalid path characters.
/// Path.GetInvalidPathChars() only returns the subset that is invalid on all platforms.
/// </summary>
private static readonly char[] InvalidEntryChars = new char[] {
'"', '<', '>', '|', '\0', '\u0001', '\u0002', '\u0003', '\u0004', '\u0005',
'\u0006', '\a', '\b', '\t', '\n', '\v', '\f', '\r', '\u000e', '\u000f',
'\u0010', '\u0011', '\u0012', '\u0013', '\u0014', '\u0015', '\u0016',
'\u0017', '\u0018', '\u0019', '\u001a', '\u001b', '\u001c', '\u001d',
'\u001e', '\u001f',
// extra characters for masks, etc.
'*', '?', ':'
};
/// <summary>
/// Initialises a new instance of <see cref="WindowsNameTransform"/>
/// </summary>
/// <param name="baseDirectory"></param>
/// <param name="allowParentTraversal">Allow parent directory traversal in file paths (e.g. ../file)</param>
public WindowsNameTransform(string baseDirectory, bool allowParentTraversal = false)
{
BaseDirectory = baseDirectory ?? throw new ArgumentNullException(nameof(baseDirectory), "Directory name is invalid");
AllowParentTraversal = allowParentTraversal;
}
/// <summary>
/// Initialise a default instance of <see cref="WindowsNameTransform"/>
/// </summary>
public WindowsNameTransform()
{
// Do nothing.
}
/// <summary>
/// Gets or sets a value containing the target directory to prefix values with.
/// </summary>
public string BaseDirectory
{
get { return _baseDirectory; }
set
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
_baseDirectory = Path.GetFullPath(value);
}
}
/// <summary>
/// Allow parent directory traversal in file paths (e.g. ../file)
/// </summary>
public bool AllowParentTraversal
{
get => _allowParentTraversal;
set => _allowParentTraversal = value;
}
/// <summary>
/// Gets or sets a value indicating whether paths on incoming values should be removed.
/// </summary>
public bool TrimIncomingPaths
{
get { return _trimIncomingPaths; }
set { _trimIncomingPaths = value; }
}
/// <summary>
/// Transform a Zip directory name to a windows directory name.
/// </summary>
/// <param name="name">The directory name to transform.</param>
/// <returns>The transformed name.</returns>
public string TransformDirectory(string name)
{
name = TransformFile(name);
if (name.Length > 0)
{
while (name.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal))
{
name = name.Remove(name.Length - 1, 1);
}
}
else
{
throw new InvalidNameException("Cannot have an empty directory name");
}
return name;
}
/// <summary>
/// Transform a Zip format file name to a windows style one.
/// </summary>
/// <param name="name">The file name to transform.</param>
/// <returns>The transformed name.</returns>
public string TransformFile(string name)
{
if (name != null)
{
name = MakeValidName(name, _replacementChar);
if (_trimIncomingPaths)
{
name = Path.GetFileName(name);
}
// This may exceed windows length restrictions.
// Combine will throw a PathTooLongException in that case.
if (_baseDirectory != null)
{
name = Path.Combine(_baseDirectory, name);
// Ensure base directory ends with directory separator ('/' or '\' depending on OS)
var pathBase = Path.GetFullPath(_baseDirectory);
if (pathBase[pathBase.Length - 1] != Path.DirectorySeparatorChar)
{
pathBase += Path.DirectorySeparatorChar;
}
if (!_allowParentTraversal && !Path.GetFullPath(name).StartsWith(pathBase, StringComparison.InvariantCultureIgnoreCase))
{
throw new InvalidNameException("Parent traversal in paths is not allowed");
}
}
}
else
{
name = string.Empty;
}
return name;
}
/// <summary>
/// Test a name to see if it is a valid name for a windows filename as extracted from a Zip archive.
/// </summary>
/// <param name="name">The name to test.</param>
/// <returns>Returns true if the name is a valid zip name; false otherwise.</returns>
/// <remarks>The filename isn't a true windows path in some fundamental ways like no absolute paths, no rooted paths etc.</remarks>
public static bool IsValidName(string name)
{
bool result =
(name != null) &&
(name.Length <= MaxPath) &&
(string.Compare(name, MakeValidName(name, '_'), StringComparison.Ordinal) == 0)
;
return result;
}
/// <summary>
/// Force a name to be valid by replacing invalid characters with a fixed value
/// </summary>
/// <param name="name">The name to make valid</param>
/// <param name="replacement">The replacement character to use for any invalid characters.</param>
/// <returns>Returns a valid name</returns>
public static string MakeValidName(string name, char replacement)
{
if (name == null)
{
throw new ArgumentNullException(nameof(name));
}
name = PathUtils.DropPathRoot(name.Replace("/", Path.DirectorySeparatorChar.ToString()));
// Drop any leading slashes.
while ((name.Length > 0) && (name[0] == Path.DirectorySeparatorChar))
{
name = name.Remove(0, 1);
}
// Drop any trailing slashes.
while ((name.Length > 0) && (name[name.Length - 1] == Path.DirectorySeparatorChar))
{
name = name.Remove(name.Length - 1, 1);
}
// Convert consecutive \\ characters to \
int index = name.IndexOf(string.Format("{0}{0}", Path.DirectorySeparatorChar), StringComparison.Ordinal);
while (index >= 0)
{
name = name.Remove(index, 1);
index = name.IndexOf(string.Format("{0}{0}", Path.DirectorySeparatorChar), StringComparison.Ordinal);
}
// Convert any invalid characters using the replacement one.
index = name.IndexOfAny(InvalidEntryChars);
if (index >= 0)
{
var builder = new StringBuilder(name);
while (index >= 0)
{
builder[index] = replacement;
if (index >= name.Length)
{
index = -1;
}
else
{
index = name.IndexOfAny(InvalidEntryChars, index + 1);
}
}
name = builder.ToString();
}
// Check for names greater than MaxPath characters.
// TODO: Where is the CLR version of MaxPath defined? Can't find it in Environment.
if (name.Length > MaxPath)
{
throw new PathTooLongException();
}
return name;
}
/// <summary>
/// Gets or set the character to replace invalid characters during transformations.
/// </summary>
public char Replacement
{
get { return _replacementChar; }
set
{
for (int i = 0; i < InvalidEntryChars.Length; ++i)
{
if (InvalidEntryChars[i] == value)
{
throw new ArgumentException("invalid path character");
}
}
if ((value == Path.DirectorySeparatorChar) || (value == Path.AltDirectorySeparatorChar))
{
throw new ArgumentException("invalid replacement character");
}
_replacementChar = value;
}
}
}
}

View File

@@ -0,0 +1,518 @@
using System;
namespace ICSharpCode.SharpZipLib.Zip
{
#region Enumerations
/// <summary>
/// Determines how entries are tested to see if they should use Zip64 extensions or not.
/// </summary>
public enum UseZip64
{
/// <summary>
/// Zip64 will not be forced on entries during processing.
/// </summary>
/// <remarks>An entry can have this overridden if required <see cref="ZipEntry.ForceZip64"></see></remarks>
Off,
/// <summary>
/// Zip64 should always be used.
/// </summary>
On,
/// <summary>
/// #ZipLib will determine whether to use Zip64 based on entry values when added to the archive.
/// </summary>
Dynamic,
}
/// <summary>
/// The kind of compression used for an entry in an archive
/// </summary>
public enum CompressionMethod
{
/// <summary>
/// A direct copy of the file contents is held in the archive
/// </summary>
Stored = 0,
/// <summary>
/// Common Zip compression method using a sliding dictionary
/// of up to 32KB and secondary compression from Huffman/Shannon-Fano trees
/// </summary>
Deflated = 8,
/// <summary>
/// An extension to deflate with a 64KB window. Not supported by #Zip currently
/// </summary>
Deflate64 = 9,
/// <summary>
/// BZip2 compression. Not supported by #Zip.
/// </summary>
BZip2 = 12,
/// <summary>
/// LZMA compression. Not supported by #Zip.
/// </summary>
LZMA = 14,
/// <summary>
/// PPMd compression. Not supported by #Zip.
/// </summary>
PPMd = 98,
/// <summary>
/// WinZip special for AES encryption, now supported by #Zip.
/// </summary>
WinZipAES = 99,
}
/// <summary>
/// Identifies the encryption algorithm used for an entry
/// </summary>
public enum EncryptionAlgorithm
{
/// <summary>
/// No encryption has been used.
/// </summary>
None = 0,
/// <summary>
/// Encrypted using PKZIP 2.0 or 'classic' encryption.
/// </summary>
PkzipClassic = 1,
/// <summary>
/// DES encryption has been used.
/// </summary>
Des = 0x6601,
/// <summary>
/// RC2 encryption has been used for encryption.
/// </summary>
RC2 = 0x6602,
/// <summary>
/// Triple DES encryption with 168 bit keys has been used for this entry.
/// </summary>
TripleDes168 = 0x6603,
/// <summary>
/// Triple DES with 112 bit keys has been used for this entry.
/// </summary>
TripleDes112 = 0x6609,
/// <summary>
/// AES 128 has been used for encryption.
/// </summary>
Aes128 = 0x660e,
/// <summary>
/// AES 192 has been used for encryption.
/// </summary>
Aes192 = 0x660f,
/// <summary>
/// AES 256 has been used for encryption.
/// </summary>
Aes256 = 0x6610,
/// <summary>
/// RC2 corrected has been used for encryption.
/// </summary>
RC2Corrected = 0x6702,
/// <summary>
/// Blowfish has been used for encryption.
/// </summary>
Blowfish = 0x6720,
/// <summary>
/// Twofish has been used for encryption.
/// </summary>
Twofish = 0x6721,
/// <summary>
/// RC4 has been used for encryption.
/// </summary>
RC4 = 0x6801,
/// <summary>
/// An unknown algorithm has been used for encryption.
/// </summary>
Unknown = 0xffff
}
/// <summary>
/// Defines the contents of the general bit flags field for an archive entry.
/// </summary>
[Flags]
public enum GeneralBitFlags
{
/// <summary>
/// Bit 0 if set indicates that the file is encrypted
/// </summary>
Encrypted = 0x0001,
/// <summary>
/// Bits 1 and 2 - Two bits defining the compression method (only for Method 6 Imploding and 8,9 Deflating)
/// </summary>
Method = 0x0006,
/// <summary>
/// Bit 3 if set indicates a trailing data descriptor is appended to the entry data
/// </summary>
Descriptor = 0x0008,
/// <summary>
/// Bit 4 is reserved for use with method 8 for enhanced deflation
/// </summary>
ReservedPKware4 = 0x0010,
/// <summary>
/// Bit 5 if set indicates the file contains Pkzip compressed patched data.
/// Requires version 2.7 or greater.
/// </summary>
Patched = 0x0020,
/// <summary>
/// Bit 6 if set indicates strong encryption has been used for this entry.
/// </summary>
StrongEncryption = 0x0040,
/// <summary>
/// Bit 7 is currently unused
/// </summary>
Unused7 = 0x0080,
/// <summary>
/// Bit 8 is currently unused
/// </summary>
Unused8 = 0x0100,
/// <summary>
/// Bit 9 is currently unused
/// </summary>
Unused9 = 0x0200,
/// <summary>
/// Bit 10 is currently unused
/// </summary>
Unused10 = 0x0400,
/// <summary>
/// Bit 11 if set indicates the filename and
/// comment fields for this file must be encoded using UTF-8.
/// </summary>
UnicodeText = 0x0800,
/// <summary>
/// Bit 12 is documented as being reserved by PKware for enhanced compression.
/// </summary>
EnhancedCompress = 0x1000,
/// <summary>
/// Bit 13 if set indicates that values in the local header are masked to hide
/// their actual values, and the central directory is encrypted.
/// </summary>
/// <remarks>
/// Used when encrypting the central directory contents.
/// </remarks>
HeaderMasked = 0x2000,
/// <summary>
/// Bit 14 is documented as being reserved for use by PKware
/// </summary>
ReservedPkware14 = 0x4000,
/// <summary>
/// Bit 15 is documented as being reserved for use by PKware
/// </summary>
ReservedPkware15 = 0x8000
}
#endregion Enumerations
/// <summary>
/// This class contains constants used for Zip format files
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Naming", "CA1707:Identifiers should not contain underscores", Justification = "kept for backwards compatibility")]
public static class ZipConstants
{
#region Versions
/// <summary>
/// The version made by field for entries in the central header when created by this library
/// </summary>
/// <remarks>
/// This is also the Zip version for the library when comparing against the version required to extract
/// for an entry. See <see cref="ZipEntry.CanDecompress"/>.
/// </remarks>
public const int VersionMadeBy = 51; // was 45 before AES
/// <summary>
/// The version made by field for entries in the central header when created by this library
/// </summary>
/// <remarks>
/// This is also the Zip version for the library when comparing against the version required to extract
/// for an entry. See <see cref="ZipInputStream.CanDecompressEntry">ZipInputStream.CanDecompressEntry</see>.
/// </remarks>
[Obsolete("Use VersionMadeBy instead")]
public const int VERSION_MADE_BY = 51;
/// <summary>
/// The minimum version required to support strong encryption
/// </summary>
public const int VersionStrongEncryption = 50;
/// <summary>
/// The minimum version required to support strong encryption
/// </summary>
[Obsolete("Use VersionStrongEncryption instead")]
public const int VERSION_STRONG_ENCRYPTION = 50;
/// <summary>
/// Version indicating AES encryption
/// </summary>
public const int VERSION_AES = 51;
/// <summary>
/// The version required for Zip64 extensions (4.5 or higher)
/// </summary>
public const int VersionZip64 = 45;
/// <summary>
/// The version required for BZip2 compression (4.6 or higher)
/// </summary>
public const int VersionBZip2 = 46;
#endregion Versions
#region Header Sizes
/// <summary>
/// Size of local entry header (excluding variable length fields at end)
/// </summary>
public const int LocalHeaderBaseSize = 30;
/// <summary>
/// Size of local entry header (excluding variable length fields at end)
/// </summary>
[Obsolete("Use LocalHeaderBaseSize instead")]
public const int LOCHDR = 30;
/// <summary>
/// Size of Zip64 data descriptor
/// </summary>
public const int Zip64DataDescriptorSize = 24;
/// <summary>
/// Size of data descriptor
/// </summary>
public const int DataDescriptorSize = 16;
/// <summary>
/// Size of data descriptor
/// </summary>
[Obsolete("Use DataDescriptorSize instead")]
public const int EXTHDR = 16;
/// <summary>
/// Size of central header entry (excluding variable fields)
/// </summary>
public const int CentralHeaderBaseSize = 46;
/// <summary>
/// Size of central header entry
/// </summary>
[Obsolete("Use CentralHeaderBaseSize instead")]
public const int CENHDR = 46;
/// <summary>
/// Size of end of central record (excluding variable fields)
/// </summary>
public const int EndOfCentralRecordBaseSize = 22;
/// <summary>
/// Size of end of central record (excluding variable fields)
/// </summary>
[Obsolete("Use EndOfCentralRecordBaseSize instead")]
public const int ENDHDR = 22;
/// <summary>
/// Size of 'classic' cryptographic header stored before any entry data
/// </summary>
public const int CryptoHeaderSize = 12;
/// <summary>
/// Size of cryptographic header stored before entry data
/// </summary>
[Obsolete("Use CryptoHeaderSize instead")]
public const int CRYPTO_HEADER_SIZE = 12;
/// <summary>
/// The size of the Zip64 central directory locator.
/// </summary>
public const int Zip64EndOfCentralDirectoryLocatorSize = 20;
#endregion Header Sizes
#region Header Signatures
/// <summary>
/// Signature for local entry header
/// </summary>
public const int LocalHeaderSignature = 'P' | ('K' << 8) | (3 << 16) | (4 << 24);
/// <summary>
/// Signature for local entry header
/// </summary>
[Obsolete("Use LocalHeaderSignature instead")]
public const int LOCSIG = 'P' | ('K' << 8) | (3 << 16) | (4 << 24);
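// Worked example (added for clarity, not part of the original source):
// LocalHeaderSignature = 'P' | ('K' << 8) | (3 << 16) | (4 << 24)
//                      = 0x50 | 0x4B00 | 0x030000 | 0x04000000 = 0x04034B50,
// which is stored little-endian on disk as the familiar byte sequence
// 50 4B 03 04 ("PK\x03\x04") at the start of each local entry header.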
/// <summary>
/// Signature for spanning entry
/// </summary>
public const int SpanningSignature = 'P' | ('K' << 8) | (7 << 16) | (8 << 24);
/// <summary>
/// Signature for spanning entry
/// </summary>
[Obsolete("Use SpanningSignature instead")]
public const int SPANNINGSIG = 'P' | ('K' << 8) | (7 << 16) | (8 << 24);
/// <summary>
/// Signature for temporary spanning entry
/// </summary>
public const int SpanningTempSignature = 'P' | ('K' << 8) | ('0' << 16) | ('0' << 24);
/// <summary>
/// Signature for temporary spanning entry
/// </summary>
[Obsolete("Use SpanningTempSignature instead")]
public const int SPANTEMPSIG = 'P' | ('K' << 8) | ('0' << 16) | ('0' << 24);
/// <summary>
/// Signature for data descriptor
/// </summary>
/// <remarks>
/// This is only used where the length, Crc, or compressed size isn't known when the
/// entry is created and the output stream doesn't support seeking.
/// The local entry cannot be 'patched' with the correct values in this case
/// so the values are recorded after the data prefixed by this header, as well as in the central directory.
/// </remarks>
public const int DataDescriptorSignature = 'P' | ('K' << 8) | (7 << 16) | (8 << 24);
/// <summary>
/// Signature for data descriptor
/// </summary>
/// <remarks>
/// This is only used where the length, Crc, or compressed size isn't known when the
/// entry is created and the output stream doesn't support seeking.
/// The local entry cannot be 'patched' with the correct values in this case
/// so the values are recorded after the data prefixed by this header, as well as in the central directory.
/// </remarks>
[Obsolete("Use DataDescriptorSignature instead")]
public const int EXTSIG = 'P' | ('K' << 8) | (7 << 16) | (8 << 24);
/// <summary>
/// Signature for central header
/// </summary>
[Obsolete("Use CentralHeaderSignature instead")]
public const int CENSIG = 'P' | ('K' << 8) | (1 << 16) | (2 << 24);
/// <summary>
/// Signature for central header
/// </summary>
public const int CentralHeaderSignature = 'P' | ('K' << 8) | (1 << 16) | (2 << 24);
/// <summary>
/// Signature for Zip64 central file header
/// </summary>
public const int Zip64CentralFileHeaderSignature = 'P' | ('K' << 8) | (6 << 16) | (6 << 24);
/// <summary>
/// Signature for Zip64 central file header
/// </summary>
[Obsolete("Use Zip64CentralFileHeaderSignature instead")]
public const int CENSIG64 = 'P' | ('K' << 8) | (6 << 16) | (6 << 24);
/// <summary>
/// Signature for Zip64 central directory locator
/// </summary>
public const int Zip64CentralDirLocatorSignature = 'P' | ('K' << 8) | (6 << 16) | (7 << 24);
/// <summary>
/// Signature for archive extra data signature (where headers are encrypted).
/// </summary>
public const int ArchiveExtraDataSignature = 'P' | ('K' << 8) | (6 << 16) | (7 << 24);
/// <summary>
/// Central header digital signature
/// </summary>
public const int CentralHeaderDigitalSignature = 'P' | ('K' << 8) | (5 << 16) | (5 << 24);
/// <summary>
/// Central header digital signature
/// </summary>
[Obsolete("Use CentralHeaderDigitalSignaure instead")]
public const int CENDIGITALSIG = 'P' | ('K' << 8) | (5 << 16) | (5 << 24);
/// <summary>
/// End of central directory record signature
/// </summary>
public const int EndOfCentralDirectorySignature = 'P' | ('K' << 8) | (5 << 16) | (6 << 24);
/// <summary>
/// End of central directory record signature
/// </summary>
[Obsolete("Use EndOfCentralDirectorySignature instead")]
public const int ENDSIG = 'P' | ('K' << 8) | (5 << 16) | (6 << 24);
#endregion Header Signatures
/// <summary>
/// Default encoding used for string conversion. 0 gives the default system OEM code page.
/// Using the default code page isn't necessarily the full solution as
/// there are many variable factors; code page 850 is often a good choice for
/// European users, but be careful about compatibility.
/// </summary>
[Obsolete("Use ZipStrings instead")]
public static int DefaultCodePage
{
get => ZipStrings.CodePage;
set => ZipStrings.CodePage = value;
}
/// <summary> Deprecated wrapper for <see cref="ZipStrings.ConvertToString(byte[], int)"/></summary>
[Obsolete("Use ZipStrings.ConvertToString instead")]
public static string ConvertToString(byte[] data, int count)
=> ZipStrings.ConvertToString(data, count);
/// <summary> Deprecated wrapper for <see cref="ZipStrings.ConvertToString(byte[])"/></summary>
[Obsolete("Use ZipStrings.ConvertToString instead")]
public static string ConvertToString(byte[] data)
=> ZipStrings.ConvertToString(data);
/// <summary> Deprecated wrapper for <see cref="ZipStrings.ConvertToStringExt(int, byte[], int)"/></summary>
[Obsolete("Use ZipStrings.ConvertToStringExt instead")]
public static string ConvertToStringExt(int flags, byte[] data, int count)
=> ZipStrings.ConvertToStringExt(flags, data, count);
/// <summary> Deprecated wrapper for <see cref="ZipStrings.ConvertToStringExt(int, byte[])"/></summary>
[Obsolete("Use ZipStrings.ConvertToStringExt instead")]
public static string ConvertToStringExt(int flags, byte[] data)
=> ZipStrings.ConvertToStringExt(flags, data);
/// <summary> Deprecated wrapper for <see cref="ZipStrings.ConvertToArray(string)"/></summary>
[Obsolete("Use ZipStrings.ConvertToArray instead")]
public static byte[] ConvertToArray(string str)
=> ZipStrings.ConvertToArray(str);
/// <summary> Deprecated wrapper for <see cref="ZipStrings.ConvertToArray(int, string)"/></summary>
[Obsolete("Use ZipStrings.ConvertToArray instead")]
public static byte[] ConvertToArray(int flags, string str)
=> ZipStrings.ConvertToArray(flags, str);
}
}

View File

@@ -0,0 +1,28 @@
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// The method of encrypting entries when creating zip archives.
/// </summary>
public enum ZipEncryptionMethod
{
/// <summary>
/// No encryption will be used.
/// </summary>
None,
/// <summary>
/// Encrypt entries with ZipCrypto.
/// </summary>
ZipCrypto,
/// <summary>
/// Encrypt entries with AES 128.
/// </summary>
AES128,
/// <summary>
/// Encrypt entries with AES 256.
/// </summary>
AES256
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,32 @@
using System;
using System.Collections.Generic;
using System.Text;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// General ZipEntry helper extensions
/// </summary>
public static class ZipEntryExtensions
{
/// <summary>
/// Efficiently check if a <see cref="GeneralBitFlags">flag</see> is set without enum un-/boxing
/// </summary>
/// <param name="entry"></param>
/// <param name="flag"></param>
/// <returns>Returns whether the flag was set</returns>
public static bool HasFlag(this ZipEntry entry, GeneralBitFlags flag)
=> (entry.Flags & (int) flag) != 0;
/// <summary>
/// Efficiently set a <see cref="GeneralBitFlags">flag</see> without enum un-/boxing
/// </summary>
/// <param name="entry"></param>
/// <param name="flag"></param>
/// <param name="enabled">Whether the passed flag should be set (1) or cleared (0)</param>
public static void SetFlag(this ZipEntry entry, GeneralBitFlags flag, bool enabled = true)
=> entry.Flags = enabled
? entry.Flags | (int) flag
: entry.Flags & ~(int) flag;
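// Illustrative usage sketch (not part of the original source); assumes a ZipEntry
// instance named "entry" created elsewhere, e.g. new ZipEntry("a.txt"):
//
//   entry.SetFlag(GeneralBitFlags.UnicodeText);              // mark name/comment as UTF-8
//   bool utf8 = entry.HasFlag(GeneralBitFlags.UnicodeText);  // true
//   entry.SetFlag(GeneralBitFlags.UnicodeText, false);       // clear the bit again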
}
}

View File

@@ -0,0 +1,375 @@
using ICSharpCode.SharpZipLib.Core;
using System;
using System.IO;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// Basic implementation of <see cref="IEntryFactory"></see>
/// </summary>
public class ZipEntryFactory : IEntryFactory
{
#region Enumerations
/// <summary>
/// Defines the possible values to be used for the <see cref="ZipEntry.DateTime"/>.
/// </summary>
public enum TimeSetting
{
/// <summary>
/// Use the recorded LastWriteTime value for the file.
/// </summary>
LastWriteTime,
/// <summary>
/// Use the recorded LastWriteTimeUtc value for the file
/// </summary>
LastWriteTimeUtc,
/// <summary>
/// Use the recorded CreateTime value for the file.
/// </summary>
CreateTime,
/// <summary>
/// Use the recorded CreateTimeUtc value for the file.
/// </summary>
CreateTimeUtc,
/// <summary>
/// Use the recorded LastAccessTime value for the file.
/// </summary>
LastAccessTime,
/// <summary>
/// Use the recorded LastAccessTimeUtc value for the file.
/// </summary>
LastAccessTimeUtc,
/// <summary>
/// Use a fixed value.
/// </summary>
/// <remarks>The actual <see cref="DateTime"/> value used can be
/// specified via the <see cref="ZipEntryFactory(DateTime)"/> constructor or
/// using the <see cref="ZipEntryFactory(TimeSetting)"/> with the setting set
/// to <see cref="TimeSetting.Fixed"/>, which will use the <see cref="DateTime"/> from when this class was constructed.
/// The <see cref="FixedDateTime"/> property can also be used to set this value.</remarks>
Fixed,
}
#endregion Enumerations
#region Constructors
/// <summary>
/// Initialise a new instance of the <see cref="ZipEntryFactory"/> class.
/// </summary>
/// <remarks>A default <see cref="INameTransform"/>, and the LastWriteTime for files is used.</remarks>
public ZipEntryFactory()
{
nameTransform_ = new ZipNameTransform();
isUnicodeText_ = ZipStrings.UseUnicode;
}
/// <summary>
/// Initialise a new instance of <see cref="ZipEntryFactory"/> using the specified <see cref="TimeSetting"/>
/// </summary>
/// <param name="timeSetting">The <see cref="TimeSetting">time setting</see> to use when creating <see cref="ZipEntry">Zip entries</see>.</param>
public ZipEntryFactory(TimeSetting timeSetting) : this()
{
timeSetting_ = timeSetting;
}
/// <summary>
/// Initialise a new instance of <see cref="ZipEntryFactory"/> using the specified <see cref="DateTime"/>
/// </summary>
/// <param name="time">The time to set all <see cref="ZipEntry.DateTime"/> values to.</param>
public ZipEntryFactory(DateTime time) : this()
{
timeSetting_ = TimeSetting.Fixed;
FixedDateTime = time;
}
#endregion Constructors
#region Properties
/// <summary>
/// Get / set the <see cref="INameTransform"/> to be used when creating new <see cref="ZipEntry"/> values.
/// </summary>
/// <remarks>
/// Setting this property to null will cause a default <see cref="ZipNameTransform">name transform</see> to be used.
/// </remarks>
public INameTransform NameTransform
{
get { return nameTransform_; }
set
{
if (value == null)
{
nameTransform_ = new ZipNameTransform();
}
else
{
nameTransform_ = value;
}
}
}
/// <summary>
/// Get / set the <see cref="TimeSetting"/> in use.
/// </summary>
public TimeSetting Setting
{
get { return timeSetting_; }
set { timeSetting_ = value; }
}
/// <summary>
/// Get / set the <see cref="DateTime"/> value to use when <see cref="Setting"/> is set to <see cref="TimeSetting.Fixed"/>
/// </summary>
public DateTime FixedDateTime
{
get { return fixedDateTime_; }
set
{
if (value.Year < 1970)
{
throw new ArgumentException("Value is too old to be valid", nameof(value));
}
fixedDateTime_ = value;
}
}
/// <summary>
/// A bitmask defining the attributes to be retrieved from the actual file.
/// </summary>
/// <remarks>The default is to get all possible attributes from the actual file.</remarks>
public int GetAttributes
{
get { return getAttributes_; }
set { getAttributes_ = value; }
}
/// <summary>
/// A bitmask defining which attributes are to be set on.
/// </summary>
/// <remarks>By default no attributes are set on.</remarks>
public int SetAttributes
{
get { return setAttributes_; }
set { setAttributes_ = value; }
}
/// <summary>
/// Get / set a value indicating whether unicode text should be set on.
/// </summary>
public bool IsUnicodeText
{
get { return isUnicodeText_; }
set { isUnicodeText_ = value; }
}
#endregion Properties
#region IEntryFactory Members
/// <summary>
/// Make a new <see cref="ZipEntry"/> for a file.
/// </summary>
/// <param name="fileName">The name of the file to create a new entry for.</param>
/// <returns>Returns a new <see cref="ZipEntry"/> based on the <paramref name="fileName"/>.</returns>
public ZipEntry MakeFileEntry(string fileName)
{
return MakeFileEntry(fileName, null, true);
}
/// <summary>
/// Make a new <see cref="ZipEntry"/> for a file.
/// </summary>
/// <param name="fileName">The name of the file to create a new entry for.</param>
/// <param name="useFileSystem">If true entry detail is retrieved from the file system if the file exists.</param>
/// <returns>Returns a new <see cref="ZipEntry"/> based on the <paramref name="fileName"/>.</returns>
public ZipEntry MakeFileEntry(string fileName, bool useFileSystem)
{
return MakeFileEntry(fileName, null, useFileSystem);
}
/// <summary>
/// Make a new <see cref="ZipEntry"/> from a name.
/// </summary>
/// <param name="fileName">The name of the file to create a new entry for.</param>
/// <param name="entryName">An alternative name to be used for the new entry. Null if not applicable.</param>
/// <param name="useFileSystem">If true entry detail is retrieved from the file system if the file exists.</param>
/// <returns>Returns a new <see cref="ZipEntry"/> based on the <paramref name="fileName"/>.</returns>
public ZipEntry MakeFileEntry(string fileName, string entryName, bool useFileSystem)
{
var result = new ZipEntry(nameTransform_.TransformFile(!string.IsNullOrEmpty(entryName) ? entryName : fileName));
result.IsUnicodeText = isUnicodeText_;
int externalAttributes = 0;
bool useAttributes = (setAttributes_ != 0);
FileInfo fi = null;
if (useFileSystem)
{
fi = new FileInfo(fileName);
}
if ((fi != null) && fi.Exists)
{
switch (timeSetting_)
{
case TimeSetting.CreateTime:
result.DateTime = fi.CreationTime;
break;
case TimeSetting.CreateTimeUtc:
result.DateTime = fi.CreationTimeUtc;
break;
case TimeSetting.LastAccessTime:
result.DateTime = fi.LastAccessTime;
break;
case TimeSetting.LastAccessTimeUtc:
result.DateTime = fi.LastAccessTimeUtc;
break;
case TimeSetting.LastWriteTime:
result.DateTime = fi.LastWriteTime;
break;
case TimeSetting.LastWriteTimeUtc:
result.DateTime = fi.LastWriteTimeUtc;
break;
case TimeSetting.Fixed:
result.DateTime = fixedDateTime_;
break;
default:
throw new ZipException("Unhandled time setting in MakeFileEntry");
}
result.Size = fi.Length;
useAttributes = true;
externalAttributes = ((int)fi.Attributes & getAttributes_);
}
else
{
if (timeSetting_ == TimeSetting.Fixed)
{
result.DateTime = fixedDateTime_;
}
}
if (useAttributes)
{
externalAttributes |= setAttributes_;
result.ExternalFileAttributes = externalAttributes;
}
return result;
}
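// Illustrative usage sketch (not part of the original source), assuming a file
// "data/report.txt" exists on disk:
//
//   var factory = new ZipEntryFactory(ZipEntryFactory.TimeSetting.LastWriteTimeUtc);
//   ZipEntry entry = factory.MakeFileEntry("data/report.txt", "docs/report.txt", true);
//   // entry.Name comes from the transformed entryName, Size and DateTime are read
//   // from the file, and external attributes are masked by GetAttributes.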
/// <summary>
/// Make a new <see cref="ZipEntry"></see> for a directory.
/// </summary>
/// <param name="directoryName">The raw untransformed name for the new directory</param>
/// <returns>Returns a new <see cref="ZipEntry"></see> representing a directory.</returns>
public ZipEntry MakeDirectoryEntry(string directoryName)
{
return MakeDirectoryEntry(directoryName, true);
}
/// <summary>
/// Make a new <see cref="ZipEntry"></see> for a directory.
/// </summary>
/// <param name="directoryName">The raw untransformed name for the new directory</param>
/// <param name="useFileSystem">If true entry detail is retrieved from the file system if the file exists.</param>
/// <returns>Returns a new <see cref="ZipEntry"></see> representing a directory.</returns>
public ZipEntry MakeDirectoryEntry(string directoryName, bool useFileSystem)
{
var result = new ZipEntry(nameTransform_.TransformDirectory(directoryName));
result.IsUnicodeText = isUnicodeText_;
result.Size = 0;
int externalAttributes = 0;
DirectoryInfo di = null;
if (useFileSystem)
{
di = new DirectoryInfo(directoryName);
}
if ((di != null) && di.Exists)
{
switch (timeSetting_)
{
case TimeSetting.CreateTime:
result.DateTime = di.CreationTime;
break;
case TimeSetting.CreateTimeUtc:
result.DateTime = di.CreationTimeUtc;
break;
case TimeSetting.LastAccessTime:
result.DateTime = di.LastAccessTime;
break;
case TimeSetting.LastAccessTimeUtc:
result.DateTime = di.LastAccessTimeUtc;
break;
case TimeSetting.LastWriteTime:
result.DateTime = di.LastWriteTime;
break;
case TimeSetting.LastWriteTimeUtc:
result.DateTime = di.LastWriteTimeUtc;
break;
case TimeSetting.Fixed:
result.DateTime = fixedDateTime_;
break;
default:
throw new ZipException("Unhandled time setting in MakeDirectoryEntry");
}
externalAttributes = ((int)di.Attributes & getAttributes_);
}
else
{
if (timeSetting_ == TimeSetting.Fixed)
{
result.DateTime = fixedDateTime_;
}
}
// Always set directory attribute on.
externalAttributes |= (setAttributes_ | 16);
result.ExternalFileAttributes = externalAttributes;
return result;
}
#endregion IEntryFactory Members
#region Instance Fields
private INameTransform nameTransform_;
private DateTime fixedDateTime_ = DateTime.Now;
private TimeSetting timeSetting_ = TimeSetting.LastWriteTime;
private bool isUnicodeText_;
private int getAttributes_ = -1;
private int setAttributes_;
#endregion Instance Fields
}
}

View File

@@ -0,0 +1,54 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// ZipException represents exceptions specific to Zip classes and code.
/// </summary>
[Serializable]
public class ZipException : SharpZipBaseException
{
/// <summary>
/// Initialise a new instance of <see cref="ZipException" />.
/// </summary>
public ZipException()
{
}
/// <summary>
/// Initialise a new instance of <see cref="ZipException" /> with its message string.
/// </summary>
/// <param name="message">A <see cref="string"/> that describes the error.</param>
public ZipException(string message)
: base(message)
{
}
/// <summary>
/// Initialise a new instance of <see cref="ZipException" />.
/// </summary>
/// <param name="message">A <see cref="string"/> that describes the error.</param>
/// <param name="innerException">The <see cref="Exception"/> that caused this exception.</param>
public ZipException(string message, Exception innerException)
: base(message, innerException)
{
}
/// <summary>
/// Initializes a new instance of the ZipException class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected ZipException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

View File

@@ -0,0 +1,980 @@
using System;
using System.IO;
using ICSharpCode.SharpZipLib.Core;
namespace ICSharpCode.SharpZipLib.Zip
{
// TODO: Sort out whether tagged data is useful and what a good implementation might look like.
// It's just a sketch of an idea at the moment.
/// <summary>
/// ExtraData tagged value interface.
/// </summary>
public interface ITaggedData
{
/// <summary>
/// Get the ID for this tagged data value.
/// </summary>
short TagID { get; }
/// <summary>
/// Set the contents of this instance from the data passed.
/// </summary>
/// <param name="data">The data to extract contents from.</param>
/// <param name="offset">The offset to begin extracting data from.</param>
/// <param name="count">The number of bytes to extract.</param>
void SetData(byte[] data, int offset, int count);
/// <summary>
/// Get the data representing this instance.
/// </summary>
/// <returns>Returns the data for this instance.</returns>
byte[] GetData();
}
/// <summary>
/// A raw binary tagged value
/// </summary>
public class RawTaggedData : ITaggedData
{
/// <summary>
/// Initialise a new instance.
/// </summary>
/// <param name="tag">The tag ID.</param>
public RawTaggedData(short tag)
{
_tag = tag;
}
#region ITaggedData Members
/// <summary>
/// Get the ID for this tagged data value.
/// </summary>
public short TagID
{
get { return _tag; }
set { _tag = value; }
}
/// <summary>
/// Set the data from the raw values provided.
/// </summary>
/// <param name="data">The raw data to extract values from.</param>
/// <param name="offset">The index to start extracting values from.</param>
/// <param name="count">The number of bytes available.</param>
public void SetData(byte[] data, int offset, int count)
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
}
_data = new byte[count];
Array.Copy(data, offset, _data, 0, count);
}
/// <summary>
/// Get the binary data representing this instance.
/// </summary>
/// <returns>The raw binary data representing this instance.</returns>
public byte[] GetData()
{
return _data;
}
#endregion ITaggedData Members
/// <summary>
/// Get /set the binary data representing this instance.
/// </summary>
/// <returns>The raw binary data representing this instance.</returns>
public byte[] Data
{
get { return _data; }
set { _data = value; }
}
#region Instance Fields
/// <summary>
/// The tag ID for this instance.
/// </summary>
private short _tag;
private byte[] _data;
#endregion Instance Fields
}
/// <summary>
/// Class representing extended unix date time values.
/// </summary>
public class ExtendedUnixData : ITaggedData
{
/// <summary>
/// Flags indicate which values are included in this instance.
/// </summary>
[Flags]
public enum Flags : byte
{
/// <summary>
/// The modification time is included
/// </summary>
ModificationTime = 0x01,
/// <summary>
/// The access time is included
/// </summary>
AccessTime = 0x02,
/// <summary>
/// The create time is included.
/// </summary>
CreateTime = 0x04,
}
#region ITaggedData Members
/// <summary>
/// Get the ID
/// </summary>
public short TagID
{
get { return 0x5455; }
}
/// <summary>
/// Set the data from the raw values provided.
/// </summary>
/// <param name="data">The raw data to extract values from.</param>
/// <param name="index">The index to start extracting values from.</param>
/// <param name="count">The number of bytes available.</param>
public void SetData(byte[] data, int index, int count)
{
using (MemoryStream ms = new MemoryStream(data, index, count, false))
using (ZipHelperStream helperStream = new ZipHelperStream(ms))
{
// bit 0 if set, modification time is present
// bit 1 if set, access time is present
// bit 2 if set, creation time is present
_flags = (Flags)helperStream.ReadByte();
if (((_flags & Flags.ModificationTime) != 0))
{
int iTime = helperStream.ReadLEInt();
_modificationTime = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc) +
new TimeSpan(0, 0, 0, iTime, 0);
// Central-header version is truncated after modification time
if (count <= 5) return;
}
if ((_flags & Flags.AccessTime) != 0)
{
int iTime = helperStream.ReadLEInt();
_lastAccessTime = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc) +
new TimeSpan(0, 0, 0, iTime, 0);
}
if ((_flags & Flags.CreateTime) != 0)
{
int iTime = helperStream.ReadLEInt();
_createTime = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc) +
new TimeSpan(0, 0, 0, iTime, 0);
}
}
}
/// <summary>
/// Get the binary data representing this instance.
/// </summary>
/// <returns>The raw binary data representing this instance.</returns>
public byte[] GetData()
{
using (MemoryStream ms = new MemoryStream())
using (ZipHelperStream helperStream = new ZipHelperStream(ms))
{
helperStream.IsStreamOwner = false;
helperStream.WriteByte((byte)_flags); // Flags
if ((_flags & Flags.ModificationTime) != 0)
{
TimeSpan span = _modificationTime - new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc);
var seconds = (int)span.TotalSeconds;
helperStream.WriteLEInt(seconds);
}
if ((_flags & Flags.AccessTime) != 0)
{
TimeSpan span = _lastAccessTime - new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc);
var seconds = (int)span.TotalSeconds;
helperStream.WriteLEInt(seconds);
}
if ((_flags & Flags.CreateTime) != 0)
{
TimeSpan span = _createTime - new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc);
var seconds = (int)span.TotalSeconds;
helperStream.WriteLEInt(seconds);
}
return ms.ToArray();
}
}
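// Worked example (added for clarity, not part of the original source):
// times are stored as whole seconds since the Unix epoch (1970-01-01 UTC),
// so a ModificationTime of 2001-09-09 01:46:40 UTC is written as the
// little-endian 32-bit value 1,000,000,000.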
#endregion ITaggedData Members
/// <summary>
/// Test a <see cref="DateTime"> value to see if it is valid and can be represented here.</see>
/// </summary>
/// <param name="value">The <see cref="DateTime">value</see> to test.</param>
/// <returns>Returns true if the value is valid and can be represented; false if not.</returns>
/// <remarks>The standard Unix time is a signed integer data type, directly encoding the Unix time number,
/// which is the number of seconds since 1970-01-01.
/// Being 32 bits means the values here cover a range of about 136 years.
/// The minimum representable time is 1901-12-13 20:45:52,
/// and the maximum representable time is 2038-01-19 03:14:07.
/// </remarks>
public static bool IsValidValue(DateTime value)
{
return ((value >= new DateTime(1901, 12, 13, 20, 45, 52)) &&
(value <= new DateTime(2038, 1, 19, 03, 14, 07)));
}
/// <summary>
/// Get /set the Modification Time
/// </summary>
/// <exception cref="ArgumentOutOfRangeException"></exception>
/// <seealso cref="IsValidValue"></seealso>
public DateTime ModificationTime
{
get { return _modificationTime; }
set
{
if (!IsValidValue(value))
{
throw new ArgumentOutOfRangeException(nameof(value));
}
_flags |= Flags.ModificationTime;
_modificationTime = value;
}
}
/// <summary>
/// Get / set the Access Time
/// </summary>
/// <exception cref="ArgumentOutOfRangeException"></exception>
/// <seealso cref="IsValidValue"></seealso>
public DateTime AccessTime
{
get { return _lastAccessTime; }
set
{
if (!IsValidValue(value))
{
throw new ArgumentOutOfRangeException(nameof(value));
}
_flags |= Flags.AccessTime;
_lastAccessTime = value;
}
}
/// <summary>
/// Get / Set the Create Time
/// </summary>
/// <exception cref="ArgumentOutOfRangeException"></exception>
/// <seealso cref="IsValidValue"></seealso>
public DateTime CreateTime
{
get { return _createTime; }
set
{
if (!IsValidValue(value))
{
throw new ArgumentOutOfRangeException(nameof(value));
}
_flags |= Flags.CreateTime;
_createTime = value;
}
}
/// <summary>
/// Get/set the <see cref="Flags">values</see> to include.
/// </summary>
public Flags Include
{
get { return _flags; }
set { _flags = value; }
}
#region Instance Fields
private Flags _flags;
private DateTime _modificationTime = new DateTime(1970, 1, 1);
private DateTime _lastAccessTime = new DateTime(1970, 1, 1);
private DateTime _createTime = new DateTime(1970, 1, 1);
#endregion Instance Fields
}
/// <summary>
/// Class handling NT date time values.
/// </summary>
public class NTTaggedData : ITaggedData
{
/// <summary>
/// Get the ID for this tagged data value.
/// </summary>
public short TagID
{
get { return 10; }
}
/// <summary>
/// Set the data from the raw values provided.
/// </summary>
/// <param name="data">The raw data to extract values from.</param>
/// <param name="index">The index to start extracting values from.</param>
/// <param name="count">The number of bytes available.</param>
public void SetData(byte[] data, int index, int count)
{
using (MemoryStream ms = new MemoryStream(data, index, count, false))
using (ZipHelperStream helperStream = new ZipHelperStream(ms))
{
helperStream.ReadLEInt(); // Reserved
while (helperStream.Position < helperStream.Length)
{
int ntfsTag = helperStream.ReadLEShort();
int ntfsLength = helperStream.ReadLEShort();
if (ntfsTag == 1)
{
if (ntfsLength >= 24)
{
long lastModificationTicks = helperStream.ReadLELong();
_lastModificationTime = DateTime.FromFileTimeUtc(lastModificationTicks);
long lastAccessTicks = helperStream.ReadLELong();
_lastAccessTime = DateTime.FromFileTimeUtc(lastAccessTicks);
long createTimeTicks = helperStream.ReadLELong();
_createTime = DateTime.FromFileTimeUtc(createTimeTicks);
}
break;
}
else
{
// An unknown NTFS tag so simply skip it.
helperStream.Seek(ntfsLength, SeekOrigin.Current);
}
}
}
}
/// <summary>
/// Get the binary data representing this instance.
/// </summary>
/// <returns>The raw binary data representing this instance.</returns>
public byte[] GetData()
{
using (MemoryStream ms = new MemoryStream())
using (ZipHelperStream helperStream = new ZipHelperStream(ms))
{
helperStream.IsStreamOwner = false;
helperStream.WriteLEInt(0); // Reserved
helperStream.WriteLEShort(1); // Tag
helperStream.WriteLEShort(24); // Length = 3 x 8.
helperStream.WriteLELong(_lastModificationTime.ToFileTimeUtc());
helperStream.WriteLELong(_lastAccessTime.ToFileTimeUtc());
helperStream.WriteLELong(_createTime.ToFileTimeUtc());
return ms.ToArray();
}
}
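// Worked example (added for clarity, not part of the original source):
// NTFS times are 64-bit counts of 100-nanosecond intervals since
// 1601-01-01 UTC, so exactly one day after that epoch is written as
// 24 * 60 * 60 * 10,000,000 = 864,000,000,000 ticks.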
/// <summary>
/// Test a <see cref="DateTime"> value to see if it is valid and can be represented here.</see>
/// </summary>
/// <param name="value">The <see cref="DateTime">value</see> to test.</param>
/// <returns>Returns true if the value is valid and can be represented; false if not.</returns>
/// <remarks>
/// NTFS filetimes are 64-bit unsigned integers, stored in Intel
/// (least significant byte first) byte order. They determine the
/// number of 1.0E-07 seconds (1/10th microseconds!) past WinNT "epoch",
/// which is "01-Jan-1601 00:00:00 UTC". 28 May 60056 is the upper limit
/// </remarks>
public static bool IsValidValue(DateTime value)
{
bool result = true;
try
{
value.ToFileTimeUtc();
}
catch
{
result = false;
}
return result;
}
/// <summary>
/// Get/set the <see cref="DateTime">last modification time</see>.
/// </summary>
public DateTime LastModificationTime
{
get { return _lastModificationTime; }
set
{
if (!IsValidValue(value))
{
throw new ArgumentOutOfRangeException(nameof(value));
}
_lastModificationTime = value;
}
}
/// <summary>
/// Get /set the <see cref="DateTime">create time</see>
/// </summary>
public DateTime CreateTime
{
get { return _createTime; }
set
{
if (!IsValidValue(value))
{
throw new ArgumentOutOfRangeException(nameof(value));
}
_createTime = value;
}
}
/// <summary>
/// Get /set the <see cref="DateTime">last access time</see>.
/// </summary>
public DateTime LastAccessTime
{
get { return _lastAccessTime; }
set
{
if (!IsValidValue(value))
{
throw new ArgumentOutOfRangeException(nameof(value));
}
_lastAccessTime = value;
}
}
#region Instance Fields
private DateTime _lastAccessTime = DateTime.FromFileTimeUtc(0);
private DateTime _lastModificationTime = DateTime.FromFileTimeUtc(0);
private DateTime _createTime = DateTime.FromFileTimeUtc(0);
#endregion Instance Fields
}
/// <summary>
/// A factory that creates <see cref="ITaggedData">tagged data</see> instances.
/// </summary>
internal interface ITaggedDataFactory
{
/// <summary>
/// Get data for a specific tag value.
/// </summary>
/// <param name="tag">The tag ID to find.</param>
/// <param name="data">The data to search.</param>
/// <param name="offset">The offset to begin extracting data from.</param>
/// <param name="count">The number of bytes to extract.</param>
/// <returns>The located <see cref="ITaggedData">value found</see>, or null if not found.</returns>
ITaggedData Create(short tag, byte[] data, int offset, int count);
}
///
/// <summary>
/// A class to handle the extra data field for Zip entries
/// </summary>
/// <remarks>
/// Extra data contains 0 or more values each prefixed by a header tag and length.
/// They contain zero or more bytes of actual data.
/// The data is held internally using a copy on write strategy. This is more efficient, but
/// it means that extra data created by passing in a byte array can have its values modified
/// by the caller in some circumstances.
/// </remarks>
sealed public class ZipExtraData : IDisposable
{
#region Constructors
/// <summary>
/// Initialise a default instance.
/// </summary>
public ZipExtraData()
{
Clear();
}
/// <summary>
/// Initialise with known extra data.
/// </summary>
/// <param name="data">The extra data.</param>
public ZipExtraData(byte[] data)
{
if (data == null)
{
_data = Empty.Array<byte>();
}
else
{
_data = data;
}
}
#endregion Constructors
/// <summary>
/// Get the raw extra data value
/// </summary>
/// <returns>Returns the raw byte[] extra data this instance represents.</returns>
public byte[] GetEntryData()
{
if (Length > ushort.MaxValue)
{
throw new ZipException("Data exceeds maximum length");
}
return (byte[])_data.Clone();
}
/// <summary>
/// Clear the stored data.
/// </summary>
public void Clear()
{
if ((_data == null) || (_data.Length != 0))
{
_data = Empty.Array<byte>();
}
}
/// <summary>
/// Gets the current extra data length.
/// </summary>
public int Length
{
get { return _data.Length; }
}
/// <summary>
/// Get a read-only <see cref="Stream"/> for the associated tag.
/// </summary>
/// <param name="tag">The tag to locate data for.</param>
/// <returns>Returns a <see cref="Stream"/> containing tag data or null if no tag was found.</returns>
public Stream GetStreamForTag(int tag)
{
Stream result = null;
if (Find(tag))
{
result = new MemoryStream(_data, _index, _readValueLength, false);
}
return result;
}
/// <summary>
/// Get the <see cref="ITaggedData">tagged data</see> for a tag.
/// </summary>
/// <typeparam name="T">The tag to search for.</typeparam>
/// <returns>Returns a <see cref="ITaggedData">tagged value</see> or null if none found.</returns>
public T GetData<T>()
where T : class, ITaggedData, new()
{
T result = new T();
if (Find(result.TagID))
{
result.SetData(_data, _readValueStart, _readValueLength);
return result;
}
else return null;
}
/// <summary>
/// Get the length of the last value found by <see cref="Find"/>
/// </summary>
/// <remarks>This is only valid if <see cref="Find"/> has previously returned true.</remarks>
public int ValueLength
{
get { return _readValueLength; }
}
/// <summary>
/// Get the index for the current read value.
/// </summary>
/// <remarks>This is only valid if <see cref="Find"/> has previously returned true.
/// Initially the result will be the index of the first byte of actual data. The value is updated after calls to
/// <see cref="ReadInt"/>, <see cref="ReadShort"/> and <see cref="ReadLong"/>. </remarks>
public int CurrentReadIndex
{
get { return _index; }
}
/// <summary>
/// Get the number of bytes remaining to be read for the current value.
/// </summary>
public int UnreadCount
{
get
{
if ((_readValueStart > _data.Length) ||
(_readValueStart < 4))
{
throw new ZipException("Find must be called before calling a Read method");
}
return _readValueStart + _readValueLength - _index;
}
}
/// <summary>
/// Find an extra data value
/// </summary>
/// <param name="headerID">The identifier for the value to find.</param>
/// <returns>Returns true if the value was found; false otherwise.</returns>
public bool Find(int headerID)
{
_readValueStart = _data.Length;
_readValueLength = 0;
_index = 0;
int localLength = _readValueStart;
int localTag = headerID - 1;
// Trailing bytes that can't make up an entry (as there aren't enough
// bytes for a tag and length) are ignored!
while ((localTag != headerID) && (_index < _data.Length - 3))
{
localTag = ReadShortInternal();
localLength = ReadShortInternal();
if (localTag != headerID)
{
_index += localLength;
}
}
bool result = (localTag == headerID) && ((_index + localLength) <= _data.Length);
if (result)
{
_readValueStart = _index;
_readValueLength = localLength;
}
return result;
}
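// Illustrative usage sketch (not part of the original source); reading a Zip64
// extended information field (header ID 0x0001) that holds two 8-byte values,
// assuming "extra" is the raw extra-data byte[] of an entry:
//
//   using (var ed = new ZipExtraData(extra))
//   {
//       if (ed.Find(1))
//       {
//           long size = ed.ReadLong();            // uncompressed size
//           long compressedSize = ed.ReadLong();  // compressed size
//       }
//   }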
/// <summary>
/// Add a new entry to extra data.
/// </summary>
/// <param name="taggedData">The <see cref="ITaggedData"/> value to add.</param>
public void AddEntry(ITaggedData taggedData)
{
if (taggedData == null)
{
throw new ArgumentNullException(nameof(taggedData));
}
AddEntry(taggedData.TagID, taggedData.GetData());
}
/// <summary>
/// Add a new entry to extra data
/// </summary>
/// <param name="headerID">The ID for this entry.</param>
/// <param name="fieldData">The data to add.</param>
/// <remarks>If the ID already exists its contents are replaced.</remarks>
public void AddEntry(int headerID, byte[] fieldData)
{
if ((headerID > ushort.MaxValue) || (headerID < 0))
{
throw new ArgumentOutOfRangeException(nameof(headerID));
}
int addLength = (fieldData == null) ? 0 : fieldData.Length;
if (addLength > ushort.MaxValue)
{
throw new ArgumentOutOfRangeException(nameof(fieldData), "exceeds maximum length");
}
// Test for new length before adjusting data.
int newLength = _data.Length + addLength + 4;
if (Find(headerID))
{
newLength -= (ValueLength + 4);
}
if (newLength > ushort.MaxValue)
{
throw new ZipException("Data exceeds maximum length");
}
Delete(headerID);
byte[] newData = new byte[newLength];
_data.CopyTo(newData, 0);
int index = _data.Length;
_data = newData;
SetShort(ref index, headerID);
SetShort(ref index, addLength);
if (fieldData != null)
{
fieldData.CopyTo(newData, index);
}
}
/// <summary>
/// Start adding a new entry.
/// </summary>
/// <remarks>Add data using <see cref="AddData(byte[])"/>, <see cref="AddLeShort"/>, <see cref="AddLeInt"/>, or <see cref="AddLeLong"/>.
/// The new entry is completed and actually added by calling <see cref="AddNewEntry"/></remarks>
/// <seealso cref="AddEntry(ITaggedData)"/>
public void StartNewEntry()
{
_newEntry = new MemoryStream();
}
/// <summary>
/// Add entry data added since <see cref="StartNewEntry"/> using the ID passed.
/// </summary>
/// <param name="headerID">The identifier to use for this entry.</param>
public void AddNewEntry(int headerID)
{
byte[] newData = _newEntry.ToArray();
_newEntry = null;
AddEntry(headerID, newData);
}
/// <summary>
/// Add a byte of data to the pending new entry.
/// </summary>
/// <param name="data">The byte to add.</param>
/// <seealso cref="StartNewEntry"/>
public void AddData(byte data)
{
_newEntry.WriteByte(data);
}
/// <summary>
/// Add data to a pending new entry.
/// </summary>
/// <param name="data">The data to add.</param>
/// <seealso cref="StartNewEntry"/>
public void AddData(byte[] data)
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
}
_newEntry.Write(data, 0, data.Length);
}
/// <summary>
/// Add a short value in little endian order to the pending new entry.
/// </summary>
/// <param name="toAdd">The data to add.</param>
/// <seealso cref="StartNewEntry"/>
public void AddLeShort(int toAdd)
{
unchecked
{
_newEntry.WriteByte((byte)toAdd);
_newEntry.WriteByte((byte)(toAdd >> 8));
}
}
/// <summary>
/// Add an integer value in little endian order to the pending new entry.
/// </summary>
/// <param name="toAdd">The data to add.</param>
/// <seealso cref="StartNewEntry"/>
public void AddLeInt(int toAdd)
{
unchecked
{
AddLeShort((short)toAdd);
AddLeShort((short)(toAdd >> 16));
}
}
/// <summary>
/// Add a long value in little endian order to the pending new entry.
/// </summary>
/// <param name="toAdd">The data to add.</param>
/// <seealso cref="StartNewEntry"/>
public void AddLeLong(long toAdd)
{
unchecked
{
AddLeInt((int)(toAdd & 0xffffffff));
AddLeInt((int)(toAdd >> 32));
}
}
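// Illustrative usage sketch (not part of the original source); building a Zip64
// style extra field by hand with the pending-entry helpers ("size" and
// "compressedSize" are hypothetical local variables):
//
//   var ed = new ZipExtraData();
//   ed.StartNewEntry();
//   ed.AddLeLong(size);            // uncompressed size
//   ed.AddLeLong(compressedSize);  // compressed size
//   ed.AddNewEntry(1);             // commit under header ID 0x0001
//   byte[] extra = ed.GetEntryData();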
/// <summary>
/// Delete an extra data field.
/// </summary>
/// <param name="headerID">The identifier of the field to delete.</param>
/// <returns>Returns true if the field was found and deleted.</returns>
public bool Delete(int headerID)
{
bool result = false;
if (Find(headerID))
{
result = true;
int trueStart = _readValueStart - 4;
byte[] newData = new byte[_data.Length - (ValueLength + 4)];
Array.Copy(_data, 0, newData, 0, trueStart);
int trueEnd = trueStart + ValueLength + 4;
Array.Copy(_data, trueEnd, newData, trueStart, _data.Length - trueEnd);
_data = newData;
}
return result;
}
#region Reading Support
/// <summary>
/// Read a long in little endian form from the last <see cref="Find">found</see> data value
/// </summary>
/// <returns>Returns the long value read.</returns>
public long ReadLong()
{
ReadCheck(8);
return (ReadInt() & 0xffffffff) | (((long)ReadInt()) << 32);
}
/// <summary>
/// Read an integer in little endian form from the last <see cref="Find">found</see> data value.
/// </summary>
/// <returns>Returns the integer read.</returns>
public int ReadInt()
{
ReadCheck(4);
int result = _data[_index] + (_data[_index + 1] << 8) +
(_data[_index + 2] << 16) + (_data[_index + 3] << 24);
_index += 4;
return result;
}
/// <summary>
/// Read a short value in little endian form from the last <see cref="Find">found</see> data value.
/// </summary>
/// <returns>Returns the short value read.</returns>
public int ReadShort()
{
ReadCheck(2);
int result = _data[_index] + (_data[_index + 1] << 8);
_index += 2;
return result;
}
/// <summary>
/// Read a byte from the extra data.
/// </summary>
/// <returns>The byte value read or -1 if the end of data has been reached.</returns>
public int ReadByte()
{
int result = -1;
if ((_index < _data.Length) && (_readValueStart + _readValueLength > _index))
{
result = _data[_index];
_index += 1;
}
return result;
}
/// <summary>
/// Skip data during reading.
/// </summary>
/// <param name="amount">The number of bytes to skip.</param>
public void Skip(int amount)
{
ReadCheck(amount);
_index += amount;
}
private void ReadCheck(int length)
{
if ((_readValueStart > _data.Length) ||
(_readValueStart < 4))
{
throw new ZipException("Find must be called before calling a Read method");
}
if (_index > _readValueStart + _readValueLength - length)
{
throw new ZipException("End of extra data");
}
if (_index + length < 4)
{
throw new ZipException("Cannot read before start of tag");
}
}
/// <summary>
/// Internal form of <see cref="ReadShort"/> that reads data at any location.
/// </summary>
/// <returns>Returns the short value read.</returns>
private int ReadShortInternal()
{
if (_index > _data.Length - 2)
{
throw new ZipException("End of extra data");
}
int result = _data[_index] + (_data[_index + 1] << 8);
_index += 2;
return result;
}
private void SetShort(ref int index, int source)
{
_data[index] = (byte)source;
_data[index + 1] = (byte)(source >> 8);
index += 2;
}
#endregion Reading Support
#region IDisposable Members
/// <summary>
/// Dispose of this instance.
/// </summary>
public void Dispose()
{
if (_newEntry != null)
{
_newEntry.Dispose();
}
}
#endregion IDisposable Members
#region Instance Fields
private int _index;
private int _readValueStart;
private int _readValueLength;
private MemoryStream _newEntry;
private byte[] _data;
#endregion Instance Fields
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,629 @@
using System;
using System.IO;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// Holds data pertinent to a data descriptor.
/// </summary>
public class DescriptorData
{
/// <summary>
/// Get /set the compressed size of data.
/// </summary>
public long CompressedSize
{
get { return compressedSize; }
set { compressedSize = value; }
}
/// <summary>
/// Get / set the uncompressed size of data
/// </summary>
public long Size
{
get { return size; }
set { size = value; }
}
/// <summary>
/// Get /set the crc value.
/// </summary>
public long Crc
{
get { return crc; }
set { crc = (value & 0xffffffff); }
}
#region Instance Fields
private long size;
private long compressedSize;
private long crc;
#endregion Instance Fields
}
internal class EntryPatchData
{
public long SizePatchOffset
{
get { return sizePatchOffset_; }
set { sizePatchOffset_ = value; }
}
public long CrcPatchOffset
{
get { return crcPatchOffset_; }
set { crcPatchOffset_ = value; }
}
#region Instance Fields
private long sizePatchOffset_;
private long crcPatchOffset_;
#endregion Instance Fields
}
/// <summary>
/// This class assists with writing/reading from Zip files.
/// </summary>
internal class ZipHelperStream : Stream
{
#region Constructors
/// <summary>
/// Initialise an instance of this class.
/// </summary>
/// <param name="name">The name of the file to open.</param>
public ZipHelperStream(string name)
{
stream_ = new FileStream(name, FileMode.Open, FileAccess.ReadWrite);
isOwner_ = true;
}
/// <summary>
/// Initialise a new instance of <see cref="ZipHelperStream"/>.
/// </summary>
/// <param name="stream">The stream to use.</param>
public ZipHelperStream(Stream stream)
{
stream_ = stream;
}
#endregion Constructors
/// <summary>
/// Get / set a value indicating whether the underlying stream is owned or not.
/// </summary>
/// <remarks>If the stream is owned it is closed when this instance is closed.</remarks>
public bool IsStreamOwner
{
get { return isOwner_; }
set { isOwner_ = value; }
}
#region Base Stream Methods
public override bool CanRead
{
get { return stream_.CanRead; }
}
public override bool CanSeek
{
get { return stream_.CanSeek; }
}
public override bool CanTimeout
{
get { return stream_.CanTimeout; }
}
public override long Length
{
get { return stream_.Length; }
}
public override long Position
{
get { return stream_.Position; }
set { stream_.Position = value; }
}
public override bool CanWrite
{
get { return stream_.CanWrite; }
}
public override void Flush()
{
stream_.Flush();
}
public override long Seek(long offset, SeekOrigin origin)
{
return stream_.Seek(offset, origin);
}
public override void SetLength(long value)
{
stream_.SetLength(value);
}
public override int Read(byte[] buffer, int offset, int count)
{
return stream_.Read(buffer, offset, count);
}
public override void Write(byte[] buffer, int offset, int count)
{
stream_.Write(buffer, offset, count);
}
/// <summary>
/// Close the stream.
/// </summary>
/// <remarks>
/// The underlying stream is closed only if <see cref="IsStreamOwner"/> is true.
/// </remarks>
protected override void Dispose(bool disposing)
{
Stream toClose = stream_;
stream_ = null;
if (isOwner_ && (toClose != null))
{
isOwner_ = false;
toClose.Dispose();
}
}
#endregion Base Stream Methods
// Write the local file header
// TODO: ZipHelperStream.WriteLocalHeader is not yet used and needs checking for ZipFile and ZipOutputStream usage
private void WriteLocalHeader(ZipEntry entry, EntryPatchData patchData)
{
CompressionMethod method = entry.CompressionMethod;
bool headerInfoAvailable = true; // How to get this?
bool patchEntryHeader = false;
WriteLEInt(ZipConstants.LocalHeaderSignature);
WriteLEShort(entry.Version);
WriteLEShort(entry.Flags);
WriteLEShort((byte)method);
WriteLEInt((int)entry.DosTime);
if (headerInfoAvailable == true)
{
WriteLEInt((int)entry.Crc);
if (entry.LocalHeaderRequiresZip64)
{
WriteLEInt(-1);
WriteLEInt(-1);
}
else
{
WriteLEInt(entry.IsCrypted ? (int)entry.CompressedSize + ZipConstants.CryptoHeaderSize : (int)entry.CompressedSize);
WriteLEInt((int)entry.Size);
}
}
else
{
if (patchData != null)
{
patchData.CrcPatchOffset = stream_.Position;
}
WriteLEInt(0); // Crc
if (patchData != null)
{
patchData.SizePatchOffset = stream_.Position;
}
// For local header both sizes appear in Zip64 Extended Information
if (entry.LocalHeaderRequiresZip64 && patchEntryHeader)
{
WriteLEInt(-1);
WriteLEInt(-1);
}
else
{
WriteLEInt(0); // Compressed size
WriteLEInt(0); // Uncompressed size
}
}
byte[] name = ZipStrings.ConvertToArray(entry.Flags, entry.Name);
if (name.Length > 0xFFFF)
{
throw new ZipException("Entry name too long.");
}
var ed = new ZipExtraData(entry.ExtraData);
if (entry.LocalHeaderRequiresZip64 && (headerInfoAvailable || patchEntryHeader))
{
ed.StartNewEntry();
if (headerInfoAvailable)
{
ed.AddLeLong(entry.Size);
ed.AddLeLong(entry.CompressedSize);
}
else
{
ed.AddLeLong(-1);
ed.AddLeLong(-1);
}
ed.AddNewEntry(1);
if (!ed.Find(1))
{
throw new ZipException("Internal error cant find extra data");
}
if (patchData != null)
{
patchData.SizePatchOffset = ed.CurrentReadIndex;
}
}
else
{
ed.Delete(1);
}
byte[] extra = ed.GetEntryData();
WriteLEShort(name.Length);
WriteLEShort(extra.Length);
if (name.Length > 0)
{
stream_.Write(name, 0, name.Length);
}
if (entry.LocalHeaderRequiresZip64 && patchEntryHeader)
{
patchData.SizePatchOffset += stream_.Position;
}
if (extra.Length > 0)
{
stream_.Write(extra, 0, extra.Length);
}
}
/// <summary>
/// Locates a block with the desired <paramref name="signature"/>.
/// </summary>
/// <param name="signature">The signature to find.</param>
/// <param name="endLocation">Location, marking the end of block.</param>
/// <param name="minimumBlockSize">Minimum size of the block.</param>
/// <param name="maximumVariableData">The maximum variable data.</param>
/// <returns>Returns the offset of the first byte after the signature; -1 if not found</returns>
public long LocateBlockWithSignature(int signature, long endLocation, int minimumBlockSize, int maximumVariableData)
{
long pos = endLocation - minimumBlockSize;
if (pos < 0)
{
return -1;
}
long giveUpMarker = Math.Max(pos - maximumVariableData, 0);
// TODO: This loop could be optimised for speed.
do
{
if (pos < giveUpMarker)
{
return -1;
}
Seek(pos--, SeekOrigin.Begin);
} while (ReadLEInt() != signature);
return Position;
}
/// <summary>
/// Write Zip64 end of central directory records (File header and locator).
/// </summary>
/// <param name="noOfEntries">The number of entries in the central directory.</param>
/// <param name="sizeEntries">The size of entries in the central directory.</param>
/// <param name="centralDirOffset">The offset of the central directory.</param>
public void WriteZip64EndOfCentralDirectory(long noOfEntries, long sizeEntries, long centralDirOffset)
{
long centralSignatureOffset = centralDirOffset + sizeEntries;
WriteLEInt(ZipConstants.Zip64CentralFileHeaderSignature);
WriteLELong(44); // Size of this record (total size of remaining fields in header or full size - 12)
WriteLEShort(ZipConstants.VersionMadeBy); // Version made by
WriteLEShort(ZipConstants.VersionZip64); // Version to extract
WriteLEInt(0); // Number of this disk
WriteLEInt(0); // number of the disk with the start of the central directory
WriteLELong(noOfEntries); // No of entries on this disk
WriteLELong(noOfEntries); // Total No of entries in central directory
WriteLELong(sizeEntries); // Size of the central directory
WriteLELong(centralDirOffset); // offset of start of central directory
// zip64 extensible data sector not catered for here (variable size)
// Write the Zip64 end of central directory locator
WriteLEInt(ZipConstants.Zip64CentralDirLocatorSignature);
// no of the disk with the start of the zip64 end of central directory
WriteLEInt(0);
// relative offset of the zip64 end of central directory record
WriteLELong(centralSignatureOffset);
// total number of disks
WriteLEInt(1);
}
/// <summary>
/// Write the required records to end the central directory.
/// </summary>
/// <param name="noOfEntries">The number of entries in the directory.</param>
/// <param name="sizeEntries">The size of the entries in the directory.</param>
/// <param name="startOfCentralDirectory">The start of the central directory.</param>
/// <param name="comment">The archive comment. (This can be null).</param>
public void WriteEndOfCentralDirectory(long noOfEntries, long sizeEntries,
long startOfCentralDirectory, byte[] comment)
{
if ((noOfEntries >= 0xffff) ||
(startOfCentralDirectory >= 0xffffffff) ||
(sizeEntries >= 0xffffffff))
{
WriteZip64EndOfCentralDirectory(noOfEntries, sizeEntries, startOfCentralDirectory);
}
WriteLEInt(ZipConstants.EndOfCentralDirectorySignature);
// TODO: ZipFile Multi disk handling not done
WriteLEShort(0); // number of this disk
WriteLEShort(0); // no of disk with start of central dir
// Number of entries
if (noOfEntries >= 0xffff)
{
WriteLEUshort(0xffff); // Zip64 marker
WriteLEUshort(0xffff);
}
else
{
WriteLEShort((short)noOfEntries); // entries in central dir for this disk
WriteLEShort((short)noOfEntries); // total entries in central directory
}
// Size of the central directory
if (sizeEntries >= 0xffffffff)
{
WriteLEUint(0xffffffff); // Zip64 marker
}
else
{
WriteLEInt((int)sizeEntries);
}
// offset of start of central directory
if (startOfCentralDirectory >= 0xffffffff)
{
WriteLEUint(0xffffffff); // Zip64 marker
}
else
{
WriteLEInt((int)startOfCentralDirectory);
}
int commentLength = (comment != null) ? comment.Length : 0;
if (commentLength > 0xffff)
{
throw new ZipException(string.Format("Comment length({0}) is too long can only be 64K", commentLength));
}
WriteLEShort(commentLength);
if (commentLength > 0)
{
Write(comment, 0, comment.Length);
}
}
#region LE value reading/writing
/// <summary>
/// Read an unsigned short in little endian byte order.
/// </summary>
/// <returns>Returns the value read.</returns>
/// <exception cref="IOException">
/// An i/o error occurs.
/// </exception>
/// <exception cref="EndOfStreamException">
/// The file ends prematurely
/// </exception>
public int ReadLEShort()
{
int byteValue1 = stream_.ReadByte();
if (byteValue1 < 0)
{
throw new EndOfStreamException();
}
int byteValue2 = stream_.ReadByte();
if (byteValue2 < 0)
{
throw new EndOfStreamException();
}
return byteValue1 | (byteValue2 << 8);
}
/// <summary>
/// Read an int in little endian byte order.
/// </summary>
/// <returns>Returns the value read.</returns>
/// <exception cref="IOException">
/// An i/o error occurs.
/// </exception>
/// <exception cref="System.IO.EndOfStreamException">
/// The file ends prematurely
/// </exception>
public int ReadLEInt()
{
return ReadLEShort() | (ReadLEShort() << 16);
}
/// <summary>
/// Read a long in little endian byte order.
/// </summary>
/// <returns>The value read.</returns>
public long ReadLELong()
{
return (uint)ReadLEInt() | ((long)ReadLEInt() << 32);
}
/// <summary>
/// Write an unsigned short in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLEShort(int value)
{
stream_.WriteByte((byte)(value & 0xff));
stream_.WriteByte((byte)((value >> 8) & 0xff));
}
/// <summary>
/// Write a ushort in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLEUshort(ushort value)
{
stream_.WriteByte((byte)(value & 0xff));
stream_.WriteByte((byte)(value >> 8));
}
/// <summary>
/// Write an int in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLEInt(int value)
{
WriteLEShort(value);
WriteLEShort(value >> 16);
}
/// <summary>
/// Write a uint in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLEUint(uint value)
{
WriteLEUshort((ushort)(value & 0xffff));
WriteLEUshort((ushort)(value >> 16));
}
/// <summary>
/// Write a long in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLELong(long value)
{
WriteLEInt((int)value);
WriteLEInt((int)(value >> 32));
}
/// <summary>
/// Write a ulong in little endian byte order.
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteLEUlong(ulong value)
{
WriteLEUint((uint)(value & 0xffffffff));
WriteLEUint((uint)(value >> 32));
}
#endregion LE value reading/writing
/// <summary>
/// Write a data descriptor.
/// </summary>
/// <param name="entry">The entry to write a descriptor for.</param>
/// <returns>Returns the number of descriptor bytes written.</returns>
public int WriteDataDescriptor(ZipEntry entry)
{
if (entry == null)
{
throw new ArgumentNullException(nameof(entry));
}
int result = 0;
// Add data descriptor if flagged as required
if ((entry.Flags & (int)GeneralBitFlags.Descriptor) != 0)
{
// The data descriptor signature was not originally part of PKZIP's specification but is now
// described as optional in the PKZIP Appnote documenting the format.
WriteLEInt(ZipConstants.DataDescriptorSignature);
WriteLEInt(unchecked((int)(entry.Crc)));
result += 8;
if (entry.LocalHeaderRequiresZip64)
{
WriteLELong(entry.CompressedSize);
WriteLELong(entry.Size);
result += 16;
}
else
{
WriteLEInt((int)entry.CompressedSize);
WriteLEInt((int)entry.Size);
result += 8;
}
}
return result;
}
/// <summary>
/// Read data descriptor at the end of compressed data.
/// </summary>
/// <param name="zip64">if set to <c>true</c> [zip64].</param>
/// <param name="data">The data to fill in.</param>
/// <returns>Returns the number of bytes read in the descriptor.</returns>
public void ReadDataDescriptor(bool zip64, DescriptorData data)
{
int intValue = ReadLEInt();
// In theory this may not be a descriptor according to PKZIP appnote.
// In practice it's always there.
if (intValue != ZipConstants.DataDescriptorSignature)
{
throw new ZipException("Data descriptor signature not found");
}
data.Crc = ReadLEInt();
if (zip64)
{
data.CompressedSize = ReadLELong();
data.Size = ReadLELong();
}
else
{
data.CompressedSize = ReadLEInt();
data.Size = ReadLEInt();
}
}
#region Instance Fields
private bool isOwner_;
private Stream stream_;
#endregion Instance Fields
}
}

View File

@@ -0,0 +1,727 @@
using ICSharpCode.SharpZipLib.Checksum;
using ICSharpCode.SharpZipLib.Encryption;
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using System;
using System.IO;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// This is an InflaterInputStream that reads the files contained in a zip archive
/// one after another. It has a special method to get the zip entry of
/// the next file. The zip entry contains information about the file name,
/// size, compressed size, Crc, etc.
/// It includes support for Stored and Deflated entries.
/// <br/>
/// <br/>Author of the original java version : Jochen Hoenicke
/// </summary>
///
/// <example> This sample shows how to read a zip file
/// <code lang="C#">
/// using System;
/// using System.Text;
/// using System.IO;
///
/// using ICSharpCode.SharpZipLib.Zip;
///
/// class MainClass
/// {
/// public static void Main(string[] args)
/// {
/// using ( ZipInputStream s = new ZipInputStream(File.OpenRead(args[0]))) {
///
/// ZipEntry theEntry;
/// int size;
/// byte[] data = new byte[2048];
///
/// while ((theEntry = s.GetNextEntry()) != null) {
/// if (theEntry.IsFile) {
/// Console.Write("Show contents (y/n) ?");
/// if (Console.ReadLine() == "y") {
/// while (true) {
/// size = s.Read(data, 0, data.Length);
/// if (size > 0) {
/// Console.Write(new ASCIIEncoding().GetString(data, 0, size));
/// } else {
/// break;
/// }
/// }
/// }
/// }
/// }
/// }
/// }
/// }
/// </code>
/// </example>
public class ZipInputStream : InflaterInputStream
{
#region Instance Fields
/// <summary>
/// Delegate for reading bytes from a stream.
/// </summary>
private delegate int ReadDataHandler(byte[] b, int offset, int length);
/// <summary>
/// The current reader this instance.
/// </summary>
private ReadDataHandler internalReader;
private Crc32 crc = new Crc32();
private ZipEntry entry;
private long size;
private CompressionMethod method;
private int flags;
private string password;
#endregion Instance Fields
#region Constructors
/// <summary>
/// Creates a new Zip input stream, for reading a zip archive.
/// </summary>
/// <param name="baseInputStream">The underlying <see cref="Stream"/> providing data.</param>
public ZipInputStream(Stream baseInputStream)
: base(baseInputStream, new Inflater(true))
{
internalReader = new ReadDataHandler(ReadingNotAvailable);
}
/// <summary>
/// Creates a new Zip input stream, for reading a zip archive.
/// </summary>
/// <param name="baseInputStream">The underlying <see cref="Stream"/> providing data.</param>
/// <param name="bufferSize">Size of the buffer.</param>
public ZipInputStream(Stream baseInputStream, int bufferSize)
: base(baseInputStream, new Inflater(true), bufferSize)
{
internalReader = new ReadDataHandler(ReadingNotAvailable);
}
#endregion Constructors
/// <summary>
/// Optional password used for encryption when non-null
/// </summary>
/// <value>A password for all encrypted <see cref="ZipEntry">entries </see> in this <see cref="ZipInputStream"/></value>
public string Password
{
get
{
return password;
}
set
{
password = value;
}
}
/// <summary>
/// Gets a value indicating if there is a current entry and it can be decompressed
/// </summary>
/// <remarks>
/// The entry can only be decompressed if the library supports the zip features required to extract it.
/// See the <see cref="ZipEntry.Version">ZipEntry Version</see> property for more details.
///
/// Since <see cref="ZipInputStream"/> uses the local headers for extraction, entries with no compression combined with the
/// <see cref="GeneralBitFlags.Descriptor"/> flag set, cannot be extracted as the end of the entry data cannot be deduced.
/// </remarks>
public bool CanDecompressEntry
=> entry != null
&& IsEntryCompressionMethodSupported(entry)
&& entry.CanDecompress
&& (!entry.HasFlag(GeneralBitFlags.Descriptor) || entry.CompressionMethod != CompressionMethod.Stored || entry.IsCrypted);
/// <summary>
/// Is the compression method for the specified entry supported?
/// </summary>
/// <remarks>
/// Uses entry.CompressionMethodForHeader so that entries of type WinZipAES will be rejected.
/// </remarks>
/// <param name="entry">the entry to check.</param>
/// <returns>true if the compression method is supported, false if not.</returns>
private static bool IsEntryCompressionMethodSupported(ZipEntry entry)
{
var entryCompressionMethod = entry.CompressionMethodForHeader;
return entryCompressionMethod == CompressionMethod.Deflated ||
entryCompressionMethod == CompressionMethod.Stored;
}
/// <summary>
/// Advances to the next entry in the archive
/// </summary>
/// <returns>
/// The next <see cref="ZipEntry">entry</see> in the archive or null if there are no more entries.
/// </returns>
/// <remarks>
/// If the previous entry is still open <see cref="CloseEntry">CloseEntry</see> is called.
/// </remarks>
/// <exception cref="InvalidOperationException">
/// Input stream is closed
/// </exception>
/// <exception cref="ZipException">
/// Password is not set, password is invalid, compression method is invalid,
/// version required to extract is not supported
/// </exception>
public ZipEntry GetNextEntry()
{
if (crc == null)
{
throw new InvalidOperationException("Closed.");
}
if (entry != null)
{
CloseEntry();
}
int header = inputBuffer.ReadLeInt();
if (header == ZipConstants.CentralHeaderSignature ||
header == ZipConstants.EndOfCentralDirectorySignature ||
header == ZipConstants.CentralHeaderDigitalSignature ||
header == ZipConstants.ArchiveExtraDataSignature ||
header == ZipConstants.Zip64CentralFileHeaderSignature)
{
// No more individual entries exist
Dispose();
return null;
}
// -jr- 07-Dec-2003 Ignore spanning temporary signatures if found
// Spanning signature is same as descriptor signature and is untested as yet.
if ((header == ZipConstants.SpanningTempSignature) || (header == ZipConstants.SpanningSignature))
{
header = inputBuffer.ReadLeInt();
}
if (header != ZipConstants.LocalHeaderSignature)
{
throw new ZipException("Wrong Local header signature: 0x" + String.Format("{0:X}", header));
}
var versionRequiredToExtract = (short)inputBuffer.ReadLeShort();
flags = inputBuffer.ReadLeShort();
method = (CompressionMethod)inputBuffer.ReadLeShort();
var dostime = (uint)inputBuffer.ReadLeInt();
int crc2 = inputBuffer.ReadLeInt();
csize = inputBuffer.ReadLeInt();
size = inputBuffer.ReadLeInt();
int nameLen = inputBuffer.ReadLeShort();
int extraLen = inputBuffer.ReadLeShort();
bool isCrypted = (flags & 1) == 1;
byte[] buffer = new byte[nameLen];
inputBuffer.ReadRawBuffer(buffer);
string name = ZipStrings.ConvertToStringExt(flags, buffer);
entry = new ZipEntry(name, versionRequiredToExtract, ZipConstants.VersionMadeBy, method)
{
Flags = flags,
};
if ((flags & 8) == 0)
{
entry.Crc = crc2 & 0xFFFFFFFFL;
entry.Size = size & 0xFFFFFFFFL;
entry.CompressedSize = csize & 0xFFFFFFFFL;
entry.CryptoCheckValue = (byte)((crc2 >> 24) & 0xff);
}
else
{
// This allows for GNU, WinZip and possibly other archives; the PKZIP spec
// says these values are zero under these circumstances.
if (crc2 != 0)
{
entry.Crc = crc2 & 0xFFFFFFFFL;
}
if (size != 0)
{
entry.Size = size & 0xFFFFFFFFL;
}
if (csize != 0)
{
entry.CompressedSize = csize & 0xFFFFFFFFL;
}
entry.CryptoCheckValue = (byte)((dostime >> 8) & 0xff);
}
entry.DosTime = dostime;
// If the local header requires Zip64 then the extended header should contain
// both values.
// Handle extra data if present. This can set/alter some fields of the entry.
if (extraLen > 0)
{
byte[] extra = new byte[extraLen];
inputBuffer.ReadRawBuffer(extra);
entry.ExtraData = extra;
}
entry.ProcessExtraData(true);
if (entry.CompressedSize >= 0)
{
csize = entry.CompressedSize;
}
if (entry.Size >= 0)
{
size = entry.Size;
}
if (method == CompressionMethod.Stored && (!isCrypted && csize != size || (isCrypted && csize - ZipConstants.CryptoHeaderSize != size)))
{
throw new ZipException("Stored, but compressed != uncompressed");
}
// Determine how to handle reading of data if this is attempted.
if (IsEntryCompressionMethodSupported(entry))
{
internalReader = new ReadDataHandler(InitialRead);
}
else
{
internalReader = new ReadDataHandler(ReadingNotSupported);
}
return entry;
}
/// <summary>
/// Read data descriptor at the end of compressed data.
/// </summary>
private void ReadDataDescriptor()
{
if (inputBuffer.ReadLeInt() != ZipConstants.DataDescriptorSignature)
{
throw new ZipException("Data descriptor signature not found");
}
entry.Crc = inputBuffer.ReadLeInt() & 0xFFFFFFFFL;
if (entry.LocalHeaderRequiresZip64)
{
csize = inputBuffer.ReadLeLong();
size = inputBuffer.ReadLeLong();
}
else
{
csize = inputBuffer.ReadLeInt();
size = inputBuffer.ReadLeInt();
}
entry.CompressedSize = csize;
entry.Size = size;
}
/// <summary>
/// Complete cleanup as the final part of closing.
/// </summary>
/// <param name="testCrc">True if the crc value should be tested</param>
private void CompleteCloseEntry(bool testCrc)
{
StopDecrypting();
if ((flags & 8) != 0)
{
ReadDataDescriptor();
}
size = 0;
if (testCrc &&
((crc.Value & 0xFFFFFFFFL) != entry.Crc) && (entry.Crc != -1))
{
throw new ZipException("CRC mismatch");
}
crc.Reset();
if (method == CompressionMethod.Deflated)
{
inf.Reset();
}
entry = null;
}
/// <summary>
/// Closes the current zip entry and moves to the next one.
/// </summary>
/// <exception cref="InvalidOperationException">
/// The stream is closed
/// </exception>
/// <exception cref="ZipException">
/// The Zip stream ends early
/// </exception>
public void CloseEntry()
{
if (crc == null)
{
throw new InvalidOperationException("Closed");
}
if (entry == null)
{
return;
}
if (method == CompressionMethod.Deflated)
{
if ((flags & 8) != 0)
{
// We don't know how much we must skip, read until end.
byte[] tmp = new byte[4096];
// Read will close this entry
while (Read(tmp, 0, tmp.Length) > 0)
{
}
return;
}
csize -= inf.TotalIn;
inputBuffer.Available += inf.RemainingInput;
}
if ((inputBuffer.Available > csize) && (csize >= 0))
{
inputBuffer.Available = (int)((long)inputBuffer.Available - csize);
}
else
{
csize -= inputBuffer.Available;
inputBuffer.Available = 0;
while (csize != 0)
{
long skipped = Skip(csize);
if (skipped <= 0)
{
throw new ZipException("Zip archive ends early.");
}
csize -= skipped;
}
}
CompleteCloseEntry(false);
}
/// <summary>
/// Returns 1 if there is an entry available;
/// otherwise returns 0.
/// </summary>
public override int Available
{
get
{
return entry != null ? 1 : 0;
}
}
/// <summary>
/// Returns the current size that can be read from the current entry if available
/// </summary>
/// <exception cref="ZipException">Thrown if the entry size is not known.</exception>
/// <exception cref="InvalidOperationException">Thrown if no entry is currently available.</exception>
public override long Length
{
get
{
if (entry != null)
{
if (entry.Size >= 0)
{
return entry.Size;
}
else
{
throw new ZipException("Length not available for the current entry");
}
}
else
{
throw new InvalidOperationException("No current entry");
}
}
}
/// <summary>
/// Reads a byte from the current zip entry.
/// </summary>
/// <returns>
/// The byte or -1 if end of stream is reached.
/// </returns>
public override int ReadByte()
{
byte[] b = new byte[1];
if (Read(b, 0, 1) <= 0)
{
return -1;
}
return b[0] & 0xff;
}
/// <summary>
/// Handle attempts to read by throwing an <see cref="InvalidOperationException"/>.
/// </summary>
/// <param name="destination">The destination array to store data in.</param>
/// <param name="offset">The offset at which data read should be stored.</param>
/// <param name="count">The maximum number of bytes to read.</param>
/// <returns>Returns the number of bytes actually read.</returns>
private int ReadingNotAvailable(byte[] destination, int offset, int count)
{
throw new InvalidOperationException("Unable to read from this stream");
}
/// <summary>
/// Handle attempts to read from this entry by throwing an exception
/// </summary>
private int ReadingNotSupported(byte[] destination, int offset, int count)
{
throw new ZipException("The compression method for this entry is not supported");
}
/// <summary>
/// Handle attempts to read from this entry by throwing an exception
/// </summary>
private int StoredDescriptorEntry(byte[] destination, int offset, int count) =>
throw new StreamUnsupportedException(
"The combination of Stored compression method and Descriptor flag is not possible to read using ZipInputStream");
/// <summary>
/// Perform the initial read on an entry which may include
/// reading encryption headers and setting up inflation.
/// </summary>
/// <param name="destination">The destination to fill with data read.</param>
/// <param name="offset">The offset to start reading at.</param>
/// <param name="count">The maximum number of bytes to read.</param>
/// <returns>The actual number of bytes read.</returns>
private int InitialRead(byte[] destination, int offset, int count)
{
var usesDescriptor = (entry.Flags & (int)GeneralBitFlags.Descriptor) != 0;
// Handle encryption if required.
if (entry.IsCrypted)
{
if (password == null)
{
throw new ZipException("No password set.");
}
// Generate and set crypto transform...
var managed = new PkzipClassicManaged();
byte[] key = PkzipClassic.GenerateKeys(ZipStrings.ConvertToArray(password));
inputBuffer.CryptoTransform = managed.CreateDecryptor(key, null);
byte[] cryptbuffer = new byte[ZipConstants.CryptoHeaderSize];
inputBuffer.ReadClearTextBuffer(cryptbuffer, 0, ZipConstants.CryptoHeaderSize);
if (cryptbuffer[ZipConstants.CryptoHeaderSize - 1] != entry.CryptoCheckValue)
{
throw new ZipException("Invalid password");
}
if (csize >= ZipConstants.CryptoHeaderSize)
{
csize -= ZipConstants.CryptoHeaderSize;
}
else if (!usesDescriptor)
{
throw new ZipException($"Entry compressed size {csize} too small for encryption");
}
}
else
{
inputBuffer.CryptoTransform = null;
}
if (csize > 0 || usesDescriptor)
{
if (method == CompressionMethod.Deflated && inputBuffer.Available > 0)
{
inputBuffer.SetInflaterInput(inf);
}
// It's not possible to know how many bytes to read when using "Stored" compression (unless using encryption)
if (!entry.IsCrypted && method == CompressionMethod.Stored && usesDescriptor)
{
internalReader = StoredDescriptorEntry;
return StoredDescriptorEntry(destination, offset, count);
}
if (!CanDecompressEntry)
{
internalReader = ReadingNotSupported;
return ReadingNotSupported(destination, offset, count);
}
internalReader = BodyRead;
return BodyRead(destination, offset, count);
}
internalReader = ReadingNotAvailable;
return 0;
}
/// <summary>
/// Read a block of bytes from the stream.
/// </summary>
/// <param name="buffer">The destination for the bytes.</param>
/// <param name="offset">The index to start storing data.</param>
/// <param name="count">The number of bytes to attempt to read.</param>
/// <returns>Returns the number of bytes read.</returns>
/// <remarks>Zero bytes read means end of stream.</remarks>
public override int Read(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset), "Cannot be negative");
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), "Cannot be negative");
}
if ((buffer.Length - offset) < count)
{
throw new ArgumentException("Invalid offset/count combination");
}
return internalReader(buffer, offset, count);
}
/// <summary>
/// Reads a block of bytes from the current zip entry.
/// </summary>
/// <returns>
/// The number of bytes read (this may be less than the length requested, even before the end of stream), or 0 on end of stream.
/// </returns>
/// <exception cref="IOException">
/// An i/o error occurred.
/// </exception>
/// <exception cref="ZipException">
/// The deflated stream is corrupted.
/// </exception>
/// <exception cref="InvalidOperationException">
/// The stream is not open.
/// </exception>
private int BodyRead(byte[] buffer, int offset, int count)
{
if (crc == null)
{
throw new InvalidOperationException("Closed");
}
if ((entry == null) || (count <= 0))
{
return 0;
}
if (offset + count > buffer.Length)
{
throw new ArgumentException("Offset + count exceeds buffer size");
}
bool finished = false;
switch (method)
{
case CompressionMethod.Deflated:
count = base.Read(buffer, offset, count);
if (count <= 0)
{
if (!inf.IsFinished)
{
throw new ZipException("Inflater not finished!");
}
inputBuffer.Available = inf.RemainingInput;
// A csize of -1 is from an unpatched local header
if ((flags & 8) == 0 &&
(inf.TotalIn != csize && csize != 0xFFFFFFFF && csize != -1 || inf.TotalOut != size))
{
throw new ZipException("Size mismatch: " + csize + ";" + size + " <-> " + inf.TotalIn + ";" + inf.TotalOut);
}
inf.Reset();
finished = true;
}
break;
case CompressionMethod.Stored:
if ((count > csize) && (csize >= 0))
{
count = (int)csize;
}
if (count > 0)
{
count = inputBuffer.ReadClearTextBuffer(buffer, offset, count);
if (count > 0)
{
csize -= count;
size -= count;
}
}
if (csize == 0)
{
finished = true;
}
else
{
if (count < 0)
{
throw new ZipException("EOF in stored block");
}
}
break;
}
if (count > 0)
{
crc.Update(new ArraySegment<byte>(buffer, offset, count));
}
if (finished)
{
CompleteCloseEntry(true);
}
return count;
}
/// <summary>
/// Closes the zip input stream
/// </summary>
protected override void Dispose(bool disposing)
{
internalReader = new ReadDataHandler(ReadingNotAvailable);
crc = null;
entry = null;
base.Dispose(disposing);
}
}
}

View File

@@ -0,0 +1,313 @@
using ICSharpCode.SharpZipLib.Core;
using System;
using System.IO;
using System.Text;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// ZipNameTransform transforms names as per the Zip file naming convention.
/// </summary>
/// <remarks>The use of absolute names is supported although its use is not valid
/// according to Zip naming conventions, and should not be used if maximum compatability is desired.</remarks>
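/// <example>
/// An illustrative sketch (hypothetical paths) of the transformation applied:
/// <code lang="C#">
/// var transform = new ZipNameTransform(@"C:\Staging\");
/// string entryName = transform.TransformFile(@"C:\Staging\docs\readme.txt");
/// // entryName is "docs/readme.txt": the configured prefix is trimmed and back slashes become '/'
/// </code>
/// </example>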
public class ZipNameTransform : INameTransform
{
#region Constructors
/// <summary>
/// Initialize a new instance of <see cref="ZipNameTransform"></see>
/// </summary>
public ZipNameTransform()
{
}
/// <summary>
/// Initialize a new instance of <see cref="ZipNameTransform"></see>
/// </summary>
/// <param name="trimPrefix">The string to trim from the front of paths if found.</param>
public ZipNameTransform(string trimPrefix)
{
TrimPrefix = trimPrefix;
}
#endregion Constructors
/// <summary>
/// Static constructor.
/// </summary>
static ZipNameTransform()
{
char[] invalidPathChars;
invalidPathChars = Path.GetInvalidPathChars();
int howMany = invalidPathChars.Length + 2;
InvalidEntryCharsRelaxed = new char[howMany];
Array.Copy(invalidPathChars, 0, InvalidEntryCharsRelaxed, 0, invalidPathChars.Length);
InvalidEntryCharsRelaxed[howMany - 1] = '*';
InvalidEntryCharsRelaxed[howMany - 2] = '?';
howMany = invalidPathChars.Length + 4;
InvalidEntryChars = new char[howMany];
Array.Copy(invalidPathChars, 0, InvalidEntryChars, 0, invalidPathChars.Length);
InvalidEntryChars[howMany - 1] = ':';
InvalidEntryChars[howMany - 2] = '\\';
InvalidEntryChars[howMany - 3] = '*';
InvalidEntryChars[howMany - 4] = '?';
}
/// <summary>
/// Transform a windows directory name according to the Zip file naming conventions.
/// </summary>
/// <param name="name">The directory name to transform.</param>
/// <returns>The transformed name.</returns>
public string TransformDirectory(string name)
{
name = TransformFile(name);
if (name.Length > 0)
{
if (!name.EndsWith("/", StringComparison.Ordinal))
{
name += "/";
}
}
else
{
throw new ZipException("Cannot have an empty directory name");
}
return name;
}
/// <summary>
/// Transform a windows file name according to the Zip file naming conventions.
/// </summary>
/// <param name="name">The file name to transform.</param>
/// <returns>The transformed name.</returns>
public string TransformFile(string name)
{
if (name != null)
{
string lowerName = name.ToLower();
if ((trimPrefix_ != null) && (lowerName.IndexOf(trimPrefix_, StringComparison.Ordinal) == 0))
{
name = name.Substring(trimPrefix_.Length);
}
name = name.Replace(@"\", "/");
name = PathUtils.DropPathRoot(name);
// Drop any leading and trailing slashes.
name = name.Trim('/');
// Convert consecutive // characters to /
int index = name.IndexOf("//", StringComparison.Ordinal);
while (index >= 0)
{
name = name.Remove(index, 1);
index = name.IndexOf("//", StringComparison.Ordinal);
}
name = MakeValidName(name, '_');
}
else
{
name = string.Empty;
}
return name;
}
/// <summary>
/// Get/set the path prefix to be trimmed from paths if present.
/// </summary>
/// <remarks>The prefix is trimmed before any conversion from
/// a windows path is done.</remarks>
public string TrimPrefix
{
get { return trimPrefix_; }
set
{
trimPrefix_ = value;
if (trimPrefix_ != null)
{
trimPrefix_ = trimPrefix_.ToLower();
}
}
}
/// <summary>
/// Force a name to be valid by replacing invalid characters with a fixed value
/// </summary>
/// <param name="name">The name to force valid</param>
/// <param name="replacement">The replacement character to use.</param>
/// <returns>Returns a valid name</returns>
private static string MakeValidName(string name, char replacement)
{
int index = name.IndexOfAny(InvalidEntryChars);
if (index >= 0)
{
var builder = new StringBuilder(name);
while (index >= 0)
{
builder[index] = replacement;
if (index >= name.Length)
{
index = -1;
}
else
{
index = name.IndexOfAny(InvalidEntryChars, index + 1);
}
}
name = builder.ToString();
}
if (name.Length > 0xffff)
{
throw new PathTooLongException();
}
return name;
}
/// <summary>
/// Test a name to see if it is a valid name for a zip entry.
/// </summary>
/// <param name="name">The name to test.</param>
/// <param name="relaxed">If true checking is relaxed about windows file names and absolute paths.</param>
/// <returns>Returns true if the name is a valid zip name; false otherwise.</returns>
/// <remarks>Zip path names are actually in Unix format, and should only contain relative paths.
/// This means that any path stored should not contain a drive or
/// device letter, or a leading slash. All slashes should be forward slashes '/'.
/// An empty name is valid for a file where the input comes from standard input.
/// A null name is not considered valid.
/// </remarks>
public static bool IsValidName(string name, bool relaxed)
{
bool result = (name != null);
if (result)
{
if (relaxed)
{
result = name.IndexOfAny(InvalidEntryCharsRelaxed) < 0;
}
else
{
result =
(name.IndexOfAny(InvalidEntryChars) < 0) &&
(name.IndexOf('/') != 0);
}
}
return result;
}
/// <summary>
/// Test a name to see if it is a valid name for a zip entry.
/// </summary>
/// <param name="name">The name to test.</param>
/// <returns>Returns true if the name is a valid zip name; false otherwise.</returns>
/// <remarks>Zip path names are actually in unix format,
/// and should only contain relative paths if a path is present.
/// This means that the path stored should not contain a drive or
/// device letter, or a leading slash. All slashes should be forward slashes '/'.
/// An empty name is valid where the input comes from standard input.
/// A null name is not considered valid.
/// </remarks>
public static bool IsValidName(string name)
{
bool result =
(name != null) &&
(name.IndexOfAny(InvalidEntryChars) < 0) &&
(name.IndexOf('/') != 0)
;
return result;
}
#region Instance Fields
private string trimPrefix_;
#endregion Instance Fields
#region Class Fields
private static readonly char[] InvalidEntryChars;
private static readonly char[] InvalidEntryCharsRelaxed;
#endregion Class Fields
}
/// <summary>
/// An implementation of INameTransform that transforms entry paths as per the Zip file naming convention.
/// Strips path roots and puts directory separators in the correct format ('/')
/// </summary>
public class PathTransformer : INameTransform
{
/// <summary>
/// Initialize a new instance of <see cref="PathTransformer"></see>
/// </summary>
public PathTransformer()
{
}
/// <summary>
/// Transform a windows directory name according to the Zip file naming conventions.
/// </summary>
/// <param name="name">The directory name to transform.</param>
/// <returns>The transformed name.</returns>
public string TransformDirectory(string name)
{
name = TransformFile(name);
if (name.Length > 0)
{
if (!name.EndsWith("/", StringComparison.Ordinal))
{
name += "/";
}
}
else
{
throw new ZipException("Cannot have an empty directory name");
}
return name;
}
/// <summary>
/// Transform a windows file name according to the Zip file naming conventions.
/// </summary>
/// <param name="name">The file name to transform.</param>
/// <returns>The transformed name.</returns>
public string TransformFile(string name)
{
if (name != null)
{
// Put separators in the expected format.
name = name.Replace(@"\", "/");
// Remove the path root.
name = PathUtils.DropPathRoot(name);
// Drop any leading and trailing slashes.
name = name.Trim('/');
// Convert consecutive // characters to /
int index = name.IndexOf("//", StringComparison.Ordinal);
while (index >= 0)
{
name = name.Remove(index, 1);
index = name.IndexOf("//", StringComparison.Ordinal);
}
}
else
{
name = string.Empty;
}
return name;
}
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,194 @@
using System;
using System.Text;
using ICSharpCode.SharpZipLib.Core;
namespace ICSharpCode.SharpZipLib.Zip
{
/// <summary>
/// This static class contains functions for encoding and decoding zip file strings
/// </summary>
public static class ZipStrings
{
static ZipStrings()
{
try
{
var platformCodepage = Encoding.GetEncoding(0).CodePage;
SystemDefaultCodePage = (platformCodepage == 1 || platformCodepage == 2 || platformCodepage == 3 || platformCodepage == 42) ? FallbackCodePage : platformCodepage;
}
catch
{
SystemDefaultCodePage = FallbackCodePage;
}
}
/// <summary>Code page backing field</summary>
/// <remarks>
/// The original Zip specification (https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT) states
/// that file names should only be encoded with IBM Code Page 437 or UTF-8.
/// In practice, most zip apps use OEM or system encoding (typically cp437 on Windows).
/// Let's be good citizens and default to UTF-8 http://utf8everywhere.org/
/// </remarks>
private static int codePage = AutomaticCodePage;
/// <summary>
/// Automatically select codepage while opening archive
/// see https://github.com/icsharpcode/SharpZipLib/pull/280#issuecomment-433608324
/// </summary>
private const int AutomaticCodePage = -1;
/// <summary>
/// Encoding used for string conversion. Setting this to 65001 (UTF-8) will
/// also set the Language encoding flag to indicate UTF-8 encoded file names.
/// </summary>
public static int CodePage
{
get
{
return codePage == AutomaticCodePage ? Encoding.UTF8.CodePage : codePage;
}
set
{
if ((value < 0) || (value > 65535) ||
(value == 1) || (value == 2) || (value == 3) || (value == 42))
{
throw new ArgumentOutOfRangeException(nameof(value));
}
codePage = value;
}
}
private const int FallbackCodePage = 437;
/// <summary>
/// Attempt to get the operating system default codepage, or failing that, to
/// the fallback code page IBM 437.
/// </summary>
public static int SystemDefaultCodePage { get; }
/// <summary>
/// Get whether the default codepage is set to UTF-8. Setting this property to false will
/// set the <see cref="CodePage"/> to <see cref="SystemDefaultCodePage"/>
/// </summary>
/// <remarks>
/// Get OEM codepage from NetFX, which parses the NLP file with culture info table etc etc.
/// But sometimes it yields the special value of 1 which is nicknamed <c>CodePageNoOEM</c> in <see cref="Encoding"/> sources (might also mean <c>CP_OEMCP</c>, but Encoding puts it so).
/// This was observed on Ukrainian and Hindu systems.
/// Given this value, <see cref="Encoding.GetEncoding(int)"/> throws an <see cref="ArgumentException"/>.
/// So replace it with <see cref="FallbackCodePage"/> (IBM 437, which is the default code page of the console in a default Windows installation).
/// </remarks>
public static bool UseUnicode
{
get
{
return codePage == Encoding.UTF8.CodePage;
}
set
{
if (value)
{
codePage = Encoding.UTF8.CodePage;
}
else
{
codePage = SystemDefaultCodePage;
}
}
}
/// <summary>
/// Convert a portion of a byte array to a string using <see cref="CodePage"/>
/// </summary>
/// <param name="data">
/// Data to convert to string
/// </param>
/// <param name="count">
/// Number of bytes to convert starting from index 0
/// </param>
/// <returns>
/// data[0]..data[count - 1] converted to a string
/// </returns>
public static string ConvertToString(byte[] data, int count)
=> data == null
? string.Empty
: Encoding.GetEncoding(CodePage).GetString(data, 0, count);
/// <summary>
/// Convert a byte array to a string using <see cref="CodePage"/>
/// </summary>
/// <param name="data">
/// Byte array to convert
/// </param>
/// <returns>
/// <paramref name="data">data</paramref>converted to a string
/// </returns>
public static string ConvertToString(byte[] data)
=> ConvertToString(data, data.Length);
private static Encoding EncodingFromFlag(int flags)
=> ((flags & (int)GeneralBitFlags.UnicodeText) != 0)
? Encoding.UTF8
: Encoding.GetEncoding(
// if CodePage wasn't set manually and no utf flag present
// then we must use SystemDefault (old behavior)
// otherwise, CodePage should be preferred over SystemDefault
// see https://github.com/icsharpcode/SharpZipLib/issues/274
codePage == AutomaticCodePage ?
SystemDefaultCodePage :
codePage);
/// <summary>
/// Convert a byte array to a string using <see cref="CodePage"/>
/// </summary>
/// <param name="flags">The applicable general purpose bits flags</param>
/// <param name="data">
/// Byte array to convert
/// </param>
/// <param name="count">The number of bytes to convert.</param>
/// <returns>
/// <paramref name="data">data</paramref>converted to a string
/// </returns>
public static string ConvertToStringExt(int flags, byte[] data, int count)
=> (data == null)
? string.Empty
: EncodingFromFlag(flags).GetString(data, 0, count);
/// <summary>
/// Convert a byte array to a string using <see cref="CodePage"/>
/// </summary>
/// <param name="data">
/// Byte array to convert
/// </param>
/// <param name="flags">The applicable general purpose bits flags</param>
/// <returns>
/// <paramref name="data">data</paramref>converted to a string
/// </returns>
public static string ConvertToStringExt(int flags, byte[] data)
=> ConvertToStringExt(flags, data, data.Length);
/// <summary>
/// Convert a string to a byte array using <see cref="CodePage"/>
/// </summary>
/// <param name="str">
/// String to convert to an array
/// </param>
/// <returns>Converted array</returns>
public static byte[] ConvertToArray(string str)
=> str == null
? Empty.Array<byte>()
: Encoding.GetEncoding(CodePage).GetBytes(str);
/// <summary>
/// Convert a string to a byte array using <see cref="CodePage"/>
/// </summary>
/// <param name="flags">The applicable <see cref="GeneralBitFlags">general purpose bits flags</see></param>
/// <param name="str">
/// String to convert to an array
/// </param>
/// <returns>Converted array</returns>
public static byte[] ConvertToArray(int flags, string str)
=> (string.IsNullOrEmpty(str))
? Empty.Array<byte>()
: EncodingFromFlag(flags).GetBytes(str);
}
}