Initial upload

commit 31d81b91b6
2025-08-26 08:37:44 +08:00
448 changed files with 80981 additions and 0 deletions

View File

@@ -0,0 +1,31 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
namespace .Utility
{
//public class BindingProxy : Freezable
//{
// #region Overrides of Freezable
// protected override Freezable CreateInstanceCore()
// {
// return new BindingProxy();
// }
// #endregion Overrides of Freezable
// public object Data
// {
// get { return (object)GetValue(DataProperty); }
// set { SetValue(DataProperty, value); }
// }
// // Using a DependencyProperty as the backing store for Data. This enables animation, styling, binding, etc...
// public static readonly DependencyProperty DataProperty =
// DependencyProperty.Register("Data", typeof(object), typeof(BindingProxy), new UIPropertyMetadata(null));
//}
}

View File

@@ -0,0 +1,39 @@
using System.Runtime.InteropServices;
namespace CZGL.SystemInfo
{
/// <summary>
/// Cross-platform helpers for sampling CPU time.
/// </summary>
public static class CPUHelper
{
/// <summary>
/// Gets the CPU time consumed by the current system.
/// </summary>
/// <returns></returns>
public static CPUTime GetCPUTime()
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
return WindowsCPU.GetCPUTime();
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
return LinuxCPU.GetCPUTime();
return new CPUTime();
}
/// <summary>
/// Calculates the CPU usage between two samples.
/// </summary>
/// <param name="oldTime"></param>
/// <param name="newTime"></param>
/// <returns></returns>
public static double CalculateCPULoad(CPUTime oldTime, CPUTime newTime)
{
ulong totalTicksSinceLastTime = newTime.SystemTime - oldTime.SystemTime;
ulong idleTicksSinceLastTime = newTime.IdleTime - oldTime.IdleTime;
double ret = 1.0f - ((totalTicksSinceLastTime > 0) ? ((double)idleTicksSinceLastTime) / totalTicksSinceLastTime : 0);
return ret;
}
}
}
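A minimal usage sketch for the helper above, assuming a console project that references the CZGL.SystemInfo sources in this commit: take two samples about a second apart and pass both to CalculateCPULoad.

using System;
using System.Threading;
using CZGL.SystemInfo;

class CpuLoadSample
{
    static void Main()
    {
        // First sample, short wait, second sample; the load is derived from the delta between the two.
        CPUTime first = CPUHelper.GetCPUTime();
        Thread.Sleep(1000);
        CPUTime second = CPUHelper.GetCPUTime();
        double load = CPUHelper.CalculateCPULoad(first, second);
        Console.WriteLine($"CPU load: {load * 100:F2} %");
    }
}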

View File

@@ -0,0 +1,29 @@
namespace CZGL.SystemInfo
{
/// <summary>
/// A CPU time sample.
/// </summary>
public struct CPUTime
{
/// <summary>
///
/// </summary>
/// <param name="idleTime"></param>
/// <param name="systemTime"></param>
public CPUTime(ulong idleTime, ulong systemTime)
{
IdleTime = idleTime;
SystemTime = systemTime;
}
/// <summary>
/// CPU idle time
/// </summary>
public ulong IdleTime { get; private set; }
/// <summary>
/// Total CPU time (kernel + user)
/// </summary>
public ulong SystemTime { get; private set; }
}
}

View File

@@ -0,0 +1,21 @@
using System.Runtime.InteropServices;
namespace CZGL.SystemInfo
{
/// <summary>
/// Win32 FILETIME structure.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct FILETIME
{
/// <summary>
/// Low-order part of the time value
/// </summary>
public uint DateTimeLow;
/// <summary>
/// High-order part of the time value
/// </summary>
public uint DateTimeHigh;
}
}

View File

@@ -0,0 +1,55 @@
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
namespace CZGL.SystemInfo
{
/// <summary>
/// Reads CPU time on Linux from /proc/stat.
/// </summary>
public static class LinuxCPU
{
const string Path = "/proc/stat";
/// <summary>
/// Gets the CPU time.
/// </summary>
/// <returns></returns>
public static CPUTime GetCPUTime()
{
ulong IdleTime = 0;
ulong SystemTime = 0;
try
{
var text = File.ReadAllLines(Path);
foreach (var item in text)
{
if (!item.StartsWith("cpu")) continue;
#if NET6_0_OR_GREATER
var values = item.Split(" ", StringSplitOptions.RemoveEmptyEntries).ToArray();
SystemTime += (ulong)(values[1..].Select(x => decimal.Parse(x)).Sum());
#else
var values = item.Split(new char[] { ' '}, StringSplitOptions.RemoveEmptyEntries).ToArray();
SystemTime += (ulong)(values.ToList().GetRange(1, values.Length - 1).Select(x => decimal.Parse(x)).Sum());
#endif
IdleTime += ulong.Parse(values[4]);
}
}
catch (Exception ex)
{
Debug.WriteLine(ex.ToString());
Debug.Assert(false, ex.Message);
throw new PlatformNotSupportedException($"{RuntimeInformation.OSArchitecture.ToString()} {Environment.OSVersion.Platform.ToString()} {Environment.OSVersion.ToString()}");
}
return new CPUTime(IdleTime, SystemTime);
}
}
}

View File

@@ -0,0 +1,71 @@
using System.Runtime.InteropServices;
namespace CZGL.SystemInfo
{
/// <summary>
/// Reads CPU time on Windows via GetSystemTimes.
/// </summary>
public partial class WindowsCPU
{
/*
IdleTime   - idle time
KernelTime - kernel-mode time
UserTime   - user-mode time
System time = kernel time + user time
SystemTime = KernelTime + UserTime
*/
/// <summary>
/// On a multiprocessor system, the returned values are the sums of the designated times across all processors
/// </summary>
/// <remarks><see href="https://docs.microsoft.com/en-us/windows/win32/api/processthreadsapi/nf-processthreadsapi-getsystemtimes"/></remarks>
/// <param name="lpIdleTime">Pointer to a FILETIME structure that receives the amount of time the system has been idle</param>
/// <param name="lpKernelTime">Pointer to a FILETIME structure that receives the amount of time the system has spent executing in kernel mode (all threads in all processes, on all processors); this value also includes the time the system has been idle</param>
/// <param name="lpUserTime">Pointer to a FILETIME structure that receives the amount of time the system has spent executing in user mode (all threads in all processes, on all processors)</param>
/// <returns></returns>
#if NET7_0_OR_GREATER
[LibraryImport("kernel32.dll", SetLastError = true)]
[return: MarshalAs(UnmanagedType.Bool)]
public static partial bool GetSystemTimes(out FILETIME lpIdleTime, out FILETIME lpKernelTime, out FILETIME lpUserTime);
#else
[DllImport("kernel32.dll", SetLastError = true)]
[return: MarshalAs(UnmanagedType.Bool)]
public static extern bool GetSystemTimes(out FILETIME lpIdleTime, out FILETIME lpKernelTime, out FILETIME lpUserTime);
#endif
/// <summary>
/// Builds a CPUTime sample from raw FILETIME values.
/// </summary>
/// <param name="lpIdleTime"></param>
/// <param name="lpKernelTime"></param>
/// <param name="lpUserTime"></param>
/// <returns></returns>
public static CPUTime GetCPUTime(FILETIME lpIdleTime, FILETIME lpKernelTime, FILETIME lpUserTime)
{
var IdleTime = ((ulong)lpIdleTime.DateTimeHigh << 32) | lpIdleTime.DateTimeLow;
var KernelTime = ((ulong)lpKernelTime.DateTimeHigh << 32) | lpKernelTime.DateTimeLow;
var UserTime = ((ulong)lpUserTime.DateTimeHigh << 32) | lpUserTime.DateTimeLow;
var SystemTime = KernelTime + UserTime;
return new CPUTime(IdleTime, SystemTime);
}
/// <summary>
/// Gets the current CPU time.
/// </summary>
/// <returns></returns>
public static CPUTime GetCPUTime()
{
FILETIME lpIdleTime = default;
FILETIME lpKernelTime = default;
FILETIME lpUserTime = default;
if (!GetSystemTimes(out lpIdleTime, out lpKernelTime, out lpUserTime))
{
return default;
}
return GetCPUTime(lpIdleTime, lpKernelTime, lpUserTime);
}
}
}

View File

@@ -0,0 +1,129 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace CZGL.SystemInfo
{
/// <summary>
/// Disk information
/// </summary>
public class DiskInfo
{
private readonly DriveInfo _info;
/// <summary>
/// The underlying DriveInfo instance
/// </summary>
public DriveInfo DriveInfo => _info;
private DiskInfo(DriveInfo info)
{
_info = info;
}
/// <summary>
/// Drive name
/// <para>ex: C:\</para>
/// </summary>
public string Id => _info.Name;
/// <summary>
/// Disk name
/// <para>ex:<br />
/// Windows: system<br />
/// Linux: /dev
/// </para>
/// </summary>
public string Name => _info.Name;
/// <summary>
/// Drive type
/// </summary>
/// <remarks>The type of the drive, such as CD-ROM, removable, network or fixed</remarks>
public DriveType DriveType => _info.DriveType;
/// <summary>
/// File system
/// <para>
/// Windows: NTFS, CDFS...<br />
/// Linux: rootfs, tmpfs, binfmt_misc...
/// </para>
/// </summary>
public string FileSystem => _info.DriveFormat;
/// <summary>
/// Free disk space (in bytes)
/// </summary>
public long FreeSpace => _info.AvailableFreeSpace;
/// <summary>
/// Total disk capacity (in bytes)
/// </summary>
public long TotalSize => _info.TotalSize;
/// <summary>
/// Used disk space (in bytes)
/// </summary>
public long UsedSize => TotalSize - FreeSpace;
/// <summary>
/// Root directory of the disk
/// </summary>
public string RootPath => _info.RootDirectory.FullName;
/// <summary>
/// Gets all local disks
/// </summary>
/// <returns></returns>
public static DiskInfo[] GetDisks()
{
return DriveInfo.GetDrives().Where(x => x.IsReady).Select(x => new DiskInfo(x)).ToArray();
}
/// <summary>
/// Gets the overlay mounts where Docker containers store their container file systems on the host
/// </summary>
/// <remarks>Only works when the program runs on the host; when running inside a container this API cannot obtain the information</remarks>
/// <returns></returns>
public static DiskInfo[] GetDockerMerge()
{
// The mount point (Name) identifies Docker overlay mounts; DriveFormat itself is just "overlay".
return DriveInfo.GetDrives()
.Where(x => x.DriveFormat.Equals("overlay", StringComparison.OrdinalIgnoreCase) && x.Name.Contains("docker"))
.Select(x => new DiskInfo(x)).ToArray();
}
/// <summary>
/// Filters out the disks that are actually usable
/// </summary>
/// <returns></returns>
public static DiskInfo[] GetRealDisk()
{
var disks = DriveInfo.GetDrives()
.Where(x =>
x.DriveType == DriveType.Fixed &&
x.TotalSize != 0 && x.DriveFormat != "overlay");
return disks.Select(x => new DiskInfo(x))
.Distinct(new DiskInfoEquality()).ToArray();
}
/// <summary>
/// Comparer used to remove duplicate entries
/// </summary>
private class DiskInfoEquality : IEqualityComparer<DiskInfo>
{
public bool Equals(DiskInfo x, DiskInfo y)
{
return x?.Id == y?.Id;
}
public int GetHashCode(DiskInfo obj)
{
return obj.Id.GetHashCode();
}
}
}
}
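A short usage sketch for DiskInfo, assuming the same CZGL.SystemInfo project reference; it enumerates the fixed, non-overlay drives returned by GetRealDisk and prints their capacity figures in bytes.

using System;
using CZGL.SystemInfo;

class DiskSample
{
    static void Main()
    {
        foreach (DiskInfo disk in DiskInfo.GetRealDisk())
        {
            // FreeSpace, UsedSize and TotalSize are all reported in bytes.
            Console.WriteLine($"{disk.Id} ({disk.FileSystem}): {disk.UsedSize} / {disk.TotalSize} bytes used, {disk.FreeSpace} bytes free");
        }
    }
}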

View File

@@ -0,0 +1,54 @@
using CZGL.SystemInfo.Memory;
using System.Runtime.InteropServices;
namespace CZGL.SystemInfo
{
/// <summary>
/// Reads memory information on Linux via sysinfo(2).
/// </summary>
public partial class LinuxMemory
{
/// <summary>
/// Gets the current memory usage.
/// </summary>
/// <returns></returns>
public static MemoryValue GetMemory()
{
Sysinfo info = new Sysinfo();
if (sysinfo(ref info) != 0)
{
return default;
}
var usedPercentage = (((double)info.totalram - info.freeram) / (double)info.totalram) * 100;
MemoryValue value = new MemoryValue(info.totalram, info.freeram, usedPercentage, info.totalswap, info.freeswap);
return value;
}
#if NET7_0_OR_GREATER
/// <summary>
/// Returns overall system statistics, <see href="https://linux.die.net/man/2/sysinfo"/>
/// </summary>
/// <remarks>int sysinfo(struct sysinfo *info);</remarks>
/// <param name="info"></param>
/// <returns></returns>
[LibraryImport("libc.so.6", SetLastError = true)]
[return: MarshalAs(UnmanagedType.I4)]
public static partial System.Int32 sysinfo(ref Sysinfo info);
#else
/// <summary>
/// Returns overall system statistics, <see href="https://linux.die.net/man/2/sysinfo"/>
/// </summary>
/// <remarks>int sysinfo(struct sysinfo *info);</remarks>
/// <param name="info"></param>
/// <returns></returns>
[DllImport("libc.so.6", CharSet = CharSet.Auto, SetLastError = true)]
[return: MarshalAs(UnmanagedType.I4)]
public static extern System.Int32 sysinfo(ref Sysinfo info);
#endif
}
}

View File

@@ -0,0 +1,17 @@
namespace CZGL.SystemInfo
{
/// <summary>
/// Legacy MEMORYSTATUS structure used by GlobalMemoryStatus.
/// </summary>
public struct MEMORYSTATUS
{
public uint dwLength;
public uint dwMemoryLoad;
public uint dwTotalPhys;
public uint dwAvailPhys;
public uint dwTotalPageFile;
public uint dwAvailPageFile;
public uint dwTotalVirtual;
public uint dwAvailVirtual;
}
}

View File

@@ -0,0 +1,24 @@
using System.Runtime.InteropServices;
namespace CZGL.SystemInfo
{
/// <summary>
/// Cross-platform helpers for reading memory information.
/// </summary>
public static class MemoryHelper
{
/// <summary>
/// Gets the memory information of the current system.
/// </summary>
/// <returns></returns>
public static MemoryValue GetMemoryValue()
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
return WindowsMemory.GetMemory();
if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
return LinuxMemory.GetMemory();
return default;
}
}
}
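A minimal usage sketch for MemoryHelper, assuming a project that references these sources; on platforms other than Windows and Linux the call returns a default (zeroed) MemoryValue.

using System;
using CZGL.SystemInfo;

class MemorySample
{
    static void Main()
    {
        MemoryValue memory = MemoryHelper.GetMemoryValue();
        Console.WriteLine($"Physical: {memory.UsedPhysicalMemory} / {memory.TotalPhysicalMemory} bytes ({memory.UsedPercentage:F1} % used)");
        Console.WriteLine($"Virtual:  {memory.UsedVirtualMemory} / {memory.TotalVirtualMemory} bytes");
    }
}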

View File

@@ -0,0 +1,68 @@
using System.Runtime.InteropServices;
namespace CZGL.SystemInfo.Memory
{
/// <summary>
/// Contains information about the current state of both physical and virtual memory, including extended memory. GlobalMemoryStatusEx stores the information in this structure.
/// <see href="https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/ns-sysinfoapi-memorystatusex"/>
/// </summary>
public struct MemoryStatusExE
{
/// <summary>
/// The size of the structure, in bytes; this member must be set before calling GlobalMemoryStatusEx, which the Init method takes care of
/// </summary>
/// <remarks>Use the Init method provided by this struct instead of a constructor!</remarks>
public uint dwLength;
/// <summary>
/// A number between 0 and 100 that specifies the approximate percentage of physical memory in use (0 means no memory use, 100 means full memory use)
/// </summary>
public uint dwMemoryLoad;
/// <summary>
/// The amount of actual physical memory, in bytes
/// </summary>
public ulong ullTotalPhys;
/// <summary>
/// The amount of physical memory currently available, in bytes. This is the amount of physical memory that can be immediately reused without having to write its contents to disk first; it is the sum of the standby, free and zero lists
/// </summary>
public ulong ullAvailPhys;
/// <summary>
/// The current committed memory limit for the system or the current process, whichever is smaller, in bytes. To get the system-wide committed memory limit, call GetPerformanceInfo
/// </summary>
public ulong ullTotalPageFile;
/// <summary>
/// The maximum amount of memory the current process can commit, in bytes. This value is equal to or smaller than the system-wide available commit value; to calculate the system-wide value, call GetPerformanceInfo and subtract CommitTotal from CommitLimit
/// </summary>
public ulong ullAvailPageFile;
/// <summary>
/// The size of the user-mode portion of the virtual address space of the calling process, in bytes. This value depends on the type of process, the type of processor, and the configuration of the operating system; for example, it is approximately 2 GB for most 32-bit processes on an x86 processor, and approximately 3 GB for large-address-aware 32-bit processes on a system with 4 GB tuning enabled
/// </summary>
public ulong ullTotalVirtual;
/// <summary>
/// The amount of unreserved and uncommitted memory currently in the user-mode portion of the virtual address space of the calling process, in bytes
/// </summary>
public ulong ullAvailVirtual;
/// <summary>
/// Reserved; this value is always 0
/// </summary>
public ulong ullAvailExtendedVirtual;
/// <summary>
/// Sets dwLength to the size of this structure; call before GlobalMemoryStatusEx.
/// </summary>
public void Init()
{
dwLength = checked((uint)Marshal.SizeOf(typeof(MemoryStatusExE)));
}
}
}

View File

@@ -0,0 +1,66 @@
namespace CZGL.SystemInfo
{
/// <summary>
/// A snapshot of memory usage.
/// </summary>
public struct MemoryValue
{
/// <summary>
/// Creates a memory usage snapshot.
/// </summary>
/// <param name="totalPhysicalMemory">Total physical memory, in bytes</param>
/// <param name="availablePhysicalMemory">Available physical memory, in bytes</param>
/// <param name="usedPercentage">Percentage of physical memory in use</param>
/// <param name="totalVirtualMemory">Total virtual memory, in bytes</param>
/// <param name="availableVirtualMemory">Available virtual memory, in bytes</param>
public MemoryValue(
ulong totalPhysicalMemory,
ulong availablePhysicalMemory,
double usedPercentage,
ulong totalVirtualMemory,
ulong availableVirtualMemory)
{
TotalPhysicalMemory = totalPhysicalMemory;
AvailablePhysicalMemory = availablePhysicalMemory;
UsedPercentage = usedPercentage;
TotalVirtualMemory = totalVirtualMemory;
AvailableVirtualMemory = availableVirtualMemory;
}
/// <summary>
/// Total physical memory, in bytes
/// </summary>
public ulong TotalPhysicalMemory { get; private set; }
/// <summary>
/// Available physical memory, in bytes
/// </summary>
public ulong AvailablePhysicalMemory { get; private set; }
/// <summary>
/// Used physical memory, in bytes
/// </summary>
public ulong UsedPhysicalMemory => TotalPhysicalMemory - AvailablePhysicalMemory;
/// <summary>
/// Percentage of physical memory in use (0-100, where 100 means memory is exhausted)
/// </summary>
public double UsedPercentage { get; private set; }
/// <summary>
/// Total virtual memory, in bytes
/// </summary>
public ulong TotalVirtualMemory { get; private set; }
/// <summary>
/// Available virtual memory, in bytes
/// </summary>
public ulong AvailableVirtualMemory { get; private set; }
/// <summary>
/// Used virtual memory, in bytes
/// </summary>
public ulong UsedVirtualMemory => TotalVirtualMemory - AvailableVirtualMemory;
}
}

View File

@@ -0,0 +1,74 @@
using System.Runtime.InteropServices;
namespace CZGL.SystemInfo
{
/// <summary>
/// Linux sysinfo(2) structure.
/// </summary>
public struct Sysinfo
{
/// <summary>
/// Seconds since boot
/// </summary>
public long uptime;
/// <summary>
/// 1, 5 and 15 minute load averages; array of size 3
/// </summary>
unsafe public fixed ulong loads[3];
/// <summary>
/// Total usable main memory size
/// </summary>
public ulong totalram;
/// <summary>
/// Available memory size
/// </summary>
public ulong freeram;
/// <summary>
/// Amount of shared memory
/// </summary>
public ulong sharedram;
/// <summary>
/// Memory used by buffers
/// </summary>
public ulong bufferram;
/// <summary>
/// Total swap space size
/// </summary>
public ulong totalswap;
/// <summary>
/// swap space still available
/// </summary>
public ulong freeswap;
/// <summary>
/// Number of current processes
/// </summary>
public ushort procs;
/// <summary>
/// Total high memory size
/// </summary>
public ulong totalhigh;
/// <summary>
/// Available high memory size
/// </summary>
public ulong freehigh;
/// <summary>
/// Memory unit size in bytes
/// </summary>
public uint mem_unit;
/// <summary>
/// Padding to 64 bytes
/// </summary>
unsafe public fixed byte _f[64];
}
}

View File

@@ -0,0 +1,86 @@
using CZGL.SystemInfo.Memory;
using System;
using System.Runtime.InteropServices;
namespace CZGL.SystemInfo
{
/// <summary>
/// Reads memory information on Windows via GlobalMemoryStatusEx.
/// </summary>
public partial class WindowsMemory
{
#if NET7_0_OR_GREATER
/// <summary>
/// On computers with more than 4 GB of memory, the GlobalMemoryStatus function can return incorrect information, reporting a value of -1 to indicate an overflow; applications should use the GlobalMemoryStatusEx function instead.
/// </summary>
/// <remarks>Minimum client: Windows XP [desktop apps only]; minimum server: Windows Server 2003 [desktop apps only]</remarks>
/// <param name="lpBuffer"></param>
[LibraryImport("Kernel32.dll", SetLastError = true)]
public static partial void GlobalMemoryStatus(ref MEMORYSTATUS lpBuffer);
/// <summary>
/// Retrieves information about the system's current usage of physical and virtual memory
/// </summary>
/// <remarks><see href="https://docs.microsoft.com/zh-cn/windows/win32/api/sysinfoapi/nf-sysinfoapi-globalmemorystatusex"/></remarks>
/// <param name="lpBuffer"></param>
/// <returns></returns>
[LibraryImport("Kernel32.dll", SetLastError = true)]
[return: MarshalAs(UnmanagedType.Bool)]
public static partial Boolean GlobalMemoryStatusEx(ref MemoryStatusExE lpBuffer);
#else
/// <summary>
/// On computers with more than 4 GB of memory, the GlobalMemoryStatus function can return incorrect information, reporting a value of -1 to indicate an overflow; applications should use the GlobalMemoryStatusEx function instead.
/// </summary>
/// <remarks>Minimum client: Windows XP [desktop apps only]; minimum server: Windows Server 2003 [desktop apps only]</remarks>
/// <param name="lpBuffer"></param>
[DllImport("Kernel32.dll", CharSet = CharSet.Auto, SetLastError = true)]
public static extern void GlobalMemoryStatus(ref MEMORYSTATUS lpBuffer);
/// <summary>
/// Retrieves information about the system's current usage of physical and virtual memory
/// </summary>
/// <remarks><see href="https://docs.microsoft.com/zh-cn/windows/win32/api/sysinfoapi/nf-sysinfoapi-globalmemorystatusex"/></remarks>
/// <param name="lpBuffer"></param>
/// <returns></returns>
[DllImport("Kernel32.dll", CharSet = CharSet.Auto, SetLastError = true)]
[return: MarshalAs(UnmanagedType.Bool)]
public static extern Boolean GlobalMemoryStatusEx(ref MemoryStatusExE lpBuffer);
#endif
/// <summary>
/// Gets the current memory usage.
/// </summary>
/// <returns></returns>
public static MemoryValue GetMemory()
{
// Check the Windows kernel version and bail out on very old systems
if (Environment.OSVersion.Version.Major < 5)
{
// https://en.wikipedia.org/wiki/List_of_Microsoft_Windows_versions
return default;
}
MemoryStatusExE memoryStatusEx = new MemoryStatusExE();
// Initialize the structure size
memoryStatusEx.Init();
// Refresh the values
if (!GlobalMemoryStatusEx(ref memoryStatusEx)) return default;
var TotalPhysicalMemory = memoryStatusEx.ullTotalPhys;
var AvailablePhysicalMemory = memoryStatusEx.ullAvailPhys;
var TotalVirtualMemory = memoryStatusEx.ullTotalVirtual;
var AvailableVirtualMemory = memoryStatusEx.ullAvailVirtual;
var UsedPercentage = memoryStatusEx.dwMemoryLoad;
return new MemoryValue(
TotalPhysicalMemory,
AvailablePhysicalMemory,
UsedPercentage,
TotalVirtualMemory,
AvailableVirtualMemory);
}
}
}

View File

@@ -0,0 +1,231 @@
using System;
using System.Linq;
using System.Net;
using System.Net.NetworkInformation;
using System.Net.Sockets;
using System.Runtime.InteropServices;
namespace CZGL.SystemInfo
{
/// <summary>
/// Network interface information
/// </summary>
public class NetworkInfo
{
private NetworkInterface _instance;
private NetworkInfo(NetworkInterface network)
{
_instance = network;
}
/// <summary>
/// The network interface used by this instance
/// </summary>
public NetworkInterface NetworkInterface => _instance;
#region
/// <summary>
/// Gets the identifier of the network adapter
/// </summary>
/// <remarks>ex: {92D3E528-5363-43C7-82E8-D143DC6617ED}</remarks>
public string Id => _instance.Id;
/// <summary>
/// MAC address of the interface
/// </summary>
/// <remarks>ex: 1C997AF108E3</remarks>
public string Mac => _instance.GetPhysicalAddress().ToString();
/// <summary>
/// Interface name
/// </summary>
/// <remarks>ex: Ethernet, WLAN</remarks>
public string Name => _instance.Name;
/// <summary>
/// Human-readable text describing the network interface;
/// on Windows it typically describes the vendor, type (for example Ethernet), brand and model
/// </summary>
/// <remarks>ex: Realtek PCIe GbE Family Controller, Realtek 8822CE Wireless LAN 802.11ac PCI-E NIC</remarks>
public string Trademark => _instance.Description;
/// <summary>
/// Gets the current operational status of the network connection<br />
/// </summary>
public OperationalStatus Status => _instance.OperationalStatus;
/// <summary>
/// Gets the interface type<br />
/// </summary>
public NetworkInterfaceType NetworkType => _instance.NetworkInterfaceType;
/// <summary>
/// Link speed of the interface, in bits per second
/// </summary>
/// <remarks>-1 means the link speed of this interface cannot be determined; for example 270_000_000 means a 270 Mbps link</remarks>
public long Speed => _instance.Speed;
/// <summary>
/// Whether IPv4 is supported
/// </summary>
public bool IsSupportIpv4 => _instance.Supports(NetworkInterfaceComponent.IPv4);
/// <summary>
/// Gets the anycast IP addresses assigned to this interface; only supported on Windows
/// </summary>
/// <remarks>Usually an empty array</remarks>
public IPAddress[] AnycastAddresses
{
get
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
return _instance.GetIPProperties().AnycastAddresses.Select(x => x.Address).ToArray();
}
else
{
return Array.Empty<IPAddress>();
}
}
}
/// <summary>
/// Gets the multicast addresses (IPv4 and IPv6) assigned to this interface
/// </summary>
/// <remarks>ex: ff01::1%9, ff02::1%9<br />
/// ff02::fb%9<br />
/// ff02::1:3%9<br />
/// ff02::1:ff61:9ae7%9<br />
/// 224.0.0.1</remarks>
public IPAddress[] MulticastAddresses => _instance.GetIPProperties().MulticastAddresses.Select(x => x.Address).ToArray();
/// <summary>
/// Gets the unicast addresses (IPv4 and IPv6) assigned to this interface
/// </summary>
/// <remarks>ex: 192.168.3.38</remarks>
public IPAddress[] UnicastAddresses => _instance.GetIPProperties().UnicastAddresses.Select(x => x.Address).ToArray();
/// <summary>
/// Gets the gateway addresses (IPv4 and IPv6) of this interface
/// </summary>
/// <remarks>ex: fe80::1677:40ff:fef9:bf95%5, 192.168.3.1</remarks>
public IPAddress[] GatewayAddresses => _instance.GetIPProperties().GatewayAddresses.Select(x => x.Address).ToArray();
/// <summary>
/// Gets the Domain Name System (DNS) server addresses (IPv4 and IPv6) of this interface
/// </summary>
/// <remarks>ex: fe80::1677:40ff:fef9:bf95%5, 192.168.3.1</remarks>
public IPAddress[] DnsAddresses => _instance.GetIPProperties().DnsAddresses.ToArray();
/// <summary>
/// Whether IPv6 is supported
/// </summary>
public bool IsSupportIpv6 => _instance.Supports(NetworkInterfaceComponent.IPv6);
#endregion
/// <summary>
/// Whether this host can communicate with other computers (public or private network); a network connection is considered available if any network interface is marked "up" and is not a loopback or tunnel interface.
/// </summary>
public static bool GetIsNetworkAvailable => NetworkInterface.GetIsNetworkAvailable();
/// <summary>
/// Samples the IPv4 traffic counters of this interface
/// </summary>
/// <returns>A default value if the interface does not support IPv4</returns>
public Rate GetIpv4Speed()
{
// The interface does not support IPv4
if (!IsSupportIpv4) return default;
var ipv4Statistics = _instance.GetIPv4Statistics();
var speed = new Rate(DateTime.Now, ipv4Statistics.BytesReceived, ipv4Statistics.BytesSent);
return speed;
}
/// <summary>
/// Samples the combined IPv4 and IPv6 traffic counters of this interface
/// </summary>
/// <returns></returns>
public Rate IpvSpeed()
{
var ipvStatistics = _instance.GetIPStatistics();
var speed = new Rate(DateTime.Now, ipvStatistics.BytesReceived, ipvStatistics.BytesSent);
return speed;
}
/// <summary>
/// Gets all IP addresses of this host
/// </summary>
/// <returns></returns>
public static IPAddress[] GetIPAddresses()
{
var hostName = Dns.GetHostName();
return Dns.GetHostAddresses(hostName);
}
/// <summary>
/// Gets the current real IPv4 address
/// </summary>
/// <returns></returns>
public static IPAddress TryGetRealIpv4()
{
var addrs = GetIPAddresses();
var ipv4 = addrs.FirstOrDefault(x => x.AddressFamily == AddressFamily.InterNetwork);
return ipv4;
}
/// <summary>
/// Gets the network interface that carries the real IPv4 address
/// </summary>
/// <returns></returns>
public static NetworkInfo TryGetRealNetworkInfo()
{
var realIp = TryGetRealIpv4();
if (realIp == null)
{
return default;
}
var infos = NetworkInfo.GetNetworkInfos().ToArray();
var info = infos.FirstOrDefault(x => x.UnicastAddresses.Any(i => i.MapToIPv4().ToString() == realIp.MapToIPv4().ToString()));
if (info == null)
{
return default;
}
return info;
}
/// <summary>
/// Gets all network interfaces of this host
/// </summary>
/// <returns></returns>
public static NetworkInfo[] GetNetworkInfos()
{
return NetworkInterface.GetAllNetworkInterfaces().Select(x => new NetworkInfo(x)).ToArray();
}
/// <summary>
/// Calculates the network transfer rate between two samples
/// </summary>
/// <param name="oldRate"></param>
/// <param name="newRate"></param>
/// <returns></returns>
public static (SizeInfo Received, SizeInfo Sent) GetSpeed(Rate oldRate, Rate newRate)
{
var receive = newRate.ReceivedLength - oldRate.ReceivedLength;
var send = newRate.SendLength - oldRate.SendLength;
var interval = Math.Round((newRate.StartTime - oldRate.StartTime).TotalSeconds, 2);
long rSpeed = (long)(receive / interval);
long sSpeed = (long)(send / interval);
return (SizeInfo.Get(rSpeed), SizeInfo.Get(sSpeed));
}
}
}
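A usage sketch for NetworkInfo and Rate, assuming these sources are referenced: sample the traffic counters twice, then let GetSpeed turn the byte delta into a per-second rate expressed as SizeInfo values.

using System;
using System.Threading;
using CZGL.SystemInfo;

class NetworkSample
{
    static void Main()
    {
        NetworkInfo nic = NetworkInfo.TryGetRealNetworkInfo();
        if (nic == null) return;

        // Two samples one second apart; GetSpeed divides the byte delta by the elapsed seconds.
        Rate before = nic.IpvSpeed();
        Thread.Sleep(1000);
        Rate after = nic.IpvSpeed();
        var (received, sent) = NetworkInfo.GetSpeed(before, after);
        Console.WriteLine($"{nic.Name}: down {received.Size} {received.SizeType}/s, up {sent.Size} {sent.SizeType}/s");
    }
}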

View File

@@ -0,0 +1,34 @@
using System;
namespace CZGL.SystemInfo
{
/// <summary>
/// A network traffic counter sample.
/// </summary>
public struct Rate
{
public Rate(DateTime startTime, long receivedLength, long sendLength)
{
StartTime = startTime;
ReceivedLength = receivedLength;
SendLength = sendLength;
}
/// <summary>
/// Time the sample was taken
/// </summary>
public DateTime StartTime { get; private set; }
/// <summary>
/// Total bytes received by this interface
/// </summary>
public long ReceivedLength { get; private set; }
/// <summary>
/// Total bytes sent by this interface
/// </summary>
public long SendLength { get; private set; }
}
}

View File

@@ -0,0 +1,61 @@
using System;
namespace CZGL.SystemInfo
{
/// <summary>
/// A human-readable size
/// </summary>
public struct SizeInfo
{
/// <summary>
/// Length in bytes
/// </summary>
public long ByteLength { get; private set; }
/// <summary>
/// Size in the chosen unit
/// </summary>
public decimal Size { get; set; }
/// <summary>
/// Unit
/// </summary>
public UnitType SizeType { get; set; }
/// <summary>
/// Converts a byte count to an appropriate unit
/// </summary>
/// <param name="byteLength">Length in bytes</param>
/// <returns></returns>
public static SizeInfo Get(long byteLength)
{
UnitType unit = 0;
decimal number = byteLength;
if (byteLength < 1000)
{
return new SizeInfo()
{
ByteLength = byteLength,
Size = byteLength,
SizeType = UnitType.B
};
}
// Avoid results such as 1023 B: once the value reaches 1000 it is promoted to the next unit, so 1023 B is shown as 0.99 KB
while (Math.Round(number / 1000) >= 1)
{
number = number / 1024;
unit++;
}
return new SizeInfo
{
Size = Math.Round(number, 2),
SizeType = unit,
ByteLength = byteLength
};
}
}
}
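A quick usage sketch for SizeInfo.Get, assuming the same project reference; the unit is chosen automatically from the byte count.

using System;
using CZGL.SystemInfo;

class SizeSample
{
    static void Main()
    {
        SizeInfo size = SizeInfo.Get(1536);
        // Prints "1.5 KB (1536 bytes)".
        Console.WriteLine($"{size.Size} {size.SizeType} ({size.ByteLength} bytes)");
    }
}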

View File

@@ -0,0 +1,133 @@
using System;
using System.Runtime.InteropServices;
namespace CZGL.SystemInfo
{
/// <summary>
/// Provides information about the installed .NET runtime, the operating system, and the current process.
/// </summary>
public static class SystemPlatformInfo
{
/// <summary>
/// .NET Fx/Core Runtime version
/// <para>ex: .NET Core 3.1.9</para>
/// </summary>
public static string FrameworkDescription => RuntimeInformation.FrameworkDescription;
/// <summary>
/// .NET Fx/Core version
/// <para>
/// ex:<br />
/// 3.1.9
/// </para>
/// </summary>
public static string FrameworkVersion => Environment.Version.ToString();
/// <summary>
/// Operating system architecture; see <see cref="Architecture" /> for details
/// <para>
/// ex:<br />
/// X86<br />
/// X64<br />
/// Arm<br />
/// Arm64
/// </para>
/// </summary>
public static string OSArchitecture => RuntimeInformation.OSArchitecture.ToString();
/// <summary>
/// Gets the operating system type <see cref="PlatformID"/>
/// <para>
/// ex:<br />
/// Win32S, Win32Windows, Win32NT, WinCE, Unix, Xbox, MacOSX
/// </para>
/// </summary>
public static string OSPlatformID => Environment.OSVersion.Platform.ToString();
/// <summary>
/// Operating system kernel version
/// <para>
/// ex:<br />
/// Microsoft Windows NT 6.2.9200.0<br />
/// Unix 4.4.0.19041
/// </para>
/// </summary>
public static string OSVersion => Environment.OSVersion.ToString();
/// <summary>
/// Operating system version description
/// <para>
/// ex: <br />
/// Microsoft Windows 10.0.19041
/// <br />
/// Linux 4.4.0-19041-Microsoft #488-Microsoft Mon Sep 01 13:43:00 PST 2020
/// </para>
/// </summary>
public static string OSDescription => RuntimeInformation.OSDescription;
/// <summary>
/// Architecture of this process; see <see cref="Architecture" /> for details
/// <para>
/// ex:<br />
/// X86<br />
/// X64<br />
/// Arm<br />
/// Arm64
/// </para>
/// </summary>
public static string ProcessArchitecture => RuntimeInformation.ProcessArchitecture.ToString();
/// <summary>
/// Number of logical processors on the current machine
/// </summary>
/// <remarks>For a CPU with 4 cores and 8 threads, this returns 8</remarks>
public static int ProcessorCount => Environment.ProcessorCount;
/// <summary>
/// Machine name
/// </summary>
public static string MachineName => Environment.MachineName;
/// <summary>
/// Name of the user currently logged on to this system
/// </summary>
public static string UserName => Environment.UserName;
/// <summary>
/// Network domain name of the user (the host name when the machine is not domain-joined)
/// </summary>
public static string UserDomainName => Environment.UserDomainName;
/// <summary>
/// Whether the process is running in interactive mode
/// </summary>
public static bool IsUserInteractive => Environment.UserInteractive;
/// <summary>
/// List of the system's disks and partitions
/// <para>
/// ex:<br />
/// Windows: D:\, E:\, F:\, G:\, H:\, J:\, X:\<br />
/// Linux: /, /dev, /sys, /proc, /dev/pts, /run, /run/lock, /run/shm ...
/// </para>
/// </summary>
public static string[] GetLogicalDrives => Environment.GetLogicalDrives();
/// <summary>
/// Fully qualified path of the system directory. <b>Linux has no system directory</b>
/// <para>
/// ex:<br />
/// Windows: X:\WINDOWS\system32<br></br>
/// Linux : null
/// </para>
/// </summary>
public static string SystemDirectory => Environment.SystemDirectory;
/// <summary>
/// Size of an operating system memory page, in bytes
/// </summary>
public static int MemoryPageSize => Environment.SystemPageSize;
}
}
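A minimal usage sketch that dumps a few of the properties above, assuming a console project referencing these sources.

using System;
using CZGL.SystemInfo;

class PlatformSample
{
    static void Main()
    {
        Console.WriteLine($"Runtime: {SystemPlatformInfo.FrameworkDescription}");
        Console.WriteLine($"OS:      {SystemPlatformInfo.OSDescription} ({SystemPlatformInfo.OSArchitecture})");
        Console.WriteLine($"Machine: {SystemPlatformInfo.MachineName}, {SystemPlatformInfo.ProcessorCount} logical processors");
        Console.WriteLine($"Drives:  {string.Join(", ", SystemPlatformInfo.GetLogicalDrives)}");
    }
}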

View File

@@ -0,0 +1,38 @@
namespace CZGL.SystemInfo
{
/// <summary>
/// Size unit
/// </summary>
public enum UnitType : int
{
/// <summary>
/// Byte
/// </summary>
B = 0,
/// <summary>
/// KB
/// </summary>
KB,
/// <summary>
/// MB
/// </summary>
MB,
/// <summary>
/// GB
/// </summary>
GB,
/// <summary>
/// TB
/// </summary>
TB,
/// <summary>
/// PB
/// </summary>
PB
}
}

View File

@@ -0,0 +1,394 @@
using System;
using System.Text;
using System.Text.RegularExpressions;
namespace MES.Utility.Core
{
public static class Checker
{
#region IP
/// <summary>
/// Validates an IPv4 address
/// </summary>
/// <param name="source"></param>
/// <returns></returns>
public static bool IsIP(this string source)
{
return Regex.IsMatch(source, @"^(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])$", RegexOptions.IgnoreCase);
}
public static bool HasIP(this string source)
{
return Regex.IsMatch(source, @"(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9])\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[1-9]|0)\.(25[0-5]|2[0-4][0-9]|[0-1]{1}[0-9]{2}|[1-9]{1}[0-9]{1}|[0-9])", RegexOptions.IgnoreCase);
}
#endregion
#region Email
/// <summary>
/// Validates an e-mail address
/// </summary>
/// <param name="source">The e-mail address to validate</param>
public static bool IsEmail(this string source)
{
return Regex.IsMatch(source, @"^[A-Za-z0-9](([_\.\-]?[a-zA-Z0-9]+)*)@([A-Za-z0-9]+)(([\.\-]?[a-zA-Z0-9]+)*)\.([A-Za-z]{2,})$", RegexOptions.IgnoreCase);
}
public static bool HasEmail(this string source)
{
return Regex.IsMatch(source, @"[A-Za-z0-9](([_\.\-]?[a-zA-Z0-9]+)*)@([A-Za-z0-9]+)(([\.\-]?[a-zA-Z0-9]+)*)\.([A-Za-z]{2,})", RegexOptions.IgnoreCase);
}
#endregion
#region
/// <summary>
/// Validates a URL
/// </summary>
/// <param name="source"></param>
/// <returns></returns>
public static bool IsUrl(this string source)
{
return Regex.IsMatch(source, @"^(((file|gopher|news|nntp|telnet|http|ftp|https|ftps|sftp)://)|(www\.))+(([a-zA-Z0-9\._-]+\.[a-zA-Z]{2,6})|([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}))(/[a-zA-Z0-9\&amp;%_\./-~-]*)?$", RegexOptions.IgnoreCase);
}
public static bool HasUrl(this string source)
{
return Regex.IsMatch(source, @"(((file|gopher|news|nntp|telnet|http|ftp|https|ftps|sftp)://)|(www\.))+(([a-zA-Z0-9\._-]+\.[a-zA-Z]{2,6})|([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}))(/[a-zA-Z0-9\&amp;%_\./-~-]*)?", RegexOptions.IgnoreCase);
}
#endregion
#region
/// <summary>
/// Validates a date string
/// </summary>
/// <param name="source"></param>
/// <returns></returns>
public static bool IsDateTime(this string source)
{
try
{
DateTime time = Convert.ToDateTime(source);
return true;
}
catch
{
return false;
}
}
#endregion
#region
/// <summary>
/// Validates a Chinese mobile phone number
/// </summary>
/// <param name="source"></param>
/// <returns></returns>
public static bool IsMobile(this string source)
{
return Regex.IsMatch(source, @"^1[35678]\d{9}$", RegexOptions.IgnoreCase);
}
public static bool HasMobile(this string source)
{
return Regex.IsMatch(source, @"1[35678]\d{9}", RegexOptions.IgnoreCase);
}
#endregion
#region
/// <summary>
/// Validates a Chinese ID card number
/// </summary>
/// <param name="Id"></param>
/// <returns></returns>
public static bool IsIDCard(this string Id)
{
if (Id.Length == 18)
{
bool check = IsIDCard18(Id);
return check;
}
else if (Id.Length == 15)
{
bool check = IsIDCard15(Id);
return check;
}
else
{
return false;
}
}
public static bool IsIDCard18(this string Id)
{
long n = 0;
if (long.TryParse(Id.Remove(17), out n) == false || n < Math.Pow(10, 16) || long.TryParse(Id.Replace('x', '0').Replace('X', '0'), out n) == false)
{
return false;//数字验证
}
string address = "11x22x35x44x53x12x23x36x45x54x13x31x37x46x61x14x32x41x50x62x15x33x42x51x63x21x34x43x52x64x65x71x81x82x91";
if (address.IndexOf(Id.Remove(2)) == -1)
{
return false;//省份验证
}
string birth = Id.Substring(6, 8).Insert(6, "-").Insert(4, "-");
DateTime time = new DateTime();
if (DateTime.TryParse(birth, out time) == false)
{
return false;//生日验证
}
string[] arrVarifyCode = ("1,0,x,9,8,7,6,5,4,3,2").Split(',');
string[] Wi = ("7,9,10,5,8,4,2,1,6,3,7,9,10,5,8,4,2").Split(',');
char[] Ai = Id.Remove(17).ToCharArray();
int sum = 0;
for (int i = 0; i < 17; i++)
{
sum += int.Parse(Wi[i]) * int.Parse(Ai[i].ToString());
}
int y = -1;
Math.DivRem(sum, 11, out y);
if (arrVarifyCode[y] != Id.Substring(17, 1).ToLower())
{
return false;//校验码验证
}
return true;//符合GB11643-1999标准
}
public static bool IsIDCard15(this string Id)
{
long n = 0;
if (long.TryParse(Id, out n) == false || n < Math.Pow(10, 14))
{
return false;//数字验证
}
string address = "11x22x35x44x53x12x23x36x45x54x13x31x37x46x61x14x32x41x50x62x15x33x42x51x63x21x34x43x52x64x65x71x81x82x91";
if (address.IndexOf(Id.Remove(2)) == -1)
{
return false;//省份验证
}
string birth = Id.Substring(6, 6).Insert(4, "-").Insert(2, "-");
DateTime time = new DateTime();
if (DateTime.TryParse(birth, out time) == false)
{
return false;//生日验证
}
return true;//符合15位身份证标准
}
#endregion
#region Int
/// <summary>
/// Whether the string is a valid Int32
/// </summary>
/// <param name="source"></param>
/// <returns></returns>
public static bool IsInt(this string source)
{
Regex regex = new Regex(@"^(-){0,1}\d+$");
if (regex.Match(source).Success)
{
if ((long.Parse(source) > 0x7fffffffL) || (long.Parse(source) < -2147483648L))
{
return false;
}
return true;
}
return false;
}
#endregion
#region
/// <summary>
/// Checks whether the string length is within the given bounds (a Chinese character counts as two characters)
/// </summary>
/// <param name="source">The string</param>
/// <param name="begin">Lower bound (inclusive)</param>
/// <param name="end">Upper bound (inclusive)</param>
/// <returns></returns>
public static bool IsLengthStr(this string source, int begin, int end)
{
int length = Regex.Replace(source, @"[^\x00-\xff]", "OK").Length;
return (length >= begin) && (length <= end);
}
#endregion
#region 010-85849685
/// <summary>
/// Whether the string is a Chinese landline number such as 010-85849685
/// </summary>
/// <param name="source"></param>
/// <returns></returns>
public static bool IsTel(this string source)
{
return Regex.IsMatch(source, @"^\d{3,4}-?\d{6,8}$", RegexOptions.IgnoreCase);
}
#endregion
#region Postcode
/// <summary>
/// Postal code: exactly 6 digits
/// </summary>
/// <param name="source"></param>
/// <returns></returns>
public static bool IsPostCode(this string source)
{
return Regex.IsMatch(source, @"^\d{6}$", RegexOptions.IgnoreCase);
}
#endregion
#region
/// <summary>
/// Whether the string consists only of Chinese characters
/// </summary>
/// <param name="source"></param>
/// <returns></returns>
public static bool IsChinese(this string source)
{
return Regex.IsMatch(source, @"^[\u4e00-\u9fa5]+$", RegexOptions.IgnoreCase);
}
public static bool hasChinese(this string source)
{
return Regex.IsMatch(source, @"[\u4e00-\u9fa5]+", RegexOptions.IgnoreCase);
}
#endregion
#region 线
/// <summary>
/// Validates that the string contains only normal characters: letters, digits and underscores
/// </summary>
/// <param name="source"></param>
/// <returns></returns>
public static bool IsNormalChar(this string source)
{
return Regex.IsMatch(source, @"[\w\d_]+", RegexOptions.IgnoreCase);
}
#endregion
#region UserId
/// <summary>
/// Validates a user name: it must start with a letter, may contain letters, digits, "_" and ".", and must be at least 5 characters long
/// </summary>
/// <param name="str"></param>
/// <returns></returns>
public static bool checkUserId(this string str)
{
Regex regex = new Regex("[a-zA-Z]{1}([a-zA-Z0-9]|[._]){4,19}");
if (regex.Match(str).Success)
if (regex.Matches(str)[0].Value.Length == str.Length)
return true;
return false;
}
#endregion
/// <summary>
/// Whether the string is a Base64 string
/// </summary>
/// <param name="eStr"></param>
/// <returns></returns>
public static bool IsBase64(string eStr)
{
if ((eStr.Length % 4) != 0)
{
return false;
}
if (!Regex.IsMatch(eStr, "^[A-Z0-9/+=]*$", RegexOptions.IgnoreCase))
{
return false;
}
return true;
}
#region
public static bool IsValidDecimal(this string strIn)
{
return Regex.IsMatch(strIn, @"[0].d{1,2}|[1]");
}
#endregion
#region
public static bool IsValidDate(this string strIn)
{
return Regex.IsMatch(strIn, @"^2d{3}-(?:0?[1-9]|1[0-2])-(?:0?[1-9]|[1-2]d|3[0-1])(?:0?[1-9]|1d|2[0-3]):(?:0?[1-9]|[1-5]d):(?:0?[1-9]|[1-5]d)$");
}
#endregion
#region
// Checks whether the string is a correctly formatted date
public static bool IsDate(this string str)
{
// Accounts for leap years (366 days every four years) and the special case of February
Regex reg = new Regex(@"^((((1[6-9]|[2-9]\d)\d{2})-(0?[13578]|1[02])-(0?[1-9]|[12]\d|3[01]))|(((1[6-9]|[2-9]\d)\d{2})-(0?[13456789]|1[012])-(0?[1-9]|[12]\d|30))|(((1[6-9]|[2-9]\d)\d{2})-0?2-(0?[1-9]|1\d|2[0-8]))|(((1[6-9]|[2-9]\d)(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00))-0?2-29-)) (20|21|22|23|[0-1]?\d):[0-5]?\d:[0-5]?\d$");
return reg.IsMatch(str);
}
#endregion
#region
public static bool IsValidPostfix(this string strIn)
{
return Regex.IsMatch(strIn, @".(?i:gif|jpg)$");
}
#endregion
#region 412
public static bool IsValidByte(this string strIn)
{
return Regex.IsMatch(strIn, @"^[a-z]{4,12}$");
}
#endregion
#region
/// <summary>
/// Checks whether every character in the string is a digit
/// </summary>
/// <param name="str">The string to validate</param>
/// <returns>bool</returns>
public static bool IsNumber(this string str)
{
bool result = true;
foreach (char ar in str)
{
if (!char.IsNumber(ar))
{
result = false;
break;
}
}
return result;
}
#endregion
#region
/// <summary>
/// Whether the string is a numeric value
/// </summary>
/// <param name="strNumber"></param>
/// <returns></returns>
public static bool IsDecimal(this string strNumber)
{
return new System.Text.RegularExpressions.Regex(@"^([0-9])[0-9]*(\.\w*)?$").IsMatch(strNumber);
}
#endregion
#region /
/// <summary>
/// Validates whether the string contains Chinese characters
/// </summary>
/// <param name="str"></param>
/// <returns></returns>
public static bool IsHanyu(this string str)
{
Regex regex = new Regex("[\u4e00-\u9fa5]");
if (regex.Match(str).Success)
return true;
else
return false;
}
/// <summary>
/// Validates whether the string consists entirely of Chinese characters
/// </summary>
/// <param name="str"></param>
/// <returns></returns>
public static bool IsHanyuAll(this string str)
{
Regex regex = new Regex("[\u4e00-\u9fa5]");
// Passes when the matched length equals the length of the string being validated
if (regex.Match(str).Success)
if (regex.Matches(str).Count == str.Length)
return true;
// Otherwise, not valid
return false;
}
#endregion
}
}
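A few usage sketches for the extension methods above, assuming a project that references MES.Utility.Core; the expected results follow from the regular expressions shown.

using System;
using MES.Utility.Core;

class CheckerSample
{
    static void Main()
    {
        Console.WriteLine("192.168.3.38".IsIP());        // True
        Console.WriteLine("user@example.com".IsEmail()); // True
        Console.WriteLine("021-12345678".IsTel());       // True
        Console.WriteLine("100083".IsPostCode());        // True
        Console.WriteLine("abc123".IsNumber());          // False
    }
}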

View File

@@ -0,0 +1,464 @@
using System;
using System.Globalization;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
namespace MES.Utility.Core
{
/// <summary>
/// ChineseDateTime: a Chinese lunisolar calendar date.
/// A day has twelve shichen; a shichen has four ke; a ke has three cups of tea; a cup of tea has two sticks of incense;
/// a stick of incense has five fen; a fen has six finger snaps; a finger snap has ten kshana; one kshana is one thought.
/// </summary>
public class ChineseDateTime
{
#region ====== ======
private readonly ChineseLunisolarCalendar _chineseDateTime;
private readonly DateTime _dateTime;
private readonly int _serialMonth;
private static readonly string[] _chineseNumber = { "", "一", "二", "三", "四", "五", "六", "七", "八", "九" };
private static readonly string[] _chineseMonth =
{
"正", "二", "三", "四", "五", "六", "七", "八", "九", "十", "冬", "腊"
};
private static readonly string[] _chineseDay =
{
"初一", "初二", "初三", "初四", "初五", "初六", "初七", "初八", "初九", "初十",
"十一", "十二", "十三", "十四", "十五", "十六", "十七", "十八", "十九", "二十",
"廿一", "廿二", "廿三", "廿四", "廿五", "廿六", "廿七", "廿八", "廿九", "三十"
};
private static readonly string[] _chineseWeek =
{
"星期日", "星期一", "星期二", "星期三", "星期四", "星期五", "星期六"
};
private static readonly string[] _celestialStem = { "甲", "乙", "丙", "丁", "戊", "己", "庚", "辛", "壬", "癸" };
private static readonly string[] _terrestrialBranch = { "子", "丑", "寅", "卯", "辰", "巳", "午", "未", "申", "酉", "戌", "亥" };
private static readonly string[] _chineseZodiac = { "鼠", "牛", "虎", "兔", "龙", "蛇", "马", "羊", "猴", "鸡", "狗", "猪" };
private static readonly string[] _solarTerm =
{
"小寒", "大寒", "立春", "雨水", "惊蛰", "春分",
"清明", "谷雨", "立夏", "小满", "芒种", "夏至",
"小暑", "大暑", "立秋", "处暑", "白露", "秋分",
"寒露", "霜降", "立冬", "小雪", "大雪", "冬至"
};
private static readonly int[] _solarTermInfo = {
0, 21208, 42467, 63836, 85337, 107014, 128867, 150921, 173149, 195551, 218072, 240693, 263343, 285989,
308563, 331033, 353350, 375494, 397447, 419210, 440795, 462224, 483532, 504758
};
#endregion
#region ======= ======
public ChineseDateTime(DateTime dateTime)
{
_chineseDateTime = new ChineseLunisolarCalendar();
if (dateTime < _chineseDateTime.MinSupportedDateTime || dateTime > _chineseDateTime.MaxSupportedDateTime)
{
throw new ArgumentOutOfRangeException(
$"参数日期不在有效的范围内:只支持{_chineseDateTime.MinSupportedDateTime.ToShortDateString()}到{_chineseDateTime.MaxSupportedDateTime}");
}
Year = _chineseDateTime.GetYear(dateTime);
Month = _chineseDateTime.GetMonth(dateTime);
Day = _chineseDateTime.GetDayOfMonth(dateTime);
IsLeep = _chineseDateTime.IsLeapMonth(Year, Month);
_dateTime = dateTime;
_serialMonth = Month;
var leepMonth = _chineseDateTime.GetLeapMonth(Year);
if (leepMonth > 0 && leepMonth <= Month) Month--;
}
/// <summary>
/// Creates an instance from a lunar year, month and day, plus whether the month is a leap month
/// </summary>
/// <param name="year"></param>
/// <param name="month"></param>
/// <param name="day"></param>
/// <param name="isLeap"></param>
public ChineseDateTime(int year, int month, int day, bool isLeap = false)
: this(year, month, day, 0, 0, 0, isLeap)
{
}
public ChineseDateTime(int year, int month, int day, int hour, int minute, int second, bool isLeap = false)
: this(year, month, day, hour, minute, second, 0, isLeap)
{
}
public ChineseDateTime(int year, int month, int day, int hour, int minute, int second, int millisecond, bool isLeap = false)
{
_chineseDateTime = new ChineseLunisolarCalendar();
if (year < _chineseDateTime.MinSupportedDateTime.Year || year >= _chineseDateTime.MaxSupportedDateTime.Year)
{
throw new ArgumentOutOfRangeException(
$"参数年份不在有效的范围内,只支持{_chineseDateTime.MinSupportedDateTime.Year}到{_chineseDateTime.MaxSupportedDateTime.Year - 1}");
}
if (month < 1 || month > 12) throw new ArgumentOutOfRangeException($"月份只支持1-12");
IsLeep = isLeap;
var leepMonth = _chineseDateTime.GetLeapMonth(year);
if (leepMonth - 1 != month)
IsLeep = false;
_serialMonth = month;
if (leepMonth > 0 && (month == leepMonth - 1 && isLeap || month > leepMonth - 1))
_serialMonth = month + 1;
if (_chineseDateTime.GetDaysInMonth(year, _serialMonth) < day || day < 1)
throw new ArgumentOutOfRangeException($"指定的月份天数,不在有效的范围内");
Year = year;
Month = month;
Day = day;
_dateTime = _chineseDateTime.ToDateTime(Year, _serialMonth, Day, hour, minute, second, millisecond);
}
public static ChineseDateTime Now => new ChineseDateTime(DateTime.Now);
#endregion
#region ====== ======
public int Year { get; }
public int Month { get; }
public int Day { get; }
/// <summary>
/// Whether the month is a leap month
/// </summary>
public bool IsLeep { get; }
#endregion
#region ====== ======
/// <summary>
/// 转换为公历
/// </summary>
/// <returns></returns>
public DateTime ToDateTime()
{
return _chineseDateTime.ToDateTime(Year, _serialMonth, Day, _dateTime.Hour,
_dateTime.Minute,
_dateTime.Second, _dateTime.Millisecond);
}
/// <summary>
/// 短日期(农历)
/// </summary>
/// <returns></returns>
public string ToShortDateString()
{
return $"{Year}-{GetLeap(false)}{Month}-{Day}";
}
/// <summary>
/// 长日期(农历)
/// </summary>
/// <returns></returns>
public string ToLongDateString()
{
return $"{Year}年{GetLeap()}{Month}月-{Day}日";
}
public new string ToString()
{
return $"{Year}-{GetLeap(false)}{Month}-{Day} {_dateTime.Hour}:{_dateTime.Minute}:{_dateTime.Second}";
}
#endregion
#region ====== ======
public string ToChineseString()
{
return ToChineseString("yMd");
}
public string GetChineseDate()
{
var date = new StringBuilder();
date.Append(GetMonth() + "月");
date.Append(GetDay() + "");
date.AppendLine();
date.Append(GetEraYear() + ChineseZodiac + "年");
date.AppendLine();
return date.ToString();
}
public string ToChineseString(string format)
{
var year = GetYear();
var month = GetMonth();
var day = GetDay();
var date = new StringBuilder();
foreach (var item in format.ToCharArray())
{
switch (item)
{
case 'y':
date.Append($"{year}年");
break;
case 'M':
date.Append($"{month}月");
break;
case 'd':
date.Append($"{day}");
break;
default:
date.Append(item);
break;
}
}
var def = $"{year}年{month}月{day}";
var result = date.ToString();
return string.IsNullOrEmpty(result) ? def : result;
}
public string ChineseWeek => _chineseWeek[(int)_dateTime.DayOfWeek];
#endregion
#region ====== ======
public string ToChineseEraString()
{
return ToChineseEraString("yMdHm");
}
public string ToChineseEraString(string format)
{
var year = GetEraYear();
var month = GetEraMonth();
var day = GetEraDay();
var hour = GetEraHour();
var minute = GetEraMinute();
var date = new StringBuilder();
foreach (var item in format.ToCharArray())
{
switch (item)
{
case 'y':
date.Append($"{year}年");
break;
case 'M':
date.Append($"{month}月");
break;
case 'd':
date.Append($"{day}日");
break;
case 'H':
date.Append($"{hour}时");
break;
case 'm':
date.Append($"{minute}刻");
break;
default:
date.Append(item);
break;
}
}
var def = $"{year}年{month}月{day}日{hour}时";
var result = date.ToString();
return result.IsNullOrEmpty() ? def : result;
}
public string ChineseZodiac => _chineseZodiac[(Year - 4) % 12];
#endregion
#region ====== (Chinese) ======
private string GetYear()
{
var yearArray = Array.ConvertAll(Year.ToString().ToCharArray(), x => int.Parse(x.ToString()));
var year = new StringBuilder();
foreach (var item in yearArray)
year.Append(_chineseNumber[item]);
return year.ToString();
}
private string GetMonth()
{
return $"{GetLeap()}{_chineseMonth[Month - 1]}";
}
private string GetDay()
{
return _chineseDay[Day - 1];
}
private string GetLeap(bool isChinese = true)
{
return IsLeep ? isChinese ? "闰" : "L" : "";
}
#endregion
#region ====== ======
//年采用的头尾法,月采用的是节令法,主流日历基本上都这种结合,如百度的日历
private string GetEraYear()
{
var sexagenaryYear = _chineseDateTime.GetSexagenaryYear(_dateTime);
var stemIndex = _chineseDateTime.GetCelestialStem(sexagenaryYear) - 1;
var branchIndex = _chineseDateTime.GetTerrestrialBranch(sexagenaryYear) - 1;
return $"{_celestialStem[stemIndex]}{_terrestrialBranch[branchIndex]}";
}
private string GetEraMonth()
{
#region ====== ======
var solarIndex = SolarTermFunc((x, y) => x <= y, out var dt);
solarIndex = solarIndex == -1 ? 23 : solarIndex;
var currentIndex = (int)Math.Floor(solarIndex / (decimal)2);
//天干
var solarMonth = currentIndex == 0 ? 11 : currentIndex - 1; //计算天干序(月份)
var sexagenaryYear = _chineseDateTime.GetSexagenaryYear(_dateTime);
var stemYear = _chineseDateTime.GetCelestialStem(sexagenaryYear) - 1;
if (solarMonth == 0) //立春时,春节前后的不同处理
{
var year = _chineseDateTime.GetYear(dt);
var month = _chineseDateTime.GetMonth(dt);
stemYear = year == Year && month != 1 ? stemYear + 1 : stemYear;
}
if (solarMonth == 11) //立春在春节后,对前一节气春节前后不同处理
{
var year = _chineseDateTime.GetYear(dt);
stemYear = year != Year ? stemYear - 1 : stemYear;
}
int stemIndex;
switch (stemYear)
{
case 0:
case 5:
stemIndex = 3;
break;
case 1:
case 6:
stemIndex = 5;
break;
case 2:
case 7:
stemIndex = 7;
break;
case 3:
case 8:
stemIndex = 9;
break;
default:
stemIndex = 1;
break;
}
//天干序
stemIndex = (stemIndex - 1 + solarMonth) % 10;
//地支序
var branchIndex = currentIndex >= 11 ? currentIndex - 11 : currentIndex + 1;
return $"{_celestialStem[stemIndex]}{_terrestrialBranch[branchIndex]}";
#endregion
#region ====== ======
//这里算法要容易些,原理和节令法一样,只需取农历整年整月即可。未贴上来
#endregion
}
private string GetEraDay()
{
var ts = _dateTime - new DateTime(1901, 2, 15);
var offset = ts.Days;
var sexagenaryDay = offset % 60;
return $"{_celestialStem[sexagenaryDay % 10]}{_terrestrialBranch[sexagenaryDay % 12]}";
}
private string GetEraHour()
{
var hourIndex = (int)Math.Floor((_dateTime.Hour + 1) / (decimal)2);
hourIndex = hourIndex == 12 ? 0 : hourIndex;
return _terrestrialBranch[hourIndex];
}
private string GetEraMinute()
{
var realMinute = (_dateTime.Hour % 2 == 0 ? 60 : 0) + _dateTime.Minute;
return $"{_chineseNumber[(int)Math.Floor(realMinute / (decimal)30) + 1]}";
}
#endregion
#region ====== 24 ======
/// <summary>
/// 当前节气,没有则返回空
/// </summary>
public string SolarTerm
{
get
{
var i = SolarTermFunc((x, y) => x == y, out var dt);
return i == -1 ? "" : _solarTerm[i];
}
}
/// <summary>
/// 上一个节气
/// </summary>
public string SolarTermPrev
{
get
{
var i = SolarTermFunc((x, y) => x < y, out var dt);
return i == -1 ? "" : _solarTerm[i];
}
}
/// <summary>
/// 下一个节气
/// </summary>
public string SolarTermNext
{
get
{
var i = SolarTermFunc((x, y) => x > y, out var dt);
return i == -1 ? "" : $"{_solarTerm[i]}";
}
}
/// <summary>
/// 节气计算(当前年),返回指定条件的节气序及日期(公历)
/// </summary>
/// <param name="func"></param>
/// <param name="dateTime"></param>
/// <returns>-1时即没找到</returns>
private int SolarTermFunc(Expression<Func<int, int, bool>> func, out DateTime dateTime)
{
var baseDateAndTime = new DateTime(1900, 1, 6, 2, 5, 0); //#1/6/1900 2:05:00 AM#
var year = _dateTime.Year;
int[] solar = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24 };
var expressionType = func.Body.NodeType;
if (expressionType != ExpressionType.LessThan && expressionType != ExpressionType.LessThanOrEqual &&
expressionType != ExpressionType.GreaterThan && expressionType != ExpressionType.GreaterThanOrEqual &&
expressionType != ExpressionType.Equal)
{
throw new NotSupportedException("不受支持的操作符");
}
if (expressionType == ExpressionType.LessThan || expressionType == ExpressionType.LessThanOrEqual)
{
solar = solar.OrderByDescending(x => x).ToArray();
}
foreach (var item in solar)
{
var num = 525948.76 * (year - 1900) + _solarTermInfo[item - 1];
var newDate = baseDateAndTime.AddMinutes(num); //按分钟计算
if (func.Compile()(newDate.DayOfYear, _dateTime.DayOfYear))
{
dateTime = newDate;
return item - 1;
}
}
dateTime = _chineseDateTime.MinSupportedDateTime;
return -1;
}
#endregion
}
}
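A minimal usage sketch for ChineseDateTime, assuming the MES.Utility.Core sources are referenced; the printed strings depend on the current date, so the comments only indicate the general shape of the output.

using System;
using MES.Utility.Core;

class LunarSample
{
    static void Main()
    {
        ChineseDateTime lunar = ChineseDateTime.Now;
        Console.WriteLine(lunar.ToChineseString());    // e.g. 二零二五年七月初四
        Console.WriteLine(lunar.ToChineseEraString()); // sexagenary year/month/day/hour plus the quarter
        Console.WriteLine(lunar.ChineseZodiac);        // zodiac animal of the lunar year
        Console.WriteLine(lunar.ToDateTime());         // back to the Gregorian calendar
    }
}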

File diff suppressed because it is too large

View File

@@ -0,0 +1,55 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Reflection;
using System.Text;
namespace MES.Utility.Core
{
/// <summary>
/// Common helpers for working with enum types.
/// </summary>
public static class EnumHelper
{
/// <summary>
/// Gets the names of all members of an enum type.
/// </summary>
/// <typeparam name="T">Enum type</typeparam>
public static string[] GetNames<T>()
{
return Enum.GetNames(typeof(T));
}
/// <summary>
/// Checks whether the value is a defined member of its enum type.
/// </summary>
/// <param name="value">Enum value</param>
public static bool IsDefined(this Enum value)
{
Type type = value.GetType();
return Enum.IsDefined(type, value);
}
/// <summary>
/// Returns the Description attribute text of the given enum value.
/// </summary>
/// <param name="value">Enum value</param>
/// <returns></returns>
public static string GetDescription(this Enum value)
{
try
{
Type type = value.GetType();
FieldInfo field = type.GetField(value.ToString());
DescriptionAttribute[] attributes = (DescriptionAttribute[])field.GetCustomAttributes(typeof(DescriptionAttribute), false);
return (attributes.Length > 0) ? attributes[0].Description : string.Empty;
}
catch
{
return string.Empty;
}
}
}
}
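A usage sketch for EnumHelper with a hypothetical enum, assuming a project reference to MES.Utility.Core; GetDescription reads the System.ComponentModel.Description attribute.

using System;
using System.ComponentModel;
using MES.Utility.Core;

enum OrderState
{
    [Description("待处理")]
    Pending,
    [Description("已完成")]
    Done
}

class EnumSample
{
    static void Main()
    {
        Console.WriteLine(OrderState.Pending.GetDescription());                  // 待处理
        Console.WriteLine(OrderState.Pending.IsDefined());                       // True
        Console.WriteLine(string.Join(", ", EnumHelper.GetNames<OrderState>())); // Pending, Done
    }
}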

View File

@@ -0,0 +1,257 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace MES.Utility.Core
{
public static class ExtDateTime
{
/// <summary>
/// 获取格式化字符串,不带时分秒。格式:"yyyy-MM-dd"
/// </summary>
/// <param name="dateTime">日期</param>
public static string ToDateString(this DateTime dateTime)
{
return dateTime.ToString("yyyy-MM-dd");
}
/// <summary>
/// 获取格式化字符串,不带时分秒。格式:"yyyy-MM-dd"
/// </summary>
/// <param name="dateTime">日期</param>
public static string ToDateString(this DateTime? dateTime)
{
if (dateTime == null)
{
return string.Empty;
}
return ToDateString(dateTime.Value);
}
/// <summary>
/// 获取格式化字符串,不带年月日,格式:"HH:mm:ss"
/// </summary>
/// <param name="dateTime">日期</param>
public static string ToTimeString(this DateTime dateTime)
{
return dateTime.ToString("HH:mm:ss");
}
/// <summary>
/// 获取格式化字符串,不带年月日,格式:"HH:mm:ss"
/// </summary>
/// <param name="dateTime">日期</param>
public static string ToTimeString(this DateTime? dateTime)
{
if (dateTime == null)
{
return string.Empty;
}
return ToTimeString(dateTime.Value);
}
/// <summary>
/// 获取格式化字符串,带毫秒,格式:"yyyy-MM-dd HH:mm:ss.fff"
/// </summary>
/// <param name="dateTime">日期</param>
public static string ToMillisecondString(this DateTime dateTime)
{
return dateTime.ToString("yyyy-MM-dd HH:mm:ss.fff");
}
/// <summary>
/// 获取格式化字符串,带毫秒,格式:"yyyy-MM-dd HH:mm:ss.fff"
/// </summary>
/// <param name="dateTime">日期</param>
public static string ToMillisecondString(this DateTime? dateTime)
{
if (dateTime == null)
{
return string.Empty;
}
return ToMillisecondString(dateTime.Value);
}
/// <summary>
/// 获取格式化字符串,不带时分秒,格式:"yyyy年MM月dd日"
/// </summary>
/// <param name="dateTime">日期</param>
public static string ToChineseDateString(this DateTime dateTime)
{
return string.Format("{0}年{1}月{2}日", dateTime.Year, dateTime.Month, dateTime.Day);
}
/// <summary>
/// 获取格式化字符串,带时分秒,格式:"yyyy年MM月dd日 HH时mm分"
/// </summary>
/// <param name="dateTime">日期</param>
/// <param name="isRemoveSecond">是否移除秒</param>
public static string ToChineseDateTimeString(this DateTime dateTime, bool isRemoveSecond = false)
{
StringBuilder result = new StringBuilder();
result.AppendFormat("{0}年{1}月{2}日", dateTime.Year, dateTime.Month, dateTime.Day);
result.AppendFormat(" {0}时{1}分", dateTime.Hour, dateTime.Minute);
if (isRemoveSecond == false)
{
result.AppendFormat("{0}秒", dateTime.Second);
}
return result.ToString();
}
/// <summary>
/// Gets a formatted string with hours and minutes, format: "yyyy年MM月dd日 HH时mm分"
/// </summary>
/// <param name="dateTime">Date</param>
/// <param name="isRemoveSecond">Whether to omit the seconds</param>
public static string ToChineseDateTimeString(this DateTime? dateTime, bool isRemoveSecond = false)
{
if (dateTime == null)
{
return string.Empty;
}
return ToChineseDateTimeString(dateTime.Value, isRemoveSecond);
}
/// <summary>
/// 返回指定日期起始时间。
/// </summary>
/// <param name="dateTime"></param>
/// <returns></returns>
public static DateTime StartDateTime(this DateTime dateTime)
{
if (dateTime == null)
{
throw new ArgumentNullException();
}
return new DateTime(dateTime.Year, dateTime.Month, dateTime.Day, 0, 0, 0);
}
/// <summary>
/// 返回指定日期结束时间。
/// </summary>
/// <param name="dateTime"></param>
/// <returns></returns>
public static DateTime EndDateTime(this DateTime dateTime)
{
if (dateTime == null)
{
throw new ArgumentNullException();
}
return new DateTime(dateTime.Year, dateTime.Month, dateTime.Day, 23, 59, 59);
}
#region
/// <summary>
/// 获取时间戳
/// </summary>
public static string GetTimeStamp(DateTime dateTime)
{
DateTime dtStart = new DateTime(1970, 1, 1, 8, 0, 0);
return Convert.ToInt64(dateTime.Subtract(dtStart).TotalMilliseconds).ToString();
}
#endregion
#region
/// <summary>
/// 根据时间戳获取时间
/// </summary>
public static DateTime TimeStampToDateTime(string timeStamp)
{
DateTime dtStart = new DateTime(1970, 1, 1, 8, 0, 0);
return dtStart.AddMilliseconds(Convert.ToInt64(timeStamp));
}
#endregion
#region
/// <summary>
/// 本周开始时间
/// </summary>
public static DateTime GetCurrentWeekStart()
{
DateTime now = DateTime.Now;
int day = Convert.ToInt32(now.DayOfWeek.ToString("d"));
return now.AddDays(1 - day).Date;
}
#endregion
#region
/// <summary>
/// 本周结束时间
/// </summary>
public static DateTime GetCurrentWeekEnd()
{
return GetCurrentWeekStart().AddDays(7).AddSeconds(-1);
}
#endregion
#region
/// <summary>
/// 本月开始时间
/// </summary>
public static DateTime GetCurrentMonthStart()
{
DateTime now = DateTime.Now;
return now.AddDays(1 - now.Day).Date;
}
#endregion
#region
/// <summary>
/// End of the current month
/// </summary>
public static DateTime GetCurrentMonthEnd()
{
return GetCurrentMonthStart().AddMonths(1).AddSeconds(-1);
}
#endregion
#region
/// <summary>
/// 本季度开始时间
/// </summary>
public static DateTime GetCurrentQuarterStart()
{
DateTime now = DateTime.Now;
return now.AddMonths(0 - (now.Month - 1) % 3).AddDays(1 - now.Day).Date;
}
#endregion
#region
/// <summary>
/// End of the current quarter
/// </summary>
public static DateTime GetCurrentQuarterthEnd()
{
return GetCurrentQuarterStart().AddMonths(3).AddSeconds(-1);
}
#endregion
#region
/// <summary>
/// 本年开始时间
/// </summary>
public static DateTime GetCurrentYearStart()
{
return new DateTime(DateTime.Now.Year, 1, 1);
}
#endregion
#region
/// <summary>
/// 本年结束时间
/// </summary>
public static DateTime GetCurrentYearEnd()
{
return new DateTime(DateTime.Now.Year, 12, 31, 23, 59, 59);
}
#endregion
}
}
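A minimal usage sketch for the date helpers above, assuming a reference to MES.Utility.Core; note that GetTimeStamp counts milliseconds from 1970-01-01 08:00 as written in the source.

using System;
using MES.Utility.Core;

class DateSample
{
    static void Main()
    {
        DateTime now = DateTime.Now;
        Console.WriteLine(now.ToDateString());                  // yyyy-MM-dd
        Console.WriteLine(now.ToChineseDateTimeString(true));   // year/month/day hour/minute, seconds omitted
        Console.WriteLine(ExtDateTime.GetCurrentMonthStart());  // first day of the current month, 00:00:00
        Console.WriteLine(ExtDateTime.GetTimeStamp(now));       // millisecond timestamp
    }
}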

View File

@@ -0,0 +1,143 @@
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Linq;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
namespace MES.Utility.Core
{
public static class JsonHelper
{
/// <summary>
/// Serializes an object to a JSON string.
/// </summary>
/// <param name="obj">The object to serialize</param>
/// <returns></returns>
public static string ToJson(this object obj)
{
if (obj == null)
return string.Empty;
IsoDateTimeConverter timeConverter = new IsoDateTimeConverter();
timeConverter.DateTimeFormat = "yyyy-MM-dd HH:mm:ss";
return JsonConvert.SerializeObject(obj, timeConverter);
}
/// <summary>
/// Deserializes a JSON string into an object.
/// </summary>
/// <typeparam name="T">Object type</typeparam>
/// <param name="json">JSON string</param>
/// <returns></returns>
public static T ToObject<T>(this string json)
{
//var setting = new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore };
return json == null ? default(T) : JsonConvert.DeserializeObject<T>(json);//, setting);
}
/// <summary>
/// JSON字符串序列化成集合。
/// </summary>
/// <typeparam name="T">集合类型</typeparam>
/// <param name="json">JSON字符串</param>
/// <returns></returns>
public static List<T> ToList<T>(this string json)
{
//var setting = new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore };
return json == null ? null : JsonConvert.DeserializeObject<List<T>>(json);//, setting);
}
/// <summary>
/// JSON字符串序列化成DataTable。
/// </summary>
/// <param name="json">JSON字符串</param>
/// <returns></returns>
public static DataTable ToTable(this string json)
{
return json == null ? null : JsonConvert.DeserializeObject<DataTable>(json);
}
/// <summary>
/// 将JSON字符串反序列化成对象
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="baseEntity"></param>
/// <param name="strJson"></param>
/// <returns></returns>
public static T Json2Obj<T>(T baseEntity, string strJson)
{
return JsonConvert.DeserializeAnonymousType(strJson, baseEntity);
}
/// <summary>
/// 将对象转换成JSON字符串
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="data"></param>
/// <returns></returns>
public static string Obj2Json<T>(T data)
{
return JsonConvert.SerializeObject(data);
}
public static List<T> JsonToList<T>(string strJson)
{
T[] list = JsonConvert.DeserializeObject<T[]>(strJson);
return list.ToList();
}
public static T Json2Obj<T>(string strJson)
{
return JsonConvert.DeserializeObject<T>(strJson);
}
public static DataTable ToDataTable(this string json)
{
return json.ToTable();
}
public static string RemoveComments(string code)
{
code = Regex.Replace(code, @"(?s)(?<=<!--).+?(?=-->)", "");
code = Regex.Replace(code, @"/\*[\s\S]*?\*/", "", RegexOptions.IgnoreCase);
code = Regex.Replace(code, @"^\s*//[\s\S]*?$", "", RegexOptions.Multiline);
code = Regex.Replace(code, @"^\s*$\n", "", RegexOptions.Multiline);
code = Regex.Replace(code, @"^\s*//[\s\S]*", "", RegexOptions.Multiline);
return code;
}
public static string FormatJson(this string json)
{
//格式化json字符串
JsonSerializer serializer = new JsonSerializer();
TextReader tr = new StringReader(json);
JsonTextReader jtr = new JsonTextReader(tr);
object obj = serializer.Deserialize(jtr);
if (obj != null)
{
StringWriter textWriter = new StringWriter();
JsonTextWriter jsonWriter = new JsonTextWriter(textWriter)
{
Formatting = Formatting.Indented,
Indentation = 4,
IndentChar = ' '
};
serializer.Serialize(jsonWriter, obj);
return textWriter.ToString();
}
else
{
return json;
}
}
}
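// 用法示例(新增,非原类的一部分):演示 ToJson / ToObject / FormatJson 的序列化往返;
// 其中 PersonDto 是为演示假设的类型,并非项目中已有的实体。
internal class PersonDto
{
    public string Name { get; set; }
    public DateTime Birthday { get; set; }
}
internal static class JsonHelperUsageSample
{
    internal static void Sample()
    {
        var p = new PersonDto { Name = "张三", Birthday = new DateTime(1990, 1, 1) };
        string json = p.ToJson();                    // 日期按 yyyy-MM-dd HH:mm:ss 格式输出
        PersonDto back = json.ToObject<PersonDto>(); // 反序列化回对象
        string pretty = json.FormatJson();           // 缩进 4 个空格的格式化字符串
        System.Console.WriteLine(pretty + " / " + back.Name);
    }
}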
}

View File

@@ -0,0 +1,255 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
namespace MES.Utility.Core
{
/// <summary>
/// LINQ扩展方法
/// </summary>
public static class LinqExtension
{
/// <summary>
/// 与连接
/// </summary>
/// <typeparam name="T">类型</typeparam>
/// <param name="left">左条件</param>
/// <param name="right">右条件</param>
/// <returns>新表达式</returns>
public static Expression<Func<T, bool>> And<T>(this Expression<Func<T, bool>> left, Expression<Func<T, bool>> right)
{
return CombineLambdas(left, right, ExpressionType.AndAlso);
}
/// <summary>
/// 或连接
/// </summary>
/// <typeparam name="T">类型</typeparam>
/// <param name="left">左条件</param>
/// <param name="right">右条件</param>
/// <returns>新表达式</returns>
public static Expression<Func<T, bool>> Or<T>(this Expression<Func<T, bool>> left, Expression<Func<T, bool>> right)
{
return CombineLambdas(left, right, ExpressionType.OrElse);
}
private static Expression<Func<T, bool>> CombineLambdas<T>(this Expression<Func<T, bool>> left, Expression<Func<T, bool>> right, ExpressionType expressionType)
{
var visitor = new SubstituteParameterVisitor
{
Sub =
{
[right.Parameters[0]] = left.Parameters[0]
}
};
Expression body = Expression.MakeBinary(expressionType, left.Body, visitor.Visit(right.Body));
return Expression.Lambda<Func<T, bool>>(body, left.Parameters[0]);
}
/// <summary>
/// 取最大值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <typeparam name="TResult"></typeparam>
/// <param name="source"></param>
/// <param name="selector"></param>
/// <returns></returns>
public static TResult MaxOrDefault<TSource, TResult>(this IQueryable<TSource> source, Expression<Func<TSource, TResult>> selector) => source.Select(selector).OrderByDescending(_ => _).FirstOrDefault();
/// <summary>
/// 取最大值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <typeparam name="TResult"></typeparam>
/// <param name="source"></param>
/// <param name="selector"></param>
/// <param name="defaultValue"></param>
/// <returns></returns>
public static TResult MaxOrDefault<TSource, TResult>(this IQueryable<TSource> source, Expression<Func<TSource, TResult>> selector, TResult defaultValue)
{
TResult result = source.Select(selector).OrderByDescending(_ => _).FirstOrDefault();
if (result != null)
return result;
return defaultValue;
}
/// <summary>
/// 取最大值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <param name="source"></param>
/// <returns></returns>
public static TSource MaxOrDefault<TSource>(this IQueryable<TSource> source)
{
return source.OrderByDescending(_ => _).FirstOrDefault();
}
/// <summary>
/// 取最大值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <param name="source"></param>
/// <param name="defaultValue"></param>
/// <returns></returns>
public static TSource MaxOrDefault<TSource>(this IQueryable<TSource> source, TSource defaultValue)
{
TSource result = source.OrderByDescending(_ => _).FirstOrDefault();
if (result != null)
return result;
return defaultValue;
}
/// <summary>
/// 取最大值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <typeparam name="TResult"></typeparam>
/// <param name="source"></param>
/// <param name="selector"></param>
/// <param name="defaultValue"></param>
/// <returns></returns>
public static TResult MaxOrDefault<TSource, TResult>(this IEnumerable<TSource> source, Func<TSource, TResult> selector, TResult defaultValue)
{
TResult result = source.Select(selector).OrderByDescending(_ => _).FirstOrDefault();
if (result != null)
return result;
return defaultValue;
}
/// <summary>
/// 取最大值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <param name="source"></param>
/// <returns></returns>
public static TSource MaxOrDefault<TSource>(this IEnumerable<TSource> source) => source.OrderByDescending(_ => _).FirstOrDefault();
/// <summary>
/// 取最大值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <param name="source"></param>
/// <param name="defaultValue"></param>
/// <returns></returns>
public static TSource MaxOrDefault<TSource>(this IEnumerable<TSource> source, TSource defaultValue)
{
TSource result = source.OrderByDescending(_ => _).FirstOrDefault();
if (result != null)
return result;
return defaultValue;
}
/// <summary>
/// 取最小值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <typeparam name="TResult"></typeparam>
/// <param name="source"></param>
/// <param name="selector"></param>
/// <returns></returns>
public static TResult MinOrDefault<TSource, TResult>(this IQueryable<TSource> source, Expression<Func<TSource, TResult>> selector) => source.Select(selector).OrderBy(_ => _).FirstOrDefault();
/// <summary>
/// 取最小值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <typeparam name="TResult"></typeparam>
/// <param name="source"></param>
/// <param name="selector"></param>
/// <param name="defaultValue"></param>
/// <returns></returns>
public static TResult MinOrDefault<TSource, TResult>(this IQueryable<TSource> source, Expression<Func<TSource, TResult>> selector, TResult defaultValue)
{
TResult result = source.Select(selector).OrderBy(_ => _).FirstOrDefault();
if (result != null)
return result;
return defaultValue;
}
/// <summary>
/// 取最小值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <param name="source"></param>
/// <returns></returns>
public static TSource MinOrDefault<TSource>(this IQueryable<TSource> source) => source.OrderBy(_ => _).FirstOrDefault();
/// <summary>
/// 取最小值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <param name="source"></param>
/// <param name="defaultValue"></param>
/// <returns></returns>
public static TSource MinOrDefault<TSource>(this IQueryable<TSource> source, TSource defaultValue)
{
TSource result = source.OrderBy(_ => _).FirstOrDefault();
if (result != null)
return result;
return defaultValue;
}
/// <summary>
/// 取最小值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <typeparam name="TResult"></typeparam>
/// <param name="source"></param>
/// <param name="selector"></param>
/// <returns></returns>
public static TResult MinOrDefault<TSource, TResult>(this IEnumerable<TSource> source, Func<TSource, TResult> selector) => source.Select(selector).OrderBy(_ => _).FirstOrDefault();
/// <summary>
/// 取最小值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <typeparam name="TResult"></typeparam>
/// <param name="source"></param>
/// <param name="selector"></param>
/// <param name="defaultValue"></param>
/// <returns></returns>
public static TResult MinOrDefault<TSource, TResult>(this IEnumerable<TSource> source, Func<TSource, TResult> selector, TResult defaultValue)
{
TResult result = source.Select(selector).OrderBy(_ => _).FirstOrDefault();
if (result != null)
return result;
return defaultValue;
}
/// <summary>
/// 取最小值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <param name="source"></param>
/// <returns></returns>
public static TSource MinOrDefault<TSource>(this IEnumerable<TSource> source) => source.OrderBy(_ => _).FirstOrDefault();
/// <summary>
/// 取最小值
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <param name="source"></param>
/// <param name="defaultValue"></param>
/// <returns></returns>
public static TSource MinOrDefault<TSource>(this IEnumerable<TSource> source, TSource defaultValue)
{
TSource result = source.OrderBy(_ => _).FirstOrDefault();
if (result != null)
return result;
return defaultValue;
}
}
internal class SubstituteParameterVisitor : ExpressionVisitor
{
public Dictionary<Expression, Expression> Sub = new Dictionary<Expression, Expression>();
protected override Expression VisitParameter(ParameterExpression node)
{
return Sub.TryGetValue(node, out var newValue) ? newValue : node;
}
}
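// 用法示例(新增,非原类的一部分):演示 And / Or 组合查询条件以及 MaxOrDefault 的默认值用法;
// 其中 OrderSample 类型与测试数据均为演示假设。
internal class OrderSample
{
    public int Qty { get; set; }
    public bool Paid { get; set; }
}
internal static class LinqExtensionUsageSample
{
    internal static void Sample()
    {
        Expression<Func<OrderSample, bool>> paid = o => o.Paid;
        Expression<Func<OrderSample, bool>> big = o => o.Qty > 10;
        // 组合成 o => o.Paid && o.Qty > 10,可直接用于 IQueryable.Where
        var filter = paid.And(big);
        var list = new List<OrderSample> { new OrderSample { Qty = 5, Paid = true } };
        int maxQty = list.AsQueryable().Where(filter).MaxOrDefault(o => o.Qty, 0); // 无匹配时返回默认值 0
        System.Console.WriteLine(maxQty);
    }
}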
}

View File

@@ -0,0 +1,163 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace MES.Utility.Core
{
public static class RMB
{
/// <summary>
/// 转换人民币大小金额
/// </summary>
/// <param name="num">金额</param>
/// <returns>返回大写形式</returns>
public static string ToRMB(this decimal num)
{
string str1 = "零壹贰叁肆伍陆柒捌玖"; //0-9所对应的汉字
string str2 = "万仟佰拾亿仟佰拾万仟佰拾元角分"; //数字位所对应的汉字
string str3 = ""; //从原num值中取出的值
string str4 = ""; //数字的字符串形式
string str5 = ""; //人民币大写金额形式
int i; //循环变量
int j; //num的值乘以100的字符串长度
string ch1 = ""; //数字的汉语读法
string ch2 = ""; //数字位的汉字读法
int nzero = 0; //用来计算连续的零值是几个
int temp; //从原num值中取出的值
num = Math.Round(Math.Abs(num), 2); //将num取绝对值并四舍五入取2位小数
str4 = ((long)(num * 100)).ToString(); //将num乘100并转换成字符串形式
j = str4.Length; //找出最高位
if (j > 15) { return "溢出"; }
str2 = str2.Substring(15 - j); //取出对应位数的str2的值。如200.55,j为5所以str2=佰拾元角分
//循环取出每一位需要转换的值
for (i = 0; i < j; i++)
{
str3 = str4.Substring(i, 1); //取出需转换的某一位的值
temp = Convert.ToInt32(str3); //转换为数字
if (i != (j - 3) && i != (j - 7) && i != (j - 11) && i != (j - 15))
{
//当所取位数不为元、万、亿、万亿上的数字时
if (str3 == "0")
{
ch1 = "";
ch2 = "";
nzero = nzero + 1;
}
else
{
if (str3 != "0" && nzero != 0)
{
ch1 = "零" + str1.Substring(temp * 1, 1);
ch2 = str2.Substring(i, 1);
nzero = 0;
}
else
{
ch1 = str1.Substring(temp * 1, 1);
ch2 = str2.Substring(i, 1);
nzero = 0;
}
}
}
else
{
//该位是万亿,亿,万,元位等关键位
if (str3 != "0" && nzero != 0)
{
ch1 = "零" + str1.Substring(temp * 1, 1);
ch2 = str2.Substring(i, 1);
nzero = 0;
}
else
{
if (str3 != "0" && nzero == 0)
{
ch1 = str1.Substring(temp * 1, 1);
ch2 = str2.Substring(i, 1);
nzero = 0;
}
else
{
if (str3 == "0" && nzero >= 3)
{
ch1 = "";
ch2 = "";
nzero = nzero + 1;
}
else
{
if (j >= 11)
{
ch1 = "";
nzero = nzero + 1;
}
else
{
ch1 = "";
ch2 = str2.Substring(i, 1);
nzero = nzero + 1;
}
}
}
}
}
if (i == (j - 11) || i == (j - 3))
{
//如果该位是亿位或元位,则必须写上
ch2 = str2.Substring(i, 1);
}
str5 = str5 + ch1 + ch2;
if (i == j - 1 && str3 == "0")
{
//最后一位为0时加上“整”
str5 = str5 + '整';
}
}
if (num == 0)
{
str5 = "零元整";
}
return str5;
}
public static string ToRMB(this int num)
{
return ToRMB(Convert.ToString(num));
}
public static string ToRMB(this float num)
{
return ToRMB(Convert.ToString(num));
}
public static string ToRMB(this double num)
{
return ToRMB(Convert.ToString(num));
}
public static string ToRMB(this long num)
{
return ToRMB(Convert.ToString(num));
}
/// <summary>
/// 一个重载:将字符串先转换成数字,再调用 ToRMB(decimal num)
/// </summary>
/// <param name="numstr">用户输入的金额字符串,尚未转成decimal</param>
/// <returns></returns>
public static string ToRMB(this string numstr)
{
try
{
decimal num = Convert.ToDecimal(numstr);
return ToRMB(num);
}
catch
{
return "非数字形式!";
}
}
}
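// 用法示例(新增,非原类的一部分):演示金额转人民币大写的典型调用,输出内容以实际运行结果为准。
internal static class RMBUsageSample
{
    internal static void Sample()
    {
        string upper = 1234.56m.ToRMB();   // 预期输出:壹仟贰佰叁拾肆元伍角陆分
        string whole = "100".ToRMB();      // 整数金额末尾补"整",预期输出:壹佰元整
        System.Console.WriteLine(upper + " / " + whole);
    }
}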
}

View File

@@ -0,0 +1,61 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace MES.Utility.Core
{
/// <summary>
/// 使用Random类生成伪随机数
/// </summary>
public static class RandomHelper
{
/// <summary>
/// 共享的随机数实例:每次调用都 new Random() 会因时间种子相同而在短时间内产生重复值,
/// 这里改用静态实例避免该问题
/// </summary>
private static readonly Random _random = new Random();
/// <summary>
/// 生成一个指定范围的随机整数,该随机数范围包括最小值,但不包括最大值
/// </summary>
/// <param name="minNum">最小值</param>
/// <param name="maxNum">最大值</param>
public static int GetRandomInt(int minNum, int maxNum)
{
    return _random.Next(minNum, maxNum);
}
/// <summary>
/// 生成一个0.0到1.0(不含1.0)的随机小数
/// </summary>
public static double GetRandomDouble()
{
    return _random.NextDouble();
}
/// <summary>
/// 对一个数组进行随机排序
/// </summary>
/// <typeparam name="T">数组的类型</typeparam>
/// <param name="arr">需要随机排序的数组</param>
public static void GetRandomArray<T>(T[] arr)
{
//对数组进行随机排序的算法:随机选择两个位置,将两个位置上的值交换
//交换的次数,这里使用数组的长度作为交换次数
int count = arr.Length;
//开始交换
for (int i = 0; i < count; i++)
{
//生成两个随机数位置
int targetIndex1 = GetRandomInt(0, arr.Length);
int targetIndex2 = GetRandomInt(0, arr.Length);
//定义临时变量
T temp;
//交换两个随机数位置的值
temp = arr[targetIndex1];
arr[targetIndex1] = arr[targetIndex2];
arr[targetIndex2] = temp;
}
}
}
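// 用法示例(新增,非原类的一部分):演示随机数与数组洗牌的调用方式,数据均为演示假设。
internal static class RandomHelperUsageSample
{
    internal static void Sample()
    {
        int dice = RandomHelper.GetRandomInt(1, 7);      // [1,6] 的随机整数,不包含上限 7
        double ratio = RandomHelper.GetRandomDouble();   // [0.0,1.0) 的随机小数
        int[] cards = { 1, 2, 3, 4, 5 };
        RandomHelper.GetRandomArray(cards);              // 就地随机打乱数组
        System.Console.WriteLine(dice + " " + ratio + " " + string.Join(",", cards));
    }
}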
}

View File

@@ -0,0 +1,35 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
namespace MES.Utility.Core
{
/// <summary>
/// 操作正则表达式的公共类
/// </summary>
public class RegexHelper
{
/// <summary>
/// 验证输入字符串是否与模式字符串匹配匹配返回true
/// </summary>
/// <param name="input">输入字符串</param>
/// <param name="pattern">模式字符串</param>
public static bool IsMatch(string input, string pattern)
{
return IsMatch(input, pattern, RegexOptions.IgnoreCase);
}
/// <summary>
/// 验证输入字符串是否与模式字符串匹配匹配返回true
/// </summary>
/// <param name="input">输入的字符串</param>
/// <param name="pattern">模式字符串</param>
/// <param name="options">筛选条件</param>
public static bool IsMatch(string input, string pattern, RegexOptions options)
{
return Regex.IsMatch(input, pattern, options);
}
}
}

View File

@@ -0,0 +1,277 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.Win32;
using System.Reflection;
namespace MES.Utility.Core
{
/// <summary>
/// 注册表辅助类
/// </summary>
public class RegisterHelper
{
/// <summary>
/// 默认注册表基项
/// </summary>
private string baseKey = "Software";
#region
/// <summary>
/// 构造函数,使用默认基项 Software
/// </summary>
public RegisterHelper()
{
}
/// <summary>
/// 构造函数
/// </summary>
/// <param name="baseKey">基项的名称</param>
public RegisterHelper(string baseKey)
{
this.baseKey = baseKey;
}
#endregion
#region
/// <summary>
/// 写入注册表,如果指定项已经存在,则修改指定项的值
/// </summary>
/// <param name="keytype">注册表基项枚举</param>
/// <param name="key">注册表项,不包括基项</param>
/// <param name="name">值名称</param>
/// <param name="values">值</param>
public void SetValue(KeyType keytype, string key, string name, string values)
{
RegistryKey rk = (RegistryKey)GetRegistryKey(keytype);
RegistryKey software = rk.OpenSubKey(baseKey, true);
RegistryKey rkt = software.CreateSubKey(key);
if (rkt != null)
{
rkt.SetValue(name, values);
}
}
/// <summary>
/// 读取注册表
/// </summary>
/// <param name="keytype">注册表基项枚举</param>
/// <param name="key">注册表项,不包括基项</param>
/// <param name="name">值名称</param>
/// <returns>返回字符串</returns>
public string GetValue(KeyType keytype, string key, string name)
{
RegistryKey rk = (RegistryKey)GetRegistryKey(keytype);
RegistryKey software = rk.OpenSubKey(baseKey, true);
RegistryKey rkt = software.OpenSubKey(key);
if (rkt != null)
{
return rkt.GetValue(name).ToString();
}
else
{
return string.Empty;
}
}
/// <summary>
/// 删除注册表中的值
/// </summary>
/// <param name="keytype">注册表基项枚举</param>
/// <param name="key">注册表项名称,不包括基项</param>
/// <param name="name">值名称</param>
public void DeleteValue(KeyType keytype, string key, string name)
{
RegistryKey rk = (RegistryKey)GetRegistryKey(keytype);
RegistryKey software = rk.OpenSubKey(baseKey, true);
RegistryKey rkt = software.OpenSubKey(key, true);
if (rkt != null)
{
object value = rkt.GetValue(name);
if (value != null)
{
rkt.DeleteValue(name, true);
}
}
}
/// <summary>
/// 删除注册表中的指定项
/// </summary>
/// <param name="keytype">注册表基项枚举</param>
/// <param name="key">注册表中的项,不包括基项</param>
public void DeleteSubKey(KeyType keytype, string key)
{
RegistryKey rk = (RegistryKey)GetRegistryKey(keytype);
RegistryKey software = rk.OpenSubKey(baseKey, true);
if (software != null)
{
software.DeleteSubKeyTree(key);
}
}
/// <summary>
/// 判断指定项是否存在
/// </summary>
/// <param name="keytype">基项枚举</param>
/// <param name="key">指定项字符串</param>
/// <returns>返回布尔值,说明指定项是否存在</returns>
public bool IsExist(KeyType keytype, string key)
{
RegistryKey rk = (RegistryKey)GetRegistryKey(keytype);
RegistryKey software = rk.OpenSubKey(baseKey);
RegistryKey rkt = software.OpenSubKey(key);
if (rkt != null)
{
return true;
}
else
{
return false;
}
}
/// <summary>
/// 检索指定项关联的所有值
/// </summary>
/// <param name="keytype">基项枚举</param>
/// <param name="key">指定项字符串</param>
/// <returns>返回指定项关联的所有值的字符串数组</returns>
public string[] GetValues(KeyType keytype, string key)
{
RegistryKey rk = (RegistryKey)GetRegistryKey(keytype);
RegistryKey software = rk.OpenSubKey(baseKey, true);
RegistryKey rkt = software.OpenSubKey(key);
string[] names = rkt.GetValueNames();
if (names.Length == 0)
{
return names;
}
else
{
string[] values = new string[names.Length];
int i = 0;
foreach (string name in names)
{
values[i] = rkt.GetValue(name).ToString();
i++;
}
return values;
}
}
/// <summary>
/// 将对象所有属性写入指定注册表中
/// </summary>
/// <param name="keytype">注册表基项枚举</param>
/// <param name="key">注册表项,不包括基项</param>
/// <param name="obj">传入的对象</param>
public void SetObjectValue(KeyType keyType, string key, Object obj)
{
if (obj != null)
{
Type t = obj.GetType();
string name;
object value;
foreach (var p in t.GetProperties())
{
if (p != null)
{
name = p.Name;
value = p.GetValue(obj, null);
// 属性值为 null 时写入空字符串,避免 value.ToString() 抛出空引用异常
this.SetValue(keyType, key, name, value == null ? string.Empty : value.ToString());
}
}
}
}
#endregion
#region
/// <summary>
/// 返回RegistryKey对象
/// </summary>
/// <param name="keyType">注册表基项枚举</param>
/// <returns></returns>
private object GetRegistryKey(KeyType keyType)
{
RegistryKey rk = null;
switch (keyType)
{
case KeyType.HKEY_CLASS_ROOT:
rk = Registry.ClassesRoot;
break;
case KeyType.HKEY_CURRENT_USER:
rk = Registry.CurrentUser;
break;
case KeyType.HKEY_LOCAL_MACHINE:
rk = Registry.LocalMachine;
break;
case KeyType.HKEY_USERS:
rk = Registry.Users;
break;
case KeyType.HKEY_CURRENT_CONFIG:
rk = Registry.CurrentConfig;
break;
}
return rk;
}
#endregion
#region
/// <summary>
/// 注册表基项枚举
/// </summary>
public enum KeyType : int
{
/// <summary>
/// 注册表基项 HKEY_CLASSES_ROOT
/// </summary>
HKEY_CLASS_ROOT,
/// <summary>
/// 注册表基项 HKEY_CURRENT_USER
/// </summary>
HKEY_CURRENT_USER,
/// <summary>
/// 注册表基项 HKEY_LOCAL_MACHINE
/// </summary>
HKEY_LOCAL_MACHINE,
/// <summary>
/// 注册表基项 HKEY_USERS
/// </summary>
HKEY_USERS,
/// <summary>
/// 注册表基项 HKEY_CURRENT_CONFIG
/// </summary>
HKEY_CURRENT_CONFIG
}
#endregion
}
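// 用法示例(新增,非原类的一部分):演示在 HKEY_CURRENT_USER\Software 下读写注册表值;
// 示例中的子项名 "MyApp" 与值名 "Version" 均为演示假设,仅适用于 Windows。
internal static class RegisterHelperUsageSample
{
    internal static void Sample()
    {
        var reg = new RegisterHelper();   // 默认基项为 Software
        reg.SetValue(RegisterHelper.KeyType.HKEY_CURRENT_USER, "MyApp", "Version", "1.0.0");
        string version = reg.GetValue(RegisterHelper.KeyType.HKEY_CURRENT_USER, "MyApp", "Version");
        bool exists = reg.IsExist(RegisterHelper.KeyType.HKEY_CURRENT_USER, "MyApp");
        System.Console.WriteLine(version + " " + exists);
    }
}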
}

View File

@@ -0,0 +1,318 @@

using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
namespace MES.Utility.Core
{
/// <summary>
/// 字符串操作类
/// </summary>
public static class StringHelper
{
/// <summary>
/// 把字符串按照分隔符转换成 List
/// </summary>
/// <param name="str">源字符串</param>
/// <param name="speater">分隔符</param>
/// <param name="toLower">是否转换为小写</param>
/// <returns></returns>
public static List<string> SplitToList(this string str, char speater = ',', bool toLower = false)
{
List<string> list = new List<string>();
if (str == null)
return list;
string[] ss = str.Split(speater);
foreach (string s in ss)
{
if (!string.IsNullOrEmpty(s) && s != speater.ToString())
{
string strVal = s;
if (toLower)
{
strVal = s.ToLower();
}
list.Add(strVal);
}
}
return list;
}
/// <summary>
/// 把 List&lt;string&gt; 按照分隔符组装成 string
/// </summary>
/// <param name="list"></param>
/// <param name="speater"></param>
/// <returns></returns>
public static string GetStrArray(this List<string> list, string speater = ",")
{
StringBuilder sb = new StringBuilder();
for (int i = 0; i < list.Count; i++)
{
if (i == list.Count - 1)
{
sb.Append(list[i]);
}
else
{
sb.Append(list[i]);
sb.Append(speater);
}
}
return sb.ToString();
}
/// <summary>
/// 删除字符串中最后一个指定分隔符及其之后的内容,未找到分隔符时返回原字符串
/// </summary>
public static string DelLastChar(this string str, string strChar = ",")
{
    int index = str.LastIndexOf(strChar);
    return index < 0 ? str : str.Substring(0, index);
}
/// <summary>
/// 转全角的函数(SBC case)
/// </summary>
/// <param name="input"></param>
/// <returns></returns>
public static string ToSBC(string input)
{
//半角转全角:
char[] c = input.ToCharArray();
for (int i = 0; i < c.Length; i++)
{
if (c[i] == 32)
{
c[i] = (char)12288;
continue;
}
if (c[i] < 127)
c[i] = (char)(c[i] + 65248);
}
return new string(c);
}
/// <summary>
/// 转半角的函数(DBC case)
/// </summary>
/// <param name="input">输入</param>
/// <returns></returns>
public static string ToDBC(string input)
{
char[] c = input.ToCharArray();
for (int i = 0; i < c.Length; i++)
{
if (c[i] == 12288)
{
c[i] = (char)32;
continue;
}
if (c[i] > 65280 && c[i] < 65375)
c[i] = (char)(c[i] - 65248);
}
return new string(c);
}
/// <summary>
/// 获取正确的Id如果不是正整数返回0
/// </summary>
/// <param name="value"></param>
/// <returns>返回正确的整数ID失败返回0</returns>
public static int ToInt32(this string value)
{
if (IsNumberId(value))
return int.Parse(value);
else
return 0;
}
/// <summary>
/// 检查一个字符串是否是纯数字构成的,一般用于查询字符串参数的有效性验证。(0除外)
/// </summary>
/// <param name="_value">需验证的字符串。。</param>
/// <returns>是否合法的bool值。</returns>
public static bool IsNumberId(string _value)
{
return QuickValidate("^[1-9]*[0-9]*$", _value);
}
/// <summary>
/// 快速验证一个字符串是否符合指定的正则表达式。
/// </summary>
/// <param name="_express">正则表达式的内容。</param>
/// <param name="_value">需验证的字符串。</param>
/// <returns>是否合法的bool值。</returns>
public static bool QuickValidate(string _express, string _value)
{
if (_value == null) return false;
Regex myRegex = new Regex(_express);
if (_value.Length == 0)
{
return false;
}
return myRegex.IsMatch(_value);
}
/// <summary>
/// 得到字符串长度一个汉字长度为2
/// </summary>
/// <param name="inputString">参数字符串</param>
/// <returns></returns>
public static int StrLength(this string inputString)
{
int tempLen = 0;
// 按字符判断:非 ASCII 字符(如汉字)计 2,其余计 1,避免把字面量 '?' 误算为 2
foreach (char c in inputString)
{
    tempLen += c > 127 ? 2 : 1;
}
return tempLen;
}
/// <summary>
/// 截取指定长度字符串
/// </summary>
/// <param name="inputString">要处理的字符串</param>
/// <param name="len">指定长度</param>
/// <returns>返回处理后的字符串</returns>
public static string splitString(this string inputString, int len)
{
bool isShowFix = false;
if (len % 2 == 1)
{
isShowFix = true;
len--;
}
int tempLen = 0;
StringBuilder tempString = new StringBuilder();
for (int i = 0; i < inputString.Length; i++)
{
    // 非 ASCII 字符(如汉字)按 2 个长度计算
    tempLen += inputString[i] > 127 ? 2 : 1;
    tempString.Append(inputString[i]);
    if (tempLen > len)
        break;
}
// 按同样的计长规则判断原串是否被截断,截断时追加省略号
if (isShowFix && inputString.StrLength() > len)
    tempString.Append("…");
return tempString.ToString();
}
/// <summary>
/// HTML转行成TEXT
/// </summary>
/// <param name="strHtml"></param>
/// <returns></returns>
public static string HtmlToTxt(this string strHtml)
{
string[] aryReg ={
@"<script[^>]*?>.*?</script>",
@"<(\/\s*)?!?((\w+:)?\w+)(\w+(\s*=?\s*(([""'])(\\[""'tbnr]|[^\7])*?\7|\w+)|.{0})|\s)*?(\/\s*)?>",
@"([\r\n])[\s]+",
@"&(quot|#34);",
@"&(amp|#38);",
@"&(lt|#60);",
@"&(gt|#62);",
@"&(nbsp|#160);",
@"&(iexcl|#161);",
@"&(cent|#162);",
@"&(pound|#163);",
@"&(copy|#169);",
@"&#(\d+);",
@"-->",
@"<!--.*\n"
};
string strOutput = strHtml;
for (int i = 0; i < aryReg.Length; i++)
{
Regex regex = new Regex(aryReg[i], RegexOptions.IgnoreCase);
strOutput = regex.Replace(strOutput, string.Empty);
}
strOutput.Replace("<", "");
strOutput.Replace(">", "");
strOutput.Replace("\r\n", "");
return strOutput;
}
/// <summary>
/// 判断字符串是否为空(null、空串或仅空白字符),为空返回true
/// </summary>
/// <param name="data">要验证的字符串</param>
public static bool IsNullOrEmpty(this string data)
{
    return string.IsNullOrWhiteSpace(data);
}
/// <summary>
/// 判断集合是否为空(null 或无元素),为空返回true
/// </summary>
/// <typeparam name="T">集合元素的类型</typeparam>
/// <param name="collection">要验证的集合</param>
public static bool IsNullOrEmpty<T>(this List<T> collection)
{
if (collection == null)
return true;
if (collection.Count() == 0)
return true;
return false;
}
public static bool IsNullOrEmpty(this DataSet ds)
{
if (ds == null)
return true;
if (ds.Tables.Count == 0)
return true;
return ds.Tables[0].IsNullOrEmpty();
}
public static bool IsNullOrEmpty(this DataTable dt)
{
if (dt == null)
return true;
if (dt.Rows.Count == 0)
return true;
return false;
}
}
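// 用法示例(新增,非原类的一部分):演示字符串分隔、拼接、全半角转换与按汉字计长;
// 示例数据均为演示假设。
internal static class StringHelperUsageSample
{
    internal static void Sample()
    {
        List<string> parts = "a,b,,c".SplitToList();     // ["a","b","c"],自动跳过空项
        string joined = parts.GetStrArray("|");          // "a|b|c"
        string half = StringHelper.ToDBC("ABC123"); // 全角转半角:"ABC123"
        int len = "中文ab".StrLength();                  // 汉字按 2 计长,结果为 6
        System.Console.WriteLine(joined + " " + half + " " + len);
    }
}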
}

View File

@@ -0,0 +1,215 @@
using ICSharpCode.SharpZipLib.Zip;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
namespace MES.Utility.Core
{
public static class ZipHelper
{
/// <summary>
/// 压缩
/// </summary>
/// <param name="filename"> 压缩后的文件名(包含物理路径)</param>
/// <param name="directory">待压缩的文件夹(包含物理路径)</param>
//public static void PackFiles(string filename, string directory)
//{
// try
// {
// FastZip fz = new FastZip();
// fz.CreateEmptyDirectories = true;
// fz.CreateZip(filename, directory, true, "");
// fz = null;
// }
// catch (Exception)
// {
// throw;
// }
//}
/// <summary>
/// 判断是文件夹还是文件
/// </summary>
/// <param name="filePath"></param>
/// <returns></returns>
private static bool IsDir(string filePath)
{
FileInfo fi = new FileInfo(filePath);
if ((fi.Attributes & FileAttributes.Directory) != 0)
return true;
return false;
}
/// <summary>
/// 压缩
/// </summary>
/// <param name="fileName">压缩后的文件名(包含物理路径)</param>
/// <param name="fileOrDirectoryPath">动长参数,待压缩的文件或者文件夹</param>
/// <returns></returns>
public static bool PackFiles(string fileName, params string[] fileOrDirectoryPath)
{
try
{
ZipStrings.UseUnicode = true;
using (ZipOutputStream zipStream = new ZipOutputStream(File.Create(fileName)))
{
zipStream.SetComment("版本1.0");
zipStream.SetLevel(6);//设值CompressionLevel,压缩比
foreach (string filePath in fileOrDirectoryPath)
{
ZipMultiFiles(filePath, zipStream);
}
}
return true;
}
catch
{
return false;
}
}
/// <summary>
/// 加密压缩
/// </summary>
/// <param name="fileName">压缩后的文件名(包含物理路径)</param>
/// <param name="password">密码</param>
/// <param name="fileOrDirectoryPath">动长参数,待压缩的文件或者文件夹</param>
/// <returns></returns>
public static bool PackFilesWithPassword(string fileName, string password, params string[] fileOrDirectoryPath)
{
try
{
ZipStrings.UseUnicode = true;
using (ZipOutputStream zipStream = new ZipOutputStream(File.Create(fileName)))
{
zipStream.SetComment("版本1.0");
zipStream.SetLevel(6);//设值CompressionLevel,压缩比
if (!password.IsNullOrEmpty())
{
zipStream.Password = password;
}
foreach (string filePath in fileOrDirectoryPath)
{
ZipMultiFiles(filePath, zipStream);
}
}
return true;
}
catch
{
return false;
}
}
private static void ZipMultiFiles(string file, ZipOutputStream zipStream, string lastName = "")
{
file = file.Replace("\\", "/");
if (!IsDir(file))
{
if (!File.Exists(file))
{
throw new Exception($"文件{file}不存在");
}
using (FileStream streamReader = File.OpenRead(file))
{
string path = Path.GetFileName(file);
if (lastName != "")
{
path = lastName + "/" + path;
}
ZipEntry zipEntry = new ZipEntry(path);
zipEntry.DateTime = DateTime.Now;
zipEntry.Size = streamReader.Length;
zipStream.PutNextEntry(zipEntry);//压入文件
int sourceCount = 0;
byte[] buffer = new byte[4096 * 1024];
while ((sourceCount = streamReader.Read(buffer, 0, buffer.Length)) > 0)
{
zipStream.Write(buffer, 0, sourceCount);
}
}
}
else
{
if (!Directory.Exists(file))
{
throw new Exception($"文件夹{file}不存在");
}
string[] fileArray = Directory.GetFileSystemEntries(file);
string folderName = new DirectoryInfo(file).Name; //Regex.Match(file, @"[^\/:*\?\”“\<>|,\\]*$").ToString();
if (lastName != "")
{
folderName = lastName + "/" + folderName;
}
if (fileArray.Length == 0)
{
ZipEntry zipEntry = new ZipEntry(folderName + "/");
zipStream.PutNextEntry(zipEntry);
}
foreach (string f in fileArray)
{
ZipMultiFiles(f, zipStream, folderName);
}
}
}
/// <summary>
/// 解压缩
/// </summary>
/// <param name="file">待解压文件名(包含物理路径)</param>
/// <param name="dir"> 解压到哪个目录中(包含物理路径)</param>
public static bool UnpackFiles(string file, string dir)
{
try
{
if (!Directory.Exists(dir))
{
Directory.CreateDirectory(dir);
}
// 使用 using 确保压缩流被释放,并用 Path.Combine 拼接路径,
// 避免 dir 末尾缺少分隔符时生成错误路径
using (ZipInputStream s = new ZipInputStream(File.OpenRead(file)))
{
    ZipEntry theEntry;
    while ((theEntry = s.GetNextEntry()) != null)
    {
        string directoryName = Path.GetDirectoryName(theEntry.Name);
        string fileName = Path.GetFileName(theEntry.Name);
        if (!string.IsNullOrEmpty(directoryName))
        {
            Directory.CreateDirectory(Path.Combine(dir, directoryName));
        }
        if (fileName != String.Empty)
        {
            using (FileStream streamWriter = File.Create(Path.Combine(dir, theEntry.Name)))
            {
                byte[] data = new byte[2048];
                int size;
                while ((size = s.Read(data, 0, data.Length)) > 0)
                {
                    streamWriter.Write(data, 0, size);
                }
            }
        }
    }
}
return true;
}
catch (Exception)
{
throw;
}
}
}
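// 用法示例(新增,非原类的一部分):演示压缩多个文件/文件夹并解压;示例中的路径均为演示假设。
internal static class ZipHelperUsageSample
{
    internal static void Sample()
    {
        // 将一个文件夹和一个单独文件打包为 zip
        bool packed = ZipHelper.PackFiles(@"D:\backup\data.zip", @"D:\data\logs", @"D:\data\readme.txt");
        // 解压到指定目录(目录不存在时会自动创建)
        bool unpacked = ZipHelper.UnpackFiles(@"D:\backup\data.zip", @"D:\restore\");
        System.Console.WriteLine(packed + " " + unpacked);
    }
}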
}

View File

@@ -0,0 +1,62 @@
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Text;
namespace Base.Utility
{
public class DesktopRefurbish
{
/// <summary>
/// 桌面刷新
/// </summary>
[DllImport("shell32.dll")]
public static extern void SHChangeNotify(HChangeNotifyEventID wEventId, HChangeNotifyFlags uFlags, IntPtr dwItem1, IntPtr dwItem2);
public static void DeskRef()
{
SHChangeNotify(HChangeNotifyEventID.SHCNE_ASSOCCHANGED, HChangeNotifyFlags.SHCNF_IDLIST, IntPtr.Zero, IntPtr.Zero);
}
}
#region public enum HChangeNotifyFlags
[Flags]
public enum HChangeNotifyFlags
{
SHCNF_DWORD = 0x0003,
SHCNF_IDLIST = 0x0000,
SHCNF_PATHA = 0x0001,
SHCNF_PATHW = 0x0005,
SHCNF_PRINTERA = 0x0002,
SHCNF_PRINTERW = 0x0006,
SHCNF_FLUSH = 0x1000,
SHCNF_FLUSHNOWAIT = 0x2000
}
#endregion // public enum HChangeNotifyFlags
#region enum HChangeNotifyEventID
[Flags]
public enum HChangeNotifyEventID
{
SHCNE_ALLEVENTS = 0x7FFFFFFF,
SHCNE_ASSOCCHANGED = 0x08000000,
SHCNE_ATTRIBUTES = 0x00000800,
SHCNE_CREATE = 0x00000002,
SHCNE_DELETE = 0x00000004,
SHCNE_DRIVEADD = 0x00000100,
SHCNE_DRIVEADDGUI = 0x00010000,
SHCNE_DRIVEREMOVED = 0x00000080,
SHCNE_EXTENDED_EVENT = 0x04000000,
SHCNE_FREESPACE = 0x00040000,
SHCNE_MEDIAINSERTED = 0x00000020,
SHCNE_MEDIAREMOVED = 0x00000040,
SHCNE_MKDIR = 0x00000008,
SHCNE_NETSHARE = 0x00000200,
SHCNE_NETUNSHARE = 0x00000400,
SHCNE_RENAMEFOLDER = 0x00020000,
SHCNE_RENAMEITEM = 0x00000001,
SHCNE_RMDIR = 0x00000010,
SHCNE_SERVERDISCONNECT = 0x00004000,
SHCNE_UPDATEDIR = 0x00001000,
SHCNE_UPDATEIMAGE = 0x00008000,
}
#endregion
}

View File

@@ -0,0 +1,80 @@
/****************************************************************
** 文件名: ActionDelegatedEventHandler.cs
** 主要类: ActionDelegatedEventHandler类
** Copyright (c) 章为忠
** 创建人:
** 日 期: 2017.3.10
** 修改人:
** 日 期:
** 修改内容:
** 描 述:
** 版 本:
** 备 注:
****************************************************************/
using System;
namespace Weiz.EventBus.Core
{
/// <summary>
/// Represents the event handler which delegates the event handling process to
/// a given <see cref="Action{T}"/> delegated method.
/// </summary>
/// <typeparam name="TEvent">The type of the event to be handled by current handler.</typeparam>
public sealed class ActionDelegatedEventHandler<TEvent> : IEventHandler<TEvent>
where TEvent : IEvent
{
#region Private Fields
private readonly Action<TEvent> action;
#endregion
#region Ctor
/// <summary>
/// Initializes a new instance of <c>ActionDelegatedEventHandler{TEvent}</c> class.
/// </summary>
/// <param name="action">The <see cref="Action{T}"/> instance that delegates the event handling process.</param>
public ActionDelegatedEventHandler(Action<TEvent> action)
{
this.action = action;
}
#endregion
#region Public Methods
/// <summary>
/// Returns a <see cref="Boolean"/> value which indicates whether the current
/// <c>ActionDelegatedEventHandler{T}</c> equals to the given object.
/// </summary>
/// <param name="obj">The <see cref="Object"/> which is used to compare to
/// the current <c>ActionDelegatedEventHandler{T}</c> instance.</param>
/// <returns>If the given object equals to the current <c>ActionDelegatedEventHandler{T}</c>
/// instance, returns true, otherwise, false.</returns>
public override bool Equals(object obj)
{
if (ReferenceEquals(this, obj))
return true;
if (obj == null)
return false;
ActionDelegatedEventHandler<TEvent> other = obj as ActionDelegatedEventHandler<TEvent>;
if (other == null)
return false;
return Delegate.Equals(this.action, other.action);
}
/// <summary>
/// Returns a hash code consistent with the overridden <see cref="Equals(object)"/>,
/// based on the delegated action.
/// </summary>
public override int GetHashCode()
{
    return action != null ? action.GetHashCode() : 0;
}
#endregion
#region IHandler<TDomainEvent> Members
/// <summary>
/// Handles the specified message.
/// </summary>
/// <param name="message">The message to be handled.</param>
public void Handle(TEvent message)
{
action(message);
}
#endregion
}
}

View File

@@ -0,0 +1,27 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Weiz.EventBus.Core;
namespace MES.Utility.EventBus
{
public class ConnectErrorEvent : IEvent
{
public ConnectErrorEvent() { }
public ConnectErrorEvent(string message)
{
Time = DateTime.Now;
Message = message;
}
public ConnectErrorEvent(DateTime time, string message)
{
Time = time;
Message = message;
}
public DateTime Time { get; set; }
public string Message { get; set; }
}
}

View File

@@ -0,0 +1,226 @@
/****************************************************************
** 文件名: EventBus.cs
** 主要类: EventBus类
** Copyright (c) 章为忠
** 创建人:
** 日 期: 2017.3.10
** 修改人:
** 日 期:
** 修改内容:
** 描 述:
** 版 本:
** 备 注:
****************************************************************/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Xml.Linq;
namespace Weiz.EventBus.Core
{
public class EventBus
{
/// <summary>
/// 事件总线对象
/// </summary>
private static EventBus _eventBus = null;
/// <summary>
/// 领域模型事件句柄字典,用于存储领域模型的句柄
/// </summary>
private static Dictionary<Type, List<object>> _dicEventHandler = new Dictionary<Type, List<object>>();
/// <summary>
/// 附加领域模型处理句柄时,锁住
/// </summary>
private readonly object _syncObject = new object();
/// <summary>
/// 单例事件总线
/// </summary>
public static EventBus Instance
{
get
{
return _eventBus ?? (_eventBus = new EventBus());
}
}
/// <summary>
/// 通过XML文件初始化事件总线,订阅信息在XML里配置
/// </summary>
/// <returns></returns>
public static EventBus InstanceForXml()
{
if (_eventBus == null)
{
XElement root = XElement.Load(System.IO.Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "EventBus.xml"));
foreach (var evt in root.Elements("Event"))
{
List<object> handlers = new List<object>();
Type publishEventType = Type.GetType(evt.Element("PublishEvent").Value);
foreach (var subscritedEvt in evt.Elements("SubscribedEvents"))
{
foreach (var concreteEvt in subscritedEvt.Elements("SubscribedEvent"))
{
var type = Type.GetType(concreteEvt.Value);
handlers.Add(System.Activator.CreateInstance(type));
}
_dicEventHandler[publishEventType] = handlers;
}
}
_eventBus = new EventBus();
}
return _eventBus;
}
/// <summary>
///
/// </summary>
private readonly Func<object, object, bool> eventHandlerEquals = (o1, o2) =>
{
var o1Type = o1.GetType();
var o2Type = o2.GetType();
return o1Type == o2Type;
};
#region
public void Subscribe<TEvent>(IEventHandler<TEvent> eventHandler) where TEvent : IEvent
{
//同步锁
lock (_syncObject)
{
//获取领域模型的类型
var eventType = typeof(TEvent);
//如果此领域类型在事件总线中已注册过
if (_dicEventHandler.ContainsKey(eventType))
{
var handlers = _dicEventHandler[eventType];
if (handlers != null)
{
handlers.Add(eventHandler);
}
else
{
    // 原实现创建的新列表未写回字典,订阅会丢失,这里补上写回
    _dicEventHandler[eventType] = new List<object> { eventHandler };
}
}
else
{
_dicEventHandler.Add(eventType, new List<object> { eventHandler });
}
}
}
/// <summary>
/// 通过委托订阅事件
/// </summary>
/// <param name="eventHandlerFunc">事件处理委托</param>
public void Subscribe<TEvent>(Action<TEvent> eventHandlerFunc)
where TEvent : IEvent
{
Subscribe<TEvent>(new ActionDelegatedEventHandler<TEvent>(eventHandlerFunc));
}
public void Subscribe<TEvent>(IEnumerable<IEventHandler<TEvent>> eventHandlers)
where TEvent : IEvent
{
foreach (var eventHandler in eventHandlers)
{
Subscribe<TEvent>(eventHandler);
}
}
#endregion
#region
public void Publish<TEvent>(TEvent tEvent) where TEvent : IEvent
{
var eventType = typeof(TEvent);
if (_dicEventHandler.ContainsKey(eventType) && _dicEventHandler[eventType] != null &&
_dicEventHandler[eventType].Count > 0)
{
var handlers = _dicEventHandler[eventType];
try
{
foreach (var handler in handlers)
{
var eventHandler = handler as IEventHandler<TEvent>;
eventHandler.Handle(tEvent);
}
}
catch (Exception)
{
    // 单个处理器抛出异常时忽略,避免中断其他订阅者;如需感知异常请使用带回调的 Publish 重载
}
}
}
public void Publish<TEvent>(TEvent tEvent, Action<TEvent, bool, Exception> callback) where TEvent : IEvent
{
var eventType = typeof(TEvent);
if (_dicEventHandler.ContainsKey(eventType) && _dicEventHandler[eventType] != null &&
_dicEventHandler[eventType].Count > 0)
{
var handlers = _dicEventHandler[eventType];
try
{
foreach (var handler in handlers)
{
var eventHandler = handler as IEventHandler<TEvent>;
eventHandler.Handle(tEvent);
callback(tEvent, true, null);
}
}
catch (Exception ex)
{
callback(tEvent, false, ex);
}
}
else
{
callback(tEvent, false, null);
}
}
#endregion
#region
/// <summary>
/// 取消订阅事件
/// </summary>
/// <param name="type"></param>
/// <param name="subType"></param>
public void Unsubscribe<TEvent>(IEventHandler<TEvent> eventHandler) where TEvent : IEvent
{
lock (_syncObject)
{
var eventType = typeof(TEvent);
if (_dicEventHandler.ContainsKey(eventType))
{
var handlers = _dicEventHandler[eventType];
if (handlers != null
&& handlers.Exists(deh => eventHandlerEquals(deh, eventHandler)))
{
var handlerToRemove = handlers.First(deh => eventHandlerEquals(deh, eventHandler));
handlers.Remove(handlerToRemove);
}
}
}
}
#endregion
}
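// 用法示例(新增,非原类的一部分):演示通过委托订阅并发布事件的基本流程;
// SampleEvent 为演示假设的事件类型,真实项目中可使用 MES.Utility.EventBus 下定义的事件。
internal class SampleEvent : IEvent
{
    public string Text { get; set; }
}
internal static class EventBusUsageSample
{
    internal static void Sample()
    {
        // 订阅:收到 SampleEvent 时打印消息
        EventBus.Instance.Subscribe<SampleEvent>(e => System.Console.WriteLine("收到:" + e.Text));
        // 发布:所有订阅者依次被调用
        EventBus.Instance.Publish(new SampleEvent { Text = "hello" });
        // 带回调的发布:每个处理器执行后回调一次,报告是否成功
        EventBus.Instance.Publish(new SampleEvent { Text = "again" },
            (evt, ok, ex) => System.Console.WriteLine("处理结果:" + ok));
    }
}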
}

View File

@@ -0,0 +1,17 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Weiz.EventBus.Core;
namespace MaiFu.Utility.EventBus
{
public class FileDownloadEvent : IEvent
{
public int What;
public long TotalLength { get; set; }
public long Current { get; set; }
}
}

View File

@@ -0,0 +1,20 @@
/****************************************************************
** 文件名: IEvent.cs
** 主要类: IEvent
** Copyright (c) 章为忠
** 创建人:
** 日 期: 2017.3.10
** 修改人:
** 日 期:
** 修改内容:
** 描 述:
** 版 本:
** 备 注:
****************************************************************/
namespace Weiz.EventBus.Core
{
public interface IEvent
{
}
}

View File

@@ -0,0 +1,30 @@
/****************************************************************
** 文件名: IEventHandler.cs
** 主要类: IEventHandler
** Copyright (c) 章为忠
** 创建人:
** 日 期: 2017.3.10
** 修改人:
** 日 期:
** 修改内容:
** 描 述:
** 版 本:
** 备 注:
****************************************************************/
namespace Weiz.EventBus.Core
{
/// <summary>
/// 事件处理接口
/// </summary>
/// <typeparam name="TEvent">继承IEvent对象的事件源对象</typeparam>
public interface IEventHandler<TEvent> where TEvent : IEvent
{
/// <summary>
/// 处理程序
/// </summary>
/// <param name="evt"></param>
void Handle(TEvent evt);
}
}

View File

@@ -0,0 +1,18 @@
using System;
using Weiz.EventBus.Core;
namespace MES.Utility.EventBus
{
public class KeyBordHookEvent : IEvent
{
public KeyBordHookEvent(string message)
{
Time = DateTime.Now;
Message = message;
}
public DateTime Time { get; set; }
public string Message { get; set; }
}
}

View File

@@ -0,0 +1,27 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Weiz.EventBus.Core;
namespace MES.Utility.EventBus
{
public class MessageEvent : IEvent
{
public MessageEvent() { }
public MessageEvent(string message)
{
Time = DateTime.Now;
Message = message;
}
public MessageEvent(DateTime time, string message)
{
Time = time;
Message = message;
}
public DateTime Time { get; set; }
public string Message { get; set; }
}
}

View File

@@ -0,0 +1,31 @@
using MES.Utility.Core;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Weiz.EventBus.Core;
namespace MES.Utility.EventBus
{
public class ReciveMessageEvent : IEvent
{
public ReciveMessageEvent() { }
public ReciveMessageEvent(byte[] data)
{
Data = data;
}
public byte[] Data { get; set; }
public string DataHexString
{
get
{
if (Data == null)
return string.Empty;
return Data.Select(it => Convert.ToString(it, 16).PadLeft(2, '0').ToUpper()).ToList().GetStrArray(" ");
}
}
}
}

View File

@@ -0,0 +1,27 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Weiz.EventBus.Core;
namespace MES.Utility.EventBus
{
public class ScanEvent : IEvent
{
public ScanEvent() { }
public ScanEvent(string message)
{
Time = DateTime.Now;
Message = message;
}
public ScanEvent(DateTime time, string message)
{
Time = time;
Message = message;
}
public DateTime Time { get; set; }
public string Message { get; set; }
}
}

View File

@@ -0,0 +1,27 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Weiz.EventBus.Core;
namespace MES.Utility.EventBus
{
public class ScanResultEvent : IEvent
{
public ScanResultEvent() { }
public ScanResultEvent(string message)
{
Time = DateTime.Now;
Message = message;
}
public ScanResultEvent(DateTime time, string message)
{
Time = time;
Message = message;
}
public DateTime Time { get; set; }
public string Message { get; set; }
}
}

View File

@@ -0,0 +1,31 @@
using MES.Utility.Core;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Weiz.EventBus.Core;
namespace MES.Utility.EventBus
{
public class SendMessageEvent : IEvent
{
public SendMessageEvent() { }
public SendMessageEvent(byte[] data)
{
Data = data;
}
public byte[] Data { get; set; }
public string DataHexString
{
get
{
if (Data == null)
return string.Empty;
return Data.Select(it => Convert.ToString(it, 16).PadLeft(2, '0').ToUpper()).ToList().GetStrArray(" ");
}
}
}
}

View File

@@ -0,0 +1,79 @@
using System;
using System.IO;
namespace ICSharpCode.SharpZipLib.BZip2
{
/// <summary>
/// An example class to demonstrate compression and decompression of BZip2 streams.
/// </summary>
public static class BZip2
{
/// <summary>
/// Decompress the <paramref name="inStream">input</paramref> writing
/// uncompressed data to the <paramref name="outStream">output stream</paramref>
/// </summary>
/// <param name="inStream">The readable stream containing data to decompress.</param>
/// <param name="outStream">The output stream to receive the decompressed data.</param>
/// <param name="isStreamOwner">Both streams are closed on completion if true.</param>
public static void Decompress(Stream inStream, Stream outStream, bool isStreamOwner)
{
if (inStream == null)
throw new ArgumentNullException(nameof(inStream));
if (outStream == null)
throw new ArgumentNullException(nameof(outStream));
try
{
using (BZip2InputStream bzipInput = new BZip2InputStream(inStream))
{
bzipInput.IsStreamOwner = isStreamOwner;
Core.StreamUtils.Copy(bzipInput, outStream, new byte[4096]);
}
}
finally
{
if (isStreamOwner)
{
// inStream is closed by the BZip2InputStream if stream owner
outStream.Dispose();
}
}
}
/// <summary>
/// Compress the <paramref name="inStream">input stream</paramref> sending
/// result data to <paramref name="outStream">output stream</paramref>
/// </summary>
/// <param name="inStream">The readable stream to compress.</param>
/// <param name="outStream">The output stream to receive the compressed data.</param>
/// <param name="isStreamOwner">Both streams are closed on completion if true.</param>
/// <param name="level">Block size acts as compression level (1 to 9) with 1 giving
/// the lowest compression and 9 the highest.</param>
public static void Compress(Stream inStream, Stream outStream, bool isStreamOwner, int level)
{
if (inStream == null)
throw new ArgumentNullException(nameof(inStream));
if (outStream == null)
throw new ArgumentNullException(nameof(outStream));
try
{
using (BZip2OutputStream bzipOutput = new BZip2OutputStream(outStream, level))
{
bzipOutput.IsStreamOwner = isStreamOwner;
Core.StreamUtils.Copy(inStream, bzipOutput, new byte[4096]);
}
}
finally
{
if (isStreamOwner)
{
// outStream is closed by the BZip2OutputStream if stream owner
inStream.Dispose();
}
}
}
}
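// Usage sketch (added for illustration, not part of the original SharpZipLib source):
// round-trips a byte buffer through BZip2 compression and decompression in memory.
internal static class BZip2UsageSample
{
    internal static void Sample()
    {
        byte[] original = System.Text.Encoding.UTF8.GetBytes("hello bzip2");
        var compressed = new MemoryStream();
        // isStreamOwner: false keeps the input/output streams open for further use
        BZip2.Compress(new MemoryStream(original), compressed, false, 9);
        var restored = new MemoryStream();
        BZip2.Decompress(new MemoryStream(compressed.ToArray()), restored, false);
        System.Console.WriteLine(System.Text.Encoding.UTF8.GetString(restored.ToArray()));
    }
}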
}

View File

@@ -0,0 +1,117 @@
namespace ICSharpCode.SharpZipLib.BZip2
{
/// <summary>
/// Defines internal values for both compression and decompression
/// </summary>
internal static class BZip2Constants
{
/// <summary>
/// Random numbers used to randomise repetitive blocks
/// </summary>
public readonly static int[] RandomNumbers = {
619, 720, 127, 481, 931, 816, 813, 233, 566, 247,
985, 724, 205, 454, 863, 491, 741, 242, 949, 214,
733, 859, 335, 708, 621, 574, 73, 654, 730, 472,
419, 436, 278, 496, 867, 210, 399, 680, 480, 51,
878, 465, 811, 169, 869, 675, 611, 697, 867, 561,
862, 687, 507, 283, 482, 129, 807, 591, 733, 623,
150, 238, 59, 379, 684, 877, 625, 169, 643, 105,
170, 607, 520, 932, 727, 476, 693, 425, 174, 647,
73, 122, 335, 530, 442, 853, 695, 249, 445, 515,
909, 545, 703, 919, 874, 474, 882, 500, 594, 612,
641, 801, 220, 162, 819, 984, 589, 513, 495, 799,
161, 604, 958, 533, 221, 400, 386, 867, 600, 782,
382, 596, 414, 171, 516, 375, 682, 485, 911, 276,
98, 553, 163, 354, 666, 933, 424, 341, 533, 870,
227, 730, 475, 186, 263, 647, 537, 686, 600, 224,
469, 68, 770, 919, 190, 373, 294, 822, 808, 206,
184, 943, 795, 384, 383, 461, 404, 758, 839, 887,
715, 67, 618, 276, 204, 918, 873, 777, 604, 560,
951, 160, 578, 722, 79, 804, 96, 409, 713, 940,
652, 934, 970, 447, 318, 353, 859, 672, 112, 785,
645, 863, 803, 350, 139, 93, 354, 99, 820, 908,
609, 772, 154, 274, 580, 184, 79, 626, 630, 742,
653, 282, 762, 623, 680, 81, 927, 626, 789, 125,
411, 521, 938, 300, 821, 78, 343, 175, 128, 250,
170, 774, 972, 275, 999, 639, 495, 78, 352, 126,
857, 956, 358, 619, 580, 124, 737, 594, 701, 612,
669, 112, 134, 694, 363, 992, 809, 743, 168, 974,
944, 375, 748, 52, 600, 747, 642, 182, 862, 81,
344, 805, 988, 739, 511, 655, 814, 334, 249, 515,
897, 955, 664, 981, 649, 113, 974, 459, 893, 228,
433, 837, 553, 268, 926, 240, 102, 654, 459, 51,
686, 754, 806, 760, 493, 403, 415, 394, 687, 700,
946, 670, 656, 610, 738, 392, 760, 799, 887, 653,
978, 321, 576, 617, 626, 502, 894, 679, 243, 440,
680, 879, 194, 572, 640, 724, 926, 56, 204, 700,
707, 151, 457, 449, 797, 195, 791, 558, 945, 679,
297, 59, 87, 824, 713, 663, 412, 693, 342, 606,
134, 108, 571, 364, 631, 212, 174, 643, 304, 329,
343, 97, 430, 751, 497, 314, 983, 374, 822, 928,
140, 206, 73, 263, 980, 736, 876, 478, 430, 305,
170, 514, 364, 692, 829, 82, 855, 953, 676, 246,
369, 970, 294, 750, 807, 827, 150, 790, 288, 923,
804, 378, 215, 828, 592, 281, 565, 555, 710, 82,
896, 831, 547, 261, 524, 462, 293, 465, 502, 56,
661, 821, 976, 991, 658, 869, 905, 758, 745, 193,
768, 550, 608, 933, 378, 286, 215, 979, 792, 961,
61, 688, 793, 644, 986, 403, 106, 366, 905, 644,
372, 567, 466, 434, 645, 210, 389, 550, 919, 135,
780, 773, 635, 389, 707, 100, 626, 958, 165, 504,
920, 176, 193, 713, 857, 265, 203, 50, 668, 108,
645, 990, 626, 197, 510, 357, 358, 850, 858, 364,
936, 638
};
/// <summary>
/// When multiplied by compression parameter (1-9) gives the block size for compression
/// 9 gives the best compression but uses the most memory.
/// </summary>
public const int BaseBlockSize = 100000;
/// <summary>
/// Backend constant
/// </summary>
public const int MaximumAlphaSize = 258;
/// <summary>
/// Backend constant
/// </summary>
public const int MaximumCodeLength = 23;
/// <summary>
/// Backend constant
/// </summary>
public const int RunA = 0;
/// <summary>
/// Backend constant
/// </summary>
public const int RunB = 1;
/// <summary>
/// Backend constant
/// </summary>
public const int GroupCount = 6;
/// <summary>
/// Backend constant
/// </summary>
public const int GroupSize = 50;
/// <summary>
/// Backend constant
/// </summary>
public const int NumberOfIterations = 4;
/// <summary>
/// Backend constant
/// </summary>
public const int MaximumSelectors = (2 + (900000 / GroupSize));
/// <summary>
/// Backend constant
/// </summary>
public const int OvershootBytes = 20;
}
}

View File

@@ -0,0 +1,54 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib.BZip2
{
/// <summary>
/// BZip2Exception represents exceptions specific to BZip2 classes and code.
/// </summary>
[Serializable]
public class BZip2Exception : SharpZipBaseException
{
/// <summary>
/// Initialise a new instance of <see cref="BZip2Exception" />.
/// </summary>
public BZip2Exception()
{
}
/// <summary>
/// Initialise a new instance of <see cref="BZip2Exception" /> with its message string.
/// </summary>
/// <param name="message">A <see cref="string"/> that describes the error.</param>
public BZip2Exception(string message)
: base(message)
{
}
/// <summary>
/// Initialise a new instance of <see cref="BZip2Exception" />.
/// </summary>
/// <param name="message">A <see cref="string"/> that describes the error.</param>
/// <param name="innerException">The <see cref="Exception"/> that caused this exception.</param>
public BZip2Exception(string message, Exception innerException)
: base(message, innerException)
{
}
/// <summary>
/// Initializes a new instance of the BZip2Exception class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected BZip2Exception(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,163 @@
using System;
namespace ICSharpCode.SharpZipLib.Checksum
{
/// <summary>
/// Computes Adler32 checksum for a stream of data. An Adler32
/// checksum is not as reliable as a CRC32 checksum, but a lot faster to
/// compute.
///
/// The specification for Adler32 may be found in RFC 1950.
/// ZLIB Compressed Data Format Specification version 3.3)
///
///
/// From that document:
///
/// "ADLER32 (Adler-32 checksum)
/// This contains a checksum value of the uncompressed data
/// (excluding any dictionary data) computed according to Adler-32
/// algorithm. This algorithm is a 32-bit extension and improvement
/// of the Fletcher algorithm, used in the ITU-T X.224 / ISO 8073
/// standard.
///
/// Adler-32 is composed of two sums accumulated per byte: s1 is
/// the sum of all bytes, s2 is the sum of all s1 values. Both sums
/// are done modulo 65521. s1 is initialized to 1, s2 to zero. The
/// Adler-32 checksum is stored as s2*65536 + s1 in most-
/// significant-byte first (network) order."
///
/// "8.2. The Adler-32 algorithm
///
/// The Adler-32 algorithm is much faster than the CRC32 algorithm yet
/// still provides an extremely low probability of undetected errors.
///
/// The modulo on unsigned long accumulators can be delayed for 5552
/// bytes, so the modulo operation time is negligible. If the bytes
/// are a, b, c, the second sum is 3a + 2b + c + 3, and so is position
/// and order sensitive, unlike the first sum, which is just a
/// checksum. That 65521 is prime is important to avoid a possible
/// large class of two-byte errors that leave the check unchanged.
/// (The Fletcher checksum uses 255, which is not prime and which also
/// makes the Fletcher check insensitive to single byte changes 0 -
/// 255.)
///
/// The sum s1 is initialized to 1 instead of zero to make the length
/// of the sequence part of s2, so that the length does not have to be
/// checked separately. (Any sequence of zeroes has a Fletcher
/// checksum of zero.)"
/// </summary>
/// <see cref="ICSharpCode.SharpZipLib.Zip.Compression.Streams.InflaterInputStream"/>
/// <see cref="ICSharpCode.SharpZipLib.Zip.Compression.Streams.DeflaterOutputStream"/>
public sealed class Adler32 : IChecksum
{
#region Instance Fields
/// <summary>
/// largest prime smaller than 65536
/// </summary>
private static readonly uint BASE = 65521;
/// <summary>
/// The CRC data checksum so far.
/// </summary>
private uint checkValue;
#endregion Instance Fields
/// <summary>
/// Initialise a default instance of <see cref="Adler32"></see>
/// </summary>
public Adler32()
{
Reset();
}
/// <summary>
/// Resets the Adler32 data checksum as if no update was ever called.
/// </summary>
public void Reset()
{
checkValue = 1;
}
/// <summary>
/// Returns the Adler32 data checksum computed so far.
/// </summary>
public long Value
{
get
{
return checkValue;
}
}
/// <summary>
/// Updates the checksum with the byte b.
/// </summary>
/// <param name="bval">
/// The data value to add. The high byte of the int is ignored.
/// </param>
public void Update(int bval)
{
// We could make a length 1 byte array and call update again, but I
// would rather not have that overhead
uint s1 = checkValue & 0xFFFF;
uint s2 = checkValue >> 16;
s1 = (s1 + ((uint)bval & 0xFF)) % BASE;
s2 = (s1 + s2) % BASE;
checkValue = (s2 << 16) + s1;
}
/// <summary>
/// Updates the Adler32 data checksum with the bytes taken from
/// a block of data.
/// </summary>
/// <param name="buffer">Contains the data to update the checksum with.</param>
public void Update(byte[] buffer)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
Update(new ArraySegment<byte>(buffer, 0, buffer.Length));
}
/// <summary>
/// Update Adler32 data checksum based on a portion of a block of data
/// </summary>
/// <param name = "segment">
/// The chunk of data to add
/// </param>
public void Update(ArraySegment<byte> segment)
{
//(By Per Bothner)
uint s1 = checkValue & 0xFFFF;
uint s2 = checkValue >> 16;
var count = segment.Count;
var offset = segment.Offset;
while (count > 0)
{
// We can defer the modulo operation:
// s1 maximally grows from 65521 to 65521 + 255 * 3800
// s2 maximally grows by 3800 * median(s1) = 2090079800 < 2^31
int n = 3800;
if (n > count)
{
n = count;
}
count -= n;
while (--n >= 0)
{
s1 = s1 + (uint)(segment.Array[offset++] & 0xff);
s2 = s2 + s1;
}
s1 %= BASE;
s2 %= BASE;
}
checkValue = (s2 << 16) | s1;
}
}
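// Usage sketch (added for illustration, not part of the original SharpZipLib source):
// computes an Adler32 checksum over a byte buffer, as described in RFC 1950.
internal static class Adler32UsageSample
{
    internal static void Sample()
    {
        var adler = new Adler32();
        byte[] data = System.Text.Encoding.ASCII.GetBytes("Wikipedia");
        adler.Update(data);
        // "Wikipedia" has the well-known Adler-32 value 0x11E60398
        System.Console.WriteLine(adler.Value.ToString("X8"));
    }
}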
}

View File

@@ -0,0 +1,171 @@
using System;
using System.Runtime.CompilerServices;
namespace ICSharpCode.SharpZipLib.Checksum
{
/// <summary>
/// CRC-32 with unreversed data and reversed output
/// </summary>
/// <remarks>
/// Generate a table for a byte-wise 32-bit CRC calculation on the polynomial:
/// x^32+x^26+x^23+x^22+x^16+x^12+x^11+x^10+x^8+x^7+x^5+x^4+x^2+x^1+x^0.
///
/// Polynomials over GF(2) are represented in binary, one bit per coefficient,
/// with the lowest powers in the most significant bit. Then adding polynomials
/// is just exclusive-or, and multiplying a polynomial by x is a right shift by
/// one. If we call the above polynomial p, and represent a byte as the
/// polynomial q, also with the lowest power in the most significant bit (so the
/// byte 0xb1 is the polynomial x^7+x^3+x+1), then the CRC is (q*x^32) mod p,
/// where a mod b means the remainder after dividing a by b.
///
/// This calculation is done using the shift-register method of multiplying and
/// taking the remainder. The register is initialized to zero, and for each
/// incoming bit, x^32 is added mod p to the register if the bit is a one (where
/// x^32 mod p is p+x^32 = x^26+...+1), and the register is multiplied mod p by
/// x (which is shifting right by one and adding x^32 mod p if the bit shifted
/// out is a one). We start with the highest power (least significant bit) of
/// q and repeat for all eight bits of q.
///
/// This implementation uses sixteen lookup tables stored in one linear array
/// to implement the slicing-by-16 algorithm, a variant of the slicing-by-8
/// algorithm described in this Intel white paper:
///
/// https://web.archive.org/web/20120722193753/http://download.intel.com/technology/comms/perfnet/download/slicing-by-8.pdf
///
/// The first lookup table is simply the CRC of all possible eight bit values.
/// Each successive lookup table is derived from the original table generated
/// by Sarwate's algorithm. Slicing a 16-bit input and XORing the outputs
/// together will produce the same output as a byte-by-byte CRC loop with
/// fewer arithmetic and bit manipulation operations, at the cost of increased
/// memory consumed by the lookup tables. (Slicing-by-16 requires a 16KB table,
/// which is still small enough to fit in most processors' L1 cache.)
/// </remarks>
public sealed class BZip2Crc : IChecksum
{
#region Instance Fields
private const uint crcInit = 0xFFFFFFFF;
//const uint crcXor = 0x00000000;
private static readonly uint[] crcTable = CrcUtilities.GenerateSlicingLookupTable(0x04C11DB7, isReversed: false);
/// <summary>
/// The CRC data checksum so far.
/// </summary>
private uint checkValue;
#endregion Instance Fields
/// <summary>
/// Initialise a default instance of <see cref="BZip2Crc"></see>
/// </summary>
public BZip2Crc()
{
Reset();
}
/// <summary>
/// Resets the CRC data checksum as if no update was ever called.
/// </summary>
public void Reset()
{
checkValue = crcInit;
}
/// <summary>
/// Returns the CRC data checksum computed so far.
/// </summary>
/// <remarks>Reversed Out = true</remarks>
public long Value
{
get
{
// Technically, the output should be:
//return (long)(~checkValue ^ crcXor);
// but x ^ 0 = x, so there is no point in adding
// the XOR operation
return (long)(~checkValue);
}
}
/// <summary>
/// Updates the checksum with the int bval.
/// </summary>
/// <param name = "bval">
/// the byte is taken as the lower 8 bits of bval
/// </param>
/// <remarks>Reversed Data = false</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Update(int bval)
{
checkValue = unchecked(crcTable[(byte)(((checkValue >> 24) & 0xFF) ^ bval)] ^ (checkValue << 8));
}
/// <summary>
/// Updates the CRC data checksum with the bytes taken from
/// a block of data.
/// </summary>
/// <param name="buffer">Contains the data to update the CRC with.</param>
public void Update(byte[] buffer)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
Update(buffer, 0, buffer.Length);
}
/// <summary>
/// Update CRC data checksum based on a portion of a block of data
/// </summary>
/// <param name = "segment">
/// The chunk of data to add
/// </param>
public void Update(ArraySegment<byte> segment)
{
Update(segment.Array, segment.Offset, segment.Count);
}
/// <summary>
/// Internal helper function for updating a block of data using slicing.
/// </summary>
/// <param name="data">The array containing the data to add</param>
/// <param name="offset">Range start for <paramref name="data"/> (inclusive)</param>
/// <param name="count">The number of bytes to checksum starting from <paramref name="offset"/></param>
private void Update(byte[] data, int offset, int count)
{
int remainder = count % CrcUtilities.SlicingDegree;
int end = offset + count - remainder;
while (offset != end)
{
checkValue = CrcUtilities.UpdateDataForNormalPoly(data, offset, crcTable, checkValue);
offset += CrcUtilities.SlicingDegree;
}
if (remainder != 0)
{
SlowUpdateLoop(data, offset, end + remainder);
}
}
/// <summary>
/// A non-inlined function for updating data that doesn't fit in a 16-byte
/// block. We don't expect to enter this function most of the time, and when
/// we do we're not here for long, so disabling inlining here improves
/// performance overall.
/// </summary>
/// <param name="data">The array containing the data to add</param>
/// <param name="offset">Range start for <paramref name="data"/> (inclusive)</param>
/// <param name="end">Range end for <paramref name="data"/> (exclusive)</param>
[MethodImpl(MethodImplOptions.NoInlining)]
private void SlowUpdateLoop(byte[] data, int offset, int end)
{
while (offset != end)
{
Update(data[offset++]);
}
}
}
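// Illustrative usage only (not part of the original source): a minimal sketch of the BZip2Crc
// class above, shown using the byte-by-byte Update(int) path; Update(byte[]) gives the same
// result. The helper name is hypothetical.
internal static class BZip2CrcUsageExample
{
internal static long ChecksumOf(byte[] data)
{
var crc = new BZip2Crc();
foreach (var b in data)
crc.Update(b);              // one byte at a time through the lookup table
return crc.Value;
}
}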
}

View File

@@ -0,0 +1,173 @@
using System;
using System.Runtime.CompilerServices;
namespace ICSharpCode.SharpZipLib.Checksum
{
/// <summary>
/// CRC-32 with reversed data and unreversed output
/// </summary>
/// <remarks>
/// Generate a table for a byte-wise 32-bit CRC calculation on the polynomial:
/// x^32+x^26+x^23+x^22+x^16+x^12+x^11+x^10+x^8+x^7+x^5+x^4+x^2+x^1+x^0.
///
/// Polynomials over GF(2) are represented in binary, one bit per coefficient,
/// with the lowest powers in the most significant bit. Then adding polynomials
/// is just exclusive-or, and multiplying a polynomial by x is a right shift by
/// one. If we call the above polynomial p, and represent a byte as the
/// polynomial q, also with the lowest power in the most significant bit (so the
/// byte 0xb1 is the polynomial x^7+x^3+x+1), then the CRC is (q*x^32) mod p,
/// where a mod b means the remainder after dividing a by b.
///
/// This calculation is done using the shift-register method of multiplying and
/// taking the remainder. The register is initialized to zero, and for each
/// incoming bit, x^32 is added mod p to the register if the bit is a one (where
/// x^32 mod p is p+x^32 = x^26+...+1), and the register is multiplied mod p by
/// x (which is shifting right by one and adding x^32 mod p if the bit shifted
/// out is a one). We start with the highest power (least significant bit) of
/// q and repeat for all eight bits of q.
///
/// This implementation uses sixteen lookup tables stored in one linear array
/// to implement the slicing-by-16 algorithm, a variant of the slicing-by-8
/// algorithm described in this Intel white paper:
///
/// https://web.archive.org/web/20120722193753/http://download.intel.com/technology/comms/perfnet/download/slicing-by-8.pdf
///
/// The first lookup table is simply the CRC of all possible eight bit values.
/// Each successive lookup table is derived from the original table generated
/// by Sarwate's algorithm. Slicing a 16-bit input and XORing the outputs
/// together will produce the same output as a byte-by-byte CRC loop with
/// fewer arithmetic and bit manipulation operations, at the cost of increased
/// memory consumed by the lookup tables. (Slicing-by-16 requires a 16KB table,
/// which is still small enough to fit in most processors' L1 cache.)
/// </remarks>
public sealed class Crc32 : IChecksum
{
#region Instance Fields
private static readonly uint crcInit = 0xFFFFFFFF;
private static readonly uint crcXor = 0xFFFFFFFF;
private static readonly uint[] crcTable = CrcUtilities.GenerateSlicingLookupTable(0xEDB88320, isReversed: true);
/// <summary>
/// The CRC data checksum so far.
/// </summary>
private uint checkValue;
#endregion Instance Fields
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static uint ComputeCrc32(uint oldCrc, byte bval)
{
return (uint)(Crc32.crcTable[(oldCrc ^ bval) & 0xFF] ^ (oldCrc >> 8));
}
/// <summary>
/// Initialise a default instance of <see cref="Crc32"></see>
/// </summary>
public Crc32()
{
Reset();
}
/// <summary>
/// Resets the CRC data checksum as if no update was ever called.
/// </summary>
public void Reset()
{
checkValue = crcInit;
}
/// <summary>
/// Returns the CRC data checksum computed so far.
/// </summary>
/// <remarks>Reversed Out = false</remarks>
public long Value
{
get
{
return (long)(checkValue ^ crcXor);
}
}
/// <summary>
/// Updates the checksum with the int bval.
/// </summary>
/// <param name = "bval">
/// the byte is taken as the lower 8 bits of bval
/// </param>
/// <remarks>Reversed Data = true</remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Update(int bval)
{
checkValue = unchecked(crcTable[(checkValue ^ bval) & 0xFF] ^ (checkValue >> 8));
}
/// <summary>
/// Updates the CRC data checksum with the bytes taken from
/// a block of data.
/// </summary>
/// <param name="buffer">Contains the data to update the CRC with.</param>
public void Update(byte[] buffer)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
Update(buffer, 0, buffer.Length);
}
/// <summary>
/// Update CRC data checksum based on a portion of a block of data
/// </summary>
/// <param name = "segment">
/// The chunk of data to add
/// </param>
public void Update(ArraySegment<byte> segment)
{
Update(segment.Array, segment.Offset, segment.Count);
}
/// <summary>
/// Internal helper function for updating a block of data using slicing.
/// </summary>
/// <param name="data">The array containing the data to add</param>
/// <param name="offset">Range start for <paramref name="data"/> (inclusive)</param>
/// <param name="count">The number of bytes to checksum starting from <paramref name="offset"/></param>
private void Update(byte[] data, int offset, int count)
{
int remainder = count % CrcUtilities.SlicingDegree;
int end = offset + count - remainder;
while (offset != end)
{
checkValue = CrcUtilities.UpdateDataForReversedPoly(data, offset, crcTable, checkValue);
offset += CrcUtilities.SlicingDegree;
}
if (remainder != 0)
{
SlowUpdateLoop(data, offset, end + remainder);
}
}
/// <summary>
/// A non-inlined function for updating data that doesn't fit in a 16-byte
/// block. We don't expect to enter this function most of the time, and when
/// we do we're not here for long, so disabling inlining here improves
/// performance overall.
/// </summary>
/// <param name="data">The array containing the data to add</param>
/// <param name="offset">Range start for <paramref name="data"/> (inclusive)</param>
/// <param name="end">Range end for <paramref name="data"/> (exclusive)</param>
[MethodImpl(MethodImplOptions.NoInlining)]
private void SlowUpdateLoop(byte[] data, int offset, int end)
{
while (offset != end)
{
Update(data[offset++]);
}
}
}
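// Illustrative usage only (not part of the original source): a minimal sketch of the Crc32
// class above. For the ASCII bytes of "123456789" the result should be the well-known CRC-32
// check value 0xCBF43926. The helper names are hypothetical.
internal static class Crc32UsageExample
{
internal static long ChecksumOf(byte[] data)
{
var crc = new Crc32();
crc.Update(data);
return crc.Value;
}
internal static bool SelfTest()
{
// ASCII "123456789"
var ascii = new byte[] { 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39 };
return ChecksumOf(ascii) == 0xCBF43926;
}
}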
}

View File

@@ -0,0 +1,158 @@
using System.Runtime.CompilerServices;
namespace ICSharpCode.SharpZipLib.Checksum
{
internal static class CrcUtilities
{
/// <summary>
/// The number of slicing lookup tables to generate.
/// </summary>
internal const int SlicingDegree = 16;
/// <summary>
/// Generates multiple CRC lookup tables for a given polynomial, stored
/// in a linear array of uints. The first block (i.e. the first 256
/// elements) is the same as the byte-by-byte CRC lookup table.
/// </summary>
/// <param name="polynomial">The generating CRC polynomial</param>
/// <param name="isReversed">Whether the polynomial is in reversed bit order</param>
/// <returns>A linear array of 256 * <see cref="SlicingDegree"/> elements</returns>
/// <remarks>
/// This table could also be generated as a rectangular array, but the
/// JIT compiler generates slower code than if we use a linear array.
/// Known issue, see: https://github.com/dotnet/runtime/issues/30275
/// </remarks>
internal static uint[] GenerateSlicingLookupTable(uint polynomial, bool isReversed)
{
var table = new uint[256 * SlicingDegree];
uint one = isReversed ? 1 : (1U << 31);
for (int i = 0; i < 256; i++)
{
uint res = (uint)(isReversed ? i : i << 24);
for (int j = 0; j < SlicingDegree; j++)
{
for (int k = 0; k < 8; k++)
{
if (isReversed)
{
res = (res & one) == 1 ? polynomial ^ (res >> 1) : res >> 1;
}
else
{
res = (res & one) != 0 ? polynomial ^ (res << 1) : res << 1;
}
}
table[(256 * j) + i] = res;
}
}
return table;
}
/// <summary>
/// Mixes the first four bytes of input with <paramref name="checkValue"/>
/// using normal ordering before calling <see cref="UpdateDataCommon"/>.
/// </summary>
/// <param name="input">Array of data to checksum</param>
/// <param name="offset">Offset to start reading <paramref name="input"/> from</param>
/// <param name="crcTable">The table to use for slicing-by-16 lookup</param>
/// <param name="checkValue">Checksum state before this update call</param>
/// <returns>A new unfinalized checksum value</returns>
/// <seealso cref="UpdateDataForReversedPoly"/>
/// <remarks>
/// Assumes input[offset]..input[offset + 15] are valid array indexes.
/// For performance reasons, this must be checked by the caller.
/// </remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static uint UpdateDataForNormalPoly(byte[] input, int offset, uint[] crcTable, uint checkValue)
{
byte x1 = (byte)((byte)(checkValue >> 24) ^ input[offset]);
byte x2 = (byte)((byte)(checkValue >> 16) ^ input[offset + 1]);
byte x3 = (byte)((byte)(checkValue >> 8) ^ input[offset + 2]);
byte x4 = (byte)((byte)checkValue ^ input[offset + 3]);
return UpdateDataCommon(input, offset, crcTable, x1, x2, x3, x4);
}
/// <summary>
/// Mixes the first four bytes of input with <paramref name="checkValue"/>
/// using reflected ordering before calling <see cref="UpdateDataCommon"/>.
/// </summary>
/// <param name="input">Array of data to checksum</param>
/// <param name="offset">Offset to start reading <paramref name="input"/> from</param>
/// <param name="crcTable">The table to use for slicing-by-16 lookup</param>
/// <param name="checkValue">Checksum state before this update call</param>
/// <returns>A new unfinalized checksum value</returns>
/// <seealso cref="UpdateDataForNormalPoly"/>
/// <remarks>
/// Assumes input[offset]..input[offset + 15] are valid array indexes.
/// For performance reasons, this must be checked by the caller.
/// </remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static uint UpdateDataForReversedPoly(byte[] input, int offset, uint[] crcTable, uint checkValue)
{
byte x1 = (byte)((byte)checkValue ^ input[offset]);
byte x2 = (byte)((byte)(checkValue >>= 8) ^ input[offset + 1]);
byte x3 = (byte)((byte)(checkValue >>= 8) ^ input[offset + 2]);
byte x4 = (byte)((byte)(checkValue >>= 8) ^ input[offset + 3]);
return UpdateDataCommon(input, offset, crcTable, x1, x2, x3, x4);
}
/// <summary>
/// A shared method for updating an unfinalized CRC checksum using slicing-by-16.
/// </summary>
/// <param name="input">Array of data to checksum</param>
/// <param name="offset">Offset to start reading <paramref name="input"/> from</param>
/// <param name="crcTable">The table to use for slicing-by-16 lookup</param>
/// <param name="x1">First byte of input after mixing with the old CRC</param>
/// <param name="x2">Second byte of input after mixing with the old CRC</param>
/// <param name="x3">Third byte of input after mixing with the old CRC</param>
/// <param name="x4">Fourth byte of input after mixing with the old CRC</param>
/// <returns>A new unfinalized checksum value</returns>
/// <remarks>
/// <para>
/// Even though the first four bytes of input are fed in as arguments,
/// <paramref name="offset"/> should be the same value passed to this
/// function's caller (either <see cref="UpdateDataForNormalPoly"/> or
/// <see cref="UpdateDataForReversedPoly"/>). This method will get inlined
/// into both functions, so using the same offset produces faster code.
/// </para>
/// <para>
/// Because most processors running C# have some kind of instruction-level
/// parallelism, the order of XOR operations can affect performance. This
/// ordering assumes that the assembly code generated by the just-in-time
/// compiler will emit a bunch of arithmetic operations for checking array
/// bounds. Then it opportunistically XORs a1 and a2 to keep the processor
/// busy while those other parts of the pipeline handle the range check
/// calculations.
/// </para>
/// </remarks>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static uint UpdateDataCommon(byte[] input, int offset, uint[] crcTable, byte x1, byte x2, byte x3, byte x4)
{
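// Table layout note: crcTable is 16 concatenated 256-entry tables. An offset that is a
// multiple of 256 selects the table for one position within the 16-byte slice: the first
// byte uses offset 3840 (= 256 * 15), the second 3584 (= 256 * 14), and so on down to
// offset 0 for the last byte.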
uint result;
uint a1 = crcTable[x1 + 3840] ^ crcTable[x2 + 3584];
uint a2 = crcTable[x3 + 3328] ^ crcTable[x4 + 3072];
result = crcTable[input[offset + 4] + 2816];
result ^= crcTable[input[offset + 5] + 2560];
a1 ^= crcTable[input[offset + 9] + 1536];
result ^= crcTable[input[offset + 6] + 2304];
result ^= crcTable[input[offset + 7] + 2048];
result ^= crcTable[input[offset + 8] + 1792];
a2 ^= crcTable[input[offset + 13] + 512];
result ^= crcTable[input[offset + 10] + 1280];
result ^= crcTable[input[offset + 11] + 1024];
result ^= crcTable[input[offset + 12] + 768];
result ^= a1;
result ^= crcTable[input[offset + 14] + 256];
result ^= crcTable[input[offset + 15]];
result ^= a2;
return result;
}
}
}

View File

@@ -0,0 +1,51 @@
using System;
namespace ICSharpCode.SharpZipLib.Checksum
{
/// <summary>
/// Interface to compute a data checksum used by checked input/output streams.
/// A data checksum can be updated by one byte or with a byte array. After each
/// update the current checksum value can be read from the <see cref="Value"/> property.
/// The complete checksum object can also be reset so it can be used again with new data.
/// </summary>
public interface IChecksum
{
/// <summary>
/// Resets the data checksum as if no update was ever called.
/// </summary>
void Reset();
/// <summary>
/// Returns the data checksum computed so far.
/// </summary>
long Value
{
get;
}
/// <summary>
/// Adds one byte to the data checksum.
/// </summary>
/// <param name = "bval">
/// the data value to add. The high byte of the int is ignored.
/// </param>
void Update(int bval);
/// <summary>
/// Updates the data checksum with the bytes taken from the array.
/// </summary>
/// <param name="buffer">
/// buffer an array of bytes
/// </param>
void Update(byte[] buffer);
/// <summary>
/// Adds the byte array to the data checksum.
/// </summary>
/// <param name = "segment">
/// The chunk of data to add
/// </param>
void Update(ArraySegment<byte> segment);
}
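// Illustrative usage only (not part of the original source): a minimal sketch showing how any
// IChecksum implementation (Adler32, Crc32, BZip2Crc) can be fed from a stream using the
// Reset/Update/Value pattern described above. The helper name and buffer size are hypothetical.
internal static class ChecksumStreamExample
{
internal static long Compute(IChecksum checksum, System.IO.Stream source)
{
checksum.Reset();
var buffer = new byte[4096];
int read;
while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
{
// Only the bytes actually read are added to the checksum.
checksum.Update(new ArraySegment<byte>(buffer, 0, read));
}
return checksum.Value;
}
}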
}

View File

@@ -0,0 +1,13 @@
using System;
namespace ICSharpCode.SharpZipLib.Core
{
internal static class Empty
{
internal static class EmptyArray<T>
{
public static readonly T[] Value = new T[0];
}
public static T[] Array<T>() => EmptyArray<T>.Value;
}
}

View File

@@ -0,0 +1,58 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib
{
/// <summary>
/// SharpZipBaseException is the base exception class for SharpZipLib.
/// All library exceptions are derived from this.
/// </summary>
/// <remarks>NOTE: Not all exceptions thrown will be derived from this class.
/// A variety of other exceptions are also possible, for example <see cref="ArgumentNullException"></see>.</remarks>
[Serializable]
public class SharpZipBaseException : Exception
{
/// <summary>
/// Initializes a new instance of the SharpZipBaseException class.
/// </summary>
public SharpZipBaseException()
{
}
/// <summary>
/// Initializes a new instance of the SharpZipBaseException class with a specified error message.
/// </summary>
/// <param name="message">A message describing the exception.</param>
public SharpZipBaseException(string message)
: base(message)
{
}
/// <summary>
/// Initializes a new instance of the SharpZipBaseException class with a specified
/// error message and a reference to the inner exception that is the cause of this exception.
/// </summary>
/// <param name="message">A message describing the exception.</param>
/// <param name="innerException">The inner exception</param>
public SharpZipBaseException(string message, Exception innerException)
: base(message, innerException)
{
}
/// <summary>
/// Initializes a new instance of the SharpZipBaseException class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected SharpZipBaseException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

View File

@@ -0,0 +1,50 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib
{
/// <summary>
/// Indicates that an error occurred during decoding of an input stream due to corrupt
/// data or (unintentional) library incompatibility.
/// </summary>
[Serializable]
public class StreamDecodingException : SharpZipBaseException
{
private const string GenericMessage = "Input stream could not be decoded";
/// <summary>
/// Initializes a new instance of the StreamDecodingException with a generic message
/// </summary>
public StreamDecodingException() : base(GenericMessage) { }
/// <summary>
/// Initializes a new instance of the StreamDecodingException class with a specified error message.
/// </summary>
/// <param name="message">A message describing the exception.</param>
public StreamDecodingException(string message) : base(message) { }
/// <summary>
/// Initializes a new instance of the StreamDecodingException class with a specified
/// error message and a reference to the inner exception that is the cause of this exception.
/// </summary>
/// <param name="message">A message describing the exception.</param>
/// <param name="innerException">The inner exception</param>
public StreamDecodingException(string message, Exception innerException) : base(message, innerException) { }
/// <summary>
/// Initializes a new instance of the StreamDecodingException class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected StreamDecodingException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

View File

@@ -0,0 +1,49 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib
{
/// <summary>
/// Indicates that the input stream could not be decoded due to a known library incompatibility or missing features
/// </summary>
[Serializable]
public class StreamUnsupportedException : StreamDecodingException
{
private const string GenericMessage = "Input stream is in an unsupported format";
/// <summary>
/// Initializes a new instance of the StreamUnsupportedException with a generic message
/// </summary>
public StreamUnsupportedException() : base(GenericMessage) { }
/// <summary>
/// Initializes a new instance of the StreamUnsupportedException class with a specified error message.
/// </summary>
/// <param name="message">A message describing the exception.</param>
public StreamUnsupportedException(string message) : base(message) { }
/// <summary>
/// Initializes a new instance of the StreamUnsupportedException class with a specified
/// error message and a reference to the inner exception that is the cause of this exception.
/// </summary>
/// <param name="message">A message describing the exception.</param>
/// <param name="innerException">The inner exception</param>
public StreamUnsupportedException(string message, Exception innerException) : base(message, innerException) { }
/// <summary>
/// Initializes a new instance of the StreamUnsupportedException class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected StreamUnsupportedException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

View File

@@ -0,0 +1,49 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib
{
/// <summary>
/// Indicates that the input stream could not be decoded because the stream ended before enough data had been provided
/// </summary>
[Serializable]
public class UnexpectedEndOfStreamException : StreamDecodingException
{
private const string GenericMessage = "Input stream ended unexpectedly";
/// <summary>
/// Initializes a new instance of the UnexpectedEndOfStreamException with a generic message
/// </summary>
public UnexpectedEndOfStreamException() : base(GenericMessage) { }
/// <summary>
/// Initializes a new instance of the UnexpectedEndOfStreamException class with a specified error message.
/// </summary>
/// <param name="message">A message describing the exception.</param>
public UnexpectedEndOfStreamException(string message) : base(message) { }
/// <summary>
/// Initializes a new instance of the UnexpectedEndOfStreamException class with a specified
/// error message and a reference to the inner exception that is the cause of this exception.
/// </summary>
/// <param name="message">A message describing the exception.</param>
/// <param name="innerException">The inner exception</param>
public UnexpectedEndOfStreamException(string message, Exception innerException) : base(message, innerException) { }
/// <summary>
/// Initializes a new instance of the UnexpectedEndOfStreamException class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected UnexpectedEndOfStreamException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

View File

@@ -0,0 +1,66 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib
{
/// <summary>
/// Indicates that a value was outside of the expected range when decoding an input stream
/// </summary>
[Serializable]
public class ValueOutOfRangeException : StreamDecodingException
{
/// <summary>
/// Initializes a new instance of the ValueOutOfRangeException class naming the causing variable
/// </summary>
/// <param name="nameOfValue">Name of the variable, use: nameof()</param>
public ValueOutOfRangeException(string nameOfValue)
: base($"{nameOfValue} out of range") { }
/// <summary>
/// Initializes a new instance of the ValueOutOfRangeException class naming the causing variable,
/// its current value and the expected range.
/// </summary>
/// <param name="nameOfValue">Name of the variable, use: nameof()</param>
/// <param name="value">The invalid value</param>
/// <param name="maxValue">Expected maximum value</param>
/// <param name="minValue">Expected minimum value</param>
public ValueOutOfRangeException(string nameOfValue, long value, long maxValue, long minValue = 0)
: this(nameOfValue, value.ToString(), maxValue.ToString(), minValue.ToString()) { }
/// <summary>
/// Initializes a new instance of the ValueOutOfRangeException class naming the causing variable,
/// its current value and the expected range.
/// </summary>
/// <param name="nameOfValue">Name of the variable, use: nameof()</param>
/// <param name="value">The invalid value</param>
/// <param name="maxValue">Expected maximum value</param>
/// <param name="minValue">Expected minimum value</param>
public ValueOutOfRangeException(string nameOfValue, string value, string maxValue, string minValue = "0") :
base($"{nameOfValue} out of range: {value}, should be {minValue}..{maxValue}")
{ }
private ValueOutOfRangeException()
{
}
private ValueOutOfRangeException(string message, Exception innerException) : base(message, innerException)
{
}
/// <summary>
/// Initializes a new instance of the ValueOutOfRangeException class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected ValueOutOfRangeException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

View File

@@ -0,0 +1,545 @@
using System;
namespace ICSharpCode.SharpZipLib.Core
{
#region EventArgs
/// <summary>
/// Event arguments for scanning.
/// </summary>
public class ScanEventArgs : EventArgs
{
#region Constructors
/// <summary>
/// Initialise a new instance of <see cref="ScanEventArgs"/>
/// </summary>
/// <param name="name">The file or directory name.</param>
public ScanEventArgs(string name)
{
name_ = name;
}
#endregion Constructors
/// <summary>
/// The file or directory name for this event.
/// </summary>
public string Name
{
get { return name_; }
}
/// <summary>
/// Get / set a value indicating whether scanning should continue or not.
/// </summary>
public bool ContinueRunning
{
get { return continueRunning_; }
set { continueRunning_ = value; }
}
#region Instance Fields
private string name_;
private bool continueRunning_ = true;
#endregion Instance Fields
}
/// <summary>
/// Event arguments during processing of a single file or directory.
/// </summary>
public class ProgressEventArgs : EventArgs
{
#region Constructors
/// <summary>
/// Initialise a new instance of <see cref="ScanEventArgs"/>
/// </summary>
/// <param name="name">The file or directory name if known.</param>
/// <param name="processed">The number of bytes processed so far</param>
/// <param name="target">The total number of bytes to process, 0 if not known</param>
public ProgressEventArgs(string name, long processed, long target)
{
name_ = name;
processed_ = processed;
target_ = target;
}
#endregion Constructors
/// <summary>
/// The name for this event if known.
/// </summary>
public string Name
{
get { return name_; }
}
/// <summary>
/// Get / set a value indicating whether scanning should continue or not.
/// </summary>
public bool ContinueRunning
{
get { return continueRunning_; }
set { continueRunning_ = value; }
}
/// <summary>
/// Get a percentage representing how much of the <see cref="Target"></see> has been processed
/// </summary>
/// <value>0.0 to 100.0 percent; 0 if target is not known.</value>
public float PercentComplete
{
get
{
float result;
if (target_ <= 0)
{
result = 0;
}
else
{
result = ((float)processed_ / (float)target_) * 100.0f;
}
return result;
}
}
/// <summary>
/// The number of bytes processed so far
/// </summary>
public long Processed
{
get { return processed_; }
}
/// <summary>
/// The number of bytes to process.
/// </summary>
/// <remarks>Target may be 0 or negative if the value isn't known.</remarks>
public long Target
{
get { return target_; }
}
#region Instance Fields
private string name_;
private long processed_;
private long target_;
private bool continueRunning_ = true;
#endregion Instance Fields
}
/// <summary>
/// Event arguments for directories.
/// </summary>
public class DirectoryEventArgs : ScanEventArgs
{
#region Constructors
/// <summary>
/// Initialize an instance of <see cref="DirectoryEventArgs"></see>.
/// </summary>
/// <param name="name">The name for this directory.</param>
/// <param name="hasMatchingFiles">Flag value indicating if any matching files are contained in this directory.</param>
public DirectoryEventArgs(string name, bool hasMatchingFiles)
: base(name)
{
hasMatchingFiles_ = hasMatchingFiles;
}
#endregion Constructors
/// <summary>
/// Get a value indicating if the directory contains any matching files or not.
/// </summary>
public bool HasMatchingFiles
{
get { return hasMatchingFiles_; }
}
#region Instance Fields
private readonly bool hasMatchingFiles_;
#endregion Instance Fields
}
/// <summary>
/// Arguments passed when scan failures are detected.
/// </summary>
public class ScanFailureEventArgs : EventArgs
{
#region Constructors
/// <summary>
/// Initialise a new instance of <see cref="ScanFailureEventArgs"></see>
/// </summary>
/// <param name="name">The name to apply.</param>
/// <param name="e">The exception to use.</param>
public ScanFailureEventArgs(string name, Exception e)
{
name_ = name;
exception_ = e;
continueRunning_ = true;
}
#endregion Constructors
/// <summary>
/// The applicable name.
/// </summary>
public string Name
{
get { return name_; }
}
/// <summary>
/// The applicable exception.
/// </summary>
public Exception Exception
{
get { return exception_; }
}
/// <summary>
/// Get / set a value indicating whether scanning should continue.
/// </summary>
public bool ContinueRunning
{
get { return continueRunning_; }
set { continueRunning_ = value; }
}
#region Instance Fields
private string name_;
private Exception exception_;
private bool continueRunning_;
#endregion Instance Fields
}
#endregion EventArgs
#region Delegates
/// <summary>
/// Delegate invoked before starting to process a file.
/// </summary>
/// <param name="sender">The source of the event</param>
/// <param name="e">The event arguments.</param>
public delegate void ProcessFileHandler(object sender, ScanEventArgs e);
/// <summary>
/// Delegate invoked during processing of a file or directory
/// </summary>
/// <param name="sender">The source of the event</param>
/// <param name="e">The event arguments.</param>
public delegate void ProgressHandler(object sender, ProgressEventArgs e);
/// <summary>
/// Delegate invoked when a file has been completely processed.
/// </summary>
/// <param name="sender">The source of the event</param>
/// <param name="e">The event arguments.</param>
public delegate void CompletedFileHandler(object sender, ScanEventArgs e);
/// <summary>
/// Delegate invoked when a directory failure is detected.
/// </summary>
/// <param name="sender">The source of the event</param>
/// <param name="e">The event arguments.</param>
public delegate void DirectoryFailureHandler(object sender, ScanFailureEventArgs e);
/// <summary>
/// Delegate invoked when a file failure is detected.
/// </summary>
/// <param name="sender">The source of the event</param>
/// <param name="e">The event arguments.</param>
public delegate void FileFailureHandler(object sender, ScanFailureEventArgs e);
#endregion Delegates
/// <summary>
/// FileSystemScanner provides facilities for scanning files and directories.
/// </summary>
public class FileSystemScanner
{
#region Constructors
/// <summary>
/// Initialise a new instance of <see cref="FileSystemScanner"></see>
/// </summary>
/// <param name="filter">The <see cref="PathFilter">file filter</see> to apply when scanning.</param>
public FileSystemScanner(string filter)
{
fileFilter_ = new PathFilter(filter);
}
/// <summary>
/// Initialise a new instance of <see cref="FileSystemScanner"></see>
/// </summary>
/// <param name="fileFilter">The <see cref="PathFilter">file filter</see> to apply.</param>
/// <param name="directoryFilter">The <see cref="PathFilter"> directory filter</see> to apply.</param>
public FileSystemScanner(string fileFilter, string directoryFilter)
{
fileFilter_ = new PathFilter(fileFilter);
directoryFilter_ = new PathFilter(directoryFilter);
}
/// <summary>
/// Initialise a new instance of <see cref="FileSystemScanner"></see>
/// </summary>
/// <param name="fileFilter">The file <see cref="IScanFilter">filter</see> to apply.</param>
public FileSystemScanner(IScanFilter fileFilter)
{
fileFilter_ = fileFilter;
}
/// <summary>
/// Initialise a new instance of <see cref="FileSystemScanner"></see>
/// </summary>
/// <param name="fileFilter">The file <see cref="IScanFilter">filter</see> to apply.</param>
/// <param name="directoryFilter">The directory <see cref="IScanFilter">filter</see> to apply.</param>
public FileSystemScanner(IScanFilter fileFilter, IScanFilter directoryFilter)
{
fileFilter_ = fileFilter;
directoryFilter_ = directoryFilter;
}
#endregion Constructors
#region Delegates
/// <summary>
/// Delegate to invoke when a directory is processed.
/// </summary>
public event EventHandler<DirectoryEventArgs> ProcessDirectory;
/// <summary>
/// Delegate to invoke when a file is processed.
/// </summary>
public ProcessFileHandler ProcessFile;
/// <summary>
/// Delegate to invoke when processing for a file has finished.
/// </summary>
public CompletedFileHandler CompletedFile;
/// <summary>
/// Delegate to invoke when a directory failure is detected.
/// </summary>
public DirectoryFailureHandler DirectoryFailure;
/// <summary>
/// Delegate to invoke when a file failure is detected.
/// </summary>
public FileFailureHandler FileFailure;
#endregion Delegates
/// <summary>
/// Raise the DirectoryFailure event.
/// </summary>
/// <param name="directory">The directory name.</param>
/// <param name="e">The exception detected.</param>
private bool OnDirectoryFailure(string directory, Exception e)
{
DirectoryFailureHandler handler = DirectoryFailure;
bool result = (handler != null);
if (result)
{
var args = new ScanFailureEventArgs(directory, e);
handler(this, args);
alive_ = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Raise the FileFailure event.
/// </summary>
/// <param name="file">The file name.</param>
/// <param name="e">The exception detected.</param>
private bool OnFileFailure(string file, Exception e)
{
FileFailureHandler handler = FileFailure;
bool result = (handler != null);
if (result)
{
var args = new ScanFailureEventArgs(file, e);
FileFailure(this, args);
alive_ = args.ContinueRunning;
}
return result;
}
/// <summary>
/// Raise the ProcessFile event.
/// </summary>
/// <param name="file">The file name.</param>
private void OnProcessFile(string file)
{
ProcessFileHandler handler = ProcessFile;
if (handler != null)
{
var args = new ScanEventArgs(file);
handler(this, args);
alive_ = args.ContinueRunning;
}
}
/// <summary>
/// Raise the complete file event
/// </summary>
/// <param name="file">The file name</param>
private void OnCompleteFile(string file)
{
CompletedFileHandler handler = CompletedFile;
if (handler != null)
{
var args = new ScanEventArgs(file);
handler(this, args);
alive_ = args.ContinueRunning;
}
}
/// <summary>
/// Raise the ProcessDirectory event.
/// </summary>
/// <param name="directory">The directory name.</param>
/// <param name="hasMatchingFiles">Flag indicating if the directory has matching files.</param>
private void OnProcessDirectory(string directory, bool hasMatchingFiles)
{
EventHandler<DirectoryEventArgs> handler = ProcessDirectory;
if (handler != null)
{
var args = new DirectoryEventArgs(directory, hasMatchingFiles);
handler(this, args);
alive_ = args.ContinueRunning;
}
}
/// <summary>
/// Scan a directory.
/// </summary>
/// <param name="directory">The base directory to scan.</param>
/// <param name="recurse">True to recurse subdirectories, false to scan a single directory.</param>
public void Scan(string directory, bool recurse)
{
alive_ = true;
ScanDir(directory, recurse);
}
private void ScanDir(string directory, bool recurse)
{
try
{
string[] names = System.IO.Directory.GetFiles(directory);
bool hasMatch = false;
for (int fileIndex = 0; fileIndex < names.Length; ++fileIndex)
{
if (!fileFilter_.IsMatch(names[fileIndex]))
{
names[fileIndex] = null;
}
else
{
hasMatch = true;
}
}
OnProcessDirectory(directory, hasMatch);
if (alive_ && hasMatch)
{
foreach (string fileName in names)
{
try
{
if (fileName != null)
{
OnProcessFile(fileName);
if (!alive_)
{
break;
}
}
}
catch (Exception e)
{
if (!OnFileFailure(fileName, e))
{
throw;
}
}
}
}
}
catch (Exception e)
{
if (!OnDirectoryFailure(directory, e))
{
throw;
}
}
if (alive_ && recurse)
{
try
{
string[] names = System.IO.Directory.GetDirectories(directory);
foreach (string fulldir in names)
{
if ((directoryFilter_ == null) || (directoryFilter_.IsMatch(fulldir)))
{
ScanDir(fulldir, true);
if (!alive_)
{
break;
}
}
}
}
catch (Exception e)
{
if (!OnDirectoryFailure(directory, e))
{
throw;
}
}
}
}
#region Instance Fields
/// <summary>
/// The file filter currently in use.
/// </summary>
private IScanFilter fileFilter_;
/// <summary>
/// The directory filter currently in use.
/// </summary>
private IScanFilter directoryFilter_;
/// <summary>
/// Flag indicating if scanning should continue running.
/// </summary>
private bool alive_;
#endregion Instance Fields
}
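// Illustrative usage only (not part of the original source): a minimal sketch of driving
// FileSystemScanner with the delegates declared above. The directory path and filter
// expression are hypothetical.
internal static class FileSystemScannerExample
{
internal static void ListCsFiles(string rootDirectory)
{
// Include only names ending in ".cs" (see NameFilter for the filter syntax).
var scanner = new FileSystemScanner(@"+\.cs$");
scanner.ProcessFile += (sender, e) => Console.WriteLine(e.Name);
scanner.FileFailure += (sender, e) => Console.WriteLine($"Failed: {e.Name} ({e.Exception.Message})");
scanner.Scan(rootDirectory, recurse: true);
}
}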
}

View File

@@ -0,0 +1,22 @@
namespace ICSharpCode.SharpZipLib.Core
{
/// <summary>
/// INameTransform defines how file system names are transformed for use with archives, or vice versa.
/// </summary>
public interface INameTransform
{
/// <summary>
/// Given a file name determine the transformed value.
/// </summary>
/// <param name="name">The name to transform.</param>
/// <returns>The transformed file name.</returns>
string TransformFile(string name);
/// <summary>
/// Given a directory name determine the transformed value.
/// </summary>
/// <param name="name">The name to transform.</param>
/// <returns>The transformed directory name</returns>
string TransformDirectory(string name);
}
}

View File

@@ -0,0 +1,15 @@
namespace ICSharpCode.SharpZipLib.Core
{
/// <summary>
/// Scanning filters support filtering of names.
/// </summary>
public interface IScanFilter
{
/// <summary>
/// Test a name to see if it 'matches' the filter.
/// </summary>
/// <param name="name">The name to test.</param>
/// <returns>Returns true if the name matches the filter, false if it does not match.</returns>
bool IsMatch(string name);
}
}

View File

@@ -0,0 +1,53 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib.Core
{
/// <summary>
/// InvalidNameException is thrown for invalid names such as directory traversal paths and names with invalid characters
/// </summary>
[Serializable]
public class InvalidNameException : SharpZipBaseException
{
/// <summary>
/// Initializes a new instance of the InvalidNameException class with a default error message.
/// </summary>
public InvalidNameException() : base("An invalid name was specified")
{
}
/// <summary>
/// Initializes a new instance of the InvalidNameException class with a specified error message.
/// </summary>
/// <param name="message">A message describing the exception.</param>
public InvalidNameException(string message) : base(message)
{
}
/// <summary>
/// Initializes a new instance of the InvalidNameException class with a specified
/// error message and a reference to the inner exception that is the cause of this exception.
/// </summary>
/// <param name="message">A message describing the exception.</param>
/// <param name="innerException">The inner exception</param>
public InvalidNameException(string message, Exception innerException) : base(message, innerException)
{
}
/// <summary>
/// Initializes a new instance of the InvalidNameException class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected InvalidNameException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

View File

@@ -0,0 +1,284 @@
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
namespace ICSharpCode.SharpZipLib.Core
{
/// <summary>
/// NameFilter is a string matching class which allows for both positive and negative
/// matching.
/// A filter is a sequence of independent <see cref="Regex">regular expressions</see> separated by semi-colons ';'.
/// To include a semi-colon it may be quoted as in \;. Each expression can be prefixed by a plus '+' sign or
/// a minus '-' sign to denote that the expression is intended to include or exclude names.
/// If neither a plus nor a minus sign is found, include is the default.
/// A given name is tested for inclusion before checking exclusions. Only names matching an include spec
/// and not matching an exclude spec are deemed to match the filter.
/// An empty filter matches any name.
/// </summary>
/// <example>The following expression includes all names ending in '.dat' with the exception of 'dummy.dat'
/// "+\.dat$;-^dummy\.dat$"
/// </example>
public class NameFilter : IScanFilter
{
#region Constructors
/// <summary>
/// Construct an instance based on the filter expression passed
/// </summary>
/// <param name="filter">The filter expression.</param>
public NameFilter(string filter)
{
filter_ = filter;
inclusions_ = new List<Regex>();
exclusions_ = new List<Regex>();
Compile();
}
#endregion Constructors
/// <summary>
/// Test a string to see if it is a valid regular expression.
/// </summary>
/// <param name="expression">The expression to test.</param>
/// <returns>True if expression is a valid <see cref="System.Text.RegularExpressions.Regex"/> false otherwise.</returns>
public static bool IsValidExpression(string expression)
{
bool result = true;
try
{
var exp = new Regex(expression, RegexOptions.IgnoreCase | RegexOptions.Singleline);
}
catch (ArgumentException)
{
result = false;
}
return result;
}
/// <summary>
/// Test an expression to see if it is valid as a filter.
/// </summary>
/// <param name="toTest">The filter expression to test.</param>
/// <returns>True if the expression is valid, false otherwise.</returns>
public static bool IsValidFilterExpression(string toTest)
{
bool result = true;
try
{
if (toTest != null)
{
string[] items = SplitQuoted(toTest);
for (int i = 0; i < items.Length; ++i)
{
if ((items[i] != null) && (items[i].Length > 0))
{
string toCompile;
if (items[i][0] == '+')
{
toCompile = items[i].Substring(1, items[i].Length - 1);
}
else if (items[i][0] == '-')
{
toCompile = items[i].Substring(1, items[i].Length - 1);
}
else
{
toCompile = items[i];
}
var testRegex = new Regex(toCompile, RegexOptions.IgnoreCase | RegexOptions.Singleline);
}
}
}
}
catch (ArgumentException)
{
result = false;
}
return result;
}
/// <summary>
/// Split a string into its component pieces
/// </summary>
/// <param name="original">The original string</param>
/// <returns>Returns an array of <see cref="System.String"/> values containing the individual filter elements.</returns>
public static string[] SplitQuoted(string original)
{
char escape = '\\';
char[] separators = { ';' };
var result = new List<string>();
if (!string.IsNullOrEmpty(original))
{
int endIndex = -1;
var b = new StringBuilder();
while (endIndex < original.Length)
{
endIndex += 1;
if (endIndex >= original.Length)
{
result.Add(b.ToString());
}
else if (original[endIndex] == escape)
{
endIndex += 1;
if (endIndex >= original.Length)
{
throw new ArgumentException("Missing terminating escape character", nameof(original));
}
// include escape if this is not an escaped separator
if (Array.IndexOf(separators, original[endIndex]) < 0)
b.Append(escape);
b.Append(original[endIndex]);
}
else
{
if (Array.IndexOf(separators, original[endIndex]) >= 0)
{
result.Add(b.ToString());
b.Length = 0;
}
else
{
b.Append(original[endIndex]);
}
}
}
}
return result.ToArray();
}
/// <summary>
/// Convert this filter to its string equivalent.
/// </summary>
/// <returns>The string equivalent for this filter.</returns>
public override string ToString()
{
return filter_;
}
/// <summary>
/// Test a value to see if it is included by the filter.
/// </summary>
/// <param name="name">The value to test.</param>
/// <returns>True if the value is included, false otherwise.</returns>
public bool IsIncluded(string name)
{
bool result = false;
if (inclusions_.Count == 0)
{
result = true;
}
else
{
foreach (Regex r in inclusions_)
{
if (r.IsMatch(name))
{
result = true;
break;
}
}
}
return result;
}
/// <summary>
/// Test a value to see if it is excluded by the filter.
/// </summary>
/// <param name="name">The value to test.</param>
/// <returns>True if the value is excluded, false otherwise.</returns>
public bool IsExcluded(string name)
{
bool result = false;
foreach (Regex r in exclusions_)
{
if (r.IsMatch(name))
{
result = true;
break;
}
}
return result;
}
#region IScanFilter Members
/// <summary>
/// Test a value to see if it matches the filter.
/// </summary>
/// <param name="name">The value to test.</param>
/// <returns>True if the value matches, false otherwise.</returns>
public bool IsMatch(string name)
{
return (IsIncluded(name) && !IsExcluded(name));
}
#endregion IScanFilter Members
/// <summary>
/// Compile this filter.
/// </summary>
private void Compile()
{
// TODO: Check to see if combining RE's makes it faster/smaller.
// simple scheme would be to have one RE for inclusion and one for exclusion.
if (filter_ == null)
{
return;
}
string[] items = SplitQuoted(filter_);
for (int i = 0; i < items.Length; ++i)
{
if ((items[i] != null) && (items[i].Length > 0))
{
bool include = (items[i][0] != '-');
string toCompile;
if (items[i][0] == '+')
{
toCompile = items[i].Substring(1, items[i].Length - 1);
}
else if (items[i][0] == '-')
{
toCompile = items[i].Substring(1, items[i].Length - 1);
}
else
{
toCompile = items[i];
}
// NOTE: Regular expressions can fail to compile here for a number of reasons that cause an exception
// these are left unhandled here as the caller is responsible for ensuring all is valid.
// several functions IsValidFilterExpression and IsValidExpression are provided for such checking
if (include)
{
inclusions_.Add(new Regex(toCompile, RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline));
}
else
{
exclusions_.Add(new Regex(toCompile, RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline));
}
}
}
}
#region Instance Fields
private string filter_;
private List<Regex> inclusions_;
private List<Regex> exclusions_;
#endregion Instance Fields
}
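// Illustrative usage only (not part of the original source): a minimal sketch exercising the
// filter syntax described in the class documentation above, using its example expression
// "+\.dat$;-^dummy\.dat$". The helper name is hypothetical.
internal static class NameFilterExample
{
internal static void Demo()
{
var filter = new NameFilter(@"+\.dat$;-^dummy\.dat$");
Console.WriteLine(filter.IsMatch("values.dat"));   // True  - included, not excluded
Console.WriteLine(filter.IsMatch("dummy.dat"));    // False - matches the exclusion
Console.WriteLine(filter.IsMatch("readme.txt"));   // False - matches no inclusion
}
}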
}

View File

@@ -0,0 +1,318 @@
using System;
using System.IO;
namespace ICSharpCode.SharpZipLib.Core
{
/// <summary>
/// PathFilter filters directories and files using a form of <see cref="System.Text.RegularExpressions.Regex">regular expressions</see>
/// by full path name.
/// See <see cref="NameFilter">NameFilter</see> for more detail on filtering.
/// </summary>
public class PathFilter : IScanFilter
{
#region Constructors
/// <summary>
/// Initialise a new instance of <see cref="PathFilter"></see>.
/// </summary>
/// <param name="filter">The <see cref="NameFilter">filter</see> expression to apply.</param>
public PathFilter(string filter)
{
nameFilter_ = new NameFilter(filter);
}
#endregion Constructors
#region IScanFilter Members
/// <summary>
/// Test a name to see if it matches the filter.
/// </summary>
/// <param name="name">The name to test.</param>
/// <returns>True if the name matches, false otherwise.</returns>
/// <remarks><see cref="Path.GetFullPath(string)"/> is used to get the full path before matching.</remarks>
public virtual bool IsMatch(string name)
{
bool result = false;
if (name != null)
{
string cooked = (name.Length > 0) ? Path.GetFullPath(name) : "";
result = nameFilter_.IsMatch(cooked);
}
return result;
}
#endregion IScanFilter Members
#region Instance Fields
private readonly NameFilter nameFilter_;
#endregion Instance Fields
}
/// <summary>
/// ExtendedPathFilter filters based on name, file size, and the last write time of the file.
/// </summary>
/// <remarks>Provides an example of how to customise filtering.</remarks>
public class ExtendedPathFilter : PathFilter
{
#region Constructors
/// <summary>
/// Initialise a new instance of ExtendedPathFilter.
/// </summary>
/// <param name="filter">The filter to apply.</param>
/// <param name="minSize">The minimum file size to include.</param>
/// <param name="maxSize">The maximum file size to include.</param>
public ExtendedPathFilter(string filter,
long minSize, long maxSize)
: base(filter)
{
MinSize = minSize;
MaxSize = maxSize;
}
/// <summary>
/// Initialise a new instance of ExtendedPathFilter.
/// </summary>
/// <param name="filter">The filter to apply.</param>
/// <param name="minDate">The minimum <see cref="DateTime"/> to include.</param>
/// <param name="maxDate">The maximum <see cref="DateTime"/> to include.</param>
public ExtendedPathFilter(string filter,
DateTime minDate, DateTime maxDate)
: base(filter)
{
MinDate = minDate;
MaxDate = maxDate;
}
/// <summary>
/// Initialise a new instance of ExtendedPathFilter.
/// </summary>
/// <param name="filter">The filter to apply.</param>
/// <param name="minSize">The minimum file size to include.</param>
/// <param name="maxSize">The maximum file size to include.</param>
/// <param name="minDate">The minimum <see cref="DateTime"/> to include.</param>
/// <param name="maxDate">The maximum <see cref="DateTime"/> to include.</param>
public ExtendedPathFilter(string filter,
long minSize, long maxSize,
DateTime minDate, DateTime maxDate)
: base(filter)
{
MinSize = minSize;
MaxSize = maxSize;
MinDate = minDate;
MaxDate = maxDate;
}
#endregion Constructors
#region IScanFilter Members
/// <summary>
/// Test a filename to see if it matches the filter.
/// </summary>
/// <param name="name">The filename to test.</param>
/// <returns>True if the filter matches, false otherwise.</returns>
/// <exception cref="System.IO.FileNotFoundException">The file named by <paramref name="name"/> doesn't exist</exception>
public override bool IsMatch(string name)
{
bool result = base.IsMatch(name);
if (result)
{
var fileInfo = new FileInfo(name);
result =
(MinSize <= fileInfo.Length) &&
(MaxSize >= fileInfo.Length) &&
(MinDate <= fileInfo.LastWriteTime) &&
(MaxDate >= fileInfo.LastWriteTime)
;
}
return result;
}
#endregion IScanFilter Members
#region Properties
/// <summary>
/// Get/set the minimum size/length for a file that will match this filter.
/// </summary>
/// <remarks>The default value is zero.</remarks>
/// <exception cref="ArgumentOutOfRangeException">value is less than zero; greater than <see cref="MaxSize"/></exception>
public long MinSize
{
get { return minSize_; }
set
{
if ((value < 0) || (maxSize_ < value))
{
throw new ArgumentOutOfRangeException(nameof(value));
}
minSize_ = value;
}
}
/// <summary>
/// Get/set the maximum size/length for a file that will match this filter.
/// </summary>
/// <remarks>The default value is <see cref="System.Int64.MaxValue"/></remarks>
/// <exception cref="ArgumentOutOfRangeException">value is less than zero or less than <see cref="MinSize"/></exception>
public long MaxSize
{
get { return maxSize_; }
set
{
if ((value < 0) || (minSize_ > value))
{
throw new ArgumentOutOfRangeException(nameof(value));
}
maxSize_ = value;
}
}
/// <summary>
/// Get/set the minimum <see cref="DateTime"/> value that will match for this filter.
/// </summary>
/// <remarks>Files with a LastWrite time less than this value are excluded by the filter.</remarks>
public DateTime MinDate
{
get
{
return minDate_;
}
set
{
if (value > maxDate_)
{
throw new ArgumentOutOfRangeException(nameof(value), "Exceeds MaxDate");
}
minDate_ = value;
}
}
/// <summary>
/// Get/set the maximum <see cref="DateTime"/> value that will match for this filter.
/// </summary>
/// <remarks>Files with a LastWrite time greater than this value are excluded by the filter.</remarks>
public DateTime MaxDate
{
get
{
return maxDate_;
}
set
{
if (minDate_ > value)
{
throw new ArgumentOutOfRangeException(nameof(value), "Exceeds MinDate");
}
maxDate_ = value;
}
}
#endregion Properties
#region Instance Fields
private long minSize_;
private long maxSize_ = long.MaxValue;
private DateTime minDate_ = DateTime.MinValue;
private DateTime maxDate_ = DateTime.MaxValue;
#endregion Instance Fields
}
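// Illustrative usage only (not part of the original source): a minimal sketch of
// ExtendedPathFilter combining the name filter with size and last-write-time bounds.
// The file name, size limit and dates are hypothetical.
internal static class ExtendedPathFilterExample
{
internal static bool IsRecentSmallLog(string fileName)
{
var filter = new ExtendedPathFilter(@"+\.log$",
minSize: 0, maxSize: 1024 * 1024,
minDate: DateTime.Now.AddDays(-7), maxDate: DateTime.MaxValue);
// Note: the file must exist, otherwise reading its length will throw.
return filter.IsMatch(fileName);
}
}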
/// <summary>
/// NameAndSizeFilter filters based on name and file size.
/// </summary>
/// <remarks>A sample showing how filters might be extended.</remarks>
[Obsolete("Use ExtendedPathFilter instead")]
public class NameAndSizeFilter : PathFilter
{
/// <summary>
/// Initialise a new instance of NameAndSizeFilter.
/// </summary>
/// <param name="filter">The filter to apply.</param>
/// <param name="minSize">The minimum file size to include.</param>
/// <param name="maxSize">The maximum file size to include.</param>
public NameAndSizeFilter(string filter, long minSize, long maxSize)
: base(filter)
{
MinSize = minSize;
MaxSize = maxSize;
}
/// <summary>
/// Test a filename to see if it matches the filter.
/// </summary>
/// <param name="name">The filename to test.</param>
/// <returns>True if the filter matches, false otherwise.</returns>
public override bool IsMatch(string name)
{
bool result = base.IsMatch(name);
if (result)
{
var fileInfo = new FileInfo(name);
long length = fileInfo.Length;
result =
(MinSize <= length) &&
(MaxSize >= length);
}
return result;
}
/// <summary>
/// Get/set the minimum size for a file that will match this filter.
/// </summary>
public long MinSize
{
get { return minSize_; }
set
{
if ((value < 0) || (maxSize_ < value))
{
throw new ArgumentOutOfRangeException(nameof(value));
}
minSize_ = value;
}
}
/// <summary>
/// Get/set the maximum size for a file that will match this filter.
/// </summary>
public long MaxSize
{
get { return maxSize_; }
set
{
if ((value < 0) || (minSize_ > value))
{
throw new ArgumentOutOfRangeException(nameof(value));
}
maxSize_ = value;
}
}
#region Instance Fields
private long minSize_;
private long maxSize_ = long.MaxValue;
#endregion Instance Fields
}
}
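// --- Usage sketch (added for illustration; not part of the library source) ---
// Shows how the size and date bounds above combine with the name pattern in IsMatch.
// Assumes the ExtendedPathFilter(string filter, long minSize, long maxSize) constructor
// declared earlier in this file; the date bound is applied through the MinDate property.
namespace ICSharpCode.SharpZipLib.Core.Samples
{
using System;
internal static class ExtendedPathFilterSketch
{
internal static bool KeepRecentLog(string path)
{
var filter = new ExtendedPathFilter(@"\.log$", 1, 10 * 1024 * 1024)
{
// Ignore files older than 30 days; MaxDate stays at DateTime.MaxValue.
MinDate = DateTime.Now.AddDays(-30)
};
// IsMatch also checks the file's actual Length and LastWriteTime on disk.
return filter.IsMatch(path);
}
}
}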

View File

@@ -0,0 +1,54 @@
using System;
using System.IO;
using System.Linq;
namespace ICSharpCode.SharpZipLib.Core
{
/// <summary>
/// PathUtils provides simple utilities for handling paths.
/// </summary>
public static class PathUtils
{
/// <summary>
/// Remove any path root present in the path
/// </summary>
/// <param name="path">A <see cref="string"/> containing path information.</param>
/// <returns>The path with the root removed if it was present; path otherwise.</returns>
public static string DropPathRoot(string path)
{
var invalidChars = Path.GetInvalidPathChars();
// If the first character after the root is a ':', .NET < 4.6.2 throws
var cleanRootSep = path.Length >= 3 && path[1] == ':' && path[2] == ':';
// Replace any invalid path characters with '_' to prevent Path.GetPathRoot from throwing.
// Only pass the first 258 (should be 260, but that still throws for some reason) characters
// as .NET < 4.6.2 throws on longer paths
var cleanPath = new string(path.Take(258)
.Select( (c, i) => invalidChars.Contains(c) || (i == 2 && cleanRootSep) ? '_' : c).ToArray());
var stripLength = Path.GetPathRoot(cleanPath).Length;
while (path.Length > stripLength && (path[stripLength] == '/' || path[stripLength] == '\\')) stripLength++;
return path.Substring(stripLength);
}
/// <summary>
/// Returns a random file name in the user's temporary directory, or in the directory of <paramref name="original"/> if specified
/// </summary>
/// <param name="original">If specified, used as the base file name for the temporary file</param>
/// <returns>Returns a temporary file name</returns>
public static string GetTempFileName(string original = null)
{
string fileName;
var tempPath = Path.GetTempPath();
do
{
fileName = original == null
? Path.Combine(tempPath, Path.GetRandomFileName())
: $"{original}.{Path.GetRandomFileName()}";
} while (File.Exists(fileName));
return fileName;
}
}
}
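// --- Usage sketch (added for illustration; not part of the library source) ---
// Demonstrates the two helpers above: stripping a path root and generating temp file names.
namespace ICSharpCode.SharpZipLib.Core.Samples
{
using System;
internal static class PathUtilsSketch
{
internal static void Run()
{
// Root plus any leading separators is removed: "data\archive\file.txt"
Console.WriteLine(PathUtils.DropPathRoot(@"C:\data\archive\file.txt"));
// Unix-style roots work the same way: "var/tmp/file.txt"
Console.WriteLine(PathUtils.DropPathRoot("/var/tmp/file.txt"));
// A unique name in the temp directory, or one derived from an existing file.
string anonymous = PathUtils.GetTempFileName();
string derived = PathUtils.GetTempFileName(@"C:\data\archive.zip"); // e.g. C:\data\archive.zip.<random>
Console.WriteLine(anonymous + " / " + derived);
}
}
}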

View File

@@ -0,0 +1,284 @@
using System;
using System.IO;
namespace ICSharpCode.SharpZipLib.Core
{
/// <summary>
/// Provides simple <see cref="Stream"/> utilities.
/// </summary>
public sealed class StreamUtils
{
/// <summary>
/// Read from a <see cref="Stream"/> ensuring all the required data is read.
/// </summary>
/// <param name="stream">The stream to read.</param>
/// <param name="buffer">The buffer to fill.</param>
/// <seealso cref="ReadFully(Stream,byte[],int,int)"/>
static public void ReadFully(Stream stream, byte[] buffer)
{
ReadFully(stream, buffer, 0, buffer.Length);
}
/// <summary>
/// Read from a <see cref="Stream"/> ensuring all the required data is read.
/// </summary>
/// <param name="stream">The stream to read data from.</param>
/// <param name="buffer">The buffer to store data in.</param>
/// <param name="offset">The offset at which to begin storing data.</param>
/// <param name="count">The number of bytes of data to store.</param>
/// <exception cref="ArgumentNullException">Required parameter is null</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="offset"/> and or <paramref name="count"/> are invalid.</exception>
/// <exception cref="EndOfStreamException">End of stream is encountered before all the data has been read.</exception>
static public void ReadFully(Stream stream, byte[] buffer, int offset, int count)
{
if (stream == null)
{
throw new ArgumentNullException(nameof(stream));
}
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
// Offset can equal length when buffer and count are 0.
if ((offset < 0) || (offset > buffer.Length))
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if ((count < 0) || (offset + count > buffer.Length))
{
throw new ArgumentOutOfRangeException(nameof(count));
}
while (count > 0)
{
int readCount = stream.Read(buffer, offset, count);
if (readCount <= 0)
{
throw new EndOfStreamException();
}
offset += readCount;
count -= readCount;
}
}
/// <summary>
/// Read as much data as possible from a <see cref="Stream"/>, up to the requested number of bytes
/// </summary>
/// <param name="stream">The stream to read data from.</param>
/// <param name="buffer">The buffer to store data in.</param>
/// <param name="offset">The offset at which to begin storing data.</param>
/// <param name="count">The number of bytes of data to store.</param>
/// <exception cref="ArgumentNullException">Required parameter is null</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="offset"/> and or <paramref name="count"/> are invalid.</exception>
static public int ReadRequestedBytes(Stream stream, byte[] buffer, int offset, int count)
{
if (stream == null)
{
throw new ArgumentNullException(nameof(stream));
}
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
// Offset can equal length when buffer and count are 0.
if ((offset < 0) || (offset > buffer.Length))
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if ((count < 0) || (offset + count > buffer.Length))
{
throw new ArgumentOutOfRangeException(nameof(count));
}
int totalReadCount = 0;
while (count > 0)
{
int readCount = stream.Read(buffer, offset, count);
if (readCount <= 0)
{
break;
}
offset += readCount;
count -= readCount;
totalReadCount += readCount;
}
return totalReadCount;
}
/// <summary>
/// Copy the contents of one <see cref="Stream"/> to another.
/// </summary>
/// <param name="source">The stream to source data from.</param>
/// <param name="destination">The stream to write data to.</param>
/// <param name="buffer">The buffer to use during copying.</param>
static public void Copy(Stream source, Stream destination, byte[] buffer)
{
if (source == null)
{
throw new ArgumentNullException(nameof(source));
}
if (destination == null)
{
throw new ArgumentNullException(nameof(destination));
}
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
// Ensure a reasonable size of buffer is used without being prohibitive.
if (buffer.Length < 128)
{
throw new ArgumentException("Buffer is too small", nameof(buffer));
}
bool copying = true;
while (copying)
{
int bytesRead = source.Read(buffer, 0, buffer.Length);
if (bytesRead > 0)
{
destination.Write(buffer, 0, bytesRead);
}
else
{
destination.Flush();
copying = false;
}
}
}
/// <summary>
/// Copy the contents of one <see cref="Stream"/> to another.
/// </summary>
/// <param name="source">The stream to source data from.</param>
/// <param name="destination">The stream to write data to.</param>
/// <param name="buffer">The buffer to use during copying.</param>
/// <param name="progressHandler">The <see cref="ProgressHandler">progress handler delegate</see> to use.</param>
/// <param name="updateInterval">The minimum <see cref="TimeSpan"/> between progress updates.</param>
/// <param name="sender">The source for this event.</param>
/// <param name="name">The name to use with the event.</param>
/// <remarks>This form is specialised for use within #Zip to support events during archive operations.</remarks>
static public void Copy(Stream source, Stream destination,
byte[] buffer, ProgressHandler progressHandler, TimeSpan updateInterval, object sender, string name)
{
Copy(source, destination, buffer, progressHandler, updateInterval, sender, name, -1);
}
/// <summary>
/// Copy the contents of one <see cref="Stream"/> to another.
/// </summary>
/// <param name="source">The stream to source data from.</param>
/// <param name="destination">The stream to write data to.</param>
/// <param name="buffer">The buffer to use during copying.</param>
/// <param name="progressHandler">The <see cref="ProgressHandler">progress handler delegate</see> to use.</param>
/// <param name="updateInterval">The minimum <see cref="TimeSpan"/> between progress updates.</param>
/// <param name="sender">The source for this event.</param>
/// <param name="name">The name to use with the event.</param>
/// <param name="fixedTarget">A predetermined fixed target value to use with progress updates.
/// If the value is negative the target is calculated by looking at the stream.</param>
/// <remarks>This form is specialised for use within #Zip to support events during archive operations.</remarks>
static public void Copy(Stream source, Stream destination,
byte[] buffer,
ProgressHandler progressHandler, TimeSpan updateInterval,
object sender, string name, long fixedTarget)
{
if (source == null)
{
throw new ArgumentNullException(nameof(source));
}
if (destination == null)
{
throw new ArgumentNullException(nameof(destination));
}
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
// Ensure a reasonable size of buffer is used without being prohibitive.
if (buffer.Length < 128)
{
throw new ArgumentException("Buffer is too small", nameof(buffer));
}
if (progressHandler == null)
{
throw new ArgumentNullException(nameof(progressHandler));
}
bool copying = true;
DateTime marker = DateTime.Now;
long processed = 0;
long target = 0;
if (fixedTarget >= 0)
{
target = fixedTarget;
}
else if (source.CanSeek)
{
target = source.Length - source.Position;
}
// Always fire 0% progress..
var args = new ProgressEventArgs(name, processed, target);
progressHandler(sender, args);
bool progressFired = true;
while (copying)
{
int bytesRead = source.Read(buffer, 0, buffer.Length);
if (bytesRead > 0)
{
processed += bytesRead;
progressFired = false;
destination.Write(buffer, 0, bytesRead);
}
else
{
destination.Flush();
copying = false;
}
if (DateTime.Now - marker > updateInterval)
{
progressFired = true;
marker = DateTime.Now;
args = new ProgressEventArgs(name, processed, target);
progressHandler(sender, args);
copying = args.ContinueRunning;
}
}
if (!progressFired)
{
args = new ProgressEventArgs(name, processed, target);
progressHandler(sender, args);
}
}
/// <summary>
/// Initialise an instance of <see cref="StreamUtils"></see>
/// </summary>
private StreamUtils()
{
// Do nothing.
}
}
}
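// --- Usage sketch (added for illustration; not part of the library source) ---
// Copies one stream to another with periodic progress callbacks via the overload above.
// Assumes ProgressEventArgs (defined elsewhere in Core) exposes Name and Processed,
// mirroring the constructor arguments used in Copy.
namespace ICSharpCode.SharpZipLib.Core.Samples
{
using System;
using System.IO;
internal static class StreamUtilsSketch
{
internal static void CopyWithProgress(Stream source, Stream destination)
{
StreamUtils.Copy(
source,
destination,
new byte[4096],
(sender, e) => Console.WriteLine(e.Name + ": " + e.Processed + " bytes"),
TimeSpan.FromSeconds(1), // progress is reported at most once per second
null, // sender passed through to the handler
"copy");
}
}
}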

View File

@@ -0,0 +1,487 @@
using ICSharpCode.SharpZipLib.Checksum;
using System;
using System.Security.Cryptography;
namespace ICSharpCode.SharpZipLib.Encryption
{
/// <summary>
/// PkzipClassic embodies the classic or original encryption facilities used in Pkzip archives.
/// While it has been superseded by more recent and more powerful algorithms, it is still in use and
/// is viable for preventing casual snooping.
/// </summary>
public abstract class PkzipClassic : SymmetricAlgorithm
{
/// <summary>
/// Generates new encryption keys based on given seed
/// </summary>
/// <param name="seed">The seed value to initialise keys with.</param>
/// <returns>A new key value.</returns>
static public byte[] GenerateKeys(byte[] seed)
{
if (seed == null)
{
throw new ArgumentNullException(nameof(seed));
}
if (seed.Length == 0)
{
throw new ArgumentException("Length is zero", nameof(seed));
}
uint[] newKeys = {
0x12345678,
0x23456789,
0x34567890
};
for (int i = 0; i < seed.Length; ++i)
{
newKeys[0] = Crc32.ComputeCrc32(newKeys[0], seed[i]);
newKeys[1] = newKeys[1] + (byte)newKeys[0];
newKeys[1] = newKeys[1] * 134775813 + 1;
newKeys[2] = Crc32.ComputeCrc32(newKeys[2], (byte)(newKeys[1] >> 24));
}
byte[] result = new byte[12];
result[0] = (byte)(newKeys[0] & 0xff);
result[1] = (byte)((newKeys[0] >> 8) & 0xff);
result[2] = (byte)((newKeys[0] >> 16) & 0xff);
result[3] = (byte)((newKeys[0] >> 24) & 0xff);
result[4] = (byte)(newKeys[1] & 0xff);
result[5] = (byte)((newKeys[1] >> 8) & 0xff);
result[6] = (byte)((newKeys[1] >> 16) & 0xff);
result[7] = (byte)((newKeys[1] >> 24) & 0xff);
result[8] = (byte)(newKeys[2] & 0xff);
result[9] = (byte)((newKeys[2] >> 8) & 0xff);
result[10] = (byte)((newKeys[2] >> 16) & 0xff);
result[11] = (byte)((newKeys[2] >> 24) & 0xff);
return result;
}
}
/// <summary>
/// PkzipClassicCryptoBase provides the low level facilities for encryption
/// and decryption using the PkzipClassic algorithm.
/// </summary>
internal class PkzipClassicCryptoBase
{
/// <summary>
/// Transform a single byte
/// </summary>
/// <returns>
/// The transformed value
/// </returns>
protected byte TransformByte()
{
uint temp = ((keys[2] & 0xFFFF) | 2);
return (byte)((temp * (temp ^ 1)) >> 8);
}
/// <summary>
/// Set the key schedule for encryption/decryption.
/// </summary>
/// <param name="keyData">The data use to set the keys from.</param>
protected void SetKeys(byte[] keyData)
{
if (keyData == null)
{
throw new ArgumentNullException(nameof(keyData));
}
if (keyData.Length != 12)
{
throw new InvalidOperationException("Key length is not valid");
}
keys = new uint[3];
keys[0] = (uint)((keyData[3] << 24) | (keyData[2] << 16) | (keyData[1] << 8) | keyData[0]);
keys[1] = (uint)((keyData[7] << 24) | (keyData[6] << 16) | (keyData[5] << 8) | keyData[4]);
keys[2] = (uint)((keyData[11] << 24) | (keyData[10] << 16) | (keyData[9] << 8) | keyData[8]);
}
/// <summary>
/// Update encryption keys
/// </summary>
protected void UpdateKeys(byte ch)
{
keys[0] = Crc32.ComputeCrc32(keys[0], ch);
keys[1] = keys[1] + (byte)keys[0];
keys[1] = keys[1] * 134775813 + 1;
keys[2] = Crc32.ComputeCrc32(keys[2], (byte)(keys[1] >> 24));
}
/// <summary>
/// Reset the internal state.
/// </summary>
protected void Reset()
{
keys[0] = 0;
keys[1] = 0;
keys[2] = 0;
}
#region Instance Fields
private uint[] keys;
#endregion Instance Fields
}
/// <summary>
/// PkzipClassic CryptoTransform for encryption.
/// </summary>
internal class PkzipClassicEncryptCryptoTransform : PkzipClassicCryptoBase, ICryptoTransform
{
/// <summary>
/// Initialise a new instance of <see cref="PkzipClassicEncryptCryptoTransform"></see>
/// </summary>
/// <param name="keyBlock">The key block to use.</param>
internal PkzipClassicEncryptCryptoTransform(byte[] keyBlock)
{
SetKeys(keyBlock);
}
#region ICryptoTransform Members
/// <summary>
/// Transforms the specified region of the specified byte array.
/// </summary>
/// <param name="inputBuffer">The input for which to compute the transform.</param>
/// <param name="inputOffset">The offset into the byte array from which to begin using data.</param>
/// <param name="inputCount">The number of bytes in the byte array to use as data.</param>
/// <returns>The computed transform.</returns>
public byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount)
{
byte[] result = new byte[inputCount];
TransformBlock(inputBuffer, inputOffset, inputCount, result, 0);
return result;
}
/// <summary>
/// Transforms the specified region of the input byte array and copies
/// the resulting transform to the specified region of the output byte array.
/// </summary>
/// <param name="inputBuffer">The input for which to compute the transform.</param>
/// <param name="inputOffset">The offset into the input byte array from which to begin using data.</param>
/// <param name="inputCount">The number of bytes in the input byte array to use as data.</param>
/// <param name="outputBuffer">The output to which to write the transform.</param>
/// <param name="outputOffset">The offset into the output byte array from which to begin writing data.</param>
/// <returns>The number of bytes written.</returns>
public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset)
{
for (int i = inputOffset; i < inputOffset + inputCount; ++i)
{
byte oldbyte = inputBuffer[i];
outputBuffer[outputOffset++] = (byte)(inputBuffer[i] ^ TransformByte());
UpdateKeys(oldbyte);
}
return inputCount;
}
/// <summary>
/// Gets a value indicating whether the current transform can be reused.
/// </summary>
public bool CanReuseTransform
{
get
{
return true;
}
}
/// <summary>
/// Gets the size of the input data blocks in bytes.
/// </summary>
public int InputBlockSize
{
get
{
return 1;
}
}
/// <summary>
/// Gets the size of the output data blocks in bytes.
/// </summary>
public int OutputBlockSize
{
get
{
return 1;
}
}
/// <summary>
/// Gets a value indicating whether multiple blocks can be transformed.
/// </summary>
public bool CanTransformMultipleBlocks
{
get
{
return true;
}
}
#endregion ICryptoTransform Members
#region IDisposable Members
/// <summary>
/// Cleanup internal state.
/// </summary>
public void Dispose()
{
Reset();
}
#endregion IDisposable Members
}
/// <summary>
/// PkzipClassic CryptoTransform for decryption.
/// </summary>
internal class PkzipClassicDecryptCryptoTransform : PkzipClassicCryptoBase, ICryptoTransform
{
/// <summary>
/// Initialise a new instance of <see cref="PkzipClassicDecryptCryptoTransform"></see>.
/// </summary>
/// <param name="keyBlock">The key block to decrypt with.</param>
internal PkzipClassicDecryptCryptoTransform(byte[] keyBlock)
{
SetKeys(keyBlock);
}
#region ICryptoTransform Members
/// <summary>
/// Transforms the specified region of the specified byte array.
/// </summary>
/// <param name="inputBuffer">The input for which to compute the transform.</param>
/// <param name="inputOffset">The offset into the byte array from which to begin using data.</param>
/// <param name="inputCount">The number of bytes in the byte array to use as data.</param>
/// <returns>The computed transform.</returns>
public byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount)
{
byte[] result = new byte[inputCount];
TransformBlock(inputBuffer, inputOffset, inputCount, result, 0);
return result;
}
/// <summary>
/// Transforms the specified region of the input byte array and copies
/// the resulting transform to the specified region of the output byte array.
/// </summary>
/// <param name="inputBuffer">The input for which to compute the transform.</param>
/// <param name="inputOffset">The offset into the input byte array from which to begin using data.</param>
/// <param name="inputCount">The number of bytes in the input byte array to use as data.</param>
/// <param name="outputBuffer">The output to which to write the transform.</param>
/// <param name="outputOffset">The offset into the output byte array from which to begin writing data.</param>
/// <returns>The number of bytes written.</returns>
public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset)
{
for (int i = inputOffset; i < inputOffset + inputCount; ++i)
{
var newByte = (byte)(inputBuffer[i] ^ TransformByte());
outputBuffer[outputOffset++] = newByte;
UpdateKeys(newByte);
}
return inputCount;
}
/// <summary>
/// Gets a value indicating whether the current transform can be reused.
/// </summary>
public bool CanReuseTransform
{
get
{
return true;
}
}
/// <summary>
/// Gets the size of the input data blocks in bytes.
/// </summary>
public int InputBlockSize
{
get
{
return 1;
}
}
/// <summary>
/// Gets the size of the output data blocks in bytes.
/// </summary>
public int OutputBlockSize
{
get
{
return 1;
}
}
/// <summary>
/// Gets a value indicating whether multiple blocks can be transformed.
/// </summary>
public bool CanTransformMultipleBlocks
{
get
{
return true;
}
}
#endregion ICryptoTransform Members
#region IDisposable Members
/// <summary>
/// Cleanup internal state.
/// </summary>
public void Dispose()
{
Reset();
}
#endregion IDisposable Members
}
/// <summary>
/// Defines a wrapper object to access the Pkzip algorithm.
/// This class cannot be inherited.
/// </summary>
public sealed class PkzipClassicManaged : PkzipClassic
{
/// <summary>
/// Get / set the applicable block size in bits.
/// </summary>
/// <remarks>The only valid block size is 8.</remarks>
public override int BlockSize
{
get
{
return 8;
}
set
{
if (value != 8)
{
throw new CryptographicException("Block size is invalid");
}
}
}
/// <summary>
/// Get an array of legal <see cref="KeySizes">key sizes.</see>
/// </summary>
public override KeySizes[] LegalKeySizes
{
get
{
KeySizes[] keySizes = new KeySizes[1];
keySizes[0] = new KeySizes(12 * 8, 12 * 8, 0);
return keySizes;
}
}
/// <summary>
/// Generate an initial vector.
/// </summary>
public override void GenerateIV()
{
// Do nothing.
}
/// <summary>
/// Get an array of legal <see cref="KeySizes">block sizes</see>.
/// </summary>
public override KeySizes[] LegalBlockSizes
{
get
{
KeySizes[] keySizes = new KeySizes[1];
keySizes[0] = new KeySizes(1 * 8, 1 * 8, 0);
return keySizes;
}
}
/// <summary>
/// Get / set the key value applicable.
/// </summary>
public override byte[] Key
{
get
{
if (key_ == null)
{
GenerateKey();
}
return (byte[])key_.Clone();
}
set
{
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
if (value.Length != 12)
{
throw new CryptographicException("Key size is illegal");
}
key_ = (byte[])value.Clone();
}
}
/// <summary>
/// Generate a new random key.
/// </summary>
public override void GenerateKey()
{
key_ = new byte[12];
using (var rng = new RNGCryptoServiceProvider())
{
rng.GetBytes(key_);
}
}
/// <summary>
/// Create an encryptor.
/// </summary>
/// <param name="rgbKey">The key to use for this encryptor.</param>
/// <param name="rgbIV">Initialisation vector for the new encryptor.</param>
/// <returns>Returns a new PkzipClassic encryptor</returns>
public override ICryptoTransform CreateEncryptor(
byte[] rgbKey,
byte[] rgbIV)
{
key_ = rgbKey;
return new PkzipClassicEncryptCryptoTransform(Key);
}
/// <summary>
/// Create a decryptor.
/// </summary>
/// <param name="rgbKey">Keys to use for this new decryptor.</param>
/// <param name="rgbIV">Initialisation vector for the new decryptor.</param>
/// <returns>Returns a new decryptor.</returns>
public override ICryptoTransform CreateDecryptor(
byte[] rgbKey,
byte[] rgbIV)
{
key_ = rgbKey;
return new PkzipClassicDecryptCryptoTransform(Key);
}
#region Instance Fields
private byte[] key_;
#endregion Instance Fields
}
}
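// --- Usage sketch (added for illustration; not part of the library source) ---
// Round-trips a buffer through the classic PKZIP stream cipher: the 12-byte key block is
// derived from a password with GenerateKeys, then fed to the encrypt/decrypt transforms.
namespace ICSharpCode.SharpZipLib.Encryption.Samples
{
using System.Linq;
using System.Text;
internal static class PkzipClassicSketch
{
internal static bool RoundTrip(string password, byte[] plain)
{
byte[] keyBlock = PkzipClassic.GenerateKeys(Encoding.ASCII.GetBytes(password));
var cipher = new PkzipClassicManaged();
// The IV argument is ignored by PkzipClassicManaged, so null is fine here.
byte[] encrypted = cipher.CreateEncryptor(keyBlock, null).TransformFinalBlock(plain, 0, plain.Length);
byte[] decrypted = cipher.CreateDecryptor(keyBlock, null).TransformFinalBlock(encrypted, 0, encrypted.Length);
return plain.SequenceEqual(decrypted);
}
}
}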

View File

@@ -0,0 +1,230 @@
using System;
using System.IO;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using ICSharpCode.SharpZipLib.Core;
using ICSharpCode.SharpZipLib.Zip;
namespace ICSharpCode.SharpZipLib.Encryption
{
/// <summary>
/// Encrypts and decrypts AES ZIP
/// </summary>
/// <remarks>
/// Based on information from http://www.winzip.com/aes_info.htm
/// and http://www.gladman.me.uk/cryptography_technology/fileencrypt/
/// </remarks>
internal class ZipAESStream : CryptoStream
{
/// <summary>
/// Constructor
/// </summary>
/// <param name="stream">The stream on which to perform the cryptographic transformation.</param>
/// <param name="transform">Instance of ZipAESTransform</param>
/// <param name="mode">Read or Write</param>
public ZipAESStream(Stream stream, ZipAESTransform transform, CryptoStreamMode mode)
: base(stream, transform, mode)
{
_stream = stream;
_transform = transform;
_slideBuffer = new byte[1024];
// mode:
// CryptoStreamMode.Read means we read from "stream" and pass decrypted to our Read() method.
// Write bypasses this stream and uses the Transform directly.
if (mode != CryptoStreamMode.Read)
{
throw new Exception("ZipAESStream only for read");
}
}
// The final n bytes of the AES stream contain the Auth Code.
private const int AUTH_CODE_LENGTH = 10;
// Blocksize is always 16 here, even for AES-256 which has transform.InputBlockSize of 32.
private const int CRYPTO_BLOCK_SIZE = 16;
// total length of block + auth code
private const int BLOCK_AND_AUTH = CRYPTO_BLOCK_SIZE + AUTH_CODE_LENGTH;
private Stream _stream;
private ZipAESTransform _transform;
private byte[] _slideBuffer;
private int _slideBufStartPos;
private int _slideBufFreePos;
// Buffer block transforms to enable partial reads
private byte[] _transformBuffer = null;// new byte[CRYPTO_BLOCK_SIZE];
private int _transformBufferFreePos;
private int _transformBufferStartPos;
// Do we have some buffered data available?
private bool HasBufferedData => _transformBuffer != null && _transformBufferStartPos < _transformBufferFreePos;
/// <summary>
/// Reads a sequence of bytes from the current CryptoStream into buffer,
/// and advances the position within the stream by the number of bytes read.
/// </summary>
public override int Read(byte[] buffer, int offset, int count)
{
// Nothing to do
if (count == 0)
return 0;
// If we have buffered data, read that first
int nBytes = 0;
if (HasBufferedData)
{
nBytes = ReadBufferedData(buffer, offset, count);
// Read all requested data from the buffer
if (nBytes == count)
return nBytes;
offset += nBytes;
count -= nBytes;
}
// Read more data from the input, if available
if (_slideBuffer != null)
nBytes += ReadAndTransform(buffer, offset, count);
return nBytes;
}
/// <inheritdoc/>
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
var readCount = Read(buffer, offset, count);
return Task.FromResult(readCount);
}
// Read data from the underlying stream and decrypt it
private int ReadAndTransform(byte[] buffer, int offset, int count)
{
int nBytes = 0;
while (nBytes < count)
{
int bytesLeftToRead = count - nBytes;
// Calculate buffer quantities vs read-ahead size, and check for sufficient free space
int byteCount = _slideBufFreePos - _slideBufStartPos;
// Need to handle final block and Auth Code specially, but don't know total data length.
// Maintain a read-ahead equal to the length of (crypto block + Auth Code).
// When that runs out we can detect these final sections.
int lengthToRead = BLOCK_AND_AUTH - byteCount;
if (_slideBuffer.Length - _slideBufFreePos < lengthToRead)
{
// Shift the data to the beginning of the buffer
int iTo = 0;
for (int iFrom = _slideBufStartPos; iFrom < _slideBufFreePos; iFrom++, iTo++)
{
_slideBuffer[iTo] = _slideBuffer[iFrom];
}
_slideBufFreePos -= _slideBufStartPos; // Note the -=
_slideBufStartPos = 0;
}
int obtained = StreamUtils.ReadRequestedBytes(_stream, _slideBuffer, _slideBufFreePos, lengthToRead);
_slideBufFreePos += obtained;
// Recalculate how much data we now have
byteCount = _slideBufFreePos - _slideBufStartPos;
if (byteCount >= BLOCK_AND_AUTH)
{
var read = TransformAndBufferBlock(buffer, offset, bytesLeftToRead, CRYPTO_BLOCK_SIZE);
nBytes += read;
offset += read;
}
else
{
// Last round.
if (byteCount > AUTH_CODE_LENGTH)
{
// At least one byte of data plus auth code
int finalBlock = byteCount - AUTH_CODE_LENGTH;
nBytes += TransformAndBufferBlock(buffer, offset, bytesLeftToRead, finalBlock);
}
else if (byteCount < AUTH_CODE_LENGTH)
throw new ZipException("Internal error missed auth code"); // Coding bug
// Final block done. Check Auth code.
byte[] calcAuthCode = _transform.GetAuthCode();
for (int i = 0; i < AUTH_CODE_LENGTH; i++)
{
if (calcAuthCode[i] != _slideBuffer[_slideBufStartPos + i])
{
throw new ZipException("AES Authentication Code does not match. This is a super-CRC check on the data in the file after compression and encryption. \r\n"
+ "The file may be damaged.");
}
}
// don't need this any more, so use it as a 'complete' flag
_slideBuffer = null;
break; // Reached the auth code
}
}
return nBytes;
}
// read some buffered data
private int ReadBufferedData(byte[] buffer, int offset, int count)
{
int copyCount = Math.Min(count, _transformBufferFreePos - _transformBufferStartPos);
Array.Copy(_transformBuffer, _transformBufferStartPos, buffer, offset, copyCount);
_transformBufferStartPos += copyCount;
return copyCount;
}
// Perform the crypto transform, and buffer the data if less than one block has been requested.
private int TransformAndBufferBlock(byte[] buffer, int offset, int count, int blockSize)
{
// If the requested data is greater than one block, transform it directly into the output
// If it's smaller, do it into a temporary buffer and copy the requested part
bool bufferRequired = (blockSize > count);
if (bufferRequired && _transformBuffer == null)
_transformBuffer = new byte[CRYPTO_BLOCK_SIZE];
var targetBuffer = bufferRequired ? _transformBuffer : buffer;
var targetOffset = bufferRequired ? 0 : offset;
// Transform the data
_transform.TransformBlock(_slideBuffer,
_slideBufStartPos,
blockSize,
targetBuffer,
targetOffset);
_slideBufStartPos += blockSize;
if (!bufferRequired)
{
return blockSize;
}
else
{
Array.Copy(_transformBuffer, 0, buffer, offset, count);
_transformBufferStartPos = count;
_transformBufferFreePos = blockSize;
return count;
}
}
/// <summary>
/// Writes a sequence of bytes to the current stream and advances the current position within this stream by the number of bytes written.
/// </summary>
/// <param name="buffer">An array of bytes. This method copies count bytes from buffer to the current stream. </param>
/// <param name="offset">The byte offset in buffer at which to begin copying bytes to the current stream. </param>
/// <param name="count">The number of bytes to be written to the current stream. </param>
public override void Write(byte[] buffer, int offset, int count)
{
// ZipAESStream is used for reading but not for writing. Writing uses the ZipAESTransform directly.
throw new NotImplementedException();
}
}
}

View File

@@ -0,0 +1,222 @@
using System;
using System.Security.Cryptography;
using ICSharpCode.SharpZipLib.Core;
namespace ICSharpCode.SharpZipLib.Encryption
{
/// <summary>
/// Transforms stream using AES in CTR mode
/// </summary>
internal class ZipAESTransform : ICryptoTransform
{
class IncrementalHash : HMACSHA1
{
bool _finalised;
public IncrementalHash(byte[] key) : base(key) { }
public static IncrementalHash CreateHMAC(string n, byte[] key) => new IncrementalHash(key);
public void AppendData(byte[] buffer, int offset, int count) => TransformBlock(buffer, offset, count, buffer, offset);
public byte[] GetHashAndReset()
{
if (!_finalised)
{
byte[] dummy = new byte[0];
TransformFinalBlock(dummy, 0, 0);
_finalised = true;
}
return Hash;
}
}
static class HashAlgorithmName
{
public static string SHA1 = null;
}
private const int PWD_VER_LENGTH = 2;
// WinZip uses an iteration count of 1000 for PBKDF2 key generation
private const int KEY_ROUNDS = 1000;
// For 128-bit AES (16 bytes) the encryption is implemented as expected.
// For 256-bit AES (32 bytes) WinZip does a full 256-bit AES encryption of the nonce to create the encryption
// block, but uses only the first 16 bytes of it and discards the second half.
private const int ENCRYPT_BLOCK = 16;
private int _blockSize;
private readonly ICryptoTransform _encryptor;
private readonly byte[] _counterNonce;
private byte[] _encryptBuffer;
private int _encrPos;
private byte[] _pwdVerifier;
private IncrementalHash _hmacsha1;
private byte[] _authCode = null;
private bool _writeMode;
/// <summary>
/// Constructor.
/// </summary>
/// <param name="key">Password string</param>
/// <param name="saltBytes">Random bytes, length depends on encryption strength.
/// 128 bits = 8 bytes, 192 bits = 12 bytes, 256 bits = 16 bytes.</param>
/// <param name="blockSize">The encryption strength, in bytes eg 16 for 128 bits.</param>
/// <param name="writeMode">True when creating a zip, false when reading. For the AuthCode.</param>
///
public ZipAESTransform(string key, byte[] saltBytes, int blockSize, bool writeMode)
{
if (blockSize != 16 && blockSize != 32) // 24 valid for AES but not supported by Winzip
throw new Exception("Invalid blocksize " + blockSize + ". Must be 16 or 32.");
if (saltBytes.Length != blockSize / 2)
throw new Exception("Invalid salt len. Must be " + blockSize / 2 + " for blocksize " + blockSize);
// initialise the encryption buffer and buffer pos
_blockSize = blockSize;
_encryptBuffer = new byte[_blockSize];
_encrPos = ENCRYPT_BLOCK;
// Performs the equivalent of derive_key in Dr Brian Gladman's pwd2key.c
var pdb = new Rfc2898DeriveBytes(key, saltBytes, KEY_ROUNDS);
var rm = Aes.Create();
rm.Mode = CipherMode.ECB; // No feedback from cipher for CTR mode
_counterNonce = new byte[_blockSize];
byte[] key1bytes = pdb.GetBytes(_blockSize);
byte[] key2bytes = pdb.GetBytes(_blockSize);
// Use empty IV for AES
_encryptor = rm.CreateEncryptor(key1bytes, new byte[16]);
_pwdVerifier = pdb.GetBytes(PWD_VER_LENGTH);
//
_hmacsha1 = IncrementalHash.CreateHMAC(HashAlgorithmName.SHA1, key2bytes);
_writeMode = writeMode;
}
/// <summary>
/// Implement the ICryptoTransform method.
/// </summary>
public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, byte[] outputBuffer, int outputOffset)
{
// Pass the data stream to the hash algorithm for generating the Auth Code.
// This does not change the inputBuffer. Do this before decryption for read mode.
if (!_writeMode)
{
_hmacsha1.AppendData(inputBuffer, inputOffset, inputCount);
}
// Encrypt with AES in CTR mode. Regards to Dr Brian Gladman for this.
int ix = 0;
while (ix < inputCount)
{
if (_encrPos == ENCRYPT_BLOCK)
{
/* increment encryption nonce */
int j = 0;
while (++_counterNonce[j] == 0)
{
++j;
}
/* encrypt the nonce to form next xor buffer */
_encryptor.TransformBlock(_counterNonce, 0, _blockSize, _encryptBuffer, 0);
_encrPos = 0;
}
outputBuffer[ix + outputOffset] = (byte)(inputBuffer[ix + inputOffset] ^ _encryptBuffer[_encrPos++]);
//
ix++;
}
if (_writeMode)
{
// This does not change the buffer.
_hmacsha1.AppendData(outputBuffer, outputOffset, inputCount);
}
return inputCount;
}
/// <summary>
/// Returns the 2 byte password verifier
/// </summary>
public byte[] PwdVerifier
{
get
{
return _pwdVerifier;
}
}
/// <summary>
/// Returns the 10 byte AUTH CODE to be checked or appended immediately following the AES data stream.
/// </summary>
public byte[] GetAuthCode()
{
if (_authCode == null)
{
_authCode = _hmacsha1.GetHashAndReset();
}
return _authCode;
}
#region ICryptoTransform Members
/// <summary>
/// Not implemented.
/// </summary>
public byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount)
{
if (inputCount > 0)
{
throw new NotImplementedException("TransformFinalBlock is not implemented and inputCount is greater than 0");
}
return Empty.Array<byte>();
}
/// <summary>
/// Gets the size of the input data blocks in bytes.
/// </summary>
public int InputBlockSize
{
get
{
return _blockSize;
}
}
/// <summary>
/// Gets the size of the output data blocks in bytes.
/// </summary>
public int OutputBlockSize
{
get
{
return _blockSize;
}
}
/// <summary>
/// Gets a value indicating whether multiple blocks can be transformed.
/// </summary>
public bool CanTransformMultipleBlocks
{
get
{
return true;
}
}
/// <summary>
/// Gets a value indicating whether the current transform can be reused.
/// </summary>
public bool CanReuseTransform
{
get
{
return true;
}
}
/// <summary>
/// Cleanup internal state.
/// </summary>
public void Dispose()
{
_encryptor.Dispose();
}
#endregion ICryptoTransform Members
}
}
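// --- Usage sketch (added for illustration; not part of the library source) ---
// AES in CTR mode XORs the data with a keystream, so applying two transforms built from the
// same password and salt restores the original bytes. The all-zero salt is for demonstration
// only; real callers must use random salt bytes.
namespace ICSharpCode.SharpZipLib.Encryption.Samples
{
internal static class ZipAESTransformSketch
{
internal static byte[] RoundTrip(byte[] data)
{
byte[] salt = new byte[8]; // 8-byte salt <=> blockSize 16 (AES-128)
var encrypt = new ZipAESTransform("secret", salt, 16, true);
var decrypt = new ZipAESTransform("secret", salt, 16, false);
var cipherText = new byte[data.Length];
encrypt.TransformBlock(data, 0, data.Length, cipherText, 0);
var plainText = new byte[data.Length];
decrypt.TransformBlock(cipherText, 0, cipherText.Length, plainText, 0);
return plainText; // equal to data
}
}
}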

View File

@@ -0,0 +1,92 @@
using System;
using System.IO;
namespace ICSharpCode.SharpZipLib.GZip
{
using static Zip.Compression.Deflater;
/// <summary>
/// An example class to demonstrate compression and decompression of GZip streams.
/// </summary>
public static class GZip
{
/// <summary>
/// Decompress the <paramref name="inStream">input</paramref> writing
/// uncompressed data to the <paramref name="outStream">output stream</paramref>
/// </summary>
/// <param name="inStream">The readable stream containing data to decompress.</param>
/// <param name="outStream">The output stream to receive the decompressed data.</param>
/// <param name="isStreamOwner">Both streams are closed on completion if true.</param>
/// <exception cref="ArgumentNullException">Input or output stream is null</exception>
public static void Decompress(Stream inStream, Stream outStream, bool isStreamOwner)
{
if (inStream == null)
throw new ArgumentNullException(nameof(inStream), "Input stream is null");
if (outStream == null)
throw new ArgumentNullException(nameof(outStream), "Output stream is null");
try
{
using (GZipInputStream gzipInput = new GZipInputStream(inStream))
{
gzipInput.IsStreamOwner = isStreamOwner;
Core.StreamUtils.Copy(gzipInput, outStream, new byte[4096]);
}
}
finally
{
if (isStreamOwner)
{
// inStream is closed by the GZipInputStream if stream owner
outStream.Dispose();
}
}
}
/// <summary>
/// Compress the <paramref name="inStream">input stream</paramref> sending
/// result data to <paramref name="outStream">output stream</paramref>
/// </summary>
/// <param name="inStream">The readable stream to compress.</param>
/// <param name="outStream">The output stream to receive the compressed data.</param>
/// <param name="isStreamOwner">Both streams are closed on completion if true.</param>
/// <param name="bufferSize">Deflate buffer size, minimum 512</param>
/// <param name="level">Deflate compression level, 0-9</param>
/// <exception cref="ArgumentNullException">Input or output stream is null</exception>
/// <exception cref="ArgumentOutOfRangeException">Buffer Size is smaller than 512</exception>
/// <exception cref="ArgumentOutOfRangeException">Compression level outside 0-9</exception>
public static void Compress(Stream inStream, Stream outStream, bool isStreamOwner, int bufferSize = 512, int level = 6)
{
if (inStream == null)
throw new ArgumentNullException(nameof(inStream), "Input stream is null");
if (outStream == null)
throw new ArgumentNullException(nameof(outStream), "Output stream is null");
if (bufferSize < 512)
throw new ArgumentOutOfRangeException(nameof(bufferSize), "Deflate buffer size must be >= 512");
if (level < NO_COMPRESSION || level > BEST_COMPRESSION)
throw new ArgumentOutOfRangeException(nameof(level), "Compression level must be 0-9");
try
{
using (GZipOutputStream gzipOutput = new GZipOutputStream(outStream, bufferSize))
{
gzipOutput.SetLevel(level);
gzipOutput.IsStreamOwner = isStreamOwner;
Core.StreamUtils.Copy(inStream, gzipOutput, new byte[bufferSize]);
}
}
finally
{
if (isStreamOwner)
{
// outStream is closed by the GZipOutputStream if stream owner
inStream.Dispose();
}
}
}
}
}
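// --- Usage sketch (added for illustration; not part of the library source) ---
// In-memory round trip through the Compress/Decompress helpers above. Passing
// isStreamOwner = false keeps the MemoryStreams open between the two calls.
namespace ICSharpCode.SharpZipLib.GZip.Samples
{
using System.IO;
using System.Text;
internal static class GZipSketch
{
internal static string RoundTrip(string text)
{
using (var raw = new MemoryStream(Encoding.UTF8.GetBytes(text)))
using (var packed = new MemoryStream())
using (var unpacked = new MemoryStream())
{
GZip.Compress(raw, packed, false, 4096, 9);
packed.Position = 0;
GZip.Decompress(packed, unpacked, false);
return Encoding.UTF8.GetString(unpacked.ToArray());
}
}
}
}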

View File

@@ -0,0 +1,78 @@
using System;
using System.Text;
namespace ICSharpCode.SharpZipLib.GZip
{
/// <summary>
/// This class contains constants used for gzip.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Naming", "CA1707:Identifiers should not contain underscores", Justification = "kept for backwards compatibility")]
sealed public class GZipConstants
{
/// <summary>
/// First GZip identification byte
/// </summary>
public const byte ID1 = 0x1F;
/// <summary>
/// Second GZip identification byte
/// </summary>
public const byte ID2 = 0x8B;
/// <summary>
/// Deflate compression method
/// </summary>
public const byte CompressionMethodDeflate = 0x8;
/// <summary>
/// Get the GZip specified encoding (CP-1252 if supported, otherwise ASCII)
/// </summary>
public static Encoding Encoding
{
get
{
try
{
return Encoding.GetEncoding(1252);
}
catch
{
return Encoding.ASCII;
}
}
}
}
/// <summary>
/// GZip header flags
/// </summary>
[Flags]
public enum GZipFlags: byte
{
/// <summary>
/// Text flag hinting that the file is in ASCII
/// </summary>
FTEXT = 0x1 << 0,
/// <summary>
/// CRC flag indicating that a CRC16 preceeds the data
/// </summary>
FHCRC = 0x1 << 1,
/// <summary>
/// Extra flag indicating that extra fields are present
/// </summary>
FEXTRA = 0x1 << 2,
/// <summary>
/// Filename flag indicating that the original filename is present
/// </summary>
FNAME = 0x1 << 3,
/// <summary>
/// Flag bit mask indicating that a comment is present
/// </summary>
FCOMMENT = 0x1 << 4,
}
}
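// --- Usage sketch (added for illustration; not part of the library source) ---
// Inspects the fixed part of a gzip member header using the constants and flag bits above.
namespace ICSharpCode.SharpZipLib.GZip.Samples
{
internal static class GZipHeaderSketch
{
// Byte layout: ID1, ID2, compression method, flags, mtime(4), extra flags, OS.
internal static bool HasOriginalName(byte[] header)
{
if (header.Length < 4) return false;
if (header[0] != GZipConstants.ID1 || header[1] != GZipConstants.ID2) return false;
if (header[2] != GZipConstants.CompressionMethodDeflate) return false;
return ((GZipFlags)header[3] & GZipFlags.FNAME) != 0;
}
}
}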

View File

@@ -0,0 +1,54 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib.GZip
{
/// <summary>
/// GZipException represents exceptions specific to GZip classes and code.
/// </summary>
[Serializable]
public class GZipException : SharpZipBaseException
{
/// <summary>
/// Initialise a new instance of <see cref="GZipException" />.
/// </summary>
public GZipException()
{
}
/// <summary>
/// Initialise a new instance of <see cref="GZipException" /> with its message string.
/// </summary>
/// <param name="message">A <see cref="string"/> that describes the error.</param>
public GZipException(string message)
: base(message)
{
}
/// <summary>
/// Initialise a new instance of <see cref="GZipException" />.
/// </summary>
/// <param name="message">A <see cref="string"/> that describes the error.</param>
/// <param name="innerException">The <see cref="Exception"/> that caused this exception.</param>
public GZipException(string message, Exception innerException)
: base(message, innerException)
{
}
/// <summary>
/// Initializes a new instance of the GZipException class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected GZipException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

View File

@@ -0,0 +1,361 @@
using ICSharpCode.SharpZipLib.Checksum;
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using System;
using System.IO;
using System.Text;
namespace ICSharpCode.SharpZipLib.GZip
{
/// <summary>
/// This filter stream is used to decompress a "GZIP" format stream.
/// The "GZIP" format is described baseInputStream RFC 1952.
///
/// author of the original java version : John Leuner
/// </summary>
/// <example> This sample shows how to unzip a gzipped file
/// <code>
/// using System;
/// using System.IO;
///
/// using ICSharpCode.SharpZipLib.Core;
/// using ICSharpCode.SharpZipLib.GZip;
///
/// class MainClass
/// {
/// public static void Main(string[] args)
/// {
/// using (Stream inStream = new GZipInputStream(File.OpenRead(args[0])))
/// using (FileStream outStream = File.Create(Path.GetFileNameWithoutExtension(args[0]))) {
/// byte[] buffer = new byte[4096];
/// StreamUtils.Copy(inStream, outStream, buffer);
/// }
/// }
/// }
/// </code>
/// </example>
public class GZipInputStream : InflaterInputStream
{
#region Instance Fields
/// <summary>
/// CRC-32 value for uncompressed data
/// </summary>
protected Crc32 crc;
/// <summary>
/// Flag to indicate if we've read the GZIP header yet for the current member (block of compressed data).
/// This is tracked per-block as the file is parsed.
/// </summary>
private bool readGZIPHeader;
/// <summary>
/// Flag to indicate if at least one block in a stream with concatenated blocks was read successfully.
/// This allows us to exit gracefully if downstream data is not in gzip format.
/// </summary>
private bool completedLastBlock;
private string fileName;
#endregion Instance Fields
#region Constructors
/// <summary>
/// Creates a GZipInputStream with the default buffer size
/// </summary>
/// <param name="baseInputStream">
/// The stream to read compressed data from (in GZIP format)
/// </param>
public GZipInputStream(Stream baseInputStream)
: this(baseInputStream, 4096)
{
}
/// <summary>
/// Creates a GZIPInputStream with the specified buffer size
/// </summary>
/// <param name="baseInputStream">
/// The stream to read compressed data from (in GZIP format)
/// </param>
/// <param name="size">
/// Size of the buffer to use
/// </param>
public GZipInputStream(Stream baseInputStream, int size)
: base(baseInputStream, new Inflater(true), size)
{
}
#endregion Constructors
#region Stream overrides
/// <summary>
/// Reads uncompressed data into an array of bytes
/// </summary>
/// <param name="buffer">
/// The buffer to read uncompressed data into
/// </param>
/// <param name="offset">
/// The offset indicating where the data should be placed
/// </param>
/// <param name="count">
/// The number of uncompressed bytes to be read
/// </param>
/// <returns>Returns the number of bytes actually read.</returns>
public override int Read(byte[] buffer, int offset, int count)
{
// A GZIP file can contain multiple blocks of compressed data, although this is quite rare.
// A compressed block could potentially be empty, so we need to loop until we reach EOF or
// we find data.
while (true)
{
// If we haven't read the header for this block, read it
if (!readGZIPHeader)
{
// Try to read header. If there is no header (0 bytes available), this is EOF. If there is
// an incomplete header, this will throw an exception.
try
{
if (!ReadHeader())
{
return 0;
}
}
catch (Exception ex) when (completedLastBlock && (ex is GZipException || ex is EndOfStreamException))
{
// if we completed the last block (i.e. we're in a stream that has multiple blocks concatenated),
// we want to return gracefully from any header parsing exceptions, since sometimes there may
// be trailing garbage on a stream
return 0;
}
}
// Try to read compressed data
int bytesRead = base.Read(buffer, offset, count);
if (bytesRead > 0)
{
crc.Update(new ArraySegment<byte>(buffer, offset, bytesRead));
}
// If this is the end of stream, read the footer
if (inf.IsFinished)
{
ReadFooter();
}
// Attempting to read 0 bytes will never yield any bytesRead, so we return instead of looping forever
if (bytesRead > 0 || count == 0)
{
return bytesRead;
}
}
}
/// <summary>
/// Retrieves the filename header field for the block last read
/// </summary>
/// <returns></returns>
public string GetFilename()
{
return fileName;
}
#endregion Stream overrides
#region Support routines
private bool ReadHeader()
{
// Initialize CRC for this block
crc = new Crc32();
// Make sure there is data in file. We can't rely on ReadLeByte() to fill the buffer, as this could be EOF,
// which is fine, but ReadLeByte() throws an exception if it doesn't find data, so we do this part ourselves.
if (inputBuffer.Available <= 0)
{
inputBuffer.Fill();
if (inputBuffer.Available <= 0)
{
// No header, EOF.
return false;
}
}
var headCRC = new Crc32();
// 1. Check the two magic bytes
var magic = inputBuffer.ReadLeByte();
headCRC.Update(magic);
if (magic != GZipConstants.ID1)
{
throw new GZipException("Error GZIP header, first magic byte doesn't match");
}
magic = inputBuffer.ReadLeByte();
if (magic != GZipConstants.ID2)
{
throw new GZipException("Error GZIP header, second magic byte doesn't match");
}
headCRC.Update(magic);
// 2. Check the compression type (must be 8)
var compressionType = inputBuffer.ReadLeByte();
if (compressionType != GZipConstants.CompressionMethodDeflate)
{
throw new GZipException("Error GZIP header, data not in deflate format");
}
headCRC.Update(compressionType);
// 3. Check the flags
var flagsByte = inputBuffer.ReadLeByte();
headCRC.Update(flagsByte);
// 3.1 Check the reserved bits are zero
if ((flagsByte & 0xE0) != 0)
{
throw new GZipException("Reserved flag bits in GZIP header != 0");
}
var flags = (GZipFlags)flagsByte;
// 4.-6. Skip the modification time, extra flags, and OS type
for (int i = 0; i < 6; i++)
{
headCRC.Update(inputBuffer.ReadLeByte());
}
// 7. Read extra field
if (flags.HasFlag(GZipFlags.FEXTRA))
{
// XLEN is total length of extra subfields, we will skip them all
var len1 = inputBuffer.ReadLeByte();
var len2 = inputBuffer.ReadLeByte();
headCRC.Update(len1);
headCRC.Update(len2);
int extraLen = (len2 << 8) | len1; // gzip is LSB first
for (int i = 0; i < extraLen; i++)
{
headCRC.Update(inputBuffer.ReadLeByte());
}
}
// 8. Read file name
if (flags.HasFlag(GZipFlags.FNAME))
{
var fname = new byte[1024];
var fnamePos = 0;
int readByte;
while ((readByte = inputBuffer.ReadLeByte()) > 0)
{
if (fnamePos < 1024)
{
fname[fnamePos++] = (byte)readByte;
}
headCRC.Update(readByte);
}
headCRC.Update(readByte);
fileName = GZipConstants.Encoding.GetString(fname, 0, fnamePos);
}
else
{
fileName = null;
}
// 9. Read comment
if (flags.HasFlag(GZipFlags.FCOMMENT))
{
int readByte;
while ((readByte = inputBuffer.ReadLeByte()) > 0)
{
headCRC.Update(readByte);
}
headCRC.Update(readByte);
}
// 10. Read header CRC
if (flags.HasFlag(GZipFlags.FHCRC))
{
int tempByte;
int crcval = inputBuffer.ReadLeByte();
if (crcval < 0)
{
throw new EndOfStreamException("EOS reading GZIP header");
}
tempByte = inputBuffer.ReadLeByte();
if (tempByte < 0)
{
throw new EndOfStreamException("EOS reading GZIP header");
}
crcval = (crcval << 8) | tempByte;
if (crcval != ((int)headCRC.Value & 0xffff))
{
throw new GZipException("Header CRC value mismatch");
}
}
readGZIPHeader = true;
return true;
}
private void ReadFooter()
{
byte[] footer = new byte[8];
// End of stream; reclaim all bytes from inf, read the final byte count, and reset the inflator
long bytesRead = inf.TotalOut & 0xffffffff;
inputBuffer.Available += inf.RemainingInput;
inf.Reset();
// Read footer from inputBuffer
int needed = 8;
while (needed > 0)
{
int count = inputBuffer.ReadClearTextBuffer(footer, 8 - needed, needed);
if (count <= 0)
{
throw new EndOfStreamException("EOS reading GZIP footer");
}
needed -= count; // Jewel Jan 16
}
// Calculate CRC
int crcval = (footer[0] & 0xff) | ((footer[1] & 0xff) << 8) | ((footer[2] & 0xff) << 16) | (footer[3] << 24);
if (crcval != (int)crc.Value)
{
throw new GZipException("GZIP crc sum mismatch, theirs \"" + crcval + "\" and ours \"" + (int)crc.Value);
}
// NOTE The total here is the original total modulo 2 ^ 32.
uint total =
(uint)((uint)footer[4] & 0xff) |
(uint)(((uint)footer[5] & 0xff) << 8) |
(uint)(((uint)footer[6] & 0xff) << 16) |
(uint)((uint)footer[7] << 24);
if (bytesRead != total)
{
throw new GZipException("Number of bytes mismatch in footer");
}
// Mark header read as false so if another header exists, we'll continue reading through the file
readGZIPHeader = false;
// Indicate that we succeeded on at least one block so we can exit gracefully if there is trailing garbage downstream
completedLastBlock = true;
}
#endregion Support routines
}
}
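// --- Usage sketch (added for illustration; not part of the library source) ---
// Decompresses a .gz file and reports the original file name stored in its FNAME header
// field, if one was present. The header is parsed during the first Read call.
namespace ICSharpCode.SharpZipLib.GZip.Samples
{
using System;
using System.IO;
internal static class GZipInputStreamSketch
{
internal static void Unpack(string gzPath, Stream destination)
{
using (var input = new GZipInputStream(File.OpenRead(gzPath)))
{
input.CopyTo(destination);
Console.WriteLine(input.GetFilename() ?? "(no FNAME field)");
}
}
}
}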

View File

@@ -0,0 +1,293 @@
using ICSharpCode.SharpZipLib.Checksum;
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using System;
using System.IO;
using System.Text;
namespace ICSharpCode.SharpZipLib.GZip
{
/// <summary>
/// This filter stream is used to compress a stream into a "GZIP" stream.
/// The "GZIP" format is described in RFC 1952.
///
/// author of the original java version : John Leuner
/// </summary>
/// <example> This sample shows how to gzip a file
/// <code>
/// using System;
/// using System.IO;
///
/// using ICSharpCode.SharpZipLib.GZip;
/// using ICSharpCode.SharpZipLib.Core;
///
/// class MainClass
/// {
/// public static void Main(string[] args)
/// {
/// using (Stream s = new GZipOutputStream(File.Create(args[0] + ".gz")))
/// using (FileStream fs = File.OpenRead(args[0])) {
/// byte[] writeData = new byte[4096];
/// StreamUtils.Copy(fs, s, writeData);
/// }
/// }
/// }
/// </code>
/// </example>
public class GZipOutputStream : DeflaterOutputStream
{
private enum OutputState
{
Header,
Footer,
Finished,
Closed,
};
#region Instance Fields
/// <summary>
/// CRC-32 value for uncompressed data
/// </summary>
protected Crc32 crc = new Crc32();
private OutputState state_ = OutputState.Header;
private string fileName;
private GZipFlags flags = 0;
#endregion Instance Fields
#region Constructors
/// <summary>
/// Creates a GzipOutputStream with the default buffer size
/// </summary>
/// <param name="baseOutputStream">
/// The stream to write compressed data to
/// </param>
public GZipOutputStream(Stream baseOutputStream)
: this(baseOutputStream, 4096)
{
}
/// <summary>
/// Creates a GZipOutputStream with the specified buffer size
/// </summary>
/// <param name="baseOutputStream">
/// The stream to write compressed data to
/// </param>
/// <param name="size">
/// Size of the buffer to use
/// </param>
public GZipOutputStream(Stream baseOutputStream, int size) : base(baseOutputStream, new Deflater(Deflater.DEFAULT_COMPRESSION, true), size)
{
}
#endregion Constructors
#region Public API
/// <summary>
/// Sets the active compression level (0-9). The new level will be activated
/// immediately.
/// </summary>
/// <param name="level">The compression level to set.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// Level specified is not supported.
/// </exception>
/// <see cref="Deflater"/>
public void SetLevel(int level)
{
if (level < Deflater.NO_COMPRESSION || level > Deflater.BEST_COMPRESSION)
throw new ArgumentOutOfRangeException(nameof(level), "Compression level must be 0-9");
deflater_.SetLevel(level);
}
/// <summary>
/// Get the current compression level.
/// </summary>
/// <returns>The current compression level.</returns>
public int GetLevel()
{
return deflater_.GetLevel();
}
/// <summary>
/// Original filename
/// </summary>
public string FileName
{
get => fileName;
set
{
fileName = CleanFilename(value);
if (string.IsNullOrEmpty(fileName))
{
flags &= ~GZipFlags.FNAME;
}
else
{
flags |= GZipFlags.FNAME;
}
}
}
#endregion Public API
#region Stream overrides
/// <summary>
/// Write given buffer to output updating crc
/// </summary>
/// <param name="buffer">Buffer to write</param>
/// <param name="offset">Offset of first byte in buf to write</param>
/// <param name="count">Number of bytes to write</param>
public override void Write(byte[] buffer, int offset, int count)
{
if (state_ == OutputState.Header)
{
WriteHeader();
}
if (state_ != OutputState.Footer)
{
throw new InvalidOperationException("Write not permitted in current state");
}
crc.Update(new ArraySegment<byte>(buffer, offset, count));
base.Write(buffer, offset, count);
}
/// <summary>
/// Writes remaining compressed output data to the output stream
/// and closes it.
/// </summary>
protected override void Dispose(bool disposing)
{
try
{
Finish();
}
finally
{
if (state_ != OutputState.Closed)
{
state_ = OutputState.Closed;
if (IsStreamOwner)
{
baseOutputStream_.Dispose();
}
}
}
}
/// <summary>
/// Flushes the stream by ensuring the header is written, and then calling <see cref="DeflaterOutputStream.Flush">Flush</see>
/// on the deflater.
/// </summary>
public override void Flush()
{
if (state_ == OutputState.Header)
{
WriteHeader();
}
base.Flush();
}
#endregion Stream overrides
#region DeflaterOutputStream overrides
/// <summary>
/// Finish compression and write any footer information required to stream
/// </summary>
public override void Finish()
{
// If no data has been written a header should be added.
if (state_ == OutputState.Header)
{
WriteHeader();
}
if (state_ == OutputState.Footer)
{
state_ = OutputState.Finished;
base.Finish();
var totalin = (uint)(deflater_.TotalIn & 0xffffffff);
var crcval = (uint)(crc.Value & 0xffffffff);
byte[] gzipFooter;
unchecked
{
gzipFooter = new byte[] {
(byte) crcval, (byte) (crcval >> 8),
(byte) (crcval >> 16), (byte) (crcval >> 24),
(byte) totalin, (byte) (totalin >> 8),
(byte) (totalin >> 16), (byte) (totalin >> 24)
};
}
baseOutputStream_.Write(gzipFooter, 0, gzipFooter.Length);
}
}
#endregion DeflaterOutputStream overrides
#region Support Routines
private static string CleanFilename(string path)
=> path.Substring(path.LastIndexOf('/') + 1);
private void WriteHeader()
{
if (state_ == OutputState.Header)
{
state_ = OutputState.Footer;
var mod_time = (int)((DateTime.Now.Ticks - new DateTime(1970, 1, 1).Ticks) / 10000000L); // Ticks give back 100ns intervals
byte[] gzipHeader = {
// The two magic bytes
GZipConstants.ID1,
GZipConstants.ID2,
// The compression type
GZipConstants.CompressionMethodDeflate,
// The flags (not set)
(byte)flags,
// The modification time
(byte) mod_time, (byte) (mod_time >> 8),
(byte) (mod_time >> 16), (byte) (mod_time >> 24),
// The extra flags
0,
// The OS type (unknown)
255
};
baseOutputStream_.Write(gzipHeader, 0, gzipHeader.Length);
if (flags.HasFlag(GZipFlags.FNAME))
{
var fname = GZipConstants.Encoding.GetBytes(fileName);
baseOutputStream_.Write(fname, 0, fname.Length);
// End filename string with a \0
baseOutputStream_.Write(new byte[] { 0 }, 0, 1);
}
}
}
#endregion Support Routines
}
}
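// --- Usage sketch (added for illustration; not part of the library source) ---
// Writes a .gz file while recording the original file name (FNAME) and raising the
// compression level. Setting FileName before the first Write ensures it lands in the header.
namespace ICSharpCode.SharpZipLib.GZip.Samples
{
using System.IO;
internal static class GZipOutputStreamSketch
{
internal static void Pack(string sourcePath)
{
using (var output = new GZipOutputStream(File.Create(sourcePath + ".gz")))
using (var input = File.OpenRead(sourcePath))
{
output.SetLevel(9); // best compression
output.FileName = sourcePath; // stored in the FNAME field (trimmed after the last '/')
input.CopyTo(output);
}
}
}
}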

View File

@@ -0,0 +1,63 @@
namespace ICSharpCode.SharpZipLib.Lzw
{
/// <summary>
/// This class contains constants used for LZW
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Naming", "CA1707:Identifiers should not contain underscores", Justification = "kept for backwards compatibility")]
sealed public class LzwConstants
{
/// <summary>
/// Magic number found at start of LZW header: 0x1f 0x9d
/// </summary>
public const int MAGIC = 0x1f9d;
/// <summary>
/// Maximum number of bits per code
/// </summary>
public const int MAX_BITS = 16;
/* 3rd header byte:
* bit 0..4 Number of compression bits
* bit 5 Extended header
* bit 6 Free
* bit 7 Block mode
*/
/// <summary>
/// Mask for 'number of compression bits'
/// </summary>
public const int BIT_MASK = 0x1f;
/// <summary>
/// Indicates the presence of a fourth header byte
/// </summary>
public const int EXTENDED_MASK = 0x20;
//public const int FREE_MASK = 0x40;
/// <summary>
/// Reserved bits
/// </summary>
public const int RESERVED_MASK = 0x60;
/// <summary>
/// Block compression: if table is full and compression rate is dropping,
/// clear the dictionary.
/// </summary>
public const int BLOCK_MODE_MASK = 0x80;
/// <summary>
/// LZW file header size (in bytes)
/// </summary>
public const int HDR_SIZE = 3;
/// <summary>
/// Initial number of bits per code
/// </summary>
public const int INIT_BITS = 9;
private LzwConstants()
{
}
}
}
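A small sketch (not part of this commit) showing how the constants above decode the three LZW header bytes; the input bytes are hypothetical (0x1f 0x9d 0x90 is the typical header written by compress).
using System;
using ICSharpCode.SharpZipLib.Lzw;

internal static class LzwHeaderSketch
{
    public static void Describe(byte[] header)
    {
        if (header == null || header.Length < LzwConstants.HDR_SIZE)
            throw new ArgumentException("need at least HDR_SIZE bytes", nameof(header));

        bool magicOk = header[0] == (LzwConstants.MAGIC >> 8)
                    && header[1] == (LzwConstants.MAGIC & 0xff);
        int maxBits = header[2] & LzwConstants.BIT_MASK;                 // 9..16 expected
        bool blockMode = (header[2] & LzwConstants.BLOCK_MODE_MASK) != 0;

        Console.WriteLine($"magic ok: {magicOk}, max bits: {maxBits} (limit {LzwConstants.MAX_BITS}), block mode: {blockMode}");
    }
}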

View File

@@ -0,0 +1,54 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib.Lzw
{
/// <summary>
/// LzwException represents exceptions specific to LZW classes and code.
/// </summary>
[Serializable]
public class LzwException : SharpZipBaseException
{
/// <summary>
/// Initialise a new instance of <see cref="LzwException" />.
/// </summary>
public LzwException()
{
}
/// <summary>
/// Initialise a new instance of <see cref="LzwException" /> with its message string.
/// </summary>
/// <param name="message">A <see cref="string"/> that describes the error.</param>
public LzwException(string message)
: base(message)
{
}
/// <summary>
/// Initialise a new instance of <see cref="LzwException" />.
/// </summary>
/// <param name="message">A <see cref="string"/> that describes the error.</param>
/// <param name="innerException">The <see cref="Exception"/> that caused this exception.</param>
public LzwException(string message, Exception innerException)
: base(message, innerException)
{
}
/// <summary>
/// Initializes a new instance of the LzwException class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected LzwException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

View File

@@ -0,0 +1,572 @@
using System;
using System.IO;
namespace ICSharpCode.SharpZipLib.Lzw
{
/// <summary>
/// This filter stream is used to decompress a LZW format stream.
/// Specifically, a stream that uses the LZC compression method.
/// This file format is usually associated with the .Z file extension.
///
/// See http://en.wikipedia.org/wiki/Compress
/// See http://wiki.wxwidgets.org/Development:_Z_File_Format
///
/// The file header consists of 3 (or optionally 4) bytes. The first two bytes
/// contain the magic marker "0x1f 0x9d", followed by a byte of flags.
///
/// Based on Java code by Ronald Tschalar, which in turn was based on the unlzw.c
/// code in the gzip package.
/// </summary>
/// <example> This sample shows how to unzip a compressed file
/// <code>
/// using System;
/// using System.IO;
///
/// using ICSharpCode.SharpZipLib.Core;
/// using ICSharpCode.SharpZipLib.LZW;
///
/// class MainClass
/// {
/// public static void Main(string[] args)
/// {
/// using (Stream inStream = new LzwInputStream(File.OpenRead(args[0])))
/// using (FileStream outStream = File.Create(Path.GetFileNameWithoutExtension(args[0]))) {
/// byte[] buffer = new byte[4096];
/// StreamUtils.Copy(inStream, outStream, buffer);
/// // OR
/// inStream.Read(buffer, 0, buffer.Length);
/// // now do something with the buffer
/// }
/// }
/// }
/// </code>
/// </example>
public class LzwInputStream : Stream
{
/// <summary>
/// Gets or sets a flag indicating ownership of underlying stream.
/// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.
/// </summary>
/// <remarks>The default value is true.</remarks>
public bool IsStreamOwner { get; set; } = true;
/// <summary>
/// Creates a LzwInputStream
/// </summary>
/// <param name="baseInputStream">
/// The stream to read compressed data from (in LZW format)
/// </param>
public LzwInputStream(Stream baseInputStream)
{
this.baseInputStream = baseInputStream;
}
/// <summary>
/// See <see cref="System.IO.Stream.ReadByte"/>
/// </summary>
/// <returns></returns>
public override int ReadByte()
{
int b = Read(one, 0, 1);
if (b == 1)
return (one[0] & 0xff);
return -1;
}
/// <summary>
/// Reads decompressed data into the provided buffer byte array
/// </summary>
/// <param name ="buffer">
/// The array to read and decompress data into
/// </param>
/// <param name ="offset">
/// The offset indicating where the data should be placed
/// </param>
/// <param name ="count">
/// The number of bytes to decompress
/// </param>
/// <returns>The number of bytes read. Zero signals the end of stream</returns>
public override int Read(byte[] buffer, int offset, int count)
{
if (!headerParsed)
ParseHeader();
if (eof)
return 0;
int start = offset;
/* Using local copies of various variables speeds things up by as
* much as 30% in Java! Performance not tested in C#.
*/
int[] lTabPrefix = tabPrefix;
byte[] lTabSuffix = tabSuffix;
byte[] lStack = stack;
int lNBits = nBits;
int lMaxCode = maxCode;
int lMaxMaxCode = maxMaxCode;
int lBitMask = bitMask;
int lOldCode = oldCode;
byte lFinChar = finChar;
int lStackP = stackP;
int lFreeEnt = freeEnt;
byte[] lData = data;
int lBitPos = bitPos;
// empty stack if stuff still left
int sSize = lStack.Length - lStackP;
if (sSize > 0)
{
int num = (sSize >= count) ? count : sSize;
Array.Copy(lStack, lStackP, buffer, offset, num);
offset += num;
count -= num;
lStackP += num;
}
if (count == 0)
{
stackP = lStackP;
return offset - start;
}
// loop, filling local buffer until enough data has been decompressed
MainLoop:
do
{
if (end < EXTRA)
{
Fill();
}
int bitIn = (got > 0) ? (end - end % lNBits) << 3 :
(end << 3) - (lNBits - 1);
while (lBitPos < bitIn)
{
#region A
// handle 1-byte reads correctly
if (count == 0)
{
nBits = lNBits;
maxCode = lMaxCode;
maxMaxCode = lMaxMaxCode;
bitMask = lBitMask;
oldCode = lOldCode;
finChar = lFinChar;
stackP = lStackP;
freeEnt = lFreeEnt;
bitPos = lBitPos;
return offset - start;
}
// check for code-width expansion
if (lFreeEnt > lMaxCode)
{
int nBytes = lNBits << 3;
lBitPos = (lBitPos - 1) +
nBytes - (lBitPos - 1 + nBytes) % nBytes;
lNBits++;
lMaxCode = (lNBits == maxBits) ? lMaxMaxCode :
(1 << lNBits) - 1;
lBitMask = (1 << lNBits) - 1;
lBitPos = ResetBuf(lBitPos);
goto MainLoop;
}
#endregion A
#region B
// read next code
int pos = lBitPos >> 3;
int code = (((lData[pos] & 0xFF) |
((lData[pos + 1] & 0xFF) << 8) |
((lData[pos + 2] & 0xFF) << 16)) >>
(lBitPos & 0x7)) & lBitMask;
lBitPos += lNBits;
// handle first iteration
if (lOldCode == -1)
{
if (code >= 256)
throw new LzwException("corrupt input: " + code + " > 255");
lFinChar = (byte)(lOldCode = code);
buffer[offset++] = lFinChar;
count--;
continue;
}
// handle CLEAR code
if (code == TBL_CLEAR && blockMode)
{
Array.Copy(zeros, 0, lTabPrefix, 0, zeros.Length);
lFreeEnt = TBL_FIRST - 1;
int nBytes = lNBits << 3;
lBitPos = (lBitPos - 1) + nBytes - (lBitPos - 1 + nBytes) % nBytes;
lNBits = LzwConstants.INIT_BITS;
lMaxCode = (1 << lNBits) - 1;
lBitMask = lMaxCode;
// Code tables reset
lBitPos = ResetBuf(lBitPos);
goto MainLoop;
}
#endregion B
#region C
// setup
int inCode = code;
lStackP = lStack.Length;
// Handle KwK case
if (code >= lFreeEnt)
{
if (code > lFreeEnt)
{
throw new LzwException("corrupt input: code=" + code +
", freeEnt=" + lFreeEnt);
}
lStack[--lStackP] = lFinChar;
code = lOldCode;
}
// Generate output characters in reverse order
while (code >= 256)
{
lStack[--lStackP] = lTabSuffix[code];
code = lTabPrefix[code];
}
lFinChar = lTabSuffix[code];
buffer[offset++] = lFinChar;
count--;
// And put them out in forward order
sSize = lStack.Length - lStackP;
int num = (sSize >= count) ? count : sSize;
Array.Copy(lStack, lStackP, buffer, offset, num);
offset += num;
count -= num;
lStackP += num;
#endregion C
#region D
// generate new entry in table
if (lFreeEnt < lMaxMaxCode)
{
lTabPrefix[lFreeEnt] = lOldCode;
lTabSuffix[lFreeEnt] = lFinChar;
lFreeEnt++;
}
// Remember previous code
lOldCode = inCode;
// if output buffer full, then return
if (count == 0)
{
nBits = lNBits;
maxCode = lMaxCode;
bitMask = lBitMask;
oldCode = lOldCode;
finChar = lFinChar;
stackP = lStackP;
freeEnt = lFreeEnt;
bitPos = lBitPos;
return offset - start;
}
#endregion D
} // while
lBitPos = ResetBuf(lBitPos);
} while (got > 0); // do..while
nBits = lNBits;
maxCode = lMaxCode;
bitMask = lBitMask;
oldCode = lOldCode;
finChar = lFinChar;
stackP = lStackP;
freeEnt = lFreeEnt;
bitPos = lBitPos;
eof = true;
return offset - start;
}
/// <summary>
/// Moves the unread data in the buffer to the beginning and resets
/// the pointers.
/// </summary>
/// <param name="bitPosition"></param>
/// <returns></returns>
private int ResetBuf(int bitPosition)
{
int pos = bitPosition >> 3;
Array.Copy(data, pos, data, 0, end - pos);
end -= pos;
return 0;
}
private void Fill()
{
got = baseInputStream.Read(data, end, data.Length - 1 - end);
if (got > 0)
{
end += got;
}
}
private void ParseHeader()
{
headerParsed = true;
byte[] hdr = new byte[LzwConstants.HDR_SIZE];
int result = baseInputStream.Read(hdr, 0, hdr.Length);
// Check the magic marker
if (result < 0)
throw new LzwException("Failed to read LZW header");
if (hdr[0] != (LzwConstants.MAGIC >> 8) || hdr[1] != (LzwConstants.MAGIC & 0xff))
{
throw new LzwException(String.Format(
"Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}",
hdr[0], hdr[1]));
}
// Check the 3rd header byte
blockMode = (hdr[2] & LzwConstants.BLOCK_MODE_MASK) > 0;
maxBits = hdr[2] & LzwConstants.BIT_MASK;
if (maxBits > LzwConstants.MAX_BITS)
{
throw new LzwException("Stream compressed with " + maxBits +
" bits, but decompression can only handle " +
LzwConstants.MAX_BITS + " bits.");
}
if ((hdr[2] & LzwConstants.RESERVED_MASK) > 0)
{
throw new LzwException("Unsupported bits set in the header.");
}
// Initialize variables
maxMaxCode = 1 << maxBits;
nBits = LzwConstants.INIT_BITS;
maxCode = (1 << nBits) - 1;
bitMask = maxCode;
oldCode = -1;
finChar = 0;
freeEnt = blockMode ? TBL_FIRST : 256;
tabPrefix = new int[1 << maxBits];
tabSuffix = new byte[1 << maxBits];
stack = new byte[1 << maxBits];
stackP = stack.Length;
for (int idx = 255; idx >= 0; idx--)
tabSuffix[idx] = (byte)idx;
}
#region Stream Overrides
/// <summary>
/// Gets a value indicating whether the current stream supports reading
/// </summary>
public override bool CanRead
{
get
{
return baseInputStream.CanRead;
}
}
/// <summary>
/// Gets a value of false indicating seeking is not supported for this stream.
/// </summary>
public override bool CanSeek
{
get
{
return false;
}
}
/// <summary>
/// Gets a value of false indicating that this stream is not writeable.
/// </summary>
public override bool CanWrite
{
get
{
return false;
}
}
/// <summary>
/// A value representing the length of the stream in bytes.
/// </summary>
public override long Length
{
get
{
return got;
}
}
/// <summary>
/// The current position within the stream.
/// Throws a NotSupportedException when attempting to set the position
/// </summary>
/// <exception cref="NotSupportedException">Attempting to set the position</exception>
public override long Position
{
get
{
return baseInputStream.Position;
}
set
{
throw new NotSupportedException("InflaterInputStream Position not supported");
}
}
/// <summary>
/// Flushes the baseInputStream
/// </summary>
public override void Flush()
{
baseInputStream.Flush();
}
/// <summary>
/// Sets the position within the current stream
/// Always throws a NotSupportedException
/// </summary>
/// <param name="offset">The relative offset to seek to.</param>
/// <param name="origin">The <see cref="SeekOrigin"/> defining where to seek from.</param>
/// <returns>The new position in the stream.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException("Seek not supported");
}
/// <summary>
/// Set the length of the current stream
/// Always throws a NotSupportedException
/// </summary>
/// <param name="value">The new length value for the stream.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void SetLength(long value)
{
throw new NotSupportedException("InflaterInputStream SetLength not supported");
}
/// <summary>
/// Writes a sequence of bytes to stream and advances the current position
/// This method always throws a NotSupportedException
/// </summary>
/// <param name="buffer">The buffer containing data to write.</param>
/// <param name="offset">The offset of the first byte to write.</param>
/// <param name="count">The number of bytes to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException("InflaterInputStream Write not supported");
}
/// <summary>
/// Writes one byte to the current stream and advances the current position
/// Always throws a NotSupportedException
/// </summary>
/// <param name="value">The byte to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void WriteByte(byte value)
{
throw new NotSupportedException("InflaterInputStream WriteByte not supported");
}
/// <summary>
/// Closes the input stream. When <see cref="IsStreamOwner"></see>
/// is true the underlying stream is also closed.
/// </summary>
protected override void Dispose(bool disposing)
{
if (!isClosed)
{
isClosed = true;
if (IsStreamOwner)
{
baseInputStream.Dispose();
}
}
}
#endregion Stream Overrides
#region Instance Fields
private Stream baseInputStream;
/// <summary>
/// Flag indicating whether this instance has been closed or not.
/// </summary>
private bool isClosed;
private readonly byte[] one = new byte[1];
private bool headerParsed;
// string table stuff
private const int TBL_CLEAR = 0x100;
private const int TBL_FIRST = TBL_CLEAR + 1;
private int[] tabPrefix;
private byte[] tabSuffix;
private readonly int[] zeros = new int[256];
private byte[] stack;
// various state
private bool blockMode;
private int nBits;
private int maxBits;
private int maxMaxCode;
private int maxCode;
private int bitMask;
private int oldCode;
private byte finChar;
private int stackP;
private int freeEnt;
// input buffer
private readonly byte[] data = new byte[1024 * 8];
private int bitPos;
private int end;
private int got;
private bool eof;
private const int EXTRA = 64;
#endregion Instance Fields
}
}
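Another usage sketch (not part of this commit), complementing the example in the class documentation: Read returns 0 at end of stream, so a simple loop can drain a .Z file into memory.
using System.IO;
using ICSharpCode.SharpZipLib.Lzw;

internal static class LzwReadLoopSketch
{
    public static byte[] Decompress(string dotZPath)
    {
        using (var lzw = new LzwInputStream(File.OpenRead(dotZPath)))
        using (var result = new MemoryStream())
        {
            byte[] buffer = new byte[4096];
            int read;
            // Read returns 0 (not -1) once the compressed stream is exhausted.
            while ((read = lzw.Read(buffer, 0, buffer.Length)) > 0)
            {
                result.Write(buffer, 0, read);
            }
            return result.ToArray();
        }
    }
}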

View File

@@ -0,0 +1,55 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib.Tar
{
/// <summary>
/// This exception is used to indicate that there is a problem
/// with a TAR archive header.
/// </summary>
[Serializable]
public class InvalidHeaderException : TarException
{
/// <summary>
/// Initialise a new instance of the InvalidHeaderException class.
/// </summary>
public InvalidHeaderException()
{
}
/// <summary>
/// Initialises a new instance of the InvalidHeaderException class with a specified message.
/// </summary>
/// <param name="message">Message describing the exception cause.</param>
public InvalidHeaderException(string message)
: base(message)
{
}
/// <summary>
/// Initialise a new instance of InvalidHeaderException
/// </summary>
/// <param name="message">Message describing the problem.</param>
/// <param name="exception">The exception that is the cause of the current exception.</param>
public InvalidHeaderException(string message, Exception exception)
: base(message, exception)
{
}
/// <summary>
/// Initializes a new instance of the InvalidHeaderException class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected InvalidHeaderException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,599 @@
using System;
using System.IO;
namespace ICSharpCode.SharpZipLib.Tar
{
/// <summary>
/// The TarBuffer class implements the tar archive concept
/// of a buffered input stream. This concept goes back to the
/// days of blocked tape drives and special io devices. In the
/// C# universe, the only real function that this class
/// performs is to ensure that files have the correct "record"
/// size, or other tars will complain.
/// <p>
/// You should never have a need to access this class directly.
/// TarBuffers are created by Tar IO Streams.
/// </p>
/// </summary>
public class TarBuffer
{
/* A quote from GNU tar man file on blocking and records
A `tar' archive file contains a series of blocks. Each block
contains `BLOCKSIZE' bytes. Although this format may be thought of as
being on magnetic tape, other media are often used.
Each file archived is represented by a header block which describes
the file, followed by zero or more blocks which give the contents of
the file. At the end of the archive file there may be a block filled
with binary zeros as an end-of-file marker. A reasonable system should
write a block of zeros at the end, but must not assume that such a
block exists when reading an archive.
The blocks may be "blocked" for physical I/O operations. Each
record of N blocks is written with a single 'write ()'
operation. On magnetic tapes, the result of such a write is a single
record. When writing an archive, the last record of blocks should be
written at the full size, with blocks after the zero block containing
all zeros. When reading an archive, a reasonable system should
properly handle an archive whose last record is shorter than the rest,
or which contains garbage records after a zero block.
*/
#region Constants
/// <summary>
/// The size of a block in a tar archive in bytes.
/// </summary>
/// <remarks>This is 512 bytes.</remarks>
public const int BlockSize = 512;
/// <summary>
/// The number of blocks in a default record.
/// </summary>
/// <remarks>
/// The default value is 20 blocks per record.
/// </remarks>
public const int DefaultBlockFactor = 20;
/// <summary>
/// The size in bytes of a default record.
/// </summary>
/// <remarks>
/// The default size is 10KB.
/// </remarks>
public const int DefaultRecordSize = BlockSize * DefaultBlockFactor;
#endregion Constants
/// <summary>
/// Get the record size for this buffer
/// </summary>
/// <value>The record size in bytes.
/// This is equal to the <see cref="BlockFactor"/> multiplied by the <see cref="BlockSize"/></value>
public int RecordSize
{
get
{
return recordSize;
}
}
/// <summary>
/// Get the TAR Buffer's record size.
/// </summary>
/// <returns>The record size in bytes.
/// This is equal to the <see cref="BlockFactor"/> multiplied by the <see cref="BlockSize"/></returns>
[Obsolete("Use RecordSize property instead")]
public int GetRecordSize()
{
return recordSize;
}
/// <summary>
/// Get the Blocking factor for the buffer
/// </summary>
/// <value>This is the number of blocks in each record.</value>
public int BlockFactor
{
get
{
return blockFactor;
}
}
/// <summary>
/// Get the TAR Buffer's block factor
/// </summary>
/// <returns>The block factor; the number of blocks per record.</returns>
[Obsolete("Use BlockFactor property instead")]
public int GetBlockFactor()
{
return blockFactor;
}
/// <summary>
/// Construct a default TarBuffer
/// </summary>
protected TarBuffer()
{
}
/// <summary>
/// Create TarBuffer for reading with default BlockFactor
/// </summary>
/// <param name="inputStream">Stream to buffer</param>
/// <returns>A new <see cref="TarBuffer"/> suitable for input.</returns>
public static TarBuffer CreateInputTarBuffer(Stream inputStream)
{
if (inputStream == null)
{
throw new ArgumentNullException(nameof(inputStream));
}
return CreateInputTarBuffer(inputStream, DefaultBlockFactor);
}
/// <summary>
/// Construct TarBuffer for reading inputStream setting BlockFactor
/// </summary>
/// <param name="inputStream">Stream to buffer</param>
/// <param name="blockFactor">Blocking factor to apply</param>
/// <returns>A new <see cref="TarBuffer"/> suitable for input.</returns>
public static TarBuffer CreateInputTarBuffer(Stream inputStream, int blockFactor)
{
if (inputStream == null)
{
throw new ArgumentNullException(nameof(inputStream));
}
if (blockFactor <= 0)
{
throw new ArgumentOutOfRangeException(nameof(blockFactor), "Block factor must be positive");
}
var tarBuffer = new TarBuffer();
tarBuffer.inputStream = inputStream;
tarBuffer.outputStream = null;
tarBuffer.Initialize(blockFactor);
return tarBuffer;
}
/// <summary>
/// Construct TarBuffer for writing with default BlockFactor
/// </summary>
/// <param name="outputStream">output stream for buffer</param>
/// <returns>A new <see cref="TarBuffer"/> suitable for output.</returns>
public static TarBuffer CreateOutputTarBuffer(Stream outputStream)
{
if (outputStream == null)
{
throw new ArgumentNullException(nameof(outputStream));
}
return CreateOutputTarBuffer(outputStream, DefaultBlockFactor);
}
/// <summary>
/// Construct TarBuffer for writing Tar output to streams.
/// </summary>
/// <param name="outputStream">Output stream to write to.</param>
/// <param name="blockFactor">Blocking factor to apply</param>
/// <returns>A new <see cref="TarBuffer"/> suitable for output.</returns>
public static TarBuffer CreateOutputTarBuffer(Stream outputStream, int blockFactor)
{
if (outputStream == null)
{
throw new ArgumentNullException(nameof(outputStream));
}
if (blockFactor <= 0)
{
throw new ArgumentOutOfRangeException(nameof(blockFactor), "Block factor must be positive");
}
var tarBuffer = new TarBuffer();
tarBuffer.inputStream = null;
tarBuffer.outputStream = outputStream;
tarBuffer.Initialize(blockFactor);
return tarBuffer;
}
/// <summary>
/// Initialization common to all constructors.
/// </summary>
private void Initialize(int archiveBlockFactor)
{
blockFactor = archiveBlockFactor;
recordSize = archiveBlockFactor * BlockSize;
recordBuffer = new byte[RecordSize];
if (inputStream != null)
{
currentRecordIndex = -1;
currentBlockIndex = BlockFactor;
}
else
{
currentRecordIndex = 0;
currentBlockIndex = 0;
}
}
/// <summary>
/// Determine if an archive block indicates End of Archive. End of
/// archive is indicated by a block that consists entirely of null bytes.
/// All remaining blocks for the record should also be null.
/// However, some older tars only write a couple of null blocks (Old GNU tar, for one)
/// and may also produce partial records.
/// </summary>
/// <param name = "block">The data block to check.</param>
/// <returns>Returns true if the block is an EOF block; false otherwise.</returns>
[Obsolete("Use IsEndOfArchiveBlock instead")]
public bool IsEOFBlock(byte[] block)
{
if (block == null)
{
throw new ArgumentNullException(nameof(block));
}
if (block.Length != BlockSize)
{
throw new ArgumentException("block length is invalid");
}
for (int i = 0; i < BlockSize; ++i)
{
if (block[i] != 0)
{
return false;
}
}
return true;
}
/// <summary>
/// Determine if an archive block indicates the End of an Archive has been reached.
/// End of archive is indicated by a block that consists entirely of null bytes.
/// All remaining blocks for the record should also be null.
/// However, some older tars only write a couple of null blocks (Old GNU tar, for one)
/// and may also produce partial records.
/// </summary>
/// <param name = "block">The data block to check.</param>
/// <returns>Returns true if the block is an EOF block; false otherwise.</returns>
public static bool IsEndOfArchiveBlock(byte[] block)
{
if (block == null)
{
throw new ArgumentNullException(nameof(block));
}
if (block.Length != BlockSize)
{
throw new ArgumentException("block length is invalid");
}
for (int i = 0; i < BlockSize; ++i)
{
if (block[i] != 0)
{
return false;
}
}
return true;
}
/// <summary>
/// Skip over a block on the input stream.
/// </summary>
public void SkipBlock()
{
if (inputStream == null)
{
throw new TarException("no input stream defined");
}
if (currentBlockIndex >= BlockFactor)
{
if (!ReadRecord())
{
throw new TarException("Failed to read a record");
}
}
currentBlockIndex++;
}
/// <summary>
/// Read a block from the input stream.
/// </summary>
/// <returns>
/// The block of data read.
/// </returns>
public byte[] ReadBlock()
{
if (inputStream == null)
{
throw new TarException("TarBuffer.ReadBlock - no input stream defined");
}
if (currentBlockIndex >= BlockFactor)
{
if (!ReadRecord())
{
throw new TarException("Failed to read a record");
}
}
byte[] result = new byte[BlockSize];
Array.Copy(recordBuffer, (currentBlockIndex * BlockSize), result, 0, BlockSize);
currentBlockIndex++;
return result;
}
/// <summary>
/// Read a record from data stream.
/// </summary>
/// <returns>
/// false if End-Of-File, else true.
/// </returns>
private bool ReadRecord()
{
if (inputStream == null)
{
throw new TarException("no input stream defined");
}
currentBlockIndex = 0;
int offset = 0;
int bytesNeeded = RecordSize;
while (bytesNeeded > 0)
{
long numBytes = inputStream.Read(recordBuffer, offset, bytesNeeded);
//
// NOTE
// We have found EOF, and the record is not full!
//
// This is a broken archive. It does not follow the standard
// blocking algorithm. However, because we are generous, and
// it requires little effort, we will simply ignore the error
// and continue as if the entire record were read. This does
// not appear to break anything upstream. We used to return
// false in this case.
//
// Thanks to 'Yohann.Roussel@alcatel.fr' for this fix.
//
if (numBytes <= 0)
{
break;
}
offset += (int)numBytes;
bytesNeeded -= (int)numBytes;
}
currentRecordIndex++;
return true;
}
/// <summary>
/// Get the current block number, within the current record, zero based.
/// </summary>
/// <remarks>Block numbers are zero based values</remarks>
/// <seealso cref="RecordSize"/>
public int CurrentBlock
{
get { return currentBlockIndex; }
}
/// <summary>
/// Gets or sets a flag indicating ownership of underlying stream.
/// When the flag is true <see cref="Close" /> will close the underlying stream also.
/// </summary>
/// <remarks>The default value is true.</remarks>
public bool IsStreamOwner { get; set; } = true;
/// <summary>
/// Get the current block number, within the current record, zero based.
/// </summary>
/// <returns>
/// The current zero based block number.
/// </returns>
/// <remarks>
/// The absolute block number = (<see cref="GetCurrentRecordNum">record number</see> * <see cref="BlockFactor">block factor</see>) + <see cref="GetCurrentBlockNum">block number</see>.
/// </remarks>
[Obsolete("Use CurrentBlock property instead")]
public int GetCurrentBlockNum()
{
return currentBlockIndex;
}
/// <summary>
/// Get the current record number.
/// </summary>
/// <returns>
/// The current zero based record number.
/// </returns>
public int CurrentRecord
{
get { return currentRecordIndex; }
}
/// <summary>
/// Get the current record number.
/// </summary>
/// <returns>
/// The current zero based record number.
/// </returns>
[Obsolete("Use CurrentRecord property instead")]
public int GetCurrentRecordNum()
{
return currentRecordIndex;
}
/// <summary>
/// Write a block of data to the archive.
/// </summary>
/// <param name="block">
/// The data to write to the archive.
/// </param>
public void WriteBlock(byte[] block)
{
if (block == null)
{
throw new ArgumentNullException(nameof(block));
}
if (outputStream == null)
{
throw new TarException("TarBuffer.WriteBlock - no output stream defined");
}
if (block.Length != BlockSize)
{
string errorText = string.Format("TarBuffer.WriteBlock - block to write has length '{0}' which is not the block size of '{1}'",
block.Length, BlockSize);
throw new TarException(errorText);
}
if (currentBlockIndex >= BlockFactor)
{
WriteRecord();
}
Array.Copy(block, 0, recordBuffer, (currentBlockIndex * BlockSize), BlockSize);
currentBlockIndex++;
}
/// <summary>
/// Write an archive record to the archive, where the record may be
/// inside of a larger array buffer. The buffer must be "offset plus
/// record size" long.
/// </summary>
/// <param name="buffer">
/// The buffer containing the record data to write.
/// </param>
/// <param name="offset">
/// The offset of the record data within buffer.
/// </param>
public void WriteBlock(byte[] buffer, int offset)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (outputStream == null)
{
throw new TarException("TarBuffer.WriteBlock - no output stream defined");
}
if ((offset < 0) || (offset >= buffer.Length))
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if ((offset + BlockSize) > buffer.Length)
{
string errorText = string.Format("TarBuffer.WriteBlock - record has length '{0}' with offset '{1}' which is less than the record size of '{2}'",
buffer.Length, offset, recordSize);
throw new TarException(errorText);
}
if (currentBlockIndex >= BlockFactor)
{
WriteRecord();
}
Array.Copy(buffer, offset, recordBuffer, (currentBlockIndex * BlockSize), BlockSize);
currentBlockIndex++;
}
/// <summary>
/// Write a TarBuffer record to the archive.
/// </summary>
private void WriteRecord()
{
if (outputStream == null)
{
throw new TarException("TarBuffer.WriteRecord no output stream defined");
}
outputStream.Write(recordBuffer, 0, RecordSize);
outputStream.Flush();
currentBlockIndex = 0;
currentRecordIndex++;
}
/// <summary>
/// WriteFinalRecord writes the current record buffer to output if any unwritten data is present.
/// </summary>
/// <remarks>Any trailing bytes are set to zero which is by definition correct behaviour
/// for the end of a tar stream.</remarks>
private void WriteFinalRecord()
{
if (outputStream == null)
{
throw new TarException("TarBuffer.WriteFinalRecord no output stream defined");
}
if (currentBlockIndex > 0)
{
int dataBytes = currentBlockIndex * BlockSize;
Array.Clear(recordBuffer, dataBytes, RecordSize - dataBytes);
WriteRecord();
}
outputStream.Flush();
}
/// <summary>
/// Close the TarBuffer. If this is an output buffer, also flush the
/// current block before closing.
/// </summary>
public void Close()
{
if (outputStream != null)
{
WriteFinalRecord();
if (IsStreamOwner)
{
outputStream.Dispose();
}
outputStream = null;
}
else if (inputStream != null)
{
if (IsStreamOwner)
{
inputStream.Dispose();
}
inputStream = null;
}
}
#region Instance Fields
private Stream inputStream;
private Stream outputStream;
private byte[] recordBuffer;
private int currentBlockIndex;
private int currentRecordIndex;
private int recordSize = DefaultRecordSize;
private int blockFactor = DefaultBlockFactor;
#endregion Instance Fields
}
}
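A reading sketch (not part of this commit): with the default block factor of 20 a record is 20 * 512 = 10,240 bytes, and ReadBlock hands the record back one 512-byte block at a time. The loop below assumes a well-formed archive terminated by zero-filled blocks.
using System.IO;
using ICSharpCode.SharpZipLib.Tar;

internal static class TarBufferSketch
{
    public static int CountDataBlocks(Stream tarStream)
    {
        TarBuffer buffer = TarBuffer.CreateInputTarBuffer(tarStream, TarBuffer.DefaultBlockFactor);
        int blocks = 0;
        while (true)
        {
            byte[] block = buffer.ReadBlock();           // always BlockSize (512) bytes
            if (TarBuffer.IsEndOfArchiveBlock(block))
            {
                break;                                   // all-zero block marks end of archive
            }
            blocks++;
        }
        buffer.Close();                                  // disposes tarStream since IsStreamOwner defaults to true
        return blocks;
    }
}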

View File

@@ -0,0 +1,598 @@
using System;
using System.IO;
using System.Text;
using ICSharpCode.SharpZipLib.Core;
namespace ICSharpCode.SharpZipLib.Tar
{
/// <summary>
/// This class represents an entry in a Tar archive. It consists
/// of the entry's header, as well as the entry's File. Entries
/// can be instantiated in one of three ways, depending on how
/// they are to be used.
/// <p>
/// TarEntries that are created from the header bytes read from
/// an archive are instantiated with the TarEntry( byte[] )
/// constructor. These entries will be used when extracting from
/// or listing the contents of an archive. These entries have their
/// header filled in using the header bytes. They also set the File
/// to null, since they reference an archive entry not a file.</p>
/// <p>
/// TarEntries that are created from files that are to be written
/// into an archive are instantiated with the CreateEntryFromFile(string)
/// pseudo constructor. These entries have their header filled in using
/// the File's information. They also keep a reference to the File
/// for convenience when writing entries.</p>
/// <p>
/// Finally, TarEntries can be constructed from nothing but a name.
/// This allows the programmer to construct the entry by hand, for
/// instance when only an InputStream is available for writing to
/// the archive, and the header information is constructed from
/// other information. In this case the header fields are set to
/// defaults and the File is set to null.</p>
/// <see cref="TarHeader"/>
/// </summary>
public class TarEntry
{
#region Constructors
/// <summary>
/// Initialise a default instance of <see cref="TarEntry"/>.
/// </summary>
private TarEntry()
{
header = new TarHeader();
}
/// <summary>
/// Construct an entry from an archive's header bytes. File is set
/// to null.
/// </summary>
/// <param name = "headerBuffer">
/// The header bytes from a tar archive entry.
/// </param>
[Obsolete("No Encoding for Name field is specified, any non-ASCII bytes will be discarded")]
public TarEntry(byte[] headerBuffer) : this(headerBuffer, null)
{
}
/// <summary>
/// Construct an entry from an archive's header bytes. File is set
/// to null.
/// </summary>
/// <param name = "headerBuffer">
/// The header bytes from a tar archive entry.
/// </param>
/// <param name = "nameEncoding">
/// The <see cref="Encoding"/> used for the Name fields, or null for ASCII only
/// </param>
public TarEntry(byte[] headerBuffer, Encoding nameEncoding)
{
header = new TarHeader();
header.ParseBuffer(headerBuffer, nameEncoding);
}
/// <summary>
/// Construct a TarEntry using the <paramref name="header">header</paramref> provided
/// </summary>
/// <param name="header">Header details for entry</param>
public TarEntry(TarHeader header)
{
if (header == null)
{
throw new ArgumentNullException(nameof(header));
}
this.header = (TarHeader)header.Clone();
}
#endregion Constructors
#region ICloneable Members
/// <summary>
/// Clone this tar entry.
/// </summary>
/// <returns>Returns a clone of this entry.</returns>
public object Clone()
{
var entry = new TarEntry();
entry.file = file;
entry.header = (TarHeader)header.Clone();
entry.Name = Name;
return entry;
}
#endregion ICloneable Members
/// <summary>
/// Construct an entry with only a <paramref name="name">name</paramref>.
/// This allows the programmer to construct the entry's header "by hand".
/// </summary>
/// <param name="name">The name to use for the entry</param>
/// <returns>Returns the newly created <see cref="TarEntry"/></returns>
public static TarEntry CreateTarEntry(string name)
{
var entry = new TarEntry();
TarEntry.NameTarHeader(entry.header, name);
return entry;
}
/// <summary>
/// Construct an entry for a file. File is set to file, and the
/// header is constructed from information from the file.
/// </summary>
/// <param name = "fileName">The file name that the entry represents.</param>
/// <returns>Returns the newly created <see cref="TarEntry"/></returns>
public static TarEntry CreateEntryFromFile(string fileName)
{
var entry = new TarEntry();
entry.GetFileTarHeader(entry.header, fileName);
return entry;
}
/// <summary>
/// Determine if the two entries are equal. Equality is determined
/// by the header names being equal.
/// </summary>
/// <param name="obj">The <see cref="Object"/> to compare with the current Object.</param>
/// <returns>
/// True if the entries are equal; false if not.
/// </returns>
public override bool Equals(object obj)
{
var localEntry = obj as TarEntry;
if (localEntry != null)
{
return Name.Equals(localEntry.Name);
}
return false;
}
/// <summary>
/// Derive a Hash value for the current <see cref="Object"/>
/// </summary>
/// <returns>A Hash code for the current <see cref="Object"/></returns>
public override int GetHashCode()
{
return Name.GetHashCode();
}
/// <summary>
/// Determine if the given entry is a descendant of this entry.
/// Descendancy is determined by the name of the descendant
/// starting with this entry's name.
/// </summary>
/// <param name = "toTest">
/// Entry to be checked as a descendant of this.
/// </param>
/// <returns>
/// True if entry is a descendant of this.
/// </returns>
public bool IsDescendent(TarEntry toTest)
{
if (toTest == null)
{
throw new ArgumentNullException(nameof(toTest));
}
return toTest.Name.StartsWith(Name, StringComparison.Ordinal);
}
/// <summary>
/// Get this entry's header.
/// </summary>
/// <returns>
/// This entry's TarHeader.
/// </returns>
public TarHeader TarHeader
{
get
{
return header;
}
}
/// <summary>
/// Get/Set this entry's name.
/// </summary>
public string Name
{
get
{
return header.Name;
}
set
{
header.Name = value;
}
}
/// <summary>
/// Get/set this entry's user id.
/// </summary>
public int UserId
{
get
{
return header.UserId;
}
set
{
header.UserId = value;
}
}
/// <summary>
/// Get/set this entry's group id.
/// </summary>
public int GroupId
{
get
{
return header.GroupId;
}
set
{
header.GroupId = value;
}
}
/// <summary>
/// Get/set this entry's user name.
/// </summary>
public string UserName
{
get
{
return header.UserName;
}
set
{
header.UserName = value;
}
}
/// <summary>
/// Get/set this entry's group name.
/// </summary>
public string GroupName
{
get
{
return header.GroupName;
}
set
{
header.GroupName = value;
}
}
/// <summary>
/// Convenience method to set this entry's group and user ids.
/// </summary>
/// <param name="userId">
/// This entry's new user id.
/// </param>
/// <param name="groupId">
/// This entry's new group id.
/// </param>
public void SetIds(int userId, int groupId)
{
UserId = userId;
GroupId = groupId;
}
/// <summary>
/// Convenience method to set this entry's group and user names.
/// </summary>
/// <param name="userName">
/// This entry's new user name.
/// </param>
/// <param name="groupName">
/// This entry's new group name.
/// </param>
public void SetNames(string userName, string groupName)
{
UserName = userName;
GroupName = groupName;
}
/// <summary>
/// Get/Set the modification time for this entry
/// </summary>
public DateTime ModTime
{
get
{
return header.ModTime;
}
set
{
header.ModTime = value;
}
}
/// <summary>
/// Get this entry's file.
/// </summary>
/// <returns>
/// This entry's file.
/// </returns>
public string File
{
get
{
return file;
}
}
/// <summary>
/// Get/set this entry's recorded file size.
/// </summary>
public long Size
{
get
{
return header.Size;
}
set
{
header.Size = value;
}
}
/// <summary>
/// Return true if this entry represents a directory, false otherwise
/// </summary>
/// <returns>
/// True if this entry is a directory.
/// </returns>
public bool IsDirectory
{
get
{
if (file != null)
{
return Directory.Exists(file);
}
if (header != null)
{
if ((header.TypeFlag == TarHeader.LF_DIR) || Name.EndsWith("/", StringComparison.Ordinal))
{
return true;
}
}
return false;
}
}
/// <summary>
/// Fill in a TarHeader with information from a File.
/// </summary>
/// <param name="header">
/// The TarHeader to fill in.
/// </param>
/// <param name="file">
/// The file from which to get the header information.
/// </param>
public void GetFileTarHeader(TarHeader header, string file)
{
if (header == null)
{
throw new ArgumentNullException(nameof(header));
}
if (file == null)
{
throw new ArgumentNullException(nameof(file));
}
this.file = file;
// bugfix from torhovl from #D forum:
string name = file;
// 23-Jan-2004 GnuTar allows device names in path where the name is not local to the current directory
if (name.IndexOf(Directory.GetCurrentDirectory(), StringComparison.Ordinal) == 0)
{
name = name.Substring(Directory.GetCurrentDirectory().Length);
}
/*
if (Path.DirectorySeparatorChar == '\\')
{
// check if the OS is Windows
// Strip off drive letters!
if (name.Length > 2)
{
char ch1 = name[0];
char ch2 = name[1];
if (ch2 == ':' && Char.IsLetter(ch1))
{
name = name.Substring(2);
}
}
}
*/
name = name.Replace(Path.DirectorySeparatorChar, '/');
// No absolute pathnames
// Windows (and Posix?) paths can start with UNC style "\\NetworkDrive\",
// so we loop on starting /'s.
while (name.StartsWith("/", StringComparison.Ordinal))
{
name = name.Substring(1);
}
header.LinkName = String.Empty;
header.Name = name;
if (Directory.Exists(file))
{
header.Mode = 1003; // Magic number for security access for a UNIX filesystem
header.TypeFlag = TarHeader.LF_DIR;
if ((header.Name.Length == 0) || header.Name[header.Name.Length - 1] != '/')
{
header.Name = header.Name + "/";
}
header.Size = 0;
}
else
{
header.Mode = 33216; // Magic number for security access for a UNIX filesystem
header.TypeFlag = TarHeader.LF_NORMAL;
header.Size = new FileInfo(file.Replace('/', Path.DirectorySeparatorChar)).Length;
}
header.ModTime = System.IO.File.GetLastWriteTime(file.Replace('/', Path.DirectorySeparatorChar)).ToUniversalTime();
header.DevMajor = 0;
header.DevMinor = 0;
}
/// <summary>
/// Get entries for all files present in this entry's directory.
/// If this entry doesn't represent a directory, zero entries are returned.
/// </summary>
/// <returns>
/// An array of TarEntry's for this entry's children.
/// </returns>
public TarEntry[] GetDirectoryEntries()
{
if ((file == null) || !Directory.Exists(file))
{
return Empty.Array<TarEntry>();
}
string[] list = Directory.GetFileSystemEntries(file);
TarEntry[] result = new TarEntry[list.Length];
for (int i = 0; i < list.Length; ++i)
{
result[i] = TarEntry.CreateEntryFromFile(list[i]);
}
return result;
}
/// <summary>
/// Write an entry's header information to a header buffer.
/// </summary>
/// <param name = "outBuffer">
/// The tar entry header buffer to fill in.
/// </param>
[Obsolete("No Encoding for Name field is specified, any non-ASCII bytes will be discarded")]
public void WriteEntryHeader(byte[] outBuffer)
{
WriteEntryHeader(outBuffer, null);
}
/// <summary>
/// Write an entry's header information to a header buffer.
/// </summary>
/// <param name = "outBuffer">
/// The tar entry header buffer to fill in.
/// </param>
/// <param name = "nameEncoding">
/// The <see cref="Encoding"/> used for the Name fields, or null for ASCII only
/// </param>
public void WriteEntryHeader(byte[] outBuffer, Encoding nameEncoding)
{
header.WriteHeader(outBuffer, nameEncoding);
}
/// <summary>
/// Convenience method that will modify an entry's name directly
/// in place in an entry header buffer byte array.
/// </summary>
/// <param name="buffer">
/// The buffer containing the entry header to modify.
/// </param>
/// <param name="newName">
/// The new name to place into the header buffer.
/// </param>
[Obsolete("No Encoding for Name field is specified, any non-ASCII bytes will be discarded")]
static public void AdjustEntryName(byte[] buffer, string newName)
{
AdjustEntryName(buffer, newName, null);
}
/// <summary>
/// Convenience method that will modify an entry's name directly
/// in place in an entry header buffer byte array.
/// </summary>
/// <param name="buffer">
/// The buffer containing the entry header to modify.
/// </param>
/// <param name="newName">
/// The new name to place into the header buffer.
/// </param>
/// <param name="nameEncoding">
/// The <see cref="Encoding"/> used for the Name fields, or null for ASCII only
/// </param>
static public void AdjustEntryName(byte[] buffer, string newName, Encoding nameEncoding)
{
TarHeader.GetNameBytes(newName, buffer, 0, TarHeader.NAMELEN, nameEncoding);
}
/// <summary>
/// Fill in a TarHeader given only the entry's name.
/// </summary>
/// <param name="header">
/// The TarHeader to fill in.
/// </param>
/// <param name="name">
/// The tar entry name.
/// </param>
static public void NameTarHeader(TarHeader header, string name)
{
if (header == null)
{
throw new ArgumentNullException(nameof(header));
}
if (name == null)
{
throw new ArgumentNullException(nameof(name));
}
bool isDir = name.EndsWith("/", StringComparison.Ordinal);
header.Name = name;
header.Mode = isDir ? 1003 : 33216;
header.UserId = 0;
header.GroupId = 0;
header.Size = 0;
header.ModTime = DateTime.UtcNow;
header.TypeFlag = isDir ? TarHeader.LF_DIR : TarHeader.LF_NORMAL;
header.LinkName = String.Empty;
header.UserName = String.Empty;
header.GroupName = String.Empty;
header.DevMajor = 0;
header.DevMinor = 0;
}
#region Instance Fields
/// <summary>
/// The name of the file this entry represents or null if the entry is not based on a file.
/// </summary>
private string file;
/// <summary>
/// The entry's header information.
/// </summary>
private TarHeader header;
#endregion Instance Fields
}
}
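A sketch (not part of this commit) of the two creation paths described above: from an existing file, and "by hand" from a name. The paths and names are invented for illustration; CreateEntryFromFile requires the file to exist.
using System;
using ICSharpCode.SharpZipLib.Tar;

internal static class TarEntrySketch
{
    public static void Demo()
    {
        // Header fields (size, modification time, type flag) are taken from the file itself.
        TarEntry fromFile = TarEntry.CreateEntryFromFile("data/report.txt");

        // Header fields get defaults; the trailing '/' marks the entry as a directory.
        TarEntry byName = TarEntry.CreateTarEntry("archive/subdir/");
        byName.ModTime = DateTime.UtcNow;
        byName.SetNames("builder", "builders");

        Console.WriteLine($"{fromFile.Name}: {fromFile.Size} bytes, dir={fromFile.IsDirectory}");
        Console.WriteLine($"{byName.Name}: dir={byName.IsDirectory}");
    }
}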

View File

@@ -0,0 +1,54 @@
using System;
using System.Runtime.Serialization;
namespace ICSharpCode.SharpZipLib.Tar
{
/// <summary>
/// TarException represents exceptions specific to Tar classes and code.
/// </summary>
[Serializable]
public class TarException : SharpZipBaseException
{
/// <summary>
/// Initialise a new instance of <see cref="TarException" />.
/// </summary>
public TarException()
{
}
/// <summary>
/// Initialise a new instance of <see cref="TarException" /> with its message string.
/// </summary>
/// <param name="message">A <see cref="string"/> that describes the error.</param>
public TarException(string message)
: base(message)
{
}
/// <summary>
/// Initialise a new instance of <see cref="TarException" />.
/// </summary>
/// <param name="message">A <see cref="string"/> that describes the error.</param>
/// <param name="innerException">The <see cref="Exception"/> that caused this exception.</param>
public TarException(string message, Exception innerException)
: base(message, innerException)
{
}
/// <summary>
/// Initializes a new instance of the TarException class with serialized data.
/// </summary>
/// <param name="info">
/// The System.Runtime.Serialization.SerializationInfo that holds the serialized
/// object data about the exception being thrown.
/// </param>
/// <param name="context">
/// The System.Runtime.Serialization.StreamingContext that contains contextual information
/// about the source or destination.
/// </param>
protected TarException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
}

View File

@@ -0,0 +1,99 @@
using System.Collections.Generic;
using System.Text;
namespace ICSharpCode.SharpZipLib.Tar
{
/// <summary>
/// Reads the extended header of a Tar stream
/// </summary>
public class TarExtendedHeaderReader
{
private const byte LENGTH = 0;
private const byte KEY = 1;
private const byte VALUE = 2;
private const byte END = 3;
private readonly Dictionary<string, string> headers = new Dictionary<string, string>();
private string[] headerParts = new string[3];
private int bbIndex;
private byte[] byteBuffer;
private char[] charBuffer;
private readonly StringBuilder sb = new StringBuilder();
private readonly Decoder decoder = Encoding.UTF8.GetDecoder();
private int state = LENGTH;
private static readonly byte[] StateNext = new[] { (byte)' ', (byte)'=', (byte)'\n' };
/// <summary>
/// Creates a new <see cref="TarExtendedHeaderReader"/>.
/// </summary>
public TarExtendedHeaderReader()
{
ResetBuffers();
}
/// <summary>
/// Read <paramref name="length"/> bytes from <paramref name="buffer"/>
/// </summary>
/// <param name="buffer"></param>
/// <param name="length"></param>
public void Read(byte[] buffer, int length)
{
for (int i = 0; i < length; i++)
{
byte next = buffer[i];
if (next == StateNext[state])
{
Flush();
headerParts[state] = sb.ToString();
sb.Clear();
if (++state == END)
{
headers.Add(headerParts[KEY], headerParts[VALUE]);
headerParts = new string[3];
state = LENGTH;
}
}
else
{
byteBuffer[bbIndex++] = next;
if (bbIndex == 4)
Flush();
}
}
}
private void Flush()
{
decoder.Convert(byteBuffer, 0, bbIndex, charBuffer, 0, 4, false, out int bytesUsed, out int charsUsed, out bool completed);
sb.Append(charBuffer, 0, charsUsed);
ResetBuffers();
}
private void ResetBuffers()
{
charBuffer = new char[4];
byteBuffer = new byte[4];
bbIndex = 0;
}
/// <summary>
/// Returns the parsed headers as key-value strings
/// </summary>
public Dictionary<string, string> Headers
{
get
{
// TODO: Check for invalid state? -NM 2018-07-01
return headers;
}
}
}
}
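A sketch (not part of this commit) feeding one PAX-style "length key=value\n" record through the reader; the record text is invented for the example, and the delimiters match the state machine above (space, '=', newline).
using System;
using System.Text;
using ICSharpCode.SharpZipLib.Tar;

internal static class PaxHeaderSketch
{
    public static void Demo()
    {
        var reader = new TarExtendedHeaderReader();

        // "27" is the record length in bytes, counting the length field and the trailing newline.
        byte[] record = Encoding.UTF8.GetBytes("27 path=some/long/file.txt\n");
        reader.Read(record, record.Length);

        Console.WriteLine(reader.Headers["path"]);       // prints: some/long/file.txt
    }
}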

File diff suppressed because it is too large

View File

@@ -0,0 +1,771 @@
using System;
using System.IO;
using System.Text;
namespace ICSharpCode.SharpZipLib.Tar
{
/// <summary>
/// The TarInputStream reads a UNIX tar archive as an InputStream.
/// Methods are provided to position at each successive entry in
/// the archive, and to read each entry as a normal input stream
/// using Read().
/// </summary>
public class TarInputStream : Stream
{
#region Constructors
/// <summary>
/// Construct a TarInputStream with default block factor
/// </summary>
/// <param name="inputStream">stream to source data from</param>
[Obsolete("No Encoding for Name field is specified, any non-ASCII bytes will be discarded")]
public TarInputStream(Stream inputStream)
: this(inputStream, TarBuffer.DefaultBlockFactor, null)
{
}
/// <summary>
/// Construct a TarInputStream with default block factor
/// </summary>
/// <param name="inputStream">stream to source data from</param>
/// <param name="nameEncoding">The <see cref="Encoding"/> used for the Name fields, or null for ASCII only</param>
public TarInputStream(Stream inputStream, Encoding nameEncoding)
: this(inputStream, TarBuffer.DefaultBlockFactor, nameEncoding)
{
}
/// <summary>
/// Construct a TarInputStream with user specified block factor
/// </summary>
/// <param name="inputStream">stream to source data from</param>
/// <param name="blockFactor">block factor to apply to archive</param>
[Obsolete("No Encoding for Name field is specified, any non-ASCII bytes will be discarded")]
public TarInputStream(Stream inputStream, int blockFactor)
{
this.inputStream = inputStream;
tarBuffer = TarBuffer.CreateInputTarBuffer(inputStream, blockFactor);
encoding = null;
}
/// <summary>
/// Construct a TarInputStream with user specified block factor
/// </summary>
/// <param name="inputStream">stream to source data from</param>
/// <param name="blockFactor">block factor to apply to archive</param>
/// <param name="nameEncoding">The <see cref="Encoding"/> used for the Name fields, or null for ASCII only</param>
public TarInputStream(Stream inputStream, int blockFactor, Encoding nameEncoding)
{
this.inputStream = inputStream;
tarBuffer = TarBuffer.CreateInputTarBuffer(inputStream, blockFactor);
encoding = nameEncoding;
}
#endregion Constructors
/// <summary>
/// Gets or sets a flag indicating ownership of underlying stream.
/// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.
/// </summary>
/// <remarks>The default value is true.</remarks>
public bool IsStreamOwner
{
get { return tarBuffer.IsStreamOwner; }
set { tarBuffer.IsStreamOwner = value; }
}
#region Stream Overrides
/// <summary>
/// Gets a value indicating whether the current stream supports reading
/// </summary>
public override bool CanRead
{
get
{
return inputStream.CanRead;
}
}
/// <summary>
/// Gets a value indicating whether the current stream supports seeking
/// This property always returns false.
/// </summary>
public override bool CanSeek
{
get
{
return false;
}
}
/// <summary>
/// Gets a value indicating if the stream supports writing.
/// This property always returns false.
/// </summary>
public override bool CanWrite
{
get
{
return false;
}
}
/// <summary>
/// The length in bytes of the stream
/// </summary>
public override long Length
{
get
{
return inputStream.Length;
}
}
/// <summary>
/// Gets or sets the position within the stream.
/// Setting the Position is not supported and throws a NotSupportedException.
/// </summary>
/// <exception cref="NotSupportedException">Any attempt to set position</exception>
public override long Position
{
get
{
return inputStream.Position;
}
set
{
throw new NotSupportedException("TarInputStream Seek not supported");
}
}
/// <summary>
/// Flushes the baseInputStream
/// </summary>
public override void Flush()
{
inputStream.Flush();
}
/// <summary>
/// Set the streams position. This operation is not supported and will throw a NotSupportedException
/// </summary>
/// <param name="offset">The offset relative to the origin to seek to.</param>
/// <param name="origin">The <see cref="SeekOrigin"/> to start seeking from.</param>
/// <returns>The new position in the stream.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException("TarInputStream Seek not supported");
}
/// <summary>
/// Sets the length of the stream
/// This operation is not supported and will throw a NotSupportedException
/// </summary>
/// <param name="value">The new stream length.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void SetLength(long value)
{
throw new NotSupportedException("TarInputStream SetLength not supported");
}
/// <summary>
/// Writes a block of bytes to this stream using data from a buffer.
/// This operation is not supported and will throw a NotSupportedException
/// </summary>
/// <param name="buffer">The buffer containing bytes to write.</param>
/// <param name="offset">The offset in the buffer of the frist byte to write.</param>
/// <param name="count">The number of bytes to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException("TarInputStream Write not supported");
}
/// <summary>
/// Writes a byte to the current position in the file stream.
/// This operation is not supported and will throw a NotSupportedException
/// </summary>
/// <param name="value">The byte value to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void WriteByte(byte value)
{
throw new NotSupportedException("TarInputStream WriteByte not supported");
}
/// <summary>
/// Reads a byte from the current tar archive entry.
/// </summary>
/// <returns>A byte cast to an int; -1 if the at the end of the stream.</returns>
public override int ReadByte()
{
byte[] oneByteBuffer = new byte[1];
int num = Read(oneByteBuffer, 0, 1);
if (num <= 0)
{
// return -1 to indicate that no byte was read.
return -1;
}
return oneByteBuffer[0];
}
/// <summary>
/// Reads bytes from the current tar archive entry.
///
/// This method is aware of the boundaries of the current
/// entry in the archive and will deal with them appropriately
/// </summary>
/// <param name="buffer">
/// The buffer into which to place bytes read.
/// </param>
/// <param name="offset">
/// The offset at which to place bytes read.
/// </param>
/// <param name="count">
/// The number of bytes to read.
/// </param>
/// <returns>
/// The number of bytes read, or 0 at end of stream/EOF.
/// </returns>
public override int Read(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
int totalRead = 0;
if (entryOffset >= entrySize)
{
return 0;
}
long numToRead = count;
if ((numToRead + entryOffset) > entrySize)
{
numToRead = entrySize - entryOffset;
}
if (readBuffer != null)
{
int sz = (numToRead > readBuffer.Length) ? readBuffer.Length : (int)numToRead;
Array.Copy(readBuffer, 0, buffer, offset, sz);
if (sz >= readBuffer.Length)
{
readBuffer = null;
}
else
{
int newLen = readBuffer.Length - sz;
byte[] newBuf = new byte[newLen];
Array.Copy(readBuffer, sz, newBuf, 0, newLen);
readBuffer = newBuf;
}
totalRead += sz;
numToRead -= sz;
offset += sz;
}
while (numToRead > 0)
{
byte[] rec = tarBuffer.ReadBlock();
if (rec == null)
{
// Unexpected EOF!
throw new TarException("unexpected EOF with " + numToRead + " bytes unread");
}
var sz = (int)numToRead;
int recLen = rec.Length;
if (recLen > sz)
{
Array.Copy(rec, 0, buffer, offset, sz);
readBuffer = new byte[recLen - sz];
Array.Copy(rec, sz, readBuffer, 0, recLen - sz);
}
else
{
sz = recLen;
Array.Copy(rec, 0, buffer, offset, recLen);
}
totalRead += sz;
numToRead -= sz;
offset += sz;
}
entryOffset += totalRead;
return totalRead;
}
/// <summary>
/// Closes this stream. Calls the TarBuffer's close() method.
/// The underlying stream is closed by the TarBuffer.
/// </summary>
protected override void Dispose(bool disposing)
{
if (disposing)
{
tarBuffer.Close();
}
}
#endregion Stream Overrides
/// <summary>
/// Set the entry factory for this instance.
/// </summary>
/// <param name="factory">The factory for creating new entries</param>
public void SetEntryFactory(IEntryFactory factory)
{
entryFactory = factory;
}
/// <summary>
/// Get the record size being used by this stream's TarBuffer.
/// </summary>
public int RecordSize
{
get { return tarBuffer.RecordSize; }
}
/// <summary>
/// Get the record size being used by this stream's TarBuffer.
/// </summary>
/// <returns>
/// TarBuffer record size.
/// </returns>
[Obsolete("Use RecordSize property instead")]
public int GetRecordSize()
{
return tarBuffer.RecordSize;
}
/// <summary>
/// Get the available data that can be read from the current
/// entry in the archive. This does not indicate how much data
/// is left in the entire archive, only in the current entry.
/// This value is determined from the entry's size header field
/// and the amount of data already read from the current entry.
/// </summary>
/// <returns>
/// The number of available bytes for the current entry.
/// </returns>
public long Available
{
get
{
return entrySize - entryOffset;
}
}
/// <summary>
/// Skip bytes in the input buffer. This skips bytes in the
/// current entry's data, not the entire archive, and will
/// stop at the end of the current entry's data if the number
/// to skip extends beyond that point.
/// </summary>
/// <param name="skipCount">
/// The number of bytes to skip.
/// </param>
public void Skip(long skipCount)
{
// TODO: REVIEW efficiency of TarInputStream.Skip
// This is horribly inefficient, but it ensures that we
// properly skip over bytes via the TarBuffer...
//
byte[] skipBuf = new byte[8 * 1024];
for (long num = skipCount; num > 0;)
{
int toRead = num > skipBuf.Length ? skipBuf.Length : (int)num;
int numRead = Read(skipBuf, 0, toRead);
if (numRead == -1)
{
break;
}
num -= numRead;
}
}
/// <summary>
/// Return a value of true if marking is supported; false otherwise.
/// </summary>
/// <remarks>Currently marking is not supported, the return value is always false.</remarks>
public bool IsMarkSupported
{
get
{
return false;
}
}
/// <summary>
/// Since we do not support marking just yet, we do nothing.
/// </summary>
/// <param name ="markLimit">
/// The limit to mark.
/// </param>
public void Mark(int markLimit)
{
}
/// <summary>
/// Since we do not support marking just yet, we do nothing.
/// </summary>
public void Reset()
{
}
/// <summary>
/// Get the next entry in this tar archive. This will skip
/// over any remaining data in the current entry, if there
/// is one, position the input stream at the header of the
/// next entry, read the header, instantiate a new TarEntry
/// from the header bytes, and return that entry.
/// If there are no more entries in the archive, null will
/// be returned to indicate that the end of the archive has
/// been reached.
/// </summary>
/// <returns>
/// The next TarEntry in the archive, or null.
/// </returns>
public TarEntry GetNextEntry()
{
if (hasHitEOF)
{
return null;
}
if (currentEntry != null)
{
SkipToNextEntry();
}
byte[] headerBuf = tarBuffer.ReadBlock();
if (headerBuf == null)
{
hasHitEOF = true;
}
else if (TarBuffer.IsEndOfArchiveBlock(headerBuf))
{
hasHitEOF = true;
// Read the second zero-filled block
tarBuffer.ReadBlock();
}
else
{
hasHitEOF = false;
}
if (hasHitEOF)
{
currentEntry = null;
}
else
{
try
{
var header = new TarHeader();
header.ParseBuffer(headerBuf, encoding);
if (!header.IsChecksumValid)
{
throw new TarException("Header checksum is invalid");
}
this.entryOffset = 0;
this.entrySize = header.Size;
StringBuilder longName = null;
if (header.TypeFlag == TarHeader.LF_GNU_LONGNAME)
{
byte[] nameBuffer = new byte[TarBuffer.BlockSize];
long numToRead = this.entrySize;
longName = new StringBuilder();
while (numToRead > 0)
{
int numRead = this.Read(nameBuffer, 0, (numToRead > nameBuffer.Length ? nameBuffer.Length : (int)numToRead));
if (numRead == -1)
{
throw new InvalidHeaderException("Failed to read long name entry");
}
longName.Append(TarHeader.ParseName(nameBuffer, 0, numRead, encoding).ToString());
numToRead -= numRead;
}
SkipToNextEntry();
headerBuf = this.tarBuffer.ReadBlock();
}
else if (header.TypeFlag == TarHeader.LF_GHDR)
{ // POSIX global extended header
// Ignore things we don't understand completely for now
SkipToNextEntry();
headerBuf = this.tarBuffer.ReadBlock();
}
else if (header.TypeFlag == TarHeader.LF_XHDR)
{ // POSIX extended header
byte[] nameBuffer = new byte[TarBuffer.BlockSize];
long numToRead = this.entrySize;
var xhr = new TarExtendedHeaderReader();
while (numToRead > 0)
{
int numRead = this.Read(nameBuffer, 0, (numToRead > nameBuffer.Length ? nameBuffer.Length : (int)numToRead));
if (numRead == -1)
{
throw new InvalidHeaderException("Failed to read long name entry");
}
xhr.Read(nameBuffer, numRead);
numToRead -= numRead;
}
if (xhr.Headers.TryGetValue("path", out string name))
{
longName = new StringBuilder(name);
}
SkipToNextEntry();
headerBuf = this.tarBuffer.ReadBlock();
}
else if (header.TypeFlag == TarHeader.LF_GNU_VOLHDR)
{
// TODO: could show volume name when verbose
SkipToNextEntry();
headerBuf = this.tarBuffer.ReadBlock();
}
else if (header.TypeFlag != TarHeader.LF_NORMAL &&
header.TypeFlag != TarHeader.LF_OLDNORM &&
header.TypeFlag != TarHeader.LF_LINK &&
header.TypeFlag != TarHeader.LF_SYMLINK &&
header.TypeFlag != TarHeader.LF_DIR)
{
// Ignore things we don't understand completely for now
SkipToNextEntry();
headerBuf = tarBuffer.ReadBlock();
}
if (entryFactory == null)
{
currentEntry = new TarEntry(headerBuf, encoding);
if (longName != null)
{
currentEntry.Name = longName.ToString();
}
}
else
{
currentEntry = entryFactory.CreateEntry(headerBuf);
}
// Magic was checked here for 'ustar' but there are multiple valid possibilities
// so this is not done anymore.
entryOffset = 0;
// TODO: Review How do we resolve this discrepancy?!
entrySize = this.currentEntry.Size;
}
catch (InvalidHeaderException ex)
{
entrySize = 0;
entryOffset = 0;
currentEntry = null;
string errorText = string.Format("Bad header in record {0} block {1} {2}",
tarBuffer.CurrentRecord, tarBuffer.CurrentBlock, ex.Message);
throw new InvalidHeaderException(errorText);
}
}
return currentEntry;
}
/// <summary>
/// Copies the contents of the current tar archive entry directly into
/// an output stream.
/// </summary>
/// <param name="outputStream">
/// The OutputStream into which to write the entry's data.
/// </param>
public void CopyEntryContents(Stream outputStream)
{
byte[] tempBuffer = new byte[32 * 1024];
while (true)
{
int numRead = Read(tempBuffer, 0, tempBuffer.Length);
if (numRead <= 0)
{
break;
}
outputStream.Write(tempBuffer, 0, numRead);
}
}
private void SkipToNextEntry()
{
long numToSkip = entrySize - entryOffset;
if (numToSkip > 0)
{
Skip(numToSkip);
}
readBuffer = null;
}
/// <summary>
/// This interface is provided, along with the method <see cref="SetEntryFactory"/>, to allow
/// the programmer to have their own <see cref="TarEntry"/> subclass instantiated for the
/// entries returned from <see cref="GetNextEntry"/>.
/// </summary>
public interface IEntryFactory
{
// This interface does not consider name encoding.
// How should this interface handle it?
/// <summary>
/// Create an entry based on name alone
/// </summary>
/// <param name="name">
/// Name of the new <see cref="TarEntry"/> to create
/// </param>
/// <returns>created TarEntry or descendant class</returns>
TarEntry CreateEntry(string name);
/// <summary>
/// Create an instance based on an actual file
/// </summary>
/// <param name="fileName">
/// Name of file to represent in the entry
/// </param>
/// <returns>
/// Created TarEntry or descendant class
/// </returns>
TarEntry CreateEntryFromFile(string fileName);
/// <summary>
/// Create a tar entry based on the header information passed
/// </summary>
/// <param name="headerBuffer">
/// Buffer containing header information to create an entry from.
/// </param>
/// <returns>
/// Created TarEntry or descendant class
/// </returns>
TarEntry CreateEntry(byte[] headerBuffer);
}
/// <summary>
/// Standard entry factory class creating instances of the class TarEntry
/// </summary>
public class EntryFactoryAdapter : IEntryFactory
{
Encoding nameEncoding;
/// <summary>
/// Construct standard entry factory class with ASCII name encoding
/// </summary>
[Obsolete("No Encoding for Name field is specified, any non-ASCII bytes will be discarded")]
public EntryFactoryAdapter()
{
}
/// <summary>
/// Construct standard entry factory with name encoding
/// </summary>
/// <param name="nameEncoding">The <see cref="Encoding"/> used for the Name fields, or null for ASCII only</param>
public EntryFactoryAdapter(Encoding nameEncoding)
{
this.nameEncoding = nameEncoding;
}
/// <summary>
/// Create a <see cref="TarEntry"/> based on a name
/// </summary>
/// <param name="name">The name to use for the entry</param>
/// <returns>A new <see cref="TarEntry"/></returns>
public TarEntry CreateEntry(string name)
{
return TarEntry.CreateTarEntry(name);
}
/// <summary>
/// Create a tar entry with details obtained from <paramref name="fileName">file</paramref>
/// </summary>
/// <param name="fileName">The name of the file to retrieve details from.</param>
/// <returns>A new <see cref="TarEntry"/></returns>
public TarEntry CreateEntryFromFile(string fileName)
{
return TarEntry.CreateEntryFromFile(fileName);
}
/// <summary>
/// Create an entry based on details in <paramref name="headerBuffer">header</paramref>
/// </summary>
/// <param name="headerBuffer">The buffer containing entry details.</param>
/// <returns>A new <see cref="TarEntry"/></returns>
public TarEntry CreateEntry(byte[] headerBuffer)
{
return new TarEntry(headerBuffer, nameEncoding);
}
}
#region Instance Fields
/// <summary>
/// Flag set when last block has been read
/// </summary>
protected bool hasHitEOF;
/// <summary>
/// Size of this entry as recorded in header
/// </summary>
protected long entrySize;
/// <summary>
/// Number of bytes read for this entry so far
/// </summary>
protected long entryOffset;
/// <summary>
/// Buffer used with calls to <code>Read()</code>
/// </summary>
protected byte[] readBuffer;
/// <summary>
/// Working buffer
/// </summary>
protected TarBuffer tarBuffer;
/// <summary>
/// Current entry being read
/// </summary>
private TarEntry currentEntry;
/// <summary>
/// Factory used to create TarEntry or descendant class instance
/// </summary>
protected IEntryFactory entryFactory;
/// <summary>
/// Stream used as the source of input data.
/// </summary>
private readonly Stream inputStream;
private readonly Encoding encoding;
#endregion Instance Fields
}
}
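// --- Usage sketch (illustrative only; not part of the library source) ---
// A minimal example of reading a tar archive with the TarInputStream above:
// walk the entries with GetNextEntry() (null signals the end of the archive)
// and copy each file's data out with CopyEntryContents(). The paths and the
// UTF-8 name encoding below are assumptions made for the example.
using System.IO;
using System.Text;
using ICSharpCode.SharpZipLib.Tar;

internal static class TarExtractExample
{
    public static void ExtractAll(string tarPath, string targetDirectory)
    {
        using (var fileStream = File.OpenRead(tarPath))
        using (var tarIn = new TarInputStream(fileStream, Encoding.UTF8))
        {
            TarEntry entry;
            while ((entry = tarIn.GetNextEntry()) != null)
            {
                string outPath = Path.Combine(targetDirectory, entry.Name);
                if (entry.IsDirectory)
                {
                    Directory.CreateDirectory(outPath);
                    continue;
                }
                string parent = Path.GetDirectoryName(outPath);
                if (!string.IsNullOrEmpty(parent))
                {
                    Directory.CreateDirectory(parent);
                }
                using (var outStream = File.Create(outPath))
                {
                    // Copies exactly the current entry's data (entry.Size bytes).
                    tarIn.CopyEntryContents(outStream);
                }
            }
        }
    }
}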

View File

@@ -0,0 +1,522 @@
using System;
using System.IO;
using System.Text;
namespace ICSharpCode.SharpZipLib.Tar
{
/// <summary>
/// The TarOutputStream writes a UNIX tar archive as an OutputStream.
/// Methods are provided to put entries, and then write their contents
/// by writing to this stream using write().
/// </summary>
public class TarOutputStream : Stream
{
#region Constructors
/// <summary>
/// Construct TarOutputStream using default block factor
/// </summary>
/// <param name="outputStream">stream to write to</param>
[Obsolete("No Encoding for Name field is specified, any non-ASCII bytes will be discarded")]
public TarOutputStream(Stream outputStream)
: this(outputStream, TarBuffer.DefaultBlockFactor)
{
}
/// <summary>
/// Construct TarOutputStream using default block factor
/// </summary>
/// <param name="outputStream">stream to write to</param>
/// <param name="nameEncoding">The <see cref="Encoding"/> used for the Name fields, or null for ASCII only</param>
public TarOutputStream(Stream outputStream, Encoding nameEncoding)
: this(outputStream, TarBuffer.DefaultBlockFactor, nameEncoding)
{
}
/// <summary>
/// Construct TarOutputStream with user specified block factor
/// </summary>
/// <param name="outputStream">stream to write to</param>
/// <param name="blockFactor">blocking factor</param>
[Obsolete("No Encoding for Name field is specified, any non-ASCII bytes will be discarded")]
public TarOutputStream(Stream outputStream, int blockFactor)
{
if (outputStream == null)
{
throw new ArgumentNullException(nameof(outputStream));
}
this.outputStream = outputStream;
buffer = TarBuffer.CreateOutputTarBuffer(outputStream, blockFactor);
assemblyBuffer = new byte[TarBuffer.BlockSize];
blockBuffer = new byte[TarBuffer.BlockSize];
}
/// <summary>
/// Construct TarOutputStream with user specified block factor
/// </summary>
/// <param name="outputStream">stream to write to</param>
/// <param name="blockFactor">blocking factor</param>
/// <param name="nameEncoding">The <see cref="Encoding"/> used for the Name fields, or null for ASCII only</param>
public TarOutputStream(Stream outputStream, int blockFactor, Encoding nameEncoding)
{
if (outputStream == null)
{
throw new ArgumentNullException(nameof(outputStream));
}
this.outputStream = outputStream;
buffer = TarBuffer.CreateOutputTarBuffer(outputStream, blockFactor);
assemblyBuffer = new byte[TarBuffer.BlockSize];
blockBuffer = new byte[TarBuffer.BlockSize];
this.nameEncoding = nameEncoding;
}
#endregion Constructors
/// <summary>
/// Gets or sets a flag indicating ownership of underlying stream.
/// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.
/// </summary>
/// <remarks>The default value is true.</remarks>
public bool IsStreamOwner
{
get { return buffer.IsStreamOwner; }
set { buffer.IsStreamOwner = value; }
}
/// <summary>
/// true if the stream supports reading; otherwise, false.
/// </summary>
public override bool CanRead
{
get
{
return outputStream.CanRead;
}
}
/// <summary>
/// true if the stream supports seeking; otherwise, false.
/// </summary>
public override bool CanSeek
{
get
{
return outputStream.CanSeek;
}
}
/// <summary>
/// true if stream supports writing; otherwise, false.
/// </summary>
public override bool CanWrite
{
get
{
return outputStream.CanWrite;
}
}
/// <summary>
/// length of stream in bytes
/// </summary>
public override long Length
{
get
{
return outputStream.Length;
}
}
/// <summary>
/// gets or sets the position within the current stream.
/// </summary>
public override long Position
{
get
{
return outputStream.Position;
}
set
{
outputStream.Position = value;
}
}
/// <summary>
/// set the position within the current stream
/// </summary>
/// <param name="offset">The offset relative to the <paramref name="origin"/> to seek to</param>
/// <param name="origin">The <see cref="SeekOrigin"/> to seek from.</param>
/// <returns>The new position in the stream.</returns>
public override long Seek(long offset, SeekOrigin origin)
{
return outputStream.Seek(offset, origin);
}
/// <summary>
/// Set the length of the current stream
/// </summary>
/// <param name="value">The new stream length.</param>
public override void SetLength(long value)
{
outputStream.SetLength(value);
}
/// <summary>
/// Read a byte from the stream and advance the position within the stream
/// by one byte or returns -1 if at the end of the stream.
/// </summary>
/// <returns>The byte value or -1 if at end of stream</returns>
public override int ReadByte()
{
return outputStream.ReadByte();
}
/// <summary>
/// read bytes from the current stream and advance the position within the
/// stream by the number of bytes read.
/// </summary>
/// <param name="buffer">The buffer to store read bytes in.</param>
/// <param name="offset">The index into the buffer to being storing bytes at.</param>
/// <param name="count">The desired number of bytes to read.</param>
/// <returns>The total number of bytes read, or zero if at the end of the stream.
/// The number of bytes may be less than the <paramref name="count">count</paramref>
/// requested if data is not available.</returns>
public override int Read(byte[] buffer, int offset, int count)
{
return outputStream.Read(buffer, offset, count);
}
/// <summary>
/// All buffered data is written to destination
/// </summary>
public override void Flush()
{
outputStream.Flush();
}
/// <summary>
/// Ends the TAR archive without closing the underlying OutputStream.
/// The result is that the EOF block of nulls is written.
/// </summary>
public void Finish()
{
if (IsEntryOpen)
{
CloseEntry();
}
WriteEofBlock();
}
/// <summary>
/// Ends the TAR archive and closes the underlying OutputStream.
/// </summary>
/// <remarks>This means that Finish() is called followed by calling the
/// TarBuffer's Close().</remarks>
protected override void Dispose(bool disposing)
{
if (!isClosed)
{
isClosed = true;
Finish();
buffer.Close();
}
}
/// <summary>
/// Get the record size being used by this stream's TarBuffer.
/// </summary>
public int RecordSize
{
get { return buffer.RecordSize; }
}
/// <summary>
/// Get the record size being used by this stream's TarBuffer.
/// </summary>
/// <returns>
/// The TarBuffer record size.
/// </returns>
[Obsolete("Use RecordSize property instead")]
public int GetRecordSize()
{
return buffer.RecordSize;
}
/// <summary>
/// Get a value indicating whether an entry is open, requiring more data to be written.
/// </summary>
private bool IsEntryOpen
{
get { return (currBytes < currSize); }
}
/// <summary>
/// Put an entry on the output stream. This writes the entry's
/// header and positions the output stream for writing
/// the contents of the entry. Once this method is called, the
/// stream is ready for calls to write() to write the entry's
/// contents. Once the contents are written, closeEntry()
/// <B>MUST</B> be called to ensure that all buffered data
/// is completely written to the output stream.
/// </summary>
/// <param name="entry">
/// The TarEntry to be written to the archive.
/// </param>
public void PutNextEntry(TarEntry entry)
{
if (entry == null)
{
throw new ArgumentNullException(nameof(entry));
}
var namelen = nameEncoding != null ? nameEncoding.GetByteCount(entry.TarHeader.Name) : entry.TarHeader.Name.Length;
if (namelen > TarHeader.NAMELEN)
{
var longHeader = new TarHeader();
longHeader.TypeFlag = TarHeader.LF_GNU_LONGNAME;
longHeader.Name = longHeader.Name + "././@LongLink";
longHeader.Mode = 420;//644 by default
longHeader.UserId = entry.UserId;
longHeader.GroupId = entry.GroupId;
longHeader.GroupName = entry.GroupName;
longHeader.UserName = entry.UserName;
longHeader.LinkName = "";
longHeader.Size = namelen + 1; // Plus one to avoid dropping last char
longHeader.WriteHeader(blockBuffer, nameEncoding);
buffer.WriteBlock(blockBuffer); // Add special long filename header block
int nameCharIndex = 0;
while (nameCharIndex < namelen + 1 /* we've allocated one for the null char, now we must make sure it gets written out */)
{
Array.Clear(blockBuffer, 0, blockBuffer.Length);
TarHeader.GetAsciiBytes(entry.TarHeader.Name, nameCharIndex, this.blockBuffer, 0, TarBuffer.BlockSize, nameEncoding); // This function correctly handles the extra char beyond the string length
nameCharIndex += TarBuffer.BlockSize;
buffer.WriteBlock(blockBuffer);
}
}
entry.WriteEntryHeader(blockBuffer, nameEncoding);
buffer.WriteBlock(blockBuffer);
currBytes = 0;
currSize = entry.IsDirectory ? 0 : entry.Size;
}
/// <summary>
/// Close an entry. This method MUST be called for all file
/// entries that contain data. The reason is that we must
/// buffer data written to the stream in order to satisfy
/// the buffer's block based writes. Thus, there may be
/// data fragments still being assembled that must be written
/// to the output stream before this entry is closed and the
/// next entry written.
/// </summary>
public void CloseEntry()
{
if (assemblyBufferLength > 0)
{
Array.Clear(assemblyBuffer, assemblyBufferLength, assemblyBuffer.Length - assemblyBufferLength);
buffer.WriteBlock(assemblyBuffer);
currBytes += assemblyBufferLength;
assemblyBufferLength = 0;
}
if (currBytes < currSize)
{
string errorText = string.Format(
"Entry closed at '{0}' before the '{1}' bytes specified in the header were written",
currBytes, currSize);
throw new TarException(errorText);
}
}
/// <summary>
/// Writes a byte to the current tar archive entry.
/// This method simply calls Write(byte[], int, int).
/// </summary>
/// <param name="value">
/// The byte to be written.
/// </param>
public override void WriteByte(byte value)
{
Write(new byte[] { value }, 0, 1);
}
/// <summary>
/// Writes bytes to the current tar archive entry. This method
/// is aware of the current entry and will throw an exception if
/// you attempt to write bytes past the length specified for the
/// current entry. The method is also (painfully) aware of the
/// record buffering required by TarBuffer, and manages buffers
/// that are not a multiple of recordsize in length, including
/// assembling records from small buffers.
/// </summary>
/// <param name = "buffer">
/// The buffer to write to the archive.
/// </param>
/// <param name = "offset">
/// The offset in the buffer from which to get bytes.
/// </param>
/// <param name = "count">
/// The number of bytes to write.
/// </param>
public override void Write(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset), "Cannot be negative");
}
if (buffer.Length - offset < count)
{
throw new ArgumentException("offset and count combination is invalid");
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), "Cannot be negative");
}
if ((currBytes + count) > currSize)
{
string errorText = string.Format("request to write '{0}' bytes exceeds size in header of '{1}' bytes",
count, this.currSize);
throw new ArgumentOutOfRangeException(nameof(count), errorText);
}
//
// We have to deal with assembly!!!
// The programmer can be writing little 32 byte chunks for all
// we know, and we must assemble complete blocks for writing.
// TODO REVIEW Maybe this should be in TarBuffer? Could that help to
// eliminate some of the buffer copying.
//
if (assemblyBufferLength > 0)
{
if ((assemblyBufferLength + count) >= blockBuffer.Length)
{
int aLen = blockBuffer.Length - assemblyBufferLength;
Array.Copy(assemblyBuffer, 0, blockBuffer, 0, assemblyBufferLength);
Array.Copy(buffer, offset, blockBuffer, assemblyBufferLength, aLen);
this.buffer.WriteBlock(blockBuffer);
currBytes += blockBuffer.Length;
offset += aLen;
count -= aLen;
assemblyBufferLength = 0;
}
else
{
Array.Copy(buffer, offset, assemblyBuffer, assemblyBufferLength, count);
offset += count;
assemblyBufferLength += count;
count -= count;
}
}
//
// When we get here we have EITHER:
// o An empty "assembly" buffer.
// o No bytes to write (count == 0)
//
while (count > 0)
{
if (count < blockBuffer.Length)
{
Array.Copy(buffer, offset, assemblyBuffer, assemblyBufferLength, count);
assemblyBufferLength += count;
break;
}
this.buffer.WriteBlock(buffer, offset);
int bufferLength = blockBuffer.Length;
currBytes += bufferLength;
count -= bufferLength;
offset += bufferLength;
}
}
/// <summary>
/// Write an EOF (end of archive) block to the tar archive.
/// The end of the archive is indicated by two blocks consisting entirely of zero bytes.
/// </summary>
private void WriteEofBlock()
{
Array.Clear(blockBuffer, 0, blockBuffer.Length);
buffer.WriteBlock(blockBuffer);
buffer.WriteBlock(blockBuffer);
}
#region Instance Fields
/// <summary>
/// bytes written for this entry so far
/// </summary>
private long currBytes;
/// <summary>
/// current 'Assembly' buffer length
/// </summary>
private int assemblyBufferLength;
/// <summary>
/// Flag indicating whether this instance has been closed or not.
/// </summary>
private bool isClosed;
/// <summary>
/// Size for the current entry
/// </summary>
protected long currSize;
/// <summary>
/// single block working buffer
/// </summary>
protected byte[] blockBuffer;
/// <summary>
/// 'Assembly' buffer used to assemble data before writing
/// </summary>
protected byte[] assemblyBuffer;
/// <summary>
/// TarBuffer used to provide correct blocking factor
/// </summary>
protected TarBuffer buffer;
/// <summary>
/// the destination stream for the archive contents
/// </summary>
protected Stream outputStream;
/// <summary>
/// name encoding
/// </summary>
protected Encoding nameEncoding;
#endregion Instance Fields
}
}
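// --- Usage sketch (illustrative only; not part of the library source) ---
// A minimal example of writing a tar archive with the TarOutputStream above.
// PutNextEntry() writes the header block, Write() supplies exactly the
// entry.Size bytes of data, and CloseEntry() must follow each file entry so
// the partially filled block is flushed. Disposing the stream calls Finish(),
// which writes the two zero-filled end-of-archive blocks. The file names and
// the UTF-8 name encoding below are assumptions made for the example.
using System.IO;
using System.Text;
using ICSharpCode.SharpZipLib.Tar;

internal static class TarCreateExample
{
    public static void AddFile(string tarPath, string fileToAdd)
    {
        using (var fileStream = File.Create(tarPath))
        using (var tarOut = new TarOutputStream(fileStream, Encoding.UTF8))
        {
            TarEntry entry = TarEntry.CreateEntryFromFile(fileToAdd);
            tarOut.PutNextEntry(entry);

            using (var input = File.OpenRead(fileToAdd))
            {
                byte[] buffer = new byte[32 * 1024];
                int read;
                while ((read = input.Read(buffer, 0, buffer.Length)) > 0)
                {
                    tarOut.Write(buffer, 0, read);
                }
            }

            tarOut.CloseEntry();
        }
    }
}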

View File

@@ -0,0 +1,604 @@
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// This is the Deflater class. The deflater class compresses input
/// with the deflate algorithm described in RFC 1951. It has several
/// compression levels and three different strategies described below.
///
/// This class is <i>not</i> thread safe. This is inherent in the API, due
/// to the split of deflate and setInput.
///
/// author of the original java version : Jochen Hoenicke
/// </summary>
public class Deflater
{
#region Deflater Documentation
/*
* The Deflater can do the following state transitions:
*
* (1) -> INIT_STATE ----> INIT_FINISHING_STATE ---.
* / | (2) (5) |
* / v (5) |
* (3)| SETDICT_STATE ---> SETDICT_FINISHING_STATE |(3)
* \ | (3) | ,--------'
* | | | (3) /
* v v (5) v v
* (1) -> BUSY_STATE ----> FINISHING_STATE
* | (6)
* v
* FINISHED_STATE
* \_____________________________________/
* | (7)
* v
* CLOSED_STATE
*
* (1) If we should produce a header we start in INIT_STATE, otherwise
* we start in BUSY_STATE.
* (2) A dictionary may be set only when we are in INIT_STATE, then
* we change the state as indicated.
* (3) Whether a dictionary is set or not, on the first call of deflate
* we change to BUSY_STATE.
* (4) -- intentionally left blank -- :)
* (5) FINISHING_STATE is entered when flush() is called to indicate that
* there is no more INPUT. There are also states indicating that
* the header wasn't written yet.
* (6) FINISHED_STATE is entered, when everything has been flushed to the
* internal pending output buffer.
* (7) At any time the deflater may be closed, moving to CLOSED_STATE.
*
*/
#endregion Deflater Documentation
#region Public Constants
/// <summary>
/// The best and slowest compression level. This tries to find very
/// long and distant string repetitions.
/// </summary>
public const int BEST_COMPRESSION = 9;
/// <summary>
/// The worst but fastest compression level.
/// </summary>
public const int BEST_SPEED = 1;
/// <summary>
/// The default compression level.
/// </summary>
public const int DEFAULT_COMPRESSION = -1;
/// <summary>
/// This level won't compress at all but output uncompressed blocks.
/// </summary>
public const int NO_COMPRESSION = 0;
/// <summary>
/// The compression method. This is the only method supported so far.
/// There is no need to use this constant at all.
/// </summary>
public const int DEFLATED = 8;
#endregion Public Constants
#region Public Enum
/// <summary>
/// Compression Level as an enum for safer use
/// </summary>
public enum CompressionLevel
{
/// <summary>
/// The best and slowest compression level. This tries to find very
/// long and distant string repetitions.
/// </summary>
BEST_COMPRESSION = Deflater.BEST_COMPRESSION,
/// <summary>
/// The worst but fastest compression level.
/// </summary>
BEST_SPEED = Deflater.BEST_SPEED,
/// <summary>
/// The default compression level.
/// </summary>
DEFAULT_COMPRESSION = Deflater.DEFAULT_COMPRESSION,
/// <summary>
/// This level won't compress at all but output uncompressed blocks.
/// </summary>
NO_COMPRESSION = Deflater.NO_COMPRESSION,
/// <summary>
/// The compression method. This is the only method supported so far.
/// There is no need to use this constant at all.
/// </summary>
DEFLATED = Deflater.DEFLATED
}
#endregion Public Enum
#region Local Constants
private const int IS_SETDICT = 0x01;
private const int IS_FLUSHING = 0x04;
private const int IS_FINISHING = 0x08;
private const int INIT_STATE = 0x00;
private const int SETDICT_STATE = 0x01;
// private static int INIT_FINISHING_STATE = 0x08;
// private static int SETDICT_FINISHING_STATE = 0x09;
private const int BUSY_STATE = 0x10;
private const int FLUSHING_STATE = 0x14;
private const int FINISHING_STATE = 0x1c;
private const int FINISHED_STATE = 0x1e;
private const int CLOSED_STATE = 0x7f;
#endregion Local Constants
#region Constructors
/// <summary>
/// Creates a new deflater with default compression level.
/// </summary>
public Deflater() : this(DEFAULT_COMPRESSION, false)
{
}
/// <summary>
/// Creates a new deflater with given compression level.
/// </summary>
/// <param name="level">
/// the compression level, a value between NO_COMPRESSION
/// and BEST_COMPRESSION, or DEFAULT_COMPRESSION.
/// </param>
/// <exception cref="System.ArgumentOutOfRangeException">if lvl is out of range.</exception>
public Deflater(int level) : this(level, false)
{
}
/// <summary>
/// Creates a new deflater with given compression level.
/// </summary>
/// <param name="level">
/// the compression level, a value between NO_COMPRESSION
/// and BEST_COMPRESSION.
/// </param>
/// <param name="noZlibHeaderOrFooter">
/// true, if we should suppress the Zlib/RFC1950 header at the
/// beginning and the adler checksum at the end of the output. This is
/// useful for the GZIP/PKZIP formats.
/// </param>
/// <exception cref="System.ArgumentOutOfRangeException">if lvl is out of range.</exception>
public Deflater(int level, bool noZlibHeaderOrFooter)
{
if (level == DEFAULT_COMPRESSION)
{
level = 6;
}
else if (level < NO_COMPRESSION || level > BEST_COMPRESSION)
{
throw new ArgumentOutOfRangeException(nameof(level));
}
pending = new DeflaterPending();
engine = new DeflaterEngine(pending, noZlibHeaderOrFooter);
this.noZlibHeaderOrFooter = noZlibHeaderOrFooter;
SetStrategy(DeflateStrategy.Default);
SetLevel(level);
Reset();
}
#endregion Constructors
/// <summary>
/// Resets the deflater. The deflater acts afterwards as if it was
/// just created with the same compression level and strategy as it
/// had before.
/// </summary>
public void Reset()
{
state = (noZlibHeaderOrFooter ? BUSY_STATE : INIT_STATE);
totalOut = 0;
pending.Reset();
engine.Reset();
}
/// <summary>
/// Gets the current adler checksum of the data that was processed so far.
/// </summary>
public int Adler
{
get
{
return engine.Adler;
}
}
/// <summary>
/// Gets the number of input bytes processed so far.
/// </summary>
public long TotalIn
{
get
{
return engine.TotalIn;
}
}
/// <summary>
/// Gets the number of output bytes so far.
/// </summary>
public long TotalOut
{
get
{
return totalOut;
}
}
/// <summary>
/// Flushes the current input block. Further calls to deflate() will
/// produce enough output to inflate everything in the current input
/// block. This is not part of Sun's JDK so I have made it package
/// private. It is used by DeflaterOutputStream to implement
/// flush().
/// </summary>
public void Flush()
{
state |= IS_FLUSHING;
}
/// <summary>
/// Finishes the deflater with the current input block. It is an error
/// to give more input after this method was called. This method must
/// be called to force all bytes to be flushed.
/// </summary>
public void Finish()
{
state |= (IS_FLUSHING | IS_FINISHING);
}
/// <summary>
/// Returns true if the stream was finished and no more output bytes
/// are available.
/// </summary>
public bool IsFinished
{
get
{
return (state == FINISHED_STATE) && pending.IsFlushed;
}
}
/// <summary>
/// Returns true, if the input buffer is empty.
/// You should then call setInput().
/// NOTE: This method can also return true when the stream
/// was finished.
/// </summary>
public bool IsNeedingInput
{
get
{
return engine.NeedsInput();
}
}
/// <summary>
/// Sets the data which should be compressed next. This should be only
/// called when needsInput indicates that more input is needed.
/// If you call setInput when needsInput() returns false, the
/// previous input that is still pending will be thrown away.
/// The given byte array should not be changed, before needsInput() returns
/// true again.
/// This call is equivalent to <code>setInput(input, 0, input.length)</code>.
/// </summary>
/// <param name="input">
/// the buffer containing the input data.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// if the buffer was finished() or ended().
/// </exception>
public void SetInput(byte[] input)
{
SetInput(input, 0, input.Length);
}
/// <summary>
/// Sets the data which should be compressed next. This should be
/// only called when needsInput indicates that more input is needed.
/// The given byte array should not be changed, before needsInput() returns
/// true again.
/// </summary>
/// <param name="input">
/// the buffer containing the input data.
/// </param>
/// <param name="offset">
/// the start of the data.
/// </param>
/// <param name="count">
/// the number of data bytes of input.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// if the buffer was Finish()ed or if previous input is still pending.
/// </exception>
public void SetInput(byte[] input, int offset, int count)
{
if ((state & IS_FINISHING) != 0)
{
throw new InvalidOperationException("Finish() already called");
}
engine.SetInput(input, offset, count);
}
/// <summary>
/// Sets the compression level. There is no guarantee of the exact
/// position of the change, but if you call this when needsInput is
/// true the change of compression level will occur somewhere near
/// before the end of the so far given input.
/// </summary>
/// <param name="level">
/// the new compression level.
/// </param>
public void SetLevel(int level)
{
if (level == DEFAULT_COMPRESSION)
{
level = 6;
}
else if (level < NO_COMPRESSION || level > BEST_COMPRESSION)
{
throw new ArgumentOutOfRangeException(nameof(level));
}
if (this.level != level)
{
this.level = level;
engine.SetLevel(level);
}
}
/// <summary>
/// Get current compression level
/// </summary>
/// <returns>Returns the current compression level</returns>
public int GetLevel()
{
return level;
}
/// <summary>
/// Sets the compression strategy. Strategy is one of
/// DEFAULT_STRATEGY, HUFFMAN_ONLY and FILTERED. For the exact
/// position where the strategy is changed, the same as for
/// SetLevel() applies.
/// </summary>
/// <param name="strategy">
/// The new compression strategy.
/// </param>
public void SetStrategy(DeflateStrategy strategy)
{
engine.Strategy = strategy;
}
/// <summary>
/// Deflates the current input block to the given array.
/// </summary>
/// <param name="output">
/// The buffer where compressed data is stored
/// </param>
/// <returns>
/// The number of compressed bytes added to the output, or 0 if either
/// IsNeedingInput() or IsFinished returns true or length is zero.
/// </returns>
public int Deflate(byte[] output)
{
return Deflate(output, 0, output.Length);
}
/// <summary>
/// Deflates the current input block to the given array.
/// </summary>
/// <param name="output">
/// Buffer to store the compressed data.
/// </param>
/// <param name="offset">
/// Offset into the output array.
/// </param>
/// <param name="length">
/// The maximum number of bytes that may be stored.
/// </param>
/// <returns>
/// The number of compressed bytes added to the output, or 0 if either
/// needsInput() or finished() returns true or length is zero.
/// </returns>
/// <exception cref="System.InvalidOperationException">
/// If Finish() was previously called.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// If offset or length don't match the array length.
/// </exception>
public int Deflate(byte[] output, int offset, int length)
{
int origLength = length;
if (state == CLOSED_STATE)
{
throw new InvalidOperationException("Deflater closed");
}
if (state < BUSY_STATE)
{
// output header
int header = (DEFLATED +
((DeflaterConstants.MAX_WBITS - 8) << 4)) << 8;
int level_flags = (level - 1) >> 1;
if (level_flags < 0 || level_flags > 3)
{
level_flags = 3;
}
header |= level_flags << 6;
if ((state & IS_SETDICT) != 0)
{
// Dictionary was set
header |= DeflaterConstants.PRESET_DICT;
}
header += 31 - (header % 31);
pending.WriteShortMSB(header);
if ((state & IS_SETDICT) != 0)
{
int chksum = engine.Adler;
engine.ResetAdler();
pending.WriteShortMSB(chksum >> 16);
pending.WriteShortMSB(chksum & 0xffff);
}
state = BUSY_STATE | (state & (IS_FLUSHING | IS_FINISHING));
}
for (; ; )
{
int count = pending.Flush(output, offset, length);
offset += count;
totalOut += count;
length -= count;
if (length == 0 || state == FINISHED_STATE)
{
break;
}
if (!engine.Deflate((state & IS_FLUSHING) != 0, (state & IS_FINISHING) != 0))
{
switch (state)
{
case BUSY_STATE:
// We need more input now
return origLength - length;
case FLUSHING_STATE:
if (level != NO_COMPRESSION)
{
/* We have to supply some lookahead. 8 bit lookahead
* is needed by the zlib inflater, and we must fill
* the next byte, so that all bits are flushed.
*/
int neededbits = 8 + ((-pending.BitCount) & 7);
while (neededbits > 0)
{
/* write a static tree block consisting solely of
* an EOF:
*/
pending.WriteBits(2, 10);
neededbits -= 10;
}
}
state = BUSY_STATE;
break;
case FINISHING_STATE:
pending.AlignToByte();
// Compressed data is complete. Write footer information if required.
if (!noZlibHeaderOrFooter)
{
int adler = engine.Adler;
pending.WriteShortMSB(adler >> 16);
pending.WriteShortMSB(adler & 0xffff);
}
state = FINISHED_STATE;
break;
}
}
}
return origLength - length;
}
/// <summary>
/// Sets the dictionary which should be used in the deflate process.
/// This call is equivalent to <code>setDictionary(dict, 0, dict.Length)</code>.
/// </summary>
/// <param name="dictionary">
/// the dictionary.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// if SetInput () or Deflate () were already called or another dictionary was already set.
/// </exception>
public void SetDictionary(byte[] dictionary)
{
SetDictionary(dictionary, 0, dictionary.Length);
}
/// <summary>
/// Sets the dictionary which should be used in the deflate process.
/// The dictionary is a byte array containing strings that are
/// likely to occur in the data which should be compressed. The
/// dictionary is not stored in the compressed output, only a
/// checksum. To decompress the output you need to supply the same
/// dictionary again.
/// </summary>
/// <param name="dictionary">
/// The dictionary data
/// </param>
/// <param name="index">
/// The index where dictionary information commences.
/// </param>
/// <param name="count">
/// The number of bytes in the dictionary.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// If SetInput () or Deflate() were already called or another dictionary was already set.
/// </exception>
public void SetDictionary(byte[] dictionary, int index, int count)
{
if (state != INIT_STATE)
{
throw new InvalidOperationException();
}
state = SETDICT_STATE;
engine.SetDictionary(dictionary, index, count);
}
#region Instance Fields
/// <summary>
/// Compression level.
/// </summary>
private int level;
/// <summary>
/// If true no Zlib/RFC1950 headers or footers are generated
/// </summary>
private bool noZlibHeaderOrFooter;
/// <summary>
/// The current state.
/// </summary>
private int state;
/// <summary>
/// The total bytes of output written.
/// </summary>
private long totalOut;
/// <summary>
/// The pending output.
/// </summary>
private DeflaterPending pending;
/// <summary>
/// The deflater engine.
/// </summary>
private DeflaterEngine engine;
#endregion Instance Fields
}
}
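// --- Usage sketch (illustrative only; not part of the library source) ---
// A minimal example of driving the Deflater above by hand: feed the input
// with SetInput(), call Finish(), then drain Deflate() until IsFinished is
// true. With the default level the two-byte zlib header emitted by Deflate()
// works out to 0x78 0x9C: (8 + ((15 - 8) << 4)) << 8 = 0x7800, plus the
// level flags (2 << 6) and the adjustment that makes the value divisible by 31.
using System.IO;
using ICSharpCode.SharpZipLib.Zip.Compression;

internal static class DeflaterExample
{
    public static byte[] Compress(byte[] data)
    {
        var deflater = new Deflater(Deflater.DEFAULT_COMPRESSION, noZlibHeaderOrFooter: false);
        deflater.SetInput(data);
        deflater.Finish(); // no more input may be supplied after this call

        using (var result = new MemoryStream())
        {
            byte[] output = new byte[4096];
            while (!deflater.IsFinished)
            {
                int produced = deflater.Deflate(output, 0, output.Length);
                result.Write(output, 0, produced);
            }
            return result.ToArray();
        }
    }
}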

View File

@@ -0,0 +1,146 @@
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// This class contains constants used for deflation.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Naming", "CA1707:Identifiers should not contain underscores", Justification = "kept for backwards compatibility")]
public static class DeflaterConstants
{
/// <summary>
/// Set to true to enable debugging
/// </summary>
public const bool DEBUGGING = false;
/// <summary>
/// Written to Zip file to identify a stored block
/// </summary>
public const int STORED_BLOCK = 0;
/// <summary>
/// Identifies static tree in Zip file
/// </summary>
public const int STATIC_TREES = 1;
/// <summary>
/// Identifies dynamic tree in Zip file
/// </summary>
public const int DYN_TREES = 2;
/// <summary>
/// Header flag indicating a preset dictionary for deflation
/// </summary>
public const int PRESET_DICT = 0x20;
/// <summary>
/// Sets internal buffer sizes for Huffman encoding
/// </summary>
public const int DEFAULT_MEM_LEVEL = 8;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int MAX_MATCH = 258;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int MIN_MATCH = 3;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int MAX_WBITS = 15;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int WSIZE = 1 << MAX_WBITS;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int WMASK = WSIZE - 1;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int HASH_BITS = DEFAULT_MEM_LEVEL + 7;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int HASH_SIZE = 1 << HASH_BITS;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int HASH_MASK = HASH_SIZE - 1;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int HASH_SHIFT = (HASH_BITS + MIN_MATCH - 1) / MIN_MATCH;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int MIN_LOOKAHEAD = MAX_MATCH + MIN_MATCH + 1;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int MAX_DIST = WSIZE - MIN_LOOKAHEAD;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int PENDING_BUF_SIZE = 1 << (DEFAULT_MEM_LEVEL + 8);
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int MAX_BLOCK_SIZE = Math.Min(65535, PENDING_BUF_SIZE - 5);
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int DEFLATE_STORED = 0;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int DEFLATE_FAST = 1;
/// <summary>
/// Internal compression engine constant
/// </summary>
public const int DEFLATE_SLOW = 2;
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int[] GOOD_LENGTH = { 0, 4, 4, 4, 4, 8, 8, 8, 32, 32 };
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int[] MAX_LAZY = { 0, 4, 5, 6, 4, 16, 16, 32, 128, 258 };
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int[] NICE_LENGTH = { 0, 8, 16, 32, 16, 32, 128, 128, 258, 258 };
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int[] MAX_CHAIN = { 0, 4, 8, 32, 16, 32, 128, 256, 1024, 4096 };
/// <summary>
/// Internal compression engine constant
/// </summary>
public static int[] COMPR_FUNC = { 0, 1, 1, 1, 1, 2, 2, 2, 2, 2 };
}
}
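// --- Derived values (illustrative check; not part of the library source) ---
// The constants above are defined in terms of each other. For MAX_WBITS = 15
// and DEFAULT_MEM_LEVEL = 8 they work out to:
//   WSIZE            = 1 << 15              = 32768 (the 32K sliding window)
//   WMASK            = WSIZE - 1            = 32767
//   HASH_BITS        = 8 + 7                = 15,  HASH_SIZE = 32768
//   HASH_SHIFT       = (15 + 3 - 1) / 3     = 5    (integer division)
//   MIN_LOOKAHEAD    = 258 + 3 + 1          = 262
//   MAX_DIST         = 32768 - 262          = 32506
//   PENDING_BUF_SIZE = 1 << (8 + 8)         = 65536
//   MAX_BLOCK_SIZE   = Math.Min(65535, 65531) = 65531
// A tiny sanity check of those numbers:
using System;
using System.Diagnostics;
using ICSharpCode.SharpZipLib.Zip.Compression;

internal static class DeflaterConstantsCheck
{
    public static void Main()
    {
        Debug.Assert(DeflaterConstants.WSIZE == 32768);
        Debug.Assert(DeflaterConstants.MAX_DIST == 32506);
        Debug.Assert(DeflaterConstants.MAX_BLOCK_SIZE == 65531);
        Console.WriteLine("WSIZE={0} MAX_DIST={1} MAX_BLOCK_SIZE={2}",
            DeflaterConstants.WSIZE, DeflaterConstants.MAX_DIST, DeflaterConstants.MAX_BLOCK_SIZE);
    }
}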

View File

@@ -0,0 +1,946 @@
using ICSharpCode.SharpZipLib.Checksum;
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// Strategies for deflater
/// </summary>
public enum DeflateStrategy
{
/// <summary>
/// The default strategy
/// </summary>
Default = 0,
/// <summary>
/// This strategy will only allow longer string repetitions. It is
/// useful for random data with a small character set.
/// </summary>
Filtered = 1,
/// <summary>
/// This strategy will not look for string repetitions at all. It
/// only encodes with Huffman trees (which means that more common
/// characters get a smaller encoding).
/// </summary>
HuffmanOnly = 2
}
// DEFLATE ALGORITHM:
//
// The uncompressed stream is inserted into the window array. When
// the window array is full the first half is thrown away and the
// second half is copied to the beginning.
//
// The head array is a hash table. Three characters build a hash value
// and the hash table entry points to the corresponding index in window of
// the last string with this hash. The prev array implements a
// linked list of matches with the same hash: prev[index & WMASK] points
// to the previous index with the same hash.
//
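// --- Hash-chain sketch (illustrative only; not part of the library source) ---
// The comment above describes how head[] acts as a hash table over three-byte
// prefixes and prev[] chains earlier positions that share the same hash. The
// toy code below shows the same bookkeeping with int arrays and -1 meaning
// "no earlier position"; the real engine packs positions into shorts, uses
// HASH_SHIFT / HASH_MASK, and verifies the candidate bytes while matching.
using System.Collections.Generic;

internal static class HashChainSketch
{
    private const int HashSize = 1 << 12;
    private const int HashMask = HashSize - 1;

    // Returns every earlier position whose three-byte hash matches the one at pos.
    public static List<int> EarlierCandidates(byte[] window, int pos)
    {
        var matches = new List<int>();
        if (pos + 2 >= window.Length)
        {
            return matches;
        }

        int[] head = new int[HashSize];
        int[] prev = new int[window.Length];
        for (int i = 0; i < HashSize; i++) head[i] = -1;

        // Index every position before pos.
        for (int i = 0; i < pos && i + 2 < window.Length; i++)
        {
            int h = Hash(window, i);
            prev[i] = head[h]; // remember the previous position with this hash
            head[h] = i;       // this position is now the most recent one
        }

        // Walk the chain for the three bytes starting at pos.
        for (int m = head[Hash(window, pos)]; m != -1; m = prev[m])
        {
            matches.Add(m); // candidate only; hash collisions are possible
        }
        return matches;
    }

    private static int Hash(byte[] data, int i)
    {
        // Three characters build the hash value, as in the engine below.
        return ((data[i] << 10) ^ (data[i + 1] << 5) ^ data[i + 2]) & HashMask;
    }
}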
/// <summary>
/// Low level compression engine for deflate algorithm which uses a 32K sliding window
/// with secondary compression from Huffman/Shannon-Fano codes.
/// </summary>
public class DeflaterEngine
{
#region Constants
private const int TooFar = 4096;
#endregion Constants
#region Constructors
/// <summary>
/// Construct instance with pending buffer
/// Adler calculation will be performed
/// </summary>
/// <param name="pending">
/// Pending buffer to use
/// </param>
public DeflaterEngine(DeflaterPending pending)
: this (pending, false)
{
}
/// <summary>
/// Construct instance with pending buffer
/// </summary>
/// <param name="pending">
/// Pending buffer to use
/// </param>
/// <param name="noAdlerCalculation">
/// If no adler calculation should be performed
/// </param>
public DeflaterEngine(DeflaterPending pending, bool noAdlerCalculation)
{
this.pending = pending;
huffman = new DeflaterHuffman(pending);
if (!noAdlerCalculation)
adler = new Adler32();
window = new byte[2 * DeflaterConstants.WSIZE];
head = new short[DeflaterConstants.HASH_SIZE];
prev = new short[DeflaterConstants.WSIZE];
// We start at index 1 to avoid an implementation deficiency:
// we cannot build a repeat pattern at index 0.
blockStart = strstart = 1;
}
#endregion Constructors
/// <summary>
/// Deflate drives actual compression of data
/// </summary>
/// <param name="flush">True to flush input buffers</param>
/// <param name="finish">Finish deflation with the current input.</param>
/// <returns>Returns true if progress has been made.</returns>
public bool Deflate(bool flush, bool finish)
{
bool progress;
do
{
FillWindow();
bool canFlush = flush && (inputOff == inputEnd);
#if DebugDeflation
if (DeflaterConstants.DEBUGGING) {
Console.WriteLine("window: [" + blockStart + "," + strstart + ","
+ lookahead + "], " + compressionFunction + "," + canFlush);
}
#endif
switch (compressionFunction)
{
case DeflaterConstants.DEFLATE_STORED:
progress = DeflateStored(canFlush, finish);
break;
case DeflaterConstants.DEFLATE_FAST:
progress = DeflateFast(canFlush, finish);
break;
case DeflaterConstants.DEFLATE_SLOW:
progress = DeflateSlow(canFlush, finish);
break;
default:
throw new InvalidOperationException("unknown compressionFunction");
}
} while (pending.IsFlushed && progress); // repeat while we have no pending output and progress was made
return progress;
}
/// <summary>
/// Sets input data to be deflated. Should only be called when <code>NeedsInput()</code>
/// returns true
/// </summary>
/// <param name="buffer">The buffer containing input data.</param>
/// <param name="offset">The offset of the first byte of data.</param>
/// <param name="count">The number of bytes of data to use as input.</param>
public void SetInput(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (inputOff < inputEnd)
{
throw new InvalidOperationException("Old input was not completely processed");
}
int end = offset + count;
/* We want to throw an ArrayIndexOutOfBoundsException early. The
* check is very tricky: it also handles integer wrap around.
*/
if ((offset > end) || (end > buffer.Length))
{
throw new ArgumentOutOfRangeException(nameof(count));
}
inputBuf = buffer;
inputOff = offset;
inputEnd = end;
}
/// <summary>
/// Determines if more <see cref="SetInput">input</see> is needed.
/// </summary>
/// <returns>Return true if input is needed via <see cref="SetInput">SetInput</see></returns>
public bool NeedsInput()
{
return (inputEnd == inputOff);
}
/// <summary>
/// Set compression dictionary
/// </summary>
/// <param name="buffer">The buffer containing the dictionary data</param>
/// <param name="offset">The offset in the buffer for the first byte of data</param>
/// <param name="length">The length of the dictionary data.</param>
public void SetDictionary(byte[] buffer, int offset, int length)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (strstart != 1) )
{
throw new InvalidOperationException("strstart not 1");
}
#endif
adler?.Update(new ArraySegment<byte>(buffer, offset, length));
if (length < DeflaterConstants.MIN_MATCH)
{
return;
}
if (length > DeflaterConstants.MAX_DIST)
{
offset += length - DeflaterConstants.MAX_DIST;
length = DeflaterConstants.MAX_DIST;
}
System.Array.Copy(buffer, offset, window, strstart, length);
UpdateHash();
--length;
while (--length > 0)
{
InsertString();
strstart++;
}
strstart += 2;
blockStart = strstart;
}
/// <summary>
/// Reset internal state
/// </summary>
public void Reset()
{
huffman.Reset();
adler?.Reset();
blockStart = strstart = 1;
lookahead = 0;
totalIn = 0;
prevAvailable = false;
matchLen = DeflaterConstants.MIN_MATCH - 1;
for (int i = 0; i < DeflaterConstants.HASH_SIZE; i++)
{
head[i] = 0;
}
for (int i = 0; i < DeflaterConstants.WSIZE; i++)
{
prev[i] = 0;
}
}
/// <summary>
/// Reset Adler checksum
/// </summary>
public void ResetAdler()
{
adler?.Reset();
}
/// <summary>
/// Get current value of Adler checksum
/// </summary>
public int Adler
{
get
{
return (adler != null) ? unchecked((int)adler.Value) : 0;
}
}
/// <summary>
/// Total data processed
/// </summary>
public long TotalIn
{
get
{
return totalIn;
}
}
/// <summary>
/// Get/set the <see cref="DeflateStrategy">deflate strategy</see>
/// </summary>
public DeflateStrategy Strategy
{
get
{
return strategy;
}
set
{
strategy = value;
}
}
/// <summary>
/// Set the deflate level (0-9)
/// </summary>
/// <param name="level">The value to set the level to.</param>
public void SetLevel(int level)
{
if ((level < 0) || (level > 9))
{
throw new ArgumentOutOfRangeException(nameof(level));
}
goodLength = DeflaterConstants.GOOD_LENGTH[level];
max_lazy = DeflaterConstants.MAX_LAZY[level];
niceLength = DeflaterConstants.NICE_LENGTH[level];
max_chain = DeflaterConstants.MAX_CHAIN[level];
if (DeflaterConstants.COMPR_FUNC[level] != compressionFunction)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING) {
Console.WriteLine("Change from " + compressionFunction + " to "
+ DeflaterConstants.COMPR_FUNC[level]);
}
#endif
switch (compressionFunction)
{
case DeflaterConstants.DEFLATE_STORED:
if (strstart > blockStart)
{
huffman.FlushStoredBlock(window, blockStart,
strstart - blockStart, false);
blockStart = strstart;
}
UpdateHash();
break;
case DeflaterConstants.DEFLATE_FAST:
if (strstart > blockStart)
{
huffman.FlushBlock(window, blockStart, strstart - blockStart,
false);
blockStart = strstart;
}
break;
case DeflaterConstants.DEFLATE_SLOW:
if (prevAvailable)
{
huffman.TallyLit(window[strstart - 1] & 0xff);
}
if (strstart > blockStart)
{
huffman.FlushBlock(window, blockStart, strstart - blockStart, false);
blockStart = strstart;
}
prevAvailable = false;
matchLen = DeflaterConstants.MIN_MATCH - 1;
break;
}
compressionFunction = DeflaterConstants.COMPR_FUNC[level];
}
}
/// <summary>
/// Fill the window
/// </summary>
public void FillWindow()
{
/* If the window is almost full and there is insufficient lookahead,
* move the upper half to the lower one to make room in the upper half.
*/
if (strstart >= DeflaterConstants.WSIZE + DeflaterConstants.MAX_DIST)
{
SlideWindow();
}
/* If there is not enough lookahead, but still some input left,
* read in the input
*/
if (lookahead < DeflaterConstants.MIN_LOOKAHEAD && inputOff < inputEnd)
{
int more = 2 * DeflaterConstants.WSIZE - lookahead - strstart;
if (more > inputEnd - inputOff)
{
more = inputEnd - inputOff;
}
System.Array.Copy(inputBuf, inputOff, window, strstart + lookahead, more);
adler?.Update(new ArraySegment<byte>(inputBuf, inputOff, more));
inputOff += more;
totalIn += more;
lookahead += more;
}
if (lookahead >= DeflaterConstants.MIN_MATCH)
{
UpdateHash();
}
}
private void UpdateHash()
{
/*
if (DEBUGGING) {
Console.WriteLine("updateHash: "+strstart);
}
*/
ins_h = (window[strstart] << DeflaterConstants.HASH_SHIFT) ^ window[strstart + 1];
}
/// <summary>
/// Inserts the current string in the head hash and returns the previous
/// value for this hash.
/// </summary>
/// <returns>The previous hash value</returns>
private int InsertString()
{
short match;
int hash = ((ins_h << DeflaterConstants.HASH_SHIFT) ^ window[strstart + (DeflaterConstants.MIN_MATCH - 1)]) & DeflaterConstants.HASH_MASK;
#if DebugDeflation
if (DeflaterConstants.DEBUGGING)
{
if (hash != (((window[strstart] << (2*HASH_SHIFT)) ^
(window[strstart + 1] << HASH_SHIFT) ^
(window[strstart + 2])) & HASH_MASK)) {
throw new SharpZipBaseException("hash inconsistent: " + hash + "/"
+window[strstart] + ","
+window[strstart + 1] + ","
+window[strstart + 2] + "," + HASH_SHIFT);
}
}
#endif
prev[strstart & DeflaterConstants.WMASK] = match = head[hash];
head[hash] = unchecked((short)strstart);
ins_h = hash;
return match & 0xffff;
}
private void SlideWindow()
{
Array.Copy(window, DeflaterConstants.WSIZE, window, 0, DeflaterConstants.WSIZE);
matchStart -= DeflaterConstants.WSIZE;
strstart -= DeflaterConstants.WSIZE;
blockStart -= DeflaterConstants.WSIZE;
// Slide the hash table (could be avoided with 32 bit values
// at the expense of memory usage).
for (int i = 0; i < DeflaterConstants.HASH_SIZE; ++i)
{
int m = head[i] & 0xffff;
head[i] = (short)(m >= DeflaterConstants.WSIZE ? (m - DeflaterConstants.WSIZE) : 0);
}
// Slide the prev table.
for (int i = 0; i < DeflaterConstants.WSIZE; i++)
{
int m = prev[i] & 0xffff;
prev[i] = (short)(m >= DeflaterConstants.WSIZE ? (m - DeflaterConstants.WSIZE) : 0);
}
}
/// <summary>
/// Find the best (longest) string in the window matching the
/// string starting at strstart.
///
/// Preconditions:
/// <code>
/// strstart + DeflaterConstants.MAX_MATCH &lt;= window.length.</code>
/// </summary>
/// <param name="curMatch"></param>
/// <returns>True if a match greater than the minimum length is found</returns>
private bool FindLongestMatch(int curMatch)
{
int match;
int scan = strstart;
// scanMax is the highest position that we can look at
int scanMax = scan + Math.Min(DeflaterConstants.MAX_MATCH, lookahead) - 1;
int limit = Math.Max(scan - DeflaterConstants.MAX_DIST, 0);
byte[] window = this.window;
short[] prev = this.prev;
int chainLength = this.max_chain;
int niceLength = Math.Min(this.niceLength, lookahead);
matchLen = Math.Max(matchLen, DeflaterConstants.MIN_MATCH - 1);
if (scan + matchLen > scanMax) return false;
byte scan_end1 = window[scan + matchLen - 1];
byte scan_end = window[scan + matchLen];
// Do not waste too much time if we already have a good match:
if (matchLen >= this.goodLength) chainLength >>= 2;
do
{
match = curMatch;
scan = strstart;
if (window[match + matchLen] != scan_end
|| window[match + matchLen - 1] != scan_end1
|| window[match] != window[scan]
|| window[++match] != window[++scan])
{
continue;
}
// scan is set to strstart+1 and the comparison passed, so
// scanMax - scan is the maximum number of bytes we can compare.
// below we compare 8 bytes at a time, so first we compare
// (scanMax - scan) % 8 bytes, so the remainder is a multiple of 8
switch ((scanMax - scan) % 8)
{
case 1:
if (window[++scan] == window[++match]) break;
break;
case 2:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
case 3:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
case 4:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
case 5:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
case 6:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
case 7:
if (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]) break;
break;
}
if (window[scan] == window[match])
{
/* We check for insufficient lookahead only every 8th comparison;
* the 256th check will be made at strstart + 258 unless lookahead is
* exhausted first.
*/
do
{
if (scan == scanMax)
{
++scan; // advance to first position not matched
++match;
break;
}
}
while (window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]
&& window[++scan] == window[++match]);
}
if (scan - strstart > matchLen)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (ins_h == 0) )
Console.Error.WriteLine("Found match: " + curMatch + "-" + (scan - strstart));
#endif
matchStart = curMatch;
matchLen = scan - strstart;
if (matchLen >= niceLength)
break;
scan_end1 = window[scan - 1];
scan_end = window[scan];
}
} while ((curMatch = (prev[curMatch & DeflaterConstants.WMASK] & 0xffff)) > limit && 0 != --chainLength);
return matchLen >= DeflaterConstants.MIN_MATCH;
}
private bool DeflateStored(bool flush, bool finish)
{
if (!flush && (lookahead == 0))
{
return false;
}
strstart += lookahead;
lookahead = 0;
int storedLength = strstart - blockStart;
if ((storedLength >= DeflaterConstants.MAX_BLOCK_SIZE) || // Block is full
(blockStart < DeflaterConstants.WSIZE && storedLength >= DeflaterConstants.MAX_DIST) || // Block may move out of window
flush)
{
bool lastBlock = finish;
if (storedLength > DeflaterConstants.MAX_BLOCK_SIZE)
{
storedLength = DeflaterConstants.MAX_BLOCK_SIZE;
lastBlock = false;
}
#if DebugDeflation
if (DeflaterConstants.DEBUGGING)
{
Console.WriteLine("storedBlock[" + storedLength + "," + lastBlock + "]");
}
#endif
huffman.FlushStoredBlock(window, blockStart, storedLength, lastBlock);
blockStart += storedLength;
return !(lastBlock || storedLength == 0);
}
return true;
}
private bool DeflateFast(bool flush, bool finish)
{
if (lookahead < DeflaterConstants.MIN_LOOKAHEAD && !flush)
{
return false;
}
while (lookahead >= DeflaterConstants.MIN_LOOKAHEAD || flush)
{
if (lookahead == 0)
{
// We are flushing everything
huffman.FlushBlock(window, blockStart, strstart - blockStart, finish);
blockStart = strstart;
return false;
}
if (strstart > 2 * DeflaterConstants.WSIZE - DeflaterConstants.MIN_LOOKAHEAD)
{
/* slide window, as FindLongestMatch needs this.
* This should only happen when flushing and the window
* is almost full.
*/
SlideWindow();
}
int hashHead;
if (lookahead >= DeflaterConstants.MIN_MATCH &&
(hashHead = InsertString()) != 0 &&
strategy != DeflateStrategy.HuffmanOnly &&
strstart - hashHead <= DeflaterConstants.MAX_DIST &&
FindLongestMatch(hashHead))
{
// longestMatch sets matchStart and matchLen
#if DebugDeflation
if (DeflaterConstants.DEBUGGING)
{
for (int i = 0 ; i < matchLen; i++) {
if (window[strstart + i] != window[matchStart + i]) {
throw new SharpZipBaseException("Match failure");
}
}
}
#endif
bool full = huffman.TallyDist(strstart - matchStart, matchLen);
lookahead -= matchLen;
if (matchLen <= max_lazy && lookahead >= DeflaterConstants.MIN_MATCH)
{
while (--matchLen > 0)
{
++strstart;
InsertString();
}
++strstart;
}
else
{
strstart += matchLen;
if (lookahead >= DeflaterConstants.MIN_MATCH - 1)
{
UpdateHash();
}
}
matchLen = DeflaterConstants.MIN_MATCH - 1;
if (!full)
{
continue;
}
}
else
{
// No match found
huffman.TallyLit(window[strstart] & 0xff);
++strstart;
--lookahead;
}
if (huffman.IsFull())
{
bool lastBlock = finish && (lookahead == 0);
huffman.FlushBlock(window, blockStart, strstart - blockStart, lastBlock);
blockStart = strstart;
return !lastBlock;
}
}
return true;
}
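// DeflateSlow adds lazy matching on top of DeflateFast: after finding a match it
// defers the decision by one byte and only emits the previous match if the match
// starting at the next position is not longer (see prevLen/prevMatch below).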
private bool DeflateSlow(bool flush, bool finish)
{
if (lookahead < DeflaterConstants.MIN_LOOKAHEAD && !flush)
{
return false;
}
while (lookahead >= DeflaterConstants.MIN_LOOKAHEAD || flush)
{
if (lookahead == 0)
{
if (prevAvailable)
{
huffman.TallyLit(window[strstart - 1] & 0xff);
}
prevAvailable = false;
// We are flushing everything
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && !flush)
{
throw new SharpZipBaseException("Not flushing, but no lookahead");
}
#endif
huffman.FlushBlock(window, blockStart, strstart - blockStart,
finish);
blockStart = strstart;
return false;
}
if (strstart >= 2 * DeflaterConstants.WSIZE - DeflaterConstants.MIN_LOOKAHEAD)
{
/* slide window, as FindLongestMatch needs this.
* This should only happen when flushing and the window
* is almost full.
*/
SlideWindow();
}
int prevMatch = matchStart;
int prevLen = matchLen;
if (lookahead >= DeflaterConstants.MIN_MATCH)
{
int hashHead = InsertString();
if (strategy != DeflateStrategy.HuffmanOnly &&
hashHead != 0 &&
strstart - hashHead <= DeflaterConstants.MAX_DIST &&
FindLongestMatch(hashHead))
{
// longestMatch sets matchStart and matchLen
// Discard match if too small and too far away
if (matchLen <= 5 && (strategy == DeflateStrategy.Filtered || (matchLen == DeflaterConstants.MIN_MATCH && strstart - matchStart > TooFar)))
{
matchLen = DeflaterConstants.MIN_MATCH - 1;
}
}
}
// previous match was better
if ((prevLen >= DeflaterConstants.MIN_MATCH) && (matchLen <= prevLen))
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING)
{
for (int i = 0 ; i < matchLen; i++) {
if (window[strstart-1+i] != window[prevMatch + i])
throw new SharpZipBaseException();
}
}
#endif
huffman.TallyDist(strstart - 1 - prevMatch, prevLen);
prevLen -= 2;
do
{
strstart++;
lookahead--;
if (lookahead >= DeflaterConstants.MIN_MATCH)
{
InsertString();
}
} while (--prevLen > 0);
strstart++;
lookahead--;
prevAvailable = false;
matchLen = DeflaterConstants.MIN_MATCH - 1;
}
else
{
if (prevAvailable)
{
huffman.TallyLit(window[strstart - 1] & 0xff);
}
prevAvailable = true;
strstart++;
lookahead--;
}
if (huffman.IsFull())
{
int len = strstart - blockStart;
if (prevAvailable)
{
len--;
}
bool lastBlock = (finish && (lookahead == 0) && !prevAvailable);
huffman.FlushBlock(window, blockStart, len, lastBlock);
blockStart += len;
return !lastBlock;
}
}
return true;
}
#region Instance Fields
// Hash index of string to be inserted
private int ins_h;
/// <summary>
/// Hashtable, hashing three characters to an index for window, so
/// that window[index]..window[index+2] have this hash code.
/// Note that the array should really be unsigned short, so you need
/// to mask the values with 0xffff.
/// </summary>
private short[] head;
/// <summary>
/// <code>prev[index &amp; WMASK]</code> points to the previous index that has the
/// same hash code as the string starting at index. This way
/// entries with the same hash code are in a linked list.
/// Note that the array should really be unsigned short, so you need
/// to mask the values with 0xffff.
/// </summary>
private short[] prev;
private int matchStart;
// Length of best match
private int matchLen;
// Set if previous match exists
private bool prevAvailable;
private int blockStart;
/// <summary>
/// Points to the current character in the window.
/// </summary>
private int strstart;
/// <summary>
/// lookahead is the number of characters starting at strstart in
/// window that are valid.
/// So window[strstart] until window[strstart+lookahead-1] are valid
/// characters.
/// </summary>
private int lookahead;
/// <summary>
/// This array contains the part of the uncompressed stream that
/// is of relevance. The current character is indexed by strstart.
/// </summary>
private byte[] window;
private DeflateStrategy strategy;
private int max_chain, max_lazy, niceLength, goodLength;
/// <summary>
/// The current compression function.
/// </summary>
private int compressionFunction;
/// <summary>
/// The input data for compression.
/// </summary>
private byte[] inputBuf;
/// <summary>
/// The total bytes of input read.
/// </summary>
private long totalIn;
/// <summary>
/// The offset into inputBuf, where input data starts.
/// </summary>
private int inputOff;
/// <summary>
/// The end offset of the input data.
/// </summary>
private int inputEnd;
private DeflaterPending pending;
private DeflaterHuffman huffman;
/// <summary>
/// The adler checksum
/// </summary>
private Adler32 adler;
#endregion Instance Fields
}
}

View File

@@ -0,0 +1,959 @@
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// This is the DeflaterHuffman class.
///
/// This class is <i>not</i> thread safe. This is inherent in the API, due
/// to the split of Deflate and SetInput.
///
/// author of the original java version : Jochen Hoenicke
/// </summary>
public class DeflaterHuffman
{
private const int BUFSIZE = 1 << (DeflaterConstants.DEFAULT_MEM_LEVEL + 6);
private const int LITERAL_NUM = 286;
// Number of distance codes
private const int DIST_NUM = 30;
// Number of codes used to transfer bit lengths
private const int BITLEN_NUM = 19;
// repeat previous bit length 3-6 times (2 bits of repeat count)
private const int REP_3_6 = 16;
// repeat a zero length 3-10 times (3 bits of repeat count)
private const int REP_3_10 = 17;
// repeat a zero length 11-138 times (7 bits of repeat count)
private const int REP_11_138 = 18;
private const int EOF_SYMBOL = 256;
// The lengths of the bit length codes are sent in order of decreasing
// probability, to avoid transmitting the lengths for unused bit length codes.
private static readonly int[] BL_ORDER = { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
private static readonly byte[] bit4Reverse = {
0,
8,
4,
12,
2,
10,
6,
14,
1,
9,
5,
13,
3,
11,
7,
15
};
private static short[] staticLCodes;
private static byte[] staticLLength;
private static short[] staticDCodes;
private static byte[] staticDLength;
private class Tree
{
#region Instance Fields
public short[] freqs;
public byte[] length;
public int minNumCodes;
public int numCodes;
private short[] codes;
private readonly int[] bl_counts;
private readonly int maxLength;
private DeflaterHuffman dh;
#endregion Instance Fields
#region Constructors
public Tree(DeflaterHuffman dh, int elems, int minCodes, int maxLength)
{
this.dh = dh;
this.minNumCodes = minCodes;
this.maxLength = maxLength;
freqs = new short[elems];
bl_counts = new int[maxLength];
}
#endregion Constructors
/// <summary>
/// Resets the internal state of the tree
/// </summary>
public void Reset()
{
for (int i = 0; i < freqs.Length; i++)
{
freqs[i] = 0;
}
codes = null;
length = null;
}
public void WriteSymbol(int code)
{
// if (DeflaterConstants.DEBUGGING) {
// freqs[code]--;
// // Console.Write("writeSymbol("+freqs.length+","+code+"): ");
// }
dh.pending.WriteBits(codes[code] & 0xffff, length[code]);
}
/// <summary>
/// Check that all frequencies are zero
/// </summary>
/// <exception cref="SharpZipBaseException">
/// At least one frequency is non-zero
/// </exception>
public void CheckEmpty()
{
bool empty = true;
for (int i = 0; i < freqs.Length; i++)
{
empty &= freqs[i] == 0;
}
if (!empty)
{
throw new SharpZipBaseException("!Empty");
}
}
/// <summary>
/// Set static codes and length
/// </summary>
/// <param name="staticCodes">new codes</param>
/// <param name="staticLengths">length for new codes</param>
public void SetStaticCodes(short[] staticCodes, byte[] staticLengths)
{
codes = staticCodes;
length = staticLengths;
}
/// <summary>
/// Build dynamic codes and lengths
/// </summary>
public void BuildCodes()
{
int numSymbols = freqs.Length;
int[] nextCode = new int[maxLength];
int code = 0;
codes = new short[freqs.Length];
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("buildCodes: "+freqs.Length);
// }
for (int bits = 0; bits < maxLength; bits++)
{
nextCode[bits] = code;
code += bl_counts[bits] << (15 - bits);
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("bits: " + ( bits + 1) + " count: " + bl_counts[bits]
// +" nextCode: "+code);
// }
}
#if DebugDeflation
if ( DeflaterConstants.DEBUGGING && (code != 65536) )
{
throw new SharpZipBaseException("Inconsistent bl_counts!");
}
#endif
for (int i = 0; i < numCodes; i++)
{
int bits = length[i];
if (bits > 0)
{
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("codes["+i+"] = rev(" + nextCode[bits-1]+"),
// +bits);
// }
codes[i] = BitReverse(nextCode[bits - 1]);
nextCode[bits - 1] += 1 << (16 - bits);
}
}
}
public void BuildTree()
{
int numSymbols = freqs.Length;
/* heap is a priority queue, sorted by frequency, least frequent
* nodes first. The heap is a binary tree, with the property, that
* the parent node is smaller than both child nodes. This assures
* that the smallest node is the first parent.
*
* The binary tree is encoded in an array: 0 is root node and
* the nodes 2*n+1, 2*n+2 are the child nodes of node n.
*/
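/* Small illustration (indices only): with heap = [a, b, c, d], a is the root,
 * b and c (indices 2*0+1 and 2*0+2) are its children, and d (index 2*1+1) is
 * the first child of b.
 */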
int[] heap = new int[numSymbols];
int heapLen = 0;
int maxCode = 0;
for (int n = 0; n < numSymbols; n++)
{
int freq = freqs[n];
if (freq != 0)
{
// Insert n into heap
int pos = heapLen++;
int ppos;
while (pos > 0 && freqs[heap[ppos = (pos - 1) / 2]] > freq)
{
heap[pos] = heap[ppos];
pos = ppos;
}
heap[pos] = n;
maxCode = n;
}
}
/* We could encode a single literal with 0 bits but then we
* don't see the literals. Therefore we force at least two
* literals to avoid this case. We don't care about order in
* this case, both literals get a 1 bit code.
*/
while (heapLen < 2)
{
int node = maxCode < 2 ? ++maxCode : 0;
heap[heapLen++] = node;
}
numCodes = Math.Max(maxCode + 1, minNumCodes);
int numLeafs = heapLen;
int[] childs = new int[4 * heapLen - 2];
int[] values = new int[2 * heapLen - 1];
int numNodes = numLeafs;
for (int i = 0; i < heapLen; i++)
{
int node = heap[i];
childs[2 * i] = node;
childs[2 * i + 1] = -1;
values[i] = freqs[node] << 8;
heap[i] = i;
}
/* Construct the Huffman tree by repeatedly combining the least two
* frequent nodes.
*/
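/* Worked example (illustrative frequencies): for symbols with frequencies
 * 1, 2 and 4, the two least frequent (1 and 2) are combined into a node of
 * weight 3, which is then combined with 4; the rare symbols end up two levels
 * deep (2-bit codes) and the frequent one a single level deep (1-bit code).
 */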
do
{
int first = heap[0];
int last = heap[--heapLen];
// Propagate the hole to the leafs of the heap
int ppos = 0;
int path = 1;
while (path < heapLen)
{
if (path + 1 < heapLen && values[heap[path]] > values[heap[path + 1]])
{
path++;
}
heap[ppos] = heap[path];
ppos = path;
path = path * 2 + 1;
}
/* Now propagate the last element down along path. Normally
* it shouldn't go too deep.
*/
int lastVal = values[last];
while ((path = ppos) > 0 && values[heap[ppos = (path - 1) / 2]] > lastVal)
{
heap[path] = heap[ppos];
}
heap[path] = last;
int second = heap[0];
// Create a new node father of first and second
last = numNodes++;
childs[2 * last] = first;
childs[2 * last + 1] = second;
int mindepth = Math.Min(values[first] & 0xff, values[second] & 0xff);
values[last] = lastVal = values[first] + values[second] - mindepth + 1;
// Again, propagate the hole to the leafs
ppos = 0;
path = 1;
while (path < heapLen)
{
if (path + 1 < heapLen && values[heap[path]] > values[heap[path + 1]])
{
path++;
}
heap[ppos] = heap[path];
ppos = path;
path = ppos * 2 + 1;
}
// Now propagate the new element down along path
while ((path = ppos) > 0 && values[heap[ppos = (path - 1) / 2]] > lastVal)
{
heap[path] = heap[ppos];
}
heap[path] = last;
} while (heapLen > 1);
if (heap[0] != childs.Length / 2 - 1)
{
throw new SharpZipBaseException("Heap invariant violated");
}
BuildLength(childs);
}
/// <summary>
/// Get encoded length
/// </summary>
/// <returns>Encoded length, the sum of frequencies * lengths</returns>
public int GetEncodedLength()
{
int len = 0;
for (int i = 0; i < freqs.Length; i++)
{
len += freqs[i] * length[i];
}
return len;
}
/// <summary>
/// Scan a literal or distance tree to determine the frequencies of the codes
/// in the bit length tree.
/// </summary>
public void CalcBLFreq(Tree blTree)
{
int max_count; /* max repeat count */
int min_count; /* min repeat count */
int count; /* repeat count of the current code */
int curlen = -1; /* length of current code */
int i = 0;
while (i < numCodes)
{
count = 1;
int nextlen = length[i];
if (nextlen == 0)
{
max_count = 138;
min_count = 3;
}
else
{
max_count = 6;
min_count = 3;
if (curlen != nextlen)
{
blTree.freqs[nextlen]++;
count = 0;
}
}
curlen = nextlen;
i++;
while (i < numCodes && curlen == length[i])
{
i++;
if (++count >= max_count)
{
break;
}
}
if (count < min_count)
{
blTree.freqs[curlen] += (short)count;
}
else if (curlen != 0)
{
blTree.freqs[REP_3_6]++;
}
else if (count <= 10)
{
blTree.freqs[REP_3_10]++;
}
else
{
blTree.freqs[REP_11_138]++;
}
}
}
/// <summary>
/// Write tree values
/// </summary>
/// <param name="blTree">Tree to write</param>
public void WriteTree(Tree blTree)
{
int max_count; // max repeat count
int min_count; // min repeat count
int count; // repeat count of the current code
int curlen = -1; // length of current code
int i = 0;
while (i < numCodes)
{
count = 1;
int nextlen = length[i];
if (nextlen == 0)
{
max_count = 138;
min_count = 3;
}
else
{
max_count = 6;
min_count = 3;
if (curlen != nextlen)
{
blTree.WriteSymbol(nextlen);
count = 0;
}
}
curlen = nextlen;
i++;
while (i < numCodes && curlen == length[i])
{
i++;
if (++count >= max_count)
{
break;
}
}
if (count < min_count)
{
while (count-- > 0)
{
blTree.WriteSymbol(curlen);
}
}
else if (curlen != 0)
{
blTree.WriteSymbol(REP_3_6);
dh.pending.WriteBits(count - 3, 2);
}
else if (count <= 10)
{
blTree.WriteSymbol(REP_3_10);
dh.pending.WriteBits(count - 3, 3);
}
else
{
blTree.WriteSymbol(REP_11_138);
dh.pending.WriteBits(count - 11, 7);
}
}
}
private void BuildLength(int[] childs)
{
this.length = new byte[freqs.Length];
int numNodes = childs.Length / 2;
int numLeafs = (numNodes + 1) / 2;
int overflow = 0;
for (int i = 0; i < maxLength; i++)
{
bl_counts[i] = 0;
}
// First calculate optimal bit lengths
int[] lengths = new int[numNodes];
lengths[numNodes - 1] = 0;
for (int i = numNodes - 1; i >= 0; i--)
{
if (childs[2 * i + 1] != -1)
{
int bitLength = lengths[i] + 1;
if (bitLength > maxLength)
{
bitLength = maxLength;
overflow++;
}
lengths[childs[2 * i]] = lengths[childs[2 * i + 1]] = bitLength;
}
else
{
// A leaf node
int bitLength = lengths[i];
bl_counts[bitLength - 1]++;
this.length[childs[2 * i]] = (byte)lengths[i];
}
}
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("Tree "+freqs.Length+" lengths:");
// for (int i=0; i < numLeafs; i++) {
// //Console.WriteLine("Node "+childs[2*i]+" freq: "+freqs[childs[2*i]]
// + " len: "+length[childs[2*i]]);
// }
// }
if (overflow == 0)
{
return;
}
int incrBitLen = maxLength - 1;
do
{
// Find the first bit length which could increase:
while (bl_counts[--incrBitLen] == 0)
{
}
// Move this node one down and remove a corresponding
// number of overflow nodes.
do
{
bl_counts[incrBitLen]--;
bl_counts[++incrBitLen]++;
overflow -= 1 << (maxLength - 1 - incrBitLen);
} while (overflow > 0 && incrBitLen < maxLength - 1);
} while (overflow > 0);
/* We may have overshot above. Move some nodes from maxLength to
* maxLength-1 in that case.
*/
bl_counts[maxLength - 1] += overflow;
bl_counts[maxLength - 2] -= overflow;
/* Now recompute all bit lengths, scanning in increasing
* frequency. It is simpler to reconstruct all lengths instead of
* fixing only the wrong ones. This idea is taken from 'ar'
* written by Haruhiko Okumura.
*
* The nodes were inserted with decreasing frequency into the childs
* array.
*/
int nodePtr = 2 * numLeafs;
for (int bits = maxLength; bits != 0; bits--)
{
int n = bl_counts[bits - 1];
while (n > 0)
{
int childPtr = 2 * childs[nodePtr++];
if (childs[childPtr + 1] == -1)
{
// We found another leaf
length[childs[childPtr]] = (byte)bits;
n--;
}
}
}
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("*** After overflow elimination. ***");
// for (int i=0; i < numLeafs; i++) {
// //Console.WriteLine("Node "+childs[2*i]+" freq: "+freqs[childs[2*i]]
// + " len: "+length[childs[2*i]]);
// }
// }
}
}
#region Instance Fields
/// <summary>
/// Pending buffer to use
/// </summary>
public DeflaterPending pending;
private Tree literalTree;
private Tree distTree;
private Tree blTree;
// Buffer for distances
private short[] d_buf;
private byte[] l_buf;
private int last_lit;
private int extra_bits;
#endregion Instance Fields
static DeflaterHuffman()
{
// See RFC 1951 3.2.6
// Literal codes
staticLCodes = new short[LITERAL_NUM];
staticLLength = new byte[LITERAL_NUM];
int i = 0;
while (i < 144)
{
staticLCodes[i] = BitReverse((0x030 + i) << 8);
staticLLength[i++] = 8;
}
while (i < 256)
{
staticLCodes[i] = BitReverse((0x190 - 144 + i) << 7);
staticLLength[i++] = 9;
}
while (i < 280)
{
staticLCodes[i] = BitReverse((0x000 - 256 + i) << 9);
staticLLength[i++] = 7;
}
while (i < LITERAL_NUM)
{
staticLCodes[i] = BitReverse((0x0c0 - 280 + i) << 8);
staticLLength[i++] = 8;
}
// Distance codes
staticDCodes = new short[DIST_NUM];
staticDLength = new byte[DIST_NUM];
for (i = 0; i < DIST_NUM; i++)
{
staticDCodes[i] = BitReverse(i << 11);
staticDLength[i] = 5;
}
}
/// <summary>
/// Construct instance with pending buffer
/// </summary>
/// <param name="pending">Pending buffer to use</param>
public DeflaterHuffman(DeflaterPending pending)
{
this.pending = pending;
literalTree = new Tree(this, LITERAL_NUM, 257, 15);
distTree = new Tree(this, DIST_NUM, 1, 15);
blTree = new Tree(this, BITLEN_NUM, 4, 7);
d_buf = new short[BUFSIZE];
l_buf = new byte[BUFSIZE];
}
/// <summary>
/// Reset internal state
/// </summary>
public void Reset()
{
last_lit = 0;
extra_bits = 0;
literalTree.Reset();
distTree.Reset();
blTree.Reset();
}
/// <summary>
/// Write all trees to pending buffer
/// </summary>
/// <param name="blTreeCodes">The number/rank of treecodes to send.</param>
public void SendAllTrees(int blTreeCodes)
{
blTree.BuildCodes();
literalTree.BuildCodes();
distTree.BuildCodes();
pending.WriteBits(literalTree.numCodes - 257, 5);
pending.WriteBits(distTree.numCodes - 1, 5);
pending.WriteBits(blTreeCodes - 4, 4);
for (int rank = 0; rank < blTreeCodes; rank++)
{
pending.WriteBits(blTree.length[BL_ORDER[rank]], 3);
}
literalTree.WriteTree(blTree);
distTree.WriteTree(blTree);
#if DebugDeflation
if (DeflaterConstants.DEBUGGING) {
blTree.CheckEmpty();
}
#endif
}
/// <summary>
/// Compress current buffer writing data to pending buffer
/// </summary>
public void CompressBlock()
{
for (int i = 0; i < last_lit; i++)
{
int litlen = l_buf[i] & 0xff;
int dist = d_buf[i];
if (dist-- != 0)
{
// if (DeflaterConstants.DEBUGGING) {
// Console.Write("["+(dist+1)+","+(litlen+3)+"]: ");
// }
int lc = Lcode(litlen);
literalTree.WriteSymbol(lc);
int bits = (lc - 261) / 4;
if (bits > 0 && bits <= 5)
{
pending.WriteBits(litlen & ((1 << bits) - 1), bits);
}
int dc = Dcode(dist);
distTree.WriteSymbol(dc);
bits = dc / 2 - 1;
if (bits > 0)
{
pending.WriteBits(dist & ((1 << bits) - 1), bits);
}
}
else
{
// if (DeflaterConstants.DEBUGGING) {
// if (litlen > 32 && litlen < 127) {
// Console.Write("("+(char)litlen+"): ");
// } else {
// Console.Write("{"+litlen+"}: ");
// }
// }
literalTree.WriteSymbol(litlen);
}
}
#if DebugDeflation
if (DeflaterConstants.DEBUGGING) {
Console.Write("EOF: ");
}
#endif
literalTree.WriteSymbol(EOF_SYMBOL);
#if DebugDeflation
if (DeflaterConstants.DEBUGGING) {
literalTree.CheckEmpty();
distTree.CheckEmpty();
}
#endif
}
/// <summary>
/// Flush block to output with no compression
/// </summary>
/// <param name="stored">Data to write</param>
/// <param name="storedOffset">Index of first byte to write</param>
/// <param name="storedLength">Count of bytes to write</param>
/// <param name="lastBlock">True if this is the last block</param>
public void FlushStoredBlock(byte[] stored, int storedOffset, int storedLength, bool lastBlock)
{
#if DebugDeflation
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("Flushing stored block "+ storedLength);
// }
#endif
pending.WriteBits((DeflaterConstants.STORED_BLOCK << 1) + (lastBlock ? 1 : 0), 3);
pending.AlignToByte();
pending.WriteShort(storedLength);
pending.WriteShort(~storedLength);
pending.WriteBlock(stored, storedOffset, storedLength);
Reset();
}
/// <summary>
/// Flush block to output with compression
/// </summary>
/// <param name="stored">Data to flush</param>
/// <param name="storedOffset">Index of first byte to flush</param>
/// <param name="storedLength">Count of bytes to flush</param>
/// <param name="lastBlock">True if this is the last block</param>
public void FlushBlock(byte[] stored, int storedOffset, int storedLength, bool lastBlock)
{
literalTree.freqs[EOF_SYMBOL]++;
// Build trees
literalTree.BuildTree();
distTree.BuildTree();
// Calculate bitlen frequency
literalTree.CalcBLFreq(blTree);
distTree.CalcBLFreq(blTree);
// Build bitlen tree
blTree.BuildTree();
int blTreeCodes = 4;
for (int i = 18; i > blTreeCodes; i--)
{
if (blTree.length[BL_ORDER[i]] > 0)
{
blTreeCodes = i + 1;
}
}
int opt_len = 14 + blTreeCodes * 3 + blTree.GetEncodedLength() +
literalTree.GetEncodedLength() + distTree.GetEncodedLength() +
extra_bits;
int static_len = extra_bits;
for (int i = 0; i < LITERAL_NUM; i++)
{
static_len += literalTree.freqs[i] * staticLLength[i];
}
for (int i = 0; i < DIST_NUM; i++)
{
static_len += distTree.freqs[i] * staticDLength[i];
}
if (opt_len >= static_len)
{
// Force static trees
opt_len = static_len;
}
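// Storing wins when the raw bytes plus the 4-byte LEN/NLEN header are fewer than
// the best compressed size found above (opt_len is in bits, hence the >> 3).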
if (storedOffset >= 0 && storedLength + 4 < opt_len >> 3)
{
// Store Block
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("Storing, since " + storedLength + " < " + opt_len
// + " <= " + static_len);
// }
FlushStoredBlock(stored, storedOffset, storedLength, lastBlock);
}
else if (opt_len == static_len)
{
// Encode with static tree
pending.WriteBits((DeflaterConstants.STATIC_TREES << 1) + (lastBlock ? 1 : 0), 3);
literalTree.SetStaticCodes(staticLCodes, staticLLength);
distTree.SetStaticCodes(staticDCodes, staticDLength);
CompressBlock();
Reset();
}
else
{
// Encode with dynamic tree
pending.WriteBits((DeflaterConstants.DYN_TREES << 1) + (lastBlock ? 1 : 0), 3);
SendAllTrees(blTreeCodes);
CompressBlock();
Reset();
}
}
/// <summary>
/// Get value indicating if internal buffer is full
/// </summary>
/// <returns>true if buffer is full</returns>
public bool IsFull()
{
return last_lit >= BUFSIZE;
}
/// <summary>
/// Add literal to buffer
/// </summary>
/// <param name="literal">Literal value to add to buffer.</param>
/// <returns>Value indicating internal buffer is full</returns>
public bool TallyLit(int literal)
{
// if (DeflaterConstants.DEBUGGING) {
// if (lit > 32 && lit < 127) {
// //Console.WriteLine("("+(char)lit+")");
// } else {
// //Console.WriteLine("{"+lit+"}");
// }
// }
d_buf[last_lit] = 0;
l_buf[last_lit++] = (byte)literal;
literalTree.freqs[literal]++;
return IsFull();
}
/// <summary>
/// Add distance code and length to literal and distance trees
/// </summary>
/// <param name="distance">Distance code</param>
/// <param name="length">Length</param>
/// <returns>Value indicating if internal buffer is full</returns>
public bool TallyDist(int distance, int length)
{
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("[" + distance + "," + length + "]");
// }
d_buf[last_lit] = (short)distance;
l_buf[last_lit++] = (byte)(length - 3);
int lc = Lcode(length - 3);
literalTree.freqs[lc]++;
if (lc >= 265 && lc < 285)
{
extra_bits += (lc - 261) / 4;
}
int dc = Dcode(distance - 1);
distTree.freqs[dc]++;
if (dc >= 4)
{
extra_bits += dc / 2 - 1;
}
return IsFull();
}
/// <summary>
/// Reverse the bits of a 16 bit value.
/// </summary>
/// <param name="toReverse">Value to reverse bits</param>
/// <returns>Value with bits reversed</returns>
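/// <remarks>
/// The reversal is assembled from the 4-bit lookup table above; for example
/// BitReverse(0x0006) returns 0x6000 (binary 0110 reversed is still 0110,
/// shifted to the top nibble).
/// </remarks>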
public static short BitReverse(int toReverse)
{
return (short)(bit4Reverse[toReverse & 0xF] << 12 |
bit4Reverse[(toReverse >> 4) & 0xF] << 8 |
bit4Reverse[(toReverse >> 8) & 0xF] << 4 |
bit4Reverse[toReverse >> 12]);
}
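// For instance (values per RFC 1951): a match length of 11 is tallied as
// length - 3 == 8, and Lcode(8) returns 265, the length code covering
// lengths 11-12; similarly Dcode(4) returns 4, the code for distances 5-6.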
private static int Lcode(int length)
{
if (length == 255)
{
return 285;
}
int code = 257;
while (length >= 8)
{
code += 4;
length >>= 1;
}
return code + length;
}
private static int Dcode(int distance)
{
int code = 0;
while (distance >= 4)
{
code += 2;
distance >>= 1;
}
return code + distance;
}
}
}

View File

@@ -0,0 +1,17 @@
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// This class stores the pending output of the Deflater.
///
/// author of the original java version : Jochen Hoenicke
/// </summary>
public class DeflaterPending : PendingBuffer
{
/// <summary>
/// Construct instance with default buffer size
/// </summary>
public DeflaterPending() : base(DeflaterConstants.PENDING_BUF_SIZE)
{
}
}
}

View File

@@ -0,0 +1,887 @@
using ICSharpCode.SharpZipLib.Checksum;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using System;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// Inflater is used to decompress data that has been compressed according
/// to the "deflate" standard described in rfc1951.
///
/// By default Zlib (rfc1950) headers and footers are expected in the input.
/// You can use constructor <code> public Inflater(bool noHeader)</code> passing true
/// if there is no Zlib header information
///
/// The usage is as follows. First you have to set some input with
/// <code>SetInput()</code>, then Inflate() it. If Inflate doesn't
/// inflate any bytes, there may be three reasons:
/// <ul>
/// <li>IsNeedingInput() returns true because the input buffer is empty.
/// You have to provide more input with <code>SetInput()</code>.
/// NOTE: IsNeedingInput() also returns true when the stream is finished.
/// </li>
/// <li>IsNeedingDictionary() returns true, you have to provide a preset
/// dictionary with <code>SetDictionary()</code>.</li>
/// <li>IsFinished returns true, the inflater has finished.</li>
/// </ul>
/// Once the first output byte is produced, a dictionary will not be
/// needed at a later stage.
///
/// author of the original java version : John Leuner, Jochen Hoenicke
/// </summary>
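/// <example>
/// A minimal usage sketch (buffer names and sizes are illustrative only):
/// <code>
/// var inflater = new Inflater();          // zlib headers expected
/// inflater.SetInput(compressedData);      // compressedData: a complete zlib stream
/// byte[] buf = new byte[4096];
/// while (!inflater.IsFinished)
/// {
///     int n = inflater.Inflate(buf);
///     if (n > 0)
///     {
///         // consume buf[0..n-1]
///     }
///     else if (inflater.IsNeedingDictionary)
///     {
///         // inflater.SetDictionary(presetDictionary); would be required here
///     }
///     else if (inflater.IsNeedingInput)
///     {
///         break; // no more data to supply in this sketch
///     }
/// }
/// </code>
/// </example>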
public class Inflater
{
#region Constants/Readonly
/// <summary>
/// Copy lengths for literal codes 257..285
/// </summary>
private static readonly int[] CPLENS = {
3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258
};
/// <summary>
/// Extra bits for literal codes 257..285
/// </summary>
private static readonly int[] CPLEXT = {
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0
};
/// <summary>
/// Copy offsets for distance codes 0..29
/// </summary>
private static readonly int[] CPDIST = {
1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
8193, 12289, 16385, 24577
};
/// <summary>
/// Extra bits for distance codes
/// </summary>
private static readonly int[] CPDEXT = {
0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
12, 12, 13, 13
};
/// <summary>
/// These are the possible states for an inflater
/// </summary>
private const int DECODE_HEADER = 0;
private const int DECODE_DICT = 1;
private const int DECODE_BLOCKS = 2;
private const int DECODE_STORED_LEN1 = 3;
private const int DECODE_STORED_LEN2 = 4;
private const int DECODE_STORED = 5;
private const int DECODE_DYN_HEADER = 6;
private const int DECODE_HUFFMAN = 7;
private const int DECODE_HUFFMAN_LENBITS = 8;
private const int DECODE_HUFFMAN_DIST = 9;
private const int DECODE_HUFFMAN_DISTBITS = 10;
private const int DECODE_CHKSUM = 11;
private const int FINISHED = 12;
#endregion Constants/Readonly
#region Instance Fields
/// <summary>
/// This variable contains the current state.
/// </summary>
private int mode;
/// <summary>
/// The adler checksum of the dictionary or of the decompressed
/// stream, as it is written in the header or footer, respectively, of the
/// compressed stream.
/// Only valid if mode is DECODE_DICT or DECODE_CHKSUM.
/// </summary>
private int readAdler;
/// <summary>
/// The number of bits needed to complete the current state. This
/// is valid, if mode is DECODE_DICT, DECODE_CHKSUM,
/// DECODE_HUFFMAN_LENBITS or DECODE_HUFFMAN_DISTBITS.
/// </summary>
private int neededBits;
private int repLength;
private int repDist;
private int uncomprLen;
/// <summary>
/// True, if the last block flag was set in the last block of the
/// inflated stream. This means that the stream ends after the
/// current block.
/// </summary>
private bool isLastBlock;
/// <summary>
/// The total number of inflated bytes.
/// </summary>
private long totalOut;
/// <summary>
/// The total number of bytes set with setInput(). This is not the
/// value returned by the TotalIn property, since this also includes the
/// unprocessed input.
/// </summary>
private long totalIn;
/// <summary>
/// This variable stores the noHeader flag that was given to the constructor.
/// True means, that the inflated stream doesn't contain a Zlib header or
/// footer.
/// </summary>
private bool noHeader;
private readonly StreamManipulator input;
private OutputWindow outputWindow;
private InflaterDynHeader dynHeader;
private InflaterHuffmanTree litlenTree, distTree;
private Adler32 adler;
#endregion Instance Fields
#region Constructors
/// <summary>
/// Creates a new inflater or RFC1951 decompressor
/// RFC1950/Zlib headers and footers will be expected in the input data
/// </summary>
public Inflater() : this(false)
{
}
/// <summary>
/// Creates a new inflater.
/// </summary>
/// <param name="noHeader">
/// True if no RFC1950/Zlib header and footer fields are expected in the input data
///
/// This is used for GZIPed/Zipped input.
///
/// For compatibility with
/// Sun JDK you should provide one byte of input more than needed in
/// this case.
/// </param>
public Inflater(bool noHeader)
{
this.noHeader = noHeader;
if (!noHeader)
this.adler = new Adler32();
input = new StreamManipulator();
outputWindow = new OutputWindow();
mode = noHeader ? DECODE_BLOCKS : DECODE_HEADER;
}
#endregion Constructors
/// <summary>
/// Resets the inflater so that a new stream can be decompressed. All
/// pending input and output will be discarded.
/// </summary>
public void Reset()
{
mode = noHeader ? DECODE_BLOCKS : DECODE_HEADER;
totalIn = 0;
totalOut = 0;
input.Reset();
outputWindow.Reset();
dynHeader = null;
litlenTree = null;
distTree = null;
isLastBlock = false;
adler?.Reset();
}
/// <summary>
/// Decodes a zlib/RFC1950 header.
/// </summary>
/// <returns>
/// False if more input is needed.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// The header is invalid.
/// </exception>
private bool DecodeHeader()
{
int header = input.PeekBits(16);
if (header < 0)
{
return false;
}
input.DropBits(16);
// The header is written in "wrong" byte order
header = ((header << 8) | (header >> 8)) & 0xffff;
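// For example, the common zlib header bytes 0x78 0x9C give header == 0x789C,
// and 0x789C == 30876 is divisible by 31, so the checksum test below passes.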
if (header % 31 != 0)
{
throw new SharpZipBaseException("Header checksum illegal");
}
if ((header & 0x0f00) != (Deflater.DEFLATED << 8))
{
throw new SharpZipBaseException("Compression Method unknown");
}
/* Maximum size of the backwards window in bits.
* We currently ignore this, but we could use it to make the
* inflater window more space efficient. On the other hand the
* full window (15 bits) is needed most times, anyway.
int max_wbits = ((header & 0x7000) >> 12) + 8;
*/
if ((header & 0x0020) == 0)
{ // Dictionary flag?
mode = DECODE_BLOCKS;
}
else
{
mode = DECODE_DICT;
neededBits = 32;
}
return true;
}
/// <summary>
/// Decodes the dictionary checksum after the deflate header.
/// </summary>
/// <returns>
/// False if more input is needed.
/// </returns>
private bool DecodeDict()
{
while (neededBits > 0)
{
int dictByte = input.PeekBits(8);
if (dictByte < 0)
{
return false;
}
input.DropBits(8);
readAdler = (readAdler << 8) | dictByte;
neededBits -= 8;
}
return false;
}
/// <summary>
/// Decodes the huffman encoded symbols in the input stream.
/// </summary>
/// <returns>
/// false if more input is needed, true if output window is
/// full or the current block ends.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// if deflated stream is invalid.
/// </exception>
private bool DecodeHuffman()
{
int free = outputWindow.GetFreeSpace();
while (free >= 258)
{
int symbol;
switch (mode)
{
case DECODE_HUFFMAN:
// This is the inner loop so it is optimized a bit
while (((symbol = litlenTree.GetSymbol(input)) & ~0xff) == 0)
{
outputWindow.Write(symbol);
if (--free < 258)
{
return true;
}
}
if (symbol < 257)
{
if (symbol < 0)
{
return false;
}
else
{
// symbol == 256: end of block
distTree = null;
litlenTree = null;
mode = DECODE_BLOCKS;
return true;
}
}
try
{
repLength = CPLENS[symbol - 257];
neededBits = CPLEXT[symbol - 257];
}
catch (Exception)
{
throw new SharpZipBaseException("Illegal rep length code");
}
goto case DECODE_HUFFMAN_LENBITS; // fall through
case DECODE_HUFFMAN_LENBITS:
if (neededBits > 0)
{
mode = DECODE_HUFFMAN_LENBITS;
int i = input.PeekBits(neededBits);
if (i < 0)
{
return false;
}
input.DropBits(neededBits);
repLength += i;
}
mode = DECODE_HUFFMAN_DIST;
goto case DECODE_HUFFMAN_DIST; // fall through
case DECODE_HUFFMAN_DIST:
symbol = distTree.GetSymbol(input);
if (symbol < 0)
{
return false;
}
try
{
repDist = CPDIST[symbol];
neededBits = CPDEXT[symbol];
}
catch (Exception)
{
throw new SharpZipBaseException("Illegal rep dist code");
}
goto case DECODE_HUFFMAN_DISTBITS; // fall through
case DECODE_HUFFMAN_DISTBITS:
if (neededBits > 0)
{
mode = DECODE_HUFFMAN_DISTBITS;
int i = input.PeekBits(neededBits);
if (i < 0)
{
return false;
}
input.DropBits(neededBits);
repDist += i;
}
outputWindow.Repeat(repLength, repDist);
free -= repLength;
mode = DECODE_HUFFMAN;
break;
default:
throw new SharpZipBaseException("Inflater unknown mode");
}
}
return true;
}
/// <summary>
/// Decodes the adler checksum after the deflate stream.
/// </summary>
/// <returns>
/// false if more input is needed.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// If checksum doesn't match.
/// </exception>
private bool DecodeChksum()
{
while (neededBits > 0)
{
int chkByte = input.PeekBits(8);
if (chkByte < 0)
{
return false;
}
input.DropBits(8);
readAdler = (readAdler << 8) | chkByte;
neededBits -= 8;
}
if ((int)adler?.Value != readAdler)
{
throw new SharpZipBaseException("Adler chksum doesn't match: " + (int)adler?.Value + " vs. " + readAdler);
}
mode = FINISHED;
return false;
}
/// <summary>
/// Decodes the deflated stream.
/// </summary>
/// <returns>
/// false if more input is needed, or if finished.
/// </returns>
/// <exception cref="SharpZipBaseException">
/// if deflated stream is invalid.
/// </exception>
private bool Decode()
{
switch (mode)
{
case DECODE_HEADER:
return DecodeHeader();
case DECODE_DICT:
return DecodeDict();
case DECODE_CHKSUM:
return DecodeChksum();
case DECODE_BLOCKS:
if (isLastBlock)
{
if (noHeader)
{
mode = FINISHED;
return false;
}
else
{
input.SkipToByteBoundary();
neededBits = 32;
mode = DECODE_CHKSUM;
return true;
}
}
int type = input.PeekBits(3);
if (type < 0)
{
return false;
}
input.DropBits(3);
isLastBlock |= (type & 1) != 0;
switch (type >> 1)
{
case DeflaterConstants.STORED_BLOCK:
input.SkipToByteBoundary();
mode = DECODE_STORED_LEN1;
break;
case DeflaterConstants.STATIC_TREES:
litlenTree = InflaterHuffmanTree.defLitLenTree;
distTree = InflaterHuffmanTree.defDistTree;
mode = DECODE_HUFFMAN;
break;
case DeflaterConstants.DYN_TREES:
dynHeader = new InflaterDynHeader(input);
mode = DECODE_DYN_HEADER;
break;
default:
throw new SharpZipBaseException("Unknown block type " + type);
}
return true;
case DECODE_STORED_LEN1:
{
if ((uncomprLen = input.PeekBits(16)) < 0)
{
return false;
}
input.DropBits(16);
mode = DECODE_STORED_LEN2;
}
goto case DECODE_STORED_LEN2; // fall through
case DECODE_STORED_LEN2:
{
int nlen = input.PeekBits(16);
if (nlen < 0)
{
return false;
}
input.DropBits(16);
if (nlen != (uncomprLen ^ 0xffff))
{
throw new SharpZipBaseException("broken uncompressed block");
}
mode = DECODE_STORED;
}
goto case DECODE_STORED; // fall through
case DECODE_STORED:
{
int more = outputWindow.CopyStored(input, uncomprLen);
uncomprLen -= more;
if (uncomprLen == 0)
{
mode = DECODE_BLOCKS;
return true;
}
return !input.IsNeedingInput;
}
case DECODE_DYN_HEADER:
if (!dynHeader.AttemptRead())
{
return false;
}
litlenTree = dynHeader.LiteralLengthTree;
distTree = dynHeader.DistanceTree;
mode = DECODE_HUFFMAN;
goto case DECODE_HUFFMAN; // fall through
case DECODE_HUFFMAN:
case DECODE_HUFFMAN_LENBITS:
case DECODE_HUFFMAN_DIST:
case DECODE_HUFFMAN_DISTBITS:
return DecodeHuffman();
case FINISHED:
return false;
default:
throw new SharpZipBaseException("Inflater.Decode unknown mode");
}
}
/// <summary>
/// Sets the preset dictionary. This should only be called, if
/// needsDictionary() returns true and it should set the same
/// dictionary, that was used for deflating. The getAdler()
/// function returns the checksum of the dictionary needed.
/// </summary>
/// <param name="buffer">
/// The dictionary.
/// </param>
public void SetDictionary(byte[] buffer)
{
SetDictionary(buffer, 0, buffer.Length);
}
/// <summary>
/// Sets the preset dictionary. This should only be called, if
/// needsDictionary() returns true and it should set the same
/// dictionary, that was used for deflating. The getAdler()
/// function returns the checksum of the dictionary needed.
/// </summary>
/// <param name="buffer">
/// The dictionary.
/// </param>
/// <param name="index">
/// The index into buffer where the dictionary starts.
/// </param>
/// <param name="count">
/// The number of bytes in the dictionary.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// No dictionary is needed.
/// </exception>
/// <exception cref="SharpZipBaseException">
/// The adler checksum for the buffer is invalid
/// </exception>
public void SetDictionary(byte[] buffer, int index, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (index < 0)
{
throw new ArgumentOutOfRangeException(nameof(index));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (!IsNeedingDictionary)
{
throw new InvalidOperationException("Dictionary is not needed");
}
adler?.Update(new ArraySegment<byte>(buffer, index, count));
if (adler != null && (int)adler.Value != readAdler)
{
throw new SharpZipBaseException("Wrong adler checksum");
}
adler?.Reset();
outputWindow.CopyDict(buffer, index, count);
mode = DECODE_BLOCKS;
}
/// <summary>
/// Sets the input. This should only be called, if needsInput()
/// returns true.
/// </summary>
/// <param name="buffer">
/// the input.
/// </param>
public void SetInput(byte[] buffer)
{
SetInput(buffer, 0, buffer.Length);
}
/// <summary>
/// Sets the input. This should only be called, if needsInput()
/// returns true.
/// </summary>
/// <param name="buffer">
/// The source of input data
/// </param>
/// <param name="index">
/// The index into buffer where the input starts.
/// </param>
/// <param name="count">
/// The number of bytes of input to use.
/// </param>
/// <exception cref="System.InvalidOperationException">
/// No input is needed.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// The index and/or count are wrong.
/// </exception>
public void SetInput(byte[] buffer, int index, int count)
{
input.SetInput(buffer, index, count);
totalIn += (long)count;
}
/// <summary>
/// Inflates the compressed stream to the output buffer. If this
/// returns 0, you should check, whether IsNeedingDictionary(),
/// IsNeedingInput() or IsFinished() returns true, to determine why no
/// further output is produced.
/// </summary>
/// <param name="buffer">
/// the output buffer.
/// </param>
/// <returns>
/// The number of bytes written to the buffer, 0 if no further
/// output can be produced.
/// </returns>
/// <exception cref="System.ArgumentOutOfRangeException">
/// if buffer has length 0.
/// </exception>
/// <exception cref="System.FormatException">
/// if deflated stream is invalid.
/// </exception>
public int Inflate(byte[] buffer)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
return Inflate(buffer, 0, buffer.Length);
}
/// <summary>
/// Inflates the compressed stream to the output buffer. If this
/// returns 0, you should check, whether needsDictionary(),
/// needsInput() or finished() returns true, to determine why no
/// further output is produced.
/// </summary>
/// <param name="buffer">
/// the output buffer.
/// </param>
/// <param name="offset">
/// the offset in buffer where storing starts.
/// </param>
/// <param name="count">
/// the maximum number of bytes to output.
/// </param>
/// <returns>
/// the number of bytes written to the buffer, 0 if no further output can be produced.
/// </returns>
/// <exception cref="System.ArgumentOutOfRangeException">
/// if count is less than 0.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// if the index and / or count are wrong.
/// </exception>
/// <exception cref="System.FormatException">
/// if deflated stream is invalid.
/// </exception>
public int Inflate(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), "count cannot be negative");
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset), "offset cannot be negative");
}
if (offset + count > buffer.Length)
{
throw new ArgumentException("count exceeds buffer bounds");
}
// Special case: count may be zero
if (count == 0)
{
if (!IsFinished)
{ // -jr- 08-Nov-2003 INFLATE_BUG fix..
Decode();
}
return 0;
}
int bytesCopied = 0;
do
{
if (mode != DECODE_CHKSUM)
{
/* Don't give away any output, if we are waiting for the
* checksum in the input stream.
*
* With this trick we have always:
* IsNeedingInput() and not IsFinished()
* implies more output can be produced.
*/
int more = outputWindow.CopyOutput(buffer, offset, count);
if (more > 0)
{
adler?.Update(new ArraySegment<byte>(buffer, offset, more));
offset += more;
bytesCopied += more;
totalOut += (long)more;
count -= more;
if (count == 0)
{
return bytesCopied;
}
}
}
} while (Decode() || ((outputWindow.GetAvailable() > 0) && (mode != DECODE_CHKSUM)));
return bytesCopied;
}
/// <summary>
/// Returns true, if the input buffer is empty.
/// You should then call setInput().
/// NOTE: This method also returns true when the stream is finished.
/// </summary>
public bool IsNeedingInput
{
get
{
return input.IsNeedingInput;
}
}
/// <summary>
/// Returns true, if a preset dictionary is needed to inflate the input.
/// </summary>
public bool IsNeedingDictionary
{
get
{
return mode == DECODE_DICT && neededBits == 0;
}
}
/// <summary>
/// Returns true, if the inflater has finished. This means, that no
/// input is needed and no output can be produced.
/// </summary>
public bool IsFinished
{
get
{
return mode == FINISHED && outputWindow.GetAvailable() == 0;
}
}
/// <summary>
/// Gets the adler checksum. This is either the checksum of all
/// uncompressed bytes returned by inflate(), or if needsDictionary()
/// returns true (and thus no output was yet produced) this is the
/// adler checksum of the expected dictionary.
/// </summary>
/// <returns>
/// the adler checksum.
/// </returns>
public int Adler
{
get
{
if (IsNeedingDictionary)
{
return readAdler;
}
else if (adler != null)
{
return (int)adler.Value;
}
else
{
return 0;
}
}
}
/// <summary>
/// Gets the total number of output bytes returned by Inflate().
/// </summary>
/// <returns>
/// the total number of output bytes.
/// </returns>
public long TotalOut
{
get
{
return totalOut;
}
}
/// <summary>
/// Gets the total number of processed compressed input bytes.
/// </summary>
/// <returns>
/// The total number of bytes of processed input bytes.
/// </returns>
public long TotalIn
{
get
{
return totalIn - (long)RemainingInput;
}
}
/// <summary>
/// Gets the number of unprocessed input bytes. Useful, if the end of the
/// stream is reached and you want to further process the bytes after
/// the deflate stream.
/// </summary>
/// <returns>
/// The number of bytes of the input which have not been processed.
/// </returns>
public int RemainingInput
{
// TODO: This should be a long?
get
{
return input.AvailableBytes;
}
}
}
}

View File

@@ -0,0 +1,151 @@
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using System;
using System.Collections.Generic;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
internal class InflaterDynHeader
{
#region Constants
// maximum number of literal/length codes
private const int LITLEN_MAX = 286;
// maximum number of distance codes
private const int DIST_MAX = 30;
// maximum data code lengths to read
private const int CODELEN_MAX = LITLEN_MAX + DIST_MAX;
// maximum meta code length codes to read
private const int META_MAX = 19;
private static readonly int[] MetaCodeLengthIndex =
{ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
#endregion Constants
/// <summary>
/// Continue decoding header from <see cref="input"/> until more bits are needed or decoding has been completed
/// </summary>
/// <returns>Returns whether decoding could be completed</returns>
public bool AttemptRead()
=> !state.MoveNext() || state.Current;
public InflaterDynHeader(StreamManipulator input)
{
this.input = input;
stateMachine = CreateStateMachine();
state = stateMachine.GetEnumerator();
}
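// Header decoding is written as an iterator (see CreateStateMachine): every
// "yield return false" means more input bits are needed, and the final
// "yield return true" signals that both Huffman trees have been built.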
private IEnumerable<bool> CreateStateMachine()
{
// Read initial code length counts from header
while (!input.TryGetBits(5, ref litLenCodeCount, 257)) yield return false;
while (!input.TryGetBits(5, ref distanceCodeCount, 1)) yield return false;
while (!input.TryGetBits(4, ref metaCodeCount, 4)) yield return false;
var dataCodeCount = litLenCodeCount + distanceCodeCount;
if (litLenCodeCount > LITLEN_MAX) throw new ValueOutOfRangeException(nameof(litLenCodeCount));
if (distanceCodeCount > DIST_MAX) throw new ValueOutOfRangeException(nameof(distanceCodeCount));
if (metaCodeCount > META_MAX) throw new ValueOutOfRangeException(nameof(metaCodeCount));
// Load code lengths for the meta tree from the header bits
for (int i = 0; i < metaCodeCount; i++)
{
while (!input.TryGetBits(3, ref codeLengths, MetaCodeLengthIndex[i])) yield return false;
}
var metaCodeTree = new InflaterHuffmanTree(codeLengths);
// Decompress the meta tree symbols into the data table code lengths
int index = 0;
while (index < dataCodeCount)
{
byte codeLength;
int symbol;
while ((symbol = metaCodeTree.GetSymbol(input)) < 0) yield return false;
if (symbol < 16)
{
// append literal code length
codeLengths[index++] = (byte)symbol;
}
else
{
int repeatCount = 0;
if (symbol == 16) // Repeat last code length 3..6 times
{
if (index == 0)
throw new StreamDecodingException("Cannot repeat previous code length when no other code length has been read");
codeLength = codeLengths[index - 1];
// 2 bits + 3, [3..6]
while (!input.TryGetBits(2, ref repeatCount, 3)) yield return false;
}
else if (symbol == 17) // Repeat zero 3..10 times
{
codeLength = 0;
// 3 bits + 3, [3..10]
while (!input.TryGetBits(3, ref repeatCount, 3)) yield return false;
}
else // (symbol == 18), Repeat zero 11..138 times
{
codeLength = 0;
// 7 bits + 11, [11..138]
while (!input.TryGetBits(7, ref repeatCount, 11)) yield return false;
}
if (index + repeatCount > dataCodeCount)
throw new StreamDecodingException("Cannot repeat code lengths past total number of data code lengths");
while (repeatCount-- > 0)
codeLengths[index++] = codeLength;
}
}
if (codeLengths[256] == 0)
throw new StreamDecodingException("Inflater dynamic header end-of-block code missing");
litLenTree = new InflaterHuffmanTree(new ArraySegment<byte>(codeLengths, 0, litLenCodeCount));
distTree = new InflaterHuffmanTree(new ArraySegment<byte>(codeLengths, litLenCodeCount, distanceCodeCount));
yield return true;
}
/// <summary>
/// Get literal/length huffman tree, must not be used before <see cref="AttemptRead"/> has returned true
/// </summary>
/// <exception cref="StreamDecodingException">If the header has not been successfully read by the state machine</exception>
public InflaterHuffmanTree LiteralLengthTree
=> litLenTree ?? throw new StreamDecodingException("Header properties were accessed before header had been successfully read");
/// <summary>
/// Get distance huffman tree, must not be used before <see cref="AttemptRead"/> has returned true
/// </summary>
/// <exception cref="StreamDecodingException">If the header has not been successfully read by the state machine</exception>
public InflaterHuffmanTree DistanceTree
=> distTree ?? throw new StreamDecodingException("Header properties were accessed before header had been successfully read");
#region Instance Fields
private readonly StreamManipulator input;
private readonly IEnumerator<bool> state;
private readonly IEnumerable<bool> stateMachine;
private byte[] codeLengths = new byte[CODELEN_MAX];
private InflaterHuffmanTree litLenTree;
private InflaterHuffmanTree distTree;
private int litLenCodeCount, distanceCodeCount, metaCodeCount;
#endregion Instance Fields
}
}

View File

@@ -0,0 +1,237 @@
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
using System;
using System.Collections.Generic;
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// Huffman tree used for inflation
/// </summary>
public class InflaterHuffmanTree
{
#region Constants
private const int MAX_BITLEN = 15;
#endregion Constants
#region Instance Fields
private short[] tree;
#endregion Instance Fields
/// <summary>
/// Literal length tree
/// </summary>
public static InflaterHuffmanTree defLitLenTree;
/// <summary>
/// Distance tree
/// </summary>
public static InflaterHuffmanTree defDistTree;
static InflaterHuffmanTree()
{
try
{
byte[] codeLengths = new byte[288];
int i = 0;
while (i < 144)
{
codeLengths[i++] = 8;
}
while (i < 256)
{
codeLengths[i++] = 9;
}
while (i < 280)
{
codeLengths[i++] = 7;
}
while (i < 288)
{
codeLengths[i++] = 8;
}
defLitLenTree = new InflaterHuffmanTree(codeLengths);
codeLengths = new byte[32];
i = 0;
while (i < 32)
{
codeLengths[i++] = 5;
}
defDistTree = new InflaterHuffmanTree(codeLengths);
}
catch (Exception)
{
throw new SharpZipBaseException("InflaterHuffmanTree: static tree length illegal");
}
}
#region Constructors
/// <summary>
/// Constructs a Huffman tree from the array of code lengths.
/// </summary>
/// <param name = "codeLengths">
/// the array of code lengths
/// </param>
public InflaterHuffmanTree(IList<byte> codeLengths)
{
BuildTree(codeLengths);
}
#endregion Constructors
private void BuildTree(IList<byte> codeLengths)
{
int[] blCount = new int[MAX_BITLEN + 1];
int[] nextCode = new int[MAX_BITLEN + 1];
for (int i = 0; i < codeLengths.Count; i++)
{
int bits = codeLengths[i];
if (bits > 0)
{
blCount[bits]++;
}
}
int code = 0;
int treeSize = 512;
for (int bits = 1; bits <= MAX_BITLEN; bits++)
{
nextCode[bits] = code;
code += blCount[bits] << (16 - bits);
if (bits >= 10)
{
/* We need an extra table for bit lengths >= 10. */
int start = nextCode[bits] & 0x1ff80;
int end = code & 0x1ff80;
treeSize += (end - start) >> (16 - bits);
}
}
/* -jr comment this out! doesnt work for dynamic trees and pkzip 2.04g
if (code != 65536)
{
throw new SharpZipBaseException("Code lengths don't add up properly.");
}
*/
/* Now create and fill the extra tables from longest to shortest
* bit len. This way the sub trees will be aligned.
*/
tree = new short[treeSize];
int treePtr = 512;
for (int bits = MAX_BITLEN; bits >= 10; bits--)
{
int end = code & 0x1ff80;
code -= blCount[bits] << (16 - bits);
int start = code & 0x1ff80;
for (int i = start; i < end; i += 1 << 7)
{
tree[DeflaterHuffman.BitReverse(i)] = (short)((-treePtr << 4) | bits);
treePtr += 1 << (bits - 9);
}
}
for (int i = 0; i < codeLengths.Count; i++)
{
int bits = codeLengths[i];
if (bits == 0)
{
continue;
}
code = nextCode[bits];
int revcode = DeflaterHuffman.BitReverse(code);
if (bits <= 9)
{
do
{
tree[revcode] = (short)((i << 4) | bits);
revcode += 1 << bits;
} while (revcode < 512);
}
else
{
int subTree = tree[revcode & 511];
int treeLen = 1 << (subTree & 15);
subTree = -(subTree >> 4);
do
{
tree[subTree | (revcode >> 9)] = (short)((i << 4) | bits);
revcode += 1 << bits;
} while (revcode < treeLen);
}
nextCode[bits] = code + (1 << (16 - bits));
}
}
/// <summary>
/// Reads the next symbol from input. The symbol is encoded using the
/// huffman tree.
/// </summary>
/// <param name="input">
/// input the input source.
/// </param>
/// <returns>
/// the next symbol, or -1 if not enough input is available.
/// </returns>
public int GetSymbol(StreamManipulator input)
{
int lookahead, symbol;
if ((lookahead = input.PeekBits(9)) >= 0)
{
symbol = tree[lookahead];
int bitlen = symbol & 15;
if (symbol >= 0)
{
if (bitlen == 0)
{
throw new SharpZipBaseException("Encountered invalid codelength 0");
}
input.DropBits(bitlen);
return symbol >> 4;
}
int subtree = -(symbol >> 4);
if ((lookahead = input.PeekBits(bitlen)) >= 0)
{
symbol = tree[subtree | (lookahead >> 9)];
input.DropBits(symbol & 15);
return symbol >> 4;
}
else
{
int bits = input.AvailableBits;
lookahead = input.PeekBits(bits);
symbol = tree[subtree | (lookahead >> 9)];
if ((symbol & 15) <= bits)
{
input.DropBits(symbol & 15);
return symbol >> 4;
}
else
{
return -1;
}
}
}
else // Less than 9 bits
{
int bits = input.AvailableBits;
lookahead = input.PeekBits(bits);
symbol = tree[lookahead];
if (symbol >= 0 && (symbol & 15) <= bits)
{
input.DropBits(symbol & 15);
return symbol >> 4;
}
else
{
return -1;
}
}
}
}
}

View File

@@ -0,0 +1,268 @@
namespace ICSharpCode.SharpZipLib.Zip.Compression
{
/// <summary>
/// This is a general purpose class for writing data to a buffer.
///
/// It allows you to write bits as well as bytes
/// Based on DeflaterPending.java
///
/// author of the original java version : Jochen Hoenicke
/// </summary>
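/// <remarks>
/// Bits are packed least-significant-bit first. As an illustration,
/// WriteBits(0b101, 3) followed by WriteBits(0b01, 2) accumulates the value
/// 0b01101 with a bit count of 5, and a subsequent AlignToByte() emits the
/// single byte 0x0D.
/// </remarks>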
public class PendingBuffer
{
#region Instance Fields
/// <summary>
/// Internal work buffer
/// </summary>
private readonly byte[] buffer;
private int start;
private int end;
private uint bits;
private int bitCount;
#endregion Instance Fields
#region Constructors
/// <summary>
/// construct instance using default buffer size of 4096
/// </summary>
public PendingBuffer() : this(4096)
{
}
/// <summary>
/// construct instance using specified buffer size
/// </summary>
/// <param name="bufferSize">
/// size to use for internal buffer
/// </param>
public PendingBuffer(int bufferSize)
{
buffer = new byte[bufferSize];
}
#endregion Constructors
/// <summary>
/// Clear internal state/buffers
/// </summary>
public void Reset()
{
start = end = bitCount = 0;
}
/// <summary>
/// Write a byte to buffer
/// </summary>
/// <param name="value">
/// The value to write
/// </param>
public void WriteByte(int value)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
buffer[end++] = unchecked((byte)value);
}
/// <summary>
/// Write a short value to buffer LSB first
/// </summary>
/// <param name="value">
/// The value to write.
/// </param>
public void WriteShort(int value)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
buffer[end++] = unchecked((byte)value);
buffer[end++] = unchecked((byte)(value >> 8));
}
/// <summary>
/// write an integer LSB first
/// </summary>
/// <param name="value">The value to write.</param>
public void WriteInt(int value)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
buffer[end++] = unchecked((byte)value);
buffer[end++] = unchecked((byte)(value >> 8));
buffer[end++] = unchecked((byte)(value >> 16));
buffer[end++] = unchecked((byte)(value >> 24));
}
/// <summary>
/// Write a block of data to buffer
/// </summary>
/// <param name="block">data to write</param>
/// <param name="offset">offset of first byte to write</param>
/// <param name="length">number of bytes to write</param>
public void WriteBlock(byte[] block, int offset, int length)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
System.Array.Copy(block, offset, buffer, end, length);
end += length;
}
/// <summary>
/// The number of bits written to the buffer
/// </summary>
public int BitCount
{
get
{
return bitCount;
}
}
/// <summary>
/// Align internal buffer on a byte boundary
/// </summary>
public void AlignToByte()
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
if (bitCount > 0)
{
buffer[end++] = unchecked((byte)bits);
if (bitCount > 8)
{
buffer[end++] = unchecked((byte)(bits >> 8));
}
}
bits = 0;
bitCount = 0;
}
/// <summary>
/// Write bits to internal buffer
/// </summary>
/// <param name="b">source of bits</param>
/// <param name="count">number of bits to write</param>
public void WriteBits(int b, int count)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
// if (DeflaterConstants.DEBUGGING) {
// //Console.WriteLine("writeBits("+b+","+count+")");
// }
#endif
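// Bits accumulate LSB-first; once 16 or more are pending, two whole bytes
// are emitted and the remainder stays in the accumulator.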
bits |= (uint)(b << bitCount);
bitCount += count;
if (bitCount >= 16)
{
buffer[end++] = unchecked((byte)bits);
buffer[end++] = unchecked((byte)(bits >> 8));
bits >>= 16;
bitCount -= 16;
}
}
/// <summary>
/// Write a short value to internal buffer most significant byte first
/// </summary>
/// <param name="s">value to write</param>
public void WriteShortMSB(int s)
{
#if DebugDeflation
if (DeflaterConstants.DEBUGGING && (start != 0) )
{
throw new SharpZipBaseException("Debug check: start != 0");
}
#endif
buffer[end++] = unchecked((byte)(s >> 8));
buffer[end++] = unchecked((byte)s);
}
/// <summary>
/// Indicates if buffer has been flushed
/// </summary>
public bool IsFlushed
{
get
{
return end == 0;
}
}
/// <summary>
/// Flushes the pending buffer into the given output array. If the
/// output array is too small, only a partial flush is done.
/// </summary>
/// <param name="output">The output array.</param>
/// <param name="offset">The offset into output array.</param>
/// <param name="length">The maximum number of bytes to store.</param>
/// <returns>The number of bytes flushed.</returns>
public int Flush(byte[] output, int offset, int length)
{
if (bitCount >= 8)
{
buffer[end++] = unchecked((byte)bits);
bits >>= 8;
bitCount -= 8;
}
if (length > end - start)
{
length = end - start;
System.Array.Copy(buffer, start, output, offset, length);
start = 0;
end = 0;
}
else
{
System.Array.Copy(buffer, start, output, offset, length);
start += length;
}
return length;
}
/// <summary>
/// Convert internal buffer to byte array.
/// Buffer is empty on completion
/// </summary>
/// <returns>
/// The internal buffer contents converted to a byte array.
/// </returns>
public byte[] ToByteArray()
{
AlignToByte();
byte[] result = new byte[end - start];
System.Array.Copy(buffer, start, result, 0, result.Length);
start = 0;
end = 0;
return result;
}
}
}
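A small sketch (illustrative, not part of this commit) showing the bit-packing behaviour above, using only members of the class just shown:

using ICSharpCode.SharpZipLib.Zip.Compression;

static class PendingBufferSketch
{
    public static byte[] PackExample()
    {
        var pending = new PendingBuffer(64);

        pending.WriteBits(0x5, 3);    // three bits, LSB first
        pending.WriteBits(0x1F, 5);   // a full byte is now pending
        pending.AlignToByte();        // flush the pending bits to the buffer
        pending.WriteShort(0x1234);   // written LSB first: 0x34, 0x12

        // ToByteArray aligns, copies out the contents and empties the buffer.
        return pending.ToByteArray();
    }
}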

View File

@@ -0,0 +1,438 @@
using ICSharpCode.SharpZipLib.Encryption;
using System;
using System.IO;
using System.Security.Cryptography;
namespace ICSharpCode.SharpZipLib.Zip.Compression.Streams
{
/// <summary>
/// A special stream deflating or compressing the bytes that are
/// written to it. It uses a Deflater to perform actual deflating.<br/>
/// Authors of the original java version : Tom Tromey, Jochen Hoenicke
/// </summary>
public class DeflaterOutputStream : Stream
{
#region Constructors
/// <summary>
/// Creates a new DeflaterOutputStream with a default Deflater and default buffer size.
/// </summary>
/// <param name="baseOutputStream">
/// the output stream where deflated output should be written.
/// </param>
public DeflaterOutputStream(Stream baseOutputStream)
: this(baseOutputStream, new Deflater(), 512)
{
}
/// <summary>
/// Creates a new DeflaterOutputStream with the given Deflater and
/// default buffer size.
/// </summary>
/// <param name="baseOutputStream">
/// the output stream where deflated output should be written.
/// </param>
/// <param name="deflater">
/// the underlying deflater.
/// </param>
public DeflaterOutputStream(Stream baseOutputStream, Deflater deflater)
: this(baseOutputStream, deflater, 512)
{
}
/// <summary>
/// Creates a new DeflaterOutputStream with the given Deflater and
/// buffer size.
/// </summary>
/// <param name="baseOutputStream">
/// The output stream where deflated output is written.
/// </param>
/// <param name="deflater">
/// The underlying deflater to use
/// </param>
/// <param name="bufferSize">
/// The buffer size in bytes to use when deflating (minimum value 512)
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// bufsize is less than or equal to zero.
/// </exception>
/// <exception cref="ArgumentException">
/// baseOutputStream does not support writing
/// </exception>
/// <exception cref="ArgumentNullException">
/// deflater instance is null
/// </exception>
public DeflaterOutputStream(Stream baseOutputStream, Deflater deflater, int bufferSize)
{
if (baseOutputStream == null)
{
throw new ArgumentNullException(nameof(baseOutputStream));
}
if (baseOutputStream.CanWrite == false)
{
throw new ArgumentException("Must support writing", nameof(baseOutputStream));
}
if (bufferSize < 512)
{
throw new ArgumentOutOfRangeException(nameof(bufferSize));
}
baseOutputStream_ = baseOutputStream;
buffer_ = new byte[bufferSize];
deflater_ = deflater ?? throw new ArgumentNullException(nameof(deflater));
}
#endregion Constructors
#region Public API
/// <summary>
/// Finishes the stream by calling finish() on the deflater.
/// </summary>
/// <exception cref="SharpZipBaseException">
/// Not all input is deflated
/// </exception>
public virtual void Finish()
{
deflater_.Finish();
while (!deflater_.IsFinished)
{
int len = deflater_.Deflate(buffer_, 0, buffer_.Length);
if (len <= 0)
{
break;
}
if (cryptoTransform_ != null)
{
EncryptBlock(buffer_, 0, len);
}
baseOutputStream_.Write(buffer_, 0, len);
}
if (!deflater_.IsFinished)
{
throw new SharpZipBaseException("Can't deflate all input?");
}
baseOutputStream_.Flush();
if (cryptoTransform_ != null)
{
if (cryptoTransform_ is ZipAESTransform)
{
AESAuthCode = ((ZipAESTransform)cryptoTransform_).GetAuthCode();
}
cryptoTransform_.Dispose();
cryptoTransform_ = null;
}
}
/// <summary>
/// Gets or sets a flag indicating ownership of underlying stream.
/// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.
/// </summary>
/// <remarks>The default value is true.</remarks>
public bool IsStreamOwner { get; set; } = true;
/// <summary>
/// Allows the client to determine if an entry can be patched after it is added
/// </summary>
public bool CanPatchEntries
{
get
{
return baseOutputStream_.CanSeek;
}
}
#endregion Public API
#region Encryption
/// <summary>
/// The CryptoTransform currently being used to encrypt the compressed data.
/// </summary>
protected ICryptoTransform cryptoTransform_;
/// <summary>
/// Returns the 10 byte AUTH CODE to be appended immediately following the AES data stream.
/// </summary>
protected byte[] AESAuthCode;
/// <summary>
/// Encrypt a block of data
/// </summary>
/// <param name="buffer">
/// Data to encrypt. NOTE the original contents of the buffer are lost
/// </param>
/// <param name="offset">
/// Offset of first byte in buffer to encrypt
/// </param>
/// <param name="length">
/// Number of bytes in buffer to encrypt
/// </param>
protected void EncryptBlock(byte[] buffer, int offset, int length)
{
cryptoTransform_.TransformBlock(buffer, 0, length, buffer, 0);
}
#endregion Encryption
#region Deflation Support
/// <summary>
/// Deflates everything in the input buffers. This will call
/// <code>def.deflate()</code> until all bytes from the input buffers
/// are processed.
/// </summary>
protected void Deflate()
{
Deflate(false);
}
private void Deflate(bool flushing)
{
while (flushing || !deflater_.IsNeedingInput)
{
int deflateCount = deflater_.Deflate(buffer_, 0, buffer_.Length);
if (deflateCount <= 0)
{
break;
}
if (cryptoTransform_ != null)
{
EncryptBlock(buffer_, 0, deflateCount);
}
baseOutputStream_.Write(buffer_, 0, deflateCount);
}
if (!deflater_.IsNeedingInput)
{
throw new SharpZipBaseException("DeflaterOutputStream can't deflate all input?");
}
}
#endregion Deflation Support
#region Stream Overrides
/// <summary>
/// Gets value indicating stream can be read from
/// </summary>
public override bool CanRead
{
get
{
return false;
}
}
/// <summary>
/// Gets a value indicating if seeking is supported for this stream
/// This property always returns false
/// </summary>
public override bool CanSeek
{
get
{
return false;
}
}
/// <summary>
/// Get value indicating if this stream supports writing
/// </summary>
public override bool CanWrite
{
get
{
return baseOutputStream_.CanWrite;
}
}
/// <summary>
/// Get current length of stream
/// </summary>
public override long Length
{
get
{
return baseOutputStream_.Length;
}
}
/// <summary>
/// Gets the current position within the stream.
/// </summary>
/// <exception cref="NotSupportedException">Any attempt to set position</exception>
public override long Position
{
get
{
return baseOutputStream_.Position;
}
set
{
throw new NotSupportedException("Position property not supported");
}
}
/// <summary>
/// Sets the current position of this stream to the given value. Not supported by this class!
/// </summary>
/// <param name="offset">The offset relative to the <paramref name="origin"/> to seek.</param>
/// <param name="origin">The <see cref="SeekOrigin"/> to seek from.</param>
/// <returns>The new position in the stream.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException("DeflaterOutputStream Seek not supported");
}
/// <summary>
/// Sets the length of this stream to the given value. Not supported by this class!
/// </summary>
/// <param name="value">The new stream length.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void SetLength(long value)
{
throw new NotSupportedException("DeflaterOutputStream SetLength not supported");
}
/// <summary>
/// Read a byte from stream advancing position by one
/// </summary>
/// <returns>The byte read cast to an int. The value is -1 if at the end of the stream.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override int ReadByte()
{
throw new NotSupportedException("DeflaterOutputStream ReadByte not supported");
}
/// <summary>
/// Read a block of bytes from stream
/// </summary>
/// <param name="buffer">The buffer to store read data in.</param>
/// <param name="offset">The offset to start storing at.</param>
/// <param name="count">The maximum number of bytes to read.</param>
/// <returns>The actual number of bytes read. Zero if end of stream is detected.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override int Read(byte[] buffer, int offset, int count)
{
throw new NotSupportedException("DeflaterOutputStream Read not supported");
}
/// <summary>
/// Flushes the stream by calling <see cref="Flush">Flush</see> on the deflater and then
/// on the underlying stream. This ensures that all bytes are flushed.
/// </summary>
public override void Flush()
{
deflater_.Flush();
Deflate(true);
baseOutputStream_.Flush();
}
/// <summary>
/// Calls <see cref="Finish"/> and closes the underlying
/// stream when <see cref="IsStreamOwner"></see> is true.
/// </summary>
protected override void Dispose(bool disposing)
{
if (!isClosed_)
{
isClosed_ = true;
try
{
Finish();
if (cryptoTransform_ != null)
{
GetAuthCodeIfAES();
cryptoTransform_.Dispose();
cryptoTransform_ = null;
}
}
finally
{
if (IsStreamOwner)
{
baseOutputStream_.Dispose();
}
}
}
}
/// <summary>
/// Get the Auth code for AES encrypted entries
/// </summary>
protected void GetAuthCodeIfAES()
{
if (cryptoTransform_ is ZipAESTransform)
{
AESAuthCode = ((ZipAESTransform)cryptoTransform_).GetAuthCode();
}
}
/// <summary>
/// Writes a single byte to the compressed output stream.
/// </summary>
/// <param name="value">
/// The byte value.
/// </param>
public override void WriteByte(byte value)
{
byte[] b = new byte[1];
b[0] = value;
Write(b, 0, 1);
}
/// <summary>
/// Writes bytes from an array to the compressed stream.
/// </summary>
/// <param name="buffer">
/// The byte array
/// </param>
/// <param name="offset">
/// The offset into the byte array where to start.
/// </param>
/// <param name="count">
/// The number of bytes to write.
/// </param>
public override void Write(byte[] buffer, int offset, int count)
{
deflater_.SetInput(buffer, offset, count);
Deflate();
}
#endregion Stream Overrides
#region Instance Fields
/// <summary>
/// This buffer is used temporarily to retrieve the bytes from the
/// deflater and write them to the underlying output stream.
/// </summary>
private byte[] buffer_;
/// <summary>
/// The deflater which is used to deflate the stream.
/// </summary>
protected Deflater deflater_;
/// <summary>
/// Base stream the deflater depends on.
/// </summary>
protected Stream baseOutputStream_;
private bool isClosed_;
#endregion Instance Fields
}
}
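A minimal round-trip sketch for the stream above (illustrative, not part of this commit): compress a byte array into a MemoryStream using only the constructor, Write and Finish/Dispose behaviour shown here.

using System.IO;
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;

static class DeflateSketch
{
    public static byte[] Compress(byte[] data)
    {
        var compressed = new MemoryStream();

        // IsStreamOwner = false leaves the MemoryStream open after disposal.
        using (var deflateOut = new DeflaterOutputStream(compressed, new Deflater(), 4096)
        {
            IsStreamOwner = false
        })
        {
            deflateOut.Write(data, 0, data.Length);
            // Dispose calls Finish(), which drains the remaining deflater output.
        }

        return compressed.ToArray();
    }
}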

View File

@@ -0,0 +1,713 @@
using System;
using System.IO;
using System.Security.Cryptography;
namespace ICSharpCode.SharpZipLib.Zip.Compression.Streams
{
/// <summary>
/// An input buffer customised for use by <see cref="InflaterInputStream"/>
/// </summary>
/// <remarks>
/// The buffer supports decryption of incoming data.
/// </remarks>
public class InflaterInputBuffer
{
#region Constructors
/// <summary>
/// Initialise a new instance of <see cref="InflaterInputBuffer"/> with a default buffer size
/// </summary>
/// <param name="stream">The stream to buffer.</param>
public InflaterInputBuffer(Stream stream) : this(stream, 4096)
{
}
/// <summary>
/// Initialise a new instance of <see cref="InflaterInputBuffer"/>
/// </summary>
/// <param name="stream">The stream to buffer.</param>
/// <param name="bufferSize">The size to use for the buffer</param>
/// <remarks>A minimum buffer size of 1KB is permitted. Lower sizes are treated as 1KB.</remarks>
public InflaterInputBuffer(Stream stream, int bufferSize)
{
inputStream = stream;
if (bufferSize < 1024)
{
bufferSize = 1024;
}
rawData = new byte[bufferSize];
clearText = rawData;
}
#endregion Constructors
/// <summary>
/// Get the length of bytes in the <see cref="RawData"/>
/// </summary>
public int RawLength
{
get
{
return rawLength;
}
}
/// <summary>
/// Get the contents of the raw data buffer.
/// </summary>
/// <remarks>This may contain encrypted data.</remarks>
public byte[] RawData
{
get
{
return rawData;
}
}
/// <summary>
/// Get the number of usable bytes in <see cref="ClearText"/>
/// </summary>
public int ClearTextLength
{
get
{
return clearTextLength;
}
}
/// <summary>
/// Get the contents of the clear text buffer.
/// </summary>
public byte[] ClearText
{
get
{
return clearText;
}
}
/// <summary>
/// Get/set the number of bytes available
/// </summary>
public int Available
{
get { return available; }
set { available = value; }
}
/// <summary>
/// Call <see cref="Inflater.SetInput(byte[], int, int)"/> passing the current clear text buffer contents.
/// </summary>
/// <param name="inflater">The inflater to set input for.</param>
public void SetInflaterInput(Inflater inflater)
{
if (available > 0)
{
inflater.SetInput(clearText, clearTextLength - available, available);
available = 0;
}
}
/// <summary>
/// Fill the buffer from the underlying input stream.
/// </summary>
public void Fill()
{
rawLength = 0;
int toRead = rawData.Length;
while (toRead > 0 && inputStream.CanRead)
{
int count = inputStream.Read(rawData, rawLength, toRead);
if (count <= 0)
{
break;
}
rawLength += count;
toRead -= count;
}
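// If a crypto transform is active, decrypt the freshly read block into the
// separate clear-text buffer; otherwise clearText simply aliases rawData.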
if (cryptoTransform != null)
{
clearTextLength = cryptoTransform.TransformBlock(rawData, 0, rawLength, clearText, 0);
}
else
{
clearTextLength = rawLength;
}
available = clearTextLength;
}
/// <summary>
/// Read a buffer directly from the input stream
/// </summary>
/// <param name="buffer">The buffer to fill</param>
/// <returns>Returns the number of bytes read.</returns>
public int ReadRawBuffer(byte[] buffer)
{
return ReadRawBuffer(buffer, 0, buffer.Length);
}
/// <summary>
/// Read a buffer directly from the input stream
/// </summary>
/// <param name="outBuffer">The buffer to read into</param>
/// <param name="offset">The offset to start reading data into.</param>
/// <param name="length">The number of bytes to read.</param>
/// <returns>Returns the number of bytes read.</returns>
public int ReadRawBuffer(byte[] outBuffer, int offset, int length)
{
if (length < 0)
{
throw new ArgumentOutOfRangeException(nameof(length));
}
int currentOffset = offset;
int currentLength = length;
while (currentLength > 0)
{
if (available <= 0)
{
Fill();
if (available <= 0)
{
return 0;
}
}
int toCopy = Math.Min(currentLength, available);
System.Array.Copy(rawData, rawLength - (int)available, outBuffer, currentOffset, toCopy);
currentOffset += toCopy;
currentLength -= toCopy;
available -= toCopy;
}
return length;
}
/// <summary>
/// Read clear text data from the input stream.
/// </summary>
/// <param name="outBuffer">The buffer to add data to.</param>
/// <param name="offset">The offset to start adding data at.</param>
/// <param name="length">The number of bytes to read.</param>
/// <returns>Returns the number of bytes actually read.</returns>
public int ReadClearTextBuffer(byte[] outBuffer, int offset, int length)
{
if (length < 0)
{
throw new ArgumentOutOfRangeException(nameof(length));
}
int currentOffset = offset;
int currentLength = length;
while (currentLength > 0)
{
if (available <= 0)
{
Fill();
if (available <= 0)
{
return 0;
}
}
int toCopy = Math.Min(currentLength, available);
Array.Copy(clearText, clearTextLength - (int)available, outBuffer, currentOffset, toCopy);
currentOffset += toCopy;
currentLength -= toCopy;
available -= toCopy;
}
return length;
}
/// <summary>
/// Read a <see cref="byte"/> from the input stream.
/// </summary>
/// <returns>Returns the byte read.</returns>
public byte ReadLeByte()
{
if (available <= 0)
{
Fill();
if (available <= 0)
{
throw new ZipException("EOF in header");
}
}
byte result = rawData[rawLength - available];
available -= 1;
return result;
}
/// <summary>
/// Read a <see cref="short"/> in little endian byte order.
/// </summary>
/// <returns>The short value read cast to an int.</returns>
public int ReadLeShort()
{
return ReadLeByte() | (ReadLeByte() << 8);
}
/// <summary>
/// Read an <see cref="int"/> in little endian byte order.
/// </summary>
/// <returns>The int value read.</returns>
public int ReadLeInt()
{
return ReadLeShort() | (ReadLeShort() << 16);
}
/// <summary>
/// Read a <see cref="long"/> in little endian byte order.
/// </summary>
/// <returns>The long value read.</returns>
public long ReadLeLong()
{
return (uint)ReadLeInt() | ((long)ReadLeInt() << 32);
}
/// <summary>
/// Sets the <see cref="ICryptoTransform"/> to apply to any data.
/// </summary>
/// <remarks>Set this value to null to have no transform applied.</remarks>
public ICryptoTransform CryptoTransform
{
set
{
cryptoTransform = value;
if (cryptoTransform != null)
{
if (rawData == clearText)
{
if (internalClearText == null)
{
internalClearText = new byte[rawData.Length];
}
clearText = internalClearText;
}
clearTextLength = rawLength;
if (available > 0)
{
cryptoTransform.TransformBlock(rawData, rawLength - available, available, clearText, rawLength - available);
}
}
else
{
clearText = rawData;
clearTextLength = rawLength;
}
}
}
#region Instance Fields
private int rawLength;
private byte[] rawData;
private int clearTextLength;
private byte[] clearText;
private byte[] internalClearText;
private int available;
private ICryptoTransform cryptoTransform;
private Stream inputStream;
#endregion Instance Fields
}
/// <summary>
/// This filter stream is used to decompress data compressed using the "deflate"
/// format. The "deflate" format is described in RFC 1951.
///
/// This stream may form the basis for other decompression filters, such
/// as the <see cref="ICSharpCode.SharpZipLib.GZip.GZipInputStream">GZipInputStream</see>.
///
/// Author of the original java version : John Leuner.
/// </summary>
public class InflaterInputStream : Stream
{
#region Constructors
/// <summary>
/// Create an InflaterInputStream with the default decompressor
/// and a default buffer size of 4KB.
/// </summary>
/// <param name = "baseInputStream">
/// The InputStream to read bytes from
/// </param>
public InflaterInputStream(Stream baseInputStream)
: this(baseInputStream, new Inflater(), 4096)
{
}
/// <summary>
/// Create an InflaterInputStream with the specified decompressor
/// and a default buffer size of 4KB.
/// </summary>
/// <param name = "baseInputStream">
/// The source of input data
/// </param>
/// <param name = "inf">
/// The decompressor used to decompress data read from baseInputStream
/// </param>
public InflaterInputStream(Stream baseInputStream, Inflater inf)
: this(baseInputStream, inf, 4096)
{
}
/// <summary>
/// Create an InflaterInputStream with the specified decompressor
/// and the specified buffer size.
/// </summary>
/// <param name = "baseInputStream">
/// The InputStream to read bytes from
/// </param>
/// <param name = "inflater">
/// The decompressor to use
/// </param>
/// <param name = "bufferSize">
/// Size of the buffer to use
/// </param>
public InflaterInputStream(Stream baseInputStream, Inflater inflater, int bufferSize)
{
if (baseInputStream == null)
{
throw new ArgumentNullException(nameof(baseInputStream));
}
if (inflater == null)
{
throw new ArgumentNullException(nameof(inflater));
}
if (bufferSize <= 0)
{
throw new ArgumentOutOfRangeException(nameof(bufferSize));
}
this.baseInputStream = baseInputStream;
this.inf = inflater;
inputBuffer = new InflaterInputBuffer(baseInputStream, bufferSize);
}
#endregion Constructors
/// <summary>
/// Gets or sets a flag indicating ownership of underlying stream.
/// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.
/// </summary>
/// <remarks>The default value is true.</remarks>
public bool IsStreamOwner { get; set; } = true;
/// <summary>
/// Skip specified number of bytes of uncompressed data
/// </summary>
/// <param name ="count">
/// Number of bytes to skip
/// </param>
/// <returns>
/// The number of bytes skipped, zero if the end of
/// stream has been reached
/// </returns>
/// <exception cref="ArgumentOutOfRangeException">
/// <paramref name="count">The number of bytes</paramref> to skip is less than or equal to zero.
/// </exception>
public long Skip(long count)
{
if (count <= 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
// v0.80 Skip by seeking if underlying stream supports it...
if (baseInputStream.CanSeek)
{
baseInputStream.Seek(count, SeekOrigin.Current);
return count;
}
else
{
int length = 2048;
if (count < length)
{
length = (int)count;
}
byte[] tmp = new byte[length];
int readCount = 1;
long toSkip = count;
while ((toSkip > 0) && (readCount > 0))
{
if (toSkip < length)
{
length = (int)toSkip;
}
readCount = baseInputStream.Read(tmp, 0, length);
toSkip -= readCount;
}
return count - toSkip;
}
}
/// <summary>
/// Clear any cryptographic state.
/// </summary>
protected void StopDecrypting()
{
inputBuffer.CryptoTransform = null;
}
/// <summary>
/// Returns 0 once the end of the stream (EOF) has been reached.
/// Otherwise returns 1.
/// </summary>
public virtual int Available
{
get
{
return inf.IsFinished ? 0 : 1;
}
}
/// <summary>
/// Fills the buffer with more data to decompress.
/// </summary>
/// <exception cref="SharpZipBaseException">
/// Stream ends early
/// </exception>
protected void Fill()
{
// Protect against redundant calls
if (inputBuffer.Available <= 0)
{
inputBuffer.Fill();
if (inputBuffer.Available <= 0)
{
throw new SharpZipBaseException("Unexpected EOF");
}
}
inputBuffer.SetInflaterInput(inf);
}
#region Stream Overrides
/// <summary>
/// Gets a value indicating whether the current stream supports reading
/// </summary>
public override bool CanRead
{
get
{
return baseInputStream.CanRead;
}
}
/// <summary>
/// Gets a value of false indicating seeking is not supported for this stream.
/// </summary>
public override bool CanSeek
{
get
{
return false;
}
}
/// <summary>
/// Gets a value of false indicating that this stream is not writeable.
/// </summary>
public override bool CanWrite
{
get
{
return false;
}
}
/// <summary>
/// A value representing the length of the stream in bytes.
/// </summary>
public override long Length
{
get
{
//return inputBuffer.RawLength;
throw new NotSupportedException("InflaterInputStream Length is not supported");
}
}
/// <summary>
/// The current position within the stream.
/// Throws a NotSupportedException when attempting to set the position
/// </summary>
/// <exception cref="NotSupportedException">Attempting to set the position</exception>
public override long Position
{
get
{
return baseInputStream.Position;
}
set
{
throw new NotSupportedException("InflaterInputStream Position not supported");
}
}
/// <summary>
/// Flushes the baseInputStream
/// </summary>
public override void Flush()
{
baseInputStream.Flush();
}
/// <summary>
/// Sets the position within the current stream
/// Always throws a NotSupportedException
/// </summary>
/// <param name="offset">The relative offset to seek to.</param>
/// <param name="origin">The <see cref="SeekOrigin"/> defining where to seek from.</param>
/// <returns>The new position in the stream.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException("Seek not supported");
}
/// <summary>
/// Set the length of the current stream
/// Always throws a NotSupportedException
/// </summary>
/// <param name="value">The new length value for the stream.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void SetLength(long value)
{
throw new NotSupportedException("InflaterInputStream SetLength not supported");
}
/// <summary>
/// Writes a sequence of bytes to stream and advances the current position
/// This method always throws a NotSupportedException
/// </summary>
/// <param name="buffer">The buffer containing data to write.</param>
/// <param name="offset">The offset of the first byte to write.</param>
/// <param name="count">The number of bytes to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException("InflaterInputStream Write not supported");
}
/// <summary>
/// Writes one byte to the current stream and advances the current position
/// Always throws a NotSupportedException
/// </summary>
/// <param name="value">The byte to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void WriteByte(byte value)
{
throw new NotSupportedException("InflaterInputStream WriteByte not supported");
}
/// <summary>
/// Closes the input stream. When <see cref="IsStreamOwner"></see>
/// is true the underlying stream is also closed.
/// </summary>
protected override void Dispose(bool disposing)
{
if (!isClosed)
{
isClosed = true;
if (IsStreamOwner)
{
baseInputStream.Dispose();
}
}
}
/// <summary>
/// Reads decompressed data into the provided buffer byte array
/// </summary>
/// <param name ="buffer">
/// The array to read and decompress data into
/// </param>
/// <param name ="offset">
/// The offset indicating where the data should be placed
/// </param>
/// <param name ="count">
/// The number of bytes to decompress
/// </param>
/// <returns>The number of bytes read. Zero signals the end of stream</returns>
/// <exception cref="SharpZipBaseException">
/// Inflater needs a dictionary
/// </exception>
public override int Read(byte[] buffer, int offset, int count)
{
if (inf.IsNeedingDictionary)
{
throw new SharpZipBaseException("Need a dictionary");
}
int remainingBytes = count;
while (true)
{
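// Inflate as much as possible into the caller's buffer; when the inflater
// runs out of input, Fill() pulls more compressed data from the base stream.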
int bytesRead = inf.Inflate(buffer, offset, remainingBytes);
offset += bytesRead;
remainingBytes -= bytesRead;
if (remainingBytes == 0 || inf.IsFinished)
{
break;
}
if (inf.IsNeedingInput)
{
Fill();
}
else if (bytesRead == 0)
{
throw new ZipException("Invalid input data");
}
}
return count - remainingBytes;
}
#endregion Stream Overrides
#region Instance Fields
/// <summary>
/// Decompressor for this stream
/// </summary>
protected Inflater inf;
/// <summary>
/// <see cref="InflaterInputBuffer">Input buffer</see> for this stream.
/// </summary>
protected InflaterInputBuffer inputBuffer;
/// <summary>
/// Base stream the inflater reads from.
/// </summary>
private Stream baseInputStream;
/// <summary>
/// The compressed size
/// </summary>
protected long csize;
/// <summary>
/// Flag indicating whether this instance has been closed or not.
/// </summary>
private bool isClosed;
#endregion Instance Fields
}
}
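And the matching decompression sketch (again illustrative, not part of this commit), reading everything back through an InflaterInputStream, which uses the InflaterInputBuffer above to refill and, when configured, decrypt its input:

using System.IO;
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;

static class InflateSketch
{
    public static byte[] Decompress(byte[] deflated)
    {
        using (var source = new MemoryStream(deflated))
        using (var inflaterIn = new InflaterInputStream(source, new Inflater(), 4096))
        using (var result = new MemoryStream())
        {
            var buffer = new byte[4096];
            int read;
            // Read returns 0 once the inflater reports the end of the deflate stream.
            while ((read = inflaterIn.Read(buffer, 0, buffer.Length)) > 0)
            {
                result.Write(buffer, 0, read);
            }
            return result.ToArray();
        }
    }
}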
