diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md deleted file mode 100644 index 4ed271ae2..000000000 --- a/.github/ISSUE_TEMPLATE.md +++ /dev/null @@ -1,17 +0,0 @@ -### Steps to reproduce -1. -2. -3. - -### Expected behavior -Tell us what should happen - -### Actual behavior -Tell us what happens instead - -### Version of SharpZipLib - -### Obtained from (only keep the relevant lines) -- Compiled from source, commit: _______ -- Downloaded from GitHub -- Package installed using NuGet diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml new file mode 100644 index 000000000..a1620f07a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug.yml @@ -0,0 +1,92 @@ +name: 🐛 Bug report +description: Create a report to help us improve +labels: ["bug"] + +body: + - type: textarea + id: description + attributes: + label: Describe the bug + description: A clear and concise description of what the bug is + validations: + required: true + + - type: input + id: reproduce-code + attributes: + description: | + If possible, the best way to display an issue is by making a reproducable code snippet available at jsfiddle. + Create a dotnet fiddle which reproduces your issue. You can use [this template](https://p1k.se/sharpziplib-repro) or [create a new one](https://dotnetfiddle.net/). + placeholder: https://dotnetfiddle.net/r39r0c0d3 + label: Reproduction Code + + - type: textarea + id: reproduce-steps + attributes: + label: Steps to reproduce + description: Steps to reproduce the behavior + placeholder: | + 1. Go to '...' + 2. Click on '....' + 3. Scroll down to '....' + 4. See error + validations: + required: true + + - id: expected + type: textarea + attributes: + label: Expected behavior + description: A clear and concise description of what you expected to happen. + validations: + required: true + + - id: operating-system + type: dropdown + attributes: + label: Operating System + multiple: true + options: + - Windows + - macOS + - Linux + validations: + required: false + + - id: framework + type: dropdown + attributes: + label: Framework Version + multiple: true + options: + - .NET 7 + - .NET 6 + - .NET 5 + - .NET Core v3 and earlier + - .NET Framework 4.x + - Unity + - Other + validations: + required: false + + - id: tags + type: dropdown + attributes: + label: Tags + description: What areas are your issue related to? + multiple: true + options: + - ZIP + - GZip + - Tar + - BZip2 + - Encoding + - Encryption + - Documentation + - Async + - Performance + + - type: textarea + attributes: + label: Additional context + description: Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..5a0d4a50e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,8 @@ +blank_issues_enabled: false +contact_links: + - name: Ask a question + url: https://github.com/icsharpcode/SharpZipLib/discussions/new?category=q-a + about: Post any questions in QA discussions instead of creating an issue + - name: Discuss + url: https://github.com/icsharpcode/SharpZipLib/discussions/new + about: Discuss with other community members diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 000000000..5683f0e84 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,52 @@ +name: 💡 Feature request +description: Have a new idea/feature ? Please suggest! 
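The bug form above asks for a runnable dotnet fiddle rather than prose alone; for reference, a self-contained SharpZipLib reproduction typically has roughly the shape below (the entry name and payload are placeholders, and only the public Zip stream types are used):

```csharp
using System;
using System.IO;
using System.Text;
using ICSharpCode.SharpZipLib.Zip;

public static class Repro
{
	public static void Main()
	{
		var buffer = new MemoryStream();

		// Write a one-entry archive into memory.
		using (var zipOut = new ZipOutputStream(buffer) { IsStreamOwner = false })
		{
			zipOut.PutNextEntry(new ZipEntry("hello.txt"));
			var payload = Encoding.UTF8.GetBytes("hello");
			zipOut.Write(payload, 0, payload.Length);
			zipOut.CloseEntry();
		}

		// Read it back and show what the library reports.
		buffer.Position = 0;
		using (var zipIn = new ZipInputStream(buffer))
		{
			ZipEntry entry;
			while ((entry = zipIn.GetNextEntry()) != null)
			{
				Console.WriteLine(entry.Name);
			}
		}
	}
}
```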
+labels: ["enhancement"] +body: + - type: textarea + id: description + attributes: + label: Is your feature request related to a problem? Please describe. + description: A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + validations: + required: true + + - type: textarea + id: solution + attributes: + label: Describe the solution you'd like + description: A clear and concise description of what you want to happen. + validations: + required: true + + - type: textarea + id: alternatives + attributes: + label: Describe alternatives you've considered + description: A clear and concise description of any alternative solutions or features you've considered. + validations: + required: true + + - id: tags + type: dropdown + attributes: + label: Tags + description: What areas are your feature request related to? + multiple: true + options: + - ZIP + - GZip + - Tar + - BZip2 + - Encoding + - Encryption + - Documentation + - Async + - Performance + + - type: textarea + id: extrainfo + attributes: + label: Additional context + description: Add any other context or screenshots about the feature request here. + validations: + required: false diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index b6b0eeb2a..a77a0e278 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -14,27 +14,27 @@ on: jobs: Build: - runs-on: ${{ matrix.os }}-latest + runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: - os: [ubuntu, windows, macos] - target: [netstandard2.0, netstandard2.1] - include: - - os: windows - target: net45 + os: [ubuntu-latest, windows-latest, macos-latest] + target: [netstandard2.0, netstandard2.1, net6.0] env: LIB_PROJ: src/ICSharpCode.SharpZipLib/ICSharpCode.SharpZipLib.csproj steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: ref: ${{ github.events.inputs.tag }} fetch-depth: 0 - - name: Setup .NET Core + - name: Setup .NET uses: actions/setup-dotnet@v1 with: - dotnet-version: '3.1.x' + dotnet-version: '6.0.x' + + - name: Show .NET info + run: dotnet --info - name: Build library (Debug) run: dotnet build -c debug -f ${{ matrix.target }} ${{ env.LIB_PROJ }} @@ -49,17 +49,17 @@ jobs: matrix: # Windows testing is combined with code coverage os: [ubuntu, macos] - target: [netcoreapp3.1] + target: [net6.0] steps: - uses: actions/checkout@v2 with: fetch-depth: 0 - name: Setup .NET Core - if: matrix.target == 'netcoreapp3.1' + if: matrix.target == 'net6.0' uses: actions/setup-dotnet@v1 with: - dotnet-version: '3.1.x' + dotnet-version: '6.0.x' - name: Restore test dependencies run: dotnet restore @@ -73,20 +73,20 @@ jobs: CodeCov: name: Code Coverage - runs-on: windows-latest + runs-on: windows-2019 env: DOTCOVER_VER: 2021.1.2 DOTCOVER_PKG: jetbrains.dotcover.commandlinetools COVER_SNAPSHOT: SharpZipLib.dcvr steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: fetch-depth: 0 - name: Setup .NET uses: actions/setup-dotnet@v1 with: - dotnet-version: '3.1.x' + dotnet-version: '6.0.x' # NOTE: This is the temporary fix for https://github.com/actions/virtual-environments/issues/1090 - name: Cleanup before restore @@ -132,14 +132,14 @@ jobs: - name: Setup .NET Core uses: actions/setup-dotnet@v1 with: - dotnet-version: '5.0.x' + dotnet-version: '6.0.x' - name: Build library for .NET Standard 2.0 run: dotnet build -c Release -f netstandard2.0 ${{ env.PKG_PROPS }} ${{ env.PKG_PROJ }} - name: Build library for .NET Standard 2.1 run: dotnet build -c Release -f 
netstandard2.1 ${{ env.PKG_PROPS }} ${{ env.PKG_PROJ }} - - name: Build library for .NET Framework 4.5 - run: dotnet build -c Release -f net45 ${{ env.PKG_PROPS }} ${{ env.PKG_PROJ }} + - name: Build library for .NET 6.0 + run: dotnet build -c Release -f net6.0 ${{ env.PKG_PROPS }} ${{ env.PKG_PROJ }} - name: Add PR suffix to package if: ${{ github.event_name == 'pull_request' }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 549469d55..54ce5b08b 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -39,11 +39,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v3 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v1 + uses: github/codeql-action/init@v2 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -54,7 +54,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v1 + uses: github/codeql-action/autobuild@v2 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -68,4 +68,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v1 + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/issue.yml b/.github/workflows/issue.yml new file mode 100644 index 000000000..8f0fe77d8 --- /dev/null +++ b/.github/workflows/issue.yml @@ -0,0 +1,30 @@ +name: Apply labels from issue + +on: + issues: + types: [opened, edited] + +jobs: + Process_Issue: + runs-on: ubuntu-latest + steps: + - name: Parse Issue Forms Body + id: parse + uses: zentered/issue-forms-body-parser@v1.4.3 + - name: Apply labels from tags + uses: actions/github-script@v6 + env: + PARSED_DATA: "${{ steps.parse.outputs.data }}" + with: + script: | + const parsed = JSON.parse(process.env["PARSED_DATA"]); + const tags = parsed.tags.text; + console.log('Parsed tags:', tags); + const labels = tags.split(',').map( t => t.trim().toLowerCase() ); + console.log('Applying labels:', labels); + github.rest.issues.addLabels({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + labels, + }) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 60d98ba9f..bb6f67445 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,28 +12,52 @@ on: required: true jobs: - docfx: - runs-on: ubuntu-latest - name: Update DocFX documentation + build: + runs-on: windows-latest + name: Generate DocFX documentation steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 with: ref: ${{ github.events.inputs.tag }} - - uses: nikeee/docfx-action@v1.0.0 - name: Build Documentation + + - name: Setup .NET + uses: actions/setup-dotnet@v1 with: - args: docs/help/docfx.json + dotnet-version: '6.0.x' + + - name: Build project + run: dotnet build -f netstandard2.0 src/ICSharpCode.SharpZipLib/ICSharpCode.SharpZipLib.csproj - - uses: JamesIves/github-pages-deploy-action@3.6.2 - name: Publish documentation to Github Pages - with: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - BRANCH: gh-pages - FOLDER: docs/help/_site - TARGET_FOLDER: help - CLEAN: false + - name: Install docfx + run: choco install docfx + + - name: Build Documentation + run: docfx 
docs/help/docfx.json --warningsAsErrors - name: Upload documentation as artifact uses: actions/upload-artifact@v2 with: + name: site path: docs/help/_site + + deploy: + needs: [build] # The second job must depend on the first one to complete before running and uses ubuntu-latest instead of windows. + runs-on: ubuntu-latest + name: Update github pages docs + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Download Artifacts # The built project is downloaded into the 'site' folder. + uses: actions/download-artifact@v1 + with: + name: site + + - name: Publish documentation to Github Pages + uses: JamesIves/github-pages-deploy-action@v4 + with: + token: ${{ secrets.GITHUB_TOKEN }} + branch: gh-pages + folder: site + target-folder: help + clean: false diff --git a/.globalconfig b/.globalconfig new file mode 100644 index 000000000..dbb1ed562 --- /dev/null +++ b/.globalconfig @@ -0,0 +1,3 @@ +is_global = true +global_level = 1 +dotnet_diagnostic.CA2007.severity = warning diff --git a/ICSharpCode.SharpZipLib.sln b/ICSharpCode.SharpZipLib.sln index cab9675b5..0c4c6c5f4 100644 --- a/ICSharpCode.SharpZipLib.sln +++ b/ICSharpCode.SharpZipLib.sln @@ -15,8 +15,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ICSharpCode.SharpZipLib", " EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ICSharpCode.SharpZipLib.Tests", "test\ICSharpCode.SharpZipLib.Tests\ICSharpCode.SharpZipLib.Tests.csproj", "{82211166-9C45-4603-8E3A-2CA2EFFCBC26}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ICSharpCode.SharpZipLib.TestBootstrapper", "test\ICSharpCode.SharpZipLib.TestBootstrapper\ICSharpCode.SharpZipLib.TestBootstrapper.csproj", "{535D7365-C5B1-4253-9233-D72D972CA851}" -EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ICSharpCode.SharpZipLib.Benchmark", "benchmark\ICSharpCode.SharpZipLib.Benchmark\ICSharpCode.SharpZipLib.Benchmark.csproj", "{C51E638B-DDD0-48B6-A6BD-EBC4E6A104C7}" EndProject Global @@ -33,10 +31,6 @@ Global {82211166-9C45-4603-8E3A-2CA2EFFCBC26}.Debug|Any CPU.Build.0 = Debug|Any CPU {82211166-9C45-4603-8E3A-2CA2EFFCBC26}.Release|Any CPU.ActiveCfg = Release|Any CPU {82211166-9C45-4603-8E3A-2CA2EFFCBC26}.Release|Any CPU.Build.0 = Release|Any CPU - {535D7365-C5B1-4253-9233-D72D972CA851}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {535D7365-C5B1-4253-9233-D72D972CA851}.Debug|Any CPU.Build.0 = Debug|Any CPU - {535D7365-C5B1-4253-9233-D72D972CA851}.Release|Any CPU.ActiveCfg = Release|Any CPU - {535D7365-C5B1-4253-9233-D72D972CA851}.Release|Any CPU.Build.0 = Release|Any CPU {C51E638B-DDD0-48B6-A6BD-EBC4E6A104C7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {C51E638B-DDD0-48B6-A6BD-EBC4E6A104C7}.Debug|Any CPU.Build.0 = Debug|Any CPU {C51E638B-DDD0-48B6-A6BD-EBC4E6A104C7}.Release|Any CPU.ActiveCfg = Release|Any CPU diff --git a/README.md b/README.md index a27570f45..5f09f1f01 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# SharpZipLib [![Build Status](https://github.com/icsharpcode/SharpZipLib/actions/workflows/build-test.yml/badge.svg?branch=master)](https://github.com/icsharpcode/SharpZipLib/actions/workflows/build-test.yml) [![NuGet Version](https://img.shields.io/nuget/v/SharpZipLib.svg)](https://www.nuget.org/packages/SharpZipLib/) +# SharpZipLib [![Build Status](https://github.com/icsharpcode/SharpZipLib/actions/workflows/build-test.yml/badge.svg?branch=master)](https://github.com/icsharpcode/SharpZipLib/actions/workflows/build-test.yml) [![NuGet 
Version](https://img.shields.io/nuget/v/SharpZipLib.svg)](https://www.nuget.org/packages/SharpZipLib/) [![openupm](https://img.shields.io/npm/v/org.icsharpcode.sharpziplib?label=openupm®istry_uri=https://package.openupm.com)](https://openupm.com/packages/org.icsharpcode.sharpziplib/) Introduction ------------ diff --git a/benchmark/.globalconfig b/benchmark/.globalconfig new file mode 100644 index 000000000..14f57bc66 --- /dev/null +++ b/benchmark/.globalconfig @@ -0,0 +1,3 @@ +is_global = true +global_level = 2 +dotnet_diagnostic.CA2007.severity = none diff --git a/benchmark/ICSharpCode.SharpZipLib.Benchmark/ICSharpCode.SharpZipLib.Benchmark.csproj b/benchmark/ICSharpCode.SharpZipLib.Benchmark/ICSharpCode.SharpZipLib.Benchmark.csproj index 81a8ad598..7fa26f80f 100644 --- a/benchmark/ICSharpCode.SharpZipLib.Benchmark/ICSharpCode.SharpZipLib.Benchmark.csproj +++ b/benchmark/ICSharpCode.SharpZipLib.Benchmark/ICSharpCode.SharpZipLib.Benchmark.csproj @@ -1,18 +1,16 @@  - - Exe - netcoreapp2.1;netcoreapp3.1;net461 - + + Exe + net462;net6.0 + - - 0.12.1 - + - + diff --git a/benchmark/ICSharpCode.SharpZipLib.Benchmark/Program.cs b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Program.cs index 9c79e6551..3a7beebbb 100644 --- a/benchmark/ICSharpCode.SharpZipLib.Benchmark/Program.cs +++ b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Program.cs @@ -1,6 +1,4 @@ -using System; -using BenchmarkDotNet; -using BenchmarkDotNet.Configs; +using BenchmarkDotNet.Configs; using BenchmarkDotNet.Jobs; using BenchmarkDotNet.Running; using BenchmarkDotNet.Toolchains.CsProj; @@ -11,9 +9,8 @@ public class MultipleRuntimes : ManualConfig { public MultipleRuntimes() { - AddJob(Job.Default.WithToolchain(CsProjClassicNetToolchain.Net461).AsBaseline()); // NET 4.6.1 - AddJob(Job.Default.WithToolchain(CsProjCoreToolchain.NetCoreApp21)); // .NET Core 2.1 - AddJob(Job.Default.WithToolchain(CsProjCoreToolchain.NetCoreApp31)); // .NET Core 3.1 + AddJob(Job.Default.WithToolchain(CsProjClassicNetToolchain.Net462).AsBaseline()); // NET 4.6.2 + AddJob(Job.Default.WithToolchain(CsProjCoreToolchain.NetCoreApp60)); // .NET 6.0 } } diff --git a/benchmark/ICSharpCode.SharpZipLib.Benchmark/Tar/TarInputStream.cs b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Tar/TarInputStream.cs new file mode 100644 index 000000000..b59a217ab --- /dev/null +++ b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Tar/TarInputStream.cs @@ -0,0 +1,82 @@ +using System; +using System.IO; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using BenchmarkDotNet.Attributes; +using ICSharpCode.SharpZipLib.Tar; + +namespace ICSharpCode.SharpZipLib.Benchmark.Tar +{ + [MemoryDiagnoser] + [Config(typeof(MultipleRuntimes))] + public class TarInputStream + { + private readonly byte[] archivedData; + private readonly byte[] readBuffer = new byte[1024]; + + public TarInputStream() + { + using (var outputMemoryStream = new MemoryStream()) + { + using (var zipOutputStream = + new ICSharpCode.SharpZipLib.Tar.TarOutputStream(outputMemoryStream, Encoding.UTF8)) + { + var tarEntry = TarEntry.CreateTarEntry("some file"); + tarEntry.Size = 1024 * 1024; + zipOutputStream.PutNextEntry(tarEntry); + + var rng = RandomNumberGenerator.Create(); + var inputBuffer = new byte[1024]; + rng.GetBytes(inputBuffer); + + for (int i = 0; i < 1024; i++) + { + zipOutputStream.Write(inputBuffer, 0, inputBuffer.Length); + } + } + + archivedData = outputMemoryStream.ToArray(); + } + } + + [Benchmark] + public long ReadTarInputStream() + { + using 
(var memoryStream = new MemoryStream(archivedData)) + using (var zipInputStream = new ICSharpCode.SharpZipLib.Tar.TarInputStream(memoryStream, Encoding.UTF8)) + { + var entry = zipInputStream.GetNextEntry(); + + while (zipInputStream.Read(readBuffer, 0, readBuffer.Length) > 0) + { + } + + return entry.Size; + } + } + + [Benchmark] + public async Task ReadTarInputStreamAsync() + { + using (var memoryStream = new MemoryStream(archivedData)) + using (var zipInputStream = new ICSharpCode.SharpZipLib.Tar.TarInputStream(memoryStream, Encoding.UTF8)) + { + var entry = await zipInputStream.GetNextEntryAsync(CancellationToken.None); + +#if NETCOREAPP2_1_OR_GREATER + while (await zipInputStream.ReadAsync(readBuffer.AsMemory()) > 0) + { + } +#else + while (await zipInputStream.ReadAsync(readBuffer, 0, readBuffer.Length) > 0) + { + } +#endif + + return entry.Size; + } + } + } +} diff --git a/benchmark/ICSharpCode.SharpZipLib.Benchmark/Tar/TarOutputStream.cs b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Tar/TarOutputStream.cs new file mode 100644 index 000000000..f24e83e35 --- /dev/null +++ b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Tar/TarOutputStream.cs @@ -0,0 +1,64 @@ +using System.IO; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using BenchmarkDotNet.Attributes; +using ICSharpCode.SharpZipLib.Tar; + +namespace ICSharpCode.SharpZipLib.Benchmark.Tar +{ + [MemoryDiagnoser] + [Config(typeof(MultipleRuntimes))] + public class TarOutputStream + { + private readonly byte[] backingArray = new byte[1024 * 1024 + (6 * 1024)]; + private readonly byte[] inputBuffer = new byte[1024]; + private static readonly RandomNumberGenerator _rng = RandomNumberGenerator.Create(); + + [Benchmark] + public void WriteTarOutputStream() + { + using (var outputMemoryStream = new MemoryStream(backingArray)) + { + using (var tarOutputStream = + new ICSharpCode.SharpZipLib.Tar.TarOutputStream(outputMemoryStream, Encoding.UTF8)) + { + var tarEntry = TarEntry.CreateTarEntry("some file"); + tarEntry.Size = 1024 * 1024; + tarOutputStream.PutNextEntry(tarEntry); + + _rng.GetBytes(inputBuffer); + + for (int i = 0; i < 1024; i++) + { + tarOutputStream.Write(inputBuffer, 0, inputBuffer.Length); + } + } + } + } + + [Benchmark] + public async Task WriteTarOutputStreamAsync() + { + using (var outputMemoryStream = new MemoryStream(backingArray)) + { + using (var tarOutputStream = + new ICSharpCode.SharpZipLib.Tar.TarOutputStream(outputMemoryStream, Encoding.UTF8)) + { + var tarEntry = TarEntry.CreateTarEntry("some file"); + tarEntry.Size = 1024 * 1024; + + await tarOutputStream.PutNextEntryAsync(tarEntry, CancellationToken.None); + + _rng.GetBytes(inputBuffer); + + for (int i = 0; i < 1024; i++) + { + await tarOutputStream.WriteAsync(inputBuffer, 0, inputBuffer.Length); + } + } + } + } + } +} diff --git a/benchmark/ICSharpCode.SharpZipLib.Benchmark/Zip/ZipFile.cs b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Zip/ZipFile.cs new file mode 100644 index 000000000..0a84e0b88 --- /dev/null +++ b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Zip/ZipFile.cs @@ -0,0 +1,63 @@ +using System; +using System.IO; +using System.Net.Http; +using System.Threading.Tasks; +using BenchmarkDotNet.Attributes; +using ICSharpCode.SharpZipLib.Zip; + +namespace ICSharpCode.SharpZipLib.Benchmark.Zip +{ + [MemoryDiagnoser] + [Config(typeof(MultipleRuntimes))] + public class ZipFile + { + private readonly byte[] readBuffer = new byte[4096]; + private string zipFileWithLargeAmountOfEntriesPath; + + 
[GlobalSetup] + public async Task GlobalSetup() + { + SharpZipLibOptions.InflaterPoolSize = 4; + + // large real-world test file from test262 repository + string commitSha = "2e4e0e6b8ebe3348a207144204cb6d7a5571c863"; + zipFileWithLargeAmountOfEntriesPath = Path.Combine(Path.GetTempPath(), $"{commitSha}.zip"); + if (!File.Exists(zipFileWithLargeAmountOfEntriesPath)) + { + var uri = $"https://github.com/tc39/test262/archive/{commitSha}.zip"; + + Console.WriteLine("Loading test262 repository archive from {0}", uri); + + using (var client = new HttpClient()) + { + using (var downloadStream = await client.GetStreamAsync(uri)) + { + using (var writeStream = File.OpenWrite(zipFileWithLargeAmountOfEntriesPath)) + { + await downloadStream.CopyToAsync(writeStream); + Console.WriteLine("File downloaded and saved to {0}", zipFileWithLargeAmountOfEntriesPath); + } + } + } + } + + } + + [Benchmark] + public void ReadLargeZipFile() + { + using (var file = new SharpZipLib.Zip.ZipFile(zipFileWithLargeAmountOfEntriesPath)) + { + foreach (ZipEntry entry in file) + { + using (var stream = file.GetInputStream(entry)) + { + while (stream.Read(readBuffer, 0, readBuffer.Length) > 0) + { + } + } + } + } + } + } +} diff --git a/benchmark/ICSharpCode.SharpZipLib.Benchmark/Zip/ZipInputStream.cs b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Zip/ZipInputStream.cs index eb099ebfd..2e0c057d8 100644 --- a/benchmark/ICSharpCode.SharpZipLib.Benchmark/Zip/ZipInputStream.cs +++ b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Zip/ZipInputStream.cs @@ -1,5 +1,4 @@ -using System; -using System.IO; +using System.IO; using BenchmarkDotNet.Attributes; namespace ICSharpCode.SharpZipLib.Benchmark.Zip @@ -15,7 +14,8 @@ public class ZipInputStream byte[] zippedData; byte[] readBuffer = new byte[4096]; - public ZipInputStream() + [GlobalSetup] + public void GlobalSetup() { using (var memoryStream = new MemoryStream()) { diff --git a/benchmark/ICSharpCode.SharpZipLib.Benchmark/Zip/ZipOutputStream.cs b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Zip/ZipOutputStream.cs index ed125c1c7..c4e8620e3 100644 --- a/benchmark/ICSharpCode.SharpZipLib.Benchmark/Zip/ZipOutputStream.cs +++ b/benchmark/ICSharpCode.SharpZipLib.Benchmark/Zip/ZipOutputStream.cs @@ -1,5 +1,4 @@ -using System; -using System.IO; +using System.IO; using System.Threading.Tasks; using BenchmarkDotNet.Attributes; @@ -16,7 +15,8 @@ public class ZipOutputStream byte[] outputBuffer; byte[] inputBuffer; - public ZipOutputStream() + [GlobalSetup] + public void GlobalSetup() { inputBuffer = new byte[ChunkSize]; outputBuffer = new byte[N]; diff --git a/docs/help/docfx.json b/docs/help/docfx.json index b2123cfa2..1da3079e4 100644 --- a/docs/help/docfx.json +++ b/docs/help/docfx.json @@ -16,7 +16,7 @@ ], "dest": "api", "properties": { - "TargetFramework": "NETSTANDARD2" + "TargetFramework": "netstandard2.0" } } ], @@ -65,7 +65,7 @@ ], "globalMetadata": { "_appTitle": "SharpZipLib Help", - "_appFooter": "Copyright © 2000-2019 SharpZipLib Contributors", + "_appFooter": "Copyright © 2000-2022 SharpZipLib Contributors", "_gitContribute": { "repo": "https://github.com/icsharpcode/SharpZipLib", "branch": "master" diff --git a/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_BZip2/Cmd_BZip2.csproj b/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_BZip2/Cmd_BZip2.csproj index 07039ab9d..b1536ff0f 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_BZip2/Cmd_BZip2.csproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_BZip2/Cmd_BZip2.csproj @@ -101,7 +101,7 @@ copy Cmd_BZip2.exe 
bunzip2.exe - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_Checksum/Cmd_Checksum.csproj b/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_Checksum/Cmd_Checksum.csproj index 1509d6080..d5d9f6cfe 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_Checksum/Cmd_Checksum.csproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_Checksum/Cmd_Checksum.csproj @@ -100,7 +100,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_GZip/Cmd_GZip.csproj b/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_GZip/Cmd_GZip.csproj index d5e021825..9ebaa8ca6 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_GZip/Cmd_GZip.csproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_GZip/Cmd_GZip.csproj @@ -100,7 +100,7 @@ copy Cmd_GZip.exe gunzip.exe - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_Tar/Cmd_Tar.csproj b/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_Tar/Cmd_Tar.csproj index 4f6bb416a..d40eef52e 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_Tar/Cmd_Tar.csproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_Tar/Cmd_Tar.csproj @@ -91,7 +91,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_ZipInfo/Cmd_ZipInfo.csproj b/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_ZipInfo/Cmd_ZipInfo.csproj index 0e3d31240..311fcb85d 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_ZipInfo/Cmd_ZipInfo.csproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/cs/Cmd_ZipInfo/Cmd_ZipInfo.csproj @@ -99,7 +99,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/cs/CreateZipFile/CreateZipFile.csproj b/samples/ICSharpCode.SharpZipLib.Samples/cs/CreateZipFile/CreateZipFile.csproj index 61dcf0166..efd2cd464 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/cs/CreateZipFile/CreateZipFile.csproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/cs/CreateZipFile/CreateZipFile.csproj @@ -107,7 +107,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/cs/FastZip/FastZip.csproj b/samples/ICSharpCode.SharpZipLib.Samples/cs/FastZip/FastZip.csproj index 6a948c6b5..efacf9ff8 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/cs/FastZip/FastZip.csproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/cs/FastZip/FastZip.csproj @@ -90,7 +90,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/cs/sz/sz.csproj b/samples/ICSharpCode.SharpZipLib.Samples/cs/sz/sz.csproj index 02a096db6..121d55859 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/cs/sz/sz.csproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/cs/sz/sz.csproj @@ -85,7 +85,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/cs/unzipfile/unzipfile.csproj b/samples/ICSharpCode.SharpZipLib.Samples/cs/unzipfile/unzipfile.csproj index fa3f6b8fc..43403d1e4 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/cs/unzipfile/unzipfile.csproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/cs/unzipfile/unzipfile.csproj @@ -61,7 +61,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/cs/viewzipfile/viewzipfile.csproj b/samples/ICSharpCode.SharpZipLib.Samples/cs/viewzipfile/viewzipfile.csproj index 0b10efd15..4734de832 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/cs/viewzipfile/viewzipfile.csproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/cs/viewzipfile/viewzipfile.csproj @@ -61,7 +61,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/cs/zf/zf.csproj 
b/samples/ICSharpCode.SharpZipLib.Samples/cs/zf/zf.csproj index e8d8b757f..1dbf75744 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/cs/zf/zf.csproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/cs/zf/zf.csproj @@ -85,7 +85,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/vb/CreateZipFile/CreateZipFile.vbproj b/samples/ICSharpCode.SharpZipLib.Samples/vb/CreateZipFile/CreateZipFile.vbproj index 2057acd9f..42db45963 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/vb/CreateZipFile/CreateZipFile.vbproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/vb/CreateZipFile/CreateZipFile.vbproj @@ -95,7 +95,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/vb/WpfCreateZipFile/WpfCreateZipFile.vbproj b/samples/ICSharpCode.SharpZipLib.Samples/vb/WpfCreateZipFile/WpfCreateZipFile.vbproj index e6ccebddc..d86ec9e00 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/vb/WpfCreateZipFile/WpfCreateZipFile.vbproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/vb/WpfCreateZipFile/WpfCreateZipFile.vbproj @@ -171,7 +171,7 @@ - 1.3.1 + 1.3.3 1.0.2 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/vb/minibzip2/minibzip2.vbproj b/samples/ICSharpCode.SharpZipLib.Samples/vb/minibzip2/minibzip2.vbproj index 618adb8ad..e15a05ec6 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/vb/minibzip2/minibzip2.vbproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/vb/minibzip2/minibzip2.vbproj @@ -112,7 +112,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/vb/viewzipfile/viewzipfile.vbproj b/samples/ICSharpCode.SharpZipLib.Samples/vb/viewzipfile/viewzipfile.vbproj index 429d77bfc..c85b1082e 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/vb/viewzipfile/viewzipfile.vbproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/vb/viewzipfile/viewzipfile.vbproj @@ -90,7 +90,7 @@ - 1.3.1 + 1.3.3 diff --git a/samples/ICSharpCode.SharpZipLib.Samples/vb/zipfiletest/zipfiletest.vbproj b/samples/ICSharpCode.SharpZipLib.Samples/vb/zipfiletest/zipfiletest.vbproj index ddde8318d..b8b02293e 100644 --- a/samples/ICSharpCode.SharpZipLib.Samples/vb/zipfiletest/zipfiletest.vbproj +++ b/samples/ICSharpCode.SharpZipLib.Samples/vb/zipfiletest/zipfiletest.vbproj @@ -100,7 +100,7 @@ - 1.3.1 + 1.3.3 diff --git a/src/ICSharpCode.SharpZipLib/AssemblyInfo.cs b/src/ICSharpCode.SharpZipLib/AssemblyInfo.cs new file mode 100644 index 000000000..8f8e62016 --- /dev/null +++ b/src/ICSharpCode.SharpZipLib/AssemblyInfo.cs @@ -0,0 +1,3 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("ICSharpCode.SharpZipLib.Tests, PublicKey=0024000004800000940000000602000000240000525341310004000001000100b9a14ea8fc9d7599e0e82a1292a23103f0210e2f928a0f466963af23fffadba59dcc8c9e26ecd114d7c0b4179e4bc93b1656b7ee2d4a67dd7c1992653e0d9cc534f7914b6f583b022e0a7aa8a430f407932f9a6806f0fc64d61e78d5ae01aa8f8233196719d44da2c50a2d1cfa3f7abb7487b3567a4f0456aa6667154c6749b1")] diff --git a/src/ICSharpCode.SharpZipLib/Core/ByteOrderUtils.cs b/src/ICSharpCode.SharpZipLib/Core/ByteOrderUtils.cs new file mode 100644 index 000000000..14b096207 --- /dev/null +++ b/src/ICSharpCode.SharpZipLib/Core/ByteOrderUtils.cs @@ -0,0 +1,130 @@ +using System.IO; +using System.Runtime.CompilerServices; +using System.Threading.Tasks; +using CT = System.Threading.CancellationToken; + +// ReSharper disable MemberCanBePrivate.Global +// ReSharper disable InconsistentNaming + +namespace ICSharpCode.SharpZipLib.Core +{ + internal static class ByteOrderStreamExtensions + { + internal static byte[] SwappedBytes(ushort 
value) => new[] {(byte)value, (byte)(value >> 8)}; + internal static byte[] SwappedBytes(short value) => new[] {(byte)value, (byte)(value >> 8)}; + internal static byte[] SwappedBytes(uint value) => new[] {(byte)value, (byte)(value >> 8), (byte)(value >> 16), (byte)(value >> 24)}; + internal static byte[] SwappedBytes(int value) => new[] {(byte)value, (byte)(value >> 8), (byte)(value >> 16), (byte)(value >> 24)}; + + internal static byte[] SwappedBytes(long value) => new[] { + (byte)value, (byte)(value >> 8), (byte)(value >> 16), (byte)(value >> 24), + (byte)(value >> 32), (byte)(value >> 40), (byte)(value >> 48), (byte)(value >> 56) + }; + + internal static byte[] SwappedBytes(ulong value) => new[] { + (byte)value, (byte)(value >> 8), (byte)(value >> 16), (byte)(value >> 24), + (byte)(value >> 32), (byte)(value >> 40), (byte)(value >> 48), (byte)(value >> 56) + }; + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static long SwappedS64(byte[] bytes) => ( + (long)bytes[0] << 0 | (long)bytes[1] << 8 | (long)bytes[2] << 16 | (long)bytes[3] << 24 | + (long)bytes[4] << 32 | (long)bytes[5] << 40 | (long)bytes[6] << 48 | (long)bytes[7] << 56); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static ulong SwappedU64(byte[] bytes) => ( + (ulong)bytes[0] << 0 | (ulong)bytes[1] << 8 | (ulong)bytes[2] << 16 | (ulong)bytes[3] << 24 | + (ulong)bytes[4] << 32 | (ulong)bytes[5] << 40 | (ulong)bytes[6] << 48 | (ulong)bytes[7] << 56); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static int SwappedS32(byte[] bytes) => bytes[0] | bytes[1] << 8 | bytes[2] << 16 | bytes[3] << 24; + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static uint SwappedU32(byte[] bytes) => (uint) SwappedS32(bytes); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static short SwappedS16(byte[] bytes) => (short)(bytes[0] | bytes[1] << 8); + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static ushort SwappedU16(byte[] bytes) => (ushort) SwappedS16(bytes); + + internal static byte[] ReadBytes(this Stream stream, int count) + { + var bytes = new byte[count]; + var remaining = count; + while (remaining > 0) + { + var bytesRead = stream.Read(bytes, count - remaining, remaining); + if (bytesRead < 1) throw new EndOfStreamException(); + remaining -= bytesRead; + } + + return bytes; + } + + /// Read an unsigned short in little endian byte order. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int ReadLEShort(this Stream stream) => SwappedS16(ReadBytes(stream, 2)); + + /// Read an int in little endian byte order. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static int ReadLEInt(this Stream stream) => SwappedS32(ReadBytes(stream, 4)); + + /// Read a long in little endian byte order. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static long ReadLELong(this Stream stream) => SwappedS64(ReadBytes(stream, 8)); + + /// Write an unsigned short in little endian byte order. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteLEShort(this Stream stream, int value) => stream.Write(SwappedBytes(value), 0, 2); + + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static async Task WriteLEShortAsync(this Stream stream, int value, CT ct) + => await stream.WriteAsync(SwappedBytes(value), 0, 2, ct).ConfigureAwait(false); + + /// Write a ushort in little endian byte order. 
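As a worked example of the byte order these internal helpers produce and consume (the values are chosen purely for illustration), from inside the library the round-trip looks like this:

```csharp
// Little-endian layout: least significant byte first.
//   SwappedBytes((ushort)0x0102) -> { 0x02, 0x01 }
//   SwappedBytes(0x01020304)     -> { 0x04, 0x03, 0x02, 0x01 }
// The stream extensions round-trip accordingly:
using (var ms = new MemoryStream())
{
	ms.WriteLEInt(0x01020304);   // writes 04 03 02 01
	ms.Position = 0;
	int value = ms.ReadLEInt();  // 0x01020304 again, regardless of host byte order
}
```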
+ [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteLEUshort(this Stream stream, ushort value) => stream.Write(SwappedBytes(value), 0, 2); + + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static async Task WriteLEUshortAsync(this Stream stream, ushort value, CT ct) + => await stream.WriteAsync(SwappedBytes(value), 0, 2, ct).ConfigureAwait(false); + + /// Write an int in little endian byte order. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteLEInt(this Stream stream, int value) => stream.Write(SwappedBytes(value), 0, 4); + + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static async Task WriteLEIntAsync(this Stream stream, int value, CT ct) + => await stream.WriteAsync(SwappedBytes(value), 0, 4, ct).ConfigureAwait(false); + + /// Write a uint in little endian byte order. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteLEUint(this Stream stream, uint value) => stream.Write(SwappedBytes(value), 0, 4); + + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static async Task WriteLEUintAsync(this Stream stream, uint value, CT ct) + => await stream.WriteAsync(SwappedBytes(value), 0, 4, ct).ConfigureAwait(false); + + /// Write a long in little endian byte order. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteLELong(this Stream stream, long value) => stream.Write(SwappedBytes(value), 0, 8); + + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static async Task WriteLELongAsync(this Stream stream, long value, CT ct) + => await stream.WriteAsync(SwappedBytes(value), 0, 8, ct).ConfigureAwait(false); + + /// Write a ulong in little endian byte order. + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void WriteLEUlong(this Stream stream, ulong value) => stream.Write(SwappedBytes(value), 0, 8); + + /// + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static async Task WriteLEUlongAsync(this Stream stream, ulong value, CT ct) + => await stream.WriteAsync(SwappedBytes(value), 0, 8, ct).ConfigureAwait(false); + } +} diff --git a/src/ICSharpCode.SharpZipLib/Core/ExactMemoryPool.cs b/src/ICSharpCode.SharpZipLib/Core/ExactMemoryPool.cs new file mode 100644 index 000000000..d03ca2ecf --- /dev/null +++ b/src/ICSharpCode.SharpZipLib/Core/ExactMemoryPool.cs @@ -0,0 +1,71 @@ +using System; +using System.Buffers; + +namespace ICSharpCode.SharpZipLib.Core +{ + /// + /// A MemoryPool that will return a Memory which is exactly the length asked for using the bufferSize parameter. + /// This is in contrast to the default ArrayMemoryPool which will return a Memory of equal size to the underlying + /// array which at least as long as the minBufferSize parameter. 
+ /// Note: The underlying array may be larger than the slice of Memory + /// + /// + internal sealed class ExactMemoryPool : MemoryPool + { + public new static readonly MemoryPool Shared = new ExactMemoryPool(); + + public override IMemoryOwner Rent(int bufferSize = -1) + { + if ((uint)bufferSize > int.MaxValue || bufferSize < 0) + { + throw new ArgumentOutOfRangeException(nameof(bufferSize)); + } + + return new ExactMemoryPoolBuffer(bufferSize); + } + + protected override void Dispose(bool disposing) + { + } + + public override int MaxBufferSize => int.MaxValue; + + private sealed class ExactMemoryPoolBuffer : IMemoryOwner, IDisposable + { + private T[] array; + private readonly int size; + + public ExactMemoryPoolBuffer(int size) + { + this.size = size; + this.array = ArrayPool.Shared.Rent(size); + } + + public Memory Memory + { + get + { + T[] array = this.array; + if (array == null) + { + throw new ObjectDisposedException(nameof(ExactMemoryPoolBuffer)); + } + + return new Memory(array).Slice(0, size); + } + } + + public void Dispose() + { + T[] array = this.array; + if (array == null) + { + return; + } + + this.array = null; + ArrayPool.Shared.Return(array); + } + } + } +} diff --git a/src/ICSharpCode.SharpZipLib/Core/InflaterPool.cs b/src/ICSharpCode.SharpZipLib/Core/InflaterPool.cs new file mode 100644 index 000000000..39db32e8c --- /dev/null +++ b/src/ICSharpCode.SharpZipLib/Core/InflaterPool.cs @@ -0,0 +1,66 @@ +using System; +using System.Collections.Concurrent; +using ICSharpCode.SharpZipLib.Zip.Compression; + +namespace ICSharpCode.SharpZipLib.Core +{ + /// + /// Pool for instances as they can be costly due to byte array allocations. + /// + internal sealed class InflaterPool + { + private readonly ConcurrentQueue noHeaderPool = new ConcurrentQueue(); + private readonly ConcurrentQueue headerPool = new ConcurrentQueue(); + + internal static InflaterPool Instance { get; } = new InflaterPool(); + + private InflaterPool() + { + } + + internal Inflater Rent(bool noHeader = false) + { + if (SharpZipLibOptions.InflaterPoolSize <= 0) + { + return new Inflater(noHeader); + } + + var pool = GetPool(noHeader); + + PooledInflater inf; + if (pool.TryDequeue(out var inflater)) + { + inf = inflater; + inf.Reset(); + } + else + { + inf = new PooledInflater(noHeader); + } + + return inf; + } + + internal void Return(Inflater inflater) + { + if (SharpZipLibOptions.InflaterPoolSize <= 0) + { + return; + } + + if (!(inflater is PooledInflater pooledInflater)) + { + throw new ArgumentException("Returned inflater was not a pooled one"); + } + + var pool = GetPool(inflater.noHeader); + if (pool.Count < SharpZipLibOptions.InflaterPoolSize) + { + pooledInflater.Reset(); + pool.Enqueue(pooledInflater); + } + } + + private ConcurrentQueue GetPool(bool noHeader) => noHeader ? noHeaderPool : headerPool; + } +} diff --git a/src/ICSharpCode.SharpZipLib/Core/PathUtils.cs b/src/ICSharpCode.SharpZipLib/Core/PathUtils.cs index b8d0dd409..52f01d079 100644 --- a/src/ICSharpCode.SharpZipLib/Core/PathUtils.cs +++ b/src/ICSharpCode.SharpZipLib/Core/PathUtils.cs @@ -16,6 +16,9 @@ public static class PathUtils /// The path with the root removed if it was present; path otherwise. 
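ExactMemoryPool and InflaterPool above are internal plumbing rather than public API; a rough sketch of the intended usage inside the library, with the pool size, the noHeader flag and the buffer length as illustrative values:

```csharp
// Opt in to inflater reuse; the default of 0 keeps pooling disabled.
SharpZipLibOptions.InflaterPoolSize = 4;

// Rent/return cycle as used by, for example, GZipInputStream further below.
var inflater = InflaterPool.Instance.Rent(noHeader: true);
try
{
	// ... drive the inflater exactly like a regular Inflater ...
}
finally
{
	// Return() resets and keeps the instance only while the pool holds fewer than
	// InflaterPoolSize inflaters for this header mode; otherwise it is dropped.
	InflaterPool.Instance.Return(inflater);
}

// ExactMemoryPool<T> hands out a Memory<T> of exactly the requested length,
// even though the array rented from ArrayPool<T> underneath may be longer.
using (var owner = ExactMemoryPool<byte>.Shared.Rent(100))
{
	Memory<byte> memory = owner.Memory; // memory.Length == 100
	// ... fill or consume the buffer ...
} // Dispose returns the underlying array to ArrayPool<byte>.Shared
```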
public static string DropPathRoot(string path) { + // No need to drop anything + if (path == string.Empty) return path; + var invalidChars = Path.GetInvalidPathChars(); // If the first character after the root is a ':', .NET < 4.6.2 throws var cleanRootSep = path.Length >= 3 && path[1] == ':' && path[2] == ':'; @@ -26,7 +29,7 @@ public static string DropPathRoot(string path) var cleanPath = new string(path.Take(258) .Select( (c, i) => invalidChars.Contains(c) || (i == 2 && cleanRootSep) ? '_' : c).ToArray()); - var stripLength = Path.GetPathRoot(cleanPath).Length; + var stripLength = Path.GetPathRoot(cleanPath)?.Length ?? 0; while (path.Length > stripLength && (path[stripLength] == '/' || path[stripLength] == '\\')) stripLength++; return path.Substring(stripLength); } diff --git a/src/ICSharpCode.SharpZipLib/Core/StreamUtils.cs b/src/ICSharpCode.SharpZipLib/Core/StreamUtils.cs index 6d0d9b304..58ffa2070 100644 --- a/src/ICSharpCode.SharpZipLib/Core/StreamUtils.cs +++ b/src/ICSharpCode.SharpZipLib/Core/StreamUtils.cs @@ -1,12 +1,14 @@ using System; using System.IO; +using System.Threading; +using System.Threading.Tasks; namespace ICSharpCode.SharpZipLib.Core { /// /// Provides simple " utilities. /// - public sealed class StreamUtils + public static class StreamUtils { /// /// Read from a ensuring all the required data is read. @@ -14,7 +16,7 @@ public sealed class StreamUtils /// The stream to read. /// The buffer to fill. /// - static public void ReadFully(Stream stream, byte[] buffer) + public static void ReadFully(Stream stream, byte[] buffer) { ReadFully(stream, buffer, 0, buffer.Length); } @@ -29,7 +31,7 @@ static public void ReadFully(Stream stream, byte[] buffer) /// Required parameter is null /// and or are invalid. /// End of stream is encountered before all the data has been read. - static public void ReadFully(Stream stream, byte[] buffer, int offset, int count) + public static void ReadFully(Stream stream, byte[] buffer, int offset, int count) { if (stream == null) { @@ -73,7 +75,7 @@ static public void ReadFully(Stream stream, byte[] buffer, int offset, int count /// The number of bytes of data to store. /// Required parameter is null /// and or are invalid. - static public int ReadRequestedBytes(Stream stream, byte[] buffer, int offset, int count) + public static int ReadRequestedBytes(Stream stream, byte[] buffer, int offset, int count) { if (stream == null) { @@ -118,7 +120,7 @@ static public int ReadRequestedBytes(Stream stream, byte[] buffer, int offset, i /// The stream to source data from. /// The stream to write data to. /// The buffer to use during copying. - static public void Copy(Stream source, Stream destination, byte[] buffer) + public static void Copy(Stream source, Stream destination, byte[] buffer) { if (source == null) { @@ -169,7 +171,7 @@ static public void Copy(Stream source, Stream destination, byte[] buffer) /// The source for this event. /// The name to use with the event. /// This form is specialised for use within #Zip to support events during archive operations. - static public void Copy(Stream source, Stream destination, + public static void Copy(Stream source, Stream destination, byte[] buffer, ProgressHandler progressHandler, TimeSpan updateInterval, object sender, string name) { Copy(source, destination, buffer, progressHandler, updateInterval, sender, name, -1); @@ -188,7 +190,7 @@ static public void Copy(Stream source, Stream destination, /// A predetermined fixed target value to use with progress updates. 
/// If the value is negative the target is calculated by looking at the stream. /// This form is specialised for use within #Zip to support events during archive operations. - static public void Copy(Stream source, Stream destination, + public static void Copy(Stream source, Stream destination, byte[] buffer, ProgressHandler progressHandler, TimeSpan updateInterval, object sender, string name, long fixedTarget) @@ -272,13 +274,22 @@ static public void Copy(Stream source, Stream destination, progressHandler(sender, args); } } - - /// - /// Initialise an instance of - /// - private StreamUtils() + + internal static async Task WriteProcToStreamAsync(this Stream targetStream, MemoryStream bufferStream, Action writeProc, CancellationToken ct) { - // Do nothing. + bufferStream.SetLength(0); + writeProc(bufferStream); + bufferStream.Position = 0; + await bufferStream.CopyToAsync(targetStream, 81920, ct).ConfigureAwait(false); + bufferStream.SetLength(0); + } + + internal static async Task WriteProcToStreamAsync(this Stream targetStream, Action writeProc, CancellationToken ct) + { + using (var ms = new MemoryStream()) + { + await WriteProcToStreamAsync(targetStream, ms, writeProc, ct).ConfigureAwait(false); + } } } } diff --git a/src/ICSharpCode.SharpZipLib/Core/StringBuilderPool.cs b/src/ICSharpCode.SharpZipLib/Core/StringBuilderPool.cs new file mode 100644 index 000000000..a1121f0cc --- /dev/null +++ b/src/ICSharpCode.SharpZipLib/Core/StringBuilderPool.cs @@ -0,0 +1,22 @@ +using System.Collections.Concurrent; +using System.Text; + +namespace ICSharpCode.SharpZipLib.Core +{ + internal class StringBuilderPool + { + public static StringBuilderPool Instance { get; } = new StringBuilderPool(); + private readonly ConcurrentQueue pool = new ConcurrentQueue(); + + public StringBuilder Rent() + { + return pool.TryDequeue(out var builder) ? builder : new StringBuilder(); + } + + public void Return(StringBuilder builder) + { + builder.Clear(); + pool.Enqueue(builder); + } + } +} diff --git a/src/ICSharpCode.SharpZipLib/Encryption/PkzipClassic.cs b/src/ICSharpCode.SharpZipLib/Encryption/PkzipClassic.cs index 6730c9dee..1c7bd1f28 100644 --- a/src/ICSharpCode.SharpZipLib/Encryption/PkzipClassic.cs +++ b/src/ICSharpCode.SharpZipLib/Encryption/PkzipClassic.cs @@ -6,7 +6,7 @@ namespace ICSharpCode.SharpZipLib.Encryption { /// /// PkzipClassic embodies the classic or original encryption facilities used in Pkzip archives. - /// While it has been superceded by more recent and more powerful algorithms, its still in use and + /// While it has been superseded by more recent and more powerful algorithms, its still in use and /// is viable for preventing casual snooping /// public abstract class PkzipClassic : SymmetricAlgorithm @@ -444,7 +444,7 @@ public override byte[] Key public override void GenerateKey() { key_ = new byte[12]; - using (var rng = new RNGCryptoServiceProvider()) + using (var rng = RandomNumberGenerator.Create()) { rng.GetBytes(key_); } diff --git a/src/ICSharpCode.SharpZipLib/Encryption/ZipAESStream.cs b/src/ICSharpCode.SharpZipLib/Encryption/ZipAESStream.cs index 80ce0b4ab..346b5484b 100644 --- a/src/ICSharpCode.SharpZipLib/Encryption/ZipAESStream.cs +++ b/src/ICSharpCode.SharpZipLib/Encryption/ZipAESStream.cs @@ -40,7 +40,7 @@ public ZipAESStream(Stream stream, ZipAESTransform transform, CryptoStreamMode m } // The final n bytes of the AES stream contain the Auth Code. 
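The WriteProcToStreamAsync helpers added to StreamUtils above let existing synchronous writers (such as the WriteLE* extensions) be reused from the new async code paths. They are internal, so the sketch below shows the pattern as the library's own writers would call it, assuming targetStream and cancellationToken are in scope and using placeholder values:

```csharp
// Render a small structure with an existing synchronous writer into a MemoryStream,
// then copy the buffered bytes to the real target asynchronously.
await targetStream.WriteProcToStreamAsync(
	s => s.WriteLEInt(42),            // any synchronous writer over a Stream; 42 is a placeholder
	cancellationToken).ConfigureAwait(false);

// The overload that takes a MemoryStream lets a caller reuse one buffer across many writes.
using (var buffer = new MemoryStream())
{
	await targetStream.WriteProcToStreamAsync(buffer, s => s.WriteLEShort(1), cancellationToken)
		.ConfigureAwait(false);
}
```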
- private const int AUTH_CODE_LENGTH = 10; + public const int AUTH_CODE_LENGTH = 10; // Blocksize is always 16 here, even for AES-256 which has transform.InputBlockSize of 32. private const int CRYPTO_BLOCK_SIZE = 16; diff --git a/src/ICSharpCode.SharpZipLib/Encryption/ZipAESTransform.cs b/src/ICSharpCode.SharpZipLib/Encryption/ZipAESTransform.cs index 5aced2d71..32c7b8156 100644 --- a/src/ICSharpCode.SharpZipLib/Encryption/ZipAESTransform.cs +++ b/src/ICSharpCode.SharpZipLib/Encryption/ZipAESTransform.cs @@ -1,6 +1,5 @@ using System; using System.Security.Cryptography; -using ICSharpCode.SharpZipLib.Core; namespace ICSharpCode.SharpZipLib.Encryption { @@ -9,31 +8,6 @@ namespace ICSharpCode.SharpZipLib.Encryption /// internal class ZipAESTransform : ICryptoTransform { -#if NET45 - class IncrementalHash : HMACSHA1 - { - bool _finalised; - public IncrementalHash(byte[] key) : base(key) { } - public static IncrementalHash CreateHMAC(string n, byte[] key) => new IncrementalHash(key); - public void AppendData(byte[] buffer, int offset, int count) => TransformBlock(buffer, offset, count, buffer, offset); - public byte[] GetHashAndReset() - { - if (!_finalised) - { - byte[] dummy = new byte[0]; - TransformFinalBlock(dummy, 0, 0); - _finalised = true; - } - return Hash; - } - } - - static class HashAlgorithmName - { - public static string SHA1 = null; - } -#endif - private const int PWD_VER_LENGTH = 2; // WinZip use iteration count of 1000 for PBKDF2 key generation @@ -76,7 +50,11 @@ public ZipAESTransform(string key, byte[] saltBytes, int blockSize, bool writeMo _encrPos = ENCRYPT_BLOCK; // Performs the equivalent of derive_key in Dr Brian Gladman's pwd2key.c +#if NET472_OR_GREATER || NETSTANDARD2_1_OR_GREATER || NETCOREAPP2_0_OR_GREATER + var pdb = new Rfc2898DeriveBytes(key, saltBytes, KEY_ROUNDS, HashAlgorithmName.SHA1); +#else var pdb = new Rfc2898DeriveBytes(key, saltBytes, KEY_ROUNDS); +#endif var rm = Aes.Create(); rm.Mode = CipherMode.ECB; // No feedback from cipher for CTR mode _counterNonce = new byte[_blockSize]; @@ -133,91 +111,67 @@ public int TransformBlock(byte[] inputBuffer, int inputOffset, int inputCount, b /// /// Returns the 2 byte password verifier /// - public byte[] PwdVerifier - { - get - { - return _pwdVerifier; - } - } + public byte[] PwdVerifier => _pwdVerifier; /// /// Returns the 10 byte AUTH CODE to be checked or appended immediately following the AES data stream. /// - public byte[] GetAuthCode() - { - if (_authCode == null) - { - _authCode = _hmacsha1.GetHashAndReset(); - } - return _authCode; - } + public byte[] GetAuthCode() => _authCode ?? (_authCode = _hmacsha1.GetHashAndReset()); #region ICryptoTransform Members /// - /// Not implemented. 
+ /// Transform final block and read auth code /// public byte[] TransformFinalBlock(byte[] inputBuffer, int inputOffset, int inputCount) { - if(inputCount > 0) - { - throw new NotImplementedException("TransformFinalBlock is not implemented and inputCount is greater than 0"); + var buffer = Array.Empty(); + + // FIXME: When used together with `ZipAESStream`, the final block handling is done inside of it instead + // This should not be necessary anymore, and the entire `ZipAESStream` class should be replaced with a plain `CryptoStream` + if (inputCount != 0) { + if (inputCount > ZipAESStream.AUTH_CODE_LENGTH) + { + // At least one byte of data is preceeding the auth code + int finalBlock = inputCount - ZipAESStream.AUTH_CODE_LENGTH; + buffer = new byte[finalBlock]; + TransformBlock(inputBuffer, inputOffset, finalBlock, buffer, 0); + } + else if (inputCount < ZipAESStream.AUTH_CODE_LENGTH) + throw new Zip.ZipException("Auth code missing from input stream"); + + // Read the authcode from the last 10 bytes + _authCode = _hmacsha1.GetHashAndReset(); } - return Empty.Array(); + + + return buffer; } /// /// Gets the size of the input data blocks in bytes. /// - public int InputBlockSize - { - get - { - return _blockSize; - } - } + public int InputBlockSize => _blockSize; /// /// Gets the size of the output data blocks in bytes. /// - public int OutputBlockSize - { - get - { - return _blockSize; - } - } + public int OutputBlockSize => _blockSize; /// /// Gets a value indicating whether multiple blocks can be transformed. /// - public bool CanTransformMultipleBlocks - { - get - { - return true; - } - } + public bool CanTransformMultipleBlocks => true; /// /// Gets a value indicating whether the current transform can be reused. /// - public bool CanReuseTransform - { - get - { - return true; - } - } + public bool CanReuseTransform => true; /// /// Cleanup internal state. /// - public void Dispose() - { - _encryptor.Dispose(); - } + public void Dispose() => _encryptor.Dispose(); #endregion ICryptoTransform Members } diff --git a/src/ICSharpCode.SharpZipLib/GZip/GzipInputStream.cs b/src/ICSharpCode.SharpZipLib/GZip/GzipInputStream.cs index 20a4ded17..feca66b3d 100644 --- a/src/ICSharpCode.SharpZipLib/GZip/GzipInputStream.cs +++ b/src/ICSharpCode.SharpZipLib/GZip/GzipInputStream.cs @@ -4,6 +4,7 @@ using System; using System.IO; using System.Text; +using ICSharpCode.SharpZipLib.Core; namespace ICSharpCode.SharpZipLib.GZip { @@ -82,7 +83,7 @@ public GZipInputStream(Stream baseInputStream) /// Size of the buffer to use /// public GZipInputStream(Stream baseInputStream, int size) - : base(baseInputStream, new Inflater(true), size) + : base(baseInputStream, InflaterPool.Instance.Rent(true), size) { } @@ -334,7 +335,7 @@ private void ReadFooter() int crcval = (footer[0] & 0xff) | ((footer[1] & 0xff) << 8) | ((footer[2] & 0xff) << 16) | (footer[3] << 24); if (crcval != (int)crc.Value) { - throw new GZipException("GZIP crc sum mismatch, theirs \"" + crcval + "\" and ours \"" + (int)crc.Value); + throw new GZipException($"GZIP crc sum mismatch, theirs \"{crcval:x8}\" and ours \"{(int)crc.Value:x8}\""); } // NOTE The total here is the original total modulo 2 ^ 32. 
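With the constructor change above, GZipInputStream rents its Inflater from the new InflaterPool whenever SharpZipLibOptions.InflaterPoolSize is positive, instead of allocating one per stream. Reading is unchanged from the caller's point of view; the file name below is a placeholder:

```csharp
using System;
using System.IO;
using System.Text;
using ICSharpCode.SharpZipLib.GZip;

// Decompress a .gz file to a string using the public GZipInputStream API.
using (var input = File.OpenRead("example.gz"))
using (var gzip = new GZipInputStream(input))
using (var reader = new StreamReader(gzip, Encoding.UTF8))
{
	Console.WriteLine(reader.ReadToEnd());
}
```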
diff --git a/src/ICSharpCode.SharpZipLib/GZip/GzipOutputStream.cs b/src/ICSharpCode.SharpZipLib/GZip/GzipOutputStream.cs index 0b1a647fe..d4f1aa4c3 100644 --- a/src/ICSharpCode.SharpZipLib/GZip/GzipOutputStream.cs +++ b/src/ICSharpCode.SharpZipLib/GZip/GzipOutputStream.cs @@ -3,7 +3,9 @@ using ICSharpCode.SharpZipLib.Zip.Compression.Streams; using System; using System.IO; -using System.Text; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; namespace ICSharpCode.SharpZipLib.GZip { @@ -136,6 +138,11 @@ public string FileName } } + /// + /// If defined, will use this time instead of the current for the output header + /// + public DateTime? ModifiedTime { get; set; } + #endregion Public API #region Stream overrides @@ -147,21 +154,47 @@ public string FileName /// Offset of first byte in buf to write /// Number of bytes to write public override void Write(byte[] buffer, int offset, int count) + => WriteSyncOrAsync(buffer, offset, count, null).GetAwaiter().GetResult(); + + private async Task WriteSyncOrAsync(byte[] buffer, int offset, int count, CancellationToken? ct) { if (state_ == OutputState.Header) { - WriteHeader(); + if (ct.HasValue) + { + await WriteHeaderAsync(ct.Value).ConfigureAwait(false); + } + else + { + WriteHeader(); + } } if (state_ != OutputState.Footer) - { throw new InvalidOperationException("Write not permitted in current state"); - } - + crc.Update(new ArraySegment(buffer, offset, count)); - base.Write(buffer, offset, count); + + if (ct.HasValue) + { + await base.WriteAsync(buffer, offset, count, ct.Value).ConfigureAwait(false); + } + else + { + base.Write(buffer, offset, count); + } } + /// + /// Asynchronously write given buffer to output updating crc + /// + /// Buffer to write + /// Offset of first byte in buf to write + /// Number of bytes to write + /// The token to monitor for cancellation requests + public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken ct) + => await WriteSyncOrAsync(buffer, offset, count, ct).ConfigureAwait(false); + /// /// Writes remaining compressed output data to the output stream /// and closes it. @@ -185,6 +218,30 @@ protected override void Dispose(bool disposing) } } +#if NETSTANDARD2_1_OR_GREATER || NETCOREAPP3_1_OR_GREATER + /// + public override async ValueTask DisposeAsync() + { + try + { + await FinishAsync(CancellationToken.None).ConfigureAwait(false); + } + finally + { + if (state_ != OutputState.Closed) + { + state_ = OutputState.Closed; + if (IsStreamOwner) + { + await baseOutputStream_.DisposeAsync().ConfigureAwait(false); + } + } + + await base.DisposeAsync().ConfigureAwait(false); + } + } +#endif + /// /// Flushes the stream by ensuring the header is written, and then calling Flush /// on the deflater. 
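The new ModifiedTime property, together with the async overrides added to this file, makes it possible to produce reproducible gzip output without blocking; a minimal sketch, with the path, timestamp and text as placeholders:

```csharp
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using ICSharpCode.SharpZipLib.GZip;

public static class GZipAsyncExample
{
	public static async Task CompressAsync(string text, string path, CancellationToken ct)
	{
		using (var output = File.Create(path))
		using (var gzip = new GZipOutputStream(output))
		{
			// Pin the header timestamp instead of letting it default to "now".
			gzip.ModifiedTime = new DateTime(2022, 1, 1, 0, 0, 0, DateTimeKind.Utc);
			gzip.FileName = Path.GetFileNameWithoutExtension(path);

			var data = Encoding.UTF8.GetBytes(text);
			await gzip.WriteAsync(data, 0, data.Length, ct);
			await gzip.FinishAsync(ct);
		}
	}
}
```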
@@ -199,6 +256,16 @@ public override void Flush() base.Flush(); } + /// + public override async Task FlushAsync(CancellationToken ct) + { + if (state_ == OutputState.Header) + { + await WriteHeaderAsync(ct).ConfigureAwait(false); + } + await base.FlushAsync(ct).ConfigureAwait(false); + } + #endregion Stream overrides #region DeflaterOutputStream overrides @@ -218,74 +285,112 @@ public override void Finish() { state_ = OutputState.Finished; base.Finish(); - - var totalin = (uint)(deflater_.TotalIn & 0xffffffff); - var crcval = (uint)(crc.Value & 0xffffffff); - - byte[] gzipFooter; - - unchecked - { - gzipFooter = new byte[] { - (byte) crcval, (byte) (crcval >> 8), - (byte) (crcval >> 16), (byte) (crcval >> 24), - - (byte) totalin, (byte) (totalin >> 8), - (byte) (totalin >> 16), (byte) (totalin >> 24) - }; - } - + var gzipFooter = GetFooter(); baseOutputStream_.Write(gzipFooter, 0, gzipFooter.Length); } } + + /// + public override async Task FinishAsync(CancellationToken ct) + { + // If no data has been written a header should be added. + if (state_ == OutputState.Header) + { + await WriteHeaderAsync(ct).ConfigureAwait(false); + } + + if (state_ == OutputState.Footer) + { + state_ = OutputState.Finished; + await base.FinishAsync(ct).ConfigureAwait(false); + var gzipFooter = GetFooter(); + await baseOutputStream_.WriteAsync(gzipFooter, 0, gzipFooter.Length, ct).ConfigureAwait(false); + } + } #endregion DeflaterOutputStream overrides #region Support Routines - private static string CleanFilename(string path) - => path.Substring(path.LastIndexOf('/') + 1); - - private void WriteHeader() + private byte[] GetFooter() { - if (state_ == OutputState.Header) - { - state_ = OutputState.Footer; + var totalin = (uint)(deflater_.TotalIn & 0xffffffff); + var crcval = (uint)(crc.Value & 0xffffffff); - var mod_time = (int)((DateTime.Now.Ticks - new DateTime(1970, 1, 1).Ticks) / 10000000L); // Ticks give back 100ns intervals - byte[] gzipHeader = { - // The two magic bytes - GZipConstants.ID1, - GZipConstants.ID2, + byte[] gzipFooter; - // The compression type - GZipConstants.CompressionMethodDeflate, + unchecked + { + gzipFooter = new [] { + (byte) crcval, + (byte) (crcval >> 8), + (byte) (crcval >> 16), + (byte) (crcval >> 24), + (byte) totalin, + (byte) (totalin >> 8), + (byte) (totalin >> 16), + (byte) (totalin >> 24), + }; + } - // The flags (not set) - (byte)flags, + return gzipFooter; + } - // The modification time - (byte) mod_time, (byte) (mod_time >> 8), - (byte) (mod_time >> 16), (byte) (mod_time >> 24), + private byte[] GetHeader() + { + var modifiedUtc = ModifiedTime?.ToUniversalTime() ?? 
DateTime.UtcNow; + var modTime = (int)((modifiedUtc - new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc)).Ticks / 10000000L); // Ticks give back 100ns intervals + byte[] gzipHeader = { + // The two magic bytes + GZipConstants.ID1, + GZipConstants.ID2, - // The extra flags - 0, + // The compression type + GZipConstants.CompressionMethodDeflate, - // The OS type (unknown) - 255 - }; + // The flags (not set) + (byte)flags, - baseOutputStream_.Write(gzipHeader, 0, gzipHeader.Length); + // The modification time + (byte) modTime, (byte) (modTime >> 8), + (byte) (modTime >> 16), (byte) (modTime >> 24), - if (flags.HasFlag(GZipFlags.FNAME)) - { - var fname = GZipConstants.Encoding.GetBytes(fileName); - baseOutputStream_.Write(fname, 0, fname.Length); + // The extra flags + 0, - // End filename string with a \0 - baseOutputStream_.Write(new byte[] { 0 }, 0, 1); - } + // The OS type (unknown) + 255 + }; + + if (!flags.HasFlag(GZipFlags.FNAME)) + { + return gzipHeader; } + + + return gzipHeader + .Concat(GZipConstants.Encoding.GetBytes(fileName)) + .Concat(new byte []{0}) // End filename string with a \0 + .ToArray(); + } + + private static string CleanFilename(string path) + => path.Substring(path.LastIndexOf('/') + 1); + + private void WriteHeader() + { + if (state_ != OutputState.Header) return; + state_ = OutputState.Footer; + var gzipHeader = GetHeader(); + baseOutputStream_.Write(gzipHeader, 0, gzipHeader.Length); + } + + private async Task WriteHeaderAsync(CancellationToken ct) + { + if (state_ != OutputState.Header) return; + state_ = OutputState.Footer; + var gzipHeader = GetHeader(); + await baseOutputStream_.WriteAsync(gzipHeader, 0, gzipHeader.Length, ct).ConfigureAwait(false); } #endregion Support Routines diff --git a/src/ICSharpCode.SharpZipLib/ICSharpCode.SharpZipLib.csproj b/src/ICSharpCode.SharpZipLib/ICSharpCode.SharpZipLib.csproj index ca37ba1ae..49a1cd5cd 100644 --- a/src/ICSharpCode.SharpZipLib/ICSharpCode.SharpZipLib.csproj +++ b/src/ICSharpCode.SharpZipLib/ICSharpCode.SharpZipLib.csproj @@ -1,8 +1,10 @@  - netstandard2.0;netstandard2.1;net45 - True + netstandard2.0;netstandard2.1;net6.0 + true + true + true ../../assets/ICSharpCode.SharpZipLib.snk true true @@ -11,8 +13,8 @@ - 1.3.3 - $(Version).11 + 1.4.2 + $(Version).13 $(FileVersion) SharpZipLib ICSharpCode @@ -22,23 +24,28 @@ http://icsharpcode.github.io/SharpZipLib/ images/sharpziplib-nuget-256x256.png https://github.com/icsharpcode/SharpZipLib - Copyright © 2000-2021 SharpZipLib Contributors + Copyright © 2000-2022 SharpZipLib Contributors Compression Library Zip GZip BZip2 LZW Tar en-US -Please see https://github.com/icsharpcode/SharpZipLib/wiki/Release-1.3.3 for more information. +Please see https://github.com/icsharpcode/SharpZipLib/wiki/Release-1.4.2 for more information. https://github.com/icsharpcode/SharpZipLib - - + + + + + + + True images - + diff --git a/src/ICSharpCode.SharpZipLib/SharpZipLibOptions.cs b/src/ICSharpCode.SharpZipLib/SharpZipLibOptions.cs new file mode 100644 index 000000000..a6694e71e --- /dev/null +++ b/src/ICSharpCode.SharpZipLib/SharpZipLibOptions.cs @@ -0,0 +1,15 @@ +using ICSharpCode.SharpZipLib.Zip.Compression; + +namespace ICSharpCode.SharpZipLib +{ + /// + /// Global options to alter behavior. + /// + public static class SharpZipLibOptions + { + /// + /// The max pool size allowed for reusing instances, defaults to 0 (disabled). 
+ /// + public static int InflaterPoolSize { get; set; } = 0; + } +} diff --git a/src/ICSharpCode.SharpZipLib/Tar/TarArchive.cs b/src/ICSharpCode.SharpZipLib/Tar/TarArchive.cs index 6db6b23b9..878649017 100644 --- a/src/ICSharpCode.SharpZipLib/Tar/TarArchive.cs +++ b/src/ICSharpCode.SharpZipLib/Tar/TarArchive.cs @@ -356,8 +356,7 @@ public string RootPath { throw new ObjectDisposedException("TarArchive"); } - // Convert to forward slashes for matching. Trim trailing / for correct final path - rootPath = value.Replace('\\', '/').TrimEnd('/'); + rootPath = value.ToTarArchivePath().TrimEnd('/'); } } @@ -660,7 +659,9 @@ private void ExtractEntry(string destDir, TarEntry entry, bool allowParentTraver string destFile = Path.Combine(destDir, name); var destFileDir = Path.GetDirectoryName(Path.GetFullPath(destFile)) ?? ""; - if (!allowParentTraversal && !destFileDir.StartsWith(destDir, StringComparison.InvariantCultureIgnoreCase)) + var isRootDir = entry.IsDirectory && entry.Name == ""; + + if (!allowParentTraversal && !isRootDir && !destFileDir.StartsWith(destDir, StringComparison.InvariantCultureIgnoreCase)) { throw new InvalidNameException("Parent traversal in paths is not allowed"); } diff --git a/src/ICSharpCode.SharpZipLib/Tar/TarBuffer.cs b/src/ICSharpCode.SharpZipLib/Tar/TarBuffer.cs index 744c13189..a0f9bab80 100644 --- a/src/ICSharpCode.SharpZipLib/Tar/TarBuffer.cs +++ b/src/ICSharpCode.SharpZipLib/Tar/TarBuffer.cs @@ -1,5 +1,8 @@ using System; +using System.Buffers; using System.IO; +using System.Threading; +using System.Threading.Tasks; namespace ICSharpCode.SharpZipLib.Tar { @@ -72,10 +75,7 @@ or which contains garbage records after a zero block. /// This is equal to the multiplied by the public int RecordSize { - get - { - return recordSize; - } + get { return recordSize; } } /// @@ -95,10 +95,7 @@ public int GetRecordSize() /// This is the number of blocks in each record. public int BlockFactor { - get - { - return blockFactor; - } + get { return blockFactor; } } /// @@ -207,7 +204,7 @@ private void Initialize(int archiveBlockFactor) { blockFactor = archiveBlockFactor; recordSize = archiveBlockFactor * BlockSize; - recordBuffer = new byte[RecordSize]; + recordBuffer = ArrayPool.Shared.Rent(RecordSize); if (inputStream != null) { @@ -289,7 +286,14 @@ public static bool IsEndOfArchiveBlock(byte[] block) /// /// Skip over a block on the input stream. /// - public void SkipBlock() + public void SkipBlock() => SkipBlockAsync(CancellationToken.None, false).GetAwaiter().GetResult(); + + /// + /// Skip over a block on the input stream. 
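As a reminder of how the InflaterPool introduced earlier in this patch is switched on: it is controlled solely by SharpZipLibOptions.InflaterPoolSize, which defaults to 0 (disabled). A one-line configuration sketch; the value 4 is arbitrary.

using ICSharpCode.SharpZipLib;

// Set once at application startup, before creating any compression streams.
// Streams that rent from InflaterPool (e.g. GZipInputStream above) will then
// reuse up to 4 Inflater instances instead of allocating a new one each time.
SharpZipLibOptions.InflaterPoolSize = 4;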
+ /// + public Task SkipBlockAsync(CancellationToken ct) => SkipBlockAsync(ct, true).AsTask(); + + private async ValueTask SkipBlockAsync(CancellationToken ct, bool isAsync) { if (inputStream == null) { @@ -298,7 +302,7 @@ public void SkipBlock() if (currentBlockIndex >= BlockFactor) { - if (!ReadRecord()) + if (!await ReadRecordAsync(ct, isAsync).ConfigureAwait(false)) { throw new TarException("Failed to read a record"); } @@ -322,7 +326,7 @@ public byte[] ReadBlock() if (currentBlockIndex >= BlockFactor) { - if (!ReadRecord()) + if (!ReadRecordAsync(CancellationToken.None, false).GetAwaiter().GetResult()) { throw new TarException("Failed to read a record"); } @@ -335,13 +339,37 @@ public byte[] ReadBlock() return result; } + internal async ValueTask ReadBlockIntAsync(byte[] buffer, CancellationToken ct, bool isAsync) + { + if (buffer.Length != BlockSize) + { + throw new ArgumentException("BUG: buffer must have length BlockSize"); + } + + if (inputStream == null) + { + throw new TarException("TarBuffer.ReadBlock - no input stream defined"); + } + + if (currentBlockIndex >= BlockFactor) + { + if (!await ReadRecordAsync(ct, isAsync).ConfigureAwait(false)) + { + throw new TarException("Failed to read a record"); + } + } + + recordBuffer.AsSpan().Slice(currentBlockIndex * BlockSize, BlockSize).CopyTo(buffer); + currentBlockIndex++; + } + /// /// Read a record from data stream. /// /// /// false if End-Of-File, else true. /// - private bool ReadRecord() + private async ValueTask ReadRecordAsync(CancellationToken ct, bool isAsync) { if (inputStream == null) { @@ -355,7 +383,9 @@ private bool ReadRecord() while (bytesNeeded > 0) { - long numBytes = inputStream.Read(recordBuffer, offset, bytesNeeded); + long numBytes = isAsync + ? await inputStream.ReadAsync(recordBuffer, offset, bytesNeeded, ct).ConfigureAwait(false) + : inputStream.Read(recordBuffer, offset, bytesNeeded); // // NOTE @@ -372,6 +402,11 @@ private bool ReadRecord() // if (numBytes <= 0) { + // Fill the rest of the buffer with 0 to clear any left over data in the shared buffer + for (; offset < RecordSize; offset++) + { + recordBuffer[offset] = 0; + } break; } @@ -438,6 +473,18 @@ public int GetCurrentRecordNum() return currentRecordIndex; } + /// + /// Write a block of data to the archive. + /// + /// + /// The data to write to the archive. + /// + /// + public ValueTask WriteBlockAsync(byte[] block, CancellationToken ct) + { + return WriteBlockAsync(block, 0, ct); + } + /// /// Write a block of data to the archive. /// @@ -446,30 +493,24 @@ public int GetCurrentRecordNum() /// public void WriteBlock(byte[] block) { - if (block == null) - { - throw new ArgumentNullException(nameof(block)); - } - - if (outputStream == null) - { - throw new TarException("TarBuffer.WriteBlock - no output stream defined"); - } - - if (block.Length != BlockSize) - { - string errorText = string.Format("TarBuffer.WriteBlock - block to write has length '{0}' which is not the block size of '{1}'", - block.Length, BlockSize); - throw new TarException(errorText); - } - - if (currentBlockIndex >= BlockFactor) - { - WriteRecord(); - } + WriteBlock(block, 0); + } - Array.Copy(block, 0, recordBuffer, (currentBlockIndex * BlockSize), BlockSize); - currentBlockIndex++; + /// + /// Write an archive record to the archive, where the record may be + /// inside of a larger array buffer. The buffer must be "offset plus + /// record size" long. + /// + /// + /// The buffer containing the record data to write. + /// + /// + /// The offset of the record data within buffer. 
+ /// + /// + public ValueTask WriteBlockAsync(byte[] buffer, int offset, CancellationToken ct) + { + return WriteBlockAsync(buffer, offset, ct, true); } /// @@ -484,6 +525,11 @@ public void WriteBlock(byte[] block) /// The offset of the record data within buffer. /// public void WriteBlock(byte[] buffer, int offset) + { + WriteBlockAsync(buffer, offset, CancellationToken.None, false).GetAwaiter().GetResult(); + } + + internal async ValueTask WriteBlockAsync(byte[] buffer, int offset, CancellationToken ct, bool isAsync) { if (buffer == null) { @@ -502,14 +548,15 @@ public void WriteBlock(byte[] buffer, int offset) if ((offset + BlockSize) > buffer.Length) { - string errorText = string.Format("TarBuffer.WriteBlock - record has length '{0}' with offset '{1}' which is less than the record size of '{2}'", + string errorText = string.Format( + "TarBuffer.WriteBlock - record has length '{0}' with offset '{1}' which is less than the record size of '{2}'", buffer.Length, offset, recordSize); throw new TarException(errorText); } if (currentBlockIndex >= BlockFactor) { - WriteRecord(); + await WriteRecordAsync(CancellationToken.None, isAsync).ConfigureAwait(false); } Array.Copy(buffer, offset, recordBuffer, (currentBlockIndex * BlockSize), BlockSize); @@ -520,15 +567,23 @@ public void WriteBlock(byte[] buffer, int offset) /// /// Write a TarBuffer record to the archive. /// - private void WriteRecord() + private async ValueTask WriteRecordAsync(CancellationToken ct, bool isAsync) { if (outputStream == null) { throw new TarException("TarBuffer.WriteRecord no output stream defined"); } - outputStream.Write(recordBuffer, 0, RecordSize); - outputStream.Flush(); + if (isAsync) + { + await outputStream.WriteAsync(recordBuffer, 0, RecordSize, ct).ConfigureAwait(false); + await outputStream.FlushAsync(ct).ConfigureAwait(false); + } + else + { + outputStream.Write(recordBuffer, 0, RecordSize); + outputStream.Flush(); + } currentBlockIndex = 0; currentRecordIndex++; @@ -539,7 +594,7 @@ private void WriteRecord() /// /// Any trailing bytes are set to zero which is by definition correct behaviour /// for the end of a tar stream. - private void WriteFinalRecord() + private async ValueTask WriteFinalRecordAsync(CancellationToken ct, bool isAsync) { if (outputStream == null) { @@ -550,36 +605,77 @@ private void WriteFinalRecord() { int dataBytes = currentBlockIndex * BlockSize; Array.Clear(recordBuffer, dataBytes, RecordSize - dataBytes); - WriteRecord(); + await WriteRecordAsync(ct, isAsync).ConfigureAwait(false); } - outputStream.Flush(); + if (isAsync) + { + await outputStream.FlushAsync(ct).ConfigureAwait(false); + } + else + { + outputStream.Flush(); + } } /// /// Close the TarBuffer. If this is an output buffer, also flush the /// current block before closing. /// - public void Close() + public void Close() => CloseAsync(CancellationToken.None, false).GetAwaiter().GetResult(); + + /// + /// Close the TarBuffer. If this is an output buffer, also flush the + /// current block before closing. 
+ /// + public Task CloseAsync(CancellationToken ct) => CloseAsync(ct, true).AsTask(); + + private async ValueTask CloseAsync(CancellationToken ct, bool isAsync) { if (outputStream != null) { - WriteFinalRecord(); + await WriteFinalRecordAsync(ct, isAsync).ConfigureAwait(false); if (IsStreamOwner) { - outputStream.Dispose(); + if (isAsync) + { +#if NETSTANDARD2_1_OR_GREATER || NETCOREAPP3_1_OR_GREATER + await outputStream.DisposeAsync().ConfigureAwait(false); +#else + outputStream.Dispose(); +#endif + } + else + { + outputStream.Dispose(); + } } + outputStream = null; } else if (inputStream != null) { if (IsStreamOwner) { - inputStream.Dispose(); + if (isAsync) + { +#if NETSTANDARD2_1_OR_GREATER || NETCOREAPP3_1_OR_GREATER + await inputStream.DisposeAsync().ConfigureAwait(false); +#else + inputStream.Dispose(); +#endif + } + else + { + inputStream.Dispose(); + } } + inputStream = null; } + + ArrayPool.Shared.Return(recordBuffer); } #region Instance Fields diff --git a/src/ICSharpCode.SharpZipLib/Tar/TarEntry.cs b/src/ICSharpCode.SharpZipLib/Tar/TarEntry.cs index 262c12ad3..82c813367 100644 --- a/src/ICSharpCode.SharpZipLib/Tar/TarEntry.cs +++ b/src/ICSharpCode.SharpZipLib/Tar/TarEntry.cs @@ -114,7 +114,8 @@ public object Clone() public static TarEntry CreateTarEntry(string name) { var entry = new TarEntry(); - TarEntry.NameTarHeader(entry.header, name); + + entry.NameTarHeader(name); return entry; } @@ -188,10 +189,7 @@ public bool IsDescendent(TarEntry toTest) /// public TarHeader TarHeader { - get - { - return header; - } + get { return header; } } /// @@ -199,14 +197,8 @@ public TarHeader TarHeader /// public string Name { - get - { - return header.Name; - } - set - { - header.Name = value; - } + get { return header.Name; } + set { header.Name = value; } } /// @@ -214,14 +206,8 @@ public string Name /// public int UserId { - get - { - return header.UserId; - } - set - { - header.UserId = value; - } + get { return header.UserId; } + set { header.UserId = value; } } /// @@ -229,14 +215,8 @@ public int UserId /// public int GroupId { - get - { - return header.GroupId; - } - set - { - header.GroupId = value; - } + get { return header.GroupId; } + set { header.GroupId = value; } } /// @@ -244,14 +224,8 @@ public int GroupId /// public string UserName { - get - { - return header.UserName; - } - set - { - header.UserName = value; - } + get { return header.UserName; } + set { header.UserName = value; } } /// @@ -259,14 +233,8 @@ public string UserName /// public string GroupName { - get - { - return header.GroupName; - } - set - { - header.GroupName = value; - } + get { return header.GroupName; } + set { header.GroupName = value; } } /// @@ -304,14 +272,8 @@ public void SetNames(string userName, string groupName) /// public DateTime ModTime { - get - { - return header.ModTime; - } - set - { - header.ModTime = value; - } + get { return header.ModTime; } + set { header.ModTime = value; } } /// @@ -322,10 +284,7 @@ public DateTime ModTime /// public string File { - get - { - return file; - } + get { return file; } } /// @@ -333,14 +292,8 @@ public string File /// public long Size { - get - { - return header.Size; - } - set - { - header.Size = value; - } + get { return header.Size; } + set { header.Size = value; } } /// @@ -419,15 +372,10 @@ public void GetFileTarHeader(TarHeader header, string file) } */ - name = name.Replace(Path.DirectorySeparatorChar, '/'); - // No absolute pathnames // Windows (and Posix?) paths can start with UNC style "\\NetworkDrive\", // so we loop on starting /'s. 
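To illustrate the TarEntry header properties reorganised above (Name, Size, ModTime, user/group fields), a small sketch of building an entry by name before putting it on an output stream; all values here are invented.

using System;
using ICSharpCode.SharpZipLib.Tar;

// CreateTarEntry fills in a header from the name only; the remaining
// fields are set explicitly before the entry is written to an archive.
TarEntry entry = TarEntry.CreateTarEntry("docs/readme.txt");
entry.Size = 1024;               // must match the number of bytes written for the entry
entry.ModTime = DateTime.UtcNow; // TarHeader stores this with one-second resolution
entry.UserName = "builder";
entry.GroupName = "staff";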
- while (name.StartsWith("/", StringComparison.Ordinal)) - { - name = name.Substring(1); - } + name = name.ToTarArchivePath(); header.LinkName = String.Empty; header.Name = name; @@ -450,7 +398,8 @@ public void GetFileTarHeader(TarHeader header, string file) header.Size = new FileInfo(file.Replace('/', Path.DirectorySeparatorChar)).Length; } - header.ModTime = System.IO.File.GetLastWriteTime(file.Replace('/', Path.DirectorySeparatorChar)).ToUniversalTime(); + header.ModTime = System.IO.File.GetLastWriteTime(file.Replace('/', Path.DirectorySeparatorChar)) + .ToUniversalTime(); header.DevMajor = 0; header.DevMinor = 0; } @@ -543,19 +492,11 @@ static public void AdjustEntryName(byte[] buffer, string newName, Encoding nameE /// /// Fill in a TarHeader given only the entry's name. /// - /// - /// The TarHeader to fill in. - /// /// /// The tar entry name. /// - static public void NameTarHeader(TarHeader header, string name) + public void NameTarHeader(string name) { - if (header == null) - { - throw new ArgumentNullException(nameof(header)); - } - if (name == null) { throw new ArgumentNullException(nameof(name)); diff --git a/src/ICSharpCode.SharpZipLib/Tar/TarExtendedHeaderReader.cs b/src/ICSharpCode.SharpZipLib/Tar/TarExtendedHeaderReader.cs index d1d438ad0..b711e6d54 100644 --- a/src/ICSharpCode.SharpZipLib/Tar/TarExtendedHeaderReader.cs +++ b/src/ICSharpCode.SharpZipLib/Tar/TarExtendedHeaderReader.cs @@ -1,4 +1,5 @@ -using System.Collections.Generic; +using System; +using System.Collections.Generic; using System.Text; namespace ICSharpCode.SharpZipLib.Tar @@ -26,7 +27,10 @@ public class TarExtendedHeaderReader private int state = LENGTH; - private static readonly byte[] StateNext = new[] { (byte)' ', (byte)'=', (byte)'\n' }; + private int currHeaderLength; + private int currHeaderRead; + + private static readonly byte[] StateNext = { (byte)' ', (byte)'=', (byte)'\n' }; /// /// Creates a new . @@ -46,23 +50,46 @@ public void Read(byte[] buffer, int length) for (int i = 0; i < length; i++) { byte next = buffer[i]; + + var foundStateEnd = state == VALUE + ? currHeaderRead == currHeaderLength -1 + : next == StateNext[state]; - if (next == StateNext[state]) + if (foundStateEnd) { Flush(); headerParts[state] = sb.ToString(); sb.Clear(); - + if (++state == END) { - headers.Add(headerParts[KEY], headerParts[VALUE]); + if (!headers.ContainsKey(headerParts[KEY])) + { + headers.Add(headerParts[KEY], headerParts[VALUE]); + } + headerParts = new string[3]; + currHeaderLength = 0; + currHeaderRead = 0; state = LENGTH; } + else + { + currHeaderRead++; + } + + + if (state != VALUE) continue; + + if (int.TryParse(headerParts[LENGTH], out var vl)) + { + currHeaderLength = vl; + } } else { byteBuffer[bbIndex++] = next; + currHeaderRead++; if (bbIndex == 4) Flush(); } diff --git a/src/ICSharpCode.SharpZipLib/Tar/TarHeader.cs b/src/ICSharpCode.SharpZipLib/Tar/TarHeader.cs index 3bd1bdffe..2ef3777aa 100644 --- a/src/ICSharpCode.SharpZipLib/Tar/TarHeader.cs +++ b/src/ICSharpCode.SharpZipLib/Tar/TarHeader.cs @@ -1,5 +1,7 @@ using System; +using System.Buffers; using System.Text; +using ICSharpCode.SharpZipLib.Core; namespace ICSharpCode.SharpZipLib.Tar { @@ -124,106 +126,106 @@ public class TarHeader /// /// Normal file type. /// - public const byte LF_NORMAL = (byte)'0'; + public const byte LF_NORMAL = (byte) '0'; /// /// Link file type. /// - public const byte LF_LINK = (byte)'1'; + public const byte LF_LINK = (byte) '1'; /// /// Symbolic link file type. 
/// - public const byte LF_SYMLINK = (byte)'2'; + public const byte LF_SYMLINK = (byte) '2'; /// /// Character device file type. /// - public const byte LF_CHR = (byte)'3'; + public const byte LF_CHR = (byte) '3'; /// /// Block device file type. /// - public const byte LF_BLK = (byte)'4'; + public const byte LF_BLK = (byte) '4'; /// /// Directory file type. /// - public const byte LF_DIR = (byte)'5'; + public const byte LF_DIR = (byte) '5'; /// /// FIFO (pipe) file type. /// - public const byte LF_FIFO = (byte)'6'; + public const byte LF_FIFO = (byte) '6'; /// /// Contiguous file type. /// - public const byte LF_CONTIG = (byte)'7'; + public const byte LF_CONTIG = (byte) '7'; /// /// Posix.1 2001 global extended header /// - public const byte LF_GHDR = (byte)'g'; + public const byte LF_GHDR = (byte) 'g'; /// /// Posix.1 2001 extended header /// - public const byte LF_XHDR = (byte)'x'; + public const byte LF_XHDR = (byte) 'x'; // POSIX allows for upper case ascii type as extensions /// /// Solaris access control list file type /// - public const byte LF_ACL = (byte)'A'; + public const byte LF_ACL = (byte) 'A'; /// /// GNU dir dump file type /// This is a dir entry that contains the names of files that were in the /// dir at the time the dump was made /// - public const byte LF_GNU_DUMPDIR = (byte)'D'; + public const byte LF_GNU_DUMPDIR = (byte) 'D'; /// /// Solaris Extended Attribute File /// - public const byte LF_EXTATTR = (byte)'E'; + public const byte LF_EXTATTR = (byte) 'E'; /// /// Inode (metadata only) no file content /// - public const byte LF_META = (byte)'I'; + public const byte LF_META = (byte) 'I'; /// /// Identifies the next file on the tape as having a long link name /// - public const byte LF_GNU_LONGLINK = (byte)'K'; + public const byte LF_GNU_LONGLINK = (byte) 'K'; /// /// Identifies the next file on the tape as having a long name /// - public const byte LF_GNU_LONGNAME = (byte)'L'; + public const byte LF_GNU_LONGNAME = (byte) 'L'; /// /// Continuation of a file that began on another volume /// - public const byte LF_GNU_MULTIVOL = (byte)'M'; + public const byte LF_GNU_MULTIVOL = (byte) 'M'; /// /// For storing filenames that dont fit in the main header (old GNU) /// - public const byte LF_GNU_NAMES = (byte)'N'; + public const byte LF_GNU_NAMES = (byte) 'N'; /// /// GNU Sparse file /// - public const byte LF_GNU_SPARSE = (byte)'S'; + public const byte LF_GNU_SPARSE = (byte) 'S'; /// /// GNU Tape/volume header ignore on extraction /// - public const byte LF_GNU_VOLHDR = (byte)'V'; + public const byte LF_GNU_VOLHDR = (byte) 'V'; /// /// The magic tag representing a POSIX tar archive. 
(would be written with a trailing NULL) @@ -235,7 +237,7 @@ public class TarHeader /// public const string GNU_TMAGIC = "ustar "; - private const long timeConversionFactor = 10000000L; // 1 tick == 100 nanoseconds + private const long timeConversionFactor = 10000000L; // 1 tick == 100 nanoseconds private static readonly DateTime dateTime1970 = new DateTime(1970, 1, 1, 0, 0, 0, 0); #endregion Constants @@ -277,6 +279,7 @@ public string Name { throw new ArgumentNullException(nameof(value)); } + name = value; } } @@ -339,6 +342,7 @@ public long Size { throw new ArgumentOutOfRangeException(nameof(value), "Cannot be less than zero"); } + size = value; } } @@ -359,6 +363,7 @@ public DateTime ModTime { throw new ArgumentOutOfRangeException(nameof(value), "ModTime cannot be before Jan 1st 1970"); } + modTime = new DateTime(value.Year, value.Month, value.Day, value.Hour, value.Minute, value.Second); } } @@ -401,6 +406,7 @@ public string LinkName { throw new ArgumentNullException(nameof(value)); } + linkName = value; } } @@ -418,6 +424,7 @@ public string Magic { throw new ArgumentNullException(nameof(value)); } + magic = value; } } @@ -428,10 +435,7 @@ public string Magic /// Thrown when attempting to set Version to null. public string Version { - get - { - return version; - } + get { return version; } set { @@ -439,6 +443,7 @@ public string Version { throw new ArgumentNullException(nameof(value)); } + version = value; } } @@ -462,6 +467,7 @@ public string UserName { currentUser = currentUser.Substring(0, UNAMELEN); } + userName = currentUser; } } @@ -539,17 +545,18 @@ public void ParseBuffer(byte[] header, Encoding nameEncoding) } int offset = 0; + var headerSpan = header.AsSpan(); - name = ParseName(header, offset, NAMELEN, nameEncoding).ToString(); + name = ParseName(headerSpan.Slice(offset, NAMELEN), nameEncoding); offset += NAMELEN; - mode = (int)ParseOctal(header, offset, MODELEN); + mode = (int) ParseOctal(header, offset, MODELEN); offset += MODELEN; - UserId = (int)ParseOctal(header, offset, UIDLEN); + UserId = (int) ParseOctal(header, offset, UIDLEN); offset += UIDLEN; - GroupId = (int)ParseOctal(header, offset, GIDLEN); + GroupId = (int) ParseOctal(header, offset, GIDLEN); offset += GIDLEN; Size = ParseBinaryOrOctal(header, offset, SIZELEN); @@ -558,35 +565,35 @@ public void ParseBuffer(byte[] header, Encoding nameEncoding) ModTime = GetDateTimeFromCTime(ParseOctal(header, offset, MODTIMELEN)); offset += MODTIMELEN; - checksum = (int)ParseOctal(header, offset, CHKSUMLEN); + checksum = (int) ParseOctal(header, offset, CHKSUMLEN); offset += CHKSUMLEN; TypeFlag = header[offset++]; - LinkName = ParseName(header, offset, NAMELEN, nameEncoding).ToString(); + LinkName = ParseName(headerSpan.Slice(offset, NAMELEN), nameEncoding); offset += NAMELEN; - Magic = ParseName(header, offset, MAGICLEN, nameEncoding).ToString(); + Magic = ParseName(headerSpan.Slice(offset, MAGICLEN), nameEncoding); offset += MAGICLEN; if (Magic == "ustar") { - Version = ParseName(header, offset, VERSIONLEN, nameEncoding).ToString(); + Version = ParseName(headerSpan.Slice(offset, VERSIONLEN), nameEncoding); offset += VERSIONLEN; - UserName = ParseName(header, offset, UNAMELEN, nameEncoding).ToString(); + UserName = ParseName(headerSpan.Slice(offset, UNAMELEN), nameEncoding); offset += UNAMELEN; - GroupName = ParseName(header, offset, GNAMELEN, nameEncoding).ToString(); + GroupName = ParseName(headerSpan.Slice(offset, GNAMELEN), nameEncoding); offset += GNAMELEN; - DevMajor = (int)ParseOctal(header, offset, DEVLEN); + 
DevMajor = (int) ParseOctal(header, offset, DEVLEN); offset += DEVLEN; - DevMinor = (int)ParseOctal(header, offset, DEVLEN); + DevMinor = (int) ParseOctal(header, offset, DEVLEN); offset += DEVLEN; - string prefix = ParseName(header, offset, PREFIXLEN, nameEncoding).ToString(); + string prefix = ParseName(headerSpan.Slice(offset, PREFIXLEN), nameEncoding); if (!string.IsNullOrEmpty(prefix)) Name = prefix + '/' + Name; } @@ -640,7 +647,7 @@ public void WriteHeader(byte[] outBuffer, Encoding nameEncoding) int csOffset = offset; for (int c = 0; c < CHKSUMLEN; ++c) { - outBuffer[offset++] = (byte)' '; + outBuffer[offset++] = (byte) ' '; } outBuffer[offset++] = TypeFlag; @@ -690,25 +697,26 @@ public override bool Equals(object obj) if (localHeader != null) { result = (name == localHeader.name) - && (mode == localHeader.mode) - && (UserId == localHeader.UserId) - && (GroupId == localHeader.GroupId) - && (Size == localHeader.Size) - && (ModTime == localHeader.ModTime) - && (Checksum == localHeader.Checksum) - && (TypeFlag == localHeader.TypeFlag) - && (LinkName == localHeader.LinkName) - && (Magic == localHeader.Magic) - && (Version == localHeader.Version) - && (UserName == localHeader.UserName) - && (GroupName == localHeader.GroupName) - && (DevMajor == localHeader.DevMajor) - && (DevMinor == localHeader.DevMinor); + && (mode == localHeader.mode) + && (UserId == localHeader.UserId) + && (GroupId == localHeader.GroupId) + && (Size == localHeader.Size) + && (ModTime == localHeader.ModTime) + && (Checksum == localHeader.Checksum) + && (TypeFlag == localHeader.TypeFlag) + && (LinkName == localHeader.LinkName) + && (Magic == localHeader.Magic) + && (Version == localHeader.Version) + && (UserName == localHeader.UserName) + && (GroupName == localHeader.GroupName) + && (DevMajor == localHeader.DevMajor) + && (DevMinor == localHeader.DevMinor); } else { result = false; } + return result; } @@ -719,7 +727,7 @@ public override bool Equals(object obj) /// Value to apply as a default for userName. /// Value to apply as a default for groupId. /// Value to apply as a default for groupName. - static internal void SetValueDefaults(int userId, string userName, int groupId, string groupName) + internal static void SetValueDefaults(int userId, string userName, int groupId, string groupName) { defaultUserId = userIdAsSet = userId; defaultUser = userNameAsSet = userName; @@ -727,7 +735,7 @@ static internal void SetValueDefaults(int userId, string userName, int groupId, defaultGroupName = groupNameAsSet = groupName; } - static internal void RestoreSetValues() + internal static void RestoreSetValues() { defaultUserId = userIdAsSet; defaultUser = userNameAsSet; @@ -737,7 +745,7 @@ static internal void RestoreSetValues() // Return value that may be stored in octal or binary. Length must exceed 8. // - static private long ParseBinaryOrOctal(byte[] header, int offset, int length) + private static long ParseBinaryOrOctal(byte[] header, int offset, int length) { if (header[offset] >= 0x80) { @@ -747,8 +755,10 @@ static private long ParseBinaryOrOctal(byte[] header, int offset, int length) { result = result << 8 | header[offset + pos]; } + return result; } + return ParseOctal(header, offset, length); } @@ -759,7 +769,7 @@ static private long ParseBinaryOrOctal(byte[] header, int offset, int length) /// The offset into the buffer from which to parse. /// The number of header bytes to parse. /// The long equivalent of the octal string. 
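A worked example of the octal parsing documented above; ParseOctal is public, so this can be tried directly. The buffer mimics a mode field: leading zeros count as padding and the trailing space terminates the number.

using System.Text;
using ICSharpCode.SharpZipLib.Tar;

byte[] field = Encoding.ASCII.GetBytes("000644 \0");
long mode = TarHeader.ParseOctal(field, 0, field.Length);
// mode == 420, i.e. octal 644 (rw-r--r--)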
- static public long ParseOctal(byte[] header, int offset, int length) + public static long ParseOctal(byte[] header, int offset, int length) { if (header == null) { @@ -777,14 +787,14 @@ static public long ParseOctal(byte[] header, int offset, int length) break; } - if (header[i] == (byte)' ' || header[i] == '0') + if (header[i] == (byte) ' ' || header[i] == '0') { if (stillPadding) { continue; } - if (header[i] == (byte)' ') + if (header[i] == (byte) ' ') { break; } @@ -814,9 +824,9 @@ static public long ParseOctal(byte[] header, int offset, int length) /// The name parsed. /// [Obsolete("No Encoding for Name field is specified, any non-ASCII bytes will be discarded")] - static public StringBuilder ParseName(byte[] header, int offset, int length) + public static string ParseName(byte[] header, int offset, int length) { - return ParseName(header, offset, length, null); + return ParseName(header.AsSpan().Slice(offset, length), null); } /// @@ -825,66 +835,50 @@ static public StringBuilder ParseName(byte[] header, int offset, int length) /// /// The header buffer from which to parse. /// - /// - /// The offset into the buffer from which to parse. - /// - /// - /// The number of header bytes to parse. - /// /// /// name encoding, or null for ASCII only /// /// /// The name parsed. /// - static public StringBuilder ParseName(byte[] header, int offset, int length, Encoding encoding) + public static string ParseName(ReadOnlySpan header, Encoding encoding) { - if (header == null) - { - throw new ArgumentNullException(nameof(header)); - } - - if (offset < 0) - { - throw new ArgumentOutOfRangeException(nameof(offset), "Cannot be less than zero"); - } - - if (length < 0) - { - throw new ArgumentOutOfRangeException(nameof(length), "Cannot be less than zero"); - } - - if (offset + length > header.Length) - { - throw new ArgumentException("Exceeds header size", nameof(length)); - } - - var result = new StringBuilder(length); + var builder = StringBuilderPool.Instance.Rent(); int count = 0; - if(encoding == null) + if (encoding == null) { - for (int i = offset; i < offset + length; ++i) + for (int i = 0; i < header.Length; ++i) { - if (header[i] == 0) + var b = header[i]; + if (b == 0) { break; } - result.Append((char)header[i]); + + builder.Append((char) b); } } else { - for(int i = offset; i < offset + length; ++i, ++count) + for (int i = 0; i < header.Length; ++i, ++count) { - if(header[i] == 0) + if (header[i] == 0) { break; } } - result.Append(encoding.GetString(header, offset, count)); + +#if NETSTANDARD2_1_OR_GREATER || NETCOREAPP3_1_OR_GREATER + var value = encoding.GetString(header.Slice(0, count)); +#else + var value = encoding.GetString(header.ToArray(), 0, count); +#endif + builder.Append(value); } + var result = builder.ToString(); + StringBuilderPool.Instance.Return(builder); return result; } @@ -926,7 +920,8 @@ public static int GetNameBytes(string name, int nameOffset, byte[] buffer, int b /// The number of characters/bytes to add /// name encoding, or null for ASCII only /// The next free index in the - public static int GetNameBytes(string name, int nameOffset, byte[] buffer, int bufferOffset, int length, Encoding encoding) + public static int GetNameBytes(string name, int nameOffset, byte[] buffer, int bufferOffset, int length, + Encoding encoding) { if (name == null) { @@ -939,20 +934,24 @@ public static int GetNameBytes(string name, int nameOffset, byte[] buffer, int b } int i; - if(encoding != null) + if (encoding != null) { // it can be more sufficient if using Span or unsafe - 
var nameArray = name.ToCharArray(nameOffset, Math.Min(name.Length - nameOffset, length)); + ReadOnlySpan nameArray = + name.AsSpan().Slice(nameOffset, Math.Min(name.Length - nameOffset, length)); + var charArray = ArrayPool.Shared.Rent(nameArray.Length); + nameArray.CopyTo(charArray); + // it can be more sufficient if using Span(or unsafe?) and ArrayPool for temporary buffer - var bytes = encoding.GetBytes(nameArray, 0, nameArray.Length); - i = Math.Min(bytes.Length, length); - Array.Copy(bytes, 0, buffer, bufferOffset, i); + var bytesLength = encoding.GetBytes(charArray, 0, nameArray.Length, buffer, bufferOffset); + ArrayPool.Shared.Return(charArray); + i = Math.Min(bytesLength, length); } else { for (i = 0; i < length && nameOffset + i < name.Length; ++i) { - buffer[bufferOffset + i] = (byte)name[nameOffset + i]; + buffer[bufferOffset + i] = (byte) name[nameOffset + i]; } } @@ -960,8 +959,10 @@ public static int GetNameBytes(string name, int nameOffset, byte[] buffer, int b { buffer[bufferOffset + i] = 0; } + return bufferOffset + length; } + /// /// Add an entry name to the buffer /// @@ -1060,6 +1061,7 @@ public static int GetNameBytes(string name, byte[] buffer, int offset, int lengt return GetNameBytes(name, 0, buffer, offset, length, encoding); } + /// /// Add a string to a buffer as a collection of ascii bytes. /// @@ -1085,7 +1087,8 @@ public static int GetAsciiBytes(string toAdd, int nameOffset, byte[] buffer, int /// The number of ascii characters to add. /// String encoding, or null for ASCII only /// The next free index in the buffer. - public static int GetAsciiBytes(string toAdd, int nameOffset, byte[] buffer, int bufferOffset, int length, Encoding encoding) + public static int GetAsciiBytes(string toAdd, int nameOffset, byte[] buffer, int bufferOffset, int length, + Encoding encoding) { if (toAdd == null) { @@ -1098,11 +1101,11 @@ public static int GetAsciiBytes(string toAdd, int nameOffset, byte[] buffer, int } int i; - if(encoding == null) + if (encoding == null) { for (i = 0; i < length && nameOffset + i < toAdd.Length; ++i) { - buffer[bufferOffset + i] = (byte)toAdd[nameOffset + i]; + buffer[bufferOffset + i] = (byte) toAdd[nameOffset + i]; } } else @@ -1114,6 +1117,7 @@ public static int GetAsciiBytes(string toAdd, int nameOffset, byte[] buffer, int i = Math.Min(bytes.Length, length); Array.Copy(bytes, 0, buffer, bufferOffset, i); } + // If length is beyond the toAdd string length (which is OK by the prev loop condition), eg if a field has fixed length and the string is shorter, make sure all of the extra chars are written as NULLs, so that the reader func would ignore them and get back the original string for (; i < length; ++i) buffer[bufferOffset + i] = 0; @@ -1155,14 +1159,14 @@ public static int GetOctalBytes(long value, byte[] buffer, int offset, int lengt { for (long v = value; (localIndex >= 0) && (v > 0); --localIndex) { - buffer[offset + localIndex] = (byte)((byte)'0' + (byte)(v & 7)); + buffer[offset + localIndex] = (byte) ((byte) '0' + (byte) (v & 7)); v >>= 3; } } for (; localIndex >= 0; --localIndex) { - buffer[offset + localIndex] = (byte)'0'; + buffer[offset + localIndex] = (byte) '0'; } return offset + length; @@ -1179,16 +1183,19 @@ public static int GetOctalBytes(long value, byte[] buffer, int offset, int lengt private static int GetBinaryOrOctalBytes(long value, byte[] buffer, int offset, int length) { if (value > 0x1FFFFFFFF) - { // Octal 77777777777 (11 digits) - // Put value as binary, right-justified into the buffer. 
Set high order bit of left-most byte. + { + // Octal 77777777777 (11 digits) + // Put value as binary, right-justified into the buffer. Set high order bit of left-most byte. for (int pos = length - 1; pos > 0; pos--) { - buffer[offset + pos] = (byte)value; + buffer[offset + pos] = (byte) value; value = value >> 8; } + buffer[offset] = 0x80; return offset + length; } + return GetOctalBytes(value, buffer, offset, length); } @@ -1222,6 +1229,7 @@ private static int ComputeCheckSum(byte[] buffer) { sum += buffer[i]; } + return sum; } @@ -1240,19 +1248,20 @@ private static int MakeCheckSum(byte[] buffer) for (int i = 0; i < CHKSUMLEN; ++i) { - sum += (byte)' '; + sum += (byte) ' '; } for (int i = CHKSUMOFS + CHKSUMLEN; i < buffer.Length; ++i) { sum += buffer[i]; } + return sum; } private static int GetCTime(DateTime dateTime) { - return unchecked((int)((dateTime.Ticks - dateTime1970.Ticks) / timeConversionFactor)); + return unchecked((int) ((dateTime.Ticks - dateTime1970.Ticks) / timeConversionFactor)); } private static DateTime GetDateTimeFromCTime(long ticks) @@ -1267,6 +1276,7 @@ private static DateTime GetDateTimeFromCTime(long ticks) { result = dateTime1970; } + return result; } @@ -1294,16 +1304,16 @@ private static DateTime GetDateTimeFromCTime(long ticks) #region Class Fields // Values used during recursive operations. - static internal int userIdAsSet; + internal static int userIdAsSet; - static internal int groupIdAsSet; - static internal string userNameAsSet; - static internal string groupNameAsSet = "None"; + internal static int groupIdAsSet; + internal static string userNameAsSet; + internal static string groupNameAsSet = "None"; - static internal int defaultUserId; - static internal int defaultGroupId; - static internal string defaultGroupName = "None"; - static internal string defaultUser; + internal static int defaultUserId; + internal static int defaultGroupId; + internal static string defaultGroupName = "None"; + internal static string defaultUser; #endregion Class Fields } diff --git a/src/ICSharpCode.SharpZipLib/Tar/TarInputStream.cs b/src/ICSharpCode.SharpZipLib/Tar/TarInputStream.cs index f1a3622de..2cd646ae9 100644 --- a/src/ICSharpCode.SharpZipLib/Tar/TarInputStream.cs +++ b/src/ICSharpCode.SharpZipLib/Tar/TarInputStream.cs @@ -1,6 +1,10 @@ using System; +using System.Buffers; using System.IO; using System.Text; +using System.Threading; +using System.Threading.Tasks; +using ICSharpCode.SharpZipLib.Core; namespace ICSharpCode.SharpZipLib.Tar { @@ -23,6 +27,7 @@ public TarInputStream(Stream inputStream) : this(inputStream, TarBuffer.DefaultBlockFactor, null) { } + /// /// Construct a TarInputStream with default block factor /// @@ -79,10 +84,7 @@ public bool IsStreamOwner /// public override bool CanRead { - get - { - return inputStream.CanRead; - } + get { return inputStream.CanRead; } } /// @@ -91,10 +93,7 @@ public override bool CanRead /// public override bool CanSeek { - get - { - return false; - } + get { return false; } } /// @@ -103,10 +102,7 @@ public override bool CanSeek /// public override bool CanWrite { - get - { - return false; - } + get { return false; } } /// @@ -114,10 +110,7 @@ public override bool CanWrite /// public override long Length { - get - { - return inputStream.Length; - } + get { return inputStream.Length; } } /// @@ -127,14 +120,8 @@ public override long Length /// Any attempt to set position public override long Position { - get - { - return inputStream.Position; - } - set - { - throw new NotSupportedException("TarInputStream Seek not 
supported"); - } + get { return inputStream.Position; } + set { throw new NotSupportedException("TarInputStream Seek not supported"); } } /// @@ -145,6 +132,15 @@ public override void Flush() inputStream.Flush(); } + /// + /// Flushes the baseInputStream + /// + /// + public override async Task FlushAsync(CancellationToken cancellationToken) + { + await inputStream.FlushAsync(cancellationToken).ConfigureAwait(false); + } + /// /// Set the streams position. This operation is not supported and will throw a NotSupportedException /// @@ -198,16 +194,65 @@ public override void WriteByte(byte value) /// A byte cast to an int; -1 if the at the end of the stream. public override int ReadByte() { - byte[] oneByteBuffer = new byte[1]; - int num = Read(oneByteBuffer, 0, 1); + var oneByteBuffer = ArrayPool.Shared.Rent(1); + var num = Read(oneByteBuffer, 0, 1); if (num <= 0) { // return -1 to indicate that no byte was read. return -1; } - return oneByteBuffer[0]; + + var result = oneByteBuffer[0]; + ArrayPool.Shared.Return(oneByteBuffer); + return result; + } + + + /// + /// Reads bytes from the current tar archive entry. + /// + /// This method is aware of the boundaries of the current + /// entry in the archive and will deal with them appropriately + /// + /// + /// The buffer into which to place bytes read. + /// + /// + /// The offset at which to place bytes read. + /// + /// + /// The number of bytes to read. + /// + /// + /// + /// The number of bytes read, or 0 at end of stream/EOF. + /// + public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + return ReadAsync(buffer.AsMemory().Slice(offset, count), cancellationToken, true).AsTask(); } +#if NETSTANDARD2_1_OR_GREATER || NETCOREAPP3_1_OR_GREATER + /// + /// Reads bytes from the current tar archive entry. + /// + /// This method is aware of the boundaries of the current + /// entry in the archive and will deal with them appropriately + /// + /// + /// The buffer into which to place bytes read. + /// + /// + /// + /// The number of bytes read, or 0 at end of stream/EOF. + /// + public override ValueTask ReadAsync(Memory buffer, CancellationToken cancellationToken = + new CancellationToken()) + { + return ReadAsync(buffer, cancellationToken, true); + } +#endif + /// /// Reads bytes from the current tar archive entry. /// @@ -233,6 +278,13 @@ public override int Read(byte[] buffer, int offset, int count) throw new ArgumentNullException(nameof(buffer)); } + return ReadAsync(buffer.AsMemory().Slice(offset, count), CancellationToken.None, false).GetAwaiter() + .GetResult(); + } + + private async ValueTask ReadAsync(Memory buffer, CancellationToken ct, bool isAsync) + { + int offset = 0; int totalRead = 0; if (entryOffset >= entrySize) @@ -240,7 +292,7 @@ public override int Read(byte[] buffer, int offset, int count) return 0; } - long numToRead = count; + long numToRead = buffer.Length; if ((numToRead + entryOffset) > entrySize) { @@ -249,19 +301,22 @@ public override int Read(byte[] buffer, int offset, int count) if (readBuffer != null) { - int sz = (numToRead > readBuffer.Length) ? readBuffer.Length : (int)numToRead; + int sz = (numToRead > readBuffer.Memory.Length) ? 
readBuffer.Memory.Length : (int)numToRead; - Array.Copy(readBuffer, 0, buffer, offset, sz); + readBuffer.Memory.Slice(0, sz).CopyTo(buffer.Slice(offset, sz)); - if (sz >= readBuffer.Length) + if (sz >= readBuffer.Memory.Length) { + readBuffer.Dispose(); readBuffer = null; } else { - int newLen = readBuffer.Length - sz; - byte[] newBuf = new byte[newLen]; - Array.Copy(readBuffer, sz, newBuf, 0, newLen); + int newLen = readBuffer.Memory.Length - sz; + var newBuf = ExactMemoryPool.Shared.Rent(newLen); + readBuffer.Memory.Slice(sz, newLen).CopyTo(newBuf.Memory); + readBuffer.Dispose(); + readBuffer = newBuf; } @@ -270,28 +325,27 @@ public override int Read(byte[] buffer, int offset, int count) offset += sz; } + var recLen = TarBuffer.BlockSize; + var recBuf = ArrayPool.Shared.Rent(recLen); + while (numToRead > 0) { - byte[] rec = tarBuffer.ReadBlock(); - if (rec == null) - { - // Unexpected EOF! - throw new TarException("unexpected EOF with " + numToRead + " bytes unread"); - } + await tarBuffer.ReadBlockIntAsync(recBuf, ct, isAsync).ConfigureAwait(false); var sz = (int)numToRead; - int recLen = rec.Length; if (recLen > sz) { - Array.Copy(rec, 0, buffer, offset, sz); - readBuffer = new byte[recLen - sz]; - Array.Copy(rec, sz, readBuffer, 0, recLen - sz); + recBuf.AsSpan().Slice(0, sz).CopyTo(buffer.Slice(offset, sz).Span); + readBuffer?.Dispose(); + + readBuffer = ExactMemoryPool.Shared.Rent(recLen - sz); + recBuf.AsSpan().Slice(sz, recLen - sz).CopyTo(readBuffer.Memory.Span); } else { sz = recLen; - Array.Copy(rec, 0, buffer, offset, recLen); + recBuf.AsSpan().CopyTo(buffer.Slice(offset, recLen).Span); } totalRead += sz; @@ -299,6 +353,8 @@ public override int Read(byte[] buffer, int offset, int count) offset += sz; } + ArrayPool.Shared.Return(recBuf); + entryOffset += totalRead; return totalRead; @@ -316,6 +372,17 @@ protected override void Dispose(bool disposing) } } +#if NETSTANDARD2_1_OR_GREATER || NETCOREAPP3_1_OR_GREATER + /// + /// Closes this stream. Calls the TarBuffer's close() method. + /// The underlying stream is closed by the TarBuffer. + /// + public override async ValueTask DisposeAsync() + { + await tarBuffer.CloseAsync(CancellationToken.None).ConfigureAwait(false); + } +#endif + #endregion Stream Overrides /// @@ -359,10 +426,7 @@ public int GetRecordSize() /// public long Available { - get - { - return entrySize - entryOffset; - } + get { return entrySize - entryOffset; } } /// @@ -374,25 +438,42 @@ public long Available /// /// The number of bytes to skip. /// - public void Skip(long skipCount) + /// + private Task SkipAsync(long skipCount, CancellationToken ct) => SkipAsync(skipCount, ct, true).AsTask(); + + /// + /// Skip bytes in the input buffer. This skips bytes in the + /// current entry's data, not the entire archive, and will + /// stop at the end of the current entry's data if the number + /// to skip extends beyond that point. + /// + /// + /// The number of bytes to skip. + /// + private void Skip(long skipCount) => + SkipAsync(skipCount, CancellationToken.None, false).GetAwaiter().GetResult(); + + private async ValueTask SkipAsync(long skipCount, CancellationToken ct, bool isAsync) { // TODO: REVIEW efficiency of TarInputStream.Skip // This is horribly inefficient, but it ensures that we // properly skip over bytes via the TarBuffer... // - byte[] skipBuf = new byte[8 * 1024]; - - for (long num = skipCount; num > 0;) + var length = 8 * 1024; + using (var skipBuf = ExactMemoryPool.Shared.Rent(length)) { - int toRead = num > skipBuf.Length ? 
skipBuf.Length : (int)num; - int numRead = Read(skipBuf, 0, toRead); - - if (numRead == -1) + for (long num = skipCount; num > 0;) { - break; - } + int toRead = num > length ? length : (int)num; + int numRead = await ReadAsync(skipBuf.Memory.Slice(0, toRead), ct, isAsync).ConfigureAwait(false); - num -= numRead; + if (numRead == -1) + { + break; + } + + num -= numRead; + } } } @@ -402,10 +483,7 @@ public void Skip(long skipCount) /// Currently marking is not supported, the return value is always false. public bool IsMarkSupported { - get - { - return false; - } + get { return false; } } /// @@ -438,7 +516,24 @@ public void Reset() /// /// The next TarEntry in the archive, or null. /// - public TarEntry GetNextEntry() + public Task GetNextEntryAsync(CancellationToken ct) => GetNextEntryAsync(ct, true).AsTask(); + + /// + /// Get the next entry in this tar archive. This will skip + /// over any remaining data in the current entry, if there + /// is one, and place the input stream at the header of the + /// next entry, and read the header and instantiate a new + /// TarEntry from the header bytes and return that entry. + /// If there are no more entries in the archive, null will + /// be returned to indicate that the end of the archive has + /// been reached. + /// + /// + /// The next TarEntry in the archive, or null. + /// + public TarEntry GetNextEntry() => GetNextEntryAsync(CancellationToken.None, false).GetAwaiter().GetResult(); + + private async ValueTask GetNextEntryAsync(CancellationToken ct, bool isAsync) { if (hasHitEOF) { @@ -447,21 +542,18 @@ public TarEntry GetNextEntry() if (currentEntry != null) { - SkipToNextEntry(); + await SkipToNextEntryAsync(ct, isAsync).ConfigureAwait(false); } - byte[] headerBuf = tarBuffer.ReadBlock(); + byte[] headerBuf = ArrayPool.Shared.Rent(TarBuffer.BlockSize); + await tarBuffer.ReadBlockIntAsync(headerBuf, ct, isAsync).ConfigureAwait(false); - if (headerBuf == null) - { - hasHitEOF = true; - } - else if (TarBuffer.IsEndOfArchiveBlock(headerBuf)) + if (TarBuffer.IsEndOfArchiveBlock(headerBuf)) { hasHitEOF = true; // Read the second zero-filled block - tarBuffer.ReadBlock(); + await tarBuffer.ReadBlockIntAsync(headerBuf, ct, isAsync).ConfigureAwait(false); } else { @@ -471,6 +563,7 @@ public TarEntry GetNextEntry() if (hasHitEOF) { currentEntry = null; + readBuffer?.Dispose(); } else { @@ -482,50 +575,61 @@ public TarEntry GetNextEntry() { throw new TarException("Header checksum is invalid"); } + this.entryOffset = 0; this.entrySize = header.Size; - StringBuilder longName = null; + string longName = null; if (header.TypeFlag == TarHeader.LF_GNU_LONGNAME) { - byte[] nameBuffer = new byte[TarBuffer.BlockSize]; - long numToRead = this.entrySize; - - longName = new StringBuilder(); - - while (numToRead > 0) + using (var nameBuffer = ExactMemoryPool.Shared.Rent(TarBuffer.BlockSize)) { - int numRead = this.Read(nameBuffer, 0, (numToRead > nameBuffer.Length ? nameBuffer.Length : (int)numToRead)); + long numToRead = this.entrySize; - if (numRead == -1) + var longNameBuilder = StringBuilderPool.Instance.Rent(); + + while (numToRead > 0) { - throw new InvalidHeaderException("Failed to read long name entry"); + var length = (numToRead > TarBuffer.BlockSize ? 
TarBuffer.BlockSize : (int)numToRead); + int numRead = await ReadAsync(nameBuffer.Memory.Slice(0, length), ct, isAsync).ConfigureAwait(false); + + if (numRead == -1) + { + throw new InvalidHeaderException("Failed to read long name entry"); + } + + longNameBuilder.Append(TarHeader.ParseName(nameBuffer.Memory.Slice(0, numRead).Span, + encoding)); + numToRead -= numRead; } - longName.Append(TarHeader.ParseName(nameBuffer, 0, numRead, encoding).ToString()); - numToRead -= numRead; - } + longName = longNameBuilder.ToString(); + StringBuilderPool.Instance.Return(longNameBuilder); - SkipToNextEntry(); - headerBuf = this.tarBuffer.ReadBlock(); + await SkipToNextEntryAsync(ct, isAsync).ConfigureAwait(false); + await this.tarBuffer.ReadBlockIntAsync(headerBuf, ct, isAsync).ConfigureAwait(false); + } } else if (header.TypeFlag == TarHeader.LF_GHDR) - { // POSIX global extended header - // Ignore things we dont understand completely for now - SkipToNextEntry(); - headerBuf = this.tarBuffer.ReadBlock(); + { + // POSIX global extended header + // Ignore things we dont understand completely for now + await SkipToNextEntryAsync(ct, isAsync).ConfigureAwait(false); + await this.tarBuffer.ReadBlockIntAsync(headerBuf, ct, isAsync).ConfigureAwait(false); } else if (header.TypeFlag == TarHeader.LF_XHDR) - { // POSIX extended header - byte[] nameBuffer = new byte[TarBuffer.BlockSize]; + { + // POSIX extended header + byte[] nameBuffer = ArrayPool.Shared.Rent(TarBuffer.BlockSize); long numToRead = this.entrySize; var xhr = new TarExtendedHeaderReader(); while (numToRead > 0) { - int numRead = this.Read(nameBuffer, 0, (numToRead > nameBuffer.Length ? nameBuffer.Length : (int)numToRead)); + var length = (numToRead > nameBuffer.Length ? nameBuffer.Length : (int)numToRead); + int numRead = await ReadAsync(nameBuffer.AsMemory().Slice(0, length), ct, isAsync).ConfigureAwait(false); if (numRead == -1) { @@ -536,42 +640,47 @@ public TarEntry GetNextEntry() numToRead -= numRead; } + ArrayPool.Shared.Return(nameBuffer); + if (xhr.Headers.TryGetValue("path", out string name)) { - longName = new StringBuilder(name); + longName = name; } - SkipToNextEntry(); - headerBuf = this.tarBuffer.ReadBlock(); + await SkipToNextEntryAsync(ct, isAsync).ConfigureAwait(false); + await this.tarBuffer.ReadBlockIntAsync(headerBuf, ct, isAsync).ConfigureAwait(false); } else if (header.TypeFlag == TarHeader.LF_GNU_VOLHDR) { // TODO: could show volume name when verbose - SkipToNextEntry(); - headerBuf = this.tarBuffer.ReadBlock(); + await SkipToNextEntryAsync(ct, isAsync).ConfigureAwait(false); + await this.tarBuffer.ReadBlockIntAsync(headerBuf, ct, isAsync).ConfigureAwait(false); } else if (header.TypeFlag != TarHeader.LF_NORMAL && - header.TypeFlag != TarHeader.LF_OLDNORM && - header.TypeFlag != TarHeader.LF_LINK && - header.TypeFlag != TarHeader.LF_SYMLINK && - header.TypeFlag != TarHeader.LF_DIR) + header.TypeFlag != TarHeader.LF_OLDNORM && + header.TypeFlag != TarHeader.LF_LINK && + header.TypeFlag != TarHeader.LF_SYMLINK && + header.TypeFlag != TarHeader.LF_DIR) { // Ignore things we dont understand completely for now - SkipToNextEntry(); - headerBuf = tarBuffer.ReadBlock(); + await SkipToNextEntryAsync(ct, isAsync).ConfigureAwait(false); + await tarBuffer.ReadBlockIntAsync(headerBuf, ct, isAsync).ConfigureAwait(false); } if (entryFactory == null) { currentEntry = new TarEntry(headerBuf, encoding); + readBuffer?.Dispose(); + if (longName != null) { - currentEntry.Name = longName.ToString(); + currentEntry.Name = longName; } } else { 
currentEntry = entryFactory.CreateEntry(headerBuf); + readBuffer?.Dispose(); } // Magic was checked here for 'ustar' but there are multiple valid possibilities @@ -587,11 +696,16 @@ public TarEntry GetNextEntry() entrySize = 0; entryOffset = 0; currentEntry = null; + readBuffer?.Dispose(); + string errorText = string.Format("Bad header in record {0} block {1} {2}", tarBuffer.CurrentRecord, tarBuffer.CurrentBlock, ex.Message); throw new InvalidHeaderException(errorText); } } + + ArrayPool.Shared.Return(headerBuf); + return currentEntry; } @@ -602,30 +716,55 @@ public TarEntry GetNextEntry() /// /// The OutputStream into which to write the entry's data. /// - public void CopyEntryContents(Stream outputStream) + /// + public Task CopyEntryContentsAsync(Stream outputStream, CancellationToken ct) => + CopyEntryContentsAsync(outputStream, ct, true).AsTask(); + + /// + /// Copies the contents of the current tar archive entry directly into + /// an output stream. + /// + /// + /// The OutputStream into which to write the entry's data. + /// + public void CopyEntryContents(Stream outputStream) => + CopyEntryContentsAsync(outputStream, CancellationToken.None, false).GetAwaiter().GetResult(); + + private async ValueTask CopyEntryContentsAsync(Stream outputStream, CancellationToken ct, bool isAsync) { - byte[] tempBuffer = new byte[32 * 1024]; + byte[] tempBuffer = ArrayPool.Shared.Rent(32 * 1024); while (true) { - int numRead = Read(tempBuffer, 0, tempBuffer.Length); + int numRead = await ReadAsync(tempBuffer, ct, isAsync).ConfigureAwait(false); if (numRead <= 0) { break; } - outputStream.Write(tempBuffer, 0, numRead); + + if (isAsync) + { + await outputStream.WriteAsync(tempBuffer, 0, numRead, ct).ConfigureAwait(false); + } + else + { + outputStream.Write(tempBuffer, 0, numRead); + } } + + ArrayPool.Shared.Return(tempBuffer); } - private void SkipToNextEntry() + private async ValueTask SkipToNextEntryAsync(CancellationToken ct, bool isAsync) { long numToSkip = entrySize - entryOffset; if (numToSkip > 0) { - Skip(numToSkip); + await SkipAsync(numToSkip, ct, isAsync).ConfigureAwait(false); } + readBuffer?.Dispose(); readBuffer = null; } @@ -676,6 +815,7 @@ public interface IEntryFactory public class EntryFactoryAdapter : IEntryFactory { Encoding nameEncoding; + /// /// Construct standard entry factory class with ASCII name encoding /// @@ -683,6 +823,7 @@ public class EntryFactoryAdapter : IEntryFactory public EntryFactoryAdapter() { } + /// /// Construct standard entry factory with name encoding /// @@ -691,6 +832,7 @@ public EntryFactoryAdapter(Encoding nameEncoding) { this.nameEncoding = nameEncoding; } + /// /// Create a based on named /// @@ -742,7 +884,7 @@ public TarEntry CreateEntry(byte[] headerBuffer) /// /// Buffer used with calls to Read() /// - protected byte[] readBuffer; + protected IMemoryOwner readBuffer; /// /// Working buffer diff --git a/src/ICSharpCode.SharpZipLib/Tar/TarOutputStream.cs b/src/ICSharpCode.SharpZipLib/Tar/TarOutputStream.cs index 7c52e6c7c..be4f6cc79 100644 --- a/src/ICSharpCode.SharpZipLib/Tar/TarOutputStream.cs +++ b/src/ICSharpCode.SharpZipLib/Tar/TarOutputStream.cs @@ -1,6 +1,9 @@ using System; +using System.Buffers; using System.IO; using System.Text; +using System.Threading; +using System.Threading.Tasks; namespace ICSharpCode.SharpZipLib.Tar { @@ -50,8 +53,8 @@ public TarOutputStream(Stream outputStream, int blockFactor) this.outputStream = outputStream; buffer = TarBuffer.CreateOutputTarBuffer(outputStream, blockFactor); - assemblyBuffer = new 
byte[TarBuffer.BlockSize]; - blockBuffer = new byte[TarBuffer.BlockSize]; + assemblyBuffer = ArrayPool.Shared.Rent(TarBuffer.BlockSize); + blockBuffer = ArrayPool.Shared.Rent(TarBuffer.BlockSize); } /// @@ -70,8 +73,8 @@ public TarOutputStream(Stream outputStream, int blockFactor, Encoding nameEncodi this.outputStream = outputStream; buffer = TarBuffer.CreateOutputTarBuffer(outputStream, blockFactor); - assemblyBuffer = new byte[TarBuffer.BlockSize]; - blockBuffer = new byte[TarBuffer.BlockSize]; + assemblyBuffer = ArrayPool.Shared.Rent(TarBuffer.BlockSize); + blockBuffer = ArrayPool.Shared.Rent(TarBuffer.BlockSize); this.nameEncoding = nameEncoding; } @@ -94,10 +97,7 @@ public bool IsStreamOwner /// public override bool CanRead { - get - { - return outputStream.CanRead; - } + get { return outputStream.CanRead; } } /// @@ -105,10 +105,7 @@ public override bool CanRead /// public override bool CanSeek { - get - { - return outputStream.CanSeek; - } + get { return outputStream.CanSeek; } } /// @@ -116,10 +113,7 @@ public override bool CanSeek /// public override bool CanWrite { - get - { - return outputStream.CanWrite; - } + get { return outputStream.CanWrite; } } /// @@ -127,10 +121,7 @@ public override bool CanWrite /// public override long Length { - get - { - return outputStream.Length; - } + get { return outputStream.Length; } } /// @@ -138,14 +129,8 @@ public override long Length /// public override long Position { - get - { - return outputStream.Position; - } - set - { - outputStream.Position = value; - } + get { return outputStream.Position; } + set { outputStream.Position = value; } } /// @@ -193,6 +178,23 @@ public override int Read(byte[] buffer, int offset, int count) return outputStream.Read(buffer, offset, count); } + /// + /// read bytes from the current stream and advance the position within the + /// stream by the number of bytes read. + /// + /// The buffer to store read bytes in. + /// The index into the buffer to being storing bytes at. + /// The desired number of bytes to read. + /// + /// The total number of bytes read, or zero if at the end of the stream. + /// The number of bytes may be less than the count + /// requested if data is not available. + public override async Task ReadAsync(byte[] buffer, int offset, int count, + CancellationToken cancellationToken) + { + return await outputStream.ReadAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false); + } + /// /// All buffered data is written to destination /// @@ -201,17 +203,34 @@ public override void Flush() outputStream.Flush(); } + /// + /// All buffered data is written to destination + /// + public override async Task FlushAsync(CancellationToken cancellationToken) + { + await outputStream.FlushAsync(cancellationToken).ConfigureAwait(false); + } + /// /// Ends the TAR archive without closing the underlying OutputStream. /// The result is that the EOF block of nulls is written. /// - public void Finish() + public void Finish() => FinishAsync(CancellationToken.None, false).GetAwaiter().GetResult(); + + /// + /// Ends the TAR archive without closing the underlying OutputStream. + /// The result is that the EOF block of nulls is written. 
+ /// + public Task FinishAsync(CancellationToken cancellationToken) => FinishAsync(cancellationToken, true); + + private async Task FinishAsync(CancellationToken cancellationToken, bool isAsync) { if (IsEntryOpen) { - CloseEntry(); + await CloseEntryAsync(cancellationToken, isAsync).ConfigureAwait(false); } - WriteEofBlock(); + + await WriteEofBlockAsync(cancellationToken, isAsync).ConfigureAwait(false); } /// @@ -226,6 +245,9 @@ protected override void Dispose(bool disposing) isClosed = true; Finish(); buffer.Close(); + + ArrayPool.Shared.Return(assemblyBuffer); + ArrayPool.Shared.Return(blockBuffer); } } @@ -269,44 +291,70 @@ private bool IsEntryOpen /// /// The TarEntry to be written to the archive. /// - public void PutNextEntry(TarEntry entry) + /// + public Task PutNextEntryAsync(TarEntry entry, CancellationToken cancellationToken) => + PutNextEntryAsync(entry, cancellationToken, true); + + /// + /// Put an entry on the output stream. This writes the entry's + /// header and positions the output stream for writing + /// the contents of the entry. Once this method is called, the + /// stream is ready for calls to write() to write the entry's + /// contents. Once the contents are written, closeEntry() + /// MUST be called to ensure that all buffered data + /// is completely written to the output stream. + /// + /// + /// The TarEntry to be written to the archive. + /// + public void PutNextEntry(TarEntry entry) => + PutNextEntryAsync(entry, CancellationToken.None, false).GetAwaiter().GetResult(); + + private async Task PutNextEntryAsync(TarEntry entry, CancellationToken cancellationToken, bool isAsync) { if (entry == null) { throw new ArgumentNullException(nameof(entry)); } - var namelen = nameEncoding != null ? nameEncoding.GetByteCount(entry.TarHeader.Name) : entry.TarHeader.Name.Length; + var namelen = nameEncoding != null + ? 
nameEncoding.GetByteCount(entry.TarHeader.Name) + : entry.TarHeader.Name.Length; if (namelen > TarHeader.NAMELEN) { var longHeader = new TarHeader(); longHeader.TypeFlag = TarHeader.LF_GNU_LONGNAME; longHeader.Name = longHeader.Name + "././@LongLink"; - longHeader.Mode = 420;//644 by default + longHeader.Mode = 420; //644 by default longHeader.UserId = entry.UserId; longHeader.GroupId = entry.GroupId; longHeader.GroupName = entry.GroupName; longHeader.UserName = entry.UserName; longHeader.LinkName = ""; - longHeader.Size = namelen + 1; // Plus one to avoid dropping last char + longHeader.Size = namelen + 1; // Plus one to avoid dropping last char longHeader.WriteHeader(blockBuffer, nameEncoding); - buffer.WriteBlock(blockBuffer); // Add special long filename header block + // Add special long filename header block + await buffer.WriteBlockAsync(blockBuffer, 0, cancellationToken, isAsync).ConfigureAwait(false); int nameCharIndex = 0; - while (nameCharIndex < namelen + 1 /* we've allocated one for the null char, now we must make sure it gets written out */) + while + (nameCharIndex < + namelen + 1 /* we've allocated one for the null char, now we must make sure it gets written out */) { Array.Clear(blockBuffer, 0, blockBuffer.Length); - TarHeader.GetAsciiBytes(entry.TarHeader.Name, nameCharIndex, this.blockBuffer, 0, TarBuffer.BlockSize, nameEncoding); // This func handles OK the extra char out of string length + TarHeader.GetAsciiBytes(entry.TarHeader.Name, nameCharIndex, this.blockBuffer, 0, + TarBuffer.BlockSize, nameEncoding); // This func handles OK the extra char out of string length nameCharIndex += TarBuffer.BlockSize; - buffer.WriteBlock(blockBuffer); + + await buffer.WriteBlockAsync(blockBuffer, 0, cancellationToken, isAsync).ConfigureAwait(false); } } entry.WriteEntryHeader(blockBuffer, nameEncoding); - buffer.WriteBlock(blockBuffer); + await buffer.WriteBlockAsync(blockBuffer, 0, cancellationToken, isAsync).ConfigureAwait(false); currBytes = 0; @@ -322,13 +370,26 @@ public void PutNextEntry(TarEntry entry) /// to the output stream before this entry is closed and the /// next entry written. /// - public void CloseEntry() + public Task CloseEntryAsync(CancellationToken cancellationToken) => CloseEntryAsync(cancellationToken, true); + + /// + /// Close an entry. This method MUST be called for all file + /// entries that contain data. The reason is that we must + /// buffer data written to the stream in order to satisfy + /// the buffer's block based writes. Thus, there may be + /// data fragments still being assembled that must be written + /// to the output stream before this entry is closed and the + /// next entry written. 
+ /// + public void CloseEntry() => CloseEntryAsync(CancellationToken.None, false).GetAwaiter().GetResult(); + + private async Task CloseEntryAsync(CancellationToken cancellationToken, bool isAsync) { if (assemblyBufferLength > 0) { Array.Clear(assemblyBuffer, assemblyBufferLength, assemblyBuffer.Length - assemblyBufferLength); - buffer.WriteBlock(assemblyBuffer); + await buffer.WriteBlockAsync(assemblyBuffer, 0, cancellationToken, isAsync).ConfigureAwait(false); currBytes += assemblyBufferLength; assemblyBufferLength = 0; @@ -352,7 +413,10 @@ public void CloseEntry() /// public override void WriteByte(byte value) { - Write(new byte[] { value }, 0, 1); + var oneByteArray = ArrayPool.Shared.Rent(1); + oneByteArray[0] = value; + Write(oneByteArray, 0, 1); + ArrayPool.Shared.Return(oneByteArray); } /// @@ -373,7 +437,32 @@ public override void WriteByte(byte value) /// /// The number of bytes to write. /// - public override void Write(byte[] buffer, int offset, int count) + public override void Write(byte[] buffer, int offset, int count) => + WriteAsync(buffer, offset, count, CancellationToken.None, false).GetAwaiter().GetResult(); + + /// + /// Writes bytes to the current tar archive entry. This method + /// is aware of the current entry and will throw an exception if + /// you attempt to write bytes past the length specified for the + /// current entry. The method is also (painfully) aware of the + /// record buffering required by TarBuffer, and manages buffers + /// that are not a multiple of recordsize in length, including + /// assembling records from small buffers. + /// + /// + /// The buffer to write to the archive. + /// + /// + /// The offset in the buffer from which to get bytes. + /// + /// + /// The number of bytes to write. + /// + /// + public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => + WriteAsync(buffer, offset, count, cancellationToken, true); + + private async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken, bool isAsync) { if (buffer == null) { @@ -418,7 +507,7 @@ public override void Write(byte[] buffer, int offset, int count) Array.Copy(assemblyBuffer, 0, blockBuffer, 0, assemblyBufferLength); Array.Copy(buffer, offset, blockBuffer, assemblyBufferLength, aLen); - this.buffer.WriteBlock(blockBuffer); + await this.buffer.WriteBlockAsync(blockBuffer, 0, cancellationToken, isAsync).ConfigureAwait(false); currBytes += blockBuffer.Length; @@ -450,7 +539,7 @@ public override void Write(byte[] buffer, int offset, int count) break; } - this.buffer.WriteBlock(buffer, offset); + await this.buffer.WriteBlockAsync(buffer, offset, cancellationToken, isAsync).ConfigureAwait(false); int bufferLength = blockBuffer.Length; currBytes += bufferLength; @@ -463,11 +552,11 @@ public override void Write(byte[] buffer, int offset, int count) /// Write an EOF (end of archive) block to the tar archive. /// The end of the archive is indicated by two blocks consisting entirely of zero bytes. 
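Taken together, the async members above give TarOutputStream a fully awaitable write path. A minimal sketch, assuming a caller-supplied byte[] payload and illustrative names; the entry size must still be set before PutNextEntryAsync, exactly as with the synchronous API:

static async Task WriteSingleEntryAsync(Stream destination, byte[] payload, CancellationToken ct)
{
    using var tarOut = new TarOutputStream(destination);
    var entry = TarEntry.CreateTarEntry("data/file.bin");
    entry.Size = payload.Length;
    await tarOut.PutNextEntryAsync(entry, ct);
    await tarOut.WriteAsync(payload, 0, payload.Length, ct);
    await tarOut.CloseEntryAsync(ct);
    await tarOut.FinishAsync(ct);   // writes the zero-filled EOF blocks
}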
/// - private void WriteEofBlock() + private async Task WriteEofBlockAsync(CancellationToken cancellationToken, bool isAsync) { Array.Clear(blockBuffer, 0, blockBuffer.Length); - buffer.WriteBlock(blockBuffer); - buffer.WriteBlock(blockBuffer); + await buffer.WriteBlockAsync(blockBuffer, 0, cancellationToken, isAsync).ConfigureAwait(false); + await buffer.WriteBlockAsync(blockBuffer, 0, cancellationToken, isAsync).ConfigureAwait(false); } #region Instance Fields diff --git a/src/ICSharpCode.SharpZipLib/Tar/TarStringExtension.cs b/src/ICSharpCode.SharpZipLib/Tar/TarStringExtension.cs new file mode 100644 index 000000000..433c6a424 --- /dev/null +++ b/src/ICSharpCode.SharpZipLib/Tar/TarStringExtension.cs @@ -0,0 +1,13 @@ +using System.IO; +using ICSharpCode.SharpZipLib.Core; + +namespace ICSharpCode.SharpZipLib.Tar +{ + internal static class TarStringExtension + { + public static string ToTarArchivePath(this string s) + { + return PathUtils.DropPathRoot(s).Replace(Path.DirectorySeparatorChar, '/'); + } + } +} diff --git a/src/ICSharpCode.SharpZipLib/Zip/Compression/Inflater.cs b/src/ICSharpCode.SharpZipLib/Zip/Compression/Inflater.cs index 439b4c601..5bf2a985e 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/Compression/Inflater.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/Compression/Inflater.cs @@ -137,7 +137,7 @@ public class Inflater /// True means, that the inflated stream doesn't contain a Zlib header or /// footer. /// - private bool noHeader; + internal bool noHeader; private readonly StreamManipulator input; private OutputWindow outputWindow; diff --git a/src/ICSharpCode.SharpZipLib/Zip/Compression/PooledInflater.cs b/src/ICSharpCode.SharpZipLib/Zip/Compression/PooledInflater.cs new file mode 100644 index 000000000..0828de3ef --- /dev/null +++ b/src/ICSharpCode.SharpZipLib/Zip/Compression/PooledInflater.cs @@ -0,0 +1,14 @@ +using ICSharpCode.SharpZipLib.Core; + +namespace ICSharpCode.SharpZipLib.Zip.Compression +{ + /// + /// A marker type for pooled version of an inflator that we can return back to . + /// + internal sealed class PooledInflater : Inflater + { + public PooledInflater(bool noHeader) : base(noHeader) + { + } + } +} diff --git a/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/DeflaterOutputStream.cs b/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/DeflaterOutputStream.cs index b6d4025d1..a7e6807ca 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/DeflaterOutputStream.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/DeflaterOutputStream.cs @@ -2,6 +2,9 @@ using System; using System.IO; using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; namespace ICSharpCode.SharpZipLib.Zip.Compression.Streams { @@ -105,10 +108,7 @@ public virtual void Finish() break; } - if (cryptoTransform_ != null) - { - EncryptBlock(buffer_, 0, len); - } + EncryptBlock(buffer_, 0, len); baseOutputStream_.Write(buffer_, 0, len); } @@ -131,6 +131,47 @@ public virtual void Finish() } } + /// + /// Finishes the stream by calling finish() on the deflater. + /// + /// The that can be used to cancel the operation. 
+ /// + /// Not all input is deflated + /// + public virtual async Task FinishAsync(CancellationToken ct) + { + deflater_.Finish(); + while (!deflater_.IsFinished) + { + int len = deflater_.Deflate(buffer_, 0, buffer_.Length); + if (len <= 0) + { + break; + } + + EncryptBlock(buffer_, 0, len); + + await baseOutputStream_.WriteAsync(buffer_, 0, len, ct).ConfigureAwait(false); + } + + if (!deflater_.IsFinished) + { + throw new SharpZipBaseException("Can't deflate all input?"); + } + + await baseOutputStream_.FlushAsync(ct).ConfigureAwait(false); + + if (cryptoTransform_ != null) + { + if (cryptoTransform_ is ZipAESTransform) + { + AESAuthCode = ((ZipAESTransform)cryptoTransform_).GetAuthCode(); + } + cryptoTransform_.Dispose(); + cryptoTransform_ = null; + } + } + /// /// Gets or sets a flag indicating ownership of underlying stream. /// When the flag is true will close the underlying stream also. @@ -163,6 +204,14 @@ public bool CanPatchEntries /// protected byte[] AESAuthCode; + /// + public Encoding ZipCryptoEncoding { + get => _stringCodec.ZipCryptoEncoding; + set { + _stringCodec = _stringCodec.WithZipCryptoEncoding(value); + } + } + /// /// Encrypt a block of data /// @@ -177,6 +226,7 @@ public bool CanPatchEntries /// protected void EncryptBlock(byte[] buffer, int offset, int length) { + if(cryptoTransform_ is null) return; cryptoTransform_.TransformBlock(buffer, 0, length, buffer, 0); } @@ -190,11 +240,9 @@ protected void EncryptBlock(byte[] buffer, int offset, int length) /// are processed. /// protected void Deflate() - { - Deflate(false); - } + => DeflateSyncOrAsync(false, null).GetAwaiter().GetResult(); - private void Deflate(bool flushing) + private async Task DeflateSyncOrAsync(bool flushing, CancellationToken? ct) { while (flushing || !deflater_.IsNeedingInput) { @@ -204,12 +252,17 @@ private void Deflate(bool flushing) { break; } - if (cryptoTransform_ != null) + + EncryptBlock(buffer_, 0, deflateCount); + + if (ct.HasValue) + { + await baseOutputStream_.WriteAsync(buffer_, 0, deflateCount, ct.Value).ConfigureAwait(false); + } + else { - EncryptBlock(buffer_, 0, deflateCount); + baseOutputStream_.Write(buffer_, 0, deflateCount); } - - baseOutputStream_.Write(buffer_, 0, deflateCount); } if (!deflater_.IsNeedingInput) @@ -335,10 +388,23 @@ public override int Read(byte[] buffer, int offset, int count) public override void Flush() { deflater_.Flush(); - Deflate(true); + DeflateSyncOrAsync(true, null).GetAwaiter().GetResult(); baseOutputStream_.Flush(); } + /// + /// Asynchronously clears all buffers for this stream, causes any buffered data to be written to the underlying device, and monitors cancellation requests. + /// + /// + /// The token to monitor for cancellation requests. The default value is . + /// + public override async Task FlushAsync(CancellationToken cancellationToken) + { + deflater_.Flush(); + await DeflateSyncOrAsync(true, cancellationToken).ConfigureAwait(false); + await baseOutputStream_.FlushAsync(cancellationToken).ConfigureAwait(false); + } + /// /// Calls and closes the underlying /// stream when is true. @@ -369,6 +435,38 @@ protected override void Dispose(bool disposing) } } +#if NETSTANDARD2_1 || NETCOREAPP3_0_OR_GREATER + /// + /// Calls and closes the underlying + /// stream when is true. 
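FinishAsync mirrors the synchronous Finish above: it drains the deflater, encrypts each block when a crypto transform is active, and flushes the target stream without closing it. A minimal sketch against DeflaterOutputStream itself, with illustrative variable names and namespaces omitted:

var deflated = new DeflaterOutputStream(destination) { IsStreamOwner = false };
deflated.Write(payload, 0, payload.Length);
await deflated.FinishAsync(CancellationToken.None);   // destination stays open for further writes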
+ /// + public override async ValueTask DisposeAsync() + { + if (!isClosed_) + { + isClosed_ = true; + + try + { + await FinishAsync(CancellationToken.None).ConfigureAwait(false); + if (cryptoTransform_ != null) + { + GetAuthCodeIfAES(); + cryptoTransform_.Dispose(); + cryptoTransform_ = null; + } + } + finally + { + if (IsStreamOwner) + { + await baseOutputStream_.DisposeAsync().ConfigureAwait(false); + } + } + } + } +#endif + /// /// Get the Auth code for AES encrypted entries /// @@ -411,6 +509,27 @@ public override void Write(byte[] buffer, int offset, int count) Deflate(); } + /// + /// Asynchronously writes a sequence of bytes to the current stream, advances the current position within this stream by the number of bytes written, and monitors cancellation requests. + /// + /// + /// The byte array + /// + /// + /// The offset into the byte array where to start. + /// + /// + /// The number of bytes to write. + /// + /// + /// The token to monitor for cancellation requests. The default value is . + /// + public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken ct) + { + deflater_.SetInput(buffer, offset, count); + await DeflateSyncOrAsync(false, ct).ConfigureAwait(false); + } + #endregion Stream Overrides #region Instance Fields @@ -433,6 +552,9 @@ public override void Write(byte[] buffer, int offset, int count) private bool isClosed_; + /// + protected StringCodec _stringCodec = ZipStrings.GetStringCodec(); + #endregion Instance Fields } } diff --git a/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/InflaterInputStream.cs b/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/InflaterInputStream.cs index 7790474d2..980ffc701 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/InflaterInputStream.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/Compression/Streams/InflaterInputStream.cs @@ -1,6 +1,7 @@ using System; using System.IO; using System.Security.Cryptography; +using ICSharpCode.SharpZipLib.Core; namespace ICSharpCode.SharpZipLib.Zip.Compression.Streams { @@ -339,7 +340,7 @@ public class InflaterInputStream : Stream /// The InputStream to read bytes from /// public InflaterInputStream(Stream baseInputStream) - : this(baseInputStream, new Inflater(), 4096) + : this(baseInputStream, InflaterPool.Instance.Rent(), 4096) { } @@ -630,6 +631,12 @@ protected override void Dispose(bool disposing) baseInputStream.Dispose(); } } + + if (inf is PooledInflater inflater) + { + InflaterPool.Instance.Return(inflater); + } + inf = null; } /// diff --git a/src/ICSharpCode.SharpZipLib/Zip/FastZip.cs b/src/ICSharpCode.SharpZipLib/Zip/FastZip.cs index 01725f4c3..baa1771cb 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/FastZip.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/FastZip.cs @@ -345,6 +345,29 @@ public Deflater.CompressionLevel CompressionLevel set { compressionLevel_ = value; } } + /// + /// Reflects the opposite of the internal , setting it to false overrides the encoding used for reading and writing zip entries + /// + public bool UseUnicode + { + get => !_stringCodec.ForceZipLegacyEncoding; + set => _stringCodec.ForceZipLegacyEncoding = !value; + } + + /// Gets or sets the code page used for reading/writing zip file entries when unicode is disabled + public int LegacyCodePage + { + get => _stringCodec.CodePage; + set => _stringCodec = StringCodec.FromCodePage(value); + } + + /// + public StringCodec StringCodec + { + get => _stringCodec; + set => _stringCodec = value; + } + #endregion Properties #region Delegates @@ -456,7 +479,7 @@ private void 
CreateZip(Stream outputStream, string sourceDirectory, bool recurse NameTransform = new ZipNameTransform(sourceDirectory); sourceDirectory_ = sourceDirectory; - using (outputStream_ = new ZipOutputStream(outputStream)) + using (outputStream_ = new ZipOutputStream(outputStream, _stringCodec)) { outputStream_.SetLevel((int)CompressionLevel); outputStream_.IsStreamOwner = !leaveOpen; @@ -556,7 +579,7 @@ public void ExtractZip(Stream inputStream, string targetDirectory, directoryFilter_ = new NameFilter(directoryFilter); restoreDateTimeOnExtract_ = restoreDateTime; - using (zipFile_ = new ZipFile(inputStream, !isStreamOwner)) + using (zipFile_ = new ZipFile(inputStream, !isStreamOwner, _stringCodec)) { if (password_ != null) { @@ -631,6 +654,10 @@ private void ProcessFile(object sender, ScanEventArgs e) using (FileStream stream = File.Open(e.Name, FileMode.Open, FileAccess.Read, FileShare.Read)) { ZipEntry entry = entryFactory_.MakeFileEntry(e.Name); + if (_stringCodec.ForceZipLegacyEncoding) + { + entry.IsUnicodeText = false; + } // Set up AES encryption for the entry if required. ConfigureEntryEncryption(entry); @@ -967,7 +994,7 @@ private static bool NameIsValid(string name) private INameTransform extractNameTransform_; private UseZip64 useZip64_ = UseZip64.Dynamic; private CompressionLevel compressionLevel_ = CompressionLevel.DEFAULT_COMPRESSION; - + private StringCodec _stringCodec = ZipStrings.GetStringCodec(); private string password_; #endregion Instance Fields diff --git a/src/ICSharpCode.SharpZipLib/Zip/ZipConstants.cs b/src/ICSharpCode.SharpZipLib/Zip/ZipConstants.cs index eadf33901..b16fdefdf 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/ZipConstants.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/ZipConstants.cs @@ -231,6 +231,21 @@ public enum GeneralBitFlags /// ReservedPkware15 = 0x8000 } + + /// + /// Helpers for + /// + public static class GeneralBitFlagsExtensions + { + /// + /// This is equivalent of in .NET Core, but since the .NET FW + /// version is really slow (due to un-/boxing and reflection) we use this wrapper. + /// + /// + /// + /// + public static bool Includes(this GeneralBitFlags flagData, GeneralBitFlags flag) => (flag & flagData) != 0; + } #endregion Enumerations @@ -471,48 +486,29 @@ public static class ZipConstants public const int ENDSIG = 'P' | ('K' << 8) | (5 << 16) | (6 << 24); #endregion Header Signatures + } + /// + /// GeneralBitFlags helper extensions + /// + public static class GenericBitFlagsExtensions + { /// - /// Default encoding used for string conversion. 0 gives the default system OEM code page. - /// Using the default code page isnt the full solution necessarily - /// there are many variable factors, codepage 850 is often a good choice for - /// European users, however be careful about compatability. 
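The new FastZip properties above replace the removed ZipStrings statics with per-instance settings. A minimal sketch of forcing a legacy code page for entry names; the code page and paths are illustrative, and on .NET Core most legacy pages require the System.Text.Encoding.CodePages provider to be registered:

var fastZip = new FastZip
{
    LegacyCodePage = 850,   // DOS Latin-1, common for older European archives
    UseUnicode = false      // write entry names with the legacy code page instead of UTF-8
};
fastZip.CreateZip("out.zip", "data", true, @"\.txt$");   // target zip, source dir, recurse, regex file filter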
- /// - [Obsolete("Use ZipStrings instead")] - public static int DefaultCodePage - { - get => ZipStrings.CodePage; - set => ZipStrings.CodePage = value; - } - - /// Deprecated wrapper for - [Obsolete("Use ZipStrings.ConvertToString instead")] - public static string ConvertToString(byte[] data, int count) - => ZipStrings.ConvertToString(data, count); - - /// Deprecated wrapper for - [Obsolete("Use ZipStrings.ConvertToString instead")] - public static string ConvertToString(byte[] data) - => ZipStrings.ConvertToString(data); - - /// Deprecated wrapper for - [Obsolete("Use ZipStrings.ConvertToStringExt instead")] - public static string ConvertToStringExt(int flags, byte[] data, int count) - => ZipStrings.ConvertToStringExt(flags, data, count); - - /// Deprecated wrapper for - [Obsolete("Use ZipStrings.ConvertToStringExt instead")] - public static string ConvertToStringExt(int flags, byte[] data) - => ZipStrings.ConvertToStringExt(flags, data); - - /// Deprecated wrapper for - [Obsolete("Use ZipStrings.ConvertToArray instead")] - public static byte[] ConvertToArray(string str) - => ZipStrings.ConvertToArray(str); - - /// Deprecated wrapper for - [Obsolete("Use ZipStrings.ConvertToArray instead")] - public static byte[] ConvertToArray(int flags, string str) - => ZipStrings.ConvertToArray(flags, str); + /// Efficiently check if any of the flags are set without enum un-/boxing + /// + /// + /// + /// Returns whether any of flags are set + public static bool HasAny(this GeneralBitFlags target, GeneralBitFlags flags) + => ((int)target & (int)flags) != 0; + + /// + /// Efficiently check if all the flags are set without enum un-/boxing + /// + /// + /// + /// Returns whether the flags are all set + public static bool HasAll(this GeneralBitFlags target, GeneralBitFlags flags) + => ((int)target & (int)flags) == (int)flags; } } diff --git a/src/ICSharpCode.SharpZipLib/Zip/ZipEntry.cs b/src/ICSharpCode.SharpZipLib/Zip/ZipEntry.cs index ffeee1883..b0bf15821 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/ZipEntry.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/ZipEntry.cs @@ -1,5 +1,6 @@ using System; using System.IO; +using System.Text; namespace ICSharpCode.SharpZipLib.Zip { @@ -150,7 +151,7 @@ private enum Known : byte /// The name passed is null /// public ZipEntry(string name) - : this(name, 0, ZipConstants.VersionMadeBy, CompressionMethod.Deflated) + : this(name, 0, ZipConstants.VersionMadeBy, CompressionMethod.Deflated, true) { } @@ -171,7 +172,7 @@ public ZipEntry(string name) /// internal ZipEntry(string name, int versionRequiredToExtract) : this(name, versionRequiredToExtract, ZipConstants.VersionMadeBy, - CompressionMethod.Deflated) + CompressionMethod.Deflated, true) { } @@ -182,6 +183,7 @@ internal ZipEntry(string name, int versionRequiredToExtract) /// Version and HostSystem Information /// Minimum required zip feature version required to extract this entry /// Compression method for this entry. + /// Whether the entry uses unicode for name and comment /// /// The name passed is null /// @@ -193,7 +195,7 @@ internal ZipEntry(string name, int versionRequiredToExtract) /// It is not generally useful, use the constructor specifying the name only. 
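The HasAny/HasAll extensions introduced above exist because Enum.HasFlag boxes and reflects on .NET Framework; they test the same bits with plain integer arithmetic. A small sketch:

var flags = GeneralBitFlags.Encrypted | GeneralBitFlags.Descriptor;
bool any = flags.HasAny(GeneralBitFlags.Descriptor | GeneralBitFlags.Patched);    // true: Descriptor is set
bool all = flags.HasAll(GeneralBitFlags.Encrypted | GeneralBitFlags.Descriptor);  // true: both bits are set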
/// internal ZipEntry(string name, int versionRequiredToExtract, int madeByInfo, - CompressionMethod method) + CompressionMethod method, bool unicode) { if (name == null) { @@ -216,7 +218,7 @@ internal ZipEntry(string name, int versionRequiredToExtract, int madeByInfo, this.versionToExtract = (ushort)versionRequiredToExtract; this.method = method; - IsUnicodeText = ZipStrings.UseUnicode; + IsUnicodeText = unicode; } /// diff --git a/src/ICSharpCode.SharpZipLib/Zip/ZipEntryFactory.cs b/src/ICSharpCode.SharpZipLib/Zip/ZipEntryFactory.cs index 1e40baaff..ccbb26968 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/ZipEntryFactory.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/ZipEntryFactory.cs @@ -68,7 +68,7 @@ public enum TimeSetting public ZipEntryFactory() { nameTransform_ = new ZipNameTransform(); - isUnicodeText_ = ZipStrings.UseUnicode; + isUnicodeText_ = true; } /// @@ -162,7 +162,7 @@ public int SetAttributes } /// - /// Get set a value indicating whether unidoce text should be set on. + /// Get set a value indicating whether unicode text should be set on. /// public bool IsUnicodeText { diff --git a/src/ICSharpCode.SharpZipLib/Zip/ZipExtraData.cs b/src/ICSharpCode.SharpZipLib/Zip/ZipExtraData.cs index 4e075dc8d..cc2e74490 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/ZipExtraData.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/ZipExtraData.cs @@ -15,7 +15,7 @@ public interface ITaggedData /// /// Get the ID for this tagged data value. /// - short TagID { get; } + ushort TagID { get; } /// /// Set the contents of this instance from the data passed. @@ -41,7 +41,7 @@ public class RawTaggedData : ITaggedData /// Initialise a new instance. /// /// The tag ID. - public RawTaggedData(short tag) + public RawTaggedData(ushort tag) { _tag = tag; } @@ -51,7 +51,7 @@ public RawTaggedData(short tag) /// /// Get the ID for this tagged data value. /// - public short TagID + public ushort TagID { get { return _tag; } set { _tag = value; } @@ -100,7 +100,7 @@ public byte[] Data /// /// The tag ID for this instance. 
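ZipEntryFactory no longer consults ZipStrings.UseUnicode and defaults to Unicode entry names; the choice is now made per factory instance. A minimal sketch of opting out for archives aimed at tools that only understand a legacy code page (file name illustrative):

var factory = new ZipEntryFactory { IsUnicodeText = false };
ZipEntry entry = factory.MakeFileEntry("docs/read-me.txt");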
/// - private short _tag; + private ushort _tag; private byte[] _data; @@ -139,7 +139,7 @@ public enum Flags : byte /// /// Get the ID /// - public short TagID + public ushort TagID { get { return 0x5455; } } @@ -153,16 +153,15 @@ public short TagID public void SetData(byte[] data, int index, int count) { using (MemoryStream ms = new MemoryStream(data, index, count, false)) - using (ZipHelperStream helperStream = new ZipHelperStream(ms)) { // bit 0 if set, modification time is present // bit 1 if set, access time is present // bit 2 if set, creation time is present - _flags = (Flags)helperStream.ReadByte(); + _flags = (Flags)ms.ReadByte(); if (((_flags & Flags.ModificationTime) != 0)) { - int iTime = helperStream.ReadLEInt(); + int iTime = ms.ReadLEInt(); _modificationTime = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc) + new TimeSpan(0, 0, 0, iTime, 0); @@ -173,7 +172,7 @@ public void SetData(byte[] data, int index, int count) if ((_flags & Flags.AccessTime) != 0) { - int iTime = helperStream.ReadLEInt(); + int iTime = ms.ReadLEInt(); _lastAccessTime = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc) + new TimeSpan(0, 0, 0, iTime, 0); @@ -181,7 +180,7 @@ public void SetData(byte[] data, int index, int count) if ((_flags & Flags.CreateTime) != 0) { - int iTime = helperStream.ReadLEInt(); + int iTime = ms.ReadLEInt(); _createTime = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc) + new TimeSpan(0, 0, 0, iTime, 0); @@ -196,27 +195,25 @@ public void SetData(byte[] data, int index, int count) public byte[] GetData() { using (MemoryStream ms = new MemoryStream()) - using (ZipHelperStream helperStream = new ZipHelperStream(ms)) { - helperStream.IsStreamOwner = false; - helperStream.WriteByte((byte)_flags); // Flags + ms.WriteByte((byte)_flags); // Flags if ((_flags & Flags.ModificationTime) != 0) { TimeSpan span = _modificationTime - new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc); var seconds = (int)span.TotalSeconds; - helperStream.WriteLEInt(seconds); + ms.WriteLEInt(seconds); } if ((_flags & Flags.AccessTime) != 0) { TimeSpan span = _lastAccessTime - new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc); var seconds = (int)span.TotalSeconds; - helperStream.WriteLEInt(seconds); + ms.WriteLEInt(seconds); } if ((_flags & Flags.CreateTime) != 0) { TimeSpan span = _createTime - new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc); var seconds = (int)span.TotalSeconds; - helperStream.WriteLEInt(seconds); + ms.WriteLEInt(seconds); } return ms.ToArray(); } @@ -328,7 +325,7 @@ public class NTTaggedData : ITaggedData /// /// Get the ID for this tagged data value. 
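Because ITaggedData.TagID is now ushort, custom extra-data tag implementations need the matching signature. A minimal sketch of a raw pass-through tag under an illustrative private-use ID:

internal sealed class MyTag : ITaggedData
{
    private byte[] _data = Array.Empty<byte>();

    public ushort TagID => 0x6b65;   // illustrative private-use tag ID

    public void SetData(byte[] data, int index, int count)
    {
        // Copy only the requested slice of the supplied buffer.
        _data = new byte[count];
        Array.Copy(data, index, _data, 0, count);
    }

    public byte[] GetData() => _data;
}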
/// - public short TagID + public ushort TagID { get { return 10; } } @@ -342,24 +339,23 @@ public short TagID public void SetData(byte[] data, int index, int count) { using (MemoryStream ms = new MemoryStream(data, index, count, false)) - using (ZipHelperStream helperStream = new ZipHelperStream(ms)) { - helperStream.ReadLEInt(); // Reserved - while (helperStream.Position < helperStream.Length) + ms.ReadLEInt(); // Reserved + while (ms.Position < ms.Length) { - int ntfsTag = helperStream.ReadLEShort(); - int ntfsLength = helperStream.ReadLEShort(); + int ntfsTag = ms.ReadLEShort(); + int ntfsLength = ms.ReadLEShort(); if (ntfsTag == 1) { if (ntfsLength >= 24) { - long lastModificationTicks = helperStream.ReadLELong(); + long lastModificationTicks = ms.ReadLELong(); _lastModificationTime = DateTime.FromFileTimeUtc(lastModificationTicks); - long lastAccessTicks = helperStream.ReadLELong(); + long lastAccessTicks = ms.ReadLELong(); _lastAccessTime = DateTime.FromFileTimeUtc(lastAccessTicks); - long createTimeTicks = helperStream.ReadLELong(); + long createTimeTicks = ms.ReadLELong(); _createTime = DateTime.FromFileTimeUtc(createTimeTicks); } break; @@ -367,7 +363,7 @@ public void SetData(byte[] data, int index, int count) else { // An unknown NTFS tag so simply skip it. - helperStream.Seek(ntfsLength, SeekOrigin.Current); + ms.Seek(ntfsLength, SeekOrigin.Current); } } } @@ -380,15 +376,13 @@ public void SetData(byte[] data, int index, int count) public byte[] GetData() { using (MemoryStream ms = new MemoryStream()) - using (ZipHelperStream helperStream = new ZipHelperStream(ms)) - { - helperStream.IsStreamOwner = false; - helperStream.WriteLEInt(0); // Reserved - helperStream.WriteLEShort(1); // Tag - helperStream.WriteLEShort(24); // Length = 3 x 8. - helperStream.WriteLELong(_lastModificationTime.ToFileTimeUtc()); - helperStream.WriteLELong(_lastAccessTime.ToFileTimeUtc()); - helperStream.WriteLELong(_createTime.ToFileTimeUtc()); + { + ms.WriteLEInt(0); // Reserved + ms.WriteLEShort(1); // Tag + ms.WriteLEShort(24); // Length = 3 x 8. + ms.WriteLELong(_lastModificationTime.ToFileTimeUtc()); + ms.WriteLELong(_lastAccessTime.ToFileTimeUtc()); + ms.WriteLELong(_createTime.ToFileTimeUtc()); return ms.ToArray(); } } diff --git a/src/ICSharpCode.SharpZipLib/Zip/ZipFile.cs b/src/ICSharpCode.SharpZipLib/Zip/ZipFile.cs index 3bd66ffeb..7fc1c5592 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/ZipFile.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/ZipFile.cs @@ -7,6 +7,7 @@ using System.Collections; using System.Collections.Generic; using System.IO; +using System.Linq; using System.Security.Cryptography; using System.Text; @@ -313,7 +314,7 @@ public enum FileUpdateMode /// } /// /// - public class ZipFile : IEnumerable, IDisposable + public class ZipFile : IEnumerable, IDisposable { #region KeyHandling @@ -367,7 +368,7 @@ public string Password } else { - key = PkzipClassic.GenerateKeys(ZipStrings.ConvertToArray(value)); + key = PkzipClassic.GenerateKeys(ZipCryptoEncoding.GetBytes(value)); } rawPassword_ = value; @@ -397,13 +398,36 @@ private bool HaveKeys /// /// The file doesn't contain a valid zip archive. /// - public ZipFile(string name) + public ZipFile(string name) : + this(name, null) + { + + } + + /// + /// Opens a Zip file with the given name for reading. + /// + /// The name of the file to open. + /// + /// The argument supplied is null. + /// + /// An i/o error occurs + /// + /// + /// The file doesn't contain a valid zip archive. 
+ /// + public ZipFile(string name, StringCodec stringCodec) { name_ = name ?? throw new ArgumentNullException(nameof(name)); baseStream_ = File.Open(name, FileMode.Open, FileAccess.Read, FileShare.Read); isStreamOwner = true; + if (stringCodec != null) + { + _stringCodec = stringCodec; + } + try { ReadEntries(); @@ -510,7 +534,31 @@ public ZipFile(Stream stream) : /// /// The stream argument is null. /// - public ZipFile(Stream stream, bool leaveOpen) + public ZipFile(Stream stream, bool leaveOpen) : + this(stream, leaveOpen, null) + { + + } + + /// + /// Opens a Zip file reading the given . + /// + /// The to read archive data from. + /// true to leave the stream open when the ZipFile is disposed, false to dispose of it + /// + /// + /// An i/o error occurs + /// + /// + /// The stream doesn't contain a valid zip archive.
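The StringCodec-aware constructors above let callers choose the encoding used for entry names and comments when opening an existing archive, instead of relying on process-wide ZipStrings state. A minimal sketch using StringCodec.FromCodePage, which this change set uses elsewhere; the code page and file name are illustrative, and legacy pages may need the System.Text.Encoding.CodePages provider on .NET Core:

var codec = StringCodec.FromCodePage(866);        // e.g. archives written with DOS Cyrillic names
using var zip = new ZipFile("legacy.zip", codec);
foreach (ZipEntry entry in zip)
{
    Console.WriteLine(entry.Name);
}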
+ ///
+ /// + /// The stream doesnt support seeking. + /// + /// + /// The stream argument is null. + /// + public ZipFile(Stream stream, bool leaveOpen, StringCodec stringCodec) { if (stream == null) { @@ -525,6 +573,11 @@ public ZipFile(Stream stream, bool leaveOpen) baseStream_ = stream; isStreamOwner = !leaveOpen; + if (stringCodec != null) + { + _stringCodec = stringCodec; + } + if (baseStream_.Length > 0) { try @@ -725,6 +778,27 @@ public ZipEntry this[int index] } } + + /// + public Encoding ZipCryptoEncoding + { + get => _stringCodec.ZipCryptoEncoding; + set => _stringCodec = _stringCodec.WithZipCryptoEncoding(value); + } + + /// + public StringCodec StringCodec + { + set { + _stringCodec = value; + if (!isNewArchive_) + { + // Since the string codec was changed + ReadEntries(); + } + } + } + #endregion Properties #region Input Handling @@ -736,7 +810,31 @@ public ZipEntry this[int index] /// /// The Zip file has been closed. /// - public IEnumerator GetEnumerator() + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + /// + /// Gets an enumerator for the Zip entries in this Zip file. + /// + /// Returns an for this archive. + /// + /// The Zip file has been closed. + /// + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + + /// + /// Gets an enumerator for the Zip entries in this Zip file. + /// + /// Returns an for this archive. + /// + /// The Zip file has been closed. + /// + public ZipEntryEnumerator GetEnumerator() { if (isDisposed_) { @@ -880,7 +978,7 @@ public Stream GetInputStream(long entryIndex) case CompressionMethod.Deflated: // No need to worry about ownership and closing as underlying stream close does nothing. - result = new InflaterInputStream(result, new Inflater(true)); + result = new InflaterInputStream(result, InflaterPool.Instance.Rent(true)); break; case CompressionMethod.BZip2: @@ -1003,10 +1101,8 @@ public bool TestArchive(bool testData, TestStrategy strategy, ZipTestResultHandl if ((this[entryIndex].Flags & (int)GeneralBitFlags.Descriptor) != 0) { - var helper = new ZipHelperStream(baseStream_); var data = new DescriptorData(); - helper.ReadDataDescriptor(this[entryIndex].LocalHeaderRequiresZip64, data); - + ZipFormat.ReadDataDescriptor(baseStream_, this[entryIndex].LocalHeaderRequiresZip64, data); if (checkCRC && this[entryIndex].Crc != data.Crc) { status.AddError(); @@ -1065,6 +1161,7 @@ public bool TestArchive(bool testData, TestStrategy strategy, ZipTestResultHandl [Flags] private enum HeaderTest { + None = 0x0, Extract = 0x01, // Check that this header represents an entry whose data can be extracted Header = 0x02, // Check that this header contents are valid } @@ -1091,13 +1188,12 @@ private long TestLocalHeader(ZipEntry entry, HeaderTest tests) if (signature != ZipConstants.LocalHeaderSignature) { - throw new ZipException(string.Format("Wrong local header signature at 0x{0:x}, expected 0x{1:x8}, actual 0x{2:x8}", - entryAbsOffset, ZipConstants.LocalHeaderSignature, signature)); + throw new ZipException($"Wrong local header signature at 0x{entryAbsOffset:x}, expected 0x{ZipConstants.LocalHeaderSignature:x8}, actual 0x{signature:x8}"); } var extractVersion = (short)(ReadLEUshort() & 0x00ff); - var localFlags = (short)ReadLEUshort(); - var compressionMethod = (short)ReadLEUshort(); + var localFlags = (GeneralBitFlags)ReadLEUshort(); + var compressionMethod = (CompressionMethod)ReadLEUshort(); var fileTime = (short)ReadLEUshort(); var fileDate = (short)ReadLEUshort(); uint crcValue = ReadLEUint(); @@ -1115,7 
+1211,7 @@ private long TestLocalHeader(ZipEntry entry, HeaderTest tests) var localExtraData = new ZipExtraData(extraData); // Extra data / zip64 checks - if (localExtraData.Find(1)) + if (localExtraData.Find(headerID: 1)) { // 2010-03-04 Forum 10512: removed checks for version >= ZipConstants.VersionZip64 // and size or compressedSize = MaxValue, due to rogue creators. @@ -1123,15 +1219,15 @@ private long TestLocalHeader(ZipEntry entry, HeaderTest tests) size = localExtraData.ReadLong(); compressedSize = localExtraData.ReadLong(); - if ((localFlags & (int)GeneralBitFlags.Descriptor) != 0) + if (localFlags.HasAny(GeneralBitFlags.Descriptor)) { // These may be valid if patched later - if ((size != -1) && (size != entry.Size)) + if ((size != 0) && (size != entry.Size)) { throw new ZipException("Size invalid for descriptor"); } - if ((compressedSize != -1) && (compressedSize != entry.CompressedSize)) + if ((compressedSize != 0) && (compressedSize != entry.CompressedSize)) { throw new ZipException("Compressed size invalid for descriptor"); } @@ -1156,15 +1252,19 @@ private long TestLocalHeader(ZipEntry entry, HeaderTest tests) throw new ZipException("Compression method not supported"); } - if ((extractVersion > ZipConstants.VersionMadeBy) - || ((extractVersion > 20) && (extractVersion < ZipConstants.VersionZip64))) + if (extractVersion > ZipConstants.VersionMadeBy + || (extractVersion > 20 && extractVersion < ZipConstants.VersionZip64)) { - throw new ZipException(string.Format("Version required to extract this entry not supported ({0})", extractVersion)); + throw new ZipException($"Version required to extract this entry not supported ({extractVersion})"); } - if ((localFlags & (int)(GeneralBitFlags.Patched | GeneralBitFlags.StrongEncryption | GeneralBitFlags.EnhancedCompress | GeneralBitFlags.HeaderMasked)) != 0) + const GeneralBitFlags notSupportedFlags = GeneralBitFlags.Patched + | GeneralBitFlags.StrongEncryption + | GeneralBitFlags.EnhancedCompress + | GeneralBitFlags.HeaderMasked; + if (localFlags.HasAny(notSupportedFlags)) { - throw new ZipException("The library does not support the zip version required to extract this entry"); + throw new ZipException($"The library does not support the zip features required to extract this entry ({localFlags & notSupportedFlags:F})"); } } } @@ -1188,51 +1288,53 @@ private long TestLocalHeader(ZipEntry entry, HeaderTest tests) (extractVersion != 63) ) { - throw new ZipException(string.Format("Version required to extract this entry is invalid ({0})", extractVersion)); + throw new ZipException($"Version required to extract this entry is invalid ({extractVersion})"); } + var localEncoding = _stringCodec.ZipInputEncoding(localFlags); + // Local entry flags dont have reserved bit set on. 
- if ((localFlags & (int)(GeneralBitFlags.ReservedPKware4 | GeneralBitFlags.ReservedPkware14 | GeneralBitFlags.ReservedPkware15)) != 0) + if (localFlags.HasAny(GeneralBitFlags.ReservedPKware4 | GeneralBitFlags.ReservedPkware14 | GeneralBitFlags.ReservedPkware15)) { throw new ZipException("Reserved bit flags cannot be set."); } // Encryption requires extract version >= 20 - if (((localFlags & (int)GeneralBitFlags.Encrypted) != 0) && (extractVersion < 20)) + if (localFlags.HasAny(GeneralBitFlags.Encrypted) && extractVersion < 20) { - throw new ZipException(string.Format("Version required to extract this entry is too low for encryption ({0})", extractVersion)); + throw new ZipException($"Version required to extract this entry is too low for encryption ({extractVersion})"); } // Strong encryption requires encryption flag to be set and extract version >= 50. - if ((localFlags & (int)GeneralBitFlags.StrongEncryption) != 0) + if (localFlags.HasAny(GeneralBitFlags.StrongEncryption)) { - if ((localFlags & (int)GeneralBitFlags.Encrypted) == 0) + if (!localFlags.HasAny(GeneralBitFlags.Encrypted)) { throw new ZipException("Strong encryption flag set but encryption flag is not set"); } if (extractVersion < 50) { - throw new ZipException(string.Format("Version required to extract this entry is too low for encryption ({0})", extractVersion)); + throw new ZipException($"Version required to extract this entry is too low for encryption ({extractVersion})"); } } // Patched entries require extract version >= 27 - if (((localFlags & (int)GeneralBitFlags.Patched) != 0) && (extractVersion < 27)) + if (localFlags.HasAny(GeneralBitFlags.Patched) && extractVersion < 27) { - throw new ZipException(string.Format("Patched data requires higher version than ({0})", extractVersion)); + throw new ZipException($"Patched data requires higher version than ({extractVersion})"); } // Central header flags match local entry flags. 
- if (localFlags != entry.Flags) + if ((int)localFlags != entry.Flags) { - throw new ZipException("Central header/local header flags mismatch"); + throw new ZipException($"Central header/local header flags mismatch ({(GeneralBitFlags)entry.Flags:F} vs {localFlags:F})"); } // Central header compression method matches local entry - if (entry.CompressionMethodForHeader != (CompressionMethod)compressionMethod) + if (entry.CompressionMethodForHeader != compressionMethod) { - throw new ZipException("Central header/local header compression method mismatch"); + throw new ZipException($"Central header/local header compression method mismatch ({entry.CompressionMethodForHeader:G} vs {compressionMethod:G})"); } if (entry.Version != extractVersion) @@ -1241,7 +1343,7 @@ private long TestLocalHeader(ZipEntry entry, HeaderTest tests) } // Strong encryption and extract version match - if ((localFlags & (int)GeneralBitFlags.StrongEncryption) != 0) + if (localFlags.HasAny(GeneralBitFlags.StrongEncryption)) { if (extractVersion < 62) { @@ -1249,15 +1351,15 @@ private long TestLocalHeader(ZipEntry entry, HeaderTest tests) } } - if ((localFlags & (int)GeneralBitFlags.HeaderMasked) != 0) + if (localFlags.HasAny(GeneralBitFlags.HeaderMasked)) { - if ((fileTime != 0) || (fileDate != 0)) + if (fileTime != 0 || fileDate != 0) { throw new ZipException("Header masked set but date/time values non-zero"); } } - if ((localFlags & (int)GeneralBitFlags.Descriptor) == 0) + if (!localFlags.HasAny(GeneralBitFlags.Descriptor)) { if (crcValue != (uint)entry.Crc) { @@ -1266,8 +1368,8 @@ private long TestLocalHeader(ZipEntry entry, HeaderTest tests) } // Crc valid for empty entry. - // This will also apply to streamed entries where size isnt known and the header cant be patched - if ((size == 0) && (compressedSize == 0)) + // This will also apply to streamed entries where size isn't known and the header cant be patched + if (size == 0 && compressedSize == 0) { if (crcValue != 0) { @@ -1283,7 +1385,7 @@ private long TestLocalHeader(ZipEntry entry, HeaderTest tests) } // Name data has already been read convert it and compare. - string localName = ZipStrings.ConvertToStringExt(localFlags, nameData); + string localName = localEncoding.GetString(nameData); // Central directory and local entry name match if (localName != entry.Name) @@ -1327,23 +1429,18 @@ private long TestLocalHeader(ZipEntry entry, HeaderTest tests) // Size can be verified only if it is known in the local header. // it will always be known in the central header. 
- if (((localFlags & (int)GeneralBitFlags.Descriptor) == 0) || + if (!localFlags.HasAny(GeneralBitFlags.Descriptor) || ((size > 0 || compressedSize > 0) && entry.Size > 0)) { - if ((size != 0) - && (size != entry.Size)) + if (size != 0 && size != entry.Size) { - throw new ZipException( - string.Format("Size mismatch between central header({0}) and local header({1})", - entry.Size, size)); + throw new ZipException($"Size mismatch between central header ({entry.Size}) and local header ({size})"); } - if ((compressedSize != 0) + if (compressedSize != 0 && (compressedSize != entry.CompressedSize && compressedSize != 0xFFFFFFFF && compressedSize != -1)) { - throw new ZipException( - string.Format("Compressed size mismatch between central header({0}) and local header({1})", - entry.CompressedSize, compressedSize)); + throw new ZipException($"Compressed size mismatch between central header({entry.CompressedSize}) and local header({compressedSize})"); } } @@ -1572,21 +1669,18 @@ public void CommitUpdate() { RunUpdates(); } - else if (commentEdited_) + else if (commentEdited_ && !isNewArchive_) { UpdateCommentOnly(); } else { // Create an empty archive if none existed originally. - if (entries_.Length == 0) - { - byte[] theComment = (newComment_ != null) ? newComment_.RawComment : ZipStrings.ConvertToArray(comment_); - using (ZipHelperStream zhs = new ZipHelperStream(baseStream_)) - { - zhs.WriteEndOfCentralDirectory(0, 0, 0, theComment); - } - } + if (entries_.Length != 0) return; + byte[] theComment = (newComment_ != null) + ? newComment_.RawComment + : _stringCodec.ZipArchiveCommentEncoding.GetBytes(comment_); + ZipFormat.WriteEndOfCentralDirectory(baseStream_, 0, 0, 0, theComment); } } finally @@ -1619,7 +1713,7 @@ public void SetComment(string comment) CheckUpdating(); - newComment_ = new ZipString(comment); + newComment_ = new ZipString(comment, _stringCodec.ZipArchiveCommentEncoding); if (newComment_.RawLength > 0xffff) { @@ -2147,7 +2241,8 @@ private void WriteLocalEntryHeader(ZipUpdate update) WriteLEInt((int)entry.Size); } - byte[] name = ZipStrings.ConvertToArray(entry.Flags, entry.Name); + var entryEncoding = _stringCodec.ZipInputEncoding(entry.Flags); + byte[] name = entryEncoding.GetBytes(entry.Name); if (name.Length > 0xFFFF) { @@ -2254,7 +2349,8 @@ private int WriteCentralDirectoryHeader(ZipEntry entry) WriteLEInt((int)entry.Size); } - byte[] name = ZipStrings.ConvertToArray(entry.Flags, entry.Name); + var entryEncoding = _stringCodec.ZipInputEncoding(entry.Flags); + byte[] name = entryEncoding.GetBytes(entry.Name); if (name.Length > 0xFFFF) { @@ -2487,7 +2583,7 @@ private void CopyBytes(ZipUpdate update, Stream destination, Stream source, /// The descriptor size, zero if there isn't one. 
private static int GetDescriptorSize(ZipUpdate update, bool includingSignature) { - if (!((GeneralBitFlags)update.Entry.Flags).HasFlag(GeneralBitFlags.Descriptor)) + if (!((GeneralBitFlags)update.Entry.Flags).HasAny(GeneralBitFlags.Descriptor)) return 0; var descriptorWithSignature = update.Entry.LocalHeaderRequiresZip64 @@ -2728,8 +2824,7 @@ private void AddEntry(ZipFile workFile, ZipUpdate update) if ((update.OutEntry.Flags & (int)GeneralBitFlags.Descriptor) == (int)GeneralBitFlags.Descriptor) { - var helper = new ZipHelperStream(workFile.baseStream_); - helper.WriteDataDescriptor(update.OutEntry); + ZipFormat.WriteDataDescriptor(workFile.baseStream_, update.OutEntry); } } } @@ -2866,15 +2961,11 @@ private void UpdateCommentOnly() { long baseLength = baseStream_.Length; - ZipHelperStream updateFile = null; + Stream updateFile; if (archiveStorage_.UpdateMode == FileUpdateMode.Safe) { - Stream copyStream = archiveStorage_.MakeTemporaryCopy(baseStream_); - updateFile = new ZipHelperStream(copyStream) - { - IsStreamOwner = true - }; + updateFile = archiveStorage_.MakeTemporaryCopy(baseStream_); baseStream_.Dispose(); baseStream_ = null; @@ -2891,21 +2982,21 @@ private void UpdateCommentOnly() // Need to tidy up the archive storage interface and contract basically. baseStream_ = archiveStorage_.OpenForDirectUpdate(baseStream_); - updateFile = new ZipHelperStream(baseStream_); + updateFile = baseStream_; } else { baseStream_.Dispose(); baseStream_ = null; - updateFile = new ZipHelperStream(Name); + updateFile = new FileStream(Name, FileMode.Open, FileAccess.ReadWrite); } } - using (updateFile) + try { long locatedCentralDirOffset = - updateFile.LocateBlockWithSignature(ZipConstants.EndOfCentralDirectorySignature, - baseLength, ZipConstants.EndOfCentralRecordBaseSize, 0xffff); + ZipFormat.LocateBlockWithSignature(updateFile, ZipConstants.EndOfCentralDirectorySignature, + baseLength, ZipConstants.EndOfCentralRecordBaseSize, 0xffff); if (locatedCentralDirOffset < 0) { throw new ZipException("Cannot find central directory"); @@ -2920,6 +3011,11 @@ private void UpdateCommentOnly() updateFile.Write(rawComment, 0, rawComment.Length); updateFile.SetLength(updateFile.Position); } + finally + { + if(updateFile != baseStream_) + updateFile.Dispose(); + } if (archiveStorage_.UpdateMode == FileUpdateMode.Safe) { @@ -3081,11 +3177,9 @@ private void RunUpdates() } } - byte[] theComment = (newComment_ != null) ? newComment_.RawComment : ZipStrings.ConvertToArray(comment_); - using (ZipHelperStream zhs = new ZipHelperStream(workFile.baseStream_)) - { - zhs.WriteEndOfCentralDirectory(updateCount_, sizeEntries, centralDirOffset, theComment); - } + byte[] theComment = newComment_?.RawComment ?? _stringCodec.ZipArchiveCommentEncoding.GetBytes(comment_); + ZipFormat.WriteEndOfCentralDirectory(workFile.baseStream_, updateCount_, + sizeEntries, centralDirOffset, theComment); endOfStream = workFile.baseStream_.Position; @@ -3426,13 +3520,8 @@ private ulong ReadLEUlong() #endregion Reading // NOTE this returns the offset of the first byte after the signature. 
- private long LocateBlockWithSignature(int signature, long endLocation, int minimumBlockSize, int maximumVariableData) - { - using (ZipHelperStream les = new ZipHelperStream(baseStream_)) - { - return les.LocateBlockWithSignature(signature, endLocation, minimumBlockSize, maximumVariableData); - } - } + private long LocateBlockWithSignature(int signature, long endLocation, int minimumBlockSize, int maximumVariableData) + => ZipFormat.LocateBlockWithSignature(baseStream_, signature, endLocation, minimumBlockSize, maximumVariableData); /// /// Search for and read the central directory of a zip file filling the entries array. @@ -3481,7 +3570,7 @@ private void ReadEntries() byte[] comment = new byte[commentSize]; StreamUtils.ReadFully(baseStream_, comment); - comment_ = ZipStrings.ConvertToString(comment); + comment_ = _stringCodec.ZipArchiveCommentEncoding.GetString(comment); } else { @@ -3489,20 +3578,16 @@ private void ReadEntries() } bool isZip64 = false; - bool requireZip64 = false; - + // Check if zip64 header information is required. - if ((thisDiskNumber == 0xffff) || - (startCentralDirDisk == 0xffff) || - (entriesForThisDisk == 0xffff) || - (entriesForWholeCentralDir == 0xffff) || - (centralDirSize == 0xffffffff) || - (offsetOfCentralDir == 0xffffffff)) - { - requireZip64 = true; - } - - // #357 - always check for the existance of the Zip64 central directory. + bool requireZip64 = thisDiskNumber == 0xffff || + startCentralDirDisk == 0xffff || + entriesForThisDisk == 0xffff || + entriesForWholeCentralDir == 0xffff || + centralDirSize == 0xffffffff || + offsetOfCentralDir == 0xffffffff; + + // #357 - always check for the existence of the Zip64 central directory. // #403 - Take account of the fixed size of the locator when searching. // Subtract from locatedEndOfCentralDir so that the endLocation is the location of EndOfCentralDirectorySignature, // rather than the data following the signature. @@ -3536,7 +3621,7 @@ private void ReadEntries() if (sig64 != ZipConstants.Zip64CentralFileHeaderSignature) { - throw new ZipException(string.Format("Invalid Zip64 Central directory signature at {0:X}", offset64)); + throw new ZipException($"Invalid Zip64 Central directory signature at {offset64:X}"); } // NOTE: Record size = SizeOfFixedFields + SizeOfVariableData - 12. 
@@ -3591,18 +3676,23 @@ private void ReadEntries() int extraLen = ReadLEUshort(); int commentLen = ReadLEUshort(); - int diskStartNo = ReadLEUshort(); // Not currently used - int internalAttributes = ReadLEUshort(); // Not currently used + + // ReSharper disable once UnusedVariable, Currently unused but needs to be read to offset the stream + int diskStartNo = ReadLEUshort(); + // ReSharper disable once UnusedVariable, Currently unused but needs to be read to offset the stream + int internalAttributes = ReadLEUshort(); uint externalAttributes = ReadLEUint(); long offset = ReadLEUint(); byte[] buffer = new byte[Math.Max(nameLen, commentLen)]; + var entryEncoding = _stringCodec.ZipInputEncoding(bitFlags); StreamUtils.ReadFully(baseStream_, buffer, 0, nameLen); - string name = ZipStrings.ConvertToStringExt(bitFlags, buffer, nameLen); + string name = entryEncoding.GetString(buffer, 0, nameLen); + var unicode = entryEncoding.IsZipUnicode(); - var entry = new ZipEntry(name, versionToExtract, versionMadeBy, (CompressionMethod)method) + var entry = new ZipEntry(name, versionToExtract, versionMadeBy, (CompressionMethod)method, unicode) { Crc = crc & 0xffffffffL, Size = size & 0xffffffffL, @@ -3614,7 +3704,7 @@ private void ReadEntries() ExternalFileAttributes = (int)externalAttributes }; - if ((bitFlags & 8) == 0) + if (!entry.HasFlag(GeneralBitFlags.Descriptor)) { entry.CryptoCheckValue = (byte)(crc >> 24); } @@ -3635,7 +3725,7 @@ private void ReadEntries() if (commentLen > 0) { StreamUtils.ReadFully(baseStream_, buffer, 0, commentLen); - entry.Comment = ZipStrings.ConvertToStringExt(bitFlags, buffer, commentLen); + entry.Comment = entryEncoding.GetString(buffer, 0, commentLen); } entries_[i] = entry; @@ -3657,9 +3747,15 @@ private void ReadEntries() /// private long LocateEntry(ZipEntry entry) { - return TestLocalHeader(entry, HeaderTest.Extract); + return TestLocalHeader(entry, SkipLocalEntryTestsOnLocate ? HeaderTest.None : HeaderTest.Extract); } + /// + /// Skip the verification of the local header when reading an archive entry. Set this to attempt to read the + /// entries even if the headers should indicate that doing so would fail or produce an unexpected output. 
+ /// + public bool SkipLocalEntryTestsOnLocate { get; set; } = false; + private Stream CreateAndInitDecryptionStream(Stream baseStream, ZipEntry entry) { CryptoStream result = null; @@ -3676,15 +3772,15 @@ private Stream CreateAndInitDecryptionStream(Stream baseStream, ZipEntry entry) } int saltLen = entry.AESSaltLen; byte[] saltBytes = new byte[saltLen]; - int saltIn = StreamUtils.ReadRequestedBytes(baseStream, saltBytes, 0, saltLen); - if (saltIn != saltLen) - throw new ZipException("AES Salt expected " + saltLen + " got " + saltIn); - // + int saltIn = StreamUtils.ReadRequestedBytes(baseStream, saltBytes, offset: 0, saltLen); + + if (saltIn != saltLen) throw new ZipException($"AES Salt expected {saltLen} git {saltIn}"); + byte[] pwdVerifyRead = new byte[2]; StreamUtils.ReadFully(baseStream, pwdVerifyRead); int blockSize = entry.AESKeySize / 8; // bits to bytes - var decryptor = new ZipAESTransform(rawPassword_, saltBytes, blockSize, false); + var decryptor = new ZipAESTransform(rawPassword_, saltBytes, blockSize, writeMode: false); byte[] pwdVerifyCalc = decryptor.PwdVerifier; if (pwdVerifyCalc[0] != pwdVerifyRead[0] || pwdVerifyCalc[1] != pwdVerifyRead[1]) throw new ZipException("Invalid password for AES"); @@ -3697,8 +3793,7 @@ private Stream CreateAndInitDecryptionStream(Stream baseStream, ZipEntry entry) } else { - if ((entry.Version < ZipConstants.VersionStrongEncryption) - || (entry.Flags & (int)GeneralBitFlags.StrongEncryption) == 0) + if (entry.Version < ZipConstants.VersionStrongEncryption || !entry.HasFlag(GeneralBitFlags.StrongEncryption)) { var classicManaged = new PkzipClassicManaged(); @@ -3723,31 +3818,29 @@ private Stream CreateAndInitDecryptionStream(Stream baseStream, ZipEntry entry) private Stream CreateAndInitEncryptionStream(Stream baseStream, ZipEntry entry) { - CryptoStream result = null; - if ((entry.Version < ZipConstants.VersionStrongEncryption) - || (entry.Flags & (int)GeneralBitFlags.StrongEncryption) == 0) - { - var classicManaged = new PkzipClassicManaged(); + if (entry.Version >= ZipConstants.VersionStrongEncryption && + entry.HasFlag(GeneralBitFlags.StrongEncryption)) return null; - OnKeysRequired(entry.Name); - if (HaveKeys == false) - { - throw new ZipException("No password available for encrypted stream"); - } + var classicManaged = new PkzipClassicManaged(); - // Closing a CryptoStream will close the base stream as well so wrap it in an UncompressedStream - // which doesnt do this. - result = new CryptoStream(new UncompressedStream(baseStream), - classicManaged.CreateEncryptor(key, null), CryptoStreamMode.Write); + OnKeysRequired(entry.Name); + if (HaveKeys == false) + { + throw new ZipException("No password available for encrypted stream"); + } - if ((entry.Crc < 0) || (entry.Flags & 8) != 0) - { - WriteEncryptionHeader(result, entry.DosTime << 16); - } - else - { - WriteEncryptionHeader(result, entry.Crc); - } + // Closing a CryptoStream will close the base stream as well so wrap it in an UncompressedStream + // which doesnt do this. 
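SkipLocalEntryTestsOnLocate is an escape hatch for archives whose local headers disagree with the central directory: it bypasses the local-header checks when an entry stream is located, so reads may still fail later or return unexpected data. A minimal sketch (file name illustrative):

using var zip = new ZipFile("suspect.zip") { SkipLocalEntryTestsOnLocate = true };
using var entryStream = zip.GetInputStream(zip[0]);   // no local-header validation before reading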
+ var result = new CryptoStream(new UncompressedStream(baseStream), + classicManaged.CreateEncryptor(key, null), CryptoStreamMode.Write); + + if (entry.Crc < 0 || entry.HasFlag(GeneralBitFlags.Descriptor)) + { + WriteEncryptionHeader(result, entry.DosTime << 16); + } + else + { + WriteEncryptionHeader(result, entry.Crc); } return result; } @@ -3765,12 +3858,12 @@ private static void CheckClassicPassword(CryptoStream classicCryptoStream, ZipEn private static void WriteEncryptionHeader(Stream stream, long crcValue) { byte[] cryptBuffer = new byte[ZipConstants.CryptoHeaderSize]; - using (var rng = new RNGCryptoServiceProvider()) + using (var rng = RandomNumberGenerator.Create()) { rng.GetBytes(cryptBuffer); } cryptBuffer[11] = (byte)(crcValue >> 24); - stream.Write(cryptBuffer, 0, cryptBuffer.Length); + stream.Write(cryptBuffer, offset: 0, cryptBuffer.Length); } #endregion Internal routines @@ -3779,7 +3872,7 @@ private static void WriteEncryptionHeader(Stream stream, long crcValue) private bool isDisposed_; private string name_; - private string comment_; + private string comment_ = string.Empty; private string rawPassword_; private Stream baseStream_; private bool isStreamOwner; @@ -3787,6 +3880,7 @@ private static void WriteEncryptionHeader(Stream stream, long crcValue) private ZipEntry[] entries_; private byte[] key; private bool isNewArchive_; + private StringCodec _stringCodec = ZipStrings.GetStringCodec(); // Default is dynamic which is not backwards compatible and can cause problems // with XP's built in compression which cant read Zip64 archives. @@ -3825,19 +3919,23 @@ private class ZipString /// Initialise a with a string. /// /// The textual string form. - public ZipString(string comment) + /// + public ZipString(string comment, Encoding encoding) { comment_ = comment; isSourceString_ = true; + _encoding = encoding; } /// /// Initialise a using a string in its binary 'raw' form. /// /// - public ZipString(byte[] rawString) + /// + public ZipString(byte[] rawString, Encoding encoding) { rawComment_ = rawString; + _encoding = encoding; } #endregion Constructors @@ -3846,10 +3944,7 @@ public ZipString(byte[] rawString) /// Get a value indicating the original source of data for this instance. /// True if the source was a string; false if the source was binary data. ///
- public bool IsSourceString - { - get { return isSourceString_; } - } + public bool IsSourceString => isSourceString_; /// /// Get the length of the comment when represented as raw bytes. @@ -3894,7 +3989,7 @@ private void MakeTextAvailable() { if (comment_ == null) { - comment_ = ZipStrings.ConvertToString(rawComment_); + comment_ = _encoding.GetString(rawComment_); } } @@ -3902,7 +3997,7 @@ private void MakeBytesAvailable() { if (rawComment_ == null) { - rawComment_ = ZipStrings.ConvertToArray(comment_); + rawComment_ = _encoding.GetBytes(comment_); } } @@ -3911,7 +4006,7 @@ private void MakeBytesAvailable() /// /// The to convert to a string. /// The textual equivalent for the input value. - static public implicit operator string(ZipString zipString) + public static implicit operator string(ZipString zipString) { zipString.MakeTextAvailable(); return zipString.comment_; @@ -3922,6 +4017,7 @@ static public implicit operator string(ZipString zipString) private string comment_; private byte[] rawComment_; private readonly bool isSourceString_; + private readonly Encoding _encoding; #endregion Instance Fields } @@ -3929,20 +4025,26 @@ static public implicit operator string(ZipString zipString) /// /// An enumerator for Zip entries /// - private class ZipEntryEnumerator : IEnumerator + public struct ZipEntryEnumerator : IEnumerator { #region Constructors + /// + /// Constructs a new instance of . + /// + /// Entries to iterate. public ZipEntryEnumerator(ZipEntry[] entries) { array = entries; + index = -1; } #endregion Constructors #region IEnumerator Members - public object Current + /// + public ZipEntry Current { get { @@ -3950,22 +4052,32 @@ public object Current } } + /// + object IEnumerator.Current => Current; + + /// public void Reset() { index = -1; } + /// public bool MoveNext() { return (++index < array.Length); } + /// + public void Dispose() + { + } + #endregion IEnumerator Members #region Instance Fields private ZipEntry[] array; - private int index = -1; + private int index; #endregion Instance Fields } diff --git a/src/ICSharpCode.SharpZipLib/Zip/ZipFormat.cs b/src/ICSharpCode.SharpZipLib/Zip/ZipFormat.cs new file mode 100644 index 000000000..ec63d7943 --- /dev/null +++ b/src/ICSharpCode.SharpZipLib/Zip/ZipFormat.cs @@ -0,0 +1,598 @@ +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using ICSharpCode.SharpZipLib.Core; + +namespace ICSharpCode.SharpZipLib.Zip +{ + /// + /// Holds data pertinent to a data descriptor. + /// + public class DescriptorData + { + private long _crc; + + /// + /// Get /set the compressed size of data. + /// + public long CompressedSize { get; set; } + + /// + /// Get / set the uncompressed size of data + /// + public long Size { get; set; } + + /// + /// Get /set the crc value. + /// + public long Crc + { + get => _crc; + set => _crc = (value & 0xffffffff); + } + } + + internal struct EntryPatchData + { + public long SizePatchOffset { get; set; } + + public long CrcPatchOffset { get; set; } + } + + /// + /// This class assists with writing/reading from Zip files. 
+ /// + internal static class ZipFormat + { + // Write the local file header + // TODO: ZipFormat.WriteLocalHeader is not yet used and needs checking for ZipFile and ZipOuptutStream usage + internal static int WriteLocalHeader(Stream stream, ZipEntry entry, out EntryPatchData patchData, + bool headerInfoAvailable, bool patchEntryHeader, long streamOffset, StringCodec stringCodec) + { + patchData = new EntryPatchData(); + + stream.WriteLEInt(ZipConstants.LocalHeaderSignature); + stream.WriteLEShort(entry.Version); + stream.WriteLEShort(entry.Flags); + stream.WriteLEShort((byte)entry.CompressionMethodForHeader); + stream.WriteLEInt((int)entry.DosTime); + + if (headerInfoAvailable) + { + stream.WriteLEInt((int)entry.Crc); + if (entry.LocalHeaderRequiresZip64) + { + stream.WriteLEInt(-1); + stream.WriteLEInt(-1); + } + else + { + stream.WriteLEInt((int)entry.CompressedSize + entry.EncryptionOverheadSize); + stream.WriteLEInt((int)entry.Size); + } + } + else + { + if (patchEntryHeader) + patchData.CrcPatchOffset = streamOffset + stream.Position; + + stream.WriteLEInt(0); // Crc + + if (patchEntryHeader) + patchData.SizePatchOffset = streamOffset + stream.Position; + + // For local header both sizes appear in Zip64 Extended Information + if (entry.LocalHeaderRequiresZip64 && patchEntryHeader) + { + stream.WriteLEInt(-1); + stream.WriteLEInt(-1); + } + else + { + stream.WriteLEInt(0); // Compressed size + stream.WriteLEInt(0); // Uncompressed size + } + } + + byte[] name = stringCodec.ZipEncoding(entry.IsUnicodeText).GetBytes(entry.Name); + + if (name.Length > 0xFFFF) + { + throw new ZipException("Entry name too long."); + } + + var ed = new ZipExtraData(entry.ExtraData); + + if (entry.LocalHeaderRequiresZip64) + { + ed.StartNewEntry(); + if (headerInfoAvailable) + { + ed.AddLeLong(entry.Size); + ed.AddLeLong(entry.CompressedSize + entry.EncryptionOverheadSize); + } + else + { + // If the sizes are stored in the descriptor, the local Zip64 sizes should be 0 + ed.AddLeLong(0); + ed.AddLeLong(0); + } + ed.AddNewEntry(1); + + if (!ed.Find(1)) + { + throw new ZipException("Internal error cant find extra data"); + } + + patchData.SizePatchOffset = ed.CurrentReadIndex; + } + else + { + ed.Delete(1); + } + + if (entry.AESKeySize > 0) + { + AddExtraDataAES(entry, ed); + } + byte[] extra = ed.GetEntryData(); + + stream.WriteLEShort(name.Length); + stream.WriteLEShort(extra.Length); + + if (name.Length > 0) + { + stream.Write(name, 0, name.Length); + } + + if (entry.LocalHeaderRequiresZip64 && patchEntryHeader) + { + patchData.SizePatchOffset += streamOffset + stream.Position; + } + + if (extra.Length > 0) + { + stream.Write(extra, 0, extra.Length); + } + + return ZipConstants.LocalHeaderBaseSize + name.Length + extra.Length; + } + + /// + /// Locates a block with the desired . + /// + /// + /// The signature to find. + /// Location, marking the end of block. + /// Minimum size of the block. + /// The maximum variable data. + /// Returns the offset of the first byte after the signature; -1 if not found + internal static long LocateBlockWithSignature(Stream stream, int signature, long endLocation, int minimumBlockSize, int maximumVariableData) + { + long pos = endLocation - minimumBlockSize; + if (pos < 0) + { + return -1; + } + + long giveUpMarker = Math.Max(pos - maximumVariableData, 0); + + // TODO: This loop could be optimized for speed. 
+ do + { + if (pos < giveUpMarker) + { + return -1; + } + stream.Seek(pos--, SeekOrigin.Begin); + } while (stream.ReadLEInt() != signature); + + return stream.Position; + } + + /// + public static async Task WriteZip64EndOfCentralDirectoryAsync(Stream stream, long noOfEntries, + long sizeEntries, long centralDirOffset, CancellationToken cancellationToken) + { + await stream.WriteProcToStreamAsync(s => WriteZip64EndOfCentralDirectory(s, noOfEntries, sizeEntries, centralDirOffset), cancellationToken).ConfigureAwait(false); + } + + /// + /// Write Zip64 end of central directory records (File header and locator). + /// + /// + /// The number of entries in the central directory. + /// The size of entries in the central directory. + /// The offset of the central directory. + internal static void WriteZip64EndOfCentralDirectory(Stream stream, long noOfEntries, long sizeEntries, long centralDirOffset) + { + long centralSignatureOffset = centralDirOffset + sizeEntries; + stream.WriteLEInt(ZipConstants.Zip64CentralFileHeaderSignature); + stream.WriteLELong(44); // Size of this record (total size of remaining fields in header or full size - 12) + stream.WriteLEShort(ZipConstants.VersionMadeBy); // Version made by + stream.WriteLEShort(ZipConstants.VersionZip64); // Version to extract + stream.WriteLEInt(0); // Number of this disk + stream.WriteLEInt(0); // number of the disk with the start of the central directory + stream.WriteLELong(noOfEntries); // No of entries on this disk + stream.WriteLELong(noOfEntries); // Total No of entries in central directory + stream.WriteLELong(sizeEntries); // Size of the central directory + stream.WriteLELong(centralDirOffset); // offset of start of central directory + // zip64 extensible data sector not catered for here (variable size) + + // Write the Zip64 end of central directory locator + stream.WriteLEInt(ZipConstants.Zip64CentralDirLocatorSignature); + + // no of the disk with the start of the zip64 end of central directory + stream.WriteLEInt(0); + + // relative offset of the zip64 end of central directory record + stream.WriteLELong(centralSignatureOffset); + + // total number of disks + stream.WriteLEInt(1); + } + + /// + public static async Task WriteEndOfCentralDirectoryAsync(Stream stream, long noOfEntries, long sizeEntries, + long start, byte[] comment, CancellationToken cancellationToken) + => await stream.WriteProcToStreamAsync(s + => WriteEndOfCentralDirectory(s, noOfEntries, sizeEntries, start, comment), cancellationToken).ConfigureAwait(false); + + /// + /// Write the required records to end the central directory. + /// + /// + /// The number of entries in the directory. + /// The size of the entries in the directory. + /// The start of the central directory. + /// The archive comment. (This can be null). 
+ + internal static void WriteEndOfCentralDirectory(Stream stream, long noOfEntries, long sizeEntries, long start, byte[] comment) + { + if (noOfEntries >= 0xffff || + start >= 0xffffffff || + sizeEntries >= 0xffffffff) + { + WriteZip64EndOfCentralDirectory(stream, noOfEntries, sizeEntries, start); + } + + stream.WriteLEInt(ZipConstants.EndOfCentralDirectorySignature); + + // TODO: ZipFile Multi disk handling not done + stream.WriteLEShort(0); // number of this disk + stream.WriteLEShort(0); // no of disk with start of central dir + + // Number of entries + if (noOfEntries >= 0xffff) + { + stream.WriteLEUshort(0xffff); // Zip64 marker + stream.WriteLEUshort(0xffff); + } + else + { + stream.WriteLEShort((short)noOfEntries); // entries in central dir for this disk + stream.WriteLEShort((short)noOfEntries); // total entries in central directory + } + + // Size of the central directory + if (sizeEntries >= 0xffffffff) + { + stream.WriteLEUint(0xffffffff); // Zip64 marker + } + else + { + stream.WriteLEInt((int)sizeEntries); + } + + // offset of start of central directory + if (start >= 0xffffffff) + { + stream.WriteLEUint(0xffffffff); // Zip64 marker + } + else + { + stream.WriteLEInt((int)start); + } + + var commentLength = comment?.Length ?? 0; + + if (commentLength > 0xffff) + { + throw new ZipException($"Comment length ({commentLength}) is larger than 64K"); + } + + stream.WriteLEShort(commentLength); + + if (commentLength > 0) + { + stream.Write(comment, 0, commentLength); + } + } + + + + /// + /// Write a data descriptor. + /// + /// + /// The entry to write a descriptor for. + /// Returns the number of descriptor bytes written. + internal static int WriteDataDescriptor(Stream stream, ZipEntry entry) + { + if (entry == null) + { + throw new ArgumentNullException(nameof(entry)); + } + + int result = 0; + + // Add data descriptor if flagged as required + if ((entry.Flags & (int)GeneralBitFlags.Descriptor) != 0) + { + // The signature is not PKZIP originally but is now described as optional + // in the PKZIP Appnote documenting the format. + stream.WriteLEInt(ZipConstants.DataDescriptorSignature); + stream.WriteLEInt(unchecked((int)(entry.Crc))); + + result += 8; + + if (entry.LocalHeaderRequiresZip64) + { + stream.WriteLELong(entry.CompressedSize); + stream.WriteLELong(entry.Size); + result += 16; + } + else + { + stream.WriteLEInt((int)entry.CompressedSize); + stream.WriteLEInt((int)entry.Size); + result += 8; + } + } + + return result; + } + + /// + /// Read data descriptor at the end of compressed data. + /// + /// + /// if set to true [zip64]. + /// The data to fill in. + /// Returns the number of bytes read in the descriptor. + internal static void ReadDataDescriptor(Stream stream, bool zip64, DescriptorData data) + { + int intValue = stream.ReadLEInt(); + + // In theory this may not be a descriptor according to PKZIP appnote. + // In practice its always there. 
+ if (intValue != ZipConstants.DataDescriptorSignature) + { + throw new ZipException("Data descriptor signature not found"); + } + + data.Crc = stream.ReadLEInt(); + + if (zip64) + { + data.CompressedSize = stream.ReadLELong(); + data.Size = stream.ReadLELong(); + } + else + { + data.CompressedSize = stream.ReadLEInt(); + data.Size = stream.ReadLEInt(); + } + } + + internal static int WriteEndEntry(Stream stream, ZipEntry entry, StringCodec stringCodec) + { + stream.WriteLEInt(ZipConstants.CentralHeaderSignature); + stream.WriteLEShort((entry.HostSystem << 8) | entry.VersionMadeBy); + stream.WriteLEShort(entry.Version); + stream.WriteLEShort(entry.Flags); + stream.WriteLEShort((short)entry.CompressionMethodForHeader); + stream.WriteLEInt((int)entry.DosTime); + stream.WriteLEInt((int)entry.Crc); + + if (entry.IsZip64Forced() || + (entry.CompressedSize >= uint.MaxValue)) + { + stream.WriteLEInt(-1); + } + else + { + stream.WriteLEInt((int)entry.CompressedSize); + } + + if (entry.IsZip64Forced() || + (entry.Size >= uint.MaxValue)) + { + stream.WriteLEInt(-1); + } + else + { + stream.WriteLEInt((int)entry.Size); + } + + byte[] name = stringCodec.ZipOutputEncoding.GetBytes(entry.Name); + + if (name.Length > 0xffff) + { + throw new ZipException("Name too long."); + } + + var ed = new ZipExtraData(entry.ExtraData); + + if (entry.CentralHeaderRequiresZip64) + { + ed.StartNewEntry(); + if (entry.IsZip64Forced() || + (entry.Size >= 0xffffffff)) + { + ed.AddLeLong(entry.Size); + } + + if (entry.IsZip64Forced() || + (entry.CompressedSize >= 0xffffffff)) + { + ed.AddLeLong(entry.CompressedSize); + } + + if (entry.Offset >= 0xffffffff) + { + ed.AddLeLong(entry.Offset); + } + + ed.AddNewEntry(1); + } + else + { + ed.Delete(1); + } + + if (entry.AESKeySize > 0) + { + AddExtraDataAES(entry, ed); + } + byte[] extra = ed.GetEntryData(); + + byte[] entryComment = !(entry.Comment is null) + ? stringCodec.ZipOutputEncoding.GetBytes(entry.Comment) + : Empty.Array(); + + if (entryComment.Length > 0xffff) + { + throw new ZipException("Comment too long."); + } + + stream.WriteLEShort(name.Length); + stream.WriteLEShort(extra.Length); + stream.WriteLEShort(entryComment.Length); + stream.WriteLEShort(0); // disk number + stream.WriteLEShort(0); // internal file attributes + // external file attributes + + if (entry.ExternalFileAttributes != -1) + { + stream.WriteLEInt(entry.ExternalFileAttributes); + } + else + { + if (entry.IsDirectory) + { // mark entry as directory (from nikolam.AT.perfectinfo.com) + stream.WriteLEInt(16); + } + else + { + stream.WriteLEInt(0); + } + } + + if (entry.Offset >= uint.MaxValue) + { + stream.WriteLEInt(-1); + } + else + { + stream.WriteLEInt((int)entry.Offset); + } + + if (name.Length > 0) + { + stream.Write(name, 0, name.Length); + } + + if (extra.Length > 0) + { + stream.Write(extra, 0, extra.Length); + } + + if (entryComment.Length > 0) + { + stream.Write(entryComment, 0, entryComment.Length); + } + + return ZipConstants.CentralHeaderBaseSize + name.Length + extra.Length + entryComment.Length; + } + + internal static void AddExtraDataAES(ZipEntry entry, ZipExtraData extraData) + { + // Vendor Version: AE-1 IS 1. AE-2 is 2. With AE-2 no CRC is required and 0 is stored. + const int VENDOR_VERSION = 2; + // Vendor ID is the two ASCII characters "AE". 
+ const int VENDOR_ID = 0x4541; //not 6965; + extraData.StartNewEntry(); + // Pack AES extra data field see http://www.winzip.com/aes_info.htm + //extraData.AddLeShort(7); // Data size (currently 7) + extraData.AddLeShort(VENDOR_VERSION); // 2 = AE-2 + extraData.AddLeShort(VENDOR_ID); // "AE" + extraData.AddData(entry.AESEncryptionStrength); // 1 = 128, 2 = 192, 3 = 256 + extraData.AddLeShort((int)entry.CompressionMethod); // The actual compression method used to compress the file + extraData.AddNewEntry(0x9901); + } + + internal static async Task PatchLocalHeaderAsync(Stream stream, ZipEntry entry, + EntryPatchData patchData, CancellationToken ct) + { + var initialPos = stream.Position; + + // Update CRC + stream.Seek(patchData.CrcPatchOffset, SeekOrigin.Begin); + await stream.WriteLEIntAsync((int)entry.Crc, ct).ConfigureAwait(false); + + // Update Sizes + if (entry.LocalHeaderRequiresZip64) + { + if (patchData.SizePatchOffset == -1) + { + throw new ZipException("Entry requires zip64 but this has been turned off"); + } + // Seek to the Zip64 Extra Data + stream.Seek(patchData.SizePatchOffset, SeekOrigin.Begin); + + // Note: The order of the size fields is reversed when compared to the local header! + await stream.WriteLELongAsync(entry.Size, ct).ConfigureAwait(false); + await stream.WriteLELongAsync(entry.CompressedSize, ct).ConfigureAwait(false); + } + else + { + await stream.WriteLEIntAsync((int)entry.CompressedSize, ct).ConfigureAwait(false); + await stream.WriteLEIntAsync((int)entry.Size, ct).ConfigureAwait(false); + } + + stream.Seek(initialPos, SeekOrigin.Begin); + } + + internal static void PatchLocalHeaderSync(Stream stream, ZipEntry entry, + EntryPatchData patchData) + { + var initialPos = stream.Position; + stream.Seek(patchData.CrcPatchOffset, SeekOrigin.Begin); + stream.WriteLEInt((int)entry.Crc); + + if (entry.LocalHeaderRequiresZip64) + { + if (patchData.SizePatchOffset == -1) + { + throw new ZipException("Entry requires zip64 but this has been turned off"); + } + + // Seek to the Zip64 Extra Data + stream.Seek(patchData.SizePatchOffset, SeekOrigin.Begin); + + // Note: The order of the size fields is reversed when compared to the local header! + stream.WriteLELong(entry.Size); + stream.WriteLELong(entry.CompressedSize); + } + else + { + stream.WriteLEInt((int)entry.CompressedSize); + stream.WriteLEInt((int)entry.Size); + } + + stream.Seek(initialPos, SeekOrigin.Begin); + } + } +} diff --git a/src/ICSharpCode.SharpZipLib/Zip/ZipHelperStream.cs b/src/ICSharpCode.SharpZipLib/Zip/ZipHelperStream.cs index da65630c6..e69de29bb 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/ZipHelperStream.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/ZipHelperStream.cs @@ -1,629 +0,0 @@ -using System; -using System.IO; - -namespace ICSharpCode.SharpZipLib.Zip -{ - /// - /// Holds data pertinent to a data descriptor. - /// - public class DescriptorData - { - /// - /// Get /set the compressed size of data. - /// - public long CompressedSize - { - get { return compressedSize; } - set { compressedSize = value; } - } - - /// - /// Get / set the uncompressed size of data - /// - public long Size - { - get { return size; } - set { size = value; } - } - - /// - /// Get /set the crc value. 
- /// - public long Crc - { - get { return crc; } - set { crc = (value & 0xffffffff); } - } - - #region Instance Fields - - private long size; - private long compressedSize; - private long crc; - - #endregion Instance Fields - } - - internal class EntryPatchData - { - public long SizePatchOffset - { - get { return sizePatchOffset_; } - set { sizePatchOffset_ = value; } - } - - public long CrcPatchOffset - { - get { return crcPatchOffset_; } - set { crcPatchOffset_ = value; } - } - - #region Instance Fields - - private long sizePatchOffset_; - private long crcPatchOffset_; - - #endregion Instance Fields - } - - /// - /// This class assists with writing/reading from Zip files. - /// - internal class ZipHelperStream : Stream - { - #region Constructors - - /// - /// Initialise an instance of this class. - /// - /// The name of the file to open. - public ZipHelperStream(string name) - { - stream_ = new FileStream(name, FileMode.Open, FileAccess.ReadWrite); - isOwner_ = true; - } - - /// - /// Initialise a new instance of . - /// - /// The stream to use. - public ZipHelperStream(Stream stream) - { - stream_ = stream; - } - - #endregion Constructors - - /// - /// Get / set a value indicating whether the underlying stream is owned or not. - /// - /// If the stream is owned it is closed when this instance is closed. - public bool IsStreamOwner - { - get { return isOwner_; } - set { isOwner_ = value; } - } - - #region Base Stream Methods - - public override bool CanRead - { - get { return stream_.CanRead; } - } - - public override bool CanSeek - { - get { return stream_.CanSeek; } - } - - public override bool CanTimeout - { - get { return stream_.CanTimeout; } - } - - public override long Length - { - get { return stream_.Length; } - } - - public override long Position - { - get { return stream_.Position; } - set { stream_.Position = value; } - } - - public override bool CanWrite - { - get { return stream_.CanWrite; } - } - - public override void Flush() - { - stream_.Flush(); - } - - public override long Seek(long offset, SeekOrigin origin) - { - return stream_.Seek(offset, origin); - } - - public override void SetLength(long value) - { - stream_.SetLength(value); - } - - public override int Read(byte[] buffer, int offset, int count) - { - return stream_.Read(buffer, offset, count); - } - - public override void Write(byte[] buffer, int offset, int count) - { - stream_.Write(buffer, offset, count); - } - - /// - /// Close the stream. - /// - /// - /// The underlying stream is closed only if is true. - /// - protected override void Dispose(bool disposing) - { - Stream toClose = stream_; - stream_ = null; - if (isOwner_ && (toClose != null)) - { - isOwner_ = false; - toClose.Dispose(); - } - } - - #endregion Base Stream Methods - - // Write the local file header - // TODO: ZipHelperStream.WriteLocalHeader is not yet used and needs checking for ZipFile and ZipOuptutStream usage - private void WriteLocalHeader(ZipEntry entry, EntryPatchData patchData) - { - CompressionMethod method = entry.CompressionMethod; - bool headerInfoAvailable = true; // How to get this? - bool patchEntryHeader = false; - - WriteLEInt(ZipConstants.LocalHeaderSignature); - - WriteLEShort(entry.Version); - WriteLEShort(entry.Flags); - WriteLEShort((byte)method); - WriteLEInt((int)entry.DosTime); - - if (headerInfoAvailable == true) - { - WriteLEInt((int)entry.Crc); - if (entry.LocalHeaderRequiresZip64) - { - WriteLEInt(-1); - WriteLEInt(-1); - } - else - { - WriteLEInt(entry.IsCrypted ? 
(int)entry.CompressedSize + ZipConstants.CryptoHeaderSize : (int)entry.CompressedSize); - WriteLEInt((int)entry.Size); - } - } - else - { - if (patchData != null) - { - patchData.CrcPatchOffset = stream_.Position; - } - WriteLEInt(0); // Crc - - if (patchData != null) - { - patchData.SizePatchOffset = stream_.Position; - } - - // For local header both sizes appear in Zip64 Extended Information - if (entry.LocalHeaderRequiresZip64 && patchEntryHeader) - { - WriteLEInt(-1); - WriteLEInt(-1); - } - else - { - WriteLEInt(0); // Compressed size - WriteLEInt(0); // Uncompressed size - } - } - - byte[] name = ZipStrings.ConvertToArray(entry.Flags, entry.Name); - - if (name.Length > 0xFFFF) - { - throw new ZipException("Entry name too long."); - } - - var ed = new ZipExtraData(entry.ExtraData); - - if (entry.LocalHeaderRequiresZip64 && (headerInfoAvailable || patchEntryHeader)) - { - ed.StartNewEntry(); - if (headerInfoAvailable) - { - ed.AddLeLong(entry.Size); - ed.AddLeLong(entry.CompressedSize); - } - else - { - ed.AddLeLong(-1); - ed.AddLeLong(-1); - } - ed.AddNewEntry(1); - - if (!ed.Find(1)) - { - throw new ZipException("Internal error cant find extra data"); - } - - if (patchData != null) - { - patchData.SizePatchOffset = ed.CurrentReadIndex; - } - } - else - { - ed.Delete(1); - } - - byte[] extra = ed.GetEntryData(); - - WriteLEShort(name.Length); - WriteLEShort(extra.Length); - - if (name.Length > 0) - { - stream_.Write(name, 0, name.Length); - } - - if (entry.LocalHeaderRequiresZip64 && patchEntryHeader) - { - patchData.SizePatchOffset += stream_.Position; - } - - if (extra.Length > 0) - { - stream_.Write(extra, 0, extra.Length); - } - } - - /// - /// Locates a block with the desired . - /// - /// The signature to find. - /// Location, marking the end of block. - /// Minimum size of the block. - /// The maximum variable data. - /// Returns the offset of the first byte after the signature; -1 if not found - public long LocateBlockWithSignature(int signature, long endLocation, int minimumBlockSize, int maximumVariableData) - { - long pos = endLocation - minimumBlockSize; - if (pos < 0) - { - return -1; - } - - long giveUpMarker = Math.Max(pos - maximumVariableData, 0); - - // TODO: This loop could be optimised for speed. - do - { - if (pos < giveUpMarker) - { - return -1; - } - Seek(pos--, SeekOrigin.Begin); - } while (ReadLEInt() != signature); - - return Position; - } - - /// - /// Write Zip64 end of central directory records (File header and locator). - /// - /// The number of entries in the central directory. - /// The size of entries in the central directory. - /// The offset of the central directory. 
- public void WriteZip64EndOfCentralDirectory(long noOfEntries, long sizeEntries, long centralDirOffset) - { - long centralSignatureOffset = centralDirOffset + sizeEntries; - WriteLEInt(ZipConstants.Zip64CentralFileHeaderSignature); - WriteLELong(44); // Size of this record (total size of remaining fields in header or full size - 12) - WriteLEShort(ZipConstants.VersionMadeBy); // Version made by - WriteLEShort(ZipConstants.VersionZip64); // Version to extract - WriteLEInt(0); // Number of this disk - WriteLEInt(0); // number of the disk with the start of the central directory - WriteLELong(noOfEntries); // No of entries on this disk - WriteLELong(noOfEntries); // Total No of entries in central directory - WriteLELong(sizeEntries); // Size of the central directory - WriteLELong(centralDirOffset); // offset of start of central directory - // zip64 extensible data sector not catered for here (variable size) - - // Write the Zip64 end of central directory locator - WriteLEInt(ZipConstants.Zip64CentralDirLocatorSignature); - - // no of the disk with the start of the zip64 end of central directory - WriteLEInt(0); - - // relative offset of the zip64 end of central directory record - WriteLELong(centralSignatureOffset); - - // total number of disks - WriteLEInt(1); - } - - /// - /// Write the required records to end the central directory. - /// - /// The number of entries in the directory. - /// The size of the entries in the directory. - /// The start of the central directory. - /// The archive comment. (This can be null). - public void WriteEndOfCentralDirectory(long noOfEntries, long sizeEntries, - long startOfCentralDirectory, byte[] comment) - { - if ((noOfEntries >= 0xffff) || - (startOfCentralDirectory >= 0xffffffff) || - (sizeEntries >= 0xffffffff)) - { - WriteZip64EndOfCentralDirectory(noOfEntries, sizeEntries, startOfCentralDirectory); - } - - WriteLEInt(ZipConstants.EndOfCentralDirectorySignature); - - // TODO: ZipFile Multi disk handling not done - WriteLEShort(0); // number of this disk - WriteLEShort(0); // no of disk with start of central dir - - // Number of entries - if (noOfEntries >= 0xffff) - { - WriteLEUshort(0xffff); // Zip64 marker - WriteLEUshort(0xffff); - } - else - { - WriteLEShort((short)noOfEntries); // entries in central dir for this disk - WriteLEShort((short)noOfEntries); // total entries in central directory - } - - // Size of the central directory - if (sizeEntries >= 0xffffffff) - { - WriteLEUint(0xffffffff); // Zip64 marker - } - else - { - WriteLEInt((int)sizeEntries); - } - - // offset of start of central directory - if (startOfCentralDirectory >= 0xffffffff) - { - WriteLEUint(0xffffffff); // Zip64 marker - } - else - { - WriteLEInt((int)startOfCentralDirectory); - } - - int commentLength = (comment != null) ? comment.Length : 0; - - if (commentLength > 0xffff) - { - throw new ZipException(string.Format("Comment length({0}) is too long can only be 64K", commentLength)); - } - - WriteLEShort(commentLength); - - if (commentLength > 0) - { - Write(comment, 0, comment.Length); - } - } - - #region LE value reading/writing - - /// - /// Read an unsigned short in little endian byte order. - /// - /// Returns the value read. - /// - /// An i/o error occurs. 
- /// - /// - /// The file ends prematurely - /// - public int ReadLEShort() - { - int byteValue1 = stream_.ReadByte(); - - if (byteValue1 < 0) - { - throw new EndOfStreamException(); - } - - int byteValue2 = stream_.ReadByte(); - if (byteValue2 < 0) - { - throw new EndOfStreamException(); - } - - return byteValue1 | (byteValue2 << 8); - } - - /// - /// Read an int in little endian byte order. - /// - /// Returns the value read. - /// - /// An i/o error occurs. - /// - /// - /// The file ends prematurely - /// - public int ReadLEInt() - { - return ReadLEShort() | (ReadLEShort() << 16); - } - - /// - /// Read a long in little endian byte order. - /// - /// The value read. - public long ReadLELong() - { - return (uint)ReadLEInt() | ((long)ReadLEInt() << 32); - } - - /// - /// Write an unsigned short in little endian byte order. - /// - /// The value to write. - public void WriteLEShort(int value) - { - stream_.WriteByte((byte)(value & 0xff)); - stream_.WriteByte((byte)((value >> 8) & 0xff)); - } - - /// - /// Write a ushort in little endian byte order. - /// - /// The value to write. - public void WriteLEUshort(ushort value) - { - stream_.WriteByte((byte)(value & 0xff)); - stream_.WriteByte((byte)(value >> 8)); - } - - /// - /// Write an int in little endian byte order. - /// - /// The value to write. - public void WriteLEInt(int value) - { - WriteLEShort(value); - WriteLEShort(value >> 16); - } - - /// - /// Write a uint in little endian byte order. - /// - /// The value to write. - public void WriteLEUint(uint value) - { - WriteLEUshort((ushort)(value & 0xffff)); - WriteLEUshort((ushort)(value >> 16)); - } - - /// - /// Write a long in little endian byte order. - /// - /// The value to write. - public void WriteLELong(long value) - { - WriteLEInt((int)value); - WriteLEInt((int)(value >> 32)); - } - - /// - /// Write a ulong in little endian byte order. - /// - /// The value to write. - public void WriteLEUlong(ulong value) - { - WriteLEUint((uint)(value & 0xffffffff)); - WriteLEUint((uint)(value >> 32)); - } - - #endregion LE value reading/writing - - /// - /// Write a data descriptor. - /// - /// The entry to write a descriptor for. - /// Returns the number of descriptor bytes written. - public int WriteDataDescriptor(ZipEntry entry) - { - if (entry == null) - { - throw new ArgumentNullException(nameof(entry)); - } - - int result = 0; - - // Add data descriptor if flagged as required - if ((entry.Flags & (int)GeneralBitFlags.Descriptor) != 0) - { - // The signature is not PKZIP originally but is now described as optional - // in the PKZIP Appnote documenting the format. - WriteLEInt(ZipConstants.DataDescriptorSignature); - WriteLEInt(unchecked((int)(entry.Crc))); - - result += 8; - - if (entry.LocalHeaderRequiresZip64) - { - WriteLELong(entry.CompressedSize); - WriteLELong(entry.Size); - result += 16; - } - else - { - WriteLEInt((int)entry.CompressedSize); - WriteLEInt((int)entry.Size); - result += 8; - } - } - - return result; - } - - /// - /// Read data descriptor at the end of compressed data. - /// - /// if set to true [zip64]. - /// The data to fill in. - /// Returns the number of bytes read in the descriptor. - public void ReadDataDescriptor(bool zip64, DescriptorData data) - { - int intValue = ReadLEInt(); - - // In theory this may not be a descriptor according to PKZIP appnote. - // In practice its always there. 
- if (intValue != ZipConstants.DataDescriptorSignature) - { - throw new ZipException("Data descriptor signature not found"); - } - - data.Crc = ReadLEInt(); - - if (zip64) - { - data.CompressedSize = ReadLELong(); - data.Size = ReadLELong(); - } - else - { - data.CompressedSize = ReadLEInt(); - data.Size = ReadLEInt(); - } - } - - #region Instance Fields - - private bool isOwner_; - private Stream stream_; - - #endregion Instance Fields - } -} diff --git a/src/ICSharpCode.SharpZipLib/Zip/ZipInputStream.cs b/src/ICSharpCode.SharpZipLib/Zip/ZipInputStream.cs index cccac6639..37e9e8ba8 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/ZipInputStream.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/ZipInputStream.cs @@ -1,9 +1,11 @@ using ICSharpCode.SharpZipLib.Checksum; using ICSharpCode.SharpZipLib.Encryption; -using ICSharpCode.SharpZipLib.Zip.Compression; using ICSharpCode.SharpZipLib.Zip.Compression.Streams; using System; +using System.Diagnostics; using System.IO; +using ICSharpCode.SharpZipLib.Core; +using ICSharpCode.SharpZipLib.Zip.Compression; namespace ICSharpCode.SharpZipLib.Zip { @@ -76,6 +78,7 @@ public class ZipInputStream : InflaterInputStream private CompressionMethod method; private int flags; private string password; + private readonly StringCodec _stringCodec = ZipStrings.GetStringCodec(); #endregion Instance Fields @@ -86,7 +89,7 @@ public class ZipInputStream : InflaterInputStream ///
/// The underlying providing data. public ZipInputStream(Stream baseInputStream) - : base(baseInputStream, new Inflater(true)) + : base(baseInputStream, InflaterPool.Instance.Rent(true)) { internalReader = new ReadDataHandler(ReadingNotAvailable); } @@ -97,11 +100,26 @@ public ZipInputStream(Stream baseInputStream) /// The underlying providing data. /// Size of the buffer. public ZipInputStream(Stream baseInputStream, int bufferSize) - : base(baseInputStream, new Inflater(true), bufferSize) + : base(baseInputStream, InflaterPool.Instance.Rent(true), bufferSize) { internalReader = new ReadDataHandler(ReadingNotAvailable); } + /// + /// Creates a new Zip input stream, for reading a zip archive. + /// + /// The underlying providing data. + /// + public ZipInputStream(Stream baseInputStream, StringCodec stringCodec) + : base(baseInputStream, new Inflater(true)) + { + internalReader = new ReadDataHandler(ReadingNotAvailable); + if (stringCodec != null) + { + _stringCodec = stringCodec; + } + } + #endregion Constructors /// @@ -180,31 +198,12 @@ public ZipEntry GetNextEntry() CloseEntry(); } - int header = inputBuffer.ReadLeInt(); - - if (header == ZipConstants.CentralHeaderSignature || - header == ZipConstants.EndOfCentralDirectorySignature || - header == ZipConstants.CentralHeaderDigitalSignature || - header == ZipConstants.ArchiveExtraDataSignature || - header == ZipConstants.Zip64CentralFileHeaderSignature) + if (!SkipUntilNextEntry()) { - // No more individual entries exist Dispose(); return null; } - // -jr- 07-Dec-2003 Ignore spanning temporary signatures if found - // Spanning signature is same as descriptor signature and is untested as yet. - if ((header == ZipConstants.SpanningTempSignature) || (header == ZipConstants.SpanningSignature)) - { - header = inputBuffer.ReadLeInt(); - } - - if (header != ZipConstants.LocalHeaderSignature) - { - throw new ZipException("Wrong Local header signature: 0x" + String.Format("{0:X}", header)); - } - var versionRequiredToExtract = (short)inputBuffer.ReadLeShort(); flags = inputBuffer.ReadLeShort(); @@ -221,9 +220,11 @@ public ZipEntry GetNextEntry() byte[] buffer = new byte[nameLen]; inputBuffer.ReadRawBuffer(buffer); - string name = ZipStrings.ConvertToStringExt(flags, buffer); + var entryEncoding = _stringCodec.ZipInputEncoding(flags); + string name = entryEncoding.GetString(buffer); + var unicode = entryEncoding.IsZipUnicode(); - entry = new ZipEntry(name, versionRequiredToExtract, ZipConstants.VersionMadeBy, method) + entry = new ZipEntry(name, versionRequiredToExtract, ZipConstants.VersionMadeBy, method, unicode) { Flags = flags, }; @@ -300,6 +301,54 @@ public ZipEntry GetNextEntry() return entry; } + /// + /// Reads bytes from the input stream until either a local file header signature, or another signature + /// indicating that no more entries should be present, is found. 
+ /// + /// Thrown if the end of the input stream is reached without any signatures found + /// Returns whether the found signature is for a local entry header + private bool SkipUntilNextEntry() + { + // First let's skip all null bytes since it's the sane padding to add when updating an entry with smaller size + var paddingSkipped = 0; + while(inputBuffer.ReadLeByte() == 0) { + paddingSkipped++; + } + + // Last byte read was not actually consumed, restore the offset + inputBuffer.Available += 1; + if(paddingSkipped > 0) { + Debug.WriteLine("Skipped {0} null byte(s) before reading signature", paddingSkipped); + } + + var offset = 0; + // Read initial header quad directly after the last entry + var header = (uint)inputBuffer.ReadLeInt(); + do + { + switch (header) + { + case ZipConstants.CentralHeaderSignature: + case ZipConstants.EndOfCentralDirectorySignature: + case ZipConstants.CentralHeaderDigitalSignature: + case ZipConstants.ArchiveExtraDataSignature: + case ZipConstants.Zip64CentralFileHeaderSignature: + Debug.WriteLine("Non-entry signature found at offset {0,2}: 0x{1:x8}", offset, header); + // No more individual entries exist + return false; + + case ZipConstants.LocalHeaderSignature: + Debug.WriteLine("Entry local header signature found at offset {0,2}: 0x{1:x8}", offset, header); + return true; + default: + // Current header quad did not match any signature, shift in another byte + header = (uint) (inputBuffer.ReadLeByte() << 24) | (header >> 8); + offset++; + break; + } + } while (true); // Loop until we either get an EOF exception or we find the next signature + } + /// /// Read data descriptor at the end of compressed data. /// @@ -397,6 +446,7 @@ public void CloseEntry() if ((inputBuffer.Available > csize) && (csize >= 0)) { + // Buffer can contain entire entry data. Internally offsetting position inside buffer inputBuffer.Available = (int)((long)inputBuffer.Available - csize); } else @@ -524,7 +574,7 @@ private int InitialRead(byte[] destination, int offset, int count) // Generate and set crypto transform... var managed = new PkzipClassicManaged(); - byte[] key = PkzipClassic.GenerateKeys(ZipStrings.ConvertToArray(password)); + byte[] key = PkzipClassic.GenerateKeys(_stringCodec.ZipCryptoEncoding.GetBytes(password)); inputBuffer.CryptoTransform = managed.CreateDecryptor(key, null); diff --git a/src/ICSharpCode.SharpZipLib/Zip/ZipOutputStream.cs b/src/ICSharpCode.SharpZipLib/Zip/ZipOutputStream.cs index 79d65f560..2cc36df22 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/ZipOutputStream.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/ZipOutputStream.cs @@ -6,7 +6,10 @@ using System; using System.Collections.Generic; using System.IO; +using System.Linq; using System.Security.Cryptography; +using System.Threading; +using System.Threading.Tasks; namespace ICSharpCode.SharpZipLib.Zip { @@ -78,6 +81,16 @@ public ZipOutputStream(Stream baseOutputStream, int bufferSize) { } + /// + /// Creates a new Zip output stream, writing a zip archive. + /// + /// The output stream to which the archive contents are written. + /// + public ZipOutputStream(Stream baseOutputStream, StringCodec stringCodec) : this(baseOutputStream) + { + _stringCodec = stringCodec; + } + #endregion Constructors /// @@ -103,8 +116,7 @@ public bool IsFinished /// public void SetComment(string comment) { - // TODO: Its not yet clear how to handle unicode comments here. 
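For reference, a minimal sketch (not part of the patch itself) of reading an archive through the codec-aware ZipInputStream constructor added above; it assumes the default codec returned by ZipStrings.GetStringCodec(), used internally here, is accessible to callers, and the archive name is a placeholder:

using System.IO;
using ICSharpCode.SharpZipLib.Zip;

var codec = ZipStrings.GetStringCodec();          // default name/comment encodings
using (var input = File.OpenRead("archive.zip"))
using (var zis = new ZipInputStream(input, codec))
{
    ZipEntry entry;
    while ((entry = zis.GetNextEntry()) != null)  // GetNextEntry scans forward via SkipUntilNextEntry
    {
        if (!entry.IsFile) continue;
        using (var target = File.Create(Path.GetFileName(entry.Name)))
        {
            zis.CopyTo(target);                   // reads only the current entry's data
        }
    }
}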
- byte[] commentBytes = ZipStrings.ConvertToArray(comment); + byte[] commentBytes = _stringCodec.ZipArchiveCommentEncoding.GetBytes(comment); if (commentBytes.Length > 0xffff) { throw new ArgumentOutOfRangeException(nameof(comment)); @@ -217,17 +229,10 @@ private void WriteLeLong(long value) // Apply any configured transforms/cleaning to the name of the supplied entry. private void TransformEntryName(ZipEntry entry) { - if (this.NameTransform != null) - { - if (entry.IsDirectory) - { - entry.Name = this.NameTransform.TransformDirectory(entry.Name); - } - else - { - entry.Name = this.NameTransform.TransformFile(entry.Name); - } - } + if (NameTransform == null) return; + entry.Name = entry.IsDirectory + ? NameTransform.TransformDirectory(entry.Name) + : NameTransform.TransformFile(entry.Name); } /// @@ -244,7 +249,7 @@ private void TransformEntryName(ZipEntry entry) /// if entry passed is null. /// /// - /// if an I/O error occured. + /// if an I/O error occurred. /// /// /// if stream was finished @@ -259,21 +264,111 @@ private void TransformEntryName(ZipEntry entry) /// public void PutNextEntry(ZipEntry entry) { - if (entry == null) + if (curEntry != null) { - throw new ArgumentNullException(nameof(entry)); + CloseEntry(); } - if (entries == null) + PutNextEntry(baseOutputStream_, entry); + + if (entry.IsCrypted) { - throw new InvalidOperationException("ZipOutputStream was finished"); + WriteOutput(GetEntryEncryptionHeader(entry)); } + } - if (curEntry != null) + /// + /// Starts a new passthrough Zip entry. It automatically closes the previous + /// entry if present. + /// Passthrough entry is an entry that is created from compressed data. + /// It is useful to avoid recompression to save CPU resources if compressed data is already disposable. + /// All entry elements bar name, crc, size and compressed size are optional, but must be correct if present. + /// Compression should be set to Deflated. + /// + /// + /// the entry. + /// + /// + /// if entry passed is null. + /// + /// + /// if an I/O error occurred. + /// + /// + /// if stream was finished. + /// + /// + /// Crc is not set
+ /// Size is not set
+ /// CompressedSize is not set
+ /// CompressionMethod is not Deflated
+ /// Too many entries in the Zip file
+ /// Entry name is too long
+ /// Finish has already been called
+ ///
+ /// + /// The Compression method specified for the entry is unsupported
+ /// Entry is encrypted
+ ///
+ public void PutNextPassthroughEntry(ZipEntry entry) + { + if(curEntry != null) { CloseEntry(); } + if(entry.Crc < 0) + { + throw new ZipException("Crc must be set for passthrough entry"); + } + + if(entry.Size < 0) + { + throw new ZipException("Size must be set for passthrough entry"); + } + + if(entry.CompressedSize < 0) + { + throw new ZipException("CompressedSize must be set for passthrough entry"); + } + + if(entry.CompressionMethod != CompressionMethod.Deflated) + { + throw new NotImplementedException("Only Deflated entries are supported for passthrough"); + } + + if(!string.IsNullOrEmpty(Password)) + { + throw new NotImplementedException("Encrypted passthrough entries are not supported"); + } + + PutNextEntry(baseOutputStream_, entry, 0, true); + } + + + private void WriteOutput(byte[] bytes) + => baseOutputStream_.Write(bytes, 0, bytes.Length); + + private Task WriteOutputAsync(byte[] bytes) + => baseOutputStream_.WriteAsync(bytes, 0, bytes.Length); + + private byte[] GetEntryEncryptionHeader(ZipEntry entry) => + entry.AESKeySize > 0 + ? InitializeAESPassword(entry, Password) + : CreateZipCryptoHeader(entry.Crc < 0 ? entry.DosTime << 16 : entry.Crc); + + internal void PutNextEntry(Stream stream, ZipEntry entry, long streamOffset = 0, bool passthroughEntry = false) + { + if (entry == null) + { + throw new ArgumentNullException(nameof(entry)); + } + + if (entries == null) + { + throw new InvalidOperationException("ZipOutputStream was finished"); + } + if (entries.Count == int.MaxValue) { throw new ZipException("Too many entries for Zip file"); @@ -293,6 +388,8 @@ public void PutNextEntry(ZipEntry entry) throw new InvalidOperationException("The Password property must be set before AES encrypted entries can be added"); } + entryIsPassthrough = passthroughEntry; + int compressionLevel = defaultCompressionLevel; // Clear flags that the library manages internally @@ -302,7 +399,7 @@ public void PutNextEntry(ZipEntry entry) bool headerInfoAvailable; // No need to compress - definitely no data. - if (entry.Size == 0) + if (entry.Size == 0 && !entryIsPassthrough) { entry.CompressedSize = entry.Size; entry.Crc = 0; @@ -365,160 +462,79 @@ public void PutNextEntry(ZipEntry entry) entry.CompressionMethod = (CompressionMethod)method; curMethod = method; - sizePatchPos = -1; if ((useZip64_ == UseZip64.On) || ((entry.Size < 0) && (useZip64_ == UseZip64.Dynamic))) { entry.ForceZip64(); } - // Write the local file header - WriteLeInt(ZipConstants.LocalHeaderSignature); - - WriteLeShort(entry.Version); - WriteLeShort(entry.Flags); - WriteLeShort((byte)entry.CompressionMethodForHeader); - WriteLeInt((int)entry.DosTime); - - // TODO: Refactor header writing. Its done in several places. - if (headerInfoAvailable) - { - WriteLeInt((int)entry.Crc); - if (entry.LocalHeaderRequiresZip64) - { - WriteLeInt(-1); - WriteLeInt(-1); - } - else - { - WriteLeInt((int)entry.CompressedSize + entry.EncryptionOverheadSize); - WriteLeInt((int)entry.Size); - } - } - else - { - if (patchEntryHeader) - { - crcPatchPos = baseOutputStream_.Position; - } - WriteLeInt(0); // Crc - - if (patchEntryHeader) - { - sizePatchPos = baseOutputStream_.Position; - } - - // For local header both sizes appear in Zip64 Extended Information - if (entry.LocalHeaderRequiresZip64 || patchEntryHeader) - { - WriteLeInt(-1); - WriteLeInt(-1); - } - else - { - WriteLeInt(0); // Compressed size - WriteLeInt(0); // Uncompressed size - } - } - - // Apply any required transforms to the entry name, and then convert to byte array format. 
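For reference, a minimal sketch (not part of the patch itself) of how PutNextPassthroughEntry might be used; the helper method name is hypothetical, and the deflate data, CRC and sizes are assumed to have been captured elsewhere (for example, copied out of another archive):

using ICSharpCode.SharpZipLib.Zip;

static void AddPrecompressed(ZipOutputStream zos, string name, byte[] deflateData, long crc, long uncompressedSize)
{
    var entry = new ZipEntry(name)
    {
        Crc = crc,                               // CRC-32 of the uncompressed data
        Size = uncompressedSize,
        CompressedSize = deflateData.Length,     // must match the number of bytes written below
        CompressionMethod = CompressionMethod.Deflated
    };
    zos.PutNextPassthroughEntry(entry);          // writes the local header from the supplied values
    zos.Write(deflateData, 0, deflateData.Length); // data is copied through without re-compression
    zos.CloseEntry();
}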
+ // Apply any required transforms to the entry name TransformEntryName(entry); - byte[] name = ZipStrings.ConvertToArray(entry.Flags, entry.Name); - - if (name.Length > 0xFFFF) - { - throw new ZipException("Entry name too long."); - } - - var ed = new ZipExtraData(entry.ExtraData); - - if (entry.LocalHeaderRequiresZip64) - { - ed.StartNewEntry(); - if (headerInfoAvailable) - { - ed.AddLeLong(entry.Size); - ed.AddLeLong(entry.CompressedSize + entry.EncryptionOverheadSize); - } - else - { - ed.AddLeLong(-1); - ed.AddLeLong(-1); - } - ed.AddNewEntry(1); - - if (!ed.Find(1)) - { - throw new ZipException("Internal error cant find extra data"); - } - - if (patchEntryHeader) - { - sizePatchPos = ed.CurrentReadIndex; - } - } - else - { - ed.Delete(1); - } - - if (entry.AESKeySize > 0) - { - AddExtraDataAES(entry, ed); - } - byte[] extra = ed.GetEntryData(); - WriteLeShort(name.Length); - WriteLeShort(extra.Length); - - if (name.Length > 0) - { - baseOutputStream_.Write(name, 0, name.Length); - } - - if (entry.LocalHeaderRequiresZip64 && patchEntryHeader) - { - sizePatchPos += baseOutputStream_.Position; - } + // Write the local file header + offset += ZipFormat.WriteLocalHeader(stream, entry, out var entryPatchData, + headerInfoAvailable, patchEntryHeader, streamOffset, _stringCodec); - if (extra.Length > 0) - { - baseOutputStream_.Write(extra, 0, extra.Length); - } + patchData = entryPatchData; - offset += ZipConstants.LocalHeaderBaseSize + name.Length + extra.Length; // Fix offsetOfCentraldir for AES if (entry.AESKeySize > 0) offset += entry.AESOverheadSize; // Activate the entry. curEntry = entry; + size = 0; + + if(entryIsPassthrough) + return; + crc.Reset(); if (method == CompressionMethod.Deflated) { deflater_.Reset(); deflater_.SetLevel(compressionLevel); } - size = 0; + } - if (entry.IsCrypted) + /// + /// Starts a new Zip entry. It automatically closes the previous + /// entry if present. + /// All entry elements bar name are optional, but must be correct if present. + /// If the compression method is stored and the output is not patchable + /// the compression for that entry is automatically changed to deflate level 0 + /// + /// + /// the entry. + /// + /// The that can be used to cancel the operation. + /// + /// if entry passed is null. + /// + /// + /// if an I/O error occured. + /// + /// + /// if stream was finished + /// + /// + /// Too many entries in the Zip file
+ /// Entry name is too long
+ /// Finish has already been called
+ ///
+ /// + /// The Compression method specified for the entry is unsupported. + /// + public async Task PutNextEntryAsync(ZipEntry entry, CancellationToken ct = default) + { + if (curEntry != null) await CloseEntryAsync(ct).ConfigureAwait(false); + var position = CanPatchEntries ? baseOutputStream_.Position : -1; + await baseOutputStream_.WriteProcToStreamAsync(s => { - if (entry.AESKeySize > 0) - { - WriteAESHeader(entry); - } - else - { - if (entry.Crc < 0) - { // so testing Zip will says its ok - WriteEncryptionHeader(entry.DosTime << 16); - } - else - { - WriteEncryptionHeader(entry.Crc); - } - } - } + PutNextEntry(s, entry, position); + }, ct).ConfigureAwait(false); + + if (!entry.IsCrypted) return; + await WriteOutputAsync(GetEntryEncryptionHeader(entry)).ConfigureAwait(false); } /// @@ -535,37 +551,98 @@ public void PutNextEntry(ZipEntry entry) /// public void CloseEntry() { - if (curEntry == null) + // Note: This method will run synchronously + FinishCompressionSyncOrAsync(null).GetAwaiter().GetResult(); + WriteEntryFooter(baseOutputStream_); + + // Patch the header if possible + if (patchEntryHeader) { - throw new InvalidOperationException("No open entry"); + patchEntryHeader = false; + ZipFormat.PatchLocalHeaderSync(baseOutputStream_, curEntry, patchData); } - long csize = size; + entries.Add(curEntry); + curEntry = null; + } + + private async Task FinishCompressionSyncOrAsync(CancellationToken? ct) + { + // Compression handled externally + if (entryIsPassthrough) return; // First finish the deflater, if appropriate if (curMethod == CompressionMethod.Deflated) { if (size >= 0) { - base.Finish(); - csize = deflater_.TotalOut; + if (ct.HasValue) { + await base.FinishAsync(ct.Value).ConfigureAwait(false); + } else { + base.Finish(); + } } else { deflater_.Reset(); } } - else if (curMethod == CompressionMethod.Stored) + if (curMethod == CompressionMethod.Stored) { // This is done by Finish() for Deflated entries, but we need to do it // ourselves for Stored ones base.GetAuthCodeIfAES(); } + return; + } + + /// + public async Task CloseEntryAsync(CancellationToken ct) + { + await FinishCompressionSyncOrAsync(ct).ConfigureAwait(false); + await baseOutputStream_.WriteProcToStreamAsync(WriteEntryFooter, ct).ConfigureAwait(false); + + // Patch the header if possible + if (patchEntryHeader) + { + patchEntryHeader = false; + await ZipFormat.PatchLocalHeaderAsync(baseOutputStream_, curEntry, patchData, ct).ConfigureAwait(false); + } + + entries.Add(curEntry); + curEntry = null; + } + + internal void WriteEntryFooter(Stream stream) + { + if (curEntry == null) + { + throw new InvalidOperationException("No open entry"); + } + + if(entryIsPassthrough) + { + if(curEntry.CompressedSize != size) + { + throw new ZipException($"compressed size was {size}, but {curEntry.CompressedSize} expected"); + } + + offset += size; + return; + } + + long csize = size; + + if (curMethod == CompressionMethod.Deflated && size >= 0) + { + csize = deflater_.TotalOut; + } + // Write the AES Authentication Code (a hash of the compressed and encrypted data) if (curEntry.AESKeySize > 0) { - baseOutputStream_.Write(AESAuthCode, 0, 10); + stream.Write(AESAuthCode, 0, 10); // Always use 0 as CRC for AE-2 format curEntry.Crc = 0; } @@ -606,94 +683,72 @@ public void CloseEntry() curEntry.CompressedSize += curEntry.EncryptionOverheadSize; } - // Patch the header if possible - if (patchEntryHeader) - { - patchEntryHeader = false; - - long curPos = baseOutputStream_.Position; - baseOutputStream_.Seek(crcPatchPos, SeekOrigin.Begin); 
- WriteLeInt((int)curEntry.Crc); - - if (curEntry.LocalHeaderRequiresZip64) - { - if (sizePatchPos == -1) - { - throw new ZipException("Entry requires zip64 but this has been turned off"); - } - - baseOutputStream_.Seek(sizePatchPos, SeekOrigin.Begin); - WriteLeLong(curEntry.Size); - WriteLeLong(curEntry.CompressedSize); - } - else - { - WriteLeInt((int)curEntry.CompressedSize); - WriteLeInt((int)curEntry.Size); - } - baseOutputStream_.Seek(curPos, SeekOrigin.Begin); - } - // Add data descriptor if flagged as required if ((curEntry.Flags & 8) != 0) { - WriteLeInt(ZipConstants.DataDescriptorSignature); - WriteLeInt(unchecked((int)curEntry.Crc)); + stream.WriteLEInt(ZipConstants.DataDescriptorSignature); + stream.WriteLEInt(unchecked((int)curEntry.Crc)); if (curEntry.LocalHeaderRequiresZip64) { - WriteLeLong(curEntry.CompressedSize); - WriteLeLong(curEntry.Size); + stream.WriteLELong(curEntry.CompressedSize); + stream.WriteLELong(curEntry.Size); offset += ZipConstants.Zip64DataDescriptorSize; } else { - WriteLeInt((int)curEntry.CompressedSize); - WriteLeInt((int)curEntry.Size); + stream.WriteLEInt((int)curEntry.CompressedSize); + stream.WriteLEInt((int)curEntry.Size); offset += ZipConstants.DataDescriptorSize; } } - - entries.Add(curEntry); - curEntry = null; } - /// - /// Initializes encryption keys based on given . - /// - /// The password. - private void InitializePassword(string password) - { - var pkManaged = new PkzipClassicManaged(); - byte[] key = PkzipClassic.GenerateKeys(ZipStrings.ConvertToArray(password)); - cryptoTransform_ = pkManaged.CreateEncryptor(key, null); - } + + // File format for AES: + // Size (bytes) Content + // ------------ ------- + // Variable Salt value + // 2 Password verification value + // Variable Encrypted file data + // 10 Authentication code + // + // Value in the "compressed size" fields of the local file header and the central directory entry + // is the total size of all the items listed above. In other words, it is the total size of the + // salt value, password verification value, encrypted data, and authentication code. + /// /// Initializes encryption keys based on given password. 
/// - private void InitializeAESPassword(ZipEntry entry, string rawPassword, - out byte[] salt, out byte[] pwdVerifier) + protected byte[] InitializeAESPassword(ZipEntry entry, string rawPassword) { - salt = new byte[entry.AESSaltLen]; - + var salt = new byte[entry.AESSaltLen]; // Salt needs to be cryptographically random, and unique per file + if (_aesRnd == null) + _aesRnd = RandomNumberGenerator.Create(); _aesRnd.GetBytes(salt); - int blockSize = entry.AESKeySize / 8; // bits to bytes cryptoTransform_ = new ZipAESTransform(rawPassword, salt, blockSize, true); - pwdVerifier = ((ZipAESTransform)cryptoTransform_).PwdVerifier; - } - private void WriteEncryptionHeader(long crcValue) + var headBytes = new byte[salt.Length + 2]; + + Array.Copy(salt, headBytes, salt.Length); + Array.Copy(((ZipAESTransform)cryptoTransform_).PwdVerifier, 0, + headBytes, headBytes.Length - 2, 2); + + return headBytes; + } + + private byte[] CreateZipCryptoHeader(long crcValue) { offset += ZipConstants.CryptoHeaderSize; - InitializePassword(Password); + InitializeZipCryptoPassword(Password); byte[] cryptBuffer = new byte[ZipConstants.CryptoHeaderSize]; - using (var rng = new RNGCryptoServiceProvider()) + using (var rng = RandomNumberGenerator.Create()) { rng.GetBytes(cryptBuffer); } @@ -701,47 +756,21 @@ private void WriteEncryptionHeader(long crcValue) cryptBuffer[11] = (byte)(crcValue >> 24); EncryptBlock(cryptBuffer, 0, cryptBuffer.Length); - baseOutputStream_.Write(cryptBuffer, 0, cryptBuffer.Length); - } - private static void AddExtraDataAES(ZipEntry entry, ZipExtraData extraData) - { - // Vendor Version: AE-1 IS 1. AE-2 is 2. With AE-2 no CRC is required and 0 is stored. - const int VENDOR_VERSION = 2; - // Vendor ID is the two ASCII characters "AE". - const int VENDOR_ID = 0x4541; //not 6965; - extraData.StartNewEntry(); - // Pack AES extra data field see http://www.winzip.com/aes_info.htm - //extraData.AddLeShort(7); // Data size (currently 7) - extraData.AddLeShort(VENDOR_VERSION); // 2 = AE-2 - extraData.AddLeShort(VENDOR_ID); // "AE" - extraData.AddData(entry.AESEncryptionStrength); // 1 = 128, 2 = 192, 3 = 256 - extraData.AddLeShort((int)entry.CompressionMethod); // The actual compression method used to compress the file - extraData.AddNewEntry(0x9901); + return cryptBuffer; } - - // Replaces WriteEncryptionHeader for AES - // - private void WriteAESHeader(ZipEntry entry) + + /// + /// Initializes encryption keys based on given . + /// + /// The password. + private void InitializeZipCryptoPassword(string password) { - byte[] salt; - byte[] pwdVerifier; - InitializeAESPassword(entry, Password, out salt, out pwdVerifier); - // File format for AES: - // Size (bytes) Content - // ------------ ------- - // Variable Salt value - // 2 Password verification value - // Variable Encrypted file data - // 10 Authentication code - // - // Value in the "compressed size" fields of the local file header and the central directory entry - // is the total size of all the items listed above. In other words, it is the total size of the - // salt value, password verification value, encrypted data, and authentication code. - baseOutputStream_.Write(salt, 0, salt.Length); - baseOutputStream_.Write(pwdVerifier, 0, pwdVerifier.Length); + var pkManaged = new PkzipClassicManaged(); + byte[] key = PkzipClassic.GenerateKeys(ZipCryptoEncoding.GetBytes(password)); + cryptoTransform_ = pkManaged.CreateEncryptor(key, null); } - + /// /// Writes the given buffer to the current entry. 
/// @@ -751,6 +780,13 @@ private void WriteAESHeader(ZipEntry entry) /// Archive size is invalid /// No entry is active. public override void Write(byte[] buffer, int offset, int count) + => WriteSyncOrAsync(buffer, offset, count, null).GetAwaiter().GetResult(); + + /// + public override async Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken ct) + => await WriteSyncOrAsync(buffer, offset, count, ct).ConfigureAwait(false); + + private async Task WriteSyncOrAsync(byte[] buffer, int offset, int count, CancellationToken? ct) { if (curEntry == null) { @@ -777,40 +813,52 @@ public override void Write(byte[] buffer, int offset, int count) throw new ArgumentException("Invalid offset/count combination"); } - if (curEntry.AESKeySize == 0) + if (curEntry.AESKeySize == 0 && !entryIsPassthrough) { - // Only update CRC if AES is not enabled + // Only update CRC if AES is not enabled and entry is not a passthrough one crc.Update(new ArraySegment(buffer, offset, count)); } size += count; - switch (curMethod) + if (curMethod == CompressionMethod.Stored || entryIsPassthrough) { - case CompressionMethod.Deflated: - base.Write(buffer, offset, count); - break; - - case CompressionMethod.Stored: - if (Password != null) + if (Password != null) + { + CopyAndEncrypt(buffer, offset, count); + } + else + { + if (ct.HasValue) { - CopyAndEncrypt(buffer, offset, count); + await baseOutputStream_.WriteAsync(buffer, offset, count, ct.Value).ConfigureAwait(false); } else { baseOutputStream_.Write(buffer, offset, count); } - break; + } + } + else + { + if (ct.HasValue) + { + await base.WriteAsync(buffer, offset, count, ct.Value).ConfigureAwait(false); + } + else + { + base.Write(buffer, offset, count); + } } } private void CopyAndEncrypt(byte[] buffer, int offset, int count) { - const int CopyBufferSize = 4096; - byte[] localBuffer = new byte[CopyBufferSize]; + const int copyBufferSize = 4096; + byte[] localBuffer = new byte[copyBufferSize]; while (count > 0) { - int bufferCount = (count < CopyBufferSize) ? count : CopyBufferSize; + int bufferCount = (count < copyBufferSize) ? 
count : copyBufferSize; Array.Copy(buffer, offset, localBuffer, 0, bufferCount); EncryptBlock(localBuffer, 0, bufferCount); @@ -849,144 +897,48 @@ public override void Finish() long numEntries = entries.Count; long sizeEntries = 0; - foreach (ZipEntry entry in entries) + foreach (var entry in entries) { - WriteLeInt(ZipConstants.CentralHeaderSignature); - WriteLeShort((entry.HostSystem << 8) | entry.VersionMadeBy); - WriteLeShort(entry.Version); - WriteLeShort(entry.Flags); - WriteLeShort((short)entry.CompressionMethodForHeader); - WriteLeInt((int)entry.DosTime); - WriteLeInt((int)entry.Crc); - - if (entry.IsZip64Forced() || - (entry.CompressedSize >= uint.MaxValue)) - { - WriteLeInt(-1); - } - else - { - WriteLeInt((int)entry.CompressedSize); - } - - if (entry.IsZip64Forced() || - (entry.Size >= uint.MaxValue)) - { - WriteLeInt(-1); - } - else - { - WriteLeInt((int)entry.Size); - } - - byte[] name = ZipStrings.ConvertToArray(entry.Flags, entry.Name); - - if (name.Length > 0xffff) - { - throw new ZipException("Name too long."); - } - - var ed = new ZipExtraData(entry.ExtraData); - - if (entry.CentralHeaderRequiresZip64) - { - ed.StartNewEntry(); - if (entry.IsZip64Forced() || - (entry.Size >= 0xffffffff)) - { - ed.AddLeLong(entry.Size); - } - - if (entry.IsZip64Forced() || - (entry.CompressedSize >= 0xffffffff)) - { - ed.AddLeLong(entry.CompressedSize); - } + sizeEntries += ZipFormat.WriteEndEntry(baseOutputStream_, entry, _stringCodec); + } - if (entry.Offset >= 0xffffffff) - { - ed.AddLeLong(entry.Offset); - } + ZipFormat.WriteEndOfCentralDirectory(baseOutputStream_, numEntries, sizeEntries, offset, zipComment); - ed.AddNewEntry(1); - } - else - { - ed.Delete(1); - } + entries = null; + } - if (entry.AESKeySize > 0) + /// > + public override async Task FinishAsync(CancellationToken ct) + { + using (var ms = new MemoryStream()) + { + if (entries == null) { - AddExtraDataAES(entry, ed); + return; } - byte[] extra = ed.GetEntryData(); - byte[] entryComment = - (entry.Comment != null) ? 
- ZipStrings.ConvertToArray(entry.Flags, entry.Comment) : - Empty.Array(); - - if (entryComment.Length > 0xffff) + if (curEntry != null) { - throw new ZipException("Comment too long."); + await CloseEntryAsync(ct).ConfigureAwait(false); } - WriteLeShort(name.Length); - WriteLeShort(extra.Length); - WriteLeShort(entryComment.Length); - WriteLeShort(0); // disk number - WriteLeShort(0); // internal file attributes - // external file attributes + long numEntries = entries.Count; + long sizeEntries = 0; - if (entry.ExternalFileAttributes != -1) + foreach (var entry in entries) { - WriteLeInt(entry.ExternalFileAttributes); - } - else - { - if (entry.IsDirectory) - { // mark entry as directory (from nikolam.AT.perfectinfo.com) - WriteLeInt(16); - } - else + await baseOutputStream_.WriteProcToStreamAsync(ms, s => { - WriteLeInt(0); - } - } - - if (entry.Offset >= uint.MaxValue) - { - WriteLeInt(-1); - } - else - { - WriteLeInt((int)entry.Offset); - } - - if (name.Length > 0) - { - baseOutputStream_.Write(name, 0, name.Length); - } - - if (extra.Length > 0) - { - baseOutputStream_.Write(extra, 0, extra.Length); + sizeEntries += ZipFormat.WriteEndEntry(s, entry, _stringCodec); + }, ct).ConfigureAwait(false); } - if (entryComment.Length > 0) - { - baseOutputStream_.Write(entryComment, 0, entryComment.Length); - } - - sizeEntries += ZipConstants.CentralHeaderBaseSize + name.Length + extra.Length + entryComment.Length; - } + await baseOutputStream_.WriteProcToStreamAsync(ms, s + => ZipFormat.WriteEndOfCentralDirectory(s, numEntries, sizeEntries, offset, zipComment), + ct).ConfigureAwait(false); - using (ZipHelperStream zhs = new ZipHelperStream(baseOutputStream_)) - { - zhs.WriteEndOfCentralDirectory(numEntries, sizeEntries, offset, zipComment); + entries = null; } - - entries = null; } /// @@ -1022,6 +974,8 @@ public override void Flush() /// private ZipEntry curEntry; + private bool entryIsPassthrough; + private int defaultCompressionLevel = Deflater.DEFAULT_COMPRESSION; private CompressionMethod curMethod = CompressionMethod.Deflated; @@ -1047,14 +1001,9 @@ public override void Flush() private bool patchEntryHeader; /// - /// Position to patch crc - /// - private long crcPatchPos = -1; - - /// - /// Position to patch size. + /// The values to patch in the entry local header /// - private long sizePatchPos = -1; + private EntryPatchData patchData; // Default is dynamic which is not backwards compatible and can cause problems // with XP's built in compression which cant read Zip64 archives. diff --git a/src/ICSharpCode.SharpZipLib/Zip/ZipStrings.cs b/src/ICSharpCode.SharpZipLib/Zip/ZipStrings.cs index 2d0c4cff4..3eab416ef 100644 --- a/src/ICSharpCode.SharpZipLib/Zip/ZipStrings.cs +++ b/src/ICSharpCode.SharpZipLib/Zip/ZipStrings.cs @@ -4,191 +4,257 @@ namespace ICSharpCode.SharpZipLib.Zip { + internal static class EncodingExtensions + { + public static bool IsZipUnicode(this Encoding e) + => e.Equals(StringCodec.UnicodeZipEncoding); + } + /// - /// This static class contains functions for encoding and decoding zip file strings + /// Deprecated way of setting zip encoding provided for backwards compability. + /// Use when possible. /// + /// + /// If any ZipStrings properties are being modified, it will enter a backwards compatibility mode, mimicking the + /// old behaviour where a single instance was shared between all Zip* instances. 
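The WriteAsync/FinishAsync members introduced above give callers a fully asynchronous path for producing an archive. A rough usage sketch, with the method shapes taken from the signatures in this diff and placeholder file names:

// Sketch: writing an entry through the new async path.
using System.IO;
using System.Threading;
using ICSharpCode.SharpZipLib.Zip;

using var output = new ZipOutputStream(File.Create("archive.zip"));
output.PutNextEntry(new ZipEntry("data.bin"));

byte[] payload = await File.ReadAllBytesAsync("data.bin");
await output.WriteAsync(payload, 0, payload.Length, CancellationToken.None);

// FinishAsync closes the open entry and writes the central directory asynchronously.
await output.FinishAsync(CancellationToken.None);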
+ /// public static class ZipStrings { - static ZipStrings() + static StringCodec CompatCodec = StringCodec.Default; + + private static bool compatibilityMode; + + /// + /// Returns a new instance or the shared backwards compatible instance. + /// + /// + public static StringCodec GetStringCodec() + => compatibilityMode ? CompatCodec : StringCodec.Default; + + /// + [Obsolete("Use ZipFile/Zip*Stream StringCodec instead")] + public static int CodePage { - try + get => CompatCodec.CodePage; + set { - var platformCodepage = Encoding.GetEncoding(0).CodePage; - SystemDefaultCodePage = (platformCodepage == 1 || platformCodepage == 2 || platformCodepage == 3 || platformCodepage == 42) ? FallbackCodePage : platformCodepage; + CompatCodec = new StringCodec(CompatCodec.ForceZipLegacyEncoding, Encoding.GetEncoding(value)) + { + ZipArchiveCommentEncoding = CompatCodec.ZipArchiveCommentEncoding, + ZipCryptoEncoding = CompatCodec.ZipCryptoEncoding, + }; + compatibilityMode = true; } - catch + } + + /// + [Obsolete("Use ZipFile/Zip*Stream StringCodec instead")] + public static int SystemDefaultCodePage => StringCodec.SystemDefaultCodePage; + + /// + [Obsolete("Use ZipFile/Zip*Stream StringCodec instead")] + public static bool UseUnicode + { + get => !CompatCodec.ForceZipLegacyEncoding; + set { - SystemDefaultCodePage = FallbackCodePage; + CompatCodec = new StringCodec(!value, CompatCodec.LegacyEncoding) + { + ZipArchiveCommentEncoding = CompatCodec.ZipArchiveCommentEncoding, + ZipCryptoEncoding = CompatCodec.ZipCryptoEncoding, + }; + compatibilityMode = true; } } - /// Code page backing field + /// + [Obsolete("Use ZipFile/Zip*Stream StringCodec instead")] + private static bool HasUnicodeFlag(int flags) + => ((GeneralBitFlags)flags).HasFlag(GeneralBitFlags.UnicodeText); + + /// + [Obsolete("Use ZipFile/Zip*Stream StringCodec instead")] + public static string ConvertToString(byte[] data, int count) + => CompatCodec.ZipOutputEncoding.GetString(data, 0, count); + + /// + [Obsolete("Use ZipFile/Zip*Stream StringCodec instead")] + public static string ConvertToString(byte[] data) + => CompatCodec.ZipOutputEncoding.GetString(data); + + /// + [Obsolete("Use ZipFile/Zip*Stream StringCodec instead")] + public static string ConvertToStringExt(int flags, byte[] data, int count) + => CompatCodec.ZipEncoding(HasUnicodeFlag(flags)).GetString(data, 0, count); + + /// + [Obsolete("Use ZipFile/Zip*Stream StringCodec instead")] + public static string ConvertToStringExt(int flags, byte[] data) + => CompatCodec.ZipEncoding(HasUnicodeFlag(flags)).GetString(data); + + /// + [Obsolete("Use ZipFile/Zip*Stream StringCodec instead")] + public static byte[] ConvertToArray(string str) + => ConvertToArray(0, str); + + /// + [Obsolete("Use ZipFile/Zip*Stream StringCodec instead")] + public static byte[] ConvertToArray(int flags, string str) + => (string.IsNullOrEmpty(str)) + ? 
Empty.Array() + : CompatCodec.ZipEncoding(HasUnicodeFlag(flags)).GetBytes(str); + } + + /// + /// Utility class for resolving the encoding used for reading and writing strings + /// + public class StringCodec + { + internal StringCodec(bool forceLegacyEncoding, Encoding legacyEncoding) + { + LegacyEncoding = legacyEncoding; + ForceZipLegacyEncoding = forceLegacyEncoding; + ZipArchiveCommentEncoding = legacyEncoding; + ZipCryptoEncoding = legacyEncoding; + } + + /// + /// Creates a StringCodec that uses the system default encoder or UTF-8 depending on whether the zip entry Unicode flag is set + /// + public static StringCodec Default + => new StringCodec(false, SystemDefaultEncoding); + + /// + /// Creates a StringCodec that uses an encoding from the specified code page except for zip entries with the Unicode flag + /// + public static StringCodec FromCodePage(int codePage) + => new StringCodec(false, Encoding.GetEncoding(codePage)); + + /// + /// Creates a StringCodec that uses an the specified encoding, except for zip entries with the Unicode flag + /// + public static StringCodec FromEncoding(Encoding encoding) + => new StringCodec(false, encoding); + + /// + /// Creates a StringCodec that uses the zip specification encoder or UTF-8 depending on whether the zip entry Unicode flag is set + /// + public static StringCodec WithStrictSpecEncoding() + => new StringCodec(false, Encoding.GetEncoding(ZipSpecCodePage)); + + /// + /// If set, use the encoding set by for zip entries instead of the defaults + /// + public bool ForceZipLegacyEncoding { get; internal set; } + + /// + /// The default encoding used for ZipCrypto passwords in zip files, set to + /// for greatest compability. + /// + public static Encoding DefaultZipCryptoEncoding => SystemDefaultEncoding; + + /// + /// Returns the encoding for an output . + /// Unless overriden by it returns . + /// + public Encoding ZipOutputEncoding => ZipEncoding(!ForceZipLegacyEncoding); + + /// + /// Returns if is set, otherwise it returns the encoding indicated by + /// + public Encoding ZipEncoding(bool unicode) + => unicode ? UnicodeZipEncoding : LegacyEncoding; + + /// + /// Returns the appropriate encoding for an input according to . + /// If overridden by , it always returns the encoding indicated by . + /// + /// + /// + public Encoding ZipInputEncoding(GeneralBitFlags flags) + => ZipEncoding(!ForceZipLegacyEncoding && flags.HasAny(GeneralBitFlags.UnicodeText)); + + /// + public Encoding ZipInputEncoding(int flags) => ZipInputEncoding((GeneralBitFlags)flags); + + /// Code page encoding, used for non-unicode strings /// /// The original Zip specification (https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT) states /// that file names should only be encoded with IBM Code Page 437 or UTF-8. /// In practice, most zip apps use OEM or system encoding (typically cp437 on Windows). - /// Let's be good citizens and default to UTF-8 http://utf8everywhere.org/ /// - private static int codePage = AutomaticCodePage; - - /// Automatically select codepage while opening archive - /// see https://github.com/icsharpcode/SharpZipLib/pull/280#issuecomment-433608324 - /// - private const int AutomaticCodePage = -1; + public Encoding LegacyEncoding { get; internal set; } /// - /// Encoding used for string conversion. Setting this to 65001 (UTF-8) will - /// also set the Language encoding flag to indicate UTF-8 encoded file names. 
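Since StringCodec replaces the global ZipStrings switches, a short sketch of the resolution rules above may help. The calls are taken from this diff; how a codec instance is attached to ZipFile or the Zip*Stream classes is only hinted at by the obsolete messages and is left out here. Note that legacy code pages such as 866 or 437 require the System.Text.Encoding.CodePages provider on .NET Core and later.

// Sketch: how a StringCodec resolves entry-name encodings.
using System.Text;
using ICSharpCode.SharpZipLib.Zip;

Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); // legacy code pages on .NET (Core)

// CP866 for non-Unicode entries, UTF-8 whenever an entry's Unicode flag is set.
var codec = StringCodec.FromCodePage(866);

Encoding legacy   = codec.ZipEncoding(false);                                  // CP866
Encoding unicode  = codec.ZipEncoding(true);                                   // UTF-8 (UnicodeZipEncoding)
Encoding forEntry = codec.ZipInputEncoding((int)GeneralBitFlags.UnicodeText);  // UTF-8, the flag wins

// Ignore the Unicode flag entirely and always decode with CP866:
var forced = codec.WithForcedLegacyEncoding();
Encoding always866 = forced.ZipInputEncoding((int)GeneralBitFlags.UnicodeText); // CP866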
+ /// Returns the UTF-8 code page (65001) used for zip entries with unicode flag set /// - public static int CodePage - { - get - { - return codePage == AutomaticCodePage? Encoding.UTF8.CodePage:codePage; - } - set - { - if ((value < 0) || (value > 65535) || - (value == 1) || (value == 2) || (value == 3) || (value == 42)) - { - throw new ArgumentOutOfRangeException(nameof(value)); - } + public static readonly Encoding UnicodeZipEncoding = Encoding.UTF8; - codePage = value; - } - } + /// + /// Code page used for non-unicode strings and legacy zip encoding (if is set). + /// Default value is + /// + public int CodePage => LegacyEncoding.CodePage; - private const int FallbackCodePage = 437; + /// + /// The non-unicode code page that should be used according to the zip specification + /// + public const int ZipSpecCodePage = 437; /// - /// Attempt to get the operating system default codepage, or failing that, to - /// the fallback code page IBM 437. + /// Operating system default codepage. /// - public static int SystemDefaultCodePage { get; } + public static int SystemDefaultCodePage => SystemDefaultEncoding.CodePage; /// - /// Get whether the default codepage is set to UTF-8. Setting this property to false will - /// set the to + /// The system default encoding. /// - /// - /// Get OEM codepage from NetFX, which parses the NLP file with culture info table etc etc. - /// But sometimes it yields the special value of 1 which is nicknamed CodePageNoOEM in sources (might also mean CP_OEMCP, but Encoding puts it so). - /// This was observed on Ukranian and Hindu systems. - /// Given this value, throws an . - /// So replace it with , (IBM 437 which is the default code page in a default Windows installation console. - /// - public static bool UseUnicode - { - get - { - return codePage == Encoding.UTF8.CodePage; - } - set - { - if (value) - { - codePage = Encoding.UTF8.CodePage; - } - else - { - codePage = SystemDefaultCodePage; - } - } - } + public static Encoding SystemDefaultEncoding => Encoding.GetEncoding(0); /// - /// Convert a portion of a byte array to a string using + /// The encoding used for the zip archive comment. Defaults to the encoding for , since + /// no unicode flag can be set for it in the files. /// - /// - /// Data to convert to string - /// - /// - /// Number of bytes to convert starting from index 0 - /// - /// - /// data[0]..data[count - 1] converted to a string - /// - public static string ConvertToString(byte[] data, int count) - => data == null - ? string.Empty - : Encoding.GetEncoding(CodePage).GetString(data, 0, count); + public Encoding ZipArchiveCommentEncoding { get; internal set; } /// - /// Convert a byte array to a string using + /// The encoding used for the ZipCrypto passwords. Defaults to . /// - /// - /// Byte array to convert - /// - /// - /// dataconverted to a string - /// - public static string ConvertToString(byte[] data) - => ConvertToString(data, data.Length); - - private static Encoding EncodingFromFlag(int flags) - => ((flags & (int)GeneralBitFlags.UnicodeText) != 0) - ? Encoding.UTF8 - : Encoding.GetEncoding( - // if CodePage wasn't set manually and no utf flag present - // then we must use SystemDefault (old behavior) - // otherwise, CodePage should be preferred over SystemDefault - // see https://github.com/icsharpcode/SharpZipLib/issues/274 - codePage == AutomaticCodePage? 
- SystemDefaultCodePage: - codePage); - - /// - /// Convert a byte array to a string using - /// - /// The applicable general purpose bits flags - /// - /// Byte array to convert - /// - /// The number of bytes to convert. - /// - /// dataconverted to a string - /// - public static string ConvertToStringExt(int flags, byte[] data, int count) - => (data == null) - ? string.Empty - : EncodingFromFlag(flags).GetString(data, 0, count); + public Encoding ZipCryptoEncoding { get; internal set; } /// - /// Convert a byte array to a string using + /// Create a copy of this StringCodec with the specified zip archive comment encoding /// - /// - /// Byte array to convert - /// - /// The applicable general purpose bits flags - /// - /// dataconverted to a string - /// - public static string ConvertToStringExt(int flags, byte[] data) - => ConvertToStringExt(flags, data, data.Length); + /// + /// + public StringCodec WithZipArchiveCommentEncoding(Encoding commentEncoding) + => new StringCodec(ForceZipLegacyEncoding, LegacyEncoding) + { + ZipArchiveCommentEncoding = commentEncoding, + ZipCryptoEncoding = ZipCryptoEncoding + }; /// - /// Convert a string to a byte array using + /// Create a copy of this StringCodec with the specified zip crypto password encoding /// - /// - /// String to convert to an array - /// - /// Converted array - public static byte[] ConvertToArray(string str) - => str == null - ? Empty.Array() - : Encoding.GetEncoding(CodePage).GetBytes(str); + /// + /// + public StringCodec WithZipCryptoEncoding(Encoding cryptoEncoding) + => new StringCodec(ForceZipLegacyEncoding, LegacyEncoding) + { + ZipArchiveCommentEncoding = ZipArchiveCommentEncoding, + ZipCryptoEncoding = cryptoEncoding + }; /// - /// Convert a string to a byte array using + /// Create a copy of this StringCodec that ignores the Unicode flag when reading entries /// - /// The applicable general purpose bits flags - /// - /// String to convert to an array - /// - /// Converted array - public static byte[] ConvertToArray(int flags, string str) - => (string.IsNullOrEmpty(str)) - ? 
Empty.Array() - : EncodingFromFlag(flags).GetBytes(str); + /// + public StringCodec WithForcedLegacyEncoding() + => new StringCodec(true, LegacyEncoding) + { + ZipArchiveCommentEncoding = ZipArchiveCommentEncoding, + ZipCryptoEncoding = ZipCryptoEncoding + }; } } diff --git a/test/.globalconfig b/test/.globalconfig new file mode 100644 index 000000000..14f57bc66 --- /dev/null +++ b/test/.globalconfig @@ -0,0 +1,3 @@ +is_global = true +global_level = 2 +dotnet_diagnostic.CA2007.severity = none diff --git a/test/ICSharpCode.SharpZipLib.TestBootstrapper/ICSharpCode.SharpZipLib.TestBootstrapper.csproj b/test/ICSharpCode.SharpZipLib.TestBootstrapper/ICSharpCode.SharpZipLib.TestBootstrapper.csproj deleted file mode 100644 index 3e3ba13d6..000000000 --- a/test/ICSharpCode.SharpZipLib.TestBootstrapper/ICSharpCode.SharpZipLib.TestBootstrapper.csproj +++ /dev/null @@ -1,21 +0,0 @@ - - - - Exe - netcoreapp3.1 - - - - - - - - - - - - - - - - diff --git a/test/ICSharpCode.SharpZipLib.TestBootstrapper/Program.cs b/test/ICSharpCode.SharpZipLib.TestBootstrapper/Program.cs deleted file mode 100644 index 4a030de1f..000000000 --- a/test/ICSharpCode.SharpZipLib.TestBootstrapper/Program.cs +++ /dev/null @@ -1,14 +0,0 @@ -using NUnitLite; -using System.Reflection; - -namespace ICSharpCode.SharpZipLib.TestBootstrapper -{ - public class Program - { - private static void Main(string[] args) - { - new AutoRun(typeof(Tests.Base.InflaterDeflaterTestSuite).GetTypeInfo().Assembly) - .Execute(args); - } - } -} diff --git a/test/ICSharpCode.SharpZipLib.Tests/BZip2/Bzip2Tests.cs b/test/ICSharpCode.SharpZipLib.Tests/BZip2/Bzip2Tests.cs index 8d6febc1b..62d5a7874 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/BZip2/Bzip2Tests.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/BZip2/Bzip2Tests.cs @@ -1,7 +1,6 @@ using ICSharpCode.SharpZipLib.BZip2; using ICSharpCode.SharpZipLib.Tests.TestSupport; using NUnit.Framework; -using System; using System.IO; namespace ICSharpCode.SharpZipLib.Tests.BZip2 @@ -24,34 +23,30 @@ public void BasicRoundTrip() { var ms = new MemoryStream(); var outStream = new BZip2OutputStream(ms); + + var buf = Utils.GetDummyBytes(size: 10000, RandomSeed); - byte[] buf = new byte[10000]; - var rnd = new Random(RandomSeed); - rnd.NextBytes(buf); - - outStream.Write(buf, 0, buf.Length); + outStream.Write(buf, offset: 0, buf.Length); outStream.Close(); ms = new MemoryStream(ms.GetBuffer()); - ms.Seek(0, SeekOrigin.Begin); + ms.Seek(offset: 0, SeekOrigin.Begin); - using (BZip2InputStream inStream = new BZip2InputStream(ms)) + using BZip2InputStream inStream = new BZip2InputStream(ms); + var buf2 = new byte[buf.Length]; + var pos = 0; + while (true) { - byte[] buf2 = new byte[buf.Length]; - int pos = 0; - while (true) + var numRead = inStream.Read(buf2, pos, count: 4096); + if (numRead <= 0) { - int numRead = inStream.Read(buf2, pos, 4096); - if (numRead <= 0) - { - break; - } - pos += numRead; + break; } + pos += numRead; + } - for (int i = 0; i < buf.Length; ++i) - { - Assert.AreEqual(buf2[i], buf[i]); - } + for (var i = 0; i < buf.Length; ++i) + { + Assert.AreEqual(buf2[i], buf[i]); } } diff --git a/test/ICSharpCode.SharpZipLib.Tests/Base/InflaterDeflaterTests.cs b/test/ICSharpCode.SharpZipLib.Tests/Base/InflaterDeflaterTests.cs index e6e3c4125..e9ba0ad77 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/Base/InflaterDeflaterTests.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/Base/InflaterDeflaterTests.cs @@ -6,6 +6,7 @@ using System.IO; using System.Security; using System.Text; +using System.Threading; using 
System.Threading.Tasks; namespace ICSharpCode.SharpZipLib.Tests.Base @@ -60,20 +61,10 @@ private MemoryStream Deflate(byte[] data, int level, bool zlib) return memoryStream; } - private static byte[] GetRandomTestData(int size) - { - byte[] buffer = new byte[size]; - var rnd = new Random(RandomSeed); - rnd.NextBytes(buffer); - - return buffer; - } - private void RandomDeflateInflate(int size, int level, bool zlib) { - byte[] buffer = GetRandomTestData(size); - - MemoryStream ms = Deflate(buffer, level, zlib); + var buffer = Utils.GetDummyBytes(size, RandomSeed); + var ms = Deflate(buffer, level, zlib); Inflate(ms, buffer, level, zlib); } @@ -123,16 +114,15 @@ private async Task DeflateAsync(byte[] data, int level, bool zlib) outStream.IsStreamOwner = false; await outStream.WriteAsync(data, 0, data.Length); await outStream.FlushAsync(); - outStream.Finish(); + await outStream.FinishAsync(CancellationToken.None); } return memoryStream; } private async Task RandomDeflateInflateAsync(int size, int level, bool zlib) { - byte[] buffer = GetRandomTestData(size); - - MemoryStream ms = await DeflateAsync(buffer, level, zlib); + var buffer = Utils.GetDummyBytes(size, RandomSeed); + var ms = await DeflateAsync(buffer, level, zlib); await InflateAsync(ms, buffer, level, zlib); } @@ -179,24 +169,21 @@ public void InflateDeflateZlib([Range(0, 9)] int level) [Category("Async")] public async Task InflateDeflateZlibAsync([Range(0, 9)] int level) { - await RandomDeflateInflateAsync(100000, level, true); + await RandomDeflateInflateAsync(size: 100000, level, zlib: true); } private delegate void RunCompress(byte[] buffer); - private int runLevel; - private bool runZlib; - private long runCount; - private readonly Random runRandom = new Random(RandomSeed); + private int _runLevel; + private bool _runZlib; private void DeflateAndInflate(byte[] buffer) { - ++runCount; - MemoryStream ms = Deflate(buffer, runLevel, runZlib); - Inflate(ms, buffer, runLevel, runZlib); + var ms = Deflate(buffer, _runLevel, _runZlib); + Inflate(ms, buffer, _runLevel, _runZlib); } - private void TryVariants(RunCompress test, byte[] buffer, int index) + private void TryVariants(RunCompress test, byte[] buffer, Random random, int index) { int worker = 0; while (worker <= 255) @@ -204,33 +191,34 @@ private void TryVariants(RunCompress test, byte[] buffer, int index) buffer[index] = (byte)worker; if (index < buffer.Length - 1) { - TryVariants(test, buffer, index + 1); + TryVariants(test, buffer, random, index + 1); } else { test(buffer); } - worker += runRandom.Next(256); + worker += random.Next(maxValue: 256); } } private void TryManyVariants(int level, bool zlib, RunCompress test, byte[] buffer) { - runLevel = level; - runZlib = zlib; - TryVariants(test, buffer, 0); + var random = new Random(RandomSeed); + _runLevel = level; + _runZlib = zlib; + TryVariants(test, buffer, random, 0); } // TODO: Fix this - //[Test] - //[Category("Base")] - //public void SmallBlocks() - //{ - // byte[] buffer = new byte[10]; - // Array.Clear(buffer, 0, buffer.Length); - // TryManyVariants(0, false, new RunCompress(DeflateAndInflate), buffer); - //} + [Test] + [Category("Base")] + [Explicit("Long-running")] + public void SmallBlocks() + { + var buffer = new byte[10]; + TryManyVariants(level: 0, zlib: false, DeflateAndInflate, buffer); + } /// /// Basic inflate/deflate test diff --git a/test/ICSharpCode.SharpZipLib.Tests/Core/ByteOrderUtilsTests.cs b/test/ICSharpCode.SharpZipLib.Tests/Core/ByteOrderUtilsTests.cs new file mode 100644 index 000000000..1a5d271ff 
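The round-trip helpers exercised by the inflate/deflate tests above boil down to the following minimal sketch using the public DeflaterOutputStream and InflaterInputStream API; the seed and buffer size are arbitrary.

// Sketch: raw deflate round trip (zlib framing), mirroring what the tests above verify.
using System;
using System.IO;
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;

var data = new byte[100_000];
new Random(42).NextBytes(data);

var compressed = new MemoryStream();
using (var deflate = new DeflaterOutputStream(compressed, new Deflater(Deflater.BEST_COMPRESSION, false)))
{
	deflate.IsStreamOwner = false;   // keep the MemoryStream open for reading back
	deflate.Write(data, 0, data.Length);
}                                    // disposing flushes and finishes the deflate stream

compressed.Position = 0;
using var inflate = new InflaterInputStream(compressed, new Inflater(false)); // false = expect zlib header
using var restored = new MemoryStream();
inflate.CopyTo(restored);
// restored.ToArray() should now be identical to data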
--- /dev/null +++ b/test/ICSharpCode.SharpZipLib.Tests/Core/ByteOrderUtilsTests.cs @@ -0,0 +1,137 @@ +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using NUnit.Framework; +using BO = ICSharpCode.SharpZipLib.Core.ByteOrderStreamExtensions; +using ICSharpCode.SharpZipLib.Core; + +// ReSharper disable InconsistentNaming + +namespace ICSharpCode.SharpZipLib.Tests.Core +{ + [TestFixture] + [Category("Core")] + public class ByteOrderUtilsTests + { + private const short native16 = 0x1234; + private static readonly byte[] swapped16 = { 0x34, 0x12 }; + + private const int native32 = 0x12345678; + private static readonly byte[] swapped32 = { 0x78, 0x56, 0x34, 0x12 }; + + private const long native64 = 0x123456789abcdef0; + private static readonly byte[] swapped64 = { 0xf0, 0xde, 0xbc, 0x9a, 0x78, 0x56, 0x34, 0x12 }; + + [Test] + public void ToSwappedBytes() + { + Assert.AreEqual(swapped16, BO.SwappedBytes(native16)); + Assert.AreEqual(swapped16, BO.SwappedBytes((ushort)native16)); + + Assert.AreEqual(swapped32, BO.SwappedBytes(native32)); + Assert.AreEqual(swapped32, BO.SwappedBytes((uint)native32)); + + Assert.AreEqual(swapped64, BO.SwappedBytes(native64)); + Assert.AreEqual(swapped64, BO.SwappedBytes((ulong)native64)); + } + + [Test] + public void FromSwappedBytes() + { + Assert.AreEqual(native16, BO.SwappedS16(swapped16)); + Assert.AreEqual(native16, BO.SwappedU16(swapped16)); + + Assert.AreEqual(native32, BO.SwappedS32(swapped32)); + Assert.AreEqual(native32, BO.SwappedU32(swapped32)); + + Assert.AreEqual(native64, BO.SwappedS64(swapped64)); + Assert.AreEqual(native64, BO.SwappedU64(swapped64)); + } + + [Test] + public void ReadLESigned16() + => TestReadLE(native16, 2, BO.ReadLEShort); + + [Test] + public void ReadLESigned32() + => TestReadLE(native32,4, BO.ReadLEInt); + + [Test] + public void ReadLESigned64() + => TestReadLE(native64,8, BO.ReadLELong); + + [Test] + public void WriteLESigned16() + => TestWriteLE(swapped16, s => s.WriteLEShort(native16)); + + [Test] + public void WriteLESigned32() + => TestWriteLE(swapped32, s => s.WriteLEInt(native32)); + + [Test] + public void WriteLESigned64() + => TestWriteLE(swapped64, s => s.WriteLELong(native64)); + + [Test] + public void WriteLEUnsigned16() + => TestWriteLE(swapped16, s => s.WriteLEUshort((ushort)native16)); + + [Test] + public void WriteLEUnsigned32() + => TestWriteLE(swapped32, s => s.WriteLEUint(native32)); + + [Test] + public void WriteLEUnsigned64() + => TestWriteLE(swapped64, s => s.WriteLEUlong(native64)); + + [Test] + public async Task WriteLEAsyncSigned16() + => await TestWriteLEAsync(swapped16, (int)native16, BO.WriteLEShortAsync); + + [Test] + public async Task WriteLEAsyncUnsigned16() + => await TestWriteLEAsync(swapped16, (ushort)native16, BO.WriteLEUshortAsync); + + [Test] + public async Task WriteLEAsyncSigned32() + => await TestWriteLEAsync(swapped32, native32, BO.WriteLEIntAsync); + [Test] + public async Task WriteLEAsyncUnsigned32() + => await TestWriteLEAsync(swapped32, (uint)native32, BO.WriteLEUintAsync); + + [Test] + public async Task WriteLEAsyncSigned64() + => await TestWriteLEAsync(swapped64, native64, BO.WriteLELongAsync); + [Test] + public async Task WriteLEAsyncUnsigned64() + => await TestWriteLEAsync(swapped64, (ulong)native64, BO.WriteLEUlongAsync); + + + private static void TestReadLE(T expected, int bytes, Func read) + { + using (var ms = new MemoryStream(swapped64, 8 - bytes, bytes)) + { + Assert.AreEqual(expected, read(ms)); + } + } + + private static void 
TestWriteLE(byte[] expected, Action write) + { + using (var ms = new MemoryStream()) + { + write(ms); + Assert.AreEqual(expected, ms.ToArray()); + } + } + + private static async Task TestWriteLEAsync(byte[] expected, T input, Func write) + { + using (var ms = new MemoryStream()) + { + await write(ms, input, CancellationToken.None); + Assert.AreEqual(expected, ms.ToArray()); + } + } + } +} diff --git a/test/ICSharpCode.SharpZipLib.Tests/Core/StringBuilderPoolTests.cs b/test/ICSharpCode.SharpZipLib.Tests/Core/StringBuilderPoolTests.cs new file mode 100644 index 000000000..85d8c65a9 --- /dev/null +++ b/test/ICSharpCode.SharpZipLib.Tests/Core/StringBuilderPoolTests.cs @@ -0,0 +1,77 @@ +using System.Threading; +using System.Threading.Tasks; +using ICSharpCode.SharpZipLib.Core; +using NUnit.Framework; + +namespace ICSharpCode.SharpZipLib.Tests.Core +{ + [TestFixture] + public class StringBuilderPoolTests + { + [Test] + [Category("Core")] + public void RoundTrip() + { + var pool = new StringBuilderPool(); + var builder1 = pool.Rent(); + pool.Return(builder1); + var builder2 = pool.Rent(); + Assert.AreEqual(builder1, builder2); + } + + [Test] + [Category("Core")] + public void ReturnsClears() + { + var pool = new StringBuilderPool(); + var builder1 = pool.Rent(); + builder1.Append("Hello"); + pool.Return(builder1); + Assert.AreEqual(0, builder1.Length); + } + + [Test] + [Category("Core")] + public async Task ThreadSafeAsync() + { + // use a lot of threads to increase the likelihood of errors + var concurrency = 100; + + var pool = new StringBuilderPool(); + var gate = new TaskCompletionSource(); + var startedTasks = new Task[concurrency]; + var completedTasks = new Task[concurrency]; + for (int i = 0; i < concurrency; i++) + { + var started = new TaskCompletionSource(); + startedTasks[i] = started.Task; + var captured = i; + completedTasks[i] = Task.Run(async () => + { + started.SetResult(true); + await gate.Task; + var builder = pool.Rent(); + builder.Append("Hello "); + builder.Append(captured); + var str = builder.ToString(); + pool.Return(builder); + return str; + }); + } + + // make sure all the threads have started + await Task.WhenAll(startedTasks); + + // let them all loose at the same time + gate.SetResult(true); + + // make sure every thread produces the expected string and hence had its own StringBuilder + var results = await Task.WhenAll(completedTasks); + for (int i = 0; i < concurrency; i++) + { + var result = results[i]; + Assert.AreEqual($"Hello {i}", result); + } + } + } +} diff --git a/test/ICSharpCode.SharpZipLib.Tests/GZip/GZipAsyncTests.cs b/test/ICSharpCode.SharpZipLib.Tests/GZip/GZipAsyncTests.cs new file mode 100644 index 000000000..b259af8cf --- /dev/null +++ b/test/ICSharpCode.SharpZipLib.Tests/GZip/GZipAsyncTests.cs @@ -0,0 +1,186 @@ +using System; +using System.IO; +using System.Text; +using System.Threading.Tasks; +using ICSharpCode.SharpZipLib.GZip; +using ICSharpCode.SharpZipLib.Tests.TestSupport; +using NUnit.Framework; + +namespace ICSharpCode.SharpZipLib.Tests.GZip +{ + [TestFixture] + public class GZipAsyncTests + { + [Test] + [Category("GZip")] + [Category("Async")] + public async Task SmallBufferDecompressionAsync([Values(0, 1, 3)] int seed) + { + var outputBufferSize = 100000; + var outputBuffer = new byte[outputBufferSize]; + var inputBuffer = Utils.GetDummyBytes(outputBufferSize * 4, seed); + +#if NETCOREAPP3_1_OR_GREATER + await using var msGzip = new MemoryStream(); + await using (var gzos = new GZipOutputStream(msGzip){IsStreamOwner = false}) + { + await 
gzos.WriteAsync(inputBuffer, 0, inputBuffer.Length); + } + + msGzip.Seek(0, SeekOrigin.Begin); + + using (var gzis = new GZipInputStream(msGzip)) + await using (var msRaw = new MemoryStream()) + { + int readOut; + while ((readOut = gzis.Read(outputBuffer, 0, outputBuffer.Length)) > 0) + { + await msRaw.WriteAsync(outputBuffer, 0, readOut); + } + + var resultBuffer = msRaw.ToArray(); + for (var i = 0; i < resultBuffer.Length; i++) + { + Assert.AreEqual(inputBuffer[i], resultBuffer[i]); + } + } +#else + using var msGzip = new MemoryStream(); + using (var gzos = new GZipOutputStream(msGzip){IsStreamOwner = false}) + { + await gzos.WriteAsync(inputBuffer, 0, inputBuffer.Length); + } + + msGzip.Seek(0, SeekOrigin.Begin); + + using (var gzis = new GZipInputStream(msGzip)) + using (var msRaw = new MemoryStream()) + { + int readOut; + while ((readOut = gzis.Read(outputBuffer, 0, outputBuffer.Length)) > 0) + { + await msRaw.WriteAsync(outputBuffer, 0, readOut); + } + + var resultBuffer = msRaw.ToArray(); + for (var i = 0; i < resultBuffer.Length; i++) + { + Assert.AreEqual(inputBuffer[i], resultBuffer[i]); + } + } +#endif + } + + /// + /// Basic compress/decompress test + /// + [Test] + [Category("GZip")] + [Category("Async")] + public async Task OriginalFilenameAsync() + { + var content = "FileContents"; + +#if NETCOREAPP3_1_OR_GREATER + await using var ms = new MemoryStream(); + await using (var outStream = new GZipOutputStream(ms) { IsStreamOwner = false }) + { + outStream.FileName = "/path/to/file.ext"; + outStream.Write(Encoding.ASCII.GetBytes(content)); + } +#else + var ms = new MemoryStream(); + var outStream = new GZipOutputStream(ms){ IsStreamOwner = false }; + outStream.FileName = "/path/to/file.ext"; + var bytes = Encoding.ASCII.GetBytes(content); + outStream.Write(bytes, 0, bytes.Length); + await outStream.FinishAsync(System.Threading.CancellationToken.None); + outStream.Dispose(); + +#endif + ms.Seek(0, SeekOrigin.Begin); + + using (var inStream = new GZipInputStream(ms)) + { + var readBuffer = new byte[content.Length]; + inStream.Read(readBuffer, 0, readBuffer.Length); + Assert.AreEqual(content, Encoding.ASCII.GetString(readBuffer)); + Assert.AreEqual("file.ext", inStream.GetFilename()); + } + } + + /// + /// Test creating an empty gzip stream using async + /// + [Test] + [Category("GZip")] + [Category("Async")] + public async Task EmptyGZipStreamAsync() + { +#if NETCOREAPP3_1_OR_GREATER + await using var ms = new MemoryStream(); + await using (var outStream = new GZipOutputStream(ms) { IsStreamOwner = false }) + { + // No content + } +#else + var ms = new MemoryStream(); + var outStream = new GZipOutputStream(ms){ IsStreamOwner = false }; + await outStream.FinishAsync(System.Threading.CancellationToken.None); + outStream.Dispose(); + +#endif + ms.Seek(0, SeekOrigin.Begin); + + using (var inStream = new GZipInputStream(ms)) + using (var reader = new StreamReader(inStream)) + { + var content = await reader.ReadToEndAsync(); + Assert.IsEmpty(content); + } + } + + [Test] + [Category("GZip")] + [Category("Async")] + public async Task WriteGZipStreamToAsyncOnlyStream() + { +#if NETSTANDARD2_1 || NETCOREAPP3_0_OR_GREATER + var content = Encoding.ASCII.GetBytes("a"); + var modTime = DateTime.UtcNow; + + await using (var msAsync = new MemoryStreamWithoutSync()) + { + await using (var outStream = new GZipOutputStream(msAsync) { IsStreamOwner = false }) + { + outStream.ModifiedTime = modTime; + await outStream.WriteAsync(content); + } + + using var msSync = new MemoryStream(); + using (var 
outStream = new GZipOutputStream(msSync) { IsStreamOwner = false }) + { + outStream.ModifiedTime = modTime; + outStream.Write(content); + } + + var syncBytes = string.Join(' ', msSync.ToArray()); + var asyncBytes = string.Join(' ', msAsync.ToArray()); + + Assert.AreEqual(syncBytes, asyncBytes, "Sync and Async compressed streams are not equal"); + + // Since GZipInputStream isn't async yet we need to read from it from a regular MemoryStream + using (var readStream = new MemoryStream(msAsync.ToArray())) + using (var inStream = new GZipInputStream(readStream)) + using (var reader = new StreamReader(inStream)) + { + Assert.AreEqual(content, await reader.ReadToEndAsync()); + } + } +#else + await Task.CompletedTask; + Assert.Ignore("AsyncDispose is not supported"); +#endif + } + } +} diff --git a/test/ICSharpCode.SharpZipLib.Tests/GZip/GZipTests.cs b/test/ICSharpCode.SharpZipLib.Tests/GZip/GZipTests.cs index 8a9f61d69..3241fd134 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/GZip/GZipTests.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/GZip/GZipTests.cs @@ -4,6 +4,8 @@ using System; using System.IO; using System.Text; +using System.Threading; +using System.Threading.Tasks; namespace ICSharpCode.SharpZipLib.Tests.GZip { @@ -23,9 +25,7 @@ public void TestGZip() var ms = new MemoryStream(); var outStream = new GZipOutputStream(ms); - byte[] buf = new byte[100000]; - var rnd = new Random(); - rnd.NextBytes(buf); + var buf = Utils.GetDummyBytes(size: 100000); outStream.Write(buf, 0, buf.Length); outStream.Flush(); @@ -64,17 +64,15 @@ public void TestGZip() [Category("GZip")] public void DelayedHeaderWriteNoData() { - var ms = new MemoryStream(); - Assert.AreEqual(0, ms.Length); + using var ms = new MemoryStream(); + Assert.Zero(ms.Length); - using (GZipOutputStream outStream = new GZipOutputStream(ms)) + using (new GZipOutputStream(ms)) { - Assert.AreEqual(0, ms.Length); + Assert.Zero(ms.Length); } - byte[] data = ms.ToArray(); - - Assert.IsTrue(data.Length > 0); + Assert.NotZero(ms.ToArray().Length); } @@ -260,7 +258,7 @@ public void DoubleClose() s.Close(); memStream = new TrackedMemoryStream(); - using (GZipOutputStream no2 = new GZipOutputStream(memStream)) + using (new GZipOutputStream(memStream)) { s.Close(); } @@ -273,14 +271,7 @@ public void WriteAfterFinish() var s = new GZipOutputStream(memStream); s.Finish(); - try - { - s.WriteByte(7); - Assert.Fail("Write should fail"); - } - catch - { - } + Assert.Throws(() => s.WriteByte(value: 7), "Write should fail"); } [Test] @@ -290,14 +281,7 @@ public void WriteAfterClose() var s = new GZipOutputStream(memStream); s.Close(); - try - { - s.WriteByte(7); - Assert.Fail("Write should fail"); - } - catch - { - } + Assert.Throws(() => s.WriteByte(value: 7), "Write should fail"); } /// @@ -311,9 +295,7 @@ public void TrailingGarbage() var outStream = new GZipOutputStream(ms); // input buffer to be compressed - byte[] buf = new byte[100000]; - var rnd = new Random(); - rnd.NextBytes(buf); + var buf = Utils.GetDummyBytes(size: 100000, seed: 3); // compress input buffer outStream.Write(buf, 0, buf.Length); @@ -321,9 +303,7 @@ public void TrailingGarbage() outStream.Finish(); // generate random trailing garbage and add to the compressed stream - byte[] garbage = new byte[4096]; - rnd.NextBytes(garbage); - ms.Write(garbage, 0, garbage.Length); + Utils.WriteDummyData(ms, size: 4096, seed: 4); // rewind the concatenated stream ms.Seek(0, SeekOrigin.Begin); @@ -336,7 +316,7 @@ public void TrailingGarbage() int count = buf2.Length; while (true) { - int numRead = 
inStream.Read(buf2, currentIndex, count); + var numRead = inStream.Read(buf2, currentIndex, count); if (numRead <= 0) { break; @@ -346,7 +326,7 @@ public void TrailingGarbage() } /* ASSERT */ - Assert.AreEqual(0, count); + Assert.Zero(count); for (int i = 0; i < buf.Length; ++i) { Assert.AreEqual(buf2[i], buf[i]); @@ -365,9 +345,7 @@ public void FlushToUnderlyingStream() var ms = new MemoryStream(); var outStream = new GZipOutputStream(ms); - byte[] buf = new byte[100000]; - var rnd = new Random(); - rnd.NextBytes(buf); + byte[] buf = Utils.GetDummyBytes(size: 100000); outStream.Write(buf, 0, buf.Length); // Flush output stream but don't finish it yet @@ -410,52 +388,35 @@ public void FlushToUnderlyingStream() [Test] [Category("GZip")] - public void SmallBufferDecompression() + public void SmallBufferDecompression([Values(0, 1, 3)] int seed) { var outputBufferSize = 100000; - var inputBufferSize = outputBufferSize * 4; - var outputBuffer = new byte[outputBufferSize]; - var inputBuffer = new byte[inputBufferSize]; - - using (var msGzip = new MemoryStream()) + var inputBuffer = Utils.GetDummyBytes(outputBufferSize * 4, seed); + + using var msGzip = new MemoryStream(); + using (var gzos = new GZipOutputStream(msGzip){IsStreamOwner = false}) { - using (var gzos = new GZipOutputStream(msGzip)) + gzos.Write(inputBuffer, 0, inputBuffer.Length); + } + + msGzip.Seek(0, SeekOrigin.Begin); + + using (var gzis = new GZipInputStream(msGzip)) + using (var msRaw = new MemoryStream()) + { + int readOut; + while ((readOut = gzis.Read(outputBuffer, 0, outputBuffer.Length)) > 0) { - gzos.IsStreamOwner = false; - - var rnd = new Random(0); - rnd.NextBytes(inputBuffer); - gzos.Write(inputBuffer, 0, inputBuffer.Length); - - gzos.Flush(); - gzos.Finish(); + msRaw.Write(outputBuffer, 0, readOut); } - msGzip.Seek(0, SeekOrigin.Begin); - - - using (var gzis = new GZipInputStream(msGzip)) - using (var msRaw = new MemoryStream()) + var resultBuffer = msRaw.ToArray(); + for (var i = 0; i < resultBuffer.Length; i++) { - - int readOut; - while ((readOut = gzis.Read(outputBuffer, 0, outputBuffer.Length)) > 0) - { - msRaw.Write(outputBuffer, 0, readOut); - } - - var resultBuffer = msRaw.ToArray(); - - for (var i = 0; i < resultBuffer.Length; i++) - { - Assert.AreEqual(inputBuffer[i], resultBuffer[i]); - } - - + Assert.AreEqual(inputBuffer[i], resultBuffer[i]); } } - } /// @@ -467,18 +428,13 @@ public void SmallBufferDecompression() /// [Test] [Category("Zip")] - public void ShouldGracefullyHandleReadingANonReableStream() + public void ShouldGracefullyHandleReadingANonReadableStream() { MemoryStream ms = new SelfClosingStream(); using (var gzos = new GZipOutputStream(ms)) { gzos.IsStreamOwner = false; - - byte[] buf = new byte[100000]; - var rnd = new Random(); - rnd.NextBytes(buf); - - gzos.Write(buf, 0, buf.Length); + Utils.WriteDummyData(gzos, size: 100000); } ms.Seek(0, SeekOrigin.Begin); @@ -526,30 +482,26 @@ public void OriginalFilename() var content = "FileContents"; - using (var ms = new MemoryStream()) + using var ms = new MemoryStream(); + using (var outStream = new GZipOutputStream(ms) { IsStreamOwner = false }) { - using (var outStream = new GZipOutputStream(ms) { IsStreamOwner = false }) - { - outStream.FileName = "/path/to/file.ext"; + outStream.FileName = "/path/to/file.ext"; - var writeBuffer = Encoding.ASCII.GetBytes(content); - outStream.Write(writeBuffer, 0, writeBuffer.Length); - outStream.Flush(); - outStream.Finish(); - } + var writeBuffer = Encoding.ASCII.GetBytes(content); + 
outStream.Write(writeBuffer, 0, writeBuffer.Length); + outStream.Flush(); + outStream.Finish(); + } - ms.Seek(0, SeekOrigin.Begin); + ms.Seek(0, SeekOrigin.Begin); - using (var inStream = new GZipInputStream(ms)) - { - var readBuffer = new byte[content.Length]; - inStream.Read(readBuffer, 0, readBuffer.Length); - Assert.AreEqual(content, Encoding.ASCII.GetString(readBuffer)); - Assert.AreEqual("file.ext", inStream.GetFilename()); - } - + using (var inStream = new GZipInputStream(ms)) + { + var readBuffer = new byte[content.Length]; + inStream.Read(readBuffer, 0, readBuffer.Length); + Assert.AreEqual(content, Encoding.ASCII.GetString(readBuffer)); + Assert.AreEqual("file.ext", inStream.GetFilename()); } - } } } diff --git a/test/ICSharpCode.SharpZipLib.Tests/ICSharpCode.SharpZipLib.Tests.csproj b/test/ICSharpCode.SharpZipLib.Tests/ICSharpCode.SharpZipLib.Tests.csproj index 12183fcdd..8e9745e96 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/ICSharpCode.SharpZipLib.Tests.csproj +++ b/test/ICSharpCode.SharpZipLib.Tests/ICSharpCode.SharpZipLib.Tests.csproj @@ -2,18 +2,21 @@ Library - netcoreapp3.1;net46 + net6.0;net462 - 8 + true + ..\..\assets\ICSharpCode.SharpZipLib.snk + true + 8.0 - - - - - + + + + + @@ -25,4 +28,10 @@ + + + ICSharpCode.SharpZipLib.snk + + + diff --git a/test/ICSharpCode.SharpZipLib.Tests/Tar/TarArchiveTests.cs b/test/ICSharpCode.SharpZipLib.Tests/Tar/TarArchiveTests.cs index 374a9b1e3..d9e32194a 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/Tar/TarArchiveTests.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/Tar/TarArchiveTests.cs @@ -2,6 +2,7 @@ using System.Text; using ICSharpCode.SharpZipLib.Core; using ICSharpCode.SharpZipLib.Tar; +using ICSharpCode.SharpZipLib.Tests.TestSupport; using static ICSharpCode.SharpZipLib.Tests.TestSupport.Utils; using NUnit.Framework; @@ -56,9 +57,9 @@ public void ExtractingContentsOnWindowsWithDisallowedPathsFails(string outputDir public void ExtractTarOK(string outputDir, string fileName, bool allowTraverse) { var fileContent = Encoding.UTF8.GetBytes("file content"); - using var tempDir = new TempDir(); + using var tempDir = GetTempDir(); - var tempPath = tempDir.Fullpath; + var tempPath = tempDir.FullName; var extractPath = Path.Combine(tempPath, outputDir); var expectedOutputFile = Path.Combine(extractPath, fileName); diff --git a/test/ICSharpCode.SharpZipLib.Tests/Tar/TarBufferTests.cs b/test/ICSharpCode.SharpZipLib.Tests/Tar/TarBufferTests.cs new file mode 100644 index 000000000..6f7ffedca --- /dev/null +++ b/test/ICSharpCode.SharpZipLib.Tests/Tar/TarBufferTests.cs @@ -0,0 +1,116 @@ +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using ICSharpCode.SharpZipLib.Tar; +using ICSharpCode.SharpZipLib.Tests.TestSupport; +using NUnit.Framework; + +namespace ICSharpCode.SharpZipLib.Tests.Tar +{ + [TestFixture] + public class TarBufferTests + { + [Test] + public void TestSimpleReadWrite() + { + var ms = new MemoryStream(); + var reader = TarBuffer.CreateInputTarBuffer(ms, 1); + var writer = TarBuffer.CreateOutputTarBuffer(ms, 1); + writer.IsStreamOwner = false; + + var block = Utils.GetDummyBytes(TarBuffer.BlockSize); + + writer.WriteBlock(block); + writer.WriteBlock(block); + writer.WriteBlock(block); + writer.Close(); + + ms.Seek(0, SeekOrigin.Begin); + + var block0 = reader.ReadBlock(); + var block1 = reader.ReadBlock(); + var block2 = reader.ReadBlock(); + Assert.AreEqual(block, block0); + Assert.AreEqual(block, block1); + Assert.AreEqual(block, block2); + writer.Close(); + } + + [Test] + public void 
TestSkipBlock() + { + var ms = new MemoryStream(); + var reader = TarBuffer.CreateInputTarBuffer(ms, 1); + var writer = TarBuffer.CreateOutputTarBuffer(ms, 1); + writer.IsStreamOwner = false; + + var block0 = Utils.GetDummyBytes(TarBuffer.BlockSize); + var block1 = Utils.GetDummyBytes(TarBuffer.BlockSize); + + writer.WriteBlock(block0); + writer.WriteBlock(block1); + writer.Close(); + + ms.Seek(0, SeekOrigin.Begin); + + reader.SkipBlock(); + var block = reader.ReadBlock(); + Assert.AreEqual(block, block1); + writer.Close(); + } + + [Test] + public async Task TestSimpleReadWriteAsync() + { + var ms = new MemoryStream(); + var reader = TarBuffer.CreateInputTarBuffer(ms, 1); + var writer = TarBuffer.CreateOutputTarBuffer(ms, 1); + writer.IsStreamOwner = false; + + var block = Utils.GetDummyBytes(TarBuffer.BlockSize); + + await writer.WriteBlockAsync(block, CancellationToken.None); + await writer.WriteBlockAsync(block, CancellationToken.None); + await writer.WriteBlockAsync(block, CancellationToken.None); + await writer.CloseAsync(CancellationToken.None); + + ms.Seek(0, SeekOrigin.Begin); + + var block0 = new byte[TarBuffer.BlockSize]; + await reader.ReadBlockIntAsync(block0, CancellationToken.None, true); + var block1 = new byte[TarBuffer.BlockSize]; + await reader.ReadBlockIntAsync(block1, CancellationToken.None, true); + var block2 = new byte[TarBuffer.BlockSize]; + await reader.ReadBlockIntAsync(block2, CancellationToken.None, true); + Assert.AreEqual(block, block0); + Assert.AreEqual(block, block1); + Assert.AreEqual(block, block2); + await writer.CloseAsync(CancellationToken.None); + } + + [Test] + public async Task TestSkipBlockAsync() + { + var ms = new MemoryStream(); + var reader = TarBuffer.CreateInputTarBuffer(ms, 1); + var writer = TarBuffer.CreateOutputTarBuffer(ms, 1); + writer.IsStreamOwner = false; + + var block0 = Utils.GetDummyBytes(TarBuffer.BlockSize); + var block1 = Utils.GetDummyBytes(TarBuffer.BlockSize); + + await writer.WriteBlockAsync(block0, CancellationToken.None); + await writer.WriteBlockAsync(block1, CancellationToken.None); + await writer.CloseAsync(CancellationToken.None); + + ms.Seek(0, SeekOrigin.Begin); + + await reader.SkipBlockAsync(CancellationToken.None); + var block = new byte[TarBuffer.BlockSize]; + await reader.ReadBlockIntAsync(block, CancellationToken.None, true); + Assert.AreEqual(block, block1); + await writer.CloseAsync(CancellationToken.None); + } + } +} diff --git a/test/ICSharpCode.SharpZipLib.Tests/Tar/TarInputStreamTests.cs b/test/ICSharpCode.SharpZipLib.Tests/Tar/TarInputStreamTests.cs new file mode 100644 index 000000000..a69cfdf6d --- /dev/null +++ b/test/ICSharpCode.SharpZipLib.Tests/Tar/TarInputStreamTests.cs @@ -0,0 +1,111 @@ +using System; +using System.Buffers; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using ICSharpCode.SharpZipLib.Tar; +using ICSharpCode.SharpZipLib.Tests.TestSupport; +using NUnit.Framework; + +namespace ICSharpCode.SharpZipLib.Tests.Tar +{ + public class TarInputStreamTests + { + [Test] + public void TestRead() + { + var entryBytes = Utils.GetDummyBytes(2000); + using var ms = new MemoryStream(); + using (var tos = new TarOutputStream(ms, Encoding.UTF8) { IsStreamOwner = false }) + { + var e = TarEntry.CreateTarEntry("some entry"); + e.Size = entryBytes.Length; + tos.PutNextEntry(e); + tos.Write(entryBytes, 0, entryBytes.Length); + tos.CloseEntry(); + } + + ms.Seek(0, SeekOrigin.Begin); + + using var tis = new TarInputStream(ms, Encoding.UTF8); + var entry = 
tis.GetNextEntry(); + Assert.AreEqual("some entry", entry.Name); + var buffer = new byte[1000]; // smaller than 2 blocks + var read0 = tis.Read(buffer, 0, buffer.Length); + Assert.AreEqual(1000, read0); + Assert.AreEqual(entryBytes.AsSpan(0, 1000).ToArray(), buffer); + + var read1 = tis.Read(buffer, 0, 5); + Assert.AreEqual(5, read1); + Assert.AreEqual(entryBytes.AsSpan(1000, 5).ToArray(), buffer.AsSpan().Slice(0, 5).ToArray()); + + var read2 = tis.Read(buffer, 0, 20); + Assert.AreEqual(20, read2); + Assert.AreEqual(entryBytes.AsSpan(1005, 20).ToArray(), buffer.AsSpan().Slice(0, 20).ToArray()); + + var read3 = tis.Read(buffer, 0, 975); + Assert.AreEqual(975, read3); + Assert.AreEqual(entryBytes.AsSpan(1025, 975).ToArray(), buffer.AsSpan().Slice(0, 975).ToArray()); + } + + [Test] + public async Task TestReadAsync() + { + var entryBytes = Utils.GetDummyBytes(2000); + using var ms = new MemoryStream(); + using (var tos = new TarOutputStream(ms, Encoding.UTF8) { IsStreamOwner = false }) + { + var e = TarEntry.CreateTarEntry("some entry"); + e.Size = entryBytes.Length; + await tos.PutNextEntryAsync(e, CancellationToken.None); + await tos.WriteAsync(entryBytes, 0, entryBytes.Length); + await tos.CloseEntryAsync(CancellationToken.None); + } + + ms.Seek(0, SeekOrigin.Begin); + + using var tis = new TarInputStream(ms, Encoding.UTF8); + var entry = await tis.GetNextEntryAsync(CancellationToken.None); + Assert.AreEqual("some entry", entry.Name); + var buffer = new byte[1000]; // smaller than 2 blocks + var read0 = await tis.ReadAsync(buffer, 0, buffer.Length); + Assert.AreEqual(1000, read0); + Assert.AreEqual(entryBytes.AsSpan(0, 1000).ToArray(), buffer); + + var read1 = await tis.ReadAsync(buffer, 0, 5); + Assert.AreEqual(5, read1); + Assert.AreEqual(entryBytes.AsSpan(1000, 5).ToArray(), buffer.AsSpan().Slice(0, 5).ToArray()); + + var read2 = await tis.ReadAsync(buffer, 0, 20); + Assert.AreEqual(20, read2); + Assert.AreEqual(entryBytes.AsSpan(1005, 20).ToArray(), buffer.AsSpan().Slice(0, 20).ToArray()); + + var read3 = await tis.ReadAsync(buffer, 0, 975); + Assert.AreEqual(975, read3); + Assert.AreEqual(entryBytes.AsSpan(1025, 975).ToArray(), buffer.AsSpan().Slice(0, 975).ToArray()); + } + + [Test] + public void ReadEmptyStreamWhenArrayPoolIsDirty() + { + // Rent an array with the same size as the tar buffer from the array pool + var buffer = ArrayPool.Shared.Rent(TarBuffer.DefaultRecordSize); + + // Fill the array with anything but 0 + Utils.FillArray(buffer, 0x8b); + + // Return the now dirty buffer to the array pool + ArrayPool.Shared.Return(buffer); + + Assert.DoesNotThrow(() => + { + using var emptyStream = new MemoryStream(Array.Empty()); + using var tarInputStream = new TarInputStream(emptyStream, Encoding.UTF8); + while (tarInputStream.GetNextEntry() is { } tarEntry) + { + } + }, "reading from an empty input stream should not cause an error"); + } + } +} diff --git a/test/ICSharpCode.SharpZipLib.Tests/Tar/TarTests.cs b/test/ICSharpCode.SharpZipLib.Tests/Tar/TarTests.cs index 5cdd9404e..e49035afa 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/Tar/TarTests.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/Tar/TarTests.cs @@ -5,6 +5,9 @@ using System; using System.IO; using System.Text; +using System.Threading; +using System.Threading.Tasks; +using NUnit.Framework.Internal; namespace ICSharpCode.SharpZipLib.Tests.Tar { @@ -35,8 +38,8 @@ public void Setup() public void EmptyTar() { var ms = new MemoryStream(); - int recordSize = 0; - using (TarArchive tarOut = TarArchive.CreateOutputTarArchive(ms)) + 
int recordSize; + using (var tarOut = TarArchive.CreateOutputTarArchive(ms)) { recordSize = tarOut.RecordSize; } @@ -48,12 +51,12 @@ public void EmptyTar() ms2.Write(ms.GetBuffer(), 0, ms.GetBuffer().Length); ms2.Seek(0, SeekOrigin.Begin); - using (TarArchive tarIn = TarArchive.CreateInputTarArchive(ms2, null)) + using (var tarIn = TarArchive.CreateInputTarArchive(ms2, nameEncoding: null)) { entryCount = 0; tarIn.ProgressMessageEvent += EntryCounter; tarIn.ListContents(); - Assert.AreEqual(0, entryCount, "Expected 0 tar entries"); + Assert.Zero(entryCount, "Expected 0 tar entries"); } } @@ -64,27 +67,24 @@ public void EmptyTar() [Category("Tar")] public void BlockFactorHandling() { - const int MinimumBlockFactor = 1; - const int MaximumBlockFactor = 64; - const int FillFactor = 2; + const int minimumBlockFactor = 1; + const int maximumBlockFactor = 64; + const int fillFactor = 2; - for (int factor = MinimumBlockFactor; factor < MaximumBlockFactor; ++factor) + for (var factor = minimumBlockFactor; factor < maximumBlockFactor; ++factor) { var ms = new MemoryStream(); - using (TarOutputStream tarOut = new TarOutputStream(ms, factor, null)) + using (var tarOut = new TarOutputStream(ms, factor, nameEncoding: null)) { - TarEntry entry = TarEntry.CreateTarEntry("TestEntry"); - entry.Size = (TarBuffer.BlockSize * factor * FillFactor); + var entry = TarEntry.CreateTarEntry("TestEntry"); + entry.Size = TarBuffer.BlockSize * factor * fillFactor; tarOut.PutNextEntry(entry); - byte[] buffer = new byte[TarBuffer.BlockSize]; - - var r = new Random(); - r.NextBytes(buffer); + var buffer = Utils.GetDummyBytes(TarBuffer.BlockSize); // Last block is a partial one - for (int i = 0; i < factor * FillFactor; ++i) + for (var i = 0; i < factor * fillFactor; ++i) { tarOut.Write(buffer, 0, buffer.Length); } @@ -94,7 +94,7 @@ public void BlockFactorHandling() Assert.IsNotNull(tarData, "Data written is null"); // Blocks = Header + Data Blocks + Zero block + Record trailer - int usedBlocks = 1 + (factor * FillFactor) + 2; + int usedBlocks = 1 + (factor * fillFactor) + 2; int totalBlocks = usedBlocks + (factor - 1); totalBlocks /= factor; totalBlocks *= factor; @@ -102,24 +102,22 @@ public void BlockFactorHandling() Assert.AreEqual(TarBuffer.BlockSize * totalBlocks, tarData.Length, "Tar file should contain {0} blocks in length", totalBlocks); - if (usedBlocks < totalBlocks) + if (usedBlocks >= totalBlocks) continue; + + // Start at first byte after header. + var byteIndex = TarBuffer.BlockSize * ((factor * fillFactor) + 1); + while (byteIndex < tarData.Length) { - // Start at first byte after header. - int byteIndex = TarBuffer.BlockSize * ((factor * FillFactor) + 1); - while (byteIndex < tarData.Length) - { - int blockNumber = byteIndex / TarBuffer.BlockSize; - int offset = blockNumber % TarBuffer.BlockSize; - Assert.AreEqual(0, tarData[byteIndex], - string.Format("Trailing block data should be null iteration {0} block {1} offset {2} index {3}", - factor, - blockNumber, offset, byteIndex)); - byteIndex += 1; - } + var blockNumber = byteIndex / TarBuffer.BlockSize; + var offset = blockNumber % TarBuffer.BlockSize; + Assert.AreEqual(0, tarData[byteIndex], + "Trailing block data should be null iteration {0} block {1} offset {2} index {3}", + factor, blockNumber, offset, byteIndex); + byteIndex += 1; } } } - + /// /// Check that the tar trailer only contains nulls. 
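To make the padding arithmetic in BlockFactorHandling above concrete, here is the same calculation worked for one case; the formula is lifted directly from the test's assertions.

// Worked example for factor = 3, fillFactor = 2:
//   usedBlocks  = 1 (header) + factor * fillFactor (data) + 2 (zero block + record trailer) = 9
//   totalBlocks = usedBlocks rounded up to a multiple of factor                             = 9
//   expected tar length = TarBuffer.BlockSize * totalBlocks = 512 * 9                       = 4608 bytes
using ICSharpCode.SharpZipLib.Tar;

int factor = 3, fillFactor = 2;
int usedBlocks = 1 + factor * fillFactor + 2;
int totalBlocks = (usedBlocks + factor - 1) / factor * factor;
long expectedLength = (long)TarBuffer.BlockSize * totalBlocks;   // 4608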
/// @@ -127,13 +125,13 @@ public void BlockFactorHandling() [Category("Tar")] public void TrailerContainsNulls() { - const int TestBlockFactor = 3; + const int testBlockFactor = 3; - for (int iteration = 0; iteration < TestBlockFactor * 2; ++iteration) + for (int iteration = 0; iteration < testBlockFactor * 2; ++iteration) { var ms = new MemoryStream(); - using (TarOutputStream tarOut = new TarOutputStream(ms, TestBlockFactor, null)) + using (TarOutputStream tarOut = new TarOutputStream(ms, testBlockFactor, null)) { TarEntry entry = TarEntry.CreateTarEntry("TestEntry"); if (iteration > 0) @@ -142,10 +140,7 @@ public void TrailerContainsNulls() } tarOut.PutNextEntry(entry); - byte[] buffer = new byte[TarBuffer.BlockSize]; - - var r = new Random(); - r.NextBytes(buffer); + byte[] buffer = Utils.GetDummyBytes(TarBuffer.BlockSize); if (iteration > 0) { @@ -167,9 +162,9 @@ public void TrailerContainsNulls() // Blocks = Header + Data Blocks + Zero block + Record trailer int usedBlocks = 1 + iteration + 2; - int totalBlocks = usedBlocks + (TestBlockFactor - 1); - totalBlocks /= TestBlockFactor; - totalBlocks *= TestBlockFactor; + int totalBlocks = usedBlocks + (testBlockFactor - 1); + totalBlocks /= testBlockFactor; + totalBlocks *= testBlockFactor; Assert.AreEqual(TarBuffer.BlockSize * totalBlocks, tarData.Length, string.Format("Tar file should be {0} blocks in length", totalBlocks)); @@ -195,7 +190,7 @@ public void TrailerContainsNulls() private void TryLongName(string name) { var ms = new MemoryStream(); - using (TarOutputStream tarOut = new TarOutputStream(ms, null)) + using (TarOutputStream tarOut = new TarOutputStream(ms, nameEncoding: null)) { DateTime modTime = DateTime.Now; @@ -207,7 +202,7 @@ private void TryLongName(string name) ms2.Write(ms.GetBuffer(), 0, ms.GetBuffer().Length); ms2.Seek(0, SeekOrigin.Begin); - using (TarInputStream tarIn = new TarInputStream(ms2, null)) + using (TarInputStream tarIn = new TarInputStream(ms2, nameEncoding: null)) { TarEntry nextEntry = tarIn.GetNextEntry(); @@ -290,20 +285,15 @@ public void ExtendedHeaderLongName() var buffer = new byte[2560]; var truncated = Convert.FromBase64String(input64); Array.Copy(truncated, buffer, truncated.Length); - truncated = null; - using (var ms = new MemoryStream(buffer)) - using (var tis = new TarInputStream(ms, null)) - { - var entry = tis.GetNextEntry(); - Assert.IsNotNull(entry, "Entry is null"); - - Assert.IsNotNull(entry.Name, "Entry name is null"); - - Assert.AreEqual(expectedName.Length, entry.Name.Length, $"Entry name is truncated to {entry.Name.Length} bytes."); - - Assert.AreEqual(expectedName, entry.Name, "Entry name does not match expected value"); - } + using var ms = new MemoryStream(buffer); + using var tis = new TarInputStream(ms, nameEncoding: null); + var entry = tis.GetNextEntry(); + + Assert.IsNotNull(entry, "Entry is null"); + Assert.IsNotNull(entry.Name, "Entry name is null"); + Assert.AreEqual(expectedName.Length, entry.Name.Length, $"Entry name is truncated to {entry.Name.Length} bytes."); + Assert.AreEqual(expectedName, entry.Name, "Entry name does not match expected value"); } /// @@ -394,11 +384,9 @@ public void HeaderEquality() public void Checksum() { var ms = new MemoryStream(); - using (TarOutputStream tarOut = new TarOutputStream(ms, null)) + using (var tarOut = new TarOutputStream(ms, nameEncoding: null)) { - DateTime modTime = DateTime.Now; - - TarEntry entry = TarEntry.CreateTarEntry("TestEntry"); + var entry = TarEntry.CreateTarEntry("TestEntry"); entry.TarHeader.Mode = 12345; 
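// Rough sketch of the rule behind TarHeader.IsChecksumValid (this is the ustar
// header format, not SharpZipLib's internal code; helper name and local variable
// names are illustrative): the stored checksum is the unsigned sum of all 512
// header bytes, with the 8-byte checksum field itself counted as ASCII spaces.
static int ComputeHeaderChecksum(byte[] header512)
{
	var sum = 0;
	for (var i = 0; i < 512; i++)
	{
		// bytes 148..155 hold the checksum field; treat them as ' ' (0x20)
		sum += (i >= 148 && i < 156) ? 0x20 : header512[i];
	}
	return sum;
}
// When a header byte is corrupted (as ms3 does below), the recomputed sum no
// longer matches the stored value and GetNextEntry throws a TarException.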
tarOut.PutNextEntry(entry); @@ -409,7 +397,7 @@ public void Checksum() ms2.Seek(0, SeekOrigin.Begin); TarEntry nextEntry; - using (TarInputStream tarIn = new TarInputStream(ms2, null)) + using (var tarIn = new TarInputStream(ms2, nameEncoding: null)) { nextEntry = tarIn.GetNextEntry(); Assert.IsTrue(nextEntry.TarHeader.IsChecksumValid, "Checksum should be valid"); @@ -421,20 +409,9 @@ public void Checksum() ms3.Write(new byte[] { 34 }, 0, 1); ms3.Seek(0, SeekOrigin.Begin); - using (TarInputStream tarIn = new TarInputStream(ms3, null)) + using (var tarIn = new TarInputStream(ms3, nameEncoding: null)) { - bool trapped = false; - - try - { - nextEntry = tarIn.GetNextEntry(); - } - catch (TarException) - { - trapped = true; - } - - Assert.IsTrue(trapped, "Checksum should be invalid"); + Assert.Throws(() => tarIn.GetNextEntry(), "Checksum should be invalid"); } } @@ -703,37 +680,35 @@ public void EndBlockHandling() long outCount, inCount; - using (var ms = new MemoryStream()) + using var ms = new MemoryStream(); + using (var tarOut = TarArchive.CreateOutputTarArchive(ms)) + using (var dummyFile = Utils.GetDummyFile(dummySize)) { - using (var tarOut = TarArchive.CreateOutputTarArchive(ms)) - using (var dummyFile = Utils.GetDummyFile(dummySize)) - { - tarOut.IsStreamOwner = false; - tarOut.WriteEntry(TarEntry.CreateEntryFromFile(dummyFile.Filename), false); - } + tarOut.IsStreamOwner = false; + tarOut.WriteEntry(TarEntry.CreateEntryFromFile(dummyFile), recurse: false); + } - outCount = ms.Position; - ms.Seek(0, SeekOrigin.Begin); + outCount = ms.Position; + ms.Seek(0, SeekOrigin.Begin); - using (var tarIn = TarArchive.CreateInputTarArchive(ms, null)) - using (var tempDir = new Utils.TempDir()) - { - tarIn.IsStreamOwner = false; - tarIn.ExtractContents(tempDir.Fullpath); + using (var tarIn = TarArchive.CreateInputTarArchive(ms, nameEncoding: null)) + using (var tempDir = Utils.GetTempDir()) + { + tarIn.IsStreamOwner = false; + tarIn.ExtractContents(tempDir); - foreach (var file in Directory.GetFiles(tempDir.Fullpath, "*", SearchOption.AllDirectories)) - { - Console.WriteLine($"Extracted \"{file}\""); - } + foreach (var file in Directory.GetFiles(tempDir, "*", SearchOption.AllDirectories)) + { + Console.WriteLine($"Extracted \"{file}\""); } + } - inCount = ms.Position; + inCount = ms.Position; - Console.WriteLine($"Output count: {outCount}"); - Console.WriteLine($"Input count: {inCount}"); + Console.WriteLine($"Output count: {outCount}"); + Console.WriteLine($"Input count: {inCount}"); - Assert.AreEqual(inCount, outCount, "Bytes read and bytes written should be equal"); - } + Assert.AreEqual(inCount, outCount, "Bytes read and bytes written should be equal"); } [Test] @@ -742,14 +717,14 @@ public void EndBlockHandling() [Explicit("Long Running")] public void WriteThroughput() { - const string EntryName = "LargeTarEntry"; + const string entryName = "LargeTarEntry"; PerformanceTesting.TestWrite(TestDataSize.Large, bs => { - var tos = new TarOutputStream(bs, null); + var tos = new TarOutputStream(bs, nameEncoding: null); tos.PutNextEntry(new TarEntry(new TarHeader() { - Name = EntryName, + Name = entryName, Size = (int)TestDataSize.Large, })); return tos; @@ -766,7 +741,7 @@ public void WriteThroughput() [Explicit("Long Running")] public void SingleLargeEntry() { - const string EntryName = "LargeTarEntry"; + const string entryName = "LargeTarEntry"; const TestDataSize dataSize = TestDataSize.Large; PerformanceTesting.TestReadWrite( @@ -776,7 +751,7 @@ public void SingleLargeEntry() var tis = new 
TarInputStream(bs, null); var entry = tis.GetNextEntry(); - Assert.AreEqual(EntryName, entry.Name); + Assert.AreEqual(entryName, entry.Name); return tis; }, output: bs => @@ -784,7 +759,7 @@ public void SingleLargeEntry() var tos = new TarOutputStream(bs, null); tos.PutNextEntry(new TarEntry(new TarHeader() { - Name = EntryName, + Name = entryName, Size = (int)dataSize, })); return tos; @@ -801,44 +776,40 @@ public void SingleLargeEntry() [Category("Tar")] public void ExtractingCorruptTarShouldntLeakFiles() { - using (var memoryStream = new MemoryStream()) + using var memoryStream = new MemoryStream(); + //Create a tar.gz in the output stream + using (var gzipStream = new GZipOutputStream(memoryStream)) { - //Create a tar.gz in the output stream - using (var gzipStream = new GZipOutputStream(memoryStream)) - { - gzipStream.IsStreamOwner = false; + gzipStream.IsStreamOwner = false; - using (var tarOut = TarArchive.CreateOutputTarArchive(gzipStream)) - using (var dummyFile = Utils.GetDummyFile(32000)) - { - tarOut.IsStreamOwner = false; - tarOut.WriteEntry(TarEntry.CreateEntryFromFile(dummyFile.Filename), false); - } - } - - // corrupt archive - make sure the file still has more than one block - memoryStream.SetLength(16000); - memoryStream.Seek(0, SeekOrigin.Begin); - - // try to extract - using (var gzipStream = new GZipInputStream(memoryStream)) + using (var tarOut = TarArchive.CreateOutputTarArchive(gzipStream)) + using (var dummyFile = Utils.GetDummyFile(size: 32000)) { - string tempDirName; - gzipStream.IsStreamOwner = false; + tarOut.IsStreamOwner = false; + tarOut.WriteEntry(TarEntry.CreateEntryFromFile(dummyFile), recurse: false); + } + } - using (var tempDir = new Utils.TempDir()) - { - tempDirName = tempDir.Fullpath; + // corrupt archive - make sure the file still has more than one block + memoryStream.SetLength(16000); + memoryStream.Seek(0, SeekOrigin.Begin); - using (var tarIn = TarArchive.CreateInputTarArchive(gzipStream, null)) - { - tarIn.IsStreamOwner = false; - Assert.Throws(() => tarIn.ExtractContents(tempDir.Fullpath)); - } - } + // try to extract + using (var gzipStream = new GZipInputStream(memoryStream)) + { + gzipStream.IsStreamOwner = false; - Assert.That(Directory.Exists(tempDirName), Is.False, "Temporary folder should have been removed"); + using var tempDir = Utils.GetTempDir(); + using (var tarIn = TarArchive.CreateInputTarArchive(gzipStream, nameEncoding: null)) + { + tarIn.IsStreamOwner = false; + Assert.Throws(() => tarIn.ExtractContents(tempDir)); } + + // Try to remove the output directory to check if any file handles are still being held + Assert.DoesNotThrow(() => tempDir.Delete()); + + Assert.That(tempDir.Exists, Is.False, "Temporary folder should have been removed"); } } [TestCase(10, "utf-8")] @@ -859,7 +830,7 @@ public void ParseHeaderWithEncoding(int length, string encodingName) reparseHeader.ParseBuffer(headerbytes, enc); Assert.AreEqual(name, reparseHeader.Name); // top 100 bytes are name field in tar header - for (int i = 0;i < encodedName.Length;i++) + for (int i = 0; i < encodedName.Length; i++) { Assert.AreEqual(encodedName[i], headerbytes[i]); } @@ -871,33 +842,76 @@ public void ParseHeaderWithEncoding(int length, string encodingName) [TestCase(100, "shift-jis")] [TestCase(128, "shift-jis")] [Category("Tar")] - public void StreamWithJapaneseName(int length, string encodingName) + public async Task StreamWithJapaneseNameAsync(int length, string encodingName) { // U+3042 is Japanese Hiragana // https://unicode.org/charts/PDF/U3040.pdf var 
entryName = new string((char)0x3042, length); var data = new byte[32]; var encoding = Encoding.GetEncoding(encodingName); - using(var memoryStream = new MemoryStream()) + using (var memoryStream = new MemoryStream()) { - using(var tarOutput = new TarOutputStream(memoryStream, encoding)) + using (var tarOutput = new TarOutputStream(memoryStream, encoding)) { var entry = TarEntry.CreateTarEntry(entryName); entry.Size = 32; tarOutput.PutNextEntry(entry); tarOutput.Write(data, 0, data.Length); } + using(var memInput = new MemoryStream(memoryStream.ToArray())) using(var inputStream = new TarInputStream(memInput, encoding)) { var buf = new byte[64]; - var entry = inputStream.GetNextEntry(); + var entry = await inputStream.GetNextEntryAsync(CancellationToken.None); Assert.AreEqual(entryName, entry.Name); - var bytesread = inputStream.Read(buf, 0, buf.Length); + var bytesread = await inputStream.ReadAsync(buf, 0, buf.Length, CancellationToken.None); Assert.AreEqual(data.Length, bytesread); } File.WriteAllBytes(Path.Combine(Path.GetTempPath(), $"jpnametest_{length}_{encodingName}.tar"), memoryStream.ToArray()); } } + /// + /// This test could be considered integration test. it creates a tar archive with the root directory specified + /// Then extracts it and compares the two folders. This used to fail on unix due to issues with root folder handling + /// in the tar archive. + /// + [Test] + [Category("Tar")] + public void RootPathIsRespected() + { + using (var extractDirectory = new TempDir()) + using (var tarFileName = new TempFile()) + using (var tempDirectory = new TempDir()) + { + tempDirectory.CreateDummyFile(); + + using (var tarFile = File.Open(tarFileName.FullName, FileMode.Create)) + { + using (var tarOutputStream = TarArchive.CreateOutputTarArchive(tarFile)) + { + tarOutputStream.RootPath = tempDirectory.FullName; + var entry = TarEntry.CreateEntryFromFile(tempDirectory.FullName); + tarOutputStream.WriteEntry(entry, true); + } + } + + using (var file = File.OpenRead(tarFileName.FullName)) + { + using (var archive = TarArchive.CreateInputTarArchive(file, Encoding.UTF8)) + { + archive.ExtractContents(extractDirectory.FullName); + } + } + + var expectationDirectory = new DirectoryInfo(tempDirectory.FullName); + foreach (var checkFile in expectationDirectory.GetFiles("", SearchOption.AllDirectories)) + { + var relativePath = checkFile.FullName.Substring(expectationDirectory.FullName.Length + 1); + FileAssert.Exists(Path.Combine(extractDirectory.FullName, relativePath)); + FileAssert.AreEqual(checkFile.FullName, Path.Combine(extractDirectory.FullName, relativePath)); + } + } + } } } diff --git a/test/ICSharpCode.SharpZipLib.Tests/TestSupport/RingBuffer.cs b/test/ICSharpCode.SharpZipLib.Tests/TestSupport/RingBuffer.cs index d4b75e3cf..c8ee11881 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/TestSupport/RingBuffer.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/TestSupport/RingBuffer.cs @@ -510,7 +510,7 @@ public void Threaded() private void Reader() { - var r = new Random(); + var r = new Random(Utils.DefaultSeed); byte nextValue = 0; while (readTarget_ > 0) @@ -541,7 +541,7 @@ private void Reader() private void Writer() { - var r = new Random(); + var r = new Random(Utils.DefaultSeed); byte nextValue = 0; while (writeTarget_ > 0) diff --git a/test/ICSharpCode.SharpZipLib.Tests/TestSupport/Streams.cs b/test/ICSharpCode.SharpZipLib.Tests/TestSupport/Streams.cs index 3f5ae552a..2d1b00fb8 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/TestSupport/Streams.cs +++ 
b/test/ICSharpCode.SharpZipLib.Tests/TestSupport/Streams.cs @@ -1,6 +1,7 @@ using System; using System.IO; using System.Threading; +using System.Threading.Tasks; namespace ICSharpCode.SharpZipLib.Tests.TestSupport { @@ -177,14 +178,87 @@ public class MemoryStreamWithoutSeek : TrackedMemoryStream /// /// /// true if the stream is open. - public override bool CanSeek + public override bool CanSeek => false; + + /// + public override long Position { - get - { - return false; + get => throw new NotSupportedException("Getting position is not supported"); + set => throw new NotSupportedException("Setting position is not supported"); + } + + } + +#if NETSTANDARD2_1 || NETCOREAPP3_0_OR_GREATER + /// + /// A that does not support non-async operations. + /// + /// + /// This could not be done by extending MemoryStream itself, since other instances of MemoryStream tries to us a faster (non-async) method of copying + /// if it detects that it's a (subclass of) MemoryStream. + /// + public class MemoryStreamWithoutSync : Stream + { + MemoryStream _inner = new MemoryStream(); + + public override bool CanRead => _inner.CanRead; + public override bool CanSeek => _inner.CanSeek; + public override bool CanWrite => _inner.CanWrite; + public override long Length => _inner.Length; + public override long Position { get => _inner.Position; set => _inner.Position = value; } + + public byte[] ToArray() => _inner.ToArray(); + + public override void Flush() => throw new NotSupportedException($"Non-async call to {nameof(Flush)}"); + + + public override void CopyTo(Stream destination, int bufferSize) => throw new NotSupportedException($"Non-async call to {nameof(CopyTo)}"); + public override void Write(ReadOnlySpan buffer) => throw new NotSupportedException($"Non-async call to {nameof(Write)}"); + public override int Read(Span buffer) => throw new NotSupportedException($"Non-async call to {nameof(Read)}"); + + public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException($"Non-async call to {nameof(Write)}"); + public override void WriteByte(byte value) => throw new NotSupportedException($"Non-async call to {nameof(Write)}"); + + public override int Read(byte[] buffer, int offset, int count) => throw new NotSupportedException($"Non-async call to {nameof(Read)}"); + public override int ReadByte() => throw new NotSupportedException($"Non-async call to {nameof(ReadByte)}"); + + // Even though our mock stream is writing synchronously, this should not fail the tests + public override async Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken) + { + var buf = new byte[bufferSize]; + while(_inner.Read(buf, 0, bufferSize) > 0) { + await destination.WriteAsync(buf, cancellationToken); } } + public override Task FlushAsync(CancellationToken cancellationToken) => TaskFromBlocking(() => _inner.Flush()); + public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => TaskFromBlocking(() => _inner.Write(buffer, offset, count)); + public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => Task.FromResult(_inner.Read(buffer, offset, count)); + public override ValueTask WriteAsync(ReadOnlyMemory buffer, CancellationToken cancellationToken = default) => ValueTaskFromBlocking(() => _inner.Write(buffer.Span)); + public override ValueTask ReadAsync(Memory buffer, CancellationToken cancellationToken = default) => ValueTask.FromResult(_inner.Read(buffer.Span)); + + static 
Task TaskFromBlocking(Action action) + { + action(); + return Task.CompletedTask; + } + + static ValueTask ValueTaskFromBlocking(Action action) + { + action(); + return ValueTask.CompletedTask; + } + + public override long Seek(long offset, SeekOrigin origin) + { + return _inner.Seek(offset, origin); + } + + public override void SetLength(long value) + { + _inner.SetLength(value); + } } +#endif /// /// A that cannot be read but supports infinite writes. diff --git a/test/ICSharpCode.SharpZipLib.Tests/TestSupport/StringTesting.cs b/test/ICSharpCode.SharpZipLib.Tests/TestSupport/StringTesting.cs index 3d67a9c70..e1d7a1fb0 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/TestSupport/StringTesting.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/TestSupport/StringTesting.cs @@ -1,4 +1,5 @@ using System.Collections.Generic; +using System.Linq; namespace ICSharpCode.SharpZipLib.Tests.TestSupport { @@ -6,36 +7,20 @@ public static class StringTesting { static StringTesting() { - AddLanguage("Chinese", "測試.txt", "big5"); - AddLanguage("Greek", "Ϗΰ.txt", "windows-1253"); - AddLanguage("Nordic", "Åæ.txt", "windows-1252"); - AddLanguage("Arabic", "ڀڅ.txt", "windows-1256"); - AddLanguage("Russian", "Прйвёт.txt", "windows-1251"); - } - - private static void AddLanguage(string language, string filename, string encoding) - { - languages.Add(language); - filenames.Add(filename); - encodings.Add(encoding); - entries++; + TestSamples = new [] + { + ("Chinese", "測試.txt", "big5"), + ("Greek", "Ϗΰ.txt", "windows-1253"), + ("Nordic", "Åæ.txt", "windows-1252"), + ("Arabic", "ڀڅ.txt", "windows-1256"), + ("Russian", "Прйвёт.txt", "windows-1251"), + }; } - private static int entries = 0; - private static List languages = new List(); - private static List filenames = new List(); - private static List encodings = new List(); + public static (string language, string filename, string encoding)[] TestSamples { get; } - public static IEnumerable Languages => filenames.AsReadOnly(); - public static IEnumerable Filenames => filenames.AsReadOnly(); - public static IEnumerable Encodings => filenames.AsReadOnly(); - - public static IEnumerable<(string language, string filename, string encoding)> GetTestSamples() - { - for (int i = 0; i < entries; i++) - { - yield return (languages[i], filenames[i], encodings[i]); - } - } + public static IEnumerable Languages => TestSamples.Select(s => s.language); + public static IEnumerable Filenames => TestSamples.Select(s => s.filename); + public static IEnumerable Encodings => TestSamples.Select(s => s.encoding); } } diff --git a/test/ICSharpCode.SharpZipLib.Tests/TestSupport/Utils.cs b/test/ICSharpCode.SharpZipLib.Tests/TestSupport/Utils.cs index 33d6e3e9b..ca1838500 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/TestSupport/Utils.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/TestSupport/Utils.cs @@ -1,7 +1,11 @@ using NUnit.Framework; using System; +using System.Diagnostics; using System.IO; using System.Text; +using ICSharpCode.SharpZipLib.Tests.Zip; +using System.Linq; +using System.Threading.Tasks; namespace ICSharpCode.SharpZipLib.Tests.TestSupport { @@ -12,7 +16,8 @@ public static class Utils { public static int DummyContentLength = 16; - private static Random random = new Random(); + internal const int DefaultSeed = 5; + private static Random random = new Random(DefaultSeed); /// /// Returns the system root for the current platform (usually c:\ for windows and / for others) @@ -40,125 +45,242 @@ private static void Compare(byte[] a, byte[] b) } } - public static void WriteDummyData(string 
fileName, int size = -1) + /// + /// Write pseudo-random data to , + /// creating it if it does not exist or truncating it otherwise + /// + /// + /// + /// + public static void WriteDummyData(string fileName, int size, int seed = DefaultSeed) { - using(var fs = File.OpenWrite(fileName)) - { - WriteDummyData(fs, size); - } + using var fs = File.Create(fileName); + WriteDummyData(fs, size, seed); } - public static void WriteDummyData(Stream stream, int size = -1) + /// + /// Write pseudo-random data to + /// + /// + /// + /// + public static void WriteDummyData(Stream stream, int size, int seed = DefaultSeed) { - var bytes = (size < 0) - ? Encoding.ASCII.GetBytes(DateTime.UtcNow.Ticks.ToString("x16")) - : new byte[size]; - - if(size > 0) - { - random.NextBytes(bytes); - } - - stream.Write(bytes, 0, bytes.Length); + var bytes = GetDummyBytes(size, seed); + stream.Write(bytes, offset: 0, bytes.Length); + } + + /// + /// Creates a buffer of pseudo-random bytes + /// + /// + /// + /// + public static byte[] GetDummyBytes(int size, int seed = DefaultSeed) + { + var random = new Random(seed); + var bytes = new byte[size]; + random.NextBytes(bytes); + return bytes; + } + + public static async Task WriteDummyDataAsync(Stream stream, int size = -1) + { + var bytes = GetDummyBytes(size); + await stream.WriteAsync(bytes, 0, bytes.Length); } - public static TempFile GetDummyFile(int size = -1) + /// + /// Returns a file reference with bytes of dummy data written to it + /// + /// + /// + public static TempFile GetDummyFile(int size = 16) { var tempFile = new TempFile(); - WriteDummyData(tempFile.Filename, size); + using var fs = tempFile.Create(); + WriteDummyData(fs, size); return tempFile; } + /// + /// Returns a randomized file/directory name (without any path) using a generated GUID + /// + /// public static string GetDummyFileName() - => $"{random.Next():x8}{random.Next():x8}{random.Next():x8}"; + => string.Concat(Guid.NewGuid().ToByteArray().Select(b => $"{b:x2}")); - public class TempFile : IDisposable - { - public string Filename { get; internal set; } + /// + /// Returns a reference to a temporary directory that deletes it's contents when disposed + /// + /// + public static TempDir GetTempDir() => new TempDir(); - public TempFile() + /// + /// Returns a reference to a temporary file that deletes it's referred file when disposed + /// + /// + public static TempFile GetTempFile() => new TempFile(); + + public static void PatchFirstEntrySize(Stream stream, int newSize) + { + using(stream) { - Filename = Path.GetTempFileName(); + var sizeBytes = BitConverter.GetBytes(newSize); + + stream.Seek(18, SeekOrigin.Begin); + stream.Write(sizeBytes, 0, 4); + stream.Write(sizeBytes, 0, 4); } + } - #region IDisposable Support + public static void FillArray(byte[] buffer, byte value) + { +#if NET6_0_OR_GREATER + Array.Fill(buffer, value); +#else + for(var i = 0; i < buffer.Length; i++) buffer[i] = value; +#endif + } + } + + public class TestTraceListener : TraceListener + { + private readonly TextWriter _writer; + public TestTraceListener(TextWriter writer) + { + _writer = writer; + } - private bool disposed = false; // To detect redundant calls + public override void WriteLine(string message) => _writer.WriteLine(message); + public override void Write(string message) => _writer.Write(message); + } + + public class TempFile : FileSystemInfo, IDisposable + { + private FileInfo _fileInfo; - protected virtual void Dispose(bool disposing) - { - if (!disposed) - { - if (disposing && File.Exists(Filename)) - { - 
try - { - File.Delete(Filename); - } - catch { } - } - - disposed = true; - } - } + public override string Name => _fileInfo.Name; + public override bool Exists => _fileInfo.Exists; + public string DirectoryName => _fileInfo.DirectoryName; - public void Dispose() - { - Dispose(true); - GC.SuppressFinalize(this); - } + public override string FullName => _fileInfo.FullName; - #endregion IDisposable Support - } + public byte[] ReadAllBytes() => File.ReadAllBytes(_fileInfo.FullName); - public class TempDir : IDisposable - { - public string Fullpath { get; internal set; } + public static implicit operator string(TempFile tf) => tf._fileInfo.FullName; + + public override void Delete() + { + if(!Exists) return; + _fileInfo.Delete(); + } - public TempDir() - { - Fullpath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName()); - Directory.CreateDirectory(Fullpath); - } + public FileStream Open(FileMode mode, FileAccess access) => _fileInfo.Open(mode, access); + public FileStream Open(FileMode mode) => _fileInfo.Open(mode); + public FileStream Create() => _fileInfo.Create(); - #region IDisposable Support + public static TempFile WithDummyData(int size, string dirPath = null, string filename = null, int seed = Utils.DefaultSeed) + { + var tempFile = new TempFile(dirPath, filename); + Utils.WriteDummyData(tempFile.FullName, size, seed); + return tempFile; + } - private bool disposed = false; // To detect redundant calls + internal TempFile(string dirPath = null, string filename = null) + { + dirPath ??= Path.GetTempPath(); + filename ??= Utils.GetDummyFileName(); + _fileInfo = new FileInfo(Path.Combine(dirPath, filename)); + } - protected virtual void Dispose(bool disposing) - { - if (!disposed) - { - if (disposing && Directory.Exists(Fullpath)) - { - try - { - Directory.Delete(Fullpath, true); - } - catch { } - } - - disposed = true; - } - } +#region IDisposable Support - public void Dispose() - { - Dispose(true); - GC.SuppressFinalize(this); - } + private bool _disposed; // To detect redundant calls - internal string CreateDummyFile(int size = -1) - => CreateDummyFile(GetDummyFileName(), size); + protected virtual void Dispose(bool disposing) + { + if (_disposed) return; + if (disposing) + { + try + { + Delete(); + } + catch + { + // ignored + } + } - internal string CreateDummyFile(string name, int size = -1) - { - var fileName = Path.Combine(Fullpath, name); - WriteDummyData(fileName, size); - return fileName; - } + _disposed = true; + } - #endregion IDisposable Support - } + public void Dispose() + { + Dispose(disposing: true); + GC.SuppressFinalize(this); + } + +#endregion IDisposable Support } + + + + public class TempDir : FileSystemInfo, IDisposable + { + public override string Name => Path.GetFileName(FullName); + public override bool Exists => Directory.Exists(FullName); + + public static implicit operator string(TempDir td) => td.FullName; + + public override void Delete() + { + if(!Exists) return; + Directory.Delete(FullPath, recursive: true); + } + + public TempDir() + { + FullPath = Path.Combine(Path.GetTempPath(), Utils.GetDummyFileName()); + Directory.CreateDirectory(FullPath); + } + + public TempFile CreateDummyFile(int size = 16, int seed = Utils.DefaultSeed) + => CreateDummyFile(null, size); + + public TempFile CreateDummyFile(string name, int size = 16, int seed = Utils.DefaultSeed) + => TempFile.WithDummyData(size, FullPath, name, seed); + + public TempFile GetFile(string fileName) => new TempFile(FullPath, fileName); + +#region IDisposable Support + + private bool 
_disposed; // To detect redundant calls + + protected virtual void Dispose(bool disposing) + { + if (_disposed) return; + if (disposing) + { + try + { + Delete(); + } + catch + { + // ignored + } + } + _disposed = true; + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + +#endregion IDisposable Support + } } diff --git a/test/ICSharpCode.SharpZipLib.Tests/TestSupport/ZipTesting.cs b/test/ICSharpCode.SharpZipLib.Tests/TestSupport/ZipTesting.cs index 688b91dc3..7311da7a2 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/TestSupport/ZipTesting.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/TestSupport/ZipTesting.cs @@ -1,5 +1,9 @@ using ICSharpCode.SharpZipLib.Zip; +using NUnit.Framework.Constraints; +using NUnit.Framework; +using System.Collections.Generic; using System.IO; +using System.Linq; namespace ICSharpCode.SharpZipLib.Tests.TestSupport { @@ -8,30 +12,112 @@ namespace ICSharpCode.SharpZipLib.Tests.TestSupport /// internal static class ZipTesting { - /// - /// Tests the archive. - /// - /// The data. - /// - public static bool TestArchive(byte[] data) + public static void AssertValidZip(Stream stream, string password = null, bool usesAes = true) { - return TestArchive(data, null); + using var zipFile = new ZipFile(stream) + { + IsStreamOwner = false, + Password = password, + }; + + Assert.That(zipFile, Does.PassTestArchive()); + + if (!string.IsNullOrEmpty(password) && usesAes) + { + Assert.Ignore("ZipInputStream does not support AES"); + } + + stream.Seek(0, SeekOrigin.Begin); + + Assert.DoesNotThrow(() => + { + using var zis = new ZipInputStream(stream){Password = password}; + while (zis.GetNextEntry() != null) + { + new StreamReader(zis).ReadToEnd(); + } + }, "Archive could not be read by ZipInputStream"); } + } - /// - /// Tests the archive. - /// - /// The data. - /// The password. - /// true if archive tests ok; false otherwise. - public static bool TestArchive(byte[] data, string password) + public class TestArchiveReport + { + internal const string PassingArchive = "Passing Archive"; + + readonly List _messages = new List(); + public void HandleTestResults(TestStatus status, string message) { - using (MemoryStream ms = new MemoryStream(data)) - using (ZipFile zipFile = new ZipFile(ms)) + if (string.IsNullOrWhiteSpace(message)) return; + _messages.Add(message); + } + + public override string ToString() => _messages.Any() ? string.Join(", ", _messages) : PassingArchive; + } + + public class PassesTestArchiveConstraint : Constraint + { + private readonly string _password; + private readonly bool _testData; + + public PassesTestArchiveConstraint(string password = null, bool testData = true) + { + _password = password; + _testData = testData; + } + + public override string Description => TestArchiveReport.PassingArchive; + + public override ConstraintResult ApplyTo(TActual actual) + { + MemoryStream ms = null; + try { - zipFile.Password = password; - return zipFile.TestArchive(true); + if (!(actual is ZipFile zipFile)) + { + if (!(actual is byte[] rawArchive)) + { + return new ConstraintResult(this, actual, ConstraintStatus.Failure); + } + + ms = new MemoryStream(rawArchive); + zipFile = new ZipFile(ms){Password = _password}; + } + + var report = new TestArchiveReport(); + + return new ConstraintResult( + this, report, zipFile.TestArchive( + _testData, + TestStrategy.FindAllErrors, + report.HandleTestResults + ) + ? 
ConstraintStatus.Success + : ConstraintStatus.Failure); } + finally + { + ms?.Dispose(); + } + } + } + + public static class ZipTestingConstraintExtensions + { + public static IResolveConstraint PassTestArchive(this ConstraintExpression expression, string password = null, bool testData = true) + { + var constraint = new PassesTestArchiveConstraint(password, testData); + expression.Append(constraint); + return constraint; } } + + /// + public class Does: NUnit.Framework.Does + { + public static IResolveConstraint PassTestArchive(string password = null, bool testData = true) + => new PassesTestArchiveConstraint(password, testData); + + public static IResolveConstraint PassTestArchive(bool testData) + => new PassesTestArchiveConstraint(password: null, testData); + } } diff --git a/test/ICSharpCode.SharpZipLib.Tests/Zip/FastZipHandling.cs b/test/ICSharpCode.SharpZipLib.Tests/Zip/FastZipHandling.cs index fce26c2c4..3858f38f8 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/Zip/FastZipHandling.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/Zip/FastZipHandling.cs @@ -6,6 +6,7 @@ using System.IO; using System.Linq; using System.Text; +using Does = ICSharpCode.SharpZipLib.Tests.TestSupport.Does; using TimeSetting = ICSharpCode.SharpZipLib.Zip.ZipEntryFactory.TimeSetting; namespace ICSharpCode.SharpZipLib.Tests.Zip @@ -40,7 +41,7 @@ public void Basics() ZipEntry entry = zf[0]; Assert.AreEqual(tempName1, entry.Name); Assert.AreEqual(1, entry.Size); - Assert.IsTrue(zf.TestArchive(true)); + Assert.That(zf, Does.PassTestArchive()); zf.Close(); } @@ -103,12 +104,12 @@ public void ExtractEmptyDirectories() [Category("CreatesTempFile")] public void CreateEmptyDirectories(string password) { - using (var tempFilePath = new Utils.TempDir()) + using (var tempFilePath = Utils.GetTempDir()) { - string name = Path.Combine(tempFilePath.Fullpath, "x.zip"); + string name = Path.Combine(tempFilePath.FullName, "x.zip"); // Create empty test folders (The folder that we'll zip, and the test sub folder). 
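// Usage sketch for the PassesTestArchiveConstraint defined above: the test
// project shadows NUnit's Does with its own subclass (each test file opts in via
// `using Does = ICSharpCode.SharpZipLib.Tests.TestSupport.Does;`), so the archive
// check reads like any other constraint. zipStream and rawBytes below are
// illustrative placeholders, not part of this test.
using (var zf = new ZipFile(zipStream) { IsStreamOwner = false })
{
	Assert.That(zf, Does.PassTestArchive());                      // ZipFile overload
}
Assert.That(rawBytes, Does.PassTestArchive(password: "secret"));  // byte[] overload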
- string archiveRootDir = Path.Combine(tempFilePath.Fullpath, ZipTempDir); + string archiveRootDir = Path.Combine(tempFilePath.FullName, ZipTempDir); string targetDir = Path.Combine(archiveRootDir, "floyd"); Directory.CreateDirectory(targetDir); @@ -118,7 +119,7 @@ public void CreateEmptyDirectories(string password) CreateEmptyDirectories = true, Password = password, }; - fastZip.CreateZip(name, archiveRootDir, true, null); + fastZip.CreateZip(name, archiveRootDir, recurse: true, fileFilter: null); // Test that the archive contains the empty folder entry using (var zipFile = new ZipFile(name)) @@ -128,7 +129,7 @@ public void CreateEmptyDirectories(string password) var folderEntry = zipFile.GetEntry("floyd/"); Assert.That(folderEntry.IsDirectory, Is.True, "The entry must be a folder"); - Assert.IsTrue(zipFile.TestArchive(true)); + Assert.That(zipFile, Does.PassTestArchive()); } } } @@ -138,25 +139,24 @@ public void CreateEmptyDirectories(string password) [Category("CreatesTempFile")] public void ContentEqualAfterAfterArchived([Values(0, 1, 64)]int contentSize) { - using(var sourceDir = new Utils.TempDir()) - using(var targetDir = new Utils.TempDir()) - using(var zipFile = Utils.GetDummyFile(0)) - { - var sourceFile = sourceDir.CreateDummyFile(contentSize); - var sourceContent = File.ReadAllBytes(sourceFile); - new FastZip().CreateZip(zipFile.Filename, sourceDir.Fullpath, true, null); - - Assert.DoesNotThrow(() => - { - new FastZip().ExtractZip(zipFile.Filename, targetDir.Fullpath, null); - }, "Exception during extraction of test archive"); + using var sourceDir = Utils.GetTempDir(); + using var targetDir = Utils.GetTempDir(); + using var zipFile = Utils.GetTempFile(); + + var sourceFile = sourceDir.CreateDummyFile(contentSize); + var sourceContent = sourceFile.ReadAllBytes(); + new FastZip().CreateZip(zipFile.FullName, sourceDir.FullName, recurse: true, fileFilter: null); + + Assert.DoesNotThrow(() => + { + new FastZip().ExtractZip(zipFile, targetDir, fileFilter: null); + }, "Exception during extraction of test archive"); - var targetFile = Path.Combine(targetDir.Fullpath, Path.GetFileName(sourceFile)); - var targetContent = File.ReadAllBytes(targetFile); + var targetFile = Path.Combine(targetDir, Path.GetFileName(sourceFile)); + var targetContent = File.ReadAllBytes(targetFile); - Assert.AreEqual(sourceContent.Length, targetContent.Length, "Extracted file size does not match source file size"); - Assert.AreEqual(sourceContent, targetContent, "Extracted content does not match source content"); - } + Assert.AreEqual(sourceContent.Length, targetContent.Length, "Extracted file size does not match source file size"); + Assert.AreEqual(sourceContent, targetContent, "Extracted content does not match source content"); } [Test] @@ -167,6 +167,7 @@ public void ContentEqualAfterAfterArchived([Values(0, 1, 64)]int contentSize) public void Encryption(ZipEncryptionMethod encryptionMethod) { const string tempName1 = "a.dat"; + const int tempSize = 1; var target = new MemoryStream(); @@ -174,7 +175,7 @@ public void Encryption(ZipEncryptionMethod encryptionMethod) Assert.IsNotNull(tempFilePath, "No permission to execute this test?"); string addFile = Path.Combine(tempFilePath, tempName1); - MakeTempFile(addFile, 1); + MakeTempFile(addFile, tempSize); try { @@ -190,17 +191,13 @@ public void Encryption(ZipEncryptionMethod encryptionMethod) using (ZipFile zf = new ZipFile(archive)) { zf.Password = "Ahoy"; - Assert.AreEqual(1, zf.Count); - ZipEntry entry = zf[0]; - Assert.AreEqual(tempName1, entry.Name); - 
Assert.AreEqual(1, entry.Size); - Assert.IsTrue(zf.TestArchive(true, TestStrategy.FindFirstError, (status, message) => - { - if(!string.IsNullOrEmpty(message)) { - Console.WriteLine($"{message} ({status.Entry?.Name ?? "-"})"); - } - })); - Assert.IsTrue(entry.IsCrypted); + Assert.That(zf.Count, Is.EqualTo(1)); + var entry = zf[0]; + Assert.That(entry.Name, Is.EqualTo(tempName1)); + Assert.That(entry.Size, Is.EqualTo(tempSize)); + Assert.That(entry.IsCrypted); + + Assert.That(zf, Does.PassTestArchive()); switch (encryptionMethod) { @@ -230,64 +227,60 @@ public void CreateExceptions() { Assert.Throws(() => { - using (var tempDir = new Utils.TempDir()) - { - var fastZip = new FastZip(); - var badPath = Path.Combine(Path.GetTempPath(), Utils.GetDummyFileName()); - var addFile = Path.Combine(tempDir.Fullpath, "test.zip"); - fastZip.CreateZip(addFile, badPath, false, null); - } + using var tempDir = Utils.GetTempDir(); + var fastZip = new FastZip(); + var badPath = Path.Combine(Path.GetTempPath(), Utils.GetDummyFileName()); + var addFile = tempDir.GetFile("test.zip"); + fastZip.CreateZip(addFile, badPath, recurse: false, fileFilter: null); }); } #region String testing helper - private void TestFileNames(params string[] names) - => TestFileNames((IEnumerable)names); - - private void TestFileNames(IEnumerable names) + private void TestFileNames(int codePage, IReadOnlyList names) { var zippy = new FastZip(); + if (codePage > 0) + { + zippy.UseUnicode = false; + zippy.LegacyCodePage = codePage; + } - using (var tempDir = new Utils.TempDir()) - using (var tempZip = new Utils.TempFile()) + using var tempDir = Utils.GetTempDir(); + using var tempZip = Utils.GetTempFile(); + int nameCount = 0; + foreach (var name in names) { - int nameCount = 0; - foreach (var name in names) - { - tempDir.CreateDummyFile(name); - nameCount++; - } + tempDir.CreateDummyFile(name); + nameCount++; + } - zippy.CreateZip(tempZip.Filename, tempDir.Fullpath, true, null); + zippy.CreateZip(tempZip, tempDir, recurse: true, fileFilter: null); - using (ZipFile z = new ZipFile(tempZip.Filename)) - { - Assert.AreEqual(nameCount, z.Count); - foreach (var name in names) - { - var index = z.FindEntry(name, true); + using var zf = new ZipFile(tempZip, zippy.StringCodec); + Assert.AreEqual(nameCount, zf.Count); + foreach (var name in names) + { + var index = zf.FindEntry(name, ignoreCase: true); - Assert.AreNotEqual(-1, index, "Zip entry \"{0}\" not found", name); + Assert.AreNotEqual(expected: -1, index, "Zip entry \"{0}\" not found", name); - var entry = z[index]; + var entry = zf[index]; - if (ZipStrings.UseUnicode) - { - Assert.IsTrue(entry.IsUnicodeText, "Zip entry #{0} not marked as unicode", index); - } - else - { - Assert.IsFalse(entry.IsUnicodeText, "Zip entry #{0} marked as unicode", index); - } + if (zippy.UseUnicode) + { + Assert.IsTrue(entry.IsUnicodeText, "Zip entry #{0} not marked as unicode", index); + } + else + { + Assert.IsFalse(entry.IsUnicodeText, "Zip entry #{0} marked as unicode", index); + } - Assert.AreEqual(name, entry.Name); + Assert.AreEqual(name, entry.Name); - var nameBytes = string.Join(" ", Encoding.BigEndianUnicode.GetBytes(entry.Name).Select(b => b.ToString("x2"))); + var nameBytes = string.Join(" ", Encoding.BigEndianUnicode.GetBytes(entry.Name).Select(b => b.ToString("x2"))); - Console.WriteLine($" - Zip entry: {entry.Name} ({nameBytes})"); - } - } + Console.WriteLine($" - Zip entry: {entry.Name} ({nameBytes})"); } } @@ -298,15 +291,7 @@ private void TestFileNames(IEnumerable names) 
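// Sketch of how the reworked TestFileNames helper above drives FastZip when a
// legacy code page is requested (property and parameter names are taken from this
// diff; zipPath and sourceDir are placeholders):
var zippy = new FastZip();
zippy.UseUnicode = false;
zippy.LegacyCodePage = 1251;   // e.g. windows-1251 for the Russian sample
zippy.CreateZip(zipPath, sourceDir, recurse: true, fileFilter: null);
// Entries written this way must not be flagged IsUnicodeText when read back,
// which is exactly what the helper asserts.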
[Category("Unicode")] public void UnicodeText() { - var preCp = ZipStrings.CodePage; - try - { - TestFileNames(StringTesting.Filenames); - } - finally - { - ZipStrings.CodePage = preCp; - } + TestFileNames(0, StringTesting.Filenames.ToArray()); } [Test] @@ -314,35 +299,26 @@ public void UnicodeText() [Category("Unicode")] public void NonUnicodeText() { - var preCp = ZipStrings.CodePage; - try + Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); + + foreach (var (language, filename, encoding) in StringTesting.TestSamples) { - Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); + Console.WriteLine($"{language} filename \"{filename}\" using \"{encoding}\":"); - foreach ((string language, string filename, string encoding) in StringTesting.GetTestSamples()) + // TODO: samples of this test must be reversible + // Some samples can't be restored back with their encoding. + // test wasn't failing only because SystemDefaultCodepage is 65001 on Net.Core and + // old behaviour actually was using Unicode instead of user's passed codepage + var encoder = Encoding.GetEncoding(encoding); + var bytes = encoder.GetBytes(filename); + var restoredString = encoder.GetString(bytes); + if(string.CompareOrdinal(filename, restoredString) != 0) { - Console.WriteLine($"{language} filename \"{filename}\" using \"{encoding}\":"); - - // TODO: samples of this test must be reversible - // Some samples can't be restored back with their encoding. - // test wasn't failing only because SystemDefaultCodepage is 65001 on Net.Core and - // old behaviour actually was using Unicode instead of user's passed codepage - var encoder = Encoding.GetEncoding(encoding); - var bytes = encoder.GetBytes(filename); - var restoredString = encoder.GetString(bytes); - if(string.CompareOrdinal(filename, restoredString) != 0) - { - Console.WriteLine($"Sample for language {language} with value of {filename} is skipped, because it's irreversable"); - continue; - } - - ZipStrings.CodePage = Encoding.GetEncoding(encoding).CodePage; - TestFileNames(filename); + Console.WriteLine($"Sample for language {language} with value of {filename} is skipped, because it's irreversable"); + continue; } - } - finally - { - ZipStrings.CodePage = preCp; + + TestFileNames(Encoding.GetEncoding(encoding).CodePage, new [] { filename }); } } @@ -385,6 +361,7 @@ public void ExtractExceptions() public void ReadingOfLockedDataFiles() { const string tempName1 = "a.dat"; + const int tempSize = 1; var target = new MemoryStream(); @@ -392,7 +369,7 @@ public void ReadingOfLockedDataFiles() Assert.IsNotNull(tempFilePath, "No permission to execute this test?"); string addFile = Path.Combine(tempFilePath, tempName1); - MakeTempFile(addFile, 1); + MakeTempFile(addFile, tempSize); try { @@ -405,11 +382,11 @@ public void ReadingOfLockedDataFiles() var archive = new MemoryStream(target.ToArray()); using (ZipFile zf = new ZipFile(archive)) { - Assert.AreEqual(1, zf.Count); - ZipEntry entry = zf[0]; - Assert.AreEqual(tempName1, entry.Name); - Assert.AreEqual(1, entry.Size); - Assert.IsTrue(zf.TestArchive(true)); + Assert.That(zf.Count, Is.EqualTo(1)); + var entry = zf[0]; + Assert.That(entry.Name, Is.EqualTo(tempName1)); + Assert.That(entry.Size, Is.EqualTo(tempSize)); + Assert.That(zf, Does.PassTestArchive()); zf.Close(); } @@ -426,6 +403,7 @@ public void ReadingOfLockedDataFiles() public void NonAsciiPasswords() { const string tempName1 = "a.dat"; + const int tempSize = 1; var target = new MemoryStream(); @@ -433,7 +411,7 @@ public void NonAsciiPasswords() 
Assert.IsNotNull(tempFilePath, "No permission to execute this test?"); string addFile = Path.Combine(tempFilePath, tempName1); - MakeTempFile(addFile, 1); + MakeTempFile(addFile, tempSize); string password = "abc\u0066\u0393"; try @@ -447,12 +425,12 @@ public void NonAsciiPasswords() using (ZipFile zf = new ZipFile(archive)) { zf.Password = password; - Assert.AreEqual(1, zf.Count); - ZipEntry entry = zf[0]; - Assert.AreEqual(tempName1, entry.Name); - Assert.AreEqual(1, entry.Size); - Assert.IsTrue(zf.TestArchive(true)); - Assert.IsTrue(entry.IsCrypted); + Assert.That(zf.Count, Is.EqualTo(1)); + var entry = zf[0]; + Assert.That(entry.Name, Is.EqualTo(tempName1)); + Assert.That(entry.Size, Is.EqualTo(tempSize)); + Assert.That(zf, Does.PassTestArchive()); + Assert.That(entry.IsCrypted); } } finally @@ -658,33 +636,29 @@ public void SetDirectoryModifiedDate() public void CreateZipShouldLeaveOutputStreamOpenIfRequested(bool leaveOpen) { const string tempFileName = "a(2).dat"; + const int tempSize = 16; - using (var tempFolder = new Utils.TempDir()) - { - // Create test input file - string addFile = Path.Combine(tempFolder.Fullpath, tempFileName); - MakeTempFile(addFile, 16); + using var tempFolder = Utils.GetTempDir(); + // Create test input file + tempFolder.CreateDummyFile(tempFileName, tempSize); - // Create the zip with fast zip - var target = new TrackedMemoryStream(); - var fastZip = new FastZip(); + // Create the zip with fast zip + var target = new TrackedMemoryStream(); + var fastZip = new FastZip(); - fastZip.CreateZip(target, tempFolder.Fullpath, false, @"a\(2\)\.dat", null, leaveOpen: leaveOpen); + fastZip.CreateZip(target, tempFolder, recurse: false, @"a\(2\)\.dat", directoryFilter: null, leaveOpen); - // Check that the output stream was disposed (or not) as expected - Assert.That(target.IsDisposed, Is.Not.EqualTo(leaveOpen), "IsDisposed should be the opposite of leaveOpen"); + // Check that the output stream was disposed (or not) as expected + Assert.That(target.IsDisposed, Is.Not.EqualTo(leaveOpen), "IsDisposed should be the opposite of leaveOpen"); - // Check that the file contents are correct in both cases - var archive = new MemoryStream(target.ToArray()); - using (ZipFile zf = new ZipFile(archive)) - { - Assert.AreEqual(1, zf.Count); - ZipEntry entry = zf[0]; - Assert.AreEqual(tempFileName, entry.Name); - Assert.AreEqual(16, entry.Size); - Assert.IsTrue(zf.TestArchive(true)); - } - } + // Check that the file contents are correct in both cases + var archive = new MemoryStream(target.ToArray()); + using var zf = new ZipFile(archive); + Assert.That(zf.Count, Is.EqualTo(1)); + var entry = zf[0]; + Assert.That(entry.Name, Is.EqualTo(tempFileName)); + Assert.That(entry.Size, Is.EqualTo(tempSize)); + Assert.That(zf, Does.PassTestArchive()); } [Category("Zip")] @@ -748,15 +722,13 @@ public void ExtractZipShouldSetTimeOnFilesFromConstructorTimeSetting(TimeSetting } var fastZip = new FastZip(timeSetting); - using (var extractDir = new Utils.TempDir()) - { - fastZip.ExtractZip(archiveStream, extractDir.Fullpath, FastZip.Overwrite.Always, - _ => true, "", "", true, true, false); - var fi = new FileInfo(Path.Combine(extractDir.Fullpath, SingleEntryFileName)); - var actualTime = FileTimeFromTimeSetting(fi, timeSetting); - // Assert that the time is within +/- 2s of the target time to allow for timing/rounding discrepancies - Assert.LessOrEqual(Math.Abs((targetTime - actualTime).TotalSeconds), 2); - } + using var extractDir = Utils.GetTempDir(); + fastZip.ExtractZip(archiveStream, 
extractDir.FullName, FastZip.Overwrite.Always, + _ => true, "", "", restoreDateTime: true, isStreamOwner: true, allowParentTraversal: false); + var fi = new FileInfo(Path.Combine(extractDir.FullName, SingleEntryFileName)); + var actualTime = FileTimeFromTimeSetting(fi, timeSetting); + // Assert that the time is within +/- 2s of the target time to allow for timing/rounding discrepancies + Assert.LessOrEqual(Math.Abs((targetTime - actualTime).TotalSeconds), 2); } [Category("Zip")] @@ -770,15 +742,13 @@ public void ExtractZipShouldSetTimeOnFilesFromConstructorDateTime(DateTimeKind d // Extract the archive with a fixed time override var targetTime = ExpectedFixedTime(dtk); var fastZip = new FastZip(targetTime); - using (var extractDir = new Utils.TempDir()) - { - fastZip.ExtractZip(target, extractDir.Fullpath, FastZip.Overwrite.Always, - _ => true, "", "", true, true, false); - var fi = new FileInfo(Path.Combine(extractDir.Fullpath, SingleEntryFileName)); - var fileTime = FileTimeFromTimeSetting(fi, TimeSetting.Fixed); - if (fileTime.Kind != dtk) fileTime = fileTime.ToUniversalTime(); - Assert.AreEqual(targetTime, fileTime); - } + using var extractDir = Utils.GetTempDir(); + fastZip.ExtractZip(target, extractDir.FullName, FastZip.Overwrite.Always, + _ => true, "", "", restoreDateTime: true, isStreamOwner: true, allowParentTraversal: false); + var fi = new FileInfo(Path.Combine(extractDir.FullName, SingleEntryFileName)); + var fileTime = FileTimeFromTimeSetting(fi, TimeSetting.Fixed); + if (fileTime.Kind != dtk) fileTime = fileTime.ToUniversalTime(); + Assert.AreEqual(targetTime, fileTime); } [Category("Zip")] @@ -792,13 +762,11 @@ public void ExtractZipShouldSetTimeOnFilesWithEmptyConstructor(DateTimeKind dtk) // Extract the archive with an empty constructor var fastZip = new FastZip(); - using (var extractDir = new Utils.TempDir()) - { - fastZip.ExtractZip(target, extractDir.Fullpath, FastZip.Overwrite.Always, - _ => true, "", "", true, true, false); - var fi = new FileInfo(Path.Combine(extractDir.Fullpath, SingleEntryFileName)); - Assert.AreEqual(targetTime, FileTimeFromTimeSetting(fi, TimeSetting.Fixed)); - } + using var extractDir = Utils.GetTempDir(); + fastZip.ExtractZip(target, extractDir.FullName, FastZip.Overwrite.Always, + _ => true, "", "", restoreDateTime: true, isStreamOwner: true, allowParentTraversal: false); + var fi = new FileInfo(Path.Combine(extractDir.FullName, SingleEntryFileName)); + Assert.AreEqual(targetTime, FileTimeFromTimeSetting(fi, TimeSetting.Fixed)); } private static bool IsLastAccessTime(TimeSetting ts) @@ -851,17 +819,15 @@ private static TrackedMemoryStream CreateFastZipTestArchiveWithAnEntry(FastZip f { var target = new TrackedMemoryStream(); - using (var tempFolder = new Utils.TempDir()) - { - - // Create test input file - var addFile = Path.Combine(tempFolder.Fullpath, SingleEntryFileName); - MakeTempFile(addFile, 16); - var fi = new FileInfo(addFile); - alterFile?.Invoke(fi); + using var tempFolder = Utils.GetTempDir(); + // Create test input file + var addFile = Path.Combine(tempFolder.FullName, SingleEntryFileName); + MakeTempFile(addFile, 16); + var fi = new FileInfo(addFile); + alterFile?.Invoke(fi); - fastZip.CreateZip(target, tempFolder.Fullpath, false, SingleEntryFileName, null, leaveOpen: true); - } + fastZip.CreateZip(target, tempFolder.FullName, recurse: false, + SingleEntryFileName, directoryFilter: null, leaveOpen: true); return target; } diff --git a/test/ICSharpCode.SharpZipLib.Tests/Zip/GeneralHandling.cs 
b/test/ICSharpCode.SharpZipLib.Tests/Zip/GeneralHandling.cs index c3e32064c..f3bf9a995 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/Zip/GeneralHandling.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/Zip/GeneralHandling.cs @@ -1,5 +1,7 @@ -using ICSharpCode.SharpZipLib.Tests.TestSupport; +using ICSharpCode.SharpZipLib.Checksum; +using ICSharpCode.SharpZipLib.Tests.TestSupport; using ICSharpCode.SharpZipLib.Zip; +using ICSharpCode.SharpZipLib.Zip.Compression.Streams; using NUnit.Framework; using System; using System.IO; @@ -7,6 +9,7 @@ using System.Runtime.Serialization.Formatters.Binary; using System.Security; using System.Text; +using Does = ICSharpCode.SharpZipLib.Tests.TestSupport.Does; namespace ICSharpCode.SharpZipLib.Tests.Zip { @@ -16,18 +19,6 @@ namespace ICSharpCode.SharpZipLib.Tests.Zip [TestFixture] public class GeneralHandling : ZipBase { - private void AddRandomDataToEntry(ZipOutputStream zipStream, int size) - { - if (size > 0) - { - byte[] data = new byte[size]; - var rnd = new Random(); - rnd.NextBytes(data); - - zipStream.Write(data, 0, data.Length); - } - } - private void ExerciseZip(CompressionMethod method, int compressionLevel, int size, string password, bool canSeek) { @@ -82,57 +73,6 @@ private void ExerciseZip(CompressionMethod method, int compressionLevel, } } - private string DescribeAttributes(FieldAttributes attributes) - { - string att = string.Empty; - if ((FieldAttributes.Public & attributes) != 0) - { - att = att + "Public,"; - } - - if ((FieldAttributes.Static & attributes) != 0) - { - att = att + "Static,"; - } - - if ((FieldAttributes.Literal & attributes) != 0) - { - att = att + "Literal,"; - } - - if ((FieldAttributes.HasDefault & attributes) != 0) - { - att = att + "HasDefault,"; - } - - if ((FieldAttributes.InitOnly & attributes) != 0) - { - att = att + "InitOnly,"; - } - - if ((FieldAttributes.Assembly & attributes) != 0) - { - att = att + "Assembly,"; - } - - if ((FieldAttributes.FamANDAssem & attributes) != 0) - { - att = att + "FamANDAssembly,"; - } - - if ((FieldAttributes.FamORAssem & attributes) != 0) - { - att = att + "FamORAssembly,"; - } - - if ((FieldAttributes.HasFieldMarshal & attributes) != 0) - { - att = att + "HasFieldMarshal,"; - } - - return att; - } - /// /// Invalid passwords should be detected early if possible, seekable stream /// Note: Have a 1/255 chance of failing due to CRC collision (hence retried once) @@ -340,39 +280,35 @@ public void BasicStoredNonSeekable() [Test] [Category("Zip")] - public void StoredNonSeekableKnownSizeNoCrc() + [TestCase(21348, null)] + [TestCase(24692, "Mabutu")] + public void StoredNonSeekableKnownSizeNoCrc(int targetSize, string password) { - // This cannot be stored directly as the crc is not be known. - const int TargetSize = 21348; - const string Password = null; + // This cannot be stored directly as the crc is not known. MemoryStream ms = new MemoryStreamWithoutSeek(); using (ZipOutputStream outStream = new ZipOutputStream(ms)) { - outStream.Password = Password; + outStream.Password = password; outStream.IsStreamOwner = false; var entry = new ZipEntry("dummyfile.tst"); entry.CompressionMethod = CompressionMethod.Stored; // The bit thats in question is setting the size before its added to the archive. 
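// For contrast with the Stored-to-Deflated conversion asserted below, a rough
// sketch (not part of this test) of an entry that should be able to remain Stored
// even on a non-seekable stream, because Size and Crc are both known up front and
// the local header never needs patching. `data` and the entry name are placeholders.
var data = Utils.GetDummyBytes(21348);
var crc = new Crc32();            // ICSharpCode.SharpZipLib.Checksum
crc.Update(data);
var storedEntry = new ZipEntry("stored.tst")
{
	CompressionMethod = CompressionMethod.Stored,
	Size = data.Length,
	Crc = (uint)crc.Value,
};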
- entry.Size = TargetSize; + entry.Size = targetSize; outStream.PutNextEntry(entry); Assert.AreEqual(CompressionMethod.Deflated, entry.CompressionMethod, "Entry should be deflated"); Assert.AreEqual(-1, entry.CompressedSize, "Compressed size should be known"); - var rnd = new Random(); - - int size = TargetSize; - byte[] original = new byte[size]; - rnd.NextBytes(original); + byte[] original = Utils.GetDummyBytes(targetSize); // Although this could be written in one chunk doing it in lumps // throws up buffering problems including with encryption the original // source for this change. - int index = 0; + int index = 0, size = targetSize; while (size > 0) { int count = (size > 0x200) ? 0x200 : size; @@ -381,53 +317,7 @@ public void StoredNonSeekableKnownSizeNoCrc() index += count; } } - Assert.IsTrue(ZipTesting.TestArchive(ms.ToArray())); - } - - [Test] - [Category("Zip")] - public void StoredNonSeekableKnownSizeNoCrcEncrypted() - { - // This cant be stored directly as the crc is not known - const int TargetSize = 24692; - const string Password = "Mabutu"; - - MemoryStream ms = new MemoryStreamWithoutSeek(); - - using (ZipOutputStream outStream = new ZipOutputStream(ms)) - { - outStream.Password = Password; - outStream.IsStreamOwner = false; - var entry = new ZipEntry("dummyfile.tst"); - entry.CompressionMethod = CompressionMethod.Stored; - - // The bit thats in question is setting the size before its added to the archive. - entry.Size = TargetSize; - - outStream.PutNextEntry(entry); - - Assert.AreEqual(CompressionMethod.Deflated, entry.CompressionMethod, "Entry should be stored"); - Assert.AreEqual(-1, entry.CompressedSize, "Compressed size should be known"); - - var rnd = new Random(); - - int size = TargetSize; - byte[] original = new byte[size]; - rnd.NextBytes(original); - - // Although this could be written in one chunk doing it in lumps - // throws up buffering problems including with encryption the original - // source for this change. - int index = 0; - while (size > 0) - { - int count = (size > 0x200) ? 
0x200 : size; - outStream.Write(original, index, count); - size -= 0x200; - index += count; - } - } - Assert.IsTrue(ZipTesting.TestArchive(ms.ToArray(), Password)); + Assert.That(ms.ToArray(), Does.PassTestArchive(password)); } /// @@ -500,10 +390,10 @@ public void MixedEncryptedAndPlain() int extractCount = 0; int extractIndex = 0; - ZipEntry entry; + byte[] decompressedData = new byte[100]; - while ((entry = inStream.GetNextEntry()) != null) + while (inStream.GetNextEntry() != null) { extractCount = decompressedData.Length; extractIndex = 0; @@ -529,7 +419,7 @@ public void MixedEncryptedAndPlain() [Category("Zip")] public void BasicStoredEncrypted() { - ExerciseZip(CompressionMethod.Stored, 0, 50000, "Rosebud", true); + ExerciseZip(CompressionMethod.Stored, compressionLevel: 0, size: 50000, "Rosebud", canSeek: true); } /// @@ -540,7 +430,7 @@ public void BasicStoredEncrypted() [Category("Zip")] public void BasicStoredEncryptedNonSeekable() { - ExerciseZip(CompressionMethod.Stored, 0, 50000, "Rosebud", false); + ExerciseZip(CompressionMethod.Stored, compressionLevel: 0, size: 50000, "Rosebud", canSeek: false); } /// @@ -562,12 +452,12 @@ public void StoredNonSeekableConvertToDeflate() outStream.PutNextEntry(entry); Assert.AreEqual(0, outStream.GetLevel(), "Compression level invalid"); - AddRandomDataToEntry(outStream, 100); + Utils.WriteDummyData(outStream, 100); entry = new ZipEntry("2.tst"); entry.CompressionMethod = CompressionMethod.Deflated; outStream.PutNextEntry(entry); Assert.AreEqual(8, outStream.GetLevel(), "Compression level invalid"); - AddRandomDataToEntry(outStream, 100); + Utils.WriteDummyData(outStream, 100); outStream.Close(); } @@ -856,20 +746,17 @@ private object UnZipZeroLength(byte[] zipped) return result; } - private void CheckNameConversion(string toCheck) - { - byte[] intermediate = ZipStrings.ConvertToArray(toCheck); - string final = ZipStrings.ConvertToString(intermediate); - - Assert.AreEqual(toCheck, final, "Expected identical result"); - } - [Test] [Category("Zip")] - public void NameConversion() + [TestCase("Hello")] + [TestCase("a/b/c/d/e/f/g/h/SomethingLikeAnArchiveName.txt")] + public void LegacyNameConversion(string name) { - CheckNameConversion("Hello"); - CheckNameConversion("a/b/c/d/e/f/g/h/SomethingLikeAnArchiveName.txt"); + var encoding = StringCodec.Default.ZipEncoding(false); + byte[] intermediate = encoding.GetBytes(name); + string final = encoding.GetString(intermediate); + + Assert.AreEqual(name, final, "Expected identical result"); } [Test] @@ -878,22 +765,22 @@ public void UnicodeNameConversion() { Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); - ZipStrings.CodePage = 850; + var codec = StringCodec.FromCodePage(850); string sample = "Hello world"; byte[] rawData = Encoding.ASCII.GetBytes(sample); - string converted = ZipStrings.ConvertToStringExt(0, rawData); + var converted = codec.LegacyEncoding.GetString(rawData); Assert.AreEqual(sample, converted); - converted = ZipStrings.ConvertToStringExt((int)GeneralBitFlags.UnicodeText, rawData); + converted = codec.ZipInputEncoding(GeneralBitFlags.UnicodeText).GetString(rawData); Assert.AreEqual(sample, converted); // This time use some greek characters sample = "\u03A5\u03d5\u03a3"; rawData = Encoding.UTF8.GetBytes(sample); - converted = ZipStrings.ConvertToStringExt((int)GeneralBitFlags.UnicodeText, rawData); + converted = codec.ZipInputEncoding(GeneralBitFlags.UnicodeText).GetString(rawData); Assert.AreEqual(sample, converted); } diff --git 
a/test/ICSharpCode.SharpZipLib.Tests/Zip/PassthroughTests.cs b/test/ICSharpCode.SharpZipLib.Tests/Zip/PassthroughTests.cs new file mode 100644 index 000000000..954e339b1 --- /dev/null +++ b/test/ICSharpCode.SharpZipLib.Tests/Zip/PassthroughTests.cs @@ -0,0 +1,141 @@ +using System; +using System.IO; +using System.IO.Compression; +using System.Text; +using ICSharpCode.SharpZipLib.Checksum; +using ICSharpCode.SharpZipLib.Tests.TestSupport; +using ICSharpCode.SharpZipLib.Zip; +using NUnit.Framework; +using Does = ICSharpCode.SharpZipLib.Tests.TestSupport.Does; + +namespace ICSharpCode.SharpZipLib.Tests.Zip +{ + [TestFixture] + public class PassthroughTests + { + [Test] + [Category("Zip")] + public void AddingValidPrecompressedEntryToZipOutputStream() + { + using var ms = new MemoryStream(); + + using (var outStream = new ZipOutputStream(ms){IsStreamOwner = false}) + { + var (compressedData, crc, size) = CreateDeflatedData(); + var entry = new ZipEntry("dummyfile.tst") + { + CompressionMethod = CompressionMethod.Deflated, + Size = size, + Crc = (uint)crc.Value, + CompressedSize = compressedData.Length, + }; + + outStream.PutNextPassthroughEntry(entry); + + compressedData.CopyTo(outStream); + } + + Assert.That(ms.ToArray(), Does.PassTestArchive()); + } + + private static (MemoryStream, Crc32, int) CreateDeflatedData() + { + var data = Encoding.UTF8.GetBytes("Hello, world"); + + var crc = new Crc32(); + crc.Update(data); + + var compressedData = new MemoryStream(); + using(var gz = new DeflateStream(compressedData, CompressionMode.Compress, leaveOpen: true)) + { + gz.Write(data, 0, data.Length); + } + compressedData.Position = 0; + + return (compressedData, crc, data.Length); + } + + [Test] + [Category("Zip")] + public void AddingPrecompressedEntryToZipOutputStreamWithInvalidSize() + { + using var outStream = new ZipOutputStream(new MemoryStream()); + var (compressedData, crc, size) = CreateDeflatedData(); + outStream.Password = "mockpassword"; + var entry = new ZipEntry("dummyfile.tst") + { + CompressionMethod = CompressionMethod.Stored, + Crc = (uint)crc.Value, + CompressedSize = compressedData.Length, + }; + + Assert.Throws(() => + { + outStream.PutNextPassthroughEntry(entry); + }); + } + + + [Test] + [Category("Zip")] + public void AddingPrecompressedEntryToZipOutputStreamWithInvalidCompressedSize() + { + using var outStream = new ZipOutputStream(new MemoryStream()); + var (compressedData, crc, size) = CreateDeflatedData(); + outStream.Password = "mockpassword"; + var entry = new ZipEntry("dummyfile.tst") + { + CompressionMethod = CompressionMethod.Stored, + Size = size, + Crc = (uint)crc.Value, + }; + + Assert.Throws(() => + { + outStream.PutNextPassthroughEntry(entry); + }); + } + + [Test] + [Category("Zip")] + public void AddingPrecompressedEntryToZipOutputStreamWithNonSupportedMethod() + { + using var outStream = new ZipOutputStream(new MemoryStream()); + var (compressedData, crc, size) = CreateDeflatedData(); + outStream.Password = "mockpassword"; + var entry = new ZipEntry("dummyfile.tst") + { + CompressionMethod = CompressionMethod.LZMA, + Size = size, + Crc = (uint)crc.Value, + CompressedSize = compressedData.Length, + }; + + Assert.Throws(() => + { + outStream.PutNextPassthroughEntry(entry); + }); + } + + [Test] + [Category("Zip")] + public void AddingPrecompressedEntryToZipOutputStreamWithEncryption() + { + using var outStream = new ZipOutputStream(new MemoryStream()); + var (compressedData, crc, size) = CreateDeflatedData(); + outStream.Password = "mockpassword"; + var entry = new 
ZipEntry("dummyfile.tst") + { + CompressionMethod = CompressionMethod.Deflated, + Size = size, + Crc = (uint)crc.Value, + CompressedSize = compressedData.Length, + }; + + Assert.Throws(() => + { + outStream.PutNextPassthroughEntry(entry); + }); + } + } +} diff --git a/test/ICSharpCode.SharpZipLib.Tests/Zip/StreamHandling.cs b/test/ICSharpCode.SharpZipLib.Tests/Zip/StreamHandling.cs index 7a336592a..3e8ab732c 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/Zip/StreamHandling.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/Zip/StreamHandling.cs @@ -3,7 +3,11 @@ using ICSharpCode.SharpZipLib.Zip; using NUnit.Framework; using System; +using System.Diagnostics; using System.IO; +using System.Linq; +using System.Text; +using Does = ICSharpCode.SharpZipLib.Tests.TestSupport.Does; namespace ICSharpCode.SharpZipLib.Tests.Zip { @@ -13,6 +17,12 @@ namespace ICSharpCode.SharpZipLib.Tests.Zip [TestFixture] public class StreamHandling : ZipBase { + private TestTraceListener Listener; + [SetUp] + public void Init() => Trace.Listeners.Add(Listener = new TestTraceListener(TestContext.Out)); + [TearDown] + public void Deinit() => Trace.Listeners.Remove(Listener); + private void MustFailRead(Stream s, byte[] buffer, int offset, int count) { bool exception = false; @@ -44,9 +54,9 @@ public void ParameterHandling() ms.Seek(0, SeekOrigin.Begin); var inStream = new ZipInputStream(ms); - ZipEntry e = inStream.GetNextEntry(); + inStream.GetNextEntry(); - MustFailRead(inStream, null, 0, 0); + MustFailRead(inStream, buffer: null, 0, 0); MustFailRead(inStream, buffer, -1, 1); MustFailRead(inStream, buffer, 0, 11); MustFailRead(inStream, buffer, 7, 5); @@ -77,7 +87,7 @@ public void Zip64Descriptor() outStream.WriteByte(89); outStream.Close(); - Assert.IsTrue(ZipTesting.TestArchive(msw.ToArray())); + Assert.That(msw.ToArray(), Does.PassTestArchive()); msw = new MemoryStreamWithoutSeek(); outStream = new ZipOutputStream(msw); @@ -88,7 +98,7 @@ public void Zip64Descriptor() outStream.WriteByte(89); outStream.Close(); - Assert.IsTrue(ZipTesting.TestArchive(msw.ToArray())); + Assert.That(msw.ToArray(), Does.PassTestArchive()); } [Test] @@ -110,18 +120,16 @@ public void ReadAndWriteZip64NonSeekable() outStream.Close(); } - Assert.IsTrue(ZipTesting.TestArchive(msw.ToArray())); - - msw.Position = 0; + var msBytes = msw.ToArray(); + Assert.That(msBytes, Does.PassTestArchive()); - using (ZipInputStream zis = new ZipInputStream(msw)) + using (var zis = new ZipInputStream(new MemoryStream(msBytes))) { while (zis.GetNextEntry() != null) { - int len = 0; - int bufferSize = 1024; - byte[] buffer = new byte[bufferSize]; - while ((len = zis.Read(buffer, 0, bufferSize)) > 0) + const int bufferSize = 1024; + var buffer = new byte[bufferSize]; + while (zis.Read(buffer, 0, bufferSize) > 0) { // Reading the data is enough } @@ -148,7 +156,7 @@ public void EntryWithNoDataAndZip64() outStream.Finish(); outStream.Close(); - Assert.IsTrue(ZipTesting.TestArchive(msw.ToArray())); + Assert.That(msw.ToArray(), Does.PassTestArchive()); } /// @@ -241,46 +249,40 @@ public void WriteZipStreamWithNoCompression([Values(0, 1, 256)] int contentLengt { var buffer = new byte[255]; - using (var dummyZip = Utils.GetDummyFile(0)) - using (var inputFile = Utils.GetDummyFile(contentLength)) - { - // Filename is manually cleaned here to prevent this test from failing while ZipEntry doesn't automatically clean it - var inputFileName = ZipEntry.CleanName(inputFile.Filename); + using var dummyZip = Utils.GetTempFile(); + using var inputFile = 
Utils.GetDummyFile(contentLength); + // Filename is manually cleaned here to prevent this test from failing while ZipEntry doesn't automatically clean it + var inputFileName = ZipEntry.CleanName(inputFile); - using (var zipFileStream = File.OpenWrite(dummyZip.Filename)) - using (var zipOutputStream = new ZipOutputStream(zipFileStream)) - using (var inputFileStream = File.OpenRead(inputFile.Filename)) + using (var zipFileStream = File.OpenWrite(dummyZip)) + using (var zipOutputStream = new ZipOutputStream(zipFileStream)) + using (var inputFileStream = File.OpenRead(inputFile)) + { + zipOutputStream.PutNextEntry(new ZipEntry(inputFileName) { - zipOutputStream.PutNextEntry(new ZipEntry(inputFileName) - { - CompressionMethod = CompressionMethod.Stored, - }); - - StreamUtils.Copy(inputFileStream, zipOutputStream, buffer); - } + CompressionMethod = CompressionMethod.Stored, + }); - using (var zf = new ZipFile(dummyZip.Filename)) - { - var inputBytes = File.ReadAllBytes(inputFile.Filename); + StreamUtils.Copy(inputFileStream, zipOutputStream, buffer); + } - var entry = zf.GetEntry(inputFileName); - Assert.IsNotNull(entry, "No entry matching source file \"{0}\" found in archive, found \"{1}\"", inputFileName, zf[0].Name); + using (var zf = new ZipFile(dummyZip)) + { + var inputBytes = File.ReadAllBytes(inputFile); - Assert.DoesNotThrow(() => - { - using (var entryStream = zf.GetInputStream(entry)) - { - var outputBytes = new byte[entryStream.Length]; - entryStream.Read(outputBytes, 0, outputBytes.Length); + var entry = zf.GetEntry(inputFileName); + Assert.IsNotNull(entry, "No entry matching source file \"{0}\" found in archive, found \"{1}\"", inputFileName, zf[0].Name); - Assert.AreEqual(inputBytes, outputBytes, "Archive content does not match the source content"); - } - }, "Failed to locate entry stream in archive"); + Assert.DoesNotThrow(() => + { + using var entryStream = zf.GetInputStream(entry); + var outputBytes = new byte[entryStream.Length]; + entryStream.Read(outputBytes, 0, outputBytes.Length); - Assert.IsTrue(zf.TestArchive(testData: true), "Archive did not pass TestArchive"); - } + Assert.AreEqual(inputBytes, outputBytes, "Archive content does not match the source content"); + }, "Failed to locate entry stream in archive"); - + Assert.That(zf, Does.PassTestArchive()); } } @@ -288,26 +290,25 @@ public void WriteZipStreamWithNoCompression([Values(0, 1, 256)] int contentLengt [Category("Zip")] public void ZipEntryFileNameAutoClean() { - using (var dummyZip = Utils.GetDummyFile(0)) - using (var inputFile = Utils.GetDummyFile()) { - using (var zipFileStream = File.OpenWrite(dummyZip.Filename)) - using (var zipOutputStream = new ZipOutputStream(zipFileStream)) - using (var inputFileStream = File.OpenRead(inputFile.Filename)) + using var dummyZip = Utils.GetDummyFile(0); + using var inputFile = Utils.GetDummyFile(); + using (var zipFileStream = File.OpenWrite(dummyZip)) + using (var zipOutputStream = new ZipOutputStream(zipFileStream)) + using (var inputFileStream = File.OpenRead(inputFile)) + { + // New ZipEntry created with a full file name path as it's name + zipOutputStream.PutNextEntry(new ZipEntry(inputFile) { - // New ZipEntry created with a full file name path as it's name - zipOutputStream.PutNextEntry(new ZipEntry(inputFile.Filename) - { - CompressionMethod = CompressionMethod.Stored, - }); + CompressionMethod = CompressionMethod.Stored, + }); - inputFileStream.CopyTo(zipOutputStream); - } + inputFileStream.CopyTo(zipOutputStream); + } - using (var zf = new 
ZipFile(dummyZip.Filename)) - { - // The ZipEntry name should have been automatically cleaned - Assert.AreEqual(ZipEntry.CleanName(inputFile.Filename), zf[0].Name); - } + using (var zf = new ZipFile(dummyZip)) + { + // The ZipEntry name should have been automatically cleaned + Assert.AreEqual(ZipEntry.CleanName(inputFile), zf[0].Name); } } @@ -424,7 +425,7 @@ public void WriteThroughput() [Explicit("Long Running")] public void SingleLargeEntry() { - const string EntryName = "CantSeek"; + const string entryName = "CantSeek"; PerformanceTesting.TestReadWrite( size: TestDataSize.Large, @@ -433,14 +434,14 @@ public void SingleLargeEntry() var zis = new ZipInputStream(bs); var entry = zis.GetNextEntry(); - Assert.AreEqual(EntryName, entry.Name); + Assert.AreEqual(entryName, entry.Name); Assert.IsTrue((entry.Flags & (int)GeneralBitFlags.Descriptor) != 0); return zis; }, output: bs => { var zos = new ZipOutputStream(bs); - zos.PutNextEntry(new ZipEntry(EntryName)); + zos.PutNextEntry(new ZipEntry(entryName)); return zos; } ); @@ -458,20 +459,18 @@ public void SingleLargeEntry() [Category("Zip")] public void ShouldReadBZip2EntryButNotDecompress() { - var fileBytes = System.Convert.FromBase64String(BZip2CompressedZip); + var fileBytes = Convert.FromBase64String(BZip2CompressedZip); - using (var input = new MemoryStream(fileBytes, false)) - { - var zis = new ZipInputStream(input); - var entry = zis.GetNextEntry(); + using var input = new MemoryStream(fileBytes, writable: false); + var zis = new ZipInputStream(input); + var entry = zis.GetNextEntry(); - Assert.That(entry.Name, Is.EqualTo("a.dat"), "Should be able to get entry name"); - Assert.That(entry.CompressionMethod, Is.EqualTo(CompressionMethod.BZip2), "Entry should be BZip2 compressed"); - Assert.That(zis.CanDecompressEntry, Is.False, "Should not be able to decompress BZip2 entry"); + Assert.That(entry.Name, Is.EqualTo("a.dat"), "Should be able to get entry name"); + Assert.That(entry.CompressionMethod, Is.EqualTo(CompressionMethod.BZip2), "Entry should be BZip2 compressed"); + Assert.That(zis.CanDecompressEntry, Is.False, "Should not be able to decompress BZip2 entry"); - var buffer = new byte[1]; - Assert.Throws(() => zis.Read(buffer, 0, 1), "Trying to read the stream should throw"); - } + var buffer = new byte[1]; + Assert.Throws(() => zis.Read(buffer, 0, 1), "Trying to read the stream should throw"); } /// @@ -510,49 +509,80 @@ public void ShouldBeAbleToReadEntriesWithInvalidFileNames() [Category("Zip")] public void AddingAnAESEntryWithNoPasswordShouldThrow() { - using (var memoryStream = new MemoryStream()) - { - using (var outStream = new ZipOutputStream(memoryStream)) - { - var newEntry = new ZipEntry("test") { AESKeySize = 256 }; + using var memoryStream = new MemoryStream(); + using var outStream = new ZipOutputStream(memoryStream); + var newEntry = new ZipEntry("test") { AESKeySize = 256 }; - Assert.Throws(() => outStream.PutNextEntry(newEntry)); - } - } + Assert.Throws(() => outStream.PutNextEntry(newEntry)); } [Test] [Category("Zip")] public void ShouldThrowDescriptiveExceptionOnUncompressedDescriptorEntry() { - using (var ms = new MemoryStreamWithoutSeek()) + using var ms = new MemoryStreamWithoutSeek(); + using (var zos = new ZipOutputStream(ms)) + { + zos.IsStreamOwner = false; + var entry = new ZipEntry("testentry"); + entry.CompressionMethod = CompressionMethod.Stored; + entry.Flags |= (int)GeneralBitFlags.Descriptor; + zos.PutNextEntry(entry); + zos.Write(new byte[1], 0, 1); + zos.CloseEntry(); + } + + // Patch the 
Compression Method, since ZipOutputStream automatically changes it to Deflate when descriptors are used + ms.Seek(8, SeekOrigin.Begin); + ms.WriteByte((byte)CompressionMethod.Stored); + ms.Seek(0, SeekOrigin.Begin); + + using (var zis = new ZipInputStream(ms)) { - using (var zos = new ZipOutputStream(ms)) + zis.IsStreamOwner = false; + var buf = new byte[32]; + zis.GetNextEntry(); + + Assert.Throws(typeof(StreamUnsupportedException), () => { - zos.IsStreamOwner = false; - var entry = new ZipEntry("testentry"); - entry.CompressionMethod = CompressionMethod.Stored; - entry.Flags |= (int)GeneralBitFlags.Descriptor; - zos.PutNextEntry(entry); - zos.Write(new byte[1], 0, 1); - zos.CloseEntry(); + zis.Read(buf, 0, buf.Length); + }); + } + } + + [Test] + [Category("Zip")] + public void IteratingOverEntriesInDirectUpdatedArchive([Values(0x0, 0x80)] byte padding) + { + using (var tempFile = new TempFile()) + { + using (var zf = ZipFile.Create(tempFile)) + { + zf.BeginUpdate(); + // Add a "large" file, where the bottom 1023 bytes will become padding + var contentsAndPadding = Enumerable.Repeat(padding, count: 1024).ToArray(); + zf.Add(new MemoryDataSource(contentsAndPadding), "FirstFile", CompressionMethod.Stored); + // Add a second file after the first one + zf.Add(new StringMemoryDataSource("fileContents"), "SecondFile", CompressionMethod.Stored); + zf.CommitUpdate(); } - // Patch the Compression Method, since ZipOutputStream automatically changes it to Deflate when descriptors are used - ms.Seek(8, SeekOrigin.Begin); - ms.WriteByte((byte)CompressionMethod.Stored); - ms.Seek(0, SeekOrigin.Begin); + // Since ZipFile doesn't support UpdateCommand.Modify yet we'll have to simulate it by patching the header + Utils.PatchFirstEntrySize(tempFile.Open(FileMode.Open), 1); - using (var zis = new ZipInputStream(ms)) + // Iterate updated entries + using (var fs = File.OpenRead(tempFile)) + using (var zis = new ZipInputStream(fs)) { - zis.IsStreamOwner = false; - var buf = new byte[32]; - zis.GetNextEntry(); - - Assert.Throws(typeof(StreamUnsupportedException), () => - { - zis.Read(buf, 0, buf.Length); - }); + var firstEntry = zis.GetNextEntry(); + Assert.NotNull(firstEntry); + Assert.AreEqual(1, firstEntry.CompressedSize); + Assert.AreEqual(1, firstEntry.Size); + + var secondEntry = zis.GetNextEntry(); + Assert.NotNull(secondEntry, "Zip entry following padding not found"); + var contents = new StreamReader(zis, Encoding.UTF8, false, 128, true).ReadToEnd(); + Assert.AreEqual("fileContents", contents); } } } diff --git a/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipEncryptionHandling.cs b/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipEncryptionHandling.cs index f3a240d30..0cf7395cb 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipEncryptionHandling.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipEncryptionHandling.cs @@ -5,6 +5,7 @@ using System.Text; using ICSharpCode.SharpZipLib.Tests.TestSupport; using System.Threading.Tasks; +using Does = ICSharpCode.SharpZipLib.Tests.TestSupport.Does; namespace ICSharpCode.SharpZipLib.Tests.Zip { @@ -105,7 +106,7 @@ public void ZipFileAesDecryption() } } - Assert.That(zipFile.TestArchive(false), Is.True, "Encrypted archive should pass validation."); + Assert.That(zipFile, Does.PassTestArchive(testData: false), "Encrypted archive should pass validation."); } } diff --git a/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipFileHandling.cs b/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipFileHandling.cs index a9a7583fc..c25059da4 100644 --- 
a/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipFileHandling.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipFileHandling.cs @@ -6,6 +6,7 @@ using System.IO; using System.Text; using System.Threading.Tasks; +using Does = ICSharpCode.SharpZipLib.Tests.TestSupport.Does; namespace ICSharpCode.SharpZipLib.Tests.Zip { @@ -61,7 +62,7 @@ public void Zip64Entries() } zipFile.CommitUpdate(); - Assert.IsTrue(zipFile.TestArchive(true)); + Assert.That(zipFile, Does.PassTestArchive()); Assert.AreEqual(target, zipFile.Count, "Incorrect number of entries stored"); } } @@ -80,7 +81,7 @@ public void EmbeddedArchive() f.Add(m, "a.dat"); f.Add(m, "b.dat"); f.CommitUpdate(); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); } byte[] rawArchive = memStream.ToArray(); @@ -116,7 +117,7 @@ public void Zip64Useage() f.Add(m, "a.dat"); f.Add(m, "b.dat"); f.CommitUpdate(); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); } byte[] rawArchive = memStream.ToArray(); @@ -211,7 +212,7 @@ public void FakeZip64Locator() f.BeginUpdate(new MemoryArchiveStorage()); f.Add(m, "a.dat", CompressionMethod.Stored); f.CommitUpdate(); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); } memStream.Seek(0, SeekOrigin.Begin); @@ -238,30 +239,30 @@ public void Zip64Offset() [Category("Zip")] public void BasicEncryption() { - const string TestValue = "0001000"; + const string testValue = "0001000"; var memStream = new MemoryStream(); - using (ZipFile f = new ZipFile(memStream)) + using (var zf = new ZipFile(memStream)) { - f.IsStreamOwner = false; - f.Password = "Hello"; + zf.IsStreamOwner = false; + zf.Password = "Hello"; - var m = new StringMemoryDataSource(TestValue); - f.BeginUpdate(new MemoryArchiveStorage()); - f.Add(m, "a.dat"); - f.CommitUpdate(); - Assert.IsTrue(f.TestArchive(true), "Archive test should pass"); + var m = new StringMemoryDataSource(testValue); + zf.BeginUpdate(new MemoryArchiveStorage()); + zf.Add(m, "a.dat"); + zf.CommitUpdate(); + Assert.IsTrue(zf.TestArchive(testData: true), "Archive test should pass"); } - using (ZipFile g = new ZipFile(memStream)) + using (var zf = new ZipFile(memStream)) { - g.Password = "Hello"; - ZipEntry ze = g[0]; + zf.Password = "Hello"; + var ze = zf[0]; Assert.IsTrue(ze.IsCrypted, "Entry should be encrypted"); - using (StreamReader r = new StreamReader(g.GetInputStream(0))) + using (var r = new StreamReader(zf.GetInputStream(entryIndex: 0))) { - string data = r.ReadToEnd(); - Assert.AreEqual(TestValue, data); + var data = r.ReadToEnd(); + Assert.AreEqual(testValue, data); } } } @@ -271,38 +272,38 @@ public void BasicEncryption() [Category("CreatesTempFile")] public void BasicEncryptionToDisk() { - const string TestValue = "0001000"; - string tempFile = GetTempFilePath(); + const string testValue = "0001000"; + var tempFile = GetTempFilePath(); Assert.IsNotNull(tempFile, "No permission to execute this test?"); tempFile = Path.Combine(tempFile, "SharpZipTest.Zip"); - using (ZipFile f = ZipFile.Create(tempFile)) + using (var zf = ZipFile.Create(tempFile)) { - f.Password = "Hello"; + zf.Password = "Hello"; - var m = new StringMemoryDataSource(TestValue); - f.BeginUpdate(); - f.Add(m, "a.dat"); - f.CommitUpdate(); + var m = new StringMemoryDataSource(testValue); + zf.BeginUpdate(); + zf.Add(m, "a.dat"); + zf.CommitUpdate(); } - using (ZipFile f = new ZipFile(tempFile)) + using (var zf = new ZipFile(tempFile)) { - f.Password = "Hello"; - Assert.IsTrue(f.TestArchive(true), "Archive test should pass"); + 
zf.Password = "Hello"; + Assert.IsTrue(zf.TestArchive(testData: true), "Archive test should pass"); } - using (ZipFile g = new ZipFile(tempFile)) + using (var zf = new ZipFile(tempFile)) { - g.Password = "Hello"; - ZipEntry ze = g[0]; + zf.Password = "Hello"; + ZipEntry ze = zf[0]; Assert.IsTrue(ze.IsCrypted, "Entry should be encrypted"); - using (StreamReader r = new StreamReader(g.GetInputStream(0))) + using (var r = new StreamReader(zf.GetInputStream(entryIndex: 0))) { - string data = r.ReadToEnd(); - Assert.AreEqual(TestValue, data); + var data = r.ReadToEnd(); + Assert.AreEqual(testValue, data); } } @@ -313,14 +314,14 @@ public void BasicEncryptionToDisk() [Category("Zip")] public void AddEncryptedEntriesToExistingArchive() { - const string TestValue = "0001000"; + const string testValue = "0001000"; var memStream = new MemoryStream(); using (ZipFile f = new ZipFile(memStream)) { f.IsStreamOwner = false; f.UseZip64 = UseZip64.Off; - var m = new StringMemoryDataSource(TestValue); + var m = new StringMemoryDataSource(testValue); f.BeginUpdate(new MemoryArchiveStorage()); f.Add(m, "a.dat"); f.CommitUpdate(); @@ -335,10 +336,10 @@ public void AddEncryptedEntriesToExistingArchive() using (StreamReader r = new StreamReader(g.GetInputStream(0))) { string data = r.ReadToEnd(); - Assert.AreEqual(TestValue, data); + Assert.AreEqual(testValue, data); } - var n = new StringMemoryDataSource(TestValue); + var n = new StringMemoryDataSource(testValue); g.Password = "Axolotyl"; g.UseZip64 = UseZip64.Off; @@ -353,7 +354,7 @@ public void AddEncryptedEntriesToExistingArchive() using (StreamReader r = new StreamReader(g.GetInputStream(0))) { string data = r.ReadToEnd(); - Assert.AreEqual(TestValue, data); + Assert.AreEqual(testValue, data); } } } @@ -367,7 +368,7 @@ private void TryDeleting(byte[] master, int totalEntries, int additions, params { f.IsStreamOwner = false; Assert.AreEqual(totalEntries, f.Count); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); f.BeginUpdate(new MemoryArchiveStorage()); for (int i = 0; i < additions; ++i) @@ -401,7 +402,7 @@ private void TryDeleting(byte[] master, int totalEntries, int additions, params { f.IsStreamOwner = false; Assert.AreEqual(totalEntries, f.Count); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); f.BeginUpdate(new MemoryArchiveStorage()); for (int i = 0; i < additions; ++i) @@ -445,7 +446,7 @@ public void AddAndDeleteEntriesMemory() f.Add(new StringMemoryDataSource("Mr C"), @"c\c.dat"); f.Add(new StringMemoryDataSource("Mrs D was a star"), @"d\d.dat"); f.CommitUpdate(); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); foreach (ZipEntry entry in f) { Console.WriteLine($" - {entry.Name}"); @@ -506,27 +507,27 @@ public void AddAndDeleteEntries() f.Add(addFile2); f.AddDirectory(addDirectory); f.CommitUpdate(); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); } using (ZipFile f = new ZipFile(tempFile)) { Assert.AreEqual(3, f.Count); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); // Delete file f.BeginUpdate(); f.Delete(f[0]); f.CommitUpdate(); Assert.AreEqual(2, f.Count); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); // Delete directory f.BeginUpdate(); f.Delete(f[1]); f.CommitUpdate(); Assert.AreEqual(1, f.Count); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); } File.Delete(addFile); @@ -632,7 +633,7 @@ public void AddToEmptyArchive() 
f.Add(addFile); f.CommitUpdate(); Assert.AreEqual(1, f.Count); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); } using (ZipFile f = new ZipFile(tempFile)) @@ -642,7 +643,7 @@ public void AddToEmptyArchive() f.Delete(f[0]); f.CommitUpdate(); Assert.AreEqual(0, f.Count); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); f.Close(); } @@ -667,7 +668,7 @@ public void CreateEmptyArchive() { f.BeginUpdate(); f.CommitUpdate(); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); f.Close(); } @@ -684,28 +685,25 @@ public void CreateEmptyArchive() [Category("CreatesTempFile")] public void CreateArchiveWithNoCompression() { + using var sourceFile = Utils.GetDummyFile(); + using var zipFile = Utils.GetDummyFile(0); + var inputContent = File.ReadAllText(sourceFile); + using (var zf = ZipFile.Create(zipFile)) + { + zf.BeginUpdate(); + zf.Add(sourceFile, CompressionMethod.Stored); + zf.CommitUpdate(); + Assert.That(zf, Does.PassTestArchive()); + zf.Close(); + } - using (var sourceFile = Utils.GetDummyFile()) - using (var zipFile = Utils.GetDummyFile(0)) + using (var zf = new ZipFile(zipFile)) { - var inputContent = File.ReadAllText(sourceFile.Filename); - using (ZipFile f = ZipFile.Create(zipFile.Filename)) + Assert.AreEqual(1, zf.Count); + using (var sr = new StreamReader(zf.GetInputStream(zf[0]))) { - f.BeginUpdate(); - f.Add(sourceFile.Filename, CompressionMethod.Stored); - f.CommitUpdate(); - Assert.IsTrue(f.TestArchive(true)); - f.Close(); - } - - using (ZipFile f = new ZipFile(zipFile.Filename)) - { - Assert.AreEqual(1, f.Count); - using (var sr = new StreamReader(f.GetInputStream(f[0]))) - { - var outputContent = sr.ReadToEnd(); - Assert.AreEqual(inputContent, outputContent, "extracted content does not match source content"); - } + var outputContent = sr.ReadToEnd(); + Assert.AreEqual(inputContent, outputContent, "extracted content does not match source content"); } } } @@ -867,7 +865,7 @@ public void ArchiveTesting() using (ZipFile testFile = new ZipFile(ms)) { - Assert.IsTrue(testFile.TestArchive(true), "Unexpected error in archive detected"); + Assert.That(testFile, Does.PassTestArchive(), "Unexpected error in archive detected"); byte[] corrupted = new byte[compressedData.Length]; Array.Copy(compressedData, corrupted, compressedData.Length); @@ -878,7 +876,7 @@ public void ArchiveTesting() using (ZipFile testFile = new ZipFile(ms)) { - Assert.IsFalse(testFile.TestArchive(true), "Error in archive not detected"); + Assert.That(testFile, Does.Not.PassTestArchive(), "Error in archive not detected"); } } @@ -892,7 +890,7 @@ private void TestDirectoryEntry(MemoryStream s) var ms2 = new MemoryStream(s.ToArray()); using (ZipFile zf = new ZipFile(ms2)) { - Assert.IsTrue(zf.TestArchive(true)); + Assert.That(zf, Does.PassTestArchive()); } } @@ -916,7 +914,7 @@ private void TestEncryptedDirectoryEntry(MemoryStream s, int aesKeySize) var ms2 = new MemoryStream(s.ToArray()); using (ZipFile zf = new ZipFile(ms2)) { - Assert.IsTrue(zf.TestArchive(true)); + Assert.That(zf, Does.PassTestArchive()); } } @@ -945,7 +943,7 @@ public void Crypto_AddEncryptedEntryToExistingArchiveSafe() testFile.Add(new StringMemoryDataSource("No3"), "No3", CompressionMethod.Stored); testFile.CommitUpdate(); - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); rawData = ms.ToArray(); } @@ -953,14 +951,14 @@ public void Crypto_AddEncryptedEntryToExistingArchiveSafe() using (ZipFile testFile = new 
ZipFile(ms)) { - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); testFile.BeginUpdate(new MemoryArchiveStorage(FileUpdateMode.Safe)); testFile.Password = "pwd"; testFile.Add(new StringMemoryDataSource("Zapata!"), "encrypttest.xml"); testFile.CommitUpdate(); - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); int entryIndex = testFile.FindEntry("encrypttest.xml", true); Assert.IsNotNull(entryIndex >= 0); @@ -983,12 +981,12 @@ public void Crypto_AddEncryptedEntryToExistingArchiveDirect() testFile.Add(new StringMemoryDataSource("No3"), "No3", CompressionMethod.Stored); testFile.CommitUpdate(); - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); } using (ZipFile testFile = new ZipFile(ms)) { - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); testFile.IsStreamOwner = true; testFile.BeginUpdate(); @@ -996,7 +994,7 @@ public void Crypto_AddEncryptedEntryToExistingArchiveDirect() testFile.Add(new StringMemoryDataSource("Zapata!"), "encrypttest.xml"); testFile.CommitUpdate(); - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); int entryIndex = testFile.FindEntry("encrypttest.xml", true); Assert.IsNotNull(entryIndex >= 0); @@ -1009,45 +1007,40 @@ public void Crypto_AddEncryptedEntryToExistingArchiveDirect() [Category("Unicode")] public void UnicodeNames() { - using (var memStream = new MemoryStream()) + using var memStream = new MemoryStream(); + using (var f = new ZipFile(memStream)) { - using (ZipFile f = new ZipFile(memStream)) - { - f.IsStreamOwner = false; - - f.BeginUpdate(new MemoryArchiveStorage()); - foreach ((string language, string name, _) in StringTesting.GetTestSamples()) - { - f.Add(new StringMemoryDataSource(language), name, - CompressionMethod.Deflated, true); - } - f.CommitUpdate(); + f.IsStreamOwner = false; - Assert.IsTrue(f.TestArchive(true)); - } - memStream.Seek(0, SeekOrigin.Begin); - using (var zf = new ZipFile(memStream)) + f.BeginUpdate(new MemoryArchiveStorage()); + foreach (var (language, name, _) in StringTesting.TestSamples) { - foreach (string name in StringTesting.Filenames) - { - //int index = zf.FindEntry(name, true); - var content = ""; - var index = zf.FindEntry(name, true); - var entry = zf[index]; + f.Add(new StringMemoryDataSource(language), name, + CompressionMethod.Deflated, useUnicodeText: true); + } + f.CommitUpdate(); - using (var entryStream = zf.GetInputStream(entry)) - using (var sr = new StreamReader(entryStream)) - { - content = sr.ReadToEnd(); - } + Assert.That(f, Does.PassTestArchive()); + } + memStream.Seek(0, SeekOrigin.Begin); + using (var zf = new ZipFile(memStream)) + { + foreach (var name in StringTesting.Filenames) + { + string content; + var index = zf.FindEntry(name, ignoreCase: true); + var entry = zf[index]; - //var content = + using (var entryStream = zf.GetInputStream(entry)) + using (var sr = new StreamReader(entryStream)) + { + content = sr.ReadToEnd(); + } - Console.WriteLine($"Entry #{index}: {name}, Content: {content}"); + TestContext.WriteLine($"Entry #{index}: {name}, Content: {content}"); - Assert.IsTrue(index >= 0); - Assert.AreEqual(name, entry.Name); - } + Assert.IsTrue(index >= 0); + Assert.AreEqual(name, entry.Name); } } } @@ -1067,12 +1060,12 @@ public void UpdateCommentOnlyInMemory() testFile.Add(new StringMemoryDataSource("No3"), "No3", CompressionMethod.Stored); testFile.CommitUpdate(); - 
Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); } using (ZipFile testFile = new ZipFile(ms)) { - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); Assert.AreEqual("", testFile.ZipFileComment); testFile.IsStreamOwner = false; @@ -1080,12 +1073,12 @@ public void UpdateCommentOnlyInMemory() testFile.SetComment("Here is my comment"); testFile.CommitUpdate(); - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); } using (ZipFile testFile = new ZipFile(ms)) { - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); Assert.AreEqual("Here is my comment", testFile.ZipFileComment); } } @@ -1112,24 +1105,24 @@ public void UpdateCommentOnlyOnDisk() testFile.Add(new StringMemoryDataSource("No3"), "No3", CompressionMethod.Stored); testFile.CommitUpdate(); - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); } using (ZipFile testFile = new ZipFile(tempFile)) { - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); Assert.AreEqual("", testFile.ZipFileComment); testFile.BeginUpdate(new DiskArchiveStorage(testFile, FileUpdateMode.Direct)); testFile.SetComment("Here is my comment"); testFile.CommitUpdate(); - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); } using (ZipFile testFile = new ZipFile(tempFile)) { - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); Assert.AreEqual("Here is my comment", testFile.ZipFileComment); } File.Delete(tempFile); @@ -1143,24 +1136,24 @@ public void UpdateCommentOnlyOnDisk() testFile.Add(new StringMemoryDataSource("No3"), "No3", CompressionMethod.Stored); testFile.CommitUpdate(); - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); } using (ZipFile testFile = new ZipFile(tempFile)) { - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); Assert.AreEqual("", testFile.ZipFileComment); testFile.BeginUpdate(); testFile.SetComment("Here is my comment"); testFile.CommitUpdate(); - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); } using (ZipFile testFile = new ZipFile(tempFile)) { - Assert.IsTrue(testFile.TestArchive(true)); + Assert.That(testFile, Does.PassTestArchive()); Assert.AreEqual("Here is my comment", testFile.ZipFileComment); } File.Delete(tempFile); @@ -1193,7 +1186,7 @@ public void NameFactory() CompressionMethod.Deflated, true); } f.CommitUpdate(); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); foreach (string name in names) { @@ -1243,7 +1236,7 @@ public void NestedArchive() using (ZipFile nested = new ZipFile(zipFile.GetInputStream(0))) { - Assert.IsTrue(nested.TestArchive(true)); + Assert.That(nested, Does.PassTestArchive()); Assert.AreEqual(1, nested.Count); Stream nestedStream = nested.GetInputStream(0); @@ -1463,57 +1456,28 @@ public void FileStreamNotClosedWhenNotOwner() } /// - /// Check that input stream is closed when construction fails and leaveOpen is false + /// Check that input stream is only closed when construction fails and leaveOpen is false /// [Test] [Category("Zip")] - public void StreamClosedOnError() + public void StreamClosedOnError([Values(true, false)] bool leaveOpen) { var ms = new TrackedMemoryStream(new byte[32]); Assert.IsFalse(ms.IsClosed, "Underlying stream 
should NOT be closed initially"); - bool blewUp = false; - try + Assert.Throws(() => { - using (var zipFile = new ZipFile(ms, false)) - { - Assert.Fail("Exception not thrown"); - } - } - catch - { - blewUp = true; - } + using var zf = new ZipFile(ms, leaveOpen); + }, "Should have failed to load the file"); - Assert.IsTrue(blewUp, "Should have failed to load the file"); - Assert.IsTrue(ms.IsClosed, "Underlying stream should be closed"); - } - - /// - /// Check that input stream is not closed when construction fails and leaveOpen is true - /// - [Test] - [Category("Zip")] - public void StreamNotClosedOnError() - { - var ms = new TrackedMemoryStream(new byte[32]); - - Assert.IsFalse(ms.IsClosed, "Underlying stream should NOT be closed initially"); - bool blewUp = false; - try + if (leaveOpen) { - using (var zipFile = new ZipFile(ms, true)) - { - Assert.Fail("Exception not thrown"); - } + Assert.IsFalse(ms.IsClosed, "Underlying stream should NOT be closed"); } - catch + else { - blewUp = true; + Assert.IsTrue(ms.IsClosed, "Underlying stream should be closed"); } - - Assert.IsTrue(blewUp, "Should have failed to load the file"); - Assert.IsFalse(ms.IsClosed, "Underlying stream should NOT be closed"); } [Test] @@ -1586,17 +1550,15 @@ public void HostSystemPersistedFromZipFile() public void AddingAnAESEncryptedEntryShouldThrow() { var memStream = new MemoryStream(); - using (ZipFile zof = new ZipFile(memStream)) + using var zof = new ZipFile(memStream); + var entry = new ZipEntry("test") { - var entry = new ZipEntry("test") - { - AESKeySize = 256 - }; + AESKeySize = 256, + }; - zof.BeginUpdate(); - var exception = Assert.Throws(() => zof.Add(new StringMemoryDataSource("foo"), entry)); - Assert.That(exception.Message, Is.EqualTo("Creation of AES encrypted entries is not supported")); - } + zof.BeginUpdate(); + var exception = Assert.Throws(() => zof.Add(new StringMemoryDataSource("foo"), entry)); + Assert.That(exception?.Message, Is.EqualTo("Creation of AES encrypted entries is not supported")); } /// @@ -1608,35 +1570,33 @@ public void AddingAnAESEncryptedEntryShouldThrow() public void AddFileWithAlternateName() { // Create a unique name that will be different from the file name - string fileName = Guid.NewGuid().ToString(); + var fileName = Utils.GetDummyFileName(); - using (var sourceFile = Utils.GetDummyFile()) - using (var outputFile = Utils.GetDummyFile(0)) + using var sourceFile = Utils.GetDummyFile(size: 16); + using var outputFile = Utils.GetTempFile(); + var inputContent = File.ReadAllText(sourceFile); + using (var zf = ZipFile.Create(outputFile)) { - var inputContent = File.ReadAllText(sourceFile.Filename); - using (ZipFile f = ZipFile.Create(outputFile.Filename)) - { - f.BeginUpdate(); + zf.BeginUpdate(); - // Add a file with the unique display name - f.Add(sourceFile.Filename, fileName); + // Add a file with the unique display name + zf.Add(sourceFile, fileName); - f.CommitUpdate(); - f.Close(); - } + zf.CommitUpdate(); + zf.Close(); + } - using (ZipFile zipFile = new ZipFile(outputFile.Filename)) - { - Assert.That(zipFile.Count, Is.EqualTo(1)); + using (var zipFile = new ZipFile(outputFile)) + { + Assert.That(zipFile.Count, Is.EqualTo(1)); - var fileEntry = zipFile.GetEntry(fileName); - Assert.That(fileEntry, Is.Not.Null); + var fileEntry = zipFile.GetEntry(fileName); + Assert.That(fileEntry, Is.Not.Null); - using (var sr = new StreamReader(zipFile.GetInputStream(fileEntry))) - { - var outputContent = sr.ReadToEnd(); - Assert.AreEqual(inputContent, outputContent, "extracted content 
does not match source content"); - } + using (var sr = new StreamReader(zipFile.GetInputStream(fileEntry))) + { + var outputContent = sr.ReadToEnd(); + Assert.AreEqual(inputContent, outputContent, "extracted content does not match source content"); } } } @@ -1666,7 +1626,7 @@ public void ZipWithBZip2Compression(bool encryptEntries) var m2 = new StringMemoryDataSource("DeflateCompressed"); f.Add(m2, "b.dat", CompressionMethod.Deflated); f.CommitUpdate(); - Assert.IsTrue(f.TestArchive(true)); + Assert.That(f, Does.PassTestArchive()); } memStream.Seek(0, SeekOrigin.Begin); @@ -1713,7 +1673,7 @@ public void ZipWithBZip2Compression(bool encryptEntries) [Category("Zip")] public void ShouldReadBZip2ZipCreatedBy7Zip() { - const string BZip2CompressedZipCreatedBy7Zip = + const string bZip2CompressedZipCreatedBy7Zip = "UEsDBC4AAAAMAIa50U4/rHf5qwAAAK8AAAAJAAAASGVsbG8udHh0QlpoOTFBWSZTWTL8pwYAA" + "BWfgEhlUAAiLUgQP+feMCAAiCKaeiaBobU9JiaAMGmoak9GmRNqPUDQ9T1PQsz/t9B6YvEdvF" + "5dhwXzGE1ooO41A6TtATBEFxFUq6trGtUcSJDyWWWj/S2VwY15fy3IqHi3hHUS+K76zdoDzQa" + @@ -1721,26 +1681,20 @@ public void ShouldReadBZip2ZipCreatedBy7Zip() "AAwAhrnRTj+sd/mrAAAArwAAAAkAJAAAAAAAAAAgAAAAAAAAAEhlbGxvLnR4dAoAIAAAAAAAA" + "QAYAO97MLZZJdUB73swtlkl1QEK0UTFWCXVAVBLBQYAAAAAAQABAFsAAADSAAAAAAA="; - const string OriginalText = + const string originalText = "SharpZipLib (#ziplib, formerly NZipLib) is a compression library that supports Zip files using both stored and deflate compression methods, PKZIP 2.0 style and AES encryption."; - var fileBytes = System.Convert.FromBase64String(BZip2CompressedZipCreatedBy7Zip); + var fileBytes = Convert.FromBase64String(bZip2CompressedZipCreatedBy7Zip); - using (var input = new MemoryStream(fileBytes, false)) - { - using (ZipFile f = new ZipFile(input)) - { - var entry = f.GetEntry("Hello.txt"); - Assert.That(entry.CompressionMethod, Is.EqualTo(CompressionMethod.BZip2), "Compression method should be BZip2"); - Assert.That(entry.Version, Is.EqualTo(ZipConstants.VersionBZip2), "Entry version should be 46"); + using var input = new MemoryStream(fileBytes, writable: false); + using var zf = new ZipFile(input); + var entry = zf.GetEntry("Hello.txt"); + Assert.That(entry.CompressionMethod, Is.EqualTo(CompressionMethod.BZip2), "Compression method should be BZip2"); + Assert.That(entry.Version, Is.EqualTo(ZipConstants.VersionBZip2), "Entry version should be 46"); - using (var reader = new StreamReader(f.GetInputStream(entry))) - { - string contents = reader.ReadToEnd(); - Assert.That(contents, Is.EqualTo(OriginalText), "extract string must match original string"); - } - } - } + using var reader = new StreamReader(zf.GetInputStream(entry)); + var contents = reader.ReadToEnd(); + Assert.That(contents, Is.EqualTo(originalText), "extract string must match original string"); } /// @@ -1750,7 +1704,7 @@ public void ShouldReadBZip2ZipCreatedBy7Zip() [Category("Zip")] public void ShouldReadAESBZip2ZipCreatedBy7Zip() { - const string BZip2CompressedZipCreatedBy7Zip = + const string bZip2CompressedZipCreatedBy7Zip = "UEsDBDMAAQBjAIa50U4AAAAAxwAAAK8AAAAJAAsASGVsbG8udHh0AZkHAAIAQUUDDAAYg6jqf" + "kvZClVMOtgmqKT0/8I9fMPgo96myxw9hLQUhKj1Qczi3fT7QIhAnAKU+u03nA8rCKGWmDI5Qz" + "qPREy95boQVDPwmwEsWksv3GAWzMfzZUhmB/TgIJlA34a4yP0f2ucy3/QCQYo8QcHjBtjWX5b" + @@ -1759,30 +1713,24 @@ public void ShouldReadAESBZip2ZipCreatedBy7Zip() "wAAAAkALwAAAAAAAAAgAAAAAAAAAEhlbGxvLnR4dAoAIAAAAAAAAQAYAO97MLZZJdUBYdnjul" + "kl1QEK0UTFWCXVAQGZBwACAEFFAwwAUEsFBgAAAAABAAEAZgAAAPkAAAAAAA=="; - const string OriginalText = + const string originalText 
= "SharpZipLib (#ziplib, formerly NZipLib) is a compression library that supports Zip files using both stored and deflate compression methods, PKZIP 2.0 style and AES encryption."; - var fileBytes = System.Convert.FromBase64String(BZip2CompressedZipCreatedBy7Zip); + var fileBytes = Convert.FromBase64String(bZip2CompressedZipCreatedBy7Zip); - using (var input = new MemoryStream(fileBytes, false)) - { - using (ZipFile f = new ZipFile(input)) - { - f.Password = "password"; + using var input = new MemoryStream(fileBytes, writable: false); + using var zf = new ZipFile(input); + zf.Password = "password"; - var entry = f.GetEntry("Hello.txt"); - Assert.That(entry.CompressionMethod, Is.EqualTo(CompressionMethod.BZip2), "Compression method should be BZip2"); - Assert.That(entry.Version, Is.EqualTo(ZipConstants.VERSION_AES), "Entry version should be 51"); - Assert.That(entry.IsCrypted, Is.True, "Entry should be encrypted"); - Assert.That(entry.AESKeySize, Is.EqualTo(256), "AES Keysize should be 256"); + var entry = zf.GetEntry("Hello.txt"); + Assert.That(entry.CompressionMethod, Is.EqualTo(CompressionMethod.BZip2), "Compression method should be BZip2"); + Assert.That(entry.Version, Is.EqualTo(ZipConstants.VERSION_AES), "Entry version should be 51"); + Assert.That(entry.IsCrypted, Is.True, "Entry should be encrypted"); + Assert.That(entry.AESKeySize, Is.EqualTo(256), "AES Keysize should be 256"); - using (var reader = new StreamReader(f.GetInputStream(entry))) - { - string contents = reader.ReadToEnd(); - Assert.That(contents, Is.EqualTo(OriginalText), "extract string must match original string"); - } - } - } + using var reader = new StreamReader(zf.GetInputStream(entry)); + var contents = reader.ReadToEnd(); + Assert.That(contents, Is.EqualTo(originalText), "extract string must match original string"); } /// @@ -1807,7 +1755,7 @@ public void TestDescriptorUpdateOnDelete(UseZip64 useZip64) } var zipData = msw.ToArray(); - Assert.IsTrue(ZipTesting.TestArchive(zipData)); + Assert.That(zipData, Does.PassTestArchive()); using (var memoryStream = new MemoryStream(zipData)) { @@ -1822,7 +1770,7 @@ public void TestDescriptorUpdateOnDelete(UseZip64 useZip64) using (var zipFile = new ZipFile(memoryStream, leaveOpen: true)) { - Assert.That(zipFile.TestArchive(true), Is.True); + Assert.That(zipFile, Does.PassTestArchive()); } } } @@ -1846,7 +1794,7 @@ public void TestDescriptorUpdateOnAdd(UseZip64 useZip64) } var zipData = msw.ToArray(); - Assert.IsTrue(ZipTesting.TestArchive(zipData)); + Assert.That(zipData, Does.PassTestArchive()); using (var memoryStream = new MemoryStream()) { @@ -1863,9 +1811,31 @@ public void TestDescriptorUpdateOnAdd(UseZip64 useZip64) using (var zipFile = new ZipFile(memoryStream, leaveOpen: true)) { - Assert.That(zipFile.TestArchive(true), Is.True); + Assert.That(zipFile, Does.PassTestArchive()); } } } + + /// + /// Check that Zip files can be created with an empty file name + /// + [Test] + [Category("Zip")] + public void HandlesEmptyFileName() + { + using var ms = new MemoryStream(); + using (var zos = new ZipOutputStream(ms){IsStreamOwner = false}) + { + zos.PutNextEntry(new ZipEntry(String.Empty)); + Utils.WriteDummyData(zos, 64); + } + ms.Seek(0, SeekOrigin.Begin); + using (var zis = new ZipInputStream(ms){IsStreamOwner = false}) + { + var entry = zis.GetNextEntry(); + Assert.That(entry.Name, Is.Empty); + Assert.That(zis.ReadBytes(64).Length, Is.EqualTo(64)); + } + } } } diff --git a/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipStreamAsyncTests.cs 
b/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipStreamAsyncTests.cs new file mode 100644 index 000000000..d228e5ee4 --- /dev/null +++ b/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipStreamAsyncTests.cs @@ -0,0 +1,154 @@ +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using ICSharpCode.SharpZipLib.Zip; +using ICSharpCode.SharpZipLib.Tests.TestSupport; +using NUnit.Framework; + +namespace ICSharpCode.SharpZipLib.Tests.Zip +{ + [TestFixture] + public class ZipStreamAsyncTests + { + [Test] + [Category("Zip")] + [Category("Async")] + public async Task WriteZipStreamUsingAsync() + { +#if NETSTANDARD2_1 || NETCOREAPP3_0_OR_GREATER + await using var ms = new MemoryStream(); + + await using (var outStream = new ZipOutputStream(ms){IsStreamOwner = false}) + { + await outStream.PutNextEntryAsync(new ZipEntry("FirstFile")); + await Utils.WriteDummyDataAsync(outStream, 12); + + await outStream.PutNextEntryAsync(new ZipEntry("SecondFile")); + await Utils.WriteDummyDataAsync(outStream, 12); + } + + ZipTesting.AssertValidZip(ms); +#else + await Task.CompletedTask; + Assert.Ignore("Async Using is not supported"); +#endif + } + + [Test] + [Category("Zip")] + [Category("Async")] + public async Task WriteZipStreamAsync () + { + using var ms = new MemoryStream(); + + using(var outStream = new ZipOutputStream(ms) { IsStreamOwner = false }) + { + await outStream.PutNextEntryAsync(new ZipEntry("FirstFile")); + await Utils.WriteDummyDataAsync(outStream, 12); + + await outStream.PutNextEntryAsync(new ZipEntry("SecondFile")); + await Utils.WriteDummyDataAsync(outStream, 12); + + await outStream.FinishAsync(CancellationToken.None); + } + + ZipTesting.AssertValidZip(ms); + } + + + [Test] + [Category("Zip")] + [Category("Async")] + public async Task WriteZipStreamWithAesAsync() + { + using var ms = new MemoryStream(); + var password = "f4ls3p0s1t1v3"; + + using (var outStream = new ZipOutputStream(ms){IsStreamOwner = false, Password = password}) + { + await outStream.PutNextEntryAsync(new ZipEntry("FirstFile"){AESKeySize = 256}); + await Utils.WriteDummyDataAsync(outStream, 12); + + await outStream.PutNextEntryAsync(new ZipEntry("SecondFile"){AESKeySize = 256}); + await Utils.WriteDummyDataAsync(outStream, 12); + + await outStream.FinishAsync(CancellationToken.None); + } + + ZipTesting.AssertValidZip(ms, password); + } + + [Test] + [Category("Zip")] + [Category("Async")] + public async Task WriteZipStreamWithZipCryptoAsync() + { + using var ms = new MemoryStream(); + var password = "f4ls3p0s1t1v3"; + + using (var outStream = new ZipOutputStream(ms){IsStreamOwner = false, Password = password}) + { + await outStream.PutNextEntryAsync(new ZipEntry("FirstFile"){AESKeySize = 0}); + await Utils.WriteDummyDataAsync(outStream, 12); + + await outStream.PutNextEntryAsync(new ZipEntry("SecondFile"){AESKeySize = 0}); + await Utils.WriteDummyDataAsync(outStream, 12); + + await outStream.FinishAsync(CancellationToken.None); + } + + ZipTesting.AssertValidZip(ms, password, false); + } + + [Test] + [Category("Zip")] + [Category("Async")] + public async Task WriteReadOnlyZipStreamAsync () + { + using var ms = new MemoryStreamWithoutSeek(); + + using(var outStream = new ZipOutputStream(ms) { IsStreamOwner = false }) + { + await outStream.PutNextEntryAsync(new ZipEntry("FirstFile")); + await Utils.WriteDummyDataAsync(outStream, 12); + + await outStream.PutNextEntryAsync(new ZipEntry("SecondFile")); + await Utils.WriteDummyDataAsync(outStream, 12); + + await outStream.FinishAsync(CancellationToken.None); + } + + 
ZipTesting.AssertValidZip(new MemoryStream(ms.ToArray())); + } + + [Test] + [Category("Zip")] + [Category("Async")] + [TestCase(12, Description = "Small files")] + [TestCase(12000, Description = "Large files")] + public async Task WriteZipStreamToAsyncOnlyStream (int fileSize) + { +#if NETSTANDARD2_1 || NETCOREAPP3_0_OR_GREATER + await using(var ms = new MemoryStreamWithoutSync()){ + await using(var outStream = new ZipOutputStream(ms) { IsStreamOwner = false }) + { + await outStream.PutNextEntryAsync(new ZipEntry("FirstFile")); + await Utils.WriteDummyDataAsync(outStream, fileSize); + + await outStream.PutNextEntryAsync(new ZipEntry("SecondFile")); + await Utils.WriteDummyDataAsync(outStream, fileSize); + + await outStream.FinishAsync(CancellationToken.None); + await outStream.DisposeAsync(); + } + + ZipTesting.AssertValidZip(new MemoryStream(ms.ToArray())); + } +#else + await Task.CompletedTask; + Assert.Ignore("AsyncDispose is not supported"); +#endif + } + + } +} \ No newline at end of file diff --git a/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipStringsTests.cs b/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipStringsTests.cs new file mode 100644 index 000000000..cd213df6e --- /dev/null +++ b/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipStringsTests.cs @@ -0,0 +1,239 @@ +using ICSharpCode.SharpZipLib.Tests.TestSupport; +using ICSharpCode.SharpZipLib.Tests.Zip; +using ICSharpCode.SharpZipLib.Zip; +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using Does = ICSharpCode.SharpZipLib.Tests.TestSupport.Does; + +// As there is no way to order the test namespace execution order we use a name that should be alphabetically sorted before any other namespace +// This is because we have one test that only works when no encoding provider has been loaded which is not reversable once done. 
+namespace ICSharpCode.SharpZipLib.Tests._Zip +{ + [TestFixture] + [Order(1)] + public class ZipStringsTests + { + [Test] + [Order(1)] + // NOTE: This test needs to be run before any test registering CodePagesEncodingProvider.Instance + public void TestSystemDefaultEncoding() + { + Console.WriteLine($"Default encoding before registering provider: {Encoding.GetEncoding(0).EncodingName}"); + Encoding.RegisterProvider(new TestEncodingProvider()); + Console.WriteLine($"Default encoding after registering provider: {Encoding.GetEncoding(0).EncodingName}"); + + // Initialize a default StringCodec + var sc = StringCodec.Default; + + var legacyEncoding = sc.ZipEncoding(false); + Assert.That(legacyEncoding.EncodingName, Is.EqualTo(TestEncodingProvider.DefaultEncodingName)); + Assert.That(legacyEncoding.CodePage, Is.EqualTo(TestEncodingProvider.DefaultEncodingCodePage)); + } + + [Test] + [Order(2)] + public void TestFastZipRoundTripWithCodePage() + { + Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); + + using var ms = new MemoryStream(); + using var zipFile = new TempFile(); + using var srcDir = new TempDir(); + using var dstDir = new TempDir(); + + srcDir.CreateDummyFile("file1"); + srcDir.CreateDummyFile("слово"); + + foreach(var f in Directory.EnumerateFiles(srcDir.FullName)) + { + Console.WriteLine(f); + } + + var fzCreate = new FastZip() { StringCodec = StringCodec.FromCodePage(866), UseUnicode = false }; + fzCreate.CreateZip(zipFile, srcDir.FullName, true, null); + + var fzExtract = new FastZip() { StringCodec = StringCodec.FromCodePage(866) }; + fzExtract.ExtractZip(zipFile, dstDir.FullName, null); + + foreach (var f in Directory.EnumerateFiles(dstDir.FullName)) + { + Console.WriteLine(f); + } + + Assert.That(dstDir.GetFile("file1").FullName, Does.Exist); + Assert.That(dstDir.GetFile("слово").FullName, Does.Exist); + } + + + [Test] + [Order(2)] + public void TestZipFileRoundTripWithCodePage() + { + Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); + + using var ms = new MemoryStream(); + using (var zf = ZipFile.Create(ms)) + { + zf.StringCodec = StringCodec.FromCodePage(866); + zf.BeginUpdate(); + zf.Add(MemoryDataSource.Empty, "file1", CompressionMethod.Stored, useUnicodeText: false); + zf.Add(MemoryDataSource.Empty, "слово", CompressionMethod.Stored, useUnicodeText: false); + zf.CommitUpdate(); + } + + ms.Seek(0, SeekOrigin.Begin); + + using (var zf = new ZipFile(ms, false, StringCodec.FromCodePage(866)) { IsStreamOwner = false }) + { + Assert.That(zf.GetEntry("file1"), Is.Not.Null); + Assert.That(zf.GetEntry("слово"), Is.Not.Null); + } + + } + + [Test] + [Order(2)] + public void TestZipStreamRoundTripWithCodePage() + { + Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); + + using var ms = new MemoryStream(); + using (var zos = new ZipOutputStream(ms, StringCodec.FromCodePage(866)) { IsStreamOwner = false }) + { + zos.PutNextEntry(new ZipEntry("file1") { IsUnicodeText = false }); + zos.PutNextEntry(new ZipEntry("слово") { IsUnicodeText = false }); + } + + ms.Seek(0, SeekOrigin.Begin); + + using (var zis = new ZipInputStream(ms, StringCodec.FromCodePage(866)) { IsStreamOwner = false }) + { + Assert.That(zis.GetNextEntry().Name, Is.EqualTo("file1")); + Assert.That(zis.GetNextEntry().Name, Is.EqualTo("слово")); + } + + } + + [Test] + [Order(2)] + public void TestZipCryptoPasswordEncodingRoundtrip() + { + Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); + + var content = Utils.GetDummyBytes(32); + + using var ms = new MemoryStream(); + using 
(var zos = new ZipOutputStream(ms, StringCodec.FromCodePage(866)) { IsStreamOwner = false }) + { + zos.Password = "слово"; + zos.PutNextEntry(new ZipEntry("file1")); + zos.Write(content, 0, content.Length); + } + + ms.Seek(0, SeekOrigin.Begin); + + using (var zis = new ZipInputStream(ms, StringCodec.FromCodePage(866)) { IsStreamOwner = false }) + { + zis.Password = "слово"; + var entry = zis.GetNextEntry(); + var output = new byte[32]; + Assert.That(zis.Read(output, 0, 32), Is.EqualTo(32)); + Assert.That(output, Is.EqualTo(content)); + } + + } + + [Test] + [Order(2)] + public void TestZipStreamCommentEncodingRoundtrip() + { + Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); + + var content = Utils.GetDummyBytes(32); + + using var ms = new MemoryStream(); + using (var zos = new ZipOutputStream(ms, StringCodec.FromCodePage(866)) { IsStreamOwner = false }) + { + zos.SetComment("слово"); + } + + ms.Seek(0, SeekOrigin.Begin); + + using var zf = new ZipFile(ms, false, StringCodec.FromCodePage(866)); + Assert.That(zf.ZipFileComment, Is.EqualTo("слово")); + } + + + [Test] + [Order(2)] + public void TestZipFileCommentEncodingRoundtrip() + { + Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); + + var content = Utils.GetDummyBytes(32); + + using var ms = new MemoryStream(); + using (var zf = ZipFile.Create(ms)) + { + zf.StringCodec = StringCodec.FromCodePage(866); + zf.BeginUpdate(); + zf.SetComment("слово"); + zf.CommitUpdate(); + } + + ms.Seek(0, SeekOrigin.Begin); + + using (var zf = new ZipFile(ms, false, StringCodec.FromCodePage(866))) + { + Assert.That(zf.ZipFileComment, Is.EqualTo("слово")); + } + } + } + + + internal class TestEncodingProvider : EncodingProvider + { + internal static string DefaultEncodingName = "TestDefaultEncoding"; + internal static int DefaultEncodingCodePage = -37; + + class TestDefaultEncoding : Encoding + { + public override string EncodingName => DefaultEncodingName; + public override int CodePage => DefaultEncodingCodePage; + + public override int GetByteCount(char[] chars, int index, int count) + => UTF8.GetByteCount(chars, index, count); + + public override int GetBytes(char[] chars, int charIndex, int charCount, byte[] bytes, int byteIndex) + => UTF8.GetBytes(chars, charIndex, charCount, bytes, byteIndex); + + public override int GetCharCount(byte[] bytes, int index, int count) + => UTF8.GetCharCount(bytes, index, count); + + public override int GetChars(byte[] bytes, int byteIndex, int byteCount, char[] chars, int charIndex) + => UTF8.GetChars(bytes, byteIndex, byteCount, chars, charIndex); + + public override int GetMaxByteCount(int charCount) => UTF8.GetMaxByteCount(charCount); + + public override int GetMaxCharCount(int byteCount) => UTF8.GetMaxCharCount(byteCount); + } + + TestDefaultEncoding testDefaultEncoding = new TestDefaultEncoding(); + + public override Encoding GetEncoding(int codepage) + => (codepage == 0 || codepage == DefaultEncodingCodePage) ? testDefaultEncoding : null; + + public override Encoding GetEncoding(string name) + => DefaultEncodingName == name ? 
testDefaultEncoding : null; + +#if NET6_0_OR_GREATER + public override IEnumerable<EncodingInfo> GetEncodings() + { + yield return new EncodingInfo(this, DefaultEncodingCodePage, DefaultEncodingName, DefaultEncodingName); + } +#endif + } +} diff --git a/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipTests.cs b/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipTests.cs index 4a0c9954f..885c976f3 100644 --- a/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipTests.cs +++ b/test/ICSharpCode.SharpZipLib.Tests/Zip/ZipTests.cs @@ -25,8 +25,7 @@ public RuntimeInfo(CompressionMethod method, int compressionLevel, original = new byte[Size]; if (random) { - var rnd = new Random(); - rnd.NextBytes(original); + original = Utils.GetDummyBytes(Size); } else { @@ -122,6 +121,8 @@ public MemoryDataSource(byte[] data) data_ = data; } + public static MemoryDataSource Empty => new MemoryDataSource(Array.Empty<byte>()); + #endregion Constructors #region IDataSource Members @@ -251,9 +252,7 @@ protected byte[] MakeInMemoryZip(ref byte[] original, CompressionMethod method, if (size > 0) { - var rnd = new Random(); - original = new byte[size]; - rnd.NextBytes(original); + original = Utils.GetDummyBytes(size); // Although this could be written in one chunk doing it in lumps // throws up buffering problems including with encryption the original