diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..6e51cfa --- /dev/null +++ b/.editorconfig @@ -0,0 +1,17 @@ +root = true + +[*] +charset = utf-8 +end_of_line = crlf +indent_style = tab +insert_final_newline = true +trim_trailing_whitespace = true + +[*.{csproj,config}] +ij_xml_space_inside_empty_tag = true +indent_style = space +indent_size = 2 + +[*.{slnx,yml}] +indent_style = space +indent_size = 2 diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml new file mode 100644 index 0000000..25af122 --- /dev/null +++ b/.github/workflows/build-and-test.yml @@ -0,0 +1,79 @@ +name: Build and test + +on: + push: + pull_request: + paths: + - '**.cs' + - '**.csproj' + workflow_call: + +jobs: + build-and-test: + name: Build ${{ matrix.name }} + runs-on: ${{ matrix.os }} + timeout-minutes: 5 + + permissions: + contents: read + attestations: read + + strategy: + fail-fast: true + matrix: + include: + - { os: ubuntu-24.04, name: linux-x64, ext: so, arch: 'x64' } + - { os: ubuntu-24.04-arm, name: linux-arm64, ext: so, arch: 'arm64' } + - { os: windows-2025, name: win-x64, ext: dll, arch: 'x64' } + - { os: windows-2025, name: win-x86, ext: dll, arch: 'x86' } + - { os: windows-11-arm, name: win-arm64, ext: dll, arch: 'arm64' } + - { os: macos-15, name: osx-arm64, ext: dylib, arch: 'arm64' } + - { os: macos-15-intel, name: osx-x64, ext: dylib, arch: 'x64' } + + steps: + - name: Checkout + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup .NET SDK + if: ${{ matrix.name != 'win-x86' }} + uses: actions/setup-dotnet@baa11fbfe1d6520db94683bd5c7a3818018e4309 # v5.1.0 + with: + dotnet-version: '10.0.x' + + - name: Install dotnet x86 + if: ${{ matrix.name == 'win-x86' }} + run: | + iwr -Uri https://dot.net/v1/dotnet-install.ps1 -OutFile dotnet-install.ps1 -UseBasicParsing + ./dotnet-install.ps1 -Channel 10.0 -Architecture x86 -InstallDir "C:/Program Files (x86)/dotnet" + + - name: Show 
info + run: dotnet --info + + - name: Install dependencies + run: | + dotnet restore ./ZstdNet/ZstdNet.csproj --runtime ${{ matrix.name }} + dotnet restore ./ZstdNet.Tests/ZstdNet.Tests.csproj --runtime ${{ matrix.name }} + + - name: Download native libs + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + run-id: 22015842187 # https://github.com/skbkontur/ZstdNet/actions/runs/22015842187 + merge-multiple: true + name: ${{ matrix.name }} + path: ./ZstdNet/ + + - name: Check attestation + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_FORCE_TTY: '100%' + run: | + gh attestation verify ./ZstdNet/runtimes/${{ matrix.name }}/native/libzstd.${{ matrix.ext }} --repo skbkontur/ZstdNet + + - name: Build + run: | + dotnet build ./ZstdNet/ZstdNet.csproj --no-restore --runtime ${{ matrix.name }} + dotnet build ./ZstdNet.Tests/ZstdNet.Tests.csproj --no-restore --runtime ${{ matrix.name }} + + - name: Test + run: dotnet test ./ZstdNet.Tests/ZstdNet.Tests.csproj --no-build --runtime ${{ matrix.name }} diff --git a/.github/workflows/build-native.yml b/.github/workflows/build-native.yml new file mode 100644 index 0000000..7e98224 --- /dev/null +++ b/.github/workflows/build-native.yml @@ -0,0 +1,74 @@ +name: Build Native + +on: + workflow_dispatch: + +jobs: + build: + name: Build ${{ matrix.name }} + runs-on: ${{ matrix.os }} + + permissions: + id-token: write + contents: read + attestations: write + + strategy: + fail-fast: true + matrix: + include: + - { os: ubuntu-24.04, name: linux-x64, ext: so, args: '' } + - { os: ubuntu-24.04, name: linux-arm64, ext: so, args: '-DCMAKE_SYSTEM_PROCESSOR=aarch64 -DCMAKE_C_COMPILER=aarch64-linux-gnu-gcc' } + - { os: windows-2025, name: win-x64, ext: dll, args: '-DZSTD_USE_STATIC_RUNTIME=ON' } + - { os: windows-2025, name: win-x86, ext: dll, args: '-DZSTD_USE_STATIC_RUNTIME=ON -A Win32' } + - { os: windows-2025, name: win-arm64, ext: dll, args: 
'-DZSTD_USE_STATIC_RUNTIME=ON -A ARM64' } + - { os: macos-15, name: osx-arm64, ext: dylib, args: '' } + - { os: macos-15-intel, name: osx-x64, ext: dylib, args: '' } + + steps: + - name: Checkout zstd sources + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + repository: facebook/zstd + ref: f8745da6ff1ad1e7bab384bd1f9d742439278e99 # v1.5.7 + + - name: Install GCC ARM64 + if: ${{ matrix.name == 'linux-arm64' }} + run: sudo apt-get update && sudo apt-get install gcc-aarch64-linux-gnu -y + + - name: Configure CMake + run: > + cmake -S build/cmake -B output + ${{ matrix.args }} + -DCMAKE_BUILD_TYPE=Release + -DCMAKE_COMPILE_WARNING_AS_ERROR=ON + -DBUILD_SHARED_LIBS=ON + -DZSTD_BUILD_TESTS=OFF + -DZSTD_BUILD_SHARED=ON + -DZSTD_BUILD_STATIC=OFF + -DZSTD_BUILD_PROGRAMS=OFF + -DZSTD_LIB_DEPRECATED=OFF + -DZSTD_LEGACY_SUPPORT=0 + + - name: Build using CMake + run: cmake --build output --config Release + + - name: Rename shared lib file (Windows) + if: ${{ startsWith(runner.os, 'win') }} + run: mv output/lib/Release/zstd.dll output/lib/libzstd.dll + + - name: Move shared lib to runtimes + run: | + mkdir -p artifacts/runtimes/${{ matrix.name }}/native/ + cp output/lib/libzstd.${{ matrix.ext }} artifacts/runtimes/${{ matrix.name }}/native/ + + - name: Provenance attestation + uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3.2.0 + with: + subject-path: artifacts/runtimes/*/native/libzstd.* + + - name: Upload artifacts + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: ${{ matrix.name }} + path: artifacts/ diff --git a/.github/workflows/pack-and-publish.yml b/.github/workflows/pack-and-publish.yml new file mode 100644 index 0000000..4b6cbcb --- /dev/null +++ b/.github/workflows/pack-and-publish.yml @@ -0,0 +1,63 @@ +name: Pack and publish + +on: + workflow_dispatch: + +jobs: + pack-and-publish: + runs-on: ubuntu-24.04 + environment: + name: production + 
timeout-minutes: 5 + + permissions: + id-token: write + contents: read + attestations: write + + steps: + - name: Checkout + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup .NET SDK + uses: actions/setup-dotnet@baa11fbfe1d6520db94683bd5c7a3818018e4309 # v5.1.0 + with: + dotnet-version: '10.0.103' + + - name: Specify .NET version + run: dotnet new globaljson --sdk-version 10.0.103 --roll-forward disable + + - name: Show .NET info + run: dotnet --info + + - name: Download native libs + uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + run-id: 22015842187 # https://github.com/skbkontur/ZstdNet/actions/runs/22015842187 + merge-multiple: true + path: ./ZstdNet/ + + - name: Check native libs attestation + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GH_FORCE_TTY: '100%' + run: | + find ./ZstdNet/runtimes -path "*/native/libzstd.*" -type f -print -exec gh attestation verify {} --repo skbkontur/ZstdNet ';' + + - name: Pack + run: dotnet pack --configuration Release --output ./ + + - name: Attest NuGet package provenance + uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3.2.0 + with: + subject-path: './ZstdNet.*.nupkg' + + - name: Upload artifacts + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0 + with: + name: package + path: './ZstdNet.*.nupkg' + + - name: Publish Nuget package + run: dotnet nuget push ZstdNet.*.nupkg --api-key "${{ secrets.NUGET_API_KEY }}" --source https://api.nuget.org/v3/index.json --skip-duplicate diff --git a/LICENSE b/LICENSE index bc5e1ba..5535b32 100644 --- a/LICENSE +++ b/LICENSE @@ -2,7 +2,7 @@ BSD License For ZstdNet software -Copyright (c) 2016-present, SKB Kontur. All rights reserved. +Copyright (c) 2016-2026, SKB Kontur. All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: diff --git a/README.md b/README.md index ee72bdd..98a1303 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,35 @@ ZstdNet ======= +[![Build and test](https://github.com/skbkontur/ZstdNet/actions/workflows/build-and-test.yml/badge.svg)](https://github.com/skbkontur/ZstdNet/actions/workflows/build-and-test.yml) [![NuGet](https://img.shields.io/nuget/v/ZstdNet.svg)](https://www.nuget.org/packages/ZstdNet/) +[![NuGet Downloads](https://img.shields.io/nuget/dt/ZstdNet.svg)](https://www.nuget.org/packages/ZstdNet) -**ZstdNet** is a wrapper of **Zstd** native library for .NET languages. It has the following features: +**ZstdNet** is a wrapper of **Zstd** native library for .NET languages targeting `netstandard2.{0|1}`. +ZstdNet NuGet package includes pre-built native shared libraries for [various platforms](.github/workflows/build-native.yml), including `win`, `linux`, `osx`. -* Compression and decompression of byte arrays +The package relies on the [dotnet runtime identifier resolution mechanism](https://learn.microsoft.com/en-us/dotnet/core/rid-catalog). +For .NET Framework, the package provides a [targets](ZstdNet/build/ZstdNet.targets) fallback, which requires an explicit selection of the platform — `x86`, `x64` or `ARM64`. + +If you need to resolve a native dependency at runtime, you can use the `NativeLibrary.SetDllImportResolver` (see an [example](ZstdNet.Tests/NativeResolver.cs)). +And for .NET Framework — `SetDllDirectory` from `kernel32.dll`. + +Provenance attestation is enabled for all artifacts in this repository including native libs, see page [Attestations](https://github.com/skbkontur/ZstdNet/attestations). 
+ +### Features + +* Compression and decompression of byte arrays and `Span` * Streaming compression and decompression -* Generation of Dictionaries from a collection of samples +* Advanced parameters support +* Generation of dictionaries Take a look on a library reference or unit tests to explore its behavior in different situations. Zstd ---- -**Zstd**, short for Zstandard, is a fast lossless compression algorithm, which -provides both good compression ratio _and_ speed for your standard compression -needs. "Standard" translates into everyday situations which neither look for -highest possible ratio (which LZMA and ZPAQ cover) nor extreme speeds (which -LZ4 covers). Zstandard is licensed under [BSD 3-Clause License](ZstdNet/build/LICENSE). +**Zstandard**, or **zstd** as short version, is a fast lossless compression algorithm, +targeting real-time compression scenarios at zlib-level and better compression ratios. **Zstd** is initially developed by Yann Collet and the source is available at: https://github.com/facebook/zstd @@ -33,19 +44,13 @@ http://fastcompression.blogspot.ru/2016/02/compressing-small-data.html Reference --------- -### Requirements - -*ZstdNet* requires *libzstd* >= v1.4.0. Both 32-bit and 64-bit versions are supported. -The corresponding DLLs are included in this repository cross-compiled using -`(i686|x86_64)-w64-mingw32-gcc -DZSTD_MULTITHREAD -DZSTD_LEGACY_SUPPORT=0 -pthread -s`. -Note that `ZSTD_LEGACY_SUPPORT=0` means "do not support legacy formats" to minimize the binary size. - ### Exceptions The wrapper throws `ZstdException` in case of malformed data or an error inside *libzstd*. If the given destination buffer is too small, `ZstdException` with `ZSTD_error_dstSize_tooSmall` error code is thrown away. -Check [zstd_errors.h](https://github.com/facebook/zstd/blob/v1.4.5/lib/common/zstd_errors.h#L52) for more info. + +Check [zstd_errors.h](https://github.com/facebook/zstd/blob/v1.5.7/lib/zstd_errors.h#L60) for more info. 
### Compressor class @@ -148,7 +153,7 @@ performance and memory overhead. - `IReadOnlyDictionary advancedParams` — advanced API provides a way to set specific parameters during compression. For example, it allows you to compress with multiple threads, enable long distance matching mode and more. - Check [zstd.h](https://github.com/facebook/zstd/blob/v1.4.5/lib/zstd.h#L265) for additional info. + Check [zstd.h](https://github.com/facebook/zstd/blob/v1.5.7/lib/zstd.h#L349) for additional info. Specified options will be exposed in read-only fields. @@ -266,7 +271,7 @@ performance and memory overhead. Default is `null` (no dictionary). - `IReadOnlyDictionary advancedParams` — advanced decompression API that allows you to set parameters like maximum memory usage. - Check [zstd.h](https://github.com/facebook/zstd/blob/v1.4.5/lib/zstd.h#L513) for additional info. + Check [zstd.h](https://github.com/facebook/zstd/blob/v1.5.7/lib/zstd.h#L640) for additional info. Specified options will be exposed in read-only fields. @@ -295,6 +300,6 @@ performance and memory overhead. Wrapper Authors --------------- -Copyright (c) 2016-present [SKB Kontur](https://kontur.ru/eng/about) +Copyright (c) 2016-2026 [SKB Kontur](https://kontur.ru/eng/about) *ZstdNet* is distributed under [BSD 3-Clause License](LICENSE). 
diff --git a/ZstdNet.Benchmarks/CompressionBenchmarks.cs b/ZstdNet.Benchmarks/CompressionBenchmarks.cs index 86db92d..225785e 100644 --- a/ZstdNet.Benchmarks/CompressionBenchmarks.cs +++ b/ZstdNet.Benchmarks/CompressionBenchmarks.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.Runtime.InteropServices; using System.Threading.Tasks; using BenchmarkDotNet.Attributes; @@ -22,6 +23,16 @@ public class CompressionOverheadBenchmarks private readonly Compressor CompressorAdvanced = new Compressor(new CompressionOptions(null, new Dictionary())); private readonly Decompressor Decompressor = new Decompressor(); + // Native dependencies are not added to the deps.json file via ProjectReference + // https://github.com/dotnet/sdk/issues/10575 + static CompressionOverheadBenchmarks() + { + var ext = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? "dll" : RuntimeInformation.IsOSPlatform(OSPlatform.OSX) ? "dylib" : "so"; + NativeLibrary.SetDllImportResolver(typeof(DictBuilder).Assembly, (name, a, b) => name == "libzstd" + ? 
NativeLibrary.Load(Path.Combine(AppContext.BaseDirectory, "runtimes", RuntimeInformation.RuntimeIdentifier, "native", $"libzstd.{ext}")) + : IntPtr.Zero); + } + [GlobalSetup] public void GlobalSetup() { @@ -55,10 +66,7 @@ public void CompressStream(int zstdBufferSize, int copyBufferSize) [Arguments(7, 13)] public async Task CompressStreamAsync(int zstdBufferSize, int copyBufferSize) { -#if !NET48 - await -#endif - using var compressionStream = new CompressionStream(Stream.Null, CompressionOptions.Default, zstdBufferSize); + await using var compressionStream = new CompressionStream(Stream.Null, CompressionOptions.Default, zstdBufferSize); await new MemoryStream(Data).CopyToAsync(compressionStream, copyBufferSize); } @@ -74,10 +82,7 @@ public void DecompressStream(int zstdBufferSize, int copyBufferSize) [Arguments(7, 13)] public async Task DecompressStreamAsync(int zstdBufferSize, int copyBufferSize) { -#if !NET48 - await -#endif - using var decompressionStream = new DecompressionStream(new MemoryStream(CompressedStreamData), zstdBufferSize); + await using var decompressionStream = new DecompressionStream(new MemoryStream(CompressedStreamData), zstdBufferSize); await decompressionStream.CopyToAsync(Stream.Null, copyBufferSize); } } diff --git a/ZstdNet.Benchmarks/ZstdNet.Benchmarks.csproj b/ZstdNet.Benchmarks/ZstdNet.Benchmarks.csproj index c7d1a6f..4406749 100644 --- a/ZstdNet.Benchmarks/ZstdNet.Benchmarks.csproj +++ b/ZstdNet.Benchmarks/ZstdNet.Benchmarks.csproj @@ -2,13 +2,13 @@ Exe - net48;netcoreapp3.1 - AnyCPU + net10.0 8.0 + false - + diff --git a/ZstdNet.Tests/Binding_Tests.cs b/ZstdNet.Tests/Binding_Tests.cs index 096c010..87a52fb 100644 --- a/ZstdNet.Tests/Binding_Tests.cs +++ b/ZstdNet.Tests/Binding_Tests.cs @@ -3,11 +3,11 @@ using System.Linq; using System.Text; using System.Threading; -using NUnit.Framework; +using Microsoft.VisualStudio.TestTools.UnitTesting; namespace ZstdNet.Tests { - [TestFixture] + [TestClass] public class Binding_Tests { public 
enum CompressionLevel @@ -17,8 +17,14 @@ public enum CompressionLevel Max } - [Test] - public void CompressAndDecompress_workCorrectly([Values(false, true)] bool useDictionary, [Values(CompressionLevel.Min, CompressionLevel.Default, CompressionLevel.Max)] CompressionLevel level) + [TestMethod] + [DataRow(true, CompressionLevel.Min)] + [DataRow(true, CompressionLevel.Default)] + [DataRow(true, CompressionLevel.Max)] + [DataRow(false, CompressionLevel.Min)] + [DataRow(false, CompressionLevel.Default)] + [DataRow(false, CompressionLevel.Max)] + public void CompressAndDecompress_workCorrectly(bool useDictionary, CompressionLevel level) { var data = GenerateSample(); @@ -33,8 +39,10 @@ public void CompressAndDecompress_workCorrectly([Values(false, true)] bool useDi CollectionAssert.AreEqual(data, CompressAndDecompress(data, dict, compressionLevel)); } - [Test] - public void CompressAndDecompress_worksCorrectly_advanced([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void CompressAndDecompress_worksCorrectly_advanced(bool useDictionary) { var data = GenerateSample(); var dict = useDictionary ? 
BuildDictionary() : null; @@ -59,7 +67,7 @@ public void CompressAndDecompress_worksCorrectly_advanced([Values(false, true)] } } - [Test] + [TestMethod] public void DecompressWithDictionary_worksCorrectly_onDataCompressedWithoutIt() { var data = GenerateSample(); @@ -77,7 +85,7 @@ public void DecompressWithDictionary_worksCorrectly_onDataCompressedWithoutIt() CollectionAssert.AreEqual(data, decompressed); } - [Test] + [TestMethod] public void DecompressWithoutDictionary_throwsZstdException_onDataCompressedWithIt() { var data = GenerateSample(); @@ -92,7 +100,7 @@ public void DecompressWithoutDictionary_throwsZstdException_onDataCompressedWith Assert.Throws(() => decompressor.Unwrap(compressed)); } - [Test] + [TestMethod] public void DecompressWithAnotherDictionary_throwsZstdException() { var data = GenerateSample(); @@ -110,7 +118,7 @@ public void DecompressWithAnotherDictionary_throwsZstdException() Assert.Throws(() => decompressor.Unwrap(compressed)); } - [Test] + [TestMethod] public void Compress_reducesDataSize() { var data = GenerateSample(); @@ -119,10 +127,10 @@ public void Compress_reducesDataSize() using(var compressor = new Compressor()) compressed = compressor.Wrap(data); - Assert.Greater(data.Length, compressed.Length); + Assert.IsGreaterThan(compressed.Length, data.Length); } - [Test] + [TestMethod] public void Compress_worksBetter_withDictionary() { var data = GenerateSample(); @@ -135,11 +143,13 @@ public void Compress_worksBetter_withDictionary() using(var compressor = new Compressor(options)) compressedWithDict = compressor.Wrap(data); - Assert.Greater(compressedWithoutDict.Length, compressedWithDict.Length); + Assert.IsGreaterThan(compressedWithDict.Length, compressedWithoutDict.Length); } - [Test] - public void Decompress_throwsZstdException_onInvalidData([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void Decompress_throwsZstdException_onInvalidData(bool useDictionary) { var data = 
GenerateSample(); // This isn't data in compressed format var dict = useDictionary ? BuildDictionary() : null; @@ -149,8 +159,10 @@ public void Decompress_throwsZstdException_onInvalidData([Values(false, true)] b Assert.Throws(() => decompressor.Unwrap(data)); } - [Test] - public void Decompress_throwsZstdException_onMalformedDecompressedSize([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void Decompress_throwsZstdException_onMalformedDecompressedSize(bool useDictionary) { var data = GenerateSample(); var dict = useDictionary ? BuildDictionary() : null; @@ -163,12 +175,12 @@ public void Decompress_throwsZstdException_onMalformedDecompressedSize([Values(f var frameHeader = compressed[4]; // Ensure that we malform decompressed size in the right place if(useDictionary) { - Assert.AreEqual(frameHeader, 0x63); + Assert.AreEqual(0x63, frameHeader); compressed[9]--; } else { - Assert.AreEqual(frameHeader, 0x60); + Assert.AreEqual(0x60, frameHeader); compressed[5]--; } @@ -178,8 +190,10 @@ public void Decompress_throwsZstdException_onMalformedDecompressedSize([Values(f Assert.Throws(() => decompressor.Unwrap(compressed)); } - [Test] - public void Decompress_throwsArgumentOutOfRangeException_onTooBigData([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void Decompress_throwsArgumentOutOfRangeException_onTooBigData(bool useDictionary) { var data = GenerateSample(); var dict = useDictionary ? 
BuildDictionary() : null; @@ -197,8 +211,10 @@ public void Decompress_throwsArgumentOutOfRangeException_onTooBigData([Values(fa } } - [Test] - public void Compress_canRead_fromArraySegment([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void Compress_canRead_fromArraySegment(bool useDictionary) { var data = GenerateSample(); var segment = new ArraySegment(data, 2, data.Length - 5); @@ -214,11 +230,13 @@ public void Compress_canRead_fromArraySegment([Values(false, true)] bool useDict using(var decompressor = new Decompressor(options)) decompressed = decompressor.Unwrap(compressed); - CollectionAssert.AreEqual(segment, decompressed); + CollectionAssert.AreEqual(segment.ToArray(), decompressed); } - [Test] - public void CompressAndDecompress_workCorrectly_spans([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void CompressAndDecompress_workCorrectly_spans(bool useDictionary) { var buffer = GenerateSample(); @@ -245,8 +263,10 @@ public void CompressAndDecompress_workCorrectly_spans([Values(false, true)] bool CollectionAssert.AreEqual(data.ToArray(), decompressed.ToArray()); } - [Test] - public void Decompress_canRead_fromArraySegment([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void Decompress_canRead_fromArraySegment(bool useDictionary) { var data = GenerateSample(); var dict = useDictionary ? BuildDictionary() : null; @@ -267,8 +287,10 @@ public void Decompress_canRead_fromArraySegment([Values(false, true)] bool useDi CollectionAssert.AreEqual(data, decompressed); } - [Test] - public void Compress_canWrite_toGivenBuffer([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void Compress_canWrite_toGivenBuffer(bool useDictionary) { var data = GenerateSample(); var dict = useDictionary ? 
BuildDictionary() : null; @@ -288,8 +310,10 @@ public void Compress_canWrite_toGivenBuffer([Values(false, true)] bool useDictio CollectionAssert.AreEqual(data, decompressed); } - [Test] - public void Decompress_canWrite_toGivenBuffer([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void Decompress_canWrite_toGivenBuffer(bool useDictionary) { var data = GenerateSample(); var dict = useDictionary ? BuildDictionary() : null; @@ -307,11 +331,13 @@ public void Decompress_canWrite_toGivenBuffer([Values(false, true)] bool useDict using(var decompressor = new Decompressor(options)) decompressedSize = decompressor.Unwrap(compressed, decompressed, offset); - CollectionAssert.AreEqual(data, decompressed.Skip(offset).Take(decompressedSize)); + CollectionAssert.AreEqual(data, decompressed.Skip(offset).Take(decompressedSize).ToArray()); } - [Test] - public void Compress_throwsDstSizeTooSmall_whenDestinationBufferIsTooSmall([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void Compress_throwsDstSizeTooSmall_whenDestinationBufferIsTooSmall(bool useDictionary) { var data = GenerateSample(); var dict = useDictionary ? BuildDictionary() : null; @@ -326,8 +352,10 @@ public void Compress_throwsDstSizeTooSmall_whenDestinationBufferIsTooSmall([Valu } } - [Test] - public void Decompress_throwsDstSizeTooSmall_whenDestinationBufferIsTooSmall([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void Decompress_throwsDstSizeTooSmall_whenDestinationBufferIsTooSmall(bool useDictionary) { var data = GenerateSample(); var dict = useDictionary ? 
BuildDictionary() : null; @@ -348,8 +376,10 @@ public void Decompress_throwsDstSizeTooSmall_whenDestinationBufferIsTooSmall([Va } } - [Test] - public void CompressAndDecompress_workCorrectly_onEmptyBuffer([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void CompressAndDecompress_workCorrectly_onEmptyBuffer(bool useDictionary) { var data = new byte[0]; var dict = useDictionary ? BuildDictionary() : null; @@ -357,8 +387,10 @@ public void CompressAndDecompress_workCorrectly_onEmptyBuffer([Values(false, tru CollectionAssert.AreEqual(data, CompressAndDecompress(data, dict)); } - [Test] - public void CompressAndDecompress_workCorrectly_onOneByteBuffer([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void CompressAndDecompress_workCorrectly_onOneByteBuffer(bool useDictionary) { var data = new byte[] {42}; var dict = useDictionary ? BuildDictionary() : null; @@ -366,8 +398,10 @@ public void CompressAndDecompress_workCorrectly_onOneByteBuffer([Values(false, t CollectionAssert.AreEqual(data, CompressAndDecompress(data, dict)); } - [Test] - public void CompressAndDecompress_workCorrectly_onArraysOfDifferentSizes([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void CompressAndDecompress_workCorrectly_onArraysOfDifferentSizes(bool useDictionary) { var dict = useDictionary ? BuildDictionary() : null; using(var compressionOptions = new CompressionOptions(dict)) @@ -386,8 +420,10 @@ public void CompressAndDecompress_workCorrectly_onArraysOfDifferentSizes([Values } } - [Test] - public void CompressAndDecompress_workCorrectly_ifDifferentInstancesRunInDifferentThreads([Values(false, true)] bool useDictionary) + [TestMethod] + [DataRow(true)] + [DataRow(false)] + public void CompressAndDecompress_workCorrectly_ifDifferentInstancesRunInDifferentThreads(bool useDictionary) { var dict = useDictionary ? 
BuildDictionary() : null; using(var compressionOptions = new CompressionOptions(dict)) @@ -411,8 +447,10 @@ public void CompressAndDecompress_workCorrectly_ifDifferentInstancesRunInDiffere }); } - [Test, Explicit("stress")] - public void CompressAndDecompress_workCorrectly_stress([Values(false, true)] bool useDictionary) + [TestMethod, CICondition(ConditionMode.Exclude, IgnoreMessage = "stress"), TestCategory("Explicit")] + [DataRow(true)] + [DataRow(false)] + public void CompressAndDecompress_workCorrectly_stress(bool useDictionary) { long i = 0L; var data = GenerateBuffer(65536); @@ -434,8 +472,10 @@ public void CompressAndDecompress_workCorrectly_stress([Values(false, true)] boo }); } - [Test, Explicit("memory consuming")] - public void CompressAndDecomress_workCorrectly_2GB([Values(false, true)] bool useDictionary) + [TestMethod, CICondition(ConditionMode.Exclude, IgnoreMessage = "memory consuming"), TestCategory("Explicit")] + [DataRow(true)] + [DataRow(false)] + public void CompressAndDecomress_workCorrectly_2GB(bool useDictionary) { var data = new byte[MaxByteArrayLength]; Array.Fill(data, 0xff, 100, 10000000); @@ -454,8 +494,10 @@ public void CompressAndDecomress_workCorrectly_2GB([Values(false, true)] bool us Assert.IsTrue(data.SequenceEqual(CompressAndDecompress(data, dict))); } - [Test, Explicit("memory consuming")] - public void CompressAndDecomress_throwsDstSizeTooSmall_Over2GB([Values(false, true)] bool useDictionary) + [TestMethod, CICondition(ConditionMode.Exclude, IgnoreMessage = "memory consuming"), TestCategory("Explicit")] + [DataRow(true)] + [DataRow(false)] + public void CompressAndDecomress_throwsDstSizeTooSmall_Over2GB(bool useDictionary) { var data = new byte[MaxByteArrayLength]; new Random(1337).NextBytes(data); //NOTE: Uncompressible data @@ -470,7 +512,7 @@ public void CompressAndDecomress_throwsDstSizeTooSmall_Over2GB([Values(false, tr } } - [Test, Explicit("stress")] + [TestMethod, CICondition(ConditionMode.Exclude, IgnoreMessage = 
"stress"), TestCategory("Explicit")] public void TrainDictionaryParallel() { var buffer = Enumerable.Range(0, 100000).Select(i => unchecked((byte)(i * i))).ToArray(); @@ -479,8 +521,8 @@ public void TrainDictionaryParallel() .ToArray(); var dict = DictBuilder.TrainFromBuffer(samples); - Assert.Greater(dict.Length, 0); - Assert.LessOrEqual(dict.Length, DictBuilder.DefaultDictCapacity); + Assert.IsGreaterThan(0, dict.Length); + Assert.IsLessThanOrEqualTo(DictBuilder.DefaultDictCapacity, dict.Length); Enumerable.Range(0, 100000) .AsParallel().WithDegreeOfParallelism(Environment.ProcessorCount * 4) diff --git a/ZstdNet.Tests/NativeResolver.cs b/ZstdNet.Tests/NativeResolver.cs new file mode 100644 index 0000000..7be7f9d --- /dev/null +++ b/ZstdNet.Tests/NativeResolver.cs @@ -0,0 +1,23 @@ +using System; +using System.IO; +using System.Runtime.InteropServices; +using Microsoft.VisualStudio.TestTools.UnitTesting; + +[assembly: DoNotParallelize] + +namespace ZstdNet.Tests; + +[TestClass] +// Native dependencies are not added to the deps.json file via ProjectReference +// https://github.com/dotnet/sdk/issues/10575 +public class NativeResolver +{ + [AssemblyInitialize] + public static void SetLibZstdResolver(TestContext _) + { + var ext = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? "dll" : RuntimeInformation.IsOSPlatform(OSPlatform.OSX) ? "dylib" : "so"; + NativeLibrary.SetDllImportResolver(typeof(DictBuilder).Assembly, (name, _, _) => name == "libzstd" + ? 
NativeLibrary.Load(Path.Combine(AppContext.BaseDirectory, "runtimes", RuntimeInformation.RuntimeIdentifier, "native", $"libzstd.{ext}")) + : IntPtr.Zero); + } +} diff --git a/ZstdNet.Tests/SteamingCompressionTests.cs b/ZstdNet.Tests/SteamingCompressionTests.cs index 98303ff..4d1331f 100644 --- a/ZstdNet.Tests/SteamingCompressionTests.cs +++ b/ZstdNet.Tests/SteamingCompressionTests.cs @@ -4,7 +4,7 @@ using System.Linq; using System.Threading; using System.Threading.Tasks; -using NUnit.Framework; +using Microsoft.VisualStudio.TestTools.UnitTesting; namespace ZstdNet.Tests { @@ -43,10 +43,10 @@ public static byte[] GetBuffer(int length, DataFill dataFill) } } - [TestFixture] + [TestClass] public class SteamingTests { - [Test] + [TestMethod] public void StreamingCompressionZeroAndOneByte() { var data = new byte[] {0, 0, 0, 1, 2, 3, 4, 0, 0, 0}; @@ -83,15 +83,15 @@ public void StreamingCompressionZeroAndOneByte() Assert.AreEqual(1, decompressionStream.ReadAsync(new Memory(result, 6, 1)).GetAwaiter().GetResult()); } - Assert.AreEqual(data, result); + CollectionAssert.AreEqual(data, result); } - - [TestCase(new byte[0], 0, 0)] - [TestCase(new byte[] {1, 2, 3}, 1, 2)] - [TestCase(new byte[] {1, 2, 3}, 0, 2)] - [TestCase(new byte[] {1, 2, 3}, 1, 1)] - [TestCase(new byte[] {1, 2, 3}, 0, 3)] + [TestMethod] + [DataRow(new byte[0], 0, 0)] + [DataRow(new byte[] {1, 2, 3}, 1, 2)] + [DataRow(new byte[] {1, 2, 3}, 0, 2)] + [DataRow(new byte[] {1, 2, 3}, 1, 1)] + [DataRow(new byte[] {1, 2, 3}, 0, 3)] public void StreamingCompressionSimpleWrite(byte[] data, int offset, int count) { var tempStream = new MemoryStream(); @@ -107,10 +107,10 @@ public void StreamingCompressionSimpleWrite(byte[] data, int offset, int count) var dataToCompress = new byte[count]; Array.Copy(data, offset, dataToCompress, 0, count); - Assert.AreEqual(dataToCompress, resultStream.ToArray()); + CollectionAssert.AreEqual(dataToCompress, resultStream.ToArray()); } - [Test] + [TestMethod] public void 
StreamingCompressionKeepReferenceToDict() { var dict = TrainDict(); @@ -146,12 +146,13 @@ public void StreamingCompressionKeepReferenceToDict() } } - [TestCase(1)] - [TestCase(2)] - [TestCase(3)] - [TestCase(5)] - [TestCase(9)] - [TestCase(10)] + [TestMethod] + [DataRow(1)] + [DataRow(2)] + [DataRow(3)] + [DataRow(5)] + [DataRow(9)] + [DataRow(10)] public void StreamingDecompressionSimpleRead(int readCount) { var data = new byte[] {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}; @@ -169,17 +170,17 @@ public void StreamingDecompressionSimpleRead(int readCount) int totalBytesRead = 0; while((bytesRead = decompressionStream.Read(buffer, totalBytesRead, Math.Min(readCount, buffer.Length - totalBytesRead))) > 0) { - Assert.LessOrEqual(bytesRead, readCount); + Assert.IsLessThanOrEqualTo(readCount, bytesRead); totalBytesRead += bytesRead; } Assert.AreEqual(data.Length, totalBytesRead); } - Assert.AreEqual(data, buffer); + CollectionAssert.AreEqual(data, buffer); } - [Test] + [TestMethod] public void StreamingCompressionFlushDataFromInternalBuffers() { var testBuffer = new byte[1]; @@ -190,7 +191,7 @@ public void StreamingCompressionFlushDataFromInternalBuffers() compressionStream.Write(testBuffer, 0, testBuffer.Length); compressionStream.Flush(); - Assert.Greater(tempStream.Length, 0); + Assert.IsGreaterThan(0, tempStream.Length); tempStream.Seek(0, SeekOrigin.Begin); //NOTE: without ZSTD_endStream call on compression @@ -198,11 +199,11 @@ public void StreamingCompressionFlushDataFromInternalBuffers() using(var decompressionStream = new DecompressionStream(tempStream)) decompressionStream.CopyTo(resultStream); - Assert.AreEqual(testBuffer, resultStream.ToArray()); + CollectionAssert.AreEqual(testBuffer, resultStream.ToArray()); } } - [Test] + [TestMethod] public void CompressionImprovesWithDictionary() { var dict = TrainDict(); @@ -220,7 +221,7 @@ public void CompressionImprovesWithDictionary() using(var compressionStream = new CompressionStream(dictResultStream, compressionOptions)) 
dataStream.CopyTo(compressionStream); - Assert.Greater(normalResultStream.Length, dictResultStream.Length); + Assert.IsGreaterThan(dictResultStream.Length, normalResultStream.Length); dictResultStream.Seek(0, SeekOrigin.Begin); @@ -228,10 +229,10 @@ public void CompressionImprovesWithDictionary() using(var decompressionStream = new DecompressionStream(dictResultStream, new DecompressionOptions(dict))) decompressionStream.CopyTo(resultStream); - Assert.AreEqual(dataStream.ToArray(), resultStream.ToArray()); + CollectionAssert.AreEqual(dataStream.ToArray(), resultStream.ToArray()); } - [Test] + [TestMethod] public void CompressionShrinksData() { var dataStream = DataGenerator.GetLargeStream(DataFill.Sequential); @@ -240,10 +241,10 @@ public void CompressionShrinksData() using(var compressionStream = new CompressionStream(resultStream)) dataStream.CopyTo(compressionStream); - Assert.Greater(dataStream.Length, resultStream.Length); + Assert.IsGreaterThan(resultStream.Length, dataStream.Length); } - [Test] + [TestMethod] public void RoundTrip_BatchToStreaming() { var data = DataGenerator.GetLargeBuffer(DataFill.Sequential); @@ -256,10 +257,10 @@ public void RoundTrip_BatchToStreaming() using(var decompressionStream = new DecompressionStream(new MemoryStream(compressed))) decompressionStream.CopyTo(resultStream); - Assert.AreEqual(data, resultStream.ToArray()); + CollectionAssert.AreEqual(data, resultStream.ToArray()); } - [Test] + [TestMethod] public void RoundTrip_StreamingToBatch() { var dataStream = DataGenerator.GetLargeStream(DataFill.Sequential); @@ -272,14 +273,28 @@ public void RoundTrip_StreamingToBatch() using(var decompressor = new Decompressor()) Assert.AreEqual(dataStream.Length, decompressor.Unwrap(tempStream.ToArray(), resultBuffer, 0, false)); - Assert.AreEqual(dataStream.ToArray(), resultBuffer); + CollectionAssert.AreEqual(dataStream.ToArray(), resultBuffer); + } + + public static IEnumerable GetCombinations() + { + var useDict = new[] { false, true }; 
+ var advanced = new[] { false, true }; + var zstdBufferSize = new[] {1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1}; + var copyBufferSize = new[] {1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1}; + + var combinations = from p1 in useDict + from p2 in advanced + from p3 in zstdBufferSize + from p4 in copyBufferSize + select new object[] { p1, p2, p3, p4 }; + + return combinations; } - [Test, Combinatorial, Parallelizable(ParallelScope.Children)] - public void RoundTrip_StreamingToStreaming( - [Values(false, true)] bool useDict, [Values(false, true)] bool advanced, - [Values(1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1)] int zstdBufferSize, - [Values(1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1)] int copyBufferSize) + [TestMethod] + [DynamicData(nameof(GetCombinations))] + public void RoundTrip_StreamingToStreaming(bool useDict, bool advanced, int zstdBufferSize, int copyBufferSize) { var dict = useDict ? 
TrainDict() : null; var testStream = DataGenerator.GetLargeStream(DataFill.Sequential); @@ -307,14 +322,12 @@ public void RoundTrip_StreamingToStreaming( resultStream.Write(buffer, offset, bytesRead); } - Assert.AreEqual(testStream.ToArray(), resultStream.ToArray()); + CollectionAssert.AreEqual(testStream.ToArray(), resultStream.ToArray()); } - [Test, Combinatorial, Parallelizable(ParallelScope.Children)] - public async Task RoundTrip_StreamingToStreamingAsync( - [Values(false, true)] bool useDict, [Values(false, true)] bool advanced, - [Values(1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1)] int zstdBufferSize, - [Values(1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1)] int copyBufferSize) + [TestMethod] + [DynamicData(nameof(GetCombinations))] + public async Task RoundTrip_StreamingToStreamingAsync(bool useDict, bool advanced, int zstdBufferSize, int copyBufferSize) { var dict = useDict ? TrainDict() : null; var testStream = DataGenerator.GetLargeStream(DataFill.Sequential); @@ -342,11 +355,15 @@ public async Task RoundTrip_StreamingToStreamingAsync( await resultStream.WriteAsync(buffer, offset, bytesRead); } - Assert.AreEqual(testStream.ToArray(), resultStream.ToArray()); + CollectionAssert.AreEqual(testStream.ToArray(), resultStream.ToArray()); } - [Test, Explicit("stress")] - public void RoundTrip_StreamingToStreaming_Stress([Values(true, false)] bool useDict, [Values(true, false)] bool async) + [TestMethod, CICondition(ConditionMode.Exclude, IgnoreMessage = "stress"), TestCategory("Explicit")] + [DataRow(true, true)] + [DataRow(true, false)] + [DataRow(false, true)] + [DataRow(false, false)] + public void RoundTrip_StreamingToStreaming_Stress(bool useDict, bool async) { long i = 0; var dict = useDict ? 
TrainDict() : null; @@ -392,7 +409,7 @@ public void RoundTrip_StreamingToStreaming_Stress([Values(true, false)] bool use } } - Assert.AreEqual(testStream.ToArray(), resultStream.ToArray()); + CollectionAssert.AreEqual(testStream.ToArray(), resultStream.ToArray()); }); GC.KeepAlive(compressionOptions); GC.KeepAlive(decompressionOptions); diff --git a/ZstdNet.Tests/ZstdNet.Tests.csproj b/ZstdNet.Tests/ZstdNet.Tests.csproj index e9626e5..cab4559 100644 --- a/ZstdNet.Tests/ZstdNet.Tests.csproj +++ b/ZstdNet.Tests/ZstdNet.Tests.csproj @@ -1,16 +1,16 @@ - netcoreapp3.1 + net10.0 false - AnyCPU;x64;x86 - 8.0 + 13 + false - - - + + + diff --git a/ZstdNet.sln b/ZstdNet.sln deleted file mode 100644 index 1ccd916..0000000 --- a/ZstdNet.sln +++ /dev/null @@ -1,65 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 16 -VisualStudioVersion = 16.0.30523.141 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ZstdNet", "ZstdNet\ZstdNet.csproj", "{8ADBEB19-A508-471D-87A0-7443EE086E9B}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ZstdNet.Tests", "ZstdNet.Tests\ZstdNet.Tests.csproj", "{8DD3694E-9532-4659-AA9F-BB01CBB9341B}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ZstdNet.Benchmarks", "ZstdNet.Benchmarks\ZstdNet.Benchmarks.csproj", "{6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {8ADBEB19-A508-471D-87A0-7443EE086E9B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {8ADBEB19-A508-471D-87A0-7443EE086E9B}.Debug|Any CPU.Build.0 = Debug|Any CPU - {8ADBEB19-A508-471D-87A0-7443EE086E9B}.Debug|x64.ActiveCfg = Debug|x64 - 
{8ADBEB19-A508-471D-87A0-7443EE086E9B}.Debug|x64.Build.0 = Debug|x64 - {8ADBEB19-A508-471D-87A0-7443EE086E9B}.Debug|x86.ActiveCfg = Debug|x86 - {8ADBEB19-A508-471D-87A0-7443EE086E9B}.Debug|x86.Build.0 = Debug|x86 - {8ADBEB19-A508-471D-87A0-7443EE086E9B}.Release|Any CPU.ActiveCfg = Release|Any CPU - {8ADBEB19-A508-471D-87A0-7443EE086E9B}.Release|Any CPU.Build.0 = Release|Any CPU - {8ADBEB19-A508-471D-87A0-7443EE086E9B}.Release|x64.ActiveCfg = Release|x64 - {8ADBEB19-A508-471D-87A0-7443EE086E9B}.Release|x64.Build.0 = Release|x64 - {8ADBEB19-A508-471D-87A0-7443EE086E9B}.Release|x86.ActiveCfg = Release|x86 - {8ADBEB19-A508-471D-87A0-7443EE086E9B}.Release|x86.Build.0 = Release|x86 - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Debug|Any CPU.Build.0 = Debug|Any CPU - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Debug|x64.ActiveCfg = Debug|x64 - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Debug|x64.Build.0 = Debug|x64 - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Debug|x86.ActiveCfg = Debug|x86 - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Debug|x86.Build.0 = Debug|x86 - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Release|Any CPU.ActiveCfg = Release|Any CPU - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Release|Any CPU.Build.0 = Release|Any CPU - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Release|x64.ActiveCfg = Release|x64 - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Release|x64.Build.0 = Release|x64 - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Release|x86.ActiveCfg = Release|x86 - {8DD3694E-9532-4659-AA9F-BB01CBB9341B}.Release|x86.Build.0 = Release|x86 - {6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Debug|x64.ActiveCfg = Debug|Any CPU - {6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Debug|x64.Build.0 = Debug|Any CPU - {6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Debug|x86.ActiveCfg = Debug|Any CPU - 
{6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Debug|x86.Build.0 = Debug|Any CPU - {6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Release|Any CPU.Build.0 = Release|Any CPU - {6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Release|x64.ActiveCfg = Release|Any CPU - {6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Release|x64.Build.0 = Release|Any CPU - {6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Release|x86.ActiveCfg = Release|Any CPU - {6ACD682F-6E1B-4C7E-B60E-66302AD9E2E3}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection - GlobalSection(ExtensibilityGlobals) = postSolution - SolutionGuid = {799921F9-704F-4A35-A7D6-066C28B93B2E} - EndGlobalSection -EndGlobal diff --git a/ZstdNet.slnx b/ZstdNet.slnx new file mode 100644 index 0000000..895cf7f --- /dev/null +++ b/ZstdNet.slnx @@ -0,0 +1,5 @@ + + + + + diff --git a/ZstdNet/CompressionStream.cs b/ZstdNet/CompressionStream.cs index 94bafa2..687ed9a 100644 --- a/ZstdNet/CompressionStream.cs +++ b/ZstdNet/CompressionStream.cs @@ -13,7 +13,7 @@ public class CompressionStream : Stream private readonly Stream innerStream; private readonly byte[] outputBuffer; private readonly int bufferSize; -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 private readonly ReadOnlyMemory outputMemory; #endif @@ -55,12 +55,12 @@ public CompressionStream(Stream stream, CompressionOptions options, int bufferSi this.bufferSize = bufferSize > 0 ? 
bufferSize : (int)ZSTD_CStreamOutSize().EnsureZstdSuccess(); outputBuffer = ArrayPool.Shared.Rent(this.bufferSize); -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 outputMemory = new ReadOnlyMemory(outputBuffer, 0, this.bufferSize); #endif } -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 public override void Write(ReadOnlySpan buffer) { EnsureNotDisposed(); @@ -89,7 +89,7 @@ public override Task WriteAsync(byte[] buffer, int offset, int count, Cancellati EnsureParamsValid(buffer, offset, count); EnsureNotDisposed(); -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 return WriteInternalAsync(new ReadOnlyMemory(buffer, offset, count), cancellationToken).AsTask(); #else return WriteInternalAsync(new ReadOnlyMemory(buffer, offset, count), cancellationToken); @@ -121,7 +121,7 @@ private void WriteInternal(ReadOnlySpan buffer) } private async -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 ValueTask #else Task @@ -160,7 +160,7 @@ private unsafe UIntPtr Compress(ReadOnlySpan buffer, ref ZSTD_Buffer outpu } } -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 private void FlushOutputBuffer(ReadOnlySpan outputSpan) => innerStream.Write(outputSpan); private ValueTask FlushOutputBufferAsync(ref ZSTD_Buffer output, CancellationToken cancellationToken) @@ -196,7 +196,7 @@ public override Task FlushAsync(CancellationToken cancellationToken) { EnsureNotDisposed(); -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 return FlushCompressStreamAsync(ZSTD_EndDirective.ZSTD_e_flush, cancellationToken).AsTask(); #else return FlushCompressStreamAsync(ZSTD_EndDirective.ZSTD_e_flush, cancellationToken); @@ -228,7 +228,7 @@ private void FlushCompressStream(ZSTD_EndDirective directive) } private async -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 ValueTask #else Task @@ -257,7 +257,7 @@ private async public override void SetLength(long value) => throw new NotSupportedException(); public override int Read(byte[] buffer, int offset, int count) => throw new 
NotSupportedException(); -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 public override async ValueTask DisposeAsync() { await DisposeAsyncCore().ConfigureAwait(false); diff --git a/ZstdNet/DecompressionStream.cs b/ZstdNet/DecompressionStream.cs index d3f8916..252da23 100644 --- a/ZstdNet/DecompressionStream.cs +++ b/ZstdNet/DecompressionStream.cs @@ -13,7 +13,7 @@ public class DecompressionStream : Stream private readonly Stream innerStream; private readonly byte[] inputBuffer; private readonly int bufferSize; -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 private readonly Memory inputMemory; #endif @@ -56,13 +56,13 @@ public DecompressionStream(Stream stream, DecompressionOptions options, int buff this.bufferSize = bufferSize > 0 ? bufferSize : (int)ZSTD_DStreamInSize().EnsureZstdSuccess(); inputBuffer = ArrayPool.Shared.Rent(this.bufferSize); -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 inputMemory = new Memory(inputBuffer, 0, this.bufferSize); #endif pos = size = (UIntPtr)this.bufferSize; } -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 public override int Read(Span buffer) { EnsureNotDisposed(); @@ -91,7 +91,7 @@ public override Task ReadAsync(byte[] buffer, int offset, int count, Cancel EnsureParamsValid(buffer, offset, count); EnsureNotDisposed(); -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 return ReadInternalAsync(new Memory(buffer, offset, count), cancellationToken).AsTask(); #else return ReadInternalAsync(new Memory(buffer, offset, count), cancellationToken); @@ -115,7 +115,7 @@ private int ReadInternal(Span buffer) } private async -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 ValueTask #else Task @@ -130,7 +130,7 @@ private async if(input.IsFullyConsumed) { int bytesRead; -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 if((bytesRead = await innerStream.ReadAsync(inputMemory, cancellationToken).ConfigureAwait(false)) == 0) #else if((bytesRead = await innerStream.ReadAsync(inputBuffer, 0, bufferSize, 
cancellationToken).ConfigureAwait(false)) == 0) @@ -164,7 +164,7 @@ private unsafe void Decompress(Span buffer, ref ZSTD_Buffer output, ref ZS private int FillInputBuffer(Span inputSpan, ref ZSTD_Buffer input) { -#if !(NET45 || NETSTANDARD2_0) +#if !NETSTANDARD2_0 int bytesRead = innerStream.Read(inputSpan); #else int bytesRead = innerStream.Read(inputBuffer, 0, inputSpan.Length); diff --git a/ZstdNet/ExternMethods.cs b/ZstdNet/ExternMethods.cs index ae16ca4..5bd3edc 100644 --- a/ZstdNet/ExternMethods.cs +++ b/ZstdNet/ExternMethods.cs @@ -1,7 +1,4 @@ using System; -using System.Diagnostics; -using System.IO; -using System.Reflection; using System.Runtime.InteropServices; using size_t = System.UIntPtr; @@ -9,35 +6,6 @@ namespace ZstdNet { internal static class ExternMethods { - static ExternMethods() - { - if(Environment.OSVersion.Platform == PlatformID.Win32NT) - SetWinDllDirectory(); - } - - private static void SetWinDllDirectory() - { - string path; - - var location = Assembly.GetExecutingAssembly().Location; - if(string.IsNullOrEmpty(location) || (path = Path.GetDirectoryName(location)) == null) - { - Trace.TraceWarning($"{nameof(ZstdNet)}: Failed to get executing assembly location"); - return; - } - - // Nuget package - if(Path.GetFileName(path).StartsWith("net", StringComparison.Ordinal) && Path.GetFileName(Path.GetDirectoryName(path)) == "lib" && File.Exists(Path.Combine(path, "../../zstdnet.nuspec"))) - path = Path.Combine(path, "../../build"); - - var platform = Environment.Is64BitProcess ? 
"x64" : "x86"; - if(!SetDllDirectory(Path.Combine(path, platform))) - Trace.TraceWarning($"{nameof(ZstdNet)}: Failed to set DLL directory to '{path}'"); - } - - [DllImport("kernel32.dll", CharSet = CharSet.Auto, SetLastError = true)] - private static extern bool SetDllDirectory(string path); - private const string DllName = "libzstd"; [DllImport(DllName, CallingConvention = CallingConvention.Cdecl)] @@ -243,6 +211,8 @@ public enum ZSTD_cParameter ZSTD_c_targetLength = 106, ZSTD_c_strategy = 107, + ZSTD_c_targetCBlockSize = 130, + // long distance matching mode parameters ZSTD_c_enableLongDistanceMatching = 160, ZSTD_c_ldmHashLog = 161, @@ -276,20 +246,26 @@ public enum ZSTD_ErrorCode ZSTD_error_frameParameter_windowTooLarge = 16, ZSTD_error_corruption_detected = 20, ZSTD_error_checksum_wrong = 22, + ZSTD_error_literals_headerWrong = 24, ZSTD_error_dictionary_corrupted = 30, ZSTD_error_dictionary_wrong = 32, ZSTD_error_dictionaryCreation_failed = 34, ZSTD_error_parameter_unsupported = 40, + ZSTD_error_parameter_combination_unsupported = 41, ZSTD_error_parameter_outOfBound = 42, ZSTD_error_tableLog_tooLarge = 44, ZSTD_error_maxSymbolValue_tooLarge = 46, ZSTD_error_maxSymbolValue_tooSmall = 48, + ZSTD_error_cannotProduce_uncompressedBlock = 49, + ZSTD_error_stabilityCondition_notRespected = 50, ZSTD_error_stage_wrong = 60, ZSTD_error_init_missing = 62, ZSTD_error_memory_allocation = 64, ZSTD_error_workSpace_tooSmall = 66, ZSTD_error_dstSize_tooSmall = 70, ZSTD_error_srcSize_wrong = 72, - ZSTD_error_dstBuffer_null = 74 + ZSTD_error_dstBuffer_null = 74, + ZSTD_error_noForwardProgress_destFull = 80, + ZSTD_error_noForwardProgress_inputEmpty = 82 } } diff --git a/ZstdNet/ZstdNet.csproj b/ZstdNet/ZstdNet.csproj index 5c5e269..6072099 100644 --- a/ZstdNet/ZstdNet.csproj +++ b/ZstdNet/ZstdNet.csproj @@ -1,36 +1,38 @@  - - 1.4.5 - net45;netstandard2.0;netstandard2.1 - ZstdNet - ZstdNet - SKB Kontur - SKB Kontur - Zstd compression library wrapper for .NET - Copyright © SKB 
Kontur 2016-present - https://github.com/skbkontur/ZstdNet/blob/master/LICENSE - https://github.com/skbkontur/ZstdNet - zstd zstandard compression - 7.2 - AnyCPU;x64;x86 - true - true - - - - x64\%(FileName)%(Extension) - PreserveNewest - - - x86\%(FileName)%(Extension) - PreserveNewest - - - - - - - - - - \ No newline at end of file + + + 1.5.7-rc2 + netstandard2.0;netstandard2.1 + ZstdNet + ZstdNet + SKB Kontur + SKB Kontur + Zstd compression library wrapper for .NET + Copyright © SKB Kontur 2016-2026 + BSD-3-Clause + https://github.com/skbkontur/ZstdNet + zstd zstandard compression + 7.3 + true + true + true + README.md + true + true + true + true + embedded + + + + + + + + + + + + + + diff --git a/ZstdNet/build/ZstdNet.targets b/ZstdNet/build/ZstdNet.targets index 11adb4c..f934a51 100644 --- a/ZstdNet/build/ZstdNet.targets +++ b/ZstdNet/build/ZstdNet.targets @@ -1,23 +1,17 @@ - - - x64\%(FileName)%(Extension) - PreserveNewest - - - x86\%(FileName)%(Extension) + + + %(FileName)%(Extension) PreserveNewest - - - + %(FileName)%(Extension) PreserveNewest - + %(FileName)%(Extension) PreserveNewest - \ No newline at end of file + diff --git a/ZstdNet/build/x64/libzstd.dll b/ZstdNet/build/x64/libzstd.dll deleted file mode 100644 index a5def68..0000000 Binary files a/ZstdNet/build/x64/libzstd.dll and /dev/null differ diff --git a/ZstdNet/build/x86/libzstd.dll b/ZstdNet/build/x86/libzstd.dll deleted file mode 100644 index 55c5669..0000000 Binary files a/ZstdNet/build/x86/libzstd.dll and /dev/null differ diff --git a/ZstdNet/build/LICENSE b/ZstdNet/runtimes/LICENSE similarity index 78% rename from ZstdNet/build/LICENSE rename to ZstdNet/runtimes/LICENSE index dbdb05e..7580028 100644 --- a/ZstdNet/build/LICENSE +++ b/ZstdNet/runtimes/LICENSE @@ -2,7 +2,7 @@ BSD License For Zstandard software -Copyright (c) 2016-present, Facebook, Inc. All rights reserved. +Copyright (c) Meta Platforms, Inc. and affiliates. All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: @@ -14,9 +14,9 @@ are permitted provided that the following conditions are met: this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - * Neither the name Facebook nor the names of its contributors may be used to - endorse or promote products derived from this software without specific - prior written permission. + * Neither the name Facebook, nor Meta, nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED @@ -27,4 +27,4 @@ ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.