bugfix: benchmark time error
ikpil committed Oct 2, 2023
1 parent 0e7a260 commit 93362d2
Showing 2 changed files with 137 additions and 31 deletions.
51 changes: 30 additions & 21 deletions src/DotFastLZ.Packaging/SixPack.cs
@@ -25,6 +25,12 @@ public static class SixPack
/* for Adler-32 checksum algorithm, see RFC 1950 Section 8.2 */
public const int ADLER32_BASE = 65521;
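// An illustrative sketch (not part of this diff): the Adler-32 update described in
// RFC 1950 Section 8.2, which the Adler32 method below follows in spirit. The checksum
// packs two 16-bit running sums, s1 and s2, both reduced modulo ADLER32_BASE:
//
//     ulong s1 = checksum & 0xffff;
//     ulong s2 = (checksum >> 16) & 0xffff;
//     for (long n = 0; n < len; n++)
//     {
//         s1 = (s1 + buf[n]) % (ulong)ADLER32_BASE;
//         s2 = (s2 + s1) % (ulong)ADLER32_BASE;
//     }
//     return (s2 << 16) + s1;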


public static long GetTickCount64()
{
return unchecked((long)(Stopwatch.GetTimestamp() * ((double)TimeSpan.TicksPerSecond / Stopwatch.Frequency)));
}
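// A minimal usage sketch (not from this diff): Stopwatch lives in System.Diagnostics,
// and scaling its timestamp by TimeSpan.TicksPerSecond / Stopwatch.Frequency yields
// TimeSpan ticks (100 ns each), so elapsed time falls out by plain division:
//
//     long begin = GetTickCount64();
//     // ... timed work ...
//     long elapsedTicks = GetTickCount64() - begin;
//     long elapsedMs    = elapsedTicks / TimeSpan.TicksPerMillisecond;          // 10,000 ticks per ms
//     long elapsedMicro = elapsedTicks / (TimeSpan.TicksPerMillisecond / 1000); // 10 ticks per microsecond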

// update_adler32
public static ulong Adler32(ulong checksum, byte[] buf, long len)
{
@@ -230,7 +236,7 @@ private static int PackFileCompressed(string input_file, int method, int level,
/* read file and place ifs archive */
long total_read = 0;
long percent = 0;
var beginTick = DateTime.UtcNow.Ticks;
var beginTick = GetTickCount64();
for (;;)
{
int compress_method = method;
@@ -316,7 +322,7 @@ private static int PackFileCompressed(string input_file, int method, int level,

percent = 1000 - percent;

var elapsedTicks = (DateTime.UtcNow.Ticks - beginTick);
var elapsedTicks = (GetTickCount64() - beginTick);
var elapsedMs = elapsedTicks / TimeSpan.TicksPerMillisecond;
var elapsedMicro = elapsedTicks / (TimeSpan.TicksPerMillisecond / 1000);
Console.Write($"{(int)percent / 10:D2}.{(int)percent % 10:D1}% saved - {elapsedMs} ms, {elapsedMicro} micro");
@@ -670,7 +676,6 @@ public static int BenchmarkSpeed(int compress_level, string input_file)

/* shamelessly copied from QuickLZ 1.20 test program */
{
long mbs, fastest;

Console.WriteLine("Setting HIGH_PRIORITY_CLASS...");
{
@@ -681,54 +686,58 @@ public static int BenchmarkSpeed(int compress_level, string input_file)
Console.WriteLine($"Benchmarking FastLZ Level {compress_level}, please wait...");

long u = 0;
int i = bytes_read;
fastest = 0;
long fastest = 0;
long curTicks;
for (int j = 0; j < 3; j++)
{
int y = 0;
mbs = DateTime.UtcNow.Ticks;
while (DateTime.UtcNow.Ticks == mbs)
curTicks = GetTickCount64();
while (GetTickCount64() == curTicks)
{
}

mbs = DateTime.UtcNow.Ticks;
while (DateTime.UtcNow.Ticks - mbs < 3000) /* 1% accuracy with 18.2 timer */
curTicks = GetTickCount64();
while (GetTickCount64() - curTicks < 3000) /* 1% accuracy with 18.2 timer */
{
u = FastLZ.CompressLevel(compress_level, buffer, bytes_read, result);
y++;
}


mbs = (long)(((double)i * (double)y) / ((double)(DateTime.UtcNow.Ticks - mbs) / 1000.0d) / 1000000.0d);
/*printf(" %.1f Mbyte/s ", mbs);*/
if (fastest < mbs) fastest = mbs;
long mbs = (bytes_read * y) / ((GetTickCount64() - curTicks) / TimeSpan.TicksPerMillisecond);
if (fastest < mbs)
{
fastest = mbs;
}
}

Console.WriteLine($"Compressed {i} bytes into {u} bytes ({(u * 100.0 / i):F1}%) at {fastest:F1} Mbyte/s.");
Console.WriteLine($"Compressed {bytes_read} bytes into {u} bytes ({(u * 100.0d / bytes_read):F1}%) at {fastest / (double)1000:F1} Mbyte/s.");
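// A worked example of the mbs arithmetic above, using illustrative numbers rather
// than measured values:
//
//     long bytesReadExample = 8_388_608; // assumed 8 MiB input
//     long iterations       = 3;         // assumed loop count y
//     long elapsedMsExample = 150;       // assumed elapsed ticks / TimeSpan.TicksPerMillisecond
//     long mbsExample = (bytesReadExample * iterations) / elapsedMsExample; // 167,772 bytes per ms
//     // reported as mbsExample / 1000.0, i.e. roughly 167.8 Mbyte/s, since 1 MB = 1,000,000 bytes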

fastest = 0;
long compressed_size = u;
for (int j = 0; j < 3; j++)
{
int y = 0;
mbs = DateTime.UtcNow.Ticks;
while (DateTime.UtcNow.Ticks == mbs)
curTicks = GetTickCount64();
while (GetTickCount64() == curTicks)
{
}

mbs = DateTime.UtcNow.Ticks;
while (DateTime.UtcNow.Ticks - mbs < 3000) /* 1% accuracy with 18.2 timer */
curTicks = GetTickCount64();
while (GetTickCount64() - curTicks < 3000) /* 1% accuracy with 18.2 timer */
{
u = FastLZ.Decompress(result, compressed_size, buffer, bytes_read);
y++;
}

mbs = (long)(((double)i * (double)y) / ((double)(DateTime.UtcNow.Ticks - mbs) / 1000.0d) / 1000000.0d);
/*printf(" %.1f Mbyte/s ", mbs);*/
if (fastest < mbs) fastest = mbs;
long mbs = (bytes_read * y) / ((GetTickCount64() - curTicks) / TimeSpan.TicksPerMillisecond);
if (fastest < mbs)
{
fastest = mbs;
}
}

Console.WriteLine($"\nDecompressed at {fastest:F1} Mbyte/s.\n\n(1 MB = 1000000 byte)");
Console.WriteLine($"\nDecompressed at {fastest / (double)1000:F1} Mbyte/s.\n\n(1 MB = 1000000 byte)");
}

return 0;
117 changes: 107 additions & 10 deletions test/DotFastLZ.Packaging.Tests/SixPackTests.cs
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using NUnit.Framework;

@@ -26,33 +27,85 @@ public void TestDetectMagic()
.ToArray()
.Select(x => x)
.ToList();

var copied = new List<byte>();
copied.Add(1); // add head(1)
copied.AddRange(source);
copied.Add(2); // add tail(2);

Assert.That(SixPack.DetectMagic(copied.ToArray(), 0), Is.EqualTo(false));
Assert.That(SixPack.DetectMagic(copied.ToArray(), 1), Is.EqualTo(true));
Assert.That(SixPack.DetectMagic(copied.ToArray(), 2), Is.EqualTo(false));
Assert.That(SixPack.DetectMagic(copied.ToArray(), 3), Is.EqualTo(false));

Assert.That(SixPack.DetectMagic(copied.ToArray(), copied.Count + 1), Is.EqualTo(false));
}
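// An illustrative sketch (not the library's actual implementation) of the kind of
// bounds-guarded check the assertions above exercise: DetectMagic is expected to
// return false when the magic bytes cannot fit at the given offset and to match
// byte-by-byte otherwise. The magic array here is a stand-in, not the real value.
//
//     static bool DetectMagicSketch(byte[] buffer, long offset, byte[] magic)
//     {
//         if (offset < 0 || buffer.Length < offset + magic.Length)
//             return false; // covers the copied.Count + 1 case above
//         for (int n = 0; n < magic.Length; n++)
//         {
//             if (buffer[offset + n] != magic[n]) return false;
//         }
//         return true;
//     }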

[Test]
public void TestPackFile()
public void TestPackAndUnpack()
{
}
const string filename = "6pack-packing-test.txt";
string fastlz1 = filename + ".fastlz1";
string fastlz2 = filename + ".fastlz2";

[Test]
public void TestUnpackFile()
{
File.Delete(filename);
File.Delete(fastlz1);
File.Delete(fastlz2);

GenerateFile(filename, 1024 * 1024);

// pack
SixPack.PackFile(1, filename, fastlz1);
SixPack.PackFile(2, filename, fastlz2);

var sourceMd5 = ComputeMD5(filename);
File.Delete(filename);
Assert.That(sourceMd5, Is.EqualTo("0e3618ab09fb7e1989a05da990b2911a"));
Assert.That(File.Exists(filename), Is.EqualTo(false));

// checksum
Assert.That(ComputeMD5(fastlz1), Is.EqualTo("07017027a344938392152e47e5389c34"));
Assert.That(ComputeMD5(fastlz2), Is.EqualTo("945b4b347e9b5bd43e86e3b41be09e8b"));

// unpack level1
{
int status1 = SixPack.UnpackFile(fastlz1);
var decompress1Md5 = ComputeMD5(filename);
File.Delete(filename);
File.Delete(fastlz1);

Assert.That(status1, Is.EqualTo(0));
Assert.That(decompress1Md5, Is.EqualTo(sourceMd5));
Assert.That(File.Exists(filename), Is.EqualTo(false));
}

// unpack level2
{
int status2 = SixPack.UnpackFile(fastlz2);
var decompress2Md5 = ComputeMD5(filename);

File.Delete(filename);
File.Delete(fastlz2);

Assert.That(status2, Is.EqualTo(0));
Assert.That(decompress2Md5, Is.EqualTo(sourceMd5));
Assert.That(File.Exists(filename), Is.EqualTo(false));
}
}

[Test]
public void TestBenchmarkSpeed()
{
const string benchmarkFileName = "benchmark.txt";
File.Delete(benchmarkFileName);

GenerateFile(benchmarkFileName, 1024 * 1024 * 8);

int status1 = SixPack.BenchmarkSpeed(1, benchmarkFileName);
int status2 = SixPack.BenchmarkSpeed(2, benchmarkFileName);

Assert.That(status1, Is.EqualTo(0));
Assert.That(status2, Is.EqualTo(0));
}

[Test]
Expand All @@ -71,7 +124,51 @@ public void TestGetFileName()
[Test]
public void TestOpenFile()
{
Assert.That(SixPack.OpenFile("aaaaa.dll", FileMode.Open), Is.Null);
Assert.That(SixPack.OpenFile("aaaaa.dll", FileMode.Create, FileAccess.Write), Is.Not.Null);
const string filename = "open-file-test.txt";
File.Delete(filename);

Assert.That(SixPack.OpenFile(filename, FileMode.Open), Is.Null);
Assert.That(SixPack.OpenFile(filename, FileMode.Create, FileAccess.Write), Is.Not.Null);
}

public long GenerateFile(string filename, long size)
{
var text = @"About Adler32 Checksum Calculator
The Adler32 Checksum Calculator will compute an Adler32 checksum of string.
Adler32 is a checksum algorithm that was invented by Mark Adler.
In contrast to a cyclic redundancy check (CRC) of the same length, it trades reliability for speed.";

var bytes = Encoding.UTF8.GetBytes(text);

using var fs = SixPack.OpenFile(filename, FileMode.Create, FileAccess.Write);

var count = size / bytes.Length;
for (int i = 0; i < count; ++i)
{
fs.Write(bytes);
}

count = size % bytes.Length;
if (0 < count)
{
fs.Write(bytes, 0, (int)count);
}

return size;
}

public static string ComputeMD5(string filePath)
{
using var md5 = MD5.Create();
using var stream = File.OpenRead(filePath);
byte[] hashBytes = md5.ComputeHash(stream);
StringBuilder sb = new StringBuilder();

foreach (byte b in hashBytes)
{
sb.Append(b.ToString("x2"));
}

return sb.ToString();
}
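// On .NET 5 and later, Convert.ToHexString(hashBytes).ToLowerInvariant() would yield
// the same lowercase hex digest as the StringBuilder loop above.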
}
