diff --git a/src/DotFastLZ.Compression/DotFastLZ.Compression.csproj b/src/DotFastLZ.Compression/DotFastLZ.Compression.csproj
index e7428b8..1779100 100644
--- a/src/DotFastLZ.Compression/DotFastLZ.Compression.csproj
+++ b/src/DotFastLZ.Compression/DotFastLZ.Compression.csproj
@@ -1,7 +1,7 @@
 <Project Sdk="Microsoft.NET.Sdk">
 
     <PropertyGroup>
-        <TargetFramework>netstandard2.1</TargetFramework>
+        <TargetFrameworks>netstandard2.0;netstandard2.1</TargetFrameworks>
        <AssemblyName>DotFastLZ.Compression</AssemblyName>
        <PackageReadmeFile>README.md</PackageReadmeFile>
        <Authors>ikpil</Authors>
diff --git a/src/DotFastLZ.Packaging/DotFastLZ.Packaging.csproj b/src/DotFastLZ.Packaging/DotFastLZ.Packaging.csproj
index 15519a9..16398c6 100644
--- a/src/DotFastLZ.Packaging/DotFastLZ.Packaging.csproj
+++ b/src/DotFastLZ.Packaging/DotFastLZ.Packaging.csproj
@@ -1,7 +1,7 @@
 <Project Sdk="Microsoft.NET.Sdk">
 
     <PropertyGroup>
-        <TargetFramework>netstandard2.1</TargetFramework>
+        <TargetFrameworks>netstandard2.0;netstandard2.1</TargetFrameworks>
        <AssemblyName>DotFastLZ.Packaging</AssemblyName>
        <PackageReadmeFile>README.md</PackageReadmeFile>
        <Authors>ikpil</Authors>
diff --git a/src/DotFastLZ.Packaging/SixPack.cs b/src/DotFastLZ.Packaging/SixPack.cs
index 2a717b4..c7362a7 100644
--- a/src/DotFastLZ.Packaging/SixPack.cs
+++ b/src/DotFastLZ.Packaging/SixPack.cs
@@ -19,7 +19,7 @@ public static class SixPack
         public const string FASTLZ_EXTENSION = ".fastlz";
 
         /* magic identifier for 6pack file */
-        public static readonly ReadOnlyMemory<byte> SIXPACK_MAGIC = new ReadOnlyMemory<byte>(new byte[] { 137, (byte)'6', (byte)'P', (byte)'K', 13, 10, 26, 10 });
+        public static readonly byte[] SIXPACK_MAGIC = new byte[] { 137, (byte)'6', (byte)'P', (byte)'K', 13, 10, 26, 10 };
        public const int BLOCK_SIZE = (2 * 64 * 1024);
 
        /* for Adler-32 checksum algorithm, see RFC 1950 Section 8.2 */
@@ -101,7 +101,7 @@ public static bool DetectMagic(byte[] buffer, int offset)
 
            for (int c = 0; c < 8; c++)
            {
-                if (buffer[offset + c] != SIXPACK_MAGIC.Span[c])
+                if (buffer[offset + c] != SIXPACK_MAGIC[c])
                {
                    return false;
                }
@@ -113,7 +113,7 @@ public static bool DetectMagic(byte[] buffer, int offset)
        // write_magic
        private static void WriteMagic(FileStream f)
        {
-            f.Write(SIXPACK_MAGIC.Span);
+            f.Write(SIXPACK_MAGIC, 0, SIXPACK_MAGIC.Length);
        }
 
        // write_chunk_header
@@ -138,7 +138,7 @@ private static void WriteChunkHeader(FileStream f, int id, int options, long siz
            buffer[14] = (byte)((extra >> 16) & 255);
            buffer[15] = (byte)((extra >> 24) & 255);
 
-            f.Write(buffer);
+            f.Write(buffer, 0, buffer.Length);
        }
 
        // read_chunk_header
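
The `SIXPACK_MAGIC` and `f.Write` rewrites above all trace back to one API gap: `Stream.Write(ReadOnlySpan<byte>)` exists on netstandard2.1 but not on netstandard2.0, so the multi-targeted build routes every write through the classic `(buffer, offset, count)` overload. A minimal sketch of the distinction, using a hypothetical `StreamCompat` helper that is not part of this patch:

    using System.IO;

    internal static class StreamCompat
    {
        // On netstandard2.1 a caller can pass the array as a span: stream.Write(buffer);
        // netstandard2.0 only has the array overload, which is what this patch uses.
        public static void WriteAll(Stream stream, byte[] buffer)
        {
            stream.Write(buffer, 0, buffer.Length);
        }
    }
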
"foo/bar/FILE.txt" becomes "FILE.txt" */ - string fileName = GetFileName(input_file); - byte[] utf8_shown_name = Encoding.UTF8.GetBytes(fileName); - byte[] shown_name = new byte[utf8_shown_name.Length + 1]; // for cstyle - Array.Fill(shown_name, (byte)0); - Array.Copy(utf8_shown_name, shown_name, utf8_shown_name.Length); - - /* chunk for File Entry */ - byte[] buffer = new byte[BLOCK_SIZE]; - buffer[0] = (byte)(fsize & 255); - buffer[1] = (byte)((fsize >> 8) & 255); - buffer[2] = (byte)((fsize >> 16) & 255); - buffer[3] = (byte)((fsize >> 24) & 255); - buffer[4] = (byte)((fsize >> 32) & 255); - buffer[5] = (byte)((fsize >> 40) & 255); - buffer[6] = (byte)((fsize >> 48) & 255); - buffer[7] = (byte)((fsize >> 56) & 255); - buffer[8] = (byte)(shown_name.Length & 255); // filename length for lowest bit - buffer[9] = (byte)(shown_name.Length >> 8); // filename length for highest bit - - checksum = 1L; - checksum = Adler32(checksum, buffer, 10); - checksum = Adler32(checksum, shown_name, shown_name.Length); - WriteChunkHeader(ofs, 1, 0, 10 + shown_name.Length, checksum, 0); - ofs.Write(buffer, 0, 10); - ofs.Write(shown_name, 0, shown_name.Length); - long total_compressed = 16 + 10 + shown_name.Length; - - /* for progress status */ - string progress; - if (16 < fileName.Length) - { - progress = fileName.Substring(0, 13); - progress += ".. "; - } - else - { - progress = fileName.PadRight(16, ' '); - } - - - logger?.Invoke($"{progress} ["); - for (int c = 0; c < 50; c++) + long fsize = 0; + using (var ifs = temp) { - logger?.Invoke("."); - } - - logger?.Invoke("]\r"); - logger?.Invoke($"{progress} ["); - - /* read file and place ifs archive */ - long total_read = 0; - long percent = 0; - var beginTick = GetTickCount64(); - for (;;) - { - int compress_method = method; - int last_percent = (int)percent; - int bytes_read = ifs.Read(buffer, 0, BLOCK_SIZE); - if (bytes_read == 0) - break; + /* find size of the file */ + ifs.Seek(0, SeekOrigin.End); + fsize = ifs.Position; + ifs.Seek(0, SeekOrigin.Begin); - total_read += bytes_read; + /* already a 6pack archive? */ + if (DetectMagicByFileStream(ifs)) + { + logger?.Invoke($"Error: file {input_file} is already a 6pack archive!\n"); + return -1; + } - /* for progress */ - if (fsize < (1 << 24)) + /* truncate directory prefix, e.g. 
"foo/bar/FILE.txt" becomes "FILE.txt" */ + string fileName = GetFileName(input_file); + byte[] utf8_shown_name = Encoding.UTF8.GetBytes(fileName); + byte[] shown_name = new byte[utf8_shown_name.Length + 1]; // for cstyle + Array.Copy(utf8_shown_name, shown_name, utf8_shown_name.Length); + + /* chunk for File Entry */ + byte[] buffer = new byte[BLOCK_SIZE]; + buffer[0] = (byte)(fsize & 255); + buffer[1] = (byte)((fsize >> 8) & 255); + buffer[2] = (byte)((fsize >> 16) & 255); + buffer[3] = (byte)((fsize >> 24) & 255); + buffer[4] = (byte)((fsize >> 32) & 255); + buffer[5] = (byte)((fsize >> 40) & 255); + buffer[6] = (byte)((fsize >> 48) & 255); + buffer[7] = (byte)((fsize >> 56) & 255); + buffer[8] = (byte)(shown_name.Length & 255); // filename length for lowest bit + buffer[9] = (byte)(shown_name.Length >> 8); // filename length for highest bit + + checksum = 1L; + checksum = Adler32(checksum, buffer, 10); + checksum = Adler32(checksum, shown_name, shown_name.Length); + WriteChunkHeader(ofs, 1, 0, 10 + shown_name.Length, checksum, 0); + ofs.Write(buffer, 0, 10); + ofs.Write(shown_name, 0, shown_name.Length); + long total_compressed = 16 + 10 + shown_name.Length; + + /* for progress status */ + string progress; + if (16 < fileName.Length) { - percent = total_read * 100 / fsize; + progress = fileName.Substring(0, 13); + progress += ".. "; } else { - percent = total_read / 256 * 100 / (fsize >> 8); + progress = fileName.PadRight(16, ' '); } - percent /= 2; - while (last_percent < (int)percent) - { - logger?.Invoke("#"); - last_percent++; - } - /* too small, don't bother to compress */ - if (bytes_read < 32) + logger?.Invoke($"{progress} ["); + for (int c = 0; c < 50; c++) { - compress_method = 0; + logger?.Invoke("."); } - /* write to output */ - switch (compress_method) + logger?.Invoke("]\r"); + logger?.Invoke($"{progress} ["); + + /* read file and place ifs archive */ + long total_read = 0; + long percent = 0; + var beginTick = GetTickCount64(); + for (;;) { - /* FastLZ */ - case 1: - { - long chunkSize = FastLZ.CompressLevel(level, buffer, bytes_read, result); - checksum = Adler32(1L, result, chunkSize); - WriteChunkHeader(ofs, 17, 1, chunkSize, checksum, bytes_read); - ofs.Write(result, 0, (int)chunkSize); - total_compressed += 16; - total_compressed += chunkSize; - } + int compress_method = method; + int last_percent = (int)percent; + int bytes_read = ifs.Read(buffer, 0, BLOCK_SIZE); + if (bytes_read == 0) break; - /* uncompressed, also fallback method */ - case 0: - default: + total_read += bytes_read; + + /* for progress */ + if (fsize < (1 << 24)) { - checksum = 1L; - checksum = Adler32(checksum, buffer, bytes_read); - WriteChunkHeader(ofs, 17, 0, bytes_read, checksum, bytes_read); - ofs.Write(buffer, 0, bytes_read); - total_compressed += 16; - total_compressed += bytes_read; + percent = total_read * 100 / fsize; + } + else + { + percent = total_read / 256 * 100 / (fsize >> 8); } - break; - } - } - if (total_read != fsize) - { - logger?.Invoke("\n"); - logger?.Invoke($"Error: reading {input_file} failed!\n"); - return -1; - } - else - { - logger?.Invoke("] "); - if (total_compressed < fsize) - { - if (fsize < (1 << 20)) + percent /= 2; + while (last_percent < (int)percent) { - percent = total_compressed * 1000 / fsize; + logger?.Invoke("#"); + last_percent++; } - else + + /* too small, don't bother to compress */ + if (bytes_read < 32) { - percent = total_compressed / 256 * 1000 / (fsize >> 8); + compress_method = 0; } - percent = 1000 - percent; + /* write to output */ + switch 
@@ -375,255 +377,256 @@ public static int UnpackFile(string input_file, Action<string> logger = null)
                return -1;
            }
 
-            using var ifs = tempFs;
-
-            /* find size of the file */
-            ifs.Seek(0, SeekOrigin.End);
-            long fsize = ifs.Position;
-            ifs.Seek(0, SeekOrigin.Begin);
-
-            /* not a 6pack archive? */
-            if (!DetectMagicByFileStream(ifs))
-            {
-                logger?.Invoke($"Error: file {input_file} is not a 6pack archive!\n");
-                return -1;
-            }
-
-            logger?.Invoke($"Archive: {input_file}");
-
-            /* position of first chunk */
-            ifs.Seek(8, SeekOrigin.Begin);
-
-            /* initialize */
-            string output_file = string.Empty;
-            FileStream ofs = null;
-            long total_extracted = 0;
-            long decompressed_size = 0;
-            long percent = 0;
-
-            byte[] buffer = new byte[BLOCK_SIZE];
-            byte[] compressed_buffer = null;
-            byte[] decompressed_buffer = null;
-            long compressed_bufsize = 0;
-            long decompressed_bufsize = 0;
-
-            /* main loop */
-            for (;;)
-            {
-                /* end of file? */
-                long pos = ifs.Position;
-                if (pos >= fsize)
-                {
-                    break;
-                }
-
-                ReadChunkHeader(
-                    ifs,
-                    out var chunk_id,
-                    out var chunk_options,
-                    out var chunk_size,
-                    out var chunk_checksum,
-                    out var chunk_extra
-                );
-
-                if (chunk_id == 1 && chunk_size > 10 && chunk_size < BLOCK_SIZE)
-                {
-                    /* close current file, if any */
-                    logger?.Invoke("\n");
-                    if (null != ofs)
-                    {
-                        ofs.Close();
-                        ofs = null;
-                    }
-
-                    /* file entry */
-                    ifs.Read(buffer, 0, (int)chunk_size);
-                    checksum = Adler32(1L, buffer, chunk_size);
-                    if (checksum != chunk_checksum)
-                    {
-                        logger?.Invoke("\nError: checksum mismatch!\n");
-                        logger?.Invoke($"Got {checksum:X8} Expecting {chunk_checksum:X8}\n");
-                        return -1;
-                    }
-
-                    decompressed_size = FastLZ.ReadUInt32(buffer, 0);
-                    total_extracted = 0;
-                    percent = 0;
-
-                    /* get file to extract */
-                    int name_length = FastLZ.ReadUInt16(buffer, 8);
-                    output_file = Encoding.UTF8.GetString(buffer, 10, name_length - 1);
-                    output_file = output_file.Trim();
-
-                    /* check if already exists */
-                    ofs = OpenFile(output_file, FileMode.Open);
-                    if (null != ofs)
-                    {
-                        ofs.Close();
-                        ofs = null;
-                        logger?.Invoke($"File {output_file} already exists. Skipped.\n");
-                    }
-                    else
-                    {
-                        /* create the file */
-                        ofs = OpenFile(output_file, FileMode.CreateNew, FileAccess.Write, FileShare.Write);
-                        if (null == ofs)
-                        {
-                            logger?.Invoke($"Can't create file {output_file} Skipped.\n");
-                        }
-                        else
-                        {
-                            /* for progress status */
-                            logger?.Invoke("\n");
-                            string progress;
-                            if (16 < output_file.Length)
-                            {
-                                progress = output_file.Substring(0, 13);
-                                progress += ".. ";
-                            }
-                            else
-                            {
-                                progress = output_file.PadRight(16, ' ');
-                            }
-
-                            logger?.Invoke($"{progress} [");
-                            for (int c = 0; c < 50; c++)
-                            {
-                                logger?.Invoke(".");
-                            }
-
-                            logger?.Invoke("]\r");
-                            logger?.Invoke($"{progress} [");
-                        }
-                    }
-                }
-
-                if ((chunk_id == 17) && null != ofs && !string.IsNullOrEmpty(output_file) && 0 < decompressed_size)
-                {
-                    long remaining;
-
-                    /* uncompressed */
-                    switch (chunk_options)
-                    {
-                        /* stored, simply copy to output */
-                        case 0:
-                        {
-                            /* read one block at at time, write and update checksum */
-                            total_extracted += chunk_size;
-                            remaining = chunk_size;
-                            checksum = 1L;
-                            for (;;)
-                            {
-                                long r = (BLOCK_SIZE < remaining) ? BLOCK_SIZE : remaining;
-                                long bytes_read = ifs.Read(buffer, 0, (int)r);
-                                if (0 >= bytes_read)
-                                {
-                                    break;
-                                }
-
-                                ofs.Write(buffer, 0, (int)bytes_read);
-                                checksum = Adler32(checksum, buffer, bytes_read);
-                                remaining -= bytes_read;
-                            }
-
-                            /* verify everything is written correctly */
-                            if (checksum != chunk_checksum)
-                            {
-                                logger?.Invoke("\nError: checksum mismatch. Aborted.\n");
-                                logger?.Invoke($"Got {checksum:X8} Expecting {chunk_checksum:X8}\n");
-                            }
-                        }
-                        break;
-
-                        /* compressed using FastLZ */
-                        case 1:
-                        {
-                            /* enlarge input buffer if necessary */
-                            if (chunk_size > compressed_bufsize)
-                            {
-                                compressed_bufsize = chunk_size;
-                                compressed_buffer = new byte[compressed_bufsize];
-                            }
-
-                            /* enlarge output buffer if necessary */
-                            if (chunk_extra > decompressed_bufsize)
-                            {
-                                decompressed_bufsize = chunk_extra;
-                                decompressed_buffer = new byte[decompressed_bufsize];
-                            }
-
-                            /* read and check checksum */
-                            ifs.Read(compressed_buffer, 0, (int)chunk_size);
-                            checksum = Adler32(1L, compressed_buffer, chunk_size);
-                            total_extracted += chunk_extra;
-
-                            /* verify that the chunk data is correct */
-                            if (checksum != chunk_checksum)
-                            {
-                                logger?.Invoke("\nError: checksum mismatch. Skipped.\n");
-                                logger?.Invoke($"Got {checksum:X8} Expecting {chunk_checksum:X8}\n");
-                            }
-                            else
-                            {
-                                /* decompress and verify */
-                                remaining = FastLZ.Decompress(compressed_buffer, chunk_size, decompressed_buffer, chunk_extra);
-                                if (remaining != chunk_extra)
-                                {
-                                    logger?.Invoke("\nError: decompression failed. Skipped.\n");
-                                }
-                                else
-                                {
-                                    ofs.Write(decompressed_buffer, 0, (int)chunk_extra);
-                                }
-                            }
-                        }
-                        break;
-
-                        default:
-                            logger?.Invoke($"\nError: unknown compression method ({chunk_options})\n");
-                            break;
-                    }
-
-                    /* for progress, if everything is fine */
-                    //if (null != f)
-                    {
-                        int last_percent = (int)percent;
-                        if (decompressed_size < (1 << 24))
-                        {
-                            percent = total_extracted * 100 / decompressed_size;
-                        }
-                        else
-                        {
-                            percent = total_extracted / 256 * 100 / (decompressed_size >> 8);
-                        }
-
-                        percent >>= 1;
-                        while (last_percent < (int)percent)
-                        {
-                            logger?.Invoke("#");
-                            last_percent++;
-                        }
-
-                        if (total_extracted == decompressed_size)
-                        {
-                            logger?.Invoke($"]\n");
-                        }
-                    }
-                }
-
-                /* position of next chunk */
-                ifs.Seek(pos + 16 + chunk_size, SeekOrigin.Begin);
-            }
-
-            logger?.Invoke("\n");
-            logger?.Invoke("\n");
-
-            /* close working files */
-            if (null != ofs)
-            {
-                ofs.Close();
+            using (var ifs = tempFs)
+            {
+                /* find size of the file */
+                ifs.Seek(0, SeekOrigin.End);
+                long fsize = ifs.Position;
+                ifs.Seek(0, SeekOrigin.Begin);
+
+                /* not a 6pack archive? */
+                if (!DetectMagicByFileStream(ifs))
+                {
+                    logger?.Invoke($"Error: file {input_file} is not a 6pack archive!\n");
+                    return -1;
+                }
+
+                logger?.Invoke($"Archive: {input_file}");
+
+                /* position of first chunk */
+                ifs.Seek(8, SeekOrigin.Begin);
+
+                /* initialize */
+                string output_file = string.Empty;
+                FileStream ofs = null;
+                long total_extracted = 0;
+                long decompressed_size = 0;
+                long percent = 0;
+
+                byte[] buffer = new byte[BLOCK_SIZE];
+                byte[] compressed_buffer = null;
+                byte[] decompressed_buffer = null;
+                long compressed_bufsize = 0;
+                long decompressed_bufsize = 0;
+
+                /* main loop */
+                for (;;)
+                {
+                    /* end of file? */
+                    long pos = ifs.Position;
+                    if (pos >= fsize)
+                    {
+                        break;
+                    }
+
+                    ReadChunkHeader(
+                        ifs,
+                        out var chunk_id,
+                        out var chunk_options,
+                        out var chunk_size,
+                        out var chunk_checksum,
+                        out var chunk_extra
+                    );
+
+                    if (chunk_id == 1 && chunk_size > 10 && chunk_size < BLOCK_SIZE)
+                    {
+                        /* close current file, if any */
+                        logger?.Invoke("\n");
+                        if (null != ofs)
+                        {
+                            ofs.Close();
+                            ofs = null;
+                        }
+
+                        /* file entry */
+                        ifs.Read(buffer, 0, (int)chunk_size);
+                        checksum = Adler32(1L, buffer, chunk_size);
+                        if (checksum != chunk_checksum)
+                        {
+                            logger?.Invoke("\nError: checksum mismatch!\n");
+                            logger?.Invoke($"Got {checksum:X8} Expecting {chunk_checksum:X8}\n");
+                            return -1;
+                        }
+
+                        decompressed_size = FastLZ.ReadUInt32(buffer, 0);
+                        total_extracted = 0;
+                        percent = 0;
+
+                        /* get file to extract */
+                        int name_length = FastLZ.ReadUInt16(buffer, 8);
+                        output_file = Encoding.UTF8.GetString(buffer, 10, name_length - 1);
+                        output_file = output_file.Trim();
+
+                        /* check if already exists */
+                        ofs = OpenFile(output_file, FileMode.Open);
+                        if (null != ofs)
+                        {
+                            ofs.Close();
+                            ofs = null;
+                            logger?.Invoke($"File {output_file} already exists. Skipped.\n");
+                        }
+                        else
+                        {
+                            /* create the file */
+                            ofs = OpenFile(output_file, FileMode.CreateNew, FileAccess.Write, FileShare.Write);
+                            if (null == ofs)
+                            {
+                                logger?.Invoke($"Can't create file {output_file} Skipped.\n");
+                            }
+                            else
+                            {
+                                /* for progress status */
+                                logger?.Invoke("\n");
+                                string progress;
+                                if (16 < output_file.Length)
+                                {
+                                    progress = output_file.Substring(0, 13);
+                                    progress += ".. ";
+                                }
+                                else
+                                {
+                                    progress = output_file.PadRight(16, ' ');
+                                }
+
+                                logger?.Invoke($"{progress} [");
+                                for (int c = 0; c < 50; c++)
+                                {
+                                    logger?.Invoke(".");
+                                }
+
+                                logger?.Invoke("]\r");
+                                logger?.Invoke($"{progress} [");
+                            }
+                        }
+                    }
+
Skipped.\n"); - } - else - { - /* create the file */ - ofs = OpenFile(output_file, FileMode.CreateNew, FileAccess.Write, FileShare.Write); - if (null == ofs) + /* get file to extract */ + int name_length = FastLZ.ReadUInt16(buffer, 8); + output_file = Encoding.UTF8.GetString(buffer, 10, name_length - 1); + output_file = output_file.Trim(); + + /* check if already exists */ + ofs = OpenFile(output_file, FileMode.Open); + if (null != ofs) { - logger?.Invoke($"Can't create file {output_file} Skipped.\n"); + ofs.Close(); + ofs = null; + logger?.Invoke($"File {output_file} already exists. Skipped.\n"); } else { - /* for progress status */ - logger?.Invoke("\n"); - string progress; - if (16 < output_file.Length) + /* create the file */ + ofs = OpenFile(output_file, FileMode.CreateNew, FileAccess.Write, FileShare.Write); + if (null == ofs) { - progress = output_file.Substring(0, 13); - progress += ".. "; + logger?.Invoke($"Can't create file {output_file} Skipped.\n"); } else { - progress = output_file.PadRight(16, ' '); - } + /* for progress status */ + logger?.Invoke("\n"); + string progress; + if (16 < output_file.Length) + { + progress = output_file.Substring(0, 13); + progress += ".. "; + } + else + { + progress = output_file.PadRight(16, ' '); + } - logger?.Invoke($"{progress} ["); - for (int c = 0; c < 50; c++) - { - logger?.Invoke("."); - } + logger?.Invoke($"{progress} ["); + for (int c = 0; c < 50; c++) + { + logger?.Invoke("."); + } - logger?.Invoke("]\r"); - logger?.Invoke($"{progress} ["); + logger?.Invoke("]\r"); + logger?.Invoke($"{progress} ["); + } } } - } - if ((chunk_id == 17) && null != ofs && !string.IsNullOrEmpty(output_file) && 0 < decompressed_size) - { - long remaining; - - /* uncompressed */ - switch (chunk_options) + if ((chunk_id == 17) && null != ofs && !string.IsNullOrEmpty(output_file) && 0 < decompressed_size) { - /* stored, simply copy to output */ - case 0: + long remaining; + + /* uncompressed */ + switch (chunk_options) { - /* read one block at at time, write and update checksum */ - total_extracted += chunk_size; - remaining = chunk_size; - checksum = 1L; - for (;;) + /* stored, simply copy to output */ + case 0: { - long r = (BLOCK_SIZE < remaining) ? BLOCK_SIZE : remaining; - long bytes_read = ifs.Read(buffer, 0, (int)r); - if (0 >= bytes_read) + /* read one block at at time, write and update checksum */ + total_extracted += chunk_size; + remaining = chunk_size; + checksum = 1L; + for (;;) { - break; + long r = (BLOCK_SIZE < remaining) ? BLOCK_SIZE : remaining; + long bytes_read = ifs.Read(buffer, 0, (int)r); + if (0 >= bytes_read) + { + break; + } + + ofs.Write(buffer, 0, (int)bytes_read); + checksum = Adler32(checksum, buffer, bytes_read); + remaining -= bytes_read; } - ofs.Write(buffer, 0, (int)bytes_read); - checksum = Adler32(checksum, buffer, bytes_read); - remaining -= bytes_read; - } - - /* verify everything is written correctly */ - if (checksum != chunk_checksum) - { - logger?.Invoke("\nError: checksum mismatch. Aborted.\n"); - logger?.Invoke($"Got {checksum:X8} Expecting {chunk_checksum:X8}\n"); + /* verify everything is written correctly */ + if (checksum != chunk_checksum) + { + logger?.Invoke("\nError: checksum mismatch. 
Aborted.\n"); + logger?.Invoke($"Got {checksum:X8} Expecting {chunk_checksum:X8}\n"); + } } - } - break; + break; - /* compressed using FastLZ */ - case 1: - { - /* enlarge input buffer if necessary */ - if (chunk_size > compressed_bufsize) + /* compressed using FastLZ */ + case 1: { - compressed_bufsize = chunk_size; - compressed_buffer = new byte[compressed_bufsize]; - } + /* enlarge input buffer if necessary */ + if (chunk_size > compressed_bufsize) + { + compressed_bufsize = chunk_size; + compressed_buffer = new byte[compressed_bufsize]; + } - /* enlarge output buffer if necessary */ - if (chunk_extra > decompressed_bufsize) - { - decompressed_bufsize = chunk_extra; - decompressed_buffer = new byte[decompressed_bufsize]; - } + /* enlarge output buffer if necessary */ + if (chunk_extra > decompressed_bufsize) + { + decompressed_bufsize = chunk_extra; + decompressed_buffer = new byte[decompressed_bufsize]; + } - /* read and check checksum */ - ifs.Read(compressed_buffer, 0, (int)chunk_size); - checksum = Adler32(1L, compressed_buffer, chunk_size); - total_extracted += chunk_extra; + /* read and check checksum */ + ifs.Read(compressed_buffer, 0, (int)chunk_size); + checksum = Adler32(1L, compressed_buffer, chunk_size); + total_extracted += chunk_extra; - /* verify that the chunk data is correct */ - if (checksum != chunk_checksum) - { - logger?.Invoke("\nError: checksum mismatch. Skipped.\n"); - logger?.Invoke($"Got {checksum:X8} Expecting {chunk_checksum:X8}\n"); - } - else - { - /* decompress and verify */ - remaining = FastLZ.Decompress(compressed_buffer, chunk_size, decompressed_buffer, chunk_extra); - if (remaining != chunk_extra) + /* verify that the chunk data is correct */ + if (checksum != chunk_checksum) { - logger?.Invoke("\nError: decompression failed. Skipped.\n"); + logger?.Invoke("\nError: checksum mismatch. Skipped.\n"); + logger?.Invoke($"Got {checksum:X8} Expecting {chunk_checksum:X8}\n"); } else { - ofs.Write(decompressed_buffer, 0, (int)chunk_extra); + /* decompress and verify */ + remaining = FastLZ.Decompress(compressed_buffer, chunk_size, decompressed_buffer, chunk_extra); + if (remaining != chunk_extra) + { + logger?.Invoke("\nError: decompression failed. 
Skipped.\n"); + } + else + { + ofs.Write(decompressed_buffer, 0, (int)chunk_extra); + } } } - } - break; - - default: - logger?.Invoke($"\nError: unknown compression method ({chunk_options})\n"); - break; - } + break; - /* for progress, if everything is fine */ - //if (null != f) - { - int last_percent = (int)percent; - if (decompressed_size < (1 << 24)) - { - percent = total_extracted * 100 / decompressed_size; - } - else - { - percent = total_extracted / 256 * 100 / (decompressed_size >> 8); + default: + logger?.Invoke($"\nError: unknown compression method ({chunk_options})\n"); + break; } - percent >>= 1; - while (last_percent < (int)percent) + /* for progress, if everything is fine */ + //if (null != f) { - logger?.Invoke("#"); - last_percent++; - } + int last_percent = (int)percent; + if (decompressed_size < (1 << 24)) + { + percent = total_extracted * 100 / decompressed_size; + } + else + { + percent = total_extracted / 256 * 100 / (decompressed_size >> 8); + } - if (total_extracted == decompressed_size) - { - logger?.Invoke($"]\n"); + percent >>= 1; + while (last_percent < (int)percent) + { + logger?.Invoke("#"); + last_percent++; + } + + if (total_extracted == decompressed_size) + { + logger?.Invoke($"]\n"); + } } } - } - /* position of next chunk */ - ifs.Seek(pos + 16 + chunk_size, SeekOrigin.Begin); - } + /* position of next chunk */ + ifs.Seek(pos + 16 + chunk_size, SeekOrigin.Begin); + } - logger?.Invoke("\n"); - logger?.Invoke("\n"); + logger?.Invoke("\n"); + logger?.Invoke("\n"); - /* close working files */ - if (null != ofs) - { - ofs.Close(); + /* close working files */ + if (null != ofs) + { + ofs.Close(); + } } /* so far so good */ @@ -641,109 +644,109 @@ public static int BenchmarkSpeed(int compress_level, string input_file, Action