
Commit

typo: rename
ikpil committed Oct 2, 2023
1 parent 5037d37 commit 2bc6398
Showing 3 changed files with 65 additions and 62 deletions.
121 changes: 62 additions & 59 deletions src/DotFastLZ.Package/SixPack.cs
@@ -21,7 +21,8 @@ public static class SixPack
/* for Adler-32 checksum algorithm, see RFC 1950 Section 8.2 */
public const int ADLER32_BASE = 65521;

private static ulong update_adler32(ulong checksum, byte[] buf, long len)
// update_adler32
public static ulong Adler32(ulong checksum, byte[] buf, long len)
{
int ptr = 0;
ulong s1 = checksum & 0xffff;
@@ -69,7 +70,8 @@ private static ulong update_adler32(ulong checksum, byte[] buf, long len)

/* return non-zero if magic sequence is detected */
/* warning: reset the read pointer to the beginning of the file */
public static bool detect_magic(FileStream f)
// detect_magic
public static bool DetectMagic(FileStream f)
{
byte[] buffer = new byte[8];

@@ -93,14 +95,14 @@ public static bool detect_magic(FileStream f)
return true;
}


public static void write_magic(FileStream f)
// write_magic
private static void WriteMagic(FileStream f)
{
f.Write(sixpack_magic);
}


public static void write_chunk_header(FileStream f, int id, int options, long size, ulong checksum, long extra)
// write_chunk_header
private static void WriteChunkHeader(FileStream f, int id, int options, long size, ulong checksum, long extra)
{
byte[] buffer = new byte[16];

@@ -124,7 +126,8 @@ public static void write_chunk_header(FileStream f, int id, int options, long si
f.Write(buffer);
}

public static void read_chunk_header(FileStream f, out int id, out int options, out long size, out ulong checksum, out long extra)
// read_chunk_header
private static void ReadChunkHeader(FileStream f, out int id, out int options, out long size, out ulong checksum, out long extra)
{
byte[] buffer = new byte[16];
f.Read(buffer, 0, 16);
@@ -136,20 +139,9 @@ public static void read_chunk_header(FileStream f, out int id, out int options,
extra = FastLZ.ReadUInt32(buffer, 12) & 0xffffffff;
}

public static string GetFileName(string path)
{
if (string.IsNullOrEmpty(path))
{
return string.Empty;
}

/* truncate directory prefix, e.g. "foo/bar/FILE.txt" becomes "FILE.txt" */
return path
.Split(new char[] { '/', '\\', Path.DirectorySeparatorChar, }, StringSplitOptions.RemoveEmptyEntries)
.Last();
}

public static int pack_file_compressed(string input_file, int method, int level, FileStream f)
// pack_file_compressed
private static int PackFileCompressed(string input_file, int method, int level, FileStream f)
{
ulong checksum;
byte[] result = new byte[BLOCK_SIZE * 2]; /* FIXME twice is too large */
@@ -170,7 +162,7 @@ public static int pack_file_compressed(string input_file, int method, int level,
ifs.Seek(0, SeekOrigin.Begin);

/* already a 6pack archive? */
if (detect_magic(ifs))
if (DetectMagic(ifs))
{
Console.WriteLine($"Error: file {input_file} is already a 6pack archive!");
return -1;
@@ -197,9 +189,9 @@ public static int pack_file_compressed(string input_file, int method, int level,
buffer[9] = (byte)(shown_name.Length >> 8); // filename length for highest bit

checksum = 1L;
checksum = update_adler32(checksum, buffer, 10);
checksum = update_adler32(checksum, shown_name, shown_name.Length);
write_chunk_header(f, 1, 0, 10 + shown_name.Length, checksum, 0);
checksum = Adler32(checksum, buffer, 10);
checksum = Adler32(checksum, shown_name, shown_name.Length);
WriteChunkHeader(f, 1, 0, 10 + shown_name.Length, checksum, 0);
f.Write(buffer, 0, 10);
f.Write(shown_name, 0, shown_name.Length);
long total_compressed = 16 + 10 + shown_name.Length;
@@ -267,8 +259,8 @@ public static int pack_file_compressed(string input_file, int method, int level,
case 1:
{
long chunkSize = FastLZ.CompressLevel(level, buffer, bytes_read, result);
checksum = update_adler32(1L, result, chunkSize);
write_chunk_header(f, 17, 1, chunkSize, checksum, bytes_read);
checksum = Adler32(1L, result, chunkSize);
WriteChunkHeader(f, 17, 1, chunkSize, checksum, bytes_read);
f.Write(result, 0, (int)chunkSize);
total_compressed += 16;
total_compressed += chunkSize;
@@ -280,8 +272,8 @@ public static int pack_file_compressed(string input_file, int method, int level,
default:
{
checksum = 1L;
checksum = update_adler32(checksum, buffer, bytes_read);
write_chunk_header(f, 17, 0, bytes_read, checksum, bytes_read);
checksum = Adler32(checksum, buffer, bytes_read);
WriteChunkHeader(f, 17, 0, bytes_read, checksum, bytes_read);
f.Write(buffer, 0, bytes_read);
total_compressed += 16;
total_compressed += bytes_read;
@@ -324,21 +316,9 @@ public static int pack_file_compressed(string input_file, int method, int level,
return 0;
}

public static FileStream OpenFile(string filePath, FileMode mode, FileAccess access = FileAccess.Read, FileShare share = FileShare.Read)
{
try
{
return new FileStream(filePath, mode, access, share);
}
catch (Exception /* e */)
{
//Console.WriteLine(e.Message);
return null;
}
}


public static int pack_file(int compress_level, string input_file, string output_file)
// pack_file
public static int PackFile(int compress_level, string input_file, string output_file)
{
int result;

Expand All @@ -359,12 +339,13 @@ public static int pack_file(int compress_level, string input_file, string output

using var ofs = fs;

write_magic(ofs);
result = pack_file_compressed(input_file, 1, compress_level, ofs);
WriteMagic(ofs);
result = PackFileCompressed(input_file, 1, compress_level, ofs);
return result;
}

public static int unpack_file(string input_file)
// unpack_file
public static int UnpackFile(string input_file)
{
ulong checksum;

Expand All @@ -384,7 +365,7 @@ public static int unpack_file(string input_file)
ifs.Seek(0, SeekOrigin.Begin);

/* not a 6pack archive? */
if (!detect_magic(ifs))
if (!DetectMagic(ifs))
{
Console.WriteLine($"Error: file {input_file} is not a 6pack archive!");
return -1;
@@ -401,7 +382,7 @@ public static int unpack_file(string input_file)
long total_extracted = 0;
long decompressed_size = 0;
long percent = 0;

byte[] buffer = new byte[BLOCK_SIZE];
byte[] compressed_buffer = null;
byte[] decompressed_buffer = null;
@@ -418,7 +399,7 @@ public static int unpack_file(string input_file)
break;
}

read_chunk_header(
ReadChunkHeader(
ifs,
out var chunk_id,
out var chunk_options,
@@ -439,15 +420,15 @@ out var chunk_extra

/* file entry */
ifs.Read(buffer, 0, (int)chunk_size);
checksum = update_adler32(1L, buffer, chunk_size);
checksum = Adler32(1L, buffer, chunk_size);
if (checksum != chunk_checksum)
{
Console.WriteLine("\nError: checksum mismatch!");
Console.WriteLine($"Got {checksum:X8} Expecting {chunk_checksum:X8}");
return -1;
}


decompressed_size = FastLZ.ReadUInt32(buffer, 0);
total_extracted = 0;
percent = 0;
@@ -524,7 +505,7 @@ out var chunk_extra
}

f.Write(buffer, 0, (int)bytes_read);
checksum = update_adler32(checksum, buffer, bytes_read);
checksum = Adler32(checksum, buffer, bytes_read);
remaining -= bytes_read;
}

Expand Down Expand Up @@ -555,7 +536,7 @@ out var chunk_extra

/* read and check checksum */
ifs.Read(compressed_buffer, 0, (int)chunk_size);
checksum = update_adler32(1L, compressed_buffer, chunk_size);
checksum = Adler32(1L, compressed_buffer, chunk_size);
total_extracted += chunk_extra;

/* verify that the chunk data is correct */
Expand Down Expand Up @@ -614,11 +595,6 @@ out var chunk_extra
Console.WriteLine("");
Console.WriteLine("");

/* free allocated stuff */
// free(compressed_buffer);
// free(decompressed_buffer);
// free(output_file);

/* close working files */
if (null != f)
{
Expand All @@ -629,7 +605,8 @@ out var chunk_extra
return 0;
}

public static int benchmark_speed(int compress_level, string input_file)
// benchmark_speed
public static int BenchmarkSpeed(int compress_level, string input_file)
{
/* sanity check */
var fs = OpenFile(input_file, FileMode.Open);
Expand All @@ -647,7 +624,7 @@ public static int benchmark_speed(int compress_level, string input_file)
ifs.Seek(0, SeekOrigin.Begin);

/* already a 6pack archive? */
if (detect_magic(ifs))
if (DetectMagic(ifs))
{
Console.WriteLine("Error: no benchmark for 6pack archive!");
return -1;
Expand Down Expand Up @@ -742,5 +719,31 @@ public static int benchmark_speed(int compress_level, string input_file)

return 0;
}

public static string GetFileName(string path)
{
if (string.IsNullOrEmpty(path))
{
return string.Empty;
}

/* truncate directory prefix, e.g. "foo/bar/FILE.txt" becomes "FILE.txt" */
return path
.Split(new char[] { '/', '\\', Path.DirectorySeparatorChar, }, StringSplitOptions.RemoveEmptyEntries)
.Last();
}

public static FileStream OpenFile(string filePath, FileMode mode, FileAccess access = FileAccess.Read, FileShare share = FileShare.Read)
{
try
{
return new FileStream(filePath, mode, access, share);
}
catch (Exception /* e */)
{
//Console.WriteLine(e.Message);
return null;
}
}
}
}
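
Note: the loop body of the renamed Adler32 helper is collapsed in the @@ -69,7 +70,8 @@ hunk above. For reference only, a minimal RFC 1950 Section 8.2 update that fits the new signature and the ADLER32_BASE constant could look like the sketch below; the name Adler32Sketch and the per-byte modulo are illustrative assumptions, not the code in this commit (a production version typically defers the modulo for speed).

    // Illustrative sketch, not the code from this commit: a plain Adler-32
    // update per RFC 1950 Section 8.2, matching the renamed signature
    // ulong Adler32(ulong checksum, byte[] buf, long len).
    public static ulong Adler32Sketch(ulong checksum, byte[] buf, long len)
    {
        const ulong ADLER32_BASE = 65521;         // largest prime below 2^16
        ulong s1 = checksum & 0xffff;             // low 16 bits: running byte sum
        ulong s2 = (checksum >> 16) & 0xffff;     // high 16 bits: sum of the sums
        for (long i = 0; i < len; i++)
        {
            s1 = (s1 + buf[i]) % ADLER32_BASE;
            s2 = (s2 + s1) % ADLER32_BASE;
        }
        return (s2 << 16) + s1;                   // recombine; callers seed with 1, as in the diff
    }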
4 changes: 2 additions & 2 deletions src/DotFastLZ.SixPackTool/Program.cs
@@ -139,11 +139,11 @@ public static int Main(string[] args)

if (benchmark)
{
return SixPack.benchmark_speed(compress_level, input_file);
return SixPack.BenchmarkSpeed(compress_level, input_file);
}
else
{
return SixPack.pack_file(compress_level, input_file, output_file);
return SixPack.PackFile(compress_level, input_file, output_file);
}
}

2 changes: 1 addition & 1 deletion src/DotFastLZ.SixUnpackTool/Program.cs
@@ -69,7 +69,7 @@ public static int Main(string[] args)
for (int i = 0; i < archiveFiles.Count; ++i)
{
var archiveFile = archiveFiles[i];
SixPack.unpack_file(archiveFile);
SixPack.UnpackFile(archiveFile);
}

return 0;
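
The two Program.cs hunks above are the only call sites updated in this commit. A hypothetical external caller of the renamed public API could look like the sketch below; the namespace DotFastLZ.Package, the file names, and the compression level 2 are assumptions for illustration, not taken from the diff.

    using System;
    using DotFastLZ.Package; // assumed to match the folder src/DotFastLZ.Package

    public static class SixPackUsageSketch
    {
        public static void Run()
        {
            // pack_file -> PackFile: compress a file into a 6pack archive
            int packed = SixPack.PackFile(2, "input.bin", "input.6pack");

            // unpack_file -> UnpackFile: extract a 6pack archive
            int unpacked = SixPack.UnpackFile("input.6pack");

            // benchmark_speed -> BenchmarkSpeed: measure compression speed on a plain file
            int bench = SixPack.BenchmarkSpeed(2, "input.bin");

            // each call returns 0 on success and -1 on error, per the returns visible in the diff
            Console.WriteLine($"pack={packed} unpack={unpacked} bench={bench}");
        }
    }

WriteMagic, WriteChunkHeader, ReadChunkHeader and PackFileCompressed became private in this commit, so callers are expected to go through PackFile, UnpackFile and BenchmarkSpeed.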
