Commit

Added support for RS3 caches.
Displee committed Dec 27, 2018
1 parent 07e656f commit cf60e6e
Showing 14 changed files with 454 additions and 98 deletions.
Binary file modified RS2-Cache-Library.jar
Binary file removed lib/commons-compress-1.8.1.jar
Binary file added lib/lzma-java-1.2.jar
52 changes: 41 additions & 11 deletions src/org/displee/CacheLibrary.java
@@ -11,7 +11,7 @@
import org.displee.io.impl.OutputStream;
import org.displee.progress.ProgressListener;
import org.displee.utilities.Compression;
import org.displee.utilities.Compression.CompressionTypes;
import org.displee.utilities.Compression.CompressionType;
import org.displee.utilities.Constants;

/**
@@ -23,35 +23,44 @@ public class CacheLibrary {
/**
* An array of indices of this cache.
*/
private Index[] indices;
protected Index[] indices;

/**
* The main random access file (.dat2).
* The main random access file.
*/
private final RandomAccessFile mainFile;
protected RandomAccessFile mainFile;

/**
* The checksum table.
*/
private final ChecksumTable checksumTable;
protected ChecksumTable checksumTable;

/**
* The path to the cache files.
*/
private final String path;
protected final String path;

/**
* The mode.
*/
private final CacheLibraryMode mode;
protected final CacheLibraryMode mode;

/**
* If this library has been closed.
*/
private boolean closed;
protected boolean closed;

/**
* Initialize this cache library.
* Constructs a new {@code CacheLibrary} {@code Object}.
* @param path The path to the cache files.
* @throws IOException If it failed to read the cache files.
*/
public CacheLibrary(String path) throws IOException {
this(path, CacheLibraryMode.CACHED);
}

/**
* Constructs a new {@code CacheLibrary} {@code Object}.
* @param path The path to the cache files.
* @param mode The cache library mode.
* @throws IOException If it failed to read the cache files.
@@ -61,7 +70,7 @@ public CacheLibrary(String path, CacheLibraryMode mode) throws IOException {
}

/**
* Initialize this cache library.
* Constructs a new {@code CacheLibrary} {@code Object}.
* @param path The path to the cache files.
* @param mode The cache library mode.
* @param listener The progress listener.
@@ -71,8 +80,20 @@ public CacheLibrary(String path, CacheLibraryMode mode, ProgressListener listener) throws IOException {
if (path == null) {
throw new FileNotFoundException("The path to the cache is incorrect.");
}
if (!path.endsWith("/") && !path.endsWith("\\")) {
path += "/";
}
this.path = path;
this.mode = mode;
load(listener);
}

/**
* Load this cache.
* @param listener The progress listener.
* @throws IOException If it failed to read the cache files.
*/
protected void load(ProgressListener listener) throws IOException {
final File main = new File(path + "main_file_cache.dat2");
if (!main.exists()) {
if (listener != null) {
@@ -113,6 +134,7 @@ public CacheLibrary(String path, CacheLibraryMode mode, ProgressListener listener) throws IOException {
listener.notify(progress, "Failed to load index " + i + "...");
}
System.err.println("Failed to read index[id=" + i + ", file=" + file + ", length=" + file.length() + ", main=" + main + ", main_length=" + main.length() + ", indices=" + indices.length + "]");
e.printStackTrace();
}
}
checksumTable.write(new OutputStream(indices.length * Constants.ARCHIVE_HEADER_SIZE));
@@ -130,7 +152,7 @@ public Index addIndex(boolean named, boolean whirlpool) {
outputStream.writeByte((named ? 0x1 : 0x0) | (whirlpool ? 0x2 : 0x0));
outputStream.writeShort(0);
final int id = indices.length;
if (!checksumTable.writeArchiveInformation(id, Compression.compress(outputStream.flip(), CompressionTypes.GZIP, null, -1))) {
if (!checksumTable.writeArchiveInformation(id, Compression.compress(outputStream.flip(), CompressionType.GZIP, null, -1))) {
throw new RuntimeException("Failed to write the archive information for a new index[id=" + id + "]");
}
indices = Arrays.copyOf(indices, indices.length + 1);
@@ -256,6 +278,14 @@ public CacheLibraryMode getMode() {
return mode;
}

/**
* Check if this is an RS3 cache.
* @return If this is an RS3 cache.
*/
public boolean isRS3() {
return indices.length > 39;
}

/**
* Check if this library has been closed.
* @return {@code closed}
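A minimal usage sketch of the public surface added above. The cache path is a placeholder, and the single-argument constructor introduced in this commit defaults to CacheLibraryMode.CACHED:

import java.io.IOException;

import org.displee.CacheLibrary;

public class LoadCacheExample {

	public static void main(String[] args) throws IOException {
		// Hypothetical path; a trailing separator is now appended automatically if missing.
		CacheLibrary library = new CacheLibrary("C:/rs3/cache");
		// New heuristic from this commit: more than 39 indices marks an RS3 cache.
		System.out.println("RS3 cache: " + library.isRS3());
	}
}
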
97 changes: 61 additions & 36 deletions src/org/displee/cache/index/Index.java
@@ -1,8 +1,6 @@
package org.displee.cache.index;

import java.io.RandomAccessFile;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.displee.CacheLibrary;
@@ -13,7 +11,7 @@
import org.displee.io.impl.OutputStream;
import org.displee.progress.ProgressListener;
import org.displee.utilities.Compression;
import org.displee.utilities.Compression.CompressionTypes;
import org.displee.utilities.Compression.CompressionType;
import org.displee.utilities.Constants;
import org.displee.utilities.HashGenerator;
import org.displee.utilities.Whirlpool;
@@ -47,7 +45,7 @@ public class Index extends IndexInformation {
/**
* The compression type.
*/
private CompressionTypes type;
private CompressionType type;

/**
* Constructs a new {@code Index} {@code Object}.
@@ -60,7 +58,10 @@ public Index(CacheLibrary origin, int id, RandomAccessFile randomAccessFile) {
read();
}

private void read() {
/**
* Read this index.
*/
protected void read() {
if (id < 255) {
final ArchiveInformation archiveInformation = super.origin.getChecksumTable().getArchiveInformation(id);
if (archiveInformation != null) {
@@ -74,14 +75,28 @@ private void read() {
}
}

/**
* Update this index.
* @return If this index was updated with success.
*/
public boolean update() {
return update(null, null);
}

/**
* Update this index using xteas.
* @param map A map of xteas (example: map.put(23, new int[4]) will create xteas for archive 23).
* @return If this index was updated with success.
*/
public boolean update(Map<Integer, int[]> map) {
return update(null, map);
}

/**
* Update this index.
* @param listener The progress listener.
* @return If this index was updated with success.
*/
public boolean update(ProgressListener listener) {
return update(listener, null);
}
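
The javadoc above defines the XTEA map convention for update(Map). A hedged sketch of how a caller might supply keys; the index instance, archive id, and key values are placeholders:

import java.util.HashMap;
import java.util.Map;

import org.displee.cache.index.Index;

public class UpdateWithXteasExample {

	// Repack an index, encrypting archive 23 with the supplied XTEA keys (example values only).
	public static boolean repack(Index index) {
		Map<Integer, int[]> xteas = new HashMap<>();
		xteas.put(23, new int[] { 0, 0, 0, 0 }); // four keys per archive, as in the javadoc above
		return index.update(xteas);
	}
}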
@@ -96,42 +111,40 @@ public boolean update(ProgressListener listener, Map<Integer, int[]> map) {
boolean updateChecksumTable = false;
int updateCount = 0;
for(Archive archive : archives) {
if (archive == null) {
if (archive == null || !archive.isUpdateRequired()) {
continue;
}
updateCount++;
}
double i = 0;
for (Archive archive : archives) {
if (archive == null) {
if (archive == null || !archive.isUpdateRequired()) {
continue;
}
if (archive.isUpdateRequired()) {
i++;
archive.unFlag();
int[] keys = map == null ? null : map.get(archive.getId());
if (!updateChecksumTable) {
updateChecksumTable = true;
}
if (listener != null) {
listener.notify((i / updateCount) * 80.0, "Repacking archive " + archive.getId() + "...");
}
final byte[] compressed = Compression.compress(archive.write(new OutputStream(0)), super.id == 7 ? CompressionTypes.NONE : CompressionTypes.GZIP, keys, archive.getRevision());
archive.setCRC(HashGenerator.getCRCHash(compressed, 0, compressed.length - 2));
archive.setWhirlpool(Whirlpool.getHash(compressed, 0, compressed.length - 2));
final ArchiveInformation backup = getArchiveInformation(archive.getId());
if (!writeArchiveInformation(archive.getId(), compressed)) {
System.err.println("Could not write the archive information for index[id=" + super.id + ", archive=" + archive.getId() + "]");
System.err.println("Reverting changes...");
if (backup != null) {
if (writeArchiveInformation(archive.getId(), backup.getData())) {
System.out.println("Changes have been reverted.");
} else {
System.err.println("Your cache is corrupt.");
}
i++;
archive.unFlag();
int[] keys = map == null ? null : map.get(archive.getId());
if (!updateChecksumTable) {
updateChecksumTable = true;
}
if (listener != null) {
listener.notify((i / updateCount) * 80.0, "Repacking archive " + archive.getId() + "...");
}
final byte[] compressed = Compression.compress(archive.write(new OutputStream(0)), super.id == 7 ? CompressionType.NONE : CompressionType.GZIP, keys, archive.getRevision());
archive.setCRC(HashGenerator.getCRCHash(compressed, 0, compressed.length - 2));
archive.setWhirlpool(Whirlpool.getHash(compressed, 0, compressed.length - 2));
final ArchiveInformation backup = getArchiveInformation(archive.getId());
if (!writeArchiveInformation(archive.getId(), compressed)) {
System.err.println("Could not write the archive information for index[id=" + super.id + ", archive=" + archive.getId() + "]");
System.err.println("Reverting changes...");
if (backup != null) {
if (writeArchiveInformation(archive.getId(), backup.getData())) {
System.out.println("Changes have been reverted.");
} else {
System.err.println("Your cache is corrupt.");
}
return false;
}
return false;
}
if (origin.getMode() == CacheLibraryMode.UN_CACHED) {
archive.restore();
@@ -170,11 +183,11 @@ public ArchiveInformation getArchiveInformation(int id) {
throw new RuntimeException("File is too small.");
}
final byte[] buffer = new byte[Constants.ARCHIVE_SIZE];
randomAccessFile.seek(Constants.INDEX_SIZE * id);
randomAccessFile.seek(Constants.INDEX_SIZE * (long) id);
randomAccessFile.read(buffer, 0, Constants.INDEX_SIZE);
final InputStream inputStream = new InputStream(buffer);
final ArchiveInformation archiveInformation = new ArchiveInformation(type, inputStream.read24BitInt(), inputStream.read24BitInt());
if (archiveInformation.getSize() < 0 || archiveInformation.getSize() > 1000000) {
if (archiveInformation.getSize() < 0) {
return null;
} else if (archiveInformation.getPosition() <= 0 || archiveInformation.getPosition() > (super.origin.getMainFile().length() / Constants.ARCHIVE_SIZE)) {
return null;
@@ -195,12 +208,12 @@ public ArchiveInformation getArchiveInformation(int id) {
if (requiredToRead > archiveDataSize) {
requiredToRead = archiveDataSize;
}
super.origin.getMainFile().seek(archiveInformation.getPosition() * Constants.ARCHIVE_SIZE);
super.origin.getMainFile().seek((long) archiveInformation.getPosition() * Constants.ARCHIVE_SIZE);
super.origin.getMainFile().read(inputStream.getBytes(), 0, requiredToRead + archiveHeaderSize);
inputStream.setOffset(0);
if (!archiveInformation.read(inputStream)) {
throw new RuntimeException("Error, could not read the archive.");
} else if (super.id != archiveInformation.getIndex() || id != archiveInformation.getId() || chunk != archiveInformation.getChunk()) {
} else if (!isIndexValid(archiveInformation.getIndex()) || id != archiveInformation.getId() || chunk != archiveInformation.getChunk()) {
throw new RuntimeException("Error, the read data is incorrect. Data[currentIndex=" + super.id + ", index=" + archiveInformation.getIndex() + ", currentId=" + id + ", id=" + archiveInformation.getId() + ", currentChunk=" + chunk + ", chunk=" + archiveInformation.getChunk() + "]");
} else if (archiveInformation.getNextPosition() < 0 || archiveInformation.getNextPosition() > (super.origin.getMainFile().length() / Constants.ARCHIVE_SIZE)) {
throw new RuntimeException("Error, the next position is invalid.");
Expand All @@ -219,6 +232,15 @@ public ArchiveInformation getArchiveInformation(int id) {
return null;
}

/**
* Check if the argued index is equal to this index.
* @param index The id of the index.
* @return If the index is equal to this index.
*/
protected boolean isIndexValid(int index) {
return super.id == index;
}

/**
* Write the archive information.
* @param id The id of the archive.
@@ -347,6 +369,9 @@ public boolean cache(int[][] xteas) {
return cached;
}

/**
* Uncache this index (clears all stored data).
*/
public void uncache() {
for (Archive archive : super.archives) {
archive.restore();
@@ -390,7 +415,7 @@ public void setCRC(int crc) {
* Get the compression type.
* @return {@code type}
*/
public CompressionTypes getCompressionType() {
public CompressionType getCompressionType() {
return type;
}

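For completeness, a hedged sketch of the caching toggle touched above; only cache(int[][]) and the newly documented uncache() appear in this diff, so the exact decoding semantics and the XTEA key layout are assumptions:

import org.displee.cache.index.Index;

public class CacheToggleExample {

	// Load an index's archives into memory, use them, then release the stored data again.
	public static void preloadAndRelease(Index index, int[][] xteaKeys) {
		if (index.cache(xteaKeys)) {
			// ... work with the cached archives here ...
		}
		index.uncache(); // clears all stored data, per the javadoc added in this commit
	}
}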
