Moved section compression to a storage backend adaptor to make it configurable

mcrcortex
2024-02-07 14:28:49 +10:00
parent 1ea4b25573
commit f0f5842734
7 changed files with 135 additions and 27 deletions
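In short: WorldEngine now takes a fully composed StorageBackend instead of a compression level, and the ZSTD compression that previously lived inside SaveLoadSystem is applied by a CompressionStorageAdaptor wrapped around the raw backend. A minimal wiring sketch (imports omitted; class names are taken from this commit, while the level, thread counts and path are illustrative only):

StorageBackend storage = new CompressionStorageAdaptor(
        new ZSTDCompressor(7),                                //compression level (illustrative)
        new RocksDBStorageBackend(new File("voxy/storage"))); //any child StorageBackend can sit here
WorldEngine world = new WorldEngine(storage, 4, 2, 5);        //ingestWorkers, savingServiceWorkers, maxMipLayers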

VoxelCore.java

@@ -6,6 +6,8 @@ import me.cortex.voxy.client.core.rendering.*;
import me.cortex.voxy.client.core.rendering.building.RenderGenerationService;
import me.cortex.voxy.client.core.rendering.post.PostProcessing;
import me.cortex.voxy.client.core.util.DebugUtil;
+import me.cortex.voxy.common.storage.CompressionStorageAdaptor;
+import me.cortex.voxy.common.storage.ZSTDCompressor;
import me.cortex.voxy.common.storage.rocksdb.RocksDBStorageBackend;
import me.cortex.voxy.common.world.WorldEngine;
import me.cortex.voxy.client.importers.WorldImporter;
@@ -55,7 +57,7 @@ public class VoxelCore {
SharedIndexBuffer.INSTANCE.id();
this.renderer = new Gl46FarWorldRenderer(VoxyConfig.CONFIG.geometryBufferSize, VoxyConfig.CONFIG.maxSections);
System.out.println("Renderer initialized");
-this.world = new WorldEngine(new RocksDBStorageBackend(new File(VoxyConfig.CONFIG.storagePath)), VoxyConfig.CONFIG.ingestThreads, VoxyConfig.CONFIG.savingThreads, VoxyConfig.CONFIG.savingCompressionLevel, 5);//"storagefile.db"//"ethoslab.db"
+this.world = new WorldEngine(new CompressionStorageAdaptor(new ZSTDCompressor(VoxyConfig.CONFIG.savingCompressionLevel), new RocksDBStorageBackend(new File(VoxyConfig.CONFIG.storagePath))), VoxyConfig.CONFIG.ingestThreads, VoxyConfig.CONFIG.savingThreads, 5);
System.out.println("World engine");
this.renderTracker = new RenderTracker(this.world, this.renderer);

CompressionStorageAdaptor.java (new file)

@@ -0,0 +1,68 @@
package me.cortex.voxy.common.storage;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import me.cortex.voxy.common.storage.lmdb.LMDBStorageBackend;
import net.minecraft.util.math.random.RandomSeed;
import org.lwjgl.system.MemoryUtil;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.util.Arrays;
//Compresses the section data
public class CompressionStorageAdaptor extends StorageBackend {
private final StorageCompressor compressor;
private final StorageBackend child;
public CompressionStorageAdaptor(StorageCompressor compressor, StorageBackend child) {
this.compressor = compressor;
this.child = child;
}
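//Reads the stored blob from the child backend, decompresses it into a freshly allocated buffer and frees the compressed copy before returning the result (or null if the section is absent)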
@Override
public ByteBuffer getSectionData(long key) {
var data = this.child.getSectionData(key);
if (data == null) {
return null;
}
var decompressed = this.compressor.decompress(data);
MemoryUtil.memFree(data);
return decompressed;
}
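//Compresses the incoming section data, hands the compressed copy to the child backend and then frees that copy; the caller keeps ownership of the original buffer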
@Override
public void setSectionData(long key, ByteBuffer data) {
var cdata = this.compressor.compress(data);
this.child.setSectionData(key, cdata);
MemoryUtil.memFree(cdata);
}
@Override
public void deleteSectionData(long key) {
this.child.deleteSectionData(key);
}
@Override
public void putIdMapping(int id, ByteBuffer data) {
this.child.putIdMapping(id, data);
}
@Override
public Int2ObjectOpenHashMap<byte[]> getIdMappingsData() {
return this.child.getIdMappingsData();
}
@Override
public void flush() {
this.child.flush();
}
@Override
public void close() {
this.compressor.close();
this.child.close();
}
}

StorageCompressor.java (new file)

@@ -0,0 +1,11 @@
package me.cortex.voxy.common.storage;
import java.nio.ByteBuffer;
public interface StorageCompressor {
ByteBuffer compress(ByteBuffer saveData);
ByteBuffer decompress(ByteBuffer saveData);
void close();
}
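An implementation only has to honour the buffer ownership that CompressionStorageAdaptor assumes: both methods return a freshly allocated off-heap buffer, while the adaptor itself frees the buffer it passes into decompress and the buffer it gets back from compress. A hypothetical pass-through implementation, sketched purely for illustration (not part of this commit):

package me.cortex.voxy.common.storage;

import org.lwjgl.system.MemoryUtil;

import java.nio.ByteBuffer;

//Hypothetical example: stores sections uncompressed while still satisfying the StorageCompressor buffer contract
public class NoopCompressor implements StorageCompressor {
    private static ByteBuffer copyOf(ByteBuffer src) {
        //Return a new off-heap copy of the remaining bytes; whoever receives it frees it
        ByteBuffer dst = MemoryUtil.memAlloc(src.remaining());
        MemoryUtil.memCopy(src, dst);
        return dst;
    }

    @Override
    public ByteBuffer compress(ByteBuffer saveData) {
        return copyOf(saveData);
    }

    @Override
    public ByteBuffer decompress(ByteBuffer saveData) {
        return copyOf(saveData);
    }

    @Override
    public void close() {
    }
}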

ZSTDCompressor.java (new file)

@@ -0,0 +1,37 @@
package me.cortex.voxy.common.storage;
import org.lwjgl.system.MemoryUtil;
import java.nio.ByteBuffer;
import static org.lwjgl.util.zstd.Zstd.*;
public class ZSTDCompressor implements StorageCompressor {
private final int level;
public ZSTDCompressor(int level) {
this.level = level;
}
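//Allocates the worst-case output size reported by ZSTD_COMPRESSBOUND, compresses at the configured level and trims the limit to the actual compressed size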
@Override
public ByteBuffer compress(ByteBuffer saveData) {
ByteBuffer compressedData = MemoryUtil.memAlloc((int)ZSTD_COMPRESSBOUND(saveData.remaining()));
long compressedSize = ZSTD_compress(compressedData, saveData, this.level);
compressedData.limit((int) compressedSize);
compressedData.rewind();
return compressedData;
}
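//Decompresses into a fixed-size scratch buffer sized for a full serialized section and trims the limit to the actual decompressed size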
@Override
public ByteBuffer decompress(ByteBuffer saveData) {
var decompressed = MemoryUtil.memAlloc(32*32*32*8*2);
long size = ZSTD_decompress(decompressed, saveData);
decompressed.limit((int) size);
return decompressed;
}
@Override
public void close() {
}
}

SaveLoadSystem.java

@@ -9,7 +9,8 @@ import java.nio.ByteBuffer;
import static org.lwjgl.util.zstd.Zstd.*;
public class SaveLoadSystem {
-public static ByteBuffer serialize(WorldSection section, int compressionLevel) {
+public static ByteBuffer serialize(WorldSection section) {
var data = section.copyData();
var compressed = new Short[data.length];
Long2ShortOpenHashMap LUT = new Long2ShortOpenHashMap();
@@ -47,28 +48,19 @@ public class SaveLoadSystem {
raw.limit(raw.position());
raw.rewind();
-ByteBuffer compressedData = MemoryUtil.memAlloc((int)ZSTD_COMPRESSBOUND(raw.remaining()));
-long compressedSize = ZSTD_compress(compressedData, raw, compressionLevel);
-compressedData.limit((int) compressedSize);
-compressedData.rewind();
-MemoryUtil.memFree(raw);
-//Compress into a key + data pallet format
-return compressedData;
+return raw;
}
public static boolean deserialize(WorldSection section, ByteBuffer data) {
-var decompressed = MemoryUtil.memAlloc(32*32*32*4*2);
-long size = ZSTD_decompress(decompressed, data);
-decompressed.limit((int) size);
long hash = 0;
-long key = decompressed.getLong();
-int lutLen = decompressed.getInt();
+long key = data.getLong();
+int lutLen = data.getInt();
long[] lut = new long[lutLen];
hash = key^(lut.length*1293481298141L);
for (int i = 0; i < lutLen; i++) {
-lut[i] = decompressed.getLong();
+lut[i] = data.getLong();
hash *= 1230987149811L;
hash += 12831;
hash ^= lut[i];
@@ -79,27 +71,25 @@ public class SaveLoadSystem {
}
for (int i = 0; i < section.data.length; i++) {
-short lutId = decompressed.getShort();
+short lutId = data.getShort();
section.data[i] = lut[lutId];
hash *= 1230987149811L;
hash += 12831;
hash ^= (lutId*1827631L) ^ section.data[i];
}
-long expectedHash = decompressed.getLong();
+long expectedHash = data.getLong();
if (expectedHash != hash) {
//throw new IllegalStateException("Hash mismatch got: " + hash + " expected: " + expectedHash);
System.err.println("Hash mismatch got: " + hash + " expected: " + expectedHash + " removing region");
return false;
}
-if (decompressed.hasRemaining()) {
+if (data.hasRemaining()) {
//throw new IllegalStateException("Decompressed section had excess data");
System.err.println("Decompressed section had excess data removing region");
return false;
}
-MemoryUtil.memFree(decompressed);
return true;
}
}
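With compression moved into the storage layer, serialize now returns the uncompressed key/LUT/palette buffer and deserialize reads one directly. Given a WorldEngine world and a WorldSection section, a save/load round trip through the new API looks roughly like this (illustrative snippet, not code from this commit):

ByteBuffer payload = SaveLoadSystem.serialize(section);        //uncompressed palette-encoded section
world.storage.setSectionData(section.key, payload);            //CompressionStorageAdaptor compresses here
MemoryUtil.memFree(payload);

ByteBuffer stored = world.storage.getSectionData(section.key); //comes back already decompressed, or null
if (stored != null) {
    if (!SaveLoadSystem.deserialize(section, stored)) {
        //hash mismatch or trailing data: the section is treated as corrupt
    }
    MemoryUtil.memFree(stored); //assumption: the caller now frees this buffer, since deserialize no longer does
}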

WorldEngine.java

@@ -1,5 +1,6 @@
package me.cortex.voxy.common.world;
+import me.cortex.voxy.common.storage.StorageCompressor;
import me.cortex.voxy.common.voxelization.VoxelizedSection;
import me.cortex.voxy.common.world.other.Mapper;
import me.cortex.voxy.common.world.service.SectionSavingService;
@@ -13,7 +14,6 @@ import java.util.function.Consumer;
//Use an LMDB backend to store the world, use a local inmemory cache for lod sections
// automatically manages and invalidates sections of the world as needed
public class WorldEngine {
public final StorageBackend storage;
private final Mapper mapper;
private final ActiveSectionTracker sectionTracker;
@@ -29,14 +29,14 @@ public class WorldEngine {
public Mapper getMapper() {return this.mapper;}
-public WorldEngine(StorageBackend storageBackend, int ingestWorkers, int savingServiceWorkers, int compressionLevel, int maxMipLayers) {
+public WorldEngine(StorageBackend storageBackend, int ingestWorkers, int savingServiceWorkers, int maxMipLayers) {
this.maxMipLevels = maxMipLayers;
this.storage = storageBackend;
this.mapper = new Mapper(this.storage);
//4 cache size bits means that the section tracker has 16 separate maps that it uses
this.sectionTracker = new ActiveSectionTracker(3, this::unsafeLoadSection);
-this.savingService = new SectionSavingService(this, savingServiceWorkers, compressionLevel);
+this.savingService = new SectionSavingService(this, savingServiceWorkers);
this.ingestService = new VoxelIngestService(this, ingestWorkers);
}

SectionSavingService.java

@@ -1,5 +1,6 @@
package me.cortex.voxy.common.world.service;
+import me.cortex.voxy.common.storage.StorageCompressor;
import me.cortex.voxy.common.world.SaveLoadSystem;
import me.cortex.voxy.common.world.WorldEngine;
import me.cortex.voxy.common.world.WorldSection;
@@ -15,13 +16,13 @@ public class SectionSavingService {
private volatile boolean running = true;
private final Thread[] workers;
-private final int compressionLevel;
private final ConcurrentLinkedDeque<WorldSection> saveQueue = new ConcurrentLinkedDeque<>();
private final Semaphore saveCounter = new Semaphore(0);
private final WorldEngine world;
-public SectionSavingService(WorldEngine worldEngine, int workers, int compressionLevel) {
+public SectionSavingService(WorldEngine worldEngine, int workers) {
this.workers = new Thread[workers];
for (int i = 0; i < workers; i++) {
var worker = new Thread(this::saveWorker);
@@ -30,7 +31,6 @@ public class SectionSavingService {
worker.start();
this.workers[i] = worker;
}
-this.compressionLevel = compressionLevel;
this.world = worldEngine;
}
@@ -42,7 +42,7 @@ public class SectionSavingService {
section.assertNotFree();
section.inSaveQueue.set(false);
-var saveData = SaveLoadSystem.serialize(section, this.compressionLevel);
+var saveData = SaveLoadSystem.serialize(section);
this.world.storage.setSectionData(section.key, saveData);
MemoryUtil.memFree(saveData);