new save system + rework some things

mcrcortex
2025-03-31 16:56:19 +10:00
parent 40c4101bec
commit b85e6367b4
13 changed files with 172 additions and 30 deletions

View File

@@ -115,10 +115,12 @@ public class VoxyConfigScreenFactory implements ModMenuApi {
         category.addEntry(entryBuilder.startIntSlider(Text.translatable("voxy.config.general.renderDistance"), config.sectionRenderDistance, 2, 64)
                 .setTooltip(Text.translatable("voxy.config.general.renderDistance.tooltip"))
                 .setSaveConsumer(val -> {
-                    config.sectionRenderDistance = val;
-                    var wrenderer =((IGetVoxyRenderSystem)(MinecraftClient.getInstance().worldRenderer));
-                    if (wrenderer != null && wrenderer.getVoxyRenderSystem() != null) {
-                        wrenderer.getVoxyRenderSystem().setRenderDistance(val);
+                    if (config.sectionRenderDistance != val) {
+                        config.sectionRenderDistance = val;
+                        var wrenderer = ((IGetVoxyRenderSystem) (MinecraftClient.getInstance().worldRenderer));
+                        if (wrenderer != null && wrenderer.getVoxyRenderSystem() != null) {
+                            wrenderer.getVoxyRenderSystem().setRenderDistance(val);
+                        }
                     }
                 })
                 .setDefaultValue(DEFAULT.sectionRenderDistance)

View File

@@ -173,6 +173,7 @@ public class RenderService<T extends AbstractSectionRenderer<J, ?>, J extends Vi
         this.nodeCleaner.free();
         //Release all the unprocessed built geometry
         this.geometryUpdateQueue.clear(BuiltSection::free);
+        this.sectionUpdateQueue.clear(WorldSection::release);//Release anything thats in the queue
     }

     public Viewport<?> getViewport() {

View File

@@ -9,7 +9,7 @@ import java.util.function.LongConsumer;
 import static me.cortex.voxy.common.world.WorldEngine.UPDATE_TYPE_BLOCK_BIT;

 public class SectionUpdateRouter implements ISectionWatcher {
-    private static final int SLICES = 1<<3;
+    private static final int SLICES = 1<<8;
     public interface IChildUpdate {void accept(WorldSection section);}

     private final Long2ByteOpenHashMap[] slices = new Long2ByteOpenHashMap[SLICES];

View File

@@ -3,8 +3,10 @@ package me.cortex.voxy.common.config;
 import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
 import java.nio.ByteBuffer;
+import java.util.function.LongConsumer;

 public interface IMappingStorage {
+    void iterateStoredSectionPositions(LongConsumer consumer);
     void putIdMapping(int id, ByteBuffer data);
     Int2ObjectOpenHashMap<byte[]> getIdMappingsData();
     void flush();

View File

@@ -8,12 +8,14 @@ import me.cortex.voxy.common.config.storage.StorageConfig;
 import me.cortex.voxy.common.util.ThreadLocalMemoryBuffer;
 import me.cortex.voxy.common.world.SaveLoadSystem;
 import me.cortex.voxy.common.world.SaveLoadSystem2;
+import me.cortex.voxy.common.world.SaveLoadSystem3;
 import me.cortex.voxy.common.world.WorldSection;
 import me.cortex.voxy.common.world.other.Mapper;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.LongConsumer;

 public class SectionSerializationStorage extends SectionStorage {
     private final StorageBackend backend;
@@ -26,7 +28,7 @@ public class SectionSerializationStorage extends SectionStorage {
     public int loadSection(WorldSection into) {
         var data = this.backend.getSectionData(into.key, MEMORY_CACHE.get().createUntrackedUnfreeableReference());
         if (data != null) {
-            if (!SaveLoadSystem.deserialize(into, data)) {
+            if (!SaveLoadSystem3.deserialize(into, data)) {
                 this.backend.deleteSectionData(into.key);
                 //TODO: regenerate the section from children
                 Arrays.fill(into._unsafeGetRawDataArray(), Mapper.AIR);
@@ -46,7 +48,7 @@ public class SectionSerializationStorage extends SectionStorage {
     @Override
     public void saveSection(WorldSection section) {
-        var saveData = SaveLoadSystem.serialize(section);
+        var saveData = SaveLoadSystem3.serialize(section);
         this.backend.setSectionData(section.key, saveData);
         saveData.free();
     }
@@ -71,6 +73,11 @@ public class SectionSerializationStorage extends SectionStorage {
         this.backend.close();
     }

+    @Override
+    public void iterateStoredSectionPositions(LongConsumer consumer) {
+        this.backend.iterateStoredSectionPositions(consumer);
+    }

     public static class Config extends SectionStorageConfig {
         public StorageConfig storage;

View File

@@ -10,7 +10,6 @@ import java.util.List;
 import java.util.function.LongConsumer;

 public abstract class StorageBackend implements IMappingStorage {
-    public abstract void iterateStoredSectionPositions(LongConsumer consumer);

     //Implementation may use the scratch buffer as the return value, it MUST NOT free the scratch buffer
     public abstract MemoryBuffer getSectionData(long key, MemoryBuffer scratch);

View File

@@ -34,7 +34,9 @@ public class FragmentedStorageBackendAdaptor extends StorageBackend {
     @Override
     public void iterateStoredSectionPositions(LongConsumer consumer) {
-        throw new IllegalStateException("Not yet implemented");
+        for (var backend : this.backends) {
+            backend.iterateStoredSectionPositions(consumer);
+        }
     }

     //TODO: reencode the key to be shifted one less OR

View File

@@ -21,6 +21,7 @@ public class RocksDBStorageBackend extends StorageBackend {
     private final ColumnFamilyHandle worldSections;
     private final ColumnFamilyHandle idMappings;
     private final ReadOptions sectionReadOps;
+    private final WriteOptions sectionWriteOps;

     //NOTE: closes in order
     private final List<AbstractImmutableNativeReference> closeList = new ArrayList<>();
@@ -59,7 +60,8 @@
         final DBOptions options = new DBOptions()
                 .setCreateIfMissing(true)
-                .setCreateMissingColumnFamilies(true);
+                .setCreateMissingColumnFamilies(true)
+                .setMaxTotalWalSize(1024*1024*512);//512 mb max WAL size

         List<ColumnFamilyHandle> handles = new ArrayList<>();
@@ -69,12 +71,14 @@
                 handles);

         this.sectionReadOps = new ReadOptions();
+        this.sectionWriteOps = new WriteOptions();

         this.closeList.addAll(handles);
         this.closeList.add(this.db);
         this.closeList.add(options);
         this.closeList.add(cfOpts);
         this.closeList.add(this.sectionReadOps);
+        this.closeList.add(this.sectionWriteOps);

         this.worldSections = handles.get(1);
         this.idMappings = handles.get(2);
@@ -87,7 +91,19 @@
     @Override
     public void iterateStoredSectionPositions(LongConsumer consumer) {
-        throw new IllegalStateException("Not yet implemented");
+        try (var stack = MemoryStack.stackPush()) {
+            ByteBuffer keyBuff = stack.calloc(8);
+            long keyBuffPtr = MemoryUtil.memAddress(keyBuff);
+            var iter = this.db.newIterator(this.worldSections, this.sectionReadOps);
+            iter.seekToFirst();
+            while (iter.isValid()) {
+                iter.key(keyBuff);
+                long key = Long.reverseBytes(MemoryUtil.memGetLong(keyBuffPtr));
+                consumer.accept(key);
+                iter.next();
+            }
+            iter.close();
+        }
     }

     @Override
@@ -117,10 +133,10 @@
     //TODO: FIXME, use the ByteBuffer variant
     @Override
     public void setSectionData(long key, MemoryBuffer data) {
-        try {
-            var buffer = new byte[(int) data.size];
-            UnsafeUtil.memcpy(data.address, buffer);
-            this.db.put(this.worldSections, longToBytes(key), buffer);
+        try (var stack = MemoryStack.stackPush()) {
+            var keyBuff = stack.calloc(8);
+            MemoryUtil.memPutLong(MemoryUtil.memAddress(keyBuff), Long.reverseBytes(key));
+            this.db.put(this.worldSections, this.sectionWriteOps, keyBuff, data.asByteBuffer());
         } catch (RocksDBException e) {
             throw new RuntimeException(e);
         }
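
A note on the key handling above: MemoryUtil.memPutLong writes in native byte order, so on a little-endian host the Long.reverseBytes(key) call stores the section key big-endian, which makes RocksDB's default byte-wise key comparison follow unsigned numeric key order; the iterator undoes it with the same reverseBytes call. A minimal standalone sketch of that round trip using plain java.nio (KeyOrderSketch and sectionKey are illustrative, not part of the mod):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class KeyOrderSketch {
    // Encode a section key the way the backend appears to: reverse the bytes,
    // then write with native (little-endian) order, which yields big-endian storage.
    static byte[] encode(long sectionKey) {
        return ByteBuffer.allocate(8)
                .order(ByteOrder.nativeOrder())
                .putLong(Long.reverseBytes(sectionKey))
                .array();
    }

    // Decoding applies the same transformation in reverse.
    static long decode(byte[] keyBytes) {
        return Long.reverseBytes(ByteBuffer.wrap(keyBytes).order(ByteOrder.nativeOrder()).getLong());
    }

    public static void main(String[] args) {
        long sectionKey = 0x0123456789ABCDEFL; // hypothetical key
        byte[] stored = encode(sectionKey);
        // Big-endian storage puts the most significant byte first, so byte-wise
        // lexicographic comparison tracks unsigned numeric order.
        System.out.printf("first stored byte: %02x%n", stored[0]); // 01 on little-endian hosts
        System.out.println(decode(stored) == sectionKey);          // true
    }
}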

View File

@@ -18,6 +18,10 @@ public class ThreadLocalMemoryBuffer {
         this.threadLocal = ThreadLocal.withInitial(()->createMemoryBuffer(size));
     }

+    public static MemoryBuffer create(long size) {
+        return createMemoryBuffer(size);
+    }
+
     public MemoryBuffer get() {
         return this.threadLocal.get();
     }

View File

@@ -30,22 +30,22 @@ public class SaveLoadSystem {
         return x|(y<<10)|(z<<5);
     }

-    private static final ThreadLocal<short[]> SHORT_CACHE = ThreadLocal.withInitial(()->new short[32*32*32]);
-    private static final ThreadLocal<long[]> LONG_CACHE = ThreadLocal.withInitial(()->new long[32*32*32]);
-    private static final ThreadLocal<Long2ShortOpenHashMap> OTHER_THING_CACHE = ThreadLocal.withInitial(()-> {
-        var thing = new Long2ShortOpenHashMap(512);
-        thing.defaultReturnValue((short) -1);
-        return thing;
-    });
+    private record SerializationCache(long[] blockStateCache, short[] compressedCache, long[] lutCache, Long2ShortOpenHashMap lutMapCache) {
+        public SerializationCache() {
+            this(new long[WorldSection.SECTION_VOLUME], new short[WorldSection.SECTION_VOLUME], new long[WorldSection.SECTION_VOLUME], new Long2ShortOpenHashMap(512));
+            this.lutMapCache.defaultReturnValue((short) -1);
+        }
+    }
+    private static final ThreadLocal<SerializationCache> CACHE = ThreadLocal.withInitial(SerializationCache::new);

     //TODO: Cache like long2short and the short and other data to stop allocs
     public static MemoryBuffer serialize(WorldSection section) {
-        var data = section.copyData();
-        var compressed = SHORT_CACHE.get();
-        Long2ShortOpenHashMap LUT = OTHER_THING_CACHE.get();LUT.clear();
-        long[] lutValues = LONG_CACHE.get();//If there are more than this many states in a section... im concerned
+        var cache = CACHE.get();
+        var data = cache.blockStateCache;
+        section.copyDataTo(data);
+        var compressed = cache.compressedCache;
+        Long2ShortOpenHashMap LUT = cache.lutMapCache; LUT.clear();
+        long[] lutValues = cache.lutCache;//If there are more than this many states in a section... im concerned
         short lutIndex = 0;
         long pHash = 99;
         for (int i = 0; i < data.length; i++) {
@@ -103,7 +103,7 @@
             throw new IllegalStateException("lutLen impossibly large, max size should be 32768 but got size " + lutLen);
         }
         //TODO: cache this in a thread local
-        long[] lut = LONG_CACHE.get();
+        long[] lut = CACHE.get().lutCache;
         long hash = 0;
         if (VERIFY_HASH_ON_LOAD) {
             hash = key ^ (lutLen * 1293481298141L);
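
The rework above folds three separate ThreadLocal scratch arrays into one thread-local SerializationCache record, so each worker thread reuses a single bundle of buffers across serialize calls instead of re-allocating them per section. A stripped-down sketch of that pattern under assumed names (ScratchCacheSketch is illustrative, not Voxy code):

import java.util.HashMap;
import java.util.Map;

public class ScratchCacheSketch {
    // One record bundles all reusable scratch state; its contents are mutated in
    // place between calls, so it must only ever be touched by its owning thread.
    private record Scratch(long[] values, short[] indices, Map<Long, Short> palette) {
        Scratch() {
            this(new long[32 * 32 * 32], new short[32 * 32 * 32], new HashMap<>(512));
        }
    }

    // withInitial builds one Scratch lazily per thread on first use.
    private static final ThreadLocal<Scratch> SCRATCH = ThreadLocal.withInitial(Scratch::new);

    static int paletteSize(long[] section) {
        var scratch = SCRATCH.get();
        scratch.palette().clear();      // reset reused state instead of reallocating
        for (int i = 0; i < section.length; i++) {
            short id = scratch.palette().computeIfAbsent(section[i], k -> (short) scratch.palette().size());
            scratch.indices()[i] = id;  // palette index per block, like the LUT pass above
        }
        return scratch.palette().size();
    }
}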

View File

@@ -0,0 +1,106 @@
package me.cortex.voxy.common.world;

import it.unimi.dsi.fastutil.longs.Long2ShortOpenHashMap;
import me.cortex.voxy.common.Logger;
import me.cortex.voxy.common.util.MemoryBuffer;
import me.cortex.voxy.common.util.ThreadLocalMemoryBuffer;
import me.cortex.voxy.common.util.UnsafeUtil;
import me.cortex.voxy.common.world.other.Mapper;
import me.cortex.voxy.commonImpl.VoxyCommon;
import org.lwjgl.system.MemoryUtil;

public class SaveLoadSystem3 {
    private record SerializationCache(long[] blockStateCache, Long2ShortOpenHashMap lutMapCache, MemoryBuffer memoryBuffer) {
        public SerializationCache() {
            this(new long[WorldSection.SECTION_VOLUME],
                    new Long2ShortOpenHashMap(512),
                    ThreadLocalMemoryBuffer.create(WorldSection.SECTION_VOLUME*2+WorldSection.SECTION_VOLUME*8+1024));
            this.lutMapCache.defaultReturnValue((short) -1);
        }
    }

    public static int lin2z(int i) {//y,z,x
        int x = i&0x1F;
        int y = (i>>10)&0x1F;
        int z = (i>>5)&0x1F;
        return Integer.expand(x,0b1001001001001)|Integer.expand(y,0b10010010010010)|Integer.expand(z,0b100100100100100);
        //zyxzyxzyxzyxzyx
    }

    public static int z2lin(int i) {
        int x = Integer.compress(i, 0b1001001001001);
        int y = Integer.compress(i, 0b10010010010010);
        int z = Integer.compress(i, 0b100100100100100);
        return x|(y<<10)|(z<<5);
    }

    private static final ThreadLocal<SerializationCache> CACHE = ThreadLocal.withInitial(SerializationCache::new);

    //TODO: Cache like long2short and the short and other data to stop allocs
    public static MemoryBuffer serialize(WorldSection section) {
        var cache = CACHE.get();
        var data = cache.blockStateCache;
        section.copyDataTo(data);
        Long2ShortOpenHashMap LUT = cache.lutMapCache; LUT.clear();

        MemoryBuffer buffer = cache.memoryBuffer().createUntrackedUnfreeableReference();
        long ptr = buffer.address;
        MemoryUtil.memPutLong(ptr, section.key); ptr += 8;
        long metadataPtr = ptr; ptr += 8;
        long blockPtr = ptr; ptr += WorldSection.SECTION_VOLUME*2;
        for (int i = 0; i < WorldSection.SECTION_VOLUME; i++) {
            long block = data[i];
            short mapping = LUT.putIfAbsent(block, (short) LUT.size());
            if (mapping == -1) {
                mapping = (short) (LUT.size()-1);
                MemoryUtil.memPutLong(ptr, block); ptr+=8;
            }
            MemoryUtil.memPutShort(blockPtr, mapping); blockPtr+=2;
        }
        if (LUT.size() >= 1<<16) {
            throw new IllegalStateException();
        }

        long metadata = 0;
        metadata |= Integer.toUnsignedLong(LUT.size());//Bottom 2 bytes
        metadata |= Byte.toUnsignedLong(section.getNonEmptyChildren())<<16;//Next byte
        //5 bytes free
        MemoryUtil.memPutLong(metadataPtr, metadata);

        //TODO: do hash

        //TODO: rework the storage system to not need to do useless copies like this (this is an issue for serialization, deserialization has solved this already)
        return buffer.subSize(ptr-buffer.address).copy();
    }

    public static boolean deserialize(WorldSection section, MemoryBuffer data) {
        long ptr = data.address;
        long key = MemoryUtil.memGetLong(ptr); ptr += 8;
        if (section.key != key) {
            //throw new IllegalStateException("Decompressed section not the same as requested. got: " + key + " expected: " + section.key);
            Logger.error("Decompressed section not the same as requested. got: " + key + " expected: " + section.key);
            return false;
        }
        long metadata = MemoryUtil.memGetLong(ptr); ptr += 8;
        section.nonEmptyChildren = (byte) ((metadata>>>16)&0xFF);

        int nonEmptyBlockCount = 0;
        long lutBasePtr = ptr + WorldSection.SECTION_VOLUME*2;
        var blockData = section.data;
        for (int i = 0; i < WorldSection.SECTION_VOLUME; i++) {
            short lutId = MemoryUtil.memGetShort(ptr); ptr+=2;
            long blockId = MemoryUtil.memGetLong(lutBasePtr+Short.toUnsignedLong(lutId)*8L);
            nonEmptyBlockCount += Mapper.isAir(blockId)?0:1;
            blockData[i] = blockId;
        }
        section.nonEmptyBlockCount = nonEmptyBlockCount;
        ptr = lutBasePtr + (metadata&0xFFFF)*8L;
        return true;
    }
}
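
From serialize and deserialize above, the SaveLoadSystem3 record layout is: an 8-byte section key, an 8-byte metadata word (LUT size in the low 16 bits, nonEmptyChildren in the next 8, 5 bytes spare), a fixed block of SECTION_VOLUME 2-byte palette indices, then the LUT itself as 8-byte block states. A hedged standalone reader for that layout in plain java.nio, assuming SECTION_VOLUME is 32*32*32 and a little-endian host (MemoryUtil writes in native byte order); SectionFormatSketch is not part of the mod:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class SectionFormatSketch {
    static final int SECTION_VOLUME = 32 * 32 * 32; // assumed value of WorldSection.SECTION_VOLUME

    // Layout written by SaveLoadSystem3.serialize:
    //   [0]  long   section key
    //   [8]  long   metadata (bits 0-15: LUT size, bits 16-23: nonEmptyChildren)
    //   [16] short  x SECTION_VOLUME  palette index per block
    //   [..] long   x lutSize         block-state LUT entries
    static long[] readBlockStates(ByteBuffer buf) {
        buf.order(ByteOrder.nativeOrder());
        long key = buf.getLong(0);                              // checked against the requested section by the real deserializer
        long metadata = buf.getLong(8);
        int lutSize = (int) (metadata & 0xFFFF);
        int nonEmptyChildren = (int) ((metadata >>> 16) & 0xFF); // applied to the section by the real deserializer

        int indicesBase = 16;
        int lutBase = indicesBase + SECTION_VOLUME * 2;
        long[] blocks = new long[SECTION_VOLUME];
        for (int i = 0; i < SECTION_VOLUME; i++) {
            int lutId = Short.toUnsignedInt(buf.getShort(indicesBase + i * 2));
            if (lutId >= lutSize) {
                throw new IllegalStateException("palette index out of range");
            }
            blocks[i] = buf.getLong(lutBase + lutId * 8); // expand palette index to full block state
        }
        return blocks;
    }
}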

View File

@@ -106,6 +106,9 @@ public class WorldEngine {
     public void markDirty(WorldSection section, int changeState) {
         if (!this.isLive) throw new IllegalStateException("World is not live");
+        if (section.tracker != this.sectionTracker) {
+            throw new IllegalStateException("Section is not from here");
+        }
         if (this.dirtyCallback != null) {
             this.dirtyCallback.accept(section, changeState);
         }

View File

@@ -54,7 +54,7 @@ public final class WorldSection {
     volatile int nonEmptyBlockCount = 0;
     volatile byte nonEmptyChildren;
-    private final ActiveSectionTracker tracker;
+    final ActiveSectionTracker tracker;
     public final AtomicBoolean inSaveQueue = new AtomicBoolean();

     //When the first bit is set it means its loaded