new save system + rework some things

This commit is contained in:
mcrcortex
2025-03-31 16:56:19 +10:00
parent 40c4101bec
commit b85e6367b4
13 changed files with 172 additions and 30 deletions

View File

@@ -115,11 +115,13 @@ public class VoxyConfigScreenFactory implements ModMenuApi {
category.addEntry(entryBuilder.startIntSlider(Text.translatable("voxy.config.general.renderDistance"), config.sectionRenderDistance, 2, 64)
.setTooltip(Text.translatable("voxy.config.general.renderDistance.tooltip"))
.setSaveConsumer(val -> {
if (config.sectionRenderDistance != val) {
config.sectionRenderDistance = val;
var wrenderer =((IGetVoxyRenderSystem)(MinecraftClient.getInstance().worldRenderer));
var wrenderer = ((IGetVoxyRenderSystem) (MinecraftClient.getInstance().worldRenderer));
if (wrenderer != null && wrenderer.getVoxyRenderSystem() != null) {
wrenderer.getVoxyRenderSystem().setRenderDistance(val);
}
}
})
.setDefaultValue(DEFAULT.sectionRenderDistance)
.build());

View File

@@ -173,6 +173,7 @@ public class RenderService<T extends AbstractSectionRenderer<J, ?>, J extends Vi
this.nodeCleaner.free();
//Release all the unprocessed built geometry
this.geometryUpdateQueue.clear(BuiltSection::free);
this.sectionUpdateQueue.clear(WorldSection::release);//Release anything thats in the queue
}
public Viewport<?> getViewport() {

View File

@@ -9,7 +9,7 @@ import java.util.function.LongConsumer;
import static me.cortex.voxy.common.world.WorldEngine.UPDATE_TYPE_BLOCK_BIT;
public class SectionUpdateRouter implements ISectionWatcher {
private static final int SLICES = 1<<3;
private static final int SLICES = 1<<8;
public interface IChildUpdate {void accept(WorldSection section);}
private final Long2ByteOpenHashMap[] slices = new Long2ByteOpenHashMap[SLICES];

View File

@@ -3,8 +3,10 @@ package me.cortex.voxy.common.config;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import java.nio.ByteBuffer;
import java.util.function.LongConsumer;
public interface IMappingStorage {
void iterateStoredSectionPositions(LongConsumer consumer);
void putIdMapping(int id, ByteBuffer data);
Int2ObjectOpenHashMap<byte[]> getIdMappingsData();
void flush();

View File

@@ -8,12 +8,14 @@ import me.cortex.voxy.common.config.storage.StorageConfig;
import me.cortex.voxy.common.util.ThreadLocalMemoryBuffer;
import me.cortex.voxy.common.world.SaveLoadSystem;
import me.cortex.voxy.common.world.SaveLoadSystem2;
import me.cortex.voxy.common.world.SaveLoadSystem3;
import me.cortex.voxy.common.world.WorldSection;
import me.cortex.voxy.common.world.other.Mapper;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.LongConsumer;
public class SectionSerializationStorage extends SectionStorage {
private final StorageBackend backend;
@@ -26,7 +28,7 @@ public class SectionSerializationStorage extends SectionStorage {
public int loadSection(WorldSection into) {
var data = this.backend.getSectionData(into.key, MEMORY_CACHE.get().createUntrackedUnfreeableReference());
if (data != null) {
if (!SaveLoadSystem.deserialize(into, data)) {
if (!SaveLoadSystem3.deserialize(into, data)) {
this.backend.deleteSectionData(into.key);
//TODO: regenerate the section from children
Arrays.fill(into._unsafeGetRawDataArray(), Mapper.AIR);
@@ -46,7 +48,7 @@ public class SectionSerializationStorage extends SectionStorage {
@Override
public void saveSection(WorldSection section) {
var saveData = SaveLoadSystem.serialize(section);
var saveData = SaveLoadSystem3.serialize(section);
this.backend.setSectionData(section.key, saveData);
saveData.free();
}
@@ -71,6 +73,11 @@ public class SectionSerializationStorage extends SectionStorage {
this.backend.close();
}
@Override
public void iterateStoredSectionPositions(LongConsumer consumer) {
this.backend.iterateStoredSectionPositions(consumer);
}
public static class Config extends SectionStorageConfig {
public StorageConfig storage;

View File

@@ -10,7 +10,6 @@ import java.util.List;
import java.util.function.LongConsumer;
public abstract class StorageBackend implements IMappingStorage {
public abstract void iterateStoredSectionPositions(LongConsumer consumer);
//Implementation may use the scratch buffer as the return value, it MUST NOT free the scratch buffer
public abstract MemoryBuffer getSectionData(long key, MemoryBuffer scratch);

View File

@@ -34,7 +34,9 @@ public class FragmentedStorageBackendAdaptor extends StorageBackend {
@Override
public void iterateStoredSectionPositions(LongConsumer consumer) {
    // NOTE(review): the old `throw new IllegalStateException("Not yet implemented")` that
    // preceded this loop was removed — it made the delegation below unreachable
    // (an "unreachable statement" compile error in Java).
    // Fan out to every fragment backend; together they cover all stored sections.
    for (var backend : this.backends) {
        backend.iterateStoredSectionPositions(consumer);
    }
}
//TODO: reencode the key to be shifted one less OR

View File

@@ -21,6 +21,7 @@ public class RocksDBStorageBackend extends StorageBackend {
private final ColumnFamilyHandle worldSections;
private final ColumnFamilyHandle idMappings;
private final ReadOptions sectionReadOps;
private final WriteOptions sectionWriteOps;
//NOTE: closes in order
private final List<AbstractImmutableNativeReference> closeList = new ArrayList<>();
@@ -59,7 +60,8 @@ public class RocksDBStorageBackend extends StorageBackend {
final DBOptions options = new DBOptions()
.setCreateIfMissing(true)
.setCreateMissingColumnFamilies(true);
.setCreateMissingColumnFamilies(true)
.setMaxTotalWalSize(1024*1024*512);//512 mb max WAL size
List<ColumnFamilyHandle> handles = new ArrayList<>();
@@ -69,12 +71,14 @@ public class RocksDBStorageBackend extends StorageBackend {
handles);
this.sectionReadOps = new ReadOptions();
this.sectionWriteOps = new WriteOptions();
this.closeList.addAll(handles);
this.closeList.add(this.db);
this.closeList.add(options);
this.closeList.add(cfOpts);
this.closeList.add(this.sectionReadOps);
this.closeList.add(this.sectionWriteOps);
this.worldSections = handles.get(1);
this.idMappings = handles.get(2);
@@ -87,7 +91,19 @@ public class RocksDBStorageBackend extends StorageBackend {
@Override
public void iterateStoredSectionPositions(LongConsumer consumer) {
    // NOTE(review): removed the stale `throw new IllegalStateException("Not yet implemented")`
    // that preceded this body — it made everything below unreachable (a compile error).
    try (var stack = MemoryStack.stackPush()) {
        ByteBuffer keyBuff = stack.calloc(8);
        long keyBuffPtr = MemoryUtil.memAddress(keyBuff);
        // RocksIterator wraps a native resource; try-with-resources guarantees release
        // even if the consumer throws (the original leaked the iterator in that case).
        try (var iter = this.db.newIterator(this.worldSections, this.sectionReadOps)) {
            iter.seekToFirst();
            while (iter.isValid()) {
                iter.key(keyBuff);
                // setSectionData stores keys with Long.reverseBytes; undo that here so the
                // consumer sees the original section key. TODO confirm against setSectionData.
                long key = Long.reverseBytes(MemoryUtil.memGetLong(keyBuffPtr));
                consumer.accept(key);
                iter.next();
            }
        }
    }
}
@Override
@@ -117,10 +133,10 @@ public class RocksDBStorageBackend extends StorageBackend {
//TODO: FIXME, use the ByteBuffer variant
@Override
public void setSectionData(long key, MemoryBuffer data) {
try {
var buffer = new byte[(int) data.size];
UnsafeUtil.memcpy(data.address, buffer);
this.db.put(this.worldSections, longToBytes(key), buffer);
try (var stack = MemoryStack.stackPush()) {
var keyBuff = stack.calloc(8);
MemoryUtil.memPutLong(MemoryUtil.memAddress(keyBuff), Long.reverseBytes(key));
this.db.put(this.worldSections, this.sectionWriteOps, keyBuff, data.asByteBuffer());
} catch (RocksDBException e) {
throw new RuntimeException(e);
}

View File

@@ -18,6 +18,10 @@ public class ThreadLocalMemoryBuffer {
this.threadLocal = ThreadLocal.withInitial(()->createMemoryBuffer(size));
}
public static MemoryBuffer create(long size) {
return createMemoryBuffer(size);
}
public MemoryBuffer get() {
return this.threadLocal.get();
}

View File

@@ -30,22 +30,22 @@ public class SaveLoadSystem {
return x|(y<<10)|(z<<5);
}
private static final ThreadLocal<short[]> SHORT_CACHE = ThreadLocal.withInitial(()->new short[32*32*32]);
private static final ThreadLocal<long[]> LONG_CACHE = ThreadLocal.withInitial(()->new long[32*32*32]);
private static final ThreadLocal<Long2ShortOpenHashMap> OTHER_THING_CACHE = ThreadLocal.withInitial(()-> {
var thing = new Long2ShortOpenHashMap(512);
thing.defaultReturnValue((short) -1);
return thing;
});
private record SerializationCache(long[] blockStateCache, short[] compressedCache, long[] lutCache, Long2ShortOpenHashMap lutMapCache) {
public SerializationCache() {
this(new long[WorldSection.SECTION_VOLUME], new short[WorldSection.SECTION_VOLUME], new long[WorldSection.SECTION_VOLUME], new Long2ShortOpenHashMap(512));
this.lutMapCache.defaultReturnValue((short) -1);
}
}
private static final ThreadLocal<SerializationCache> CACHE = ThreadLocal.withInitial(SerializationCache::new);
//TODO: Cache like long2short and the short and other data to stop allocs
public static MemoryBuffer serialize(WorldSection section) {
var data = section.copyData();
var compressed = SHORT_CACHE.get();
Long2ShortOpenHashMap LUT = OTHER_THING_CACHE.get();LUT.clear();
long[] lutValues = LONG_CACHE.get();//If there are more than this many states in a section... im concerned
var cache = CACHE.get();
var data = cache.blockStateCache;
section.copyDataTo(data);
var compressed = cache.compressedCache;
Long2ShortOpenHashMap LUT = cache.lutMapCache; LUT.clear();
long[] lutValues = cache.lutCache;//If there are more than this many states in a section... im concerned
short lutIndex = 0;
long pHash = 99;
for (int i = 0; i < data.length; i++) {
@@ -103,7 +103,7 @@ public class SaveLoadSystem {
throw new IllegalStateException("lutLen impossibly large, max size should be 32768 but got size " + lutLen);
}
//TODO: cache this in a thread local
long[] lut = LONG_CACHE.get();
long[] lut = CACHE.get().lutCache;
long hash = 0;
if (VERIFY_HASH_ON_LOAD) {
hash = key ^ (lutLen * 1293481298141L);

View File

@@ -0,0 +1,106 @@
package me.cortex.voxy.common.world;
import it.unimi.dsi.fastutil.longs.Long2ShortOpenHashMap;
import me.cortex.voxy.common.Logger;
import me.cortex.voxy.common.util.MemoryBuffer;
import me.cortex.voxy.common.util.ThreadLocalMemoryBuffer;
import me.cortex.voxy.common.util.UnsafeUtil;
import me.cortex.voxy.common.world.other.Mapper;
import me.cortex.voxy.commonImpl.VoxyCommon;
import org.lwjgl.system.MemoryUtil;
/**
 * Version-3 world-section serializer (replacement for {@code SaveLoadSystem}).
 *
 * Serialized layout, as written by {@link #serialize}:
 * <pre>
 *   [0..8)                      section key (long)
 *   [8..16)                     metadata (long): bits 0-15 = LUT entry count,
 *                               bits 16-23 = nonEmptyChildren, upper 5 bytes unused
 *   [16..16+2*SECTION_VOLUME)   one unsigned-short LUT index per block, in linear order
 *   [... +8*lutCount)           the LUT: one long block-state id per distinct state
 * </pre>
 * Byte order is whatever {@code MemoryUtil.memPutLong/Short} uses natively; the data is
 * only read back through the same API in {@link #deserialize}.
 */
public class SaveLoadSystem3 {
// Per-thread scratch state so (de)serialization does no per-call heap allocation.
//  blockStateCache - staging copy of the section's block data
//  lutMapCache     - block-state -> LUT index; default return of -1 marks "not yet assigned"
//  memoryBuffer    - scratch output sized for the worst case (header + all indices +
//                    every block state distinct) plus 1KiB slack
private record SerializationCache(long[] blockStateCache, Long2ShortOpenHashMap lutMapCache, MemoryBuffer memoryBuffer) {
public SerializationCache() {
this(new long[WorldSection.SECTION_VOLUME],
new Long2ShortOpenHashMap(512),
ThreadLocalMemoryBuffer.create(WorldSection.SECTION_VOLUME*2+WorldSection.SECTION_VOLUME*8+1024));
this.lutMapCache.defaultReturnValue((short) -1);
}
}
// Converts a linear block index (packed y<<10 | z<<5 | x, 5 bits per axis) into a 15-bit
// z-order/Morton interleave (bit pattern ...zyxzyx). Not referenced inside this class —
// presumably used by callers of the new on-disk ordering; TODO confirm.
public static int lin2z(int i) {//y,z,x
int x = i&0x1F;
int y = (i>>10)&0x1F;
int z = (i>>5)&0x1F;
return Integer.expand(x,0b1001001001001)|Integer.expand(y,0b10010010010010)|Integer.expand(z,0b100100100100100);
//zyxzyxzyxzyxzyx
}
// Inverse of lin2z: de-interleaves a Morton index back to the linear y<<10 | z<<5 | x packing.
public static int z2lin(int i) {
int x = Integer.compress(i, 0b1001001001001);
int y = Integer.compress(i, 0b10010010010010);
int z = Integer.compress(i, 0b100100100100100);
return x|(y<<10)|(z<<5);
}
private static final ThreadLocal<SerializationCache> CACHE = ThreadLocal.withInitial(SerializationCache::new);
//TODO: Cache like long2short and the short and other data to stop allocs
/**
 * Serializes {@code section} into a freshly allocated {@link MemoryBuffer} in the layout
 * described on the class. The returned buffer is a trimmed {@code copy()} of the shared
 * thread-local scratch buffer and is owned by the caller (callers free it, see
 * {@code SectionSerializationStorage.saveSection}).
 */
public static MemoryBuffer serialize(WorldSection section) {
var cache = CACHE.get();
var data = cache.blockStateCache;
section.copyDataTo(data);
Long2ShortOpenHashMap LUT = cache.lutMapCache; LUT.clear();
MemoryBuffer buffer = cache.memoryBuffer().createUntrackedUnfreeableReference();
long ptr = buffer.address;
MemoryUtil.memPutLong(ptr, section.key); ptr += 8;
// Metadata is written last, once the LUT size is known; reserve its slot now.
long metadataPtr = ptr; ptr += 8;
// Reserve the fixed-size index area; the variable-size LUT longs are appended after it via ptr.
long blockPtr = ptr; ptr += WorldSection.SECTION_VOLUME*2;
for (int i = 0; i < WorldSection.SECTION_VOLUME; i++) {
long block = data[i];
// putIfAbsent returns the map default (-1) when the state was not yet assigned an id.
// NOTE(review): an id of 0xFFFF would collide with the -1 sentinel; this looks
// unreachable since a section cannot hold that many distinct states (SECTION_VOLUME
// appears to be 32*32*32 = 32768 per the old SaveLoadSystem caches) — TODO confirm.
short mapping = LUT.putIfAbsent(block, (short) LUT.size());
if (mapping == -1) {
mapping = (short) (LUT.size()-1);
MemoryUtil.memPutLong(ptr, block); ptr+=8;
}
MemoryUtil.memPutShort(blockPtr, mapping); blockPtr+=2;
}
// Sanity check: LUT ids are stored as unsigned shorts, so >= 65536 entries cannot be encoded.
if (LUT.size() >= 1<<16) {
throw new IllegalStateException();
}
long metadata = 0;
metadata |= Integer.toUnsignedLong(LUT.size());//Bottom 2 bytes
metadata |= Byte.toUnsignedLong(section.getNonEmptyChildren())<<16;//Next byte
//5 bytes free
MemoryUtil.memPutLong(metadataPtr, metadata);
//TODO: do hash
//TODO: rework the storage system to not need to do useless copies like this (this is an issue for serialization, deserialization has solved this already)
return buffer.subSize(ptr-buffer.address).copy();
}
/**
 * Deserializes {@code data} (in the layout described on the class) directly into
 * {@code section}, restoring nonEmptyChildren and recomputing nonEmptyBlockCount.
 *
 * @return false if the embedded key does not match {@code section.key} (corrupt or
 *         misplaced entry) — the section's data is untouched in that case; true otherwise.
 */
public static boolean deserialize(WorldSection section, MemoryBuffer data) {
long ptr = data.address;
long key = MemoryUtil.memGetLong(ptr); ptr += 8;
if (section.key != key) {
//throw new IllegalStateException("Decompressed section not the same as requested. got: " + key + " expected: " + section.key);
Logger.error("Decompressed section not the same as requested. got: " + key + " expected: " + section.key);
return false;
}
long metadata = MemoryUtil.memGetLong(ptr); ptr += 8;
section.nonEmptyChildren = (byte) ((metadata>>>16)&0xFF);
int nonEmptyBlockCount = 0;
// The LUT sits immediately after the fixed-size index area.
long lutBasePtr = ptr + WorldSection.SECTION_VOLUME*2;
var blockData = section.data;
for (int i = 0; i < WorldSection.SECTION_VOLUME; i++) {
short lutId = MemoryUtil.memGetShort(ptr); ptr+=2;
long blockId = MemoryUtil.memGetLong(lutBasePtr+Short.toUnsignedLong(lutId)*8L);
nonEmptyBlockCount += Mapper.isAir(blockId)?0:1;
blockData[i] = blockId;
}
section.nonEmptyBlockCount = nonEmptyBlockCount;
// Dead store: advances past the LUT (metadata low 16 bits = entry count), presumably in
// preparation for reading a trailing hash once the "do hash" TODO in serialize lands.
ptr = lutBasePtr + (metadata&0xFFFF)*8L;
return true;
}
}

View File

@@ -106,6 +106,9 @@ public class WorldEngine {
public void markDirty(WorldSection section, int changeState) {
if (!this.isLive) throw new IllegalStateException("World is not live");
if (section.tracker != this.sectionTracker) {
throw new IllegalStateException("Section is not from here");
}
if (this.dirtyCallback != null) {
this.dirtyCallback.accept(section, changeState);
}

View File

@@ -54,7 +54,7 @@ public final class WorldSection {
volatile int nonEmptyBlockCount = 0;
volatile byte nonEmptyChildren;
private final ActiveSectionTracker tracker;
final ActiveSectionTracker tracker;
public final AtomicBoolean inSaveQueue = new AtomicBoolean();
//When the first bit is set it means its loaded