who knows if this even works (it very probably doesnt)

mcrcortex
2025-12-14 23:45:24 +10:00
parent c7cf4a74d5
commit a7ea5b2f99


@@ -3,6 +3,7 @@ package me.cortex.voxy.commonImpl.importers;
 import me.cortex.voxy.common.Logger;
 import me.cortex.voxy.common.thread.Service;
 import me.cortex.voxy.common.thread.ServiceManager;
+import me.cortex.voxy.common.util.ByteBufferBackedInputStream;
 import me.cortex.voxy.common.util.Pair;
 import me.cortex.voxy.common.voxelization.VoxelizedSection;
 import me.cortex.voxy.common.voxelization.WorldConversionFactory;
@@ -20,6 +21,8 @@ import net.minecraft.world.level.block.Block;
 import net.minecraft.world.level.block.Blocks;
 import net.minecraft.world.level.block.state.BlockState;
 import org.apache.commons.io.IOUtils;
+import org.lwjgl.system.MemoryUtil;
+import org.lwjgl.util.zstd.Zstd;
 import org.tukaani.xz.BasicArrayCache;
 import org.tukaani.xz.ResettableArrayCache;
 import org.tukaani.xz.XZInputStream;
@@ -30,7 +33,9 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.lang.invoke.MethodHandles;
 import java.lang.invoke.VarHandle;
+import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.nio.channels.Channels;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
@@ -62,9 +67,32 @@ public class DHImporter implements IDataImporter {
         }
     }
     private final ConcurrentLinkedDeque<Task> tasks = new ConcurrentLinkedDeque<>();
-    private record WorkCTX(PreparedStatement stmt, ResettableArrayCache cache, long[] storageCache, byte[] colScratch, VoxelizedSection section) {
+    private static final class WorkCTX {
+        private final PreparedStatement stmt;
+        private final ResettableArrayCache cache;
+        private final long[] storageCache;
+        private final byte[] colScratch;
+        private final VoxelizedSection section;
+        private ByteBuffer zstdScratch;
+        private ByteBuffer zstdScratch2;
+        private final long zstdDCtx;
         public WorkCTX(PreparedStatement stmt, int worldHeight) {
-            this(stmt, new ResettableArrayCache(new BasicArrayCache()), new long[64*16*worldHeight], new byte[1<<16], VoxelizedSection.createEmpty());
+            this.stmt = stmt;
+            this.cache = new ResettableArrayCache(new BasicArrayCache());
+            this.storageCache = new long[64*16*worldHeight];
+            this.colScratch = new byte[1<<16];
+            this.section = VoxelizedSection.createEmpty();
+            this.zstdDCtx = Zstd.ZSTD_createDCtx();
         }
+        public void free() {
+            if (this.zstdScratch != null) {
+                MemoryUtil.memFree(this.zstdScratch);
+                MemoryUtil.memFree(this.zstdScratch2);
+                Zstd.ZSTD_freeDCtx(this.zstdDCtx);
+            }
+        }
     }
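
The record-to-class change above exists so that WorkCTX can own mutable native state: two lazily allocated off-heap scratch ByteBuffers and a zstd decompression context, all of which must be freed explicitly. As a rough illustration only (not code from this commit, and with invented names), the same pattern can be written as an AutoCloseable holder so the native context and buffers cannot leak:

// Illustrative sketch: lazily grown native scratch plus a reusable zstd DCtx,
// released deterministically via try-with-resources.
import org.lwjgl.system.MemoryUtil;
import org.lwjgl.util.zstd.Zstd;

import java.nio.ByteBuffer;

final class NativeScratch implements AutoCloseable {
    private final long dctx = Zstd.ZSTD_createDCtx(); // native decompression context
    private ByteBuffer scratch; // allocated on first use, grown on demand

    ByteBuffer scratch(int minCapacity) {
        if (this.scratch == null || this.scratch.capacity() < minCapacity) {
            if (this.scratch != null) MemoryUtil.memFree(this.scratch);
            this.scratch = MemoryUtil.memAlloc(minCapacity);
        }
        this.scratch.clear();
        return this.scratch;
    }

    long dctx() { return this.dctx; }

    @Override
    public void close() {
        if (this.scratch != null) MemoryUtil.memFree(this.scratch);
        Zstd.ZSTD_freeDCtx(this.dctx);
    }
}
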
@@ -92,6 +120,7 @@ public class DHImporter implements IDataImporter {
         return new Pair<>(()->{
             this.importSection(dataFetchStmt, ctx, this.tasks.poll());
         },()->{
+            ctx.free();
             try {
                 dataFetchStmt.close();
             } catch (SQLException e) {
@@ -123,7 +152,7 @@ public class DHImporter implements IDataImporter {
                 Logger.warn("Unknown format mode: " + format);
                 continue;
             }
-            if (compression != 3) {
+            if (compression != 3 && compression != 4) {
                 Logger.warn("Unknown compression mode: " + compression);
                 continue;
             }
@@ -166,13 +195,6 @@ public class DHImporter implements IDataImporter {
         this.runner.start();
     }
-    private static void readStream(InputStream in, ResettableArrayCache cache, byte[] into) throws IOException {
-        cache.reset();
-        var stream = new XZInputStream(IOUtils.toBufferedInputStream(in), cache);
-        stream.read(into);
-        stream.close();
-    }
     private static String getSerialBlockState(BlockState state) {
         var props = new ArrayList<>(state.getProperties());
         props.sort((a, b) -> a.getName().compareTo(b.getName()));
@@ -191,8 +213,7 @@ public class DHImporter implements IDataImporter {
     private long[] readMappings(InputStream in, WorkCTX ctx) throws IOException {
         final String BLOCK_STATE_SEPARATOR_STRING = "_DH-BSW_";
         final String STATE_STRING_SEPARATOR = "_STATE_";
-        ctx.cache.reset();
-        var stream = new DataInputStream(new XZInputStream(IOUtils.toBufferedInputStream(in), ctx.cache));
+        var stream = new DataInputStream(in);
         int entries = stream.readInt();
         if (entries < 0)
             throw new IllegalStateException();
@@ -271,11 +292,48 @@ public class DHImporter implements IDataImporter {
         return (int)((dp>>>(32+12+12+4))&0xF);
     }
+    private static InputStream createDecompressedStream(int decompressor, InputStream in, WorkCTX ctx) throws IOException {
+        if (decompressor == 3) {
+            ctx.cache.reset();
+            return new XZInputStream(IOUtils.toBufferedInputStream(in), -1, false, ctx.cache);
+        } else if (decompressor == 4) {
+            if (ctx.zstdScratch == null) {
+                ctx.zstdScratch = MemoryUtil.memAlloc(8196);
+                ctx.zstdScratch2 = MemoryUtil.memAlloc(8196);
+            }
+            ctx.zstdScratch.clear();
+            ctx.zstdScratch2.clear();
+            try(var channel = Channels.newChannel(in)) {
+                while (IOUtils.read(channel, ctx.zstdScratch) == 0) {
+                    var newBuffer = MemoryUtil.memAlloc(ctx.zstdScratch.position()*2);
+                    newBuffer.put(ctx.zstdScratch.rewind());
+                    MemoryUtil.memFree(ctx.zstdScratch);
+                    ctx.zstdScratch = newBuffer;
+                }
+            }
+            ctx.zstdScratch.limit(ctx.zstdScratch.position()).rewind();
+            {
+                int decompSize = (int) Zstd.ZSTD_getFrameContentSize(ctx.zstdScratch);
+                if (ctx.zstdScratch2.capacity() < decompSize) {
+                    MemoryUtil.memFree(ctx.zstdScratch2);
+                    ctx.zstdScratch2 = MemoryUtil.memAlloc((int) (decompSize * 1.1));
+                }
+            }
+            long size = Zstd.ZSTD_decompressDCtx(ctx.zstdDCtx, ctx.zstdScratch, ctx.zstdScratch2);
+            if (Zstd.ZSTD_isError(size)) {
+                throw new IllegalStateException("ZSTD EXCEPTION: " + Zstd.ZSTD_getErrorName(size));
+            }
+            ctx.zstdScratch2.limit((int) size);
+            return new ByteBufferBackedInputStream(ctx.zstdScratch2);
+        } else {
+            throw new IllegalArgumentException("Unknown compressor " + decompressor);
+        }
+    }
     //TODO: create VoxelizedSection of 32*32*32
     private void readColumnData(int X, int Z, InputStream in, WorkCTX ctx, long[] mapping) throws IOException {
-        ctx.cache.reset();
-        //TODO: add datacache betweein XZ input stream
-        var stream = new DataInputStream(new XZInputStream(IOUtils.toBufferedInputStream(in), -1, false, ctx.cache));
+        var stream = new DataInputStream(in);
         long[] storage = ctx.storageCache;
         VoxelizedSection section = ctx.section;
         byte[] col = ctx.colScratch;
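
For reference, a minimal, self-contained sketch of single-frame zstd decompression with the LWJGL binding used above (ZSTD_decompressDCtx takes the destination buffer first, then the source), assuming the compressed blob is one complete frame whose header records its content size; the class and method names are invented for the example:

// Sketch only: decompress one complete zstd frame held in off-heap memory.
import org.lwjgl.system.MemoryUtil;
import org.lwjgl.util.zstd.Zstd;

import java.nio.ByteBuffer;

final class ZstdOneShot {
    // Returns a freshly allocated buffer that the caller must MemoryUtil.memFree().
    static ByteBuffer decompress(long dctx, ByteBuffer compressed) {
        long contentSize = Zstd.ZSTD_getFrameContentSize(compressed);
        if (contentSize < 0) { // ZSTD_CONTENTSIZE_UNKNOWN / ZSTD_CONTENTSIZE_ERROR
            throw new IllegalStateException("Frame does not declare its content size");
        }
        ByteBuffer dst = MemoryUtil.memAlloc((int) contentSize);
        long written = Zstd.ZSTD_decompressDCtx(dctx, dst, compressed); // dst first, src second
        if (Zstd.ZSTD_isError(written)) {
            MemoryUtil.memFree(dst);
            throw new IllegalStateException("ZSTD: " + Zstd.ZSTD_getErrorName(written));
        }
        dst.limit((int) written); // expose only the decompressed bytes
        return dst;
    }
}

The decompressed buffer can then be wrapped in a stream, as the commit does with ByteBufferBackedInputStream, and handed to readMappings or readColumnData.
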
@@ -345,10 +403,10 @@ public class DHImporter implements IDataImporter {
             dataFetchStmt.setInt(1, task.x);
             dataFetchStmt.setInt(2, task.z);
             try (var rs = dataFetchStmt.executeQuery()) {
-                var mapping = readMappings(rs.getBinaryStream(3), ctx);
+                var mapping = readMappings(createDecompressedStream(task.compression, rs.getBinaryStream(3), ctx), ctx);
                 //var columnGenStep = new byte[64*64];
                 //readStream(rs.getBinaryStream(2), cache, columnGenStep);
-                readColumnData(task.x, task.z, rs.getBinaryStream(1), ctx, mapping);
+                readColumnData(task.x, task.z, createDecompressedStream(task.compression, rs.getBinaryStream(1), ctx), ctx, mapping);
             };
         } catch (SQLException | IOException e) {
             throw new RuntimeException(e);